From e4662ef5f2e05c6cd4c04adefb0b502183fd51c6 Mon Sep 17 00:00:00 2001
From: shogo4405
Date: Mon, 9 Oct 2023 08:25:09 +0900
Subject: [PATCH] Update Example iOS.

---
 ...oller.swift => IngestViewController.swift} | 156 ++++++------------
 Examples/iOS/Main.storyboard                  |   7 +-
 Examples/iOS/NetStreamSwitcher.swift          | 113 +++++++++++++
 HaishinKit.xcodeproj/project.pbxproj          |  14 +-
 SRTHaishinKit/SRTStream.swift                 |   2 +-
 Sources/Net/NetStream.swift                   |  13 ++
 Sources/RTMP/RTMPStream.swift                 |  11 --
 7 files changed, 192 insertions(+), 124 deletions(-)
 rename Examples/iOS/{LiveViewController.swift => IngestViewController.swift} (68%)
 create mode 100644 Examples/iOS/NetStreamSwitcher.swift

diff --git a/Examples/iOS/LiveViewController.swift b/Examples/iOS/IngestViewController.swift
similarity index 68%
rename from Examples/iOS/LiveViewController.swift
rename to Examples/iOS/IngestViewController.swift
index 8c5f6008a..b892166d9 100644
--- a/Examples/iOS/LiveViewController.swift
+++ b/Examples/iOS/IngestViewController.swift
@@ -4,9 +4,7 @@ import Photos
 import UIKit
 import VideoToolbox
 
-final class LiveViewController: UIViewController {
-    private static let maxRetryCount: Int = 5
-
+final class IngestViewController: UIViewController {
     @IBOutlet private weak var currentFPSLabel: UILabel!
     @IBOutlet private weak var publishButton: UIButton!
     @IBOutlet private weak var pauseButton: UIButton!
@@ -21,13 +19,14 @@ final class LiveViewController: UIViewController {
     @IBOutlet private weak var audioMonoStereoSegmentCOntrol: UISegmentedControl!
 
     private var pipIntentView = UIView()
-    private var rtmpConnection = RTMPConnection()
-    private var rtmpStream: RTMPStream!
-    private var sharedObject: RTMPSharedObject!
     private var currentEffect: VideoEffect?
     private var currentPosition: AVCaptureDevice.Position = .back
    private var retryCount: Int = 0
     private var preferedStereo = false
+    private let netStreamSwitcher: NetStreamSwitcher = .init()
+    private var stream: NetStream {
+        return netStreamSwitcher.stream
+    }
     private lazy var audioCapture: AudioCapture = {
         let audioCapture = AudioCapture()
         audioCapture.delegate = self
@@ -37,23 +36,22 @@ final class LiveViewController: UIViewController {
 
     override func viewDidLoad() {
         super.viewDidLoad()
+        netStreamSwitcher.uri = Preference.defaultInstance.uri ?? ""
+
         pipIntentView.layer.borderWidth = 1.0
         pipIntentView.layer.borderColor = UIColor.white.cgColor
         pipIntentView.bounds = MultiCamCaptureSettings.default.regionOfInterest
         pipIntentView.isUserInteractionEnabled = true
         view.addSubview(pipIntentView)
-        rtmpConnection.delegate = self
-
-        rtmpStream = RTMPStream(connection: rtmpConnection)
         if let orientation = DeviceUtil.videoOrientation(by: UIApplication.shared.statusBarOrientation) {
-            rtmpStream.videoOrientation = orientation
+            stream.videoOrientation = orientation
         }
-        rtmpStream.isMonitoringEnabled = DeviceUtil.isHeadphoneConnected()
-        rtmpStream.audioSettings.bitRate = 64 * 1000
-        rtmpStream.bitrateStrategy = VideoAdaptiveNetBitRateStrategy(mamimumVideoBitrate: VideoCodecSettings.default.bitRate)
-        rtmpStream.mixer.recorder.delegate = self
+        stream.isMonitoringEnabled = DeviceUtil.isHeadphoneConnected()
+        stream.audioSettings.bitRate = 64 * 1000
+        stream.bitrateStrategy = VideoAdaptiveNetBitRateStrategy(mamimumVideoBitrate: VideoCodecSettings.default.bitRate)
+        stream.mixer.recorder.delegate = self
         videoBitrateSlider?.value = Float(VideoCodecSettings.default.bitRate) / 1000
         audioBitrateSlider?.value = Float(AudioCodecSettings.default.bitRate) / 1000
@@ -68,17 +66,17 @@
         // If you're using multi-camera functionality, please make sure to call the attachMultiCamera method first. This is required for iOS 14 and 15, among others.
         if #available(iOS 13.0, *) {
             let front = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
-            rtmpStream.videoCapture(for: 1)?.isVideoMirrored = true
-            rtmpStream.attachMultiCamera(front)
+            stream.videoCapture(for: 1)?.isVideoMirrored = true
+            stream.attachMultiCamera(front)
         }
-        rtmpStream.attachCamera(back) { error in
+        stream.attachCamera(back) { error in
             logger.warn(error)
         }
-        rtmpStream.attachAudio(AVCaptureDevice.default(for: .audio), automaticallyConfiguresApplicationAudioSession: false) { error in
+        stream.attachAudio(AVCaptureDevice.default(for: .audio), automaticallyConfiguresApplicationAudioSession: false) { error in
             logger.warn(error)
         }
-        rtmpStream.addObserver(self, forKeyPath: "currentFPS", options: .new, context: nil)
-        (view as? (any NetStreamDrawable))?.attachStream(rtmpStream)
+        stream.addObserver(self, forKeyPath: "currentFPS", options: .new, context: nil)
+        (view as? (any NetStreamDrawable))?.attachStream(stream)
         NotificationCenter.default.addObserver(self, selector: #selector(didInterruptionNotification(_:)), name: AVAudioSession.interruptionNotification, object: nil)
         NotificationCenter.default.addObserver(self, selector: #selector(didRouteChangeNotification(_:)), name: AVAudioSession.routeChangeNotification, object: nil)
     }
@@ -86,12 +84,12 @@
     override func viewWillDisappear(_ animated: Bool) {
         logger.info("viewWillDisappear")
         super.viewWillDisappear(animated)
-        rtmpStream.removeObserver(self, forKeyPath: "currentFPS")
-        rtmpStream.close()
-        rtmpStream.attachAudio(nil)
-        rtmpStream.attachCamera(nil)
+        stream.removeObserver(self, forKeyPath: "currentFPS")
+        (stream as? RTMPStream)?.close()
+        stream.attachAudio(nil)
+        stream.attachCamera(nil)
         if #available(iOS 13.0, *) {
-            rtmpStream.attachMultiCamera(nil)
+            stream.attachMultiCamera(nil)
         }
         // swiftlint:disable:next notification_center_detachment
         NotificationCenter.default.removeObserver(self)
@@ -100,7 +98,7 @@
     // swiftlint:disable:next block_based_kvo
     override func observeValue(forKeyPath keyPath: String?, of object: Any?, change: [NSKeyValueChangeKey: Any]?, context: UnsafeMutableRawPointer?) {
         if Thread.isMainThread {
-            currentFPSLabel?.text = "\(rtmpStream.currentFPS)"
+            currentFPSLabel?.text = "\(stream.currentFPS)"
         }
     }
 
@@ -117,8 +115,8 @@
             currentFrame.origin.x += deltaX
             currentFrame.origin.y += deltaY
             pipIntentView.frame = currentFrame
-            rtmpStream.multiCamCaptureSettings = MultiCamCaptureSettings(
-                mode: rtmpStream.multiCamCaptureSettings.mode,
+            stream.multiCamCaptureSettings = MultiCamCaptureSettings(
+                mode: stream.multiCamCaptureSettings.mode,
                 cornerRadius: 16.0,
                 regionOfInterest: currentFrame,
                 direction: .east
@@ -129,13 +127,13 @@
     @IBAction func rotateCamera(_ sender: UIButton) {
         logger.info("rotateCamera")
         let position: AVCaptureDevice.Position = currentPosition == .back ? .front : .back
-        rtmpStream.videoCapture(for: 0)?.isVideoMirrored = position == .front
-        rtmpStream.attachCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: position)) { error in
+        stream.videoCapture(for: 0)?.isVideoMirrored = position == .front
+        stream.attachCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: position)) { error in
             logger.warn(error)
         }
         if #available(iOS 13.0, *) {
-            rtmpStream.videoCapture(for: 1)?.isVideoMirrored = currentPosition == .front
-            rtmpStream.attachMultiCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: currentPosition)) { error in
+            stream.videoCapture(for: 1)?.isVideoMirrored = currentPosition == .front
+            stream.attachMultiCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: currentPosition)) { error in
                 logger.warn(error)
             }
         }
@@ -143,21 +141,21 @@
     }
 
     @IBAction func toggleTorch(_ sender: UIButton) {
-        rtmpStream.torch.toggle()
+        stream.torch.toggle()
     }
 
     @IBAction func on(slider: UISlider) {
         if slider == audioBitrateSlider {
             audioBitrateLabel?.text = "audio \(Int(slider.value))/kbps"
-            rtmpStream.audioSettings.bitRate = Int(slider.value * 1000)
+            stream.audioSettings.bitRate = Int(slider.value * 1000)
         }
         if slider == videoBitrateSlider {
             videoBitrateLabel?.text = "video \(Int(slider.value))/kbps"
-            rtmpStream.bitrateStrategy = VideoAdaptiveNetBitRateStrategy(mamimumVideoBitrate: Int(slider.value * 1000))
+            stream.bitrateStrategy = VideoAdaptiveNetBitRateStrategy(mamimumVideoBitrate: Int(slider.value * 1000))
         }
         if slider == zoomSlider {
             let zoomFactor = CGFloat(slider.value)
-            guard let device = rtmpStream.videoCapture(for: 0)?.device, 1 <= zoomFactor && zoomFactor < device.activeFormat.videoMaxZoomFactor else {
+            guard let device = stream.videoCapture(for: 0)?.device, 1 <= zoomFactor && zoomFactor < device.activeFormat.videoMaxZoomFactor else {
                 return
             }
             do {
@@ -171,7 +169,7 @@
     }
 
     @IBAction func on(pause: UIButton) {
-        rtmpStream.paused.toggle()
+        (stream as? RTMPStream)?.paused.toggle()
     }
 
     @IBAction func on(close: UIButton) {
@@ -181,56 +179,22 @@
     @IBAction func on(publish: UIButton) {
         if publish.isSelected {
             UIApplication.shared.isIdleTimerDisabled = false
-            rtmpConnection.close()
-            rtmpConnection.removeEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
-            rtmpConnection.removeEventListener(.ioError, selector: #selector(rtmpErrorHandler), observer: self)
+            netStreamSwitcher.close()
             publish.setTitle("●", for: [])
         } else {
             UIApplication.shared.isIdleTimerDisabled = true
-            rtmpConnection.addEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
-            rtmpConnection.addEventListener(.ioError, selector: #selector(rtmpErrorHandler), observer: self)
-            rtmpConnection.connect(Preference.defaultInstance.uri!)
+            netStreamSwitcher.connect()
             publish.setTitle("■", for: [])
         }
         publish.isSelected.toggle()
     }
 
-    @objc
-    private func rtmpStatusHandler(_ notification: Notification) {
-        let e = Event.from(notification)
-        guard let data: ASObject = e.data as? ASObject, let code: String = data["code"] as? String else {
-            return
-        }
-        logger.info(code)
-        switch code {
-        case RTMPConnection.Code.connectSuccess.rawValue:
-            retryCount = 0
-            rtmpStream.publish(Preference.defaultInstance.streamName!)
-        // sharedObject!.connect(rtmpConnection)
-        case RTMPConnection.Code.connectFailed.rawValue, RTMPConnection.Code.connectClosed.rawValue:
-            guard retryCount <= LiveViewController.maxRetryCount else {
-                return
-            }
-            Thread.sleep(forTimeInterval: pow(2.0, Double(retryCount)))
-            rtmpConnection.connect(Preference.defaultInstance.uri!)
-            retryCount += 1
-        default:
-            break
-        }
-    }
-
-    @objc
-    private func rtmpErrorHandler(_ notification: Notification) {
-        logger.error(notification)
-        rtmpConnection.connect(Preference.defaultInstance.uri!)
-    }
-
     func tapScreen(_ gesture: UIGestureRecognizer) {
         if let gestureView = gesture.view, gesture.state == .ended {
             let touchPoint: CGPoint = gesture.location(in: gestureView)
             let pointOfInterest = CGPoint(x: touchPoint.x / gestureView.bounds.size.width, y: touchPoint.y / gestureView.bounds.size.height)
             guard
-                let device = rtmpStream.videoCapture(for: 0)?.device, device.isFocusPointOfInterestSupported else {
+                let device = stream.videoCapture(for: 0)?.device, device.isFocusPointOfInterestSupported else {
                 return
             }
             do {
@@ -264,11 +228,11 @@
     @IBAction private func onFPSValueChanged(_ segment: UISegmentedControl) {
         switch segment.selectedSegmentIndex {
         case 0:
-            rtmpStream.frameRate = 15
+            stream.frameRate = 15
         case 1:
-            rtmpStream.frameRate = 30
+            stream.frameRate = 30
         case 2:
-            rtmpStream.frameRate = 60
+            stream.frameRate = 60
         default:
             break
         }
@@ -276,15 +240,15 @@
 
     @IBAction private func onEffectValueChanged(_ segment: UISegmentedControl) {
         if let currentEffect: VideoEffect = currentEffect {
-            _ = rtmpStream.unregisterVideoEffect(currentEffect)
+            _ = stream.unregisterVideoEffect(currentEffect)
         }
         switch segment.selectedSegmentIndex {
         case 1:
             currentEffect = MonochromeEffect()
-            _ = rtmpStream.registerVideoEffect(currentEffect!)
+            _ = stream.registerVideoEffect(currentEffect!)
         case 2:
             currentEffect = PronamaEffect()
-            _ = rtmpStream.registerVideoEffect(currentEffect!)
+            _ = stream.registerVideoEffect(currentEffect!)
         default:
             break
         }
@@ -321,9 +285,9 @@
         }
         audioDevicePicker.reloadAllComponents()
         if DeviceUtil.isHeadphoneDisconnected(notification) {
-            rtmpStream.isMonitoringEnabled = false
+            stream.isMonitoringEnabled = false
         } else {
-            rtmpStream.isMonitoringEnabled = DeviceUtil.isHeadphoneConnected()
+            stream.isMonitoringEnabled = DeviceUtil.isHeadphoneConnected()
         }
     }
 
@@ -332,25 +296,11 @@
         guard let orientation = DeviceUtil.videoOrientation(by: UIApplication.shared.statusBarOrientation) else {
             return
         }
-        rtmpStream.videoOrientation = orientation
-    }
-}
-
-extension LiveViewController: RTMPConnectionDelegate {
-    func connection(_ connection: RTMPConnection, publishInsufficientBWOccured stream: RTMPStream) {
-    }
-
-    func connection(_ connection: RTMPConnection, publishSufficientBWOccured stream: RTMPStream) {
-    }
-
-    func connection(_ connection: RTMPConnection, updateStats stream: RTMPStream) {
-    }
-
-    func connection(_ connection: RTMPConnection, didClear stream: RTMPStream) {
+        stream.videoOrientation = orientation
     }
 }
 
-extension LiveViewController: IORecorderDelegate {
+extension IngestViewController: IORecorderDelegate {
     // MARK: IORecorderDelegate
     func recorder(_ recorder: IORecorder, errorOccured error: IORecorder.Error) {
         logger.error(error)
@@ -369,14 +319,14 @@ extension LiveViewController: IORecorderDelegate {
     }
 }
 
-extension LiveViewController: AudioCaptureDelegate {
+extension IngestViewController: AudioCaptureDelegate {
     // MARK: AudioCaptureDelegate
     func audioCapture(_ audioCapture: AudioCapture, buffer: AVAudioBuffer, time: AVAudioTime) {
-        rtmpStream.appendAudioBuffer(buffer, when: time)
+        stream.appendAudioBuffer(buffer, when: time)
     }
 }
 
-extension LiveViewController: UIPickerViewDelegate {
+extension IngestViewController: UIPickerViewDelegate {
     // MARK: UIPickerViewDelegate
     func pickerView(_ pickerView: UIPickerView, didSelectRow row: Int, inComponent component: Int) {
         let session = AVAudioSession.sharedInstance()
@@ -399,13 +349,13 @@ extension LiveViewController: UIPickerViewDelegate {
         } catch {
             logger.warn("can't set supported setPreferredDataSource")
         }
-        rtmpStream.attachAudio(AVCaptureDevice.default(for: .audio), automaticallyConfiguresApplicationAudioSession: false) { error in
+        stream.attachAudio(AVCaptureDevice.default(for: .audio), automaticallyConfiguresApplicationAudioSession: false) { error in
             logger.warn(error)
         }
     }
 }
 
-extension LiveViewController: UIPickerViewDataSource {
+extension IngestViewController: UIPickerViewDataSource {
     // MARK: UIPickerViewDataSource
     func numberOfComponents(in pickerView: UIPickerView) -> Int {
         return 1
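What the controller-side changes add up to: IngestViewController now talks only to NetStream, and every transport-specific detail (connection setup, event listeners, retry) lives behind NetStreamSwitcher. A minimal sketch of that pattern follows; the class name and the pared-down wiring are illustrative, not lines from this patch:

    import AVFoundation
    import HaishinKit
    import UIKit

    final class MinimalIngestViewController: UIViewController {
        private let netStreamSwitcher: NetStreamSwitcher = .init()
        // The controller only ever sees the transport-agnostic base class.
        private var stream: NetStream {
            netStreamSwitcher.stream
        }

        override func viewDidLoad() {
            super.viewDidLoad()
            // The URI scheme (rtmp://, srt://, http://) selects the transport.
            netStreamSwitcher.uri = Preference.defaultInstance.uri ?? ""
            stream.attachAudio(AVCaptureDevice.default(for: .audio), automaticallyConfiguresApplicationAudioSession: false) { error in
                logger.warn(error)
            }
            stream.attachCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)) { error in
                logger.warn(error)
            }
            (view as? (any NetStreamDrawable))?.attachStream(stream)
        }

        @IBAction private func on(publish: UIButton) {
            // connect()/close() replace the hand-rolled RTMPConnection handling removed above.
            if publish.isSelected {
                netStreamSwitcher.close()
            } else {
                netStreamSwitcher.connect()
            }
            publish.isSelected.toggle()
        }
    }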
diff --git a/Examples/iOS/Main.storyboard b/Examples/iOS/Main.storyboard
index e020e6b57..4008f75d2 100644
--- a/Examples/iOS/Main.storyboard
+++ b/Examples/iOS/Main.storyboard
@@ -1,9 +1,8 @@
 [XML hunk lost in extraction: the storyboard document header's version metadata is revised.]
@@ -12,7 +11,7 @@
 [XML hunk lost in extraction: per the rename above, the scene's customClass changes from LiveViewController to IngestViewController.]
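Before the new file itself, the key design point: a NetStreamSwitcher is driven entirely by its uri string, and the transport is chosen by plain substring checks on the scheme. A few hypothetical URIs and the mode they select under the logic below:

    let switcher = NetStreamSwitcher()
    switcher.uri = "srt://192.168.1.1:9998"    // mode == .srt  -> SRTConnection + SRTStream
    switcher.uri = "http://example.com/live"   // mode == .http -> HTTPStream
    switcher.uri = "rtmp://localhost/live"     // mode == .rtmp -> RTMPConnection + RTMPStream (the default)

Note that the check is uri.contains("http://"), so an https:// URI does not match and falls through to .rtmp as written.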
diff --git a/Examples/iOS/NetStreamSwitcher.swift b/Examples/iOS/NetStreamSwitcher.swift
new file mode 100644
index 000000000..a9586f23d
--- /dev/null
+++ b/Examples/iOS/NetStreamSwitcher.swift
@@ -0,0 +1,113 @@
+import Foundation
+import HaishinKit
+import SRTHaishinKit
+
+final class NetStreamSwitcher {
+    private static let maxRetryCount: Int = 5
+
+    enum Mode {
+        case rtmp
+        case srt
+        case http
+
+        func makeStream(_ switcher: NetStreamSwitcher) -> NetStream {
+            switch self {
+            case .rtmp:
+                let connection = RTMPConnection()
+                switcher.connection = connection
+                return RTMPStream(connection: connection)
+            case .srt:
+                let connection = SRTConnection()
+                switcher.connection = connection
+                return SRTStream(connection: connection)
+            case .http:
+                return HTTPStream()
+            }
+        }
+    }
+
+    var uri = "" {
+        didSet {
+            if uri.contains("http://") {
+                mode = .http
+                return
+            }
+            if uri.contains("srt://") {
+                mode = .srt
+                return
+            }
+            mode = .rtmp
+        }
+    }
+    private(set) var mode: Mode = .rtmp {
+        didSet {
+            stream = mode.makeStream(self)
+        }
+    }
+    private var retryCount = 0
+    private var connection: Any?
+    private(set) var stream: NetStream = .init()
+
+    func connect() {
+        switch mode {
+        case .rtmp:
+            guard let connection = connection as? RTMPConnection else {
+                return
+            }
+            connection.addEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
+            connection.addEventListener(.ioError, selector: #selector(rtmpErrorHandler), observer: self)
+            connection.connect(uri)
+        case .srt:
+            (connection as? SRTConnection)?.open(URL(string: uri))
+            (stream as? SRTStream)?.publish("")
+        case .http:
+            break
+        }
+    }
+
+    func close() {
+        switch mode {
+        case .rtmp:
+            guard let connection = connection as? RTMPConnection else {
+                return
+            }
+            connection.close()
+            connection.removeEventListener(.rtmpStatus, selector: #selector(rtmpStatusHandler), observer: self)
+            connection.removeEventListener(.ioError, selector: #selector(rtmpErrorHandler), observer: self)
+        case .srt:
+            (stream as? SRTStream)?.close()
+            (connection as? SRTConnection)?.close()
+        case .http:
+            break
+        }
+    }
+
+    @objc
+    private func rtmpStatusHandler(_ notification: Notification) {
+        let e = Event.from(notification)
+        guard let data: ASObject = e.data as? ASObject, let code: String = data["code"] as? String else {
+            return
+        }
+        logger.info(code)
+        switch code {
+        case RTMPConnection.Code.connectSuccess.rawValue:
+            retryCount = 0
+            (stream as? RTMPStream)?.publish(Preference.defaultInstance.streamName!)
+        case RTMPConnection.Code.connectFailed.rawValue, RTMPConnection.Code.connectClosed.rawValue:
+            guard retryCount <= NetStreamSwitcher.maxRetryCount else {
+                return
+            }
+            Thread.sleep(forTimeInterval: pow(2.0, Double(retryCount)))
+            (connection as? RTMPConnection)?.connect(uri)
+            retryCount += 1
+        default:
+            break
+        }
+    }
+
+    @objc
+    private func rtmpErrorHandler(_ notification: Notification) {
+        logger.error(notification)
+        (connection as? RTMPConnection)?.connect(Preference.defaultInstance.uri!)
+    }
+}
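Two behaviors of the new file are easy to miss when skimming. Setting uri replaces the stream instance (mode's didSet runs makeStream on every assignment), so any NetStreamDrawable has to be re-attached after the URI changes. And on connectFailed or connectClosed the RTMP path retries with exponential backoff; with maxRetryCount = 5, the delays produced by pow(2.0, Double(retryCount)) work out to:

    // retryCount:  0    1    2    3    4     5
    // sleep:       1 s  2 s  4 s  8 s  16 s  32 s   -> then the guard stops retrying
    // Thread.sleep blocks the thread that delivered the status event while it waits.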
diff --git a/HaishinKit.xcodeproj/project.pbxproj b/HaishinKit.xcodeproj/project.pbxproj
index 6fcaffdd3..0c72e960e 100644
--- a/HaishinKit.xcodeproj/project.pbxproj
+++ b/HaishinKit.xcodeproj/project.pbxproj
@@ -62,7 +62,7 @@
 		296897651CDB028C0074D5F0 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 296897421CDB01D20074D5F0 /* Assets.xcassets */; };
 		296897661CDB028C0074D5F0 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 2968974D1CDB01DD0074D5F0 /* LaunchScreen.storyboard */; };
 		296897671CDB02940074D5F0 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 296897411CDB01D20074D5F0 /* AppDelegate.swift */; };
-		296897681CDB02940074D5F0 /* LiveViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 296897441CDB01D20074D5F0 /* LiveViewController.swift */; };
+		296897681CDB02940074D5F0 /* IngestViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 296897441CDB01D20074D5F0 /* IngestViewController.swift */; };
 		2976077F20A89FBB00DCF24F /* RTMPMessageTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2976077E20A89FBB00DCF24F /* RTMPMessageTests.swift */; };
 		2976A47E1D48C5C700B53EF2 /* IORecorder.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2976A47D1D48C5C700B53EF2 /* IORecorder.swift */; };
 		2976A4861D4903C300B53EF2 /* DeviceUtil.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2976A4851D4903C300B53EF2 /* DeviceUtil.swift */; };
@@ -263,6 +263,7 @@
 		BCD63AE126FDF3500084842D /* Logboard.xcframework in Frameworks */ = {isa = PBXBuildFile; fileRef = BC34DFD125EBB12C005F975A /* Logboard.xcframework */; };
 		BCD63AE226FDF3500084842D /* Logboard.xcframework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = BC34DFD125EBB12C005F975A /* Logboard.xcframework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
 		BCD91C0D2A700FF50033F9E1 /* IOAudioRingBufferTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCD91C0C2A700FF50033F9E1 /* IOAudioRingBufferTests.swift */; };
+		BCE0E33D2AD369550082C16F /* NetStreamSwitcher.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCE0E33B2AD369410082C16F /* NetStreamSwitcher.swift */; };
 		BCFB355524FA27EA00DC5108 /* PlaybackViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCFB355324FA275600DC5108 /* PlaybackViewController.swift */; };
 		BCFB355A24FA40DD00DC5108 /* PlaybackContainerViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCFB355924FA40DD00DC5108 /* PlaybackContainerViewController.swift */; };
 		BCFC51FE2AAB420700014428 /* IOAudioResampler.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCFC51FD2AAB420700014428 /* IOAudioResampler.swift */; };
@@ -472,7 +473,7 @@
 		296897411CDB01D20074D5F0 /* AppDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
 		296897421CDB01D20074D5F0 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
 		296897431CDB01D20074D5F0 /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
-		296897441CDB01D20074D5F0 /* LiveViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = LiveViewController.swift; sourceTree = "<group>"; };
+		296897441CDB01D20074D5F0 /* IngestViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift;
path = IngestViewController.swift; sourceTree = "<group>"; };
 		296897461CDB01D20074D5F0 /* VisualEffect.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = VisualEffect.swift; sourceTree = "<group>"; };
 		2968974E1CDB01DD0074D5F0 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
 		2976077E20A89FBB00DCF24F /* RTMPMessageTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTMPMessageTests.swift; sourceTree = "<group>"; };
@@ -657,6 +658,7 @@
 		BCD63AB826FDF12A0084842D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
 		BCD63ABB26FDF12A0084842D /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = "<group>"; };
 		BCD91C0C2A700FF50033F9E1 /* IOAudioRingBufferTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOAudioRingBufferTests.swift; sourceTree = "<group>"; };
+		BCE0E33B2AD369410082C16F /* NetStreamSwitcher.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NetStreamSwitcher.swift; sourceTree = "<group>"; };
 		BCFB355324FA275600DC5108 /* PlaybackViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlaybackViewController.swift; sourceTree = "<group>"; };
 		BCFB355924FA40DD00DC5108 /* PlaybackContainerViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlaybackContainerViewController.swift; sourceTree = "<group>"; };
 		BCFC51FD2AAB420700014428 /* IOAudioResampler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOAudioResampler.swift; sourceTree = "<group>"; };
@@ -979,20 +981,21 @@
 		2968973F1CDB01AD0074D5F0 /* iOS */ = {
 			isa = PBXGroup;
 			children = (
-				29A39C801D85BEFA007C27E9 /* Screencast */,
 				296897411CDB01D20074D5F0 /* AppDelegate.swift */,
 				296897421CDB01D20074D5F0 /* Assets.xcassets */,
 				BC56765A2AD2AED800524F7E /* AudioCapture.swift */,
 				291F4E361CF206E200F59C51 /* Icon.png */,
 				296897431CDB01D20074D5F0 /* Info.plist */,
+				296897441CDB01D20074D5F0 /* IngestViewController.swift */,
 				2968974D1CDB01DD0074D5F0 /* LaunchScreen.storyboard */,
-				296897441CDB01D20074D5F0 /* LiveViewController.swift */,
 				29205CBD1E461F4E009D3FFF /* Main.storyboard */,
 				BC0F1FD42ACBD39600C326FF /* MemoryUsage.swift */,
+				BCE0E33B2AD369410082C16F /* NetStreamSwitcher.swift */,
 				BCFB355924FA40DD00DC5108 /* PlaybackContainerViewController.swift */,
 				BCFB355324FA275600DC5108 /* PlaybackViewController.swift */,
 				291468161E581C7D00E619BA /* Preference.swift */,
 				2950742E1E4620B7007F15A4 /* PreferenceViewController.swift */,
+				29A39C801D85BEFA007C27E9 /* Screencast */,
 				BC1BC9032AC80531009005D3 /* VideoAdaptiveNetBitRateStrategy.swift */,
 				296897461CDB01D20074D5F0 /* VisualEffect.swift */,
 			);
@@ -1875,7 +1878,8 @@
 				BCFB355A24FA40DD00DC5108 /* PlaybackContainerViewController.swift in Sources */,
 				BC0F1FD52ACBD39600C326FF /* MemoryUsage.swift in Sources */,
 				296897671CDB02940074D5F0 /* AppDelegate.swift in Sources */,
-				296897681CDB02940074D5F0 /* LiveViewController.swift in Sources */,
+				296897681CDB02940074D5F0 /* IngestViewController.swift in Sources */,
+				BCE0E33D2AD369550082C16F /* NetStreamSwitcher.swift in Sources */,
 				BCFB355524FA27EA00DC5108 /* PlaybackViewController.swift in Sources */,
 				2955F51F1D09EBAD004CC995 /* VisualEffect.swift in Sources */,
 				295074311E462105007F15A4 /* PreferenceViewController.swift in Sources */,
diff --git a/SRTHaishinKit/SRTStream.swift b/SRTHaishinKit/SRTStream.swift
index 5f8cdb959..5a1ff658c 100644
--- a/SRTHaishinKit/SRTStream.swift
+++ b/SRTHaishinKit/SRTStream.swift
@@ -67,7 +67,7 @@
     }
 
     /// Creates a new SRTStream object.
-    public init(_ connection: SRTConnection) {
+    public init(connection: SRTConnection) {
         super.init()
         self.connection = connection
         self.connection?.streams.append(self)
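The SRTStream change above is source-breaking for callers that construct the stream directly; migration is just the added argument label, which the new NetStreamSwitcher already uses. A sketch, assuming a caller that owns its own SRTConnection:

    let connection = SRTConnection()
    // Before this patch: let stream = SRTStream(connection)
    let stream = SRTStream(connection: connection)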
diff --git a/Sources/Net/NetStream.swift b/Sources/Net/NetStream.swift
index 51489cd7d..82cd9c6fe 100644
--- a/Sources/Net/NetStream.swift
+++ b/Sources/Net/NetStream.swift
@@ -186,6 +186,19 @@
         }
     }
 
+    /// The number of frames per second being displayed.
+    @objc public internal(set) dynamic var currentFPS: UInt16 = 0
+
+    /// Specifies the sound transform for audio playback.
+    public var soundTransform: SoundTransform {
+        get {
+            mixer.audioIO.soundTransform
+        }
+        set {
+            mixer.audioIO.soundTransform = newValue
+        }
+    }
+
     /// Creates a NetStream object.
     override public init() {
         super.init()
diff --git a/Sources/RTMP/RTMPStream.swift b/Sources/RTMP/RTMPStream.swift
index 826adbe5c..5d491fdfa 100644
--- a/Sources/RTMP/RTMPStream.swift
+++ b/Sources/RTMP/RTMPStream.swift
@@ -168,17 +168,6 @@
     public internal(set) var info = RTMPStreamInfo()
     /// The object encoding (AMF). Framework supports AMF0 only.
     public private(set) var objectEncoding: RTMPObjectEncoding = RTMPConnection.defaultObjectEncoding
-    /// The number of frames per second being displayed.
-    @objc public private(set) dynamic var currentFPS: UInt16 = 0
-    /// Specifies the controls sound.
-    public var soundTransform: SoundTransform {
-        get {
-            mixer.audioIO.soundTransform
-        }
-        set {
-            mixer.audioIO.soundTransform = newValue
-        }
-    }
     /// Incoming audio plays on the stream or not.
     public var receiveAudio = true {
         didSet {
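With currentFPS and soundTransform hoisted from RTMPStream up to NetStream in the last two files, FPS display and volume control can be written once for every stream type. A minimal sketch, assuming block-based KVO (the example controller itself uses the string-based addObserver variant) and SoundTransform's volume field:

    import HaishinKit

    var fpsObservation: NSKeyValueObservation?

    func startMonitoring(_ stream: NetStream) {
        // currentFPS is still @objc dynamic, so KVO works for RTMP, SRT, and HTTP streams alike.
        fpsObservation = stream.observe(\.currentFPS, options: [.new]) { _, change in
            print("currentFPS:", change.newValue ?? 0)
        }
        // soundTransform is likewise available on every NetStream.
        var transform = stream.soundTransform
        transform.volume = 0.5
        stream.soundTransform = transform
    }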