diff --git a/Examples/iOS/IngestViewController.swift b/Examples/iOS/IngestViewController.swift index 7ac860e8e..ee3e637aa 100644 --- a/Examples/iOS/IngestViewController.swift +++ b/Examples/iOS/IngestViewController.swift @@ -44,10 +44,11 @@ final class IngestViewController: UIViewController { pipIntentView.isUserInteractionEnabled = true view.addSubview(pipIntentView) + // If you're using the multi-camera feature, make sure to set isMultiCamSessionEnabled = true at this initialization point, before any camera is attached. + stream.isMultiCamSessionEnabled = true if let orientation = DeviceUtil.videoOrientation(by: UIApplication.shared.statusBarOrientation) { stream.videoOrientation = orientation } - stream.isMonitoringEnabled = DeviceUtil.isHeadphoneConnected() stream.audioSettings.bitRate = 64 * 1000 stream.bitrateStrategy = VideoAdaptiveNetBitRateStrategy(mamimumVideoBitrate: VideoCodecSettings.default.bitRate) @@ -62,18 +63,19 @@ final class IngestViewController: UIViewController { super.viewWillAppear(animated) let back = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: currentPosition) - // If you're using multi-camera functionality, please make sure to call the attachMultiCamera method first. This is required for iOS 14 and 15, among others. - if #available(iOS 13.0, *) { - let front = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) - stream.videoCapture(for: 1)?.isVideoMirrored = true - stream.attachMultiCamera(front) - } stream.attachCamera(back) { error in logger.warn(error) } stream.attachAudio(AVCaptureDevice.default(for: .audio), automaticallyConfiguresApplicationAudioSession: false) { error in logger.warn(error) } + + if #available(iOS 13.0, *) { + let front = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) + stream.videoCapture(for: 1)?.isVideoMirrored = true + stream.attachMultiCamera(front) + } + stream.addObserver(self, forKeyPath: "currentFPS", options: .new, context: nil) (view as?
(any NetStreamDrawable))?.attachStream(stream) NotificationCenter.default.addObserver(self, selector: #selector(didInterruptionNotification(_:)), name: AVAudioSession.interruptionNotification, object: nil) diff --git a/HaishinKit.xcodeproj/project.pbxproj b/HaishinKit.xcodeproj/project.pbxproj index 75233ed24..df67131c1 100644 --- a/HaishinKit.xcodeproj/project.pbxproj +++ b/HaishinKit.xcodeproj/project.pbxproj @@ -190,6 +190,7 @@ BC56765B2AD2AED800524F7E /* AudioCapture.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC56765A2AD2AED800524F7E /* AudioCapture.swift */; }; BC570B4828E9ACC10098A12C /* IOUnit.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC570B4728E9ACC10098A12C /* IOUnit.swift */; }; BC6692F32AC2F717009EC058 /* NetBitRateStrategyConvertible.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC6692F22AC2F717009EC058 /* NetBitRateStrategyConvertible.swift */; }; + BC6889782B011AEB0026A4C2 /* IOCaptureSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC6889772B011AEB0026A4C2 /* IOCaptureSession.swift */; }; BC6FC91E29609A6800A746EE /* ShapeFactory.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC6FC91D29609A6800A746EE /* ShapeFactory.swift */; }; BC6FC9222961B3D800A746EE /* vImage_CGImageFormat+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC6FC9212961B3D800A746EE /* vImage_CGImageFormat+Extension.swift */; }; BC701F332AAD808F00C4BEFE /* SRTHaishinKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = BCCC45962AA289FA0016EFE8 /* SRTHaishinKit.framework */; }; @@ -602,6 +603,7 @@ BC56765A2AD2AED800524F7E /* AudioCapture.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioCapture.swift; sourceTree = "<group>"; }; BC570B4728E9ACC10098A12C /* IOUnit.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOUnit.swift; sourceTree = "<group>"; }; BC6692F22AC2F717009EC058 /* NetBitRateStrategyConvertible.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NetBitRateStrategyConvertible.swift; sourceTree = "<group>"; }; + BC6889772B011AEB0026A4C2 /* IOCaptureSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOCaptureSession.swift; sourceTree = "<group>"; }; BC6FC91D29609A6800A746EE /* ShapeFactory.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ShapeFactory.swift; sourceTree = "<group>"; }; BC6FC9212961B3D800A746EE /* vImage_CGImageFormat+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "vImage_CGImageFormat+Extension.swift"; sourceTree = "<group>"; }; BC7C56B6299E579F00C41A9B /* AudioCodecSettings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioCodecSettings.swift; sourceTree = "<group>"; }; @@ -1048,6 +1050,7 @@ BCFC51FD2AAB420700014428 /* IOAudioResampler.swift */, BC5019C02A6D266B0046E02F /* IOAudioRingBuffer.swift */, 29B876891CD70AFE00FC07DA /* IOAudioUnit.swift */, + BC6889772B011AEB0026A4C2 /* IOCaptureSession.swift */, BC1102492925147300D48035 /* IOCaptureUnit.swift */, BC0F1FD92ACC4CC100C326FF /* IOCaptureVideoPreview.swift */, 29B8768B1CD70AFE00FC07DA /* IOMixer.swift */, @@ -1727,6 +1730,7 @@ BC1DC4FB2A02868900E928ED /* FLVVideoFourCC.swift in Sources */, 2976A47E1D48C5C700B53EF2 /* IORecorder.swift in Sources */, BC110257292E661E00D48035 /* MultiCamCaptureSettings.swift in Sources */, + BC6889782B011AEB0026A4C2 /* IOCaptureSession.swift in Sources */, BC3802142AB5E7CC001AE399 /* IOAudioCaptureUnit.swift in Sources */,
29B876B21CD70B2800FC07DA /* RTMPMuxer.swift in Sources */, 2958912E1EEB8F4100CE51E1 /* FLVSoundType.swift in Sources */, diff --git a/Sources/Media/IOAudioCaptureUnit.swift b/Sources/Media/IOAudioCaptureUnit.swift index 0325d8084..c51a46822 100644 --- a/Sources/Media/IOAudioCaptureUnit.swift +++ b/Sources/Media/IOAudioCaptureUnit.swift @@ -1,7 +1,7 @@ +#if os(iOS) || os(tvOS) || os(macOS) import AVFoundation import Foundation -#if os(iOS) || os(tvOS) || os(macOS) @available(tvOS 17.0, *) final class IOAudioCaptureUnit: IOCaptureUnit { typealias Output = AVCaptureAudioDataOutput @@ -13,7 +13,7 @@ final class IOAudioCaptureUnit: IOCaptureUnit { func attachDevice(_ device: AVCaptureDevice?, audioUnit: IOAudioUnit) throws { setSampleBufferDelegate(nil) - detachSession(audioUnit.mixer?.session) + audioUnit.mixer?.session.detachCapture(self) guard let device else { self.device = nil input = nil @@ -23,7 +23,7 @@ final class IOAudioCaptureUnit: IOCaptureUnit { self.device = device input = try AVCaptureDeviceInput(device: device) output = AVCaptureAudioDataOutput() - attachSession(audioUnit.mixer?.session) + audioUnit.mixer?.session.attachCapture(self) setSampleBufferDelegate(audioUnit) } diff --git a/Sources/Media/IOAudioUnit.swift b/Sources/Media/IOAudioUnit.swift index 2c24c054c..dc7c6f0c4 100644 --- a/Sources/Media/IOAudioUnit.swift +++ b/Sources/Media/IOAudioUnit.swift @@ -59,35 +59,30 @@ final class IOAudioUnit: NSObject, IOUnit { #if os(tvOS) private var _capture: Any? @available(tvOS 17.0, *) - var capture: IOAudioCaptureUnit { + private var capture: IOAudioCaptureUnit { if _capture == nil { _capture = IOAudioCaptureUnit() } return _capture as! IOAudioCaptureUnit } #elseif os(iOS) || os(macOS) - private(set) var capture: IOAudioCaptureUnit = .init() + private var capture: IOAudioCaptureUnit = .init() #endif #if os(iOS) || os(macOS) || os(tvOS) @available(tvOS 17.0, *) func attachAudio(_ device: AVCaptureDevice?, automaticallyConfiguresApplicationAudioSession: Bool) throws { - guard let mixer else { - return - } - mixer.session.beginConfiguration() - defer { - mixer.session.commitConfiguration() - } - guard let device else { - try capture.attachDevice(nil, audioUnit: self) - inputFormat = nil - return + try mixer?.session.configuration { session in + guard let device else { + try capture.attachDevice(nil, audioUnit: self) + inputFormat = nil + return + } + try capture.attachDevice(device, audioUnit: self) + #if os(iOS) + session.automaticallyConfiguresApplicationAudioSession = automaticallyConfiguresApplicationAudioSession + #endif } - try capture.attachDevice(device, audioUnit: self) - #if os(iOS) - mixer.session.automaticallyConfiguresApplicationAudioSession = automaticallyConfiguresApplicationAudioSession - #endif } #endif diff --git a/Sources/Media/IOCaptureSession.swift b/Sources/Media/IOCaptureSession.swift new file mode 100644 index 000000000..6bc64f1e1 --- /dev/null +++ b/Sources/Media/IOCaptureSession.swift @@ -0,0 +1,283 @@ +#if os(iOS) || os(tvOS) || os(macOS) +import AVFoundation + +protocol IOCaptureSessionDelegate: AnyObject { + @available(tvOS 17.0, *) + func session(_ session: IOCaptureSession, sessionRuntimeError session: AVCaptureSession, error: AVError) + #if os(iOS) || os(tvOS) + @available(tvOS 17.0, *) + func session(_ session: IOCaptureSession, sessionWasInterrupted session: AVCaptureSession, reason: AVCaptureSession.InterruptionReason?) 
+ @available(tvOS 17.0, *) + func session(_ session: IOCaptureSession, sessionInterruptionEnded session: AVCaptureSession) + #endif +} + +final class IOCaptureSession { + #if os(iOS) || os(tvOS) + static var isMultiCamSupported: Bool { + if #available(iOS 13.0, tvOS 17.0, *) { + return AVCaptureMultiCamSession.isMultiCamSupported + } else { + return false + } + } + #else + static let isMultiCamSupported = true + #endif + + #if os(tvOS) + private var _session: Any? + /// The capture session instance. + @available(tvOS 17.0, *) + var session: AVCaptureSession { + if _session == nil { + _session = makeSession() + } + return _session as! AVCaptureSession + } + #elseif os(iOS) || os(macOS) + /// The capture session instance. + private(set) lazy var session: AVCaptureSession = makeSession() + #endif + + #if os(iOS) || os(tvOS) + var isMultiCamSessionEnabled = false + @available(tvOS 17.0, *) + var isMultitaskingCameraAccessEnabled: Bool { + return session.isMultitaskingCameraAccessEnabled + } + #else + let isMultiCamSessionEnabled = true + let isMultitaskingCameraAccessEnabled = true + #endif + + weak var delegate: (any IOCaptureSessionDelegate)? + private(set) var isRunning: Atomic<Bool> = .init(false) + + #if os(tvOS) + private var _sessionPreset: Any? + @available(tvOS 17.0, *) + var sessionPreset: AVCaptureSession.Preset { + get { + if _sessionPreset == nil { + _sessionPreset = AVCaptureSession.Preset.default + } + return _sessionPreset as! AVCaptureSession.Preset + } + set { + guard sessionPreset != newValue, session.canSetSessionPreset(newValue) else { + return + } + session.beginConfiguration() + session.sessionPreset = newValue + session.commitConfiguration() + } + } + #elseif os(iOS) || os(macOS) + var sessionPreset: AVCaptureSession.Preset = .default { + didSet { + guard sessionPreset != oldValue, session.canSetSessionPreset(sessionPreset) else { + return + } + session.beginConfiguration() + session.sessionPreset = sessionPreset + session.commitConfiguration() + } + } + #endif + + @available(tvOS 17.0, *) + private var isMultiCamSession: Bool { + #if os(iOS) || os(tvOS) + if #available(iOS 13.0, *) { + return session is AVCaptureMultiCamSession + } else { + return false + } + #else + return false + #endif + } + + deinit { + if #available(tvOS 17.0, *) { + if session.isRunning { + session.stopRunning() + } + } + } + + @available(tvOS 17.0, *) + func configuration(_ lambda: (_ session: AVCaptureSession) throws -> Void) rethrows { + session.beginConfiguration() + defer { + session.commitConfiguration() + } + try lambda(session) + } + + @available(tvOS 17.0, *) + func attachCapture(_ capture: any IOCaptureUnit) { + if let connection = capture.connection { + if let input = capture.input, session.canAddInput(input) { + session.addInputWithNoConnections(input) + } + if let output = capture.output, session.canAddOutput(output) { + session.addOutputWithNoConnections(output) + } + if session.canAddConnection(connection) { + session.addConnection(connection) + } + } else { + if let input = capture.input, session.canAddInput(input) { + session.addInput(input) + } + if let output = capture.output, session.canAddOutput(output) { + session.addOutput(output) + } + } + } + + @available(tvOS 17.0, *) + func detachCapture(_ capture: any IOCaptureUnit) { + if let connection = capture.connection { + if capture.output?.connections.contains(connection) == true { + session.removeConnection(connection) + } + } + if let input = capture.input, session.inputs.contains(input) { + session.removeInput(input) + } + if let
output = capture.output, session.outputs.contains(output) { + session.removeOutput(output) + } + } + + @available(tvOS 17.0, *) + func startRunningIfNeeded() { + guard isRunning.value && !session.isRunning else { + return + } + session.startRunning() + isRunning.mutate { $0 = session.isRunning } + } + + #if os(iOS) || os(tvOS) + @available(tvOS 17.0, *) + private func makeSession() -> AVCaptureSession { + let session: AVCaptureSession + if isMultiCamSessionEnabled, #available(iOS 13.0, *) { + session = AVCaptureMultiCamSession() + } else { + session = AVCaptureSession() + } + if session.canSetSessionPreset(sessionPreset) { + session.sessionPreset = sessionPreset + } + if session.isMultitaskingCameraAccessSupported { + session.isMultitaskingCameraAccessEnabled = true + } + return session + } + #elseif os(macOS) + private func makeSession() -> AVCaptureSession { + let session = AVCaptureSession() + if session.canSetSessionPreset(sessionPreset) { + session.sessionPreset = sessionPreset + } + return session + } + #endif + + @available(tvOS 17.0, *) + private func addSessionObservers(_ session: AVCaptureSession) { + NotificationCenter.default.addObserver(self, selector: #selector(sessionRuntimeError(_:)), name: .AVCaptureSessionRuntimeError, object: session) + #if os(iOS) || os(tvOS) + NotificationCenter.default.addObserver(self, selector: #selector(sessionInterruptionEnded(_:)), name: .AVCaptureSessionInterruptionEnded, object: session) + NotificationCenter.default.addObserver(self, selector: #selector(sessionWasInterrupted(_:)), name: .AVCaptureSessionWasInterrupted, object: session) + #endif + } + + @available(tvOS 17.0, *) + private func removeSessionObservers(_ session: AVCaptureSession) { + #if os(iOS) || os(tvOS) + NotificationCenter.default.removeObserver(self, name: .AVCaptureSessionWasInterrupted, object: session) + NotificationCenter.default.removeObserver(self, name: .AVCaptureSessionInterruptionEnded, object: session) + #endif + NotificationCenter.default.removeObserver(self, name: .AVCaptureSessionRuntimeError, object: session) + } + + @available(tvOS 17.0, *) + @objc + private func sessionRuntimeError(_ notification: NSNotification) { + guard + let session = notification.object as? AVCaptureSession, + let errorValue = notification.userInfo?[AVCaptureSessionErrorKey] as? NSError else { + return + } + let error = AVError(_nsError: errorValue) + switch error.code { + #if os(iOS) || os(tvOS) + case .mediaServicesWereReset: + startRunningIfNeeded() + #endif + default: + break + } + delegate?.session(self, sessionRuntimeError: session, error: error) + } + + #if os(iOS) || os(tvOS) + @available(tvOS 17.0, *) + @objc + private func sessionWasInterrupted(_ notification: Notification) { + guard let session = notification.object as? 
AVCaptureSession else { + return + } + guard let userInfoValue = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as AnyObject?, + let reasonIntegerValue = userInfoValue.integerValue, + let reason = AVCaptureSession.InterruptionReason(rawValue: reasonIntegerValue) else { + delegate?.session(self, sessionWasInterrupted: session, reason: nil) + return + } + delegate?.session(self, sessionWasInterrupted: session, reason: reason) + } + + @available(tvOS 17.0, *) + @objc + private func sessionInterruptionEnded(_ notification: Notification) { + delegate?.session(self, sessionInterruptionEnded: session) + } + #endif +} + +extension IOCaptureSession: Running { + // MARK: Running + func startRunning() { + guard !isRunning.value else { + return + } + if #available(tvOS 17.0, *) { + addSessionObservers(session) + session.startRunning() + isRunning.mutate { $0 = session.isRunning } + } else { + isRunning.mutate { $0 = true } + } + } + + func stopRunning() { + guard isRunning.value else { + return + } + if #available(tvOS 17.0, *) { + removeSessionObservers(session) + session.stopRunning() + isRunning.mutate { $0 = session.isRunning } + } else { + isRunning.mutate { $0 = false } + } + } +} + +#endif diff --git a/Sources/Media/IOCaptureUnit.swift b/Sources/Media/IOCaptureUnit.swift index 8bc687a37..c3fdb66ad 100644 --- a/Sources/Media/IOCaptureUnit.swift +++ b/Sources/Media/IOCaptureUnit.swift @@ -14,49 +14,4 @@ protocol IOCaptureUnit { var output: Output? { get set } var connection: AVCaptureConnection? { get set } } - -@available(tvOS 17.0, *) -extension IOCaptureUnit { - func attachSession(_ session: AVCaptureSession?) { - guard let session else { - return - } - if let connection { - if let input, session.canAddInput(input) { - session.addInputWithNoConnections(input) - } - if let output, session.canAddOutput(output) { - session.addOutputWithNoConnections(output) - } - if session.canAddConnection(connection) { - session.addConnection(connection) - } - } else { - if let input, session.canAddInput(input) { - session.addInput(input) - } - if let output, session.canAddOutput(output) { - session.addOutput(output) - } - } - } - - func detachSession(_ session: AVCaptureSession?) { - guard let session else { - return - } - if let connection { - if output?.connections.contains(connection) == true { - session.removeConnection(connection) - } - } - if let input, session.inputs.contains(input) { - session.removeInput(input) - } - if let output, session.outputs.contains(output) { - session.removeOutput(output) - } - } -} - #endif diff --git a/Sources/Media/IOCaptureVideoPreview.swift b/Sources/Media/IOCaptureVideoPreview.swift index 235e079f9..23eeb9cda 100644 --- a/Sources/Media/IOCaptureVideoPreview.swift +++ b/Sources/Media/IOCaptureVideoPreview.swift @@ -53,7 +53,7 @@ final class IOCaptureVideoPreview: UIView { } func attachStream(_ stream: NetStream?) 
{ - layer.session = stream?.mixer.session + layer.session = stream?.mixer.session.session #if os(iOS) if let videoOrientation = stream?.videoOrientation, layer.connection?.isVideoOrientationSupported == true { layer.connection?.videoOrientation = videoOrientation diff --git a/Sources/Media/IOMixer.swift b/Sources/Media/IOMixer.swift index 9b9372d88..77894b67b 100644 --- a/Sources/Media/IOMixer.swift +++ b/Sources/Media/IOMixer.swift @@ -1,10 +1,8 @@ import AVFoundation + #if canImport(SwiftPMSupport) import SwiftPMSupport #endif -#if os(iOS) -import UIKit -#endif protocol IOMixerDelegate: AnyObject { func mixer(_ mixer: IOMixer, didOutput audio: AVAudioPCMBuffer, when: AVAudioTime) @@ -21,327 +19,77 @@ protocol IOMixerDelegate: AnyObject { /// An object that mixes audio and video for streaming. final class IOMixer { - /// The default fps for an IOMixer, value is 30. static let defaultFrameRate: Float64 = 30 - #if os(tvOS) - private var _session: Any? - /// The capture session instance. - @available(tvOS 17.0, *) - var session: AVCaptureSession { - get { - if _session == nil { - _session = makeSession() - } - return _session as! AVCaptureSession - } - set { - _session = newValue - } - } - #elseif os(iOS) || os(macOS) - /// The capture session instance. - lazy var session: AVCaptureSession = makeSession() { - willSet { - if session.isRunning { - removeSessionObservers(session) - session.stopRunning() - } - audioIO.capture.detachSession(session) - videoIO.capture.detachSession(session) - } - didSet { - if session.canSetSessionPreset(sessionPreset) { - session.sessionPreset = sessionPreset - } - audioIO.capture.attachSession(session) - videoIO.capture.attachSession(session) - if isRunning.value { - addSessionObservers(session) - session.startRunning() - } - } - } - #endif + weak var muxer: (any IOMuxer)? + + weak var delegate: (any IOMixerDelegate)? private(set) var isRunning: Atomic<Bool> = .init(false) - /// The recorder instance. - private(set) lazy var recorder = IORecorder() - weak var muxer: (any IOMuxer)? - weak var delegate: (any IOMixerDelegate)? + private(set) lazy var recorder = IORecorder() - lazy var audioIO = { + private(set) lazy var audioIO = { var audioIO = IOAudioUnit() audioIO.mixer = self return audioIO }() - lazy var videoIO = { + private(set) lazy var videoIO = { var videoIO = IOVideoUnit() videoIO.mixer = self return videoIO }() - var isMultiCamSessionEnabled = false { - didSet { - guard oldValue != isMultiCamSessionEnabled else { - return - } - #if os(iOS) - session = makeSession() - #endif - } - } - - #if os(tvOS) - private var _sessionPreset: Any? - @available(tvOS 17.0, *) - var sessionPreset: AVCaptureSession.Preset { - get { - if _sessionPreset == nil { - _sessionPreset = AVCaptureSession.Preset.default - } - return _sessionPreset as!
AVCaptureSession.Preset - } - set { - guard sessionPreset != newValue, session.canSetSessionPreset(newValue) else { - return - } - session.beginConfiguration() - session.sessionPreset = newValue - session.commitConfiguration() - } - } - #elseif os(iOS) || os(macOS) - var sessionPreset: AVCaptureSession.Preset = .default { - didSet { - guard sessionPreset != oldValue, session.canSetSessionPreset(sessionPreset) else { - return - } - session.beginConfiguration() - session.sessionPreset = sessionPreset - session.commitConfiguration() - } - } - #endif - - #if os(iOS) || os(macOS) || os(tvOS) - var inBackgroundMode = false { - didSet { - if #available(tvOS 17.0, *) { - guard inBackgroundMode != oldValue else { - return - } - if inBackgroundMode { - if !session.isMultitaskingCameraAccessEnabled { - videoIO.multiCamCapture.detachSession(session) - videoIO.capture.detachSession(session) - } - } else { - startCaptureSessionIfNeeded() - if !session.isMultitaskingCameraAccessEnabled { - videoIO.capture.attachSession(session) - videoIO.multiCamCapture.attachSession(session) - } - } - } - } - } - #endif + private(set) lazy var session = { + var session = IOCaptureSession() + session.delegate = self + return session + }() private(set) lazy var audioEngine: AVAudioEngine? = { return NetStream.audioEngineHolder.retain() }() - @available(tvOS 17.0, *) - private var isMultiCamSupported: Bool { - #if os(iOS) || os(tvOS) - if #available(iOS 13.0, *) { - return session is AVCaptureMultiCamSession - } else { - return false - } - #else - return false - #endif - } - deinit { - #if os(iOS) || os(macOS) || os(tvOS) - if #available(tvOS 17.0, *) { - if session.isRunning { - session.stopRunning() - } - } - #endif NetStream.audioEngineHolder.release(audioEngine) } #if os(iOS) || os(tvOS) - @available(tvOS 17.0, *) - private func makeSession() -> AVCaptureSession { - let session: AVCaptureSession - if isMultiCamSessionEnabled, #available(iOS 13.0, *) { - session = AVCaptureMultiCamSession() - } else { - session = AVCaptureSession() - } - if session.canSetSessionPreset(sessionPreset) { - session.sessionPreset = sessionPreset - } - if session.isMultitaskingCameraAccessSupported { - session.isMultitaskingCameraAccessEnabled = true + func setBackgroundMode(_ background: Bool) { + guard #available(tvOS 17.0, *) else { + return } - return session - } - #elseif os(macOS) - private func makeSession() -> AVCaptureSession { - let session = AVCaptureSession() - if session.canSetSessionPreset(sessionPreset) { - session.sessionPreset = sessionPreset + if background { + videoIO.setBackgroundMode(background) + } else { + videoIO.setBackgroundMode(background) + session.startRunningIfNeeded() } - return session } #endif } -#if os(iOS) || os(macOS) || os(tvOS) extension IOMixer: Running { // MARK: Running func startRunning() { guard !isRunning.value else { return } - if #available(tvOS 17.0, *) { - addSessionObservers(session) - session.startRunning() - isRunning.mutate { $0 = session.isRunning } - } + muxer?.startRunning() + audioIO.startRunning() + videoIO.startRunning() + isRunning.mutate { $0 = true } } func stopRunning() { guard isRunning.value else { return } - if #available(tvOS 17.0, *) { - removeSessionObservers(session) - session.stopRunning() - isRunning.mutate { $0 = session.isRunning } - } - } - - @available(tvOS 17.0, *) - func startCaptureSessionIfNeeded() { - guard isRunning.value && !session.isRunning else { - return - } - session.startRunning() - isRunning.mutate { $0 = session.isRunning } - } - - @available(tvOS 17.0, 
*) - private func addSessionObservers(_ session: AVCaptureSession) { - NotificationCenter.default.addObserver(self, selector: #selector(sessionRuntimeError(_:)), name: .AVCaptureSessionRuntimeError, object: session) - #if os(iOS) || os(tvOS) - NotificationCenter.default.addObserver(self, selector: #selector(sessionInterruptionEnded(_:)), name: .AVCaptureSessionInterruptionEnded, object: session) - NotificationCenter.default.addObserver(self, selector: #selector(sessionWasInterrupted(_:)), name: .AVCaptureSessionWasInterrupted, object: session) - #endif - } - - @available(tvOS 17.0, *) - private func removeSessionObservers(_ session: AVCaptureSession) { - #if os(iOS) || os(tvOS) - NotificationCenter.default.removeObserver(self, name: .AVCaptureSessionWasInterrupted, object: session) - NotificationCenter.default.removeObserver(self, name: .AVCaptureSessionInterruptionEnded, object: session) - #endif - NotificationCenter.default.removeObserver(self, name: .AVCaptureSessionRuntimeError, object: session) - } - - @available(tvOS 17.0, *) - @objc - private func sessionRuntimeError(_ notification: NSNotification) { - guard - let errorValue = notification.userInfo?[AVCaptureSessionErrorKey] as? NSError else { - return - } - let error = AVError(_nsError: errorValue) - switch error.code { - case .unsupportedDeviceActiveFormat: - guard let device = error.device, let format = device.videoFormat( - width: sessionPreset.width ?? Int32(videoIO.settings.videoSize.width), - height: sessionPreset.height ?? Int32(videoIO.settings.videoSize.height), - frameRate: videoIO.frameRate, - isMultiCamSupported: isMultiCamSupported - ), device.activeFormat != format else { - return - } - do { - try device.lockForConfiguration() - device.activeFormat = format - if format.isFrameRateSupported(videoIO.frameRate) { - device.activeVideoMinFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * videoIO.frameRate)) - device.activeVideoMaxFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * videoIO.frameRate)) - } - device.unlockForConfiguration() - session.startRunning() - } catch { - logger.warn(error) - } - #if os(iOS) || os(tvOS) - case .mediaServicesWereReset: - startCaptureSessionIfNeeded() - #endif - default: - break - } - } - - #if os(iOS) || os(tvOS) - @available(tvOS 17.0, *) - @objc - private func sessionWasInterrupted(_ notification: Notification) { - guard let session = notification.object as? 
AVCaptureSession else { - return - } - guard let userInfoValue = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as AnyObject?, - let reasonIntegerValue = userInfoValue.integerValue, - let reason = AVCaptureSession.InterruptionReason(rawValue: reasonIntegerValue) else { - delegate?.mixer(self, sessionWasInterrupted: session, reason: nil) - return - } - delegate?.mixer(self, sessionWasInterrupted: session, reason: reason) - } - - @available(tvOS 17.0, *) - @objc - private func sessionInterruptionEnded(_ notification: Notification) { - delegate?.mixer(self, sessionInterruptionEnded: session) - } - #endif -} -#else -extension IOMixer: Running { - func startRunning() { - } - - func stopRunning() { - } -} -#endif - -extension IOMixer { - func startMuxing(_ muxer: any IOMuxer) { - self.muxer = muxer - muxer.startRunning() - audioIO.startRunning() - videoIO.startRunning() - } - - func stopMuxing() { videoIO.stopRunning() audioIO.stopRunning() muxer?.stopRunning() + isRunning.mutate { $0 = false } } } @@ -382,6 +130,50 @@ extension IOMixer: AudioCodecDelegate { } } +extension IOMixer: IOCaptureSessionDelegate { + // MARK: IOCaptureSessionDelegate + @available(tvOS 17.0, *) + func session(_ capture: IOCaptureSession, sessionRuntimeError session: AVCaptureSession, error: AVError) { + switch error.code { + case .unsupportedDeviceActiveFormat: + guard let device = error.device, let format = device.videoFormat( + width: session.sessionPreset.width ?? Int32(videoIO.settings.videoSize.width), + height: session.sessionPreset.height ?? Int32(videoIO.settings.videoSize.height), + frameRate: videoIO.frameRate, + isMultiCamSupported: capture.isMultiCamSessionEnabled + ), device.activeFormat != format else { + return + } + do { + try device.lockForConfiguration() + device.activeFormat = format + if format.isFrameRateSupported(videoIO.frameRate) { + device.activeVideoMinFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * videoIO.frameRate)) + device.activeVideoMaxFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * videoIO.frameRate)) + } + device.unlockForConfiguration() + capture.startRunningIfNeeded() + } catch { + logger.warn(error) + } + default: + break + } + } + + #if os(iOS) || os(tvOS) + @available(tvOS 17.0, *) + func session(_ _: IOCaptureSession, sessionWasInterrupted session: AVCaptureSession, reason: AVCaptureSession.InterruptionReason?) { + delegate?.mixer(self, sessionWasInterrupted: session, reason: reason) + } + + @available(tvOS 17.0, *) + func session(_ _: IOCaptureSession, sessionInterruptionEnded session: AVCaptureSession) { + delegate?.mixer(self, sessionInterruptionEnded: session) + } + #endif +} + extension IOMixer: IOAudioUnitDelegate { // MARK: IOAudioUnitDelegate func audioUnit(_ audioUnit: IOAudioUnit, errorOccurred error: IOAudioUnitError) { diff --git a/Sources/Media/IOVideoCaptureUnit.swift b/Sources/Media/IOVideoCaptureUnit.swift index 6bd71eb54..1c8004e0b 100644 --- a/Sources/Media/IOVideoCaptureUnit.swift +++ b/Sources/Media/IOVideoCaptureUnit.swift @@ -1,10 +1,10 @@ +#if os(iOS) || os(tvOS) || os(macOS) import AVFoundation import Foundation -#if os(iOS) || os(tvOS) || os(macOS) /// An object that provides the interface to control the AVCaptureDevice's transport behavior. @available(tvOS 17.0, *) -public class IOVideoCaptureUnit: IOCaptureUnit { +public final class IOVideoCaptureUnit: IOCaptureUnit { #if os(iOS) || os(macOS) /// The default color format. 
public static let colorFormat = kCVPixelFormatType_32BGRA @@ -71,7 +71,7 @@ public class IOVideoCaptureUnit: IOCaptureUnit { func attachDevice(_ device: AVCaptureDevice?, videoUnit: IOVideoUnit) throws { setSampleBufferDelegate(nil) - detachSession(videoUnit.mixer?.session) + videoUnit.mixer?.session.detachCapture(self) guard let device else { self.device = nil input = nil @@ -95,7 +95,7 @@ public class IOVideoCaptureUnit: IOCaptureUnit { connection = nil } #endif - attachSession(videoUnit.mixer?.session) + videoUnit.mixer?.session.attachCapture(self) output?.connections.forEach { if $0.isVideoMirroringSupported { $0.isVideoMirrored = isVideoMirrored @@ -117,12 +117,12 @@ public class IOVideoCaptureUnit: IOCaptureUnit { #if os(macOS) func attachScreen(_ screen: AVCaptureScreenInput?, videoUnit: IOVideoUnit) { setSampleBufferDelegate(nil) - detachSession(videoUnit.mixer?.session) + videoUnit.mixer?.session.detachCapture(self) device = nil input = screen output = AVCaptureVideoDataOutput() connection = nil - attachSession(videoUnit.mixer?.session) + videoUnit.mixer?.session.attachCapture(self) setSampleBufferDelegate(videoUnit) } #endif diff --git a/Sources/Media/IOVideoUnit.swift b/Sources/Media/IOVideoUnit.swift index 259828c16..b9077928c 100644 --- a/Sources/Media/IOVideoUnit.swift +++ b/Sources/Media/IOVideoUnit.swift @@ -56,8 +56,9 @@ final class IOVideoUnit: NSObject, IOUnit { var frameRate = IOMixer.defaultFrameRate { didSet { if #available(tvOS 17.0, *) { - capture.setFrameRate(frameRate) - multiCamCapture.setFrameRate(frameRate) + for capture in captures.values { + capture.setFrameRate(frameRate) + } } } } @@ -85,6 +86,11 @@ final class IOVideoUnit: NSObject, IOUnit { } } + @available(tvOS 17.0, *) + var hasDevice: Bool { + !captures.lazy.filter { $0.value.device != nil }.isEmpty + } + var isRunning: Atomic<Bool> { return codec.isRunning } @@ -95,51 +101,39 @@ guard videoOrientation != oldValue else { return } - mixer?.session.beginConfiguration() - defer { - mixer?.session.commitConfiguration() - // https://github.com/shogo4405/HaishinKit.swift/issues/190 - DispatchQueue.main.asyncAfter(deadline: .now() + 0.3) { - if self.torch { - self.setTorchMode(.on) - } + mixer?.session.configuration { _ in + drawable?.videoOrientation = videoOrientation + for capture in captures.values { + capture.videoOrientation = videoOrientation + } + } + // https://github.com/shogo4405/HaishinKit.swift/issues/190 + DispatchQueue.main.asyncAfter(deadline: .now() + 0.3) { + if self.torch { + self.setTorchMode(.on) } } - drawable?.videoOrientation = videoOrientation - capture.videoOrientation = videoOrientation - multiCamCapture.videoOrientation = videoOrientation } } #endif #if os(tvOS) - private var _capture: Any? - @available(tvOS 17.0, *) - var capture: IOVideoCaptureUnit { - if _capture == nil { - _capture = IOVideoCaptureUnit() - } - return _capture as! IOVideoCaptureUnit - } - - private var _multiCamCapture: Any? + private var _captures: [UInt8: Any] = [:] @available(tvOS 17.0, *) - var multiCamCapture: IOVideoCaptureUnit { - if _multiCamCapture == nil { - _multiCamCapture = IOVideoCaptureUnit() - } - return _multiCamCapture as! IOVideoCaptureUnit + private var captures: [UInt8: IOVideoCaptureUnit] { + return _captures as!
[UInt8: IOVideoCaptureUnit] } #elseif os(iOS) || os(macOS) - private(set) var capture: IOVideoCaptureUnit = .init() - private(set) var multiCamCapture: IOVideoCaptureUnit = .init() + private var captures: [UInt8: IOVideoCaptureUnit] = [:] #endif - private lazy var videoMixer: IOVideoMixer = { + + private lazy var videoMixer = { var videoMixer = IOVideoMixer() videoMixer.delegate = self return videoMixer }() - private lazy var codec: VideoCodec = { + + private lazy var codec = { var codec = VideoCodec(lockQueue: lockQueue) codec.delegate = mixer return codec @@ -155,88 +149,6 @@ final class IOVideoUnit: NSObject, IOUnit { } } - #if os(iOS) || os(tvOS) || os(macOS) - @available(tvOS 17.0, *) - func attachCamera(_ device: AVCaptureDevice?) throws { - guard let mixer, self.capture.device != device else { - return - } - guard let device else { - mixer.session.beginConfiguration() - defer { - mixer.session.commitConfiguration() - } - capture.detachSession(mixer.session) - try capture.attachDevice(nil, videoUnit: self) - inputFormat = nil - return - } - mixer.session.beginConfiguration() - defer { - mixer.session.commitConfiguration() - if torch { - setTorchMode(.on) - } - } - if multiCamCapture.device == device { - try multiCamCapture.attachDevice(nil, videoUnit: self) - } - try capture.attachDevice(device, videoUnit: self) - } - - @available(iOS 13.0, tvOS 17.0, *) - func attachMultiCamera(_ device: AVCaptureDevice?) throws { - #if os(iOS) - guard AVCaptureMultiCamSession.isMultiCamSupported else { - throw Error.multiCamNotSupported - } - #endif - guard let mixer, multiCamCapture.device != device else { - return - } - guard let device else { - mixer.session.beginConfiguration() - defer { - mixer.session.commitConfiguration() - } - multiCamCapture.detachSession(mixer.session) - try multiCamCapture.attachDevice(nil, videoUnit: self) - return - } - mixer.isMultiCamSessionEnabled = true - mixer.session.beginConfiguration() - defer { - mixer.session.commitConfiguration() - } - if capture.device == device { - try multiCamCapture.attachDevice(nil, videoUnit: self) - } - try multiCamCapture.attachDevice(device, videoUnit: self) - } - - @available(tvOS 17.0, *) - func setTorchMode(_ torchMode: AVCaptureDevice.TorchMode) { - capture.setTorchMode(torchMode) - multiCamCapture.setTorchMode(torchMode) - } - #endif - - #if os(macOS) - func attachScreen(_ input: AVCaptureScreenInput?) { - guard let mixer else { - return - } - mixer.session.beginConfiguration() - defer { - mixer.session.commitConfiguration() - } - guard let input else { - return - } - multiCamCapture.attachScreen(input, videoUnit: self) - } - #endif - func registerEffect(_ effect: VideoEffect) -> Bool { return videoMixer.registerEffect(effect) } @@ -302,13 +214,90 @@ final class IOVideoUnit: NSObject, IOUnit { codec.append(sampleBuffer) } } + + #if os(iOS) || os(tvOS) || os(macOS) + @available(tvOS 17.0, *) + func attachCamera(_ device: AVCaptureDevice?, channel: UInt8) throws { + guard self.captures[channel]?.device != device else { + return + } + if hasDevice && device != nil && mixer?.session.isMultiCamSessionEnabled == false { + throw Error.multiCamNotSupported + } + try mixer?.session.configuration { _ in + let capture = capture(for: channel) + for capture in captures.values where capture.device == device { + try capture.attachDevice(nil, videoUnit: self) + } + try capture?.attachDevice(device, videoUnit: self) + } + if drawable != nil && device != nil { + // Start capturing if not running.
+ mixer?.session.startRunning() + } + } + + @available(tvOS 17.0, *) + func setTorchMode(_ torchMode: AVCaptureDevice.TorchMode) { + for capture in captures.values { + capture.setTorchMode(torchMode) + } + } + + @available(tvOS 17.0, *) + func capture(for channel: UInt8) -> IOVideoCaptureUnit? { + #if os(tvOS) + return lockQueue.sync { + if _captures[channel] == nil { + _captures[channel] = IOVideoCaptureUnit() + } + return _captures[channel] as? IOVideoCaptureUnit + } + #else + return lockQueue.sync { + if captures[channel] == nil { + captures[channel] = .init() + } + return captures[channel] + } + #endif + } + + @available(tvOS 17.0, *) + func setBackgroundMode(_ background: Bool) { + guard let session = mixer?.session, !session.isMultitaskingCameraAccessEnabled else { + return + } + if background { + for capture in captures.values { + mixer?.session.detachCapture(capture) + } + } else { + for capture in captures.values { + mixer?.session.attachCapture(capture) + } + } + } + #endif + + #if os(macOS) + func attachScreen(_ input: AVCaptureScreenInput?, channel: UInt8) { + mixer?.session.configuration { _ in + let capture = capture(for: channel) + for capture in captures.values where capture.input == input { + capture.attachScreen(nil, videoUnit: self) + } + capture?.attachScreen(input, videoUnit: self) + } + } + #endif } extension IOVideoUnit: Running { // MARK: Running func startRunning() { #if os(iOS) - codec.passthrough = capture.preferredVideoStabilizationMode == .off + codec.passthrough = captures[0]?.preferredVideoStabilizationMode == .off #endif codec.startRunning() } @@ -323,11 +312,11 @@ extension IOVideoUnit: Running { extension IOVideoUnit: AVCaptureVideoDataOutputSampleBufferDelegate { // MARK: AVCaptureVideoDataOutputSampleBufferDelegate func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { - if capture.output == captureOutput { + if captures[0]?.output == captureOutput { inputFormat = sampleBuffer.formatDescription videoMixer.append(sampleBuffer, channel: 0, isVideoMirrored: connection.isVideoMirrored) drawable?.enqueue(sampleBuffer) - } else if multiCamCapture.output == captureOutput { + } else if captures[1]?.output == captureOutput { videoMixer.append(sampleBuffer, channel: 1, isVideoMirrored: connection.isVideoMirrored) } } diff --git a/Sources/Media/MTHKView.swift b/Sources/Media/MTHKView.swift index 12a9d578b..8ba70a60d 100644 --- a/Sources/Media/MTHKView.swift +++ b/Sources/Media/MTHKView.swift @@ -53,26 +53,25 @@ public class MTHKView: MTKView { }() private var captureVideoPreview: View? { + willSet { + captureVideoPreview?.removeFromSuperview() + } didSet { - if let oldValue { - oldValue.removeFromSuperview() - } - if let captureVideoPreview { - addSubview(captureVideoPreview) - sendSubviewToBack(captureVideoPreview) + captureVideoPreview.map { + addSubview($0) + sendSubviewToBack($0) } } } private weak var currentStream: NetStream? { + willSet { + currentStream?.setNetStreamDrawable(nil) + } didSet { - oldValue?.mixer.videoIO.drawable = nil - if let currentStream = currentStream { - currentStream.mixer.videoIO.context = CIContext(mtlDevice: device!) - currentStream.lockQueue.async { - currentStream.mixer.videoIO.drawable = self - currentStream.mixer.startRunning() - } + currentStream.map { + $0.mixer.videoIO.context = CIContext(mtlDevice: device!) 
+ currentStream?.setNetStreamDrawable(self) } } } diff --git a/Sources/Media/PiPHKView.swift b/Sources/Media/PiPHKView.swift index 2067a110b..9f01051a7 100644 --- a/Sources/Media/PiPHKView.swift +++ b/Sources/Media/PiPHKView.swift @@ -67,16 +67,19 @@ public class PiPHKView: UIView { private var currentSampleBuffer: CMSampleBuffer? private weak var currentStream: NetStream? { + willSet { + currentStream?.setNetStreamDrawable(nil) + } didSet { - oldValue?.mixer.videoIO.drawable = nil + currentStream?.setNetStreamDrawable(self) } } private var captureVideoPreview: UIView? { + willSet { + captureVideoPreview?.removeFromSuperview() + } didSet { - if let oldValue { - oldValue.removeFromSuperview() - } if let captureVideoPreview { addSubview(captureVideoPreview) sendSubviewToBack(captureVideoPreview) @@ -111,14 +114,12 @@ extension PiPHKView: NetStreamDrawable { // MARK: NetStreamDrawable public func attachStream(_ stream: NetStream?) { - guard let stream: NetStream = stream else { - currentStream = nil - return - } - stream.lockQueue.async { - stream.mixer.videoIO.drawable = self - self.currentStream = stream - stream.mixer.startRunning() + if Thread.isMainThread { + currentStream = stream + } else { + DispatchQueue.main.async { + self.currentStream = stream + } + } } @@ -185,13 +186,13 @@ public class PiPHKView: NSView { } private var captureVideoPreview: NSView? { + willSet { + captureVideoPreview?.removeFromSuperview() + } didSet { - if let oldValue { - oldValue.removeFromSuperview() - } - if let captureVideoPreview { - addSubview(captureVideoPreview) - sendSubviewToBack(captureVideoPreview) + captureVideoPreview.map { + addSubview($0) + sendSubviewToBack($0) } } } @@ -199,8 +200,11 @@ public class PiPHKView: NSView { private var currentSampleBuffer: CMSampleBuffer? private weak var currentStream: NetStream? { + willSet { + currentStream?.setNetStreamDrawable(nil) + } didSet { - oldValue?.mixer.videoIO.drawable = nil + currentStream?.setNetStreamDrawable(self) } } @@ -232,14 +236,12 @@ extension PiPHKView: NetStreamDrawable { // MARK: NetStreamDrawable public func attachStream(_ stream: NetStream?) { - guard let stream: NetStream = stream else { - currentStream = nil - return - } - stream.lockQueue.async { - stream.mixer.videoIO.drawable = self - self.currentStream = stream - stream.mixer.startRunning() + if Thread.isMainThread { + currentStream = stream + } else { + DispatchQueue.main.async { + self.currentStream = stream + } + } } diff --git a/Sources/Net/NetStream.swift b/Sources/Net/NetStream.swift index ec0c9a395..f2b3e88d6 100644 --- a/Sources/Net/NetStream.swift +++ b/Sources/Net/NetStream.swift @@ -7,7 +7,7 @@ import SwiftPMSupport #if canImport(ScreenCaptureKit) import ScreenCaptureKit #endif -#if os(iOS) || os(tvOS) +#if canImport(UIKit) import UIKit #endif @@ -137,15 +137,29 @@ } } + #if os(iOS) || os(tvOS) + /// Specifies whether the AVCaptureMultiCamSession is enabled. + /// Warning: If there is any possibility of using multiple cameras, set this to true before attaching the first camera. + @available(tvOS 17.0, iOS 13.0, *) + public var isMultiCamSessionEnabled: Bool { + get { + return mixer.session.isMultiCamSessionEnabled + } + set { + mixer.session.isMultiCamSessionEnabled = newValue + } + } + #endif + + /// Specifies the sessionPreset for the AVCaptureSession.
@available(tvOS 17.0, *) public var sessionPreset: AVCaptureSession.Preset { get { - return lockQueue.sync { self.mixer.sessionPreset } + return lockQueue.sync { self.mixer.session.sessionPreset } } set { lockQueue.async { - self.mixer.sessionPreset = newValue + self.mixer.session.sessionPreset = newValue } } } @@ -260,13 +274,13 @@ } } - private(set) lazy var mixer: IOMixer = { + private(set) lazy var mixer = { let mixer = IOMixer() mixer.delegate = self return mixer }() - private(set) lazy var telly: IOTellyUnit = { + private lazy var telly = { let telly = IOTellyUnit() telly.delegate = self return telly @@ -288,7 +302,7 @@ public func attachCamera(_ device: AVCaptureDevice?, onError: ((_ error: any Error) -> Void)? = nil) { lockQueue.async { do { - try self.mixer.videoIO.attachCamera(device) + try self.mixer.videoIO.attachCamera(device, channel: 0) } catch { onError?(error) } @@ -301,7 +315,7 @@ public func attachMultiCamera(_ device: AVCaptureDevice?, onError: ((_ error: any Error) -> Void)? = nil) { lockQueue.async { do { - try self.mixer.videoIO.attachMultiCamera(device) + try self.mixer.videoIO.attachCamera(device, channel: 1) } catch { onError?(error) } @@ -323,25 +337,18 @@ /// Returns the IOVideoCaptureUnit by index. @available(tvOS 17.0, *) - public func videoCapture(for index: Int) -> IOVideoCaptureUnit? { + public func videoCapture(for channel: UInt8) -> IOVideoCaptureUnit? { return lockQueue.sync { - switch index { - case 0: - return self.mixer.videoIO.capture - case 1: - return self.mixer.videoIO.multiCamCapture - default: - return nil - } + return self.mixer.videoIO.capture(for: channel) } } #endif #if os(macOS) /// Attaches the screen input object. - public func attachScreen(_ input: AVCaptureScreenInput?) { + public func attachScreen(_ input: AVCaptureScreenInput?, channel: UInt8 = 0) { lockQueue.async { - self.mixer.videoIO.attachScreen(input) + self.mixer.videoIO.attachScreen(input, channel: channel) } } #endif @@ -402,9 +409,9 @@ open func readyStateWillChange(to readyState: ReadyState) { switch readyState { case .publishing: - mixer.stopMuxing() + mixer.stopRunning() case .playing: - mixer.stopMuxing() + mixer.stopRunning() default: break } @@ -416,27 +423,42 @@ switch readyState { case .play: audioSettings.format = .pcm - mixer.startMuxing(telly) - case .publish: + mixer.muxer = telly mixer.startRunning() + case .publish: + // Start capturing audio and video data. + mixer.session.startRunning() case .publishing(let muxer): - mixer.startMuxing(muxer) + mixer.muxer = muxer + mixer.startRunning() default: break } } + func setNetStreamDrawable(_ drawable: (any NetStreamDrawable)?) { + lockQueue.async { + self.mixer.videoIO.drawable = drawable + guard #available(tvOS 17.0, *) else { + return + } + if self.mixer.videoIO.hasDevice { + self.mixer.session.startRunning() + } + } + } + #if os(iOS) || os(tvOS) @objc private func didEnterBackground(_ notification: Notification) { // Require main thread. Otherwise the microphone cannot be used in the background. - mixer.inBackgroundMode = true + mixer.setBackgroundMode(true) } @objc private func willEnterForeground(_ notification: Notification) { lockQueue.async { - self.mixer.inBackgroundMode = false + self.mixer.setBackgroundMode(false) } } #endif
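
A few usage notes on the API changes above. First, the ordering contract behind the new `isMultiCamSessionEnabled` property: `IOCaptureSession` creates its `AVCaptureSession` (or `AVCaptureMultiCamSession`) lazily in `makeSession()`, so the flag only takes effect if it is set before the first camera is attached. A minimal sketch of the new call order on iOS, assuming an `RTMPStream` (a `NetStream` subclass) and a multi-cam capable device:

```swift
import AVFoundation
import HaishinKit

func configure(_ stream: RTMPStream) {
    if #available(iOS 13.0, *) {
        // Must precede the first attachCamera call; the capture session is
        // created lazily, and a plain AVCaptureSession can't be upgraded later.
        stream.isMultiCamSessionEnabled = true
    }
    // Channel 0: the primary (back) camera.
    stream.attachCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)) { error in
        print(error)
    }
    if #available(iOS 13.0, *) {
        // Channel 1: the secondary (front) camera, mirrored for preview.
        stream.videoCapture(for: 1)?.isVideoMirrored = true
        stream.attachMultiCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)) { error in
            print(error)
        }
    }
}
```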
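Second, the `beginConfiguration()` / `commitConfiguration()` pairs deleted from `IOMixer`, `IOAudioUnit`, and `IOVideoUnit` now funnel through `IOCaptureSession.configuration(_:)`. Its shape is worth noting: `defer` guarantees the commit even when an `attachDevice(_:)` call inside the block throws, and `rethrows` keeps non-throwing call sites free of `try`. A self-contained sketch of the same pattern (`withConfiguration(of:_:)` is an illustrative free-function name; in the diff it is an instance method whose session is owned by `self`):

```swift
import AVFoundation

// begin/commit pairing as used by IOCaptureSession.configuration(_:):
// defer runs commitConfiguration() even if the body throws.
func withConfiguration(of session: AVCaptureSession, _ body: (AVCaptureSession) throws -> Void) rethrows {
    session.beginConfiguration()
    defer {
        session.commitConfiguration()
    }
    try body(session)
}
```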
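Third, the fixed `capture` / `multiCamCapture` pair in `IOVideoUnit` becomes a channel-indexed dictionary, and `videoCapture(for:)` takes a `UInt8` channel: 0 is the primary camera (feeding the drawable and video-mixer channel 0), 1 is the multi-camera source (mixer channel 1), matching the sample-buffer delegate above. Per-channel tweaks then look like this, assuming `stream` is configured as in the first sketch and that `preferredVideoStabilizationMode` remains publicly settable, as it was on the old accessors:

```swift
import AVFoundation
import HaishinKit

func tweakCaptureUnits(on stream: NetStream) {
    if let primary = stream.videoCapture(for: 0) {
        // Channel 0 drives the drawable and video-mixer channel 0.
        primary.preferredVideoStabilizationMode = .standard
    }
    if let secondary = stream.videoCapture(for: 1) {
        // Channel 1 is the multi-camera (picture-in-picture) source.
        secondary.isVideoMirrored = true
    }
}
```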
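Fourth, the `AVCaptureSession` notification handling moves wholesale into `IOCaptureSession`, which forwards events through the new `IOCaptureSessionDelegate`; `IOMixer` keeps only the `.unsupportedDeviceActiveFormat` recovery (the fallback format itself comes from the library's `AVCaptureDevice.videoFormat(width:height:frameRate:isMultiCamSupported:)` helper). Reduced to its core, and omitting the `isFrameRateSupported` check, the recovery is:

```swift
import AVFoundation

// Apply a compatible format under lockForConfiguration, pin the frame
// duration to the target rate, then restart the session.
func applyFallbackFormat(_ format: AVCaptureDevice.Format, on device: AVCaptureDevice, frameRate: Float64, session: AVCaptureSession) {
    do {
        try device.lockForConfiguration()
        device.activeFormat = format
        device.activeVideoMinFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate))
        device.activeVideoMaxFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate))
        device.unlockForConfiguration()
        session.startRunning()
    } catch {
        print(error)
    }
}
```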
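Finally, the session lifecycle is decoupled from muxing: `IOMixer.startRunning()` now drives only the muxer and the audio/video units, while the capture session starts on `.publish` or, for preview, from the new `setNetStreamDrawable(_:)` once a camera device is attached. From the app side, `attachStream(_:)` is all that's needed; a sketch, assuming the controller's root view is an `MTHKView` as in the example app:

```swift
import HaishinKit
import UIKit

final class PreviewViewController: UIViewController {
    private let connection = RTMPConnection()
    private lazy var stream = RTMPStream(connection: connection)

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        // attachStream(_:) routes through NetStream.setNetStreamDrawable(_:),
        // which starts the capture session once a camera device is attached,
        // so the preview renders before publish() is ever called.
        (view as? (any NetStreamDrawable))?.attachStream(stream)
    }
}
```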