diff --git a/HaishinKit.xcodeproj/project.pbxproj b/HaishinKit.xcodeproj/project.pbxproj
index 8d58ed531..5c276dcf9 100644
--- a/HaishinKit.xcodeproj/project.pbxproj
+++ b/HaishinKit.xcodeproj/project.pbxproj
@@ -170,6 +170,8 @@
 		BC34DFF025EBB53F005F975A /* Logboard.xcframework in Frameworks */ = {isa = PBXBuildFile; fileRef = BC34DFD125EBB12C005F975A /* Logboard.xcframework */; };
 		BC34E00225EBB59C005F975A /* Logboard.xcframework in Frameworks */ = {isa = PBXBuildFile; fileRef = BC34DFD125EBB12C005F975A /* Logboard.xcframework */; };
 		BC34FA0B286CB90A00EFAF27 /* PiPHKView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC34FA0A286CB90A00EFAF27 /* PiPHKView.swift */; };
+		BC3802122AB5E770001AE399 /* IOVideoCaptureUnit.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC3802112AB5E770001AE399 /* IOVideoCaptureUnit.swift */; };
+		BC3802142AB5E7CC001AE399 /* IOAudioCaptureUnit.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC3802132AB5E7CC001AE399 /* IOAudioCaptureUnit.swift */; };
 		BC3E384429C216BB007CD972 /* ADTSReaderTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC3E384329C216BB007CD972 /* ADTSReaderTests.swift */; };
 		BC44A1A923D31E92002D4297 /* AudioCodecRingBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC44A1A823D31E92002D4297 /* AudioCodecRingBuffer.swift */; };
 		BC4914A228DDD33D009E2DF6 /* VTSessionConvertible.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC4914A128DDD33D009E2DF6 /* VTSessionConvertible.swift */; };
@@ -576,6 +578,8 @@
 		BC32E88729C9971100051507 /* InstanceHolder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = InstanceHolder.swift; sourceTree = "<group>"; };
 		BC34DFD125EBB12C005F975A /* Logboard.xcframework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.xcframework; name = Logboard.xcframework; path = Carthage/Build/Logboard.xcframework; sourceTree = "<group>"; };
 		BC34FA0A286CB90A00EFAF27 /* PiPHKView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PiPHKView.swift; sourceTree = "<group>"; };
+		BC3802112AB5E770001AE399 /* IOVideoCaptureUnit.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOVideoCaptureUnit.swift; sourceTree = "<group>"; };
+		BC3802132AB5E7CC001AE399 /* IOAudioCaptureUnit.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOAudioCaptureUnit.swift; sourceTree = "<group>"; };
 		BC3E384329C216BB007CD972 /* ADTSReaderTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ADTSReaderTests.swift; sourceTree = "<group>"; };
 		BC44A1A823D31E92002D4297 /* AudioCodecRingBuffer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioCodecRingBuffer.swift; sourceTree = "<group>"; wrapsLines = 1; };
 		BC4914A128DDD33D009E2DF6 /* VTSessionConvertible.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VTSessionConvertible.swift; sourceTree = "<group>"; };
@@ -1044,6 +1048,7 @@
 				2941746A22D069B300A2944F /* AudioEffect.swift */,
 				BC9F9C7726F8C16600B01ED0 /* Choreographer.swift */,
 				299B13261D3B751400A1E8F5 /* HKView.swift */,
+				BC3802132AB5E7CC001AE399 /* IOAudioCaptureUnit.swift */,
 				BC31DBD12A653D1600C4DEA3 /* IOAudioMonitor.swift */,
 				BC5019C02A6D266B0046E02F /* IOAudioMonitorRingBuffer.swift */,
 				29B876891CD70AFE00FC07DA /* IOAudioUnit.swift */,
@@ -1053,6 +1058,7 @@
 				BCA2252B293CC5B600DD7CB2 /* IOScreenCaptureUnit.swift */,
 				299B131C1D35272D00A1E8F5 /* IOUIScreenCaptureUnit.swift */,
 				BC570B4728E9ACC10098A12C /* IOUnit.swift */,
+				BC3802112AB5E770001AE399 /* IOVideoCaptureUnit.swift */,
 				29B8768E1CD70AFE00FC07DA /* IOVideoUnit.swift */,
 				BC959EEE296EE4190067BA97 /* ImageTransform.swift */,
 				2999C3742071138F00892E55 /* MTHKView.swift */,
@@ -1715,6 +1721,7 @@
 				29B876871CD70AE800FC07DA /* TSProgram.swift in Sources */,
 				BC4C9EAF23F2E736004A14F2 /* AudioStreamBasicDescription+Extension.swift in Sources */,
 				BC558268240BB40E00011AC0 /* RTMPStreamInfo.swift in Sources */,
+				BC3802122AB5E770001AE399 /* IOVideoCaptureUnit.swift in Sources */,
 				298BCF331DD4C44A007FF86A /* AnyUtil.swift in Sources */,
 				29B876B01CD70B2800FC07DA /* RTMPConnection.swift in Sources */,
 				BCC1A72B264FAC1800661156 /* ESSpecificData.swift in Sources */,
@@ -1729,6 +1736,7 @@
 				BC1DC4FB2A02868900E928ED /* FLVVideoFourCC.swift in Sources */,
 				2976A47E1D48C5C700B53EF2 /* IORecorder.swift in Sources */,
 				BC110257292E661E00D48035 /* MultiCamCaptureSettings.swift in Sources */,
+				BC3802142AB5E7CC001AE399 /* IOAudioCaptureUnit.swift in Sources */,
 				29B876B21CD70B2800FC07DA /* RTMPMuxer.swift in Sources */,
 				2958912E1EEB8F4100CE51E1 /* FLVSoundType.swift in Sources */,
 				BC0D236D26331BAB001DDA0C /* DataBuffer.swift in Sources */,
@@ -2336,7 +2344,7 @@
 				FRAMEWORK_SEARCH_PATHS = "$(inherited)";
 				GCC_OPTIMIZATION_LEVEL = 0;
 				INFOPLIST_FILE = "$(SRCROOT)/Examples/iOS/Info.plist";
-				IPHONEOS_DEPLOYMENT_TARGET = 11.0;
+				IPHONEOS_DEPLOYMENT_TARGET = 12.0;
 				LD_RUNPATH_SEARCH_PATHS = (
 					"$(inherited)",
 					"@executable_path/Frameworks",
@@ -2368,7 +2376,7 @@
 				ENABLE_BITCODE = NO;
 				FRAMEWORK_SEARCH_PATHS = "$(inherited)";
 				INFOPLIST_FILE = "$(SRCROOT)/Examples/iOS/Info.plist";
-				IPHONEOS_DEPLOYMENT_TARGET = 11.0;
+				IPHONEOS_DEPLOYMENT_TARGET = 12.0;
 				LD_RUNPATH_SEARCH_PATHS = (
 					"$(inherited)",
 					"@executable_path/Frameworks",
diff --git a/Sources/Extension/AVCaptureDevice+Extension.swift b/Sources/Extension/AVCaptureDevice+Extension.swift
index a8d11833b..827c020f2 100644
--- a/Sources/Extension/AVCaptureDevice+Extension.swift
+++ b/Sources/Extension/AVCaptureDevice+Extension.swift
@@ -1,7 +1,7 @@
 import AVFoundation
 import Foundation
 
-#if os(iOS) || os(macOS)
+@available(tvOS 17.0, *)
 extension AVCaptureDevice {
     func videoFormat(width: Int32, height: Int32, frameRate: Float64, isMultiCamSupported: Bool) -> AVCaptureDevice.Format? {
         if isMultiCamSupported {
@@ -19,4 +19,3 @@ extension AVCaptureDevice {
         }
     }
 }
-#endif
diff --git a/Sources/Extension/AVCaptureDevice.Format+Extension.swift b/Sources/Extension/AVCaptureDevice.Format+Extension.swift
index 9539dcb6e..ee0bdf286 100644
--- a/Sources/Extension/AVCaptureDevice.Format+Extension.swift
+++ b/Sources/Extension/AVCaptureDevice.Format+Extension.swift
@@ -16,7 +16,7 @@ extension AVCaptureDevice.Format {
 }
 #endif
 
-#if os(iOS) || os(macOS)
+@available(tvOS 17.0, *)
 extension AVCaptureDevice.Format {
     func isFrameRateSupported(_ frameRate: Float64) -> Bool {
         var durations: [CMTime] = []
@@ -41,4 +41,3 @@ extension AVCaptureDevice.Format {
         return false
     }
 }
-#endif
diff --git a/Sources/Extension/AVCaptureSession+Extension.swift b/Sources/Extension/AVCaptureSession+Extension.swift
index b613a2f45..338cd0610 100644
--- a/Sources/Extension/AVCaptureSession+Extension.swift
+++ b/Sources/Extension/AVCaptureSession+Extension.swift
@@ -17,7 +17,8 @@ extension AVCaptureSession {
         }
     }
 }
-#elseif os(iOS) || os(macOS)
+#else
+@available(tvOS 17.0, *)
 extension AVCaptureSession {
     @available(iOS, obsoleted: 16.0)
     var isMultitaskingCameraAccessSupported: Bool {
diff --git a/Sources/Extension/AVCaptureSession.Preset+Extension.swift b/Sources/Extension/AVCaptureSession.Preset+Extension.swift
index eed21b09d..160e0e0d2 100644
--- a/Sources/Extension/AVCaptureSession.Preset+Extension.swift
+++ b/Sources/Extension/AVCaptureSession.Preset+Extension.swift
@@ -8,8 +8,10 @@ extension AVCaptureSession.Preset {
 }
 #endif
 
-#if os(iOS) || os(macOS)
+@available(tvOS 17.0, *)
 extension AVCaptureSession.Preset {
+    static let `default`: AVCaptureSession.Preset = .hd1280x720
+
     var width: Int32? {
         switch self {
         case .hd1920x1080:
@@ -40,4 +42,3 @@ extension AVCaptureSession.Preset {
         }
     }
 }
-#endif
diff --git a/Sources/Extension/AVFrameRateRange+Extension.swift b/Sources/Extension/AVFrameRateRange+Extension.swift
index e62cd6dbe..ae961a8dd 100644
--- a/Sources/Extension/AVFrameRateRange+Extension.swift
+++ b/Sources/Extension/AVFrameRateRange+Extension.swift
@@ -1,7 +1,7 @@
 import AVFoundation
 import Foundation
 
-#if os(iOS) || os(macOS)
+@available(tvOS 17.0, *)
 extension AVFrameRateRange {
     func clamp(rate: Float64) -> Float64 {
         max(minFrameRate, min(maxFrameRate, rate))
@@ -11,4 +11,3 @@ extension AVFrameRateRange {
         (minFrameRate...maxFrameRate) ~= frameRate
     }
 }
-#endif
diff --git a/Sources/Media/IOAudioCaptureUnit.swift b/Sources/Media/IOAudioCaptureUnit.swift
new file mode 100644
index 000000000..bc75bcd1e
--- /dev/null
+++ b/Sources/Media/IOAudioCaptureUnit.swift
@@ -0,0 +1,34 @@
+import AVFoundation
+import Foundation
+
+#if !os(tvOS)
+@available(tvOS 17.0, *)
+final class IOAudioCaptureUnit: IOCaptureUnit {
+    typealias Output = AVCaptureAudioDataOutput
+
+    private(set) var device: AVCaptureDevice?
+    var input: AVCaptureInput?
+    var output: Output?
+    var connection: AVCaptureConnection?
+
+    func attachDevice(_ device: AVCaptureDevice?, audioUnit: IOAudioUnit) throws {
+        setSampleBufferDelegate(nil)
+        detachSession(audioUnit.mixer?.session)
+        guard let device else {
+            self.device = nil
+            input = nil
+            output = nil
+            return
+        }
+        self.device = device
+        input = try AVCaptureDeviceInput(device: device)
+        output = AVCaptureAudioDataOutput()
+        attachSession(audioUnit.mixer?.session)
+        setSampleBufferDelegate(audioUnit)
+    }
+
+    func setSampleBufferDelegate(_ audioUnit: IOAudioUnit?) {
+        output?.setSampleBufferDelegate(audioUnit, queue: audioUnit?.lockQueue)
+    }
+}
+#endif
diff --git a/Sources/Media/IOCaptureUnit.swift b/Sources/Media/IOCaptureUnit.swift
index 7844941a8..30d614545 100644
--- a/Sources/Media/IOCaptureUnit.swift
+++ b/Sources/Media/IOCaptureUnit.swift
@@ -1,4 +1,3 @@
-#if os(iOS) || os(macOS)
 import AVFoundation
 import Foundation
 
@@ -6,6 +5,7 @@ enum IOCaptureUnitError: Error {
     case noDeviceAvailable
 }
 
+@available(tvOS 17.0, *)
 protocol IOCaptureUnit {
     associatedtype Output: AVCaptureOutput
 
@@ -14,6 +14,7 @@ protocol IOCaptureUnit {
     var connection: AVCaptureConnection? { get set }
 }
 
+@available(tvOS 17.0, *)
 extension IOCaptureUnit {
     func attachSession(_ session: AVCaptureSession?) {
         guard let session else {
@@ -56,186 +57,3 @@ extension IOCaptureUnit {
         }
     }
 }
-
-/// An object that provides the interface to control the AVCaptureDevice's transport behavior.
-public class IOVideoCaptureUnit: IOCaptureUnit {
-    /// The default videoSettings for a device.
-    public static let defaultVideoSettings: [NSString: AnyObject] = [
-        kCVPixelBufferPixelFormatTypeKey: NSNumber(value: kCVPixelFormatType_32BGRA)
-    ]
-
-    typealias Output = AVCaptureVideoDataOutput
-
-    /// The current video device object.
-    public private(set) var device: AVCaptureDevice?
-    var input: AVCaptureInput?
-    var output: Output? {
-        didSet {
-            output?.alwaysDiscardsLateVideoFrames = true
-            output?.videoSettings = IOVideoCaptureUnit.defaultVideoSettings as [String: Any]
-        }
-    }
-    var connection: AVCaptureConnection?
-
-    /// Specifies the videoOrientation indicates whether to rotate the video flowing through the connection to a given orientation.
-    public var videoOrientation: AVCaptureVideoOrientation = .portrait {
-        didSet {
-            output?.connections.filter { $0.isVideoOrientationSupported }.forEach {
-                $0.videoOrientation = videoOrientation
-            }
-        }
-    }
-
-    /// Spcifies the video mirroed indicates whether the video flowing through the connection should be mirrored about its vertical axis.
-    public var isVideoMirrored = false {
-        didSet {
-            output?.connections.filter { $0.isVideoMirroringSupported }.forEach {
-                $0.isVideoMirrored = isVideoMirrored
-            }
-        }
-    }
-
-    #if os(iOS)
-    /// Specifies the preferredVideoStabilizationMode most appropriate for use with the connection.
-    public var preferredVideoStabilizationMode: AVCaptureVideoStabilizationMode = .off {
-        didSet {
-            output?.connections.filter { $0.isVideoStabilizationSupported }.forEach {
-                $0.preferredVideoStabilizationMode = preferredVideoStabilizationMode
-            }
-        }
-    }
-    #endif
-
-    func attachDevice(_ device: AVCaptureDevice?, videoUnit: IOVideoUnit) throws {
-        setSampleBufferDelegate(nil)
-        detachSession(videoUnit.mixer?.session)
-        guard let device else {
-            self.device = nil
-            input = nil
-            output = nil
-            connection = nil
-            return
-        }
-        self.device = device
-        input = try AVCaptureDeviceInput(device: device)
-        output = AVCaptureVideoDataOutput()
-        #if os(iOS)
-        if let output, #available(iOS 13, *), let port = input?.ports.first(where: { $0.mediaType == .video && $0.sourceDeviceType == device.deviceType && $0.sourceDevicePosition == device.position }) {
-            connection = AVCaptureConnection(inputPorts: [port], output: output)
-        } else {
-            connection = nil
-        }
-        #else
-        if let output, let port = input?.ports.first(where: { $0.mediaType == .video }) {
-            connection = AVCaptureConnection(inputPorts: [port], output: output)
-        } else {
-            connection = nil
-        }
-        #endif
-        attachSession(videoUnit.mixer?.session)
-        output?.connections.forEach {
-            if $0.isVideoMirroringSupported {
-                $0.isVideoMirrored = isVideoMirrored
-            }
-            if $0.isVideoOrientationSupported {
-                $0.videoOrientation = videoOrientation
-            }
-            #if os(iOS)
-            if $0.isVideoStabilizationSupported {
-                $0.preferredVideoStabilizationMode = preferredVideoStabilizationMode
-            }
-            #endif
-        }
-        setSampleBufferDelegate(videoUnit)
-    }
-
-    @available(iOS, unavailable)
-    func attachScreen(_ screen: AVCaptureScreenInput?, videoUnit: IOVideoUnit) {
-        setSampleBufferDelegate(nil)
-        detachSession(videoUnit.mixer?.session)
-        device = nil
-        input = screen
-        output = AVCaptureVideoDataOutput()
-        connection = nil
-        attachSession(videoUnit.mixer?.session)
-        setSampleBufferDelegate(videoUnit)
-    }
-
-    func setFrameRate(_ frameRate: Float64) {
-        guard let device else {
-            return
-        }
-        do {
-            try device.lockForConfiguration()
-            if device.activeFormat.isFrameRateSupported(frameRate) {
-                device.activeVideoMinFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate))
-                device.activeVideoMaxFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate))
-            } else {
-                if let format = device.videoFormat(
-                    width: device.activeFormat.formatDescription.dimensions.width,
-                    height: device.activeFormat.formatDescription.dimensions.height,
-                    frameRate: frameRate,
-                    isMultiCamSupported: device.activeFormat.isMultiCamSupported
-                ) {
-                    device.activeFormat = format
-                    device.activeVideoMinFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate))
-                    device.activeVideoMaxFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate))
-                }
-            }
-            device.unlockForConfiguration()
-        } catch {
-            logger.error("while locking device for fps:", error)
-        }
-    }
-
-    func setTorchMode(_ torchMode: AVCaptureDevice.TorchMode) {
-        guard let device, device.isTorchModeSupported(torchMode) else {
-            return
-        }
-        do {
-            try device.lockForConfiguration()
-            device.torchMode = torchMode
-            device.unlockForConfiguration()
-        } catch {
-            logger.error("while setting torch:", error)
-        }
-    }
-
-    func setSampleBufferDelegate(_ videoUnit: IOVideoUnit?) {
-        if let videoUnit {
-            videoOrientation = videoUnit.videoOrientation
-            setFrameRate(videoUnit.frameRate)
-        }
-        output?.setSampleBufferDelegate(videoUnit, queue: videoUnit?.lockQueue)
-    }
-}
-
-class IOAudioCaptureUnit: IOCaptureUnit {
-    typealias Output = AVCaptureAudioDataOutput
-
-    private(set) var device: AVCaptureDevice?
-    var input: AVCaptureInput?
-    var output: Output?
-    var connection: AVCaptureConnection?
-
-    func attachDevice(_ device: AVCaptureDevice?, audioUnit: IOAudioUnit) throws {
-        setSampleBufferDelegate(nil)
-        detachSession(audioUnit.mixer?.session)
-        guard let device else {
-            self.device = nil
-            input = nil
-            output = nil
-            return
-        }
-        self.device = device
-        input = try AVCaptureDeviceInput(device: device)
-        output = AVCaptureAudioDataOutput()
-        attachSession(audioUnit.mixer?.session)
-        setSampleBufferDelegate(audioUnit)
-    }
-
-    func setSampleBufferDelegate(_ audioUnit: IOAudioUnit?) {
-        output?.setSampleBufferDelegate(audioUnit, queue: audioUnit?.lockQueue)
-    }
-}
-#endif
diff --git a/Sources/Media/IOMixer.swift b/Sources/Media/IOMixer.swift
index 8834aa1c7..e4e666707 100644
--- a/Sources/Media/IOMixer.swift
+++ b/Sources/Media/IOMixer.swift
@@ -2,15 +2,9 @@ import AVFoundation
 #if canImport(SwiftPMSupport)
 import SwiftPMSupport
 #endif
-
 #if os(iOS)
 import UIKit
 #endif
-#if os(iOS) || os(macOS)
-extension AVCaptureSession.Preset {
-    static let `default`: AVCaptureSession.Preset = .hd1280x720
-}
-#endif
 
 protocol IOMixerDelegate: AnyObject {
     func mixer(_ mixer: IOMixer, didOutput audio: AVAudioPCMBuffer, presentationTimeStamp: CMTime)
diff --git a/Sources/Media/IOVideoCaptureUnit.swift b/Sources/Media/IOVideoCaptureUnit.swift
new file mode 100644
index 000000000..db4b0832b
--- /dev/null
+++ b/Sources/Media/IOVideoCaptureUnit.swift
@@ -0,0 +1,157 @@
+#if !os(tvOS)
+import AVFoundation
+import Foundation
+
+/// An object that provides the interface to control the AVCaptureDevice's transport behavior.
+public class IOVideoCaptureUnit: IOCaptureUnit {
+    /// The default videoSettings for a device.
+    public static let defaultVideoSettings: [NSString: AnyObject] = [
+        kCVPixelBufferPixelFormatTypeKey: NSNumber(value: kCVPixelFormatType_32BGRA)
+    ]
+
+    typealias Output = AVCaptureVideoDataOutput
+
+    /// The current video device object.
+    public private(set) var device: AVCaptureDevice?
+    var input: AVCaptureInput?
+    var output: Output? {
+        didSet {
+            output?.alwaysDiscardsLateVideoFrames = true
+            output?.videoSettings = IOVideoCaptureUnit.defaultVideoSettings as [String: Any]
+        }
+    }
+    var connection: AVCaptureConnection?
+
+    /// Specifies the videoOrientation that indicates whether to rotate the video flowing through the connection to a given orientation.
+    public var videoOrientation: AVCaptureVideoOrientation = .portrait {
+        didSet {
+            output?.connections.filter { $0.isVideoOrientationSupported }.forEach {
+                $0.videoOrientation = videoOrientation
+            }
+        }
+    }
+
+    /// Specifies whether the video flowing through the connection should be mirrored about its vertical axis.
+    public var isVideoMirrored = false {
+        didSet {
+            output?.connections.filter { $0.isVideoMirroringSupported }.forEach {
+                $0.isVideoMirrored = isVideoMirrored
+            }
+        }
+    }
+
+    #if os(iOS)
+    /// Specifies the preferredVideoStabilizationMode most appropriate for use with the connection.
+    public var preferredVideoStabilizationMode: AVCaptureVideoStabilizationMode = .off {
+        didSet {
+            output?.connections.filter { $0.isVideoStabilizationSupported }.forEach {
+                $0.preferredVideoStabilizationMode = preferredVideoStabilizationMode
+            }
+        }
+    }
+    #endif
+
+    func attachDevice(_ device: AVCaptureDevice?, videoUnit: IOVideoUnit) throws {
+        setSampleBufferDelegate(nil)
+        detachSession(videoUnit.mixer?.session)
+        guard let device else {
+            self.device = nil
+            input = nil
+            output = nil
+            connection = nil
+            return
+        }
+        self.device = device
+        input = try AVCaptureDeviceInput(device: device)
+        output = AVCaptureVideoDataOutput()
+        #if os(iOS)
+        if let output, #available(iOS 13, *), let port = input?.ports.first(where: { $0.mediaType == .video && $0.sourceDeviceType == device.deviceType && $0.sourceDevicePosition == device.position }) {
+            connection = AVCaptureConnection(inputPorts: [port], output: output)
+        } else {
+            connection = nil
+        }
+        #else
+        if let output, let port = input?.ports.first(where: { $0.mediaType == .video }) {
+            connection = AVCaptureConnection(inputPorts: [port], output: output)
+        } else {
+            connection = nil
+        }
+        #endif
+        attachSession(videoUnit.mixer?.session)
+        output?.connections.forEach {
+            if $0.isVideoMirroringSupported {
+                $0.isVideoMirrored = isVideoMirrored
+            }
+            if $0.isVideoOrientationSupported {
+                $0.videoOrientation = videoOrientation
+            }
+            #if os(iOS)
+            if $0.isVideoStabilizationSupported {
+                $0.preferredVideoStabilizationMode = preferredVideoStabilizationMode
+            }
+            #endif
+        }
+        setSampleBufferDelegate(videoUnit)
+    }
+
+    @available(iOS, unavailable)
+    func attachScreen(_ screen: AVCaptureScreenInput?, videoUnit: IOVideoUnit) {
+        setSampleBufferDelegate(nil)
+        detachSession(videoUnit.mixer?.session)
+        device = nil
+        input = screen
+        output = AVCaptureVideoDataOutput()
+        connection = nil
+        attachSession(videoUnit.mixer?.session)
+        setSampleBufferDelegate(videoUnit)
+    }
+
+    func setFrameRate(_ frameRate: Float64) {
+        guard let device else {
+            return
+        }
+        do {
+            try device.lockForConfiguration()
+            if device.activeFormat.isFrameRateSupported(frameRate) {
+                device.activeVideoMinFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate))
+                device.activeVideoMaxFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate))
+            } else {
+                if let format = device.videoFormat(
+                    width: device.activeFormat.formatDescription.dimensions.width,
+                    height: device.activeFormat.formatDescription.dimensions.height,
+                    frameRate: frameRate,
+                    isMultiCamSupported: device.activeFormat.isMultiCamSupported
+                ) {
+                    device.activeFormat = format
+                    device.activeVideoMinFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate))
+                    device.activeVideoMaxFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate))
+                }
+            }
+            device.unlockForConfiguration()
+        } catch {
+            logger.error("while locking device for fps:", error)
+        }
+    }
+
+    func setTorchMode(_ torchMode: AVCaptureDevice.TorchMode) {
+        guard let device, device.isTorchModeSupported(torchMode) else {
+            return
+        }
+        do {
+            try device.lockForConfiguration()
+            device.torchMode = torchMode
+            device.unlockForConfiguration()
+        } catch {
+            logger.error("while setting torch:", error)
+        }
+    }
+
+    func setSampleBufferDelegate(_ videoUnit: IOVideoUnit?) {
+        if let videoUnit {
+            videoOrientation = videoUnit.videoOrientation
+            setFrameRate(videoUnit.frameRate)
+        }
+        output?.setSampleBufferDelegate(videoUnit, queue: videoUnit?.lockQueue)
+    }
+}
+#endif
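Note: the pattern this patch adopts is to replace compile-time #if os(iOS) || os(macOS) guards with @available(tvOS 17.0, *) annotations, so the capture extensions compile on tvOS and call sites gate usage at runtime. A minimal sketch of such a call site follows; attachCameraIfAvailable and its body are illustrative only, not part of the patch:

import AVFoundation

func attachCameraIfAvailable() {
    // AVCaptureDevice capture APIs exist on tvOS only from 17.0, so a
    // runtime availability check replaces the old compile-time guard.
    if #available(tvOS 17.0, iOS 13.0, macOS 10.15, *) {
        let camera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
        // A device obtained here could then be handed to something like
        // IOVideoCaptureUnit.attachDevice(_:videoUnit:) inside the module.
        _ = camera
    } else {
        // Earlier tvOS versions have no capture support; fall back or no-op.
    }
}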