diff --git a/Sources/Codec/AudioCodec.swift b/Sources/Codec/AudioCodec.swift
index 40cb110c2..1a5038a2b 100644
--- a/Sources/Codec/AudioCodec.swift
+++ b/Sources/Codec/AudioCodec.swift
@@ -53,6 +53,9 @@ public class AudioCodec {
             }
         }
     }
+    var outputFormat: AVAudioFormat? {
+        return audioConverter?.outputFormat
+    }
     private var cursor: Int = 0
     private var inputBuffers: [AVAudioBuffer] = []
     private var outputBuffers: [AVAudioBuffer] = []
diff --git a/Sources/Codec/VTSessionMode.swift b/Sources/Codec/VTSessionMode.swift
index fc5b0b5cf..72d21aa62 100644
--- a/Sources/Codec/VTSessionMode.swift
+++ b/Sources/Codec/VTSessionMode.swift
@@ -37,7 +37,7 @@ enum VTSessionMode {
             }
             return session
         case .decompression:
-            guard let formatDescription = videoCodec.formatDescription else {
+            guard let formatDescription = videoCodec.outputFormat else {
                 videoCodec.delegate?.videoCodec(videoCodec, errorOccurred: .failedToCreate(status: kVTParameterErr))
                 return nil
             }
diff --git a/Sources/Codec/VideoCodec.swift b/Sources/Codec/VideoCodec.swift
index 1e3ff7966..51cf2ad69 100644
--- a/Sources/Codec/VideoCodec.swift
+++ b/Sources/Codec/VideoCodec.swift
@@ -62,12 +62,12 @@ public class VideoCodec {
     var lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.VideoCodec.lock")
     var expectedFrameRate = IOMixer.defaultFrameRate

-    var formatDescription: CMFormatDescription? {
+    private(set) var outputFormat: CMFormatDescription? {
         didSet {
-            guard !CMFormatDescriptionEqual(formatDescription, otherFormatDescription: oldValue) else {
+            guard !CMFormatDescriptionEqual(outputFormat, otherFormatDescription: oldValue) else {
                 return
             }
-            delegate?.videoCodec(self, didOutput: formatDescription)
+            delegate?.videoCodec(self, didOutput: outputFormat)
         }
     }
     var needsSync: Atomic<Bool> = .init(true)
@@ -108,7 +108,7 @@ public class VideoCodec {
                 delegate?.videoCodec(self, errorOccurred: .failedToFlame(status: status))
                 return
             }
-            formatDescription = sampleBuffer.formatDescription
+            outputFormat = sampleBuffer.formatDescription
             delegate?.videoCodec(self, didOutput: sampleBuffer)
         }
     }
@@ -214,7 +214,7 @@ extension VideoCodec: Running {
             self.session = nil
            self.invalidateSession = true
             self.needsSync.mutate { $0 = true }
-            self.formatDescription = nil
+            self.outputFormat = nil
             #if os(iOS)
             NotificationCenter.default.removeObserver(self, name: AVAudioSession.interruptionNotification, object: nil)
             NotificationCenter.default.removeObserver(self, name: UIApplication.willEnterForegroundNotification, object: nil)
diff --git a/Sources/Media/HKView.swift b/Sources/Media/HKView.swift
index 203f8408c..6a2bc8c7a 100644
--- a/Sources/Media/HKView.swift
+++ b/Sources/Media/HKView.swift
@@ -28,11 +28,6 @@
         }
     }

-    /// A value that displays a video format.
-    public var videoFormatDescription: CMVideoFormatDescription? {
-        currentStream?.mixer.videoIO.formatDescription
-    }
-
     #if !os(tvOS)
     public var videoOrientation: AVCaptureVideoOrientation = .portrait {
         didSet {
@@ -139,11 +134,6 @@
         }
     }

-    /// A value that displays a video format.
-    public var videoFormatDescription: CMVideoFormatDescription? {
-        currentStream?.mixer.videoIO.formatDescription
-    }
-
     public var videoOrientation: AVCaptureVideoOrientation = .portrait

     private var currentSampleBuffer: CMSampleBuffer?
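Note on the VideoCodec change above: the encoder's format is now exposed as a read-only `outputFormat`, and the `didSet` observer still deduplicates through `CMFormatDescriptionEqual`, so `videoCodec(_:didOutput:)` fires once per format transition rather than once per frame. A minimal sketch of the same pattern; `FormatTracker` and `onChange` are illustrative names, not part of the PR:

    import CoreMedia

    // Sketch: the callback fires only when the format actually changes,
    // mirroring the private(set) outputFormat + didSet pattern in the diff.
    final class FormatTracker {
        var onChange: ((CMFormatDescription?) -> Void)?
        private(set) var outputFormat: CMFormatDescription? {
            didSet {
                guard !CMFormatDescriptionEqual(outputFormat, otherFormatDescription: oldValue) else {
                    return
                }
                onChange?(outputFormat)
            }
        }
        func track(_ sampleBuffer: CMSampleBuffer) {
            outputFormat = CMSampleBufferGetFormatDescription(sampleBuffer)
        }
    }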
diff --git a/Sources/Media/IOAudioResampler.swift b/Sources/Media/IOAudioResampler.swift
index 11ad13ba3..41bbf32cb 100644
--- a/Sources/Media/IOAudioResampler.swift
+++ b/Sources/Media/IOAudioResampler.swift
@@ -76,6 +76,10 @@ final class IOAudioResampler<T: IOAudioResamplerDelegate> {
     }

     weak var delegate: T?
+
+    var inputFormat: AVAudioFormat? {
+        return audioConverter?.inputFormat
+    }
     var outputFormat: AVAudioFormat? {
         return audioConverter?.outputFormat
     }
diff --git a/Sources/Media/IOAudioUnit.swift b/Sources/Media/IOAudioUnit.swift
index 1748fd864..90fec9e6a 100644
--- a/Sources/Media/IOAudioUnit.swift
+++ b/Sources/Media/IOAudioUnit.swift
@@ -5,6 +5,8 @@ import SwiftPMSupport
 #endif

 final class IOAudioUnit: NSObject, IOUnit {
+    typealias FormatDescription = CMAudioFormatDescription
+
     lazy var codec: AudioCodec = {
         var codec = AudioCodec()
         codec.lockQueue = lockQueue
@@ -33,6 +35,10 @@ final class IOAudioUnit: NSObject, IOUnit {
             resampler.settings = settings.makeAudioResamplerSettings()
         }
     }
+    var inputFormat: FormatDescription?
+    var outputFormat: FormatDescription? {
+        return codec.outputFormat?.formatDescription
+    }
     private(set) var presentationTimeStamp: CMTime = .invalid
     private lazy var resampler: IOAudioResampler<IOAudioUnit> = {
         var resampler = IOAudioResampler<IOAudioUnit>()
@@ -65,6 +71,7 @@ final class IOAudioUnit: NSObject, IOUnit {
         guard let device else {
             try capture.attachDevice(nil, audioUnit: self)
             presentationTimeStamp = .invalid
+            inputFormat = nil
             return
         }
         try capture.attachDevice(device, audioUnit: self)
@@ -152,6 +159,7 @@ extension IOAudioUnit: IOAudioResamplerDelegate {
     }

     func resampler(_ resampler: IOAudioResampler<IOAudioUnit>, didOutput audioFormat: AVAudioFormat) {
+        inputFormat = resampler.inputFormat?.formatDescription
         codec.inSourceFormat = audioFormat.formatDescription.audioStreamBasicDescription
         monitor.inSourceFormat = audioFormat.formatDescription.audioStreamBasicDescription
     }
diff --git a/Sources/Media/IOMixer.swift b/Sources/Media/IOMixer.swift
index 9883f2839..0c958370d 100644
--- a/Sources/Media/IOMixer.swift
+++ b/Sources/Media/IOMixer.swift
@@ -222,7 +222,6 @@ public final class IOMixer {
         case kCMMediaType_Audio:
             audioIO.codec.appendSampleBuffer(sampleBuffer)
         case kCMMediaType_Video:
-            videoIO.codec.formatDescription = sampleBuffer.formatDescription
             videoIO.codec.appendSampleBuffer(sampleBuffer)
         default:
             break
diff --git a/Sources/Media/IOUnit.swift b/Sources/Media/IOUnit.swift
index 82cf807b1..e29758805 100644
--- a/Sources/Media/IOUnit.swift
+++ b/Sources/Media/IOUnit.swift
@@ -5,8 +5,12 @@ import Foundation
 public typealias AVCodecDelegate = AudioCodecDelegate & VideoCodecDelegate

 protocol IOUnit {
+    associatedtype FormatDescription
+
     var mixer: IOMixer? { get set }
     var muted: Bool { get set }
+    var inputFormat: FormatDescription? { get }
+    var outputFormat: FormatDescription? { get }

     func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer)
 }
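The protocol change above is what lets the audio and video units share one format-tracking API: each conformer picks its own `FormatDescription` (CMAudioFormatDescription for IOAudioUnit, CMVideoFormatDescription for IOVideoUnit). A sketch of the kind of generic code this enables; `logFormats` is illustrative, not part of the PR:

    // Works for any IOUnit conformer once the associatedtype is in place.
    func logFormats<U: IOUnit>(_ unit: U) {
        print("input:", unit.inputFormat as Any)   // last attached device / received buffer
        print("output:", unit.outputFormat as Any) // what the codec currently emits
    }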
diff --git a/Sources/Media/IOVideoUnit.swift b/Sources/Media/IOVideoUnit.swift
index 76e37c2d6..9f7347206 100644
--- a/Sources/Media/IOVideoUnit.swift
+++ b/Sources/Media/IOVideoUnit.swift
@@ -2,6 +2,8 @@ import AVFoundation
 import CoreImage

 final class IOVideoUnit: NSObject, IOUnit {
+    typealias FormatDescription = CMVideoFormatDescription
+
     enum Error: Swift.Error {
         case multiCamNotSupported
     }
@@ -12,7 +14,6 @@ final class IOVideoUnit: NSObject, IOUnit {
     ]

     let lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.VideoIOComponent.lock")
-
     var context: CIContext = .init() {
         didSet {
             for effect in effects {
@@ -20,7 +21,6 @@ final class IOVideoUnit: NSObject, IOUnit {
             }
         }
     }
-
     weak var drawable: (any NetStreamDrawable)? {
         didSet {
             #if os(iOS) || os(macOS)
@@ -28,45 +28,14 @@ final class IOVideoUnit: NSObject, IOUnit {
             #endif
         }
     }
-
-    var formatDescription: CMVideoFormatDescription? {
-        didSet {
-            codec.formatDescription = formatDescription
-        }
-    }
-
+    var multiCamCaptureSettings: MultiCamCaptureSettings = .default
     lazy var codec: VideoCodec = {
         var codec = VideoCodec()
         codec.lockQueue = lockQueue
         return codec
     }()
-
     weak var mixer: IOMixer?
-
     var muted = false
-
-    private(set) var presentationTimeStamp: CMTime = .invalid
-    private(set) var effects: Set<VideoEffect> = []
-
-    private var extent = CGRect.zero {
-        didSet {
-            guard extent != oldValue else {
-                return
-            }
-            CVPixelBufferPoolCreate(nil, nil, attributes as CFDictionary?, &pixelBufferPool)
-            pixelBufferPool?.createPixelBuffer(&pixelBuffer)
-        }
-    }
-
-    private var attributes: [NSString: NSObject] {
-        var attributes: [NSString: NSObject] = Self.defaultAttributes
-        attributes[kCVPixelBufferWidthKey] = NSNumber(value: Int(extent.width))
-        attributes[kCVPixelBufferHeightKey] = NSNumber(value: Int(extent.height))
-        return attributes
-    }
-
-    private var pixelBufferPool: CVPixelBufferPool?
-
     var frameRate = IOMixer.defaultFrameRate {
         didSet {
             if #available(tvOS 17.0, *) {
@@ -75,7 +44,6 @@ final class IOVideoUnit: NSObject, IOUnit {
             }
         }
     }
-
     #if !os(tvOS)
     var videoOrientation: AVCaptureVideoOrientation = .portrait {
         didSet {
@@ -98,7 +66,6 @@ final class IOVideoUnit: NSObject, IOUnit {
         }
     }
     #endif
-
     var torch = false {
         didSet {
             guard torch != oldValue else {
@@ -109,7 +76,10 @@ final class IOVideoUnit: NSObject, IOUnit {
             }
         }
     }
-
+    var inputFormat: FormatDescription?
+    var outputFormat: FormatDescription? {
+        codec.outputFormat
+    }
     #if os(tvOS)
     private var _capture: Any?
     @available(tvOS 17.0, *)
@@ -132,10 +102,25 @@ final class IOVideoUnit: NSObject, IOUnit {
     private(set) var capture: IOVideoCaptureUnit = .init()
     private(set) var multiCamCapture: IOVideoCaptureUnit = .init()
     #endif
-
-    var multiCamCaptureSettings: MultiCamCaptureSettings = .default
-
+    private(set) var presentationTimeStamp: CMTime = .invalid
+    private(set) var effects: Set<VideoEffect> = []
+    private var extent = CGRect.zero {
+        didSet {
+            guard extent != oldValue else {
+                return
+            }
+            CVPixelBufferPoolCreate(nil, nil, attributes as CFDictionary?, &pixelBufferPool)
+            pixelBufferPool?.createPixelBuffer(&pixelBuffer)
+        }
+    }
+    private var attributes: [NSString: NSObject] {
+        var attributes: [NSString: NSObject] = Self.defaultAttributes
+        attributes[kCVPixelBufferWidthKey] = NSNumber(value: Int(extent.width))
+        attributes[kCVPixelBufferHeightKey] = NSNumber(value: Int(extent.height))
+        return attributes
+    }
     private var pixelBuffer: CVPixelBuffer?
+    private var pixelBufferPool: CVPixelBufferPool?
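With the property shuffle above, `IOVideoUnit` now records `inputFormat` from every buffer passed to `appendSampleBuffer` (see the hunk below) and clears it when the camera is detached, so the capture-side format is available without waiting for the encoder. A hedged sketch of a convenience this enables; the extension is hypothetical, not part of the PR:

    import CoreMedia

    // Hypothetical helper: read the capture dimensions from the most
    // recent input format, independent of the encoder's outputFormat.
    extension IOMixer {
        var captureDimensions: CMVideoDimensions? {
            guard let format = videoIO.inputFormat else {
                return nil
            }
            return CMVideoFormatDescriptionGetDimensions(format)
        }
    }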
     private var multiCamSampleBuffer: CMSampleBuffer?

     deinit {
@@ -161,6 +146,7 @@ final class IOVideoUnit: NSObject, IOUnit {
             capture.detachSession(mixer.session)
             try capture.attachDevice(nil, videoUnit: self)
             presentationTimeStamp = .invalid
+            inputFormat = nil
             return
         }
         mixer.session.beginConfiguration()
@@ -248,6 +234,7 @@ final class IOVideoUnit: NSObject, IOUnit {
     }

     func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
+        inputFormat = sampleBuffer.formatDescription
         guard let buffer = sampleBuffer.imageBuffer else {
             return
         }
diff --git a/Sources/Media/MTHKView.swift b/Sources/Media/MTHKView.swift
index 72cad5642..e6a5e818a 100644
--- a/Sources/Media/MTHKView.swift
+++ b/Sources/Media/MTHKView.swift
@@ -9,10 +9,6 @@
     /// Specifies how the video is displayed within a player layer’s bounds.
     public var videoGravity: AVLayerVideoGravity = .resizeAspect

-    public var videoFormatDescription: CMVideoFormatDescription? {
-        currentStream?.mixer.videoIO.formatDescription
-    }
-
     #if !os(tvOS)
     public var videoOrientation: AVCaptureVideoOrientation = .portrait
     #endif
diff --git a/Sources/Media/PiPHKView.swift b/Sources/Media/PiPHKView.swift
index 11d2a3e0f..58b171653 100644
--- a/Sources/Media/PiPHKView.swift
+++ b/Sources/Media/PiPHKView.swift
@@ -25,11 +25,6 @@
         }
     }

-    /// A value that displays a video format.
-    public var videoFormatDescription: CMVideoFormatDescription? {
-        currentStream?.mixer.videoIO.formatDescription
-    }
-
     #if !os(tvOS)
     public var videoOrientation: AVCaptureVideoOrientation = .portrait {
         didSet {
@@ -119,11 +114,6 @@
         }
     }

-    /// A value that displays a video format.
-    public var videoFormatDescription: CMVideoFormatDescription? {
-        currentStream?.mixer.videoIO.formatDescription
-    }
-
     public var videoOrientation: AVCaptureVideoOrientation = .portrait {
         didSet {
             if Thread.isMainThread {
diff --git a/Sources/Net/NetStreamDrawable.swift b/Sources/Net/NetStreamDrawable.swift
index aa54ea7d3..6a40c3398 100644
--- a/Sources/Net/NetStreamDrawable.swift
+++ b/Sources/Net/NetStreamDrawable.swift
@@ -8,9 +8,6 @@ public protocol NetStreamDrawable: AnyObject {
     var videoOrientation: AVCaptureVideoOrientation { get set }
     #endif

-    /// The videoFormatDescription which is the current CMSampleBuffer.
-    var videoFormatDescription: CMVideoFormatDescription? { get }
-
     /// Attaches a drawable to a new NetStream object.
     func attachStream(_ stream: NetStream?)

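Dropping `videoFormatDescription` from `NetStreamDrawable` (and from HKView, MTHKView, and PiPHKView above) shrinks the drawable contract to orientation plus attach/enqueue. A sketch of a minimal conformer after this change, assuming the protocol's remaining requirements are `attachStream(_:)` and `enqueue(_:)` as in HaishinKit 1.x:

    import AVFoundation

    // Illustrative no-op drawable; not part of the PR.
    final class NullDrawable: NetStreamDrawable {
        #if !os(tvOS)
        var videoOrientation: AVCaptureVideoOrientation = .portrait
        #endif
        func attachStream(_ stream: NetStream?) {}
        func enqueue(_ sampleBuffer: CMSampleBuffer?) {}
    }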
diff --git a/Sources/RTMP/RTMPMessage.swift b/Sources/RTMP/RTMPMessage.swift
index 84df0b064..989c9d889 100644
--- a/Sources/RTMP/RTMPMessage.swift
+++ b/Sources/RTMP/RTMPMessage.swift
@@ -709,7 +709,7 @@ final class RTMPVideoMessage: RTMPMessage {
             dataReady: true,
             makeDataReadyCallback: nil,
             refcon: nil,
-            formatDescription: stream.mixer.videoIO.formatDescription,
+            formatDescription: stream.mixer.videoIO.inputFormat,
             sampleCount: 1,
             sampleTimingEntryCount: 1,
             sampleTimingArray: &timing,
@@ -728,11 +728,11 @@
         case .h264:
             var config = AVCDecoderConfigurationRecord()
             config.data = payload.subdata(in: FLVTagType.video.headerSize..

ASObject {
         var metadata: [String: Any] = [:]
-        #if os(iOS) || os(macOS)
-        if mixer.videoIO.capture.device != nil {
-            metadata["width"] = mixer.videoIO.codec.settings.videoSize.width
-            metadata["height"] = mixer.videoIO.codec.settings.videoSize.height
-            metadata["framerate"] = mixer.videoIO.frameRate
-            switch mixer.videoIO.codec.settings.format {
+        if mixer.videoIO.inputFormat != nil {
+            metadata["width"] = videoSettings.videoSize.width
+            metadata["height"] = videoSettings.videoSize.height
+            metadata["framerate"] = frameRate
+            switch videoSettings.format {
             case .h264:
                 metadata["videocodecid"] = FLVVideoCodec.avc.rawValue
             case .hevc:
                 metadata["videocodecid"] = FLVVideoFourCC.hevc.rawValue
             }
-            metadata["videodatarate"] = mixer.videoIO.codec.settings.bitRate / 1000
+            metadata["videodatarate"] = videoSettings.bitRate / 1000
         }
-        #endif
-        if let inSourceFormat = mixer.audioIO.codec.inSourceFormat {
+        if mixer.audioIO.inputFormat != nil {
             metadata["audiocodecid"] = FLVAudioCodec.aac.rawValue
             metadata["audiodatarate"] = audioSettings.bitRate / 1000
-            metadata["audiosamplerate"] = inSourceFormat.mSampleRate
+            if let outputFormat = mixer.audioIO.outputFormat?.audioStreamBasicDescription {
+                metadata["audiosamplerate"] = outputFormat.mSampleRate
+            }
         }
         return metadata
     }
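The metadata change is subtle but user-visible: `audiosamplerate` in the FLV onMetaData payload now comes from the converter's output format rather than the capture format, which matters whenever the resampler changes the rate. The video branch likewise keys off `inputFormat` instead of `capture.device != nil`, so buffers appended directly (without an attached camera) are covered and the platform `#if` guard can go. A worked example of the audio distinction; the rates are illustrative:

    import AVFoundation

    // A 48 kHz microphone feeding a converter configured for 44.1 kHz.
    let input = AVAudioFormat(standardFormatWithSampleRate: 48_000, channels: 1)
    let output = AVAudioFormat(standardFormatWithSampleRate: 44_100, channels: 1)
    // Players decode the AAC payload, so onMetaData must advertise the
    // encoded rate (44100), not the microphone's rate (48000).
    let audiosamplerate = output?.sampleRate ?? 0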