From 68ea5437360d182d23274df2f8e9bbf6a3f4cfdf Mon Sep 17 00:00:00 2001 From: shogo4405 Date: Wed, 11 Oct 2023 03:17:31 +0900 Subject: [PATCH] Implementation of structural changes related to IOMixer. --- Examples/iOS/IngestViewController.swift | 2 +- Examples/iOS/NetStreamSwitcher.swift | 4 +- Examples/iOS/Screencast/SampleHandler.swift | 6 +- HaishinKit.xcodeproj/project.pbxproj | 6 +- SRTHaishinKit/SRTStream.swift | 8 +- Sources/Codec/AudioCodec.swift | 47 +++---- Sources/Codec/VideoCodec.swift | 44 +++--- Sources/MPEG/TSWriter.swift | 131 ++++++++--------- Sources/Media/IOAudioMonitor.swift | 4 +- Sources/Media/IOAudioResampler.swift | 10 +- Sources/Media/IOAudioRingBuffer.swift | 12 +- Sources/Media/IOAudioUnit.swift | 34 ++--- Sources/Media/IOMixer.swift | 148 ++++++++++++++++---- Sources/Media/IOMuxer.swift | 10 ++ Sources/Media/IORecorder.swift | 10 +- Sources/Media/IOUnit.swift | 7 +- Sources/Media/IOVideoMixer.swift | 2 +- Sources/Media/IOVideoUnit.swift | 22 ++- Sources/Net/NetStream.swift | 30 ++-- Sources/RTMP/RTMPMessage.swift | 6 +- Sources/RTMP/RTMPMuxer.swift | 98 +++++++------ Sources/RTMP/RTMPStream.swift | 19 +-- Tests/Codec/AudioCodecTests.swift | 46 +++--- Tests/MPEG/TSReaderTests.swift | 12 +- Tests/Media/IOAudioResamplerTests.swift | 30 ++-- Tests/Media/IOAudioRingBufferTests.swift | 2 +- Tests/Media/IORecorderTests.swift | 6 +- 27 files changed, 414 insertions(+), 342 deletions(-) create mode 100644 Sources/Media/IOMuxer.swift diff --git a/Examples/iOS/IngestViewController.swift b/Examples/iOS/IngestViewController.swift index 22cec57e2..25601d5ff 100644 --- a/Examples/iOS/IngestViewController.swift +++ b/Examples/iOS/IngestViewController.swift @@ -322,7 +322,7 @@ extension IngestViewController: IORecorderDelegate { extension IngestViewController: AudioCaptureDelegate { // MARK: AudioCaptureDelegate func audioCapture(_ audioCapture: AudioCapture, buffer: AVAudioBuffer, time: AVAudioTime) { - stream.appendAudioBuffer(buffer, when: time) + 
stream.append(buffer, when: time) } } diff --git a/Examples/iOS/NetStreamSwitcher.swift b/Examples/iOS/NetStreamSwitcher.swift index 399c66167..a1671cb38 100644 --- a/Examples/iOS/NetStreamSwitcher.swift +++ b/Examples/iOS/NetStreamSwitcher.swift @@ -150,11 +150,11 @@ extension NetStreamSwitcher: NetStreamDelegate { #endif /// Tells the receiver to video codec error occured. - func stream(_ stream: NetStream, videoCodecErrorOccurred error: VideoCodec.Error) { + func stream(_ stream: NetStream, videoErrorOccurred error: IOMixerVideoError) { } /// Tells the receiver to audio codec error occured. - func stream(_ stream: NetStream, audioCodecErrorOccurred error: HaishinKit.AudioCodec.Error) { + func stream(_ stream: NetStream, audioErrorOccurred error: IOMixerAudioError) { } /// Tells the receiver to the stream opened. diff --git a/Examples/iOS/Screencast/SampleHandler.swift b/Examples/iOS/Screencast/SampleHandler.swift index 6e9ca5869..4a9c9551b 100644 --- a/Examples/iOS/Screencast/SampleHandler.swift +++ b/Examples/iOS/Screencast/SampleHandler.swift @@ -45,15 +45,15 @@ open class SampleHandler: RPBroadcastSampleHandler { rtmpStream.videoSettings.videoSize = .init(width: CGFloat(dimensions.width), height: CGFloat(dimensions.height)) rtmpStream.videoSettings.profileLevel = kVTProfileLevel_H264_Baseline_AutoLevel as String } - rtmpStream.appendSampleBuffer(sampleBuffer) + rtmpStream.append(sampleBuffer) case .audioMic: isMirophoneOn = true if CMSampleBufferDataIsReady(sampleBuffer) { - rtmpStream.appendSampleBuffer(sampleBuffer) + rtmpStream.append(sampleBuffer) } case .audioApp: if !isMirophoneOn && CMSampleBufferDataIsReady(sampleBuffer) { - rtmpStream.appendSampleBuffer(sampleBuffer) + rtmpStream.append(sampleBuffer) } @unknown default: break diff --git a/HaishinKit.xcodeproj/project.pbxproj b/HaishinKit.xcodeproj/project.pbxproj index 12696d892..4ad091a35 100644 --- a/HaishinKit.xcodeproj/project.pbxproj +++ b/HaishinKit.xcodeproj/project.pbxproj @@ -175,6 +175,7 @@ 
BC3802142AB5E7CC001AE399 /* IOAudioCaptureUnit.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC3802132AB5E7CC001AE399 /* IOAudioCaptureUnit.swift */; }; BC3802192AB6AD79001AE399 /* IOAudioResamplerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC3802182AB6AD79001AE399 /* IOAudioResamplerTests.swift */; }; BC3E384429C216BB007CD972 /* ADTSReaderTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC3E384329C216BB007CD972 /* ADTSReaderTests.swift */; }; + BC4078C42AD5CC7E00BBB4FA /* IOMuxer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC4078C32AD5CC7E00BBB4FA /* IOMuxer.swift */; }; BC4914A228DDD33D009E2DF6 /* VTSessionConvertible.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC4914A128DDD33D009E2DF6 /* VTSessionConvertible.swift */; }; BC4914A628DDD367009E2DF6 /* VTSessionOption.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC4914A528DDD367009E2DF6 /* VTSessionOption.swift */; }; BC4914AE28DDF445009E2DF6 /* VTDecompressionSession+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC4914AD28DDF445009E2DF6 /* VTDecompressionSession+Extension.swift */; }; @@ -586,6 +587,7 @@ BC3802132AB5E7CC001AE399 /* IOAudioCaptureUnit.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOAudioCaptureUnit.swift; sourceTree = ""; }; BC3802182AB6AD79001AE399 /* IOAudioResamplerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOAudioResamplerTests.swift; sourceTree = ""; }; BC3E384329C216BB007CD972 /* ADTSReaderTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ADTSReaderTests.swift; sourceTree = ""; }; + BC4078C32AD5CC7E00BBB4FA /* IOMuxer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOMuxer.swift; sourceTree = ""; }; BC4914A128DDD33D009E2DF6 /* VTSessionConvertible.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VTSessionConvertible.swift; sourceTree = ""; }; 
BC4914A528DDD367009E2DF6 /* VTSessionOption.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VTSessionOption.swift; sourceTree = ""; }; BC4914AD28DDF445009E2DF6 /* VTDecompressionSession+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "VTDecompressionSession+Extension.swift"; sourceTree = ""; }; @@ -955,6 +957,7 @@ 2968973F1CDB01AD0074D5F0 /* iOS */ = { isa = PBXGroup; children = ( + 29A39C801D85BEFA007C27E9 /* Screencast */, 296897411CDB01D20074D5F0 /* AppDelegate.swift */, 296897421CDB01D20074D5F0 /* Assets.xcassets */, BC56765A2AD2AED800524F7E /* AudioCapture.swift */, @@ -969,7 +972,6 @@ BCFB355324FA275600DC5108 /* PlaybackViewController.swift */, 291468161E581C7D00E619BA /* Preference.swift */, 2950742E1E4620B7007F15A4 /* PreferenceViewController.swift */, - 29A39C801D85BEFA007C27E9 /* Screencast */, BC1BC9032AC80531009005D3 /* VideoAdaptiveNetBitRateStrategy.swift */, 296897461CDB01D20074D5F0 /* VisualEffect.swift */, ); @@ -1047,6 +1049,7 @@ BC1102492925147300D48035 /* IOCaptureUnit.swift */, BC0F1FD92ACC4CC100C326FF /* IOCaptureVideoPreview.swift */, 29B8768B1CD70AFE00FC07DA /* IOMixer.swift */, + BC4078C32AD5CC7E00BBB4FA /* IOMuxer.swift */, 2976A47D1D48C5C700B53EF2 /* IORecorder.swift */, BCA2252B293CC5B600DD7CB2 /* IOScreenCaptureUnit.swift */, 299B131C1D35272D00A1E8F5 /* IOUIScreenCaptureUnit.swift */, @@ -1699,6 +1702,7 @@ 29B876AF1CD70B2800FC07DA /* RTMPChunk.swift in Sources */, 29B876841CD70AE800FC07DA /* AVCDecoderConfigurationRecord.swift in Sources */, 296242621D8DB86500C451A3 /* TSWriter.swift in Sources */, + BC4078C42AD5CC7E00BBB4FA /* IOMuxer.swift in Sources */, BC9CFA9323BDE8B700917EEF /* NetStreamDrawable.swift in Sources */, 29B8769B1CD70B1100FC07DA /* MIME.swift in Sources */, BC1DC50E2A039E1900E928ED /* FLVVideoPacketType.swift in Sources */, diff --git a/SRTHaishinKit/SRTStream.swift b/SRTHaishinKit/SRTStream.swift index 4b529883c..094a6ebb9 100644 --- 
a/SRTHaishinKit/SRTStream.swift +++ b/SRTHaishinKit/SRTStream.swift @@ -55,6 +55,7 @@ public final class SRTStream: NetStream { mixer.startDecoding() readyState = .playing case .publish: + mixer.muxer = writer writer.expectedMedias.removeAll() if videoInputFormat != nil { writer.expectedMedias.insert(.video) @@ -62,7 +63,7 @@ public final class SRTStream: NetStream { if audioInputFormat != nil { writer.expectedMedias.insert(.audio) } - mixer.startEncoding(writer) + mixer.startEncoding() mixer.startRunning() writer.startRunning() readyState = .publishing @@ -159,6 +160,9 @@ extension SRTStream: TSWriterDelegate { } connection?.socket?.doOutput(data: data) } + + public func writer(_ writer: TSWriter, didRotateFileHandle timestamp: CMTime) { + } } extension SRTStream: TSReaderDelegate { @@ -179,6 +183,6 @@ extension SRTStream: TSReaderDelegate { guard readyState == .playing else { return } - appendSampleBuffer(sampleBuffer) + append(sampleBuffer) } } diff --git a/Sources/Codec/AudioCodec.swift b/Sources/Codec/AudioCodec.swift index f8532e5d6..2f0b660c2 100644 --- a/Sources/Codec/AudioCodec.swift +++ b/Sources/Codec/AudioCodec.swift @@ -3,40 +3,34 @@ import AVFoundation /** * The interface a AudioCodec uses to inform its delegate. */ -public protocol AudioCodecDelegate: AnyObject { +protocol AudioCodecDelegate: AnyObject { /// Tells the receiver to output an AVAudioFormat. - func audioCodec(_ codec: AudioCodec, didOutput audioFormat: AVAudioFormat) + func audioCodec(_ codec: AudioCodec, didOutput audioFormat: AVAudioFormat) /// Tells the receiver to output an encoded or decoded CMSampleBuffer. - func audioCodec(_ codec: AudioCodec, didOutput audioBuffer: AVAudioBuffer, when: AVAudioTime) + func audioCodec(_ codec: AudioCodec, didOutput audioBuffer: AVAudioBuffer, when: AVAudioTime) /// Tells the receiver to occured an error. 
- func audioCodec(_ codec: AudioCodec, errorOccurred error: AudioCodec.Error) + func audioCodec(_ codec: AudioCodec, errorOccurred error: IOMixerAudioError) } +private let kAudioCodec_frameCamacity: UInt32 = 1024 + // MARK: - /** * The AudioCodec translate audio data to another format. * - seealso: https://developer.apple.com/library/ios/technotes/tn2236/_index.html */ -public class AudioCodec { - private static let frameCapacity: UInt32 = 1024 - - /// The AudioCodec error domain codes. - public enum Error: Swift.Error { - case failedToCreate(from: AVAudioFormat?, to: AVAudioFormat?) - case failedToConvert(error: NSError) - } - +final class AudioCodec { /// Specifies the delegate. - public weak var delegate: (any AudioCodecDelegate)? + weak var delegate: T? /// This instance is running to process(true) or not(false). - public private(set) var isRunning: Atomic = .init(false) + private(set) var isRunning: Atomic = .init(false) /// Specifies the settings for audio codec. - public var settings: AudioCodecSettings = .default { + var settings: AudioCodecSettings = .default { didSet { settings.apply(audioConverter, oldValue: oldValue) } } - var lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.AudioCodec.lock") + let lockQueue: DispatchQueue var inputFormat: AVAudioFormat? { didSet { guard inputFormat != oldValue else { @@ -61,8 +55,11 @@ public class AudioCodec { private var outputBuffers: [AVAudioBuffer] = [] private var audioConverter: AVAudioConverter? - /// Append a CMSampleBuffer. 
- public func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) { + init(lockQueue: DispatchQueue) { + self.lockQueue = lockQueue + } + + func append(_ sampleBuffer: CMSampleBuffer) { guard isRunning.value else { return } @@ -81,7 +78,7 @@ public class AudioCodec { buffer.byteLength = UInt32(byteCount) if let blockBuffer = sampleBuffer.dataBuffer { CMBlockBufferCopyDataBytes(blockBuffer, atOffset: offset + ADTSHeader.size, dataLength: byteCount, destination: buffer.data) - appendAudioBuffer(buffer, when: presentationTimeStamp.makeAudioTime()) + append(buffer, when: presentationTimeStamp.makeAudioTime()) presentationTimeStamp = CMTimeAdd(presentationTimeStamp, CMTime(value: CMTimeValue(1024), timescale: sampleBuffer.presentationTimeStamp.timescale)) offset += sampleSize } @@ -91,7 +88,7 @@ public class AudioCodec { } } - func appendAudioBuffer(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) { + func append(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) { guard let audioConverter, isRunning.value else { return } @@ -131,8 +128,8 @@ public class AudioCodec { } switch inputFormat.formatDescription.audioStreamBasicDescription?.mFormatID { case kAudioFormatLinearPCM: - let buffer = AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: Self.frameCapacity) - buffer?.frameLength = Self.frameCapacity + let buffer = AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: kAudioCodec_frameCamacity) + buffer?.frameLength = kAudioCodec_frameCamacity return buffer default: return AVAudioCompressedBuffer(format: inputFormat, packetCapacity: 1, maximumPacketSize: 1024) @@ -184,7 +181,7 @@ extension AudioCodec: Codec { extension AudioCodec: Running { // MARK: Running - public func startRunning() { + func startRunning() { lockQueue.async { guard !self.isRunning.value else { return @@ -197,7 +194,7 @@ extension AudioCodec: Running { } } - public func stopRunning() { + func stopRunning() { lockQueue.async { guard self.isRunning.value else { return diff --git 
a/Sources/Codec/VideoCodec.swift b/Sources/Codec/VideoCodec.swift index 141458c49..4d7ea2136 100644 --- a/Sources/Codec/VideoCodec.swift +++ b/Sources/Codec/VideoCodec.swift @@ -8,44 +8,32 @@ import UIKit /** * The interface a VideoCodec uses to inform its delegate. */ -public protocol VideoCodecDelegate: AnyObject { +protocol VideoCodecDelegate: AnyObject { /// Tells the receiver to set a formatDescription. func videoCodec(_ codec: VideoCodec, didOutput formatDescription: CMFormatDescription?) /// Tells the receiver to output an encoded or decoded sampleBuffer. func videoCodec(_ codec: VideoCodec, didOutput sampleBuffer: CMSampleBuffer) /// Tells the receiver to occured an error. - func videoCodec(_ codec: VideoCodec, errorOccurred error: VideoCodec.Error) + func videoCodec(_ codec: VideoCodec, errorOccurred error: IOMixerVideoError) } // MARK: - /** * The VideoCodec class provides methods for encode or decode for video. */ -public final class VideoCodec { - static let defaultFrameInterval = 0.0 - - /** - * The VideoCodec error domain codes. - */ - public enum Error: Swift.Error { - /// The VideoCodec failed to create the VTSession. - case failedToCreate(status: OSStatus) - /// The VideoCodec failed to prepare the VTSession. - case failedToPrepare(status: OSStatus) - /// The VideoCodec failed to encode or decode a flame. - case failedToFlame(status: OSStatus) - /// The VideoCodec failed to set an option. - case failedToSetOption(status: OSStatus, option: VTSessionOption) - } +final class VideoCodec { + private static let defaultFrameInterval: Double = 0.0 /// The videoCodec's attributes value. - public static var defaultAttributes: [NSString: AnyObject]? = [ + static var defaultAttributes: [NSString: AnyObject]? = [ kCVPixelBufferIOSurfacePropertiesKey: NSDictionary(), kCVPixelBufferMetalCompatibilityKey: kCFBooleanTrue ] + let lockQueue: DispatchQueue + /// Specifies the settings for a VideoCodec. 
- public var settings: VideoCodecSettings = .default { + var settings: VideoCodecSettings = .default { didSet { let invalidateSession = settings.invalidateSession(oldValue) if invalidateSession { @@ -57,9 +45,7 @@ public final class VideoCodec { } /// The running value indicating whether the VideoCodec is running. - public private(set) var isRunning: Atomic = .init(false) - - var lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.VideoCodec.lock") + private(set) var isRunning: Atomic = .init(false) var needsSync: Atomic = .init(true) var attributes: [NSString: AnyObject]? { guard VideoCodec.defaultAttributes != nil else { @@ -103,7 +89,11 @@ public final class VideoCodec { private var invalidateSession = true private var presentationTimeStamp: CMTime = .invalid - func appendImageBuffer(_ imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime, duration: CMTime) { + init(lockQueue: DispatchQueue) { + self.lockQueue = lockQueue + } + + func append(_ imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime, duration: CMTime) { guard isRunning.value, !willDropFrame(presentationTimeStamp) else { return } @@ -125,7 +115,7 @@ public final class VideoCodec { } } - func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) { + func append(_ sampleBuffer: CMSampleBuffer) { inputFormat = sampleBuffer.formatDescription guard isRunning.value else { return @@ -214,7 +204,7 @@ public final class VideoCodec { extension VideoCodec: Running { // MARK: Running - public func startRunning() { + func startRunning() { lockQueue.async { #if os(iOS) || os(tvOS) || os(visionOS) NotificationCenter.default.addObserver( @@ -235,7 +225,7 @@ extension VideoCodec: Running { } } - public func stopRunning() { + func stopRunning() { lockQueue.async { self.isRunning.mutate { $0 = false } self.session = nil diff --git a/Sources/MPEG/TSWriter.swift b/Sources/MPEG/TSWriter.swift index 7e319be6e..dd1175109 100644 --- a/Sources/MPEG/TSWriter.swift +++ b/Sources/MPEG/TSWriter.swift @@ -12,15 +12,8 
@@ public protocol TSWriterDelegate: AnyObject { func writer(_ writer: TSWriter, didOutput data: Data) } -public extension TSWriterDelegate { - // default implementation noop - func writer(_ writer: TSWriter, didRotateFileHandle timestamp: CMTime) { - // noop - } -} - /// The TSWriter class represents writes MPEG-2 transport stream data. -public class TSWriter: Running { +public final class TSWriter { public static let defaultPATPID: UInt16 = 0 public static let defaultPMTPID: UInt16 = 4095 public static let defaultVideoPID: UInt16 = 256 @@ -35,6 +28,36 @@ public class TSWriter: Running { /// The exptected medias = [.video, .audio]. public var expectedMedias: Set = [] + public var audioFormat: AVAudioFormat? { + didSet { + guard let audioFormat else { + return + } + var data = ESSpecificData() + data.streamType = .adtsAac + data.elementaryPID = TSWriter.defaultAudioPID + PMT.elementaryStreamSpecificData.append(data) + audioContinuityCounter = 0 + audioConfig = AudioSpecificConfig(formatDescription: audioFormat.formatDescription) + } + } + + public var videoFormat: CMFormatDescription? 
{ + didSet { + guard + let videoFormat, + let avcC = AVCDecoderConfigurationRecord.getData(videoFormat) else { + return + } + var data = ESSpecificData() + data.streamType = .h264 + data.elementaryPID = TSWriter.defaultVideoPID + PMT.elementaryStreamSpecificData.append(data) + videoContinuityCounter = 0 + videoConfig = AVCDecoderConfigurationRecord(data: avcC) + } + } + var audioContinuityCounter: UInt8 = 0 var videoContinuityCounter: UInt8 = 0 var PCRPID: UInt16 = TSWriter.defaultVideoPID @@ -81,31 +104,6 @@ public class TSWriter: Running { self.segmentDuration = segmentDuration } - public func startRunning() { - guard isRunning.value else { - return - } - isRunning.mutate { $0 = true } - } - - public func stopRunning() { - guard !isRunning.value else { - return - } - audioContinuityCounter = 0 - videoContinuityCounter = 0 - PCRPID = TSWriter.defaultVideoPID - PAT.programs.removeAll() - PAT.programs = [1: TSWriter.defaultPMTPID] - PMT = TSProgramMap() - audioConfig = nil - videoConfig = nil - videoTimestamp = .invalid - audioTimestamp = .invalid - PCRTimestamp = .invalid - isRunning.mutate { $0 = false } - } - // swiftlint:disable:next function_parameter_count final func writeSampleBuffer(_ PID: UInt16, streamID: UInt8, bytes: UnsafePointer?, count: UInt32, presentationTimeStamp: CMTime, decodeTimeStamp: CMTime, randomAccessIndicator: Bool) { guard canWriteFor else { @@ -217,24 +215,9 @@ public class TSWriter: Running { } } -extension TSWriter: AudioCodecDelegate { - // MARK: AudioCodecDelegate - public func audioCodec(_ codec: AudioCodec, errorOccurred error: AudioCodec.Error) { - } - - public func audioCodec(_ codec: AudioCodec, didOutput outputFormat: AVAudioFormat) { - var data = ESSpecificData() - data.streamType = .adtsAac - data.elementaryPID = TSWriter.defaultAudioPID - PMT.elementaryStreamSpecificData.append(data) - audioContinuityCounter = 0 - audioConfig = AudioSpecificConfig(formatDescription: outputFormat.formatDescription) - } - - public func 
audioCodec(_ codec: AudioCodec, didOutput audioBuffer: AVAudioBuffer, when: AVAudioTime) { - guard let audioBuffer = audioBuffer as? AVAudioCompressedBuffer else { - return - } +extension TSWriter: IOMuxer { + // IOMuxer + public func append(_ audioBuffer: AVAudioCompressedBuffer, when: AVAudioTime) { writeSampleBuffer( TSWriter.defaultAudioPID, streamID: 192, @@ -244,27 +227,9 @@ extension TSWriter: AudioCodecDelegate { decodeTimeStamp: .invalid, randomAccessIndicator: true ) - codec.releaseOutputBuffer(audioBuffer) } -} -extension TSWriter: VideoCodecDelegate { - // MARK: VideoCodecDelegate - public func videoCodec(_ codec: VideoCodec, didOutput formatDescription: CMFormatDescription?) { - guard - let formatDescription, - let avcC = AVCDecoderConfigurationRecord.getData(formatDescription) else { - return - } - var data = ESSpecificData() - data.streamType = .h264 - data.elementaryPID = TSWriter.defaultVideoPID - PMT.elementaryStreamSpecificData.append(data) - videoContinuityCounter = 0 - videoConfig = AVCDecoderConfigurationRecord(data: avcC) - } - - public func videoCodec(_ codec: VideoCodec, didOutput sampleBuffer: CMSampleBuffer) { + public func append(_ sampleBuffer: CMSampleBuffer) { guard let dataBuffer = sampleBuffer.dataBuffer else { return } @@ -286,7 +251,31 @@ extension TSWriter: VideoCodecDelegate { randomAccessIndicator: !sampleBuffer.isNotSync ) } +} - public func videoCodec(_ codec: VideoCodec, errorOccurred error: VideoCodec.Error) { +extension TSWriter: Running { + public func startRunning() { + guard isRunning.value else { + return + } + isRunning.mutate { $0 = true } + } + + public func stopRunning() { + guard !isRunning.value else { + return + } + audioContinuityCounter = 0 + videoContinuityCounter = 0 + PCRPID = TSWriter.defaultVideoPID + PAT.programs.removeAll() + PAT.programs = [1: TSWriter.defaultPMTPID] + PMT = TSProgramMap() + audioConfig = nil + videoConfig = nil + videoTimestamp = .invalid + audioTimestamp = .invalid + PCRTimestamp = 
.invalid + isRunning.mutate { $0 = false } } } diff --git a/Sources/Media/IOAudioMonitor.swift b/Sources/Media/IOAudioMonitor.swift index 2f61ae913..8dca3b908 100644 --- a/Sources/Media/IOAudioMonitor.swift +++ b/Sources/Media/IOAudioMonitor.swift @@ -40,11 +40,11 @@ final class IOAudioMonitor { stopRunning() } - func appendAudioPCMBuffer(_ audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) { + func append(_ audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) { guard isRunning.value else { return } - ringBuffer?.appendAudioPCMBuffer(audioPCMBuffer, when: when) + ringBuffer?.append(audioPCMBuffer, when: when) } private func render(_ inNumberFrames: UInt32, ioData: UnsafeMutablePointer?) -> OSStatus { diff --git a/Sources/Media/IOAudioResampler.swift b/Sources/Media/IOAudioResampler.swift index a8d1b9c79..4a42e29fd 100644 --- a/Sources/Media/IOAudioResampler.swift +++ b/Sources/Media/IOAudioResampler.swift @@ -7,7 +7,7 @@ private let kIOAudioResampler_sampleTime: AVAudioFramePosition = 0 protocol IOAudioResamplerDelegate: AnyObject { func resampler(_ resampler: IOAudioResampler, didOutput audioFormat: AVAudioFormat) func resampler(_ resampler: IOAudioResampler, didOutput audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) - func resampler(_ resampler: IOAudioResampler, errorOccurred error: AudioCodec.Error) + func resampler(_ resampler: IOAudioResampler, errorOccurred error: IOMixerAudioError) } struct IOAudioResamplerSettings { @@ -108,7 +108,7 @@ final class IOAudioResampler { private var anchor: AVAudioTime? 
private var sampleTime: AVAudioFramePosition = kIOAudioResampler_sampleTime - func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) { + func append(_ sampleBuffer: CMSampleBuffer) { inSourceFormat = sampleBuffer.formatDescription?.audioStreamBasicDescription if sampleTime == kIOAudioResampler_sampleTime { sampleTime = sampleBuffer.presentationTimeStamp.value @@ -116,17 +116,17 @@ final class IOAudioResampler { anchor = .init(hostTime: AVAudioTime.hostTime(forSeconds: sampleBuffer.presentationTimeStamp.seconds), sampleTime: sampleTime, atRate: outputFormat.sampleRate) } } - ringBuffer?.appendSampleBuffer(sampleBuffer) + ringBuffer?.append(sampleBuffer) resample() } - func appendAudioPCMBuffer(_ audioBuffer: AVAudioPCMBuffer, when: AVAudioTime) { + func append(_ audioBuffer: AVAudioPCMBuffer, when: AVAudioTime) { inSourceFormat = audioBuffer.format.formatDescription.audioStreamBasicDescription if sampleTime == kIOAudioResampler_sampleTime { sampleTime = when.sampleTime anchor = when } - ringBuffer?.appendAudioPCMBuffer(audioBuffer, when: when) + ringBuffer?.append(audioBuffer, when: when) resample() } diff --git a/Sources/Media/IOAudioRingBuffer.swift b/Sources/Media/IOAudioRingBuffer.swift index 51b19f91a..f7cc60c94 100644 --- a/Sources/Media/IOAudioRingBuffer.swift +++ b/Sources/Media/IOAudioRingBuffer.swift @@ -36,7 +36,7 @@ final class IOAudioRingBuffer { self.outputBuffer.frameLength = self.outputBuffer.frameCapacity } - func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) { + func append(_ sampleBuffer: CMSampleBuffer) { guard CMSampleBufferDataIsReady(sampleBuffer) else { return } @@ -69,10 +69,10 @@ final class IOAudioRingBuffer { } skip = max(Int(sampleBuffer.presentationTimeStamp.value - sampleTime), 0) sampleTime += Int64(skip) - appendAudioPCMBuffer(inputBuffer) + append(inputBuffer) } - func appendAudioPCMBuffer(_ audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) { + func append(_ audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) { if 
sampleTime == 0 { sampleTime = when.sampleTime } @@ -85,11 +85,11 @@ final class IOAudioRingBuffer { _ = inputBuffer.copy(audioPCMBuffer) skip = Int(max(when.sampleTime - sampleTime, 0)) sampleTime += Int64(skip) - appendAudioPCMBuffer(inputBuffer) + append(inputBuffer) } @inline(__always) - private func appendAudioPCMBuffer(_ audioPCMBuffer: AVAudioPCMBuffer, offset: Int = 0) { + private func append(_ audioPCMBuffer: AVAudioPCMBuffer, offset: Int = 0) { let numSamples = min(Int(audioPCMBuffer.frameLength) - offset, Int(outputBuffer.frameLength) - head) if inputFormat.isInterleaved { let channelCount = Int(inputFormat.channelCount) @@ -122,7 +122,7 @@ final class IOAudioRingBuffer { if head == outputBuffer.frameLength { head = 0 if 0 < Int(audioPCMBuffer.frameLength) - numSamples { - appendAudioPCMBuffer(audioPCMBuffer, offset: numSamples) + append(audioPCMBuffer, offset: numSamples) } } } diff --git a/Sources/Media/IOAudioUnit.swift b/Sources/Media/IOAudioUnit.swift index 3a7ef71b2..9adae453b 100644 --- a/Sources/Media/IOAudioUnit.swift +++ b/Sources/Media/IOAudioUnit.swift @@ -5,7 +5,7 @@ import SwiftPMSupport #endif protocol IOAudioUnitDelegate: AnyObject { - func audioUnit(_ audioUnit: IOAudioUnit, errorOccurred error: AudioCodec.Error) + func audioUnit(_ audioUnit: IOAudioUnit, errorOccurred error: IOMixerAudioError) func audioUnit(_ audioUnit: IOAudioUnit, didOutput audioBuffer: AVAudioPCMBuffer, when: AVAudioTime) } @@ -42,9 +42,9 @@ final class IOAudioUnit: NSObject, IOUnit { var inputBuffer: AVAudioBuffer? 
{ return codec.inputBuffer } - private lazy var codec: AudioCodec = { - var codec = AudioCodec() - codec.lockQueue = lockQueue + private lazy var codec: AudioCodec = { + var codec = AudioCodec(lockQueue: lockQueue) + codec.delegate = mixer return codec }() private lazy var resampler: IOAudioResampler = { @@ -88,21 +88,21 @@ final class IOAudioUnit: NSObject, IOUnit { } #endif - func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) { + func append(_ sampleBuffer: CMSampleBuffer) { switch sampleBuffer.formatDescription?.audioStreamBasicDescription?.mFormatID { case kAudioFormatLinearPCM: - resampler.appendSampleBuffer(sampleBuffer.muted(muted)) + resampler.append(sampleBuffer.muted(muted)) default: - codec.appendSampleBuffer(sampleBuffer) + codec.append(sampleBuffer) } } - func appendAudioBuffer(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) { + func append(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) { switch audioBuffer { case let audioBuffer as AVAudioPCMBuffer: - resampler.appendAudioPCMBuffer(audioBuffer, when: when) + resampler.append(audioBuffer, when: when) case let audioBuffer as AVAudioCompressedBuffer: - codec.appendAudioBuffer(audioBuffer, when: when) + codec.append(audioBuffer, when: when) default: break } @@ -120,14 +120,12 @@ final class IOAudioUnit: NSObject, IOUnit { extension IOAudioUnit: IOUnitEncoding { // MARK: IOUnitEncoding - func startEncoding(_ delegate: any AVCodecDelegate) { - codec.delegate = delegate + func startEncoding() { codec.startRunning() } func stopEncoding() { codec.stopRunning() - codec.delegate = nil } } @@ -138,7 +136,6 @@ extension IOAudioUnit: IOUnitDecoding { if let playerNode = mixer?.mediaLink.playerNode { mixer?.audioEngine?.attach(playerNode) } - codec.delegate = mixer codec.startRunning() } @@ -147,7 +144,6 @@ extension IOAudioUnit: IOUnitDecoding { mixer?.audioEngine?.detach(playerNode) } codec.stopRunning() - codec.delegate = nil } } @@ -156,14 +152,14 @@ extension IOAudioUnit: IOUnitDecoding { extension 
IOAudioUnit: AVCaptureAudioDataOutputSampleBufferDelegate { // MARK: AVCaptureAudioDataOutputSampleBufferDelegate func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { - resampler.appendSampleBuffer(sampleBuffer.muted(muted)) + resampler.append(sampleBuffer.muted(muted)) } } #endif extension IOAudioUnit: IOAudioResamplerDelegate { // MARK: IOAudioResamplerDelegate - func resampler(_ resampler: IOAudioResampler, errorOccurred error: AudioCodec.Error) { + func resampler(_ resampler: IOAudioResampler, errorOccurred error: IOMixerAudioError) { mixer?.audioUnit(self, errorOccurred: error) } @@ -175,7 +171,7 @@ extension IOAudioUnit: IOAudioResamplerDelegate { func resampler(_ resampler: IOAudioResampler, didOutput audioBuffer: AVAudioPCMBuffer, when: AVAudioTime) { mixer?.audioUnit(self, didOutput: audioBuffer, when: when) - monitor.appendAudioPCMBuffer(audioBuffer, when: when) - codec.appendAudioBuffer(audioBuffer, when: when) + monitor.append(audioBuffer, when: when) + codec.append(audioBuffer, when: when) } } diff --git a/Sources/Media/IOMixer.swift b/Sources/Media/IOMixer.swift index ddbd6d57a..7f3a04684 100644 --- a/Sources/Media/IOMixer.swift +++ b/Sources/Media/IOMixer.swift @@ -9,8 +9,8 @@ import UIKit protocol IOMixerDelegate: AnyObject { func mixer(_ mixer: IOMixer, didOutput audio: AVAudioPCMBuffer, when: AVAudioTime) func mixer(_ mixer: IOMixer, didOutput video: CMSampleBuffer) - func mixer(_ mixer: IOMixer, videoCodecErrorOccurred error: VideoCodec.Error) - func mixer(_ mixer: IOMixer, audioCodecErrorOccurred error: AudioCodec.Error) + func mixer(_ mixer: IOMixer, videoErrorOccurred error: IOMixerVideoError) + func mixer(_ mixer: IOMixer, audioErrorOccurred error: IOMixerAudioError) #if os(iOS) || os(tvOS) @available(tvOS 17.0, *) func mixer(_ mixer: IOMixer, sessionWasInterrupted session: AVCaptureSession, reason: AVCaptureSession.InterruptionReason?) 
@@ -19,6 +19,28 @@ protocol IOMixerDelegate: AnyObject { #endif } +/** + * The IOMixer video error domain codes. + */ +public enum IOMixerVideoError: Swift.Error { + /// The IOMixer video failed to create the VTSession. + case failedToCreate(status: OSStatus) + /// The IOMixer video failed to prepare the VTSession. + case failedToPrepare(status: OSStatus) + /// The IOMixer video failed to encode or decode a frame. + case failedToFlame(status: OSStatus) + /// The IOMixer video failed to set an option. + case failedToSetOption(status: OSStatus, option: VTSessionOption) +} + +/// The IOMixer audio error domain codes. +public enum IOMixerAudioError: Swift.Error { + /// The IOMixer audio failed to create the AVAudioConverter. + case failedToCreate(from: AVAudioFormat?, to: AVAudioFormat?) + /// The IOMixer audio failed to convert an audio buffer. + case failedToConvert(error: NSError) +} + /// An object that mixies audio and video for streaming. public final class IOMixer { /// The default fps for an IOMixer, value is 30. @@ -94,6 +116,8 @@ public final class IOMixer { } } + public weak var muxer: (any IOMuxer)? + weak var delegate: (any IOMixerDelegate)? lazy var audioIO: IOAudioUnit = { @@ -243,9 +267,9 @@ public final class IOMixer { extension IOMixer: IOUnitEncoding { /// Starts encoding for video and audio data. - public func startEncoding(_ delegate: any AVCodecDelegate) { - videoIO.startEncoding(delegate) - audioIO.startEncoding(delegate) + public func startEncoding() { + videoIO.startEncoding() + audioIO.startEncoding() } /// Stop encoding. @@ -419,52 +443,114 @@ extension IOMixer: Running { extension IOMixer: VideoCodecDelegate { // MARK: VideoCodecDelegate - public func videoCodec(_ codec: VideoCodec, didOutput formatDescription: CMFormatDescription?)
{ - } - - public func videoCodec(_ codec: VideoCodec, didOutput sampleBuffer: CMSampleBuffer) { - mediaLink.enqueueVideo(sampleBuffer) + func videoCodec(_ codec: VideoCodec, didOutput formatDescription: CMFormatDescription?) { + muxer?.videoFormat = formatDescription + } + + func videoCodec(_ codec: VideoCodec, didOutput sampleBuffer: CMSampleBuffer) { + switch sampleBuffer.formatDescription?._mediaSubType { + case kCVPixelFormatType_1Monochrome, + kCVPixelFormatType_2Indexed, + kCVPixelFormatType_8Indexed, + kCVPixelFormatType_1IndexedGray_WhiteIsZero, + kCVPixelFormatType_2IndexedGray_WhiteIsZero, + kCVPixelFormatType_4IndexedGray_WhiteIsZero, + kCVPixelFormatType_8IndexedGray_WhiteIsZero, + kCVPixelFormatType_16BE555, + kCVPixelFormatType_16LE555, + kCVPixelFormatType_16LE5551, + kCVPixelFormatType_16BE565, + kCVPixelFormatType_16LE565, + kCVPixelFormatType_24RGB, + kCVPixelFormatType_24BGR, + kCVPixelFormatType_32ARGB, + kCVPixelFormatType_32BGRA, + kCVPixelFormatType_32ABGR, + kCVPixelFormatType_32RGBA, + kCVPixelFormatType_64ARGB, + kCVPixelFormatType_48RGB, + kCVPixelFormatType_32AlphaGray, + kCVPixelFormatType_16Gray, + kCVPixelFormatType_30RGB, + kCVPixelFormatType_422YpCbCr8, + kCVPixelFormatType_4444YpCbCrA8, + kCVPixelFormatType_4444YpCbCrA8R, + kCVPixelFormatType_4444AYpCbCr8, + kCVPixelFormatType_4444AYpCbCr16, + kCVPixelFormatType_444YpCbCr8, + kCVPixelFormatType_422YpCbCr16, + kCVPixelFormatType_422YpCbCr10, + kCVPixelFormatType_444YpCbCr10, + kCVPixelFormatType_420YpCbCr8Planar, + kCVPixelFormatType_420YpCbCr8PlanarFullRange, + kCVPixelFormatType_422YpCbCr_4A_8BiPlanar, + kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, + kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, + kCVPixelFormatType_422YpCbCr8_yuvs, + kCVPixelFormatType_422YpCbCr8FullRange, + kCVPixelFormatType_OneComponent8, + kCVPixelFormatType_TwoComponent8, + kCVPixelFormatType_OneComponent16Half, + kCVPixelFormatType_OneComponent32Float, + kCVPixelFormatType_TwoComponent16Half, + 
kCVPixelFormatType_TwoComponent32Float, + kCVPixelFormatType_64RGBAHalf, + kCVPixelFormatType_128RGBAFloat: + mediaLink.enqueueVideo(sampleBuffer) + default: + muxer?.append(sampleBuffer) + } } - public func videoCodec(_ codec: VideoCodec, errorOccurred error: VideoCodec.Error) { - delegate?.mixer(self, videoCodecErrorOccurred: error) + func videoCodec(_ codec: VideoCodec, errorOccurred error: IOMixerVideoError) { + delegate?.mixer(self, videoErrorOccurred: error) } } extension IOMixer: AudioCodecDelegate { // MARK: AudioCodecDelegate - public func audioCodec(_ codec: AudioCodec, errorOccurred error: AudioCodec.Error) { - delegate?.mixer(self, audioCodecErrorOccurred: error) - } - - public func audioCodec(_ codec: AudioCodec, didOutput audioFormat: AVAudioFormat) { - do { - self.audioFormat = audioFormat - if let audioEngine = audioEngine, audioEngine.isRunning == false { - try audioEngine.start() + func audioCodec(_ codec: AudioCodec, didOutput audioFormat: AVAudioFormat) { + switch audioFormat.formatDescription.audioStreamBasicDescription?.mFormatID { + case kAudioFormatLinearPCM: + do { + self.audioFormat = audioFormat + if let audioEngine = audioEngine, audioEngine.isRunning == false { + try audioEngine.start() + } + } catch { + logger.error(error) } - } catch { - logger.error(error) + default: + muxer?.audioFormat = audioFormat } } - public func audioCodec(_ codec: AudioCodec, didOutput audioBuffer: AVAudioBuffer, when: AVAudioTime) { - guard let audioBuffer = audioBuffer as? 
AVAudioPCMBuffer else { - return + func audioCodec(_ codec: AudioCodec, didOutput audioBuffer: AVAudioBuffer, when: AVAudioTime) { + switch audioBuffer { + case let audioBuffer as AVAudioPCMBuffer: + delegate?.mixer(self, didOutput: audioBuffer, when: when) + mediaLink.enqueueAudio(audioBuffer) + case let audioBuffer as AVAudioCompressedBuffer: + muxer?.append(audioBuffer, when: when) + codec.releaseOutputBuffer(audioBuffer) + default: + break } - delegate?.mixer(self, didOutput: audioBuffer, when: when) - mediaLink.enqueueAudio(audioBuffer) + } + + func audioCodec(_ codec: AudioCodec, errorOccurred error: IOMixerAudioError) { + delegate?.mixer(self, audioErrorOccurred: error) } } extension IOMixer: IOAudioUnitDelegate { // MARK: IOAudioUnitDelegate - func audioUnit(_ audioUnit: IOAudioUnit, errorOccurred error: AudioCodec.Error) { - delegate?.mixer(self, audioCodecErrorOccurred: error) + func audioUnit(_ audioUnit: IOAudioUnit, errorOccurred error: IOMixerAudioError) { + delegate?.mixer(self, audioErrorOccurred: error) } func audioUnit(_ audioUnit: IOAudioUnit, didOutput audioBuffer: AVAudioPCMBuffer, when: AVAudioTime) { delegate?.mixer(self, didOutput: audioBuffer, when: when) - recorder.appendAudioPCMBuffer(audioBuffer, when: when) + recorder.append(audioBuffer, when: when) } } diff --git a/Sources/Media/IOMuxer.swift b/Sources/Media/IOMuxer.swift new file mode 100644 index 000000000..b36186f71 --- /dev/null +++ b/Sources/Media/IOMuxer.swift @@ -0,0 +1,10 @@ +import AVFoundation +import Foundation + +public protocol IOMuxer: AnyObject { + var audioFormat: AVAudioFormat? { get set } + var videoFormat: CMFormatDescription? 
{ get set } + + func append(_ audioBuffer: AVAudioCompressedBuffer, when: AVAudioTime) + func append(_ sampleBuffer: CMSampleBuffer) +} diff --git a/Sources/Media/IORecorder.swift b/Sources/Media/IORecorder.swift index bbcc25495..ed21b54e5 100644 --- a/Sources/Media/IORecorder.swift +++ b/Sources/Media/IORecorder.swift @@ -13,7 +13,7 @@ public protocol IORecorderDelegate: AnyObject { // MARK: - /// The IORecorder class represents video and audio recorder. -public class IORecorder { +public final class IORecorder { /// The IORecorder error domain codes. public enum Error: Swift.Error { /// Failed to create the AVAssetWriter. @@ -72,7 +72,7 @@ public class IORecorder { #endif /// Append a sample buffer for recording. - public func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) { + public func append(_ sampleBuffer: CMSampleBuffer) { guard isRunning.value else { return } @@ -115,7 +115,7 @@ public class IORecorder { } /// Append a pixel buffer for recording. - public func appendPixelBuffer(_ pixelBuffer: CVPixelBuffer, withPresentationTime: CMTime) { + public func append(_ pixelBuffer: CVPixelBuffer, withPresentationTime: CMTime) { guard isRunning.value else { return } @@ -149,12 +149,12 @@ public class IORecorder { } } - func appendAudioPCMBuffer(_ audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) { + func append(_ audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) { guard isRunning.value else { return } if let sampleBuffer = audioPCMBuffer.makeSampleBuffer(when) { - appendSampleBuffer(sampleBuffer) + append(sampleBuffer) } } diff --git a/Sources/Media/IOUnit.swift b/Sources/Media/IOUnit.swift index e29758805..ab582e9e4 100644 --- a/Sources/Media/IOUnit.swift +++ b/Sources/Media/IOUnit.swift @@ -1,9 +1,6 @@ import AVFAudio import Foundation -/// A type that can delegate itself to AudioCodec or VideoCodec. 
-public typealias AVCodecDelegate = AudioCodecDelegate & VideoCodecDelegate - protocol IOUnit { associatedtype FormatDescription @@ -12,11 +9,11 @@ protocol IOUnit { var inputFormat: FormatDescription? { get } var outputFormat: FormatDescription? { get } - func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) + func append(_ sampleBuffer: CMSampleBuffer) } protocol IOUnitEncoding { - func startEncoding(_ delegate: any AVCodecDelegate) + func startEncoding() func stopEncoding() } diff --git a/Sources/Media/IOVideoMixer.swift b/Sources/Media/IOVideoMixer.swift index fb543dcb1..dc9c91e61 100644 --- a/Sources/Media/IOVideoMixer.swift +++ b/Sources/Media/IOVideoMixer.swift @@ -56,7 +56,7 @@ final class IOVideoMixer { return effects.remove(effect) != nil } - func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer, channel: Int, isVideoMirrored: Bool) { + func append(_ sampleBuffer: CMSampleBuffer, channel: Int, isVideoMirrored: Bool) { switch channel { case 0: var imageBuffer: CVImageBuffer? 
diff --git a/Sources/Media/IOVideoUnit.swift b/Sources/Media/IOVideoUnit.swift index 7483ed2d0..7fa7b24c2 100644 --- a/Sources/Media/IOVideoUnit.swift +++ b/Sources/Media/IOVideoUnit.swift @@ -113,8 +113,8 @@ final class IOVideoUnit: NSObject, IOUnit { return videoMixer }() private lazy var codec: VideoCodec = { - var codec = VideoCodec() - codec.lockQueue = lockQueue + var codec = VideoCodec(lockQueue: lockQueue) + codec.delegate = mixer return codec }() @@ -220,7 +220,7 @@ final class IOVideoUnit: NSObject, IOUnit { return videoMixer.unregisterEffect(effect) } - func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) { + func append(_ sampleBuffer: CMSampleBuffer) { switch sampleBuffer.formatDescription?._mediaSubType { case kCVPixelFormatType_1Monochrome, kCVPixelFormatType_2Indexed, @@ -270,10 +270,10 @@ final class IOVideoUnit: NSObject, IOUnit { kCVPixelFormatType_64RGBAHalf, kCVPixelFormatType_128RGBAFloat: inputFormat = sampleBuffer.formatDescription - videoMixer.appendSampleBuffer(sampleBuffer, channel: 0, isVideoMirrored: false) + videoMixer.append(sampleBuffer, channel: 0, isVideoMirrored: false) default: inputFormat = sampleBuffer.formatDescription - codec.appendSampleBuffer(sampleBuffer) + codec.append(sampleBuffer) } } @@ -284,14 +284,12 @@ final class IOVideoUnit: NSObject, IOUnit { extension IOVideoUnit: IOUnitEncoding { // MARK: IOUnitEncoding - func startEncoding(_ delegate: any AVCodecDelegate) { - codec.delegate = delegate + func startEncoding() { codec.startRunning() } func stopEncoding() { codec.stopRunning() - codec.delegate = nil } } @@ -315,10 +313,10 @@ extension IOVideoUnit: AVCaptureVideoDataOutputSampleBufferDelegate { func captureOutput(_ captureOutput: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { if capture.output == captureOutput { inputFormat = sampleBuffer.formatDescription - videoMixer.appendSampleBuffer(sampleBuffer, channel: 0, isVideoMirrored: connection.isVideoMirrored) + 
videoMixer.append(sampleBuffer, channel: 0, isVideoMirrored: connection.isVideoMirrored) drawable?.enqueue(sampleBuffer) } else if multiCamCapture.output == captureOutput { - videoMixer.appendSampleBuffer(sampleBuffer, channel: 1, isVideoMirrored: connection.isVideoMirrored) + videoMixer.append(sampleBuffer, channel: 1, isVideoMirrored: connection.isVideoMirrored) } } } @@ -327,12 +325,12 @@ extension IOVideoUnit: AVCaptureVideoDataOutputSampleBufferDelegate { extension IOVideoUnit: IOVideoMixerDelegate { // MARK: IOVideoMixerDelegate func videoMixer(_ videoMixer: IOVideoMixer, didOutput imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime) { - codec.appendImageBuffer( + codec.append( imageBuffer, presentationTimeStamp: presentationTimeStamp, duration: .invalid ) - mixer?.recorder.appendPixelBuffer( + mixer?.recorder.append( imageBuffer, withPresentationTime: presentationTimeStamp ) diff --git a/Sources/Net/NetStream.swift b/Sources/Net/NetStream.swift index 7606c4feb..2f4df27ca 100644 --- a/Sources/Net/NetStream.swift +++ b/Sources/Net/NetStream.swift @@ -23,9 +23,9 @@ public protocol NetStreamDelegate: AnyObject { func stream(_ stream: NetStream, sessionInterruptionEnded session: AVCaptureSession) #endif /// Tells the receiver to video codec error occured. - func stream(_ stream: NetStream, videoCodecErrorOccurred error: VideoCodec.Error) + func stream(_ stream: NetStream, videoErrorOccurred error: IOMixerVideoError) /// Tells the receiver to audio codec error occured. - func stream(_ stream: NetStream, audioCodecErrorOccurred error: AudioCodec.Error) + func stream(_ stream: NetStream, audioErrorOccurred error: IOMixerAudioError) /// Tells the receiver to the stream opened. func streamDidOpen(_ stream: NetStream) } @@ -285,15 +285,15 @@ open class NetStream: NSObject { /// Append a CMSampleBuffer. /// - Warning: This method can't use attachCamera or attachAudio method at the same time. 
- open func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer, options: [NSObject: AnyObject]? = nil) { + public func append(_ sampleBuffer: CMSampleBuffer) { switch sampleBuffer.formatDescription?._mediaType { case kCMMediaType_Audio: mixer.audioIO.lockQueue.async { - self.mixer.audioIO.appendSampleBuffer(sampleBuffer) + self.mixer.audioIO.append(sampleBuffer) } case kCMMediaType_Video: mixer.videoIO.lockQueue.async { - self.mixer.videoIO.appendSampleBuffer(sampleBuffer) + self.mixer.videoIO.append(sampleBuffer) } default: break @@ -302,9 +302,9 @@ open class NetStream: NSObject { /// Append an AVAudioBuffer. /// - Warning: This method can't use attachAudio method at the same time. - public func appendAudioBuffer(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) { + public func append(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) { mixer.audioIO.lockQueue.async { - self.mixer.audioIO.appendAudioBuffer(audioBuffer, when: when) + self.mixer.audioIO.append(audioBuffer, when: when) } } @@ -359,12 +359,12 @@ extension NetStream: IOMixerDelegate { delegate?.stream(self, didOutput: audio, when: when) } - func mixer(_ mixer: IOMixer, audioCodecErrorOccurred error: AudioCodec.Error) { - delegate?.stream(self, audioCodecErrorOccurred: error) + func mixer(_ mixer: IOMixer, audioErrorOccurred error: IOMixerAudioError) { + delegate?.stream(self, audioErrorOccurred: error) } - func mixer(_ mixer: IOMixer, videoCodecErrorOccurred error: VideoCodec.Error) { - delegate?.stream(self, videoCodecErrorOccurred: error) + func mixer(_ mixer: IOMixer, videoErrorOccurred error: IOMixerVideoError) { + delegate?.stream(self, videoErrorOccurred: error) } #if os(iOS) || os(tvOS) @@ -411,7 +411,7 @@ extension NetStream: IOScreenCaptureUnitDelegate { guard let sampleBuffer, status == noErr else { return } - appendSampleBuffer(sampleBuffer) + append(sampleBuffer) } } @@ -422,12 +422,12 @@ extension NetStream: SCStreamOutput { if #available(macOS 13.0, *) { switch type { case .screen: - 
appendSampleBuffer(sampleBuffer) + append(sampleBuffer) default: - appendSampleBuffer(sampleBuffer) + append(sampleBuffer) } } else { - appendSampleBuffer(sampleBuffer) + append(sampleBuffer) } } } diff --git a/Sources/RTMP/RTMPMessage.swift b/Sources/RTMP/RTMPMessage.swift index c48297dab..47ae5b05c 100644 --- a/Sources/RTMP/RTMPMessage.swift +++ b/Sources/RTMP/RTMPMessage.swift @@ -592,7 +592,7 @@ final class RTMPAudioMessage: RTMPMessage { stream.mixer.audioIO.setAudioStreamBasicDescription(makeAudioStreamBasicDescription()) } if let audioBuffer = makeAudioBuffer(stream) { - stream.mixer.audioIO.appendAudioBuffer(audioBuffer, when: .init(hostTime: UInt64(stream.audioTimestamp))) + stream.mixer.audioIO.append(audioBuffer, when: .init(hostTime: UInt64(stream.audioTimestamp))) } default: break @@ -651,7 +651,7 @@ final class RTMPVideoMessage: RTMPMessage { makeFormatDescription(stream, format: .h264) case FLVAVCPacketType.nal.rawValue: if let sampleBuffer = makeSampleBuffer(stream, type: type, offset: 0) { - stream.mixer.videoIO.appendSampleBuffer(sampleBuffer) + stream.mixer.videoIO.append(sampleBuffer) } default: break @@ -666,7 +666,7 @@ final class RTMPVideoMessage: RTMPMessage { makeFormatDescription(stream, format: .hevc) case FLVVideoPacketType.codedFrames.rawValue: if let sampleBuffer = makeSampleBuffer(stream, type: type, offset: 3) { - stream.mixer.videoIO.appendSampleBuffer(sampleBuffer) + stream.mixer.videoIO.append(sampleBuffer) } default: break diff --git a/Sources/RTMP/RTMPMuxer.swift b/Sources/RTMP/RTMPMuxer.swift index 850536a69..0d0018aae 100644 --- a/Sources/RTMP/RTMPMuxer.swift +++ b/Sources/RTMP/RTMPMuxer.swift @@ -3,14 +3,49 @@ import AVFoundation protocol RTMPMuxerDelegate: AnyObject { func muxer(_ muxer: RTMPMuxer, didOutputAudio buffer: Data, withTimestamp: Double) func muxer(_ muxer: RTMPMuxer, didOutputVideo buffer: Data, withTimestamp: Double) - func muxer(_ muxer: RTMPMuxer, audioCodecErrorOccurred error: AudioCodec.Error) - func 
muxer(_ muxer: RTMPMuxer, videoCodecErrorOccurred error: VideoCodec.Error) } // MARK: - final class RTMPMuxer { static let aac: UInt8 = FLVAudioCodec.aac.rawValue << 4 | FLVSoundRate.kHz44.rawValue << 2 | FLVSoundSize.snd16bit.rawValue << 1 | FLVSoundType.stereo.rawValue + var audioFormat: AVAudioFormat? { + didSet { + guard let audioFormat else { + return + } + var buffer = Data([RTMPMuxer.aac, FLVAACPacketType.seq.rawValue]) + buffer.append(contentsOf: AudioSpecificConfig(formatDescription: audioFormat.formatDescription).bytes) + delegate?.muxer(self, didOutputAudio: buffer, withTimestamp: 0) + } + } + + var videoFormat: CMFormatDescription? { + didSet { + guard let videoFormat else { + return + } + switch CMFormatDescriptionGetMediaSubType(videoFormat) { + case kCMVideoCodecType_H264: + guard let avcC = AVCDecoderConfigurationRecord.getData(videoFormat) else { + return + } + var buffer = Data([FLVFrameType.key.rawValue << 4 | FLVVideoCodec.avc.rawValue, FLVAVCPacketType.seq.rawValue, 0, 0, 0]) + buffer.append(avcC) + delegate?.muxer(self, didOutputVideo: buffer, withTimestamp: 0) + case kCMVideoCodecType_HEVC: + guard let hvcC = HEVCDecoderConfigurationRecord.getData(videoFormat) else { + return + } + var buffer = Data([0b10000000 | FLVFrameType.key.rawValue << 4 | FLVVideoPacketType.sequenceStart.rawValue, 0x68, 0x76, 0x63, 0x31]) + buffer.append(hvcC) + delegate?.muxer(self, didOutputVideo: buffer, withTimestamp: 0) + default: + break + } + } + } + weak var delegate: (any RTMPMuxerDelegate)? 
private var videoTimeStamp: CMTime = .zero private var audioTimeStamp: AVAudioTime = .init(hostTime: 0) @@ -22,79 +57,40 @@ final class RTMPMuxer { } } -extension RTMPMuxer: AudioCodecDelegate { - // MARK: AudioCodecDelegate - func audioCodec(_ codec: AudioCodec, errorOccurred error: AudioCodec.Error) { - delegate?.muxer(self, audioCodecErrorOccurred: error) - } - - func audioCodec(_ codec: AudioCodec, didOutput audioFormat: AVAudioFormat) { - var buffer = Data([RTMPMuxer.aac, FLVAACPacketType.seq.rawValue]) - buffer.append(contentsOf: AudioSpecificConfig(formatDescription: audioFormat.formatDescription).bytes) - delegate?.muxer(self, didOutputAudio: buffer, withTimestamp: 0) - } - - func audioCodec(_ codec: AudioCodec, didOutput audioBuffer: AVAudioBuffer, when: AVAudioTime) { +extension RTMPMuxer: IOMuxer { + func append(_ audioBuffer: AVAudioCompressedBuffer, when: AVAudioTime) { let delta = audioTimeStamp.hostTime == 0 ? 0 : (AVAudioTime.seconds(forHostTime: when.hostTime) - AVAudioTime.seconds(forHostTime: audioTimeStamp.hostTime)) * 1000 - guard let audioBuffer = audioBuffer as? AVAudioCompressedBuffer, 0 <= delta else { + guard 0 <= delta else { return } var buffer = Data([RTMPMuxer.aac, FLVAACPacketType.raw.rawValue]) buffer.append(audioBuffer.data.assumingMemoryBound(to: UInt8.self), count: Int(audioBuffer.byteLength)) delegate?.muxer(self, didOutputAudio: buffer, withTimestamp: delta) audioTimeStamp = when - codec.releaseOutputBuffer(audioBuffer) - } -} - -extension RTMPMuxer: VideoCodecDelegate { - // MARK: VideoCodecDelegate - func videoCodec(_ codec: VideoCodec, errorOccurred error: VideoCodec.Error) { - delegate?.muxer(self, videoCodecErrorOccurred: error) - } - - func videoCodec(_ codec: VideoCodec, didOutput formatDescription: CMFormatDescription?) 
{ - guard let formatDescription else { - return - } - switch codec.settings.format { - case .h264: - guard let avcC = AVCDecoderConfigurationRecord.getData(formatDescription) else { - return - } - var buffer = Data([FLVFrameType.key.rawValue << 4 | FLVVideoCodec.avc.rawValue, FLVAVCPacketType.seq.rawValue, 0, 0, 0]) - buffer.append(avcC) - delegate?.muxer(self, didOutputVideo: buffer, withTimestamp: 0) - case .hevc: - guard let hvcC = HEVCDecoderConfigurationRecord.getData(formatDescription) else { - return - } - var buffer = Data([0b10000000 | FLVFrameType.key.rawValue << 4 | FLVVideoPacketType.sequenceStart.rawValue, 0x68, 0x76, 0x63, 0x31]) - buffer.append(hvcC) - delegate?.muxer(self, didOutputVideo: buffer, withTimestamp: 0) - } } - func videoCodec(_ codec: VideoCodec, didOutput sampleBuffer: CMSampleBuffer) { + func append(_ sampleBuffer: CMSampleBuffer) { let keyframe = !sampleBuffer.isNotSync let decodeTimeStamp = sampleBuffer.decodeTimeStamp.isValid ? sampleBuffer.decodeTimeStamp : sampleBuffer.presentationTimeStamp let compositionTime = getCompositionTime(sampleBuffer) let delta = videoTimeStamp == .zero ? 0 : (decodeTimeStamp.seconds - videoTimeStamp.seconds) * 1000 - guard let data = sampleBuffer.dataBuffer?.data, 0 <= delta else { + guard let formatDescription = sampleBuffer.formatDescription, let data = sampleBuffer.dataBuffer?.data, 0 <= delta else { return } - switch codec.settings.format { - case .h264: + switch CMFormatDescriptionGetMediaSubType(formatDescription) { + case kCMVideoCodecType_H264: var buffer = Data([((keyframe ? FLVFrameType.key.rawValue : FLVFrameType.inter.rawValue) << 4) | FLVVideoCodec.avc.rawValue, FLVAVCPacketType.nal.rawValue]) buffer.append(contentsOf: compositionTime.bigEndian.data[1..<4]) buffer.append(data) delegate?.muxer(self, didOutputVideo: buffer, withTimestamp: delta) - case .hevc: + case kCMVideoCodecType_HEVC: var buffer = Data([0b10000000 | ((keyframe ? 
FLVFrameType.key.rawValue : FLVFrameType.inter.rawValue) << 4) | FLVVideoPacketType.codedFrames.rawValue, 0x68, 0x76, 0x63, 0x31]) buffer.append(contentsOf: compositionTime.bigEndian.data[1..<4]) buffer.append(data) delegate?.muxer(self, didOutputVideo: buffer, withTimestamp: delta) + default: + break } videoTimeStamp = decodeTimeStamp } diff --git a/Sources/RTMP/RTMPStream.swift b/Sources/RTMP/RTMPStream.swift index fe2e4e0a9..c56f111cf 100644 --- a/Sources/RTMP/RTMPStream.swift +++ b/Sources/RTMP/RTMPStream.swift @@ -246,7 +246,11 @@ open class RTMPStream: NetStream { var audioTimestampZero: Double = -1.0 var videoTimestamp: Double = 0.0 var videoTimestampZero: Double = -1.0 - private let muxer = RTMPMuxer() + private lazy var muxer = { + let muxer = RTMPMuxer() + muxer.delegate = self + return muxer + }() private var messages: [RTMPCommandMessage] = [] private var startedAt = Date() private var frameCount: UInt16 = 0 @@ -269,6 +273,7 @@ open class RTMPStream: NetStream { if rtmpConnection?.connected == true { rtmpConnection?.createStream(self) } + mixer.muxer = muxer } deinit { @@ -498,13 +503,11 @@ open class RTMPStream: NetStream { videoTimestampZero = -1.0 audioTimestamp = 0 audioTimestampZero = -1.0 - mixer.delegate = self mixer.startDecoding() case .publish: bitrateStrategy.setUp() startedAt = .init() muxer.dispose() - muxer.delegate = self mixer.startRunning() videoWasSent = false audioWasSent = false @@ -512,7 +515,7 @@ open class RTMPStream: NetStream { FCPublish() case .publishing: send(handlerName: "@setDataFrame", arguments: "onMetaData", makeMetaData()) - mixer.startEncoding(muxer) + mixer.startEncoding() default: break } @@ -613,12 +616,4 @@ extension RTMPStream: RTMPMuxerDelegate { videoTimestamp = withTimestamp + (videoTimestamp - floor(videoTimestamp)) frameCount += 1 } - - func muxer(_ muxer: RTMPMuxer, videoCodecErrorOccurred error: VideoCodec.Error) { - delegate?.stream(self, videoCodecErrorOccurred: error) - } - - func muxer(_ muxer: 
RTMPMuxer, audioCodecErrorOccurred error: AudioCodec.Error) { - delegate?.stream(self, audioCodecErrorOccurred: error) - } } diff --git a/Tests/Codec/AudioCodecTests.swift b/Tests/Codec/AudioCodecTests.swift index a012520e3..beaab52f5 100644 --- a/Tests/Codec/AudioCodecTests.swift +++ b/Tests/Codec/AudioCodecTests.swift @@ -6,92 +6,102 @@ import AVFoundation final class AudioCodecTests: XCTestCase { func testEncoderCMSampleBuffer44100_1024() { - let encoder = AudioCodec() + let encoder = HaishinKit.AudioCodec(lockQueue: DispatchQueue(label: "AudioCodecTests")) encoder.startRunning() for _ in 0..<10 { if let sampleBuffer = CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024) { - encoder.appendSampleBuffer(sampleBuffer) + encoder.append(sampleBuffer) } } } func testEncoderCMSampleBuffer48000_1024() { - let encoder = AudioCodec() + let encoder = HaishinKit.AudioCodec(lockQueue: DispatchQueue(label: "AudioCodecTests")) encoder.startRunning() for _ in 0..<10 { if let sampleBuffer = CMAudioSampleBufferFactory.makeSinWave(48000.0, numSamples: 1024) { - encoder.appendSampleBuffer(sampleBuffer) + encoder.append(sampleBuffer) } } } func testEncoderCMSampleBuffer24000_1024() { - let encoder = AudioCodec() + let encoder = HaishinKit.AudioCodec(lockQueue: DispatchQueue(label: "AudioCodecTests")) encoder.startRunning() for _ in 0..<10 { if let sampleBuffer = CMAudioSampleBufferFactory.makeSinWave(24000.0, numSamples: 1024) { - encoder.appendSampleBuffer(sampleBuffer) + encoder.append(sampleBuffer) } } } func testEncoderCMSampleBuffer16000_1024() { - let encoder = AudioCodec() + let encoder = HaishinKit.AudioCodec(lockQueue: DispatchQueue(label: "AudioCodecTests")) encoder.startRunning() for _ in 0..<10 { if let sampleBuffer = CMAudioSampleBufferFactory.makeSinWave(16000.0, numSamples: 1024) { - encoder.appendSampleBuffer(sampleBuffer) + encoder.append(sampleBuffer) } } } func testEncoderCMSampleBuffer8000_256() { - let encoder = AudioCodec() + let encoder = 
HaishinKit.AudioCodec(lockQueue: DispatchQueue(label: "AudioCodecTests")) encoder.startRunning() for _ in 0..<10 { if let sampleBuffer = CMAudioSampleBufferFactory.makeSinWave(8000.0, numSamples: 256) { - encoder.appendSampleBuffer(sampleBuffer) + encoder.append(sampleBuffer) } } } func testEncoderCMSampleBuffer8000_960() { - let encoder = AudioCodec() + let encoder = HaishinKit.AudioCodec(lockQueue: DispatchQueue(label: "AudioCodecTests")) encoder.startRunning() for _ in 0..<10 { if let sampleBuffer = CMAudioSampleBufferFactory.makeSinWave(8000.0, numSamples: 960) { - encoder.appendSampleBuffer(sampleBuffer) + encoder.append(sampleBuffer) } } } func testEncoderCMSampleBuffer8000_1224() { - let encoder = AudioCodec() + let encoder = HaishinKit.AudioCodec(lockQueue: DispatchQueue(label: "AudioCodecTests")) encoder.startRunning() for _ in 0..<10 { if let sampleBuffer = CMAudioSampleBufferFactory.makeSinWave(44100.0, numSamples: 1224) { - encoder.appendSampleBuffer(sampleBuffer) + encoder.append(sampleBuffer) } } } func testEncoderCMSampleBuffer8000_numSamples() { let numSamples: [Int] = [1024, 1024, 1028, 1024, 1028, 1028, 962, 962, 960, 2237, 2236] - let encoder = AudioCodec() + let encoder = HaishinKit.AudioCodec(lockQueue: DispatchQueue(label: "AudioCodecTests")) encoder.startRunning() for numSample in numSamples { if let sampleBuffer = CMAudioSampleBufferFactory.makeSinWave(44100.0, numSamples: numSample) { - encoder.appendSampleBuffer(sampleBuffer) + encoder.append(sampleBuffer) } } } func test3Channel_withoutCrash() { - let encoder = AudioCodec() + let encoder = HaishinKit.AudioCodec(lockQueue: DispatchQueue(label: "AudioCodecTests")) encoder.startRunning() if let sampleBuffer = CMAudioSampleBufferFactory.makeSilence(44100, numSamples: 256, channels: 3) { - encoder.appendSampleBuffer(sampleBuffer) + encoder.append(sampleBuffer) } } } +extension AudioCodecTests: AudioCodecDelegate { + func audioCodec(_ codec: HaishinKit.AudioCodec, didOutput audioFormat: 
AVAudioFormat) { + } + + func audioCodec(_ codec: HaishinKit.AudioCodec, didOutput audioBuffer: AVAudioBuffer, when: AVAudioTime) { + } + + func audioCodec(_ codec: HaishinKit.AudioCodec, errorOccurred error: HaishinKit.IOMixerAudioError) { + } +} diff --git a/Tests/MPEG/TSReaderTests.swift b/Tests/MPEG/TSReaderTests.swift index 1318a408a..f95273f69 100644 --- a/Tests/MPEG/TSReaderTests.swift +++ b/Tests/MPEG/TSReaderTests.swift @@ -21,8 +21,8 @@ final class TSReaderTests: XCTestCase { } } -private class TSReaderAudioCodec: TSReaderDelegate, AudioCodecDelegate { - private var audioCodec: HaishinKit.AudioCodec = .init() +private final class TSReaderAudioCodec: TSReaderDelegate, AudioCodecDelegate { + private var audioCodec: HaishinKit.AudioCodec = .init(lockQueue: DispatchQueue(label: "TSReaderAudioCodec")) init() { audioCodec.delegate = self @@ -36,18 +36,18 @@ private class TSReaderAudioCodec: TSReaderDelegate, AudioCodecDelegate { func reader(_ reader: HaishinKit.TSReader, id: UInt16, didRead sampleBuffer: CMSampleBuffer) { if sampleBuffer.formatDescription?.mediaType == .audio { - audioCodec.appendSampleBuffer(sampleBuffer) + audioCodec.append(sampleBuffer) } } - func audioCodec(_ codec: HaishinKit.AudioCodec, didOutput outputFormat: AVAudioFormat) { + func audioCodec(_ codec: HaishinKit.AudioCodec, didOutput outputFormat: AVAudioFormat) { } - func audioCodec(_ codec: HaishinKit.AudioCodec, errorOccurred error: HaishinKit.AudioCodec.Error) { + func audioCodec(_ codec: HaishinKit.AudioCodec, errorOccurred error: HaishinKit.IOMixerAudioError) { // XCTFail() } - func audioCodec(_ codec: HaishinKit.AudioCodec, didOutput audioBuffer: AVAudioBuffer, when: AVAudioTime) { + func audioCodec(_ codec: HaishinKit.AudioCodec, didOutput audioBuffer: AVAudioBuffer, when: AVAudioTime) { } } diff --git a/Tests/Media/IOAudioResamplerTests.swift b/Tests/Media/IOAudioResamplerTests.swift index 77fd0a713..68f0e8428 100644 --- a/Tests/Media/IOAudioResamplerTests.swift +++ 
b/Tests/Media/IOAudioResamplerTests.swift @@ -11,7 +11,7 @@ final class NullIOAudioResamplerDelegate: IOAudioResamplerDelegate { func resampler(_ resampler: HaishinKit.IOAudioResampler, didOutput audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) { } - func resampler(_ resampler: HaishinKit.IOAudioResampler, errorOccurred error: HaishinKit.AudioCodec.Error) { + func resampler(_ resampler: HaishinKit.IOAudioResampler, errorOccurred error: HaishinKit.IOMixerAudioError) { } } @@ -22,9 +22,9 @@ final class IOAudioResamplerTests: XCTestCase { let resampler = IOAudioResampler() resampler.settings = .init(sampleRate: 16000, channels: 1) resampler.delegate = nullIOAudioResamplerDelegate - resampler.appendSampleBuffer(CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!) + resampler.append(CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!) XCTAssertEqual(resampler.outputFormat?.sampleRate, 16000) - resampler.appendSampleBuffer(CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!) + resampler.append(CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!) XCTAssertEqual(resampler.outputFormat?.sampleRate, 16000) } @@ -32,13 +32,13 @@ final class IOAudioResamplerTests: XCTestCase { let resampler = IOAudioResampler() resampler.settings = .init(sampleRate: 44100, channels: 1) resampler.delegate = nullIOAudioResamplerDelegate - resampler.appendSampleBuffer(CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!) + resampler.append(CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!) XCTAssertEqual(resampler.outputFormat?.sampleRate, 44100) - resampler.appendSampleBuffer(CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!) + resampler.append(CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!) 
XCTAssertEqual(resampler.outputFormat?.sampleRate, 44100) - resampler.appendSampleBuffer(CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!) + resampler.append(CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!) XCTAssertEqual(resampler.outputFormat?.sampleRate, 44100) - resampler.appendSampleBuffer(CMAudioSampleBufferFactory.makeSinWave(16000, numSamples: 1024 * 20, channels: 1)!) + resampler.append(CMAudioSampleBufferFactory.makeSinWave(16000, numSamples: 1024 * 20, channels: 1)!) XCTAssertEqual(resampler.outputFormat?.sampleRate, 44100) } @@ -46,9 +46,9 @@ final class IOAudioResamplerTests: XCTestCase { let resampler = IOAudioResampler() resampler.settings = .init(sampleRate: 48000, channels: 1) resampler.delegate = nullIOAudioResamplerDelegate - resampler.appendSampleBuffer(CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!) + resampler.append(CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!) XCTAssertEqual(resampler.outputFormat?.sampleRate, 48000) - resampler.appendSampleBuffer(CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024 * 2, channels: 1)!) + resampler.append(CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024 * 2, channels: 1)!) XCTAssertEqual(resampler.outputFormat?.sampleRate, 48000) } @@ -56,9 +56,9 @@ final class IOAudioResamplerTests: XCTestCase { let resampler = IOAudioResampler() resampler.settings = .init(sampleRate: 0, channels: 1) resampler.delegate = nullIOAudioResamplerDelegate - resampler.appendSampleBuffer(CMAudioSampleBufferFactory.makeSinWave(44000, numSamples: 1024, channels: 1)!) + resampler.append(CMAudioSampleBufferFactory.makeSinWave(44000, numSamples: 1024, channels: 1)!) XCTAssertEqual(resampler.outputFormat?.sampleRate, 44000) - resampler.appendSampleBuffer(CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!) 
+ resampler.append(CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!) XCTAssertEqual(resampler.outputFormat?.sampleRate, 48000) } @@ -66,9 +66,9 @@ final class IOAudioResamplerTests: XCTestCase { let resampler = IOAudioResampler() resampler.settings = .init(sampleRate: 0, channels: 1) resampler.delegate = nullIOAudioResamplerDelegate - resampler.appendSampleBuffer(CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!) + resampler.append(CMAudioSampleBufferFactory.makeSinWave(48000, numSamples: 1024, channels: 1)!) XCTAssertEqual(resampler.outputFormat?.sampleRate, 48000) - resampler.appendSampleBuffer(CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!) + resampler.append(CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!) XCTAssertEqual(resampler.outputFormat?.sampleRate, 44100) } @@ -76,9 +76,9 @@ final class IOAudioResamplerTests: XCTestCase { let resampler = IOAudioResampler() resampler.settings = .init(sampleRate: 0, channels: 1) resampler.delegate = nullIOAudioResamplerDelegate - resampler.appendSampleBuffer(CMAudioSampleBufferFactory.makeSinWave(16000, numSamples: 1024, channels: 1)!) + resampler.append(CMAudioSampleBufferFactory.makeSinWave(16000, numSamples: 1024, channels: 1)!) XCTAssertEqual(resampler.outputFormat?.sampleRate, 16000) - resampler.appendSampleBuffer(CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!) + resampler.append(CMAudioSampleBufferFactory.makeSinWave(44100, numSamples: 1024, channels: 1)!) 
XCTAssertEqual(resampler.outputFormat?.sampleRate, 44100) } } diff --git a/Tests/Media/IOAudioRingBufferTests.swift b/Tests/Media/IOAudioRingBufferTests.swift index ebfe08482..1b9791c25 100644 --- a/Tests/Media/IOAudioRingBufferTests.swift +++ b/Tests/Media/IOAudioRingBufferTests.swift @@ -43,7 +43,7 @@ final class IOAudioRingBufferTests: XCTestCase { let bufferList = UnsafeMutableAudioBufferListPointer(readBuffer.mutableAudioBufferList) readBuffer.frameLength = AVAudioFrameCount(numSamples) for _ in 0..<30 { - buffer?.appendSampleBuffer(sinWave) + buffer?.append(sinWave) readBuffer.int16ChannelData?[0].update(repeating: 0, count: numSamples) _ = buffer?.render(UInt32(numSamples), ioData: readBuffer.mutableAudioBufferList) XCTAssertEqual(sinWave.dataBuffer?.data?.bytes, Data(bytes: bufferList[0].mData!, count: numSamples * Int(channels) * 2).bytes) diff --git a/Tests/Media/IORecorderTests.swift b/Tests/Media/IORecorderTests.swift index 8a7722192..878f4d1b1 100644 --- a/Tests/Media/IORecorderTests.swift +++ b/Tests/Media/IORecorderTests.swift @@ -22,7 +22,7 @@ final class IORecorderTests: XCTestCase, IORecorderDelegate { return } presentationTimeStamp = CMTimeAdd(presentationTimeStamp, sampleBuffer.duration) - recorder.appendSampleBuffer(sampleBuffer) + recorder.append(sampleBuffer) } recorder.stopRunning() sleep(1) @@ -44,13 +44,13 @@ final class IORecorderTests: XCTestCase, IORecorderDelegate { return } presentationTimeStamp = CMTimeAdd(presentationTimeStamp, sampleBuffer.duration) - recorder.appendSampleBuffer(sampleBuffer) + recorder.append(sampleBuffer) } recorder.stopRunning() sleep(1) } - func recorder(_ recorder: HaishinKit.IORecorder, errorOccured error: HaishinKit.IORecorder.Error) { + func recorder(_ recorder: HaishinKit.IORecorder, errorOccured error: IORecorder.Error) { // print("recorder:errorOccured", error) }