diff --git a/Examples/iOS/AudioCapture.swift b/Examples/iOS/AudioCapture.swift new file mode 100644 index 000000000..89b76ee2d --- /dev/null +++ b/Examples/iOS/AudioCapture.swift @@ -0,0 +1,41 @@ +import AVFoundation +import Foundation +import HaishinKit + +protocol AudioCaptureDelegate: AnyObject { + func audioCapture(_ audioCapture: AudioCapture, buffer: AVAudioBuffer, time: AVAudioTime) +} + +final class AudioCapture { + var isRunning: Atomic<Bool> = .init(false) + weak var delegate: (any AudioCaptureDelegate)? + private let audioEngine = AVAudioEngine() +} + +extension AudioCapture: Running { + func startRunning() { + guard !isRunning.value else { + return + } + let input = audioEngine.inputNode + let mixer = audioEngine.mainMixerNode + audioEngine.connect(input, to: mixer, format: input.inputFormat(forBus: 0)) + input.installTap(onBus: 0, bufferSize: 1024, format: input.inputFormat(forBus: 0)) { buffer, when in + self.delegate?.audioCapture(self, buffer: buffer, time: when) + } + do { + try audioEngine.start() + isRunning.mutate { $0 = true } + } catch { + logger.error(error) + } + } + + func stopRunning() { + guard isRunning.value else { + return + } + audioEngine.stop() + isRunning.mutate { $0 = false } + } +} diff --git a/Examples/iOS/LiveViewController.swift b/Examples/iOS/LiveViewController.swift index 508af4306..8c5f6008a 100644 --- a/Examples/iOS/LiveViewController.swift +++ b/Examples/iOS/LiveViewController.swift @@ -28,6 +28,11 @@ final class LiveViewController: UIViewController { private var currentPosition: AVCaptureDevice.Position = .back private var retryCount: Int = 0 private var preferedStereo = false + private lazy var audioCapture: AudioCapture = { + let audioCapture = AudioCapture() + audioCapture.delegate = self + return audioCapture + }() override func viewDidLoad() { super.viewDidLoad() @@ -364,6 +369,13 @@ extension LiveViewController: IORecorderDelegate { } } +extension LiveViewController: AudioCaptureDelegate { + // MARK: AudioCaptureDelegate + func audioCapture(_ audioCapture: AudioCapture, buffer: AVAudioBuffer, time: AVAudioTime) { + rtmpStream.appendAudioBuffer(buffer, when: time) + } +} + extension LiveViewController: UIPickerViewDelegate { // MARK: UIPickerViewDelegate func pickerView(_ pickerView: UIPickerView, didSelectRow row: Int, inComponent component: Int) { diff --git a/Examples/iOS/PlaybackViewController.swift b/Examples/iOS/PlaybackViewController.swift index 0909d1802..84fcf343d 100644 --- a/Examples/iOS/PlaybackViewController.swift +++ b/Examples/iOS/PlaybackViewController.swift @@ -123,7 +123,7 @@ extension PlaybackViewController: AVPictureInPictureSampleBufferPlaybackDelegate extension PlaybackViewController: NetStreamDelegate { // MARK: NetStreamDelegate - func stream(_ stream: NetStream, didOutput audio: AVAudioBuffer, presentationTimeStamp: CMTime) { + func stream(_ stream: NetStream, didOutput audio: AVAudioBuffer, when: AVAudioTime) { } func stream(_ stream: NetStream, didOutput video: CMSampleBuffer) { diff --git a/Examples/iOS/VideoAdaptiveNetBitRateStrategy.swift b/Examples/iOS/VideoAdaptiveNetBitRateStrategy.swift index 7481d2b20..5c082c36b 100644 --- a/Examples/iOS/VideoAdaptiveNetBitRateStrategy.swift +++ b/Examples/iOS/VideoAdaptiveNetBitRateStrategy.swift @@ -1,4 +1,5 @@ import Foundation +import HaishinKit public final class VideoAdaptiveNetBitRateStrategy: NetBitRateStrategyConvertible { public weak var stream: NetStream?
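The new Examples/iOS/AudioCapture.swift and the LiveViewController changes above show the pattern this patch enables: tap the AVAudioEngine input node and hand each buffer, together with the AVAudioTime the tap delivers, straight to the stream. A minimal wiring sketch, assuming an already created and connected RTMPStream like the rtmpStream in LiveViewController (the AVAudioSession configuration is a typical prerequisite for microphone capture and is not part of this diff; MicrophoneSource is a hypothetical name used only for this sketch):

import AVFoundation
import HaishinKit

final class MicrophoneSource: AudioCaptureDelegate {
    private let rtmpStream: RTMPStream // assumed created and connected elsewhere
    private let audioCapture = AudioCapture()

    init(rtmpStream: RTMPStream) {
        self.rtmpStream = rtmpStream
        audioCapture.delegate = self
        audioCapture.startRunning()
    }

    // MARK: AudioCaptureDelegate
    func audioCapture(_ audioCapture: AudioCapture, buffer: AVAudioBuffer, time: AVAudioTime) {
        // The AVAudioTime from the tap travels with the buffer; appendAudioBuffer(_:when:)
        // is the NetStream entry point this patch adds, replacing the CMTime-based
        // presentationTimeStamp parameter used before.
        rtmpStream.appendAudioBuffer(buffer, when: time)
    }
}

The example app does the same thing directly in LiveViewController, which is why it now conforms to AudioCaptureDelegate.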
diff --git a/HaishinKit.xcodeproj/project.pbxproj b/HaishinKit.xcodeproj/project.pbxproj index 6b4185b99..6fcaffdd3 100644 --- a/HaishinKit.xcodeproj/project.pbxproj +++ b/HaishinKit.xcodeproj/project.pbxproj @@ -137,6 +137,8 @@ 2EC97B7427880FF400D8BE32 /* MTHKSwiftUiView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2EC97B7027880FF400D8BE32 /* MTHKSwiftUiView.swift */; }; BC0394562AA8A384006EDE38 /* Logboard.xcframework in Frameworks */ = {isa = PBXBuildFile; fileRef = BC34DFD125EBB12C005F975A /* Logboard.xcframework */; }; BC03945F2AA8AFF5006EDE38 /* ExpressibleByIntegerLiteral+ExtensionTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC03945E2AA8AFF5006EDE38 /* ExpressibleByIntegerLiteral+ExtensionTests.swift */; }; + BC04A2D42AD2D1D700C87A3E /* AVAudioTime+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC04A2D32AD2D1D700C87A3E /* AVAudioTime+Extension.swift */; }; + BC04A2D62AD2D95500C87A3E /* CMTime+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC04A2D52AD2D95500C87A3E /* CMTime+Extension.swift */; }; BC0BF4F22985FA9000D72CB4 /* HaishinKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 2945CBBD1B4BE66000104112 /* HaishinKit.framework */; }; BC0BF4F529866FDE00D72CB4 /* IOMixerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC0BF4F429866FDE00D72CB4 /* IOMixerTests.swift */; }; BC0D236D26331BAB001DDA0C /* DataBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC0D236C26331BAB001DDA0C /* DataBuffer.swift */; }; @@ -147,7 +149,6 @@ BC11024A2925147300D48035 /* IOCaptureUnit.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC1102492925147300D48035 /* IOCaptureUnit.swift */; }; BC110253292DD6E900D48035 /* vImage_Buffer+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC110252292DD6E900D48035 /* vImage_Buffer+Extension.swift */; }; BC110257292E661E00D48035 /* MultiCamCaptureSettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC110256292E661E00D48035 /* MultiCamCaptureSettings.swift */; }; - BC1BC9042AC80531009005D3 /* VideoAdaptiveNetBitRateStrategy.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC1BC9032AC80531009005D3 /* VideoAdaptiveNetBitRateStrategy.swift */; }; BC1DC4A429F4F74F00E928ED /* AVCaptureSession+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC1DC4A329F4F74F00E928ED /* AVCaptureSession+Extension.swift */; }; BC1DC4FB2A02868900E928ED /* FLVVideoFourCC.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC1DC4FA2A02868900E928ED /* FLVVideoFourCC.swift */; }; BC1DC5042A02894D00E928ED /* FLVVideoFourCCTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC1DC5032A02894D00E928ED /* FLVVideoFourCCTests.swift */; }; @@ -191,6 +192,8 @@ BC562DC7295767860048D89A /* AVCaptureDevice+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC562DC6295767860048D89A /* AVCaptureDevice+Extension.swift */; }; BC562DCB29576D220048D89A /* AVCaptureSession.Preset+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC562DCA29576D220048D89A /* AVCaptureSession.Preset+Extension.swift */; }; BC566F6E25D2ECC500573C4C /* HLSService.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC566F6D25D2ECC500573C4C /* HLSService.swift */; }; + BC56765B2AD2AED800524F7E /* AudioCapture.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC56765A2AD2AED800524F7E /* AudioCapture.swift */; }; + BC56765D2AD2B24800524F7E /* VideoAdaptiveNetBitRateStrategy.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC1BC9032AC80531009005D3 /* 
VideoAdaptiveNetBitRateStrategy.swift */; }; BC570B4828E9ACC10098A12C /* IOUnit.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC570B4728E9ACC10098A12C /* IOUnit.swift */; }; BC6692F32AC2F717009EC058 /* NetBitRateStrategyConvertible.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC6692F22AC2F717009EC058 /* NetBitRateStrategyConvertible.swift */; }; BC6FC91E29609A6800A746EE /* ShapeFactory.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC6FC91D29609A6800A746EE /* ShapeFactory.swift */; }; @@ -553,6 +556,8 @@ 2EC97B6F27880FF400D8BE32 /* Views.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Views.swift; sourceTree = "<group>"; }; 2EC97B7027880FF400D8BE32 /* MTHKSwiftUiView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MTHKSwiftUiView.swift; sourceTree = "<group>"; }; BC03945E2AA8AFF5006EDE38 /* ExpressibleByIntegerLiteral+ExtensionTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "ExpressibleByIntegerLiteral+ExtensionTests.swift"; sourceTree = "<group>"; }; + BC04A2D32AD2D1D700C87A3E /* AVAudioTime+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioTime+Extension.swift"; sourceTree = "<group>"; }; + BC04A2D52AD2D95500C87A3E /* CMTime+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CMTime+Extension.swift"; sourceTree = "<group>"; }; BC0BF4F429866FDE00D72CB4 /* IOMixerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOMixerTests.swift; sourceTree = "<group>"; }; BC0D236C26331BAB001DDA0C /* DataBuffer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = DataBuffer.swift; sourceTree = "<group>"; }; BC0F1FD42ACBD39600C326FF /* MemoryUsage.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MemoryUsage.swift; sourceTree = "<group>"; }; @@ -602,6 +607,7 @@ BC562DC6295767860048D89A /* AVCaptureDevice+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice+Extension.swift"; sourceTree = "<group>"; }; BC562DCA29576D220048D89A /* AVCaptureSession.Preset+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVCaptureSession.Preset+Extension.swift"; sourceTree = "<group>"; }; BC566F6D25D2ECC500573C4C /* HLSService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = HLSService.swift; sourceTree = "<group>"; }; + BC56765A2AD2AED800524F7E /* AudioCapture.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioCapture.swift; sourceTree = "<group>"; }; BC570B4728E9ACC10098A12C /* IOUnit.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOUnit.swift; sourceTree = "<group>"; }; BC6692F22AC2F717009EC058 /* NetBitRateStrategyConvertible.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NetBitRateStrategyConvertible.swift; sourceTree = "<group>"; }; BC6FC91D29609A6800A746EE /* ShapeFactory.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ShapeFactory.swift; sourceTree = "<group>"; }; @@ -976,6 +982,7 @@ 29A39C801D85BEFA007C27E9 /* Screencast */, 296897411CDB01D20074D5F0 /* AppDelegate.swift */, 296897421CDB01D20074D5F0 /* Assets.xcassets */, + BC56765A2AD2AED800524F7E /* AudioCapture.swift */, 291F4E361CF206E200F59C51 /* Icon.png */, 296897431CDB01D20074D5F0 /* Info.plist */, 2968974D1CDB01DD0074D5F0 /*
LaunchScreen.storyboard */, @@ -1124,6 +1131,7 @@ children = ( 1A2166D3A449D813866FE9D9 /* AVAudioFormat+Extension.swift */, BC22EEF12AAF5D6300E3406D /* AVAudioPCMBuffer+Extension.swift */, + BC04A2D32AD2D1D700C87A3E /* AVAudioTime+Extension.swift */, BC562DC6295767860048D89A /* AVCaptureDevice+Extension.swift */, BC2828AC2AA3225100741013 /* AVCaptureDevice.Format+Extension.swift */, BC1DC4A329F4F74F00E928ED /* AVCaptureSession+Extension.swift */, @@ -1134,6 +1142,7 @@ 29EA87E91E79A3B70043A5F8 /* CMBlockBuffer+Extension.swift */, 2916196B1E7F0768009FB344 /* CMFormatDescription+Extension.swift */, 29EA87DE1E79A0810043A5F8 /* CMSampleBuffer+Extension.swift */, + BC04A2D52AD2D95500C87A3E /* CMTime+Extension.swift */, 29EA87E11E79A1E90043A5F8 /* CMVideoFormatDescription+Extension.swift */, 29EA87EC1E79A3E30043A5F8 /* CVPixelBuffer+Extension.swift */, BC11023D2917C35B00D48035 /* CVPixelBufferPool+Extension.swift */, @@ -1710,7 +1719,6 @@ 2958910E1EEB8D3C00CE51E1 /* FLVVideoCodec.swift in Sources */, BC1DC5142A05428800E928ED /* HEVCNALUnit.swift in Sources */, BC6FC9222961B3D800A746EE /* vImage_CGImageFormat+Extension.swift in Sources */, - BC1BC9042AC80531009005D3 /* VideoAdaptiveNetBitRateStrategy.swift in Sources */, BC20DF38250377A3007BC608 /* IOUIScreenCaptureUnit.swift in Sources */, 29B876AF1CD70B2800FC07DA /* RTMPChunk.swift in Sources */, 29B876841CD70AE800FC07DA /* AVCDecoderConfigurationRecord.swift in Sources */, @@ -1745,6 +1753,7 @@ 2958912A1EEB8F1D00CE51E1 /* FLVSoundSize.swift in Sources */, 29EA87DC1E79A0460043A5F8 /* Data+Extension.swift in Sources */, 29DF20622312A3DD004057C3 /* RTMPNWSocket.swift in Sources */, + BC04A2D62AD2D95500C87A3E /* CMTime+Extension.swift in Sources */, BC22EEF22AAF5D6300E3406D /* AVAudioPCMBuffer+Extension.swift in Sources */, BCCBCE9729A90D880095B51C /* AVCNALUnit.swift in Sources */, 29B876BD1CD70B3900FC07DA /* CRC32.swift in Sources */, @@ -1797,6 +1806,7 @@ BC6692F32AC2F717009EC058 /* NetBitRateStrategyConvertible.swift in Sources */, BC6FC91E29609A6800A746EE /* ShapeFactory.swift in Sources */, BC32E88829C9971100051507 /* InstanceHolder.swift in Sources */, + BC04A2D42AD2D1D700C87A3E /* AVAudioTime+Extension.swift in Sources */, BC7C56B7299E579F00C41A9B /* AudioCodecSettings.swift in Sources */, 29B876AC1CD70B2800FC07DA /* AMF3Serializer.swift in Sources */, BC31DBD22A653D1600C4DEA3 /* IOAudioMonitor.swift in Sources */, @@ -1859,7 +1869,9 @@ isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( + BC56765D2AD2B24800524F7E /* VideoAdaptiveNetBitRateStrategy.swift in Sources */, 291468191E581C8F00E619BA /* Preference.swift in Sources */, + BC56765B2AD2AED800524F7E /* AudioCapture.swift in Sources */, BCFB355A24FA40DD00DC5108 /* PlaybackContainerViewController.swift in Sources */, BC0F1FD52ACBD39600C326FF /* MemoryUsage.swift in Sources */, 296897671CDB02940074D5F0 /* AppDelegate.swift in Sources */, diff --git a/Sources/Codec/AudioCodec.swift b/Sources/Codec/AudioCodec.swift index 8b994e4fd..f8532e5d6 100644 --- a/Sources/Codec/AudioCodec.swift +++ b/Sources/Codec/AudioCodec.swift @@ -7,7 +7,7 @@ public protocol AudioCodecDelegate: AnyObject { /// Tells the receiver to output an AVAudioFormat. func audioCodec(_ codec: AudioCodec, didOutput audioFormat: AVAudioFormat) /// Tells the receiver to output an encoded or decoded CMSampleBuffer. 
- func audioCodec(_ codec: AudioCodec, didOutput audioBuffer: AVAudioBuffer, presentationTimeStamp: CMTime) + func audioCodec(_ codec: AudioCodec, didOutput audioBuffer: AVAudioBuffer, when: AVAudioTime) /// Tells the receiver that an error occurred. func audioCodec(_ codec: AudioCodec, errorOccurred error: AudioCodec.Error) } @@ -81,7 +81,7 @@ public class AudioCodec { buffer.byteLength = UInt32(byteCount) if let blockBuffer = sampleBuffer.dataBuffer { CMBlockBufferCopyDataBytes(blockBuffer, atOffset: offset + ADTSHeader.size, dataLength: byteCount, destination: buffer.data) - appendAudioBuffer(buffer, presentationTimeStamp: presentationTimeStamp) + appendAudioBuffer(buffer, when: presentationTimeStamp.makeAudioTime()) presentationTimeStamp = CMTimeAdd(presentationTimeStamp, CMTime(value: CMTimeValue(1024), timescale: sampleBuffer.presentationTimeStamp.timescale)) offset += sampleSize } @@ -91,7 +91,7 @@ } } - func appendAudioBuffer(_ audioBuffer: AVAudioBuffer, presentationTimeStamp: CMTime) { + func appendAudioBuffer(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) { guard let audioConverter, isRunning.value else { return } @@ -111,7 +111,7 @@ } switch outputStatus { case .haveData: - delegate?.audioCodec(self, didOutput: outputBuffer, presentationTimeStamp: presentationTimeStamp) + delegate?.audioCodec(self, didOutput: outputBuffer, when: when) case .error: if let error { delegate?.audioCodec(self, errorOccurred: .failedToConvert(error: error)) diff --git a/Sources/Extension/AVAudioPCMBuffer+Extension.swift b/Sources/Extension/AVAudioPCMBuffer+Extension.swift index 81ec75124..9e46ad94d 100644 --- a/Sources/Extension/AVAudioPCMBuffer+Extension.swift +++ b/Sources/Extension/AVAudioPCMBuffer+Extension.swift @@ -1,7 +1,7 @@ import AVFoundation extension AVAudioPCMBuffer { - final func makeSampleBuffer(_ presentationTimeStamp: CMTime) -> CMSampleBuffer? { + final func makeSampleBuffer(_ when: AVAudioTime) -> CMSampleBuffer? { var status: OSStatus = noErr var sampleBuffer: CMSampleBuffer?
status = CMAudioSampleBufferCreateWithPacketDescriptions( @@ -12,7 +12,7 @@ extension AVAudioPCMBuffer { refcon: nil, formatDescription: format.formatDescription, sampleCount: Int(frameLength), - presentationTimeStamp: presentationTimeStamp, + presentationTimeStamp: when.makeTime(), packetDescriptions: nil, sampleBufferOut: &sampleBuffer ) diff --git a/Sources/Extension/AVAudioTime+Extension.swift b/Sources/Extension/AVAudioTime+Extension.swift new file mode 100644 index 000000000..ed9451731 --- /dev/null +++ b/Sources/Extension/AVAudioTime+Extension.swift @@ -0,0 +1,9 @@ +import AVFoundation +import CoreMedia +import Foundation + +extension AVAudioTime { + func makeTime() -> CMTime { + return .init(value: CMTimeValue(sampleTime), timescale: CMTimeScale(sampleRate)) + } +} diff --git a/Sources/Extension/CMTime+Extension.swift b/Sources/Extension/CMTime+Extension.swift new file mode 100644 index 000000000..ec166b5a5 --- /dev/null +++ b/Sources/Extension/CMTime+Extension.swift @@ -0,0 +1,8 @@ +import AVFoundation +import Foundation + +extension CMTime { + func makeAudioTime() -> AVAudioTime { + return .init(sampleTime: value, atRate: Double(timescale)) + } +} diff --git a/Sources/MPEG/TSWriter.swift b/Sources/MPEG/TSWriter.swift index 42e84616c..f88851e88 100644 --- a/Sources/MPEG/TSWriter.swift +++ b/Sources/MPEG/TSWriter.swift @@ -231,7 +231,7 @@ extension TSWriter: AudioCodecDelegate { audioConfig = AudioSpecificConfig(formatDescription: outputFormat.formatDescription) } - public func audioCodec(_ codec: AudioCodec, didOutput audioBuffer: AVAudioBuffer, presentationTimeStamp: CMTime) { + public func audioCodec(_ codec: AudioCodec, didOutput audioBuffer: AVAudioBuffer, when: AVAudioTime) { guard let audioBuffer = audioBuffer as? AVAudioCompressedBuffer else { return } @@ -240,7 +240,7 @@ streamID: 192, bytes: audioBuffer.data.assumingMemoryBound(to: UInt8.self), count: audioBuffer.byteLength, - presentationTimeStamp: presentationTimeStamp, + presentationTimeStamp: when.makeTime(), decodeTimeStamp: .invalid, randomAccessIndicator: true ) diff --git a/Sources/Media/IOAudioMonitor.swift b/Sources/Media/IOAudioMonitor.swift index 0861218c5..2f61ae913 100644 --- a/Sources/Media/IOAudioMonitor.swift +++ b/Sources/Media/IOAudioMonitor.swift @@ -4,10 +4,10 @@ import CoreMedia import Foundation final class IOAudioMonitor { - var inSourceFormat: AudioStreamBasicDescription? { + var inputFormat: AVAudioFormat? { didSet { - if var inSourceFormat { - ringBuffer = .init(&inSourceFormat) + if let inputFormat { + ringBuffer = .init(inputFormat) if isRunning.value { audioUnit = makeAudioUnit() } @@ -40,11 +40,11 @@ stopRunning() } - func appendAudioPCMBuffer(_ audioPCMBuffer: AVAudioPCMBuffer) { + func appendAudioPCMBuffer(_ audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) { guard isRunning.value else { return } - ringBuffer?.appendAudioPCMBuffer(audioPCMBuffer) + ringBuffer?.appendAudioPCMBuffer(audioPCMBuffer, when: when) } private func render(_ inNumberFrames: UInt32, ioData: UnsafeMutablePointer<AudioBufferList>?) -> OSStatus { @@ -64,9 +64,10 @@ } private func makeAudioUnit() -> AudioUnit? { - guard var inSourceFormat else { + guard let inputFormat else { return nil } + var inSourceFormat = inputFormat.formatDescription.audioStreamBasicDescription var audioUnit: AudioUnit?
#if os(macOS) let subType = kAudioUnitSubType_DefaultOutput diff --git a/Sources/Media/IOAudioResampler.swift b/Sources/Media/IOAudioResampler.swift index a5256330a..32de0af94 100644 --- a/Sources/Media/IOAudioResampler.swift +++ b/Sources/Media/IOAudioResampler.swift @@ -2,11 +2,11 @@ import Accelerate import AVFoundation private let kIOAudioResampler_frameCapacity: AVAudioFrameCount = 1024 -private let kIOAudioResampler_presentationTimeStamp: CMTime = .zero +private let kIOAudioResampler_sampleTime: AVAudioFramePosition = 0 protocol IOAudioResamplerDelegate: AnyObject { func resampler(_ resampler: IOAudioResampler, didOutput audioFormat: AVAudioFormat) - func resampler(_ resampler: IOAudioResampler, didOutput audioPCMBuffer: AVAudioPCMBuffer, presentationTimeStamp: CMTime) + func resampler(_ resampler: IOAudioResampler, didOutput audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) func resampler(_ resampler: IOAudioResampler, errorOccurred error: AudioCodec.Error) } @@ -92,7 +92,6 @@ final class IOAudioResampler { setUp(&inSourceFormat) } } - private var sampleRate: Int32 = 0 private var ringBuffer: IOAudioRingBuffer? private var inputBuffer: AVAudioPCMBuffer? private var outputBuffer: AVAudioPCMBuffer? @@ -106,14 +105,31 @@ final class IOAudioResampler { delegate?.resampler(self, didOutput: audioConverter.outputFormat) } } - private var presentationTimeStamp: CMTime = kIOAudioResampler_presentationTimeStamp + private var sampleTime: AVAudioFramePosition = kIOAudioResampler_sampleTime func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) { inSourceFormat = sampleBuffer.formatDescription?.audioStreamBasicDescription - guard let inputBuffer, let outputBuffer, let ringBuffer else { + if sampleTime == kIOAudioResampler_sampleTime { + sampleTime = sampleBuffer.presentationTimeStamp.value + } + ringBuffer?.appendSampleBuffer(sampleBuffer) + resample() + } + + func appendAudioPCMBuffer(_ audioBuffer: AVAudioPCMBuffer, when: AVAudioTime) { + inSourceFormat = audioBuffer.format.formatDescription.audioStreamBasicDescription + if sampleTime == kIOAudioResampler_sampleTime { + sampleTime = when.sampleTime + } + ringBuffer?.appendAudioPCMBuffer(audioBuffer, when: when) + resample() + } + + @inline(__always) + private func resample() { + guard let outputBuffer, let inputBuffer, let ringBuffer else { return } - ringBuffer.appendSampleBuffer(sampleBuffer) var status: AVAudioConverterOutputStatus? = .endOfStream repeat { var error: NSError? @@ -130,11 +146,8 @@ final class IOAudioResampler { } switch status { case .haveData: - if presentationTimeStamp == .zero { - presentationTimeStamp = CMTime(seconds: sampleBuffer.presentationTimeStamp.seconds, preferredTimescale: sampleRate) - } - delegate?.resampler(self, didOutput: outputBuffer, presentationTimeStamp: presentationTimeStamp) - self.presentationTimeStamp = CMTimeAdd(presentationTimeStamp, .init(value: 1024, timescale: sampleRate)) + delegate?.resampler(self, didOutput: outputBuffer, when: .init(sampleTime: sampleTime, atRate: outputBuffer.format.sampleRate)) + sampleTime += 1024 case .error: if let error { delegate?.resampler(self, errorOccurred: .failedToConvert(error: error)) @@ -148,9 +161,9 @@ final class IOAudioResampler { private func setUp(_ inSourceFormat: inout AudioStreamBasicDescription) { let inputFormat = AVAudioFormatFactory.makeAudioFormat(&inSourceFormat) let outputFormat = settings.makeOutputFormat(inputFormat) ?? 
inputFormat - ringBuffer = .init(&inSourceFormat) if let inputFormat { inputBuffer = .init(pcmFormat: inputFormat, frameCapacity: 1024 * 4) + ringBuffer = .init(inputFormat) } if let outputFormat { outputBuffer = .init(pcmFormat: outputFormat, frameCapacity: kIOAudioResampler_frameCapacity) @@ -159,8 +172,7 @@ final class IOAudioResampler { if logger.isEnabledFor(level: .info) { logger.info("inputFormat:", inputFormat, ",outputFormat:", outputFormat) } - sampleRate = Int32(outputFormat.sampleRate) - presentationTimeStamp = .zero + sampleTime = kIOAudioResampler_sampleTime audioConverter = .init(from: inputFormat, to: outputFormat) } else { delegate?.resampler(self, errorOccurred: .failedToCreate(from: inputFormat, to: outputFormat)) diff --git a/Sources/Media/IOAudioRingBuffer.swift b/Sources/Media/IOAudioRingBuffer.swift index bc1aab1d1..51b19f91a 100644 --- a/Sources/Media/IOAudioRingBuffer.swift +++ b/Sources/Media/IOAudioRingBuffer.swift @@ -11,98 +11,115 @@ final class IOAudioRingBuffer { if tail <= head { return head - tail + skip } - return Int(buffer.frameLength) - tail + head + skip + return Int(outputBuffer.frameLength) - tail + head + skip } - private(set) var presentationTimeStamp: CMTime = .zero private var head = 0 private var tail = 0 private var skip = 0 - private var format: AVAudioFormat - private var buffer: AVAudioPCMBuffer - private var workingBuffer: AVAudioPCMBuffer + private var sampleTime: AVAudioFramePosition = 0 + private var inputFormat: AVAudioFormat + private var inputBuffer: AVAudioPCMBuffer + private var outputBuffer: AVAudioPCMBuffer - init?(_ inSourceFormat: inout AudioStreamBasicDescription, bufferCounts: UInt32 = IOAudioRingBuffer.bufferCounts) { + init?(_ inputFormat: AVAudioFormat, bufferCounts: UInt32 = IOAudioRingBuffer.bufferCounts) { guard - inSourceFormat.mFormatID == kAudioFormatLinearPCM, - let format = AVAudioFormatFactory.makeAudioFormat(&inSourceFormat), - let workingBuffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: Self.numSamples) else { + let inputBuffer = AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: Self.numSamples) else { return nil } - guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: Self.numSamples * bufferCounts) else { + guard let outputBuffer = AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: Self.numSamples * bufferCounts) else { return nil } - self.format = format - self.buffer = buffer - self.buffer.frameLength = self.buffer.frameCapacity - self.workingBuffer = workingBuffer + self.inputFormat = inputFormat + self.inputBuffer = inputBuffer + self.outputBuffer = outputBuffer + self.outputBuffer.frameLength = self.outputBuffer.frameCapacity } func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) { guard CMSampleBufferDataIsReady(sampleBuffer) else { return } - if presentationTimeStamp == .zero { - presentationTimeStamp = sampleBuffer.presentationTimeStamp + if sampleTime == 0 { + sampleTime = sampleBuffer.presentationTimeStamp.value } - if workingBuffer.frameLength < sampleBuffer.numSamples { - if let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: AVAudioFrameCount(sampleBuffer.numSamples)) { - self.workingBuffer = buffer + if inputBuffer.frameLength < sampleBuffer.numSamples { + if let buffer = AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: AVAudioFrameCount(sampleBuffer.numSamples)) { + self.inputBuffer = buffer } } - workingBuffer.frameLength = AVAudioFrameCount(sampleBuffer.numSamples) + inputBuffer.frameLength = AVAudioFrameCount(sampleBuffer.numSamples) 
let status = CMSampleBufferCopyPCMDataIntoAudioBufferList( sampleBuffer, at: 0, frameCount: Int32(sampleBuffer.numSamples), - into: workingBuffer.mutableAudioBufferList + into: inputBuffer.mutableAudioBufferList ) if status == noErr && kLinearPCMFormatFlagIsBigEndian == ((sampleBuffer.formatDescription?.audioStreamBasicDescription?.mFormatFlags ?? 0) & kLinearPCMFormatFlagIsBigEndian) { - if format.isInterleaved { - switch format.commonFormat { + if inputFormat.isInterleaved { + switch inputFormat.commonFormat { case .pcmFormatInt16: let length = sampleBuffer.dataBuffer?.dataLength ?? 0 - var image = vImage_Buffer(data: workingBuffer.mutableAudioBufferList[0].mBuffers.mData, height: 1, width: vImagePixelCount(length / 2), rowBytes: length) + var image = vImage_Buffer(data: inputBuffer.mutableAudioBufferList[0].mBuffers.mData, height: 1, width: vImagePixelCount(length / 2), rowBytes: length) vImageByteSwap_Planar16U(&image, &image, vImage_Flags(kvImageNoFlags)) default: break } } } - skip = numSamples(sampleBuffer) - appendAudioPCMBuffer(workingBuffer) + skip = max(Int(sampleBuffer.presentationTimeStamp.value - sampleTime), 0) + sampleTime += Int64(skip) + appendAudioPCMBuffer(inputBuffer) } - func appendAudioPCMBuffer(_ audioPCMBuffer: AVAudioPCMBuffer, offset: Int = 0) { - let numSamples = min(Int(audioPCMBuffer.frameLength) - offset, Int(buffer.frameLength) - head) - if format.isInterleaved { - let channelCount = Int(format.channelCount) - switch format.commonFormat { + func appendAudioPCMBuffer(_ audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) { + if sampleTime == 0 { + sampleTime = when.sampleTime + } + if inputBuffer.frameLength < audioPCMBuffer.frameLength { + if let buffer = AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: audioPCMBuffer.frameCapacity) { + self.inputBuffer = buffer + } + } + inputBuffer.frameLength = audioPCMBuffer.frameLength + _ = inputBuffer.copy(audioPCMBuffer) + skip = Int(max(when.sampleTime - sampleTime, 0)) + sampleTime += Int64(skip) + appendAudioPCMBuffer(inputBuffer) + } + + @inline(__always) + private func appendAudioPCMBuffer(_ audioPCMBuffer: AVAudioPCMBuffer, offset: Int = 0) { + let numSamples = min(Int(audioPCMBuffer.frameLength) - offset, Int(outputBuffer.frameLength) - head) + if inputFormat.isInterleaved { + let channelCount = Int(inputFormat.channelCount) + switch inputFormat.commonFormat { case .pcmFormatInt16: - memcpy(buffer.int16ChannelData?[0].advanced(by: head * channelCount), audioPCMBuffer.int16ChannelData?[0].advanced(by: offset * channelCount), numSamples * channelCount * 2) + memcpy(outputBuffer.int16ChannelData?[0].advanced(by: head * channelCount), audioPCMBuffer.int16ChannelData?[0].advanced(by: offset * channelCount), numSamples * channelCount * 2) case .pcmFormatInt32: - memcpy(buffer.int32ChannelData?[0].advanced(by: head * channelCount), audioPCMBuffer.int32ChannelData?[0].advanced(by: offset * channelCount), numSamples * channelCount * 4) + memcpy(outputBuffer.int32ChannelData?[0].advanced(by: head * channelCount), audioPCMBuffer.int32ChannelData?[0].advanced(by: offset * channelCount), numSamples * channelCount * 4) case .pcmFormatFloat32: - memcpy(buffer.floatChannelData?[0].advanced(by: head * channelCount), audioPCMBuffer.floatChannelData?[0].advanced(by: offset * channelCount), numSamples * channelCount * 4) + memcpy(outputBuffer.floatChannelData?[0].advanced(by: head * channelCount), audioPCMBuffer.floatChannelData?[0].advanced(by: offset * channelCount), numSamples * channelCount * 4) default: break } } else 
{ - for i in 0.. Int { - // Device audioMic or ReplayKit audioMic. - let sampleRate = Int32(format.sampleRate) - if presentationTimeStamp.timescale == sampleRate { - let presentationTimeStamp = CMTimeAdd(presentationTimeStamp, CMTime(value: CMTimeValue(counts), timescale: presentationTimeStamp.timescale)) - return max(Int(sampleBuffer.presentationTimeStamp.value - presentationTimeStamp.value), 0) - } - return 0 - } } diff --git a/Sources/Media/IOAudioUnit.swift b/Sources/Media/IOAudioUnit.swift index 5d525da6c..1ced50c21 100644 --- a/Sources/Media/IOAudioUnit.swift +++ b/Sources/Media/IOAudioUnit.swift @@ -6,7 +6,7 @@ import SwiftPMSupport protocol IOAudioUnitDelegate: AnyObject { func audioUnit(_ audioUnit: IOAudioUnit, errorOccurred error: AudioCodec.Error) - func audioUnit(_ audioUnit: IOAudioUnit, didOutput audioBuffer: AVAudioPCMBuffer, presentationTimeStamp: CMTime) + func audioUnit(_ audioUnit: IOAudioUnit, didOutput audioBuffer: AVAudioPCMBuffer, when: AVAudioTime) } final class IOAudioUnit: NSObject, IOUnit { @@ -99,8 +99,15 @@ final class IOAudioUnit: NSObject, IOUnit { } } - func appendAudioBuffer(_ audioBuffer: AVAudioBuffer, presentationTimeStamp: CMTime) { - codec.appendAudioBuffer(audioBuffer, presentationTimeStamp: presentationTimeStamp) + func appendAudioBuffer(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) { + switch audioBuffer { + case let audioBuffer as AVAudioPCMBuffer: + resampler.appendAudioPCMBuffer(audioBuffer, when: when) + case let audioBuffer as AVAudioCompressedBuffer: + codec.appendAudioBuffer(audioBuffer, when: when) + default: + break + } } func setAudioStreamBasicDescription(_ audioStreamBasicDescription: AudioStreamBasicDescription?) { @@ -165,13 +172,13 @@ extension IOAudioUnit: IOAudioResamplerDelegate { func resampler(_ resampler: IOAudioResampler, didOutput audioFormat: AVAudioFormat) { inputFormat = resampler.inputFormat codec.inputFormat = audioFormat - monitor.inSourceFormat = audioFormat.formatDescription.audioStreamBasicDescription + monitor.inputFormat = audioFormat } - func resampler(_ resampler: IOAudioResampler, didOutput audioBuffer: AVAudioPCMBuffer, presentationTimeStamp: CMTime) { - self.presentationTimeStamp = presentationTimeStamp - mixer?.audioUnit(self, didOutput: audioBuffer, presentationTimeStamp: presentationTimeStamp) - monitor.appendAudioPCMBuffer(audioBuffer) - codec.appendAudioBuffer(audioBuffer, presentationTimeStamp: presentationTimeStamp) + func resampler(_ resampler: IOAudioResampler, didOutput audioBuffer: AVAudioPCMBuffer, when: AVAudioTime) { + presentationTimeStamp = when.makeTime() + mixer?.audioUnit(self, didOutput: audioBuffer, when: when) + monitor.appendAudioPCMBuffer(audioBuffer, when: when) + codec.appendAudioBuffer(audioBuffer, when: when) } } diff --git a/Sources/Media/IOMixer.swift b/Sources/Media/IOMixer.swift index 1ce536e96..ddbd6d57a 100644 --- a/Sources/Media/IOMixer.swift +++ b/Sources/Media/IOMixer.swift @@ -7,7 +7,7 @@ import UIKit #endif protocol IOMixerDelegate: AnyObject { - func mixer(_ mixer: IOMixer, didOutput audio: AVAudioPCMBuffer, presentationTimeStamp: CMTime) + func mixer(_ mixer: IOMixer, didOutput audio: AVAudioPCMBuffer, when: AVAudioTime) func mixer(_ mixer: IOMixer, didOutput video: CMSampleBuffer) func mixer(_ mixer: IOMixer, videoCodecErrorOccurred error: VideoCodec.Error) func mixer(_ mixer: IOMixer, audioCodecErrorOccurred error: AudioCodec.Error) @@ -411,6 +411,7 @@ extension IOMixer: Running { extension IOMixer: Running { public func startRunning() { } + public func 
stopRunning() { } } @@ -447,11 +448,11 @@ extension IOMixer: AudioCodecDelegate { } } - public func audioCodec(_ codec: AudioCodec, didOutput audioBuffer: AVAudioBuffer, presentationTimeStamp: CMTime) { + public func audioCodec(_ codec: AudioCodec, didOutput audioBuffer: AVAudioBuffer, when: AVAudioTime) { guard let audioBuffer = audioBuffer as? AVAudioPCMBuffer else { return } - delegate?.mixer(self, didOutput: audioBuffer, presentationTimeStamp: presentationTimeStamp) + delegate?.mixer(self, didOutput: audioBuffer, when: when) mediaLink.enqueueAudio(audioBuffer) } } @@ -462,8 +463,8 @@ extension IOMixer: IOAudioUnitDelegate { delegate?.mixer(self, audioCodecErrorOccurred: error) } - func audioUnit(_ audioUnit: IOAudioUnit, didOutput audioBuffer: AVAudioPCMBuffer, presentationTimeStamp: CMTime) { - delegate?.mixer(self, didOutput: audioBuffer, presentationTimeStamp: presentationTimeStamp) - recorder.appendAudioPCMBuffer(audioBuffer, presentationTimeStamp: presentationTimeStamp) + func audioUnit(_ audioUnit: IOAudioUnit, didOutput audioBuffer: AVAudioPCMBuffer, when: AVAudioTime) { + delegate?.mixer(self, didOutput: audioBuffer, when: when) + recorder.appendAudioPCMBuffer(audioBuffer, when: when) } } diff --git a/Sources/Media/IORecorder.swift b/Sources/Media/IORecorder.swift index 0ecaaf56e..bbcc25495 100644 --- a/Sources/Media/IORecorder.swift +++ b/Sources/Media/IORecorder.swift @@ -149,11 +149,11 @@ public class IORecorder { } } - func appendAudioPCMBuffer(_ audioPCMBuffer: AVAudioPCMBuffer, presentationTimeStamp: CMTime) { + func appendAudioPCMBuffer(_ audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) { guard isRunning.value else { return } - if let sampleBuffer = audioPCMBuffer.makeSampleBuffer(presentationTimeStamp) { + if let sampleBuffer = audioPCMBuffer.makeSampleBuffer(when) { appendSampleBuffer(sampleBuffer) } } diff --git a/Sources/Net/NetStream.swift b/Sources/Net/NetStream.swift index 71cd89e0b..51489cd7d 100644 --- a/Sources/Net/NetStream.swift +++ b/Sources/Net/NetStream.swift @@ -11,7 +11,7 @@ import UIKit /// The interface a NetStream uses to inform its delegate. public protocol NetStreamDelegate: AnyObject { /// Tells the receiver to playback an audio packet incoming. - func stream(_ stream: NetStream, didOutput audio: AVAudioBuffer, presentationTimeStamp: CMTime) + func stream(_ stream: NetStream, didOutput audio: AVAudioBuffer, when: AVAudioTime) /// Tells the receiver to playback a video packet incoming. func stream(_ stream: NetStream, didOutput video: CMSampleBuffer) #if os(iOS) || os(tvOS) @@ -260,7 +260,7 @@ open class NetStream: NSObject { } #endif - /// Append a CMSampleBuffer?. + /// Append a CMSampleBuffer. /// - Warning: This method can't use attachCamera or attachAudio method at the same time. open func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer, options: [NSObject: AnyObject]? = nil) { switch sampleBuffer.formatDescription?._mediaType { @@ -277,6 +277,14 @@ open class NetStream: NSObject { } } + /// Append an AVAudioBuffer. + /// - Warning: This method can't use attachAudio method at the same time. + public func appendAudioBuffer(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) { + mixer.audioIO.lockQueue.async { + self.mixer.audioIO.appendAudioBuffer(audioBuffer, when: when) + } + } + /// Register a video effect. 
public func registerVideoEffect(_ effect: VideoEffect) -> Bool { mixer.videoIO.lockQueue.sync { @@ -324,8 +332,8 @@ extension NetStream: IOMixerDelegate { delegate?.stream(self, didOutput: video) } - func mixer(_ mixer: IOMixer, didOutput audio: AVAudioPCMBuffer, presentationTimeStamp: CMTime) { - delegate?.stream(self, didOutput: audio, presentationTimeStamp: presentationTimeStamp) + func mixer(_ mixer: IOMixer, didOutput audio: AVAudioPCMBuffer, when: AVAudioTime) { + delegate?.stream(self, didOutput: audio, when: when) } func mixer(_ mixer: IOMixer, audioCodecErrorOccurred error: AudioCodec.Error) { diff --git a/Sources/RTMP/RTMPMessage.swift b/Sources/RTMP/RTMPMessage.swift index cff362c0d..c48297dab 100644 --- a/Sources/RTMP/RTMPMessage.swift +++ b/Sources/RTMP/RTMPMessage.swift @@ -592,7 +592,7 @@ final class RTMPAudioMessage: RTMPMessage { stream.mixer.audioIO.setAudioStreamBasicDescription(makeAudioStreamBasicDescription()) } if let audioBuffer = makeAudioBuffer(stream) { - stream.mixer.audioIO.appendAudioBuffer(audioBuffer, presentationTimeStamp: CMTime(seconds: stream.audioTimestamp / 1000, preferredTimescale: 1000)) + stream.mixer.audioIO.appendAudioBuffer(audioBuffer, when: .init(hostTime: UInt64(stream.audioTimestamp))) } default: break diff --git a/Sources/RTMP/RTMPMuxer.swift b/Sources/RTMP/RTMPMuxer.swift index 3253fcade..90f95ba42 100644 --- a/Sources/RTMP/RTMPMuxer.swift +++ b/Sources/RTMP/RTMPMuxer.swift @@ -43,7 +43,8 @@ extension RTMPMuxer: AudioCodecDelegate { delegate?.muxer(self, didOutputAudio: buffer, withTimestamp: 0) } - func audioCodec(_ codec: AudioCodec, didOutput audioBuffer: AVAudioBuffer, presentationTimeStamp: CMTime) { + func audioCodec(_ codec: AudioCodec, didOutput audioBuffer: AVAudioBuffer, when: AVAudioTime) { + let presentationTimeStamp = when.makeTime() let delta = (audioTimeStamp == CMTime.zero ? 0 : presentationTimeStamp.seconds - audioTimeStamp.seconds) * 1000 guard let audioBuffer = audioBuffer as? AVAudioCompressedBuffer, 0 <= delta && isReady(presentationTimeStamp) else { return diff --git a/Tests/MPEG/TSReaderTests.swift b/Tests/MPEG/TSReaderTests.swift index 38ae7e723..1318a408a 100644 --- a/Tests/MPEG/TSReaderTests.swift +++ b/Tests/MPEG/TSReaderTests.swift @@ -47,7 +47,7 @@ private class TSReaderAudioCodec: TSReaderDelegate, AudioCodecDelegate { // XCTFail() } - func audioCodec(_ codec: HaishinKit.AudioCodec, didOutput audioBuffer: AVAudioBuffer, presentationTimeStamp: CMTime) { + func audioCodec(_ codec: HaishinKit.AudioCodec, didOutput audioBuffer: AVAudioBuffer, when: AVAudioTime) { } } diff --git a/Tests/Media/IOAudioResamplerTests.swift b/Tests/Media/IOAudioResamplerTests.swift index 0ea9f36b4..77fd0a713 100644 --- a/Tests/Media/IOAudioResamplerTests.swift +++ b/Tests/Media/IOAudioResamplerTests.swift @@ -8,7 +8,7 @@ final class NullIOAudioResamplerDelegate: IOAudioResamplerDelegate { func resampler(_ resampler: HaishinKit.IOAudioResampler, didOutput audioFormat: AVAudioFormat) { } - func resampler(_ resampler: HaishinKit.IOAudioResampler, didOutput audioPCMBuffer: AVAudioPCMBuffer, presentationTimeStamp: CMTime) { + func resampler(_ resampler: HaishinKit.IOAudioResampler, didOutput audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) { } func resampler(_ resampler: HaishinKit.IOAudioResampler, errorOccurred error: HaishinKit.AudioCodec.Error) {
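Taken together, the two small extensions added here (AVAudioTime+Extension.swift and CMTime+Extension.swift) are what let the rest of the patch carry AVAudioTime end to end: sampleTime maps to CMTime.value and sampleRate to CMTime.timescale. A quick sanity sketch of that round trip, not part of the diff, with illustrative values (the extensions are internal to the framework, so the equivalent conversions are written inline):

import AVFoundation
import CoreMedia

// One second into a 48 kHz stream, expressed as a sample position.
let when = AVAudioTime(sampleTime: 48_000, atRate: 48_000)

// makeTime() above does exactly this: CMTime(value: sampleTime, timescale: sampleRate).
let time = CMTime(value: CMTimeValue(when.sampleTime), timescale: CMTimeScale(when.sampleRate))
assert(time.seconds == 1.0)

// makeAudioTime() is the inverse: AVAudioTime(sampleTime: value, atRate: timescale).
let back = AVAudioTime(sampleTime: time.value, atRate: Double(time.timescale))
assert(back.sampleTime == when.sampleTime)

Working in sample positions is also why IOAudioResampler can now advance its clock with plain integer arithmetic (sampleTime += 1024 per output buffer) instead of CMTimeAdd against a per-stream timescale. That integer clock only stays correct while the output sample rate is constant, which holds here because setUp(_:) recreates the converter, and resets sampleTime, whenever the source format changes.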