From a475e05a0996cd7d41f2f196208bf080686588b2 Mon Sep 17 00:00:00 2001
From: levs42 <lsokolov@logitech.com>
Date: Tue, 12 Mar 2024 17:07:32 -0700
Subject: [PATCH] Add multichannel audio mixer

---
 Examples/iOS/Screencast/SampleHandler.swift   |   7 +-
 HaishinKit.xcodeproj/project.pbxproj          |  12 +
 Sources/Codec/AudioCodecSettings.swift        |  19 +-
 Sources/Extension/AudioNode+Extension.swift   | 154 +++++++++
 ...udioStreamBasicDescription+Extension.swift | 152 +++++++++
 Sources/IO/AudioNode.swift                    | 289 +++++++++++++++++
 Sources/IO/IOAudioMixer.swift                 | 298 ++++++++++++++++++
 Sources/IO/IOAudioResampler.swift             |   4 +-
 Sources/IO/IOAudioUnit.swift                  |  32 +-
 Sources/IO/IOStream.swift                     |  10 +-
 Sources/RTMP/RTMPMuxer.swift                  |   2 +-
 11 files changed, 952 insertions(+), 27 deletions(-)
 create mode 100644 Sources/Extension/AudioNode+Extension.swift
 create mode 100644 Sources/IO/AudioNode.swift
 create mode 100644 Sources/IO/IOAudioMixer.swift

diff --git a/Examples/iOS/Screencast/SampleHandler.swift b/Examples/iOS/Screencast/SampleHandler.swift
index 4a9c9551b..048b7bbad 100644
--- a/Examples/iOS/Screencast/SampleHandler.swift
+++ b/Examples/iOS/Screencast/SampleHandler.swift
@@ -47,13 +47,12 @@ open class SampleHandler: RPBroadcastSampleHandler {
             }
             rtmpStream.append(sampleBuffer)
         case .audioMic:
-            isMirophoneOn = true
             if CMSampleBufferDataIsReady(sampleBuffer) {
-                rtmpStream.append(sampleBuffer)
+                rtmpStream.append(sampleBuffer, channel: 0)
             }
         case .audioApp:
-            if !isMirophoneOn && CMSampleBufferDataIsReady(sampleBuffer) {
-                rtmpStream.append(sampleBuffer)
+            if CMSampleBufferDataIsReady(sampleBuffer) {
+                rtmpStream.append(sampleBuffer, channel: 1)
             }
         @unknown default:
             break
diff --git a/HaishinKit.xcodeproj/project.pbxproj b/HaishinKit.xcodeproj/project.pbxproj
index 65102b74b..ffddadede 100644
--- a/HaishinKit.xcodeproj/project.pbxproj
+++ b/HaishinKit.xcodeproj/project.pbxproj
@@ -130,6 +130,9 @@
 		2EC97B7227880FF400D8BE32 /* OnTapGestureView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2EC97B6E27880FF400D8BE32 /* OnTapGestureView.swift */; };
 		2EC97B7327880FF400D8BE32 /* Views.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2EC97B6F27880FF400D8BE32 /* Views.swift */; };
 		2EC97B7427880FF400D8BE32 /* MTHKSwiftUiView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2EC97B7027880FF400D8BE32 /* MTHKSwiftUiView.swift */; };
+		B3016D252B98FF9A0043DB39 /* AudioNode+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3016D242B98FF9A0043DB39 /* AudioNode+Extension.swift */; };
+		B34239852B9FD3E30068C3FB /* AudioNode.swift in Sources */ = {isa = PBXBuildFile; fileRef = B34239842B9FD3E30068C3FB /* AudioNode.swift */; };
+		B3D687822B80302B00E6A28E /* IOAudioMixer.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3D687812B80302B00E6A28E /* IOAudioMixer.swift */; };
 		BC0394562AA8A384006EDE38 /* Logboard.xcframework in Frameworks */ = {isa = PBXBuildFile; fileRef = BC34DFD125EBB12C005F975A /* Logboard.xcframework */; };
 		BC03945F2AA8AFF5006EDE38 /* ExpressibleByIntegerLiteral+ExtensionTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC03945E2AA8AFF5006EDE38 /* ExpressibleByIntegerLiteral+ExtensionTests.swift */; };
 		BC04A2D42AD2D1D700C87A3E /* AVAudioTime+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC04A2D32AD2D1D700C87A3E /* AVAudioTime+Extension.swift */; };
@@ -577,6 +580,9 @@
 		2EC97B6E27880FF400D8BE32 /* OnTapGestureView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = OnTapGestureView.swift; sourceTree = "<group>"; };
 		2EC97B6F27880FF400D8BE32 /* Views.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Views.swift; sourceTree = "<group>"; };
 		2EC97B7027880FF400D8BE32 /* MTHKSwiftUiView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MTHKSwiftUiView.swift; sourceTree = "<group>"; };
+		B3016D242B98FF9A0043DB39 /* AudioNode+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AudioNode+Extension.swift"; sourceTree = "<group>"; };
+		B34239842B9FD3E30068C3FB /* AudioNode.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioNode.swift; sourceTree = "<group>"; };
+		B3D687812B80302B00E6A28E /* IOAudioMixer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = IOAudioMixer.swift; sourceTree = "<group>"; };
 		BC03945E2AA8AFF5006EDE38 /* ExpressibleByIntegerLiteral+ExtensionTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "ExpressibleByIntegerLiteral+ExtensionTests.swift"; sourceTree = "<group>"; };
 		BC04A2D32AD2D1D700C87A3E /* AVAudioTime+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioTime+Extension.swift"; sourceTree = "<group>"; };
 		BC04A2D52AD2D95500C87A3E /* CMTime+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CMTime+Extension.swift"; sourceTree = "<group>"; };
@@ -1087,9 +1093,11 @@
 		29BDE0BD1C65BC2400D6A768 /* IO */ = {
 			isa = PBXGroup;
 			children = (
+				B34239842B9FD3E30068C3FB /* AudioNode.swift */,
 				BC9F9C7726F8C16600B01ED0 /* Choreographer.swift */,
 				BC959EEE296EE4190067BA97 /* ImageTransform.swift */,
 				BC3802132AB5E7CC001AE399 /* IOAudioCaptureUnit.swift */,
+				B3D687812B80302B00E6A28E /* IOAudioMixer.swift */,
 				BC31DBD12A653D1600C4DEA3 /* IOAudioMonitor.swift */,
 				BCFC51FD2AAB420700014428 /* IOAudioResampler.swift */,
 				BC5019C02A6D266B0046E02F /* IOAudioRingBuffer.swift */,
@@ -1160,6 +1168,7 @@
 			isa = PBXGroup;
 			children = (
 				BC4C9EAE23F2E736004A14F2 /* AudioStreamBasicDescription+Extension.swift */,
+				B3016D242B98FF9A0043DB39 /* AudioNode+Extension.swift */,
 				BC93792E2ADD76BE001097DB /* AVAudioCompressedBuffer+Extension.swift */,
 				1A2166D3A449D813866FE9D9 /* AVAudioFormat+Extension.swift */,
 				BC22EEF12AAF5D6300E3406D /* AVAudioPCMBuffer+Extension.swift */,
@@ -1776,6 +1785,7 @@
 				BCB9773F2621812800C9A649 /* AVCFormatStream.swift in Sources */,
 				BC83A4732403D83B006BDE06 /* VTCompressionSession+Extension.swift in Sources */,
 				BC4914A228DDD33D009E2DF6 /* VTSessionConvertible.swift in Sources */,
+				B3016D252B98FF9A0043DB39 /* AudioNode+Extension.swift in Sources */,
 				2915EC4D1D85BB8C00621092 /* RTMPTSocket.swift in Sources */,
 				BC11023E2917C35B00D48035 /* CVPixelBufferPool+Extension.swift in Sources */,
 				29C2631C1D0083B50098D4EF /* IOVideoUnit.swift in Sources */,
@@ -1791,6 +1801,7 @@
 				2999C3752071138F00892E55 /* MTHKView.swift in Sources */,
 				29AF3FCF1D7C744C00E41212 /* IOStream.swift in Sources */,
 				2958910E1EEB8D3C00CE51E1 /* FLVVideoCodec.swift in Sources */,
+				B3D687822B80302B00E6A28E /* IOAudioMixer.swift in Sources */,
 				BC1DC5142A05428800E928ED /* HEVCNALUnit.swift in Sources */,
 				BC6FC9222961B3D800A746EE /* vImage_CGImageFormat+Extension.swift in Sources */,
 				BC20DF38250377A3007BC608 /* IOUIScreenCaptureUnit.swift in Sources */,
@@ -1867,6 +1878,7 @@
 				29B8766D1CD70AB300FC07DA /* DataConvertible.swift in Sources */,
 				BC570B4828E9ACC10098A12C /* IOUnit.swift in Sources */,
 				2976A4861D4903C300B53EF2 /* DeviceUtil.swift in Sources */,
+				B34239852B9FD3E30068C3FB /* AudioNode.swift in Sources */,
 				BC7C56BB299E595000C41A9B /* VideoCodecSettings.swift in Sources */,
 				29B876881CD70AE800FC07DA /* TSPacket.swift in Sources */,
 				BC22EEEE2AAF50F200E3406D /* Codec.swift in Sources */,
diff --git a/Sources/Codec/AudioCodecSettings.swift b/Sources/Codec/AudioCodecSettings.swift
index e37ff2206..15ac66e41 100644
--- a/Sources/Codec/AudioCodecSettings.swift
+++ b/Sources/Codec/AudioCodecSettings.swift
@@ -138,6 +138,8 @@ public struct AudioCodecSettings: Codable {
     /// channelMap = [2, 3]
     /// ```
     public var channelMap: [Int]?
+    /// Specifies settings for alternative audio sources.
+    public var sourceSettings: [Int: AudioCodecSettings]?
     /// Specifies the output format.
     var format: AudioCodecSettings.Format = .aac
 
@@ -147,13 +149,15 @@ public struct AudioCodecSettings: Codable {
         sampleRate: Float64 = 0,
         channels: UInt32 = 0,
         downmix: Bool = false,
-        channelMap: [Int]? = nil
+        channelMap: [Int]? = nil,
+        sourceSettings: [Int: AudioCodecSettings]? = nil
     ) {
         self.bitRate = bitRate
         self.sampleRate = sampleRate
         self.channels = channels
         self.downmix = downmix
         self.channelMap = channelMap
+        self.sourceSettings = sourceSettings
     }
 
     func apply(_ converter: AVAudioConverter?, oldValue: AudioCodecSettings?) {
@@ -171,6 +175,19 @@ public struct AudioCodecSettings: Codable {
         }
     }
 
+    func makeAudioMixerSettings() -> IOAudioMixerSettings {
+        guard let sourceSettings else {
+            return IOAudioMixerSettings(defaultResamplerSettings: makeAudioResamplerSettings())
+        }
+        var resamplersSettings: [Int: IOAudioResamplerSettings] = [
+            0: makeAudioResamplerSettings()
+        ]
+        for (source, codecSettings) in sourceSettings {
+            resamplersSettings[source] = codecSettings.makeAudioResamplerSettings()
+        }
+        return IOAudioMixerSettings(resamplersSettings: resamplersSettings)
+    }
+
     func makeAudioResamplerSettings() -> IOAudioResamplerSettings {
         return .init(
             sampleRate: sampleRate,
diff --git a/Sources/Extension/AudioNode+Extension.swift b/Sources/Extension/AudioNode+Extension.swift
new file mode 100644
index 000000000..0063a06b1
--- /dev/null
+++ b/Sources/Extension/AudioNode+Extension.swift
@@ -0,0 +1,154 @@
+import AudioUnit
+
+extension AudioNode: CustomStringConvertible {
+    public var description: String {
+        var description: [String] = []
+
+        for scope in BusScope.allCases {
+            guard let busCount = try? busCount(scope: scope) else {
+                description.append("failed to get \(scope.rawValue) bus count")
+                continue
+            }
+            guard busCount > 0 else {
+                continue
+            }
+            var busDescription: [String] = []
+            for busIndex in 0..<busCount {
+                guard let asbd = try? format(bus: busIndex, scope: scope) else {
+                    busDescription.append("failed to get \(scope.rawValue) bus format for bus \(busIndex)")
+                    continue
+                }
+                if let mixerNode = self as? MixerNode, let volume = try? mixerNode.volume(bus: busIndex, of: scope) {
+                    if scope != .input || scope == .input && (try? mixerNode.isEnabled(bus: busIndex, scope: scope)) ?? false {
+                        busDescription.append("bus: \(busIndex), volume: \(volume), format: \(asbd)")
+                    }
+                } else {
+                    busDescription.append("bus: \(busIndex), format: \(asbd)")
+                }
+            }
+
+            description.append("\(scope.rawValue) \(busDescription.count)/\(busCount)")
+            description.append(busDescription.joined(separator: "; "))
+        }
+
+        let parametersList = (try? parameters) ?? []
+        if !parametersList.isEmpty {
+            description.append("parameters: ")
+            for parameter in parametersList {
+                description.append("\(parameter)")
+            }
+        }
+
+        return "AudioNode(\(description.joined(separator: "; ")))"
+    }
+
+    private var parameters: [AudioUnitParameter] {
+        get throws {
+            var result = [AudioUnitParameter]()
+            var status: OSStatus = noErr
+
+            var parameterListSize: UInt32 = 0
+            AudioUnitGetPropertyInfo(audioUnit,
+                                     kAudioUnitProperty_ParameterList,
+                                     kAudioUnitScope_Global,
+                                     0,
+                                     &parameterListSize,
+                                     nil)
+
+            let numberOfParameters = Int(parameterListSize) / MemoryLayout<AudioUnitParameterID>.size
+            let parameterIds = UnsafeMutablePointer<AudioUnitParameterID>.allocate(capacity: numberOfParameters)
+            defer { parameterIds.deallocate() }
+
+            if numberOfParameters > 0 {
+                status = AudioUnitGetProperty(audioUnit,
+                                              kAudioUnitProperty_ParameterList,
+                                              kAudioUnitScope_Global,
+                                              0,
+                                              parameterIds,
+                                              &parameterListSize)
+                guard status == noErr else {
+                    throw AudioNodeError.unableToRetrieveValue(status)
+                }
+            }
+
+            var info = AudioUnitParameterInfo()
+            var infoSize = UInt32(MemoryLayout<AudioUnitParameterInfo>.size)
+
+            for i in 0..<numberOfParameters {
+                let id = parameterIds[i]
+                status = AudioUnitGetProperty(audioUnit,
+                                              kAudioUnitProperty_ParameterInfo,
+                                              kAudioUnitScope_Global,
+                                              id,
+                                              &info,
+                                              &infoSize)
+                guard status == noErr else {
+                    throw AudioNodeError.unableToRetrieveValue(status)
+                }
+                result.append(AudioUnitParameter(info, id: id))
+            }
+
+            return result
+        }
+    }
+}
+
+private struct AudioUnitParameter: CustomStringConvertible {
+    var id: Int
+    var name: String = ""
+    var minValue: Float
+    var maxValue: Float
+    var defaultValue: Float
+    var unit: AudioUnitParameterUnit
+
+    init(_ info: AudioUnitParameterInfo, id: AudioUnitParameterID) {
+        self.id = Int(id)
+        if let cfName = info.cfNameString?.takeUnretainedValue() {
+            name = String(cfName)
+        }
+        minValue = info.minValue
+        maxValue = info.maxValue
+        defaultValue = info.defaultValue
+        unit = info.unit
+    }
+
+    var description: String {
+        return "\(name), id: \(id), min: \(minValue), max: \(maxValue), default: \(defaultValue), unit: \(unit) \(unitName)"
+    }
+
+    var unitName: String {
+        switch unit {
+        // swiftlint:disable switch_case_on_newline
+        case .generic: return "generic"
+        case .indexed: return "indexed"
+        case .boolean: return "boolean"
+        case .percent: return "percent"
+        case .seconds: return "seconds"
+        case .sampleFrames: return "sampleFrames"
+        case .phase: return "phase"
+        case .rate: return "rate"
+        case .hertz: return "hertz"
+        case .cents: return "cents"
+        case .relativeSemiTones: return "relativeSemiTones"
+        case .midiNoteNumber: return "midiNoteNumber"
+        case .midiController: return "midiController"
+        case .decibels: return "decibels"
+        case .linearGain: return "linearGain"
+        case .degrees: return "degrees"
+        case .equalPowerCrossfade: return "equalPowerCrossfade"
+        case .mixerFaderCurve1: return "mixerFaderCurve1"
+        case .pan: return "pan"
+        case .meters: return "meters"
+        case .absoluteCents: return "absoluteCents"
+        case .octaves: return "octaves"
+        case .BPM: return "BPM"
+        case .beats: return "beats"
+        case .milliseconds: return "milliseconds"
+        case .ratio: return "ratio"
+        case .customUnit: return "customUnit"
+        case .midi2Controller: return "midi2Controller"
+        default: return "unknown_\(unit)"
+        // swiftlint:enable switch_case_on_newline
+        }
+    }
+}
diff --git a/Sources/Extension/AudioStreamBasicDescription+Extension.swift b/Sources/Extension/AudioStreamBasicDescription+Extension.swift
index e426486bc..031bb6ed9 100644
--- a/Sources/Extension/AudioStreamBasicDescription+Extension.swift
+++ b/Sources/Extension/AudioStreamBasicDescription+Extension.swift
@@ -14,3 +14,155 @@ extension AudioStreamBasicDescription: Equatable {
             lhs.mSampleRate == rhs.mSampleRate
     }
 }
+
+extension AudioStreamBasicDescription: CustomStringConvertible {
+    public enum ReadableFormatFlag: String, CaseIterable, CustomStringConvertible {
+        case audioFormatFlagIsFloat = "audio_IsFloat"
+        case audioFormatFlagIsBigEndian = "audio_IsBigEndian"
+        case audioFormatFlagIsSignedInteger = "audio_IsSignedInteger"
+        case audioFormatFlagIsPacked = "audio_IsPacked"
+        case audioFormatFlagIsAlignedHigh = "audio_IsAlignedHigh"
+        case audioFormatFlagIsNonInterleaved = "audio_IsNonInterleaved"
+        case audioFormatFlagIsNonMixable = "audio_IsNonMixable"
+        case audioFormatFlagsAreAllClear = "audio_AreAllClear"
+        case linearPCMFormatFlagIsFloat = "pcm_IsFloat"
+        case linearPCMFormatFlagIsBigEndian = "pcm_IsBigEndian"
+        case linearPCMFormatFlagIsSignedInteger = "pcm_IsSignedInteger"
+        case linearPCMFormatFlagIsPacked = "pcm_IsPacked"
+        case linearPCMFormatFlagIsAlignedHigh = "pcm_IsAlignedHigh"
+        case linearPCMFormatFlagIsNonInterleaved = "pcm_IsNonInterleaved"
+        case linearPCMFormatFlagIsNonMixable = "pcm_IsNonMixable"
+        case linearPCMFormatFlagsSampleFractionShift = "pcm_SampleFractionShift"
+        case linearPCMFormatFlagsSampleFractionMask = "pcm_SampleFractionMask"
+        case linearPCMFormatFlagsAreAllClear = "pcm_AreAllClear"
+        case appleLosslessFormatFlag_16BitSourceData = "ll_16BitSourceData"
+        case appleLosslessFormatFlag_20BitSourceData = "ll_20BitSourceData"
+        case appleLosslessFormatFlag_24BitSourceData = "ll_24BitSourceData"
+        case appleLosslessFormatFlag_32BitSourceData = "ll_32BitSourceData"
+
+        var flagValue: AudioFormatFlags {
+            switch self {
+            // swiftlint:disable switch_case_on_newline
+            case .audioFormatFlagIsFloat: return kAudioFormatFlagIsFloat
+            case .audioFormatFlagIsBigEndian: return kAudioFormatFlagIsBigEndian
+            case .audioFormatFlagIsSignedInteger: return kAudioFormatFlagIsSignedInteger
+            case .audioFormatFlagIsPacked: return kAudioFormatFlagIsPacked
+            case .audioFormatFlagIsAlignedHigh: return kAudioFormatFlagIsAlignedHigh
+            case .audioFormatFlagIsNonInterleaved: return kAudioFormatFlagIsNonInterleaved
+            case .audioFormatFlagIsNonMixable: return kAudioFormatFlagIsNonMixable
+            case .audioFormatFlagsAreAllClear: return kAudioFormatFlagsAreAllClear
+            case .linearPCMFormatFlagIsFloat: return kLinearPCMFormatFlagIsFloat
+            case .linearPCMFormatFlagIsBigEndian: return kLinearPCMFormatFlagIsBigEndian
+            case .linearPCMFormatFlagIsSignedInteger: return kLinearPCMFormatFlagIsSignedInteger
+            case .linearPCMFormatFlagIsPacked: return kLinearPCMFormatFlagIsPacked
+            case .linearPCMFormatFlagIsAlignedHigh: return kLinearPCMFormatFlagIsAlignedHigh
+            case .linearPCMFormatFlagIsNonInterleaved: return kLinearPCMFormatFlagIsNonInterleaved
+            case .linearPCMFormatFlagIsNonMixable: return kLinearPCMFormatFlagIsNonMixable
+            case .linearPCMFormatFlagsSampleFractionShift: return kLinearPCMFormatFlagsSampleFractionShift
+            case .linearPCMFormatFlagsSampleFractionMask: return kLinearPCMFormatFlagsSampleFractionMask
+            case .linearPCMFormatFlagsAreAllClear: return kLinearPCMFormatFlagsAreAllClear
+            case .appleLosslessFormatFlag_16BitSourceData: return kAppleLosslessFormatFlag_16BitSourceData
+            case .appleLosslessFormatFlag_20BitSourceData: return kAppleLosslessFormatFlag_20BitSourceData
+            case .appleLosslessFormatFlag_24BitSourceData: return kAppleLosslessFormatFlag_24BitSourceData
+            case .appleLosslessFormatFlag_32BitSourceData: return kAppleLosslessFormatFlag_32BitSourceData
+            // swiftlint:enable switch_case_on_newline
+            }
+        }
+
+        static func flags(from flagOptionSet: AudioFormatFlags) -> Set<ReadableFormatFlag> {
+            var result = Set<ReadableFormatFlag>()
+            allCases.forEach { flag in
+                if flag.flagValue & flagOptionSet == flag.flagValue {
+                    result.insert(flag)
+                }
+            }
+            return result
+        }
+
+        static func flagOptionSet(from flagSet: Set<ReadableFormatFlag>) -> AudioFormatFlags {
+            var optionSet: AudioFormatFlags = 0
+            flagSet.forEach { flag in
+                optionSet |= flag.flagValue
+            }
+            return optionSet
+        }
+
+        public var description: String {
+            rawValue
+        }
+    }
+
+    public struct ReadableFlagOptionSet: OptionSet, CustomStringConvertible {
+        public let rawValue: AudioFormatFlags
+        public let flags: Set<ReadableFormatFlag>
+
+        public init(rawValue value: AudioFormatFlags) {
+            self.rawValue = value
+            flags = ReadableFormatFlag.flags(from: rawValue)
+        }
+
+        public var description: String {
+            guard ReadableFormatFlag.flagOptionSet(from: flags) == rawValue else {
+                return "Unable to parse AudioFormatFlags"
+            }
+            let result = flags.sorted(by: { $0.rawValue < $1.rawValue }).map { $0.description }.joined(separator: " | ")
+            return "AudioFormatFlags(\(result))"
+        }
+    }
+
+    public var readableFormatID: String {
+        switch mFormatID {
+        // swiftlint:disable switch_case_on_newline
+        case kAudioFormatLinearPCM: return "LinearPCM"
+        case kAudioFormatAC3: return "AC3"
+        case kAudioFormat60958AC3: return "60958AC3"
+        case kAudioFormatAppleIMA4: return "AppleIMA4"
+        case kAudioFormatMPEG4AAC: return "MPEG4AAC"
+        case kAudioFormatMPEG4CELP: return "MPEG4CELP"
+        case kAudioFormatMPEG4HVXC: return "MPEG4HVXC"
+        case kAudioFormatMPEG4TwinVQ: return "MPEG4TwinVQ"
+        case kAudioFormatMACE3: return "MACE3"
+        case kAudioFormatMACE6: return "MACE6"
+        case kAudioFormatULaw: return "ULaw"
+        case kAudioFormatALaw: return "ALaw"
+        case kAudioFormatQDesign: return "QDesign"
+        case kAudioFormatQDesign2: return "QDesign2"
+        case kAudioFormatQUALCOMM: return "QUALCOMM"
+        case kAudioFormatMPEGLayer1: return "MPEGLayer1"
+        case kAudioFormatMPEGLayer2: return "MPEGLayer2"
+        case kAudioFormatMPEGLayer3: return "MPEGLayer3"
+        case kAudioFormatTimeCode: return "TimeCode"
+        case kAudioFormatMIDIStream: return "MIDIStream"
+        case kAudioFormatParameterValueStream: return "ParameterValueStream"
+        case kAudioFormatAppleLossless: return "AppleLossless"
+        case kAudioFormatMPEG4AAC_HE: return "MPEG4AAC_HE"
+        case kAudioFormatMPEG4AAC_LD: return "MPEG4AAC_LD"
+        case kAudioFormatMPEG4AAC_ELD: return "MPEG4AAC_ELD"
+        case kAudioFormatMPEG4AAC_ELD_SBR: return "MPEG4AAC_ELD_SBR"
+        case kAudioFormatMPEG4AAC_ELD_V2: return "MPEG4AAC_ELD_V2"
+        case kAudioFormatMPEG4AAC_HE_V2: return "MPEG4AAC_HE_V2"
+        case kAudioFormatMPEG4AAC_Spatial: return "MPEG4AAC_Spatial"
+        case kAudioFormatAMR: return "AMR"
+        case kAudioFormatAMR_WB: return "AMR_WB"
+        case kAudioFormatAudible: return "Audible"
+        case kAudioFormatiLBC: return "iLBC"
+        case kAudioFormatDVIIntelIMA: return "DVIIntelIMA"
+        case kAudioFormatMicrosoftGSM: return "MicrosoftGSM"
+        case kAudioFormatAES3: return "AES3"
+        case kAudioFormatEnhancedAC3: return "EnhancedAC3"
+        default: return "unknown_(\(Int(mFormatID)))"
+        // swiftlint:enable switch_case_on_newline
+        }
+    }
+
+    public var readableFlags: ReadableFlagOptionSet {
+        ReadableFlagOptionSet(rawValue: mFormatFlags)
+    }
+
+    public var description: String {
+        "AudioStreamBasicDescription(mSampleRate: \(mSampleRate), mFormatID: \(mFormatID) \(readableFormatID), "
+            + "mFormatFlags: \(mFormatFlags) \(readableFlags), mBytesPerPacket: \(mBytesPerPacket), "
+            + "mFramesPerPacket: \(mFramesPerPacket), mBytesPerFrame: \(mBytesPerFrame), "
+            + "mChannelsPerFrame: \(mChannelsPerFrame), mBitsPerChannel: \(mBitsPerChannel), mReserved: \(mReserved))"
+    }
+}
diff --git a/Sources/IO/AudioNode.swift b/Sources/IO/AudioNode.swift
new file mode 100644
index 000000000..507ebde1c
--- /dev/null
+++ b/Sources/IO/AudioNode.swift
@@ -0,0 +1,289 @@
+import AVFoundation
+
+enum AudioNodeError: Error {
+    case unableToFindAudioComponent
+    case unableToCreateAudioUnit(_ status: OSStatus)
+    case unableToInitializeAudioUnit(_ status: OSStatus)
+    case unableToUpdateBus(_ status: OSStatus)
+    case unableToRetrieveValue(_ status: OSStatus)
+    case unableToConnectToNode(_ status: OSStatus)
+}
+
+class AudioNode {
+    enum BusScope: String, CaseIterable {
+        case input
+        case output
+
+        var audioUnitScope: AudioUnitScope {
+            switch self {
+            case .input:
+                return kAudioUnitScope_Input
+            case .output:
+                return kAudioUnitScope_Output
+            }
+        }
+    }
+
+    let audioUnit: AudioUnit
+
+    init(description: inout AudioComponentDescription) throws {
+        guard let audioComponent = AudioComponentFindNext(nil, &description) else {
+            throw AudioNodeError.unableToFindAudioComponent
+        }
+        var audioUnit: AudioUnit?
+        let status = AudioComponentInstanceNew(audioComponent, &audioUnit)
+        guard status == noErr, let audioUnit else {
+            throw AudioNodeError.unableToCreateAudioUnit(status)
+        }
+        self.audioUnit = audioUnit
+    }
+
+    deinit {
+        AudioOutputUnitStop(audioUnit)
+        AudioUnitUninitialize(audioUnit)
+        AudioComponentInstanceDispose(audioUnit)
+    }
+
+    func initializeAudioUnit() throws {
+        let status = AudioUnitInitialize(audioUnit)
+        guard status == noErr else {
+            throw AudioNodeError.unableToInitializeAudioUnit(status)
+        }
+    }
+
+    @discardableResult
+    func connect(to node: AudioNode, sourceBus: Int = 0, destBus: Int = 0) throws -> AudioUnitConnection {
+        var connection = AudioUnitConnection(sourceAudioUnit: audioUnit,
+                                             sourceOutputNumber: UInt32(sourceBus),
+                                             destInputNumber: UInt32(destBus))
+        let status = AudioUnitSetProperty(node.audioUnit,
+                                          kAudioUnitProperty_MakeConnection,
+                                          kAudioUnitScope_Input,
+                                          UInt32(destBus),
+                                          &connection,
+                                          UInt32(MemoryLayout<AudioUnitConnection>.size))
+        guard status == noErr else {
+            throw AudioNodeError.unableToConnectToNode(status)
+        }
+        return connection
+    }
+
+    func update(format: AVAudioFormat, bus: Int, scope: BusScope) throws {
+        var asbd = format.streamDescription.pointee
+        let status = AudioUnitSetProperty(audioUnit,
+                                          kAudioUnitProperty_StreamFormat,
+                                          scope.audioUnitScope,
+                                          UInt32(bus),
+                                          &asbd,
+                                          UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
+        guard status == noErr else {
+            throw AudioNodeError.unableToUpdateBus(status)
+        }
+    }
+
+    func format(bus: Int, scope: BusScope) throws -> AudioStreamBasicDescription {
+        var asbd = AudioStreamBasicDescription()
+        var propertySize = UInt32(MemoryLayout<AudioStreamBasicDescription>.size)
+        let status = AudioUnitGetProperty(audioUnit,
+                                          kAudioUnitProperty_StreamFormat,
+                                          scope.audioUnitScope,
+                                          UInt32(bus),
+                                          &asbd,
+                                          &propertySize)
+        guard status == noErr else {
+            throw AudioNodeError.unableToRetrieveValue(status)
+        }
+        return asbd
+    }
+
+    /// Apple bug: Cannot set to less than 8 buses
+    func update(busCount: Int, scope: BusScope) throws {
+        var busCount = UInt32(busCount)
+        let status = AudioUnitSetProperty(audioUnit,
+                                          kAudioUnitProperty_ElementCount,
+                                          scope.audioUnitScope,
+                                          0,
+                                          &busCount,
+                                          UInt32(MemoryLayout<UInt32>.size))
+        guard status == noErr else {
+            throw AudioNodeError.unableToUpdateBus(status)
+        }
+    }
+
+    func busCount(scope: BusScope) throws -> Int {
+        var busCount: UInt32 = 0
+        var propertySize = UInt32(MemoryLayout<UInt32>.size)
+        let status = AudioUnitGetProperty(audioUnit,
+                                          kAudioUnitProperty_ElementCount,
+                                          scope.audioUnitScope,
+                                          0,
+                                          &busCount,
+                                          &propertySize)
+        guard status == noErr else {
+            throw AudioNodeError.unableToRetrieveValue(status)
+        }
+        return Int(busCount)
+    }
+}
+
+/// An audio node wrapping Apple's multi-channel mixer audio unit
+/// (`kAudioUnitSubType_MultiChannelMixer`), exposing per-bus render
+/// callbacks, enable switches and volume controls.
+class MixerNode: AudioNode {
+    private var mixerComponentDescription = AudioComponentDescription(
+        componentType: kAudioUnitType_Mixer,
+        componentSubType: kAudioUnitSubType_MultiChannelMixer,
+        componentManufacturer: kAudioUnitManufacturer_Apple,
+        componentFlags: 0,
+        componentFlagsMask: 0)
+
+    /// Creates a multi-channel mixer node.
+    /// - Note: `format` is accepted for interface symmetry with `OutputNode`
+    ///   but is not applied here; bus formats are configured later via
+    ///   `update(format:bus:scope:)`.
+    init(format: AVAudioFormat) throws {
+        try super.init(description: &mixerComponentDescription)
+    }
+
+    /// Installs `inputCallback` as the render source for the given input bus.
+    func update(inputCallback: inout AURenderCallbackStruct, bus: Int) throws {
+        let status = AudioUnitSetProperty(audioUnit,
+                                          kAudioUnitProperty_SetRenderCallback,
+                                          kAudioUnitScope_Input,
+                                          UInt32(bus),
+                                          &inputCallback,
+                                          UInt32(MemoryLayout<AURenderCallbackStruct>.size))
+        guard status == noErr else {
+            throw AudioNodeError.unableToUpdateBus(status)
+        }
+    }
+
+    /// Enables or disables a bus in the given scope.
+    func enable(bus: Int, scope: AudioNode.BusScope, isEnabled: Bool) throws {
+        let value: AudioUnitParameterValue = isEnabled ? 1 : 0
+        let status = AudioUnitSetParameter(audioUnit,
+                                           kMultiChannelMixerParam_Enable,
+                                           scope.audioUnitScope,
+                                           UInt32(bus),
+                                           value,
+                                           0)
+        guard status == noErr else {
+            throw AudioNodeError.unableToUpdateBus(status)
+        }
+    }
+
+    /// Returns whether a bus is enabled in the given scope.
+    func isEnabled(bus: Int, scope: AudioNode.BusScope) throws -> Bool {
+        var value: AudioUnitParameterValue = 0
+        let status = AudioUnitGetParameter(audioUnit,
+                                           kMultiChannelMixerParam_Enable,
+                                           scope.audioUnitScope,
+                                           UInt32(bus),
+                                           &value)
+        guard status == noErr else {
+            throw AudioNodeError.unableToRetrieveValue(status)
+        }
+        return value != 0
+    }
+
+    /// Sets the bus volume, clamped to the 0...1 range.
+    func update(volume: Float, bus: Int, scope: AudioNode.BusScope) throws {
+        let value: AudioUnitParameterValue = max(0, min(1, volume))
+        let status = AudioUnitSetParameter(audioUnit,
+                                           kMultiChannelMixerParam_Volume,
+                                           scope.audioUnitScope,
+                                           UInt32(bus),
+                                           value,
+                                           0)
+        guard status == noErr else {
+            throw AudioNodeError.unableToUpdateBus(status)
+        }
+    }
+
+    /// Returns the current volume of a bus.
+    func volume(bus: Int, of scope: AudioNode.BusScope) throws -> Float {
+        var value: AudioUnitParameterValue = 0
+        let status = AudioUnitGetParameter(audioUnit,
+                                           kMultiChannelMixerParam_Volume,
+                                           scope.audioUnitScope,
+                                           UInt32(bus),
+                                           &value)
+        guard status == noErr else {
+            // This is a read, so report a retrieval failure — consistent with
+            // `isEnabled(bus:scope:)` — rather than `unableToUpdateBus`.
+            throw AudioNodeError.unableToRetrieveValue(status)
+        }
+        return value
+    }
+}
+
+/// Errors thrown by `OutputNode.render(numberOfFrames:sampleTime:)`.
+enum OutputNodeError: Error {
+    /// `AudioUnitRender` returned a non-`noErr` status.
+    case unableToRenderFrames
+    /// An `AVAudioPCMBuffer` could not be allocated for the rendered audio.
+    case unableToAllocateBuffer
+}
+
+/// A generic-output audio node used to pull rendered (mixed) audio out of the
+/// audio unit graph as `AVAudioPCMBuffer`s.
+class OutputNode: AudioNode {
+    private var outputComponentDescription = AudioComponentDescription(
+        componentType: kAudioUnitType_Output,
+        componentSubType: kAudioUnitSubType_GenericOutput,
+        componentManufacturer: kAudioUnitManufacturer_Apple,
+        componentFlags: 0,
+        componentFlagsMask: 0)
+
+    /// The PCM format of buffers produced by `render(numberOfFrames:sampleTime:)`.
+    let format: AVAudioFormat
+
+    init(format: AVAudioFormat) throws {
+        self.format = format
+        try super.init(description: &outputComponentDescription)
+    }
+
+    /// Pulls `numberOfFrames` frames from the connected graph and copies them
+    /// into a freshly allocated `AVAudioPCMBuffer`.
+    /// - Parameters:
+    ///   - numberOfFrames: Number of frames to render.
+    ///   - sampleTime: Sample time stamped onto the render request.
+    /// - Returns: A PCM buffer in `format` containing the rendered audio.
+    /// - Throws: `OutputNodeError.unableToRenderFrames` when `AudioUnitRender`
+    ///   fails; `OutputNodeError.unableToAllocateBuffer` when the output
+    ///   buffer cannot be created.
+    func render(numberOfFrames: AVAudioFrameCount,
+                sampleTime: AVAudioFramePosition) throws -> AVAudioPCMBuffer {
+        var timeStamp = AudioTimeStamp()
+        timeStamp.mFlags = .sampleTimeValid
+        timeStamp.mSampleTime = Float64(sampleTime)
+
+        // One mono AudioBuffer per channel (non-interleaved layout).
+        let channelCount = format.channelCount
+        let audioBufferList = AudioBufferList.allocate(maximumBuffers: Int(channelCount))
+        defer {
+            free(audioBufferList.unsafeMutablePointer)
+        }
+        // mData == nil asks the audio unit to supply its own buffers during
+        // AudioUnitRender. NOTE(review): mDataByteSize is seeded with one
+        // frame's bytes (mBytesPerFrame), not numberOfFrames worth; the copy
+        // below relies on the unit rewriting it on return — confirm this
+        // holds for every format this node is used with.
+        for i in 0..<Int(channelCount) {
+            audioBufferList[i] = AudioBuffer(mNumberChannels: 1,
+                                             mDataByteSize: format.streamDescription.pointee.mBytesPerFrame,
+                                             mData: nil)
+        }
+
+        let status = AudioUnitRender(audioUnit,
+                                     nil,
+                                     &timeStamp,
+                                     0,
+                                     numberOfFrames,
+                                     audioBufferList.unsafeMutablePointer)
+
+        guard status == noErr else {
+            throw OutputNodeError.unableToRenderFrames
+        }
+
+        guard let pcmBuffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: numberOfFrames) else {
+            throw OutputNodeError.unableToAllocateBuffer
+        }
+
+        pcmBuffer.frameLength = numberOfFrames
+
+        // Copy each channel's rendered bytes into the matching channel of the
+        // PCM buffer, using the byte size reported back by the audio unit.
+        for channel in 0..<Int(channelCount) {
+            let mDataByteSize = Int(audioBufferList[channel].mDataByteSize)
+
+            switch format.commonFormat {
+            case .pcmFormatInt16:
+                let pcmChannelData = pcmBuffer.int16ChannelData?[channel]
+                let audioBufferData = audioBufferList[channel].mData?.assumingMemoryBound(to: Int16.self)
+                if let pcmChannelData, let audioBufferData {
+                    memcpy(pcmChannelData, audioBufferData, mDataByteSize)
+                }
+            case .pcmFormatInt32:
+                let pcmChannelData = pcmBuffer.int32ChannelData?[channel]
+                let audioBufferData = audioBufferList[channel].mData?.assumingMemoryBound(to: Int32.self)
+                if let pcmChannelData, let audioBufferData {
+                    memcpy(pcmChannelData, audioBufferData, mDataByteSize)
+                }
+            case .pcmFormatFloat32:
+                let pcmChannelData = pcmBuffer.floatChannelData?[channel]
+                let audioBufferData = audioBufferList[channel].mData?.assumingMemoryBound(to: Float32.self)
+                if let pcmChannelData, let audioBufferData {
+                    memcpy(pcmChannelData, audioBufferData, mDataByteSize)
+                }
+            default:
+                // Other common formats (e.g. Float64) fall through and the
+                // buffer contents are left uncopied.
+                break
+            }
+        }
+
+        return pcmBuffer
+    }
+}
diff --git a/Sources/IO/IOAudioMixer.swift b/Sources/IO/IOAudioMixer.swift
new file mode 100644
index 000000000..3d24c99d5
--- /dev/null
+++ b/Sources/IO/IOAudioMixer.swift
@@ -0,0 +1,298 @@
+import AVFoundation
+
+private let kIOAudioMixer_frameCapacity: AVAudioFrameCount = 1024
+private let kIOAudioMixer_sampleTime: AVAudioFramePosition = 0
+private let kIOAudioMixer_defaultResamplerTag: Int = 0
+
+/// The IOAudioMixerError error domain codes.
+public enum IOAudioMixerError: Swift.Error {
+    /// Invalid resample settings.
+    case invalidSampleRate
+    /// Mixer is unable to provide input data.
+    case unableToProvideInputData
+    /// Mixer is unable to make sure that all resamplers output the same audio format.
+    case unableToEnforceAudioFormat
+}
+
+/// The interface an `IOAudioMixer` uses to report formats, mixed buffers and errors.
+protocol IOAudioMixerDelegate: AnyObject {
+    /// Tells the receiver that the mixer's output audio format changed.
+    func audioMixer(_ audioMixer: IOAudioMixer, didOutput audioFormat: AVAudioFormat)
+    /// Tells the receiver that the mixer produced a (mixed) PCM buffer.
+    func audioMixer(_ audioMixer: IOAudioMixer, didOutput audioBuffer: AVAudioPCMBuffer, when: AVAudioTime)
+    /// Tells the receiver that mixing failed.
+    func audioMixer(_ audioMixer: IOAudioMixer, errorOccurred error: IOAudioUnitError)
+}
+
+/// Per-channel resampler configuration for `IOAudioMixer`.
+struct IOAudioMixerSettings {
+    let defaultResamplerSettings: IOAudioResamplerSettings
+    let resamplersSettings: [Int: IOAudioResamplerSettings]
+
+    /// Creates settings where the default channel uses `defaultResamplerSettings`.
+    init(defaultResamplerSettings: IOAudioResamplerSettings) {
+        self.defaultResamplerSettings = defaultResamplerSettings
+        self.resamplersSettings = [kIOAudioMixer_defaultResamplerTag: defaultResamplerSettings]
+    }
+
+    /// Creates settings from a per-channel dictionary, guaranteeing an entry
+    /// for the default channel (falling back to defaults when absent).
+    init(resamplersSettings: [Int: IOAudioResamplerSettings] = [:]) {
+        let fallback = resamplersSettings[kIOAudioMixer_defaultResamplerTag] ?? IOAudioResamplerSettings()
+        var combined = resamplersSettings
+        combined[kIOAudioMixer_defaultResamplerTag] = fallback
+        self.defaultResamplerSettings = fallback
+        self.resamplersSettings = combined
+    }
+
+    /// Returns the resampler settings for `channel`, forcing the given sample
+    /// rate and channel count while keeping the channel's preferred
+    /// downmix/channel-map options.
+    func resamplerSettings(channel: Int, sampleRate: Float64, channels: UInt32) -> IOAudioResamplerSettings {
+        let preferred = resamplersSettings[channel] ?? IOAudioResamplerSettings()
+        return IOAudioResamplerSettings(
+            sampleRate: sampleRate,
+            channels: channels,
+            downmix: preferred.downmix,
+            channelMap: preferred.channelMap
+        )
+    }
+}
+
+/// Mixes audio from multiple input channels into a single output stream.
+///
+/// Each channel is backed by a `Track` pairing an `IOAudioResampler` with a
+/// ring buffer. With only one channel the resampled audio is forwarded
+/// directly; once more than one channel exists (`shouldMix`), a
+/// MultiChannelMixer -> GenericOutput audio unit graph is built and mixed
+/// buffers are rendered from the per-channel ring buffers.
+final class IOAudioMixer {
+    private class Track {
+        let resampler: IOAudioResampler<IOAudioMixer>
+        /// Holds resampled audio until the mixer graph pulls it in the input
+        /// render callback.
+        var ringBuffer: IOAudioRingBuffer?
+
+        init(resampler: IOAudioResampler<IOAudioMixer>, format: AVAudioFormat? = nil) {
+            self.resampler = resampler
+            if let format {
+                self.ringBuffer = .init(format)
+            }
+        }
+    }
+
+    /// Receives mixed buffers, output-format changes and errors.
+    /// Declared `weak` because the delegate (e.g. `IOAudioUnit`) owns this
+    /// mixer; a strong reference would form a retain cycle.
+    weak var delegate: (any IOAudioMixerDelegate)?
+    var settings: IOAudioMixerSettings = .init() {
+        didSet {
+            defaultTrack.resampler.settings = settings.defaultResamplerSettings
+            // NOTE(review): secondary resamplers are re-aligned only when the
+            // default settings did NOT invalidate the previous ones — confirm
+            // the intended semantics of `invalidate(_:)`.
+            if !settings.defaultResamplerSettings.invalidate(oldValue.defaultResamplerSettings) {
+                enforceResamplersSettings()
+            }
+        }
+    }
+    /// The default channel's input format, if known.
+    var inputFormat: AVAudioFormat? {
+        return defaultTrack.resampler.inputFormat
+    }
+    /// The default channel's output format; every track is resampled to this.
+    var outputFormat: AVAudioFormat? {
+        return defaultTrack.resampler.outputFormat
+    }
+    private(set) var numberOfTracks = 0
+    private var tracks: [Int: Track] = [:] {
+        didSet {
+            // Derive the count from the dictionary itself so that overwriting
+            // an existing channel entry cannot inflate it (the previous
+            // `numberOfTracks += 1` fired on every assignment).
+            numberOfTracks = tracks.count
+            tryToSetupAudioNodes()
+        }
+    }
+    private var shouldMix: Bool {
+        numberOfTracks > 1
+    }
+    private var anchor: AVAudioTime?
+    private var sampleTime: AVAudioFramePosition = kIOAudioMixer_sampleTime
+    private var mixerNode: MixerNode?
+    private var outputNode: OutputNode?
+    private lazy var defaultTrack: Track = {
+        createTrack(channel: kIOAudioMixer_defaultResamplerTag, settings: settings.defaultResamplerSettings)
+    }()
+
+    /// Pulls audio for a mixer input bus from the matching track's ring
+    /// buffer. Errors are reported to the delegate but `noErr` is returned so
+    /// the render cycle keeps running.
+    private let inputRenderCallback: AURenderCallback = { (inRefCon: UnsafeMutableRawPointer, _: UnsafeMutablePointer<AudioUnitRenderActionFlags>, _: UnsafePointer<AudioTimeStamp>, inBusNumber: UInt32, inNumberFrames: UInt32, ioData: UnsafeMutablePointer<AudioBufferList>?) in
+        let audioMixer = Unmanaged<IOAudioMixer>.fromOpaque(inRefCon).takeUnretainedValue()
+        let status = audioMixer.provideInput(inNumberFrames, channel: Int(inBusNumber), ioData: ioData)
+        guard status == noErr else {
+            audioMixer.delegate?.audioMixer(audioMixer, errorOccurred: .failedToMix(error: IOAudioMixerError.unableToProvideInputData))
+            return noErr
+        }
+        return status
+    }
+
+    /// Appends a PCM sample buffer for `channel`. The first buffer on the
+    /// default channel anchors the output timeline.
+    func append(_ sampleBuffer: CMSampleBuffer, channel: UInt8 = 0) {
+        if sampleTime == kIOAudioMixer_sampleTime, channel == kIOAudioMixer_defaultResamplerTag {
+            sampleTime = sampleBuffer.presentationTimeStamp.value
+            if let outputFormat {
+                anchor = .init(hostTime: AVAudioTime.hostTime(forSeconds: sampleBuffer.presentationTimeStamp.seconds), sampleTime: sampleTime, atRate: outputFormat.sampleRate)
+            }
+        }
+        track(channel: Int(channel))?.resampler.append(sampleBuffer)
+    }
+
+    /// Appends an `AVAudioPCMBuffer` for `channel` with its timestamp.
+    func append(_ audioBuffer: AVAudioPCMBuffer, channel: UInt8, when: AVAudioTime) {
+        if sampleTime == kIOAudioMixer_sampleTime, channel == kIOAudioMixer_defaultResamplerTag {
+            sampleTime = when.sampleTime
+            anchor = when
+        }
+        track(channel: Int(channel))?.resampler.append(audioBuffer, when: when)
+    }
+
+    /// Creates, registers and returns a track for `channel`.
+    private func createTrack(channel: Int, settings: IOAudioResamplerSettings) -> Track {
+        let resampler = IOAudioResampler<IOAudioMixer>()
+        resampler.tag = channel
+        resampler.settings = settings
+        resampler.delegate = self
+        let track = Track(resampler: resampler)
+        tracks[channel] = track
+        return track
+    }
+
+    /// Returns the track for `channel`, lazily creating non-default tracks
+    /// once the output format is known (the default track is created first if
+    /// it is still missing).
+    private func track(channel: Int) -> Track? {
+        if channel == kIOAudioMixer_defaultResamplerTag {
+            return defaultTrack
+        } else if let track = tracks[channel] {
+            return track
+        } else if let sampleRate = outputFormat?.sampleRate, let channels = outputFormat?.channelCount {
+            if tracks[kIOAudioMixer_defaultResamplerTag] == nil {
+                _ = createTrack(channel: kIOAudioMixer_defaultResamplerTag, settings: settings.defaultResamplerSettings)
+            }
+            return createTrack(channel: channel,
+                               settings: settings.resamplerSettings(channel: channel, sampleRate: sampleRate, channels: channels))
+        }
+        return nil
+    }
+
+    /// Rebuilds the audio unit graph when mixing is required, reporting any
+    /// failure to the delegate.
+    private func tryToSetupAudioNodes() {
+        guard shouldMix else {
+            return
+        }
+        do {
+            try setupAudioNodes()
+        } catch {
+            delegate?.audioMixer(self, errorOccurred: .failedToMix(error: error))
+        }
+    }
+
+    /// Builds the MultiChannelMixer -> GenericOutput graph for the current
+    /// tracks and output format, then resets the sample clock.
+    private func setupAudioNodes() throws {
+        mixerNode = nil
+        outputNode = nil
+        guard let outputFormat else {
+            return
+        }
+        sampleTime = kIOAudioMixer_sampleTime
+        let mixerNode = try MixerNode(format: outputFormat)
+        try mixerNode.update(busCount: numberOfTracks, scope: .input)
+        let busCount = try mixerNode.busCount(scope: .input)
+        // The unit may report more buses than requested; disable the extras.
+        if busCount > numberOfTracks {
+            for index in numberOfTracks..<busCount {
+                try mixerNode.enable(bus: index, scope: .input, isEnabled: false)
+            }
+        }
+        for (bus, _) in tracks {
+            try mixerNode.update(format: outputFormat, bus: bus, scope: .input)
+
+            var callbackStruct = AURenderCallbackStruct(inputProc: inputRenderCallback,
+                                                        inputProcRefCon: Unmanaged.passUnretained(self).toOpaque())
+            try mixerNode.update(inputCallback: &callbackStruct, bus: bus)
+            try mixerNode.update(volume: 1, bus: bus, scope: .input)
+        }
+        try mixerNode.update(format: outputFormat, bus: 0, scope: .output)
+        try mixerNode.update(volume: 1, bus: 0, scope: .output)
+        let outputNode = try OutputNode(format: outputFormat)
+        try outputNode.update(format: outputFormat, bus: 0, scope: .input)
+        try outputNode.update(format: outputFormat, bus: 0, scope: .output)
+        try mixerNode.connect(to: outputNode)
+        try mixerNode.initializeAudioUnit()
+        try outputNode.initializeAudioUnit()
+        self.mixerNode = mixerNode
+        self.outputNode = outputNode
+        if logger.isEnabledFor(level: .info) {
+            logger.info("mixerAudioUnit: \(mixerNode)")
+        }
+    }
+
+    /// Fills `ioData` for one mixer input bus, emitting silence when the
+    /// track's ring buffer is missing or empty.
+    private func provideInput(_ inNumberFrames: UInt32, channel: Int, ioData: UnsafeMutablePointer<AudioBufferList>?) -> OSStatus {
+        guard let ringBuffer = track(channel: channel)?.ringBuffer else {
+            return noErr
+        }
+        if ringBuffer.counts == 0 {
+            guard let bufferList = UnsafeMutableAudioBufferListPointer(ioData) else {
+                return noErr
+            }
+            for i in 0..<bufferList.count {
+                memset(bufferList[i].mData, 0, Int(bufferList[i].mDataByteSize))
+            }
+            return noErr
+        }
+        let status = ringBuffer.render(inNumberFrames, ioData: ioData)
+        return status
+    }
+
+    /// Renders `numberOfFrames` mixed frames and forwards them to the delegate.
+    private func mix(numberOfFrames: AVAudioFrameCount) {
+        guard let outputNode else {
+            return
+        }
+        do {
+            let buffer = try outputNode.render(numberOfFrames: numberOfFrames, sampleTime: sampleTime)
+            let time = AVAudioTime(sampleTime: sampleTime, atRate: outputNode.format.sampleRate)
+            // NOTE(review): the sample clock only advances when the anchor
+            // extrapolation succeeds; otherwise the rendered buffer is
+            // silently dropped — confirm that is intended.
+            if let anchor, let when = time.extrapolateTime(fromAnchor: anchor) {
+                delegate?.audioMixer(self, didOutput: buffer, when: when)
+                sampleTime += Int64(numberOfFrames)
+            }
+        } catch {
+            delegate?.audioMixer(self, errorOccurred: .failedToMix(error: error))
+        }
+    }
+
+    /// Forces every non-default resampler to the current output format so all
+    /// mixer inputs share one format.
+    private func enforceResamplersSettings() {
+        guard shouldMix else {
+            return
+        }
+        guard let outputFormat else {
+            delegate?.audioMixer(self, errorOccurred: .failedToMix(error: IOAudioMixerError.unableToEnforceAudioFormat))
+            return
+        }
+        for (channel, track) in tracks {
+            if channel == kIOAudioMixer_defaultResamplerTag {
+                continue
+            }
+            let preferredSettings = settings.resamplersSettings[channel] ?? .init()
+            track.resampler.settings = IOAudioResamplerSettings(
+                sampleRate: outputFormat.sampleRate,
+                channels: outputFormat.channelCount,
+                downmix: preferredSettings.downmix,
+                channelMap: preferredSettings.channelMap
+            )
+        }
+    }
+}
+
+extension IOAudioMixer: IOAudioResamplerDelegate {
+    // MARK: IOAudioResamplerDelegate
+    /// A resampler's output format changed. For the default channel this
+    /// re-aligns the other resamplers, rebuilds the audio graph and notifies
+    /// the delegate; the reporting channel then gets a fresh ring buffer in
+    /// the new format.
+    func resampler(_ resampler: IOAudioResampler<IOAudioMixer>, didOutput audioFormat: AVAudioFormat) {
+        guard shouldMix else {
+            if resampler.tag == kIOAudioMixer_defaultResamplerTag {
+                delegate?.audioMixer(self, didOutput: audioFormat)
+            }
+            return
+        }
+        if resampler.tag == kIOAudioMixer_defaultResamplerTag {
+            enforceResamplersSettings()
+            tryToSetupAudioNodes()
+            delegate?.audioMixer(self, didOutput: audioFormat)
+        }
+        // Replace the ring buffer so queued audio matches the new format.
+        track(channel: resampler.tag)?.ringBuffer = .init(audioFormat)
+    }
+
+    /// A resampler produced a buffer. Without mixing, default-channel audio
+    /// is forwarded directly; otherwise the buffer is queued in the channel's
+    /// ring buffer and the default channel drives a mix pass.
+    func resampler(_ resampler: IOAudioResampler<IOAudioMixer>, didOutput audioBuffer: AVAudioPCMBuffer, when: AVAudioTime) {
+        guard shouldMix else {
+            if resampler.tag == kIOAudioMixer_defaultResamplerTag {
+                delegate?.audioMixer(self, didOutput: audioBuffer, when: when)
+            }
+            return
+        }
+        guard audioBuffer.format.sampleRate == outputFormat?.sampleRate else {
+            delegate?.audioMixer(self, errorOccurred: .failedToMix(error: IOAudioMixerError.invalidSampleRate))
+            return
+        }
+        guard let track = track(channel: resampler.tag) else {
+            return
+        }
+        track.ringBuffer?.append(audioBuffer, when: when)
+
+        // The default channel acts as the clock: it triggers rendering.
+        if resampler.tag == kIOAudioMixer_defaultResamplerTag {
+            mix(numberOfFrames: audioBuffer.frameLength)
+        }
+    }
+
+    /// Propagates resampler errors to the mixer's delegate.
+    func resampler(_ resampler: IOAudioResampler<IOAudioMixer>, errorOccurred error: IOAudioUnitError) {
+        delegate?.audioMixer(self, errorOccurred: error)
+    }
+}
diff --git a/Sources/IO/IOAudioResampler.swift b/Sources/IO/IOAudioResampler.swift
index fb230d883..e47642fa9 100644
--- a/Sources/IO/IOAudioResampler.swift
+++ b/Sources/IO/IOAudioResampler.swift
@@ -138,6 +138,8 @@ final class IOAudioResampler<T: IOAudioResamplerDelegate> {
         return audioConverter?.outputFormat
     }
 
+    var tag: Int = 0
+
     private var inSourceFormat: AudioStreamBasicDescription? {
         didSet {
             guard var inSourceFormat, inSourceFormat != oldValue else {
@@ -241,7 +243,7 @@ final class IOAudioResampler<T: IOAudioResamplerDelegate> {
         }
         if let inputFormat, let outputFormat {
             if logger.isEnabledFor(level: .info) {
-                logger.info("inputFormat:", inputFormat, ",outputFormat:", outputFormat)
+                logger.info("tag:", tag, ", inputFormat:", inputFormat, ", outputFormat:", outputFormat)
             }
             sampleTime = kIOAudioResampler_sampleTime
             audioConverter = .init(from: inputFormat, to: outputFormat)
diff --git a/Sources/IO/IOAudioUnit.swift b/Sources/IO/IOAudioUnit.swift
index 2b22ea5f7..1ebf2c835 100644
--- a/Sources/IO/IOAudioUnit.swift
+++ b/Sources/IO/IOAudioUnit.swift
@@ -10,6 +10,8 @@ public enum IOAudioUnitError: Swift.Error {
     case failedToCreate(from: AVAudioFormat?, to: AVAudioFormat?)
     /// The IOAudioUnit  faild to convert the an audio buffer.
     case failedToConvert(error: NSError)
+    /// The IOAudioUnit failed to mix the audio buffers.
+    case failedToMix(error: any Error)
 }
 
 protocol IOAudioUnitDelegate: AnyObject {
@@ -35,7 +37,7 @@ final class IOAudioUnit: NSObject, IOUnit {
     var settings: AudioCodecSettings = .default {
         didSet {
             codec.settings = settings
-            resampler.settings = settings.makeAudioResamplerSettings()
+            audioMixer.settings = settings.makeAudioMixerSettings()
         }
     }
     var isRunning: Atomic<Bool> {
@@ -50,10 +52,10 @@ final class IOAudioUnit: NSObject, IOUnit {
         codec.delegate = mixer
         return codec
     }()
-    private lazy var resampler: IOAudioResampler<IOAudioUnit> = {
-        var resampler = IOAudioResampler<IOAudioUnit>()
-        resampler.delegate = self
-        return resampler
+    private lazy var audioMixer: IOAudioMixer = {
+        var audioMixer = IOAudioMixer()
+        audioMixer.delegate = self
+        return audioMixer
     }()
     private var monitor: IOAudioMonitor = .init()
     #if os(tvOS)
@@ -89,16 +91,16 @@ final class IOAudioUnit: NSObject, IOUnit {
     func append(_ sampleBuffer: CMSampleBuffer, channel: UInt8 = 0) {
         switch sampleBuffer.formatDescription?.audioStreamBasicDescription?.mFormatID {
         case kAudioFormatLinearPCM:
-            resampler.append(sampleBuffer.muted(muted))
+            audioMixer.append(sampleBuffer, channel: channel)
         default:
             codec.append(sampleBuffer)
         }
     }
 
-    func append(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) {
+    func append(_ audioBuffer: AVAudioBuffer, channel: UInt8 = 0, when: AVAudioTime) {
         switch audioBuffer {
         case let audioBuffer as AVAudioPCMBuffer:
-            resampler.append(audioBuffer, when: when)
+            audioMixer.append(audioBuffer, channel: channel, when: when)
         case let audioBuffer as AVAudioCompressedBuffer:
             codec.append(audioBuffer, when: when)
         default:
@@ -112,7 +114,7 @@ final class IOAudioUnit: NSObject, IOUnit {
 extension IOAudioUnit: AVCaptureAudioDataOutputSampleBufferDelegate {
     // MARK: AVCaptureAudioDataOutputSampleBufferDelegate
     func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
-        resampler.append(sampleBuffer.muted(muted))
+        audioMixer.append(sampleBuffer.muted(muted))
     }
 }
 #endif
@@ -128,19 +130,19 @@ extension IOAudioUnit: Running {
     }
 }
 
-extension IOAudioUnit: IOAudioResamplerDelegate {
-    // MARK: IOAudioResamplerDelegate
-    func resampler(_ resampler: IOAudioResampler<IOAudioUnit>, errorOccurred error: IOAudioUnitError) {
+extension IOAudioUnit: IOAudioMixerDelegate {
+    // MARK: IOAudioMixerDelegate
+    func audioMixer(_ audioMixer: IOAudioMixer, errorOccurred error: IOAudioUnitError) {
         mixer?.audioUnit(self, errorOccurred: error)
     }
 
-    func resampler(_ resampler: IOAudioResampler<IOAudioUnit>, didOutput audioFormat: AVAudioFormat) {
-        inputFormat = resampler.inputFormat
+    func audioMixer(_ audioMixer: IOAudioMixer, didOutput audioFormat: AVAudioFormat) {
+        inputFormat = audioMixer.inputFormat
         codec.inputFormat = audioFormat
         monitor.inputFormat = audioFormat
     }
 
-    func resampler(_ resampler: IOAudioResampler<IOAudioUnit>, didOutput audioBuffer: AVAudioPCMBuffer, when: AVAudioTime) {
+    func audioMixer(_ audioMixer: IOAudioMixer, didOutput audioBuffer: AVAudioPCMBuffer, when: AVAudioTime) {
         mixer?.audioUnit(self, didOutput: audioBuffer, when: when)
         monitor.append(audioBuffer, when: when)
         codec.append(audioBuffer, when: when)
diff --git a/Sources/IO/IOStream.swift b/Sources/IO/IOStream.swift
index 2cdbd3571..6ffaf4b40 100644
--- a/Sources/IO/IOStream.swift
+++ b/Sources/IO/IOStream.swift
@@ -412,15 +412,15 @@ open class IOStream: NSObject {
 
     /// Append a CMSampleBuffer.
     /// - Warning: This method can't use attachCamera or attachAudio method at the same time.
-    public func append(_ sampleBuffer: CMSampleBuffer) {
+    public func append(_ sampleBuffer: CMSampleBuffer, channel: UInt8 = 0) {
         switch sampleBuffer.formatDescription?._mediaType {
         case kCMMediaType_Audio:
             mixer.audioIO.lockQueue.async {
-                self.mixer.audioIO.append(sampleBuffer)
+                self.mixer.audioIO.append(sampleBuffer, channel: channel)
             }
         case kCMMediaType_Video:
             mixer.videoIO.lockQueue.async {
-                self.mixer.videoIO.append(sampleBuffer)
+                self.mixer.videoIO.append(sampleBuffer, channel: channel)
             }
         default:
             break
@@ -429,9 +429,9 @@ open class IOStream: NSObject {
 
     /// Append an AVAudioBuffer.
     /// - Warning: This method can't use attachAudio method at the same time.
-    public func append(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) {
+    public func append(_ audioBuffer: AVAudioBuffer, channel: UInt8 = 0, when: AVAudioTime) {
         mixer.audioIO.lockQueue.async {
-            self.mixer.audioIO.append(audioBuffer, when: when)
+            self.mixer.audioIO.append(audioBuffer, channel: channel, when: when)
         }
     }
 
diff --git a/Sources/RTMP/RTMPMuxer.swift b/Sources/RTMP/RTMPMuxer.swift
index b9de8cbdd..b4d8f1835 100644
--- a/Sources/RTMP/RTMPMuxer.swift
+++ b/Sources/RTMP/RTMPMuxer.swift
@@ -96,7 +96,7 @@ final class RTMPMuxer {
                 audioBuffer.packetCount = 1
                 audioBuffer.byteLength = UInt32(byteCount)
                 audioBuffer.data.copyMemory(from: baseAddress.advanced(by: codec.headerSize), byteCount: byteCount)
-                stream.mixer.audioIO.append(audioBuffer, when: audioTimeStamp)
+                stream.mixer.audioIO.append(audioBuffer, channel: 0, when: audioTimeStamp)
             }
         default:
             break