Skip to content

Commit

Permalink
Audio resampling.
Browse files Browse the repository at this point in the history
  • Loading branch information
shogo4405 committed Sep 15, 2023
1 parent ba5af2f commit f200f93
Show file tree
Hide file tree
Showing 18 changed files with 651 additions and 460 deletions.
32 changes: 20 additions & 12 deletions HaishinKit.xcodeproj/project.pbxproj

Large diffs are not rendered by default.

171 changes: 71 additions & 100 deletions Sources/Codec/AudioCodec.swift
Original file line number Diff line number Diff line change
Expand Up @@ -18,50 +18,14 @@ public protocol AudioCodecDelegate: AnyObject {
* - seealso: https://developer.apple.com/library/ios/technotes/tn2236/_index.html
*/
public class AudioCodec {
private static let frameCapacity: UInt32 = 1024

/// The AudioCodec error domain codes.
public enum Error: Swift.Error {
case failedToCreate(from: AVAudioFormat, to: AVAudioFormat)
case failedToConvert(error: NSError)
}

/// Builds an `AVAudioFormat` that matches the given stream description.
///
/// Big-endian 16-bit linear PCM (as delivered by ReplayKit's audioApp track)
/// cannot be expressed directly from the description, so it is mapped to an
/// interleaved `.pcmFormatInt16` format instead.
/// - Parameter inSourceFormat: The source description; passed `inout` because
///   `AVAudioFormat(streamDescription:)` requires a mutable pointer.
/// - Returns: A matching format, or `nil` when the description cannot be
///   represented (e.g. big-endian PCM with a bit depth other than 16).
static func makeAudioFormat(_ inSourceFormat: inout AudioStreamBasicDescription) -> AVAudioFormat? {
    let isLinearPCM = inSourceFormat.mFormatID == kAudioFormatLinearPCM
    let isBigEndian = (inSourceFormat.mFormatFlags & kLinearPCMFormatFlagIsBigEndian) == kLinearPCMFormatFlagIsBigEndian
    guard isLinearPCM && isBigEndian else {
        // Every other format can be built straight from the stream description.
        if let layout = Self.makeChannelLayout(inSourceFormat.mChannelsPerFrame) {
            return .init(streamDescription: &inSourceFormat, channelLayout: layout)
        }
        return .init(streamDescription: &inSourceFormat)
    }
    // ReplayKit audioApp: only 16-bit samples are supported here.
    guard inSourceFormat.mBitsPerChannel == 16 else {
        return nil
    }
    if let layout = Self.makeChannelLayout(inSourceFormat.mChannelsPerFrame) {
        return .init(commonFormat: .pcmFormatInt16, sampleRate: inSourceFormat.mSampleRate, interleaved: true, channelLayout: layout)
    }
    return AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: inSourceFormat.mSampleRate, channels: inSourceFormat.mChannelsPerFrame, interleaved: true)
}

/// Returns a discrete channel layout for multi-channel audio.
///
/// Mono and stereo streams need no explicit layout, so `nil` is returned
/// for one or two channels.
static func makeChannelLayout(_ numberOfChannels: UInt32) -> AVAudioChannelLayout? {
    if numberOfChannels <= 2 {
        return nil
    }
    let tag = kAudioChannelLayoutTag_DiscreteInOrder | numberOfChannels
    return AVAudioChannelLayout(layoutTag: tag)
}

/// Creates a channel map for specific input and output format
static func makeChannelMap(inChannels: Int, outChannels: Int, outputChannelsMap: [Int: Int]) -> [NSNumber] {
var result = Array(repeating: -1, count: outChannels)
for inputIndex in 0..<min(inChannels, outChannels) {
result[inputIndex] = inputIndex
}
for currentIndex in 0..<outChannels {
if let inputIndex = outputChannelsMap[currentIndex], inputIndex < inChannels {
result[currentIndex] = inputIndex
}
}
return result.map { NSNumber(value: $0) }
}

/// Specifies the delegate.
public weak var delegate: (any AudioCodecDelegate)?
/// This instance is running to process(true) or not(false).
Expand All @@ -72,55 +36,33 @@ public class AudioCodec {
settings.apply(audioConverter, oldValue: oldValue)
}
}
var effects: Set<AudioEffect> = []
var lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.AudioCodec.lock")
var inSourceFormat: AudioStreamBasicDescription? {
didSet {
guard var inSourceFormat, inSourceFormat != oldValue else {
return
}
inputBuffers.removeAll()
outputBuffers.removeAll()
ringBuffer = .init(&inSourceFormat)
audioConverter = makeAudioConverter(&inSourceFormat)
for _ in 0..<settings.format.bufferCounts {
if let inputBuffer = makeInputBuffer() {
inputBuffers.append(inputBuffer)
}
}
}
}
private var ringBuffer: AudioCodecRingBuffer?
private var cursor: Int = 0
private var inputBuffers: [AVAudioBuffer] = []
private var outputBuffers: [AVAudioBuffer] = []
private var audioConverter: AVAudioConverter?

/// Append a CMSampleBuffer.
public func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer, offset: Int = 0) {
public func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
guard isRunning.value else {
return
}
switch settings.format {
case .aac:
guard let audioConverter, let ringBuffer else {
return
}
let numSamples = ringBuffer.appendSampleBuffer(sampleBuffer, offset: offset)
if ringBuffer.isReady {
guard let buffer = getOutputBuffer() else {
return
}
for effect in effects {
effect.execute(ringBuffer.current, presentationTimeStamp: ringBuffer.presentationTimeStamp)
}
var error: NSError?
audioConverter.convert(to: buffer, error: &error) { _, status in
status.pointee = .haveData
return ringBuffer.current
}
if let error {
delegate?.audioCodec(self, errorOccurred: .failedToConvert(error: error))
} else {
delegate?.audioCodec(self, didOutput: buffer, presentationTimeStamp: ringBuffer.presentationTimeStamp)
}
ringBuffer.next()
}
if offset + numSamples < sampleBuffer.numSamples {
appendSampleBuffer(sampleBuffer, offset: offset + numSamples)
}
case .pcm:
var offset = 0
var presentationTimeStamp = sampleBuffer.presentationTimeStamp
Expand All @@ -140,63 +82,70 @@ public class AudioCodec {
offset += sampleSize
}
}
default:
break
}
}

func appendAudioBuffer(_ audioBuffer: AVAudioBuffer, presentationTimeStamp: CMTime) {
guard isRunning.value, let audioConverter, let buffer = getOutputBuffer() else {
guard let audioConverter, isRunning.value else {
return
}
var error: NSError?
audioConverter.convert(to: buffer, error: &error) { _, status in
status.pointee = .haveData
return audioBuffer
let outputBuffer = self.outputBuffer
let outputStatus = audioConverter.convert(to: outputBuffer, error: &error) { _, inputStatus in
switch self.inputBuffer {
case let inputBuffer as AVAudioPCMBuffer:
if !inputBuffer.copy(audioBuffer) {
inputBuffer.muted()
}
default:
break
}
inputStatus.pointee = .haveData
return self.inputBuffer
}
switch outputStatus {
case .haveData:
delegate?.audioCodec(self, didOutput: outputBuffer, presentationTimeStamp: presentationTimeStamp)
case .error:
if let error {
delegate?.audioCodec(self, errorOccurred: .failedToConvert(error: error))
}
default:
break
}
if let error {
delegate?.audioCodec(self, errorOccurred: .failedToConvert(error: error))
} else {
delegate?.audioCodec(self, didOutput: buffer, presentationTimeStamp: presentationTimeStamp)
cursor += 1
if cursor == inputBuffers.count {
cursor = 0
}
}

func makeInputBuffer() -> AVAudioBuffer? {
private func makeInputBuffer() -> AVAudioBuffer? {
guard let inputFormat = audioConverter?.inputFormat else {
return nil
}
switch inSourceFormat?.mFormatID {
case kAudioFormatLinearPCM:
return AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: 1024)
let buffer = AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: Self.frameCapacity)
buffer?.frameLength = Self.frameCapacity
return buffer
default:
return AVAudioCompressedBuffer(format: inputFormat, packetCapacity: 1, maximumPacketSize: 1024)
}
}

/// Hands a consumed output buffer back to the pool so it can be reused
/// by a later conversion instead of being reallocated.
func releaseOutputBuffer(_ buffer: AVAudioBuffer) {
    outputBuffers.append(buffer)
}

/// Fetches a buffer for converter output, reusing a pooled buffer when
/// one is available and allocating a fresh one otherwise.
/// - Returns: A buffer matching the converter's output format, or `nil`
///   when no converter has been configured yet.
private func getOutputBuffer() -> AVAudioBuffer? {
    guard let outputFormat = audioConverter?.outputFormat else {
        return nil
    }
    guard outputBuffers.isEmpty else {
        return outputBuffers.removeFirst()
    }
    return settings.format.makeAudioBuffer(outputFormat)
}

private func makeAudioConverter(_ inSourceFormat: inout AudioStreamBasicDescription) -> AVAudioConverter? {
guard
let inputFormat = Self.makeAudioFormat(&inSourceFormat),
let inputFormat = AVAudioFormatFactory.makeAudioFormat(&inSourceFormat),
let outputFormat = settings.format.makeAudioFormat(inSourceFormat) else {
return nil
}
logger.debug("inputFormat: \(inputFormat)")
logger.debug("outputFormat: \(outputFormat)")
if logger.isEnabledFor(level: .info) {
logger.info("inputFormat", "=>", inputFormat)
logger.info("outputFormat", "=>", outputFormat)
}
let converter = AVAudioConverter(from: inputFormat, to: outputFormat)
let channelMap = Self.makeChannelMap(inChannels: Int(inputFormat.channelCount), outChannels: Int(outputFormat.channelCount), outputChannelsMap: settings.outputChannelsMap)
logger.debug("channelMap: \(channelMap)")
converter?.channelMap = channelMap
settings.apply(converter, oldValue: nil)
if converter == nil {
delegate?.audioCodec(self, errorOccurred: .failedToCreate(from: inputFormat, to: outputFormat))
Expand All @@ -207,6 +156,29 @@ public class AudioCodec {
}
}

extension AudioCodec: Codec {
    // MARK: Codec
    typealias Buffer = AVAudioBuffer

    /// The input buffer currently selected by the ring cursor.
    var inputBuffer: AVAudioBuffer {
        inputBuffers[cursor]
    }

    /// A buffer ready to receive converter output: a pooled buffer when one
    /// is available, otherwise a freshly allocated one. Falls back to an
    /// empty `AVAudioBuffer` when no converter (and thus no output format)
    /// is configured or allocation fails.
    var outputBuffer: AVAudioBuffer {
        guard let outputFormat = audioConverter?.outputFormat else {
            return .init()
        }
        guard !outputBuffers.isEmpty else {
            return settings.format.makeAudioBuffer(outputFormat) ?? .init()
        }
        return outputBuffers.removeFirst()
    }

    /// Returns a finished output buffer to the reuse pool.
    func releaseOutputBuffer(_ buffer: AVAudioBuffer) {
        outputBuffers.append(buffer)
    }
}

extension AudioCodec: Running {
// MARK: Running
public func startRunning() {
Expand All @@ -228,7 +200,6 @@ extension AudioCodec: Running {
}
self.inSourceFormat = nil
self.audioConverter = nil
self.ringBuffer = nil
self.isRunning.mutate { $0 = false }
}
}
Expand Down
131 changes: 0 additions & 131 deletions Sources/Codec/AudioCodecRingBuffer.swift

This file was deleted.

Loading

0 comments on commit f200f93

Please sign in to comment.