Skip to content

Commit

Permalink
Refactor IOAudioUnit.
Browse files Browse the repository at this point in the history
  • Loading branch information
shogo4405 committed Oct 16, 2023
1 parent 89e65c0 commit aaad70f
Show file tree
Hide file tree
Showing 11 changed files with 124 additions and 92 deletions.
14 changes: 9 additions & 5 deletions HaishinKit.xcodeproj/project.pbxproj
Original file line number Diff line number Diff line change
Expand Up @@ -214,6 +214,7 @@
BC8A29912AA63D9E00F6D27F /* HaishinKit.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 2945CBBD1B4BE66000104112 /* HaishinKit.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
BC8A29942AA63DA300F6D27F /* HaishinKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 2945CBBD1B4BE66000104112 /* HaishinKit.framework */; };
BC8A29952AA63DA300F6D27F /* HaishinKit.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 2945CBBD1B4BE66000104112 /* HaishinKit.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
BC93792F2ADD76BE001097DB /* AVAudioCompressedBuffer+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC93792E2ADD76BE001097DB /* AVAudioCompressedBuffer+Extension.swift */; };
BC959EEF296EE4190067BA97 /* ImageTransform.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC959EEE296EE4190067BA97 /* ImageTransform.swift */; };
BC959F0E29705B1B0067BA97 /* SCStreamPublishViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC959F0D29705B1B0067BA97 /* SCStreamPublishViewController.swift */; };
BC959F1229717EDB0067BA97 /* PreferenceViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC959F1129717EDB0067BA97 /* PreferenceViewController.swift */; };
Expand Down Expand Up @@ -611,6 +612,7 @@
BC7C56CC29A786AE00C41A9B /* ADTS.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ADTS.swift; sourceTree = "<group>"; };
BC7C56D029A78D4F00C41A9B /* ADTSHeaderTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ADTSHeaderTests.swift; sourceTree = "<group>"; };
BC83A4722403D83B006BDE06 /* VTCompressionSession+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "VTCompressionSession+Extension.swift"; sourceTree = "<group>"; };
BC93792E2ADD76BE001097DB /* AVAudioCompressedBuffer+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioCompressedBuffer+Extension.swift"; sourceTree = "<group>"; };
BC959EEE296EE4190067BA97 /* ImageTransform.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImageTransform.swift; sourceTree = "<group>"; };
BC959F0D29705B1B0067BA97 /* SCStreamPublishViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SCStreamPublishViewController.swift; sourceTree = "<group>"; };
BC959F1129717EDB0067BA97 /* PreferenceViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreferenceViewController.swift; sourceTree = "<group>"; };
Expand Down Expand Up @@ -1110,15 +1112,16 @@
29EA87D11E799EFF0043A5F8 /* Extension */ = {
isa = PBXGroup;
children = (
BC4C9EAE23F2E736004A14F2 /* AudioStreamBasicDescription+Extension.swift */,
BC93792E2ADD76BE001097DB /* AVAudioCompressedBuffer+Extension.swift */,
1A2166D3A449D813866FE9D9 /* AVAudioFormat+Extension.swift */,
BC22EEF12AAF5D6300E3406D /* AVAudioPCMBuffer+Extension.swift */,
BC04A2D32AD2D1D700C87A3E /* AVAudioTime+Extension.swift */,
BC562DC6295767860048D89A /* AVCaptureDevice+Extension.swift */,
BC2828AC2AA3225100741013 /* AVCaptureDevice.Format+Extension.swift */,
BC1DC4A329F4F74F00E928ED /* AVCaptureSession+Extension.swift */,
BC562DC6295767860048D89A /* AVCaptureDevice+Extension.swift */,
BC562DCA29576D220048D89A /* AVCaptureSession.Preset+Extension.swift */,
BC1DC4A329F4F74F00E928ED /* AVCaptureSession+Extension.swift */,
BC2828AE2AA322E400741013 /* AVFrameRateRange+Extension.swift */,
BC4C9EAE23F2E736004A14F2 /* AudioStreamBasicDescription+Extension.swift */,
29EA87E51E79A2780043A5F8 /* CMAudioFormatDescription+Extension.swift */,
29EA87E91E79A3B70043A5F8 /* CMBlockBuffer+Extension.swift */,
2916196B1E7F0768009FB344 /* CMFormatDescription+Extension.swift */,
Expand All @@ -1132,10 +1135,10 @@
29EA87D41E799F670043A5F8 /* Mirror+Extension.swift */,
BC0F1FDB2ACC630400C326FF /* NSView+Extension.swift */,
29EA87D71E79A0090043A5F8 /* URL+Extension.swift */,
BC83A4722403D83B006BDE06 /* VTCompressionSession+Extension.swift */,
BC4914AD28DDF445009E2DF6 /* VTDecompressionSession+Extension.swift */,
BC110252292DD6E900D48035 /* vImage_Buffer+Extension.swift */,
BC6FC9212961B3D800A746EE /* vImage_CGImageFormat+Extension.swift */,
BC83A4722403D83B006BDE06 /* VTCompressionSession+Extension.swift */,
BC4914AD28DDF445009E2DF6 /* VTDecompressionSession+Extension.swift */,
);
path = Extension;
sourceTree = "<group>";
Expand Down Expand Up @@ -1758,6 +1761,7 @@
BC1DC4A429F4F74F00E928ED /* AVCaptureSession+Extension.swift in Sources */,
29EA87D81E79A0090043A5F8 /* URL+Extension.swift in Sources */,
BC9F9C7826F8C16600B01ED0 /* Choreographer.swift in Sources */,
BC93792F2ADD76BE001097DB /* AVAudioCompressedBuffer+Extension.swift in Sources */,
29B876BC1CD70B3900FC07DA /* ByteArray.swift in Sources */,
29B876831CD70AE800FC07DA /* AudioSpecificConfig.swift in Sources */,
295891121EEB8D7200CE51E1 /* FLVFrameType.swift in Sources */,
Expand Down
11 changes: 7 additions & 4 deletions Sources/Codec/AudioCodec.swift
Original file line number Diff line number Diff line change
Expand Up @@ -89,13 +89,16 @@ final class AudioCodec<T: AudioCodecDelegate> {
}

func append(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) {
inputFormat = audioBuffer.format
guard let audioConverter, isRunning.value else {
return
}
var error: NSError?
let outputBuffer = self.outputBuffer
let outputStatus = audioConverter.convert(to: outputBuffer, error: &error) { _, inputStatus in
switch self.inputBuffer {
case let inputBuffer as AVAudioCompressedBuffer:
inputBuffer.copy(audioBuffer)
case let inputBuffer as AVAudioPCMBuffer:
if !inputBuffer.copy(audioBuffer) {
inputBuffer.muted()
Expand Down Expand Up @@ -160,10 +163,6 @@ extension AudioCodec: Codec {
// MARK: Codec
typealias Buffer = AVAudioBuffer

var inputBuffer: AVAudioBuffer {
return inputBuffers[cursor]
}

var outputBuffer: AVAudioBuffer {
guard let outputFormat = audioConverter?.outputFormat else {
return .init()
Expand All @@ -179,6 +178,10 @@ extension AudioCodec: Codec {
func releaseOutputBuffer(_ buffer: AVAudioBuffer) {
outputBuffers.append(buffer)
}

private var inputBuffer: AVAudioBuffer {
return inputBuffers[cursor]
}
}

extension AudioCodec: Running {
Expand Down
2 changes: 1 addition & 1 deletion Sources/Codec/AudioCodecSettings.swift
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ public struct AudioCodecSettings: Codable {
case .aac:
return 1
case .pcm:
return 6
return 10
}
}

Expand Down
1 change: 0 additions & 1 deletion Sources/Codec/Codec.swift
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ import Foundation
protocol Codec {
associatedtype Buffer

var inputBuffer: Buffer { get }
var outputBuffer: Buffer { get }

func releaseOutputBuffer(_ buffer: Buffer)
Expand Down
19 changes: 19 additions & 0 deletions Sources/Extension/AVAudioCompressedBuffer+Extension.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
import AVFoundation
import Foundation

extension AVAudioCompressedBuffer {
/// Copies the contents of another compressed buffer into this one.
/// Returns `false` (copying nothing) when `buffer` is not an
/// `AVAudioCompressedBuffer`; returns `true` after copying otherwise.
@discardableResult
@inline(__always)
final func copy(_ buffer: AVAudioBuffer) -> Bool {
guard let buffer = buffer as? AVAudioCompressedBuffer else {
return false
}
// Only the first packet description is copied (single pointee), so this
// assumes single-packet buffers — TODO confirm callers never pass
// multi-packet sources.
if let packetDescriptions = buffer.packetDescriptions {
self.packetDescriptions?.pointee = packetDescriptions.pointee
}
packetCount = buffer.packetCount
byteLength = buffer.byteLength
// NOTE(review): no check that this buffer's capacity can hold
// `buffer.byteLength` bytes before the raw memory copy — verify the
// destination is always allocated at least as large as the source.
data.copyMemory(from: buffer.data, byteCount: Int(buffer.byteLength))
return true
}
}
2 changes: 2 additions & 0 deletions Sources/Extension/AVAudioFormat+Extension.swift
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import AVFoundation

#if DEBUG
extension AVAudioCommonFormat: CustomDebugStringConvertible {
public var debugDescription: String {
switch self {
Expand Down Expand Up @@ -105,3 +106,4 @@ extension AudioFormatID: CustomDebugStringConvertible {
}
}
}
#endif
18 changes: 10 additions & 8 deletions Sources/Extension/AVAudioPCMBuffer+Extension.swift
Original file line number Diff line number Diff line change
Expand Up @@ -33,32 +33,34 @@ extension AVAudioPCMBuffer {
return sampleBuffer
}

final func copy(_ audioPCMBuffer: AVAudioBuffer) -> Bool {
guard let audioPCMBuffer = audioPCMBuffer as? AVAudioPCMBuffer, frameLength == audioPCMBuffer.frameLength else {
@discardableResult
@inline(__always)
final func copy(_ audioBuffer: AVAudioBuffer) -> Bool {
guard let audioBuffer = audioBuffer as? AVAudioPCMBuffer, frameLength == audioBuffer.frameLength else {
return false
}
let numSamples = Int(frameLength)
if format.isInterleaved {
let channelCount = Int(format.channelCount)
switch format.commonFormat {
case .pcmFormatInt16:
memcpy(int16ChannelData?[0], audioPCMBuffer.int16ChannelData?[0], numSamples * channelCount * 2)
memcpy(int16ChannelData?[0], audioBuffer.int16ChannelData?[0], numSamples * channelCount * 2)
case .pcmFormatInt32:
memcpy(int32ChannelData?[0], audioPCMBuffer.int32ChannelData?[0], numSamples * channelCount * 4)
memcpy(int32ChannelData?[0], audioBuffer.int32ChannelData?[0], numSamples * channelCount * 4)
case .pcmFormatFloat32:
memcpy(floatChannelData?[0], audioPCMBuffer.floatChannelData?[0], numSamples * channelCount * 4)
memcpy(floatChannelData?[0], audioBuffer.floatChannelData?[0], numSamples * channelCount * 4)
default:
break
}
} else {
for i in 0..<Int(format.channelCount) {
switch format.commonFormat {
case .pcmFormatInt16:
memcpy(int16ChannelData?[i], audioPCMBuffer.int16ChannelData?[i], numSamples * 2)
memcpy(int16ChannelData?[i], audioBuffer.int16ChannelData?[i], numSamples * 2)
case .pcmFormatInt32:
memcpy(int32ChannelData?[i], audioPCMBuffer.int32ChannelData?[i], numSamples * 4)
memcpy(int32ChannelData?[i], audioBuffer.int32ChannelData?[i], numSamples * 4)
case .pcmFormatFloat32:
memcpy(floatChannelData?[i], audioPCMBuffer.floatChannelData?[i], numSamples * 4)
memcpy(floatChannelData?[i], audioBuffer.floatChannelData?[i], numSamples * 4)
default:
break
}
Expand Down
7 changes: 6 additions & 1 deletion Sources/MPEG/AudioSpecificConfig.swift
Original file line number Diff line number Diff line change
Expand Up @@ -188,7 +188,12 @@ struct AudioSpecificConfig: Equatable {
return adts
}

func audioStreamBasicDescription() -> AudioStreamBasicDescription {
/// Builds an `AVAudioFormat` from this config's stream description.
/// Returns `nil` when `AVAudioFormat` rejects the description.
func makeAudioFormat() -> AVAudioFormat? {
// Mutable local is required because the initializer takes the
// description by inout pointer.
var audioStreamBasicDescription = makeAudioStreamBasicDescription()
return AVAudioFormat(streamDescription: &audioStreamBasicDescription)
}

private func makeAudioStreamBasicDescription() -> AudioStreamBasicDescription {
AudioStreamBasicDescription(
mSampleRate: frequency.sampleRate,
mFormatID: kAudioFormatMPEG4AAC,
Expand Down
12 changes: 0 additions & 12 deletions Sources/Media/IOAudioUnit.swift
Original file line number Diff line number Diff line change
Expand Up @@ -45,9 +45,6 @@ final class IOAudioUnit: NSObject, IOUnit {
var outputFormat: FormatDescription? {
return codec.outputFormat
}
var inputBuffer: AVAudioBuffer? {
return codec.inputBuffer
}
private lazy var codec: AudioCodec<IOMixer> = {
var codec = AudioCodec<IOMixer>(lockQueue: lockQueue)
codec.delegate = mixer
Expand Down Expand Up @@ -113,15 +110,6 @@ final class IOAudioUnit: NSObject, IOUnit {
break
}
}

func setAudioStreamBasicDescription(_ audioStreamBasicDescription: AudioStreamBasicDescription?) {
guard var audioStreamBasicDescription else {
return
}
let audioFormat = AVAudioFormat(streamDescription: &audioStreamBasicDescription)
inputFormat = audioFormat
codec.inputFormat = audioFormat
}
}

#if os(iOS) || os(tvOS) || os(macOS)
Expand Down
49 changes: 5 additions & 44 deletions Sources/RTMP/RTMPMessage.swift
Original file line number Diff line number Diff line change
Expand Up @@ -568,53 +568,14 @@ final class RTMPAudioMessage: RTMPMessage {
guard let stream = connection.streams.first(where: { $0.id == streamId }) else {
return
}
stream.info.byteCount.mutate { $0 += Int64(payload.count) }
guard codec.isSupported else {
return
}
var duration = Int64(timestamp)
switch type {
case .zero:
if stream.audioTimestampZero == -1 {
stream.audioTimestampZero = Double(timestamp)
}
duration -= Int64(stream.audioTimestamp)
stream.audioTimestamp = Double(timestamp) - stream.audioTimestampZero
default:
stream.audioTimestamp += Double(timestamp)
}
switch payload[1] {
case FLVAACPacketType.seq.rawValue:
let config = AudioSpecificConfig(bytes: [UInt8](payload[codec.headerSize..<payload.count]))
stream.mixer.audioIO.setAudioStreamBasicDescription(config?.audioStreamBasicDescription())
case FLVAACPacketType.raw.rawValue:
if stream.mixer.audioIO.inputFormat == nil {
stream.mixer.audioIO.setAudioStreamBasicDescription(makeAudioStreamBasicDescription())
}
if let audioBuffer = makeAudioBuffer(stream) {
stream.mixer.audioIO.append(audioBuffer, when: .init(hostTime: UInt64(stream.audioTimestamp)))
}
default:
break
}
stream.muxer.append(self, type: type)
}

private func makeAudioBuffer(_ stream: RTMPStream) -> AVAudioBuffer? {
return payload.withUnsafeMutableBytes { (buffer: UnsafeMutableRawBufferPointer) -> AVAudioBuffer? in
guard let baseAddress = buffer.baseAddress, let buffer = stream.mixer.audioIO.inputBuffer as? AVAudioCompressedBuffer else {
return nil
}
let byteCount = payload.count - codec.headerSize
buffer.packetDescriptions?.pointee = AudioStreamPacketDescription(mStartOffset: 0, mVariableFramesInPacket: 0, mDataByteSize: UInt32(byteCount))
buffer.packetCount = 1
buffer.byteLength = UInt32(byteCount)
buffer.data.copyMemory(from: baseAddress.advanced(by: codec.headerSize), byteCount: byteCount)
return buffer
func makeAudioFormat() -> AVAudioFormat? {
guard var audioStreamBasicDescription = codec.audioStreamBasicDescription(soundRate, size: soundSize, type: soundType) else {
return nil
}
}

private func makeAudioStreamBasicDescription() -> AudioStreamBasicDescription? {
return codec.audioStreamBasicDescription(soundRate, size: soundSize, type: soundType)
return AVAudioFormat(streamDescription: &audioStreamBasicDescription)
}
}

Expand Down
81 changes: 65 additions & 16 deletions Sources/RTMP/RTMPMuxer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,12 @@ final class RTMPMuxer {
var buffer = Data([RTMPMuxer.aac, FLVAACPacketType.seq.rawValue])
buffer.append(contentsOf: AudioSpecificConfig(formatDescription: audioFormat.formatDescription).bytes)
stream?.outputAudio(buffer, withTimestamp: 0)
case .playing:
if let audioFormat {
audioBuffer = AVAudioCompressedBuffer(format: audioFormat, packetCapacity: 1, maximumPacketSize: 1024 * Int(audioFormat.channelCount))
} else {
audioBuffer = nil
}
default:
break
}
Expand Down Expand Up @@ -53,33 +59,55 @@ final class RTMPMuxer {

var isRunning: Atomic<Bool> = .init(false)
private var videoTimeStamp: CMTime = .zero
private var audioBuffer: AVAudioCompressedBuffer?
private var audioTimeStamp: AVAudioTime = .init(hostTime: 0)
private let compositiionTimeOffset: CMTime = .init(value: 3, timescale: 30)
private weak var stream: RTMPStream?

init(_ stream: RTMPStream) {
self.stream = stream
}
}

extension RTMPMuxer: Running {
// MARK: Running
func startRunning() {
guard !isRunning.value else {
func append(_ message: RTMPAudioMessage, type: RTMPChunkType) {
let payload = message.payload
let codec = message.codec
stream?.info.byteCount.mutate { $0 += Int64(payload.count) }
guard let stream, message.codec.isSupported else {
return
}
audioTimeStamp = .init(hostTime: 0)
videoTimeStamp = .zero
audioFormat = nil
videoFormat = nil
isRunning.mutate { $0 = true }
}

func stopRunning() {
guard isRunning.value else {
return
var duration = Int64(message.timestamp)
switch type {
case .zero:
if stream.audioTimestampZero == -1 {
stream.audioTimestampZero = Double(message.timestamp)
}
duration -= Int64(stream.audioTimestamp)
stream.audioTimestamp = Double(message.timestamp) - stream.audioTimestampZero
default:
stream.audioTimestamp += Double(message.timestamp)
}
switch payload[1] {
case FLVAACPacketType.seq.rawValue:
let config = AudioSpecificConfig(bytes: [UInt8](payload[message.codec.headerSize..<payload.count]))
stream.muxer.audioFormat = config?.makeAudioFormat()
case FLVAACPacketType.raw.rawValue:
if audioFormat == nil {
audioFormat = message.makeAudioFormat()
}
payload.withUnsafeBytes { (buffer: UnsafeRawBufferPointer) -> Void in
guard let baseAddress = buffer.baseAddress, let audioBuffer else {
return
}
let byteCount = payload.count - codec.headerSize
audioBuffer.packetDescriptions?.pointee = AudioStreamPacketDescription(mStartOffset: 0, mVariableFramesInPacket: 0, mDataByteSize: UInt32(byteCount))
audioBuffer.packetCount = 1
audioBuffer.byteLength = UInt32(byteCount)
audioBuffer.data.copyMemory(from: baseAddress.advanced(by: codec.headerSize), byteCount: byteCount)
stream.mixer.audioIO.append(audioBuffer, when: .init(hostTime: UInt64(stream.audioTimestamp)))
}
default:
break
}
isRunning.mutate { $0 = false }
}
}

Expand Down Expand Up @@ -132,3 +160,24 @@ extension RTMPMuxer: IOMuxer {
return Int32((sampleBuffer.presentationTimeStamp - videoTimeStamp + compositiionTimeOffset).seconds * 1000)
}
}

/// Running conformance: start/stop lifecycle for the muxer.
extension RTMPMuxer: Running {
// MARK: Running
/// Resets timestamps and cached formats, then marks the muxer running.
/// No-op if already running.
func startRunning() {
guard !isRunning.value else {
return
}
// Reset per-session state so a restart does not reuse stale
// timestamps or formats from the previous run.
audioTimeStamp = .init(hostTime: 0)
videoTimeStamp = .zero
audioFormat = nil
videoFormat = nil
isRunning.mutate { $0 = true }
}

/// Marks the muxer stopped. No-op if not running. State reset is
/// deferred to the next `startRunning()` call.
func stopRunning() {
guard isRunning.value else {
return
}
isRunning.mutate { $0 = false }
}
}

0 comments on commit aaad70f

Please sign in to comment.