Skip to content

Commit

Permalink
Merge pull request #1319 from shogo4405/feature/muxer
Browse files Browse the repository at this point in the history
Refactoring of IOMixer.
  • Loading branch information
shogo4405 authored Oct 11, 2023
2 parents d3824dc + 68ea543 commit d382fdb
Show file tree
Hide file tree
Showing 27 changed files with 414 additions and 342 deletions.
2 changes: 1 addition & 1 deletion Examples/iOS/IngestViewController.swift
Original file line number Diff line number Diff line change
Expand Up @@ -322,7 +322,7 @@ extension IngestViewController: IORecorderDelegate {
extension IngestViewController: AudioCaptureDelegate {
// MARK: AudioCaptureDelegate
func audioCapture(_ audioCapture: AudioCapture, buffer: AVAudioBuffer, time: AVAudioTime) {
stream.appendAudioBuffer(buffer, when: time)
stream.append(buffer, when: time)
}
}

Expand Down
4 changes: 2 additions & 2 deletions Examples/iOS/NetStreamSwitcher.swift
Original file line number Diff line number Diff line change
Expand Up @@ -150,11 +150,11 @@ extension NetStreamSwitcher: NetStreamDelegate {

#endif
/// Tells the receiver that a video codec error occurred.
func stream(_ stream: NetStream, videoCodecErrorOccurred error: VideoCodec.Error) {
func stream(_ stream: NetStream, videoErrorOccurred error: IOMixerVideoError) {
}

/// Tells the receiver that an audio codec error occurred.
func stream(_ stream: NetStream, audioCodecErrorOccurred error: HaishinKit.AudioCodec.Error) {
func stream(_ stream: NetStream, audioErrorOccurred error: IOMixerAudioError) {
}

/// Tells the receiver that the stream opened.
Expand Down
6 changes: 3 additions & 3 deletions Examples/iOS/Screencast/SampleHandler.swift
Original file line number Diff line number Diff line change
Expand Up @@ -45,15 +45,15 @@ open class SampleHandler: RPBroadcastSampleHandler {
rtmpStream.videoSettings.videoSize = .init(width: CGFloat(dimensions.width), height: CGFloat(dimensions.height))
rtmpStream.videoSettings.profileLevel = kVTProfileLevel_H264_Baseline_AutoLevel as String
}
rtmpStream.appendSampleBuffer(sampleBuffer)
rtmpStream.append(sampleBuffer)
case .audioMic:
isMirophoneOn = true
if CMSampleBufferDataIsReady(sampleBuffer) {
rtmpStream.appendSampleBuffer(sampleBuffer)
rtmpStream.append(sampleBuffer)
}
case .audioApp:
if !isMirophoneOn && CMSampleBufferDataIsReady(sampleBuffer) {
rtmpStream.appendSampleBuffer(sampleBuffer)
rtmpStream.append(sampleBuffer)
}
@unknown default:
break
Expand Down
6 changes: 5 additions & 1 deletion HaishinKit.xcodeproj/project.pbxproj
Original file line number Diff line number Diff line change
Expand Up @@ -175,6 +175,7 @@
BC3802142AB5E7CC001AE399 /* IOAudioCaptureUnit.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC3802132AB5E7CC001AE399 /* IOAudioCaptureUnit.swift */; };
BC3802192AB6AD79001AE399 /* IOAudioResamplerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC3802182AB6AD79001AE399 /* IOAudioResamplerTests.swift */; };
BC3E384429C216BB007CD972 /* ADTSReaderTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC3E384329C216BB007CD972 /* ADTSReaderTests.swift */; };
BC4078C42AD5CC7E00BBB4FA /* IOMuxer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC4078C32AD5CC7E00BBB4FA /* IOMuxer.swift */; };
BC4914A228DDD33D009E2DF6 /* VTSessionConvertible.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC4914A128DDD33D009E2DF6 /* VTSessionConvertible.swift */; };
BC4914A628DDD367009E2DF6 /* VTSessionOption.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC4914A528DDD367009E2DF6 /* VTSessionOption.swift */; };
BC4914AE28DDF445009E2DF6 /* VTDecompressionSession+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC4914AD28DDF445009E2DF6 /* VTDecompressionSession+Extension.swift */; };
Expand Down Expand Up @@ -586,6 +587,7 @@
BC3802132AB5E7CC001AE399 /* IOAudioCaptureUnit.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOAudioCaptureUnit.swift; sourceTree = "<group>"; };
BC3802182AB6AD79001AE399 /* IOAudioResamplerTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOAudioResamplerTests.swift; sourceTree = "<group>"; };
BC3E384329C216BB007CD972 /* ADTSReaderTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ADTSReaderTests.swift; sourceTree = "<group>"; };
BC4078C32AD5CC7E00BBB4FA /* IOMuxer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOMuxer.swift; sourceTree = "<group>"; };
BC4914A128DDD33D009E2DF6 /* VTSessionConvertible.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VTSessionConvertible.swift; sourceTree = "<group>"; };
BC4914A528DDD367009E2DF6 /* VTSessionOption.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VTSessionOption.swift; sourceTree = "<group>"; };
BC4914AD28DDF445009E2DF6 /* VTDecompressionSession+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "VTDecompressionSession+Extension.swift"; sourceTree = "<group>"; };
Expand Down Expand Up @@ -955,6 +957,7 @@
2968973F1CDB01AD0074D5F0 /* iOS */ = {
isa = PBXGroup;
children = (
29A39C801D85BEFA007C27E9 /* Screencast */,
296897411CDB01D20074D5F0 /* AppDelegate.swift */,
296897421CDB01D20074D5F0 /* Assets.xcassets */,
BC56765A2AD2AED800524F7E /* AudioCapture.swift */,
Expand All @@ -969,7 +972,6 @@
BCFB355324FA275600DC5108 /* PlaybackViewController.swift */,
291468161E581C7D00E619BA /* Preference.swift */,
2950742E1E4620B7007F15A4 /* PreferenceViewController.swift */,
29A39C801D85BEFA007C27E9 /* Screencast */,
BC1BC9032AC80531009005D3 /* VideoAdaptiveNetBitRateStrategy.swift */,
296897461CDB01D20074D5F0 /* VisualEffect.swift */,
);
Expand Down Expand Up @@ -1047,6 +1049,7 @@
BC1102492925147300D48035 /* IOCaptureUnit.swift */,
BC0F1FD92ACC4CC100C326FF /* IOCaptureVideoPreview.swift */,
29B8768B1CD70AFE00FC07DA /* IOMixer.swift */,
BC4078C32AD5CC7E00BBB4FA /* IOMuxer.swift */,
2976A47D1D48C5C700B53EF2 /* IORecorder.swift */,
BCA2252B293CC5B600DD7CB2 /* IOScreenCaptureUnit.swift */,
299B131C1D35272D00A1E8F5 /* IOUIScreenCaptureUnit.swift */,
Expand Down Expand Up @@ -1699,6 +1702,7 @@
29B876AF1CD70B2800FC07DA /* RTMPChunk.swift in Sources */,
29B876841CD70AE800FC07DA /* AVCDecoderConfigurationRecord.swift in Sources */,
296242621D8DB86500C451A3 /* TSWriter.swift in Sources */,
BC4078C42AD5CC7E00BBB4FA /* IOMuxer.swift in Sources */,
BC9CFA9323BDE8B700917EEF /* NetStreamDrawable.swift in Sources */,
29B8769B1CD70B1100FC07DA /* MIME.swift in Sources */,
BC1DC50E2A039E1900E928ED /* FLVVideoPacketType.swift in Sources */,
Expand Down
8 changes: 6 additions & 2 deletions SRTHaishinKit/SRTStream.swift
Original file line number Diff line number Diff line change
Expand Up @@ -55,14 +55,15 @@ public final class SRTStream: NetStream {
mixer.startDecoding()
readyState = .playing
case .publish:
mixer.muxer = writer
writer.expectedMedias.removeAll()
if videoInputFormat != nil {
writer.expectedMedias.insert(.video)
}
if audioInputFormat != nil {
writer.expectedMedias.insert(.audio)
}
mixer.startEncoding(writer)
mixer.startEncoding()
mixer.startRunning()
writer.startRunning()
readyState = .publishing
Expand Down Expand Up @@ -159,6 +160,9 @@ extension SRTStream: TSWriterDelegate {
}
connection?.socket?.doOutput(data: data)
}

public func writer(_ writer: TSWriter, didRotateFileHandle timestamp: CMTime) {
}
}

extension SRTStream: TSReaderDelegate {
Expand All @@ -179,6 +183,6 @@ extension SRTStream: TSReaderDelegate {
guard readyState == .playing else {
return
}
appendSampleBuffer(sampleBuffer)
append(sampleBuffer)
}
}
47 changes: 22 additions & 25 deletions Sources/Codec/AudioCodec.swift
Original file line number Diff line number Diff line change
Expand Up @@ -3,40 +3,34 @@ import AVFoundation
/**
* The interface an AudioCodec uses to inform its delegate.
*/
public protocol AudioCodecDelegate: AnyObject {
protocol AudioCodecDelegate: AnyObject {
/// Tells the receiver to output an AVAudioFormat.
func audioCodec(_ codec: AudioCodec, didOutput audioFormat: AVAudioFormat)
func audioCodec(_ codec: AudioCodec<Self>, didOutput audioFormat: AVAudioFormat)
/// Tells the receiver to output an encoded or decoded CMSampleBuffer.
func audioCodec(_ codec: AudioCodec, didOutput audioBuffer: AVAudioBuffer, when: AVAudioTime)
func audioCodec(_ codec: AudioCodec<Self>, didOutput audioBuffer: AVAudioBuffer, when: AVAudioTime)
/// Tells the receiver that an error occurred.
func audioCodec(_ codec: AudioCodec, errorOccurred error: AudioCodec.Error)
func audioCodec(_ codec: AudioCodec<Self>, errorOccurred error: IOMixerAudioError)
}

private let kAudioCodec_frameCamacity: UInt32 = 1024

// MARK: -
/**
* The AudioCodec translates audio data to another format.
* - seealso: https://developer.apple.com/library/ios/technotes/tn2236/_index.html
*/
public class AudioCodec {
private static let frameCapacity: UInt32 = 1024

/// The AudioCodec error domain codes.
public enum Error: Swift.Error {
case failedToCreate(from: AVAudioFormat?, to: AVAudioFormat?)
case failedToConvert(error: NSError)
}

final class AudioCodec<T: AudioCodecDelegate> {
/// Specifies the delegate.
public weak var delegate: (any AudioCodecDelegate)?
weak var delegate: T?
/// This instance is running to process(true) or not(false).
public private(set) var isRunning: Atomic<Bool> = .init(false)
private(set) var isRunning: Atomic<Bool> = .init(false)
/// Specifies the settings for audio codec.
public var settings: AudioCodecSettings = .default {
var settings: AudioCodecSettings = .default {
didSet {
settings.apply(audioConverter, oldValue: oldValue)
}
}
var lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.AudioCodec.lock")
let lockQueue: DispatchQueue
var inputFormat: AVAudioFormat? {
didSet {
guard inputFormat != oldValue else {
Expand All @@ -61,8 +55,11 @@ public class AudioCodec {
private var outputBuffers: [AVAudioBuffer] = []
private var audioConverter: AVAudioConverter?

/// Append a CMSampleBuffer.
public func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
init(lockQueue: DispatchQueue) {
self.lockQueue = lockQueue
}

func append(_ sampleBuffer: CMSampleBuffer) {
guard isRunning.value else {
return
}
Expand All @@ -81,7 +78,7 @@ public class AudioCodec {
buffer.byteLength = UInt32(byteCount)
if let blockBuffer = sampleBuffer.dataBuffer {
CMBlockBufferCopyDataBytes(blockBuffer, atOffset: offset + ADTSHeader.size, dataLength: byteCount, destination: buffer.data)
appendAudioBuffer(buffer, when: presentationTimeStamp.makeAudioTime())
append(buffer, when: presentationTimeStamp.makeAudioTime())
presentationTimeStamp = CMTimeAdd(presentationTimeStamp, CMTime(value: CMTimeValue(1024), timescale: sampleBuffer.presentationTimeStamp.timescale))
offset += sampleSize
}
Expand All @@ -91,7 +88,7 @@ public class AudioCodec {
}
}

func appendAudioBuffer(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) {
func append(_ audioBuffer: AVAudioBuffer, when: AVAudioTime) {
guard let audioConverter, isRunning.value else {
return
}
Expand Down Expand Up @@ -131,8 +128,8 @@ public class AudioCodec {
}
switch inputFormat.formatDescription.audioStreamBasicDescription?.mFormatID {
case kAudioFormatLinearPCM:
let buffer = AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: Self.frameCapacity)
buffer?.frameLength = Self.frameCapacity
let buffer = AVAudioPCMBuffer(pcmFormat: inputFormat, frameCapacity: kAudioCodec_frameCamacity)
buffer?.frameLength = kAudioCodec_frameCamacity
return buffer
default:
return AVAudioCompressedBuffer(format: inputFormat, packetCapacity: 1, maximumPacketSize: 1024)
Expand Down Expand Up @@ -184,7 +181,7 @@ extension AudioCodec: Codec {

extension AudioCodec: Running {
// MARK: Running
public func startRunning() {
func startRunning() {
lockQueue.async {
guard !self.isRunning.value else {
return
Expand All @@ -197,7 +194,7 @@ extension AudioCodec: Running {
}
}

public func stopRunning() {
func stopRunning() {
lockQueue.async {
guard self.isRunning.value else {
return
Expand Down
44 changes: 17 additions & 27 deletions Sources/Codec/VideoCodec.swift
Original file line number Diff line number Diff line change
Expand Up @@ -8,44 +8,32 @@ import UIKit
/**
* The interface a VideoCodec uses to inform its delegate.
*/
public protocol VideoCodecDelegate: AnyObject {
protocol VideoCodecDelegate: AnyObject {
/// Tells the receiver to set a formatDescription.
func videoCodec(_ codec: VideoCodec, didOutput formatDescription: CMFormatDescription?)
/// Tells the receiver to output an encoded or decoded sampleBuffer.
func videoCodec(_ codec: VideoCodec, didOutput sampleBuffer: CMSampleBuffer)
/// Tells the receiver that an error occurred.
func videoCodec(_ codec: VideoCodec, errorOccurred error: VideoCodec.Error)
func videoCodec(_ codec: VideoCodec, errorOccurred error: IOMixerVideoError)
}

// MARK: -
/**
* The VideoCodec class provides methods for encode or decode for video.
*/
public final class VideoCodec {
static let defaultFrameInterval = 0.0

/**
* The VideoCodec error domain codes.
*/
public enum Error: Swift.Error {
/// The VideoCodec failed to create the VTSession.
case failedToCreate(status: OSStatus)
/// The VideoCodec failed to prepare the VTSession.
case failedToPrepare(status: OSStatus)
/// The VideoCodec failed to encode or decode a flame.
case failedToFlame(status: OSStatus)
/// The VideoCodec failed to set an option.
case failedToSetOption(status: OSStatus, option: VTSessionOption)
}
final class VideoCodec {
private static let defaultFrameInterval: Double = 0.0

/// The videoCodec's attributes value.
public static var defaultAttributes: [NSString: AnyObject]? = [
static var defaultAttributes: [NSString: AnyObject]? = [
kCVPixelBufferIOSurfacePropertiesKey: NSDictionary(),
kCVPixelBufferMetalCompatibilityKey: kCFBooleanTrue
]

let lockQueue: DispatchQueue

/// Specifies the settings for a VideoCodec.
public var settings: VideoCodecSettings = .default {
var settings: VideoCodecSettings = .default {
didSet {
let invalidateSession = settings.invalidateSession(oldValue)
if invalidateSession {
Expand All @@ -57,9 +45,7 @@ public final class VideoCodec {
}

/// The running value indicating whether the VideoCodec is running.
public private(set) var isRunning: Atomic<Bool> = .init(false)

var lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.VideoCodec.lock")
private(set) var isRunning: Atomic<Bool> = .init(false)
var needsSync: Atomic<Bool> = .init(true)
var attributes: [NSString: AnyObject]? {
guard VideoCodec.defaultAttributes != nil else {
Expand Down Expand Up @@ -103,7 +89,11 @@ public final class VideoCodec {
private var invalidateSession = true
private var presentationTimeStamp: CMTime = .invalid

func appendImageBuffer(_ imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime, duration: CMTime) {
init(lockQueue: DispatchQueue) {
self.lockQueue = lockQueue
}

func append(_ imageBuffer: CVImageBuffer, presentationTimeStamp: CMTime, duration: CMTime) {
guard isRunning.value, !willDropFrame(presentationTimeStamp) else {
return
}
Expand All @@ -125,7 +115,7 @@ public final class VideoCodec {
}
}

func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
func append(_ sampleBuffer: CMSampleBuffer) {
inputFormat = sampleBuffer.formatDescription
guard isRunning.value else {
return
Expand Down Expand Up @@ -214,7 +204,7 @@ public final class VideoCodec {

extension VideoCodec: Running {
// MARK: Running
public func startRunning() {
func startRunning() {
lockQueue.async {
#if os(iOS) || os(tvOS) || os(visionOS)
NotificationCenter.default.addObserver(
Expand All @@ -235,7 +225,7 @@ extension VideoCodec: Running {
}
}

public func stopRunning() {
func stopRunning() {
lockQueue.async {
self.isRunning.mutate { $0 = false }
self.session = nil
Expand Down
Loading

0 comments on commit d382fdb

Please sign in to comment.