Skip to content

Commit

Permalink
Merge pull request #1422 from shogo4405/feature/multi-audio-track-attach
Browse files Browse the repository at this point in the history
Added multi-capture functionality for microphones.
  • Loading branch information
shogo4405 authored Apr 15, 2024
2 parents 6ab05b3 + 11c3025 commit aeff5de
Show file tree
Hide file tree
Showing 13 changed files with 107 additions and 50 deletions.
4 changes: 2 additions & 2 deletions Examples/iOS/IngestViewController.swift
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ final class IngestViewController: UIViewController {
logger.warn(error)
}
}
stream.attachAudio(AVCaptureDevice.default(for: .audio), automaticallyConfiguresApplicationAudioSession: false) { error in
stream.attachAudio(AVCaptureDevice.default(for: .audio)) { _, error in
logger.warn(error)
}
let front = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
Expand Down Expand Up @@ -350,7 +350,7 @@ extension IngestViewController: UIPickerViewDelegate {
} catch {
logger.warn("can't set supported setPreferredDataSource")
}
stream.attachAudio(AVCaptureDevice.default(for: .audio), automaticallyConfiguresApplicationAudioSession: false) { error in
stream.attachAudio(AVCaptureDevice.default(for: .audio)) { _, error in
logger.warn(error)
}
}
Expand Down
2 changes: 1 addition & 1 deletion Examples/iOS/Screencast/SampleHandler.swift
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ open class SampleHandler: RPBroadcastSampleHandler {
}()

private lazy var rtmpStream: RTMPStream = {
FeatureUtil.setEnabled(feature: .multiTrackAudioMixing, isEnabled: true)
FeatureUtil.setEnabled(for: .multiTrackAudioMixing, isEnabled: true)
return RTMPStream(connection: rtmpConnection)
}()

Expand Down
10 changes: 7 additions & 3 deletions Examples/iOSSwiftUI/Model/ViewModel.swift
Original file line number Diff line number Diff line change
Expand Up @@ -93,11 +93,15 @@ final class ViewModel: ObservableObject {
}

func registerForPublishEvent() {
rtmpStream.attachAudio(AVCaptureDevice.default(for: .audio)) { error in
logger.error(error)
rtmpStream.attachAudio(AVCaptureDevice.default(for: .audio)) { _, error in
if let error {
logger.error(error)
}
}
rtmpStream.attachCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: currentPosition)) { _, error in
logger.error(error)
if let error {
logger.error(error)
}
}
rtmpStream.publisher(for: \.currentFPS)
.sink { [weak self] currentFPS in
Expand Down
1 change: 1 addition & 0 deletions Examples/macOS/AppDelegate.swift
Original file line number Diff line number Diff line change
Expand Up @@ -10,5 +10,6 @@ class AppDelegate: NSObject, NSApplicationDelegate {

func applicationDidFinishLaunching(_ aNotification: Notification) {
LBLogger.with(HaishinKitIdentifier).level = .info
FeatureUtil.setEnabled(for: .multiTrackAudioMixing, isEnabled: true)
}
}
13 changes: 10 additions & 3 deletions Examples/macOS/CameraIngestViewController.swift
Original file line number Diff line number Diff line change
Expand Up @@ -35,10 +35,17 @@ final class CameraIngestViewController: NSViewController {
override func viewDidAppear() {
super.viewDidAppear()
stream.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.titleOfSelectedItem!, mediaType: .audio))

var audios = AVCaptureDevice.devices(for: .audio)
audios.removeFirst()
if let device = audios.first, FeatureUtil.isEnabled(for: .multiTrackAudioMixing) {
stream.attachAudio(device, track: 1)
}

stream.attachCamera(DeviceUtil.device(withLocalizedName: cameraPopUpButton.titleOfSelectedItem!, mediaType: .video), track: 0)
var devices = AVCaptureDevice.devices(for: .video)
devices.removeFirst()
if let device = devices.first {
var videos = AVCaptureDevice.devices(for: .video)
videos.removeFirst()
if let device = videos.first {
stream.attachCamera(device, track: 1)
}
}
Expand Down
6 changes: 4 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -181,8 +181,10 @@ do {
let connection = RTMPConnection()
let stream = RTMPStream(connection: connection)

stream.attachAudio(AVCaptureDevice.default(for: .audio)) { error in
// print(error)
stream.attachAudio(AVCaptureDevice.default(for: .audio)) { _, error in
if let error {
logger.warn(error)
}
}

stream.attachCamera(AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back), track: 0) { _, error in
Expand Down
31 changes: 18 additions & 13 deletions Sources/IO/IOAudioCaptureUnit.swift
Original file line number Diff line number Diff line change
Expand Up @@ -2,24 +2,31 @@
import AVFoundation
import Foundation

/// Configuration callback block for IOAudioCaptureUnit.
@available(tvOS 17.0, *)
final class IOAudioCaptureUnit: IOCaptureUnit {
typealias Output = AVCaptureAudioDataOutput

let track: UInt8
private(set) var device: AVCaptureDevice?
var input: AVCaptureInput?
var output: Output?
var connection: AVCaptureConnection?
public typealias IOAudioCaptureConfigurationBlock = (IOAudioCaptureUnit?, IOAudioUnitError?) -> Void

/// An object that provides the interface to control the AVCaptureDevice's transport behavior.
@available(tvOS 17.0, *)
public final class IOAudioCaptureUnit: IOCaptureUnit {
public typealias Output = AVCaptureAudioDataOutput

public let track: UInt8
public private(set) var input: AVCaptureInput?
public private(set) var device: AVCaptureDevice?
public private(set) var output: Output? {
didSet {
oldValue?.setSampleBufferDelegate(nil, queue: nil)
}
}
public private(set) var connection: AVCaptureConnection?
private var dataOutput: IOAudioCaptureUnitDataOutput?

init(_ track: UInt8) {
self.track = track
}

func attachDevice(_ device: AVCaptureDevice?, audioUnit: IOAudioUnit) throws {
setSampleBufferDelegate(nil)
audioUnit.mixer?.session.detachCapture(self)
func attachDevice(_ device: AVCaptureDevice?) throws {
guard let device else {
self.device = nil
input = nil
Expand All @@ -29,8 +36,6 @@ final class IOAudioCaptureUnit: IOCaptureUnit {
self.device = device
input = try AVCaptureDeviceInput(device: device)
output = AVCaptureAudioDataOutput()
audioUnit.mixer?.session.attachCapture(self)
setSampleBufferDelegate(audioUnit)
}

func setSampleBufferDelegate(_ audioUnit: IOAudioUnit?) {
Expand Down
22 changes: 12 additions & 10 deletions Sources/IO/IOAudioUnit.swift
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@ import SwiftPMSupport

/// The IOAudioUnit error domain codes.
public enum IOAudioUnitError: Swift.Error {
/// The IOAudioUnit failed to attach device.
case failedToAttach(error: (any Error)?)
/// The IOAudioUnit failed to create the AVAudioConverter.
case failedToCreate(from: AVAudioFormat?, to: AVAudioFormat?)
/// The IOAudioUnit failed to convert an audio buffer.
Expand Down Expand Up @@ -49,7 +51,7 @@ final class IOAudioUnit: IOUnit<IOAudioCaptureUnit> {
return codec
}()
private lazy var audioMixer: any IOAudioMixerConvertible = {
if FeatureUtil.isEnabled(feature: .multiTrackAudioMixing) {
if FeatureUtil.isEnabled(for: .multiTrackAudioMixing) {
var audioMixer = IOAudioMixerConvertibleByMultiTrack()
audioMixer.delegate = self
return audioMixer
Expand All @@ -63,18 +65,18 @@ final class IOAudioUnit: IOUnit<IOAudioCaptureUnit> {

#if os(iOS) || os(macOS) || os(tvOS)
@available(tvOS 17.0, *)
func attachAudio(_ device: AVCaptureDevice?, automaticallyConfiguresApplicationAudioSession: Bool) throws {
try mixer?.session.configuration { session in
func attachAudio(_ device: AVCaptureDevice?, track: UInt8, configuration: (_ capture: IOAudioCaptureUnit?) -> Void) throws {
try mixer?.session.configuration { _ in
mixer?.session.detachCapture(captures[track])
guard let device else {
try captures[0]?.attachDevice(nil, audioUnit: self)
inputFormat = nil
try captures[track]?.attachDevice(nil)
return
}
let capture = capture(for: 0)
try capture?.attachDevice(device, audioUnit: self)
#if os(iOS)
session.automaticallyConfiguresApplicationAudioSession = automaticallyConfiguresApplicationAudioSession
#endif
let capture = capture(for: track)
try capture?.attachDevice(device)
configuration(capture)
capture?.setSampleBufferDelegate(self)
mixer?.session.attachCapture(capture)
}
}
#endif
Expand Down
10 changes: 8 additions & 2 deletions Sources/IO/IOCaptureSession.swift
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,10 @@ final class IOCaptureSession {
}

@available(tvOS 17.0, *)
func attachCapture(_ capture: any IOCaptureUnit) {
func attachCapture(_ capture: (any IOCaptureUnit)?) {
guard let capture else {
return
}
#if !os(visionOS)
if let connection = capture.connection {
if let input = capture.input, session.canAddInput(input) {
Expand All @@ -151,7 +154,10 @@ final class IOCaptureSession {
}

@available(tvOS 17.0, *)
func detachCapture(_ capture: any IOCaptureUnit) {
func detachCapture(_ capture: (any IOCaptureUnit)?) {
guard let capture else {
return
}
#if !os(visionOS)
if let connection = capture.connection {
if capture.output?.connections.contains(connection) == true {
Expand Down
8 changes: 5 additions & 3 deletions Sources/IO/IOCaptureUnit.swift
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,11 @@ import Foundation
protocol IOCaptureUnit {
associatedtype Output: AVCaptureOutput

var input: AVCaptureInput? { get set }
var output: Output? { get set }
var connection: AVCaptureConnection? { get set }
var track: UInt8 { get }
var input: AVCaptureInput? { get }
var output: Output? { get }
var device: AVCaptureDevice? { get }
var connection: AVCaptureConnection? { get }

init(_ track: UInt8)
}
34 changes: 31 additions & 3 deletions Sources/IO/IOStream.swift
Original file line number Diff line number Diff line change
Expand Up @@ -333,16 +333,38 @@ open class IOStream: NSObject {

#if os(iOS) || os(macOS) || os(tvOS)
/// Attaches the audio device.
///
/// You can perform multi-microphone capture on macOS by specifying devices as follows. Unfortunately, it seems that only one microphone is available on iOS.
/// ```
/// FeatureUtil.setEnabled(for: .multiTrackAudioMixing, isEnabled: true)
/// var audios = AVCaptureDevice.devices(for: .audio)
/// if !audios.isEmpty {
/// stream.attachAudio(audios.removeFirst(), track: 0)
/// }
/// if !audios.isEmpty {
/// stream.attachAudio(audios.removeFirst(), track: 1)
/// }
/// ```
@available(tvOS 17.0, *)
public func attachAudio(_ device: AVCaptureDevice?, automaticallyConfiguresApplicationAudioSession: Bool = false, onError: ((_ error: any Error) -> Void)? = nil) {
public func attachAudio(_ device: AVCaptureDevice?, track: UInt8 = 0, configuration: IOAudioCaptureConfigurationBlock? = nil) {
lockQueue.async {
do {
try self.mixer.audioIO.attachAudio(device, automaticallyConfiguresApplicationAudioSession: automaticallyConfiguresApplicationAudioSession)
try self.mixer.audioIO.attachAudio(device, track: track) { capture in
configuration?(capture, nil)
}
} catch {
onError?(error)
configuration?(nil, IOAudioUnitError.failedToAttach(error: error))
}
}
}

/// Returns the IOAudioCaptureUnit by track.
@available(tvOS 17.0, *)
public func audioCapture(for track: UInt8) -> IOAudioCaptureUnit? {
return mixer.audioIO.lockQueue.sync {
return self.mixer.audioIO.capture(for: track)
}
}
#endif

/// Appends a CMSampleBuffer.
Expand Down Expand Up @@ -404,6 +426,12 @@ open class IOStream: NSObject {
}
}

/// Configurations for the AVCaptureSession.
@available(tvOS 17.0, *)
func configuration(_ lambda: (_ session: AVCaptureSession) throws -> Void) rethrows {
try mixer.session.configuration(lambda)
}

/// A handler that receives stream readyState will update.
/// - Warning: Please do not call this method yourself.
open func readyStateWillChange(to readyState: ReadyState) {
Expand Down
12 changes: 6 additions & 6 deletions Sources/IO/IOVideoCaptureUnit.swift
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import AVFoundation
import Foundation

/// Configuration calback block for IOVideoUnit.
/// Configuration callback block for IOVideoCaptureUnit.
@available(tvOS 17.0, *)
public typealias IOVideoCaptureConfigurationBlock = (IOVideoCaptureUnit?, IOVideoUnitError?) -> Void

Expand All @@ -16,7 +16,7 @@ public final class IOVideoCaptureUnit: IOCaptureUnit {
public static let colorFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
#endif

typealias Output = AVCaptureVideoDataOutput
public typealias Output = AVCaptureVideoDataOutput

/// The current video device object.
public private(set) var device: AVCaptureDevice?
Expand Down Expand Up @@ -58,9 +58,9 @@ public final class IOVideoCaptureUnit: IOCaptureUnit {
}
#endif

let track: UInt8
var input: AVCaptureInput?
var output: Output? {
public let track: UInt8
public private(set) var input: AVCaptureInput?
public private(set) var output: Output? {
didSet {
guard let output else {
return
Expand All @@ -75,7 +75,7 @@ public final class IOVideoCaptureUnit: IOCaptureUnit {
#endif
}
}
var connection: AVCaptureConnection?
public private(set) var connection: AVCaptureConnection?
private var dataOutput: IOVideoCaptureUnitDataOutput?

init(_ track: UInt8) {
Expand Down
4 changes: 2 additions & 2 deletions Sources/Util/FeatureUtil.swift
Original file line number Diff line number Diff line change
Expand Up @@ -29,13 +29,13 @@ public enum FeatureUtil {
private static var flags: [String: Bool] = [:]

/// Whether or not a flag is enabled.
public static func isEnabled(feature: Name) -> Bool {
public static func isEnabled(for feature: Name) -> Bool {
return flags[feature.rawValue] ?? false
}

/// Setter for a feature flag.
public static func setEnabled(
feature: Name,
for feature: Name,
isEnabled: Bool
) {
flags[feature.rawValue] = isEnabled
Expand Down

0 comments on commit aeff5de

Please sign in to comment.