Replace AVAudioEngine with Audio Units; Temporarily add file audio player
levs42 committed Mar 2, 2024
1 parent 3d9ce54 commit 70d960e
Showing 8 changed files with 351 additions and 63 deletions.
13 changes: 13 additions & 0 deletions Examples/iOS/IngestViewController.swift
@@ -32,6 +32,7 @@ final class IngestViewController: UIViewController {
audioCapture.delegate = self
return audioCapture
}()
private let audioPlayer = AudioFilePlayer()

override func viewDidLoad() {
super.viewDidLoad()
@@ -55,6 +56,12 @@ final class IngestViewController: UIViewController {
videoBitrateSlider?.value = Float(VideoCodecSettings.default.bitRate) / 1000
audioBitrateSlider?.value = Float(AudioCodecSettings.default.bitRate) / 1000

audioPlayer.delegate = self

DispatchQueue.main.asyncAfter(deadline: .now() + 10) {
self.audioPlayer.play()
}

NotificationCenter.default.addObserver(self, selector: #selector(on(_:)), name: UIDevice.orientationDidChangeNotification, object: nil)
}

@@ -370,3 +377,9 @@ extension IngestViewController: UIPickerViewDataSource {
return AVAudioSession.sharedInstance().preferredInput?.dataSources?[row].dataSourceName ?? ""
}
}

extension IngestViewController: AudioFilePlayerDelegate {
func didReceiveAudioSample(_ buffer: AVAudioPCMBuffer, time: AVAudioTime) {
stream.append(buffer, channel: 1, when: time)
}
}
9 changes: 3 additions & 6 deletions Examples/iOS/Screencast/SampleHandler.swift
@@ -18,8 +18,6 @@ open class SampleHandler: RPBroadcastSampleHandler {
RTMPStream(connection: rtmpConnection)
}()

private var isMirophoneOn = false

deinit {
rtmpConnection.removeEventListener(.ioError, selector: #selector(rtmpErrorHandler), observer: self)
rtmpConnection.removeEventListener(.rtmpStatus, selector: #selector(rtmpStatusEvent), observer: self)
@@ -47,13 +45,12 @@ open class SampleHandler: RPBroadcastSampleHandler {
}
rtmpStream.append(sampleBuffer)
case .audioMic:
isMirophoneOn = true
if CMSampleBufferDataIsReady(sampleBuffer) {
rtmpStream.append(sampleBuffer)
rtmpStream.append(sampleBuffer, channel: 0)
}
case .audioApp:
if !isMirophoneOn && CMSampleBufferDataIsReady(sampleBuffer) {
rtmpStream.append(sampleBuffer)
if CMSampleBufferDataIsReady(sampleBuffer) {
rtmpStream.append(sampleBuffer, channel: 1)
}
@unknown default:
break
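
For reference, the change above stops gating app audio behind a microphone flag and instead feeds both ReplayKit audio sources into separate mixer channels. A minimal sketch of that routing, assuming the channel-based append(_:channel:) call shown in this diff (the BufferRouter type itself is hypothetical and not part of the commit):

import CoreMedia
import HaishinKit
import ReplayKit

// Hypothetical helper, not part of this commit: pulls the channel routing
// above out of SampleHandler so the mapping is easy to see.
final class BufferRouter {
    private let stream: RTMPStream

    init(stream: RTMPStream) {
        self.stream = stream
    }

    func route(_ sampleBuffer: CMSampleBuffer, of type: RPSampleBufferType) {
        switch type {
        case .video:
            stream.append(sampleBuffer)
        case .audioMic:
            if CMSampleBufferDataIsReady(sampleBuffer) {
                stream.append(sampleBuffer, channel: 0) // microphone -> mixer channel 0
            }
        case .audioApp:
            if CMSampleBufferDataIsReady(sampleBuffer) {
                stream.append(sampleBuffer, channel: 1) // app audio -> mixer channel 1
            }
        @unknown default:
            break
        }
    }
}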
71 changes: 71 additions & 0 deletions Examples/macOS/AudioFilePlayer.swift
@@ -0,0 +1,71 @@
import AVFoundation
import CoreMedia
import CoreAudio

protocol AudioFilePlayerDelegate: AnyObject {
    func didReceiveAudioSample(_ buffer: AVAudioPCMBuffer, time: AVAudioTime)
}

class AudioFilePlayer: NSObject {
    private var audioFile: AVAudioFile?
    private var audioEngine: AVAudioEngine = AVAudioEngine()
    private var audioPlayerNode: AVAudioPlayerNode = AVAudioPlayerNode()
    private var format: AVAudioFormat?
    weak var delegate: (any AudioFilePlayerDelegate)?

    override init() {
        super.init()
        loadAudioFile()
        setupAudioEngine()
    }

    private func loadAudioFile() {
        guard let fileURL = Bundle.main.url(forResource: "audio", withExtension: "wav") else {
            print("Audio file not found in bundle")
            return
        }

        do {
            let file = try AVAudioFile(forReading: fileURL)
            self.audioFile = file
            self.format = file.processingFormat
        } catch {
            print("Error loading audio file: \(error)")
        }
    }

    private func setupAudioEngine() {
        guard let format = self.format else {
            print("Audio format is nil")
            return
        }

        audioEngine.attach(audioPlayerNode)
        audioEngine.connect(audioPlayerNode, to: audioEngine.mainMixerNode, format: format)

        audioPlayerNode.volume = 0

        do {
            try audioEngine.start()
        } catch {
            print("Error starting audio engine: \(error)")
        }
    }

    func play() {
        guard let audioFile = self.audioFile, let format = self.format else {
            print("Audio file or format is nil")
            return
        }

        audioPlayerNode.scheduleFile(audioFile, at: nil) {
            // File finished playing
        }

        audioPlayerNode.installTap(onBus: 0, bufferSize: 1024, format: format) { [weak self] (buffer, time) in
            self?.delegate?.didReceiveAudioSample(buffer, time: time)
        }

        audioPlayerNode.play()
    }
}
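
For illustration, a minimal sketch of how a client drives this player, mirroring the delegate wiring added to the example view controllers in this commit. The AudioFeedTarget protocol is a hypothetical stand-in for the stream's append(_:channel:when:) call; only AudioFilePlayer and AudioFilePlayerDelegate come from the file above.

import AVFoundation

// Hypothetical stand-in for the stream API used by the example view controllers.
protocol AudioFeedTarget: AnyObject {
    func append(_ audioBuffer: AVAudioPCMBuffer, channel: UInt8, when: AVAudioTime)
}

// Hypothetical client: owns the player, receives tapped buffers, and forwards
// them to a secondary mixer channel, as the example view controllers do with channel 1.
final class FilePlaybackFeeder: AudioFilePlayerDelegate {
    private let player = AudioFilePlayer()
    private weak var target: (any AudioFeedTarget)?

    init(target: any AudioFeedTarget) {
        self.target = target
        player.delegate = self // AudioFilePlayer holds its delegate weakly, so keep this feeder alive
    }

    func start() {
        player.play() // schedules the bundled audio.wav and installs the tap
    }

    func didReceiveAudioSample(_ buffer: AVAudioPCMBuffer, time: AVAudioTime) {
        target?.append(buffer, channel: 1, when: time)
    }
}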
15 changes: 13 additions & 2 deletions Examples/macOS/CameraIngestViewController.swift
@@ -22,6 +22,7 @@ final class CameraIngestViewController: NSViewController {
private var stream: IOStream {
return netStreamSwitcher.stream
}
private let audioPlayer = AudioFilePlayer()

override func viewDidLoad() {
super.viewDidLoad()
@@ -30,19 +31,23 @@ final class CameraIngestViewController: NSViewController {
cameraPopUpButton?.present(mediaType: .video)
netStreamSwitcher.uri = Preference.defaultInstance.uri ?? ""
lfView?.attachStream(stream)
audioPlayer.delegate = self
}

override func viewDidAppear() {
super.viewDidAppear()
// TODO: Implement output sample rate sync of resamplers in the audio mixer
stream.audioSettings = .init(sampleRate: 48000)
stream.attachAudio(DeviceUtil.device(withLocalizedName: audioPopUpButton.titleOfSelectedItem!, mediaType: .audio))
stream.attachCamera(DeviceUtil.device(withLocalizedName: cameraPopUpButton.titleOfSelectedItem!, mediaType: .video), channel: 0)
var devices = AVCaptureDevice.devices(for: .video)
devices.removeFirst()
if let device = devices.first {
stream.attachCamera(device, channel: 1)
}

DispatchQueue.main.asyncAfter(deadline: .now() + 5) {
self.stream.isMonitoringEnabled = true
self.audioPlayer.play()
}
}

@IBAction private func publishOrStop(_ sender: NSButton) {
@@ -76,3 +81,9 @@ final class CameraIngestViewController: NSViewController {
stream.attachCamera(device, channel: 0)
}
}

extension CameraIngestViewController: AudioFilePlayerDelegate {
func didReceiveAudioSample(_ buffer: AVAudioPCMBuffer, time: AVAudioTime) {
stream.append(buffer, channel: 1, when: time)
}
}
Binary file added Examples/macOS/audio.wav
Binary file not shown.
14 changes: 13 additions & 1 deletion HaishinKit.xcodeproj/project.pbxproj
@@ -130,7 +130,11 @@
2EC97B7227880FF400D8BE32 /* OnTapGestureView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2EC97B6E27880FF400D8BE32 /* OnTapGestureView.swift */; };
2EC97B7327880FF400D8BE32 /* Views.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2EC97B6F27880FF400D8BE32 /* Views.swift */; };
2EC97B7427880FF400D8BE32 /* MTHKSwiftUiView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2EC97B7027880FF400D8BE32 /* MTHKSwiftUiView.swift */; };
B327845D2B928BBD00F9A7F1 /* audio.wav in Resources */ = {isa = PBXBuildFile; fileRef = B3D687842B8573D800E6A28E /* audio.wav */; };
B327845E2B928BBD00F9A7F1 /* AudioFilePlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3D687862B85743200E6A28E /* AudioFilePlayer.swift */; };
B3D687822B80302B00E6A28E /* IOAudioMixer.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3D687812B80302B00E6A28E /* IOAudioMixer.swift */; };
B3D687852B8573D800E6A28E /* audio.wav in Resources */ = {isa = PBXBuildFile; fileRef = B3D687842B8573D800E6A28E /* audio.wav */; };
B3D687872B85743200E6A28E /* AudioFilePlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = B3D687862B85743200E6A28E /* AudioFilePlayer.swift */; };
BC0394562AA8A384006EDE38 /* Logboard.xcframework in Frameworks */ = {isa = PBXBuildFile; fileRef = BC34DFD125EBB12C005F975A /* Logboard.xcframework */; };
BC03945F2AA8AFF5006EDE38 /* ExpressibleByIntegerLiteral+ExtensionTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC03945E2AA8AFF5006EDE38 /* ExpressibleByIntegerLiteral+ExtensionTests.swift */; };
BC04A2D42AD2D1D700C87A3E /* AVAudioTime+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC04A2D32AD2D1D700C87A3E /* AVAudioTime+Extension.swift */; };
@@ -579,6 +583,8 @@
2EC97B6F27880FF400D8BE32 /* Views.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Views.swift; sourceTree = "<group>"; };
2EC97B7027880FF400D8BE32 /* MTHKSwiftUiView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MTHKSwiftUiView.swift; sourceTree = "<group>"; };
B3D687812B80302B00E6A28E /* IOAudioMixer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = IOAudioMixer.swift; sourceTree = "<group>"; };
B3D687842B8573D800E6A28E /* audio.wav */ = {isa = PBXFileReference; lastKnownFileType = audio.wav; path = audio.wav; sourceTree = "<group>"; };
B3D687862B85743200E6A28E /* AudioFilePlayer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioFilePlayer.swift; sourceTree = "<group>"; };
BC03945E2AA8AFF5006EDE38 /* ExpressibleByIntegerLiteral+ExtensionTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "ExpressibleByIntegerLiteral+ExtensionTests.swift"; sourceTree = "<group>"; };
BC04A2D32AD2D1D700C87A3E /* AVAudioTime+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioTime+Extension.swift"; sourceTree = "<group>"; };
BC04A2D52AD2D95500C87A3E /* CMTime+Extension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CMTime+Extension.swift"; sourceTree = "<group>"; };
@@ -978,6 +984,8 @@
296543541D62FE3E00734698 /* macOS */ = {
isa = PBXGroup;
children = (
B3D687842B8573D800E6A28E /* audio.wav */,
B3D687862B85743200E6A28E /* AudioFilePlayer.swift */,
BC3004FA296C3FC400119932 /* Extension */,
296543641D62FEB700734698 /* AppDelegate.swift */,
296543651D62FEB700734698 /* Assets.xcassets */,
@@ -1638,6 +1646,7 @@
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
B327845D2B928BBD00F9A7F1 /* audio.wav in Resources */,
291F4E381CF206E600F59C51 /* Icon.png in Resources */,
296897651CDB028C0074D5F0 /* Assets.xcassets in Resources */,
296897661CDB028C0074D5F0 /* LaunchScreen.storyboard in Resources */,
@@ -1650,6 +1659,7 @@
buildActionMask = 2147483647;
files = (
291619661E7EFB09009FB344 /* Main.storyboard in Resources */,
B3D687852B8573D800E6A28E /* audio.wav in Resources */,
2923A1F71D63005F0019FBCD /* Assets.xcassets in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
@@ -1955,6 +1965,7 @@
296897681CDB02940074D5F0 /* IngestViewController.swift in Sources */,
BCE0E33D2AD369550082C16F /* NetStreamSwitcher.swift in Sources */,
BCFB355524FA27EA00DC5108 /* PlaybackViewController.swift in Sources */,
B327845E2B928BBD00F9A7F1 /* AudioFilePlayer.swift in Sources */,
2955F51F1D09EBAD004CC995 /* VisualEffect.swift in Sources */,
295074311E462105007F15A4 /* PreferenceViewController.swift in Sources */,
);
@@ -1965,6 +1976,7 @@
buildActionMask = 2147483647;
files = (
BC3004F9296C351D00119932 /* PlaybackViewController.swift in Sources */,
B3D687872B85743200E6A28E /* AudioFilePlayer.swift in Sources */,
2923A1F81D6300650019FBCD /* AppDelegate.swift in Sources */,
BC3004F7296C20DF00119932 /* NSStoryboard.Name+Extension.swift in Sources */,
2923A1F41D6300510019FBCD /* MainWindowController.swift in Sources */,
@@ -2339,7 +2351,7 @@
FRAMEWORK_SEARCH_PATHS = "$(inherited)";
INFOPLIST_FILE = "$(SRCROOT)/Platforms/iOS/Info.plist";
INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
IPHONEOS_DEPLOYMENT_TARGET = 12.0;
IPHONEOS_DEPLOYMENT_TARGET = 13.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",