From c74e21f2e8ec1ae80db7a85bf20e2012d1b92877 Mon Sep 17 00:00:00 2001 From: shogo4405 Date: Sun, 7 Apr 2024 14:58:59 +0900 Subject: [PATCH] IOStreamObserver and confirmed IOStreamRecorder. --- HaishinKit.xcodeproj/project.pbxproj | 12 ++-- Package.resolved | 18 ----- README.md | 16 +++-- Sources/IO/IOMixer.swift | 3 - Sources/IO/IOStream.swift | 54 +++++++------- Sources/IO/IOStreamObserver.swift | 11 +++ Sources/IO/IOStreamRecorder.swift | 72 +++++-------------- Sources/IO/IOVideoUnit.swift | 4 -- ...ests.swift => IOStreamRecorderTests.swift} | 6 +- .../HEVCDecoderConfigurationRecordTests.swift | 2 +- 10 files changed, 79 insertions(+), 119 deletions(-) create mode 100644 Sources/IO/IOStreamObserver.swift rename Tests/IO/{IORecorderTests.swift => IOStreamRecorderTests.swift} (92%) diff --git a/HaishinKit.xcodeproj/project.pbxproj b/HaishinKit.xcodeproj/project.pbxproj index 35f882e58..6ad33967e 100644 --- a/HaishinKit.xcodeproj/project.pbxproj +++ b/HaishinKit.xcodeproj/project.pbxproj @@ -226,7 +226,7 @@ BC959F1229717EDB0067BA97 /* PreferenceViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC959F1129717EDB0067BA97 /* PreferenceViewController.swift */; }; BC9CFA9323BDE8B700917EEF /* IOStreamView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC9CFA9223BDE8B700917EEF /* IOStreamView.swift */; }; BC9F9C7826F8C16600B01ED0 /* Choreographer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC9F9C7726F8C16600B01ED0 /* Choreographer.swift */; }; - BCA7C24F2A91AA0500882D85 /* IORecorderTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCA7C24E2A91AA0500882D85 /* IORecorderTests.swift */; }; + BCA7C24F2A91AA0500882D85 /* IOStreamRecorderTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCA7C24E2A91AA0500882D85 /* IOStreamRecorderTests.swift */; }; BCAD0C18263ED67F00ADFB80 /* SampleVideo_360x240_5mb@m4v.m3u8 in Resources */ = {isa = PBXBuildFile; fileRef = BCAD0C16263ED67F00ADFB80 /* SampleVideo_360x240_5mb@m4v.m3u8 */; }; BCAD0C19263ED67F00ADFB80 /* SampleVideo_360x240_5mb@m4v in Resources */ = {isa = PBXBuildFile; fileRef = BCAD0C17263ED67F00ADFB80 /* SampleVideo_360x240_5mb@m4v */; }; BCB976DF26107B5600C9A649 /* TSField.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCB976DE26107B5600C9A649 /* TSField.swift */; }; @@ -264,6 +264,7 @@ BCD63ADD26FDF34C0084842D /* HaishinKit.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 2945CBBD1B4BE66000104112 /* HaishinKit.framework */; platformFilter = ios; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; BCD63AE126FDF3500084842D /* Logboard.xcframework in Frameworks */ = {isa = PBXBuildFile; fileRef = BC34DFD125EBB12C005F975A /* Logboard.xcframework */; }; BCD63AE226FDF3500084842D /* Logboard.xcframework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = BC34DFD125EBB12C005F975A /* Logboard.xcframework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; + BCD8702B2BC266CD009E495B /* IOStreamObserver.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCD8702A2BC266CD009E495B /* IOStreamObserver.swift */; }; BCD91C0D2A700FF50033F9E1 /* IOAudioRingBufferTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCD91C0C2A700FF50033F9E1 /* IOAudioRingBufferTests.swift */; }; BCE0E33D2AD369550082C16F /* NetStreamSwitcher.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCE0E33B2AD369410082C16F /* NetStreamSwitcher.swift */; }; BCFB355524FA27EA00DC5108 /* PlaybackViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 
BCFB355324FA275600DC5108 /* PlaybackViewController.swift */; }; @@ -645,7 +646,7 @@ BC959F1129717EDB0067BA97 /* PreferenceViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreferenceViewController.swift; sourceTree = ""; }; BC9CFA9223BDE8B700917EEF /* IOStreamView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOStreamView.swift; sourceTree = ""; }; BC9F9C7726F8C16600B01ED0 /* Choreographer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Choreographer.swift; sourceTree = ""; }; - BCA7C24E2A91AA0500882D85 /* IORecorderTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IORecorderTests.swift; sourceTree = ""; }; + BCA7C24E2A91AA0500882D85 /* IOStreamRecorderTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOStreamRecorderTests.swift; sourceTree = ""; }; BCAD0C16263ED67F00ADFB80 /* SampleVideo_360x240_5mb@m4v.m3u8 */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = "SampleVideo_360x240_5mb@m4v.m3u8"; sourceTree = ""; }; BCAD0C17263ED67F00ADFB80 /* SampleVideo_360x240_5mb@m4v */ = {isa = PBXFileReference; lastKnownFileType = folder; path = "SampleVideo_360x240_5mb@m4v"; sourceTree = ""; }; BCB976DE26107B5600C9A649 /* TSField.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TSField.swift; sourceTree = ""; }; @@ -676,6 +677,7 @@ BCD63AB626FDF1250084842D /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = ""; }; BCD63AB826FDF12A0084842D /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; BCD63ABB26FDF12A0084842D /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; + BCD8702A2BC266CD009E495B /* IOStreamObserver.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOStreamObserver.swift; sourceTree = ""; }; BCD91C0C2A700FF50033F9E1 /* IOAudioRingBufferTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOAudioRingBufferTests.swift; sourceTree = ""; }; BCE0E33B2AD369410082C16F /* NetStreamSwitcher.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NetStreamSwitcher.swift; sourceTree = ""; }; BCFB355324FA275600DC5108 /* PlaybackViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlaybackViewController.swift; sourceTree = ""; }; @@ -1092,6 +1094,7 @@ BC4078C32AD5CC7E00BBB4FA /* IOMuxer.swift */, 29AF3FCE1D7C744C00E41212 /* IOStream.swift */, BC6692F22AC2F717009EC058 /* IOStreamBitRateStrategyConvertible.swift */, + BCD8702A2BC266CD009E495B /* IOStreamObserver.swift */, 2976A47D1D48C5C700B53EF2 /* IOStreamRecorder.swift */, BC9CFA9223BDE8B700917EEF /* IOStreamView.swift */, BCC4F4142AD6FC1100954EF5 /* IOTellyUnit.swift */, @@ -1208,7 +1211,7 @@ BC3802182AB6AD79001AE399 /* IOAudioResamplerTests.swift */, BCD91C0C2A700FF50033F9E1 /* IOAudioRingBufferTests.swift */, BC0BF4F429866FDE00D72CB4 /* IOMixerTests.swift */, - BCA7C24E2A91AA0500882D85 /* IORecorderTests.swift */, + BCA7C24E2A91AA0500882D85 /* IOStreamRecorderTests.swift */, ); path = IO; sourceTree = ""; @@ -1835,6 +1838,7 @@ BC1DC4A429F4F74F00E928ED /* AVCaptureSession+Extension.swift in Sources */, 
29EA87D81E79A0090043A5F8 /* URL+Extension.swift in Sources */, BC9F9C7826F8C16600B01ED0 /* Choreographer.swift in Sources */, + BCD8702B2BC266CD009E495B /* IOStreamObserver.swift in Sources */, BC93792F2ADD76BE001097DB /* AVAudioCompressedBuffer+Extension.swift in Sources */, 29B876BC1CD70B3900FC07DA /* ByteArray.swift in Sources */, 29B876831CD70AE800FC07DA /* AudioSpecificConfig.swift in Sources */, @@ -1904,7 +1908,7 @@ BC3802192AB6AD79001AE399 /* IOAudioResamplerTests.swift in Sources */, BC1DC5042A02894D00E928ED /* FLVVideoFourCCTests.swift in Sources */, BC1DC5122A04E46E00E928ED /* HEVCDecoderConfigurationRecordTests.swift in Sources */, - BCA7C24F2A91AA0500882D85 /* IORecorderTests.swift in Sources */, + BCA7C24F2A91AA0500882D85 /* IOStreamRecorderTests.swift in Sources */, BCD91C0D2A700FF50033F9E1 /* IOAudioRingBufferTests.swift in Sources */, 2976077F20A89FBB00DCF24F /* RTMPMessageTests.swift in Sources */, BC7C56C729A7701F00C41A9B /* ESSpecificDataTests.swift in Sources */, diff --git a/Package.resolved b/Package.resolved index fed272172..b26a4b211 100644 --- a/Package.resolved +++ b/Package.resolved @@ -8,24 +8,6 @@ "revision" : "6a7cbf54553936103084ed72cfb6d6f836758229", "version" : "2.4.1" } - }, - { - "identity" : "swift-docc-plugin", - "kind" : "remoteSourceControl", - "location" : "https://github.com/apple/swift-docc-plugin", - "state" : { - "revision" : "26ac5758409154cc448d7ab82389c520fa8a8247", - "version" : "1.3.0" - } - }, - { - "identity" : "swift-docc-symbolkit", - "kind" : "remoteSourceControl", - "location" : "https://github.com/apple/swift-docc-symbolkit", - "state" : { - "revision" : "b45d1f2ed151d057b54504d653e0da5552844e34", - "version" : "1.0.0" - } } ], "version" : 2 diff --git a/README.md b/README.md index 6adb3f495..bd0d16e3c 100644 --- a/README.md +++ b/README.md @@ -282,7 +282,7 @@ stream.attachCamera(front, channel: 0) { videoUnit, error in ### πŸ”Š [AudioCodecSettings](https://shogo4405.github.io/HaishinKit.swift/Structs/AudioCodecSettings.html) When you specify the sampling rate, it will perform resampling. Additionally, in the case of multiple channels, downsampling can be applied. -``` +```swift stream.audioSettings = AudioCodecSettings( bitRate: Int = 64 * 1000, sampleRate: Float64 = 0, @@ -293,7 +293,7 @@ stream.audioSettings = AudioCodecSettings( ``` ### πŸŽ₯ [VideoCodecSettings](https://shogo4405.github.io/HaishinKit.swift/Structs/VideoCodecSettings.html) -``` +```swift stream.videoSettings = VideoCodecSettings( videoSize: .init(width: 854, height: 480), profileLevel: kVTProfileLevel_H264_Baseline_3_1 as String, @@ -307,9 +307,12 @@ stream.videoSettings = VideoCodecSettings( ``` ### ⏺️ Recording -``` +```swift // Specifies the recording settings. 0" means the same of input. 
-stream.startRecording(self, settings: [ +var recorder = IOStreamRecorder() +stream.addObserver(recorder) + +recorder.outputSettings = [ AVMediaType.audio: [ AVFormatIDKey: Int(kAudioFormatMPEG4AAC), AVSampleRateKey: 0, @@ -328,7 +331,10 @@ stream.startRecording(self, settings: [ ] */ ] -]) +] + +recorder.startRunning() + ``` ## πŸ“œ License diff --git a/Sources/IO/IOMixer.swift b/Sources/IO/IOMixer.swift index 865c4b4bd..9bc0215f2 100644 --- a/Sources/IO/IOMixer.swift +++ b/Sources/IO/IOMixer.swift @@ -27,8 +27,6 @@ final class IOMixer { private(set) var isRunning: Atomic = .init(false) - private(set) lazy var recorder = IOStreamRecorder() - private(set) lazy var audioIO = { var audioIO = IOAudioUnit() audioIO.mixer = self @@ -187,7 +185,6 @@ extension IOMixer: IOAudioUnitDelegate { func audioUnit(_ audioUnit: IOAudioUnit, didOutput audioBuffer: AVAudioPCMBuffer, when: AVAudioTime) { delegate?.mixer(self, didOutput: audioBuffer, when: when) - recorder.append(audioBuffer, when: when) } } diff --git a/Sources/IO/IOStream.swift b/Sources/IO/IOStream.swift index bba5fe8b2..9629a1649 100644 --- a/Sources/IO/IOStream.swift +++ b/Sources/IO/IOStream.swift @@ -10,10 +10,6 @@ import UIKit /// The interface an IOStream uses to inform its delegate. public protocol IOStreamDelegate: AnyObject { - /// Tells the receiver to an audio packet incoming. - func stream(_ stream: IOStream, didOutput audio: AVAudioBuffer, when: AVAudioTime) - /// Tells the receiver to a video incoming. - func stream(_ stream: IOStream, didOutput video: CMSampleBuffer) /// Tells the receiver to video error occured. func stream(_ stream: IOStream, videoErrorOccurred error: IOVideoUnitError) /// Tells the receiver to audio error occured. @@ -45,19 +41,19 @@ open class IOStream: NSObject { return lhs.rawValue == rhs.rawValue } - /// NetStream has been created. + /// IOStream has been created. case initialized - /// NetStream waiting for new method. + /// IOStream waiting for new method. case open - /// NetStream play() has been called. + /// IOStream play() has been called. case play - /// NetStream play and server was accepted as playing + /// IOStream play and server was accepted as playing case playing - /// NetStream publish() has been called + /// IOStream publish() has been called case publish - /// NetStream publish and server accpted as publising. + /// IOStream publish and server accpted as publising. case publishing(muxer: any IOMuxer) - /// NetStream close() has been called. + /// IOStream close() has been called. case closed var rawValue: UInt8 { @@ -139,7 +135,7 @@ open class IOStream: NSObject { #if os(iOS) || os(tvOS) /// Specifies the AVCaptureMultiCamSession enabled. /// Warning: If there is a possibility of using multiple cameras, please set it to true initially. - @available(tvOS 17.0, iOS 13.0, *) + @available(tvOS 17.0, *) public var isMultiCamSessionEnabled: Bool { get { return mixer.session.isMultiCamSessionEnabled @@ -236,11 +232,6 @@ open class IOStream: NSObject { return mixer.audioIO.inputFormat } - /// The isRecording value that indicates whether the recorder is recording. - public var isRecording: Bool { - return mixer.recorder.isRunning.value - } - /// Specifies the controls sound. public var soundTransform: SoundTransform { get { @@ -305,6 +296,8 @@ open class IOStream: NSObject { return telly }() + private var observers: [any IOStreamObserver] = [] + /// Creates a NetStream object. 
override public init() { super.init() @@ -314,6 +307,10 @@ open class IOStream: NSObject { #endif } + deinit { + observers.removeAll() + } + /// Attaches the camera object. @available(tvOS 17.0, *) public func attachCamera(_ device: AVCaptureDevice?, channel: UInt8 = 0, configuration: IOVideoCaptureConfigurationBlock? = nil) { @@ -388,16 +385,19 @@ open class IOStream: NSObject { } } - /// Starts recording. - public func startRecording(_ delegate: any IOStreamRecorderDelegate, settings: [AVMediaType: [String: Any]] = IOStreamRecorder.defaultOutputSettings) { - mixer.recorder.delegate = delegate - mixer.recorder.outputSettings = settings - mixer.recorder.startRunning() + /// Adds an observer. + public func addObserver(_ observer: any IOStreamObserver) { + guard observers.firstIndex(where: { $0 === observer }) == nil else { + return + } + observers.append(observer) } - /// Stop recording. - public func stopRecording() { - mixer.recorder.stopRunning() + /// Removes an observer. + public func removeObserver(_ observer: any IOStreamObserver) { + if let index = observers.firstIndex(where: { $0 === observer }) { + observers.remove(at: index) + } } /// A handler that receives stream readyState will update. @@ -450,11 +450,11 @@ open class IOStream: NSObject { extension IOStream: IOMixerDelegate { // MARK: IOMixerDelegate func mixer(_ mixer: IOMixer, didOutput video: CMSampleBuffer) { - delegate?.stream(self, didOutput: video) + observers.forEach { $0.stream(self, didOutput: video) } } func mixer(_ mixer: IOMixer, didOutput audio: AVAudioPCMBuffer, when: AVAudioTime) { - delegate?.stream(self, didOutput: audio, when: when) + observers.forEach { $0.stream(self, didOutput: audio, when: when) } } func mixer(_ mixer: IOMixer, audioErrorOccurred error: IOAudioUnitError) { diff --git a/Sources/IO/IOStreamObserver.swift b/Sources/IO/IOStreamObserver.swift new file mode 100644 index 000000000..42808f84a --- /dev/null +++ b/Sources/IO/IOStreamObserver.swift @@ -0,0 +1,11 @@ +import AVFoundation +import CoreMedia +import Foundation + +/// A delegate protocol your app implements to receive capture stream output events. +public protocol IOStreamObserver: AnyObject { + /// Tells the receiver to an audio packet incoming. + func stream(_ stream: IOStream, didOutput audio: AVAudioBuffer, when: AVAudioTime) + /// Tells the receiver to a video incoming. + func stream(_ stream: IOStream, didOutput video: CMSampleBuffer) +} diff --git a/Sources/IO/IOStreamRecorder.swift b/Sources/IO/IOStreamRecorder.swift index a0f66d049..48dc01a12 100644 --- a/Sources/IO/IOStreamRecorder.swift +++ b/Sources/IO/IOStreamRecorder.swift @@ -12,9 +12,9 @@ public protocol IOStreamRecorderDelegate: AnyObject { } // MARK: - -/// The IOStreamRecorderDelegate class represents video and audio recorder. +/// The IOStreamRecorder class represents video and audio recorder. public final class IOStreamRecorder { - /// The IOStreamRecorderDelegate error domain codes. + /// The IOStreamRecorder error domain codes. public enum Error: Swift.Error { /// Failed to create the AVAssetWriter. case failedToCreateAssetWriter(error: any Swift.Error) @@ -26,8 +26,8 @@ public final class IOStreamRecorder { case failedToFinishWriting(error: (any Swift.Error)?) } - /// The default output settings for an IORecorder. - public static let defaultOutputSettings: [AVMediaType: [String: Any]] = [ + /// The default output settings for an IOStreamRecorder. 
+ public static let defaultSettings: [AVMediaType: [String: Any]] = [ .audio: [ AVFormatIDKey: Int(kAudioFormatMPEG4AAC), AVSampleRateKey: 0, @@ -43,7 +43,7 @@ public final class IOStreamRecorder { /// Specifies the delegate. public weak var delegate: (any IOStreamRecorderDelegate)? /// Specifies the recorder settings. - public var outputSettings: [AVMediaType: [String: Any]] = IOStreamRecorder.defaultOutputSettings + public var settings: [AVMediaType: [String: Any]] = IOStreamRecorder.defaultSettings /// The running indicies whether recording or not. public private(set) var isRunning: Atomic = .init(false) @@ -52,11 +52,10 @@ public final class IOStreamRecorder { guard let writer = writer else { return false } - return outputSettings.count == writer.inputs.count + return settings.count == writer.inputs.count } private var writer: AVAssetWriter? private var writerInputs: [AVMediaType: AVAssetWriterInput] = [:] - private var pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor? private var audioPresentationTime: CMTime = .zero private var videoPresentationTime: CMTime = .zero private var dimensions: CMVideoDimensions = .init(width: 0, height: 0) @@ -72,7 +71,7 @@ public final class IOStreamRecorder { #endif /// Append a sample buffer for recording. - public func append(_ sampleBuffer: CMSampleBuffer) { + func append(_ sampleBuffer: CMSampleBuffer) { guard isRunning.value else { return } @@ -114,41 +113,6 @@ public final class IOStreamRecorder { } } - /// Append a pixel buffer for recording. - public func append(_ pixelBuffer: CVPixelBuffer, withPresentationTime: CMTime) { - guard isRunning.value else { - return - } - lockQueue.async { - if self.dimensions.width != pixelBuffer.width || self.dimensions.height != pixelBuffer.height { - self.dimensions = .init(width: Int32(pixelBuffer.width), height: Int32(pixelBuffer.height)) - } - guard - let writer = self.writer, - let input = self.makeWriterInput(.video, sourceFormatHint: nil), - let adaptor = self.makePixelBufferAdaptor(input), - self.isReadyForStartWriting && self.videoPresentationTime.seconds < withPresentationTime.seconds else { - return - } - - switch writer.status { - case .unknown: - writer.startWriting() - writer.startSession(atSourceTime: withPresentationTime) - default: - break - } - - if input.isReadyForMoreMediaData { - if adaptor.append(pixelBuffer, withPresentationTime: withPresentationTime) { - self.videoPresentationTime = withPresentationTime - } else { - self.delegate?.recorder(self, errorOccured: .failedToAppend(error: writer.error)) - } - } - } - } - func append(_ audioPCMBuffer: AVAudioPCMBuffer, when: AVAudioTime) { guard isRunning.value else { return @@ -172,7 +136,6 @@ public final class IOStreamRecorder { self.delegate?.recorder(self, finishWriting: writer) self.writer = nil self.writerInputs.removeAll() - self.pixelBufferAdaptor = nil dispatchGroup.leave() } dispatchGroup.wait() @@ -184,7 +147,7 @@ public final class IOStreamRecorder { } var outputSettings: [String: Any] = [:] - if let defaultOutputSettings: [String: Any] = self.outputSettings[mediaType] { + if let defaultOutputSettings: [String: Any] = self.settings[mediaType] { switch mediaType { case .audio: guard @@ -230,17 +193,18 @@ public final class IOStreamRecorder { } return input } +} + +extension IOStreamRecorder: IOStreamObserver { + // MARK: IOStreamObserver + public func stream(_ stream: IOStream, didOutput video: CMSampleBuffer) { + append(video) + } - private func makePixelBufferAdaptor(_ writerInput: AVAssetWriterInput?) 
-> AVAssetWriterInputPixelBufferAdaptor? { - guard pixelBufferAdaptor == nil else { - return pixelBufferAdaptor - } - guard let writerInput = writerInput else { - return nil + public func stream(_ stream: IOStream, didOutput audio: AVAudioBuffer, when: AVAudioTime) { + if let audio = audio as? AVAudioPCMBuffer { + append(audio, when: when) } - let adaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput, sourcePixelBufferAttributes: [:]) - pixelBufferAdaptor = adaptor - return adaptor } } diff --git a/Sources/IO/IOVideoUnit.swift b/Sources/IO/IOVideoUnit.swift index 108ff4677..9f3557285 100644 --- a/Sources/IO/IOVideoUnit.swift +++ b/Sources/IO/IOVideoUnit.swift @@ -278,9 +278,5 @@ extension IOVideoUnit: IOVideoMixerDelegate { presentationTimeStamp: presentationTimeStamp, duration: .invalid ) - mixer?.recorder.append( - imageBuffer, - withPresentationTime: presentationTimeStamp - ) } } diff --git a/Tests/IO/IORecorderTests.swift b/Tests/IO/IOStreamRecorderTests.swift similarity index 92% rename from Tests/IO/IORecorderTests.swift rename to Tests/IO/IOStreamRecorderTests.swift index 2a5a06aa1..63f4cc5fa 100644 --- a/Tests/IO/IORecorderTests.swift +++ b/Tests/IO/IOStreamRecorderTests.swift @@ -5,11 +5,11 @@ import AVFoundation @testable import HaishinKit -final class IORecorderTests: XCTestCase, IOStreamRecorderDelegate { +final class IOStreamRecorderTests: XCTestCase, IOStreamRecorderDelegate { func testRecorder2channel() { let recorder = IOStreamRecorder() recorder.delegate = self - recorder.outputSettings = [.audio: [ + recorder.settings = [.audio: [ AVFormatIDKey: Int(kAudioFormatMPEG4AAC), AVSampleRateKey: 0, AVNumberOfChannelsKey: 0 @@ -31,7 +31,7 @@ final class IORecorderTests: XCTestCase, IOStreamRecorderDelegate { func testRecorder4channel() { let recorder = IOStreamRecorder() recorder.delegate = self - recorder.outputSettings = [.audio: [ + recorder.settings = [.audio: [ AVFormatIDKey: Int(kAudioFormatMPEG4AAC), AVSampleRateKey: 0, AVNumberOfChannelsKey: 0 diff --git a/Tests/ISO/HEVCDecoderConfigurationRecordTests.swift b/Tests/ISO/HEVCDecoderConfigurationRecordTests.swift index 6833a8bd6..49ffa9c99 100644 --- a/Tests/ISO/HEVCDecoderConfigurationRecordTests.swift +++ b/Tests/ISO/HEVCDecoderConfigurationRecordTests.swift @@ -8,7 +8,7 @@ final class HEVCDecoderConfigurationRecordTests: XCTestCase { func testMain() { let data = Data([1, 1, 96, 0, 0, 0, 176, 0, 0, 0, 0, 0, 93, 240, 0, 252, 253, 248, 248, 0, 0, 15, 3, 32, 0, 1, 0, 24, 64, 1, 12, 1, 255, 255, 1, 96, 0, 0, 3, 0, 176, 0, 0, 3, 0, 0, 3, 0, 93, 21, 192, 144, 33, 0, 1, 0, 36, 66, 1, 1, 1, 96, 0, 0, 3, 0, 176, 0, 0, 3, 0, 0, 3, 0, 93, 160, 2, 40, 128, 39, 28, 178, 226, 5, 123, 145, 101, 83, 80, 16, 16, 16, 8, 34, 0, 1, 0, 7, 68, 1, 192, 44, 188, 20, 201]) let hevc = HEVCDecoderConfigurationRecord(data: data) - var formatDescription = hevc.makeFormatDescription() + let formatDescription = hevc.makeFormatDescription() XCTAssertNotNil(formatDescription) } }
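
Reviewer note: the audio/video `didOutput` callbacks removed from `IOStreamDelegate` in this patch move to the new `IOStreamObserver` protocol (`Sources/IO/IOStreamObserver.swift`), and `IOStream` gains `addObserver(_:)` / `removeObserver(_:)` for registration. The sketch below is a minimal, hypothetical conformer (the `FrameCounter` class and its counting behavior are not part of the patch); only the two protocol requirements and the registration API are taken from the diff.

```swift
import AVFoundation
import CoreMedia
import HaishinKit

// Hypothetical observer: counts the buffers an IOStream delivers.
// The two requirements mirror Sources/IO/IOStreamObserver.swift from this patch.
final class FrameCounter: IOStreamObserver {
    private(set) var videoBuffers = 0
    private(set) var audioBuffers = 0

    func stream(_ stream: IOStream, didOutput video: CMSampleBuffer) {
        // Invoked for every mixed video sample buffer the stream outputs.
        videoBuffers += 1
    }

    func stream(_ stream: IOStream, didOutput audio: AVAudioBuffer, when: AVAudioTime) {
        // Invoked for every audio buffer, together with its presentation time.
        audioBuffers += 1
    }
}

// Usage with any IOStream subclass (RTMPStream, for example):
//   let counter = FrameCounter()
//   stream.addObserver(counter)     // duplicate registrations are ignored
//   ...
//   stream.removeObserver(counter)  // the stream holds observers strongly until removed
```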
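Recording follows the same observer mechanism: `startRecording(_:settings:)` / `stopRecording()` are removed from `IOStream`, `IOMixer` no longer owns a recorder, `IOStreamRecorder` itself conforms to `IOStreamObserver`, and its `outputSettings` / `defaultOutputSettings` are renamed to `settings` / `defaultSettings`. A rough migration sketch follows; the `RTMPConnection`/`RTMPStream` setup is only an example host stream, and the concrete settings values simply echo the README's defaults (0 meaning "same as input").

```swift
import AVFoundation
import HaishinKit

let connection = RTMPConnection()
let stream = RTMPStream(connection: connection)

// Before this patch:
//   stream.startRecording(self, settings: IOStreamRecorder.defaultOutputSettings)
//   stream.stopRecording()

// After this patch, the recorder is attached explicitly as an observer.
let recorder = IOStreamRecorder()
// recorder.delegate = self          // optional IOStreamRecorderDelegate for error/finish callbacks
recorder.settings = [                // renamed from `outputSettings`
    .audio: [
        AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
        AVSampleRateKey: 0,          // 0 keeps the input sample rate
        AVNumberOfChannelsKey: 0     // 0 keeps the input channel count
    ],
    .video: [
        AVVideoCodecKey: AVVideoCodecType.h264,
        AVVideoHeightKey: 0,         // 0 keeps the input dimensions
        AVVideoWidthKey: 0
    ]
]

stream.addObserver(recorder)         // recorder now receives the stream's audio/video buffers
recorder.startRunning()              // starts writing via AVAssetWriter

// ... publish / capture as usual ...

recorder.stopRunning()               // finishWriting is reported through the delegate
stream.removeObserver(recorder)
```

Compared with the previous design, the recorder is no longer baked into `IOMixer`, so it can be attached to and detached from a stream independently of the stream's own lifecycle.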