diff --git a/HaishinKit.xcodeproj/project.pbxproj b/HaishinKit.xcodeproj/project.pbxproj
index e6357ab4e..0dcfcdfac 100644
--- a/HaishinKit.xcodeproj/project.pbxproj
+++ b/HaishinKit.xcodeproj/project.pbxproj
@@ -251,6 +251,7 @@
 		BCB976DF26107B5600C9A649 /* TSField.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCB976DE26107B5600C9A649 /* TSField.swift */; };
 		BCB9773F2621812800C9A649 /* ISOTypeBufferUtil.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCB9773E2621812800C9A649 /* ISOTypeBufferUtil.swift */; };
 		BCB9D79F2BF12AD8008C5B1B /* SampleVideo_360x240_5mb.mp4 in Resources */ = {isa = PBXBuildFile; fileRef = 29B876D91CD70CE700FC07DA /* SampleVideo_360x240_5mb.mp4 */; };
+		BCBF01762CE0AD1D00C63E7A /* FrameTracker.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCBF01752CE0AD1900C63E7A /* FrameTracker.swift */; };
 		BCC1A72B264FAC1800661156 /* ESSpecificData.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCC1A72A264FAC1800661156 /* ESSpecificData.swift */; };
 		BCC2237E2C1468C700F5F234 /* SRTHaishinKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = BCCC45962AA289FA0016EFE8 /* SRTHaishinKit.framework */; };
 		BCC2237F2C1468C700F5F234 /* SRTHaishinKit.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = BCCC45962AA289FA0016EFE8 /* SRTHaishinKit.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
@@ -714,6 +715,7 @@
 		BCABED202BDE23C600CC7E73 /* AudioNode+DebugExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AudioNode+DebugExtension.swift"; sourceTree = "<group>"; };
 		BCB976DE26107B5600C9A649 /* TSField.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TSField.swift; sourceTree = "<group>"; };
 		BCB9773E2621812800C9A649 /* ISOTypeBufferUtil.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ISOTypeBufferUtil.swift; sourceTree = "<group>"; };
+		BCBF01752CE0AD1900C63E7A /* FrameTracker.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FrameTracker.swift; sourceTree = "<group>"; };
 		BCC1A72A264FAC1800661156 /* ESSpecificData.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ESSpecificData.swift; sourceTree = "<group>"; };
 		BCC4F4142AD6FC1100954EF5 /* IOTellyUnit.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOTellyUnit.swift; sourceTree = "<group>"; };
 		BCC9E9082636FF7400948774 /* DataBufferTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DataBufferTests.swift; sourceTree = "<group>"; };
@@ -882,6 +884,7 @@
 				BC0D236C26331BAB001DDA0C /* DataBuffer.swift */,
 				29B876671CD70AB300FC07DA /* DataConvertible.swift */,
 				2976A4851D4903C300B53EF2 /* DeviceUtil.swift */,
+				BCBF01752CE0AD1900C63E7A /* FrameTracker.swift */,
 				BC32E88729C9971100051507 /* InstanceHolder.swift */,
 				2942424C1CF4C01300D65DCB /* MD5.swift */,
 				2942A4F721A9418A004E1BEE /* Running.swift */,
@@ -1861,6 +1864,7 @@
 				BC11023E2917C35B00D48035 /* CVPixelBufferPool+Extension.swift in Sources */,
 				29C2631C1D0083B50098D4EF /* IOVideoUnit.swift in Sources */,
 				29B876B41CD70B2800FC07DA /* RTMPSharedObject.swift in Sources */,
+				BCBF01762CE0AD1D00C63E7A /* FrameTracker.swift in Sources */,
 				2901A4EE1D437170002BBD23 /* MediaLink.swift in Sources */,
 				29B876941CD70AFE00FC07DA /* SoundTransform.swift in Sources */,
 				29DF20662312A436004057C3 /* RTMPSocketCompatible.swift in Sources */,
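The project-file changes above only register the new Sources/Util/FrameTracker.swift in the build; the functional changes follow. The IOVideoMixer hunk below measures how far the main track's capture timestamps lag behind the screen's next render deadline (targetTimestamp) and publishes that as videoCaptureLatency, rounded up to the next 0.1 ms step. A standalone sketch of that rounding; the helper name is illustrative and not part of the diff, which inlines the expression:

import CoreMedia
import Foundation

// Hypothetical helper mirroring the inline `ceil(x * 10000) / 10000`
// expression in the IOVideoMixer hunk below: it rounds a latency
// measurement up to the next 0.1 ms step.
func captureLatency(targetTimestamp: TimeInterval, presentationTimeStamp: CMTime) -> TimeInterval {
    let raw = targetTimestamp - presentationTimeStamp.seconds
    return ceil(raw * 10000) / 10000
}

let pts = CMTime(seconds: 1.0, preferredTimescale: 1_000_000_000)
print(captureLatency(targetTimestamp: 1.0123456, presentationTimeStamp: pts)) // 0.0124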
diff --git a/Sources/IO/IOVideoMixer.swift b/Sources/IO/IOVideoMixer.swift
index 88f47cfc5..7f95ffe65 100644
--- a/Sources/IO/IOVideoMixer.swift
+++ b/Sources/IO/IOVideoMixer.swift
@@ -41,6 +41,10 @@ final class IOVideoMixer {
             for screen in screens where screen.track == track {
                 screen.enqueue(sampleBuffer)
             }
+            if track == settings.mainTrack {
+                let diff = ceil((screen.targetTimestamp.value - sampleBuffer.presentationTimeStamp.seconds) * 10000) / 10000
+                screen.videoCaptureLatency.mutate { $0 = diff }
+            }
         case .passthrough:
             if settings.mainTrack == track {
                 outputSampleBuffer(sampleBuffer)
diff --git a/Sources/IO/IOVideoUnit.swift b/Sources/IO/IOVideoUnit.swift
index aac06235c..2618c67d1 100644
--- a/Sources/IO/IOVideoUnit.swift
+++ b/Sources/IO/IOVideoUnit.swift
@@ -177,15 +177,13 @@
             }
             let capture = self.capture(for: track)
             configuration?(capture, nil)
+            videoMixer.reset(track)
             try capture?.attachDevice(device, videoUnit: self)
         }
         if device != nil && view != nil {
             // Start captureing if not running.
             mixer?.session.startRunning()
         }
-        if device == nil {
-            videoMixer.reset(track)
-        }
     }
 
     #if os(iOS) || os(tvOS) || os(macOS)
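With the capture latency in hand, Screen.swift below stops stamping rendered frames with the host clock at render time. It instead derives each frame's presentation timestamp from the choreographer's frame timestamp minus videoCaptureLatency, drops any frame whose timestamp would move backwards, and uses targetTimestamp - timestamp (roughly one display-link frame interval) as the duration. A minimal standalone model of that timing rule, reusing the diff's names; this is a simplified sketch, not the Screen class itself:

import CoreMedia
import Foundation

// Simplified model of the timing logic Screen.swift adopts below.
struct FrameTiming {
    private var lastPresentationTimeStamp: CMTime = .zero

    // `timestamp` and `targetTimestamp` come from the display-link
    // choreographer; `videoCaptureLatency` is the value IOVideoMixer
    // measured above.
    mutating func makeTimingInfo(timestamp: TimeInterval,
                                 targetTimestamp: TimeInterval,
                                 videoCaptureLatency: TimeInterval) -> CMSampleTimingInfo? {
        let presentationTimeStamp = CMTime(seconds: timestamp - videoCaptureLatency, preferredTimescale: 1_000_000_000)
        // Drop the frame rather than emit a non-monotonic timestamp.
        guard lastPresentationTimeStamp <= presentationTimeStamp else {
            return nil
        }
        lastPresentationTimeStamp = presentationTimeStamp
        return CMSampleTimingInfo(
            duration: CMTime(seconds: targetTimestamp - timestamp, preferredTimescale: 1_000_000_000),
            presentationTimeStamp: presentationTimeStamp,
            decodeTimeStamp: .invalid
        )
    }
}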
diff --git a/Sources/Screen/Screen.swift b/Sources/Screen/Screen.swift
index 1ca9685cb..41b46b8b1 100644
--- a/Sources/Screen/Screen.swift
+++ b/Sources/Screen/Screen.swift
@@ -79,6 +79,10 @@
         }
     }
     #endif
+
+    var videoCaptureLatency: Atomic<TimeInterval> = .init(0)
+    private(set) var targetTimestamp: Atomic<TimeInterval> = .init(0)
+
     weak var observer: (any ScreenObserver)?
     private var root: ScreenObjectContainer = .init()
     private(set) var renderer = ScreenRendererByCPU()
@@ -87,7 +91,6 @@
         choreographer.delegate = self
         return choreographer
     }()
-    private var timeStamp: CMTime = .invalid
     private var attributes: [NSString: NSObject] {
         return [
             kCVPixelBufferPixelFormatTypeKey: NSNumber(value: kCVPixelFormatType_32ARGB),
@@ -102,6 +105,7 @@
             outputFormat = nil
         }
     }
+    private var presentationTimeStamp: CMTime = .zero
 
     /// Adds the specified screen object as a child of the current screen object container.
     public func addChild(_ child: ScreenObject?) throws {
@@ -117,11 +121,14 @@
         return root.getScreenObjects()
     }
 
+    var count: Int = 0
+
     func render(_ sampleBuffer: CMSampleBuffer) -> CMSampleBuffer {
         sampleBuffer.imageBuffer?.lockBaseAddress(Self.lockFrags)
         defer {
             sampleBuffer.imageBuffer?.unlockBaseAddress(Self.lockFrags)
         }
+        renderer.presentationTimeStamp = sampleBuffer.presentationTimeStamp
         renderer.setTarget(sampleBuffer.imageBuffer)
         if let dimensions = sampleBuffer.formatDescription?.dimensions {
             root.size = dimensions.size
@@ -156,6 +163,10 @@ extension Screen: Running {
 extension Screen: ChoreographerDelegate {
     // MARK: ChoreographerDelegate
     func choreographer(_ choreographer: some Choreographer, didFrame timestamp: TimeInterval, targetTimestamp: TimeInterval) {
+        defer {
+            count += 1
+            self.targetTimestamp.mutate { $0 = targetTimestamp }
+        }
         var pixelBuffer: CVPixelBuffer?
         pixelBufferPool?.createPixelBuffer(&pixelBuffer)
         guard let pixelBuffer else {
@@ -174,13 +185,16 @@
         if let dictionary = CVBufferGetAttachments(pixelBuffer, .shouldNotPropagate) {
             CVBufferSetAttachments(pixelBuffer, dictionary, .shouldPropagate)
         }
-        let now = CMClock.hostTimeClock.time
+        let presentationTimeStamp = CMTime(seconds: timestamp - videoCaptureLatency.value, preferredTimescale: 1000000000)
+        guard self.presentationTimeStamp <= presentationTimeStamp else {
+            return
+        }
+        self.presentationTimeStamp = presentationTimeStamp
         var timingInfo = CMSampleTimingInfo(
-            duration: timeStamp == .invalid ? .zero : now - timeStamp,
-            presentationTimeStamp: now,
+            duration: CMTime(seconds: targetTimestamp - timestamp, preferredTimescale: 1000000000),
+            presentationTimeStamp: presentationTimeStamp,
             decodeTimeStamp: .invalid
         )
-        timeStamp = now
         var sampleBuffer: CMSampleBuffer?
         guard CMSampleBufferCreateReadyWithImageBuffer(
             allocator: kCFAllocatorDefault,
diff --git a/Sources/Screen/ScreenObject.swift b/Sources/Screen/ScreenObject.swift
index 75e2bb1d2..4d21a2e78 100644
--- a/Sources/Screen/ScreenObject.swift
+++ b/Sources/Screen/ScreenObject.swift
@@ -207,6 +207,7 @@ public final class ImageScreenObject: ScreenObject {
 
 /// An object that manages offscreen rendering a video track source.
 public final class VideoTrackScreenObject: ScreenObject, ChromaKeyProcessable {
+    static let capacity: Int = 3
     public var chromaKeyColor: CGColor?
 
     /// Specifies the track number how the displays the visual content.
@@ -229,6 +230,10 @@
         }
     }
+    public var frameRate: Int {
+        frameTracker.frameRate
+    }
+
     override var blendMode: ScreenObject.BlendMode {
         if 0.0 < cornerRadius || chromaKeyColor != nil {
             return .alpha
         }
@@ -238,13 +243,14 @@
     private var queue: TypedBlockQueue<CMSampleBuffer>?
     private var effects: [VideoEffect] = .init()
+    private var frameTracker = FrameTracker()
 
     /// Create a screen object.
     override public init() {
         super.init()
         horizontalAlignment = .center
         do {
-            queue = try TypedBlockQueue(capacity: 1, handlers: .outputPTSSortedSampleBuffers)
+            queue = try TypedBlockQueue(capacity: Self.capacity, handlers: .outputPTSSortedSampleBuffers)
         } catch {
             logger.error(error)
         }
@@ -269,9 +275,11 @@
     }
 
     override public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? {
-        guard let sampleBuffer = queue?.dequeue(), let pixelBuffer = sampleBuffer.imageBuffer else {
+        guard let sampleBuffer = queue?.dequeue(renderer.presentationTimeStamp),
+              let pixelBuffer = sampleBuffer.imageBuffer else {
             return nil
         }
+        frameTracker.update(sampleBuffer.presentationTimeStamp)
         // Resizing before applying the filter for performance optimization.
         var image = CIImage(cvPixelBuffer: pixelBuffer).transformed(by: videoGravity.scale(
             bounds.size,
@@ -304,12 +312,20 @@
         }
     }
 
+    override public func draw(_ renderer: some ScreenRenderer) {
+        super.draw(renderer)
+        if queue?.isEmpty == false {
+            invalidateLayout()
+        }
+    }
+
     func enqueue(_ sampleBuffer: CMSampleBuffer) {
         try? queue?.enqueue(sampleBuffer)
         invalidateLayout()
     }
 
     func reset() {
+        frameTracker.clear()
         try? queue?.reset()
         invalidateLayout()
     }
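VideoTrackScreenObject now buffers up to three samples instead of one, dequeues against the renderer's presentationTimeStamp, and feeds every displayed buffer to a FrameTracker so the effective rate can be read from the new frameRate property. FrameTracker (full source below) simply counts the buffers seen per rolling one-second window. A usage sketch with synthetic 60 fps timestamps, assuming HaishinKit's internal FrameTracker type (and its module-level logger) are in scope:

import CoreMedia

var tracker = FrameTracker()
for frame in 0..<120 {
    // Synthetic timestamps: frame n arrives at n/60 seconds.
    tracker.update(CMTime(value: CMTimeValue(frame), timescale: 60))
}
// The first window closes at the 1.0 s frame, so frameRate reports 61
// here: the 61 updates stamped 0/60 through 60/60 inclusive.
print(tracker.frameRate)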
diff --git a/Sources/Screen/ScreenRenderer.swift b/Sources/Screen/ScreenRenderer.swift
index 1c83046f6..88ba40d23 100644
--- a/Sources/Screen/ScreenRenderer.swift
+++ b/Sources/Screen/ScreenRenderer.swift
@@ -11,6 +11,8 @@ public protocol ScreenRenderer: AnyObject {
     var backgroundColor: CGColor { get set }
     /// The current screen bounds.
     var bounds: CGRect { get }
+    /// The current presentationTimeStamp.
+    var presentationTimeStamp: CMTime { get }
     /// Layouts a screen object.
     func layout(_ screenObject: ScreenObject)
     /// Draws a sceen object.
@@ -24,6 +26,7 @@ final class ScreenRendererByCPU: ScreenRenderer {
     static let doNotTile = vImage_Flags(kvImageDoNotTile)
 
     var bounds: CGRect = .init(origin: .zero, size: Screen.size)
+    var presentationTimeStamp: CMTime = .zero
 
     lazy var context = {
         guard let deive = MTLCreateSystemDefaultDevice() else {
@@ -64,6 +67,7 @@
             }
         }
     }
+
     private var format = vImage_CGImageFormat(
         bitsPerComponent: 8,
         bitsPerPixel: 32,
diff --git a/Sources/Util/FrameTracker.swift b/Sources/Util/FrameTracker.swift
new file mode 100644
index 000000000..c80c876e5
--- /dev/null
+++ b/Sources/Util/FrameTracker.swift
@@ -0,0 +1,27 @@
+import CoreMedia
+
+struct FrameTracker {
+    static let seconds = 1.0
+
+    private(set) var frameRate: Int = 0
+    private var count = 0
+    private var rotated: CMTime = .zero
+
+    init() {
+    }
+
+    mutating func update(_ time: CMTime) {
+        count += 1
+        if Self.seconds <= (time - rotated).seconds {
+            rotated = time
+            frameRate = count
+            count = 0
+            logger.info(frameRate)
+        }
+    }
+
+    mutating func clear() {
+        count = 0
+        rotated = .zero
+    }
+}
diff --git a/Sources/Util/TypedBlockQueue.swift b/Sources/Util/TypedBlockQueue.swift
index e5b914e8b..8744b8723 100644
--- a/Sources/Util/TypedBlockQueue.swift
+++ b/Sources/Util/TypedBlockQueue.swift
@@ -46,3 +46,20 @@ final class TypedBlockQueue<T: AnyObject> {
         try queue.reset()
     }
 }
+
+extension TypedBlockQueue where T == CMSampleBuffer {
+    func dequeue(_ presentationTimeStamp: CMTime) -> CMSampleBuffer? {
+        var result: CMSampleBuffer?
+        while !queue.isEmpty {
+            guard let head else {
+                break
+            }
+            if head.presentationTimeStamp <= presentationTimeStamp {
+                result = dequeue()
+            } else {
+                return result
+            }
+        }
+        return result
+    }
+}
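The new dequeue(_:) drains every queued sample buffer whose presentation timestamp is at or before the renderer's current presentationTimeStamp and returns the last one, so makeImage always shows the newest frame that is not ahead of the render clock while stale frames are discarded. A behavior sketch, with a plain array of Double timestamps standing in for the CMBufferQueue-backed queue of sample buffers:

// Given buffers at 0.00, 0.33 and 0.66 s and a render timestamp of
// 0.5 s, this returns the 0.33 s buffer, drops the 0.00 s one, and
// leaves the 0.66 s buffer queued for a later frame.
func dequeue(upTo presentationTimeStamp: Double, from queue: inout [Double]) -> Double? {
    var result: Double?
    while let head = queue.first, head <= presentationTimeStamp {
        result = queue.removeFirst()
    }
    return result
}

var queue: [Double] = [0.00, 0.33, 0.66]
print(dequeue(upTo: 0.5, from: &queue) as Any) // Optional(0.33)
print(queue)                                   // [0.66]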