Revised the video buffer algorithm.
shogo4405 committed Nov 17, 2024
1 parent 1705f6e · commit fbc38c7
Showing 2 changed files with 18 additions and 22 deletions.
Sources/IO/IOVideoMixer.swift (8 changes: 3 additions & 5 deletions)
@@ -41,8 +41,9 @@ final class IOVideoMixer<T: IOVideoMixerDelegate> {
     for screen in screens where screen.track == track {
         screen.enqueue(sampleBuffer)
     }
-    if settings.mainTrack == track {
-        screen.videoPresentationTimeStamp = sampleBuffer.presentationTimeStamp
+    if track == settings.mainTrack {
+        let diff = ceil((screen.targetTimestamp.value - sampleBuffer.presentationTimeStamp.seconds) * 10000) / 10000
+        screen.videoCaptureLatency.mutate { $0 = diff }
     }
 case .passthrough:
     if settings.mainTrack == track {
@@ -65,9 +66,6 @@ final class IOVideoMixer<T: IOVideoMixerDelegate> {
     for screen in screens where screen.track == track {
         screen.reset()
     }
-    if settings.mainTrack == track {
-        screen.videoPresentationTimeStamp = .invalid
-    }
 }

 @inline(__always)
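The hunk above records how far the captured frame lags behind the screen's rendering clock: the difference between the screen's most recent choreographer target timestamp and the buffer's presentation timestamp, rounded up to a 0.1 ms grid. A minimal standalone sketch of that computation (the helper name below is made up, not part of HaishinKit):

```swift
import Foundation

// Sketch of the `diff` computation above: how far the incoming buffer's
// presentation timestamp lags behind the screen's next display target,
// rounded up to 1/10000 s (0.1 ms) so jitter does not thrash the value.
func captureLatency(targetTimestamp: TimeInterval,
                    presentationTimeStamp: TimeInterval) -> TimeInterval {
    ceil((targetTimestamp - presentationTimeStamp) * 10000) / 10000
}

// Example: a buffer stamped 0.98765 s against a 1.0 s target gives 0.0124 s.
print(captureLatency(targetTimestamp: 1.0, presentationTimeStamp: 0.98765))
```

Screen stores this value in videoCaptureLatency and uses it below to shift its choreographer-driven presentation timestamps back onto the capture timeline.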
Sources/Screen/Screen.swift (32 changes: 15 additions & 17 deletions)
@@ -58,8 +58,6 @@ public final class Screen: ScreenObjectContainerConvertible {
     return choreographer.isRunning
 }

-var videoPresentationTimeStamp: CMTime = .invalid
-
 #if os(macOS)
 /// Specifies the background color.
 public var backgroundColor: CGColor = NSColor.black.cgColor {
@@ -82,6 +80,9 @@ public final class Screen: ScreenObjectContainerConvertible {
 }
 #endif

+var videoCaptureLatency: Atomic<TimeInterval> = .init(0)
+private(set) var targetTimestamp: Atomic<TimeInterval> = .init(0)
+
 weak var observer: (any ScreenObserver)?
 private var root: ScreenObjectContainer = .init()
 private(set) var renderer = ScreenRendererByCPU()
@@ -104,7 +105,7 @@ public final class Screen: ScreenObjectContainerConvertible {
         outputFormat = nil
     }
 }
-private var currentPresentationTimeStamp: CMTime = .zero
+private var presentationTimeStamp: CMTime = .zero

 /// Adds the specified screen object as a child of the current screen object container.
 public func addChild(_ child: ScreenObject?) throws {
@@ -120,6 +121,8 @@ public final class Screen: ScreenObjectContainerConvertible {
     return root.getScreenObjects()
 }

+var count: Int = 0
+
 func render(_ sampleBuffer: CMSampleBuffer) -> CMSampleBuffer {
     sampleBuffer.imageBuffer?.lockBaseAddress(Self.lockFrags)
     defer {
@@ -144,8 +147,6 @@ extension Screen: Running {
         return
     }
     CVPixelBufferPoolCreate(nil, nil, attributes as CFDictionary?, &pixelBufferPool)
-    videoPresentationTimeStamp = .invalid
-    currentPresentationTimeStamp = .zero
     choreographer.preferredFramesPerSecond = frameRate
     choreographer.startRunning()
     choreographer.isPaused = false
@@ -162,6 +163,10 @@ extension Screen: ChoreographerDelegate {
 extension Screen: ChoreographerDelegate {
     // MARK: ChoreographerDelegate
     func choreographer(_ choreographer: some Choreographer, didFrame timestamp: TimeInterval, targetTimestamp: TimeInterval) {
+        defer {
+            count += 1
+            self.targetTimestamp.mutate { $0 = targetTimestamp }
+        }
         var pixelBuffer: CVPixelBuffer?
         pixelBufferPool?.createPixelBuffer(&pixelBuffer)
         guard let pixelBuffer else {
@@ -180,14 +185,14 @@ extension Screen: ChoreographerDelegate {
         if let dictionary = CVBufferGetAttachments(pixelBuffer, .shouldNotPropagate) {
             CVBufferSetAttachments(pixelBuffer, dictionary, .shouldPropagate)
         }
-        let now = makePresentationTimeStamp(timestamp)
-        guard currentPresentationTimeStamp < now else {
-            print("⚠️", currentPresentationTimeStamp, now)
+        let presentationTimeStamp = CMTime(seconds: timestamp - videoCaptureLatency.value, preferredTimescale: 1000000000)
+        guard self.presentationTimeStamp <= presentationTimeStamp else {
             return
         }
+        self.presentationTimeStamp = presentationTimeStamp
         var timingInfo = CMSampleTimingInfo(
-            duration: currentPresentationTimeStamp == .zero ? .zero : now - currentPresentationTimeStamp,
-            presentationTimeStamp: now,
+            duration: CMTime(seconds: targetTimestamp - timestamp, preferredTimescale: 1000000000),
+            presentationTimeStamp: presentationTimeStamp,
             decodeTimeStamp: .invalid
         )
         var sampleBuffer: CMSampleBuffer?
@@ -203,12 +208,5 @@ extension Screen: ChoreographerDelegate {
         if let sampleBuffer {
             observer?.screen(self, didOutput: render(sampleBuffer))
         }
-        currentPresentationTimeStamp = now
     }
-
-    private func makePresentationTimeStamp(_ timestamp: TimeInterval) -> CMTime {
-        return videoPresentationTimeStamp == .invalid ?
-            CMTime(seconds: timestamp, preferredTimescale: 1000000000) :
-            videoPresentationTimeStamp
-    }
 }
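Taken together, the Screen.swift changes replace the old makePresentationTimeStamp path with a clock derived from the choreographer: each frame is stamped with the callback timestamp minus the videoCaptureLatency measured in IOVideoMixer, frames that would move the timestamp backwards are dropped, and the duration is the span to the next target timestamp. A minimal model of that logic under assumed names (FrameTimingSketch is illustrative, not part of the library):

```swift
import CoreMedia
import Foundation

// Illustrative model of the timing logic above: presentation timestamp =
// choreographer callback time shifted back by the measured capture latency;
// frames that would step backwards are dropped; duration = time until the
// next display refresh (targetTimestamp - timestamp).
struct FrameTimingSketch {
    private(set) var presentationTimeStamp: CMTime = .zero

    mutating func makeTimingInfo(timestamp: TimeInterval,
                                 targetTimestamp: TimeInterval,
                                 videoCaptureLatency: TimeInterval) -> CMSampleTimingInfo? {
        let newTimeStamp = CMTime(
            seconds: timestamp - videoCaptureLatency,
            preferredTimescale: 1000000000
        )
        // Same guard as the diff: only advance (or hold) the clock.
        guard presentationTimeStamp <= newTimeStamp else {
            return nil
        }
        presentationTimeStamp = newTimeStamp
        return CMSampleTimingInfo(
            duration: CMTime(seconds: targetTimestamp - timestamp, preferredTimescale: 1000000000),
            presentationTimeStamp: newTimeStamp,
            decodeTimeStamp: .invalid
        )
    }
}
```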
