Commit
Take timestamps into consideration in VideoTrackScreenObject.
shogo4405 committed Nov 18, 2024
1 parent eb1d77f commit 38502f6
Showing 8 changed files with 91 additions and 10 deletions.
HaishinKit.xcodeproj/project.pbxproj (4 changes: 4 additions & 0 deletions)
@@ -251,6 +251,7 @@
BCB976DF26107B5600C9A649 /* TSField.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCB976DE26107B5600C9A649 /* TSField.swift */; };
BCB9773F2621812800C9A649 /* ISOTypeBufferUtil.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCB9773E2621812800C9A649 /* ISOTypeBufferUtil.swift */; };
BCB9D79F2BF12AD8008C5B1B /* SampleVideo_360x240_5mb.mp4 in Resources */ = {isa = PBXBuildFile; fileRef = 29B876D91CD70CE700FC07DA /* SampleVideo_360x240_5mb.mp4 */; };
BCBF01762CE0AD1D00C63E7A /* FrameTracker.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCBF01752CE0AD1900C63E7A /* FrameTracker.swift */; };
BCC1A72B264FAC1800661156 /* ESSpecificData.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCC1A72A264FAC1800661156 /* ESSpecificData.swift */; };
BCC2237E2C1468C700F5F234 /* SRTHaishinKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = BCCC45962AA289FA0016EFE8 /* SRTHaishinKit.framework */; };
BCC2237F2C1468C700F5F234 /* SRTHaishinKit.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = BCCC45962AA289FA0016EFE8 /* SRTHaishinKit.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
@@ -714,6 +715,7 @@
BCABED202BDE23C600CC7E73 /* AudioNode+DebugExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AudioNode+DebugExtension.swift"; sourceTree = "<group>"; };
BCB976DE26107B5600C9A649 /* TSField.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TSField.swift; sourceTree = "<group>"; };
BCB9773E2621812800C9A649 /* ISOTypeBufferUtil.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ISOTypeBufferUtil.swift; sourceTree = "<group>"; };
BCBF01752CE0AD1900C63E7A /* FrameTracker.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FrameTracker.swift; sourceTree = "<group>"; };
BCC1A72A264FAC1800661156 /* ESSpecificData.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ESSpecificData.swift; sourceTree = "<group>"; };
BCC4F4142AD6FC1100954EF5 /* IOTellyUnit.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOTellyUnit.swift; sourceTree = "<group>"; };
BCC9E9082636FF7400948774 /* DataBufferTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DataBufferTests.swift; sourceTree = "<group>"; };
@@ -882,6 +884,7 @@
BC0D236C26331BAB001DDA0C /* DataBuffer.swift */,
29B876671CD70AB300FC07DA /* DataConvertible.swift */,
2976A4851D4903C300B53EF2 /* DeviceUtil.swift */,
BCBF01752CE0AD1900C63E7A /* FrameTracker.swift */,
BC32E88729C9971100051507 /* InstanceHolder.swift */,
2942424C1CF4C01300D65DCB /* MD5.swift */,
2942A4F721A9418A004E1BEE /* Running.swift */,
@@ -1861,6 +1864,7 @@
BC11023E2917C35B00D48035 /* CVPixelBufferPool+Extension.swift in Sources */,
29C2631C1D0083B50098D4EF /* IOVideoUnit.swift in Sources */,
29B876B41CD70B2800FC07DA /* RTMPSharedObject.swift in Sources */,
BCBF01762CE0AD1D00C63E7A /* FrameTracker.swift in Sources */,
2901A4EE1D437170002BBD23 /* MediaLink.swift in Sources */,
29B876941CD70AFE00FC07DA /* SoundTransform.swift in Sources */,
29DF20662312A436004057C3 /* RTMPSocketCompatible.swift in Sources */,
Sources/IO/IOVideoMixer.swift (4 changes: 4 additions & 0 deletions)
@@ -41,6 +41,10 @@ final class IOVideoMixer<T: IOVideoMixerDelegate> {
for screen in screens where screen.track == track {
screen.enqueue(sampleBuffer)
}
if track == settings.mainTrack {
let diff = ceil((screen.targetTimestamp.value - sampleBuffer.presentationTimeStamp.seconds) * 10000) / 10000
screen.videoCaptureLatency.mutate { $0 = diff }
}
case .passthrough:
if settings.mainTrack == track {
outputSampleBuffer(sampleBuffer)
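For context, a minimal sketch of the latency bookkeeping this hunk adds. CaptureLatencyProbe is a hypothetical stand-in: the real code stores the value in Screen's Atomic<TimeInterval> wrapper, but the arithmetic is the same, i.e. the gap between the choreographer's next render deadline and the incoming main-track buffer's PTS, rounded up to 0.1 ms.

```swift
import CoreMedia
import Foundation

// Hypothetical stand-in for the Screen-side bookkeeping.
final class CaptureLatencyProbe {
    /// The choreographer's next-frame deadline, in seconds.
    var targetTimestamp: TimeInterval = 0
    /// The measured capture-to-render latency, in seconds.
    private(set) var videoCaptureLatency: TimeInterval = 0

    func update(with sampleBuffer: CMSampleBuffer) {
        let gap = targetTimestamp - sampleBuffer.presentationTimeStamp.seconds
        // ceil(x * 10000) / 10000 quantizes to 0.1 ms, as in the hunk above.
        videoCaptureLatency = ceil(gap * 10000) / 10000
    }
}
```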
Sources/IO/IOVideoUnit.swift (4 changes: 1 addition & 3 deletions)
@@ -177,15 +177,13 @@ final class IOVideoUnit: IOUnit {
}
let capture = self.capture(for: track)
configuration?(capture, nil)
videoMixer.reset(track)
try capture?.attachDevice(device, videoUnit: self)
}
if device != nil && view != nil {
// Start capturing if not running.
mixer?.session.startRunning()
}
if device == nil {
videoMixer.reset(track)
}
}

#if os(iOS) || os(tvOS) || os(macOS)
Sources/Screen/Screen.swift (22 changes: 17 additions & 5 deletions)
@@ -24,6 +24,7 @@ public final class Screen: ScreenObjectContainerConvertible {
public static let size = CGSize(width: 1280, height: 720)

private static let lockFrags = CVPixelBufferLockFlags(rawValue: 0)
private static let preferredTimescale: CMTimeScale = 1000000000

/// The total number of child screen objects.
public var childCounts: Int {
@@ -79,6 +80,10 @@
}
}
#endif

var videoCaptureLatency: Atomic<TimeInterval> = .init(0)
private(set) var targetTimestamp: Atomic<TimeInterval> = .init(0)

weak var observer: (any ScreenObserver)?
private var root: ScreenObjectContainer = .init()
private(set) var renderer = ScreenRendererByCPU()
@@ -87,7 +92,6 @@
choreographer.delegate = self
return choreographer
}()
private var timeStamp: CMTime = .invalid
private var attributes: [NSString: NSObject] {
return [
kCVPixelBufferPixelFormatTypeKey: NSNumber(value: kCVPixelFormatType_32ARGB),
@@ -102,6 +106,7 @@
outputFormat = nil
}
}
private var presentationTimeStamp: CMTime = .zero

/// Adds the specified screen object as a child of the current screen object container.
public func addChild(_ child: ScreenObject?) throws {
@@ -122,6 +127,7 @@
defer {
sampleBuffer.imageBuffer?.unlockBaseAddress(Self.lockFrags)
}
renderer.presentationTimeStamp = sampleBuffer.presentationTimeStamp
renderer.setTarget(sampleBuffer.imageBuffer)
if let dimensions = sampleBuffer.formatDescription?.dimensions {
root.size = dimensions.size
@@ -156,6 +162,9 @@ extension Screen: Running {
extension Screen: ChoreographerDelegate {
// MARK: ChoreographerDelegate
func choreographer(_ choreographer: some Choreographer, didFrame timestamp: TimeInterval, targetTimestamp: TimeInterval) {
defer {
self.targetTimestamp.mutate { $0 = targetTimestamp }
}
var pixelBuffer: CVPixelBuffer?
pixelBufferPool?.createPixelBuffer(&pixelBuffer)
guard let pixelBuffer else {
@@ -174,13 +183,16 @@
if let dictionary = CVBufferGetAttachments(pixelBuffer, .shouldNotPropagate) {
CVBufferSetAttachments(pixelBuffer, dictionary, .shouldPropagate)
}
let now = CMClock.hostTimeClock.time
let presentationTimeStamp = CMTime(seconds: timestamp - videoCaptureLatency.value, preferredTimescale: Self.preferredTimescale)
guard self.presentationTimeStamp <= presentationTimeStamp else {
return
}
self.presentationTimeStamp = presentationTimeStamp
var timingInfo = CMSampleTimingInfo(
duration: timeStamp == .invalid ? .zero : now - timeStamp,
presentationTimeStamp: now,
duration: CMTime(seconds: targetTimestamp - timestamp, preferredTimescale: Self.preferredTimescale),
presentationTimeStamp: presentationTimeStamp,
decodeTimeStamp: .invalid
)
timeStamp = now
var sampleBuffer: CMSampleBuffer?
guard CMSampleBufferCreateReadyWithImageBuffer(
allocator: kCFAllocatorDefault,
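Taken together, the Screen hunks replace the old host-clock stamping (the removed timeStamp/CMClock.hostTimeClock lines) with choreographer-driven timing. A simplified sketch of the new path follows; FrameTiming is a hypothetical condensation, not a type in the codebase.

```swift
import CoreMedia
import Foundation

// Hypothetical condensation of Screen's new timing path.
struct FrameTiming {
    static let preferredTimescale: CMTimeScale = 1_000_000_000
    private var lastPresentationTimeStamp: CMTime = .zero

    // Returns nil when the frame should be skipped to keep PTS monotonic.
    mutating func makeTimingInfo(
        timestamp: TimeInterval,
        targetTimestamp: TimeInterval,
        videoCaptureLatency: TimeInterval
    ) -> CMSampleTimingInfo? {
        let pts = CMTime(
            seconds: timestamp - videoCaptureLatency,
            preferredTimescale: Self.preferredTimescale
        )
        // Monotonicity guard: never hand a regressing PTS downstream.
        guard lastPresentationTimeStamp <= pts else {
            return nil
        }
        lastPresentationTimeStamp = pts
        return CMSampleTimingInfo(
            // The frame lasts until the next expected render deadline.
            duration: CMTime(
                seconds: targetTimestamp - timestamp,
                preferredTimescale: Self.preferredTimescale
            ),
            presentationTimeStamp: pts,
            decodeTimeStamp: .invalid
        )
    }
}
```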
Sources/Screen/ScreenObject.swift (20 changes: 18 additions & 2 deletions)
@@ -207,6 +207,7 @@ public final class ImageScreenObject: ScreenObject {

/// An object that manages offscreen rendering of a video track source.
public final class VideoTrackScreenObject: ScreenObject, ChromaKeyProcessable {
static let capacity: Int = 3
public var chromaKeyColor: CGColor?

/// Specifies the track number used to display the visual content.
@@ -229,6 +230,10 @@
}
}

public var frameRate: Int {
frameTracker.frameRate
}

override var blendMode: ScreenObject.BlendMode {
if 0.0 < cornerRadius || chromaKeyColor != nil {
return .alpha
@@ -238,13 +243,14 @@

private var queue: TypedBlockQueue<CMSampleBuffer>?
private var effects: [VideoEffect] = .init()
private var frameTracker = FrameTracker()

/// Create a screen object.
override public init() {
super.init()
horizontalAlignment = .center
do {
queue = try TypedBlockQueue(capacity: 1, handlers: .outputPTSSortedSampleBuffers)
queue = try TypedBlockQueue(capacity: Self.capacity, handlers: .outputPTSSortedSampleBuffers)
} catch {
logger.error(error)
}
@@ -269,9 +275,11 @@
}

override public func makeImage(_ renderer: some ScreenRenderer) -> CGImage? {
guard let sampleBuffer = queue?.dequeue(), let pixelBuffer = sampleBuffer.imageBuffer else {
guard let sampleBuffer = queue?.dequeue(renderer.presentationTimeStamp),
let pixelBuffer = sampleBuffer.imageBuffer else {
return nil
}
frameTracker.update(sampleBuffer.presentationTimeStamp)
// Resizing before applying the filter for performance optimization.
var image = CIImage(cvPixelBuffer: pixelBuffer).transformed(by: videoGravity.scale(
bounds.size,
@@ -304,12 +312,20 @@
}
}

override public func draw(_ renderer: some ScreenRenderer) {
super.draw(renderer)
if queue?.isEmpty == false {
invalidateLayout()
}
}

func enqueue(_ sampleBuffer: CMSampleBuffer) {
try? queue?.enqueue(sampleBuffer)
invalidateLayout()
}

func reset() {
frameTracker.clear()
try? queue?.reset()
invalidateLayout()
}
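A hypothetical driver showing how the pieces above cooperate on each render pass. It assumes module-internal access, since enqueue(_:) is not public, and renderPass itself is not part of the API.

```swift
import CoreMedia

// Hypothetical per-frame flow, assuming module-internal access.
func renderPass(
    object: VideoTrackScreenObject,
    renderer: some ScreenRenderer,
    captured: [CMSampleBuffer]
) {
    // Capture side: up to three PTS-sorted buffers are retained.
    for sampleBuffer in captured {
        object.enqueue(sampleBuffer)
    }
    // Render side: makeImage dequeues the newest buffer at or before
    // renderer.presentationTimeStamp and feeds the frame tracker.
    _ = object.makeImage(renderer)
    print("effective track frame rate:", object.frameRate)
}
```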
Sources/Screen/ScreenRenderer.swift (4 changes: 4 additions & 0 deletions)
@@ -11,6 +11,8 @@ public protocol ScreenRenderer: AnyObject {
var backgroundColor: CGColor { get set }
/// The current screen bounds.
var bounds: CGRect { get }
/// The current presentationTimeStamp.
var presentationTimeStamp: CMTime { get }
/// Layouts a screen object.
func layout(_ screenObject: ScreenObject)
/// Draws a screen object.
@@ -24,6 +26,7 @@ final class ScreenRendererByCPU: ScreenRenderer {
static let doNotTile = vImage_Flags(kvImageDoNotTile)

var bounds: CGRect = .init(origin: .zero, size: Screen.size)
var presentationTimeStamp: CMTime = .zero

lazy var context = {
guard let deive = MTLCreateSystemDefaultDevice() else {
@@ -64,6 +67,7 @@
}
}
}

private var format = vImage_CGImageFormat(
bitsPerComponent: 8,
bitsPerPixel: 32,
Sources/Util/FrameTracker.swift (26 changes: 26 additions & 0 deletions)
@@ -0,0 +1,26 @@
import CoreMedia

struct FrameTracker {
static let seconds = 1.0

private(set) var frameRate: Int = 0
private var count = 0
private var rotated: CMTime = .zero

init() {
}

mutating func update(_ time: CMTime) {
count += 1
if Self.seconds <= (time - rotated).seconds {
rotated = time
frameRate = count
count = 0
}
}

mutating func clear() {
count = 0
rotated = .zero
}
}
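A quick usage sketch of FrameTracker (an internal type, so this assumes module-internal access): fed steady ~30 fps timestamps, frameRate settles on the number of update calls that land in each rotated one-second window.

```swift
import CoreMedia

var tracker = FrameTracker()
for frame in 0...35 {
    // 30 fps: frame k arrives at k/30 seconds.
    tracker.update(CMTime(seconds: Double(frame) / 30.0, preferredTimescale: 1_000_000_000))
}
// Frames 0...30 fall inside the first rotated window, so this prints 31.
print(tracker.frameRate)
```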
Sources/Util/TypedBlockQueue.swift (17 changes: 17 additions & 0 deletions)
@@ -46,3 +46,20 @@ final class TypedBlockQueue<T: AnyObject> {
try queue.reset()
}
}

extension TypedBlockQueue where T == CMSampleBuffer {
func dequeue(_ presentationTimeStamp: CMTime) -> CMSampleBuffer? {
var result: CMSampleBuffer?
while !queue.isEmpty {
guard let head else {
break
}
if head.presentationTimeStamp <= presentationTimeStamp {
result = dequeue()
} else {
return result
}
}
return result
}
}
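A worked example of the new dequeue(_:) semantics. The helper below is hypothetical (and, like dequeue(_:), module-internal): it fetches the newest frame not past the given clock.

```swift
import CoreMedia

// Hypothetical helper; newestFrame(in:notAfter:) is not part of the API.
func newestFrame(
    in queue: TypedBlockQueue<CMSampleBuffer>,
    notAfter seconds: Double
) -> CMSampleBuffer? {
    queue.dequeue(CMTime(seconds: seconds, preferredTimescale: 1_000_000_000))
}

// With buffers stamped 1 s, 2 s and 3 s queued:
//   newestFrame(in: queue, notAfter: 2.5)  // drains 1 s and 2 s, returns the 2 s buffer
//   newestFrame(in: queue, notAfter: 2.5)  // 3 s is still ahead, returns nil
```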
