tvOS 17.0 for AVCaptureSession.
shogo4405 committed Sep 16, 2023
1 parent ff27e71 commit 21a5538
Showing 10 changed files with 211 additions and 201 deletions.
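
The change is the same across every Swift diff below: compile-time #if os(iOS) || os(macOS) guards around the AVCaptureSession-related code come out, and @available(tvOS 17.0, *) annotations go in, so the same code now builds for tvOS and is gated at runtime instead. A condensed sketch of the before/after shape (extension bodies elided):

// Before: capture extensions compiled only on iOS/macOS.
#if os(iOS) || os(macOS)
extension AVCaptureDevice { /* ... */ }
#endif

// After: the declaration is visible to tvOS, usable from tvOS 17 onward.
@available(tvOS 17.0, *)
extension AVCaptureDevice { /* ... */ }

// Call sites on tvOS then guard at runtime:
if #available(tvOS 17.0, *) {
    // AVCaptureSession / AVCaptureDevice work is safe here.
}
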
12 changes: 10 additions & 2 deletions HaishinKit.xcodeproj/project.pbxproj
@@ -170,6 +170,8 @@
 BC34DFF025EBB53F005F975A /* Logboard.xcframework in Frameworks */ = {isa = PBXBuildFile; fileRef = BC34DFD125EBB12C005F975A /* Logboard.xcframework */; };
 BC34E00225EBB59C005F975A /* Logboard.xcframework in Frameworks */ = {isa = PBXBuildFile; fileRef = BC34DFD125EBB12C005F975A /* Logboard.xcframework */; };
 BC34FA0B286CB90A00EFAF27 /* PiPHKView.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC34FA0A286CB90A00EFAF27 /* PiPHKView.swift */; };
+BC3802122AB5E770001AE399 /* IOVideoCaptureUnit.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC3802112AB5E770001AE399 /* IOVideoCaptureUnit.swift */; };
+BC3802142AB5E7CC001AE399 /* IOAudioCaptureUnit.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC3802132AB5E7CC001AE399 /* IOAudioCaptureUnit.swift */; };
 BC3E384429C216BB007CD972 /* ADTSReaderTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC3E384329C216BB007CD972 /* ADTSReaderTests.swift */; };
 BC44A1A923D31E92002D4297 /* AudioCodecRingBuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC44A1A823D31E92002D4297 /* AudioCodecRingBuffer.swift */; };
 BC4914A228DDD33D009E2DF6 /* VTSessionConvertible.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC4914A128DDD33D009E2DF6 /* VTSessionConvertible.swift */; };
@@ -576,6 +578,8 @@
 BC32E88729C9971100051507 /* InstanceHolder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = InstanceHolder.swift; sourceTree = "<group>"; };
 BC34DFD125EBB12C005F975A /* Logboard.xcframework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.xcframework; name = Logboard.xcframework; path = Carthage/Build/Logboard.xcframework; sourceTree = "<group>"; };
 BC34FA0A286CB90A00EFAF27 /* PiPHKView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PiPHKView.swift; sourceTree = "<group>"; };
+BC3802112AB5E770001AE399 /* IOVideoCaptureUnit.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOVideoCaptureUnit.swift; sourceTree = "<group>"; };
+BC3802132AB5E7CC001AE399 /* IOAudioCaptureUnit.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IOAudioCaptureUnit.swift; sourceTree = "<group>"; };
 BC3E384329C216BB007CD972 /* ADTSReaderTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ADTSReaderTests.swift; sourceTree = "<group>"; };
 BC44A1A823D31E92002D4297 /* AudioCodecRingBuffer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioCodecRingBuffer.swift; sourceTree = "<group>"; wrapsLines = 1; };
 BC4914A128DDD33D009E2DF6 /* VTSessionConvertible.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VTSessionConvertible.swift; sourceTree = "<group>"; };
@@ -1044,6 +1048,7 @@
 2941746A22D069B300A2944F /* AudioEffect.swift */,
 BC9F9C7726F8C16600B01ED0 /* Choreographer.swift */,
 299B13261D3B751400A1E8F5 /* HKView.swift */,
+BC3802132AB5E7CC001AE399 /* IOAudioCaptureUnit.swift */,
 BC31DBD12A653D1600C4DEA3 /* IOAudioMonitor.swift */,
 BC5019C02A6D266B0046E02F /* IOAudioMonitorRingBuffer.swift */,
 29B876891CD70AFE00FC07DA /* IOAudioUnit.swift */,
@@ -1053,6 +1058,7 @@
 BCA2252B293CC5B600DD7CB2 /* IOScreenCaptureUnit.swift */,
 299B131C1D35272D00A1E8F5 /* IOUIScreenCaptureUnit.swift */,
 BC570B4728E9ACC10098A12C /* IOUnit.swift */,
+BC3802112AB5E770001AE399 /* IOVideoCaptureUnit.swift */,
 29B8768E1CD70AFE00FC07DA /* IOVideoUnit.swift */,
 BC959EEE296EE4190067BA97 /* ImageTransform.swift */,
 2999C3742071138F00892E55 /* MTHKView.swift */,
@@ -1715,6 +1721,7 @@
 29B876871CD70AE800FC07DA /* TSProgram.swift in Sources */,
 BC4C9EAF23F2E736004A14F2 /* AudioStreamBasicDescription+Extension.swift in Sources */,
 BC558268240BB40E00011AC0 /* RTMPStreamInfo.swift in Sources */,
+BC3802122AB5E770001AE399 /* IOVideoCaptureUnit.swift in Sources */,
 298BCF331DD4C44A007FF86A /* AnyUtil.swift in Sources */,
 29B876B01CD70B2800FC07DA /* RTMPConnection.swift in Sources */,
 BCC1A72B264FAC1800661156 /* ESSpecificData.swift in Sources */,
@@ -1729,6 +1736,7 @@
 BC1DC4FB2A02868900E928ED /* FLVVideoFourCC.swift in Sources */,
 2976A47E1D48C5C700B53EF2 /* IORecorder.swift in Sources */,
 BC110257292E661E00D48035 /* MultiCamCaptureSettings.swift in Sources */,
+BC3802142AB5E7CC001AE399 /* IOAudioCaptureUnit.swift in Sources */,
 29B876B21CD70B2800FC07DA /* RTMPMuxer.swift in Sources */,
 2958912E1EEB8F4100CE51E1 /* FLVSoundType.swift in Sources */,
 BC0D236D26331BAB001DDA0C /* DataBuffer.swift in Sources */,
@@ -2336,7 +2344,7 @@
 FRAMEWORK_SEARCH_PATHS = "$(inherited)";
 GCC_OPTIMIZATION_LEVEL = 0;
 INFOPLIST_FILE = "$(SRCROOT)/Examples/iOS/Info.plist";
-IPHONEOS_DEPLOYMENT_TARGET = 11.0;
+IPHONEOS_DEPLOYMENT_TARGET = 12.0;
 LD_RUNPATH_SEARCH_PATHS = (
 "$(inherited)",
 "@executable_path/Frameworks",
@@ -2368,7 +2376,7 @@
 ENABLE_BITCODE = NO;
 FRAMEWORK_SEARCH_PATHS = "$(inherited)";
 INFOPLIST_FILE = "$(SRCROOT)/Examples/iOS/Info.plist";
-IPHONEOS_DEPLOYMENT_TARGET = 11.0;
+IPHONEOS_DEPLOYMENT_TARGET = 12.0;
 LD_RUNPATH_SEARCH_PATHS = (
 "$(inherited)",
 "@executable_path/Frameworks",
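
The project-file hunks above are bookkeeping for the rest of the commit: they register the new IOVideoCaptureUnit.swift and IOAudioCaptureUnit.swift sources (build files, file references, group membership, and the compile-sources phase), and both example-app build configurations raise IPHONEOS_DEPLOYMENT_TARGET from 11.0 to 12.0.
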
3 changes: 1 addition & 2 deletions Sources/Extension/AVCaptureDevice+Extension.swift
@@ -1,7 +1,7 @@
 import AVFoundation
 import Foundation
 
-#if os(iOS) || os(macOS)
+@available(tvOS 17.0, *)
 extension AVCaptureDevice {
     func videoFormat(width: Int32, height: Int32, frameRate: Float64, isMultiCamSupported: Bool) -> AVCaptureDevice.Format? {
         if isMultiCamSupported {
@@ -19,4 +19,3 @@ extension AVCaptureDevice {
         }
     }
 }
-#endif
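
With the #if guard gone, tvOS callers reach this helper through a runtime check instead. A hypothetical call site, for illustration only (videoFormat(width:height:frameRate:isMultiCamSupported:) is internal to HaishinKit, and the device lookup here is not part of the commit):

if #available(tvOS 17.0, *) {
    // Ask the default camera for a 1280x720 format that can do 30 fps.
    if let device = AVCaptureDevice.default(for: .video),
       let format = device.videoFormat(width: 1280, height: 720, frameRate: 30, isMultiCamSupported: false) {
        print("matched format:", format)
    }
}
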
3 changes: 1 addition & 2 deletions Sources/Extension/AVCaptureDevice.Format+Extension.swift
@@ -16,7 +16,7 @@ extension AVCaptureDevice.Format {
 }
 #endif
 
-#if os(iOS) || os(macOS)
+@available(tvOS 17.0, *)
 extension AVCaptureDevice.Format {
     func isFrameRateSupported(_ frameRate: Float64) -> Bool {
         var durations: [CMTime] = []
@@ -41,4 +41,3 @@ extension AVCaptureDevice.Format {
         return false
     }
 }
-#endif
3 changes: 2 additions & 1 deletion Sources/Extension/AVCaptureSession+Extension.swift
@@ -17,7 +17,8 @@ extension AVCaptureSession {
         }
     }
 }
-#elseif os(iOS) || os(macOS)
+#else
+@available(tvOS 17.0, *)
 extension AVCaptureSession {
     @available(iOS, obsoleted: 16.0)
     var isMultitaskingCameraAccessSupported: Bool {
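
(The @available(iOS, obsoleted: 16.0) marking keeps this backfill property out of reach on iOS 16 and later, where AVCaptureSession ships its own isMultitaskingCameraAccessSupported; switching #elseif to #else lets tvOS fall into the same branch.)
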
5 changes: 3 additions & 2 deletions Sources/Extension/AVCaptureSession.Preset+Extension.swift
@@ -8,8 +8,10 @@ extension AVCaptureSession.Preset {
 }
 #endif
 
-#if os(iOS) || os(macOS)
+@available(tvOS 17.0, *)
 extension AVCaptureSession.Preset {
+    static let `default`: AVCaptureSession.Preset = .hd1280x720
+
     var width: Int32? {
         switch self {
         case .hd1920x1080:
@@ -40,4 +42,3 @@
         }
     }
 }
-#endif
3 changes: 1 addition & 2 deletions Sources/Extension/AVFrameRateRange+Extension.swift
@@ -1,7 +1,7 @@
 import AVFoundation
 import Foundation
 
-#if os(iOS) || os(macOS)
+@available(tvOS 17.0, *)
 extension AVFrameRateRange {
     func clamp(rate: Float64) -> Float64 {
         max(minFrameRate, min(maxFrameRate, rate))
@@ -11,4 +11,3 @@ extension AVFrameRateRange {
         (minFrameRate...maxFrameRate) ~= frameRate
     }
 }
-#endif
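
For reference, a small sketch of how the clamp helper above behaves inside the framework (the wrapper function here is illustrative, not part of the API):

@available(tvOS 17.0, *)
func illustrateClamp(for device: AVCaptureDevice) {
    guard let range = device.activeFormat.videoSupportedFrameRateRanges.first else {
        return
    }
    // clamp(rate:) pins the request into [minFrameRate, maxFrameRate],
    // so 120 becomes maxFrameRate when the range tops out lower.
    let rate = range.clamp(rate: 120)
    print("clamped rate:", rate)
}
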
34 changes: 34 additions & 0 deletions Sources/Media/IOAudioCaptureUnit.swift
@@ -0,0 +1,34 @@
+import AVFoundation
+import Foundation
+
+#if !os(tvOS)
+@available(tvOS 17.0, *)
+final class IOAudioCaptureUnit: IOCaptureUnit {
+    typealias Output = AVCaptureAudioDataOutput
+
+    private(set) var device: AVCaptureDevice?
+    var input: AVCaptureInput?
+    var output: Output?
+    var connection: AVCaptureConnection?
+
+    func attachDevice(_ device: AVCaptureDevice?, audioUnit: IOAudioUnit) throws {
+        setSampleBufferDelegate(nil)
+        detachSession(audioUnit.mixer?.session)
+        guard let device else {
+            self.device = nil
+            input = nil
+            output = nil
+            return
+        }
+        self.device = device
+        input = try AVCaptureDeviceInput(device: device)
+        output = AVCaptureAudioDataOutput()
+        attachSession(audioUnit.mixer?.session)
+        setSampleBufferDelegate(audioUnit)
+    }
+
+    func setSampleBufferDelegate(_ audioUnit: IOAudioUnit?) {
+        output?.setSampleBufferDelegate(audioUnit, queue: audioUnit?.lockQueue)
+    }
+}
+#endif
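
Worth noting in attachDevice(_:audioUnit:) above: the sample-buffer delegate is cleared and the unit detached from the session before anything is reconfigured, so no callbacks can fire mid-swap, and passing nil as the device doubles as the detach path. The same teardown-first order appears in IOVideoCaptureUnit.attachDevice in the code removed from IOCaptureUnit.swift below.
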
186 changes: 2 additions & 184 deletions Sources/Media/IOCaptureUnit.swift
@@ -1,11 +1,11 @@
-#if os(iOS) || os(macOS)
 import AVFoundation
 import Foundation
 
 enum IOCaptureUnitError: Error {
     case noDeviceAvailable
 }
 
+@available(tvOS 17.0, *)
 protocol IOCaptureUnit {
     associatedtype Output: AVCaptureOutput
 
@@ -14,6 +14,7 @@ protocol IOCaptureUnit {
     var connection: AVCaptureConnection? { get set }
 }
 
+@available(tvOS 17.0, *)
 extension IOCaptureUnit {
     func attachSession(_ session: AVCaptureSession?) {
         guard let session else {
@@ -56,186 +57,3 @@ extension IOCaptureUnit {
         }
     }
 }
-
-/// An object that provides the interface to control the AVCaptureDevice's transport behavior.
-public class IOVideoCaptureUnit: IOCaptureUnit {
-    /// The default videoSettings for a device.
-    public static let defaultVideoSettings: [NSString: AnyObject] = [
-        kCVPixelBufferPixelFormatTypeKey: NSNumber(value: kCVPixelFormatType_32BGRA)
-    ]
-
-    typealias Output = AVCaptureVideoDataOutput
-
-    /// The current video device object.
-    public private(set) var device: AVCaptureDevice?
-    var input: AVCaptureInput?
-    var output: Output? {
-        didSet {
-            output?.alwaysDiscardsLateVideoFrames = true
-            output?.videoSettings = IOVideoCaptureUnit.defaultVideoSettings as [String: Any]
-        }
-    }
-    var connection: AVCaptureConnection?
-
-    /// Specifies the videoOrientation indicates whether to rotate the video flowing through the connection to a given orientation.
-    public var videoOrientation: AVCaptureVideoOrientation = .portrait {
-        didSet {
-            output?.connections.filter { $0.isVideoOrientationSupported }.forEach {
-                $0.videoOrientation = videoOrientation
-            }
-        }
-    }
-
-    /// Spcifies the video mirroed indicates whether the video flowing through the connection should be mirrored about its vertical axis.
-    public var isVideoMirrored = false {
-        didSet {
-            output?.connections.filter { $0.isVideoMirroringSupported }.forEach {
-                $0.isVideoMirrored = isVideoMirrored
-            }
-        }
-    }
-
-    #if os(iOS)
-    /// Specifies the preferredVideoStabilizationMode most appropriate for use with the connection.
-    public var preferredVideoStabilizationMode: AVCaptureVideoStabilizationMode = .off {
-        didSet {
-            output?.connections.filter { $0.isVideoStabilizationSupported }.forEach {
-                $0.preferredVideoStabilizationMode = preferredVideoStabilizationMode
-            }
-        }
-    }
-    #endif
-
-    func attachDevice(_ device: AVCaptureDevice?, videoUnit: IOVideoUnit) throws {
-        setSampleBufferDelegate(nil)
-        detachSession(videoUnit.mixer?.session)
-        guard let device else {
-            self.device = nil
-            input = nil
-            output = nil
-            connection = nil
-            return
-        }
-        self.device = device
-        input = try AVCaptureDeviceInput(device: device)
-        output = AVCaptureVideoDataOutput()
-        #if os(iOS)
-        if let output, #available(iOS 13, *), let port = input?.ports.first(where: { $0.mediaType == .video && $0.sourceDeviceType == device.deviceType && $0.sourceDevicePosition == device.position }) {
-            connection = AVCaptureConnection(inputPorts: [port], output: output)
-        } else {
-            connection = nil
-        }
-        #else
-        if let output, let port = input?.ports.first(where: { $0.mediaType == .video }) {
-            connection = AVCaptureConnection(inputPorts: [port], output: output)
-        } else {
-            connection = nil
-        }
-        #endif
-        attachSession(videoUnit.mixer?.session)
-        output?.connections.forEach {
-            if $0.isVideoMirroringSupported {
-                $0.isVideoMirrored = isVideoMirrored
-            }
-            if $0.isVideoOrientationSupported {
-                $0.videoOrientation = videoOrientation
-            }
-            #if os(iOS)
-            if $0.isVideoStabilizationSupported {
-                $0.preferredVideoStabilizationMode = preferredVideoStabilizationMode
-            }
-            #endif
-        }
-        setSampleBufferDelegate(videoUnit)
-    }
-
-    @available(iOS, unavailable)
-    func attachScreen(_ screen: AVCaptureScreenInput?, videoUnit: IOVideoUnit) {
-        setSampleBufferDelegate(nil)
-        detachSession(videoUnit.mixer?.session)
-        device = nil
-        input = screen
-        output = AVCaptureVideoDataOutput()
-        connection = nil
-        attachSession(videoUnit.mixer?.session)
-        setSampleBufferDelegate(videoUnit)
-    }
-
-    func setFrameRate(_ frameRate: Float64) {
-        guard let device else {
-            return
-        }
-        do {
-            try device.lockForConfiguration()
-            if device.activeFormat.isFrameRateSupported(frameRate) {
-                device.activeVideoMinFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate))
-                device.activeVideoMaxFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate))
-            } else {
-                if let format = device.videoFormat(
-                    width: device.activeFormat.formatDescription.dimensions.width,
-                    height: device.activeFormat.formatDescription.dimensions.height,
-                    frameRate: frameRate,
-                    isMultiCamSupported: device.activeFormat.isMultiCamSupported
-                ) {
-                    device.activeFormat = format
-                    device.activeVideoMinFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate))
-                    device.activeVideoMaxFrameDuration = CMTime(value: 100, timescale: CMTimeScale(100 * frameRate))
-                }
-            }
-            device.unlockForConfiguration()
-        } catch {
-            logger.error("while locking device for fps:", error)
-        }
-    }
-
-    func setTorchMode(_ torchMode: AVCaptureDevice.TorchMode) {
-        guard let device, device.isTorchModeSupported(torchMode) else {
-            return
-        }
-        do {
-            try device.lockForConfiguration()
-            device.torchMode = torchMode
-            device.unlockForConfiguration()
-        } catch {
-            logger.error("while setting torch:", error)
-        }
-    }
-
-    func setSampleBufferDelegate(_ videoUnit: IOVideoUnit?) {
-        if let videoUnit {
-            videoOrientation = videoUnit.videoOrientation
-            setFrameRate(videoUnit.frameRate)
-        }
-        output?.setSampleBufferDelegate(videoUnit, queue: videoUnit?.lockQueue)
-    }
-}
-
-class IOAudioCaptureUnit: IOCaptureUnit {
-    typealias Output = AVCaptureAudioDataOutput
-
-    private(set) var device: AVCaptureDevice?
-    var input: AVCaptureInput?
-    var output: Output?
-    var connection: AVCaptureConnection?
-
-    func attachDevice(_ device: AVCaptureDevice?, audioUnit: IOAudioUnit) throws {
-        setSampleBufferDelegate(nil)
-        detachSession(audioUnit.mixer?.session)
-        guard let device else {
-            self.device = nil
-            input = nil
-            output = nil
-            return
-        }
-        self.device = device
-        input = try AVCaptureDeviceInput(device: device)
-        output = AVCaptureAudioDataOutput()
-        attachSession(audioUnit.mixer?.session)
-        setSampleBufferDelegate(audioUnit)
-    }
-
-    func setSampleBufferDelegate(_ audioUnit: IOAudioUnit?) {
-        output?.setSampleBufferDelegate(audioUnit, queue: audioUnit?.lockQueue)
-    }
-}
-#endif
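
The roughly 180 deleted lines are a move, not a removal: IOVideoCaptureUnit and IOAudioCaptureUnit live on as the standalone IOVideoCaptureUnit.swift and IOAudioCaptureUnit.swift files registered in the pbxproj hunks above, leaving IOCaptureUnit.swift with just the error enum, the IOCaptureUnit protocol, and its session attach/detach helpers.
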
6 changes: 0 additions & 6 deletions Sources/Media/IOMixer.swift
@@ -2,15 +2,9 @@ import AVFoundation
 #if canImport(SwiftPMSupport)
 import SwiftPMSupport
 #endif
-
 #if os(iOS)
 import UIKit
 #endif
-#if os(iOS) || os(macOS)
-extension AVCaptureSession.Preset {
-    static let `default`: AVCaptureSession.Preset = .hd1280x720
-}
-#endif
 
 protocol IOMixerDelegate: AnyObject {
     func mixer(_ mixer: IOMixer, didOutput audio: AVAudioPCMBuffer, presentationTimeStamp: CMTime)
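
Another move: the AVCaptureSession.Preset.default (.hd1280x720) definition dropped here resurfaces in AVCaptureSession.Preset+Extension.swift above, where it sits under the same @available(tvOS 17.0, *) annotation as the rest of the preset helpers.
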
(The tenth changed file did not load on this page; judging by the pbxproj entries above and the 157 additions unaccounted for, it is the new Sources/Media/IOVideoCaptureUnit.swift.)
