AVFoundation watchOS xcode9 beta1
# AVFoundation.framework
## mandel
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetDownloadStorageManager.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetDownloadStorageManager.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetDownloadStorageManager.h 1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetDownloadStorageManager.h 2017-05-24 00:41:53.000000000 -0400
@@ -0,0 +1,127 @@
+/*
+ File: AVAssetDownloadStorageManager.h
+
+ Framework: AVFoundation
+
+ Copyright 2017 Apple Inc. All rights reserved.
+
+ */
+
+/*!
+ @class AVAssetDownloadStorageManager
+
+ @abstract An AVAssetDownloadStorageManager manages the policy for automatic purging of downloaded AVAssets. The policy is vended as an AVAssetDownloadStorageManagementPolicy object.
+
+ @discussion When a storage management policy needs to be set on an asset, the sharedDownloadStorageManager singleton must be fetched first.
+ The new policy can then be set using setStorageManagementPolicy:forURL: with the location of the downloaded asset.
+ */
+
+#import <AVFoundation/AVBase.h>
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class AVAssetDownloadStorageManagementPolicy;
+/*!
+ @group AVAssetDownloadedAssetEvictionPriority string constants
+ @brief Used by AVAssetDownloadStorageManagementPolicy.
+*/
+typedef NSString *AVAssetDownloadedAssetEvictionPriority NS_STRING_ENUM API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(macos, tvos, watchos);
+
+/*!
+ @enum AVAssetDownloadedAssetEvictionPriority
+ @abstract These constants represent the eviction priority of downloaded assets.
+
+ @constant AVAssetDownloadedAssetEvictionPriorityImportant
+ Used to mark assets with the highest priority. They will be the last to be purged.
+ @constant AVAssetDownloadedAssetEvictionPriorityDefault
+ Used to mark assets that have the default priority. They will be the first to be purged.
+*/
+AVF_EXPORT AVAssetDownloadedAssetEvictionPriority const AVAssetDownloadedAssetEvictionPriorityImportant API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(macos, tvos, watchos);
+AVF_EXPORT AVAssetDownloadedAssetEvictionPriority const AVAssetDownloadedAssetEvictionPriorityDefault API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(macos, tvos, watchos);
+
+API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(macos, tvos, watchos)
+@interface AVAssetDownloadStorageManager : NSObject
+
+/*!
+ @method sharedDownloadStorageManager
+ @abstract Returns the singleton instance.
+*/
++ (AVAssetDownloadStorageManager *)sharedDownloadStorageManager;
+
+/*!
+ @method setStorageManagementPolicy:forURL:
+ @abstract Sets the policy for the asset with disk backing at downloadStorageURL.
+ @param downloadStorageURL
+ The location of the downloaded asset.
+*/
+- (void)setStorageManagementPolicy:(AVAssetDownloadStorageManagementPolicy *)storageManagementPolicy forURL:(NSURL *)downloadStorageURL;
+
+/*!
+ @method storageManagementPolicyForURL:
+ @abstract Returns the storage management policy for the asset downloaded at downloadStorageURL.
+ This may be nil if a storageManagementPolicy was never set on the downloaded asset.
+ @param downloadStorageURL
+ The location of the downloaded asset.
+*/
+- (nullable AVAssetDownloadStorageManagementPolicy *)storageManagementPolicyForURL:(NSURL *)downloadStorageURL;
+
+@end
+
+/*!
+ @class AVAssetDownloadStorageManagementPolicy
+
+ @abstract A class to inform the system of a policy for automatic purging of downloaded AVAssets.
+
+ @discussion The system will make a best effort to evict all assets based on expirationDate before evicting based on priority.
+ */
+@class AVAssetDownloadStorageManagementPolicyInternal;
+
+API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(macos, tvos, watchos)
+@interface AVAssetDownloadStorageManagementPolicy : NSObject <NSCopying, NSMutableCopying> {
+@private
+ AVAssetDownloadStorageManagementPolicyInternal *_storageManagementPolicy;
+}
+
+/*
+ @property priority
+ @abstract Indicates the eviction priority of the downloaded asset.
+ @discussion Assets with default priority will be purged before assets with higher priorities.
+ If this is not set, the default priority is used.
+ */
+@property (nonatomic, readonly, copy) AVAssetDownloadedAssetEvictionPriority priority;
+
+/*
+ @property expirationDate
+ @abstract Returns the expiration date of the asset.
+ */
+@property (nonatomic, readonly, copy) NSDate *expirationDate;
+
+@end
+/*!
+ @class AVMutableAssetDownloadStorageManagementPolicy
+
+ @abstract A mutable subclass of AVAssetDownloadStorageManagementPolicy.
+
+ @discussion The system will make a best effort to evict all assets based on expirationDate before evicting based on priority.
+ */
+API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(macos, tvos, watchos)
+@interface AVMutableAssetDownloadStorageManagementPolicy : AVAssetDownloadStorageManagementPolicy
+
+/*
+ @property priority
+ @abstract Indicates the eviction priority of the downloaded asset.
+ @discussion Assets with default priority will be purged before assets with higher priorities.
+ If this is not set, the default priority is used.
+ */
+@property (nonatomic, copy) AVAssetDownloadedAssetEvictionPriority priority;
+
+/*
+ @property expirationDate
+ @abstract Returns the expiration date of the asset.
+ */
+@property (nonatomic, copy) NSDate *expirationDate;
+
+@end
+
+NS_ASSUME_NONNULL_END
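
Putting the new header together, a minimal usage sketch (Objective-C; the helper name and `assetURL` are ours, standing in for the location of an asset previously downloaded via AVAssetDownloadTask — note this API is iOS-only per the availability macros above):

```objc
#import <AVFoundation/AVFoundation.h>

// Minimal sketch, assuming assetURL points at a previously downloaded asset.
// Policy values here are illustrative only.
static void ApplyStoragePolicy(NSURL *assetURL)
{
    AVAssetDownloadStorageManager *manager =
        [AVAssetDownloadStorageManager sharedDownloadStorageManager];

    AVMutableAssetDownloadStorageManagementPolicy *policy =
        [[AVMutableAssetDownloadStorageManagementPolicy alloc] init];
    policy.priority = AVAssetDownloadedAssetEvictionPriorityImportant; // purged last
    policy.expirationDate = [NSDate dateWithTimeIntervalSinceNow:7 * 24 * 60 * 60];

    [manager setStorageManagementPolicy:policy forURL:assetURL];
}
```
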
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVBase.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVBase.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVBase.h 2017-02-22 01:08:53.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVBase.h 2017-05-24 00:28:27.000000000 -0400
@@ -3,7 +3,7 @@
Framework: AVFoundation
- Copyright 2010-2015 Apple Inc. All rights reserved.
+ Copyright 2010-2017 Apple Inc. All rights reserved.
*/
@@ -16,30 +16,20 @@
#define AVF_EXPORT extern
#endif
-// Annotation for classes that inherit -init from NSObject but cannot be usefully initialized using -init
-#define AV_INIT_UNAVAILABLE - (instancetype)init NS_UNAVAILABLE;
+// Annotation for classes that inherit -init and +new from NSObject but cannot be usefully initialized using -init or +new
+#define AV_INIT_UNAVAILABLE - (instancetype)init NS_UNAVAILABLE; \
+ + (instancetype)new NS_UNAVAILABLE;
#ifndef __has_feature
#define __has_feature(FEATURE) 0
#endif
-// Generics
-
-// Use when declaring a variable of a generic type
-#if __has_feature(objc_generics)
- #define AV_GENERIC(BASETYPE, ...) BASETYPE<__VA_ARGS__>
-#else
- #define AV_GENERIC(BASETYPE, ...) BASETYPE
+#ifndef NS_STRING_ENUM
+ #define NS_STRING_ENUM
#endif
-// Use when declaring a generic class interface
-#define AV_GENERIC_CLASS AV_GENERIC
-
-// Use to refer to generic types in a generic class
-#if __has_feature(objc_generics)
- #define AV_PARAMETERIZED_TYPE(TYPENAME, TYPEBOUNDS) TYPENAME
-#else
- #define AV_PARAMETERIZED_TYPE(TYPENAME, TYPEBOUNDS) TYPEBOUNDS
+#ifndef NS_EXTENSIBLE_STRING_ENUM
+ #define NS_EXTENSIBLE_STRING_ENUM
#endif
// Pre-10.12
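
The practical effect of the AV_INIT_UNAVAILABLE change is that +new is now blocked alongside -init. A sketch with a hypothetical adopting class:

```objc
#import <AVFoundation/AVBase.h>

// Hypothetical class, shown only to illustrate the macro expansion.
@interface AVExampleType : NSObject
AV_INIT_UNAVAILABLE
// Xcode 9 expansion:
//   - (instancetype)init NS_UNAVAILABLE;
//   + (instancetype)new NS_UNAVAILABLE;
@end

// Both of these are now compile-time errors (previously only the first was):
//   AVExampleType *a = [[AVExampleType alloc] init];
//   AVExampleType *b = [AVExampleType new];
```
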
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCameraCalibrationData.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCameraCalibrationData.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCameraCalibrationData.h 1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCameraCalibrationData.h 2017-05-24 00:28:27.000000000 -0400
@@ -0,0 +1,171 @@
+/*
+ File: AVCameraCalibrationData.h
+
+ Framework: AVFoundation
+
+ Copyright 2016-2017 Apple Inc. All rights reserved.
+*/
+
+#import <AVFoundation/AVBase.h>
+#import <Foundation/Foundation.h>
+#import <simd/matrix_types.h>
+#import <CoreGraphics/CGGeometry.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class AVCameraCalibrationDataInternal;
+
+/*!
+ @class AVCameraCalibrationData
+ @abstract
+ AVCameraCalibrationData is a model object describing a camera's calibration information.
+
+ @discussion
+ When rendering effects to images produced by cameras, or performing computer vision tasks such as correcting images for geometric distortions, it is necessary to characterize the camera's calibration information, such as its pixel focal length, principal point, lens distortion characteristics, etc. AVCameraCalibrationData provides this information.
+ */
+NS_CLASS_AVAILABLE(10_13, 11_0) __TVOS_AVAILABLE(11_0) __WATCHOS_PROHIBITED
+@interface AVCameraCalibrationData : NSObject
+{
+@private
+ AVCameraCalibrationDataInternal *_internal;
+}
+
+AV_INIT_UNAVAILABLE
+
+/*!
+ @property intrinsicMatrix
+ @abstract
+ A camera's intrinsic (K) matrix describes its geometric properties.
+
+ @discussion
+ The intrinsic matrix allows one to transform 3D coordinates to 2D coordinates on an image plane using the pinhole camera model. All values are expressed in pixels. The elements in the matrix are:
+ / \
+ | fx 0 ox |
+ | 0 fy oy |
+ | 0 0 1 |
+ \ /
+ where fx and fy describe the focal length. For square pixels, their values are identical.
+ ox and oy are the offset of the principal point. The origin is the upper left of the frame.
+ */
+@property(nonatomic, readonly) matrix_float3x3 intrinsicMatrix;
+
+/*!
+ @property intrinsicMatrixReferenceDimensions
+ @abstract
+ The reference frame dimensions used in calculating a camera's principal point.
+
+ @discussion
+ A camera's intrinsic matrix expresses values in pixels with respect to a frame of this width and height.
+ */
+@property(nonatomic, readonly) CGSize intrinsicMatrixReferenceDimensions;
+
+/*!
+ @property extrinsicMatrix
+ @abstract
+ A camera's extrinsic matrix describes its pose (position and direction) in world coordinates.
+
+ @discussion
+ The extrinsic matrix consists of a unitless 3x3 rotation matrix (R) on the left and a translation (t) 3x1 column vector on the right. The translation vector's units are millimeters. The camera's pose is expressed with respect to a reference camera (camera-to-world view). If the rotation matrix is an identity matrix, then this camera is the reference camera. Note that a matrix_float4x3 matrix is column major with 3 rows and 4 columns.
+ / \
+ / \ | r1,1 r1,2 r1,3 | t1 |
+ |R|t| = | r2,1 r2,2 r2,3 | t2 |
+ \ / | r3,1 r3,2 r3,3 | t3 |
+ \ /
+ */
+@property(nonatomic, readonly) matrix_float4x3 extrinsicMatrix;
+
+/*!
+ @property pixelSize
+ @abstract
+ The size of one pixel in millimeters
+ */
+@property(nonatomic, readonly) float pixelSize;
+
+/*!
+ @property lensDistortionLookupTable
+ @abstract
+ An NSData of floats describing the camera lens' radial distortions.
+
+ @discussion
+ Images captured by a camera are geometrically warped by radial distortions in the lens. In order to project from the 2D image plane back into the 3D world, the images must be distortion corrected, or made rectilinear. Lens distortion is modeled using a one-dimensional lookup table of 32-bit float values evenly distributed along a radius from the center of the distortion to the farthest corner, with each value representing an elongation or compression of the radius (1.0 for any given point indicates no elongation). This model assumes radially symmetric lens distortion. When dealing with AVDepthData, the disparity / depth map representations are geometrically distorted to align with images produced by the camera. For more information, see the reference implementation below.
+ */
+@property(nonatomic, readonly) NSData *lensDistortionLookupTable;
+
+/*!
+ @property inverseLensDistortionLookupTable
+ @abstract
+ An NSData of floats describing the inverse lookup table required to reapply the camera lens' radial distortions to a rectified image.
+
+ @discussion
+ See lensDistortionLookupTable. If you've rectified an image by removing the distortions characterized by the lensDistortionLookupTable, and now wish to go back to geometrically distorted, you may use the inverseLensDistortionLookupTable. For more information, see the reference implementation below.
+ */
+@property(nonatomic, readonly) NSData *inverseLensDistortionLookupTable;
+
+/*!
+ @property lensDistortionCenter
+ @abstract
+ A CGPoint describing the offset of the lens' distortion center from the top left.
+
+ @discussion
+ Due to geometric distortions in the image, the center of the distortion may not be equal to the optical center (principal point) of the lens. When making an image rectilinear, the distortion center should be used rather than the optical center of the image. For more information, see the reference implementation below.
+ */
+@property(nonatomic, readonly) CGPoint lensDistortionCenter;
+
+/*
+ The following reference implementation illustrates how to use the lensDistortionLookupTable, inverseLensDistortionLookupTable, and lensDistortionCenter properties to find points in the lens-distorted or undistorted (rectilinear, corrected) space. If you have a distorted image (such as a photo taken by a camera) and want to find a particular point in a corresponding undistorted image, you would call the sample method below using the inverseLensDistortionLookupTable. If you have an undistorted (aka distortion-corrected) image and want to find a point in the distorted image's space, you would call the sample method below using the lensDistortionLookupTable.
+
+ To apply distortion correction to an image, you'd begin with an empty destination buffer and iterate through it row by row, calling the sample implementation below for each point in the output image, passing the lensDistortionLookupTable to find the corresponding value in the distorted image, and write it to your output buffer. Please note that the "point", "opticalCenter", and "imageSize" parameters below must be in the same coordinate system, i.e. both at full resolution, or both scaled to a different resolution but with the same aspect ratio.
+
+- (CGPoint)lensDistortionPointForPoint:(CGPoint)point
+ lookupTable:(NSData *)lookupTable
+ distortionOpticalCenter:(CGPoint)opticalCenter
+ imageSize:(CGSize)imageSize
+{
+ // The lookup table holds the radial magnification for n linearly spaced radii.
+ // The first position corresponds to radius = 0
+ // The last position corresponds to the largest radius found in the image.
+
+ // Determine the maximum radius.
+ float delta_ocx_max = MAX( opticalCenter.x, imageSize.width - opticalCenter.x );
+ float delta_ocy_max = MAX( opticalCenter.y, imageSize.height - opticalCenter.y );
+ float r_max = sqrtf( delta_ocx_max * delta_ocx_max + delta_ocy_max * delta_ocy_max );
+
+ // Determine the vector from the optical center to the given point.
+ float v_point_x = point.x - opticalCenter.x;
+ float v_point_y = point.y - opticalCenter.y;
+
+ // Determine the radius of the given point.
+ float r_point = sqrtf( v_point_x * v_point_x + v_point_y * v_point_y );
+
+ // Look up the radial magnification to apply in the provided lookup table
+ float magnification;
+ const float *lookupTableValues = lookupTable.bytes;
+ NSUInteger lookupTableCount = lookupTable.length / sizeof(float);
+
+ if ( r_point < r_max ) {
+ // Linear interpolation
+ float val = r_point * ( lookupTableCount - 1 ) / r_max;
+ int idx = (int)val;
+ float frac = val - idx;
+
+ float mag_1 = lookupTableValues[idx];
+ float mag_2 = lookupTableValues[idx + 1];
+
+ magnification = ( 1.0f - frac ) * mag_1 + frac * mag_2;
+ }
+ else {
+ magnification = lookupTableValues[lookupTableCount - 1];
+ }
+
+ // Apply radial magnification
+ float new_v_point_x = magnification * v_point_x;
+ float new_v_point_y = magnification * v_point_y;
+
+ // Construct output
+ return CGPointMake( opticalCenter.x + new_v_point_x, opticalCenter.y + new_v_point_y );
+}
+ */
+
+@end
+
+NS_ASSUME_NONNULL_END
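
The intrinsicMatrix documentation above is the standard pinhole model; a small sketch of projecting a camera-space point to pixel coordinates (the helper name is ours, not Apple's):

```objc
#import <CoreGraphics/CGGeometry.h>
#import <simd/simd.h>

// Project a 3D point in camera space (+z forward) onto the image plane using
// the intrinsic matrix K described above. Results are in pixels relative to
// intrinsicMatrixReferenceDimensions.
static CGPoint ProjectPoint(matrix_float3x3 K, vector_float3 p)
{
    vector_float3 q = matrix_multiply(K, p); // (fx*x + ox*z, fy*y + oy*z, z)
    return CGPointMake(q.x / q.z,            // u = fx * (x/z) + ox
                       q.y / q.z);           // v = fy * (y/z) + oy
}
```
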
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDataOutputSynchronizer.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDataOutputSynchronizer.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDataOutputSynchronizer.h 1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDataOutputSynchronizer.h 2017-05-24 00:28:07.000000000 -0400
@@ -0,0 +1,354 @@
+/*
+ File: AVCaptureDataOutputSynchronizer.h
+
+ Framework: AVFoundation
+
+ Copyright 2016-2017 Apple Inc. All rights reserved.
+*/
+
+#import <AVFoundation/AVCaptureOutput.h>
+#import <CoreMedia/CMSampleBuffer.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark AVCaptureDataOutputSynchronizer
+
+@class AVCaptureDataOutputSynchronizerInternal;
+@class AVCaptureSynchronizedDataCollection;
+@protocol AVCaptureDataOutputSynchronizerDelegate;
+
+/*!
+ @class AVCaptureDataOutputSynchronizer
+ @abstract
+ AVCaptureDataOutputSynchronizer synchronizes the delivery of data from multiple capture data outputs (AVCaptureVideoDataOutput, AVCaptureDepthDataOutput, AVCaptureMetadataOutput, AVCaptureAudioDataOutput) to a single delegate callback.
+
+ @discussion
+ AVCaptureDataOutputSynchronizer is initialized with an array of data outputs (AVCaptureVideoDataOutput, AVCaptureDepthDataOutput, AVCaptureMetadataOutput, or AVCaptureAudioDataOutput) from which you'd like to receive a single, synchronized delegate callback. The first output in the array acts as the master data output and determines when the synchronized callback is delivered. When data is received for the master data output, it is held until all other data outputs have received data with an equal or later presentation time stamp, or it has been determined that there is no data for a particular output at the master data output's pts. Once all other outputs are ready, a single delegate callback is sent with all the data aligned with the master data output's data. Separate delegate callbacks are sent for any other data received with presentation time stamps earlier than the next master data output time.
+
+ For instance, if you specify a video data output as your first (master) output and a metadata output for detected faces as your second output, your data callback will not be called until there is face data ready for a video frame, or it is assured that there is no face metadata for that particular video frame.
+
+ Note that the AVCaptureDataOutputSynchronizer overrides each data output's -setSampleBufferDelegate:queue:, -setDelegate:callbackQueue:, or -setMetadataObjectsDelegate:queue: method call. The -[AVCaptureVideoDataOutput alwaysDiscardsLateVideoFrames] and -[AVCaptureDepthDataOutput alwaysDiscardsLateDepthData] properties are honored.
+ */
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureDataOutputSynchronizer : NSObject
+{
+@private
+ AVCaptureDataOutputSynchronizerInternal *_internal;
+}
+
+AV_INIT_UNAVAILABLE
+
+/*!
+ @method initWithDataOutputs:
+ @abstract
+ Instantiates an AVCaptureDataOutputSynchronizer from one or more capture data outputs.
+
+ @param dataOutputs
+ An array of capture data outputs where the first is the master.
+ @result
+ A newly initialized AVCaptureDataOutputSynchronizer instance.
+ */
+- (instancetype)initWithDataOutputs:(NSArray<AVCaptureOutput *> *)dataOutputs;
+
+/*!
+ @property dataOutputs
+ @abstract
+ The data outputs provided in the initializer method.
+ */
+@property(readonly, retain) NSArray<AVCaptureOutput *> *dataOutputs;
+
+/*!
+ @method setDelegate:queue:
+ @abstract
+ Sets the receiver's delegate that will accept synchronized data and the dispatch queue on which the delegate will be called.
+
+ @param delegate
+ An object conforming to the AVCaptureDataOutputSynchronizerDelegate protocol that will receive synchronized data from the provided data outputs.
+ @param delegateCallbackQueue
+ A dispatch queue on which all AVCaptureDataOutputSynchronizerDelegate methods will be called.
+
+ @discussion
+ AVCaptureDataOutputSynchronizer gathers data from its dataOutputs, and when it determines that all data has been received for a given timestamp, it calls the specified delegate on the specified delegateCallbackQueue. AVCaptureDataOutputSynchronizer overrides all the data outputs' delegates and callbacks. Data outputs under the control of AVCaptureDataOutputSynchronizer do not fire delegate callbacks. Delegate callbacks are restored to individual data outputs when you call this method with nil as your delegate and NULL as your delegateCallbackQueue.
+
+ A serial dispatch queue must be used to guarantee that synchronized data will be delivered in order. The delegateCallbackQueue parameter may not be NULL, except when setting the delegate to nil.
+ */
+- (void)setDelegate:(nullable id<AVCaptureDataOutputSynchronizerDelegate>)delegate queue:(nullable dispatch_queue_t)delegateCallbackQueue;
+
+/*!
+ @property delegate
+ @abstract
+ The receiver's delegate.
+
+ @discussion
+ The value of this property is an object conforming to the AVCaptureDataOutputSynchronizerDelegate protocol that will receive synchronized data output. The delegate is set using the -setDelegate:queue: method. This property is key-value observable.
+ */
+@property(nullable, nonatomic, readonly) id<AVCaptureDataOutputSynchronizerDelegate> delegate;
+
+/*!
+ @property delegateCallbackQueue
+ @abstract
+ The dispatch queue on which all AVCaptureDataOutputSynchronizerDelegate methods will be called.
+
+ @discussion
+ The value of this property is a dispatch_queue_t. The queue is set using the -setDelegate:queue: method.
+ */
+@property(nullable, nonatomic, readonly) dispatch_queue_t delegateCallbackQueue;
+
+@end
+
+
+NS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@protocol AVCaptureDataOutputSynchronizerDelegate <NSObject>
+
+@required
+/*!
+ @method dataOutputSynchronizer:didOutputSynchronizedDataCollection:
+ @abstract
+ Called when an AVCaptureDataOutputSynchronizer instance outputs synchronized data from one or more data outputs.
+
+ @param captureOutputSynchronizer
+ The AVCaptureDataOutputSynchronizer instance delivering synchronized data.
+ @param synchronizedDataCollection
+ A collection of synchronized data objects indexed by data output.
+
+ @discussion
+ The synchronized data collection only contains synchronized data for capture outputs with synchronized data ready.
+ */
+- (void)dataOutputSynchronizer:(AVCaptureDataOutputSynchronizer *)synchronizer didOutputSynchronizedDataCollection:(AVCaptureSynchronizedDataCollection *)synchronizedDataCollection;
+
+@end
+
+
+#pragma mark - AVCaptureSynchronizedDataCollection
+
+@class AVCaptureSynchronizedData;
+@class AVCaptureSynchronizedDataCollectionInternal;
+
+/*!
+ @class AVCaptureSynchronizedDataCollection
+ @abstract
+ A collection of AVCaptureSynchronizedData objects.
+
+ @discussion
+ AVCaptureDataOutputSynchronizer's -dataOutputSynchronizer:didOutputSynchronizedDataCollection: delegate method delivers a collection of AVCaptureSynchronizedData objects that can be queried by AVCaptureOutput. AVCaptureSynchronizedDataCollection supports object subscripting and fast enumeration of the data outputs as keys.
+ */
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureSynchronizedDataCollection : NSObject <NSFastEnumeration>
+{
+@private
+ AVCaptureSynchronizedDataCollectionInternal *_internal;
+}
+
+AV_INIT_UNAVAILABLE
+
+/*!
+ @method synchronizedDataForCaptureOutput:
+ @abstract
+ Provides the synchronized data object for a given capture output.
+
+ @param captureOutput
+ The data output whose synchronized data you'd like to inspect.
+ @result
+ The synchronized data object associated with the provided output, or nil, if there is none.
+ */
+- (nullable AVCaptureSynchronizedData *)synchronizedDataForCaptureOutput:(AVCaptureOutput *)captureOutput;
+
+/*!
+ @method objectForKeyedSubscript:
+ @abstract
+ Method that provides support for object subscripting.
+
+ @param key
+ The data output whose synchronized data you'd like to inspect.
+ @result
+ The synchronized data object associated with the provided output, or nil, if there is none.
+
+ @discussion
+ AVCaptureSynchronizedDataCollection supports object subscripting. If you'd like to find the synchronized data for a given data output, simply:
+ AVCaptureSynchronizedData *synchronizedData = synchronizedDataCollection[dataOutput];
+ */
+- (nullable AVCaptureSynchronizedData *)objectForKeyedSubscript:(AVCaptureOutput *)key;
+
+/*!
+ @property count
+ @abstract
+ The number of items in the collection.
+
+ @discussion
+ Returns the number of data output / synchronized data pairs present in the collection.
+ */
+@property(readonly) NSUInteger count;
+
+@end
+
+
+#pragma mark - AVCaptureSynchronizedData
+
+@class AVCaptureSynchronizedDataInternal;
+
+/*!
+ @class AVCaptureSynchronizedData
+ @abstract
+ An abstract base class representing the data delivered by a data output through the AVCaptureDataOutputSynchronizer interface.
+
+ @discussion
+ AVCaptureDataOutputSynchronizer's -dataOutputSynchronizer:didOutputSynchronizedDataCollection: delegate callback delivers a dictionary-like collection of key/value pairs, with the keys being the AVCaptureOutput instances returning data, and the values being concrete subclasses of AVCaptureSynchronizedData.
+ */
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureSynchronizedData : NSObject
+{
+@private
+ AVCaptureSynchronizedDataInternal *_synchronizedDataInternal;
+}
+
+AV_INIT_UNAVAILABLE
+
+/*!
+ @property timestamp
+ @abstract
+ The time at which this synchronized data was captured.
+
+ @discussion
+ Synchronized data is always clocked to the masterClock of the AVCaptureSession to which the data output is connected.
+ */
+@property(readonly) CMTime timestamp;
+
+@end
+
+
+#pragma mark - AVCaptureSynchronizedSampleBufferData
+
+@class AVCaptureSynchronizedSampleBufferDataInternal;
+
+/*!
+ @class AVCaptureSynchronizedSampleBufferData
+ @abstract
+ A concrete subclass of AVCaptureSynchronizedData representing the data delivered by an AVCaptureVideoDataOutput or AVCaptureAudioDataOutput.
+
+ @discussion
+ Synchronized sample buffer data is valid for the duration of AVCaptureDataOutputSynchronizer's -dataOutputSynchronizer:didOutputSynchronizedDataCollection: delegate callback. To extend the sample buffer data beyond the callback, you must CFRetain it, and later call CFRelease when you're done with it.
+ */
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureSynchronizedSampleBufferData : AVCaptureSynchronizedData
+{
+@private
+ AVCaptureSynchronizedSampleBufferDataInternal *_internal;
+}
+
+/*!
+ @property sampleBuffer
+ @abstract
+ A sample buffer containing video or audio data.
+
+ @discussion
+ If sampleBufferWasDropped is YES, the returned sampleBuffer was dropped before it could be delivered to you, and thus this sample buffer is a shell containing metadata and format information, but no actual pixel data. This property is never NULL. If a data output has no data to return, it is simply not present in the collection of synchronized data returned by AVCaptureDataOutputSynchronizer's -dataOutputSynchronizer:didOutputSynchronizedDataCollection: delegate callback.
+ */
+@property(readonly) CMSampleBufferRef sampleBuffer;
+
+/*!
+ @property sampleBufferWasDropped
+ @abstract
+ YES if the sample buffer was dropped.
+
+ @discussion
+ AVCaptureVideoDataOutput has a delegate callback for dropped sample buffers. AVCaptureAudioDataOutput does not. Therefore, sampleBufferWasDropped may be YES for video, but never for audio.
+ */
+@property(readonly) BOOL sampleBufferWasDropped;
+
+/*!
+ @property droppedReason
+ @abstract
+ If sampleBufferWasDropped is YES, the reason for the drop, otherwise AVCaptureOutputDataDroppedReasonNone.
+
+ @discussion
+ AVCaptureOutputDataDroppedReasons are defined in AVCaptureOutputBase.h.
+ */
+@property(readonly) AVCaptureOutputDataDroppedReason droppedReason;
+
+@end
+
+
+#pragma mark - AVCaptureSynchronizedMetadataObjectData
+
+@class AVCaptureSynchronizedMetadataObjectDataInternal;
+
+/*!
+ @class AVCaptureSynchronizedMetadataObjectData
+ @abstract
+ A concrete subclass of AVCaptureSynchronizedData representing the data delivered by an AVCaptureMetadataOutput.
+
+ @discussion
+ A single AVCaptureMetadataOutput may be configured to deliver multiple kinds of metadata objects (such as QRCodes and detected faces). AVCaptureSynchronizedMetadataObjectData's -metadataObjects array may contain multiple AVMetadataObject subclasses, depending on how the AVCaptureMetadataOutput was configured. All synchronized metadata objects share a common timestamp.
+ */
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureSynchronizedMetadataObjectData : AVCaptureSynchronizedData
+{
+@private
+ AVCaptureSynchronizedMetadataObjectDataInternal *_internal;
+}
+
+/*!
+ @property metadataObjects
+ @abstract
+ An array of AVMetadataObject subclasses.
+
+ @discussion
+ -metadataObjects is never nil. If no metadata objects are present for a given time, an empty array is returned.
+ */
+@property(readonly) NSArray<AVMetadataObject *> *metadataObjects;
+
+@end
+
+
+#pragma mark - AVCaptureSynchronizedDepthData
+
+@class AVCaptureSynchronizedDepthDataInternal;
+
+/*!
+ @class AVCaptureSynchronizedDepthData
+ @abstract
+ A concrete subclass of AVCaptureSynchronizedData representing the data delivered by an AVCaptureDepthDataOutput.
+
+ @discussion
+ Depth data, like video, may be dropped if not serviced in a timely fashion.
+ */
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureSynchronizedDepthData : AVCaptureSynchronizedData
+{
+@private
+ AVCaptureSynchronizedDepthDataInternal *_internal;
+}
+
+/*!
+ @property depthData
+ @abstract
+ An instance of AVDepthData.
+
+ @discussion
+ If depthDataWasDropped is YES, the returned depthData was dropped before it could be delivered to you, and thus this AVDepthData is a shell containing format information and calibration data, but no actual pixel map data. This property is never nil. If a data output has no data to return, it is simply not present in the collection of synchronized data returned by AVCaptureDataOutputSynchronizer's -dataOutputSynchronizer:didOutputSynchronizedDataCollection: delegate callback.
+ */
+@property(readonly) AVDepthData *depthData;
+
+/*!
+ @property depthDataWasDropped
+ @abstract
+ YES if the depth data was dropped.
+
+ @discussion
+ If YES, inspect -droppedReason for the reason.
+ */
+@property(readonly) BOOL depthDataWasDropped;
+
+/*!
+ @property droppedReason
+ @abstract
+ If depthDataWasDropped is YES, the reason for the drop, otherwise AVCaptureOutputDataDroppedReasonNone.
+
+ @discussion
+ AVCaptureOutputDataDroppedReasons are defined in AVCaptureOutputBase.h.
+ */
+@property(readonly) AVCaptureOutputDataDroppedReason droppedReason;
+
+@end
+
+NS_ASSUME_NONNULL_END
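
A minimal delegate sketch tying the pieces above together (class and variable names are ours; both outputs are assumed to already be attached to a running AVCaptureSession, with the video output listed first so it acts as master):

```objc
#import <AVFoundation/AVFoundation.h>

// Hypothetical receiver class, for illustration only.
@interface DepthSyncReceiver : NSObject <AVCaptureDataOutputSynchronizerDelegate>
@property (nonatomic, strong) AVCaptureDataOutputSynchronizer *synchronizer;
@end

@implementation DepthSyncReceiver

- (void)startWithVideoOutput:(AVCaptureVideoDataOutput *)videoOutput
                 depthOutput:(AVCaptureDepthDataOutput *)depthOutput
{
    // The first output in the array is the master output that paces callbacks.
    self.synchronizer = [[AVCaptureDataOutputSynchronizer alloc]
        initWithDataOutputs:@[ videoOutput, depthOutput ]];
    // A serial queue guarantees in-order delivery of synchronized data.
    [self.synchronizer setDelegate:self
                             queue:dispatch_queue_create("sync.queue", DISPATCH_QUEUE_SERIAL)];
}

- (void)dataOutputSynchronizer:(AVCaptureDataOutputSynchronizer *)synchronizer
didOutputSynchronizedDataCollection:(AVCaptureSynchronizedDataCollection *)collection
{
    // Fast enumeration yields the data outputs as keys; object subscripting
    // returns the synchronized data for each output.
    for (AVCaptureOutput *output in collection) {
        AVCaptureSynchronizedData *data = collection[output];
        if ([data isKindOfClass:[AVCaptureSynchronizedDepthData class]] &&
            !((AVCaptureSynchronizedDepthData *)data).depthDataWasDropped) {
            AVDepthData *depth = ((AVCaptureSynchronizedDepthData *)data).depthData;
            (void)depth; // process depth aligned with the master video frame
        }
    }
}

@end
```
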
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDepthDataOutput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDepthDataOutput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDepthDataOutput.h 1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDepthDataOutput.h 2017-05-24 00:28:07.000000000 -0400
@@ -0,0 +1,160 @@
+/*
+ File: AVCaptureDepthDataOutput.h
+
+ Framework: AVFoundation
+
+ Copyright 2016-2017 Apple Inc. All rights reserved.
+*/
+
+#import <AVFoundation/AVMediaFormat.h>
+#import <AVFoundation/AVCaptureOutputBase.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark AVCaptureDepthDataOutput
+
+@class AVDepthData;
+
+@class AVCaptureDepthDataOutputInternal;
+@protocol AVCaptureDepthDataOutputDelegate;
+
+/*!
+ @class AVCaptureDepthDataOutput
+ @abstract
+ AVCaptureDepthDataOutput is a concrete subclass of AVCaptureOutput that can be used to process depth data in a streaming fashion.
+
+ @discussion
+ Instances of AVCaptureDepthDataOutput capture AVDepthData objects expressing disparity/depth. Applications can access the frames with the captureOutput:didOutputDepthData:fromConnection: delegate method.
+
+ AVCaptureDepthDataOutput always provides depth data in the format expressed by its source's -[AVCaptureDevice activeDepthDataFormat] property. If you wish to receive depth data in another format, you may choose from the -[AVCaptureDevice activeFormat]'s -[AVCaptureDeviceFormat supportedDepthDataFormats], and set it using -[AVCaptureDevice setActiveDepthDataFormat:].
+ */
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureDepthDataOutput : AVCaptureOutput
+{
+@private
+ AVCaptureDepthDataOutputInternal *_internal;
+}
+
+- (instancetype)init;
+
++ (instancetype)new;
+
+/*!
+ @method setDelegate:callbackQueue:
+ @abstract
+ Sets the receiver's delegate that receives captured depth data and the dispatch queue on which the delegate is called.
+
+ @param delegate
+ An object conforming to the AVCaptureDepthDataOutputDelegate protocol that receives depth data in a streaming fashion.
+ @param callbackQueue
+ A dispatch queue on which all delegate methods are called.
+
+ @discussion
+ The depth data output vends captured depth data to its delegate using the methods specified in the AVCaptureDepthDataOutputDelegate protocol. All delegate methods are called on the specified dispatch queue. If the callback queue is blocked when new depth data is captured, that depth data is automatically dropped at a time determined by the value of the alwaysDiscardsLateDepthData property. This allows clients to process existing depth data on the same queue without having to manage the potential memory usage increases that would otherwise occur when that processing is unable to keep up with the rate of incoming depth data.
+
+ Clients who need to minimize the chances of depth data being dropped should provide a dedicated queue and not share it with other data outputs. Processing of depth data may be deferred to another queue, but beware that the depth data pixel buffer maps may come from a finite buffer pool, which may be starved if your deferred processing fails to keep up.
+
+ A serial dispatch queue must be used to guarantee that depth data will be delivered in order. The callbackQueue parameter may not be NULL, except when setting the delegate to nil.
+ */
+- (void)setDelegate:(id<AVCaptureDepthDataOutputDelegate>)delegate callbackQueue:(dispatch_queue_t)callbackQueue;
+
+/*!
+ @property delegate
+ @abstract
+ The receiver's delegate.
+
+ @discussion
+ The value of this property is an object conforming to the AVCaptureDepthDataOutputDelegate protocol that receives depth data as it is captured. The delegate is set using the setDelegate:callbackQueue: method.
+ */
+@property(nonatomic, readonly) id<AVCaptureDepthDataOutputDelegate> delegate;
+
+/*!
+ @property delegateCallbackQueue
+ @abstract
+ The dispatch queue on which all delegate methods are called.
+
+ @discussion
+ The value of this property is a dispatch_queue_t. The queue is set using the setDelegate:callbackQueue: method.
+ */
+@property(nonatomic, readonly) dispatch_queue_t delegateCallbackQueue;
+
+/*!
+ @property alwaysDiscardsLateDepthData
+ @abstract
+ Specifies whether the receiver should always discard any depth data that is not processed before the next depth data is captured.
+
+ @discussion
+ When the value of this property is YES, the receiver will immediately discard depth data that is captured while the delegateCallbackQueue is blocked. When the value of this property is NO, delegates will be allowed more time to process old depth data before new depth data is discarded, but application memory usage may increase as a result. The default value is YES.
+ */
+@property(nonatomic) BOOL alwaysDiscardsLateDepthData;
+
+/*!
+ @property filteringEnabled
+ @abstract
+ Specifies whether the depth data output should filter depth data to smooth out noise and fill invalid values.
+
+ @discussion
+ When the value of this property is YES, the receiver temporally filters the stream of AVDepthData objects to reduce noise, as well as fill invalid values. Invalid values (NaN) may be present in AVDepthData pixel buffer maps due to factors such as low light or lens occlusion. When filtering is enabled, the depth data output interpolates missing depth data values. Filtering should be disabled if you desire the raw depth data values. The default value is YES.
+ */
+@property(nonatomic, getter=isFilteringEnabled) BOOL filteringEnabled;
+
+@end
+
+
+/*!
+ @protocol AVCaptureDepthDataOutputDelegate
+ @abstract
+ Defines an interface for delegates of AVCaptureDepthDataOutput to receive captured depth data and be notified of late depth data that were dropped.
+ */
+NS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@protocol AVCaptureDepthDataOutputDelegate <NSObject>
+
+@optional
+
+/*!
+ @method depthDataOutput:didOutputDepthData:timestamp:connection:
+ @abstract
+ Called whenever an AVCaptureDepthDataOutput instance outputs a new depth data object.
+
+ @param output
+ The AVCaptureDepthDataOutput instance vending the depth data.
+ @param depthData
+ An AVDepthData object containing the depth/disparity data.
+ @param timestamp
+ A CMTime indicating when the depth data was captured.
+ @param connection
+ The AVCaptureConnection through which the depth data is received.
+
+ @discussion
+ The delegate receives this message whenever the depth data output captures and outputs a new depth data object. This method is called on the dispatch queue specified by the output's delegateCallbackQueue property. This method is called frequently. Care must be taken to process the depth data quickly in order to prevent dropped depth data.
+
+ Clients that need to reference the AVDepthData object outside of the scope of this method must retain it and then release it when they are finished with it (in an MRR app).
+
+ Note that to maintain optimal performance, AVDepthData pixel buffer maps may be backed by a finite memory pool. If AVDepthData objects are held onto for too long, capture inputs will no longer be able to copy new depth data into memory, resulting in droppage. If your application is causing depth data drops by holding on to provided depth data objects for too long, consider copying the pixel buffer map data into a new pixel buffer so that the AVDepthData backing memory can be reused more quickly.
+ */
+- (void)depthDataOutput:(AVCaptureDepthDataOutput *)output didOutputDepthData:(AVDepthData *)depthData timestamp:(CMTime)timestamp connection:(AVCaptureConnection *)connection;
+
+/*!
+ @method depthDataOutput:didDropDepthData:timestamp:connection:reason:
+ @abstract
+ Called once for each depth data that is discarded.
+
+ @param output
+ The AVCaptureDepthDataOutput instance that dropped the depth data.
+ @param depthData
+ A depth data object containing information about the dropped depth, such as its native depth type. This depth data object produces nil CVPixelBuffers for depth / disparity as it has no backing depth map.
+ @param timestamp
+ A CMTime indicating when the depth data was captured.
+ @param connection
+ The AVCaptureConnection from which the dropped depth data object was received.
+ @param reason
+ The reason the depth data object was dropped.
+
+ @discussion
+ Delegates receive this message whenever a depth data object is dropped. This method is called once for each dropped depth data. The object passed to this delegate method will contain a shell of an AVDepthData that contains no actual depth data backing pixel buffer, as well as a presentation time stamp and a reason for the drop. This method will be called on the dispatch queue specified by the output's delegateCallbackQueue property. Because this method is called on the same dispatch queue that outputs depth data, it must be efficient to prevent further capture performance problems, such as additional drops.
+ */
+- (void)depthDataOutput:(AVCaptureDepthDataOutput *)output didDropDepthData:(AVDepthData *)depthData timestamp:(CMTime)timestamp connection:(AVCaptureConnection *)connection reason:(AVCaptureOutputDataDroppedReason)reason;
+
+@end
+
+NS_ASSUME_NONNULL_END
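
A minimal streaming-depth sketch against this header (class name is ours; depthOutput is assumed to already be added to a session):

```objc
#import <AVFoundation/AVFoundation.h>

// Hypothetical receiver class, for illustration only.
@interface DepthReceiver : NSObject <AVCaptureDepthDataOutputDelegate>
@end

@implementation DepthReceiver

- (void)attachToDepthOutput:(AVCaptureDepthDataOutput *)depthOutput
{
    depthOutput.filteringEnabled = YES; // interpolate invalid (NaN) values
    // A dedicated serial queue minimizes the chance of dropped depth data.
    [depthOutput setDelegate:self
               callbackQueue:dispatch_queue_create("depth.queue", DISPATCH_QUEUE_SERIAL)];
}

- (void)depthDataOutput:(AVCaptureDepthDataOutput *)output
     didOutputDepthData:(AVDepthData *)depthData
              timestamp:(CMTime)timestamp
             connection:(AVCaptureConnection *)connection
{
    // Process quickly; the pixel buffer pool backing AVDepthData is finite.
    CVPixelBufferRef map = depthData.depthDataMap;
    (void)map;
}

- (void)depthDataOutput:(AVCaptureDepthDataOutput *)output
       didDropDepthData:(AVDepthData *)depthData
              timestamp:(CMTime)timestamp
             connection:(AVCaptureConnection *)connection
                 reason:(AVCaptureOutputDataDroppedReason)reason
{
    // depthData here is a shell with format info but no backing pixel map.
}

@end
```
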
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutputBase.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutputBase.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutputBase.h 1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutputBase.h 2017-05-24 00:28:27.000000000 -0400
@@ -0,0 +1,140 @@
+/*
+ File: AVCaptureOutputBase.h
+
+ Framework: AVFoundation
+
+ Copyright 2017 Apple Inc. All rights reserved.
+*/
+
+#import <AVFoundation/AVBase.h>
+#import <AVFoundation/AVMediaFormat.h>
+#import <Foundation/Foundation.h>
+#import <AVFoundation/AVCaptureSession.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark AVCaptureOutput
+
+@class AVMetadataObject;
+@class AVCaptureOutputInternal;
+
+/*!
+ @class AVCaptureOutput
+ @abstract
+ AVCaptureOutput is an abstract class that defines an interface for an output destination of an AVCaptureSession.
+
+ @discussion
+ AVCaptureOutput provides an abstract interface for connecting capture output destinations, such as files and video previews, to an AVCaptureSession.
+
+ An AVCaptureOutput can have multiple connections represented by AVCaptureConnection objects, one for each stream of media that it receives from an AVCaptureInput. An AVCaptureOutput does not have any connections when it is first created. When an output is added to an AVCaptureSession, connections are created that map media data from that session's inputs to its outputs.
+
+ Concrete AVCaptureOutput instances can be added to an AVCaptureSession using the -[AVCaptureSession addOutput:] and -[AVCaptureSession addOutputWithNoConnections:] methods.
+ */
+NS_CLASS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureOutput : NSObject
+{
+@private
+ AVCaptureOutputInternal *_outputInternal;
+}
+
+AV_INIT_UNAVAILABLE
+
+/*!
+ @property connections
+ @abstract
+ The connections that describe the flow of media data to the receiver from AVCaptureInputs.
+
+ @discussion
+ The value of this property is an NSArray of AVCaptureConnection objects, each describing the mapping between the receiver and the AVCaptureInputPorts of one or more AVCaptureInputs.
+ */
+@property(nonatomic, readonly) NSArray<AVCaptureConnection *> *connections;
+
+/*!
+ @method connectionWithMediaType:
+ @abstract
+ Returns the first connection in the connections array with an inputPort of the specified mediaType.
+
+ @param mediaType
+ An AVMediaType constant from AVMediaFormat.h, e.g. AVMediaTypeVideo.
+
+ @discussion
+ This convenience method returns the first AVCaptureConnection in the receiver's connections array that has an AVCaptureInputPort of the specified mediaType. If no connection with the specified mediaType is found, nil is returned.
+ */
+- (nullable AVCaptureConnection *)connectionWithMediaType:(AVMediaType)mediaType NS_AVAILABLE(10_7, 5_0);
+
+/*!
+ @method transformedMetadataObjectForMetadataObject:connection:
+ @abstract
+ Converts an AVMetadataObject's visual properties to the receiver's coordinates.
+
+ @param metadataObject
+ An AVMetadataObject originating from the same AVCaptureInput as the receiver.
+ @param connection
+ The receiver's connection whose AVCaptureInput matches that of the metadata object to be converted.
+ @result
+ An AVMetadataObject whose properties are in output coordinates.
+
+ @discussion
+ AVMetadataObject bounds may be expressed as a rect where {0,0} represents the top left of the picture area, and {1,1} represents the bottom right on an unrotated picture. Face metadata objects likewise express yaw and roll angles with respect to an unrotated picture. -transformedMetadataObjectForMetadataObject:connection: converts the visual properties in the coordinate space of the supplied AVMetadataObject to the coordinate space of the receiver. The conversion takes orientation, mirroring, and scaling into consideration. If the provided metadata object originates from an input source other than the receiver's, nil will be returned.
+
+ If an AVCaptureVideoDataOutput instance's connection's videoOrientation or videoMirrored properties are set to non-default values, the output applies the desired mirroring and orientation by physically rotating and/or flipping sample buffers as they pass through it. AVCaptureStillImageOutput, on the other hand, does not physically rotate its buffers. It attaches an appropriate kCGImagePropertyOrientation number to captured still image buffers (see ImageIO/CGImageProperties.h) indicating how the image should be displayed on playback. Likewise, AVCaptureMovieFileOutput does not physically apply orientation/mirroring to its sample buffers -- it uses a QuickTime track matrix to indicate how the buffers should be rotated and/or flipped on playback.
+
+ transformedMetadataObjectForMetadataObject:connection: alters the visual properties of the provided metadata object to match the physical rotation / mirroring of the sample buffers provided by the receiver through the indicated connection. I.e., for video data output, adjusted metadata object coordinates are rotated/mirrored. For still image and movie file output, they are not.
+ */
+- (nullable AVMetadataObject *)transformedMetadataObjectForMetadataObject:(AVMetadataObject *)metadataObject connection:(AVCaptureConnection *)connection NS_AVAILABLE_IOS(6_0);
+
+/*!
+ @method metadataOutputRectOfInterestForRect:
+ @abstract
+ Converts a rectangle in the receiver's coordinate space to a rectangle of interest in the coordinate space of an AVCaptureMetadataOutput whose capture device is providing input to the receiver.
+
+ @param rectInOutputCoordinates
+ A CGRect in the receiver's coordinates.
+ @result
+ A CGRect in the coordinate space of the metadata output whose capture device is providing input to the receiver.
+
+ @discussion
+ AVCaptureMetadataOutput rectOfInterest is expressed as a CGRect where {0,0} represents the top left of the picture area, and {1,1} represents the bottom right on an unrotated picture. This convenience method converts a rectangle in the coordinate space of the receiver to a rectangle of interest in the coordinate space of an AVCaptureMetadataOutput whose AVCaptureDevice is providing input to the receiver. The conversion takes orientation, mirroring, and scaling into consideration. See -transformedMetadataObjectForMetadataObject:connection: for a full discussion of how orientation and mirroring are applied to sample buffers passing through the output.
+ */
+- (CGRect)metadataOutputRectOfInterestForRect:(CGRect)rectInOutputCoordinates NS_AVAILABLE_IOS(7_0);
+
+/*!
+ @method rectForMetadataOutputRectOfInterest:
+ @abstract
+ Converts a rectangle of interest in the coordinate space of an AVCaptureMetadataOutput whose capture device is providing input to the receiver to a rectangle in the receiver's coordinates.
+
+ @param rectInMetadataOutputCoordinates
+ A CGRect in the coordinate space of the metadata output whose capture device is providing input to the receiver.
+ @result
+ A CGRect in the receiver's coordinates.
+
+ @discussion
+ AVCaptureMetadataOutput rectOfInterest is expressed as a CGRect where {0,0} represents the top left of the picture area, and {1,1} represents the bottom right on an unrotated picture. This convenience method converts a rectangle in the coordinate space of an AVCaptureMetadataOutput whose AVCaptureDevice is providing input to the coordinate space of the receiver. The conversion takes orientation, mirroring, and scaling into consideration. See -transformedMetadataObjectForMetadataObject:connection: for a full discussion of how orientation and mirroring are applied to sample buffers passing through the output.
+ */
+- (CGRect)rectForMetadataOutputRectOfInterest:(CGRect)rectInMetadataOutputCoordinates NS_AVAILABLE_IOS(7_0);
+
+@end
+
+
+/*!
+ @enum AVCaptureOutputDataDroppedReason
+ @abstract
+ Constants indicating the reason a capture data output dropped data.
+
+ @constant AVCaptureOutputDataDroppedReasonNone
+ No data was dropped.
+ @constant AVCaptureOutputDataDroppedReasonLateData
+ Data was dropped because alwaysDiscardsLate{VideoFrames | DepthData} is YES and the client was still processing previous data when the current data needed to be delivered.
+ @constant AVCaptureOutputDataDroppedReasonOutOfBuffers
+ Data was dropped because its pool of buffers ran dry. This is usually indicative that the client is holding onto data objects too long.
+ @constant AVCaptureOutputDataDroppedReasonDiscontinuity
+ Data was dropped because the device providing the data experienced a discontinuity, and an unknown number of data objects have been lost. This condition is typically caused by the system being too busy.
+ */
+typedef NS_ENUM(NSInteger, AVCaptureOutputDataDroppedReason) {
+ AVCaptureOutputDataDroppedReasonNone = 0,
+ AVCaptureOutputDataDroppedReasonLateData = 1,
+ AVCaptureOutputDataDroppedReasonOutOfBuffers = 2,
+ AVCaptureOutputDataDroppedReasonDiscontinuity = 3,
+} NS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+NS_ASSUME_NONNULL_END
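
A small illustrative helper for surfacing the drop reason from the data-output delegate callbacks (sketch only; the function name is ours):

```objc
#import <AVFoundation/AVFoundation.h>

// Map a drop reason to a human-readable string for logging.
static NSString *DroppedReasonDescription(AVCaptureOutputDataDroppedReason reason)
{
    switch (reason) {
        case AVCaptureOutputDataDroppedReasonNone:          return @"not dropped";
        case AVCaptureOutputDataDroppedReasonLateData:      return @"late data (callback queue blocked)";
        case AVCaptureOutputDataDroppedReasonOutOfBuffers:  return @"out of buffers (objects held too long)";
        case AVCaptureOutputDataDroppedReasonDiscontinuity: return @"discontinuity (system too busy)";
    }
    return @"unknown";
}
```
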
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSessionPreset.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSessionPreset.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSessionPreset.h 1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureSessionPreset.h 2017-05-25 06:23:48.000000000 -0400
@@ -0,0 +1,162 @@
+/*
+ File: AVCaptureSessionPreset.h
+
+ Framework: AVFoundation
+
+ Copyright 2010-2017 Apple Inc. All rights reserved.
+*/
+
+#import <AVFoundation/AVBase.h>
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/*!
+ @group AVCaptureSessionPreset string constants
+
+ @discussion
+ Clients may use an AVCaptureSessionPreset to set the format for output on an AVCaptureSession.
+ */
+typedef NSString * AVCaptureSessionPreset NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED NS_STRING_ENUM;
+
+/*!
+ @constant AVCaptureSessionPresetPhoto
+ @abstract
+ An AVCaptureSession preset suitable for high resolution photo quality output.
+
+ @discussion
+ Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetPhoto for full resolution photo quality output.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetPhoto NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPresetHigh
+ @abstract
+ An AVCaptureSession preset suitable for high quality video and audio output.
+
+ @discussion
+ Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetHigh to achieve high quality video and audio output. AVCaptureSessionPresetHigh is the default sessionPreset value.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetHigh NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPresetMedium
+ @abstract
+ An AVCaptureSession preset suitable for medium quality output.
+
+ @discussion
+ Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetMedium to achieve output video and audio bitrates suitable for sharing over WiFi.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetMedium NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPresetLow
+ @abstract
+ An AVCaptureSession preset suitable for low quality output.
+
+ @discussion
+ Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetLow to achieve output video and audio bitrates suitable for sharing over 3G.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetLow NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPreset320x240
+ @abstract
+ An AVCaptureSession preset suitable for 320x240 video output.
+
+ @discussion
+ Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset320x240 to achieve 320x240 output.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset320x240 NS_AVAILABLE_MAC(10_7) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPreset352x288
+ @abstract
+ An AVCaptureSession preset suitable for 352x288 video output.
+
+ @discussion
+ Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset352x288 to achieve CIF quality (352x288) output.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset352x288 NS_AVAILABLE(10_7, 5_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPreset640x480
+ @abstract
+ An AVCaptureSession preset suitable for 640x480 video output.
+
+ @discussion
+ Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset640x480 to achieve VGA quality (640x480) output.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset640x480 NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPreset960x540
+ @abstract
+ An AVCaptureSession preset suitable for 960x540 video output.
+
+ @discussion
+ Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset960x540 to achieve quarter HD quality (960x540) output.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset960x540 NS_AVAILABLE_MAC(10_7) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPreset1280x720
+ @abstract
+ An AVCaptureSession preset suitable for 1280x720 video output.
+
+ @discussion
+ Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset1280x720 to achieve 1280x720 output.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset1280x720 NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPreset1920x1080
+ @abstract
+ An AVCaptureSession preset suitable for 1920x1080 video output.
+
+ @discussion
+ Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset1920x1080 to achieve 1920x1080 output.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset1920x1080 NS_AVAILABLE_IOS(5_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPreset3840x2160
+ @abstract
+ An AVCaptureSession preset suitable for 3840x2160 (UHD 4K) video output.
+
+ @discussion
+ Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPreset3840x2160 to achieve 3840x2160 output.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPreset3840x2160 NS_AVAILABLE_IOS(9_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPresetiFrame960x540
+ @abstract
+ An AVCaptureSession preset producing 960x540 Apple iFrame video and audio content.
+
+ @discussion
+ Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetiFrame960x540 to achieve 960x540 quality iFrame H.264 video at ~30 Mbits/sec with AAC audio. QuickTime movies captured in iFrame format are optimal for editing applications.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetiFrame960x540 NS_AVAILABLE(10_9, 5_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPresetiFrame1280x720
+ @abstract
+ An AVCaptureSession preset producing 1280x720 Apple iFrame video and audio content.
+
+ @discussion
+ Clients may set an AVCaptureSession instance's sessionPreset to AVCaptureSessionPresetiFrame1280x720 to achieve 1280x720 quality iFrame H.264 video at ~40 Mbits/sec with AAC audio. QuickTime movies captured in iFrame format are optimal for editing applications.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetiFrame1280x720 NS_AVAILABLE(10_9, 5_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureSessionPresetInputPriority
+ @abstract
+ An AVCaptureSession preset indicating that the formats of the session's inputs are being given priority.
+
+ @discussion
+ By calling -setSessionPreset:, clients can easily configure an AVCaptureSession to produce a desired quality of service level. The session configures its inputs and outputs optimally to produce the QoS level indicated. Clients who need to ensure a particular input format is chosen can use AVCaptureDevice's -setActiveFormat: method. When a client sets the active format on a device, the associated session's -sessionPreset property automatically changes to AVCaptureSessionPresetInputPriority. This change indicates that the input format selected by the client now dictates the quality of service level provided at the outputs. When a client sets the session preset to anything other than AVCaptureSessionPresetInputPriority, the session resumes responsibility for configuring inputs and outputs, and is free to change its inputs' activeFormat as needed.
+ */
+AVF_EXPORT AVCaptureSessionPreset const AVCaptureSessionPresetInputPriority NS_AVAILABLE_IOS(7_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
+
+NS_ASSUME_NONNULL_END
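
Taken together, these constants drive -[AVCaptureSession setSessionPreset:]. A minimal Swift sketch of the intended usage (iOS only, since every preset above is __TVOS_PROHIBITED / __WATCHOS_PROHIBITED; the function name is illustrative):

```swift
import AVFoundation

// Minimal sketch (iOS): pick a fixed-resolution preset when the session
// supports it; otherwise the session stays on the default, .high.
func configure(_ session: AVCaptureSession) {
    if session.canSetSessionPreset(.hd1920x1080) {
        session.sessionPreset = .hd1920x1080 // 1920x1080 output
    }
    // Note: setting a device's activeFormat directly flips sessionPreset
    // to AVCaptureSessionPresetInputPriority, per the discussion above.
}
```
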
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVContentKeySession.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVContentKeySession.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVContentKeySession.h 2017-02-20 23:29:10.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVContentKeySession.h 2017-05-24 00:28:08.000000000 -0400
@@ -1,7 +1,7 @@
/*
File: AVContentKeySession.h
- Copyright (c) 2015-2016 Apple Inc. All rights reserved.
+ Copyright (c) 2015-2017 Apple Inc. All rights reserved.
*/
#import <AVFoundation/AVBase.h>
@@ -32,31 +32,47 @@
@group AVContentKeySystem string constants
@brief Used by AVContentKeySession to determine the method of key delivery
*/
-typedef NSString *AVContentKeySystem NS_STRING_ENUM API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2));
+typedef NSString *AVContentKeySystem NS_STRING_ENUM API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos);
/*!
@constant AVContentKeySystemFairPlayStreaming
@discussion Used to specify FairPlay Streaming (FPS) as the method of key delivery.
*/
-AVF_EXPORT AVContentKeySystem const AVContentKeySystemFairPlayStreaming API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2));
+AVF_EXPORT AVContentKeySystem const AVContentKeySystemFairPlayStreaming API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos);
-API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2))
+/*!
+ @constant AVContentKeySystemClearKey
+ @discussion Used to specify clear key as the method of key delivery.
+ */
+AVF_EXPORT AVContentKeySystem const AVContentKeySystemClearKey API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)) API_UNAVAILABLE(watchos);
+
+API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos)
@interface AVContentKeySession : NSObject {
@private
AVContentKeySessionInternal *_session;
}
/*!
+ @method contentKeySessionWithKeySystem:
+ @abstract Creates a new instance of AVContentKeySession to manage a collection of media content keys.
+ @param keySystem
+ A valid key system for retrieving keys.
+ @result A new AVContentKeySession.
+ @discussion This method returns an AVContentKeySession instance that is capable of managing a collection of media content keys corresponding to the input keySystem. An NSInvalidArgumentException will be raised if the value of keySystem is unsupported.
+ */
++ (instancetype)contentKeySessionWithKeySystem:(AVContentKeySystem)keySystem API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)) API_UNAVAILABLE(watchos);
+
+/*!
@method contentKeySessionWithKeySystem:storageDirectoryAtURL:
@abstract Creates a new instance of AVContentKeySession to manage a collection of media content keys.
@param keySystem
A valid key system for retrieving keys.
@param storageURL
- Optional URL to a writable directory that the session will use to facilitate expired session reports after abnormal session termination. Pass nil if you do not require expired session reports.
+ URL to a writable directory that the session will use to facilitate expired session reports after abnormal session termination.
@result A new AVContentKeySession.
 @discussion This method returns an AVContentKeySession instance that is capable of managing a collection of media content keys corresponding to the input keySystem. An NSInvalidArgumentException will be raised if the value of keySystem is unsupported.
*/
-+ (instancetype)contentKeySessionWithKeySystem:(AVContentKeySystem)keySystem storageDirectoryAtURL:(nullable NSURL *)storageURL;
++ (instancetype)contentKeySessionWithKeySystem:(AVContentKeySystem)keySystem storageDirectoryAtURL:(NSURL *)storageURL;
/*!
@method setDelegate:queue:
@@ -129,6 +145,17 @@
*/
- (void)renewExpiringResponseDataForContentKeyRequest:(AVContentKeyRequest *)contentKeyRequest;
+/*!
+ @method makeSecureTokenForExpirationDateOfPersistableContentKey:
+ @abstract Creates a secure server playback context (SPC) that the client could send to the key server to obtain an expiration date for the provided persistable content key data.
+ @param persistableContentKeyData
+ Persistable content key data that was previously created using -[AVContentKeyRequest persistableContentKeyFromKeyVendorResponse:options:error:] or obtained via AVContentKeySessionDelegate callback -contentKeySession:didUpdatePersistableContentKey:forContentKeyIdentifier:.
+ @param handler
+ Once the secure token is ready, this block will be called with the token or an error describing the failure.
+ */
+- (void)makeSecureTokenForExpirationDateOfPersistableContentKey:(NSData *)persistableContentKeyData
+ completionHandler:(void (^)(NSData * _Nullable secureTokenData, NSError * _Nullable error))handler API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(macosx, tvos, watchos);
+
@end
@interface AVContentKeySession (AVContentKeyRecipients)
@@ -188,27 +215,27 @@
@group AVContentKeyRequestRetryReason string constants
@brief Used to specify a reason for asking the client to retry a content key request.
*/
-typedef NSString *AVContentKeyRequestRetryReason NS_STRING_ENUM API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2));
+typedef NSString *AVContentKeyRequestRetryReason NS_STRING_ENUM API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos);
/*!
@constant AVContentKeyRequestRetryReasonTimedOut
 @discussion Indicates that the content key request should be retried because the key response was not set soon enough, either because the initial request/response took too long or because a lease expired in the meantime.
*/
-AVF_EXPORT AVContentKeyRequestRetryReason const AVContentKeyRequestRetryReasonTimedOut API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2));
+AVF_EXPORT AVContentKeyRequestRetryReason const AVContentKeyRequestRetryReasonTimedOut API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos);
/*!
@constant AVContentKeyRequestRetryReasonReceivedResponseWithExpiredLease
@discussion Indicates that the content key request should be retried because a key response with expired lease was set on the previous content key request.
*/
-AVF_EXPORT AVContentKeyRequestRetryReason const AVContentKeyRequestRetryReasonReceivedResponseWithExpiredLease API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2));
+AVF_EXPORT AVContentKeyRequestRetryReason const AVContentKeyRequestRetryReasonReceivedResponseWithExpiredLease API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos);
/*!
@constant AVContentKeyRequestRetryReasonReceivedObsoleteContentKey
@discussion Indicates that the content key request should be retried because an obsolete key response was set on the previous content key request.
*/
-AVF_EXPORT AVContentKeyRequestRetryReason const AVContentKeyRequestRetryReasonReceivedObsoleteContentKey API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2));
+AVF_EXPORT AVContentKeyRequestRetryReason const AVContentKeyRequestRetryReasonReceivedObsoleteContentKey API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos);
-API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2))
+API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos)
@protocol AVContentKeySessionDelegate <NSObject>
/*!
@@ -248,6 +275,20 @@
- (void)contentKeySession:(AVContentKeySession *)session didProvidePersistableContentKeyRequest:(AVPersistableContentKeyRequest *)keyRequest;
/*!
+ @method contentKeySession:didUpdatePersistableContentKey:forContentKeyIdentifier:
+ @abstract Provides the receiver with an updated persistable content key for a particular key request.
+ @param session
+ An instance of AVContentKeySession that is providing the updated persistable content key.
+ @param persistableContentKey
+ Updated persistable content key data that may be stored offline and used to answer future requests to content keys with matching key identifier.
+ @param keyIdentifier
+ Container- and protocol-specific identifier for the persistable content key that was updated.
+ @discussion If the content key session provides an updated persistable content key data, the previous key data is no longer valid and cannot be used to answer future loading requests.
+ */
+@optional
+- (void)contentKeySession:(AVContentKeySession *)session didUpdatePersistableContentKey:(NSData *)persistableContentKey forContentKeyIdentifier:(id)keyIdentifier API_AVAILABLE(ios(11.0)) API_UNAVAILABLE(macos, tvos, watchos);
+
+/*!
@method contentKeySession:contentKeyRequest:didFailWithError:
@abstract Informs the receiver a content key request has failed.
@param session
@@ -306,11 +347,11 @@
AVContentKeyRequestStatusRetried,
AVContentKeyRequestStatusCancelled,
AVContentKeyRequestStatusFailed
-} API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2));
+} API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos);
@class AVContentKeyRequestInternal;
-API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2))
+API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos)
@interface AVContentKeyRequest : NSObject
{
@private
@@ -356,7 +397,7 @@
@param appIdentifier
An opaque identifier for the application. The value of this identifier depends on the particular system used to provide the content key.
@param contentIdentifier
- An opaque identifier for the content. The value of this identifier depends on the particular system used to provide the content key. May be nil.
+ An opaque identifier for the content. The value of this identifier depends on the particular system used to provide the content key.
@param options
Additional information necessary to obtain the key, or nil if none. See AVContentKeyRequest*Key below.
@param handler
@@ -364,7 +405,7 @@
@discussion If option AVContentKeyRequestProtocolVersionsKey is not specified the default protocol version of 1 is assumed.
*/
- (void)makeStreamingContentKeyRequestDataForApp:(NSData *)appIdentifier
- contentIdentifier:(nullable NSData *)contentIdentifier
+ contentIdentifier:(NSData *)contentIdentifier
options:(nullable NSDictionary<NSString *, id> *)options
completionHandler:(void (^)(NSData * _Nullable contentKeyRequestData, NSError * _Nullable error))handler;
@@ -388,13 +429,13 @@
/*!
@method respondByRequestingPersistableContentKeyRequest
@abstract Informs the receiver to process a persistable content key request.
- @discussion When you receive an AVContentKeyRequest via -contentKeySession:didProvideContentKeyRequest: and you want the resulting key response to produce a key that can persist across multiple playback sessions, you must invoke -respondByRequestingPersistableContentKeyRequest on that AVContentKeyRequest in order to signal that you want to process an AVPersistableContentKeyRequest instead. If the underlying protocol supports persistable content keys, in response your delegate will receive an AVPersistableContentKeyRequest via -contentKeySession:didProvidePersistableContentKeyRequest:.
+ @discussion When you receive an AVContentKeyRequest via -contentKeySession:didProvideContentKeyRequest: and you want the resulting key response to produce a key that can persist across multiple playback sessions, you must invoke -respondByRequestingPersistableContentKeyRequest on that AVContentKeyRequest in order to signal that you want to process an AVPersistableContentKeyRequest instead. If the underlying protocol supports persistable content keys, in response your delegate will receive an AVPersistableContentKeyRequest via -contentKeySession:didProvidePersistableContentKeyRequest:. An NSInternalInconsistencyException will be raised if you attempt to create and use a persistable key but your AVContentKeySession delegate does not respond to -contentKeySession:didProvidePersistableContentKeyRequest:.
*/
-- (void)respondByRequestingPersistableContentKeyRequest;
+- (void)respondByRequestingPersistableContentKeyRequest API_AVAILABLE(ios(10.3)) API_UNAVAILABLE(macos, tvos, watchos);
@end
-API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2))
+API_AVAILABLE(ios(10.3)) API_UNAVAILABLE(macos, tvos, watchos)
@interface AVPersistableContentKeyRequest : AVContentKeyRequest
/*!
@@ -409,9 +450,9 @@
@result The persistable content key data that may be stored offline to answer future loading requests of the same content key.
 @discussion The data returned from this method may be used to immediately satisfy an AVPersistableContentKeyRequest, as well as any subsequent requests for the same key url, using the processContentKeyResponse: method. When you receive an AVContentKeyRequest via -contentKeySession:didProvideContentKeyRequest: and you want to use an existing persistent content key from storage, you must invoke -respondByRequestingPersistableContentKeyRequest on that AVContentKeyRequest in order to signal that you want to process an AVPersistableContentKeyRequest instead. If the underlying protocol supports persistable content keys, in response your delegate will receive an AVPersistableContentKeyRequest via -contentKeySession:didProvidePersistableContentKeyRequest:. You can set the persistent key from storage on the AVPersistableContentKeyRequest using processContentKeyResponse:.
*/
-- (NSData *)persistableContentKeyFromKeyVendorResponse:(NSData *)keyVendorResponse
- options:(nullable NSDictionary <NSString *, id> *)options
- error:(NSError **)outError;
+- (nullable NSData *)persistableContentKeyFromKeyVendorResponse:(NSData *)keyVendorResponse
+ options:(nullable NSDictionary <NSString *, id> *)options
+ error:(NSError * _Nullable * _Nullable)outError;
@end
@@ -431,11 +472,11 @@
@class AVContentKeyResponse
@abstract AVContentKeyResponse is used to represent the data returned from the key server when requesting a key for decrypting content.
*/
-API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2))
+API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos)
@interface AVContentKeyResponse : NSObject
{
@private
- AVContentKeyResponseInternal *_keyResponse;
+ AVContentKeyResponseInternal * _keyResponse;
}
/*!
@@ -448,14 +489,27 @@
*/
+ (instancetype)contentKeyResponseWithFairPlayStreamingKeyResponseData:(NSData *)keyResponseData;
+/*!
+ @method contentKeyResponseWithClearKeyData:initializationVector:
+ @abstract Creates an AVContentKeyResponse from the key and IV when using AVContentKeySystemClearKey as the key system.
+
+ @param keyData
+ The key used for decrypting content.
+ @param initializationVector
+ The initialization vector used for decrypting content, or nil if the initialization vector is available in the media to be decrypted.
+ @result A new AVContentKeyResponse holding Clear Key data.
+ @discussion The object created by this method is typically used with an AVContentKeyRequest created by an AVContentKeySession using keySystem AVContentKeySystemClearKey. It is passed to AVContentKeyRequest -processContentKeyResponse: in order to supply the decryptor with key data.
+*/
++ (instancetype)contentKeyResponseWithClearKeyData:(NSData *)keyData initializationVector:(nullable NSData *)initializationVector API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)) API_UNAVAILABLE(watchos);
+
@end
-// Options keys for use with -[AVContentKeySession makeStreamingContentKeyRequestDataForApp:contentIdentifier:options:completionHandler:]
+// Options keys for use with -[AVContentKeyRequest makeStreamingContentKeyRequestDataForApp:contentIdentifier:options:completionHandler:]
/*!
@constant AVContentKeyRequestProtocolVersionsKey
@abstract Specifies the versions of the content protection protocol supported by the application as an NSArray of one or more NSNumber objects.
*/
-AVF_EXPORT NSString *const AVContentKeyRequestProtocolVersionsKey API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2));
+AVF_EXPORT NSString *const AVContentKeyRequestProtocolVersionsKey API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2)) API_UNAVAILABLE(watchos);
/*!
@protocol AVContentKeyRecipient
@@ -463,7 +517,7 @@
@abstract
Classes of objects that may require decryption keys for media data in order to enable processing, such as parsing or playback, conform to this protocol.
*/
-API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2))
+API_AVAILABLE(macos(10.12.4), ios(10.3), tvos(10.2), watchos(3.3))
@protocol AVContentKeyRecipient
@required
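
The pieces added in this file compose into a clear-key flow roughly like the following Swift sketch (the ClearKeyResponder type and its stored key/IV properties are illustrative, not part of the header; the whole flow is unavailable on watchOS):

```swift
import AVFoundation

// Sketch of the Clear Key path added above (macOS 10.13 / iOS 11 / tvOS 11).
// ClearKeyResponder and its stored key/IV data are illustrative; a real app
// would obtain the key material from its own secure source.
final class ClearKeyResponder: NSObject, AVContentKeySessionDelegate {
    let keyData: Data
    let ivData: Data?

    init(keyData: Data, ivData: Data?) {
        self.keyData = keyData
        self.ivData = ivData
    }

    func contentKeySession(_ session: AVContentKeySession,
                           didProvide keyRequest: AVContentKeyRequest) {
        // Wrap the raw key/IV in a response and hand it to the request.
        let response = AVContentKeyResponse(clearKeyData: keyData,
                                            initializationVector: ivData)
        keyRequest.processContentKeyResponse(response)
    }
}

// Usage of the new keySystem-only factory from this diff:
let session = AVContentKeySession(keySystem: .clearKey)
let responder = ClearKeyResponder(keyData: Data(/* key bytes */), ivData: nil)
session.setDelegate(responder, queue: DispatchQueue(label: "content-keys"))
```
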
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVDepthData.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVDepthData.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVDepthData.h 1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVDepthData.h 2017-05-24 00:37:44.000000000 -0400
@@ -0,0 +1,206 @@
+/*
+ File: AVDepthData.h
+
+ Framework: AVFoundation
+
+ Copyright 2016-2017 Apple Inc. All rights reserved.
+*/
+
+#import <AVFoundation/AVBase.h>
+#import <AVFoundation/AVCameraCalibrationData.h>
+#import <Foundation/Foundation.h>
+#import <CoreVideo/CVPixelBufferPool.h>
+#import <ImageIO/CGImageProperties.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/*!
+ @enum AVDepthDataAccuracy
+ @abstract
+ Constants indicating the accuracy of the units expressed by depth data map values.
+
+ @constant AVDepthDataAccuracyRelative
+ Values within the depth data map are usable for foreground / background separation, but are not absolutely accurate in the physical world.
+ @constant AVDepthDataAccuracyAbsolute
+ Values within the depth map are absolutely accurate within the physical world.
+
+ @discussion
+ The accuracy of a depth data map is highly dependent on the camera calibration data used to generate it. If the camera's focal length cannot be precisely determined at the time of capture, scaling error in the z (depth) plane will be introduced. If the camera's optical center can't be precisely determined at capture time, principal point error will be introduced, leading to an offset error in the disparity estimate. AVDepthDataAccuracy constants report the accuracy of a map's values with respect to its reported units. If the accuracy is reported to be AVDepthDataAccuracyRelative, the values within the map are usable relative to one another (that is, larger depth values are farther away than smaller depth values), but do not accurately convey real world distance. Disparity maps with relative accuracy may still be used to reliably determine the difference in disparity between two points in the same map.
+*/
+typedef NS_ENUM(NSInteger, AVDepthDataAccuracy) {
+ AVDepthDataAccuracyRelative = 0,
+ AVDepthDataAccuracyAbsolute = 1,
+} NS_AVAILABLE(10_13, 11_0) __TVOS_AVAILABLE(11_0) __WATCHOS_PROHIBITED;
+
+@class AVDepthDataInternal;
+
+/*!
+ @class AVDepthData
+ @abstract
+ An object wrapping a map of disparity or depth pixel data, plus metadata.
+
+ @discussion
+ "Depth Data" is a generic term for a map of pixel data containing depth-related information. AVDepthData wraps a disparity or depth map and provides conversion methods, focus information, and camera calibration data to aid in using the map for rendering or computer vision tasks. CoreVideo supports the following four depth data pixel formats:
+ kCVPixelFormatType_DisparityFloat16 = 'hdis'
+ kCVPixelFormatType_DisparityFloat32 = 'fdis'
+ kCVPixelFormatType_DepthFloat16 = 'hdep'
+ kCVPixelFormatType_DepthFloat32 = 'fdep'
+
+ The disparity formats describe normalized shift values when comparing two images. Units are 1/meters: ( pixelShift / (pixelFocalLength * baselineInMeters) ).
+ The depth formats describe the distance to an object in meters.
+
+ Disparity / depth maps are generated from camera images containing non-rectilinear data. Camera lenses have small imperfections that cause small distortions in their resultant images compared to a pinhole camera. AVDepthData maps contain non-rectilinear (non-distortion-corrected) data as well. Their values are warped to match the lens distortion characteristics present in their accompanying YUV image. Therefore an AVDepthData map can be used as a proxy for depth when rendering effects to its accompanying image, but not to correlate points in 3D space. In order to use AVDepthData for computer vision tasks, you should use its accompanying camera calibration data to rectify the depth data (see AVCameraCalibrationData).
+
+ When capturing depth data from a camera using AVCaptureDepthDataOutput, AVDepthData objects are delivered to your AVCaptureDepthDataOutputDelegate in a streaming fashion. When capturing depth data along with photos using AVCapturePhotoOutput, depth data is delivered to your AVCapturePhotoCaptureDelegate as a property of an AVCapturePhoto (see -[AVCapturePhotoCaptureDelegate captureOutput:didFinishProcessingPhoto:error:]). When working with image files containing depth information, AVDepthData may be instantiated using information obtained from ImageIO. When editing images containing depth information, derivative AVDepthData objects may be instantiated reflecting the edits that have been performed.
+ */
+NS_CLASS_AVAILABLE(10_13, 11_0) __TVOS_AVAILABLE(11_0) __WATCHOS_PROHIBITED
+@interface AVDepthData : NSObject
+{
+@private
+ AVDepthDataInternal *_internal;
+}
+
+AV_INIT_UNAVAILABLE
+
+/*!
+ @method depthDataFromDictionaryRepresentation:error:
+ @abstract
+ Returns an AVDepthData instance from depth information in an image file.
+
+ @param imageSourceAuxDataInfoDictionary
+ A dictionary of primitive depth-related information obtained from CGImageSourceCopyAuxiliaryDataInfoAtIndex.
+ @param outError
+ On return, if the depth data cannot be created, points to an NSError describing the problem.
+ @result
+ An AVDepthData instance, or nil if the auxiliary data info dictionary was malformed.
+
+ @discussion
+ When using ImageIO framework's CGImageSource API to read from a HEIF, JPEG, or DNG file containing depth data, AVDepthData can be instantiated using the result of CGImageSourceCopyAuxiliaryDataInfoAtIndex, which returns a CFDictionary of primitive map information.
+ */
++ (nullable instancetype)depthDataFromDictionaryRepresentation:(NSDictionary *)imageSourceAuxDataInfoDictionary error:(NSError * _Nullable * _Nullable)outError;
+
+/*!
+ @method depthDataByConvertingToDepthDataType:
+ @abstract
+ Returns a converted, derivative AVDepthData instance in the specified depthDataType.
+
+ @param depthDataType
+ The OSType of the depth data object to which you'd like to convert. Must be present in availableDepthDataTypes.
+ @result
+ An AVDepthData instance.
+
+ @discussion
+ This method throws an NSInvalidArgumentException if you pass an unrecognized depthDataType. See
+ */
+- (instancetype)depthDataByConvertingToDepthDataType:(OSType)depthDataType;
+
+/*!
+ @method depthDataByApplyingExifOrientation:
+ @abstract
+ Returns a derivative AVDepthData instance in which the specified Exif orientation has been applied.
+
+ @param exifOrientation
+ One of the 8 standard Exif orientation tags expressing how the depth data should be rotated / mirrored.
+ @result
+ An AVDepthData instance.
+
+ @discussion
+ When applying simple 90 degree rotation or mirroring edits to media containing depth data, you may use this initializer to create a derivative copy of the depth in which the specified orientation is applied to both the underlying pixel map data and the camera calibration data. This method throws an NSInvalidArgumentException if you pass an unrecognized exifOrientation.
+ */
+- (instancetype)depthDataByApplyingExifOrientation:(CGImagePropertyOrientation)exifOrientation;
+
+/*!
+ @method depthDataByReplacingDepthDataMapWithPixelBuffer:error:
+ @abstract
+ Returns an AVDepthData instance wrapping the replacement depth data map pixel buffer.
+
+ @param pixelBuffer
+ A pixel buffer containing depth data information in one of the 4 supported disparity / depth pixel formats.
+ @param outError
+ On return, if the depth data cannot be created, points to an NSError describing the problem.
+ @result
+ An AVDepthData instance, or nil if the pixel buffer is malformed.
+
+ @discussion
+ When applying complex edits to media containing depth data, you may create a derivative map with arbitrary transforms applied to it, then use this initializer to create a new AVDepthData. Note that this new depth data object has no camera calibration data, so its cameraCalibrationData property always returns nil.
+ */
+- (nullable instancetype)depthDataByReplacingDepthDataMapWithPixelBuffer:(CVPixelBufferRef)pixelBuffer error:(NSError * _Nullable * _Nullable)outError;
+
+/*!
+ @property availableDepthDataTypes
+ @abstract
+ Specifies which depth data pixel formats may be used with depthDataByConvertingToDepthDataType:.
+
+ @discussion
+ This property presents the available pixel format types as an array of NSNumbers, each wrapping an OSType (CV pixel format type).
+ */
+@property(readonly) NSArray<NSNumber *> *availableDepthDataTypes;
+
+/*!
+ @method dictionaryRepresentationForAuxiliaryDataType:
+ @abstract
+ Returns a dictionary of primitive map information to be used when writing an image file with depth data.
+
+ @param outAuxDataType
+ On output, either kCGImageAuxiliaryDataTypeDisparity or kCGImageAuxiliaryDataTypeDepth, depending on the depth data's depthDataType.
+ @result
+ A dictionary of CGImageDestination compatible depth information, or nil if the auxDataType is unsupported.
+
+ @discussion
+ When using ImageIO framework's CGImageDestination API to write depth data to a HEIF, JPEG, or DNG file, you may use this method to generate a dictionary of primitive map information consumed by CGImageDestinationAddAuxiliaryDataInfo.
+ */
+- (nullable NSDictionary *)dictionaryRepresentationForAuxiliaryDataType:(NSString * _Nullable * _Nullable)outAuxDataType;
+
+/*!
+ @property depthDataType
+ @abstract
+ Specifies the pixel format type of this depth data object's internal map.
+
+ @discussion
+ One of kCVPixelFormatType_DisparityFloat16, kCVPixelFormatType_DisparityFloat32, kCVPixelFormatType_DepthFloat16, or kCVPixelFormatType_DepthFloat32.
+ */
+@property(readonly) OSType depthDataType;
+
+/*!
+ @property depthDataMap
+ @abstract
+ Provides access to the depth data object's internal map.
+
+ @discussion
+ The depth data map's pixel format can be queried using the depthDataType property.
+ */
+@property(readonly) __attribute__((NSObject)) CVPixelBufferRef depthDataMap NS_RETURNS_INNER_POINTER;
+
+/*!
+ @property depthDataFiltered
+ @abstract
+ Specifies whether the depth data pixel buffer map contains filtered (hole-filled) data.
+
+ @discussion
+ By setting either AVCaptureDepthDataOutput's filteringEnabled property or AVCapturePhotoSettings' depthDataFiltered property to YES, the resulting depth data are filtered to remove invalid pixel values that may be present due to a variety of factors including low light and lens occlusion. If you've requested depth data filtering, all depth data holes are filled. Note that filtering the depth data makes it more usable for applying effects, but alters the data such that it may no longer be suitable for computer vision tasks. Unfiltered depth maps present missing data as NaN.
+ */
+@property(readonly, getter=isDepthDataFiltered) BOOL depthDataFiltered;
+
+/*!
+ @property depthDataAccuracy
+ @abstract
+ Specifies the accuracy of the units in the depth data map's values.
+
+ @discussion
+ See AVDepthDataAccuracy documentation for more information.
+ */
+@property(readonly) AVDepthDataAccuracy depthDataAccuracy;
+
+/*!
+ @property cameraCalibrationData
+ @abstract
+ The calibration data of the camera with which AVDepthData map's values are aligned.
+
+ @discussion
+ See AVCameraCalibrationData for more information.
+ */
+@property(nullable, readonly) AVCameraCalibrationData *cameraCalibrationData;
+
+@end
+
+NS_ASSUME_NONNULL_END
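
For reference, a minimal Swift sketch of the ImageIO round trip described in the discussions above (the URL parameter is illustrative; the file must actually carry a disparity auxiliary image):

```swift
import AVFoundation
import ImageIO

// Sketch: pull depth data out of a HEIF/JPEG/DNG file via ImageIO, then
// normalize it to 32-bit disparity with the conversion method above.
func loadDisparityData(from url: URL) throws -> AVDepthData {
    guard let source = CGImageSourceCreateWithURL(url as CFURL, nil),
          let info = CGImageSourceCopyAuxiliaryDataInfoAtIndex(
              source, 0, kCGImageAuxiliaryDataTypeDisparity) as? [AnyHashable: Any]
    else {
        throw CocoaError(.fileReadCorruptFile) // no auxiliary depth image
    }
    var depthData = try AVDepthData(fromDictionaryRepresentation: info)
    if depthData.depthDataType != kCVPixelFormatType_DisparityFloat32 {
        depthData = depthData.converting(
            toDepthDataType: kCVPixelFormatType_DisparityFloat32)
    }
    return depthData
}
```
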
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.apinotes /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.apinotes
--- /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.apinotes 1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.apinotes 2017-05-23 21:01:42.000000000 -0400
@@ -0,0 +1,2140 @@
+---
+Name: AVFoundation
+Classes:
+- Name: AVAssetDownloadURLSession
+ Methods:
+ - Selector: 'assetDownloadTaskWithURLAsset:destinationURL:options:'
+ SwiftName: makeAssetDownloadTask(asset:destinationURL:options:)
+ MethodKind: Instance
+ - Selector: 'assetDownloadTaskWithURLAsset:assetTitle:assetArtworkData:options:'
+ SwiftName: makeAssetDownloadTask(asset:assetTitle:assetArtworkData:options:)
+ MethodKind: Instance
+- Name: AVAssetWriterInput
+ Methods:
+ - Selector: 'appendSampleBuffer:'
+ SwiftName: append(_:)
+ MethodKind: Instance
+- Name: AVAssetWriterInputPixelBufferAdaptor
+ Methods:
+ - Selector: 'appendPixelBuffer:withPresentationTime:'
+ SwiftName: append(_:withPresentationTime:)
+ MethodKind: Instance
+- Name: AVAssetWriterInputMetadataAdaptor
+ Methods:
+ - Selector: 'appendTimedMetadataGroup:'
+ SwiftName: append(_:)
+ MethodKind: Instance
+- Name: AVCaptureAudioDataOutput
+ Methods:
+ - Selector: 'new'
+ MethodKind: Class
+ Availability: nonswift
+ AvailabilityMsg: 'use object initializers instead'
+ - Selector: 'recommendedAudioSettingsForAssetWriterWithOutputFileType:'
+ SwiftName: 'recommendedAudioSettingsForAssetWriter(writingTo:)'
+ MethodKind: Instance
+- Name: AVCaptureAudioFileOutput
+ Methods:
+ - Selector: 'new'
+ MethodKind: Class
+ Availability: nonswift
+ AvailabilityMsg: 'use object initializers instead'
+ - Selector: 'startRecordingToOutputFileURL:outputFileType:recordingDelegate:'
+ SwiftName: 'startRecording(to:outputFileType:recordingDelegate:)'
+ MethodKind: Instance
+- Name: AVCaptureAudioPreviewOutput
+ Methods:
+ - Selector: 'new'
+ MethodKind: Class
+ Availability: nonswift
+ AvailabilityMsg: 'use object initializers instead'
+- Name: AVCaptureAutoExposureBracketedStillImageSettings
+ Methods:
+ - Selector: 'autoExposureSettingsWithExposureTargetBias:'
+ SwiftName: 'autoExposureSettings(exposureTargetBias:)'
+ MethodKind: Class
+- Name: AVCaptureDepthDataOutput
+ Methods:
+ - Selector: 'new'
+ MethodKind: Class
+ Availability: nonswift
+ AvailabilityMsg: 'use object initializers instead'
+- Name: AVCaptureDevice
+ Methods:
+ - Selector: 'authorizationStatusForMediaType:'
+ SwiftName: 'authorizationStatus(for:)'
+ MethodKind: Class
+ - Selector: 'chromaticityValuesForDeviceWhiteBalanceGains:'
+ SwiftName: 'chromaticityValues(for:)'
+ MethodKind: Instance
+ - Selector: 'defaultDeviceWithDeviceType:mediaType:position:'
+ SwiftName: 'default(_:for:position:)'
+ MethodKind: Class
+ - Selector: 'defaultDeviceWithMediaType:'
+ SwiftName: 'default(for:)'
+ MethodKind: Class
+ - Selector: 'devicesWithMediaType:'
+ SwiftName: 'devices(for:)'
+ MethodKind: Class
+ - Selector: 'requestAccessForMediaType:completionHandler:'
+ SwiftName: 'requestAccess(for:completionHandler:)'
+ MethodKind: Class
+ - Selector: 'setExposureModeCustomWithDuration:ISO:completionHandler:'
+ SwiftName: 'setExposureModeCustom(duration:iso:completionHandler:)'
+ MethodKind: Instance
+ - Selector: 'setFocusModeLockedWithLensPosition:completionHandler:'
+ SwiftName: 'setFocusModeLocked(lensPosition:completionHandler:)'
+ MethodKind: Instance
+ - Selector: 'setTorchModeOnWithLevel:error:'
+ SwiftName: 'setTorchModeOn(level:)'
+ MethodKind: Instance
+ - Selector: 'setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler:'
+ SwiftName: 'setWhiteBalanceModeLocked(with:completionHandler:)'
+ MethodKind: Instance
+ - Selector: 'supportsAVCaptureSessionPreset:'
+ SwiftName: 'supportsSessionPreset(_:)'
+ MethodKind: Instance
+ - Selector: 'temperatureAndTintValuesForDeviceWhiteBalanceGains:'
+ SwiftName: 'temperatureAndTintValues(for:)'
+ MethodKind: Instance
+- Name: AVCaptureDeviceDiscoverySession
+ SwiftName: AVCaptureDevice.DiscoverySession
+- Name: AVCaptureDeviceFormat
+ SwiftName: AVCaptureDevice.Format
+- Name: AVCaptureDeviceInputSource
+ SwiftName: AVCaptureDevice.InputSource
+- Name: AVCaptureFileOutput
+ Methods:
+ - Selector: 'startRecordingToOutputFileURL:recordingDelegate:'
+ SwiftName: 'startRecording(to:recordingDelegate:)'
+ MethodKind: Instance
+- Name: AVCaptureInputPort
+ SwiftName: AVCaptureInput.Port
+- Name: AVCaptureManualExposureBracketedStillImageSettings
+ Methods:
+ - Selector: 'manualExposureSettingsWithExposureDuration:ISO:'
+ SwiftName: 'manualExposureSettings(exposureDuration:iso:)'
+ MethodKind: Class
+- Name: AVCaptureMetadataOutput
+ Methods:
+ - Selector: 'new'
+ MethodKind: Class
+ Availability: nonswift
+ AvailabilityMsg: 'use object initializers instead'
+- Name: AVCaptureMovieFileOutput
+ Methods:
+ - Selector: 'new'
+ MethodKind: Class
+ Availability: nonswift
+ AvailabilityMsg: 'use object initializers instead'
+ - Selector: 'setRecordsVideoOrientationAndMirroringChanges:asMetadataTrackForConnection:'
+ SwiftName: 'setRecordsVideoOrientationAndMirroringChangesAsMetadataTrack(_:for:)'
+ MethodKind: Instance
+- Name: AVCaptureOutput
+ Methods:
+ - Selector: 'connectionWithMediaType:'
+ SwiftName: 'connection(with:)'
+ MethodKind: Instance
+ - Selector: 'metadataOutputRectOfInterestForRect:'
+ SwiftName: 'metadataOutputRectConverted(fromOutputRect:)'
+ MethodKind: Instance
+ - Selector: 'rectForMetadataOutputRectOfInterest:'
+ SwiftName: 'outputRectConverted(fromMetadataOutputRect:)'
+ MethodKind: Instance
+- Name: AVCapturePhotoOutput
+ Methods:
+ - Selector: 'new'
+ MethodKind: Class
+ Availability: nonswift
+ AvailabilityMsg: 'use object initializers instead'
+- Name: AVCaptureStillImageOutput
+ Methods:
+ - Selector: 'new'
+ MethodKind: Class
+ Availability: nonswift
+ AvailabilityMsg: 'use object initializers instead'
+- Name: AVCaptureVideoDataOutput
+ Methods:
+ - Selector: 'availableVideoCodecTypesForAssetWriterWithOutputFileType:'
+ SwiftName: 'availableVideoCodecTypesForAssetWriter(writingTo:)'
+ MethodKind: Instance
+ - Selector: 'new'
+ MethodKind: Class
+ Availability: nonswift
+ AvailabilityMsg: 'use object initializers instead'
+ - Selector: 'recommendedVideoSettingsForAssetWriterWithOutputFileType:'
+ SwiftName: 'recommendedVideoSettingsForAssetWriter(writingTo:)'
+ MethodKind: Instance
+- Name: AVCaptureVideoPreviewLayer
+ Methods:
+ - Selector: 'captureDevicePointOfInterestForPoint:'
+ SwiftName: 'captureDevicePointConverted(fromLayerPoint:)'
+ MethodKind: Instance
+ - Selector: 'metadataOutputRectOfInterestForRect:'
+ SwiftName: 'metadataOutputRectConverted(fromLayerRect:)'
+ MethodKind: Instance
+ - Selector: 'pointForCaptureDevicePointOfInterest:'
+ SwiftName: 'layerPointConverted(fromCaptureDevicePoint:)'
+ MethodKind: Instance
+ - Selector: 'rectForMetadataOutputRectOfInterest:'
+ SwiftName: 'layerRectConverted(fromMetadataOutputRect:)'
+ MethodKind: Instance
+- Name: AVFrameRateRange
+ SwiftName: AVCaptureDeviceFormat.FrameRateRange
+- Name: AVMutableComposition
+ Methods:
+ - Selector: 'insertTimeRange:ofAsset:atTime:error:'
+ SwiftName: insertTimeRange(_:of:at:)
+ MethodKind: Instance
+ - Selector: 'insertEmptyTimeRange:'
+ SwiftName: insertEmptyTimeRange(_:)
+ MethodKind: Instance
+ - Selector: 'removeTimeRange:'
+ SwiftName: removeTimeRange(_:)
+ MethodKind: Instance
+ - Selector: 'scaleTimeRange:toDuration:'
+ SwiftName: scaleTimeRange(_:toDuration:)
+ MethodKind: Instance
+ - Selector: 'compositionWithURLAssetInitializationOptions:'
+ SwiftName: 'init(urlAssetInitializationOptions:)'
+ MethodKind: Class
+- Name: AVCompositionTrackSegment
+ Methods:
+ - Selector: 'compositionTrackSegmentWithURL:trackID:sourceTimeRange:targetTimeRange:'
+ SwiftName: init(url:trackID:sourceTimeRange:targetTimeRange:)
+ MethodKind: Class
+- Name: AVMutableMovie
+ Methods:
+ - Selector: 'insertTimeRange:ofAsset:atTime:copySampleData:error:'
+ SwiftName: 'insertTimeRange(_:of:at:copySampleData:)'
+ MethodKind: Instance
+ - Selector: 'insertEmptyTimeRange:'
+ SwiftName: 'insertEmptyTimeRange(_:)'
+ MethodKind: Instance
+ - Selector: 'removeTimeRange:'
+ SwiftName: 'removeTimeRange(_:)'
+ MethodKind: Instance
+- Name: AVPlayer
+ Properties:
+ - Name: 'outputObscuredDueToInsufficientExternalProtection'
+ SwiftName: isOutputObscuredDueToInsufficientExternalProtection
+- Name: AVPlayerItem
+ Methods:
+ - Selector: 'selectMediaOption:inMediaSelectionGroup:'
+ SwiftName: select(_:in:)
+ MethodKind: Instance
+ - Selector: 'addOutput:'
+ SwiftName: add(_:)
+ MethodKind: Instance
+ - Selector: 'removeOutput:'
+ SwiftName: remove(_:)
+ MethodKind: Instance
+ - Selector: 'addMediaDataCollector:'
+ SwiftName: add(_:)
+ MethodKind: Instance
+ - Selector: 'removeMediaDataCollector:'
+ SwiftName: remove(_:)
+ MethodKind: Instance
+- Name: AVSampleCursor
+ Methods:
+ - Selector: 'stepByDecodeTime:wasPinned:'
+ SwiftName: step(byDecodeTime:wasPinned:)
+ MethodKind: Instance
+ - Selector: 'samplesWithEarlierDecodeTimeStampsMayHaveLaterPresentationTimeStampsThanCursor:'
+ SwiftName: maySamplesWithEarlierDecodeTimeStampsHavePresentationTimeStamps(laterThan:)
+ MethodKind: Instance
+ - Selector: 'samplesWithLaterDecodeTimeStampsMayHaveEarlierPresentationTimeStampsThanCursor:'
+ SwiftName: maySamplesWithLaterDecodeTimeStampsHavePresentationTimeStamps(earlierThan:)
+ MethodKind: Instance
+- Name: AVVideoComposition
+ Methods:
+ - Selector: 'videoCompositionWithPropertiesOfAsset:'
+ SwiftName: init(propertiesOf:)
+ MethodKind: Instance
+ - Selector: 'videoCompositionWithAsset:applyingCIFiltersWithHandler:'
+ SwiftName: init(asset:filterApplier:)
+ MethodKind: Instance
+- Name: AVMutableVideoCompositionLayerInstruction
+ Methods:
+ - Selector: 'setTransformRampFromStartTransform:toEndTransform:timeRange:'
+ SwiftName: setTransformRamp(fromStart:toEnd:timeRange:)
+ MethodKind: Instance
+ - Selector: 'setOpacityRampFromStartOpacity:toEndOpacity:timeRange:'
+ SwiftName: setOpacityRamp(fromStartOpacity:toEndOpacity:timeRange:)
+ MethodKind: Instance
+ - Selector: 'setCropRectangleRampFromStartCropRectangle:toEndCropRectangle:timeRange:'
+ SwiftName: setCropRectangleRamp(fromStartCropRectangle:toEndCropRectangle:timeRange:)
+ MethodKind: Instance
+- Name: AVAssetReader
+ Methods:
+ - Selector: 'canAddOutput:'
+ SwiftName: canAdd(_:)
+ MethodKind: Instance
+ - Selector: 'addOutput:'
+ SwiftName: add(_:)
+ MethodKind: Instance
+- Name: AVAssetResourceLoadingRequest
+ Methods:
+ - Selector: 'finishLoadingWithError:'
+ SwiftName: finishLoading(with:)
+ MethodKind: Instance
+- Name: AVAssetTrack
+ Methods:
+ - Selector: 'makeSampleCursorWithPresentationTimeStamp:'
+ SwiftName: makeSampleCursor(presentationTimeStamp:)
+ MethodKind: Instance
+- Name: AVAssetWriter
+ Methods:
+ - Selector: 'initWithURL:fileType:error:'
+ SwiftName: init(outputURL:fileType:)
+ MethodKind: Instance
+ - Selector: 'canApplyOutputSettings:forMediaType:'
+ SwiftName: canApply(outputSettings:forMediaType:)
+ MethodKind: Instance
+ - Selector: 'canAddInput:'
+ SwiftName: canAdd(_:)
+ MethodKind: Instance
+ - Selector: 'addInput:'
+ SwiftName: add(_:)
+ MethodKind: Instance
+ - Selector: 'canAddInputGroup:'
+ SwiftName: canAdd(_:)
+ MethodKind: Instance
+ - Selector: 'addInputGroup:'
+ SwiftName: add(_:)
+ MethodKind: Instance
+- Name: AVMutableAudioMixInputParameters
+ Methods:
+ - Selector: 'setVolumeRampFromStartVolume:toEndVolume:timeRange:'
+ SwiftName: setVolumeRamp(fromStartVolume:toEndVolume:timeRange:)
+ MethodKind: Instance
+- Name: AVMutableMediaSelection
+ Methods:
+ - Selector: 'selectMediaOption:inMediaSelectionGroup:'
+ SwiftName: select(_:in:)
+ MethodKind: Instance
+- Name: AVMovie
+ Methods:
+ - Selector: 'movieHeaderWithFileType:error:'
+ SwiftName: makeMovieHeader(fileType:)
+ MethodKind: Instance
+ - Selector: 'movieWithURL:options:'
+ SwiftName: init(url:options:)
+ MethodKind: Class
+- Name: AVMutableMovieTrack
+ Methods:
+ - Selector: 'appendSampleBuffer:decodeTime:presentationTime:error:'
+ SwiftName: append(_:decodeTime:presentationTime:)
+ MethodKind: Instance
+- Name: AVQueuePlayer
+ Methods:
+ - Selector: 'canInsertItem:afterItem:'
+ SwiftName: canInsert(_:after:)
+ MethodKind: Instance
+ - Selector: 'insertItem:afterItem:'
+ SwiftName: insert(_:after:)
+ MethodKind: Instance
+ - Selector: 'removeItem:'
+ SwiftName: remove(_:)
+ MethodKind: Instance
+- Name: NSValue
+ Methods:
+ - Selector: 'valueWithCMTime:'
+ SwiftName: init(time:)
+ MethodKind: Class
+ - Selector: 'valueWithCMTimeRange:'
+ SwiftName: init(timeRange:)
+ MethodKind: Class
+ - Selector: 'valueWithCMTimeMapping:'
+ SwiftName: init(timeMapping:)
+ MethodKind: Class
+ Properties:
+ - Name: CMTimeValue
+ SwiftName: timeValue
+ - Name: CMTimeRangeValue
+ SwiftName: timeRangeValue
+ - Name: CMTimeMappingValue
+ SwiftName: timeMappingValue
+- Name: NSCoder
+ Methods:
+ - Selector: 'decodeCMTimeForKey:'
+ SwiftName: decodeTime(forKey:)
+ MethodKind: Instance
+ - Selector: 'decodeCMTimeRangeForKey:'
+ SwiftName: decodeTimeRange(forKey:)
+ MethodKind: Instance
+ - Selector: 'decodeCMTimeMappingForKey:'
+ SwiftName: decodeTimeMapping(forKey:)
+ MethodKind: Instance
+- Name: AVAsynchronousVideoCompositionRequest
+ Methods:
+ - Selector: 'finishWithError:'
+ SwiftName: finish(with:)
+ MethodKind: Instance
+- Name: AVAsynchronousCIImageFilteringRequest
+ Methods:
+ - Selector: 'finishWithError:'
+ SwiftName: finish(with:)
+ MethodKind: Instance
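+
+# In Swift 4 these Classes entries surface as the renamed APIs below; a small
+# illustrative sketch (the time values are arbitrary):
+#
+#   import AVFoundation
+#
+#   // 'valueWithCMTime:' imports as NSValue(time:), and
+#   // 'insertTimeRange:ofAsset:atTime:error:' as insertTimeRange(_:of:at:).
+#   let start = CMTime(value: 0, timescale: 600)
+#   let boxedTime = NSValue(time: start)
+#
+#   func append(_ asset: AVAsset, to composition: AVMutableComposition) throws {
+#       try composition.insertTimeRange(
+#           CMTimeRange(start: kCMTimeZero, duration: asset.duration),
+#           of: asset,
+#           at: composition.duration)
+#   }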
+Protocols:
+- Name: AVCaptureAudioDataOutputSampleBufferDelegate
+ Methods:
+ - Selector: 'captureOutput:didOutputSampleBuffer:fromConnection:'
+ SwiftName: 'captureOutput(_:didOutput:from:)'
+ MethodKind: Instance
+- Name: AVCaptureDataOutputSynchronizerDelegate
+ Methods:
+ - Selector: 'dataOutputSynchronizer:didOutputSynchronizedDataCollection:'
+ SwiftName: 'dataOutputSynchronizer(_:didOutput:)'
+ MethodKind: Instance
+- Name: AVCaptureDepthDataOutputDelegate
+ Methods:
+ - Selector: 'depthDataOutput:didOutputDepthData:timestamp:connection:'
+ SwiftName: 'depthDataOutput(_:didOutput:timestamp:connection:)'
+ MethodKind: Instance
+- Name: AVCaptureFileOutputDelegate
+ Methods:
+ - Selector: 'captureOutput:didOutputSampleBuffer:fromConnection:'
+ SwiftName: 'fileOutput(_:didOutputSampleBuffer:from:)'
+ MethodKind: Instance
+ - Selector: 'captureOutputShouldProvideSampleAccurateRecordingStart:'
+ SwiftName: 'fileOutputShouldProvideSampleAccurateRecordingStart(_:)'
+ MethodKind: Instance
+ Parameters:
+ - Position: 0
+ Nullability: N
+ Type: 'AVCaptureFileOutput *'
+- Name: AVCaptureFileOutputRecordingDelegate
+ Methods:
+ - Selector: 'captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:'
+ SwiftName: 'fileOutput(_:didFinishRecordingTo:from:error:)'
+ MethodKind: Instance
+ - Selector: 'captureOutput:didPauseRecordingToOutputFileAtURL:fromConnections:'
+ SwiftName: 'fileOutput(_:didPauseRecordingTo:from:)'
+ MethodKind: Instance
+ - Selector: 'captureOutput:didResumeRecordingToOutputFileAtURL:fromConnections:'
+ SwiftName: 'fileOutput(_:didResumeRecordingTo:from:)'
+ MethodKind: Instance
+ - Selector: 'captureOutput:didStartRecordingToOutputFileAtURL:fromConnections:'
+ SwiftName: 'fileOutput(_:didStartRecordingTo:from:)'
+ MethodKind: Instance
+ - Selector: 'captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error:'
+ SwiftName: 'fileOutput(_:willFinishRecordingTo:from:error:)'
+ MethodKind: Instance
+- Name: AVCaptureMetadataOutputObjectsDelegate
+ Methods:
+ - Selector: 'captureOutput:didOutputMetadataObjects:fromConnection:'
+ SwiftName: 'metadataOutput(_:didOutput:from:)'
+ MethodKind: Instance
+ Parameters:
+ - Position: 0
+ Nullability: N
+ Type: 'AVCaptureMetadataOutput *'
+- Name: AVCapturePhotoCaptureDelegate
+ Methods:
+ - Selector: 'captureOutput:didFinishProcessingPhoto:error:'
+ SwiftName: 'photoOutput(_:didFinishProcessingPhoto:error:)'
+ MethodKind: Instance
+ - Selector: 'captureOutput:willBeginCaptureForResolvedSettings:'
+ SwiftName: 'photoOutput(_:willBeginCaptureFor:)'
+ MethodKind: Instance
+ - Selector: 'captureOutput:willCapturePhotoForResolvedSettings:'
+ SwiftName: 'photoOutput(_:willCapturePhotoFor:)'
+ MethodKind: Instance
+ - Selector: 'captureOutput:didCapturePhotoForResolvedSettings:'
+ SwiftName: 'photoOutput(_:didCapturePhotoFor:)'
+ MethodKind: Instance
+ - Selector: 'captureOutput:didFinishCaptureForResolvedSettings:error:'
+ SwiftName: 'photoOutput(_:didFinishCaptureFor:error:)'
+ MethodKind: Instance
+ - Selector: 'captureOutput:didFinishProcessingLivePhotoToMovieFileAtURL:duration:photoDisplayTime:resolvedSettings:error:'
+ SwiftName: 'photoOutput(_:didFinishProcessingLivePhotoToMovieFileAt:duration:photoDisplayTime:resolvedSettings:error:)'
+ MethodKind: Instance
+ - Selector: 'captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:'
+ SwiftName: 'photoOutput(_:didFinishProcessingPhoto:previewPhoto:resolvedSettings:bracketSettings:error:)'
+ MethodKind: Instance
+ - Selector: 'captureOutput:didFinishProcessingRawPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:'
+ SwiftName: 'photoOutput(_:didFinishProcessingRawPhoto:previewPhoto:resolvedSettings:bracketSettings:error:)'
+ MethodKind: Instance
+ - Selector: 'captureOutput:didFinishRecordingLivePhotoMovieForEventualFileAtURL:resolvedSettings:'
+ SwiftName: 'photoOutput(_:didFinishRecordingLivePhotoMovieForEventualFileAt:resolvedSettings:)'
+ MethodKind: Instance
+- Name: AVCaptureVideoDataOutputSampleBufferDelegate
+ Methods:
+ - Selector: 'captureOutput:didOutputSampleBuffer:fromConnection:'
+ SwiftName: 'captureOutput(_:didOutput:from:)'
+ MethodKind: Instance
+ - Selector: 'captureOutput:didDropSampleBuffer:fromConnection:'
+ SwiftName: 'captureOutput(_:didDrop:from:)'
+ MethodKind: Instance
+- Name: AVVideoCompositing
+ Methods:
+ - Selector: 'startVideoCompositionRequest:'
+ SwiftName: startRequest(_:)
+ MethodKind: Instance
+- Name: AVVideoCompositionValidationHandling
+ Methods:
+ - Selector: 'videoComposition:shouldContinueValidatingAfterFindingEmptyTimeRange:'
+ SwiftName: videoComposition(_:shouldContinueValidatingAfterFindingEmptyTimeRange:)
+ MethodKind: Instance
+Functions:
+- Name: AVMakeRectWithAspectRatioInsideRect
+ SwiftName: AVMakeRect(aspectRatio:insideRect:)
+Enumerators:
+- Name: AVCaptureColorSpace_sRGB
+ SwiftName: sRGB
+- Name: AVCaptureColorSpace_P3_D65
+ SwiftName: P3_D65
+- Name: AVCaptureDeviceTransportControlsNotPlayingMode
+ SwiftName: notPlaying
+- Name: AVCaptureDeviceTransportControlsPlayingMode
+ SwiftName: playing
+- Name: AVMovieWritingAddMovieHeaderToDestination
+ SwiftName: addMovieHeaderToDestination
+- Name: AVMusicSequenceLoadSMF_ChannelsToTracks
+ SwiftName: smfChannelsToTracks
+Tags:
+- Name: AVCaptureAutoFocusRangeRestriction
+ SwiftName: AVCaptureDevice.AutoFocusRangeRestriction
+- Name: AVCaptureAutoFocusSystem
+ SwiftName: AVCaptureDeviceFormat.AutoFocusSystem
+- Name: AVCaptureDevicePosition
+ SwiftName: AVCaptureDevice.Position
+- Name: AVCaptureDeviceTransportControlsPlaybackMode
+ SwiftName: AVCaptureDevice.TransportControlsPlaybackMode
+- Name: AVCaptureExposureMode
+ SwiftName: AVCaptureDevice.ExposureMode
+- Name: AVCaptureFlashMode
+ SwiftName: AVCaptureDevice.FlashMode
+- Name: AVCaptureFocusMode
+ SwiftName: AVCaptureDevice.FocusMode
+- Name: AVCaptureLensStabilizationStatus
+ SwiftName: AVCaptureDevice.LensStabilizationStatus
+- Name: AVCaptureOutputDataDroppedReason
+ SwiftName: AVCaptureOutput.DataDroppedReason
+- Name: AVCaptureSessionInterruptionReason
+ SwiftName: AVCaptureSession.InterruptionReason
+- Name: AVCaptureTorchMode
+ SwiftName: AVCaptureDevice.TorchMode
+- Name: AVCaptureWhiteBalanceMode
+ SwiftName: AVCaptureDevice.WhiteBalanceMode
+- Name: AVError
+ NSErrorDomain: AVFoundationErrorDomain
+Typedefs:
+- Name: AVCaptureDeviceTransportControlsSpeed
+ SwiftName: AVCaptureDevice.TransportControlsSpeed
+- Name: AVCaptureDeviceType
+ SwiftName: AVCaptureDevice.DeviceType
+- Name: AVCaptureSessionPreset
+ SwiftName: AVCaptureSession.Preset
+- Name: AVCaptureWhiteBalanceChromaticityValues
+ SwiftName: AVCaptureDevice.WhiteBalanceChromaticityValues
+- Name: AVCaptureWhiteBalanceGains
+ SwiftName: AVCaptureDevice.WhiteBalanceGains
+- Name: AVCaptureWhiteBalanceTemperatureAndTintValues
+ SwiftName: AVCaptureDevice.WhiteBalanceTemperatureAndTintValues
+- Name: AVMetadataObjectType
+ SwiftName: AVMetadataObject.ObjectType
+- Name: AVPlayerWaitingReason
+ SwiftName: AVPlayer.WaitingReason
+- Name: AVTrackAssociationType
+ SwiftName: AVAssetTrack.AssociationType
+Globals:
+# AVCaptureDevice constants
+- Name: AVCaptureExposureDurationCurrent
+ SwiftName: AVCaptureDevice.currentExposureDuration
+- Name: AVCaptureExposureTargetBiasCurrent
+ SwiftName: AVCaptureDevice.currentExposureTargetBias
+- Name: AVCaptureISOCurrent
+ SwiftName: AVCaptureDevice.currentISO
+- Name: AVCaptureLensPositionCurrent
+ SwiftName: AVCaptureDevice.currentLensPosition
+- Name: AVCaptureMaxAvailableTorchLevel
+ SwiftName: AVCaptureDevice.maxAvailableTorchLevel
+- Name: AVCaptureWhiteBalanceGainsCurrent
+ SwiftName: AVCaptureDevice.currentWhiteBalanceGains
+
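These device constants and the AVCaptureDevice method renames earlier in the file combine at the call site; an illustrative Swift sketch (the duration value is arbitrary):

```swift
import AVFoundation

// Illustrative: AVCaptureISOCurrent surfaces as AVCaptureDevice.currentISO,
// and setExposureModeCustomWithDuration:ISO:completionHandler: as
// setExposureModeCustom(duration:iso:completionHandler:).
func lockExposure(on device: AVCaptureDevice) throws {
    try device.lockForConfiguration()
    device.setExposureModeCustom(duration: CMTime(value: 1, timescale: 60),
                                 iso: AVCaptureDevice.currentISO,
                                 completionHandler: nil)
    device.unlockForConfiguration()
}
```
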
+# AVCaptureSessionPreset
+- Name: AVCaptureSessionPreset320x240
+ SwiftName: qvga320x240
+- Name: AVCaptureSessionPreset352x288
+ SwiftName: cif352x288
+- Name: AVCaptureSessionPreset640x480
+ SwiftName: vga640x480
+- Name: AVCaptureSessionPreset960x540
+ SwiftName: qHD960x540
+- Name: AVCaptureSessionPreset1280x720
+ SwiftName: hd1280x720
+- Name: AVCaptureSessionPreset1920x1080
+ SwiftName: hd1920x1080
+- Name: AVCaptureSessionPreset3840x2160
+ SwiftName: hd4K3840x2160
+- Name: AVCaptureSessionPresetiFrame960x540
+ SwiftName: iFrame960x540
+- Name: AVCaptureSessionPresetiFrame1280x720
+ SwiftName: iFrame1280x720
+
+# AVFileType
+- Name: AVFileType3GPP
+ SwiftName: mobile3GPP
+- Name: AVFileType3GPP2
+ SwiftName: mobile3GPP2
+- Name: AVFileTypeAC3
+ SwiftName: ac3
+- Name: AVFileTypeAIFC
+ SwiftName: aifc
+- Name: AVFileTypeAIFF
+ SwiftName: aiff
+- Name: AVFileTypeAMR
+ SwiftName: amr
+- Name: AVFileTypeAVCI
+ SwiftName: avci
+- Name: AVFileTypeAppleM4A
+ SwiftName: m4a
+- Name: AVFileTypeAppleM4V
+ SwiftName: m4v
+- Name: AVFileTypeCoreAudioFormat
+ SwiftName: caf
+- Name: AVFileTypeDNG
+ SwiftName: dng
+- Name: AVFileTypeEnhancedAC3
+ SwiftName: eac3
+- Name: AVFileTypeHEIC
+ SwiftName: heic
+- Name: AVFileTypeHEIF
+ SwiftName: heif
+- Name: AVFileTypeJPEG
+ SwiftName: jpg
+- Name: AVFileTypeMPEG4
+ SwiftName: mp4
+- Name: AVFileTypeMPEGLayer3
+ SwiftName: mp3
+- Name: AVFileTypeQuickTimeMovie
+ SwiftName: mov
+- Name: AVFileTypeSunAU
+ SwiftName: au
+- Name: AVFileTypeTIFF
+ SwiftName: tif
+- Name: AVFileTypeWAVE
+ SwiftName: wav
+
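With these renames, the file-type globals read as dot-style members in Swift, e.g. when constructing an asset writer (the output URL is whatever your app chooses):

```swift
import AVFoundation

// Illustrative: AVFileTypeQuickTimeMovie surfaces as AVFileType.mov, so the
// renamed AVAssetWriter initializer (see the Classes section) reads as:
func makeMovieWriter(outputURL: URL) throws -> AVAssetWriter {
    return try AVAssetWriter(outputURL: outputURL, fileType: .mov)
}
```
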
+# AVMetadataExtraAttributeKey
+- Name: AVMetadataExtraAttributeValueURIKey
+ SwiftName: valueURI
+- Name: AVMetadataExtraAttributeBaseURIKey
+ SwiftName: baseURI
+- Name: AVMetadataExtraAttributeInfoKey
+ SwiftName: info
+
+# AVMetadataFormat
+- Name: AVMetadataFormatiTunesMetadata
+ SwiftName: iTunesMetadata
+
+# AVMetadataIdentifieriTunesMetadata
+- Name: AVMetadataIdentifieriTunesMetadataAlbum
+ SwiftName: iTunesMetadataAlbum
+- Name: AVMetadataIdentifieriTunesMetadataArtist
+ SwiftName: iTunesMetadataArtist
+- Name: AVMetadataIdentifieriTunesMetadataUserComment
+ SwiftName: iTunesMetadataUserComment
+- Name: AVMetadataIdentifieriTunesMetadataCoverArt
+ SwiftName: iTunesMetadataCoverArt
+- Name: AVMetadataIdentifieriTunesMetadataCopyright
+ SwiftName: iTunesMetadataCopyright
+- Name: AVMetadataIdentifieriTunesMetadataReleaseDate
+ SwiftName: iTunesMetadataReleaseDate
+- Name: AVMetadataIdentifieriTunesMetadataEncodedBy
+ SwiftName: iTunesMetadataEncodedBy
+- Name: AVMetadataIdentifieriTunesMetadataPredefinedGenre
+ SwiftName: iTunesMetadataPredefinedGenre
+- Name: AVMetadataIdentifieriTunesMetadataUserGenre
+ SwiftName: iTunesMetadataUserGenre
+- Name: AVMetadataIdentifieriTunesMetadataSongName
+ SwiftName: iTunesMetadataSongName
+- Name: AVMetadataIdentifieriTunesMetadataTrackSubTitle
+ SwiftName: iTunesMetadataTrackSubTitle
+- Name: AVMetadataIdentifieriTunesMetadataEncodingTool
+ SwiftName: iTunesMetadataEncodingTool
+- Name: AVMetadataIdentifieriTunesMetadataComposer
+ SwiftName: iTunesMetadataComposer
+- Name: AVMetadataIdentifieriTunesMetadataAlbumArtist
+ SwiftName: iTunesMetadataAlbumArtist
+- Name: AVMetadataIdentifieriTunesMetadataAccountKind
+ SwiftName: iTunesMetadataAccountKind
+- Name: AVMetadataIdentifieriTunesMetadataAppleID
+ SwiftName: iTunesMetadataAppleID
+- Name: AVMetadataIdentifieriTunesMetadataArtistID
+ SwiftName: iTunesMetadataArtistID
+- Name: AVMetadataIdentifieriTunesMetadataSongID
+ SwiftName: iTunesMetadataSongID
+- Name: AVMetadataIdentifieriTunesMetadataDiscCompilation
+ SwiftName: iTunesMetadataDiscCompilation
+- Name: AVMetadataIdentifieriTunesMetadataDiscNumber
+ SwiftName: iTunesMetadataDiscNumber
+- Name: AVMetadataIdentifieriTunesMetadataGenreID
+ SwiftName: iTunesMetadataGenreID
+- Name: AVMetadataIdentifieriTunesMetadataGrouping
+ SwiftName: iTunesMetadataGrouping
+- Name: AVMetadataIdentifieriTunesMetadataPlaylistID
+ SwiftName: iTunesMetadataPlaylistID
+- Name: AVMetadataIdentifieriTunesMetadataContentRating
+ SwiftName: iTunesMetadataContentRating
+- Name: AVMetadataIdentifieriTunesMetadataBeatsPerMin
+ SwiftName: iTunesMetadataBeatsPerMin
+- Name: AVMetadataIdentifieriTunesMetadataTrackNumber
+ SwiftName: iTunesMetadataTrackNumber
+- Name: AVMetadataIdentifieriTunesMetadataArtDirector
+ SwiftName: iTunesMetadataArtDirector
+- Name: AVMetadataIdentifieriTunesMetadataArranger
+ SwiftName: iTunesMetadataArranger
+- Name: AVMetadataIdentifieriTunesMetadataAuthor
+ SwiftName: iTunesMetadataAuthor
+- Name: AVMetadataIdentifieriTunesMetadataLyrics
+ SwiftName: iTunesMetadataLyrics
+- Name: AVMetadataIdentifieriTunesMetadataAcknowledgement
+ SwiftName: iTunesMetadataAcknowledgement
+- Name: AVMetadataIdentifieriTunesMetadataConductor
+ SwiftName: iTunesMetadataConductor
+- Name: AVMetadataIdentifieriTunesMetadataDescription
+ SwiftName: iTunesMetadataDescription
+- Name: AVMetadataIdentifieriTunesMetadataDirector
+ SwiftName: iTunesMetadataDirector
+- Name: AVMetadataIdentifieriTunesMetadataEQ
+ SwiftName: iTunesMetadataEQ
+- Name: AVMetadataIdentifieriTunesMetadataLinerNotes
+ SwiftName: iTunesMetadataLinerNotes
+- Name: AVMetadataIdentifieriTunesMetadataRecordCompany
+ SwiftName: iTunesMetadataRecordCompany
+- Name: AVMetadataIdentifieriTunesMetadataOriginalArtist
+ SwiftName: iTunesMetadataOriginalArtist
+- Name: AVMetadataIdentifieriTunesMetadataPhonogramRights
+ SwiftName: iTunesMetadataPhonogramRights
+- Name: AVMetadataIdentifieriTunesMetadataProducer
+ SwiftName: iTunesMetadataProducer
+- Name: AVMetadataIdentifieriTunesMetadataPerformer
+ SwiftName: iTunesMetadataPerformer
+- Name: AVMetadataIdentifieriTunesMetadataPublisher
+ SwiftName: iTunesMetadataPublisher
+- Name: AVMetadataIdentifieriTunesMetadataSoundEngineer
+ SwiftName: iTunesMetadataSoundEngineer
+- Name: AVMetadataIdentifieriTunesMetadataSoloist
+ SwiftName: iTunesMetadataSoloist
+- Name: AVMetadataIdentifieriTunesMetadataCredits
+ SwiftName: iTunesMetadataCredits
+- Name: AVMetadataIdentifieriTunesMetadataThanks
+ SwiftName: iTunesMetadataThanks
+- Name: AVMetadataIdentifieriTunesMetadataOnlineExtras
+ SwiftName: iTunesMetadataOnlineExtras
+- Name: AVMetadataIdentifieriTunesMetadataExecProducer
+ SwiftName: iTunesMetadataExecProducer
+
+# AVMetadataKeySpace
+- Name: AVMetadataKeySpaceiTunes
+ SwiftName: iTunes
+- Name: AVMetadataKeySpaceID3
+ SwiftName: id3
+
+# AVMetadataObjectType
+- Name: AVMetadataObjectTypeUPCECode
+ SwiftName: upce
+- Name: AVMetadataObjectTypeCode39Code
+ SwiftName: code39
+- Name: AVMetadataObjectTypeCode39Mod43Code
+ SwiftName: code39Mod43
+- Name: AVMetadataObjectTypeEAN13Code
+ SwiftName: ean13
+- Name: AVMetadataObjectTypeEAN8Code
+ SwiftName: ean8
+- Name: AVMetadataObjectTypeCode93Code
+ SwiftName: code93
+- Name: AVMetadataObjectTypeCode128Code
+ SwiftName: code128
+- Name: AVMetadataObjectTypePDF417Code
+ SwiftName: pdf417
+- Name: AVMetadataObjectTypeQRCode
+ SwiftName: qr
+- Name: AVMetadataObjectTypeAztecCode
+ SwiftName: aztec
+- Name: AVMetadataObjectTypeInterleaved2of5Code
+ SwiftName: interleaved2of5
+- Name: AVMetadataObjectTypeITF14Code
+ SwiftName: itf14
+- Name: AVMetadataObjectTypeDataMatrixCode
+ SwiftName: dataMatrix
+
+# AVMetadataiTunesMetadataKey
+- Name: AVMetadataiTunesMetadataKeyAlbum
+ SwiftName: iTunesMetadataKeyAlbum
+- Name: AVMetadataiTunesMetadataKeyArtist
+ SwiftName: iTunesMetadataKeyArtist
+- Name: AVMetadataiTunesMetadataKeyUserComment
+ SwiftName: iTunesMetadataKeyUserComment
+- Name: AVMetadataiTunesMetadataKeyCoverArt
+ SwiftName: iTunesMetadataKeyCoverArt
+- Name: AVMetadataiTunesMetadataKeyCopyright
+ SwiftName: iTunesMetadataKeyCopyright
+- Name: AVMetadataiTunesMetadataKeyReleaseDate
+ SwiftName: iTunesMetadataKeyReleaseDate
+- Name: AVMetadataiTunesMetadataKeyEncodedBy
+ SwiftName: iTunesMetadataKeyEncodedBy
+- Name: AVMetadataiTunesMetadataKeyPredefinedGenre
+ SwiftName: iTunesMetadataKeyPredefinedGenre
+- Name: AVMetadataiTunesMetadataKeyUserGenre
+ SwiftName: iTunesMetadataKeyUserGenre
+- Name: AVMetadataiTunesMetadataKeySongName
+ SwiftName: iTunesMetadataKeySongName
+- Name: AVMetadataiTunesMetadataKeyTrackSubTitle
+ SwiftName: iTunesMetadataKeyTrackSubTitle
+- Name: AVMetadataiTunesMetadataKeyEncodingTool
+ SwiftName: iTunesMetadataKeyEncodingTool
+- Name: AVMetadataiTunesMetadataKeyComposer
+ SwiftName: iTunesMetadataKeyComposer
+- Name: AVMetadataiTunesMetadataKeyAlbumArtist
+ SwiftName: iTunesMetadataKeyAlbumArtist
+- Name: AVMetadataiTunesMetadataKeyAccountKind
+ SwiftName: iTunesMetadataKeyAccountKind
+- Name: AVMetadataiTunesMetadataKeyAppleID
+ SwiftName: iTunesMetadataKeyAppleID
+- Name: AVMetadataiTunesMetadataKeyArtistID
+ SwiftName: iTunesMetadataKeyArtistID
+- Name: AVMetadataiTunesMetadataKeySongID
+ SwiftName: iTunesMetadataKeySongID
+- Name: AVMetadataiTunesMetadataKeyDiscCompilation
+ SwiftName: iTunesMetadataKeyDiscCompilation
+- Name: AVMetadataiTunesMetadataKeyDiscNumber
+ SwiftName: iTunesMetadataKeyDiscNumber
+- Name: AVMetadataiTunesMetadataKeyGenreID
+ SwiftName: iTunesMetadataKeyGenreID
+- Name: AVMetadataiTunesMetadataKeyGrouping
+ SwiftName: iTunesMetadataKeyGrouping
+- Name: AVMetadataiTunesMetadataKeyPlaylistID
+ SwiftName: iTunesMetadataKeyPlaylistID
+- Name: AVMetadataiTunesMetadataKeyContentRating
+ SwiftName: iTunesMetadataKeyContentRating
+- Name: AVMetadataiTunesMetadataKeyBeatsPerMin
+ SwiftName: iTunesMetadataKeyBeatsPerMin
+- Name: AVMetadataiTunesMetadataKeyTrackNumber
+ SwiftName: iTunesMetadataKeyTrackNumber
+- Name: AVMetadataiTunesMetadataKeyArtDirector
+ SwiftName: iTunesMetadataKeyArtDirector
+- Name: AVMetadataiTunesMetadataKeyArranger
+ SwiftName: iTunesMetadataKeyArranger
+- Name: AVMetadataiTunesMetadataKeyAuthor
+ SwiftName: iTunesMetadataKeyAuthor
+- Name: AVMetadataiTunesMetadataKeyLyrics
+ SwiftName: iTunesMetadataKeyLyrics
+- Name: AVMetadataiTunesMetadataKeyAcknowledgement
+ SwiftName: iTunesMetadataKeyAcknowledgement
+- Name: AVMetadataiTunesMetadataKeyConductor
+ SwiftName: iTunesMetadataKeyConductor
+- Name: AVMetadataiTunesMetadataKeyDescription
+ SwiftName: iTunesMetadataKeyDescription
+- Name: AVMetadataiTunesMetadataKeyDirector
+ SwiftName: iTunesMetadataKeyDirector
+- Name: AVMetadataiTunesMetadataKeyEQ
+ SwiftName: iTunesMetadataKeyEQ
+- Name: AVMetadataiTunesMetadataKeyLinerNotes
+ SwiftName: iTunesMetadataKeyLinerNotes
+- Name: AVMetadataiTunesMetadataKeyRecordCompany
+ SwiftName: iTunesMetadataKeyRecordCompany
+- Name: AVMetadataiTunesMetadataKeyOriginalArtist
+ SwiftName: iTunesMetadataKeyOriginalArtist
+- Name: AVMetadataiTunesMetadataKeyPhonogramRights
+ SwiftName: iTunesMetadataKeyPhonogramRights
+- Name: AVMetadataiTunesMetadataKeyProducer
+ SwiftName: iTunesMetadataKeyProducer
+- Name: AVMetadataiTunesMetadataKeyPerformer
+ SwiftName: iTunesMetadataKeyPerformer
+- Name: AVMetadataiTunesMetadataKeyPublisher
+ SwiftName: iTunesMetadataKeyPublisher
+- Name: AVMetadataiTunesMetadataKeySoundEngineer
+ SwiftName: iTunesMetadataKeySoundEngineer
+- Name: AVMetadataiTunesMetadataKeySoloist
+ SwiftName: iTunesMetadataKeySoloist
+- Name: AVMetadataiTunesMetadataKeyCredits
+ SwiftName: iTunesMetadataKeyCredits
+- Name: AVMetadataiTunesMetadataKeyThanks
+ SwiftName: iTunesMetadataKeyThanks
+- Name: AVMetadataiTunesMetadataKeyOnlineExtras
+ SwiftName: iTunesMetadataKeyOnlineExtras
+- Name: AVMetadataiTunesMetadataKeyExecProducer
+ SwiftName: iTunesMetadataKeyExecProducer
+
+# AVPlayerWaitingReason
+- Name: AVPlayerWaitingToMinimizeStallsReason
+ SwiftName: toMinimizeStalls
+- Name: AVPlayerWaitingWhileEvaluatingBufferingRateReason
+ SwiftName: evaluatingBufferingRate
+- Name: AVPlayerWaitingWithNoItemToPlayReason
+ SwiftName: noItemToPlay
+
+# AVVideoCodecType
+- Name: AVVideoCodecTypeHEVC
+ SwiftName: hevc
+- Name: AVVideoCodecTypeH264
+ SwiftName: h264
+- Name: AVVideoCodecTypeJPEG
+ SwiftName: jpeg
+- Name: AVVideoCodecTypeAppleProRes4444
+ SwiftName: proRes4444
+- Name: AVVideoCodecTypeAppleProRes422
+ SwiftName: proRes422
+
+SwiftVersions:
+- Version: 3
+ Classes:
+ - Name: AVAssetResourceLoadingRequest
+ Methods:
+ - Selector: 'persistentContentKeyFromKeyVendorResponse:options:error:'
+ MethodKind: Instance
+ NullabilityOfRet: N
+ Nullability: [ N, O, N ]
+ - Name: AVAssetTrack
+ Methods:
+ - Selector: 'associatedTracksOfType:'
+ MethodKind: Instance
+ Parameters:
+ - Position: 0
+ Nullability: N
+ Type: 'NSString *'
+ Properties:
+ - Name: availableTrackAssociationTypes
+ PropertyKind: Instance
+ Nullability: N
+ Type: 'NSArray<NSString *> *'
+ - Name: AVCaptureAudioDataOutput
+ Methods:
+ - Selector: 'recommendedAudioSettingsForAssetWriterWithOutputFileType:'
+ MethodKind: Instance
+ NullabilityOfRet: U
+ Nullability: [ U ]
+ - Selector: 'setSampleBufferDelegate:queue:'
+ MethodKind: Instance
+ Parameters:
+ - Position: 0
+ Nullability: U
+ - Position: 1
+ Nullability: U
+ Properties:
+ - Name: audioSettings
+ PropertyKind: Instance
+ Nullability: U
+ Type: 'NSDictionary *'
+ - Name: sampleBufferCallbackQueue
+ PropertyKind: Instance
+ Nullability: U
+ - Name: sampleBufferDelegate
+ PropertyKind: Instance
+ Nullability: U
+ - Name: AVAudioEngine
+ Properties:
+ - Name: inputNode
+ PropertyKind: Instance
+ Nullability: O
+ - Name: AVAudioChannelLayout
+ Methods:
+ - Selector: 'initWithLayoutTag:'
+ MethodKind: Instance
+ NullabilityOfRet: N
+ - Name: AVAudioConverter
+ Methods:
+ - Selector: 'initFromFormat:toFormat:'
+ MethodKind: Instance
+ NullabilityOfRet: N
+ Properties:
+ - Name: sampleRateConverterAlgorithm
+ PropertyKind: Instance
+ Nullability: N
+ - Name: AVAudioFormat
+ Methods:
+ - Selector: 'initWithStreamDescription:'
+ MethodKind: Instance
+ NullabilityOfRet: N
+ - Selector: 'initWithStreamDescription:channelLayout:'
+ MethodKind: Instance
+ NullabilityOfRet: N
+ Nullability: [ N, O ]
+ - Selector: 'initStandardFormatWithSampleRate:channels:'
+ MethodKind: Instance
+ NullabilityOfRet: N
+ - Selector: 'initWithCommonFormat:sampleRate:channels:interleaved:'
+ MethodKind: Instance
+ NullabilityOfRet: N
+ - Selector: 'initWithSettings:'
+ MethodKind: Instance
+ NullabilityOfRet: N
+ - Name: AVAudioNode
+ Methods:
+ - Selector: 'nameForInputBus:'
+ MethodKind: Instance
+ NullabilityOfRet: N
+ - Selector: 'nameForOutputBus:'
+ MethodKind: Instance
+ NullabilityOfRet: N
+ - Name: AVAudioPCMBuffer
+ Methods:
+ - Selector: 'initWithPCMFormat:frameCapacity:'
+ MethodKind: Instance
+ NullabilityOfRet: N
+ - Name: AVAudioTime
+ Methods:
+ - Selector: 'extrapolateTimeFromAnchor:'
+ MethodKind: Instance
+ NullabilityOfRet: N
+ - Name: AVCaptureAudioFileOutput
+ Methods:
+ - Selector: availableOutputFileTypes
+ MethodKind: Class
+ NullabilityOfRet: U
+ ResultType: 'NSArray *'
+ - Selector: 'startRecordingToOutputFileURL:outputFileType:recordingDelegate:'
+ SwiftName: 'startRecording(toOutputFileURL:outputFileType:recordingDelegate:)'
+ MethodKind: Instance
+ Parameters:
+ - Position: 0
+ Nullability: U
+ - Position: 1
+ Nullability: U
+ - Position: 2
+ Nullability: U
+ Properties:
+ - Name: audioSettings
+ PropertyKind: Instance
+ Nullability: U
+ Type: 'NSDictionary *'
+ - Name: metadata
+ PropertyKind: Instance
+ Nullability: U
+ Type: 'NSArray *'
+ - Name: AVCaptureAudioPreviewOutput
+ Properties:
+ - Name: outputDeviceUniqueID
+ PropertyKind: Instance
+ Nullability: U
+ - Name: AVCaptureAutoExposureBracketedStillImageSettings
+ Methods:
+ - Selector: 'autoExposureSettingsWithExposureTargetBias:'
+ MethodKind: Class
+ NullabilityOfRet: U
+ - Name: AVCaptureConnection
+ Methods:
+ - Selector: 'initWithInputPorts:output:'
+ MethodKind: Instance
+ NullabilityOfRet: U
+ Nullability: [ U, U ]
+ Parameters:
+ - Position: 0
+ Type: 'NSArray *'
+ - Selector: 'initWithInputPort:videoPreviewLayer:'
+ MethodKind: Instance
+ NullabilityOfRet: U
+ Nullability: [ U, U ]
+ - Selector: 'connectionWithInputPorts:output:'
+ MethodKind: Class
+ NullabilityOfRet: U
+ Nullability: [ U, U ]
+ Parameters:
+ - Position: 0
+ Type: 'NSArray *'
+ - Selector: 'connectionWithInputPort:videoPreviewLayer:'
+ MethodKind: Class
+ NullabilityOfRet: U
+ Nullability: [ U, U ]
+ Properties:
+ - Name: audioChannels
+ PropertyKind: Instance
+ Nullability: U
+ Type: 'NSArray *'
+ - Name: inputPorts
+ PropertyKind: Instance
+ Nullability: U
+ Type: 'NSArray *'
+ - Name: output
+ PropertyKind: Instance
+ Nullability: U
+ - Name: videoPreviewLayer
+ PropertyKind: Instance
+ Nullability: U
+ - Name: AVCaptureDevice
+ Methods:
+ - Selector: 'authorizationStatusForMediaType:'
+ MethodKind: Class
+ Parameters:
+ - Position: 0
+ Nullability: U
+ - Selector: 'chromaticityValuesForDeviceWhiteBalanceGains:'
+ SwiftName: 'chromaticityValues(forDeviceWhiteBalanceGains:)'
+ MethodKind: Instance
+ - Selector: 'defaultDeviceWithDeviceType:mediaType:position:'
+ MethodKind: Class
+ NullabilityOfRet: U
+ Nullability: [ U, U, N ]
+ - Selector: 'defaultDeviceWithMediaType:'
+ MethodKind: Class
+ NullabilityOfRet: U
+ Nullability: [ U ]
+ - Selector: devices
+ MethodKind: Class
+ NullabilityOfRet: U
+ ResultType: "NSArray *"
+ - Selector: 'deviceWithUniqueID:'
+ MethodKind: Class
+ NullabilityOfRet: U
+ Nullability: [ U ]
+ - Selector: 'devicesWithMediaType:'
+ MethodKind: Class
+ NullabilityOfRet: U
+ Nullability: [ U ]
+ ResultType: 'NSArray *'
+ - Selector: 'hasMediaType:'
+ MethodKind: Instance
+ Parameters:
+ - Position: 0
+ Nullability: U
+ - Selector: 'requestAccessForMediaType:completionHandler:'
+ MethodKind: Class
+ Nullability: [ U, U ]
+ - Selector: 'setExposureModeCustomWithDuration:ISO:completionHandler:'
+ MethodKind: Instance
+ Nullability: [ N, N, U ]
+ - Selector: 'setExposureTargetBias:completionHandler:'
+ MethodKind: Instance
+ Nullability: [ N, U ]
+ - Selector: 'setFocusModeLockedWithLensPosition:completionHandler:'
+ MethodKind: Instance
+ Nullability: [ N, U ]
+ - Selector: 'setTorchModeOnWithLevel:error:'
+ SwiftName: 'setTorchModeOnWithLevel(_:)'
+ MethodKind: Instance
+ - Selector: 'setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler:'
+ MethodKind: Instance
+ Nullability: [ N, U ]
+ - Selector: 'supportsAVCaptureSessionPreset:'
+ MethodKind: Instance
+ Nullability: [ U ]
+ - Selector: 'temperatureAndTintValuesForDeviceWhiteBalanceGains:'
+ SwiftName: 'temperatureAndTintValues(forDeviceWhiteBalanceGains:)'
+ MethodKind: Instance
+ Properties:
+ - Name: activeFormat
+ PropertyKind: Instance
+ Nullability: U
+ - Name: activeInputSource
+ PropertyKind: Instance
+ Nullability: U
+ - Name: deviceType
+ PropertyKind: Instance
+ Nullability: U
+ - Name: formats
+ PropertyKind: Instance
+ Nullability: U
+ Type: 'NSArray *'
+ - Name: inputSources
+ PropertyKind: Instance
+ Nullability: U
+ Type: 'NSArray *'
+ - Name: linkedDevices
+ PropertyKind: Instance
+ Nullability: U
+ Type: 'NSArray *'
+ - Name: localizedName
+ PropertyKind: Instance
+ Nullability: U
+ - Name: manufacturer
+ PropertyKind: Instance
+ Nullability: U
+ - Name: modelID
+ PropertyKind: Instance
+ Nullability: U
+ - Name: uniqueID
+ PropertyKind: Instance
+ Nullability: U
+ - Name: AVCaptureDeviceInputSource
+ SwiftName: AVCaptureDeviceInputSource
+ Properties:
+ - Name: inputSourceID
+ PropertyKind: Instance
+ Nullability: U
+ - Name: localizedName
+ PropertyKind: Instance
+ Nullability: U
+ - Name: AVCaptureDeviceFormat
+ Properties:
+ - Name: formatDescription
+ Nullability: U
+ - Name: mediaType
+ Nullability: U
+ - Name: supportedColorSpaces
+ Nullability: U
+ - Name: videoSupportedFrameRateRanges
+ Nullability: U
+ Type: 'NSArray *'
+ - Name: AVCaptureDeviceInput
+ Methods:
+ - Selector: 'init'
+ MethodKind: Instance
+ - Selector: 'initWithDevice:error:'
+ MethodKind: Instance
+ NullabilityOfRet: U
+ Nullability: [ U, U ]
+ - Selector: 'deviceInputWithDevice:error:'
+ MethodKind: Class
+ NullabilityOfRet: U
+ Nullability: [ U, U ]
+ Properties:
+ - Name: device
+ PropertyKind: Instance
+ Nullability: U
+ - Name: AVCaptureDeviceDiscoverySession
+ Methods:
+ - Selector: 'discoverySessionWithDeviceTypes:mediaType:position:'
+ MethodKind: Class
+ NullabilityOfRet: U
+ Nullability: [ U, U, N ]
+ Properties:
+ - Name: devices
+ PropertyKind: Instance
+ Nullability: U
+ - Name: AVCaptureFileOutput
+ Methods:
+ - Selector: 'startRecordingToOutputFileURL:recordingDelegate:'
+ MethodKind: Instance
+ Nullability: [ U, U ]
+ Properties:
+ - Name: delegate
+ PropertyKind: Instance
+ Nullability: U
+ - Name: outputFileURL
+ PropertyKind: Instance
+ Nullability: U
+ - Name: AVCaptureInput
+ Properties:
+ - Name: ports
+ Nullability: U
+ Type: 'NSArray *'
+ - Name: AVCaptureInputPort
+ Properties:
+ - Name: clock
+ PropertyKind: Instance
+ Nullability: U
+ - Name: formatDescription
+ PropertyKind: Instance
+ Nullability: U
+ - Name: input
+ PropertyKind: Instance
+ Nullability: U
+ - Name: mediaType
+ PropertyKind: Instance
+ Nullability: U
+ - Name: AVCaptureManualExposureBracketedStillImageSettings
+ Methods:
+ - Selector: 'manualExposureSettingsWithExposureDuration:ISO:'
+ MethodKind: Class
+ NullabilityOfRet: U
+ - Name: AVCaptureMetadataInput
+ Methods:
+ - Selector: 'appendTimedMetadataGroup:error:'
+ MethodKind: Instance
+ Nullability: [ U, U ]
+ - Selector: 'initWithFormatDescription:clock:'
+ MethodKind: Instance
+ NullabilityOfRet: U
+ Nullability: [ U, U ]
+ - Selector: 'metadataInputWithFormatDescription:clock:'
+ MethodKind: Class
+ NullabilityOfRet: U
+ Nullability: [ U, U ]
+ - Name: AVCaptureMetadataOutput
+ Methods:
+ - Selector: 'setMetadataObjectsDelegate:queue:'
+ MethodKind: Instance
+ Nullability: [ U, U ]
+ Properties:
+ - Name: availableMetadataObjectTypes
+ PropertyKind: Instance
+ Nullability: U
+ Type: 'NSArray *'
+ - Name: metadataObjectTypes
+ PropertyKind: Instance
+ Nullability: U
+ Type: 'NSArray *'
+ - Name: metadataObjectsCallbackQueue
+ PropertyKind: Instance
+ Nullability: U
+ - Name: metadataObjectsDelegate
+ PropertyKind: Instance
+ Nullability: U
+ - Name: AVCaptureMovieFileOutput
+ Methods:
+ - Selector: 'recordsVideoOrientationAndMirroringChangesAsMetadataTrackForConnection:'
+ MethodKind: Instance
+ Nullability: [ U ]
+ - Selector: 'setOutputSettings:forConnection:'
+ MethodKind: Instance
+ Nullability: [ U, U ]
+ Parameters:
+ - Position: 0
+ Type: 'NSDictionary *'
+ - Selector: 'setRecordsVideoOrientationAndMirroringChanges:asMetadataTrackForConnection:'
+ MethodKind: Instance
+ Parameters:
+ - Position: 1
+ Nullability: U
+ - Selector: 'outputSettingsForConnection:'
+ MethodKind: Instance
+ NullabilityOfRet: U
+ Nullability: [ U ]
+ ResultType: 'NSDictionary *'
+ Properties:
+ - Name: availableVideoCodecTypes
+ PropertyKind: Instance
+ Nullability: U
+ Type: 'NSArray *'
+ - Name: metadata
+ PropertyKind: Instance
+ Nullability: U
+ Type: 'NSArray *'
+ - Name: AVCaptureOutput
+ Methods:
+ - Selector: 'connectionWithMediaType:'
+ MethodKind: Instance
+ NullabilityOfRet: U
+ Nullability: [ U ]
+ - Selector: 'metadataOutputRectOfInterestForRect:'
+ MethodKind: Instance
+ - Selector: 'rectForMetadataOutputRectOfInterest:'
+ MethodKind: Instance
+ - Selector: 'transformedMetadataObjectForMetadataObject:connection:'
+ MethodKind: Instance
+ NullabilityOfRet: U
+ Nullability: [ U, U ]
+ Properties:
+ - Name: connections
+ PropertyKind: Instance
+ Nullability: U
+ Type: "NSArray *"
+ - Name: AVCapturePhotoOutput
+ Properties:
+ - Name: availablePhotoCodecTypes
+ PropertyKind: Instance
+ Nullability: N
+ Type: 'NSArray<NSString *> *'
+ - Name: AVCaptureScreenInput
+ Methods:
+ - Selector: 'initWithDisplayID:'
+ MethodKind: Instance
+ NullabilityOfRet: U
+ - Name: AVCaptureSession
+ Methods:
+ - Selector: 'addConnection:'
+ MethodKind: Instance
+ Nullability: [ U ]
+ - Selector: 'addInput:'
+ MethodKind: Instance
+ Nullability: [ U ]
+ - Selector: 'addInputWithNoConnections:'
+ MethodKind: Instance
+ Nullability: [ U ]
+ - Selector: 'canAddConnection:'
+ MethodKind: Instance
+ Nullability: [ U ]
+ - Selector: 'canAddInput:'
+ MethodKind: Instance
+ Nullability: [ U ]
+ - Selector: 'addOutput:'
+ MethodKind: Instance
+ Nullability: [ U ]
+ - Selector: 'addOutputWithNoConnections:'
+ MethodKind: Instance
+ Nullability: [ U ]
+ - Selector: 'canAddOutput:'
+ MethodKind: Instance
+ Nullability: [ U ]
+ - Selector: 'canSetSessionPreset:'
+ MethodKind: Instance
+ Nullability: [ U ]
+ - Selector: 'removeConnection:'
+ MethodKind: Instance
+ Nullability: [ U ]
+ - Selector: 'removeInput:'
+ MethodKind: Instance
+ Nullability: [ U ]
+ - Selector: 'removeOutput:'
+ MethodKind: Instance
+ Nullability: [ U ]
+ Properties:
+ - Name: inputs
+ PropertyKind: Instance
+ Nullability: U
+ Type: 'NSArray *'
+ - Name: masterClock
+ PropertyKind: Instance
+ Nullability: U
+ - Name: outputs
+ PropertyKind: Instance
+ Nullability: U
+ Type: 'NSArray *'
+ - Name: sessionPreset
+ PropertyKind: Instance
+ Nullability: U
+ - Name: AVCaptureStillImageOutput
+ Methods:
+ - Selector: 'captureStillImageAsynchronouslyFromConnection:completionHandler:'
+ MethodKind: Instance
+ Parameters:
+ - Position: 0
+ Nullability: U
+ - Position: 1
+ Nullability: U
+ - Selector: 'captureStillImageBracketAsynchronouslyFromConnection:withSettingsArray:completionHandler:'
+ MethodKind: Instance
+ Parameters:
+ - Position: 0
+ Nullability: U
+ - Position: 1
+ Nullability: U
+ Type: 'NSArray *'
+ - Position: 2
+ Nullability: U
+ - Selector: 'jpegStillImageNSDataRepresentation:'
+ MethodKind: Class
+ NullabilityOfRet: U
+ Nullability: [ U ] # TODO: Use Parameters: instead
+ - Selector: 'prepareToCaptureStillImageBracketFromConnection:withSettingsArray:completionHandler:'
+ MethodKind: Instance
+ Parameters:
+ - Position: 0
+ Nullability: U
+ - Position: 1
+ Nullability: U
+ Type: 'NSArray *'
+ - Position: 2
+ Nullability: U
+ Properties:
+ - Name: availableImageDataCVPixelFormatTypes
+ PropertyKind: Instance
+ Nullability: U
+ Type: 'NSArray *'
+ - Name: availableImageDataCodecTypes
+ PropertyKind: Instance
+ Nullability: U
+ Type: 'NSArray *'
+ - Name: outputSettings
+ PropertyKind: Instance
+ Nullability: U
+ Type: 'NSDictionary *'
+ - Name: AVCaptureVideoDataOutput
+ Methods:
+ - Selector: 'recommendedVideoSettingsForAssetWriterWithOutputFileType:'
+ MethodKind: Instance
+ NullabilityOfRet: U
+ Nullability: [ U ]
+ ResultType: 'NSDictionary *'
+ - Selector: 'setSampleBufferDelegate:queue:'
+ MethodKind: Instance
+ Nullability: [ U, U ]
+ Properties:
+ - Name: availableVideoCVPixelFormatTypes
+ PropertyKind: Instance
+ Nullability: U
+ Type: 'NSArray *'
+ - Name: availableVideoCodecTypes
+ PropertyKind: Instance
+ Nullability: U
+ Type: 'NSArray *'
+ - Name: sampleBufferCallbackQueue
+ PropertyKind: Instance
+ Nullability: U
+ - Name: sampleBufferDelegate
+ PropertyKind: Instance
+ Nullability: U
+ - Name: videoSettings
+ PropertyKind: Instance
+ Nullability: U
+ Type: 'NSDictionary *'
+ - Name: AVCaptureVideoPreviewLayer
+ Methods:
+ - Selector: 'captureDevicePointOfInterestForPoint:'
+ SwiftName: 'captureDevicePointOfInterest(for:)'
+ MethodKind: Instance
+ - Selector: 'initWithSession:'
+ MethodKind: Instance
+ NullabilityOfRet: U
+ Nullability: [ U ]
+ - Selector: 'initWithSessionWithNoConnection:'
+ MethodKind: Instance
+ NullabilityOfRet: U
+ Nullability: [ U ]
+ - Selector: 'metadataOutputRectOfInterestForRect:'
+ SwiftName: 'metadataOutputRectOfInterest(for:)'
+ MethodKind: Instance
+ - Selector: 'pointForCaptureDevicePointOfInterest:'
+ SwiftName: 'pointForCaptureDevicePoint(ofInterest:)'
+ MethodKind: Instance
+ - Selector: 'rectForMetadataOutputRectOfInterest:'
+ SwiftName: 'rectForMetadataOutputRect(ofInterest:)'
+ MethodKind: Instance
+ - Selector: 'setSessionWithNoConnection:'
+ MethodKind: Instance
+ Nullability: [ U ]
+ - Selector: 'transformedMetadataObjectForMetadataObject:'
+ MethodKind: Instance
+ NullabilityOfRet: U
+ Nullability: [ U ]
+ Properties:
+ - Name: connection
+ PropertyKind: Instance
+ Nullability: U
+ - Name: session
+ PropertyKind: Instance
+ Nullability: U
+ - Name: videoGravity
+ PropertyKind: Instance
+ Nullability: U
+ - Name: AVContentKeyRequest
+ Methods:
+ - Selector: 'makeStreamingContentKeyRequestDataForApp:contentIdentifier:options:completionHandler:'
+ MethodKind: Instance
+ Nullability: [ N, O, O, N ]
+ - Name: AVPersistableContentKeyRequest
+ Methods:
+ - Selector: 'persistableContentKeyFromKeyVendorResponse:options:error:'
+ MethodKind: Instance
+ NullabilityOfRet: N
+ Nullability: [ N, O, N ]
+ - Name: AVContentKeySession
+ Methods:
+ - Selector: 'contentKeySessionWithKeySystem:storageDirectoryAtURL:'
+ MethodKind: Class
+ Nullability: [ N, O ]
+ - Name: AVFrameRateRange
+ SwiftName: AVFrameRateRange
+ - Name: AVMetadataMachineReadableCodeObject
+ Properties:
+ - Name: corners
+ PropertyKind: Instance
+ Nullability: U
+ Type: 'NSArray *'
+ - Name: stringValue
+ PropertyKind: Instance
+ Nullability: U
+ - Name: AVMetadataObject
+ Properties:
+ - Name: type
+ Nullability: U
+ - Name: AVMutableComposition
+ Methods:
+ - Selector: 'addMutableTrackWithMediaType:preferredTrackID:'
+ MethodKind: Instance
+ NullabilityOfRet: N
+ - Name: AVMutableMovie
+ Methods:
+ - Selector: 'addMutableTrackWithMediaType:copySettingsFromTrack:options:'
+ MethodKind: Instance
+ NullabilityOfRet: N
+ Nullability: [ N, O, O ]
+ Properties:
+ - Name: defaultMediaDataStorage
+ PropertyKind: Instance
+ Nullability: N
+ - Name: AVMutableVideoComposition
+ Methods:
+ - Selector: 'videoCompositionWithPropertiesOfAsset:'
+ MethodKind: Class
+ NullabilityOfRet: N
+ - Name: AVVideoComposition
+ Methods:
+ - Selector: 'videoCompositionWithPropertiesOfAsset:'
+ MethodKind: Class
+ NullabilityOfRet: N
+ - Name: AVPlayerItem
+ Methods:
+ - Selector: 'seekToTime:completionHandler:'
+ MethodKind: Instance
+ Parameters:
+ - Position: 1
+ Nullability: N
+ - Selector: 'seekToTime:toleranceBefore:toleranceAfter:completionHandler:'
+ MethodKind: Instance
+ Parameters:
+ - Position: 3
+ Nullability: N
+ - Selector: 'seekToDate:completionHandler:'
+ MethodKind: Instance
+ Parameters:
+ - Position: 1
+ Nullability: N
+ - Name: AVSampleBufferGenerator
+ Methods:
+ - Selector: 'createSampleBufferForRequest:'
+ MethodKind: Instance
+ NullabilityOfRet: N
+ Protocols:
+ - Name: AVCaptureAudioDataOutputSampleBufferDelegate
+ Methods:
+ - Selector: 'captureOutput:didOutputSampleBuffer:fromConnection:'
+ MethodKind: Instance
+ Parameters:
+ - Position: 0
+ Nullability: U
+ - Position: 1
+ Nullability: U
+ - Position: 2
+ Nullability: U
+ - Name: AVCaptureFileOutputDelegate
+ Methods:
+ - Selector: 'captureOutput:didOutputSampleBuffer:fromConnection:'
+ SwiftName: 'capture(_:didOutputSampleBuffer:from:)'
+ MethodKind: Instance
+ Parameters:
+ - Position: 0
+ Nullability: U
+ - Position: 1
+ Nullability: U
+ - Position: 2
+ Nullability: U
+ - Selector: 'captureOutputShouldProvideSampleAccurateRecordingStart:'
+ SwiftName: 'captureOutputShouldProvideSampleAccurateRecordingStart(_:)'
+ MethodKind: Instance
+ Parameters:
+ - Position: 0
+ Nullability: U
+ - Name: AVCaptureFileOutputRecordingDelegate
+ Methods:
+ - Selector: 'captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:'
+ SwiftName: 'capture(_:didFinishRecordingToOutputFileAt:fromConnections:error:)'
+ MethodKind: Instance
+ Parameters:
+ - Position: 0
+ Nullability: U
+ - Position: 1
+ Nullability: U
+ - Position: 2
+ Nullability: U
+ Type: 'NSArray *'
+ - Position: 3
+ Nullability: U
+ - Selector: 'captureOutput:didPauseRecordingToOutputFileAtURL:fromConnections:'
+ SwiftName: 'capture(_:didPauseRecordingToOutputFileAt:fromConnections:)'
+ MethodKind: Instance
+ Parameters:
+ - Position: 0
+ Nullability: U
+ - Position: 1
+ Nullability: U
+ - Position: 2
+ Nullability: U
+ Type: 'NSArray *'
+ - Position: 3
+ Nullability: U
+ - Selector: 'captureOutput:didResumeRecordingToOutputFileAtURL:fromConnections:'
+ SwiftName: 'capture(_:didResumeRecordingToOutputFileAt:fromConnections:)'
+ MethodKind: Instance
+ Parameters:
+ - Position: 0
+ Nullability: U
+ - Position: 1
+ Nullability: U
+ - Position: 2
+ Nullability: U
+ Type: 'NSArray *'
+ - Position: 3
+ Nullability: U
+ - Position: 4
+ Nullability: U
+ - Selector: 'captureOutput:didStartRecordingToOutputFileAtURL:fromConnections:'
+ SwiftName: 'capture(_:didStartRecordingToOutputFileAt:fromConnections:)'
+ MethodKind: Instance
+ Parameters:
+ - Position: 0
+ Nullability: U
+ - Position: 1
+ Nullability: U
+ - Position: 2
+ Nullability: U
+ Type: 'NSArray *'
+ - Selector: 'captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error:'
+ SwiftName: 'capture(_:willFinishRecordingToOutputFileAt:fromConnections:error:)'
+ MethodKind: Instance
+ Parameters:
+ - Position: 0
+ Nullability: U
+ - Position: 1
+ Nullability: U
+ - Position: 2
+ Nullability: U
+ Type: 'NSArray *'
+ - Position: 3
+ Nullability: U
+ - Position: 4
+ Nullability: U
+ - Name: AVCaptureMetadataOutputObjectsDelegate
+ Methods:
+ - Selector: 'captureOutput:didOutputMetadataObjects:fromConnection:'
+ MethodKind: Instance
+ Parameters:
+ - Position: 0
+ Nullability: U
+ - Position: 1
+ Nullability: U
+ Type: 'NSArray *'
+ - Position: 2
+ Nullability: U
+ - Name: AVCapturePhotoCaptureDelegate
+ Methods:
+ - Selector: 'captureOutput:willBeginCaptureForResolvedSettings:'
+ SwiftName: 'capture(_:willBeginCaptureForResolvedSettings:)'
+ MethodKind: Instance
+ - Selector: 'captureOutput:willCapturePhotoForResolvedSettings:'
+ SwiftName: 'capture(_:willCapturePhotoForResolvedSettings:)'
+ MethodKind: Instance
+ - Selector: 'captureOutput:didCapturePhotoForResolvedSettings:'
+ SwiftName: 'capture(_:didCapturePhotoForResolvedSettings:)'
+ MethodKind: Instance
+ - Selector: 'captureOutput:didFinishCaptureForResolvedSettings:error:'
+ SwiftName: 'capture(_:didFinishCaptureForResolvedSettings:error:)'
+ MethodKind: Instance
+ - Selector: 'captureOutput:didFinishProcessingLivePhotoToMovieFileAtURL:duration:photoDisplayTime:resolvedSettings:error:'
+ SwiftName: 'capture(_:didFinishProcessingLivePhotoToMovieFileAt:duration:photoDisplay:resolvedSettings:error:)'
+ MethodKind: Instance
+ - Selector: 'captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:'
+ SwiftName: 'capture(_:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:)'
+ MethodKind: Instance
+ - Selector: 'captureOutput:didFinishProcessingRawPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:'
+ SwiftName: 'capture(_:didFinishProcessingRawPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:)'
+ MethodKind: Instance
+ - Selector: 'captureOutput:didFinishRecordingLivePhotoMovieForEventualFileAtURL:resolvedSettings:'
+ SwiftName: 'capture(_:didFinishRecordingLivePhotoMovieForEventualFileAt:resolvedSettings:)'
+ MethodKind: Instance
+ - Name: AVCaptureVideoDataOutputSampleBufferDelegate
+ Methods:
+ - Selector: 'captureOutput:didOutputSampleBuffer:fromConnection:'
+ MethodKind: Instance
+ Parameters:
+ - Position: 0
+ Nullability: U
+ - Position: 1
+ Nullability: U
+ - Position: 2
+ Nullability: U
+ - Selector: 'captureOutput:didDropSampleBuffer:fromConnection:'
+ MethodKind: Instance
+ Parameters:
+ - Position: 0
+ Nullability: U
+ - Position: 1
+ Nullability: U
+ - Position: 2
+ Nullability: U
+ - Name: AVPlayerItemMetadataOutputPushDelegate
+ Methods:
+ - Selector: 'metadataOutput:didOutputTimedMetadataGroups:fromPlayerItemTrack:'
+ MethodKind: Instance
+ Parameters:
+ - Position: 2
+ Nullability: N
+ Enumerators:
+ - Name: AVCaptureDeviceTransportControlsNotPlayingMode
+ SwiftName: notPlayingMode
+ - Name: AVCaptureDeviceTransportControlsPlayingMode
+ SwiftName: playingMode
+ Tags:
+ - Name: AVCaptureAutoFocusSystem
+ SwiftName: AVCaptureAutoFocusSystem
+ - Name: AVCaptureAutoFocusRangeRestriction
+ SwiftName: AVCaptureAutoFocusRangeRestriction
+ - Name: AVCaptureDevicePosition
+ SwiftName: AVCaptureDevicePosition
+ - Name: AVCaptureDeviceTransportControlsPlaybackMode
+ SwiftName: AVCaptureDeviceTransportControlsPlaybackMode
+ - Name: AVCaptureExposureMode
+ SwiftName: AVCaptureExposureMode
+ - Name: AVCaptureFlashMode
+ SwiftName: AVCaptureFlashMode
+ - Name: AVCaptureFocusMode
+ SwiftName: AVCaptureFocusMode
+ - Name: AVCaptureTorchMode
+ SwiftName: AVCaptureTorchMode
+ - Name: AVCaptureSessionInterruptionReason
+ SwiftName: AVCaptureSessionInterruptionReason
+ - Name: AVCaptureWhiteBalanceMode
+ SwiftName: AVCaptureWhiteBalanceMode
+ Typedefs:
+ - Name: AVAssetDownloadSessionKey
+ SwiftWrapper: none
+ - Name: AVAssetImageGeneratorApertureMode
+ SwiftWrapper: none
+ - Name: AVAudioTimePitchAlgorithm
+ SwiftWrapper: none
+ - Name: AVCaptureDeviceTransportControlsSpeed
+ SwiftWrapper: none
+ - Name: AVCaptureDeviceType
+ SwiftName: AVCaptureDeviceType
+ - Name: AVCaptureSessionPreset
+ SwiftWrapper: none
+ - Name: AVCaptureWhiteBalanceChromaticityValues
+ SwiftName: AVCaptureWhiteBalanceChromaticityValues
+ - Name: AVCaptureWhiteBalanceGains
+ SwiftName: AVCaptureWhiteBalanceGains
+ - Name: AVCaptureWhiteBalanceTemperatureAndTintValues
+ SwiftName: AVCaptureWhiteBalanceTemperatureAndTintValues
+ - Name: AVExternalDeviceLimitedUIElement
+ SwiftWrapper: none
+ - Name: AVExternalDeviceScreenInputCapability
+ SwiftWrapper: none
+ - Name: AVFileType
+ SwiftWrapper: none
+ - Name: AVLayerVideoGravity
+ SwiftWrapper: none
+ - Name: AVMediaCharacteristic
+ SwiftWrapper: none
+ - Name: AVMediaType
+ SwiftWrapper: none
+ - Name: AVMetadataExtraAttributeKey
+ SwiftWrapper: none
+ - Name: AVMetadataFormat
+ SwiftWrapper: none
+ - Name: AVMetadataIdentifier
+ SwiftWrapper: none
+ - Name: AVMetadataObjectType
+ SwiftWrapper: none
+ - Name: AVMetadataKey
+ SwiftWrapper: none
+ - Name: AVMetadataKeySpace
+ SwiftWrapper: none
+ - Name: AVOutputSettingsPreset
+ SwiftWrapper: none
+ - Name: AVPlayerItemLegibleOutputTextStylingResolution
+ SwiftWrapper: none
+ - Name: AVPlayerWaitingReason
+ SwiftWrapper: none
+ - Name: AVTrackAssociationType
+ SwiftWrapper: none
+ Globals:
+ # AVCaptureDevice constants
+ - Name: AVCaptureExposureDurationCurrent
+ SwiftName: AVCaptureExposureDurationCurrent
+ - Name: AVCaptureExposureTargetBiasCurrent
+ SwiftName: AVCaptureExposureTargetBiasCurrent
+ - Name: AVCaptureISOCurrent
+ SwiftName: AVCaptureISOCurrent
+ - Name: AVCaptureLensPositionCurrent
+ SwiftName: AVCaptureLensPositionCurrent
+ - Name: AVCaptureMaxAvailableTorchLevel
+ SwiftName: AVCaptureMaxAvailableTorchLevel
+ - Name: AVCaptureWhiteBalanceGainsCurrent
+ SwiftName: AVCaptureWhiteBalanceGainsCurrent
+
+ # AVCaptureSessionPreset
+ - Name: AVCaptureSessionPreset320x240
+ SwiftName: AVCaptureSessionPreset320x240
+ - Name: AVCaptureSessionPreset352x288
+ SwiftName: AVCaptureSessionPreset352x288
+ - Name: AVCaptureSessionPreset640x480
+ SwiftName: AVCaptureSessionPreset640x480
+ - Name: AVCaptureSessionPreset960x540
+ SwiftName: AVCaptureSessionPreset960x540
+ - Name: AVCaptureSessionPreset1280x720
+ SwiftName: AVCaptureSessionPreset1280x720
+ - Name: AVCaptureSessionPreset1920x1080
+ SwiftName: AVCaptureSessionPreset1920x1080
+ - Name: AVCaptureSessionPreset3840x2160
+ SwiftName: AVCaptureSessionPreset3840x2160
+ - Name: AVCaptureSessionPresetiFrame960x540
+ SwiftName: AVCaptureSessionPresetiFrame960x540
+ - Name: AVCaptureSessionPresetiFrame1280x720
+ SwiftName: AVCaptureSessionPresetiFrame1280x720
+
+ # AVFileType
+ - Name: AVFileType3GPP
+ SwiftName: AVFileType3GPP
+ - Name: AVFileType3GPP2
+ SwiftName: AVFileType3GPP2
+ - Name: AVFileTypeAC3
+ SwiftName: AVFileTypeAC3
+ - Name: AVFileTypeAIFC
+ SwiftName: AVFileTypeAIFC
+ - Name: AVFileTypeAIFF
+ SwiftName: AVFileTypeAIFF
+ - Name: AVFileTypeAMR
+ SwiftName: AVFileTypeAMR
+ - Name: AVFileTypeAVCI
+ SwiftName: AVFileTypeAVCI
+ - Name: AVFileTypeAppleM4A
+ SwiftName: AVFileTypeAppleM4A
+ - Name: AVFileTypeAppleM4V
+ SwiftName: AVFileTypeAppleM4V
+ - Name: AVFileTypeCoreAudioFormat
+ SwiftName: AVFileTypeCoreAudioFormat
+ - Name: AVFileTypeDNG
+ SwiftName: AVFileTypeDNG
+ - Name: AVFileTypeEnhancedAC3
+ SwiftName: AVFileTypeEnhancedAC3
+ - Name: AVFileTypeHEIC
+ SwiftName: AVFileTypeHEIC
+ - Name: AVFileTypeHEIF
+ SwiftName: AVFileTypeHEIF
+ - Name: AVFileTypeJPEG
+ SwiftName: AVFileTypeJPEG
+ - Name: AVFileTypeMPEG4
+ SwiftName: AVFileTypeMPEG4
+ - Name: AVFileTypeMPEGLayer3
+ SwiftName: AVFileTypeMPEGLayer3
+ - Name: AVFileTypeQuickTimeMovie
+ SwiftName: AVFileTypeQuickTimeMovie
+ - Name: AVFileTypeSunAU
+ SwiftName: AVFileTypeSunAU
+ - Name: AVFileTypeTIFF
+ SwiftName: AVFileTypeTIFF
+ - Name: AVFileTypeWAVE
+ SwiftName: AVFileTypeWAVE
+
+ # AVMetadataExtraAttributeKey
+ - Name: AVMetadataExtraAttributeValueURIKey
+ SwiftName: AVMetadataExtraAttributeValueURIKey
+ - Name: AVMetadataExtraAttributeBaseURIKey
+ SwiftName: AVMetadataExtraAttributeBaseURIKey
+ - Name: AVMetadataExtraAttributeInfoKey
+ SwiftName: AVMetadataExtraAttributeInfoKey
+
+ # AVMetadataFormat
+ - Name: AVMetadataFormatiTunesMetadata
+ SwiftName: AVMetadataFormatiTunesMetadata
+
+ # AVMetadataIdentifieriTunesMetadata
+ - Name: AVMetadataIdentifieriTunesMetadataAlbum
+ SwiftName: AVMetadataIdentifieriTunesMetadataAlbum
+ - Name: AVMetadataIdentifieriTunesMetadataArtist
+ SwiftName: AVMetadataIdentifieriTunesMetadataArtist
+ - Name: AVMetadataIdentifieriTunesMetadataUserComment
+ SwiftName: AVMetadataIdentifieriTunesMetadataUserComment
+ - Name: AVMetadataIdentifieriTunesMetadataCoverArt
+ SwiftName: AVMetadataIdentifieriTunesMetadataCoverArt
+ - Name: AVMetadataIdentifieriTunesMetadataCopyright
+ SwiftName: AVMetadataIdentifieriTunesMetadataCopyright
+ - Name: AVMetadataIdentifieriTunesMetadataReleaseDate
+ SwiftName: AVMetadataIdentifieriTunesMetadataReleaseDate
+ - Name: AVMetadataIdentifieriTunesMetadataEncodedBy
+ SwiftName: AVMetadataIdentifieriTunesMetadataEncodedBy
+ - Name: AVMetadataIdentifieriTunesMetadataPredefinedGenre
+ SwiftName: AVMetadataIdentifieriTunesMetadataPredefinedGenre
+ - Name: AVMetadataIdentifieriTunesMetadataUserGenre
+ SwiftName: AVMetadataIdentifieriTunesMetadataUserGenre
+ - Name: AVMetadataIdentifieriTunesMetadataSongName
+ SwiftName: AVMetadataIdentifieriTunesMetadataSongName
+ - Name: AVMetadataIdentifieriTunesMetadataTrackSubTitle
+ SwiftName: AVMetadataIdentifieriTunesMetadataTrackSubTitle
+ - Name: AVMetadataIdentifieriTunesMetadataEncodingTool
+ SwiftName: AVMetadataIdentifieriTunesMetadataEncodingTool
+ - Name: AVMetadataIdentifieriTunesMetadataComposer
+ SwiftName: AVMetadataIdentifieriTunesMetadataComposer
+ - Name: AVMetadataIdentifieriTunesMetadataAlbumArtist
+ SwiftName: AVMetadataIdentifieriTunesMetadataAlbumArtist
+ - Name: AVMetadataIdentifieriTunesMetadataAccountKind
+ SwiftName: AVMetadataIdentifieriTunesMetadataAccountKind
+ - Name: AVMetadataIdentifieriTunesMetadataAppleID
+ SwiftName: AVMetadataIdentifieriTunesMetadataAppleID
+ - Name: AVMetadataIdentifieriTunesMetadataArtistID
+ SwiftName: AVMetadataIdentifieriTunesMetadataArtistID
+ - Name: AVMetadataIdentifieriTunesMetadataSongID
+ SwiftName: AVMetadataIdentifieriTunesMetadataSongID
+ - Name: AVMetadataIdentifieriTunesMetadataDiscCompilation
+ SwiftName: AVMetadataIdentifieriTunesMetadataDiscCompilation
+ - Name: AVMetadataIdentifieriTunesMetadataDiscNumber
+ SwiftName: AVMetadataIdentifieriTunesMetadataDiscNumber
+ - Name: AVMetadataIdentifieriTunesMetadataGenreID
+ SwiftName: AVMetadataIdentifieriTunesMetadataGenreID
+ - Name: AVMetadataIdentifieriTunesMetadataGrouping
+ SwiftName: AVMetadataIdentifieriTunesMetadataGrouping
+ - Name: AVMetadataIdentifieriTunesMetadataPlaylistID
+ SwiftName: AVMetadataIdentifieriTunesMetadataPlaylistID
+ - Name: AVMetadataIdentifieriTunesMetadataContentRating
+ SwiftName: AVMetadataIdentifieriTunesMetadataContentRating
+ - Name: AVMetadataIdentifieriTunesMetadataBeatsPerMin
+ SwiftName: AVMetadataIdentifieriTunesMetadataBeatsPerMin
+ - Name: AVMetadataIdentifieriTunesMetadataTrackNumber
+ SwiftName: AVMetadataIdentifieriTunesMetadataTrackNumber
+ - Name: AVMetadataIdentifieriTunesMetadataArtDirector
+ SwiftName: AVMetadataIdentifieriTunesMetadataArtDirector
+ - Name: AVMetadataIdentifieriTunesMetadataArranger
+ SwiftName: AVMetadataIdentifieriTunesMetadataArranger
+ - Name: AVMetadataIdentifieriTunesMetadataAuthor
+ SwiftName: AVMetadataIdentifieriTunesMetadataAuthor
+ - Name: AVMetadataIdentifieriTunesMetadataLyrics
+ SwiftName: AVMetadataIdentifieriTunesMetadataLyrics
+ - Name: AVMetadataIdentifieriTunesMetadataAcknowledgement
+ SwiftName: AVMetadataIdentifieriTunesMetadataAcknowledgement
+ - Name: AVMetadataIdentifieriTunesMetadataConductor
+ SwiftName: AVMetadataIdentifieriTunesMetadataConductor
+ - Name: AVMetadataIdentifieriTunesMetadataDescription
+ SwiftName: AVMetadataIdentifieriTunesMetadataDescription
+ - Name: AVMetadataIdentifieriTunesMetadataDirector
+ SwiftName: AVMetadataIdentifieriTunesMetadataDirector
+ - Name: AVMetadataIdentifieriTunesMetadataEQ
+ SwiftName: AVMetadataIdentifieriTunesMetadataEQ
+ - Name: AVMetadataIdentifieriTunesMetadataLinerNotes
+ SwiftName: AVMetadataIdentifieriTunesMetadataLinerNotes
+ - Name: AVMetadataIdentifieriTunesMetadataRecordCompany
+ SwiftName: AVMetadataIdentifieriTunesMetadataRecordCompany
+ - Name: AVMetadataIdentifieriTunesMetadataOriginalArtist
+ SwiftName: AVMetadataIdentifieriTunesMetadataOriginalArtist
+ - Name: AVMetadataIdentifieriTunesMetadataPhonogramRights
+ SwiftName: AVMetadataIdentifieriTunesMetadataPhonogramRights
+ - Name: AVMetadataIdentifieriTunesMetadataProducer
+ SwiftName: AVMetadataIdentifieriTunesMetadataProducer
+ - Name: AVMetadataIdentifieriTunesMetadataPerformer
+ SwiftName: AVMetadataIdentifieriTunesMetadataPerformer
+ - Name: AVMetadataIdentifieriTunesMetadataPublisher
+ SwiftName: AVMetadataIdentifieriTunesMetadataPublisher
+ - Name: AVMetadataIdentifieriTunesMetadataSoundEngineer
+ SwiftName: AVMetadataIdentifieriTunesMetadataSoundEngineer
+ - Name: AVMetadataIdentifieriTunesMetadataSoloist
+ SwiftName: AVMetadataIdentifieriTunesMetadataSoloist
+ - Name: AVMetadataIdentifieriTunesMetadataCredits
+ SwiftName: AVMetadataIdentifieriTunesMetadataCredits
+ - Name: AVMetadataIdentifieriTunesMetadataThanks
+ SwiftName: AVMetadataIdentifieriTunesMetadataThanks
+ - Name: AVMetadataIdentifieriTunesMetadataOnlineExtras
+ SwiftName: AVMetadataIdentifieriTunesMetadataOnlineExtras
+ - Name: AVMetadataIdentifieriTunesMetadataExecProducer
+ SwiftName: AVMetadataIdentifieriTunesMetadataExecProducer
+
+ # AVMetadataKeySpace
+ - Name: AVMetadataKeySpaceiTunes
+ SwiftName: AVMetadataKeySpaceiTunes
+ - Name: AVMetadataKeySpaceID3
+ SwiftName: AVMetadataKeySpaceID3
+
+ # AVMetadataObjectType
+ - Name: AVMetadataObjectTypeUPCECode
+ SwiftName: AVMetadataObjectTypeUPCECode
+ - Name: AVMetadataObjectTypeCode39Code
+ SwiftName: AVMetadataObjectTypeCode39Code
+ - Name: AVMetadataObjectTypeCode39Mod43Code
+ SwiftName: AVMetadataObjectTypeCode39Mod43Code
+ - Name: AVMetadataObjectTypeEAN13Code
+ SwiftName: AVMetadataObjectTypeEAN13Code
+ - Name: AVMetadataObjectTypeEAN8Code
+ SwiftName: AVMetadataObjectTypeEAN8Code
+ - Name: AVMetadataObjectTypeCode93Code
+ SwiftName: AVMetadataObjectTypeCode93Code
+ - Name: AVMetadataObjectTypeCode128Code
+ SwiftName: AVMetadataObjectTypeCode128Code
+ - Name: AVMetadataObjectTypePDF417Code
+ SwiftName: AVMetadataObjectTypePDF417Code
+ - Name: AVMetadataObjectTypeQRCode
+ SwiftName: AVMetadataObjectTypeQRCode
+ - Name: AVMetadataObjectTypeAztecCode
+ SwiftName: AVMetadataObjectTypeAztecCode
+ - Name: AVMetadataObjectTypeInterleaved2of5Code
+ SwiftName: AVMetadataObjectTypeInterleaved2of5Code
+ - Name: AVMetadataObjectTypeITF14Code
+ SwiftName: AVMetadataObjectTypeITF14Code
+ - Name: AVMetadataObjectTypeDataMatrixCode
+ SwiftName: AVMetadataObjectTypeDataMatrixCode
+
+ # AVMetadataiTunesMetadataKey
+ - Name: AVMetadataiTunesMetadataKeyAlbum
+ SwiftName: AVMetadataiTunesMetadataKeyAlbum
+ - Name: AVMetadataiTunesMetadataKeyArtist
+ SwiftName: AVMetadataiTunesMetadataKeyArtist
+ - Name: AVMetadataiTunesMetadataKeyUserComment
+ SwiftName: AVMetadataiTunesMetadataKeyUserComment
+ - Name: AVMetadataiTunesMetadataKeyCoverArt
+ SwiftName: AVMetadataiTunesMetadataKeyCoverArt
+ - Name: AVMetadataiTunesMetadataKeyCopyright
+ SwiftName: AVMetadataiTunesMetadataKeyCopyright
+ - Name: AVMetadataiTunesMetadataKeyReleaseDate
+ SwiftName: AVMetadataiTunesMetadataKeyReleaseDate
+ - Name: AVMetadataiTunesMetadataKeyEncodedBy
+ SwiftName: AVMetadataiTunesMetadataKeyEncodedBy
+ - Name: AVMetadataiTunesMetadataKeyPredefinedGenre
+ SwiftName: AVMetadataiTunesMetadataKeyPredefinedGenre
+ - Name: AVMetadataiTunesMetadataKeyUserGenre
+ SwiftName: AVMetadataiTunesMetadataKeyUserGenre
+ - Name: AVMetadataiTunesMetadataKeySongName
+ SwiftName: AVMetadataiTunesMetadataKeySongName
+ - Name: AVMetadataiTunesMetadataKeyTrackSubTitle
+ SwiftName: AVMetadataiTunesMetadataKeyTrackSubTitle
+ - Name: AVMetadataiTunesMetadataKeyEncodingTool
+ SwiftName: AVMetadataiTunesMetadataKeyEncodingTool
+ - Name: AVMetadataiTunesMetadataKeyComposer
+ SwiftName: AVMetadataiTunesMetadataKeyComposer
+ - Name: AVMetadataiTunesMetadataKeyAlbumArtist
+ SwiftName: AVMetadataiTunesMetadataKeyAlbumArtist
+ - Name: AVMetadataiTunesMetadataKeyAccountKind
+ SwiftName: AVMetadataiTunesMetadataKeyAccountKind
+ - Name: AVMetadataiTunesMetadataKeyAppleID
+ SwiftName: AVMetadataiTunesMetadataKeyAppleID
+ - Name: AVMetadataiTunesMetadataKeyArtistID
+ SwiftName: AVMetadataiTunesMetadataKeyArtistID
+ - Name: AVMetadataiTunesMetadataKeySongID
+ SwiftName: AVMetadataiTunesMetadataKeySongID
+ - Name: AVMetadataiTunesMetadataKeyDiscCompilation
+ SwiftName: AVMetadataiTunesMetadataKeyDiscCompilation
+ - Name: AVMetadataiTunesMetadataKeyDiscNumber
+ SwiftName: AVMetadataiTunesMetadataKeyDiscNumber
+ - Name: AVMetadataiTunesMetadataKeyGenreID
+ SwiftName: AVMetadataiTunesMetadataKeyGenreID
+ - Name: AVMetadataiTunesMetadataKeyGrouping
+ SwiftName: AVMetadataiTunesMetadataKeyGrouping
+ - Name: AVMetadataiTunesMetadataKeyPlaylistID
+ SwiftName: AVMetadataiTunesMetadataKeyPlaylistID
+ - Name: AVMetadataiTunesMetadataKeyContentRating
+ SwiftName: AVMetadataiTunesMetadataKeyContentRating
+ - Name: AVMetadataiTunesMetadataKeyBeatsPerMin
+ SwiftName: AVMetadataiTunesMetadataKeyBeatsPerMin
+ - Name: AVMetadataiTunesMetadataKeyTrackNumber
+ SwiftName: AVMetadataiTunesMetadataKeyTrackNumber
+ - Name: AVMetadataiTunesMetadataKeyArtDirector
+ SwiftName: AVMetadataiTunesMetadataKeyArtDirector
+ - Name: AVMetadataiTunesMetadataKeyArranger
+ SwiftName: AVMetadataiTunesMetadataKeyArranger
+ - Name: AVMetadataiTunesMetadataKeyAuthor
+ SwiftName: AVMetadataiTunesMetadataKeyAuthor
+ - Name: AVMetadataiTunesMetadataKeyLyrics
+ SwiftName: AVMetadataiTunesMetadataKeyLyrics
+ - Name: AVMetadataiTunesMetadataKeyAcknowledgement
+ SwiftName: AVMetadataiTunesMetadataKeyAcknowledgement
+ - Name: AVMetadataiTunesMetadataKeyConductor
+ SwiftName: AVMetadataiTunesMetadataKeyConductor
+ - Name: AVMetadataiTunesMetadataKeyDescription
+ SwiftName: AVMetadataiTunesMetadataKeyDescription
+ - Name: AVMetadataiTunesMetadataKeyDirector
+ SwiftName: AVMetadataiTunesMetadataKeyDirector
+ - Name: AVMetadataiTunesMetadataKeyEQ
+ SwiftName: AVMetadataiTunesMetadataKeyEQ
+ - Name: AVMetadataiTunesMetadataKeyLinerNotes
+ SwiftName: AVMetadataiTunesMetadataKeyLinerNotes
+ - Name: AVMetadataiTunesMetadataKeyRecordCompany
+ SwiftName: AVMetadataiTunesMetadataKeyRecordCompany
+ - Name: AVMetadataiTunesMetadataKeyOriginalArtist
+ SwiftName: AVMetadataiTunesMetadataKeyOriginalArtist
+ - Name: AVMetadataiTunesMetadataKeyPhonogramRights
+ SwiftName: AVMetadataiTunesMetadataKeyPhonogramRights
+ - Name: AVMetadataiTunesMetadataKeyProducer
+ SwiftName: AVMetadataiTunesMetadataKeyProducer
+ - Name: AVMetadataiTunesMetadataKeyPerformer
+ SwiftName: AVMetadataiTunesMetadataKeyPerformer
+ - Name: AVMetadataiTunesMetadataKeyPublisher
+ SwiftName: AVMetadataiTunesMetadataKeyPublisher
+ - Name: AVMetadataiTunesMetadataKeySoundEngineer
+ SwiftName: AVMetadataiTunesMetadataKeySoundEngineer
+ - Name: AVMetadataiTunesMetadataKeySoloist
+ SwiftName: AVMetadataiTunesMetadataKeySoloist
+ - Name: AVMetadataiTunesMetadataKeyCredits
+ SwiftName: AVMetadataiTunesMetadataKeyCredits
+ - Name: AVMetadataiTunesMetadataKeyThanks
+ SwiftName: AVMetadataiTunesMetadataKeyThanks
+ - Name: AVMetadataiTunesMetadataKeyOnlineExtras
+ SwiftName: AVMetadataiTunesMetadataKeyOnlineExtras
+ - Name: AVMetadataiTunesMetadataKeyExecProducer
+ SwiftName: AVMetadataiTunesMetadataKeyExecProducer
+
+ # AVPlayerWaitingReason
+ - Name: AVPlayerWaitingToMinimizeStallsReason
+ SwiftName: AVPlayerWaitingToMinimizeStallsReason
+ - Name: AVPlayerWaitingWhileEvaluatingBufferingRateReason
+ SwiftName: AVPlayerWaitingWhileEvaluatingBufferingRateReason
+ - Name: AVPlayerWaitingWithNoItemToPlayReason
+ SwiftName: AVPlayerWaitingWithNoItemToPlayReason
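Taken together, the apinotes above do two things: the first half gives the Objective-C constants member-style Swift 4 names (so AVCaptureSessionPreset1280x720 imports as .hd1280x720, AVMetadataObjectTypeQRCode as .qr, AVFileTypeMPEG4 as .mp4), while the `SwiftVersions: - Version: 3` block pins the old behavior for Swift 3 code: flat global names and the looser, mostly implicitly-unwrapped nullability. A minimal sketch of how the Swift 4 names surface, assuming an iOS-style target (the capture classes themselves are API_UNAVAILABLE on watchOS even though these apinotes ship in the watchOS SDK), with all app-level code purely illustrative:

```swift
import AVFoundation

let session = AVCaptureSession()

// AVCaptureSessionPreset1280x720 imports as AVCaptureSession.Preset.hd1280x720.
if session.canSetSessionPreset(.hd1280x720) {
    session.sessionPreset = .hd1280x720
}

// AVMetadataObjectTypeQRCode imports as AVMetadataObject.ObjectType.qr.
let metadataOutput = AVCaptureMetadataOutput()
if metadataOutput.availableMetadataObjectTypes.contains(.qr) {
    metadataOutput.metadataObjectTypes = [.qr]
}

// AVFileTypeMPEG4 imports as AVFileType.mp4.
let outputType: AVFileType = .mp4
```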
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.h 2017-02-20 23:29:10.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.h 2017-05-25 06:23:45.000000000 -0400
@@ -42,6 +42,7 @@
#endif
#if (TARGET_OS_IPHONE || defined(__MAC_10_7))
+#import <AVFoundation/AVCaptureDataOutputSynchronizer.h>
#import <AVFoundation/AVCaptureDevice.h>
#import <AVFoundation/AVCaptureInput.h>
#import <AVFoundation/AVCaptureOutput.h>
@@ -52,6 +53,7 @@
#import <AVFoundation/AVComposition.h>
#import <AVFoundation/AVCompositionTrack.h>
#import <AVFoundation/AVCompositionTrackSegment.h>
+#import <AVFoundation/AVDepthData.h>
#import <AVFoundation/AVError.h>
#endif
@@ -81,7 +83,10 @@
#import <AVFoundation/AVPlayerLayer.h>
#import <AVFoundation/AVPlayerLooper.h>
#import <AVFoundation/AVPlayerMediaSelectionCriteria.h>
+#import <AVFoundation/AVQueuedSampleBufferRendering.h>
+#import <AVFoundation/AVSampleBufferAudioRenderer.h>
#import <AVFoundation/AVSampleBufferDisplayLayer.h>
+#import <AVFoundation/AVSampleBufferRenderSynchronizer.h>
#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
#import <AVFoundation/AVSampleBufferGenerator.h>
#import <AVFoundation/AVSampleCursor.h>
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVQueuedSampleBufferRendering.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVQueuedSampleBufferRendering.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVQueuedSampleBufferRendering.h 1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVQueuedSampleBufferRendering.h 2017-05-25 05:32:52.000000000 -0400
@@ -0,0 +1,117 @@
+/*
+ File: AVQueuedSampleBufferRendering.h
+
+ Framework: AVFoundation
+
+ Copyright 2013-2017 Apple Inc. All rights reserved.
+
+*/
+
+#import <AVFoundation/AVBase.h>
+#import <CoreMedia/CMSampleBuffer.h>
+#import <CoreMedia/CMSync.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/*!
+ @enum AVQueuedSampleBufferRenderingStatus
+ @abstract These constants are the possible status values for queued sample buffer renderers.
+ @constant AVQueuedSampleBufferRenderingStatusUnknown
+ Indicates that the receiver is in a fresh state without any sample buffers enqueued on it.
+ @constant AVQueuedSampleBufferRenderingStatusRendering
+ Indicates at least one sample buffer has been enqueued on the receiver.
+ @constant AVQueuedSampleBufferRenderingStatusFailed
+ Indicates that the receiver cannot currently enqueue or render sample buffers because of an error.
+ */
+typedef NS_ENUM(NSInteger, AVQueuedSampleBufferRenderingStatus) {
+ AVQueuedSampleBufferRenderingStatusUnknown,
+ AVQueuedSampleBufferRenderingStatusRendering,
+ AVQueuedSampleBufferRenderingStatusFailed
+} API_AVAILABLE(macos(10.10), ios(8.0), tvos(10.2)) __WATCHOS_PROHIBITED;
+
+/*!
+ @protocol AVQueuedSampleBufferRendering
+ @abstract Defines methods for enqueueing CMSampleBuffers for presentation.
+ @discussion
+ AVSampleBufferDisplayLayer and AVSampleBufferAudioRenderer conform to this protocol. When used in conjunction with an AVSampleBufferRenderSynchronizer, an object conforming to AVQueuedSampleBufferRendering can only be attached to a single synchronizer.
+*/
+API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)) __WATCHOS_PROHIBITED
+@protocol AVQueuedSampleBufferRendering <NSObject>
+
+/*!
+ @property timebase
+ @abstract The renderer's timebase, which governs how time stamps are interpreted.
+ @discussion
+ The timebase is used to interpret time stamps.
+
+ The timebase is read-only. Use the AVSampleBufferRenderSynchronizer to set the rate or time.
+*/
+@property (retain, readonly) __attribute__((NSObject)) CMTimebaseRef timebase;
+
+/*!
+ @method enqueueSampleBuffer:
+ @abstract Sends a sample buffer in order to render its contents.
+ @discussion
+ Video-specific notes:
+
+ If sampleBuffer has the kCMSampleAttachmentKey_DoNotDisplay attachment set to kCFBooleanTrue, the frame will be decoded but not displayed. Otherwise, if sampleBuffer has the kCMSampleAttachmentKey_DisplayImmediately attachment set to kCFBooleanTrue, the decoded image will be displayed as soon as possible, replacing all previously enqueued images regardless of their timestamps. Otherwise, the decoded image will be displayed at sampleBuffer's output presentation timestamp, as interpreted by the timebase.
+
+ To schedule the removal of previous images at a specific timestamp, enqueue a marker sample buffer containing no samples, with the kCMSampleBufferAttachmentKey_EmptyMedia attachment set to kCFBooleanTrue.
+
+ IMPORTANT NOTE: attachments with the kCMSampleAttachmentKey_ prefix must be set via CMSampleBufferGetSampleAttachmentsArray and CFDictionarySetValue. Attachments with the kCMSampleBufferAttachmentKey_ prefix must be set via CMSetAttachment.
+*/
+- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer;
+
+/*!
+ @method flush
+ @abstract Instructs the receiver to discard pending enqueued sample buffers.
+ @discussion
+ Additional sample buffers can be appended after -flush.
+
+ Video-specific notes:
+
+ It is not possible to determine which sample buffers have been decoded, so the next frame passed to enqueueSampleBuffer: should be an IDR frame (also known as a key frame or sync sample).
+*/
+- (void)flush;
+
+/*!
+ @property readyForMoreMediaData
+ @abstract Indicates the readiness of the receiver to accept more sample buffers.
+ @discussion
+ An object conforming to AVQueuedSampleBufferRendering keeps track of the occupancy levels of its internal queues for the benefit of clients that enqueue sample buffers from non-real-time sources -- i.e., clients that can supply sample buffers faster than they are consumed, and so need to decide when to hold back.
+
+ Clients enqueueing sample buffers from non-real-time sources may hold off from generating or obtaining more sample buffers to enqueue when the value of readyForMoreMediaData is NO.
+
+ It is safe to call enqueueSampleBuffer: when readyForMoreMediaData is NO, but it is a bad idea to enqueue sample buffers without bound.
+
+ To help with control of the non-real-time supply of sample buffers, such clients can use -requestMediaDataWhenReadyOnQueue:usingBlock: in order to specify a block that the receiver should invoke whenever it's ready for sample buffers to be appended.
+
+ The value of readyForMoreMediaData will often change from NO to YES asynchronously, as previously supplied sample buffers are decoded and rendered.
+
+ This property is not key value observable.
+*/
+@property (readonly, getter=isReadyForMoreMediaData) BOOL readyForMoreMediaData;
+
+/*!
+ @method requestMediaDataWhenReadyOnQueue:usingBlock:
+ @abstract Instructs the target to invoke a client-supplied block repeatedly, at its convenience, in order to gather sample buffers for playback.
+ @discussion
+		The block should enqueue sample buffers to the receiver either until the receiver's readyForMoreMediaData property becomes NO or until there is no more data to supply. When the receiver has decoded enough of the previously enqueued media data to become ready for more, it will invoke the block again to obtain more.
+
+ If this method is called multiple times, only the last call is effective. Call stopRequestingMediaData to cancel this request.
+
+		Each call to requestMediaDataWhenReadyOnQueue:usingBlock: should be paired with a corresponding call to stopRequestingMediaData. Releasing the AVQueuedSampleBufferRendering object without a call to stopRequestingMediaData will result in undefined behavior.
+*/
+- (void)requestMediaDataWhenReadyOnQueue:(dispatch_queue_t)queue usingBlock:(void (^)(void))block;
+
+/*!
+ @method stopRequestingMediaData
+ @abstract Cancels any current requestMediaDataWhenReadyOnQueue:usingBlock: call.
+ @discussion
+ This method may be called from outside the block or from within the block.
+*/
+- (void)stopRequestingMediaData;
+
+@end
+
+NS_ASSUME_NONNULL_END
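
A minimal usage sketch of the protocol above (not part of the SDK headers). It shows the non-real-time feeding pattern described under readyForMoreMediaData and the two attachment mechanisms called out in the IMPORTANT NOTE. CopyNextSampleBuffer is a hypothetical client-side source, and since these APIs are __WATCHOS_PROHIBITED the sketch targets iOS/tvOS/macOS:

```objc
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>

// Hypothetical client function: returns a retained sample buffer, or NULL when the source is exhausted.
extern CMSampleBufferRef CopyNextSampleBuffer(void);

// kCMSampleAttachmentKey_ attachments go into the per-sample attachments array,
// via CMSampleBufferGetSampleAttachmentsArray and CFDictionarySetValue.
static void MarkDoNotDisplay(CMSampleBufferRef buffer)
{
    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(buffer, true);
    CFMutableDictionaryRef dict = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);
    CFDictionarySetValue(dict, kCMSampleAttachmentKey_DoNotDisplay, kCFBooleanTrue);
}

// kCMSampleBufferAttachmentKey_ attachments are set on the buffer itself via CMSetAttachment;
// a marker buffer containing no samples schedules the removal of previously enqueued images.
static void MarkEmptyMedia(CMSampleBufferRef markerBuffer)
{
    CMSetAttachment(markerBuffer, kCMSampleBufferAttachmentKey_EmptyMedia,
                    kCFBooleanTrue, kCMAttachmentMode_ShouldNotPropagate);
}

// Non-real-time feeding: supply buffers only while the renderer reports readiness.
static void FeedRenderer(id<AVQueuedSampleBufferRendering> renderer)
{
    dispatch_queue_t queue = dispatch_queue_create("com.example.enqueue", DISPATCH_QUEUE_SERIAL);
    [renderer requestMediaDataWhenReadyOnQueue:queue usingBlock:^{
        while (renderer.readyForMoreMediaData) {
            CMSampleBufferRef buffer = CopyNextSampleBuffer();
            if (buffer == NULL) {
                [renderer stopRequestingMediaData]; // always pair with the request
                return;
            }
            [renderer enqueueSampleBuffer:buffer];
            CFRelease(buffer);
        }
    }];
}
```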
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleBufferAudioRenderer.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleBufferAudioRenderer.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleBufferAudioRenderer.h 1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleBufferAudioRenderer.h 2017-05-24 00:28:27.000000000 -0400
@@ -0,0 +1,123 @@
+/*
+ File: AVSampleBufferAudioRenderer.h
+
+ Framework: AVFoundation
+
+ Copyright 2016-2017 Apple Inc. All rights reserved.
+
+*/
+
+#import <AVFoundation/AVQueuedSampleBufferRendering.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class AVSampleBufferAudioRendererInternal;
+
+/*!
+ @class AVSampleBufferAudioRenderer
+ @abstract AVSampleBufferAudioRenderer can decompress and play compressed or uncompressed audio.
+ @discussion
+ An instance of AVSampleBufferAudioRenderer must be added to an AVSampleBufferRenderSynchronizer before the first sample buffer is enqueued.
+*/
+API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)) __WATCHOS_PROHIBITED
+@interface AVSampleBufferAudioRenderer : NSObject <AVQueuedSampleBufferRendering>
+{
+@private
+ AVSampleBufferAudioRendererInternal *_audioRendererInternal;
+}
+
+/*!
+ @property status
+ @abstract Indicates the status of the audio renderer.
+ @discussion
+ A renderer begins with status AVQueuedSampleBufferRenderingStatusUnknown.
+
+ As sample buffers are enqueued for rendering using -enqueueSampleBuffer:, the renderer will transition to either AVQueuedSampleBufferRenderingStatusRendering or AVQueuedSampleBufferRenderingStatusFailed.
+
+		If the status is AVQueuedSampleBufferRenderingStatusFailed, check the value of the renderer's error property for information on the error encountered. This is a terminal status from which recovery is not always possible.
+
+ This property is key value observable.
+*/
+@property (nonatomic, readonly) AVQueuedSampleBufferRenderingStatus status;
+
+/*!
+ @property error
+ @abstract If the renderer's status is AVQueuedSampleBufferRenderingStatusFailed, this describes the error that caused the failure.
+ @discussion
+ The value of this property is an NSError that describes what caused the renderer to no longer be able to render sample buffers. The value of this property is nil unless the value of status is AVQueuedSampleBufferRenderingStatusFailed.
+*/
+@property (nonatomic, readonly, nullable) NSError *error;
+
+/*!
+ @property audioOutputDeviceUniqueID
+ @abstract Specifies the unique ID of the Core Audio output device used to play audio.
+ @discussion
+		By default, the value of this property is nil, indicating that the default audio output device is used. Otherwise, the value of this property is an NSString containing the unique ID of the Core Audio output device to be used for audio output.
+
+ Core Audio's kAudioDevicePropertyDeviceUID is a suitable source of audio output device unique IDs.
+
+ Modifying this property while the timebase's rate is not 0.0 may cause the rate to briefly change to 0.0.
+
+		On macOS, the audio device's clock may be used as the clock for the timebases of the AVSampleBufferRenderSynchronizer and of all attached objects conforming to AVQueuedSampleBufferRendering. If audioOutputDeviceUniqueID is modified, the clocks of all of these timebases may also change.
+
+ If multiple AVSampleBufferAudioRenderers with different values for audioOutputDeviceUniqueID are attached to the same AVSampleBufferRenderSynchronizer, audio may not stay in sync during playback. To avoid this, ensure that all synchronized AVSampleBufferAudioRenderers are using the same audio output device.
+*/
+@property (nonatomic, copy, nullable) NSString *audioOutputDeviceUniqueID API_AVAILABLE(macos(10.13));
+
+/*!
+ @property audioTimePitchAlgorithm
+ @abstract Indicates the processing algorithm used to manage audio pitch at varying rates.
+ @discussion
+		Constants for various time pitch algorithms, e.g. AVAudioTimePitchAlgorithmSpectral, are defined in AVAudioProcessingSettings.h.
+
+ The default value on iOS is AVAudioTimePitchAlgorithmLowQualityZeroLatency and on macOS is AVAudioTimePitchAlgorithmTimeDomain.
+
+ If the timebase's rate is not supported by the audioTimePitchAlgorithm, audio will be muted.
+
+ Modifying this property while the timebase's rate is not 0.0 may cause the rate to briefly change to 0.0.
+*/
+@property (nonatomic, copy) NSString *audioTimePitchAlgorithm;
+
+@end
+
+@interface AVSampleBufferAudioRenderer (AVSampleBufferAudioRendererVolumeControl)
+
+/*!
+ @property volume
+ @abstract Indicates the current audio volume of the AVSampleBufferAudioRenderer.
+ @discussion
+ A value of 0.0 means "silence all audio", while 1.0 means "play at the full volume of the audio media".
+
+ This property should be used for frequent volume changes, for example via a volume knob or fader.
+
+ This property is most useful on iOS to control the volume of the AVSampleBufferAudioRenderer relative to other audio output, not for setting absolute volume.
+*/
+@property (nonatomic) float volume;
+
+/*!
+ @property muted
+ @abstract Indicates whether or not audio output of the AVSampleBufferAudioRenderer is muted.
+ @discussion
+ Setting this property only affects audio muting for the renderer instance and not for the device.
+ */
+@property (nonatomic, getter=isMuted) BOOL muted;
+
+@end
+
+@interface AVSampleBufferAudioRenderer (AVSampleBufferAudioRendererQueueManagement)
+
+/*!
+ @method flushFromSourceTime:completionHandler:
+ @abstract Flushes enqueued sample buffers with presentation time stamps later than or equal to the specified time.
+	@param time
+		The time from which enqueued sample buffers will be flushed; sample buffers with presentation time stamps later than or equal to this time are discarded.
+	@param completionHandler
+		A block that is invoked, possibly asynchronously, after the flush operation completes or fails.
+ @discussion
+		This method can be used to replace media data scheduled to be rendered in the future, without interrupting playback. For example, if the enqueued data covers a sequence of two songs and the second song is swapped for a different one, this method would be called with the time stamp of the first sample buffer of the second song. After the completion handler is executed with a YES parameter, media data may again be enqueued with time stamps at the specified time.
+
+		If NO is provided to the completion handler, the flush did not succeed and the set of enqueued sample buffers remains unchanged. A flush can fail because the source time was too close to (or earlier than) the current time, or because the current configuration of the receiver does not support flushing at a particular time. In these cases, the caller can choose to flush all enqueued media data by invoking the -flush method.
+ */
+- (void)flushFromSourceTime:(CMTime)time completionHandler:(void (^)(BOOL flushSucceeded))completionHandler;
+
+@end
+
+NS_ASSUME_NONNULL_END
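
A minimal sketch of the flushFromSourceTime:completionHandler: pattern described above (not part of the SDK headers); songBoundary is a hypothetical presentation time stamp marking the first sample buffer of the song being replaced:

```objc
#import <AVFoundation/AVFoundation.h>

// Replace upcoming audio without interrupting playback. The renderer is assumed
// to already be attached to an AVSampleBufferRenderSynchronizer, as required above.
static void ReplaceUpcomingAudio(AVSampleBufferAudioRenderer *renderer, CMTime songBoundary)
{
    [renderer flushFromSourceTime:songBoundary completionHandler:^(BOOL flushSucceeded) {
        if (flushSucceeded) {
            // Sample buffers with time stamps at or after songBoundary may now be enqueued again.
        } else {
            // The source time was too close to (or earlier than) the current time,
            // or the receiver's configuration does not support flushing at that time;
            // fall back to discarding all enqueued media data.
            [renderer flush];
        }
    }];
}
```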
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleBufferRenderSynchronizer.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleBufferRenderSynchronizer.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleBufferRenderSynchronizer.h 1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/WatchOS.platform/Developer/SDKs/WatchOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVSampleBufferRenderSynchronizer.h 2017-05-24 00:37:44.000000000 -0400
@@ -0,0 +1,165 @@
+/*
+ File: AVSampleBufferRenderSynchronizer.h
+
+ Framework: AVFoundation
+
+ Copyright 2013-2017 Apple Inc. All rights reserved.
+
+*/
+
+#import <AVFoundation/AVBase.h>
+#import <Foundation/Foundation.h>
+#import <CoreMedia/CMSync.h>
+
+@protocol AVQueuedSampleBufferRendering;
+@class AVSampleBufferRenderSynchronizerInternal;
+
+NS_ASSUME_NONNULL_BEGIN
+
+/*!
+ @class AVSampleBufferRenderSynchronizer
+ @abstract AVSampleBufferRenderSynchronizer can synchronize multiple objects conforming to AVQueuedSampleBufferRendering to a single timebase.
+*/
+API_AVAILABLE(macos(10.13), ios(11.0), tvos(11.0)) __WATCHOS_PROHIBITED
+@interface AVSampleBufferRenderSynchronizer : NSObject
+{
+@private
+ AVSampleBufferRenderSynchronizerInternal *_synchronizerInternal;
+}
+
+/*!
+ @property timebase
+ @abstract The synchronizer's rendering timebase, which governs how time stamps are interpreted.
+ @discussion
+ By default, this timebase will be driven by the clock of an added AVSampleBufferAudioRenderer.
+
+ If no AVSampleBufferAudioRenderer has been added, the master clock will be the host time clock (mach_absolute_time with the appropriate timescale conversion; this is the same as Core Animation's CACurrentMediaTime).
+
+		The timebase is read-only. Use the rate property and the setRate:time: method to adjust it.
+*/
+@property (retain, readonly) __attribute__((NSObject)) CMTimebaseRef timebase;
+
+/*!
+ @property rate
+ @abstract Playback rate.
+ @discussion
+ Indicates the current rate of rendering. A value of 0.0 means "stopped"; a value of 1.0 means "play at the natural rate of the media". Must be greater than or equal to 0.0.
+*/
+@property (nonatomic, readwrite) float rate;
+
+/*!
+ @method setRate:time:
+ @abstract Sets the timebase's time and rate.
+	@param rate
+		A new timebase rate to set. Must be greater than or equal to 0.0.
+	@param time
+		A new time to set. Must be greater than or equal to kCMTimeZero, or kCMTimeInvalid to leave the timebase's time unchanged.
+ @discussion
+ Sets the timebase's time to time and then sets the rendering rate to rate. A rate value of 0.0 means "stopped"; a rate value of 1.0 means "play at the natural rate of the media". Use kCMTimeInvalid for time to not modify the timebase's time.
+*/
+- (void)setRate:(float)rate time:(CMTime)time;
+
+@end
+
+@interface AVSampleBufferRenderSynchronizer (AVSampleBufferRenderSynchronizerRendererManagement)
+
+/*!
+ @property renderers
+ @abstract Array of id<AVQueuedSampleBufferRendering> currently attached to the synchronizer.
+ @discussion
+ A list of renderers added to and not removed from the synchronizer. The list also includes renderers that have been scheduled to be removed but have not yet been removed.
+
+		This property is not key value observable.
+*/
+@property (atomic, readonly) NSArray<__kindof id <AVQueuedSampleBufferRendering>> *renderers;
+
+/*!
+ @method addRenderer:
+ @abstract Adds a renderer to the list of renderers under the synchronizer's control.
+ @param renderer
+ An object conforming to AVQueuedSampleBufferRendering to be synchronized by this synchronizer.
+ @discussion
+ Adds a renderer to begin operating with the synchronizer's timebase.
+
+ This method can be called while rate is non-0.0.
+*/
+- (void)addRenderer:(id <AVQueuedSampleBufferRendering>)renderer;
+
+/*!
+ @method removeRenderer:atTime:completionHandler:
+ @abstract Removes a renderer from the list of renderers under the synchronizer's control.
+ @param renderer
+		The object conforming to AVQueuedSampleBufferRendering that should no longer be synchronized by this synchronizer.
+ @param time
+ The time on the timebase's timeline at which the renderer should be removed.
+ @param completionHandler
+ Optional. A block called when the renderer is removed from the synchronizer. If provided, this block will always be called with didRemoveRenderer indicating whether the renderer was removed by this scheduled removal.
+ @discussion
+ This method can be called while rate is non-0.0.
+
+ time is used to schedule future removals. If the time is in the past, the renderer will be removed immediately. kCMTimeInvalid can also be used to force immediate removal.
+
+ This method removes the renderer asynchronously. The method can be called more than once, with a subsequent scheduled removal replacing a previously scheduled removal.
+
+ Clients may provide an optional completionHandler block to be notified when the scheduled removal completes. If provided, completionHandler will always be called with the following values for didRemoveRenderer:
+
+ - If the renderer has not been added to this synchronizer, completionHandler will be called and didRemoveRenderer will be NO.
+ - If a removal of a particular renderer is scheduled after another removal of that same renderer has already been scheduled but not yet occurred, the previously-scheduled removal's completionHandler will be called and didRemoveRenderer will be NO. The new scheduled removal's completionHandler will not be called until it is replaced by another scheduled removal or the renderer is actually removed.
+ - When the renderer is removed due to a scheduled removal, the completionHandler provided when that removal was scheduled will be called and didRemoveRenderer will be YES.
+*/
+- (void)removeRenderer:(id <AVQueuedSampleBufferRendering>)renderer atTime:(CMTime)time completionHandler:(nullable void (^)(BOOL didRemoveRenderer))completionHandler;
+
+@end
+
+@interface AVSampleBufferRenderSynchronizer (AVSampleBufferRenderSynchronizerTimeObservation)
+
+/*!
+ @method addPeriodicTimeObserverForInterval:queue:usingBlock:
+ @abstract Requests invocation of a block during rendering to report changing time.
+ @param interval
+ The interval of invocation of the block during normal rendering, according to progress of the current time of the timebase.
+ @param queue
+ The serial queue onto which block should be enqueued. If you pass NULL, the main queue (obtained using dispatch_get_main_queue()) will be used. Passing a concurrent queue to this method will result in undefined behavior.
+ @param block
+ The block to be invoked periodically.
+ @result
+ An object conforming to the NSObject protocol. You must retain this returned value as long as you want the time observer to be invoked by the synchronizer. Pass this object to -removeTimeObserver: to cancel time observation.
+ @discussion
+ The block is invoked periodically at the interval specified, interpreted according to the timeline of the timebase. The block is also invoked whenever time jumps and whenever rendering starts or stops.
+
+ If the interval corresponds to a very short interval in real time, the synchronizer may invoke the block less frequently than requested. Even so, the synchronizer will invoke the block sufficiently often for the client to update indications of the current time appropriately in its end-user interface.
+
+ Each call to -addPeriodicTimeObserverForInterval:queue:usingBlock: should be paired with a corresponding call to -removeTimeObserver:. Releasing the observer object without a call to -removeTimeObserver: will result in undefined behavior.
+*/
+- (id)addPeriodicTimeObserverForInterval:(CMTime)interval queue:(nullable dispatch_queue_t)queue usingBlock:(void (^)(CMTime time))block;
+
+/*!
+ @method addBoundaryTimeObserverForTimes:queue:usingBlock:
+ @abstract Requests invocation of a block when specified times are traversed during normal rendering.
+ @param times
+ The times for which the observer requests notification, supplied as an array of NSValues carrying CMTimes.
+ @param queue
+		The serial queue onto which block should be enqueued. If you pass NULL, the main queue (obtained using dispatch_get_main_queue()) will be used. Passing a concurrent queue to this method will result in undefined behavior.
+ @param block
+ The block to be invoked when any of the specified times is crossed during normal rendering.
+ @result
+ An object conforming to the NSObject protocol. You must retain this returned value as long as you want the time observer to be invoked by the synchronizer. Pass this object to -removeTimeObserver: to cancel time observation.
+ @discussion
+		Each call to -addBoundaryTimeObserverForTimes:queue:usingBlock: should be paired with a corresponding call to -removeTimeObserver:. Releasing the observer object without a call to -removeTimeObserver: will result in undefined behavior.
+*/
+- (id)addBoundaryTimeObserverForTimes:(NSArray<NSValue *> *)times queue:(nullable dispatch_queue_t)queue usingBlock:(void (^)(void))block;
+
+/*!
+ @method removeTimeObserver:
+ @abstract Cancels a previously registered time observer.
+ @param observer
+ An object returned by a previous call to -addPeriodicTimeObserverForInterval:queue:usingBlock: or -addBoundaryTimeObserverForTimes:queue:usingBlock:.
+ @discussion
+ Upon return, the caller is guaranteed that no new time observer blocks will begin executing. Depending on the calling thread and the queue used to add the time observer, an in-flight block may continue to execute after this method returns. You can guarantee synchronous time observer removal by enqueuing the call to -removeTimeObserver: on that queue. Alternatively, call dispatch_sync(queue, ^{}) after -removeTimeObserver: to wait for any in-flight blocks to finish executing. -removeTimeObserver: should be used to explicitly cancel each time observer added using -addPeriodicTimeObserverForInterval:queue:usingBlock: and -addBoundaryTimeObserverForTimes:queue:usingBlock:.
+*/
+- (void)removeTimeObserver:(id)observer;
+
+@end
+
+NS_ASSUME_NONNULL_END
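
A minimal sketch tying the synchronizer APIs above together (not part of the SDK headers): attaching a renderer, starting playback with setRate:time:, and honoring the add/remove pairing rule for time observers. PlaybackClock is a hypothetical client class:

```objc
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>

@interface PlaybackClock : NSObject
@property (nonatomic, strong) AVSampleBufferRenderSynchronizer *synchronizer;
@property (nonatomic, strong) id timeObserver; // must stay retained while observing
@end

@implementation PlaybackClock

- (void)startWithRenderer:(id<AVQueuedSampleBufferRendering>)renderer
{
    self.synchronizer = [[AVSampleBufferRenderSynchronizer alloc] init];
    [self.synchronizer addRenderer:renderer];

    // Report progress twice per second on the main queue (a serial queue, as required).
    self.timeObserver = [self.synchronizer
        addPeriodicTimeObserverForInterval:CMTimeMake(1, 2)
                                     queue:dispatch_get_main_queue()
                                usingBlock:^(CMTime time) {
            NSLog(@"current time: %.2fs", CMTimeGetSeconds(time));
        }];

    // Start playback from the beginning at the media's natural rate.
    [self.synchronizer setRate:1.0 time:kCMTimeZero];
}

- (void)stop
{
    // Stop rendering; kCMTimeInvalid leaves the timebase's time unchanged.
    [self.synchronizer setRate:0.0 time:kCMTimeInvalid];

    // Pair every addPeriodicTimeObserverForInterval:queue:usingBlock: call
    // with -removeTimeObserver:, per the header.
    if (self.timeObserver != nil) {
        [self.synchronizer removeTimeObserver:self.timeObserver];
        self.timeObserver = nil;
    }
}

@end
```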