
AVFoundation macOS xcode9 beta1


# AVFoundation.framework

mandel

diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAnimation.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAnimation.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAnimation.h	2016-09-22 17:56:42.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAnimation.h	2017-05-23 21:01:42.000000000 -0400
@@ -3,7 +3,7 @@
  
     Framework:  AVFoundation
  
-	Copyright 2010-2012 Apple Inc. All rights reserved.
+	Copyright 2010-2017 Apple Inc. All rights reserved.
  
  */
 
@@ -19,6 +19,7 @@
 */
 AVF_EXPORT const CFTimeInterval AVCoreAnimationBeginTimeAtZero NS_AVAILABLE(10_7, 4_0);
 
+typedef NSString * AVLayerVideoGravity NS_STRING_ENUM;
 
 /*!
 	@constant		AVLayerVideoGravityResizeAspect
@@ -26,7 +27,7 @@
 	@discussion		AVLayerVideoGravityResizeAspect may be used when setting the videoGravity
                     property of an AVPlayerLayer or AVCaptureVideoPreviewLayer instance.
  */
-AVF_EXPORT NSString *const AVLayerVideoGravityResizeAspect NS_AVAILABLE(10_7, 4_0);
+AVF_EXPORT AVLayerVideoGravity const AVLayerVideoGravityResizeAspect NS_AVAILABLE(10_7, 4_0);
 
 
 /*!
@@ -35,7 +36,7 @@
     @discussion     AVLayerVideoGravityResizeAspectFill may be used when setting the videoGravity
                     property of an AVPlayerLayer or AVCaptureVideoPreviewLayer instance.
  */
-AVF_EXPORT NSString *const AVLayerVideoGravityResizeAspectFill NS_AVAILABLE(10_7, 4_0);
+AVF_EXPORT AVLayerVideoGravity const AVLayerVideoGravityResizeAspectFill NS_AVAILABLE(10_7, 4_0);
 
 /*!
 	@constant		AVLayerVideoGravityResize
@@ -43,4 +44,4 @@
     @discussion     AVLayerVideoGravityResize may be used when setting the videoGravity
                     property of an AVPlayerLayer or AVCaptureVideoPreviewLayer instance.
  */
-AVF_EXPORT NSString *const AVLayerVideoGravityResize NS_AVAILABLE(10_7, 4_0);
+AVF_EXPORT AVLayerVideoGravity const AVLayerVideoGravityResize NS_AVAILABLE(10_7, 4_0);
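
Note: the gravity constants move from plain `NSString *` to the `AVLayerVideoGravity` string enum (`NS_STRING_ENUM`). Objective-C call sites compile unchanged; a minimal sketch (helper name is illustrative):

```objc
#import <AVFoundation/AVFoundation.h>

// Attach a player layer that letterboxes its video. The constant is now
// typed AVLayerVideoGravity, but the assignment is source-compatible.
static AVPlayerLayer *MakeAspectLayer(AVPlayer *player, CGRect frame)
{
    AVPlayerLayer *layer = [AVPlayerLayer playerLayerWithPlayer:player];
    layer.frame = frame;
    layer.videoGravity = AVLayerVideoGravityResizeAspect;
    return layer;
}
```
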
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAsset.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAsset.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAsset.h	2017-02-22 01:14:49.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAsset.h	2017-05-24 00:41:53.000000000 -0400
@@ -3,7 +3,7 @@
 
 	Framework:  AVFoundation
  
-	Copyright 2010-2016 Apple Inc. All rights reserved.
+	Copyright 2010-2017 Apple Inc. All rights reserved.
 
 */
 
@@ -11,6 +11,8 @@
 #import <Foundation/Foundation.h>
 #import <AVFoundation/AVAsynchronousKeyValueLoading.h>
 #import <AVFoundation/AVContentKeySession.h>
+#import <AVFoundation/AVMediaFormat.h>
+#import <AVFoundation/AVMetadataFormat.h>
 
 #import <CoreGraphics/CGAffineTransform.h>
 
@@ -177,7 +179,7 @@
   @result		An NSArray of AVAssetTracks; may be empty if no tracks of the specified media type are available.
   @discussion	Becomes callable without blocking when the key @"tracks" has been loaded
 */
-- (NSArray<AVAssetTrack *> *)tracksWithMediaType:(NSString *)mediaType;
+- (NSArray<AVAssetTrack *> *)tracksWithMediaType:(AVMediaType)mediaType;
 
 /*!
   @method		tracksWithMediaCharacteristic:
@@ -187,7 +189,7 @@
   @result		An NSArray of AVAssetTracks; may be empty if no tracks with the specified characteristic are available.
   @discussion	Becomes callable without blocking when the key @"tracks" has been loaded
 */
-- (NSArray<AVAssetTrack *> *)tracksWithMediaCharacteristic:(NSString *)mediaCharacteristic;
+- (NSArray<AVAssetTrack *> *)tracksWithMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic;
 
 /*!
  @property trackGroups
@@ -224,7 +226,7 @@
 
 /* Provides an NSArray of NSStrings, each representing a metadata format that's available to the asset (e.g. ID3, iTunes metadata, etc.). Metadata formats are defined in AVMetadataFormat.h.
 */
-@property (nonatomic, readonly) NSArray<NSString *> *availableMetadataFormats;
+@property (nonatomic, readonly) NSArray<AVMetadataFormat> *availableMetadataFormats;
 
 /*!
   @method		metadataForFormat:
@@ -234,7 +236,7 @@
   @result		An NSArray containing AVMetadataItems; may be empty if there is no metadata of the specified format.
   @discussion	Becomes callable without blocking when the key @"availableMetadataFormats" has been loaded
 */
-- (NSArray<AVMetadataItem *> *)metadataForFormat:(NSString *)format;
+- (NSArray<AVMetadataItem *> *)metadataForFormat:(AVMetadataFormat)format;
 
 @end
 
@@ -263,7 +265,7 @@
  
 	Further filtering of the metadata items in AVTimedMetadataGroups according to language can be accomplished using +[AVMetadataItem metadataItemsFromArray:filteredAndSortedAccordingToPreferredLanguages:]; filtering of the metadata items according to locale can be accomplished using +[AVMetadataItem metadataItemsFromArray:withLocale:].
 */
-- (NSArray<AVTimedMetadataGroup *> *)chapterMetadataGroupsWithTitleLocale:(NSLocale *)locale containingItemsWithCommonKeys:(nullable NSArray<NSString *> *)commonKeys NS_AVAILABLE(10_7, 4_3);
+- (NSArray<AVTimedMetadataGroup *> *)chapterMetadataGroupsWithTitleLocale:(NSLocale *)locale containingItemsWithCommonKeys:(nullable NSArray<AVMetadataKey> *)commonKeys NS_AVAILABLE(10_7, 4_3);
 
 /*!
  @method		chapterMetadataGroupsBestMatchingPreferredLanguages:
@@ -293,7 +295,7 @@
 
 /* Provides an NSArray of NSStrings, each NSString indicating a media characteristic for which a media selection option is available.
 */
-@property (nonatomic, readonly) NSArray<NSString *> *availableMediaCharacteristicsWithMediaSelectionOptions NS_AVAILABLE(10_8, 5_0);
+@property (nonatomic, readonly) NSArray<AVMediaCharacteristic> *availableMediaCharacteristicsWithMediaSelectionOptions NS_AVAILABLE(10_8, 5_0);
 
 /*!
   @method		mediaSelectionGroupForMediaCharacteristic:
@@ -312,7 +314,7 @@
 	
 	Filtering of the options in the returned AVMediaSelectionGroup according to playability, locale, and additional media characteristics can be accomplished using the category AVMediaSelectionOptionFiltering defined on AVMediaSelectionGroup.
 */
-- (nullable AVMediaSelectionGroup *)mediaSelectionGroupForMediaCharacteristic:(NSString *)mediaCharacteristic NS_AVAILABLE(10_8, 5_0);
+- (nullable AVMediaSelectionGroup *)mediaSelectionGroupForMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic NS_AVAILABLE(10_8, 5_0);
 
 /*!
   @property		preferredMediaSelection
@@ -320,6 +322,14 @@
 */
 @property (nonatomic, readonly) AVMediaSelection *preferredMediaSelection NS_AVAILABLE(10_11, 9_0);
 
+/*!
+  @property		allMediaSelections
+  @abstract		Provides an array of all permutations of AVMediaSelection for this asset.
+  @discussion
+	Becomes callable without blocking when the key @"availableMediaCharacteristicsWithMediaSelectionOptions" has been loaded.
+*/
+@property (nonatomic, readonly) NSArray <AVMediaSelection *> *allMediaSelections NS_AVAILABLE(10_13, 11_0);
+
 @end
 
 
@@ -364,7 +374,10 @@
 
 @interface AVAsset (AVAssetUsability)
 
-/* indicates whether an AVPlayerItem can be initialized with the receiver or with its URL
+/*!
+ @property		playable
+ @abstract		Indicates whether an AVPlayer can play the contents of the asset in a manner that meets user expectations.
+ @discussion	A client can attempt playback when playable is NO, this however may lead to a substandard playback experience.
 */
 @property (nonatomic, readonly, getter=isPlayable) BOOL playable NS_AVAILABLE(10_7, 4_3);
 
@@ -477,7 +490,7 @@
   @abstract		Provides the file types the AVURLAsset class understands.
   @result		An NSArray of UTIs identifying the file types the AVURLAsset class understands.
 */
-+ (NSArray<NSString *> *)audiovisualTypes NS_AVAILABLE(10_7, 5_0);
++ (NSArray<AVFileType> *)audiovisualTypes NS_AVAILABLE(10_7, 5_0);
 
 /*!
   @method		audiovisualMIMETypes
@@ -632,7 +645,7 @@
 @interface AVFragmentedAsset : AVURLAsset <AVFragmentMinding>
 {
 @private
-	AVFragmentedAssetInternal	*_fragmentedAsset;
+	AVFragmentedAssetInternal	*_fragmentedAsset __attribute__((unused));
 }
 
 /*!
@@ -675,7 +688,7 @@
   @result		An NSArray of AVFragmentedAssetTracks; may be empty if no tracks of the specified media type are available.
   @discussion	Becomes callable without blocking when the key @"tracks" has been loaded
 */
-- (NSArray<AVFragmentedAssetTrack *> *)tracksWithMediaType:(NSString *)mediaType;
+- (NSArray<AVFragmentedAssetTrack *> *)tracksWithMediaType:(AVMediaType)mediaType;
 
 /*!
   @method		tracksWithMediaCharacteristic:
@@ -685,7 +698,7 @@
   @result		An NSArray of AVFragmentedAssetTracks; may be empty if no tracks with the specified characteristic are available.
   @discussion	Becomes callable without blocking when the key @"tracks" has been loaded
 */
-- (NSArray<AVFragmentedAssetTrack *> *)tracksWithMediaCharacteristic:(NSString *)mediaCharacteristic;
+- (NSArray<AVFragmentedAssetTrack *> *)tracksWithMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic;
 
 @end
 
@@ -751,7 +764,7 @@
 	@property 		mayRequireContentKeysForMediaDataProcessing
 	@abstract		Allows AVURLAsset to be added as a content key recipient to an AVContentKeySession.
 */
-@property (nonatomic, readonly) BOOL mayRequireContentKeysForMediaDataProcessing API_AVAILABLE(macosx(10.12.4), ios(10.3), tvos(10.2));
+@property (nonatomic, readonly) BOOL mayRequireContentKeysForMediaDataProcessing;
 
 @end
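
Note: the track and metadata accessors are retyped to the new string enums (`AVMediaType`, `AVMediaCharacteristic`, `AVMetadataFormat`, `AVMetadataKey`), and 10.13/11.0 adds `allMediaSelections`. A sketch assuming the relevant asynchronous keys have already been loaded on `asset` (helper name is illustrative):

```objc
#import <AVFoundation/AVFoundation.h>

// Enumerate audio tracks and every media-selection permutation. Assumes the
// @"tracks" and @"availableMediaCharacteristicsWithMediaSelectionOptions"
// keys have been loaded so these calls do not block.
static void InspectAsset(AVAsset *asset)
{
    NSArray<AVAssetTrack *> *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
    NSLog(@"audio tracks: %lu", (unsigned long)audioTracks.count);

    // New in macOS 10.13 / iOS 11: all permutations of AVMediaSelection.
    for (AVMediaSelection *selection in asset.allMediaSelections) {
        NSLog(@"media selection: %@", selection);
    }
}
```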
 
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetExportSession.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetExportSession.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetExportSession.h	2016-05-03 18:21:25.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetExportSession.h	2017-05-24 00:41:53.000000000 -0400
@@ -3,12 +3,14 @@
 
 	Framework:  AVFoundation
  
-	Copyright 2010-2015 Apple Inc. All rights reserved.
+	Copyright 2010-2017 Apple Inc. All rights reserved.
 
 */
 
 
 #import <AVFoundation/AVBase.h>
+#import <AVFoundation/AVMediaFormat.h>
+#import <AVFoundation/AVAudioProcessingSettings.h>
 #import <Foundation/Foundation.h>
 #import <CoreMedia/CMTime.h>
 #import <CoreMedia/CMTimeRange.h>
@@ -53,21 +55,31 @@
 
 
 /* These export options can be used to produce movie files with video size appropriate to the device.
-	The export will not scale the video up from a smaller size. The video will be compressed using
-	H.264 and the audio will be compressed using AAC.  */
+   The export will not scale the video up from a smaller size. The video will be compressed using
+   H.264 and the audio will be compressed using AAC.  */
+AVF_EXPORT NSString *const AVAssetExportPresetLowQuality         NS_AVAILABLE(10_11, 4_0);
+AVF_EXPORT NSString *const AVAssetExportPresetMediumQuality      NS_AVAILABLE(10_11, 4_0);
+AVF_EXPORT NSString *const AVAssetExportPresetHighestQuality     NS_AVAILABLE(10_11, 4_0);
 
-AVF_EXPORT NSString *const AVAssetExportPresetLowQuality        NS_AVAILABLE(10_11, 4_0);
-AVF_EXPORT NSString *const AVAssetExportPresetMediumQuality     NS_AVAILABLE(10_11, 4_0);
-AVF_EXPORT NSString *const AVAssetExportPresetHighestQuality    NS_AVAILABLE(10_11, 4_0);
+/* These export options can be used to produce movie files with video size appropriate to the device.
+   The export will not scale the video up from a smaller size. The video will be compressed using
+   HEVC and the audio will be compressed using AAC.  */
+AVF_EXPORT NSString *const AVAssetExportPresetHEVCHighestQuality NS_AVAILABLE(10_13, 11_0);
+
+/* These export options can be used to produce movie files with the specified video size.
+   The export will not scale the video up from a smaller size. The video will be compressed using
+   H.264 and the audio will be compressed using AAC.  Some devices cannot support some sizes. */
+AVF_EXPORT NSString *const AVAssetExportPreset640x480           NS_AVAILABLE(10_7, 4_0);
+AVF_EXPORT NSString *const AVAssetExportPreset960x540           NS_AVAILABLE(10_7, 4_0);
+AVF_EXPORT NSString *const AVAssetExportPreset1280x720          NS_AVAILABLE(10_7, 4_0);
+AVF_EXPORT NSString *const AVAssetExportPreset1920x1080         NS_AVAILABLE(10_7, 5_0);
+AVF_EXPORT NSString *const AVAssetExportPreset3840x2160         NS_AVAILABLE(10_10, 9_0);
 
 /* These export options can be used to produce movie files with the specified video size.
-	The export will not scale the video up from a smaller size. The video will be compressed using
-	H.264 and the audio will be compressed using AAC.  Some devices cannot support some sizes. */
-AVF_EXPORT NSString *const AVAssetExportPreset640x480			NS_AVAILABLE(10_7, 4_0);
-AVF_EXPORT NSString *const AVAssetExportPreset960x540   		NS_AVAILABLE(10_7, 4_0);
-AVF_EXPORT NSString *const AVAssetExportPreset1280x720  		NS_AVAILABLE(10_7, 4_0);
-AVF_EXPORT NSString *const AVAssetExportPreset1920x1080			NS_AVAILABLE(10_7, 5_0);
-AVF_EXPORT NSString *const AVAssetExportPreset3840x2160			NS_AVAILABLE(10_10, 9_0);
+   The export will not scale the video up from a smaller size. The video will be compressed using
+   HEVC and the audio will be compressed using AAC.  Some devices cannot support some sizes. */
+AVF_EXPORT NSString *const AVAssetExportPresetHEVC1920x1080     NS_AVAILABLE(10_13, 11_0);
+AVF_EXPORT NSString *const AVAssetExportPresetHEVC3840x2160     NS_AVAILABLE(10_13, 11_0);
 
 /*  This export option will produce an audio-only .m4a file with appropriate iTunes gapless playback data */
 AVF_EXPORT NSString *const AVAssetExportPresetAppleM4A			NS_AVAILABLE(10_7, 4_0);
@@ -152,7 +164,7 @@
 /* Indicates the type of file to be written by the session.
    The value of this property must be set before you invoke -exportAsynchronouslyWithCompletionHandler:; otherwise -exportAsynchronouslyWithCompletionHandler: will raise an NSInternalInconsistencyException.
    Setting the value of this property to a file type that's not among the session's supported file types will result in an NSInvalidArgumentException. See supportedFileTypes. */
-@property (nonatomic, copy, nullable) NSString *outputFileType;
+@property (nonatomic, copy, nullable) AVFileType outputFileType;
 
 /* Indicates the URL of the export session's output. You may use UTTypeCopyPreferredTagWithClass(outputFileType, kUTTagClassFilenameExtension) to obtain an appropriate path extension for the outputFileType you have specified. For more information about UTTypeCopyPreferredTagWithClass and kUTTagClassFilenameExtension, on iOS see <MobileCoreServices/UTType.h> and on Mac OS X see <LaunchServices/UTType.h>.  */
 @property (nonatomic, copy, nullable) NSURL *outputURL;
@@ -231,10 +243,10 @@
 								(such as adding or deleting tracks) should be made to the asset between retrieving compatible identifiers and performing the export operation.
 	@param presetName			An NSString specifying the name of the preset template for the export.
 	@param asset				An AVAsset object that is intended to be exported.
-	@param outputFileType		An NSString indicating a file type to check; or nil, to query whether there are any compatible types.
+	@param outputFileType		An AVFileType indicating a file type to check; or nil, to query whether there are any compatible types.
 	@param completionHandler	A block called with the compatibility result.
  */
-+ (void)determineCompatibilityOfExportPreset:(NSString *)presetName withAsset:(AVAsset *)asset outputFileType:(nullable NSString *)outputFileType completionHandler:(void (^)(BOOL compatible))handler NS_AVAILABLE(10_9, 6_0);
++ (void)determineCompatibilityOfExportPreset:(NSString *)presetName withAsset:(AVAsset *)asset outputFileType:(nullable AVFileType)outputFileType completionHandler:(void (^)(BOOL compatible))handler NS_AVAILABLE(10_9, 6_0);
 
 @end
 
@@ -242,7 +254,7 @@
 
 /* Indicates the types of files the target can write, according to the preset the target was initialized with.
    Does not perform an inspection of the AVAsset to determine whether its contents are compatible with the supported file types. If you need to make that determination before initiating the export, use - (void)determineCompatibleFileTypesWithCompletionHandler:(void (^)(NSArray *compatibleFileTypes))handler:. */
-@property (nonatomic, readonly) NSArray<NSString *> *supportedFileTypes;
+@property (nonatomic, readonly) NSArray<AVFileType> *supportedFileTypes;
 
 /*!
 	@method						determineCompatibleFileTypesWithCompletionHandler:
@@ -251,7 +263,7 @@
 								Called when the inspection completes with an array of file types the ExportSession can write.  Note that this may have a count of zero.
 	@discussion					This method is different than the supportedFileTypes property in that it performs an inspection of the AVAsset in order to determine its compatibility with each of the session's supported file types.
 */
-- (void)determineCompatibleFileTypesWithCompletionHandler:(void (^)(NSArray<NSString *> *compatibleFileTypes))handler NS_AVAILABLE(10_9, 6_0);
+- (void)determineCompatibleFileTypesWithCompletionHandler:(void (^)(NSArray<AVFileType> *compatibleFileTypes))handler NS_AVAILABLE(10_9, 6_0);
 
 @end
 
@@ -298,7 +310,7 @@
 /* Indicates the processing algorithm used to manage audio pitch for scaled audio edits.
    Constants for various time pitch algorithms, e.g. AVAudioTimePitchAlgorithmSpectral, are defined in AVAudioProcessingSettings.h. An NSInvalidArgumentException will be raised if this property is set to a value other than the constants defined in that file.
    The default value is AVAudioTimePitchAlgorithmSpectral. */
-@property (nonatomic, copy) NSString *audioTimePitchAlgorithm NS_AVAILABLE(10_9, 7_0);
+@property (nonatomic, copy) AVAudioTimePitchAlgorithm audioTimePitchAlgorithm NS_AVAILABLE(10_9, 7_0);
 
 /* Indicates whether non-default audio mixing is enabled for export and supplies the parameters for audio mixing.  Ignored when export preset is AVAssetExportPresetPassthrough. */
 @property (nonatomic, copy, nullable) AVAudioMix *audioMix;
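
Note: the new HEVC presets slot into the existing export flow, and `outputFileType` is now typed `AVFileType`. A sketch, with `outputURL` as a hypothetical destination:

```objc
#import <AVFoundation/AVFoundation.h>

// Export an asset with the new HEVC preset (macOS 10.13 / iOS 11).
static void ExportHEVC(AVAsset *asset, NSURL *outputURL)
{
    AVAssetExportSession *session =
        [AVAssetExportSession exportSessionWithAsset:asset
                                          presetName:AVAssetExportPresetHEVCHighestQuality];
    session.outputFileType = AVFileTypeQuickTimeMovie; // now an AVFileType
    session.outputURL = outputURL;
    [session exportAsynchronouslyWithCompletionHandler:^{
        if (session.status == AVAssetExportSessionStatusFailed)
            NSLog(@"export failed: %@", session.error);
    }];
}
```
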
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetImageGenerator.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetImageGenerator.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetImageGenerator.h	2016-09-12 23:29:45.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetImageGenerator.h	2017-05-24 00:41:53.000000000 -0400
@@ -3,7 +3,7 @@
 
 	Framework:  AVFoundation
  
-	Copyright 2010-2016 Apple Inc. All rights reserved.
+	Copyright 2010-2017 Apple Inc. All rights reserved.
 
 */
 
@@ -32,12 +32,19 @@
 NS_ASSUME_NONNULL_BEGIN
 
 /*!
+ @typedef AVAssetImageGeneratorApertureMode
+ @abstract
+    The type of an aperture mode.
+*/
+typedef NSString * AVAssetImageGeneratorApertureMode NS_STRING_ENUM;
+
+/*!
 	@constant		AVAssetImageGeneratorApertureModeCleanAperture
 	@abstract		Both pixel aspect ratio and clean aperture will be applied.
 	@discussion
 		An image's clean aperture is a region of video free from transition artifacts caused by the encoding of the signal.
 */
-AVF_EXPORT NSString *const AVAssetImageGeneratorApertureModeCleanAperture NS_AVAILABLE(10_7, 4_0);
+AVF_EXPORT AVAssetImageGeneratorApertureMode const AVAssetImageGeneratorApertureModeCleanAperture NS_AVAILABLE(10_7, 4_0);
 
 /*!
 	@constant		AVAssetImageGeneratorApertureModeProductionAperture
@@ -45,7 +52,7 @@
 	@discussion
 		The image is not cropped to the clean aperture region, but it is scaled according to the pixel aspect ratio. Use this option when you want to see all the pixels in your video, including the edges.
 */
-AVF_EXPORT NSString *const AVAssetImageGeneratorApertureModeProductionAperture NS_AVAILABLE(10_7, 4_0);
+AVF_EXPORT AVAssetImageGeneratorApertureMode const AVAssetImageGeneratorApertureModeProductionAperture NS_AVAILABLE(10_7, 4_0);
 
 /*!
 	@constant		AVAssetImageGeneratorApertureModeEncodedPixels
@@ -53,7 +60,7 @@
 	@discussion
 		The image is not cropped to the clean aperture region and is not scaled according to the pixel aspect ratio. The encoded dimensions of the image description are displayed.
 */
-AVF_EXPORT NSString *const AVAssetImageGeneratorApertureModeEncodedPixels NS_AVAILABLE(10_7, 4_0);
+AVF_EXPORT AVAssetImageGeneratorApertureMode const AVAssetImageGeneratorApertureModeEncodedPixels NS_AVAILABLE(10_7, 4_0);
 
 typedef NS_ENUM(NSInteger, AVAssetImageGeneratorResult)
 {
@@ -83,7 +90,7 @@
 @property (nonatomic) CGSize maximumSize;
 
 /* Specifies the aperture mode for the generated image.  Default is AVAssetImageGeneratorApertureModeCleanAperture. */
-@property (nonatomic, copy, nullable) NSString *apertureMode;
+@property (nonatomic, copy, nullable) AVAssetImageGeneratorApertureMode apertureMode;
 
 /* Specifies the video composition to use when extracting images from assets with multiple video tracks.
    If no videoComposition is specified, only the first enabled video track will be used.
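
Note: `apertureMode` is now typed `AVAssetImageGeneratorApertureMode`; the constants themselves are unchanged. A sketch (helper name is illustrative):

```objc
#import <AVFoundation/AVFoundation.h>

// Generate a thumbnail using the encoded pixel dimensions, i.e. without
// clean-aperture cropping or pixel-aspect-ratio scaling.
static CGImageRef CopyThumbnail(AVAsset *asset, CMTime time)
{
    AVAssetImageGenerator *generator =
        [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
    generator.apertureMode = AVAssetImageGeneratorApertureModeEncodedPixels;

    NSError *error = nil;
    CGImageRef image = [generator copyCGImageAtTime:time actualTime:NULL error:&error];
    if (image == NULL)
        NSLog(@"thumbnail failed: %@", error);
    return image; // caller is responsible for releasing
}
```
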
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetReaderOutput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetReaderOutput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetReaderOutput.h	2016-09-12 23:29:45.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetReaderOutput.h	2017-05-24 00:41:53.000000000 -0400
@@ -3,12 +3,13 @@
 
 	Framework:  AVFoundation
  
-    Copyright 2010-2015 Apple Inc. All rights reserved.
+    Copyright 2010-2017 Apple Inc. All rights reserved.
 
 */
 
 #import <AVFoundation/AVBase.h>
 #import <AVFoundation/AVVideoComposition.h>
+#import <AVFoundation/AVAudioProcessingSettings.h>
 #import <CoreMedia/CMTime.h>
 #import <CoreMedia/CMSampleBuffer.h>
 
@@ -27,6 +28,8 @@
  
  @discussion
 	Clients can read the media data of an asset by adding one or more concrete instances of AVAssetReaderOutput to an AVAssetReader using the -[AVAssetReader addOutput:] method.
+	
+	IMPORTANT PERFORMANCE NOTE: Make sure to set the alwaysCopiesSampleData property to NO if you do not need to modify the sample data in-place, to avoid unnecessary and inefficient copying.
  */
 NS_CLASS_AVAILABLE(10_7, 4_1)
 @interface AVAssetReaderOutput : NSObject
@@ -244,7 +247,7 @@
  
 	The default value is AVAudioTimePitchAlgorithmSpectral.
  */
-@property (nonatomic, copy) NSString *audioTimePitchAlgorithm NS_AVAILABLE(10_9, 7_0);
+@property (nonatomic, copy) AVAudioTimePitchAlgorithm audioTimePitchAlgorithm NS_AVAILABLE(10_9, 7_0);
 
 @end
 
@@ -347,7 +350,7 @@
  
 	The default value is AVAudioTimePitchAlgorithmSpectral.
  */
-@property (nonatomic, copy) NSString *audioTimePitchAlgorithm NS_AVAILABLE(10_9, 7_0);
+@property (nonatomic, copy) AVAudioTimePitchAlgorithm audioTimePitchAlgorithm NS_AVAILABLE(10_9, 7_0);
 
 @end
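
Note: the header now carries an explicit performance warning about `alwaysCopiesSampleData`. A sketch of a reader output configured per that note, assuming `track` is an audio track and the delivered buffers are never modified in place:

```objc
#import <AVFoundation/AVFoundation.h>
#import <CoreAudio/CoreAudioTypes.h>

// Read decoded PCM without the extra per-sample copy.
static AVAssetReaderTrackOutput *MakeAudioOutput(AVAssetTrack *track)
{
    NSDictionary *settings = @{ AVFormatIDKey : @(kAudioFormatLinearPCM) };
    AVAssetReaderTrackOutput *output =
        [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:track
                                                   outputSettings:settings];
    output.alwaysCopiesSampleData = NO; // safe only if buffers are read-only
    return output;
}
```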
 
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetResourceLoader.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetResourceLoader.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetResourceLoader.h	2017-02-22 00:47:31.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetResourceLoader.h	2017-05-24 00:41:53.000000000 -0400
@@ -395,7 +395,7 @@
  @result		The persistable content key data that may be stored offline to answer future loading requests of the same content key.
  @discussion	The data returned from this method may be used to immediately satisfy an AVAssetResourceLoadingDataRequest, as well as any subsequent requests for the same key url. The value of AVAssetResourceLoadingContentInformationRequest.contentType must be set to AVStreamingKeyDeliveryPersistentContentKeyType when responding with data created with this method.
 */
-- (NSData *)persistentContentKeyFromKeyVendorResponse:(NSData *)keyVendorResponse options:(nullable NSDictionary<NSString *, id> *)options error:(NSError **)outError NS_AVAILABLE_IOS(9_0);
+- (nullable NSData *)persistentContentKeyFromKeyVendorResponse:(NSData *)keyVendorResponse options:(nullable NSDictionary<NSString *, id> *)options error:(NSError **)outError NS_AVAILABLE_IOS(9_0);
 
 @end
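
Note: the only change here is nullability on the return value, so callers should check for nil and consult the out error. This API is iOS-only (`NS_AVAILABLE_IOS(9_0)`); a sketch with hypothetical `request`/`response` stand-ins:

```objc
#import <AVFoundation/AVFoundation.h>

// The return value is now nullable; nil means the key could not be persisted.
static NSData *PersistKey(AVAssetResourceLoadingRequest *request, NSData *response)
{
    NSError *error = nil;
    NSData *key = [request persistentContentKeyFromKeyVendorResponse:response
                                                              options:nil
                                                                error:&error];
    if (key == nil)
        NSLog(@"could not create persistable key: %@", error);
    return key;
}
```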
 
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetTrack.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetTrack.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetTrack.h	2016-09-23 21:02:15.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetTrack.h	2017-05-23 21:01:43.000000000 -0400
@@ -3,7 +3,7 @@
 
 	Framework:  AVFoundation
  
-	Copyright 2010-2016 Apple Inc. All rights reserved.
+	Copyright 2010-2017 Apple Inc. All rights reserved.
 
 */
 
@@ -20,6 +20,8 @@
 #import <AVFoundation/AVAsynchronousKeyValueLoading.h>
 #import <AVFoundation/AVAsset.h>
 #import <AVFoundation/AVAssetTrackSegment.h>
+#import <AVFoundation/AVMediaFormat.h>
+#import <AVFoundation/AVMetadataFormat.h>
 #import <CoreMedia/CMTimeRange.h>
 
 NS_ASSUME_NONNULL_BEGIN
@@ -48,7 +50,7 @@
 @interface AVAssetTrack (AVAssetTrackBasicPropertiesAndCharacteristics)
 
 /* indicates the media type for this track, e.g. AVMediaTypeVideo, AVMediaTypeAudio, etc., as defined in AVMediaFormat.h. */
-@property (nonatomic, readonly) NSString *mediaType;
+@property (nonatomic, readonly) AVMediaType mediaType;
 
 /* provides an array of CMFormatDescriptions
    each of which indicates the format of media samples referenced by the track;
@@ -59,6 +61,9 @@
 /* Indicates whether the receiver is playable in the current environment; if YES, an AVPlayerItemTrack of an AVPlayerItem initialized with the receiver's asset can be enabled for playback.  */
 @property (nonatomic, readonly, getter=isPlayable) BOOL playable NS_AVAILABLE(10_8, 5_0);
 
+/* Indicates whether the receiver is decodable in the current environment; if YES, the track can be decoded even though decoding may be too slow for real time playback.  */
+@property (nonatomic, readonly, getter=isDecodable) BOOL decodable NS_AVAILABLE(10_13, 11_0);
+
 /* indicates whether the track is enabled according to state stored in its container or construct;
    note that its presentation state can be changed from this default via AVPlayerItemTrack */
 @property (nonatomic, readonly, getter=isEnabled) BOOL enabled;
@@ -77,7 +82,7 @@
 					as defined above.
 	@result			YES if the track references media with the specified characteristic, otherwise NO.
 */
-- (BOOL)hasMediaCharacteristic:(NSString *)mediaCharacteristic;
+- (BOOL)hasMediaCharacteristic:(AVMediaCharacteristic)mediaCharacteristic;
 
 @end
 
@@ -192,7 +197,7 @@
 
 /* provides an NSArray of NSStrings, each representing a format of metadata that's available for the track (e.g. QuickTime userdata, etc.)
    Metadata formats are defined in AVMetadataItem.h. */
-@property (nonatomic, readonly) NSArray<NSString *> *availableMetadataFormats;
+@property (nonatomic, readonly) NSArray<AVMetadataFormat> *availableMetadataFormats;
 
 /*!
 	@method			metadataForFormat:
@@ -202,13 +207,20 @@
 	@result			An NSArray containing AVMetadataItems.
 	@discussion		Becomes callable without blocking when the key @"availableMetadataFormats" has been loaded
 */
-- (NSArray<AVMetadataItem *> *)metadataForFormat:(NSString *)format;
+- (NSArray<AVMetadataItem *> *)metadataForFormat:(AVMetadataFormat)format;
 
 @end
 
 
 @interface AVAssetTrack (AVAssetTrackTrackAssociations)
 
+/*!
+ @typedef AVTrackAssociationType
+ @abstract
+    The type of a track association.
+*/
+typedef NSString * AVTrackAssociationType NS_STRING_ENUM;
+
 /*
  @constant		AVTrackAssociationTypeAudioFallback
  @abstract		Indicates an association between an audio track with another audio track that contains the same content but is typically encoded in a different format that's more widely supported, used to nominate a track that should be used in place of an unsupported track.
@@ -219,7 +231,7 @@
 	Example: Using AVTrackAssociationTypeAudioFallback, a stereo audio track with media subtype kAudioFormatMPEG4AAC could be nominated as the "fallback" for an audio track encoding the same source material but with media subtype kAudioFormatAC3 and a 5.1 channel layout.  This would ensure that all clients are capable of playing back some form of the audio.
 
  */
-AVF_EXPORT NSString *const AVTrackAssociationTypeAudioFallback NS_AVAILABLE(10_9, 7_0);
+AVF_EXPORT AVTrackAssociationType const AVTrackAssociationTypeAudioFallback NS_AVAILABLE(10_9, 7_0);
 
 /*
  @constant		AVTrackAssociationTypeChapterList
@@ -228,7 +240,7 @@
  @discussion
 	This association is not symmetric; when used with -[AVAssetWriterInput addTrackAssociationWithTrackOfInput:type:], the receiver should be an instance of AVAssetWriterInput with a corresponding track that has renderable content while the input parameter should be an instance of AVAssetWriterInput with a corresponding track that contains chapter metadata.
  */
-AVF_EXPORT NSString *const AVTrackAssociationTypeChapterList NS_AVAILABLE(10_9, 7_0);
+AVF_EXPORT AVTrackAssociationType const AVTrackAssociationTypeChapterList NS_AVAILABLE(10_9, 7_0);
 
 /*
  @constant		AVTrackAssociationTypeForcedSubtitlesOnly
@@ -237,7 +249,7 @@
  @discussion
 	Associations of type AVTrackAssociationTypeForcedSubtitlesOnly are supported only between subtitle tracks.  This association is not symmetric; when used with -[AVAssetWriterInput addTrackAssociationWithTrackOfInput:type:], the receiver should be an instance of AVAssetWriterInput with a corresponding subtitle track that contains non-forced subtitles, and the input parameter should be an instance of AVAssetWriterInput with a corresponding subtitle track that contains forced subtitles only.
  */
-AVF_EXPORT NSString *const AVTrackAssociationTypeForcedSubtitlesOnly NS_AVAILABLE(10_9, 7_0);
+AVF_EXPORT AVTrackAssociationType const AVTrackAssociationTypeForcedSubtitlesOnly NS_AVAILABLE(10_9, 7_0);
 
 /*
  @constant		AVTrackAssociationTypeSelectionFollower
@@ -246,7 +258,7 @@
  @discussion
 	This association is not symmetric; when used with -[AVAssetWriterInput addTrackAssociationWithTrackOfInput:type:], the input parameter should be an instance of AVAssetWriterInput whose selection may depend on the selection of the receiver.  In the example above, the receiver would be the instance of AVAssetWriterInput corresponding with the audio track and the input parameter would be the instance of AVAssetWriterInput corresponding with the subtitle track.
  */
-AVF_EXPORT NSString *const AVTrackAssociationTypeSelectionFollower NS_AVAILABLE(10_9, 7_0);
+AVF_EXPORT AVTrackAssociationType const AVTrackAssociationTypeSelectionFollower NS_AVAILABLE(10_9, 7_0);
 
 /*
  @constant		AVTrackAssociationTypeTimecode
@@ -255,7 +267,7 @@
  @discussion
 	This association is not symmetric; when used with -[AVAssetWriterInput addTrackAssociationWithTrackOfInput:type:], the receiver should be an instance of AVAssetWriterInput with a corresponding track that may be a video track or an audio track while the input parameter should be an instance of AVAssetWriterInput with a corresponding timecode track.
  */
-AVF_EXPORT NSString *const AVTrackAssociationTypeTimecode NS_AVAILABLE(10_9, 7_0);
+AVF_EXPORT AVTrackAssociationType const AVTrackAssociationTypeTimecode NS_AVAILABLE(10_9, 7_0);
 
 /*
 @constant		AVTrackAssociationTypeMetadataReferent
@@ -265,11 +277,11 @@
 	This track association is optional for AVAssetTracks with the mediaType AVMediaTypeMetadata. When a metadata track lacks this track association, its contents are assumed to describe or annotate the asset as a whole.
 	This association is not symmetric; when used with -[AVAssetWriterInput addTrackAssociationWithTrackOfInput:type:], the receiver should be an instance of AVAssetWriterInput with mediaType AVMediaTypeMetadata while the input parameter should be an instance of AVAssetWriterInput that's used to create the track to which the contents of the receiver's corresponding metadata track refer.
 */
-AVF_EXPORT NSString *const AVTrackAssociationTypeMetadataReferent NS_AVAILABLE(10_10, 8_0);
+AVF_EXPORT AVTrackAssociationType const AVTrackAssociationTypeMetadataReferent NS_AVAILABLE(10_10, 8_0);
 
 /* Provides an NSArray of NSStrings, each representing a type of track association that the receiver has with one or more of the other tracks of the asset (e.g. AVTrackAssociationTypeChapterList, AVTrackAssociationTypeTimecode, etc.).
    Track association types are defined immediately above. */
-@property (nonatomic, readonly) NSArray<NSString *> *availableTrackAssociationTypes NS_AVAILABLE(10_9, 7_0);
+@property (nonatomic, readonly) NSArray<AVTrackAssociationType> *availableTrackAssociationTypes NS_AVAILABLE(10_9, 7_0);
 
 /*!
 	@method			associatedTracksOfType:
@@ -279,7 +291,7 @@
 	@result			An NSArray containing AVAssetTracks; may be empty if there is no associated tracks of the specified type.
 	@discussion		Becomes callable without blocking when the key @"availableTrackAssociationTypes" has been loaded.
 */
-- (NSArray<AVAssetTrack *> *)associatedTracksOfType:(NSString *)trackAssociationType NS_AVAILABLE(10_9, 7_0);
+- (NSArray<AVAssetTrack *> *)associatedTracksOfType:(AVTrackAssociationType)trackAssociationType NS_AVAILABLE(10_9, 7_0);
 
 @end
 
@@ -360,7 +372,7 @@
 @interface AVFragmentedAssetTrack : AVAssetTrack
 {
 @private
-	AVFragmentedAssetTrackInternal	*_fragmentedAssetTrack;
+	AVFragmentedAssetTrackInternal	*_fragmentedAssetTrack __attribute__((unused));
 }
 
 @end
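
Note: 10.13/11.0 adds `isDecodable`, and the track-association constants are retyped to `AVTrackAssociationType`. A sketch combining the two; the fallback-selection logic is illustrative, not prescribed by the header:

```objc
#import <AVFoundation/AVFoundation.h>

// Prefer an AVTrackAssociationTypeAudioFallback association when the
// original track cannot be decoded in the current environment.
static AVAssetTrack *PlayableAudioTrack(AVAssetTrack *track)
{
    if (track.isDecodable) // new in macOS 10.13 / iOS 11
        return track;
    NSArray<AVAssetTrack *> *fallbacks =
        [track associatedTracksOfType:AVTrackAssociationTypeAudioFallback];
    return fallbacks.firstObject; // may be nil if no fallback exists
}
```
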
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetTrackGroup.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetTrackGroup.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetTrackGroup.h	2016-09-22 17:56:43.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetTrackGroup.h	2017-05-23 21:01:43.000000000 -0400
@@ -3,7 +3,7 @@
 
 	Framework:  AVFoundation
 
-	Copyright 2010-2015 Apple Inc. All rights reserved.
+	Copyright 2010-2016 Apple Inc. All rights reserved.
 
  */
 
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriter.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriter.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriter.h	2016-08-05 01:30:06.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriter.h	2017-05-24 00:41:53.000000000 -0400
@@ -8,6 +8,7 @@
 */
 
 #import <AVFoundation/AVBase.h>
+#import <AVFoundation/AVMediaFormat.h>
 #import <AVFoundation/AVMediaSelectionGroup.h>
 #import <Foundation/Foundation.h>
 #import <CoreMedia/CMBase.h>
@@ -84,7 +85,7 @@
 	
 	UTIs for container formats that can be written are declared in AVMediaFormat.h.
  */
-+ (nullable instancetype)assetWriterWithURL:(NSURL *)outputURL fileType:(NSString *)outputFileType error:(NSError * _Nullable * _Nullable)outError;
++ (nullable instancetype)assetWriterWithURL:(NSURL *)outputURL fileType:(AVFileType)outputFileType error:(NSError * _Nullable * _Nullable)outError;
 
 /*!
  @method initWithURL:fileType:error:
@@ -105,7 +106,7 @@
 	
 	UTIs for container formats that can be written are declared in AVMediaFormat.h.
  */
-- (nullable instancetype)initWithURL:(NSURL *)outputURL fileType:(NSString *)outputFileType error:(NSError * _Nullable * _Nullable)outError NS_DESIGNATED_INITIALIZER;
+- (nullable instancetype)initWithURL:(NSURL *)outputURL fileType:(AVFileType)outputFileType error:(NSError * _Nullable * _Nullable)outError NS_DESIGNATED_INITIALIZER;
 
 /*!
  @property outputURL
@@ -121,7 +122,7 @@
  @abstract
 	The UTI of the file format of the file for which the instance of AVAssetWriter was initialized for writing.
  */
-@property (nonatomic, copy, readonly) NSString *outputFileType;
+@property (nonatomic, copy, readonly) AVFileType outputFileType;
 
 /*!
  @property availableMediaTypes
@@ -131,7 +132,7 @@
  @discussion
 	Some media types may not be accepted within the file format with which an AVAssetWriter was initialized.
  */
-@property (nonatomic, readonly) NSArray<NSString *> *availableMediaTypes;
+@property (nonatomic, readonly) NSArray<AVMediaType> *availableMediaTypes;
 
 /*!
  @property status
@@ -217,7 +218,7 @@
  
 	Attempting to add an input with output settings and a media type for which this method returns NO will cause an exception to be thrown.
 */
-- (BOOL)canApplyOutputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings forMediaType:(NSString *)mediaType;
+- (BOOL)canApplyOutputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings forMediaType:(AVMediaType)mediaType;
 
 /*!
  @method canAddInput:
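
Note: the writer's file-type parameter and properties are now typed `AVFileType`; the existing UTI constants such as `AVFileTypeQuickTimeMovie` still apply. A minimal sketch:

```objc
#import <AVFoundation/AVFoundation.h>

// Create a QuickTime movie writer; returns nil (with an error) on failure.
static AVAssetWriter *MakeMovieWriter(NSURL *outputURL)
{
    NSError *error = nil;
    AVAssetWriter *writer = [AVAssetWriter assetWriterWithURL:outputURL
                                                     fileType:AVFileTypeQuickTimeMovie
                                                        error:&error];
    if (writer == nil)
        NSLog(@"could not create writer: %@", error);
    return writer;
}
```
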
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriterInput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriterInput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriterInput.h	2016-08-05 01:30:07.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetWriterInput.h	2017-05-24 00:37:42.000000000 -0400
@@ -3,11 +3,12 @@
 
 	Framework:  AVFoundation
  
-	Copyright 2010-2015 Apple Inc. All rights reserved.
+	Copyright 2010-2017 Apple Inc. All rights reserved.
 
 */
 
 #import <AVFoundation/AVBase.h>
+#import <AVFoundation/AVMediaFormat.h>
 #import <Foundation/Foundation.h>
 #import <CoreMedia/CMTime.h>
 #import <CoreMedia/CMTimeRange.h>
@@ -59,9 +60,9 @@
  
 	For AVMediaTypeAudio the following keys are not currently supported in the outputSettings dictionary: AVEncoderAudioQualityKey and AVSampleRateConverterAudioQualityKey.  When using this method to construct a new instance, an audio settings dictionary must be fully specified, meaning that it must contain AVFormatIDKey, AVSampleRateKey, and AVNumberOfChannelsKey.  If no other channel layout information is available, a value of 1 for AVNumberOfChannelsKey will result in mono output and a value of 2 will result in stereo output.  If AVNumberOfChannelsKey specifies a channel count greater than 2, the dictionary must also specify a value for AVChannelLayoutKey.  For kAudioFormatLinearPCM, all relevant AVLinearPCM*Key keys must be included, and for kAudioFormatAppleLossless, AVEncoderBitDepthHintKey keys must be included.  See +assetWriterInputWithMediaType:outputSettings:sourceFormatHint: for a way to avoid having to specify a value for each of those keys.
  
-	For AVMediaTypeVideo, any output settings dictionary must request a compressed video format.  This means that the value passed in for outputSettings must follow the rules for compressed video output, as laid out in AVVideoSettings.h.  When using this method to construct a new instance, a video settings dictionary must be fully specified, meaning that it must contain AVVideoCodecKey, AVVideoWidthKey, and AVVideoHeightKey.  See +assetWriterInputWithMediaType:outputSettings:sourceFormatHint: for a way to avoid having to specify a value for each of those keys.  On iOS, the only values currently supported for AVVideoCodecKey are AVVideoCodecH264 and AVVideoCodecJPEG.  AVVideoCodecH264 is not supported on iPhone 3G.  For AVVideoScalingModeKey, the value AVVideoScalingModeFit is not supported.
+	For AVMediaTypeVideo, any output settings dictionary must request a compressed video format.  This means that the value passed in for outputSettings must follow the rules for compressed video output, as laid out in AVVideoSettings.h.  When using this method to construct a new instance, a video settings dictionary must be fully specified, meaning that it must contain AVVideoCodecKey, AVVideoWidthKey, and AVVideoHeightKey.  See +assetWriterInputWithMediaType:outputSettings:sourceFormatHint: for a way to avoid having to specify a value for each of those keys.  On iOS, the only values currently supported for AVVideoCodecKey are AVVideoCodecTypeH264 and AVVideoCodecTypeJPEG.  AVVideoCodecTypeH264 is not supported on iPhone 3G.  For AVVideoScalingModeKey, the value AVVideoScalingModeFit is not supported.
  */
-+ (instancetype)assetWriterInputWithMediaType:(NSString *)mediaType outputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings;
++ (instancetype)assetWriterInputWithMediaType:(AVMediaType)mediaType outputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings;
 
 /*!
  @method assetWriterInputWithMediaType:outputSettings:sourceFormatHint:
@@ -82,7 +83,7 @@
  
 	An NSInvalidArgumentException will be thrown if the media type of the format description does not match the media type string passed into this method.
  */
-+ (instancetype)assetWriterInputWithMediaType:(NSString *)mediaType outputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings sourceFormatHint:(nullable CMFormatDescriptionRef)sourceFormatHint NS_AVAILABLE(10_8, 6_0);
++ (instancetype)assetWriterInputWithMediaType:(AVMediaType)mediaType outputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings sourceFormatHint:(nullable CMFormatDescriptionRef)sourceFormatHint NS_AVAILABLE(10_8, 6_0);
 
 /*!
  @method initWithMediaType:outputSettings:
@@ -103,9 +104,9 @@
  
 	For AVMediaTypeAudio the following keys are not currently supported in the outputSettings dictionary: AVEncoderAudioQualityKey and AVSampleRateConverterAudioQualityKey.  When using this initializer, an audio settings dictionary must be fully specified, meaning that it must contain AVFormatIDKey, AVSampleRateKey, and AVNumberOfChannelsKey.  If no other channel layout information is available, a value of 1 for AVNumberOfChannelsKey will result in mono output and a value of 2 will result in stereo output.  If AVNumberOfChannelsKey specifies a channel count greater than 2, the dictionary must also specify a value for AVChannelLayoutKey.  For kAudioFormatLinearPCM, all relevant AVLinearPCM*Key keys must be included, and for kAudioFormatAppleLossless, AVEncoderBitDepthHintKey keys must be included.  See -initWithMediaType:outputSettings:sourceFormatHint: for a way to avoid having to specify a value for each of those keys.
  
-	For AVMediaTypeVideo, any output settings dictionary must request a compressed video format.  This means that the value passed in for outputSettings must follow the rules for compressed video output, as laid out in AVVideoSettings.h.  When using this initializer, a video settings dictionary must be fully specified, meaning that it must contain AVVideoCodecKey, AVVideoWidthKey, and AVVideoHeightKey.  See -initWithMediaType:outputSettings:sourceFormatHint: for a way to avoid having to specify a value for each of those keys.  On iOS, the only values currently supported for AVVideoCodecKey are AVVideoCodecH264 and AVVideoCodecJPEG.  AVVideoCodecH264 is not supported on iPhone 3G.  For AVVideoScalingModeKey, the value AVVideoScalingModeFit is not supported.
+	For AVMediaTypeVideo, any output settings dictionary must request a compressed video format.  This means that the value passed in for outputSettings must follow the rules for compressed video output, as laid out in AVVideoSettings.h.  When using this initializer, a video settings dictionary must be fully specified, meaning that it must contain AVVideoCodecKey, AVVideoWidthKey, and AVVideoHeightKey.  See -initWithMediaType:outputSettings:sourceFormatHint: for a way to avoid having to specify a value for each of those keys.  On iOS, the only values currently supported for AVVideoCodecKey are AVVideoCodecTypeH264 and AVVideoCodecTypeJPEG.  AVVideoCodecTypeH264 is not supported on iPhone 3G.  For AVVideoScalingModeKey, the value AVVideoScalingModeFit is not supported.
  */
-- (instancetype)initWithMediaType:(NSString *)mediaType outputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings;
+- (instancetype)initWithMediaType:(AVMediaType)mediaType outputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings;
 
 /*!
  @method initWithMediaType:outputSettings:sourceFormatHint:
@@ -126,7 +127,7 @@
  
 	An NSInvalidArgumentException will be thrown if the media type of the format description does not match the media type string passed into this method.
  */
-- (instancetype)initWithMediaType:(NSString *)mediaType outputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings sourceFormatHint:(nullable CMFormatDescriptionRef)sourceFormatHint NS_AVAILABLE(10_8, 6_0) NS_DESIGNATED_INITIALIZER;
+- (instancetype)initWithMediaType:(AVMediaType)mediaType outputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings sourceFormatHint:(nullable CMFormatDescriptionRef)sourceFormatHint NS_AVAILABLE(10_8, 6_0) NS_DESIGNATED_INITIALIZER;
 
 /*!
  @property mediaType
@@ -136,7 +137,7 @@
  @discussion
 	The value of this property is one of the media type strings defined in AVMediaFormat.h.
  */
-@property (nonatomic, readonly) NSString *mediaType;
+@property (nonatomic, readonly) AVMediaType mediaType;
 
 /*!
  @property outputSettings
@@ -450,6 +451,39 @@
  */
 @property (nonatomic, copy, nullable) NSURL *sampleReferenceBaseURL NS_AVAILABLE(10_10, 8_0);
 
+typedef NSString *AVAssetWriterInputMediaDataLocation NS_STRING_ENUM NS_AVAILABLE(10_13, 11_0);
+
+/*!
+ @constant AVAssetWriterInputMediaDataLocationInterleavedWithMainMediaData
+	Indicates that the media data should be interleaved with all other media data with this constant.
+ */
+AVF_EXPORT AVAssetWriterInputMediaDataLocation const AVAssetWriterInputMediaDataLocationInterleavedWithMainMediaData NS_AVAILABLE(10_13, 11_0);
+
+/*!
+ @constant AVAssetWriterInputMediaDataLocationBeforeMainMediaDataNotInterleaved
+	Indicates that the media data should be laid out before all the media data with AVAssetWriterInputMediaDataLocationInterleavedWithMainMediaData and not be interleaved.
+ */
+AVF_EXPORT AVAssetWriterInputMediaDataLocation const AVAssetWriterInputMediaDataLocationBeforeMainMediaDataNotInterleaved NS_AVAILABLE(10_13, 11_0);
+
+/*!
+ @property mediaDataLocation
+ @abstract
+	Specifies where the media data will be laid out and whether the media data will be interleaved as the main media data.
+
+ @discussion
+	If this value is set to AVAssetWriterInputMediaDataLocationBeforeMainMediaDataNotInterleaved, AVAssetWriter tries to write the media data for this track before all the media data for AVAssetWriterInputs with this property set to AVAssetWriterInputMediaDataLocationInterleavedWithMainMediaData.
+
+	Use of this property is recommended for optimizing tracks that contain a small amount of data that is needed all at once, independent of playback time, such as chapter name tracks and chapter image tracks.
+	Keep it set to AVAssetWriterInputMediaDataLocationInterleavedWithMainMediaData for tracks whose media data that's needed only as its presentation time is approaching and, when multiple inputs are present that supply media data that will be played concurrently, should be interleaved for optimal access.
+
+	For file types that support preloading media data such as QuickTime movie file, if this value is set to AVAssetWriterInputMediaDataLocationBeforeMainMediaDataNotInterleaved, AVAssetWriter will write an indication such as 'load' atom that the whole media data should be preloaded.
+
+	The default value is AVAssetWriterInputMediaDataLocationInterleavedWithMainMediaData, which means that the receiver will not write the indication and that the media data will be interleaved.
+
+	This property cannot be set after -startWriting has been called on the receiver.
+ */
+@property (nonatomic, copy) AVAssetWriterInputMediaDataLocation mediaDataLocation NS_AVAILABLE(10_13, 11_0);
+
 @end
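
Note: `mediaDataLocation` targets small, load-all-at-once tracks such as chapter lists. A sketch following the header's recommendation, with `format` as a hypothetical `CMFormatDescriptionRef` for the metadata track:

```objc
#import <AVFoundation/AVFoundation.h>

// Lay a chapter-metadata input out before the interleaved A/V data
// (macOS 10.13 / iOS 11). Must be set before -startWriting.
static AVAssetWriterInput *MakeChapterInput(CMFormatDescriptionRef format)
{
    AVAssetWriterInput *input =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeMetadata
                                           outputSettings:nil
                                         sourceFormatHint:format];
    input.mediaDataLocation =
        AVAssetWriterInputMediaDataLocationBeforeMainMediaDataNotInterleaved;
    return input;
}
```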
 
 
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioMix.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioMix.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioMix.h	2016-09-12 23:29:46.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioMix.h	2017-05-24 00:37:43.000000000 -0400
@@ -3,11 +3,12 @@
  
 	Framework:  AVFoundation
  
-	Copyright 2010-2015 Apple Inc. All rights reserved.
+	Copyright 2010-2017 Apple Inc. All rights reserved.
  
  */
 
 #import <AVFoundation/AVBase.h>
+#import <AVFoundation/AVAudioProcessingSettings.h>
 #import <Foundation/Foundation.h>
 #import <CoreMedia/CMBase.h>
 #import <CoreMedia/CMTime.h>
@@ -44,7 +45,7 @@
 NS_CLASS_AVAILABLE(10_7, 4_0)
 @interface AVMutableAudioMix : AVAudioMix {
 @private
-    AVMutableAudioMixInternal    *_mutableAudioMix;
+    AVMutableAudioMixInternal    *_mutableAudioMix __attribute__((unused));
 }
 
 /*  
@@ -104,7 +105,7 @@
    Constants for various time pitch algorithms, e.g. AVAudioTimePitchSpectral, are defined in AVAudioProcessingSettings.h.
    Can be nil, in which case the audioTimePitchAlgorithm set on the AVPlayerItem, AVAssetExportSession, or AVAssetReaderAudioMixOutput on which the AVAudioMix is set will be used for the associated track.
 */
-@property (nonatomic, readonly, copy, nullable) NSString *audioTimePitchAlgorithm NS_AVAILABLE(10_10, 7_0);
+@property (nonatomic, readonly, copy, nullable) AVAudioTimePitchAlgorithm audioTimePitchAlgorithm NS_AVAILABLE(10_10, 7_0);
 
 /*!
  @property		audioTapProcessor
@@ -139,7 +140,7 @@
 NS_CLASS_AVAILABLE(10_7, 4_0)
 @interface AVMutableAudioMixInputParameters : AVAudioMixInputParameters {
 @private
-    AVMutableAudioMixInputParametersInternal    *_mutableInputParameters;
+    AVMutableAudioMixInputParametersInternal    *_mutableInputParameters __attribute__((unused));
 }
 
 /*  
@@ -169,7 +170,7 @@
    Constants for various time pitch algorithms, e.g. AVAudioTimePitchSpectral, are defined in AVAudioProcessingSettings.h.
    Can be nil, in which case the audioTimePitchAlgorithm set on the AVPlayerItem, AVAssetExportSession, or AVAssetReaderAudioMixOutput on which the AVAudioMix is set will be used for the associated track.
 */
-@property (nonatomic, copy, nullable) NSString *audioTimePitchAlgorithm NS_AVAILABLE(10_10, 7_0);
+@property (nonatomic, copy, nullable) AVAudioTimePitchAlgorithm audioTimePitchAlgorithm NS_AVAILABLE(10_10, 7_0);
 
 /*!
  @property		audioTapProcessor
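
Note: `audioTimePitchAlgorithm` on the mix input parameters is now typed `AVAudioTimePitchAlgorithm` (settable only on the mutable subclass). A sketch of per-track selection (helper name is illustrative):

```objc
#import <AVFoundation/AVFoundation.h>

// Build an audio mix whose pitch varies with rate for one track.
static AVAudioMix *MakeVarispeedMix(AVAssetTrack *audioTrack)
{
    AVMutableAudioMixInputParameters *params =
        [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:audioTrack];
    params.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmVarispeed;

    AVMutableAudioMix *mix = [AVMutableAudioMix audioMix];
    mix.inputParameters = @[ params ];
    return mix;
}
```
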
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioProcessingSettings.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioProcessingSettings.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioProcessingSettings.h	2016-09-22 17:56:46.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioProcessingSettings.h	2017-05-23 21:01:44.000000000 -0400
@@ -3,13 +3,26 @@
  
     Framework:  AVFoundation
  
-	Copyright 2013 Apple Inc. All rights reserved.
+	Copyright 2013-2017 Apple Inc. All rights reserved.
  
  */
 
 #import <AVFoundation/AVBase.h>
 #import <Foundation/Foundation.h>
 
+
+/*!
+ @typedef AVAudioTimePitchAlgorithm
+ @abstract
+    The type of a time pitch algorithm.
+ @discussion
+	On OS X, the default algorithm for all time pitch operations is AVAudioTimePitchAlgorithmSpectral.  On iOS, the default algorithm for playback is AVAudioTimePitchAlgorithmLowQualityZeroLatency and the default for export & other offline processing is AVAudioTimePitchAlgorithmSpectral.
+
+	For scaled audio edits, i.e. when the timeMapping of an AVAssetTrackSegment is between timeRanges of unequal duration, it is important to choose an algorithm that supports the full range of edit rates present in the source media.  AVAudioTimePitchAlgorithmSpectral is often the best choice due to the highly inclusive range of rates it supports, assuming that it is desirable to maintain a constant pitch regardless of the edit rate.  If it is instead desirable to allow the pitch to vary with the edit rate, AVAudioTimePitchAlgorithmVarispeed is the best choice.
+ 
+*/
+typedef NSString * AVAudioTimePitchAlgorithm NS_STRING_ENUM;
+
 /*!
  @abstract		Values for time pitch algorithm
  
@@ -29,13 +42,8 @@
 				High quality, no pitch correction. Pitch varies with rate.
                 Variable rate from 1/32 to 32.
  
- @discussion
-	On OS X, the default algorithm for all time pitch operations is AVAudioTimePitchAlgorithmSpectral.  On iOS, the default algorithm for playback is AVAudioTimePitchAlgorithmLowQualityZeroLatency and the default for export & other offline processing is AVAudioTimePitchAlgorithmSpectral.
-
-	For scaled audio edits, i.e. when the timeMapping of an AVAssetTrackSegment is between timeRanges of unequal duration, it is important to choose an algorithm that supports the full range of edit rates present in the source media.  AVAudioTimePitchAlgorithmSpectral is often the best choice due to the highly inclusive range of rates it supports, assuming that it is desirable to maintain a constant pitch regardless of the edit rate.  If it is instead desirable to allow the pitch to vary with the edit rate, AVAudioTimePitchAlgorithmVarispeed is the best choice.
- 
 */
-AVF_EXPORT NSString *const AVAudioTimePitchAlgorithmLowQualityZeroLatency NS_AVAILABLE_IOS(7_0);
-AVF_EXPORT NSString *const AVAudioTimePitchAlgorithmTimeDomain NS_AVAILABLE(10_9, 7_0);
-AVF_EXPORT NSString *const AVAudioTimePitchAlgorithmSpectral NS_AVAILABLE(10_9, 7_0);
-AVF_EXPORT NSString *const AVAudioTimePitchAlgorithmVarispeed NS_AVAILABLE(10_9, 7_0);
+AVF_EXPORT AVAudioTimePitchAlgorithm const AVAudioTimePitchAlgorithmLowQualityZeroLatency NS_AVAILABLE_IOS(7_0);
+AVF_EXPORT AVAudioTimePitchAlgorithm const AVAudioTimePitchAlgorithmTimeDomain NS_AVAILABLE(10_9, 7_0);
+AVF_EXPORT AVAudioTimePitchAlgorithm const AVAudioTimePitchAlgorithmSpectral NS_AVAILABLE(10_9, 7_0);
+AVF_EXPORT AVAudioTimePitchAlgorithm const AVAudioTimePitchAlgorithmVarispeed NS_AVAILABLE(10_9, 7_0);
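
The AVAudioTimePitchAlgorithm constants above are typically consumed through AVPlayerItem or AVAudioMix. A minimal sketch of choosing between constant-pitch and varispeed behavior, along the lines of the discussion in the new typedef (the function name is illustrative, not SDK API):

    #import <AVFoundation/AVFoundation.h>

    // Sketch: constant pitch for playback, varispeed for a scaled edit.
    static AVMutableAudioMix *MakeVarispeedMix(AVPlayerItem *item)
    {
        // Playback: keep pitch constant while the rate changes.
        item.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmSpectral;

        // Scaled edits: let pitch follow the edit rate on a per-track basis.
        AVMutableAudioMixInputParameters *params =
            [AVMutableAudioMixInputParameters audioMixInputParameters];
        params.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmVarispeed;

        AVMutableAudioMix *mix = [AVMutableAudioMix audioMix];
        mix.inputParameters = @[ params ];
        return mix;
    }
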
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioSession.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioSession.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioSession.h	1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAudioSession.h	2017-05-23 21:01:42.000000000 -0400
@@ -0,0 +1,9 @@
+/*
+	File:           AVAudioSession.h
+	Framework:      AVFoundation
+	
+	Copyright 2016 Apple Inc. All rights reserved.
+*/
+
+#import <AVFAudio/AVAudioSession.h>
+
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVBase.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVBase.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVBase.h	2017-02-22 01:08:53.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVBase.h	2017-05-24 00:28:27.000000000 -0400
@@ -3,7 +3,7 @@
 
 	Framework:  AVFoundation
  
-	Copyright 2010-2015 Apple Inc. All rights reserved.
+	Copyright 2010-2017 Apple Inc. All rights reserved.
 
  */
 
@@ -16,30 +16,20 @@
 	#define AVF_EXPORT extern
 #endif
 
-// Annotation for classes that inherit -init from NSObject but cannot be usefully initialized using -init
-#define AV_INIT_UNAVAILABLE - (instancetype)init NS_UNAVAILABLE;
+// Annotation for classes that inherit -init and +new from NSObject but cannot be usefully initialized using -init or +new
+#define AV_INIT_UNAVAILABLE - (instancetype)init NS_UNAVAILABLE; \
+                            + (instancetype)new  NS_UNAVAILABLE;
 
 #ifndef __has_feature
 	#define __has_feature(FEATURE) 0
 #endif
 
-// Generics
-
-// Use when declaring a variable of a generic type
-#if __has_feature(objc_generics)
-	#define AV_GENERIC(BASETYPE, ...) BASETYPE<__VA_ARGS__>
-#else
-	#define AV_GENERIC(BASETYPE, ...) BASETYPE
+#ifndef NS_STRING_ENUM
+	#define NS_STRING_ENUM
 #endif
 
-// Use when declaring a generic class interface
-#define AV_GENERIC_CLASS AV_GENERIC
-
-// Use to refer to generic types in a generic class
-#if __has_feature(objc_generics)
-	#define AV_PARAMETERIZED_TYPE(TYPENAME, TYPEBOUNDS) TYPENAME
-#else
-	#define AV_PARAMETERIZED_TYPE(TYPENAME, TYPEBOUNDS) TYPEBOUNDS
+#ifndef NS_EXTENSIBLE_STRING_ENUM
+	#define NS_EXTENSIBLE_STRING_ENUM
 #endif
 
 // Pre-10.12
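
The expanded AV_INIT_UNAVAILABLE macro now poisons +new as well as -init. A hypothetical adopting class (MyCalibrationConsumer is not part of the SDK) illustrates the effect:

    @interface MyCalibrationConsumer : NSObject
    AV_INIT_UNAVAILABLE
    // Clients must use the designated factory instead of -init or +new.
    + (instancetype)consumerWithData:(NSData *)data;
    @end

    // [MyCalibrationConsumer new];           // compile-time error after this change
    // [[MyCalibrationConsumer alloc] init];  // likewise unavailable, as before
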
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCameraCalibrationData.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCameraCalibrationData.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCameraCalibrationData.h	1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCameraCalibrationData.h	2017-05-24 00:28:27.000000000 -0400
@@ -0,0 +1,171 @@
+/*
+    File:  AVCameraCalibrationData.h
+ 
+    Framework:  AVFoundation
+ 
+    Copyright 2016-2017 Apple Inc. All rights reserved.
+*/
+
+#import <AVFoundation/AVBase.h>
+#import <Foundation/Foundation.h>
+#import <simd/matrix_types.h>
+#import <CoreGraphics/CGGeometry.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class AVCameraCalibrationDataInternal;
+
+/*!
+ @class AVCameraCalibrationData
+ @abstract
+    AVCameraCalibrationData is a model object describing a camera's calibration information.
+ 
+ @discussion
+    When rendering effects to images produced by cameras, or performing computer vision tasks such as correcting images for geometric distortions, it is necessary to characterize the camera's calibration information, such as its pixel focal length, principal point, lens distortion characteristics, etc. AVCameraCalibrationData provides this information.
+ */
+NS_CLASS_AVAILABLE(10_13, 11_0) __TVOS_AVAILABLE(11_0) __WATCHOS_PROHIBITED
+@interface AVCameraCalibrationData : NSObject
+{
+@private
+    AVCameraCalibrationDataInternal *_internal;
+}
+
+AV_INIT_UNAVAILABLE
+
+/*!
+ @property intrinsicMatrix
+ @abstract
+    A camera's intrinsic (K) matrix describes its geometric properties.
+ 
+ @discussion
+    The intrinsic matrix allows one to transform 3D coordinates to 2D coordinates on an image plane using the pinhole camera model. All values are expressed in pixels. The elements in the matrix are:
+       /           \
+       | fx 0   ox |
+       | 0  fy  oy |
+       | 0  0   1  |
+       \           /
+    where fx and fy describe the focal length. For square pixels, their values are identical.
+    ox and oy are the offset of the principal point. The origin is the upper left of the frame.
+ */
+@property(nonatomic, readonly) matrix_float3x3 intrinsicMatrix;
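
A short sketch of the pinhole projection the intrinsic matrix enables, assuming a camera-space point with positive Z; the column layout and matrix_multiply call follow the simd library:

    #import <simd/simd.h>
    #import <CoreGraphics/CGGeometry.h>

    // Project a camera-space point (X, Y, Z) to pixel coordinates using K.
    static CGPoint ProjectPoint(vector_float3 point, matrix_float3x3 K)
    {
        // K * (X, Y, Z) = (fx*X + ox*Z, fy*Y + oy*Z, Z); divide by Z for pixels.
        vector_float3 h = matrix_multiply(K, point);
        return CGPointMake(h.x / h.z, h.y / h.z);
    }
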
+
+/*!
+ @property intrinsicMatrixReferenceDimensions
+ @abstract
+    The reference frame dimensions used in calculating a camera's principal point.
+ 
+ @discussion
+    A camera's intrinsic matrix expresses values in pixels with respect to a frame of this width and height.
+ */
+@property(nonatomic, readonly) CGSize intrinsicMatrixReferenceDimensions;
+
+/*!
+ @property extrinsicMatrix
+ @abstract
+    A camera's extrinsic matrix describes its pose (position and direction) in world coordinates.
+ 
+ @discussion
+    The extrinsic matrix consists of a unitless 3x3 rotation matrix (R) on the left and a translation (t) 3x1 column vector on the right. The translation vector's units are millimeters. The camera's pose is expressed with respect to a reference camera (camera-to-world view). If the rotation matrix is an identity matrix, then this camera is the reference camera. Note that a matrix_float4x3 matrix is column major with 3 rows and 4 columns.
+               /                       \
+       /   \   | r1,1  r1,2  r1,3 | t1 |
+       |R|t| = | r2,1  r2,2  r2,3 | t2 |
+       \   /   | r3,1  r3,2  r3,3 | t3 |
+               \                       /
+ */
+@property(nonatomic, readonly) matrix_float4x3 extrinsicMatrix;
+
+/*!
+ @property pixelSize
+ @abstract
+    The size of one pixel in millimeters
+ */
+@property(nonatomic, readonly) float pixelSize;
+
+/*!
+ @property lensDistortionLookupTable
+ @abstract
+    An NSData of floats describing the camera lens' radial distortions.
+ 
+ @discussion
+    Images captured by a camera are geometrically warped by radial distortions in the lens. In order to project from the 2D image plane back into the 3D world, the images must be distortion corrected, or made rectilinear. Lens distortion is modeled using a one-dimensional lookup table of 32-bit float values evenly distributed along a radius from the center of the distortion to the farthest corner, with each value representing an elongation or compression of the radius (1.0 for any given point indicates no elongation). This model assumes radially symmetric lens distortion. When dealing with AVDepthData, the disparity / depth map representations are geometrically distorted to align with images produced by the camera. For more information, see the reference implementation below.
+ */
+@property(nonatomic, readonly) NSData *lensDistortionLookupTable;
+
+/*!
+ @property inverseLensDistortionLookupTable
+ @abstract
+    An NSData of floats describing the inverse lookup table required to reapply the camera lens' radial distortions to a rectified image.
+ 
+ @discussion
+    See lensDistortionLookupTable. If you've rectified an image by removing the distortions characterized by the lensDistortionLookupTable, and now wish to go back to geometrically distorted, you may use the inverseLensDistortionLookupTable. For more information, see the reference implementation below.
+ */
+@property(nonatomic, readonly) NSData *inverseLensDistortionLookupTable;
+
+/*!
+ @property lensDistortionCenter
+ @abstract
+    A CGPoint describing the offset of the lens' distortion center from the top left.
+ 
+ @discussion
+    Due to geometric distortions in the image, the center of the distortion may not be equal to the optical center (principal point) of the lens. When making an image rectilinear, the distortion center should be used rather than the optical center of the image. For more information, see the reference implementation below.
+ */
+@property(nonatomic, readonly) CGPoint lensDistortionCenter;
+
+/*
+    The following reference implementation illustrates how to use the lensDistortionLookupTable, inverseLensDistortionLookupTable, and lensDistortionCenter properties to find points in the lens-distorted or undistorted (rectilinear, corrected) space. If you have a distorted image (such as a photo taken by a camera) and want to find a particular point in a corresponding undistorted image, you would call the sample method below using the inverseLensDistortionLookupTable. If you have an undistorted (aka distortion-corrected) image and want to find a point in the distorted image's space, you would call the sample method below using the lensDistortionLookupTable.
+ 
+    To apply distortion correction to an image, you'd begin with an empty destination buffer and iterate through it row by row, calling the sample implementation below for each point in the output image, passing the lensDistortionLookupTable to find the corresponding value in the distorted image, and write it to your output buffer. Please note that the "point", "opticalCenter", and "imageSize" parameters below must be in the same coordinate system, i.e. both at full resolution, or both scaled to a different resolution but with the same aspect ratio.
+ 
+- (CGPoint)lensDistortionPointForPoint:(CGPoint)point
+                           lookupTable:(NSData *)lookupTable
+               distortionOpticalCenter:(CGPoint)opticalCenter
+                             imageSize:(CGSize)imageSize
+{
+    // The lookup table holds the radial magnification for n linearly spaced radii.
+    // The first position corresponds to radius = 0
+    // The last position corresponds to the largest radius found in the image.
+ 
+    // Determine the maximum radius.
+    float delta_ocx_max = MAX( opticalCenter.x, imageSize.width  - opticalCenter.x );
+    float delta_ocy_max = MAX( opticalCenter.y, imageSize.height - opticalCenter.y );
+    float r_max = sqrtf( delta_ocx_max * delta_ocx_max + delta_ocy_max * delta_ocy_max );
+ 
+    // Determine the vector from the optical center to the given point.
+    float v_point_x = point.x - opticalCenter.x;
+    float v_point_y = point.y - opticalCenter.y;
+ 
+    // Determine the radius of the given point.
+    float r_point = sqrtf( v_point_x * v_point_x + v_point_y * v_point_y );
+ 
+    // Look up the radial magnification to apply in the provided lookup table
+    float magnification;
+    const float *lookupTableValues = lookupTable.bytes;
+    NSUInteger lookupTableCount = lookupTable.length / sizeof(float);
+ 
+    if ( r_point < r_max ) {
+        // Linear interpolation
+        float val   = r_point * ( lookupTableCount - 1 ) / r_max;
+        int   idx   = (int)val;
+        float frac  = val - idx;
+ 
+        float mag_1 = lookupTableValues[idx];
+        float mag_2 = lookupTableValues[idx + 1];
+ 
+        magnification = ( 1.0f - frac ) * mag_1 + frac * mag_2;
+    }
+    else {
+        magnification = lookupTableValues[lookupTableCount - 1];
+    }
+ 
+    // Apply radial magnification
+    float new_v_point_x = magnification * v_point_x;
+    float new_v_point_y = magnification * v_point_y;
+ 
+    // Construct output
+    return CGPointMake( opticalCenter.x + new_v_point_x, opticalCenter.y + new_v_point_y );
+}
+ */
+
+@end
+
+NS_ASSUME_NONNULL_END
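
Assuming the reference implementation above has been copied verbatim into a class, mapping a captured (distorted) pixel into rectilinear space is a one-line call with the inverse table (the wrapper method name is illustrative):

    // Sketch: undistort a point from a captured image. Per the discussion above,
    // going from distorted to rectilinear space uses the inverse lookup table.
    - (CGPoint)rectifiedPointForPoint:(CGPoint)distortedPoint
                          calibration:(AVCameraCalibrationData *)calibration
                            imageSize:(CGSize)imageSize
    {
        return [self lensDistortionPointForPoint:distortedPoint
                                     lookupTable:calibration.inverseLensDistortionLookupTable
                         distortionOpticalCenter:calibration.lensDistortionCenter
                                       imageSize:imageSize];
    }
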
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioDataOutput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioDataOutput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioDataOutput.h	1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioDataOutput.h	2017-05-24 00:28:07.000000000 -0400
@@ -0,0 +1,147 @@
+/*
+    File:  AVCaptureAudioDataOutput.h
+ 
+    Framework:  AVFoundation
+ 
+    Copyright 2010-2017 Apple Inc. All rights reserved.
+*/
+
+#import <AVFoundation/AVCaptureOutputBase.h>
+#import <AVFoundation/AVMediaFormat.h>
+#import <CoreMedia/CMSampleBuffer.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark AVCaptureAudioDataOutput
+
+@class AVCaptureAudioDataOutputInternal;
+@protocol AVCaptureAudioDataOutputSampleBufferDelegate;
+
+/*!
+ @class AVCaptureAudioDataOutput
+ @abstract
+    AVCaptureAudioDataOutput is a concrete subclass of AVCaptureOutput that can be used to process uncompressed or compressed samples from the audio being captured.
+ 
+ @discussion
+    Instances of AVCaptureAudioDataOutput produce audio sample buffers suitable for processing using other media APIs. Applications can access the sample buffers with the captureOutput:didOutputSampleBuffer:fromConnection: delegate method.
+ */
+NS_CLASS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED
+@interface AVCaptureAudioDataOutput : AVCaptureOutput 
+{
+@private
+    AVCaptureAudioDataOutputInternal *_internal;
+}
+
+- (instancetype)init;
+
++ (instancetype)new;
+
+/*!
+ @method setSampleBufferDelegate:queue:
+ @abstract
+    Sets the receiver's delegate that will accept captured buffers and dispatch queue on which the delegate will be called.
+ 
+ @param sampleBufferDelegate
+    An object conforming to the AVCaptureAudioDataOutputSampleBufferDelegate protocol that will receive sample buffers after they are captured.
+ @param sampleBufferCallbackQueue
+    A dispatch queue on which all sample buffer delegate methods will be called.
+ 
+ @discussion
+    When a new audio sample buffer is captured it will be vended to the sample buffer delegate using the captureOutput:didOutputSampleBuffer:fromConnection: delegate method. All delegate methods will be called on the specified dispatch queue. If the queue is blocked when new samples are captured, those samples will be automatically dropped when they become sufficiently late. This allows clients to process existing samples on the same queue without having to manage the potential memory usage increases that would otherwise occur when that processing is unable to keep up with the rate of incoming samples.
+ 
+    Clients that need to minimize the chances of samples being dropped should specify a queue on which a sufficiently small amount of processing is being done outside of receiving sample buffers. However, if such clients migrate extra processing to another queue, they are responsible for ensuring that memory usage does not grow without bound from samples that have not been processed.
+ 
+    A serial dispatch queue must be used to guarantee that audio samples will be delivered in order. The sampleBufferCallbackQueue parameter may not be NULL, except when setting sampleBufferDelegate to nil.
+ */
+- (void)setSampleBufferDelegate:(nullable id<AVCaptureAudioDataOutputSampleBufferDelegate>)sampleBufferDelegate queue:(nullable dispatch_queue_t)sampleBufferCallbackQueue;
+
+/*!
+ @property sampleBufferDelegate
+ @abstract
+    The receiver's delegate.
+ 
+ @discussion
+    The value of this property is an object conforming to the AVCaptureAudioDataOutputSampleBufferDelegate protocol that will receive sample buffers after they are captured. The delegate is set using the setSampleBufferDelegate:queue: method.
+ */
+@property(nonatomic, readonly, nullable) id<AVCaptureAudioDataOutputSampleBufferDelegate> sampleBufferDelegate;
+
+/*!
+ @property sampleBufferCallbackQueue
+ @abstract
+    The dispatch queue on which all sample buffer delegate methods will be called.
+ 
+ @discussion
+    The value of this property is a dispatch_queue_t. The queue is set using the setSampleBufferDelegate:queue: method.
+ */
+@property(nonatomic, readonly, nullable) dispatch_queue_t sampleBufferCallbackQueue;
+
+#if TARGET_OS_OSX
+
+/*!
+ @property audioSettings
+ @abstract
+    Specifies the settings used to decode or re-encode audio before it is output by the receiver.
+ 
+ @discussion
+    The value of this property is an NSDictionary containing values for audio settings keys defined in AVAudioSettings.h. When audioSettings is set to nil, the AVCaptureAudioDataOutput vends samples in their device native format.
+ */
+@property(nonatomic, copy, null_resettable) NSDictionary<NSString *, id> *audioSettings NS_AVAILABLE_MAC(10_7);
+
+#endif // TARGET_OS_OSX
+
+/*!
+ @method recommendedAudioSettingsForAssetWriterWithOutputFileType:
+ @abstract
+    Specifies the recommended settings for use with an AVAssetWriterInput.
+ 
+ @param outputFileType
+    Specifies the UTI of the file type to be written (see AVMediaFormat.h for a list of file format UTIs).
+ @result
+    A fully populated dictionary of keys and values that are compatible with AVAssetWriter.
+ 
+ @discussion
+    The value of this property is an NSDictionary containing values for compression settings keys defined in AVAudioSettings.h. This dictionary is suitable for use as the "outputSettings" parameter when creating an AVAssetWriterInput, such as,
+ 
+       [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:outputSettings sourceFormatHint:hint];
+ 
+    The dictionary returned contains all necessary keys and values needed by AVAssetWriter (see AVAssetWriterInput.h, -initWithMediaType:outputSettings: for a more in depth discussion). For QuickTime movie and ISO files, the recommended audio settings will always produce output comparable to that of AVCaptureMovieFileOutput.
+ 
+    Note that the dictionary of settings is dependent on the current configuration of the receiver's AVCaptureSession and its inputs. The settings dictionary may change if the session's configuration changes. As such, you should configure your session first, then query the recommended audio settings.
+ */
+- (nullable NSDictionary *)recommendedAudioSettingsForAssetWriterWithOutputFileType:(AVFileType)outputFileType NS_AVAILABLE_IOS(7_0);
+
+@end
+
+
+/*!
+ @protocol AVCaptureAudioDataOutputSampleBufferDelegate
+ @abstract
+    Defines an interface for delegates of AVCaptureAudioDataOutput to receive captured audio sample buffers.
+ */
+NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED
+@protocol AVCaptureAudioDataOutputSampleBufferDelegate <NSObject>
+
+@optional
+
+/*!
+ @method captureOutput:didOutputSampleBuffer:fromConnection:
+ @abstract
+    Called whenever an AVCaptureAudioDataOutput instance outputs a new audio sample buffer.
+ 
+ @param output
+    The AVCaptureAudioDataOutput instance that output the samples.
+ @param sampleBuffer
+    A CMSampleBuffer object containing the audio samples and additional information about them, such as their format and presentation time.
+ @param connection
+    The AVCaptureConnection from which the audio was received.
+ 
+ @discussion
+    Delegates receive this message whenever the output captures and outputs new audio samples, decoding or re-encoding as specified by the audioSettings property. Delegates can use the provided sample buffer in conjunction with other APIs for further processing. This method will be called on the dispatch queue specified by the output's sampleBufferCallbackQueue property. This method is called periodically, so it must be efficient to prevent capture performance problems, including dropped audio samples.
+ 
+    Clients that need to reference the CMSampleBuffer object outside of the scope of this method must CFRetain it and then CFRelease it when they are finished with it.
+ */
+- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection;
+
+@end
+
+NS_ASSUME_NONNULL_END
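
A sketch of the typical wiring described above: a serial queue for in-order delivery, and (on iOS) the recommended writer settings queried only after the session is configured. Class and queue names are illustrative:

    #import <AVFoundation/AVFoundation.h>

    @interface AudioCaptureController : NSObject <AVCaptureAudioDataOutputSampleBufferDelegate>
    @end

    @implementation AudioCaptureController
    {
        AVCaptureSession *_session;
        AVCaptureAudioDataOutput *_audioOutput;
    }

    - (void)start
    {
        _session = [[AVCaptureSession alloc] init];
        AVCaptureDevice *mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:mic error:NULL];
        if (input && [_session canAddInput:input]) [_session addInput:input];

        _audioOutput = [[AVCaptureAudioDataOutput alloc] init];
        // A serial queue guarantees in-order sample delivery, per the discussion above.
        dispatch_queue_t queue = dispatch_queue_create("audio.samples", DISPATCH_QUEUE_SERIAL);
        [_audioOutput setSampleBufferDelegate:self queue:queue];
        if ([_session canAddOutput:_audioOutput]) [_session addOutput:_audioOutput];
        [_session startRunning];

        // Configure the session first, then ask for writer settings (iOS only).
        NSDictionary *settings =
            [_audioOutput recommendedAudioSettingsForAssetWriterWithOutputFileType:AVFileTypeQuickTimeMovie];
        (void)settings; // would become the outputSettings of an AVAssetWriterInput
    }

    - (void)captureOutput:(AVCaptureOutput *)output
        didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
               fromConnection:(AVCaptureConnection *)connection
    {
        // Process samples here; CFRetain the buffer if it must outlive this call.
    }
    @end
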
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioPreviewOutput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioPreviewOutput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioPreviewOutput.h	1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureAudioPreviewOutput.h	2017-05-23 21:01:45.000000000 -0400
@@ -0,0 +1,58 @@
+/*
+    File:  AVCaptureAudioPreviewOutput.h
+ 
+    Framework:  AVFoundation
+ 
+    Copyright 2010-2017 Apple Inc. All rights reserved.
+*/
+
+#import <AVFoundation/AVCaptureOutputBase.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark AVCaptureAudioPreviewOutput
+
+@class AVCaptureAudioPreviewOutputInternal;
+
+/*!
+ @class AVCaptureAudioPreviewOutput
+ @abstract
+    AVCaptureAudioPreviewOutput is a concrete subclass of AVCaptureOutput that can be used to preview the audio being captured.
+ 
+ @discussion
+    Instances of AVCaptureAudioPreviewOutput have an associated Core Audio output device that can be used to play audio being captured by the capture session. The unique ID of a Core Audio device can be obtained from its kAudioDevicePropertyDeviceUID property.
+ */
+NS_CLASS_AVAILABLE_MAC(10_7) __TVOS_PROHIBITED
+@interface AVCaptureAudioPreviewOutput : AVCaptureOutput 
+{
+@private
+    AVCaptureAudioPreviewOutputInternal *_internal;
+}
+
+- (instancetype)init;
+
++ (instancetype)new;
+
+/*!
+ @property outputDeviceUniqueID
+ @abstract
+    Specifies the unique ID of the Core Audio output device being used to play preview audio.
+ 
+ @discussion
+    The value of this property is an NSString containing the unique ID of the Core Audio device to be used for output, or nil if the default system output should be used.
+ */
+@property(nonatomic, copy, nullable) NSString *outputDeviceUniqueID;
+
+/*!
+ @property volume
+ @abstract
+    Specifies the preview volume of the output.
+ 
+ @discussion
+    The value of this property is the preview volume of the receiver, where 1.0 is the maximum volume and 0.0 is muted. 
+ */
+@property(nonatomic) float volume;
+
+@end
+
+NS_ASSUME_NONNULL_END
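
A small usage sketch (macOS): nil routes preview audio to the default system output, and volume is linear between 0.0 and 1.0 (the factory function is illustrative):

    static AVCaptureAudioPreviewOutput *MakePreviewOutput(void)
    {
        AVCaptureAudioPreviewOutput *preview = [[AVCaptureAudioPreviewOutput alloc] init];
        // nil selects the default system output; otherwise pass a device's
        // kAudioDevicePropertyDeviceUID string.
        preview.outputDeviceUniqueID = nil;
        preview.volume = 0.5f; // 0.0 is muted, 1.0 is maximum
        return preview;
    }
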
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDataOutputSynchronizer.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDataOutputSynchronizer.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDataOutputSynchronizer.h	1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDataOutputSynchronizer.h	2017-05-24 00:28:07.000000000 -0400
@@ -0,0 +1,354 @@
+/*
+    File:  AVCaptureDataOutputSynchronizer.h
+ 
+    Framework:  AVFoundation
+ 
+    Copyright 2016-2017 Apple Inc. All rights reserved.
+*/
+
+#import <AVFoundation/AVCaptureOutput.h>
+#import <CoreMedia/CMSampleBuffer.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark AVCaptureDataOutputSynchronizer
+
+@class AVCaptureDataOutputSynchronizerInternal;
+@class AVCaptureSynchronizedDataCollection;
+@protocol AVCaptureDataOutputSynchronizerDelegate;
+
+/*!
+ @class AVCaptureDataOutputSynchronizer
+ @abstract
+    AVCaptureDataOutputSynchronizer synchronizes the delivery of data from multiple capture data outputs (AVCaptureVideoDataOutput, AVCaptureDepthDataOutput, AVCaptureMetadataOutput, AVCaptureAudioDataOutput) to a single delegate callback.
+
+ @discussion
+    AVCaptureDataOutputSynchronizer is initialized with an array of data outputs (AVCaptureVideoDataOutput, AVCaptureDepthDataOutput, AVCaptureMetadataOutput, or AVCaptureAudioDataOutput) from which you'd like to receive a single, synchronized delegate callback. The first output in the array acts as the master data output and determines when the synchronized callback is delivered. When data is received for the master data output, it is held until all other data outputs have received data with an equal or later presentation time stamp, or it has been determined that there is no data for a particular output at the master data output's pts. Once all other outputs are ready, a single delegate callback is sent with all the data aligned with the master data output's data. Separate delegate callbacks are sent for any other data received with presentation time stamps earlier than the next master data output time.
+
+    For instance, if you specify a video data output as your first (master) output and a metadata output for detected faces as your second output, your data callback will not be called until there is face data ready for a video frame, or it is assured that there is no face metadata for that particular video frame.
+ 
+    Note that the AVCaptureDataOutputSynchronizer overrides each data output's -setSampleBufferDelegate:queue:, -setDelegate:callbackQueue:, or -setMetadataObjectsDelegate:queue: method call. -[AVCaptureVideoDataOutput alwaysDiscardsLateVideoFrames] and -[AVCaptureDepthDataOutput alwaysDiscardsLateDepthData] properties are honored.
+ */
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureDataOutputSynchronizer : NSObject
+{
+@private
+    AVCaptureDataOutputSynchronizerInternal *_internal;
+}
+
+AV_INIT_UNAVAILABLE
+
+/*!
+ @method initWithDataOutputs:
+ @abstract
+    Instantiates an AVCaptureDataOutputSynchronizer from one or more capture data outputs.
+ 
+ @param dataOutputs
+    An array of capture data outputs where the first is the master.
+ @result
+    A newly initialized AVCaptureDataOutputSynchronizer instance.
+ */
+- (instancetype)initWithDataOutputs:(NSArray<AVCaptureOutput *> *)dataOutputs;
+
+/*!
+ @property dataOutputs
+ @abstract
+    The data outputs provided in the initializer method.
+ */
+@property(readonly, retain) NSArray<AVCaptureOutput *> *dataOutputs;
+
+/*!
+ @method setDelegate:queue:
+ @abstract
+    Sets the receiver's delegate that will accept synchronized data and the dispatch queue on which the delegate will be called.
+ 
+ @param delegate
+    An object conforming to the AVCaptureDataOutputSynchronizerDelegate protocol that will receive synchronized data from the provided data outputs.
+ @param delegateCallbackQueue
+    A dispatch queue on which all AVCaptureDataOutputSynchronizerDelegate methods will be called.
+ 
+ @discussion
+    AVCaptureDataOutputSynchronizer gathers data from its dataOutputs, and when it determines that all data has been received for a given timestamp, it calls the specified delegate on the specified delegateCallbackQueue. AVCaptureDataOutputSynchronizer overrides all the data outputs' delegates and callbacks. Data outputs under the control of AVCaptureDataOutputSynchronizer do not fire delegate callbacks. Delegate callbacks are restored to individual data outputs when you call this method with nil as your delegate and NULL as your delegateCallbackQueue.
+ 
+    A serial dispatch queue must be used to guarantee that synchronized data will be delivered in order. The delegateCallbackQueue parameter may not be NULL, except when setting the delegate to nil.
+ */
+- (void)setDelegate:(nullable id<AVCaptureDataOutputSynchronizerDelegate>)delegate queue:(nullable dispatch_queue_t)delegateCallbackQueue;
+
+/*!
+ @property delegate
+ @abstract
+    The receiver's delegate.
+ 
+ @discussion
+    The value of this property is an object conforming to the AVCaptureDataOutputSynchronizerDelegate protocol that will receive synchronized data output. The delegate is set using the -setDelegate:queue: method. This property is key-value observable.
+ */
+@property(nullable, nonatomic, readonly) id<AVCaptureDataOutputSynchronizerDelegate> delegate;
+
+/*!
+ @property delegateCallbackQueue
+ @abstract
+    The dispatch queue on which all AVCaptureDataOutputSynchronizerDelegate methods will be called.
+ 
+ @discussion
+    The value of this property is a dispatch_queue_t. The queue is set using the -setDelegate:queue: method.
+ */
+@property(nullable, nonatomic, readonly) dispatch_queue_t delegateCallbackQueue;
+
+@end
+
+
+NS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@protocol AVCaptureDataOutputSynchronizerDelegate <NSObject>
+
+@required
+/*!
+ @method dataOutputSynchronizer:didOutputSynchronizedDataCollection:
+ @abstract
+    Called when an AVCaptureDataOutputSynchronizer instance outputs synchronized data from one or more data outputs.
+ 
+ @param synchronizer
+    The AVCaptureDataOutputSynchronizer instance delivering synchronized data.
+ @param synchronizedDataCollection
+    A collection of synchronized data objects indexed by data output.
+ 
+ @discussion
+    The synchronized data collection only contains synchronized data for capture outputs with synchronized data ready.
+ */
+- (void)dataOutputSynchronizer:(AVCaptureDataOutputSynchronizer *)synchronizer didOutputSynchronizedDataCollection:(AVCaptureSynchronizedDataCollection *)synchronizedDataCollection;
+
+@end
+
+
+#pragma mark - AVCaptureSynchronizedDataCollection
+
+@class AVCaptureSynchronizedData;
+@class AVCaptureSynchronizedDataCollectionInternal;
+
+/*!
+ @class AVCaptureSynchronizedDataCollection
+ @abstract
+    A collection of AVCaptureSynchronizedData objects.
+ 
+ @discussion
+    AVCaptureDataOutputSynchronizer's -dataOutputSynchronizer:didOutputSynchronizedDataCollection: delegate method delivers a collection of AVCaptureSynchronizedData objects, each keyed by the AVCaptureOutput that produced it. AVCaptureSynchronizedDataCollection supports object subscripting and fast enumeration, with the data outputs as keys.
+ */
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureSynchronizedDataCollection : NSObject <NSFastEnumeration>
+{
+@private
+    AVCaptureSynchronizedDataCollectionInternal *_internal;
+}
+
+AV_INIT_UNAVAILABLE
+
+/*!
+ @method synchronizedDataForCaptureOutput:
+ @abstract
+    Provides the synchronized data object for a given capture output.
+ 
+ @param captureOutput
+    The data output whose synchronized data you'd like to inspect.
+ @result
+    The synchronized data object associated with the provided output, or nil, if there is none.
+ */
+- (nullable AVCaptureSynchronizedData *)synchronizedDataForCaptureOutput:(AVCaptureOutput *)captureOutput;
+
+/*!
+ @method objectForKeyedSubscript:
+ @abstract
+    Method that provides support for object subscripting.
+ 
+ @param key
+    The data output whose synchronized data you'd like to inspect.
+ @result
+    The synchronized data object associated with the provided output, or nil, if there is none.
+ 
+ @discussion
+    AVCaptureSynchronizedDataCollection supports object subscripting. If you'd like to find the synchronized data for a given data output, simply:
+        AVCaptureSynchronizedData *synchronizedData = synchronizedDataCollection[dataOutput];
+ */
+- (nullable AVCaptureSynchronizedData *)objectForKeyedSubscript:(AVCaptureOutput *)key;
+
+/*!
+ @property count
+ @abstract
+    The number of items in the collection.
+ 
+ @discussion
+    Returns the number of data output / synchronized data pairs present in the collection.
+ */
+@property(readonly) NSUInteger count;
+
+@end
+
+
+#pragma mark - AVCaptureSynchronizedData
+
+@class AVCaptureSynchronizedDataInternal;
+
+/*!
+ @class AVCaptureSynchronizedData
+ @abstract
+    An abstract base class representing the data delivered by a data output through the AVCaptureDataOutputSynchronizer interface.
+ 
+ @discussion
+    AVCaptureDataOutputSynchronizer's -dataOutputSynchronizer:didOutputSynchronizedDataCollection: delegate callback delivers a keyed collection, with the keys being the AVCaptureOutput instances returning data, and the values being concrete subclasses of AVCaptureSynchronizedData.
+ */
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureSynchronizedData : NSObject
+{
+@private
+    AVCaptureSynchronizedDataInternal *_synchronizedDataInternal;
+}
+
+AV_INIT_UNAVAILABLE
+
+/*!
+ @property timestamp
+ @abstract
+    The time at which this synchronized data was captured.
+ 
+ @discussion
+    Synchronized data is always clocked to the masterClock of the AVCaptureSession to which the data output is connected.
+ */
+@property(readonly) CMTime timestamp;
+
+@end
+
+
+#pragma mark - AVCaptureSynchronizedSampleBufferData
+
+@class AVCaptureSynchronizedSampleBufferDataInternal;
+
+/*!
+ @class AVCaptureSynchronizedSampleBufferData
+ @abstract
+    A concrete subclass of AVCaptureSynchronizedData representing the data delivered by an AVCaptureVideoDataOutput or AVCaptureAudioDataOutput.
+
+ @discussion
+    Synchronized sample buffer data is valid for the duration of AVCaptureDataOutputSynchronizer's -dataOutputSynchronizer:didOutputSynchronizedDataCollection: delegate callback. To extend the sample buffer data beyond the callback, you must CFRetain it, and later call CFRelease when you're done with it.
+ */
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureSynchronizedSampleBufferData : AVCaptureSynchronizedData
+{
+@private
+    AVCaptureSynchronizedSampleBufferDataInternal *_internal;
+}
+
+/*!
+ @property sampleBuffer
+ @abstract
+    A sample buffer containing video or audio data.
+ 
+ @discussion
+    If sampleBufferWasDropped is YES, the returned sampleBuffer was dropped before it could be delivered to you, and thus this sample buffer is a shell containing metadata and format information, but no actual pixel data. This property is never NULL. If a data output has no data to return, it is simply not present in the collection of synchronized data returned by AVCaptureDataOutputSynchronizer's -dataOutputSynchronizer:didOutputSynchronizedDataCollection: delegate callback.
+ */
+@property(readonly) CMSampleBufferRef sampleBuffer;
+
+/*!
+ @property sampleBufferWasDropped
+ @abstract
+    YES if the sample buffer was dropped.
+ 
+ @discussion
+    AVCaptureVideoDataOutput has a delegate callback for dropped sample buffers. AVCaptureAudioDataOutput does not. Therefore, sampleBufferWasDropped may be YES for video, but never for audio.
+ */
+@property(readonly) BOOL sampleBufferWasDropped;
+
+/*!
+ @property droppedReason
+ @abstract
+    If sampleBufferWasDropped is YES, the reason for the drop, otherwise AVCaptureOutputDataDroppedReasonNone.
+ 
+ @discussion
+    AVCaptureOutputDataDroppedReasons are defined in AVCaptureOutputBase.h.
+ */
+@property(readonly) AVCaptureOutputDataDroppedReason droppedReason;
+
+@end
+
+
+#pragma mark - AVCaptureSynchronizedMetadataObjectData
+
+@class AVCaptureSynchronizedMetadataObjectDataInternal;
+
+/*!
+ @class AVCaptureSynchronizedMetadataObjectData
+ @abstract
+    A concrete subclass of AVCaptureSynchronizedData representing the data delivered by an AVCaptureMetadataOutput.
+ 
+ @discussion
+    A single AVCaptureMetadataOutput may be configured to deliver multiple kinds of metadata objects (such as QRCodes and detected faces). AVCaptureSynchronizedMetadataObjectData's -metadataObjects array may contain multiple AVMetadataObject subclasses, depending on how the AVCaptureMetadataOutput was configured. All synchronized metadata objects share a common timestamp.
+ */
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureSynchronizedMetadataObjectData : AVCaptureSynchronizedData
+{
+@private
+    AVCaptureSynchronizedMetadataObjectDataInternal *_internal;
+}
+
+/*!
+ @property metadataObjects
+ @abstract
+    An array of AVMetadataObject subclasses.
+ 
+ @discussion
+    -metadataObjects is never nil. If no metadata objects are present for a given time, an empty array is returned.
+ */
+@property(readonly) NSArray<AVMetadataObject *> *metadataObjects;
+
+@end
+
+
+#pragma mark - AVCaptureSynchronizedDepthData
+
+@class AVCaptureSynchronizedDepthDataInternal;
+
+/*!
+ @class AVCaptureSynchronizedDepthData
+ @abstract
+    A concrete subclass of AVCaptureSynchronizedData representing the data delivered by an AVCaptureDepthDataOutput.
+ 
+ @discussion
+    Depth data, like video, may be dropped if not serviced in a timely fashion.
+ */
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureSynchronizedDepthData : AVCaptureSynchronizedData
+{
+@private
+    AVCaptureSynchronizedDepthDataInternal *_internal;
+}
+
+/*!
+ @property depthData
+ @abstract
+    An instance of AVDepthData.
+ 
+ @discussion
+    If depthDataWasDropped is YES, the returned depthData was dropped before it could be delivered to you, and thus this AVDepthData is a shell containing format information and calibration data, but no actual pixel map data. This property is never nil. If a data output has no data to return, it is simply not present in the collection of synchronized data returned by AVCaptureDataOutputSynchronizer's -dataOutputSynchronizer:didOutputSynchronizedDataCollection: delegate callback.
+ */
+@property(readonly) AVDepthData *depthData;
+
+/*!
+ @property depthDataWasDropped
+ @abstract
+    YES if the depth data was dropped.
+ 
+ @discussion
+    If YES, inspect -droppedReason for the reason.
+ */
+@property(readonly) BOOL depthDataWasDropped;
+
+/*!
+ @property droppedReason
+ @abstract
+    If depthDataWasDropped is YES, the reason for the drop, otherwise AVCaptureOutputDataDroppedReasonNone.
+ 
+ @discussion
+    AVCaptureOutputDataDroppedReasons are defined in AVCaptureOutputBase.h.
+ */
+@property(readonly) AVCaptureOutputDataDroppedReason droppedReason;
+
+@end
+
+NS_ASSUME_NONNULL_END
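
A sketch of the setup described above, synchronizing video and depth delivery (iOS 11) with the video output as master; the controller class is hypothetical and both outputs are assumed to be attached to a configured session:

    @interface SyncController : NSObject <AVCaptureDataOutputSynchronizerDelegate>
    @end

    @implementation SyncController
    {
        AVCaptureVideoDataOutput *_videoOutput;
        AVCaptureDepthDataOutput *_depthOutput;
        AVCaptureDataOutputSynchronizer *_synchronizer;
    }

    - (void)beginSynchronizingWithVideoOutput:(AVCaptureVideoDataOutput *)video
                                  depthOutput:(AVCaptureDepthDataOutput *)depth
    {
        _videoOutput = video;
        _depthOutput = depth;
        // The first output in the array is the master; its timestamps drive delivery.
        _synchronizer = [[AVCaptureDataOutputSynchronizer alloc]
                         initWithDataOutputs:@[ video, depth ]];
        dispatch_queue_t queue = dispatch_queue_create("sync.data", DISPATCH_QUEUE_SERIAL);
        [_synchronizer setDelegate:self queue:queue];
    }

    - (void)dataOutputSynchronizer:(AVCaptureDataOutputSynchronizer *)synchronizer
    didOutputSynchronizedDataCollection:(AVCaptureSynchronizedDataCollection *)collection
    {
        // Subscript by data output, as described above; either entry may be absent.
        AVCaptureSynchronizedSampleBufferData *videoData =
            (AVCaptureSynchronizedSampleBufferData *)collection[_videoOutput];
        AVCaptureSynchronizedDepthData *depthData =
            (AVCaptureSynchronizedDepthData *)collection[_depthOutput];
        if (videoData && !videoData.sampleBufferWasDropped &&
            depthData && !depthData.depthDataWasDropped) {
            // A matched video frame and depth map for the same master timestamp.
        }
    }
    @end
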
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDepthDataOutput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDepthDataOutput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDepthDataOutput.h	1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDepthDataOutput.h	2017-05-24 00:28:07.000000000 -0400
@@ -0,0 +1,160 @@
+/*
+    File:  AVCaptureDepthDataOutput.h
+ 
+    Framework:  AVFoundation
+ 
+    Copyright 2016-2017 Apple Inc. All rights reserved.
+*/
+
+#import <AVFoundation/AVMediaFormat.h>
+#import <AVFoundation/AVCaptureOutputBase.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark AVCaptureDepthDataOutput
+
+@class AVDepthData;
+
+@class AVCaptureDepthDataOutputInternal;
+@protocol AVCaptureDepthDataOutputDelegate;
+
+/*!
+ @class AVCaptureDepthDataOutput
+ @abstract
+    AVCaptureDepthDataOutput is a concrete subclass of AVCaptureOutput that can be used to process depth data in a streaming fashion.
+ 
+ @discussion
+    Instances of AVCaptureDepthDataOutput capture AVDepthData objects expressing disparity/depth. Applications can access the frames with the captureOutput:didOutputDepthData:fromConnection: delegate method.
+ 
+    AVCaptureDepthDataOutput always provides depth data in the format expressed by its source's -[AVCaptureDevice activeDepthDataFormat] property. If you wish to receive depth data in another format, you may choose from the -[AVCaptureDevice activeFormat]'s -[AVCaptureDeviceFormat supportedDepthDataFormats], and set it using -[AVCaptureDevice setActiveDepthDataFormat:].
+ */
+NS_CLASS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureDepthDataOutput : AVCaptureOutput
+{
+@private
+    AVCaptureDepthDataOutputInternal *_internal;
+}
+
+- (instancetype)init;
+
++ (instancetype)new;
+
+/*!
+ @method setDelegate:queue:
+ @abstract
+    Sets the receiver's delegate that receives captured depth data and the dispatch queue on which the delegate is called.
+ 
+ @param delegate
+    An object conforming to the AVCaptureDepthDataOutputDelegate protocol that receives depth data in a streaming fashion.
+ @param callbackQueue
+    A dispatch queue on which all delegate methods are called.
+ 
+ @discussion
+    The depth data output vends captured depth data to its delegate using the methods specified in the AVCaptureDepthDataOutputDelegate protocol. All delegate methods are called on the specified dispatch queue. If the callback queue is blocked when new depth data is captured, that depth data is automatically dropped at a time determined by the value of the alwaysDiscardsLateDepthData property. This allows clients to process existing depth data on the same queue without having to manage the potential memory usage increases that would otherwise occur when that processing is unable to keep up with the rate of incoming depth data.
+ 
+    Clients who need to minimize the chances of depth data being dropped should provide a dedicated queue and not share it with other data outputs. Processing of depth data may be deferred to another queue, but beware that the depth data pixel buffer maps may come from a finite buffer pool, which may be starved if your deferred processing fails to keep up.
+ 
+    A serial dispatch queue must be used to guarantee that depth data will be delivered in order. The callbackQueue parameter may not be NULL, except when setting the delegate to nil.
+ */
+- (void)setDelegate:(id<AVCaptureDepthDataOutputDelegate>)delegate callbackQueue:(dispatch_queue_t)callbackQueue;
+
+/*!
+ @property delegate
+ @abstract
+    The receiver's delegate.
+ 
+ @discussion
+    The value of this property is an object conforming to the AVCaptureDepthDataOutputDelegate protocol that receives depth data as it is captured. The delegate is set using the setDelegate:queue: method.
+ */
+@property(nonatomic, readonly) id<AVCaptureDepthDataOutputDelegate> delegate;
+
+/*!
+ @property delegateCallbackQueue
+ @abstract
+    The dispatch queue on which all delegate methods are called.
+ 
+ @discussion
+    The value of this property is a dispatch_queue_t. The queue is set using the setDelegate:queue: method.
+ */
+@property(nonatomic, readonly) dispatch_queue_t delegateCallbackQueue;
+
+/*!
+ @property alwaysDiscardsLateDepthData
+ @abstract
+    Specifies whether the receiver should always discard any depth data that is not processed before the next depth data is captured.
+ 
+ @discussion
+    When the value of this property is YES, the receiver will immediately discard depth data that are captured while the depthDataCallbackQueue is blocked. When the value of this property is NO, delegates will be allowed more time to process old depth data before new depth data are discarded, but application memory usage may increase as a result. The default value is YES.
+ */
+@property(nonatomic) BOOL alwaysDiscardsLateDepthData;
+
+/*!
+ @property filteringEnabled
+ @abstract
+    Specifies whether the depth data output should filter depth data to smooth out noise and fill invalid values.
+
+ @discussion
+    When the value of this property is YES, the receiver temporally filters the stream of AVDepthData objects to reduce noise, as well as fill invalid values. Invalid values (NaN) may be present in AVDepthData pixel buffer maps due to factors such as low light or lens occlusion. When filtering is enabled, the depth data output interpolates missing depth data values. Filtering should be disabled if you desire the raw depth data values. The default value is YES.
+ */
+@property(nonatomic, getter=isFilteringEnabled) BOOL filteringEnabled;
+
+@end
+
+
+/*!
+ @protocol AVCaptureDepthDataOutputDelegate
+ @abstract
+    Defines an interface for delegates of AVCaptureDepthDataOutput to receive captured depth data and be notified of late depth data that were dropped.
+ */
+NS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@protocol AVCaptureDepthDataOutputDelegate <NSObject>
+
+@optional
+
+/*!
+ @method depthDataOutput:didOutputDepthData:timestamp:connection:
+ @abstract
+    Called whenever an AVCaptureDepthDataOutput instance outputs a new depth data object.
+ 
+ @param output
+    The AVCaptureDepthDataOutput instance vending the depth data.
+ @param depthData
+    An AVDepthData object containing the depth/disparity data.
+ @param timestamp
+    A CMTime indicating when the depth data was captured.
+ @param connection
+    The AVCaptureConnection through which the depth data is received.
+ 
+ @discussion
+    The delegate receives this message whenever the depth data output captures and outputs a new depth data object. This method is called on the dispatch queue specified by the output's delegateCallbackQueue property. This method is called frequently. Care must be taken to process the depth data quickly in order to prevent dropped depth data.
+ 
+    Clients that need to reference the AVDepthData object outside of the scope of this method must retain it and then release it when they are finished with it (in an MRR app).
+ 
+    Note that to maintain optimal performance, AVDepthData pixel buffer maps may be backed by a finite memory pool. If AVDepthData objects are held onto for too long, capture inputs will no longer be able to copy new depth data into memory, resulting in droppage. If your application is causing depth data drops by holding on to provided depth data objects for too long, consider copying the pixel buffer map data into a new pixel buffer so that the AVDepthData backing memory can be reused more quickly.
+ */
+- (void)depthDataOutput:(AVCaptureDepthDataOutput *)output didOutputDepthData:(AVDepthData *)depthData timestamp:(CMTime)timestamp connection:(AVCaptureConnection *)connection;
+
+/*!
+ @method depthDataOutput:didDropDepthData:timestamp:connection:reason:
+ @abstract
+    Called once for each depth data that is discarded.
+ 
+ @param output
+    The AVCaptureDepthDataOutput instance that dropped the depth data.
+ @param depthData
+    A depth data object containing information about the dropped depth data, such as its native depth type. This depth data object produces nil CVPixelBuffers for depth / disparity as it has no backing depth map.
+ @param timestamp
+    A CMTime indicating when the depth data was captured.
+ @param connection
+    The AVCaptureConnection from which the dropped depth data object was received.
+ @param reason
+    The reason the depth data object was dropped.
+ 
+ @discussion
+    Delegates receive this message whenever a depth data object is dropped. This method is called once for each dropped depth data. The object passed to this delegate method will contain a shell of an AVDepthData that contains no actual depth data backing pixel buffer, as well as a presentation time stamp and a reason for the drop. This method will be called on the dispatch queue specified by the output's delegateCallbackQueue property. Because this method is called on the same dispatch queue that outputs depth data, it must be efficient to prevent further capture performance problems, such as additional drops.
+  */
+- (void)depthDataOutput:(AVCaptureDepthDataOutput *)output didDropDepthData:(AVDepthData *)depthData timestamp:(CMTime)timestamp connection:(AVCaptureConnection *)connection reason:(AVCaptureOutputDataDroppedReason)reason;
+
+@end
+
+NS_ASSUME_NONNULL_END
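
A sketch of adopting the delegate above; the controller class is hypothetical, and the output is assumed to be added to a session whose active device exposes a depth data format:

    @interface DepthController : NSObject <AVCaptureDepthDataOutputDelegate>
    @end

    @implementation DepthController

    - (AVCaptureDepthDataOutput *)makeDepthOutput
    {
        AVCaptureDepthDataOutput *output = [[AVCaptureDepthDataOutput alloc] init];
        output.filteringEnabled = YES;            // interpolate NaN holes, smooth noise
        output.alwaysDiscardsLateDepthData = YES; // drop rather than queue when busy
        // A serial queue guarantees in-order depth delivery, per the discussion above.
        dispatch_queue_t queue = dispatch_queue_create("depth.data", DISPATCH_QUEUE_SERIAL);
        [output setDelegate:self callbackQueue:queue];
        return output;
    }

    - (void)depthDataOutput:(AVCaptureDepthDataOutput *)output
         didOutputDepthData:(AVDepthData *)depthData
                  timestamp:(CMTime)timestamp
                 connection:(AVCaptureConnection *)connection
    {
        // Copy what you need out of depthData quickly; the backing pool is finite.
    }

    @end
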
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDevice.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDevice.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDevice.h	2016-05-03 18:21:22.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDevice.h	2017-05-24 00:28:27.000000000 -0400
@@ -1,79 +1,77 @@
 /*
-	File:  AVCaptureDevice.h
+    File:  AVCaptureDevice.h
  
-	Framework:  AVFoundation
+    Framework:  AVFoundation
  
-	Copyright 2010-2015 Apple Inc. All rights reserved.
+    Copyright 2010-2017 Apple Inc. All rights reserved.
 */
 
 #import <AVFoundation/AVBase.h>
+#import <AVFoundation/AVCaptureSessionPreset.h>
+#import <AVFoundation/AVMediaFormat.h>
 #import <Foundation/Foundation.h>
 #import <CoreMedia/CMFormatDescription.h>
 #import <CoreGraphics/CGBase.h>
 #import <CoreGraphics/CGGeometry.h>
 
+NS_ASSUME_NONNULL_BEGIN
+
 /*!
  @constant AVCaptureDeviceWasConnectedNotification
  @abstract
     Posted when a device becomes available on the system.
-
+ 
  @discussion
     The notification object is an AVCaptureDevice instance representing the device that became available.
-*/
-AVF_EXPORT NSString *const AVCaptureDeviceWasConnectedNotification NS_AVAILABLE(10_7, 4_0);
+ */
+AVF_EXPORT NSString *const AVCaptureDeviceWasConnectedNotification NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
 
 /*!
  @constant AVCaptureDeviceWasDisconnectedNotification
  @abstract
     Posted when a device becomes unavailable on the system.
-
+ 
  @discussion
     The notification object is an AVCaptureDevice instance representing the device that became unavailable.
-*/
-AVF_EXPORT NSString *const AVCaptureDeviceWasDisconnectedNotification NS_AVAILABLE(10_7, 4_0);
+ */
+AVF_EXPORT NSString *const AVCaptureDeviceWasDisconnectedNotification NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
 
 /*!
- @constant  AVCaptureDeviceSubjectAreaDidChangeNotification
+ @constant AVCaptureDeviceSubjectAreaDidChangeNotification
  @abstract
-	Posted when the instance of AVCaptureDevice has detected a substantial change
-	to the video subject area.
+    Posted when the instance of AVCaptureDevice has detected a substantial change to the video subject area.
  
  @discussion
-	Clients may observe the AVCaptureDeviceSubjectAreaDidChangeNotification to know
-	when an instance of AVCaptureDevice has detected a substantial change
-	to the video subject area.  This notification is only sent if you first set
-	subjectAreaChangeMonitoringEnabled to YES.
- */
-AVF_EXPORT NSString *const AVCaptureDeviceSubjectAreaDidChangeNotification NS_AVAILABLE_IOS(5_0);
+    Clients may observe the AVCaptureDeviceSubjectAreaDidChangeNotification to know when an instance of AVCaptureDevice has detected a substantial change to the video subject area. This notification is only sent if you first set subjectAreaChangeMonitoringEnabled to YES.
+  */
+AVF_EXPORT NSString *const AVCaptureDeviceSubjectAreaDidChangeNotification NS_AVAILABLE_IOS(5_0) __TVOS_PROHIBITED;
+
+
+#pragma mark - AVCaptureDevice
 
 @class AVCaptureDeviceFormat;
-#if TARGET_OS_MAC && ! (TARGET_OS_EMBEDDED || TARGET_OS_IPHONE || TARGET_OS_WIN32)
 @class AVCaptureDeviceInputSource;
-#endif
 @class AVCaptureDeviceInternal;
 
 /*!
  @class AVCaptureDevice
  @abstract
     An AVCaptureDevice represents a physical device that provides realtime input media data, such as video and audio.
-
+ 
  @discussion
-    Each instance of AVCaptureDevice corresponds to a device, such as a camera or microphone. Instances of
-    AVCaptureDevice cannot be created directly. An array of all currently available devices can also be obtained using
-    the devices class method. Devices can provide one or more streams of a given media type. Applications can search
-    for devices that provide media of a specific type using the devicesWithMediaType: and defaultDeviceWithMediaType:
-    class methods.
-
-    Instances of AVCaptureDevice can be used to provide media data to an AVCaptureSession by creating an
-    AVCaptureDeviceInput with the device and adding that to the capture session.
-*/
-NS_CLASS_AVAILABLE(10_7, 4_0)
+    Each instance of AVCaptureDevice corresponds to a device, such as a camera or microphone. Instances of AVCaptureDevice cannot be created directly. An array of all currently available devices can also be obtained using the AVCaptureDeviceDiscoverySession. Devices can provide one or more streams of a given media type. Applications can search for devices matching desired criteria by using AVCaptureDeviceDiscoverySession, or may obtain a reference to the default device matching desired criteria by using +[AVCaptureDevice defaultDeviceWithDeviceType:mediaType:position:].
+ 
+    Instances of AVCaptureDevice can be used to provide media data to an AVCaptureSession by creating an AVCaptureDeviceInput with the device and adding that to the capture session.
+ */
+NS_CLASS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED
 @interface AVCaptureDevice : NSObject
 {
 @private
     AVCaptureDeviceInternal *_internal;
 }
 
+AV_INIT_UNAVAILABLE
+
 /*!
  @method devices
  @abstract
@@ -81,90 +79,75 @@
  
  @result
     An NSArray of AVCaptureDevice instances for each available device.
-
+ 
  @discussion
-    This method returns an array of AVCaptureDevice instances for input devices currently connected and available for
-    capture. The returned array contains all devices that are available at the time the method is called. Applications
-    should observe AVCaptureDeviceWasConnectedNotification and AVCaptureDeviceWasDisconnectedNotification to be notified
-    when the list of available devices has changed.
-*/
-+ (NSArray *)devices;
+    This method returns an array of AVCaptureDevice instances for input devices currently connected and available for capture. The returned array contains all devices that are available at the time the method is called. Applications should observe AVCaptureDeviceWasConnectedNotification and AVCaptureDeviceWasDisconnectedNotification to be notified when the list of available devices has changed.
+ */
++ (NSArray<AVCaptureDevice *> *)devices NS_DEPRECATED(10_7, NA, 4_0, 10_0, "Use AVCaptureDeviceDiscoverySession instead.");
 
 /*!
  @method devicesWithMediaType:
  @abstract
     Returns an array of devices currently available for use as sources of media with the given media type.
-
+ 
  @param mediaType
     The media type, such as AVMediaTypeVideo, AVMediaTypeAudio, or AVMediaTypeMuxed, supported by each returned device.
  @result
     An NSArray of AVCaptureDevice instances for each available device.
-
+ 
  @discussion
-    This method returns an array of AVCaptureDevice instances for input devices currently connected and available for
-    capture that provide media of the given type. Media type constants are defined in AVMediaFormat.h. The returned
-    array contains all devices that are available at the time the method is called. Applications should observe
-    AVCaptureDeviceWasConnectedNotification and AVCaptureDeviceWasDisconnectedNotification to be notified when the list
-    of available devices has changed.
-*/
-+ (NSArray *)devicesWithMediaType:(NSString *)mediaType;
+    This method returns an array of AVCaptureDevice instances for input devices currently connected and available for capture that provide media of the given type. Media type constants are defined in AVMediaFormat.h. The returned array contains all devices that are available at the time the method is called. Applications should observe AVCaptureDeviceWasConnectedNotification and AVCaptureDeviceWasDisconnectedNotification to be notified when the list of available devices has changed.
+ */
++ (NSArray<AVCaptureDevice *> *)devicesWithMediaType:(AVMediaType)mediaType NS_DEPRECATED(10_7, NA, 4_0, 10_0, "Use AVCaptureDeviceDiscoverySession instead.");
 
 /*!
  @method defaultDeviceWithMediaType:
  @abstract
     Returns an AVCaptureDevice instance for the default device of the given media type.
-
+ 
  @param mediaType
     The media type, such as AVMediaTypeVideo, AVMediaTypeAudio, or AVMediaTypeMuxed, supported by the returned device.
  @result
     The default device with the given media type, or nil if no device with that media type exists.
-
+ 
  @discussion
-    This method returns the default device of the given media type currently available on the system. For example, for
-    AVMediaTypeVideo, this method will return the built in camera that is primarily used for capture and recording.
-    Media type constants are defined in AVMediaFormat.h.
-*/
-+ (AVCaptureDevice *)defaultDeviceWithMediaType:(NSString *)mediaType;
+    This method returns the default device of the given media type currently available on the system. For example, for AVMediaTypeVideo, this method will return the built in camera that is primarily used for capture and recording. Media type constants are defined in AVMediaFormat.h.
+ */
++ (nullable AVCaptureDevice *)defaultDeviceWithMediaType:(AVMediaType)mediaType;
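
A minimal sketch of the classic lookup path, which this header now marks nullable; `AVMediaTypeVideo` and the method are from the declarations above, and the nil check is the new obligation:

```objc
#import <AVFoundation/AVFoundation.h>

// defaultDeviceWithMediaType: may now return nil, so check before use.
AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (camera) {
    NSLog(@"Default camera: %@", camera.localizedName);
}
```
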
 
 /*!
  @method deviceWithUniqueID:
  @abstract
     Returns an AVCaptureDevice instance with the given unique ID.
-
+ 
  @param deviceUniqueID
     The unique ID of the device instance to be returned.
  @result
     An AVCaptureDevice instance with the given unique ID, or nil if no device with that unique ID is available.
-
+ 
  @discussion
-    Every available capture device has a unique ID that persists on one system across device connections and
-    disconnections, application restarts, and reboots of the system itself. This method can be used to recall or track
-    the status of a specific device whose unique ID has previously been saved.
-*/
-+ (AVCaptureDevice *)deviceWithUniqueID:(NSString *)deviceUniqueID;
+    Every available capture device has a unique ID that persists on one system across device connections and disconnections, application restarts, and reboots of the system itself. This method can be used to recall or track the status of a specific device whose unique ID has previously been saved.
+ */
++ (nullable AVCaptureDevice *)deviceWithUniqueID:(NSString *)deviceUniqueID;
 
 /*!
  @property uniqueID
  @abstract
     An ID unique to the model of device corresponding to the receiver.
-
+ 
  @discussion
-    Every available capture device has a unique ID that persists on one system across device connections and
-    disconnections, application restarts, and reboots of the system itself. Applications can store the value returned by
-    this property to recall or track the status of a specific device in the future.
-*/
+    Every available capture device has a unique ID that persists on one system across device connections and disconnections, application restarts, and reboots of the system itself. Applications can store the value returned by this property to recall or track the status of a specific device in the future.
+ */
 @property(nonatomic, readonly) NSString *uniqueID;
 
 /*!
  @property modelID
  @abstract
     The model ID of the receiver.
-
+ 
  @discussion
-    The value of this property is an identifier unique to all devices of the same model. The value is persistent across
-    device connections and disconnections, and across different systems. For example, the model ID of the camera built
-    in to two identical iPhone models will be the same even though they are different physical devices.
-*/
+    The value of this property is an identifier unique to all devices of the same model. The value is persistent across device connections and disconnections, and across different systems. For example, the model ID of the camera built in to two identical iPhone models will be the same even though they are different physical devices.
+ */
 @property(nonatomic, readonly) NSString *modelID;
 
 /*!
@@ -174,40 +157,38 @@
  
  @discussion
     This property can be used for displaying the name of a capture device in a user interface.
-*/
+ */
 @property(nonatomic, readonly) NSString *localizedName;
 
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#if TARGET_OS_OSX
 
 /*!
  @property manufacturer
  @abstract
     The human-readable manufacturer name for the receiver.
-
+ 
  @discussion
-    This property can be used to identify capture devices from a particular manufacturer.  All Apple devices return "Apple Inc.".
-    Devices from third party manufacturers may return an empty string.
-*/
-@property(nonatomic, readonly) NSString *manufacturer NS_AVAILABLE(10_9, NA);
+    This property can be used to identify capture devices from a particular manufacturer. All Apple devices return "Apple Inc.". Devices from third party manufacturers may return an empty string.
+ */
+@property(nonatomic, readonly) NSString *manufacturer NS_AVAILABLE_MAC(10_9);
 
 /*!
  @property transportType
  @abstract
     The transport type of the receiver (e.g. USB, PCI, etc).
-
+ 
  @discussion
-    This property can be used to discover the transport type of a capture device.  Transport types
-    are defined in <IOKit/audio/IOAudioTypes.h> as kIOAudioDeviceTransportType*.
-*/
-@property(nonatomic, readonly) int32_t transportType NS_AVAILABLE(10_7, NA);
+    This property can be used to discover the transport type of a capture device. Transport types are defined in <IOKit/audio/IOAudioTypes.h> as kIOAudioDeviceTransportType*.
+ */
+@property(nonatomic, readonly) int32_t transportType NS_AVAILABLE_MAC(10_7);
 
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#endif // TARGET_OS_OSX
 
 /*!
  @method hasMediaType:
  @abstract
     Returns whether the receiver provides media with the given media type.
-
+ 
  @param mediaType
     A media type, such as AVMediaTypeVideo, AVMediaTypeAudio, or AVMediaTypeMuxed.
  @result
@@ -215,220 +196,181 @@
  
  @discussion
     Media type constants are defined in AVMediaFormat.h.
-*/
-- (BOOL)hasMediaType:(NSString *)mediaType;
-
+ */
+- (BOOL)hasMediaType:(AVMediaType)mediaType;
 
 /*!
  @method lockForConfiguration:
  @abstract
     Requests exclusive access to configure device hardware properties.
-
+ 
  @param outError
     On return, if the device could not be locked, points to an NSError describing why the failure occurred.
  @result
     A BOOL indicating whether the device was successfully locked for configuration.
-
+ 
  @discussion
-    In order to set hardware properties on an AVCaptureDevice, such as focusMode and exposureMode, clients must first
-    acquire a lock on the device.  Clients should only hold the device lock if they require settable device properties
-    to remain unchanged.  Holding the device lock unnecessarily may degrade capture quality in other applications
-    sharing the device.
-*/
-- (BOOL)lockForConfiguration:(NSError **)outError;
+    In order to set hardware properties on an AVCaptureDevice, such as focusMode and exposureMode, clients must first acquire a lock on the device. Clients should only hold the device lock if they require settable device properties to remain unchanged. Holding the device lock unnecessarily may degrade capture quality in other applications sharing the device.
+ */
+- (BOOL)lockForConfiguration:(NSError * _Nullable * _Nullable)outError;
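
The lock/configure/unlock contract described above, as a short sketch; `device` is an assumed AVCaptureDevice obtained elsewhere:

```objc
NSError *error = nil;
if ([device lockForConfiguration:&error]) {
    // Only change settable properties while holding the lock.
    if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
        device.focusMode = AVCaptureFocusModeContinuousAutoFocus;
    }
    // Release the lock as soon as the properties no longer need to be pinned.
    [device unlockForConfiguration];
} else {
    NSLog(@"Could not lock device for configuration: %@", error);
}
```
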
 
 /*!
  @method unlockForConfiguration
  @abstract
     Release exclusive control over device hardware properties.
-
+ 
  @discussion
-    This method should be called to match an invocation of lockForConfiguration: when an application no longer needs to
-    keep device hardware properties from changing automatically.
-*/
+    This method should be called to match an invocation of lockForConfiguration: when an application no longer needs to keep device hardware properties from changing automatically.
+ */
 - (void)unlockForConfiguration;
 
 /*!
  @method supportsAVCaptureSessionPreset:
  @abstract
     Returns whether the receiver can be used in an AVCaptureSession configured with the given preset.
-
+ 
  @param preset
     An AVCaptureSession preset.
  @result
     YES if the receiver can be used with the given preset, NO otherwise.
-
+ 
  @discussion
-    An AVCaptureSession instance can be associated with a preset that configures its inputs and outputs to fulfill common
-    use cases. This method can be used to determine if the receiver can be used in a capture session with the given
-    preset. Presets are defined in AVCaptureSession.h.
-*/
-- (BOOL)supportsAVCaptureSessionPreset:(NSString *)preset;
+    An AVCaptureSession instance can be associated with a preset that configures its inputs and outputs to fulfill common use cases. This method can be used to determine if the receiver can be used in a capture session with the given preset. Presets are defined in AVCaptureSession.h.
+ */
+- (BOOL)supportsAVCaptureSessionPreset:(AVCaptureSessionPreset)preset;
 
 /*!
  @property connected
  @abstract
     Indicates whether the device is connected and available to the system.
-
+ 
  @discussion
-    The value of this property is a BOOL indicating whether the device represented by the receiver is connected and
-    available for use as a capture device. Clients can key value observe the value of this property to be notified when
-    a device is no longer available. When the value of this property becomes NO for a given instance, it will not become
-    YES again. If the same physical device again becomes available to the system, it will be represented using a new
-    instance of AVCaptureDevice.
-*/
+    The value of this property is a BOOL indicating whether the device represented by the receiver is connected and available for use as a capture device. Clients can key value observe the value of this property to be notified when a device is no longer available. When the value of this property becomes NO for a given instance, it will not become YES again. If the same physical device again becomes available to the system, it will be represented using a new instance of AVCaptureDevice.
+ */
 @property(nonatomic, readonly, getter=isConnected) BOOL connected;
 
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#if TARGET_OS_OSX
 
 /*!
  @property inUseByAnotherApplication
  @abstract
     Indicates whether the device is in use by another application.
-
+ 
  @discussion
-    The value of this property is a BOOL indicating whether the device represented by the receiver is
-    in use by another application. Clients can key value observe the value of this property to be notified when
-    another app starts or stops using this device.
-*/
-@property(nonatomic, readonly, getter=isInUseByAnotherApplication) BOOL inUseByAnotherApplication NS_AVAILABLE(10_7, NA);
+    The value of this property is a BOOL indicating whether the device represented by the receiver is in use by another application. Clients can key value observe the value of this property to be notified when another app starts or stops using this device.
+ */
+@property(nonatomic, readonly, getter=isInUseByAnotherApplication) BOOL inUseByAnotherApplication NS_AVAILABLE_MAC(10_7);
 
 /*!
  @property suspended
  @abstract
     Indicates whether the device is suspended.
-
+ 
  @discussion
-    The value of this property is a BOOL indicating whether the device represented by the receiver is
-    currently suspended.  Some devices disallow data capture due to a feature on the device.
-    For example, isSuspended returns YES for the external iSight when its privacy iris is closed, or 
-    for the internal iSight on a notebook when the notebook's display is closed.  Clients can key value 
-    observe the value of this property to be notified when the device becomes suspended or unsuspended.
-*/
-@property(nonatomic, readonly, getter=isSuspended) BOOL suspended NS_AVAILABLE(10_7, NA);
+    The value of this property is a BOOL indicating whether the device represented by the receiver is currently suspended. Some devices disallow data capture due to a feature on the device. For example, isSuspended returns YES for the external iSight when its privacy iris is closed, or for the internal iSight on a notebook when the notebook's display is closed. Clients can key value observe the value of this property to be notified when the device becomes suspended or unsuspended.
+ */
+@property(nonatomic, readonly, getter=isSuspended) BOOL suspended NS_AVAILABLE_MAC(10_7);
 
 /*!
  @property linkedDevices
  @abstract
     An array of AVCaptureDevice objects physically linked to the receiver.
-
+ 
  @discussion
-    The value of this property is an array of AVCaptureDevice objects that are a part of the same physical 
-    device as the receiver.  For example, for the external iSight camera, linkedDevices returns an array 
-    containing an AVCaptureDevice for the external iSight microphone.
-*/
-@property(nonatomic, readonly) NSArray *linkedDevices NS_AVAILABLE(10_7, NA);
+    The value of this property is an array of AVCaptureDevice objects that are a part of the same physical device as the receiver. For example, for the external iSight camera, linkedDevices returns an array containing an AVCaptureDevice for the external iSight microphone.
+ */
+@property(nonatomic, readonly) NSArray<AVCaptureDevice *> *linkedDevices NS_AVAILABLE_MAC(10_7);
 
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#endif // TARGET_OS_OSX
 
 /*!
  @property formats
  @abstract
     An array of AVCaptureDeviceFormat objects supported by the receiver.
-
+ 
  @discussion
-    This property can be used to enumerate the formats natively supported by the receiver.  The
-    capture device's activeFormat property may be set to one of the formats in this array.  Clients 
-    can observe automatic changes to the receiver's formats by key value observing this property.
-*/
-@property(nonatomic, readonly) NSArray *formats NS_AVAILABLE(10_7, 7_0);
+    This property can be used to enumerate the formats natively supported by the receiver. The capture device's activeFormat property may be set to one of the formats in this array. Clients can observe automatic changes to the receiver's formats by key value observing this property.
+ */
+@property(nonatomic, readonly) NSArray<AVCaptureDeviceFormat *> *formats NS_AVAILABLE(10_7, 7_0);
 
 /*!
  @property activeFormat
  @abstract
     The currently active format of the receiver.
-
+ 
  @discussion
     This property can be used to get or set the currently active device format.
-    -setActiveFormat: throws an NSInvalidArgumentException if set to a format not present in the formats
-    array.  -setActiveFormat: throws an NSGenericException if called without first obtaining exclusive
-    access to the receiver using lockForConfiguration:.  Clients can observe automatic changes to the 
-    receiver's activeFormat by key value observing this property.
- 
-    On iOS, use of AVCaptureDevice's setActiveFormat: and AVCaptureSession's setSessionPreset: are mutually
-    exclusive.  If you set a capture device's active format, the session to which it is attached changes its
-    preset to AVCaptureSessionPresetInputPriority.  Likewise if you set the AVCaptureSession's sessionPreset
-    property, the session assumes control of its input devices, and configures their activeFormat appropriately.
-    Note that audio devices do not expose any user-configurable formats on iOS.  To configure audio input on
-    iOS, you should use the AVAudioSession APIs instead (see AVAudioSession.h).
-    
-    The activeFormat, activeVideoMinFrameDuration, and activeVideoMaxFrameDuration properties may be set simultaneously
-    by using AVCaptureSession's begin/commitConfiguration methods:
+ 
+    -setActiveFormat: throws an NSInvalidArgumentException if set to a format not present in the formats array.
+ 
+    -setActiveFormat: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
+ 
+    Clients can observe automatic changes to the receiver's activeFormat by key value observing this property.
+ 
+    On iOS, use of AVCaptureDevice's setActiveFormat: and AVCaptureSession's setSessionPreset: are mutually exclusive. If you set a capture device's active format, the session to which it is attached changes its preset to AVCaptureSessionPresetInputPriority. Likewise if you set the AVCaptureSession's sessionPreset property, the session assumes control of its input devices, and configures their activeFormat appropriately. Note that audio devices do not expose any user-configurable formats on iOS. To configure audio input on iOS, you should use the AVAudioSession APIs instead (see AVAudioSession.h).
+ 
+    The activeFormat, activeVideoMinFrameDuration, and activeVideoMaxFrameDuration properties may be set simultaneously by using AVCaptureSession's begin/commitConfiguration methods:
  
     [session beginConfiguration]; // the session to which the receiver's AVCaptureDeviceInput is added.
     if ( [device lockForConfiguration:&error] ) {
         [device setActiveFormat:newFormat];
         [device setActiveVideoMinFrameDuration:newMinDuration];
         [device setActiveVideoMaxFrameDuration:newMaxDuration];
-	    [device unlockForConfiguration];
+        [device unlockForConfiguration];
     }
     [session commitConfiguration]; // The new format and frame rates are applied together in commitConfiguration
  
-	Note that when configuring a session to use an active format intended for high resolution still photography and applying one or more of the
-	following operations to an AVCaptureVideoDataOutput, the system may not meet the target framerate: zoom, orientation changes, format conversion.
-*/
+    Note that when configuring a session to use an active format intended for high resolution still photography and applying one or more of the following operations to an AVCaptureVideoDataOutput, the system may not meet the target framerate: zoom, orientation changes, format conversion.
+ */
 @property(nonatomic, retain) AVCaptureDeviceFormat *activeFormat NS_AVAILABLE(10_7, 7_0);
 
 /*!
  @property activeVideoMinFrameDuration
  @abstract
     A property indicating the receiver's current active minimum frame duration (the reciprocal of its max frame rate).
-
+ 
  @discussion
-    An AVCaptureDevice's activeVideoMinFrameDuration property is the reciprocal of its active
-    maximum frame rate.  To limit the max frame rate of the capture device, clients may
-    set this property to a value supported by the receiver's activeFormat (see AVCaptureDeviceFormat's 
-    videoSupportedFrameRateRanges property).  Clients may set this property's value to kCMTimeInvalid to
-    return activeVideoMinFrameDuration to its default value for the given activeFormat.
-    -setActiveVideoMinFrameDuration: throws an NSInvalidArgumentException if set to an unsupported value.  
-    -setActiveVideoMinFrameDuration: throws an NSGenericException if called without first obtaining exclusive 
-    access to the receiver using lockForConfiguration:.  Clients can observe automatic changes to the receiver's 
-    activeVideoMinFrameDuration by key value observing this property.
+    An AVCaptureDevice's activeVideoMinFrameDuration property is the reciprocal of its active maximum frame rate. To limit the max frame rate of the capture device, clients may set this property to a value supported by the receiver's activeFormat (see AVCaptureDeviceFormat's videoSupportedFrameRateRanges property). Clients may set this property's value to kCMTimeInvalid to return activeVideoMinFrameDuration to its default value for the given activeFormat.
+ 
+    -setActiveVideoMinFrameDuration: throws an NSInvalidArgumentException if set to an unsupported value.
+ 
+    -setActiveVideoMinFrameDuration: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
+ 
+    Clients can observe automatic changes to the receiver's activeVideoMinFrameDuration by key value observing this property.
  
     On iOS, the receiver's activeVideoMinFrameDuration resets to its default value under the following conditions:
-	    - The receiver's activeFormat changes
+        - The receiver's activeFormat changes
         - The receiver's AVCaptureDeviceInput's session's sessionPreset changes
         - The receiver's AVCaptureDeviceInput is added to a session
  
-    When exposureMode is AVCaptureExposureModeCustom, setting the activeVideoMinFrameDuration affects max frame
-    rate, but not exposureDuration. You may use setExposureModeCustomWithDuration:ISO:completionHandler:
-    to set a shorter exposureDuration than your activeVideoMinFrameDuration, if desired.
-*/
+    When exposureMode is AVCaptureExposureModeCustom, setting the activeVideoMinFrameDuration affects max frame rate, but not exposureDuration. You may use setExposureModeCustomWithDuration:ISO:completionHandler: to set a shorter exposureDuration than your activeVideoMinFrameDuration, if desired.
+ */
 @property(nonatomic) CMTime activeVideoMinFrameDuration NS_AVAILABLE(10_7, 7_0);
 
 /*!
  @property activeVideoMaxFrameDuration
  @abstract
     A property indicating the receiver's current active maximum frame duration (the reciprocal of its min frame rate).
-
+ 
  @discussion
-    An AVCaptureDevice's activeVideoMaxFrameDuration property is the reciprocal of its active
-    minimum frame rate.  To limit the min frame rate of the capture device, clients may
-    set this property to a value supported by the receiver's activeFormat (see AVCaptureDeviceFormat's 
-    videoSupportedFrameRateRanges property).  Clients may set this property's value to kCMTimeInvalid to
-    return activeVideoMaxFrameDuration to its default value for the given activeFormat.
-    -setActiveVideoMaxFrameDuration: throws an NSInvalidArgumentException if set to an unsupported value.  
-    -setActiveVideoMaxFrameDuration: throws an NSGenericException if called without first obtaining exclusive 
-    access to the receiver using lockForConfiguration:.  Clients can observe automatic changes to the receiver's 
-    activeVideoMaxFrameDuration by key value observing this property.
+    An AVCaptureDevice's activeVideoMaxFrameDuration property is the reciprocal of its active minimum frame rate. To limit the min frame rate of the capture device, clients may set this property to a value supported by the receiver's activeFormat (see AVCaptureDeviceFormat's videoSupportedFrameRateRanges property). Clients may set this property's value to kCMTimeInvalid to return activeVideoMaxFrameDuration to its default value for the given activeFormat.
+ 
+    -setActiveVideoMaxFrameDuration: throws an NSInvalidArgumentException if set to an unsupported value.
+ 
+    -setActiveVideoMaxFrameDuration: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
+ 
+    Clients can observe automatic changes to the receiver's activeVideoMaxFrameDuration by key value observing this property.
  
     On iOS, the receiver's activeVideoMaxFrameDuration resets to its default value under the following conditions:
-	    - The receiver's activeFormat changes
+        - The receiver's activeFormat changes
         - The receiver's AVCaptureDeviceInput's session's sessionPreset changes
         - The receiver's AVCaptureDeviceInput is added to a session
  
-    When exposureMode is AVCaptureExposureModeCustom, frame rate and exposure duration are interrelated.
-    If you call setExposureModeCustomWithDuration:ISO:completionHandler: with an exposureDuration longer 
-    than the current activeVideoMaxFrameDuration, the activeVideoMaxFrameDuration will be lengthened to
-    accommodate the longer exposure time.  Setting a shorter exposure duration does not automatically
-    change the activeVideoMinFrameDuration or activeVideoMaxFrameDuration. To explicitly increase the
-    frame rate in custom exposure mode, you must set the activeVideoMaxFrameDuration to a shorter value.
-    If your new max frame duration is shorter than the current exposureDuration, the exposureDuration will
-    shorten as well to accommodate the new frame rate.
-*/
+    When exposureMode is AVCaptureExposureModeCustom, frame rate and exposure duration are interrelated. If you call setExposureModeCustomWithDuration:ISO:completionHandler: with an exposureDuration longer than the current activeVideoMaxFrameDuration, the activeVideoMaxFrameDuration will be lengthened to accommodate the longer exposure time. Setting a shorter exposure duration does not automatically change the activeVideoMinFrameDuration or activeVideoMaxFrameDuration. To explicitly increase the frame rate in custom exposure mode, you must set the activeVideoMaxFrameDuration to a shorter value. If your new max frame duration is shorter than the current exposureDuration, the exposureDuration will shorten as well to accommodate the new frame rate.
+ */
 @property(nonatomic) CMTime activeVideoMaxFrameDuration NS_AVAILABLE(10_9, 7_0);
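
A sketch of capping capture at 30 fps with the two duration properties, assuming `device` is already locked for configuration and its activeFormat supports a 30 fps range (an unsupported value throws, per the discussion above):

```objc
// A frame duration of 1/30 s; min duration bounds the max frame rate,
// max duration bounds the min frame rate.
device.activeVideoMinFrameDuration = CMTimeMake(1, 30);
device.activeVideoMaxFrameDuration = CMTimeMake(1, 30);
```
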
 
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#if TARGET_OS_OSX
 
 /*!
  @property inputSources
@@ -436,28 +378,21 @@
     An array of AVCaptureDeviceInputSource objects supported by the receiver.
  
  @discussion
-    Some devices can capture data from one of multiple data sources (different input jacks on the same 
-    audio device, for example).  For devices with multiple possible data sources, inputSources can be 
-    used to enumerate the possible choices. Clients can observe automatic changes to the receiver's 
-    inputSources by key value observing this property.
-*/
-@property(nonatomic, readonly) NSArray *inputSources NS_AVAILABLE(10_7, NA);
+    Some devices can capture data from one of multiple data sources (different input jacks on the same audio device, for example). For devices with multiple possible data sources, inputSources can be used to enumerate the possible choices. Clients can observe automatic changes to the receiver's inputSources by key value observing this property.
+ */
+@property(nonatomic, readonly) NSArray<AVCaptureDeviceInputSource *> *inputSources NS_AVAILABLE_MAC(10_7);
 
 /*!
  @property activeInputSource
  @abstract
     The currently active input source of the receiver.
-
+ 
  @discussion
-    This property can be used to get or set the currently active device input source.
-    -setActiveInputSource: throws an NSInvalidArgumentException if set to a value not present in the
-    inputSources array.  -setActiveInputSource: throws an NSGenericException if called without first 
-    obtaining exclusive access to the receiver using lockForConfiguration:.  Clients can observe automatic  
-    changes to the receiver's activeInputSource by key value observing this property.
-*/
-@property(nonatomic, retain) AVCaptureDeviceInputSource *activeInputSource NS_AVAILABLE(10_7, NA);
+    This property can be used to get or set the currently active device input source. -setActiveInputSource: throws an NSInvalidArgumentException if set to a value not present in the inputSources array. -setActiveInputSource: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's activeInputSource by key value observing this property.
+ */
+@property(nonatomic, retain, nullable) AVCaptureDeviceInputSource *activeInputSource NS_AVAILABLE_MAC(10_7);

 
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#endif // TARGET_OS_OSX
 
 @end
 
@@ -466,19 +401,20 @@
  @enum AVCaptureDevicePosition
  @abstract
     Constants indicating the physical position of an AVCaptureDevice's hardware on the system.
-
+ 
  @constant AVCaptureDevicePositionUnspecified
     Indicates that the device's position relative to the system hardware is unspecified.
  @constant AVCaptureDevicePositionBack
     Indicates that the device is physically located on the back of the system hardware.
  @constant AVCaptureDevicePositionFront
     Indicates that the device is physically located on the front of the system hardware.
-*/
+ */
 typedef NS_ENUM(NSInteger, AVCaptureDevicePosition) {
-	AVCaptureDevicePositionUnspecified         = 0,
-	AVCaptureDevicePositionBack                = 1,
-	AVCaptureDevicePositionFront               = 2
-} NS_AVAILABLE(10_7, 4_0);
+    AVCaptureDevicePositionUnspecified = 0,
+    AVCaptureDevicePositionBack        = 1,
+    AVCaptureDevicePositionFront       = 2,
+} NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
+
 
 @interface AVCaptureDevice (AVCaptureDevicePosition)
 
@@ -486,32 +422,117 @@
  @property position
  @abstract
     Indicates the physical position of an AVCaptureDevice's hardware on the system.
-
+ 
  @discussion
-    The value of this property is an AVCaptureDevicePosition indicating where the receiver's device is physically
-    located on the system hardware.
-*/
+    The value of this property is an AVCaptureDevicePosition indicating where the receiver's device is physically located on the system hardware.
+ */
 @property(nonatomic, readonly) AVCaptureDevicePosition position;
 
 @end
 
+
+/*!
+ @group AVCaptureDeviceType string constants
+ 
+ @discussion
+    The AVCaptureDeviceType string constants are intended to be used in combination with the AVCaptureDeviceDiscoverySession class to obtain a list of devices matching certain search criteria.
+ */
+typedef NSString *AVCaptureDeviceType NS_STRING_ENUM NS_AVAILABLE_IOS(10_0) __TVOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureDeviceTypeBuiltInMicrophone
+    A built-in microphone.
+ */
+AVF_EXPORT AVCaptureDeviceType const AVCaptureDeviceTypeBuiltInMicrophone NS_AVAILABLE_IOS(10_0) __TVOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureDeviceTypeBuiltInWideAngleCamera
+    A built-in wide angle camera device. These devices are suitable for general purpose use.
+ */
+AVF_EXPORT AVCaptureDeviceType const AVCaptureDeviceTypeBuiltInWideAngleCamera NS_AVAILABLE_IOS(10_0) __TVOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureDeviceTypeBuiltInTelephotoCamera
+    A built-in camera device with a longer focal length than a wide angle camera. Note that devices of this type may only be discovered using an AVCaptureDeviceDiscoverySession.
+ */
+AVF_EXPORT AVCaptureDeviceType const AVCaptureDeviceTypeBuiltInTelephotoCamera NS_AVAILABLE_IOS(10_0) __TVOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureDeviceTypeBuiltInDualCamera
+    A device that consists of two fixed focal length cameras, one wide and one telephoto. Note that devices of this type may only be discovered using an AVCaptureDeviceDiscoverySession.
+ 
+    A device of this device type supports the following new features:
+    - Auto switching from one camera to the other when zoom factor, light level, and focus position allow this.
+    - Higher quality zoom for still captures by fusing images from both cameras.
+ 
+    A device of this device type does not support the following features:
+    - AVCaptureExposureModeCustom and manual exposure bracketing.
+    - Locking focus with a lens position other than AVCaptureLensPositionCurrent.
+    - Locking auto white balance with device white balance gains other than AVCaptureWhiteBalanceGainsCurrent.
+ 
+    Even when locked, exposure duration, ISO, aperture, white balance gains, or lens position may change when the device switches from one camera to the other. The overall exposure, white balance, and focus position however should be consistent.
+ */
+AVF_EXPORT AVCaptureDeviceType const AVCaptureDeviceTypeBuiltInDualCamera NS_AVAILABLE_IOS(10_2) __TVOS_PROHIBITED;
+
+/*!
+ @constant AVCaptureDeviceTypeBuiltInDuoCamera
+    A deprecated synonym for AVCaptureDeviceTypeBuiltInDualCamera. Please use AVCaptureDeviceTypeBuiltInDualCamera instead.
+ */
+AVF_EXPORT AVCaptureDeviceType const AVCaptureDeviceTypeBuiltInDuoCamera NS_DEPRECATED_IOS(10_0, 10_2, "Use AVCaptureDeviceTypeBuiltInDualCamera instead") __TVOS_PROHIBITED;
+
+
+@interface AVCaptureDevice (AVCaptureDeviceType)
+
+/*!
+ @property deviceType
+ @abstract
+    The type of the capture device.
+ 
+ @discussion
+    A capture device's type never changes.
+ */
+@property(nonatomic, readonly) AVCaptureDeviceType deviceType NS_AVAILABLE_IOS(10_0) __TVOS_PROHIBITED;
+
+/*!
+ @method defaultDeviceWithDeviceType:
+ @abstract
+    Returns an AVCaptureDevice instance for the default device of the given device type, media type, and position.
+ 
+ @param deviceType
+    The device type supported by the returned device. It must be a valid AVCaptureDeviceType.
+ @param mediaType
+    The media type, such as AVMediaTypeVideo, AVMediaTypeAudio, or AVMediaTypeMuxed, supported by the returned device. Pass nil to consider devices with any media type.
+ @param position
+    The position supported by the returned device. Pass AVCaptureDevicePositionUnspecified to consider devices with any position.
+ @result
+    The default device with the given device type, media type, and position, or nil if no such device exists.
+ 
+ @discussion
+    This method returns the default device of the given combination of device type, media type, and position currently available on the system.
+ */
++ (nullable AVCaptureDevice *)defaultDeviceWithDeviceType:(AVCaptureDeviceType)deviceType mediaType:(nullable AVMediaType)mediaType position:(AVCaptureDevicePosition)position NS_AVAILABLE_IOS(10_0) __TVOS_PROHIBITED;
+
+@end
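
Putting the new iOS 10 lookup APIs together, a sketch; the discovery-session class method and the typed default below are the replacements the deprecation messages in this diff point at:

```objc
// Enumerate all built-in wide angle cameras, regardless of position.
NSArray<AVCaptureDevice *> *cameras =
    [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                           mediaType:AVMediaTypeVideo
                                                            position:AVCaptureDevicePositionUnspecified].devices;

// Or ask directly for the default back wide angle camera (may be nil).
AVCaptureDevice *backCamera =
    [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera
                                       mediaType:AVMediaTypeVideo
                                        position:AVCaptureDevicePositionBack];
```
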
+
+
 /*!
  @enum AVCaptureFlashMode
  @abstract
     Constants indicating the mode of the flash on the receiver's device, if it has one.
-
+ 
  @constant AVCaptureFlashModeOff
     Indicates that the flash should always be off.
  @constant AVCaptureFlashModeOn
     Indicates that the flash should always be on.
  @constant AVCaptureFlashModeAuto
     Indicates that the flash should be used automatically depending on ambient light conditions.
-*/
+ */
 typedef NS_ENUM(NSInteger, AVCaptureFlashMode) {
-	AVCaptureFlashModeOff  = 0,
-	AVCaptureFlashModeOn   = 1,
-	AVCaptureFlashModeAuto = 2
-} NS_AVAILABLE(10_7, 4_0);
+    AVCaptureFlashModeOff  = 0,
+    AVCaptureFlashModeOn   = 1,
+    AVCaptureFlashModeAuto = 2,
+} NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
+
 
 @interface AVCaptureDevice (AVCaptureDeviceFlash)
 
@@ -519,11 +540,10 @@
  @property hasFlash
  @abstract
     Indicates whether the receiver has a flash.
-
+ 
  @discussion
-    The value of this property is a BOOL indicating whether the receiver has a flash. The receiver's flashMode property
-    can only be set when this property returns YES.
-*/
+    The value of this property is a BOOL indicating whether the receiver has a flash. The receiver's flashMode property can only be set when this property returns YES.
+ */
 @property(nonatomic, readonly) BOOL hasFlash;
 
 /*!
@@ -532,10 +552,8 @@
     Indicates whether the receiver's flash is currently available for use.
  
  @discussion
-    The value of this property is a BOOL indicating whether the receiver's flash is 
-    currently available. The flash may become unavailable if, for example, the device
-    overheats and needs to cool off. This property is key-value observable.
-*/
+    The value of this property is a BOOL indicating whether the receiver's flash is currently available. The flash may become unavailable if, for example, the device overheats and needs to cool off. This property is key-value observable.
+ */
 @property(nonatomic, readonly, getter=isFlashAvailable) BOOL flashAvailable NS_AVAILABLE_IOS(5_0);
 
 /*!
@@ -544,71 +562,65 @@
     Indicates whether the receiver's flash is currently active.
  
  @discussion
-    The value of this property is a BOOL indicating whether the receiver's flash is 
-    currently active. When the flash is active, it will flash if a still image is
-    captured. When a still image is captured with the flash active, exposure and
-    white balance settings are overridden for the still. This is true even when
-    using AVCaptureExposureModeCustom and/or AVCaptureWhiteBalanceModeLocked.
-    This property is key-value observable.
-*/
-@property(nonatomic, readonly, getter=isFlashActive) BOOL flashActive NS_AVAILABLE_IOS(5_0);
+    The value of this property is a BOOL indicating whether the receiver's flash is currently active. When the flash is active, it will flash if a still image is captured. When a still image is captured with the flash active, exposure and white balance settings are overridden for the still. This is true even when using AVCaptureExposureModeCustom and/or AVCaptureWhiteBalanceModeLocked. This property is key-value observable.
+ */
+@property(nonatomic, readonly, getter=isFlashActive) BOOL flashActive NS_DEPRECATED_IOS(5_0, 10_0, "Use AVCapturePhotoOutput's -isFlashScene instead.");
 
 /*!
  @method isFlashModeSupported:
  @abstract
     Returns whether the receiver supports the given flash mode.
-
+ 
  @param flashMode
     An AVCaptureFlashMode to be checked.
  @result
     YES if the receiver supports the given flash mode, NO otherwise.
-
+ 
  @discussion
     The receiver's flashMode property can only be set to a certain mode if this method returns YES for that mode.
-*/
-- (BOOL)isFlashModeSupported:(AVCaptureFlashMode)flashMode;
+ */
+- (BOOL)isFlashModeSupported:(AVCaptureFlashMode)flashMode NS_DEPRECATED(10_7, NA, 4_0, 10_0, "Use AVCapturePhotoOutput's -supportedFlashModes instead.");
 
 /*!
  @property flashMode
  @abstract
     Indicates current mode of the receiver's flash, if it has one.
-
+ 
  @discussion
-    The value of this property is an AVCaptureFlashMode that determines the mode of the 
-    receiver's flash, if it has one.  -setFlashMode: throws an NSInvalidArgumentException
-    if set to an unsupported value (see -isFlashModeSupported:).  -setFlashMode: throws an NSGenericException 
-    if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
-    Clients can observe automatic changes to the receiver's flashMode by key value observing this property.
-*/
-@property(nonatomic) AVCaptureFlashMode flashMode;
+    The value of this property is an AVCaptureFlashMode that determines the mode of the receiver's flash, if it has one. -setFlashMode: throws an NSInvalidArgumentException if set to an unsupported value (see -isFlashModeSupported:). -setFlashMode: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's flashMode by key value observing this property.
+ 
+    When using AVCapturePhotoOutput, AVCaptureDevice's flashMode property is ignored. You specify flashMode on a per photo basis by setting the AVCapturePhotoSettings.flashMode property.
+ */
+@property(nonatomic) AVCaptureFlashMode flashMode NS_DEPRECATED(10_7, NA, 4_0, 10_0, "Use AVCapturePhotoSettings.flashMode instead.");
 
 @end
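
With flashMode deprecated in favor of per-photo settings, a sketch of the replacement path; `photoOutput` is an assumed AVCapturePhotoOutput already attached to a running session, and `self` is assumed to conform to AVCapturePhotoCaptureDelegate:

```objc
AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
// supportedFlashModes replaces -isFlashModeSupported:, per the note above.
if ([photoOutput.supportedFlashModes containsObject:@(AVCaptureFlashModeAuto)]) {
    settings.flashMode = AVCaptureFlashModeAuto; // flash is chosen per photo now
}
[photoOutput capturePhotoWithSettings:settings delegate:self];
```
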
 
+
 /*!
  @enum AVCaptureTorchMode
  @abstract
     Constants indicating the mode of the torch on the receiver's device, if it has one.
-
+ 
  @constant AVCaptureTorchModeOff
     Indicates that the torch should always be off.
  @constant AVCaptureTorchModeOn
     Indicates that the torch should always be on.
  @constant AVCaptureTorchModeAuto
     Indicates that the torch should be used automatically depending on ambient light conditions.
-*/
+ */
 typedef NS_ENUM(NSInteger, AVCaptureTorchMode) {
-	AVCaptureTorchModeOff  = 0,
-	AVCaptureTorchModeOn   = 1,
-	AVCaptureTorchModeAuto = 2,
-} NS_AVAILABLE(10_7, 4_0);
+    AVCaptureTorchModeOff  = 0,
+    AVCaptureTorchModeOn   = 1,
+    AVCaptureTorchModeAuto = 2,
+} NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
+
 
 /*!
-  @constant AVCaptureMaxAvailableTorchLevel
-    A special value that may be passed to -setTorchModeWithLevel:error: to set the torch to the
-    maximum level currently available. Under thermal duress, the maximum available torch level
-    may be less than 1.0.
-*/
-extern const float AVCaptureMaxAvailableTorchLevel;
+ @constant AVCaptureMaxAvailableTorchLevel
+    A special value that may be passed to -setTorchModeWithLevel:error: to set the torch to the maximum level currently available. Under thermal duress, the maximum available torch level may be less than 1.0.
+ */
+AVF_EXPORT const float AVCaptureMaxAvailableTorchLevel NS_AVAILABLE_IOS(6_0) __TVOS_PROHIBITED;
+
 
 @interface AVCaptureDevice (AVCaptureDeviceTorch)
 
@@ -616,11 +628,10 @@
  @property hasTorch
  @abstract
     Indicates whether the receiver has a torch.
-
+ 
  @discussion
-    The value of this property is a BOOL indicating whether the receiver has a torch. The receiver's torchMode property
-    can only be set when this property returns YES.
-*/
+    The value of this property is a BOOL indicating whether the receiver has a torch. The receiver's torchMode property can only be set when this property returns YES.
+ */
 @property(nonatomic, readonly) BOOL hasTorch;
 
 /*!
@@ -629,10 +640,8 @@
     Indicates whether the receiver's torch is currently available for use.
  
  @discussion
-    The value of this property is a BOOL indicating whether the receiver's torch is 
-    currently available. The torch may become unavailable if, for example, the device
-    overheats and needs to cool off. This property is key-value observable.
-*/
+    The value of this property is a BOOL indicating whether the receiver's torch is currently available. The torch may become unavailable if, for example, the device overheats and needs to cool off. This property is key-value observable.
+ */
 @property(nonatomic, readonly, getter=isTorchAvailable) BOOL torchAvailable NS_AVAILABLE_IOS(5_0);
 
 /*!
@@ -641,107 +650,95 @@
     Indicates whether the receiver's torch is currently active.
  
  @discussion
-    The value of this property is a BOOL indicating whether the receiver's torch is 
-    currently active. If the current torchMode is AVCaptureTorchModeAuto and isTorchActive
-    is YES, the torch will illuminate once a recording starts (see AVCaptureOutput.h 
-    -startRecordingToOutputFileURL:recordingDelegate:). This property is key-value observable.
-*/
+    The value of this property is a BOOL indicating whether the receiver's torch is currently active. If the current torchMode is AVCaptureTorchModeAuto and isTorchActive is YES, the torch will illuminate once a recording starts (see AVCaptureOutput.h -startRecordingToOutputFileURL:recordingDelegate:). This property is key-value observable.
+ */
 @property(nonatomic, readonly, getter=isTorchActive) BOOL torchActive NS_AVAILABLE_IOS(6_0);
 
 /*!
  @property torchLevel
  @abstract
     Indicates the receiver's current torch brightness level as a floating point value.
-
+ 
  @discussion
-    The value of this property is a float indicating the receiver's torch level 
-    from 0.0 (off) -> 1.0 (full). This property is key-value observable.
-*/
+    The value of this property is a float indicating the receiver's torch level from 0.0 (off) -> 1.0 (full). This property is key-value observable.
+ */
 @property(nonatomic, readonly) float torchLevel NS_AVAILABLE_IOS(5_0);
 
 /*!
  @method isTorchModeSupported:
  @abstract
     Returns whether the receiver supports the given torch mode.
-
+ 
  @param torchMode
     An AVCaptureTorchMode to be checked.
  @result
     YES if the receiver supports the given torch mode, NO otherwise.
-
+ 
  @discussion
     The receiver's torchMode property can only be set to a certain mode if this method returns YES for that mode.
-*/
+ */
 - (BOOL)isTorchModeSupported:(AVCaptureTorchMode)torchMode;
 
 /*!
  @property torchMode
  @abstract
     Indicates current mode of the receiver's torch, if it has one.
-
+ 
  @discussion
-    The value of this property is an AVCaptureTorchMode that determines the mode of the 
-    receiver's torch, if it has one.  -setTorchMode: throws an NSInvalidArgumentException
-    if set to an unsupported value (see -isTorchModeSupported:).  -setTorchMode: throws an NSGenericException 
-    if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
-    Clients can observe automatic changes to the receiver's torchMode by key value observing this property.
-*/
+    The value of this property is an AVCaptureTorchMode that determines the mode of the receiver's torch, if it has one. -setTorchMode: throws an NSInvalidArgumentException if set to an unsupported value (see -isTorchModeSupported:). -setTorchMode: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's torchMode by key value observing this property.
+ */
 @property(nonatomic) AVCaptureTorchMode torchMode;
 
 /*!
  @method setTorchModeOnWithLevel:error:
  @abstract
     Sets the current mode of the receiver's torch to AVCaptureTorchModeOn at the specified level.
-
+ 
  @discussion
-    This method sets the torch mode to AVCaptureTorchModeOn at a specified level.  torchLevel must be 
-    a value between 0 and 1, or the special value AVCaptureMaxAvailableTorchLevel.  The specified value
-    may not be available if the iOS device is too hot. This method throws an NSInvalidArgumentException
-    if set to an unsupported level. If the specified level is valid, but unavailable, the method returns
-    NO with AVErrorTorchLevelUnavailable.  -setTorchModeOnWithLevel:error: throws an NSGenericException 
-    if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
-    Clients can observe automatic changes to the receiver's torchMode by key value observing the torchMode 
-    property.
-*/
-- (BOOL)setTorchModeOnWithLevel:(float)torchLevel error:(NSError **)outError NS_AVAILABLE_IOS(6_0);
+    This method sets the torch mode to AVCaptureTorchModeOn at a specified level. torchLevel must be a value between 0 and 1, or the special value AVCaptureMaxAvailableTorchLevel. The specified value may not be available if the iOS device is too hot. This method throws an NSInvalidArgumentException if set to an unsupported level. If the specified level is valid, but unavailable, the method returns NO with AVErrorTorchLevelUnavailable. -setTorchModeOnWithLevel:error: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's torchMode by key value observing the torchMode property.
+ */
+- (BOOL)setTorchModeOnWithLevel:(float)torchLevel error:(NSError * _Nullable * _Nullable)outError NS_AVAILABLE_IOS(6_0);
 
 @end
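
A sketch of the torch-level API described above; `device` is assumed to have a torch and to be locked for configuration. The call returns NO with AVErrorTorchLevelUnavailable when the level is valid but the device is too hot:

```objc
NSError *error = nil;
if (device.hasTorch &&
    ![device setTorchModeOnWithLevel:AVCaptureMaxAvailableTorchLevel error:&error]) {
    NSLog(@"Torch level unavailable: %@", error);
}
```
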
 
+
 /*!
  @enum AVCaptureFocusMode
  @abstract
     Constants indicating the mode of the focus on the receiver's device, if it has one.
-
+ 
  @constant AVCaptureFocusModeLocked
     Indicates that the focus should be locked at the lens' current position.
  @constant AVCaptureFocusModeAutoFocus
     Indicates that the device should autofocus once and then change the focus mode to AVCaptureFocusModeLocked.
  @constant AVCaptureFocusModeContinuousAutoFocus
     Indicates that the device should automatically focus when needed.
-*/
+ */
 typedef NS_ENUM(NSInteger, AVCaptureFocusMode) {
-	AVCaptureFocusModeLocked              = 0,
-	AVCaptureFocusModeAutoFocus           = 1,
-	AVCaptureFocusModeContinuousAutoFocus = 2,
-} NS_AVAILABLE(10_7, 4_0);
+    AVCaptureFocusModeLocked              = 0,
+    AVCaptureFocusModeAutoFocus           = 1,
+    AVCaptureFocusModeContinuousAutoFocus = 2,
+} NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
+
 
 /*!
  @enum AVCaptureAutoFocusRangeRestriction
  @abstract
-	Constants indicating the restriction of the receiver's autofocus system to a particular range of focus scan, if it supports range restrictions.
+    Constants indicating the restriction of the receiver's autofocus system to a particular range of focus scan, if it supports range restrictions.
  
  @constant AVCaptureAutoFocusRangeRestrictionNone
-	Indicates that the autofocus system should not restrict the focus range.
+    Indicates that the autofocus system should not restrict the focus range.
  @constant AVCaptureAutoFocusRangeRestrictionNear
-	Indicates that the autofocus system should restrict the focus range for subject matter that is near to the camera.
+    Indicates that the autofocus system should restrict the focus range for subject matter that is near to the camera.
  @constant AVCaptureAutoFocusRangeRestrictionFar
-	Indicates that the autofocus system should restrict the focus range for subject matter that is far from the camera.
-*/
+    Indicates that the autofocus system should restrict the focus range for subject matter that is far from the camera.
+ */
 typedef NS_ENUM(NSInteger, AVCaptureAutoFocusRangeRestriction) {
-	AVCaptureAutoFocusRangeRestrictionNone = 0,
-	AVCaptureAutoFocusRangeRestrictionNear = 1,
-	AVCaptureAutoFocusRangeRestrictionFar  = 2,
-} NS_AVAILABLE_IOS(7_0);
+    AVCaptureAutoFocusRangeRestrictionNone = 0,
+    AVCaptureAutoFocusRangeRestrictionNear = 1,
+    AVCaptureAutoFocusRangeRestrictionFar  = 2,
+} NS_AVAILABLE_IOS(7_0) __TVOS_PROHIBITED;
+
 
 @interface AVCaptureDevice (AVCaptureDeviceFocus)
 
@@ -749,124 +746,106 @@
  @method isFocusModeSupported:
  @abstract
     Returns whether the receiver supports the given focus mode.
-
+ 
  @param focusMode
     An AVCaptureFocusMode to be checked.
  @result
     YES if the receiver supports the given focus mode, NO otherwise.
-
+ 
  @discussion
     The receiver's focusMode property can only be set to a certain mode if this method returns YES for that mode.
-*/
+ */
 - (BOOL)isFocusModeSupported:(AVCaptureFocusMode)focusMode;
 
 /*!
+ @property lockingFocusWithCustomLensPositionSupported
+ @abstract
+    Indicates whether the receiver supports a lens position other than AVCaptureLensPositionCurrent.
+ 
+ @discussion
+    If lockingFocusWithCustomLensPositionSupported returns NO, setFocusModeLockedWithLensPosition: may only be called with AVCaptureLensPositionCurrent. Passing any other lens position will result in an exception.
+ */
+@property(nonatomic, readonly, getter=isLockingFocusWithCustomLensPositionSupported) BOOL lockingFocusWithCustomLensPositionSupported NS_AVAILABLE_IOS(10_0);
+
+/*!
  @property focusMode
  @abstract
     Indicates current focus mode of the receiver, if it has one.
-
+ 
  @discussion
-    The value of this property is an AVCaptureFocusMode that determines the receiver's focus mode, if it has one.
-    -setFocusMode: throws an NSInvalidArgumentException if set to an unsupported value (see -isFocusModeSupported:).  
-    -setFocusMode: throws an NSGenericException if called without first obtaining exclusive access to the receiver 
-    using lockForConfiguration:.  Clients can observe automatic changes to the receiver's focusMode by key value 
-    observing this property.
-*/
+    The value of this property is an AVCaptureFocusMode that determines the receiver's focus mode, if it has one. -setFocusMode: throws an NSInvalidArgumentException if set to an unsupported value (see -isFocusModeSupported:). -setFocusMode: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's focusMode by key value observing this property.
+ */
 @property(nonatomic) AVCaptureFocusMode focusMode;
 
 /*!
  @property focusPointOfInterestSupported
  @abstract
     Indicates whether the receiver supports focus points of interest.
-
+ 
  @discussion
     The receiver's focusPointOfInterest property can only be set if this property returns YES.
-*/
+ */
 @property(nonatomic, readonly, getter=isFocusPointOfInterestSupported) BOOL focusPointOfInterestSupported;
 
 /*!
  @property focusPointOfInterest
  @abstract
     Indicates current focus point of interest of the receiver, if it has one.
-
+ 
  @discussion
-    The value of this property is a CGPoint that determines the receiver's focus point of interest, if it has one. A
-    value of (0,0) indicates that the camera should focus on the top left corner of the image, while a value of (1,1)
-    indicates that it should focus on the bottom right. The default value is (0.5,0.5).  -setFocusPointOfInterest:
-    throws an NSInvalidArgumentException if isFocusPointOfInterestSupported returns NO.  -setFocusPointOfInterest: throws 
-    an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.  
-    Clients can observe automatic changes to the receiver's focusPointOfInterest by key value observing this property.  Note that
-    setting focusPointOfInterest alone does not initiate a focus operation.  After setting focusPointOfInterest, call
-    -setFocusMode: to apply the new point of interest.
-*/
+    The value of this property is a CGPoint that determines the receiver's focus point of interest, if it has one. A value of (0,0) indicates that the camera should focus on the top left corner of the image, while a value of (1,1) indicates that it should focus on the bottom right. The default value is (0.5,0.5). -setFocusPointOfInterest: throws an NSInvalidArgumentException if isFocusPointOfInterestSupported returns NO. -setFocusPointOfInterest: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's focusPointOfInterest by key value observing this property. Note that setting focusPointOfInterest alone does not initiate a focus operation. After setting focusPointOfInterest, call -setFocusMode: to apply the new point of interest.
+ */
 @property(nonatomic) CGPoint focusPointOfInterest;
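
The two-step tap-to-focus flow the discussion calls out (set the point first, then set a focus mode to apply it), as a sketch; `device` is assumed to be locked for configuration, and `point` is an assumed value in the (0,0)-(1,1) space, e.g. converted via -[AVCaptureVideoPreviewLayer captureDevicePointOfInterestForPoint:]:

```objc
if (device.focusPointOfInterestSupported &&
    [device isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
    device.focusPointOfInterest = point;           // does not focus by itself
    device.focusMode = AVCaptureFocusModeAutoFocus; // triggers the focus scan
}
```
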
 
 /*!
  @property adjustingFocus
  @abstract
     Indicates whether the receiver is currently performing a focus scan to adjust focus.
-
+ 
  @discussion
-    The value of this property is a BOOL indicating whether the receiver's camera focus is being automatically
-    adjusted by means of a focus scan, because its focus mode is AVCaptureFocusModeAutoFocus or
-	AVCaptureFocusModeContinuousAutoFocus.
-    Clients can observe the value of this property to determine whether the camera's focus is stable.
-	@seealso lensPosition
-	@seealso AVCaptureAutoFocusSystem
-*/
+    The value of this property is a BOOL indicating whether the receiver's camera focus is being automatically adjusted by means of a focus scan, because its focus mode is AVCaptureFocusModeAutoFocus or AVCaptureFocusModeContinuousAutoFocus. Clients can observe the value of this property to determine whether the camera's focus is stable.
+ @seealso lensPosition
+ @seealso AVCaptureAutoFocusSystem
+ */
 @property(nonatomic, readonly, getter=isAdjustingFocus) BOOL adjustingFocus;
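
Since adjustingFocus is key-value observable, a client can wait for a scan to settle before capturing. A sketch, assuming `self` is an NSObject subclass (e.g. a capture controller) that owns `device`:

```objc
// Registration, e.g. in the controller's setup code:
[device addObserver:self
         forKeyPath:@"adjustingFocus"
            options:NSKeyValueObservingOptionNew
            context:NULL];

// Callback:
- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context
{
    if ([keyPath isEqualToString:@"adjustingFocus"] &&
        ![change[NSKeyValueChangeNewKey] boolValue]) {
        // Focus scan finished; the camera's focus is now stable.
    }
}
```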
 
 /*!
  @property autoFocusRangeRestrictionSupported
  @abstract
-	Indicates whether the receiver supports autofocus range restrictions.
+    Indicates whether the receiver supports autofocus range restrictions.
  
  @discussion
-	The receiver's autoFocusRangeRestriction property can only be set if this property returns YES.
+    The receiver's autoFocusRangeRestriction property can only be set if this property returns YES.
  */
 @property(nonatomic, readonly, getter=isAutoFocusRangeRestrictionSupported) BOOL autoFocusRangeRestrictionSupported NS_AVAILABLE_IOS(7_0);
 
 /*!
  @property autoFocusRangeRestriction
  @abstract
-	Indicates current restriction of the receiver's autofocus system to a particular range of focus scan, if it supports range restrictions.
+    Indicates current restriction of the receiver's autofocus system to a particular range of focus scan, if it supports range restrictions.
  
  @discussion
-	The value of this property is an AVCaptureAutoFocusRangeRestriction indicating how the autofocus system
-	should limit its focus scan.  The default value is AVCaptureAutoFocusRangeRestrictionNone.
-	-setAutoFocusRangeRestriction: throws an NSInvalidArgumentException if isAutoFocusRangeRestrictionSupported
-	returns NO.  -setAutoFocusRangeRestriction: throws an NSGenericException if called without first obtaining exclusive
-	access to the receiver using lockForConfiguration:.  This property only has an effect when the focusMode property is
-	set to AVCaptureFocusModeAutoFocus or AVCaptureFocusModeContinuousAutoFocus.  Note that setting autoFocusRangeRestriction 
-	alone does not initiate a focus operation.  After setting autoFocusRangeRestriction, call -setFocusMode: to apply the new restriction.
+    The value of this property is an AVCaptureAutoFocusRangeRestriction indicating how the autofocus system should limit its focus scan. The default value is AVCaptureAutoFocusRangeRestrictionNone. -setAutoFocusRangeRestriction: throws an NSInvalidArgumentException if isAutoFocusRangeRestrictionSupported returns NO. -setAutoFocusRangeRestriction: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. This property only has an effect when the focusMode property is set to AVCaptureFocusModeAutoFocus or AVCaptureFocusModeContinuousAutoFocus. Note that setting autoFocusRangeRestriction alone does not initiate a focus operation. After setting autoFocusRangeRestriction, call -setFocusMode: to apply the new restriction.
  */
 @property(nonatomic) AVCaptureAutoFocusRangeRestriction autoFocusRangeRestriction NS_AVAILABLE_IOS(7_0);
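
A common use of the restriction is close-up scanning (e.g. barcodes). Note the header's point that -setFocusMode: must follow for the restriction to take effect; a sketch under the same `device` assumption:

```objc
NSError *error = nil;
if (device.isAutoFocusRangeRestrictionSupported &&
    [device lockForConfiguration:&error]) {
    device.autoFocusRangeRestriction = AVCaptureAutoFocusRangeRestrictionNear;
    device.focusMode = AVCaptureFocusModeContinuousAutoFocus; // applies the restriction
    [device unlockForConfiguration];
}
```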
 
 /*!
  @property smoothAutoFocusSupported
  @abstract
-	Indicates whether the receiver supports smooth autofocus.
+    Indicates whether the receiver supports smooth autofocus.
  
  @discussion
-	The receiver's smoothAutoFocusEnabled property can only be set if this property returns YES.
+    The receiver's smoothAutoFocusEnabled property can only be set if this property returns YES.
  */
 @property(nonatomic, readonly, getter=isSmoothAutoFocusSupported) BOOL smoothAutoFocusSupported NS_AVAILABLE_IOS(7_0);
 
 /*!
  @property smoothAutoFocusEnabled
  @abstract
-	Indicates whether the receiver should use smooth autofocus.
+    Indicates whether the receiver should use smooth autofocus.
  
  @discussion
-	On a receiver where -isSmoothAutoFocusSupported returns YES and smoothAutoFocusEnabled is set to YES,
-	a smooth autofocus will be engaged when the focus mode is set to AVCaptureFocusModeAutoFocus or
-	AVCaptureFocusModeContinuousAutoFocus.  Enabling smooth autofocus is appropriate for movie recording.
-	Smooth autofocus is slower and less visually invasive. Disabling smooth autofocus is more appropriate
-	for video processing where a fast autofocus is necessary.  The default value is NO.
-	Setting this property throws an NSInvalidArgumentException if -isSmoothAutoFocusSupported returns NO.
-	The receiver must be locked for configuration using lockForConfiguration: before clients can set this method,
-	otherwise an NSGenericException is thrown. Note that setting smoothAutoFocusEnabled alone does not initiate a
-	focus operation.  After setting smoothAutoFocusEnabled, call -setFocusMode: to apply the new smooth autofocus mode.
+    On a receiver where -isSmoothAutoFocusSupported returns YES and smoothAutoFocusEnabled is set to YES, a smooth autofocus will be engaged when the focus mode is set to AVCaptureFocusModeAutoFocus or AVCaptureFocusModeContinuousAutoFocus. Enabling smooth autofocus is appropriate for movie recording. Smooth autofocus is slower and less visually invasive. Disabling smooth autofocus is more appropriate for video processing where a fast autofocus is necessary. The default value is NO. Setting this property throws an NSInvalidArgumentException if -isSmoothAutoFocusSupported returns NO. The receiver must be locked for configuration using lockForConfiguration: before clients can set this property; otherwise an NSGenericException is thrown. Note that setting smoothAutoFocusEnabled alone does not initiate a focus operation. After setting smoothAutoFocusEnabled, call -setFocusMode: to apply the new smooth autofocus mode.
  */
 @property(nonatomic, getter=isSmoothAutoFocusEnabled) BOOL smoothAutoFocusEnabled NS_AVAILABLE_IOS(7_0);
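
For movie recording, the header recommends smooth autofocus; a sketch of opting in:

```objc
NSError *error = nil;
if (device.isSmoothAutoFocusSupported &&
    [device lockForConfiguration:&error]) {
    device.smoothAutoFocusEnabled = YES;
    device.focusMode = AVCaptureFocusModeContinuousAutoFocus; // applies the new setting
    [device unlockForConfiguration];
}
```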
 
@@ -876,22 +855,15 @@
     Indicates the focus position of the lens.
  
  @discussion
-    The range of possible positions is 0.0 to 1.0, with 0.0 being the shortest distance at which the lens can focus and
-    1.0 the furthest. Note that 1.0 does not represent focus at infinity. The default value is 1.0.
-    Note that a given lens position value does not correspond to an exact physical distance, nor does it represent a
-    consistent focus distance from device to device. This property is key-value observable. It can be read at any time, 
-    regardless of focus mode, but can only be set via setFocusModeLockedWithLensPosition:completionHandler:.
-*/
+    The range of possible positions is 0.0 to 1.0, with 0.0 being the shortest distance at which the lens can focus and 1.0 the furthest. Note that 1.0 does not represent focus at infinity. The default value is 1.0. Note that a given lens position value does not correspond to an exact physical distance, nor does it represent a consistent focus distance from device to device. This property is key-value observable. It can be read at any time, regardless of focus mode, but can only be set via setFocusModeLockedWithLensPosition:completionHandler:.
+ */
 @property(nonatomic, readonly) float lensPosition NS_AVAILABLE_IOS(8_0);
 
 /*!
  @constant AVCaptureLensPositionCurrent
-    A special value that may be passed as the lensPosition parameter of setFocusModeLockedWithLensPosition:completionHandler: to
-    indicate that the caller does not wish to specify a value for the lensPosition property, and that it should instead be set 
-    to its current value. Note that the device may be adjusting lensPosition at the time of the call, in which case the value at 
-    which lensPosition is locked may differ from the value obtained by querying the lensPosition property.
-*/
-AVF_EXPORT const float AVCaptureLensPositionCurrent NS_AVAILABLE_IOS(8_0);
+    A special value that may be passed as the lensPosition parameter of setFocusModeLockedWithLensPosition:completionHandler: to indicate that the caller does not wish to specify a value for the lensPosition property, and that it should instead be set to its current value. Note that the device may be adjusting lensPosition at the time of the call, in which case the value at which lensPosition is locked may differ from the value obtained by querying the lensPosition property.
+ */
+AVF_EXPORT const float AVCaptureLensPositionCurrent NS_AVAILABLE_IOS(8_0) __TVOS_PROHIBITED;
 
 /*!
  @method setFocusModeLockedWithLensPosition:completionHandler:
@@ -899,46 +871,39 @@
     Sets focusMode to AVCaptureFocusModeLocked and locks lensPosition at an explicit value.
  
  @param lensPosition
-    The lens position, as described in the documentation for the lensPosition property. A value of AVCaptureLensPositionCurrent can be used
-    to indicate that the caller does not wish to specify a value for lensPosition.
+    The lens position, as described in the documentation for the lensPosition property. A value of AVCaptureLensPositionCurrent can be used to indicate that the caller does not wish to specify a value for lensPosition.
  @param handler
-    A block to be called when lensPosition has been set to the value specified and focusMode is set to AVCaptureFocusModeLocked. If
-    setFocusModeLockedWithLensPosition:completionHandler: is called multiple times, the completion handlers will be called in FIFO order. 
-    The block receives a timestamp which matches that of the first buffer to which all settings have been applied. Note that the timestamp 
-    is synchronized to the device clock, and thus must be converted to the master clock prior to comparison with the timestamps of buffers 
-    delivered via an AVCaptureVideoDataOutput. The client may pass nil for the handler parameter if knowledge of the operation's completion 
-    is not required.
+    A block to be called when lensPosition has been set to the value specified and focusMode is set to AVCaptureFocusModeLocked. If setFocusModeLockedWithLensPosition:completionHandler: is called multiple times, the completion handlers will be called in FIFO order. The block receives a timestamp which matches that of the first buffer to which all settings have been applied. Note that the timestamp is synchronized to the device clock, and thus must be converted to the master clock prior to comparison with the timestamps of buffers delivered via an AVCaptureVideoDataOutput. The client may pass nil for the handler parameter if knowledge of the operation's completion is not required.
  
  @discussion
-    This is the only way of setting lensPosition.
-    This method throws an NSRangeException if lensPosition is set to an unsupported level.
-    This method throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
-*/
-- (void)setFocusModeLockedWithLensPosition:(float)lensPosition completionHandler:(void (^)(CMTime syncTime))handler NS_AVAILABLE_IOS(8_0);
+    This is the only way of setting lensPosition. This method throws an NSRangeException if lensPosition is set to an unsupported level. This method throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
+ */
+- (void)setFocusModeLockedWithLensPosition:(float)lensPosition completionHandler:(nullable void (^)(CMTime syncTime))handler NS_AVAILABLE_IOS(8_0);
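
A sketch of locking the lens and converting the returned timestamp to the session's master clock, assuming `port` (the AVCaptureDeviceInput's video AVCaptureInputPort) and `session` are already set up; the 0.3 lens position is an arbitrary example value:

```objc
NSError *error = nil;
if ([device lockForConfiguration:&error]) {
    [device setFocusModeLockedWithLensPosition:0.3f completionHandler:^(CMTime syncTime) {
        // syncTime is on the device clock; convert before comparing with
        // AVCaptureVideoDataOutput buffer timestamps.
        CMTime masterTime = CMSyncConvertTime(syncTime, port.clock, session.masterClock);
        NSLog(@"Lens locked as of %@",
              CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, masterTime)));
    }];
    [device unlockForConfiguration];
}
```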
 
 @end
 
+
 /*!
  @enum AVCaptureExposureMode
  @abstract
     Constants indicating the mode of the exposure on the receiver's device, if it has adjustable exposure.
-
+ 
  @constant AVCaptureExposureModeLocked
     Indicates that the exposure should be locked at its current value.
  @constant AVCaptureExposureModeAutoExpose
-    Indicates that the device should automatically adjust exposure once and then change the exposure mode to 
-    AVCaptureExposureModeLocked.
+    Indicates that the device should automatically adjust exposure once and then change the exposure mode to AVCaptureExposureModeLocked.
  @constant AVCaptureExposureModeContinuousAutoExposure
     Indicates that the device should automatically adjust exposure when needed.
  @constant AVCaptureExposureModeCustom
    Indicates that the device should only adjust exposure according to user-provided ISO and exposureDuration values.
-*/
+ */
 typedef NS_ENUM(NSInteger, AVCaptureExposureMode) {
-	AVCaptureExposureModeLocked                            = 0,
-	AVCaptureExposureModeAutoExpose                        = 1,
-	AVCaptureExposureModeContinuousAutoExposure	           = 2,
-	AVCaptureExposureModeCustom NS_ENUM_AVAILABLE_IOS(8_0) = 3,
-} NS_AVAILABLE(10_7, 4_0);
+    AVCaptureExposureModeLocked                            = 0,
+    AVCaptureExposureModeAutoExpose                        = 1,
+    AVCaptureExposureModeContinuousAutoExposure            = 2,
+    AVCaptureExposureModeCustom NS_ENUM_AVAILABLE_IOS(8_0) = 3,
+} NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
+
 
 @interface AVCaptureDevice (AVCaptureDeviceExposure)
 
@@ -946,34 +911,25 @@
  @method isExposureModeSupported:
  @abstract
     Returns whether the receiver supports the given exposure mode.
-
+ 
  @param exposureMode
     An AVCaptureExposureMode to be checked.
  @result
     YES if the receiver supports the given exposure mode, NO otherwise.
-
+ 
  @discussion
     The receiver's exposureMode property can only be set to a certain mode if this method returns YES for that mode.
-*/
+ */
 - (BOOL)isExposureModeSupported:(AVCaptureExposureMode)exposureMode;
 
 /*!
  @property exposureMode
  @abstract
     Indicates current exposure mode of the receiver, if it has adjustable exposure.
-
+ 
  @discussion
-    The value of this property is an AVCaptureExposureMode that determines the receiver's exposure mode, if it has
-    adjustable exposure.  -setExposureMode: throws an NSInvalidArgumentException if set to an unsupported value 
-    (see -isExposureModeSupported:).  -setExposureMode: throws an NSGenericException if called without first obtaining 
-    exclusive access to the receiver using lockForConfiguration:. When using AVCaptureStillImageOutput with
-    automaticallyEnablesStillImageStabilizationWhenAvailable set to YES (the default behavior), the receiver's ISO and 
-    exposureDuration values may be overridden by automatic still image stabilization values if the scene is dark enough to 
-    warrant still image stabilization.  To ensure that the receiver's ISO and exposureDuration values are honored while
-    in AVCaptureExposureModeCustom or AVCaptureExposureModeLocked, you must set AVCaptureStillImageOutput's
-    automaticallyEnablesStillImageStabilizationWhenAvailable property to NO. Clients can observe automatic changes to the receiver's
-    exposureMode by key value observing this property.
-*/
+    The value of this property is an AVCaptureExposureMode that determines the receiver's exposure mode, if it has adjustable exposure. -setExposureMode: throws an NSInvalidArgumentException if set to an unsupported value (see -isExposureModeSupported:). -setExposureMode: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. When using AVCaptureStillImageOutput with automaticallyEnablesStillImageStabilizationWhenAvailable set to YES (the default behavior), the receiver's ISO and exposureDuration values may be overridden by automatic still image stabilization values if the scene is dark enough to warrant still image stabilization. To ensure that the receiver's ISO and exposureDuration values are honored while in AVCaptureExposureModeCustom or AVCaptureExposureModeLocked, you must set AVCaptureStillImageOutput's automaticallyEnablesStillImageStabilizationWhenAvailable property to NO. Clients can observe automatic changes to the receiver's exposureMode by key value observing this property.
+ */
 @property(nonatomic) AVCaptureExposureMode exposureMode;
 
 /*!
@@ -983,36 +939,27 @@
  
  @discussion
     The receiver's exposurePointOfInterest property can only be set if this property returns YES.
-*/
+ */
 @property(nonatomic, readonly, getter=isExposurePointOfInterestSupported) BOOL exposurePointOfInterestSupported;
 
 /*!
  @property exposurePointOfInterest
  @abstract
     Indicates current exposure point of interest of the receiver, if it has one.
-
+ 
  @discussion
-    The value of this property is a CGPoint that determines the receiver's exposure point of interest, if it has
-    adjustable exposure. A value of (0,0) indicates that the camera should adjust exposure based on the top left
-    corner of the image, while a value of (1,1) indicates that it should adjust exposure based on the bottom right corner. The
-    default value is (0.5,0.5). -setExposurePointOfInterest: throws an NSInvalidArgumentException if isExposurePointOfInterestSupported 
-    returns NO.  -setExposurePointOfInterest: throws an NSGenericException if called without first obtaining exclusive access 
-    to the receiver using lockForConfiguration:.  Note that setting exposurePointOfInterest alone does not initiate an exposure
-    operation.  After setting exposurePointOfInterest, call -setExposureMode: to apply the new point of interest.
-*/
+    The value of this property is a CGPoint that determines the receiver's exposure point of interest, if it has adjustable exposure. A value of (0,0) indicates that the camera should adjust exposure based on the top left corner of the image, while a value of (1,1) indicates that it should adjust exposure based on the bottom right corner. The default value is (0.5,0.5). -setExposurePointOfInterest: throws an NSInvalidArgumentException if isExposurePointOfInterestSupported returns NO. -setExposurePointOfInterest: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Note that setting exposurePointOfInterest alone does not initiate an exposure operation. After setting exposurePointOfInterest, call -setExposureMode: to apply the new point of interest.
+ */
 @property(nonatomic) CGPoint exposurePointOfInterest;
 
 /*!
  @property adjustingExposure
  @abstract
     Indicates whether the receiver is currently adjusting camera exposure.
-
+ 
  @discussion
-    The value of this property is a BOOL indicating whether the receiver's camera exposure is being automatically
-    adjusted because its exposure mode is AVCaptureExposureModeAutoExpose or AVCaptureExposureModeContinuousAutoExposure.
-    Clients can observe the value of this property to determine whether the camera exposure is stable or is being
-    automatically adjusted.
-*/
+    The value of this property is a BOOL indicating whether the receiver's camera exposure is being automatically adjusted because its exposure mode is AVCaptureExposureModeAutoExpose or AVCaptureExposureModeContinuousAutoExposure. Clients can observe the value of this property to determine whether the camera exposure is stable or is being automatically adjusted.
+ */
 @property(nonatomic, readonly, getter=isAdjustingExposure) BOOL adjustingExposure;
 
 /*!
@@ -1021,9 +968,8 @@
     The size of the lens diaphragm.
  
  @discussion
-    The value of this property is a float indicating the size (f number) of the lens diaphragm.
-    This property does not change.
-*/
+    The value of this property is a float indicating the size (f number) of the lens diaphragm. This property does not change.
+ */
 @property(nonatomic, readonly) float lensAperture NS_AVAILABLE_IOS(8_0);
 
 /*!
@@ -1032,10 +978,8 @@
     The length of time over which exposure takes place.
  
  @discussion
-    Only exposure duration values between activeFormat.minExposureDuration and activeFormat.maxExposureDuration are supported.
-    This property is key-value observable. It can be read at any time, regardless of exposure mode, but can only be set
-    via setExposureModeCustomWithDuration:ISO:completionHandler:.
-*/
+    Only exposure duration values between activeFormat.minExposureDuration and activeFormat.maxExposureDuration are supported. This property is key-value observable. It can be read at any time, regardless of exposure mode, but can only be set via setExposureModeCustomWithDuration:ISO:completionHandler:.
+ */
 @property(nonatomic, readonly) CMTime exposureDuration NS_AVAILABLE_IOS(8_0);
 
 /*!
@@ -1044,30 +988,21 @@
     The current exposure ISO value.
  
  @discussion
-    This property controls the sensor's sensitivity to light by means of a gain value applied to the signal. Only ISO values
-    between activeFormat.minISO and activeFormat.maxISO are supported. Higher values will result in noisier images.
-    This property is key-value observable. It can be read at any time, regardless of exposure mode, but can only be set
-    via setExposureModeCustomWithDuration:ISO:completionHandler:.
-*/
+    This property controls the sensor's sensitivity to light by means of a gain value applied to the signal. Only ISO values between activeFormat.minISO and activeFormat.maxISO are supported. Higher values will result in noisier images. This property is key-value observable. It can be read at any time, regardless of exposure mode, but can only be set via setExposureModeCustomWithDuration:ISO:completionHandler:.
+ */
 @property(nonatomic, readonly) float ISO NS_AVAILABLE_IOS(8_0);
 
 /*!
  @constant AVCaptureExposureDurationCurrent
-    A special value that may be passed as the duration parameter of setExposureModeCustomWithDuration:ISO:completionHandler: to
-    indicate that the caller does not wish to specify a value for the exposureDuration property, and that it should instead be set to its 
-    current value. Note that the device may be adjusting exposureDuration at the time of the call, in which case the value to which
-    exposureDuration is set may differ from the value obtained by querying the exposureDuration property.
-*/
+    A special value that may be passed as the duration parameter of setExposureModeCustomWithDuration:ISO:completionHandler: to indicate that the caller does not wish to specify a value for the exposureDuration property, and that it should instead be set to its current value. Note that the device may be adjusting exposureDuration at the time of the call, in which case the value to which exposureDuration is set may differ from the value obtained by querying the exposureDuration property.
+ */
 AVF_EXPORT const CMTime AVCaptureExposureDurationCurrent NS_AVAILABLE_IOS(8_0);
 
 /*!
  @constant AVCaptureISOCurrent
-    A special value that may be passed as the ISO parameter of setExposureModeCustomWithDuration:ISO:completionHandler: to indicate
-    that the caller does not wish to specify a value for the ISO property, and that it should instead be set to its current value. Note that the
-    device may be adjusting ISO at the time of the call, in which case the value to which ISO is set may differ from the value obtained by querying
-    the ISO property.
-*/
-AVF_EXPORT const float AVCaptureISOCurrent NS_AVAILABLE_IOS(8_0);
+    A special value that may be passed as the ISO parameter of setExposureModeCustomWithDuration:ISO:completionHandler: to indicate that the caller does not wish to specify a value for the ISO property, and that it should instead be set to its current value. Note that the device may be adjusting ISO at the time of the call, in which case the value to which ISO is set may differ from the value obtained by querying the ISO property.
+ */
+AVF_EXPORT const float AVCaptureISOCurrent NS_AVAILABLE_IOS(8_0) __TVOS_PROHIBITED;
 
 /*!
  @method setExposureModeCustomWithDuration:ISO:completionHandler:
@@ -1075,31 +1010,16 @@
     Sets exposureMode to AVCaptureExposureModeCustom and locks exposureDuration and ISO at explicit values.
  
  @param duration
-    The exposure duration, as described in the documentation for the exposureDuration property. A value of AVCaptureExposureDurationCurrent
-    can be used to indicate that the caller does not wish to specify a value for exposureDuration.
-    Note that changes to this property may result in changes to activeVideoMinFrameDuration and/or activeVideoMaxFrameDuration.
+    The exposure duration, as described in the documentation for the exposureDuration property. A value of AVCaptureExposureDurationCurrent can be used to indicate that the caller does not wish to specify a value for exposureDuration. Note that changes to this property may result in changes to activeVideoMinFrameDuration and/or activeVideoMaxFrameDuration.
  @param ISO
-    The exposure ISO value, as described in the documentation for the ISO property. A value of AVCaptureISOCurrent
-    can be used to indicate that the caller does not wish to specify a value for ISO.
+    The exposure ISO value, as described in the documentation for the ISO property. A value of AVCaptureISOCurrent can be used to indicate that the caller does not wish to specify a value for ISO.
  @param handler
-    A block to be called when both exposureDuration and ISO have been set to the values specified and exposureMode is set to
-    AVCaptureExposureModeCustom. If setExposureModeCustomWithDuration:ISO:completionHandler: is called multiple times, the completion handlers 
-    will be called in FIFO order. The block receives a timestamp which matches that of the first buffer to which all settings have been applied.
-    Note that the timestamp is synchronized to the device clock, and thus must be converted to the master clock prior to comparison with the
-    timestamps of buffers delivered via an AVCaptureVideoDataOutput. The client may pass nil for the handler parameter if knowledge of the 
-    operation's completion is not required.
- 
- @discussion
-    This is the only way of setting exposureDuration and ISO.
-    This method throws an NSRangeException if either exposureDuration or ISO is set to an unsupported level.
-    This method throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
-    When using AVCaptureStillImageOutput with automaticallyEnablesStillImageStabilizationWhenAvailable set to YES (the default behavior),
-    the receiver's ISO and exposureDuration values may be overridden by automatic still image stabilization values if the scene is dark
-    enough to warrant still image stabilization.  To ensure that the receiver's ISO and exposureDuration values are honored while
-    in AVCaptureExposureModeCustom or AVCaptureExposureModeLocked, you must set AVCaptureStillImageOutput's
-    automaticallyEnablesStillImageStabilizationWhenAvailable property to NO.
-*/
-- (void)setExposureModeCustomWithDuration:(CMTime)duration ISO:(float)ISO completionHandler:(void (^)(CMTime syncTime))handler NS_AVAILABLE_IOS(8_0);
+    A block to be called when both exposureDuration and ISO have been set to the values specified and exposureMode is set to AVCaptureExposureModeCustom. If setExposureModeCustomWithDuration:ISO:completionHandler: is called multiple times, the completion handlers will be called in FIFO order. The block receives a timestamp which matches that of the first buffer to which all settings have been applied. Note that the timestamp is synchronized to the device clock, and thus must be converted to the master clock prior to comparison with the timestamps of buffers delivered via an AVCaptureVideoDataOutput. The client may pass nil for the handler parameter if knowledge of the operation's completion is not required.
+ 
+ @discussion
+    This is the only way of setting exposureDuration and ISO. This method throws an NSRangeException if either exposureDuration or ISO is set to an unsupported level. This method throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. When using AVCaptureStillImageOutput with automaticallyEnablesStillImageStabilizationWhenAvailable set to YES (the default behavior), the receiver's ISO and exposureDuration values may be overridden by automatic still image stabilization values if the scene is dark enough to warrant still image stabilization. To ensure that the receiver's ISO and exposureDuration values are honored while in AVCaptureExposureModeCustom or AVCaptureExposureModeLocked, you must set AVCaptureStillImageOutput's automaticallyEnablesStillImageStabilizationWhenAvailable property to NO.
+ */
+- (void)setExposureModeCustomWithDuration:(CMTime)duration ISO:(float)ISO completionHandler:(nullable void (^)(CMTime syncTime))handler NS_AVAILABLE_IOS(8_0);
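
A sketch of driving a manual exposure, clamping the requested values into the ranges the activeFormat advertises; the 1/60 s and ISO 200 targets are arbitrary:

```objc
AVCaptureDeviceFormat *format = device.activeFormat;
CMTime duration = CMTimeMakeWithSeconds(1.0 / 60.0, 1000 * 1000 * 1000);
duration = CMTimeClampToRange(duration,
    CMTimeRangeFromTimeToTime(format.minExposureDuration, format.maxExposureDuration));
float ISO = MAX(format.minISO, MIN(200.0f, format.maxISO));

NSError *error = nil;
if ([device lockForConfiguration:&error]) {
    [device setExposureModeCustomWithDuration:duration ISO:ISO completionHandler:nil];
    [device unlockForConfiguration];
}
```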
 
 /*!
  @property exposureTargetOffset
@@ -1107,9 +1027,8 @@
     Indicates the metered exposure level's offset from the target exposure value, in EV units.
  
  @discussion
-    The value of this read-only property indicates the difference between the metered exposure level of the current scene and the target exposure value.
-    This property is key-value observable.
-*/
+    The value of this read-only property indicates the difference between the metered exposure level of the current scene and the target exposure value. This property is key-value observable.
+ */
 @property(nonatomic, readonly) float exposureTargetOffset NS_AVAILABLE_IOS(8_0);
 
 /*!
@@ -1118,11 +1037,8 @@
     Bias applied to the target exposure value, in EV units.
  
  @discussion
-    When exposureMode is AVCaptureExposureModeContinuousAutoExposure or AVCaptureExposureModeLocked, the bias will affect
-    both metering (exposureTargetOffset), and the actual exposure level (exposureDuration and ISO). When the exposure mode
-    is AVCaptureExposureModeCustom, it will only affect metering.
-    This property is key-value observable. It can be read at any time, but can only be set via setExposureBias:completionHandler:.
-*/
+    When exposureMode is AVCaptureExposureModeContinuousAutoExposure or AVCaptureExposureModeLocked, the bias will affect both metering (exposureTargetOffset) and the actual exposure level (exposureDuration and ISO). When the exposure mode is AVCaptureExposureModeCustom, it will only affect metering. This property is key-value observable. It can be read at any time, but can only be set via setExposureTargetBias:completionHandler:.
+ */
 @property(nonatomic, readonly) float exposureTargetBias NS_AVAILABLE_IOS(8_0);
 
 /*!
@@ -1132,7 +1048,7 @@
  
  @discussion
     This read-only property indicates the minimum supported exposure bias.
-*/
+ */
 @property(nonatomic, readonly) float minExposureTargetBias NS_AVAILABLE_IOS(8_0);
 
 /*!
@@ -1142,16 +1058,14 @@
  
  @discussion
     This read-only property indicates the maximum supported exposure bias.
-*/
+ */
 @property(nonatomic, readonly) float maxExposureTargetBias NS_AVAILABLE_IOS(8_0);
 
 /*!
  @constant AVCaptureExposureTargetBiasCurrent
-    A special value that may be passed as the bias parameter of setExposureTargetBias:completionHandler: to indicate that the
-    caller does not wish to specify a value for the exposureTargetBias property, and that it should instead be set to its current
-    value.
-*/
-AVF_EXPORT const float AVCaptureExposureTargetBiasCurrent NS_AVAILABLE_IOS(8_0);
+    A special value that may be passed as the bias parameter of setExposureTargetBias:completionHandler: to indicate that the caller does not wish to specify a value for the exposureTargetBias property, and that it should instead be set to its current value.
+ */
+AVF_EXPORT const float AVCaptureExposureTargetBiasCurrent NS_AVAILABLE_IOS(8_0) __TVOS_PROHIBITED;
 
 /*!
  @method setExposureTargetBias:completionHandler:
@@ -1161,68 +1075,68 @@
  @param bias
     The bias to be applied to the exposure target value, as described in the documentation for the exposureTargetBias property.
  @param handler
-    A block to be called when exposureTargetBias has been set to the value specified. If setExposureTargetBias:completionHandler:
-    is called multiple times, the completion handlers will be called in FIFO order. The block receives a timestamp which matches 
-    that of the first buffer to which the setting has been applied. Note that the timestamp is synchronized to the device clock, 
-    and thus must be converted to the master clock prior to comparison with the timestamps of buffers delivered via an 
-    AVCaptureVideoDataOutput. The client may pass nil for the handler parameter if knowledge of the operation's completion is not 
-    required.
+    A block to be called when exposureTargetBias has been set to the value specified. If setExposureTargetBias:completionHandler: is called multiple times, the completion handlers will be called in FIFO order. The block receives a timestamp which matches that of the first buffer to which the setting has been applied. Note that the timestamp is synchronized to the device clock, and thus must be converted to the master clock prior to comparison with the timestamps of buffers delivered via an AVCaptureVideoDataOutput. The client may pass nil for the handler parameter if knowledge of the operation's completion is not required.
  
  @discussion
-    This is the only way of setting exposureTargetBias.
-    This method throws an NSRangeException if exposureTargetBias is set to an unsupported level.
-    This method throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
-*/
-- (void)setExposureTargetBias:(float)bias completionHandler:(void (^)(CMTime syncTime))handler NS_AVAILABLE_IOS(8_0);
+    This is the only way of setting exposureTargetBias. This method throws an NSRangeException if exposureTargetBias is set to an unsupported level. This method throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
+ */
+- (void)setExposureTargetBias:(float)bias completionHandler:(nullable void (^)(CMTime syncTime))handler NS_AVAILABLE_IOS(8_0);
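
Since out-of-range biases throw, clamp against minExposureTargetBias/maxExposureTargetBias first. A sketch; the one-EV-darker target is arbitrary:

```objc
float desired = -1.0f; // one EV darker than the metered target
float bias = MAX(device.minExposureTargetBias,
                 MIN(desired, device.maxExposureTargetBias));

NSError *error = nil;
if ([device lockForConfiguration:&error]) {
    [device setExposureTargetBias:bias completionHandler:nil];
    [device unlockForConfiguration];
}
```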
 
 @end
 
+
 /*!
  @enum AVCaptureWhiteBalanceMode
  @abstract
     Constants indicating the mode of the white balance on the receiver's device, if it has adjustable white balance.
-
+ 
  @constant AVCaptureWhiteBalanceModeLocked
     Indicates that the white balance should be locked at its current value.
  @constant AVCaptureWhiteBalanceModeAutoWhiteBalance
-    Indicates that the device should automatically adjust white balance once and then change the white balance mode to 
-    AVCaptureWhiteBalanceModeLocked.
+    Indicates that the device should automatically adjust white balance once and then change the white balance mode to AVCaptureWhiteBalanceModeLocked.
  @constant AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance
     Indicates that the device should automatically adjust white balance when needed.
-*/
+ */
 typedef NS_ENUM(NSInteger, AVCaptureWhiteBalanceMode) {
-	AVCaptureWhiteBalanceModeLocked				        = 0,
-	AVCaptureWhiteBalanceModeAutoWhiteBalance	        = 1,
+    AVCaptureWhiteBalanceModeLocked                     = 0,
+    AVCaptureWhiteBalanceModeAutoWhiteBalance           = 1,
     AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance = 2,
-} NS_AVAILABLE(10_7, 4_0);
+} NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
+
 
 /*!
- @typedef	AVCaptureWhiteBalanceGains
- @abstract	Structure containing RGB white balance gain values.
-*/
+ @typedef AVCaptureWhiteBalanceGains
+ @abstract
+    Structure containing RGB white balance gain values.
+ */
 typedef struct {
     float redGain;
     float greenGain;
     float blueGain;
-} AVCaptureWhiteBalanceGains NS_AVAILABLE_IOS(8_0);
+} AVCaptureWhiteBalanceGains NS_AVAILABLE_IOS(8_0) __TVOS_PROHIBITED;
+
 
 /*!
- @typedef	AVCaptureWhiteBalanceChromaticityValues
- @abstract	Structure containing CIE 1931 xy chromaticity values
-*/
+ @typedef AVCaptureWhiteBalanceChromaticityValues
+ @abstract
+    Structure containing CIE 1931 xy chromaticity values.
+ */
 typedef struct {
     float x;
     float y;
-} AVCaptureWhiteBalanceChromaticityValues NS_AVAILABLE_IOS(8_0);
+} AVCaptureWhiteBalanceChromaticityValues NS_AVAILABLE_IOS(8_0) __TVOS_PROHIBITED;
+
 
 /*!
- @typedef	AVCaptureWhiteBalanceTemperatureAndTintValues
- @abstract	Structure containing a white balance color correlated temperature in kelvin, plus a tint value in the range of [-150 - +150].
-*/
+ @typedef AVCaptureWhiteBalanceTemperatureAndTintValues
+ @abstract
+    Structure containing a white balance color correlated temperature in kelvin, plus a tint value in the range of [-150, +150].
+ */
 typedef struct {
-	float temperature;
-	float tint;
-} AVCaptureWhiteBalanceTemperatureAndTintValues NS_AVAILABLE_IOS(8_0);
+    float temperature;
+    float tint;
+} AVCaptureWhiteBalanceTemperatureAndTintValues NS_AVAILABLE_IOS(8_0) __TVOS_PROHIBITED;
+
 
 @interface AVCaptureDevice (AVCaptureDeviceWhiteBalance)
 
@@ -1230,42 +1144,45 @@
  @method isWhiteBalanceModeSupported:
  @abstract
     Returns whether the receiver supports the given white balance mode.
-
+ 
  @param whiteBalanceMode
     An AVCaptureWhiteBalanceMode to be checked.
  @result
     YES if the receiver supports the given white balance mode, NO otherwise.
-
+ 
  @discussion
     The receiver's whiteBalanceMode property can only be set to a certain mode if this method returns YES for that mode.
-*/
+ */
 - (BOOL)isWhiteBalanceModeSupported:(AVCaptureWhiteBalanceMode)whiteBalanceMode;
 
 /*!
+ @property lockingWhiteBalanceWithCustomDeviceGainsSupported
+ @abstract
+    Indicates whether the receiver supports white balance gains other than AVCaptureWhiteBalanceGainsCurrent.
+ 
+ @discussion
+    If lockingWhiteBalanceWithCustomDeviceGainsSupported returns NO, setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains: may only be called with AVCaptureWhiteBalanceGainsCurrent. Passing any other white balance gains will result in an exception.
+ */
+@property(nonatomic, readonly, getter=isLockingWhiteBalanceWithCustomDeviceGainsSupported) BOOL lockingWhiteBalanceWithCustomDeviceGainsSupported NS_AVAILABLE_IOS(10_0);
+
+/*!
  @property whiteBalanceMode
  @abstract
     Indicates current white balance mode of the receiver, if it has adjustable white balance.
-
+ 
  @discussion
-    The value of this property is an AVCaptureWhiteBalanceMode that determines the receiver's white balance mode, if it
-    has adjustable white balance. -setWhiteBalanceMode: throws an NSInvalidArgumentException if set to an unsupported value 
-    (see -isWhiteBalanceModeSupported:).  -setWhiteBalanceMode: throws an NSGenericException if called without first obtaining 
-    exclusive access to the receiver using lockForConfiguration:.  Clients can observe automatic changes to the receiver's 
-    whiteBalanceMode by key value observing this property.
-*/
+    The value of this property is an AVCaptureWhiteBalanceMode that determines the receiver's white balance mode, if it has adjustable white balance. -setWhiteBalanceMode: throws an NSInvalidArgumentException if set to an unsupported value (see -isWhiteBalanceModeSupported:). -setWhiteBalanceMode: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's whiteBalanceMode by key value observing this property.
+ */
 @property(nonatomic) AVCaptureWhiteBalanceMode whiteBalanceMode;
 
 /*!
  @property adjustingWhiteBalance
  @abstract
     Indicates whether the receiver is currently adjusting camera white balance.
-
+ 
  @discussion
-    The value of this property is a BOOL indicating whether the receiver's camera white balance is being
-    automatically adjusted because its white balance mode is AVCaptureWhiteBalanceModeAutoWhiteBalance or
-    AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance. Clients can observe the value of this property to determine
-    whether the camera white balance is stable or is being automatically adjusted.
-*/
+    The value of this property is a BOOL indicating whether the receiver's camera white balance is being automatically adjusted because its white balance mode is AVCaptureWhiteBalanceModeAutoWhiteBalance or AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance. Clients can observe the value of this property to determine whether the camera white balance is stable or is being automatically adjusted.
+ */
 @property(nonatomic, readonly, getter=isAdjustingWhiteBalance) BOOL adjustingWhiteBalance;
 
 /*!
@@ -1274,14 +1191,8 @@
     Indicates the current device-specific RGB white balance gain values in use.
  
  @discussion
-    This property specifies the current red, green, and blue gain values used for white balance.  The values
-    can be used to adjust color casts for a given scene.
- 
-    For each channel, only values between 1.0 and -maxWhiteBalanceGain are supported.
- 
-    This property is key-value observable. It can be read at any time, regardless of white balance mode, but can only be
-    set via setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler:.
-*/
+    This property specifies the current red, green, and blue gain values used for white balance. The values can be used to adjust color casts for a given scene. For each channel, only values between 1.0 and -maxWhiteBalanceGain are supported. This property is key-value observable. It can be read at any time, regardless of white balance mode, but can only be set via setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler:.
+ */
 @property(nonatomic, readonly) AVCaptureWhiteBalanceGains deviceWhiteBalanceGains NS_AVAILABLE_IOS(8_0);
 
 /*!
@@ -1290,17 +1201,8 @@
     Indicates the current device-specific Gray World RGB white balance gain values in use.
  
  @discussion
-    This property specifies the current red, green, and blue gain values derived from the current scene to deliver
-    a neutral (or "Gray World") white point for white balance.
- 
-    Gray World values assume a neutral subject (e.g. a gray card) has been placed in the middle of the subject area and
-    fills the center 50% of the frame.  Clients can read these values and apply them to the device using
-    setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler:.
- 
-    For each channel, only values between 1.0 and -maxWhiteBalanceGain are supported.
- 
-    This property is key-value observable. It can be read at any time, regardless of white balance mode.
-*/
+    This property specifies the current red, green, and blue gain values derived from the current scene to deliver a neutral (or "Gray World") white point for white balance. Gray World values assume a neutral subject (e.g. a gray card) has been placed in the middle of the subject area and fills the center 50% of the frame. Clients can read these values and apply them to the device using setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler:. For each channel, only values between 1.0 and -maxWhiteBalanceGain are supported. This property is key-value observable. It can be read at any time, regardless of white balance mode.
+ */
 @property(nonatomic, readonly) AVCaptureWhiteBalanceGains grayWorldDeviceWhiteBalanceGains NS_AVAILABLE_IOS(8_0);
 
 /*!
@@ -1310,15 +1212,13 @@
  
  @discussion
     This property does not change for the life of the receiver.
-*/
+ */
 @property(nonatomic, readonly) float maxWhiteBalanceGain NS_AVAILABLE_IOS(8_0);
 
 /*!
  @constant AVCaptureWhiteBalanceGainsCurrent
-    A special value that may be passed as a parameter of setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler: to
-    indicate that the caller does not wish to specify a value for deviceWhiteBalanceGains, and that gains should instead be
-    locked at their value at the moment that white balance is locked.
-*/
+    A special value that may be passed as a parameter of setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler: to indicate that the caller does not wish to specify a value for deviceWhiteBalanceGains, and that gains should instead be locked at their value at the moment that white balance is locked.
+ */
 AVF_EXPORT const AVCaptureWhiteBalanceGains AVCaptureWhiteBalanceGainsCurrent NS_AVAILABLE_IOS(8_0);
 
 /*!
@@ -1327,24 +1227,14 @@
     Sets white balance to locked mode with explicit deviceWhiteBalanceGains values.
  
  @param whiteBalanceGains
-    The white balance gain values, as described in the documentation for the deviceWhiteBalanceGains property. A value of
-    AVCaptureWhiteBalanceGainsCurrent can be used to indicate that the caller does not wish to specify a value for deviceWhiteBalanceGains.
+    The white balance gain values, as described in the documentation for the deviceWhiteBalanceGains property. A value of AVCaptureWhiteBalanceGainsCurrent can be used to indicate that the caller does not wish to specify a value for deviceWhiteBalanceGains.
  @param handler
-    A block to be called when white balance gains have been set to the values specified and whiteBalanceMode is set to
-    AVCaptureWhiteBalanceModeLocked. If setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler: is called multiple times, 
-    the completion handlers will be called in FIFO order. The block receives a timestamp which matches that of the first buffer to which 
-    all settings have been applied. Note that the timestamp is synchronized to the device clock, and thus must be converted to the master 
-    clock prior to comparison  with the timestamps of buffers delivered via an AVCaptureVideoDataOutput. This parameter may be nil if 
-    synchronization is not required.
+    A block to be called when white balance gains have been set to the values specified and whiteBalanceMode is set to AVCaptureWhiteBalanceModeLocked. If setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler: is called multiple times, the completion handlers will be called in FIFO order. The block receives a timestamp which matches that of the first buffer to which all settings have been applied. Note that the timestamp is synchronized to the device clock, and thus must be converted to the master clock prior to comparison with the timestamps of buffers delivered via an AVCaptureVideoDataOutput. This parameter may be nil if synchronization is not required.
  
  @discussion
-    For each channel in the whiteBalanceGains struct, only values between 1.0 and -maxWhiteBalanceGain are supported.
-    Gain values are normalized to the minimum channel value to avoid brightness changes (e.g. R:2 G:2 B:4 will be
-	normalized to R:1 G:1 B:2).
-    This method throws an NSRangeException if any of the whiteBalanceGains are set to an unsupported level.
-    This method throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
-*/
-- (void)setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:(AVCaptureWhiteBalanceGains)whiteBalanceGains completionHandler:(void (^)(CMTime syncTime))handler NS_AVAILABLE_IOS(8_0);
+    For each channel in the whiteBalanceGains struct, only values between 1.0 and -maxWhiteBalanceGain are supported. Gain values are normalized to the minimum channel value to avoid brightness changes (e.g. R:2 G:2 B:4 will be normalized to R:1 G:1 B:2). This method throws an NSRangeException if any of the whiteBalanceGains are set to an unsupported level. This method throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
+ */
+- (void)setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:(AVCaptureWhiteBalanceGains)whiteBalanceGains completionHandler:(nullable void (^)(CMTime syncTime))handler NS_AVAILABLE_IOS(8_0);
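
A sketch of locking white balance at the scene's Gray World estimate, clamping each channel into the supported [1.0, maxWhiteBalanceGain] range as the header requires:

```objc
AVCaptureWhiteBalanceGains gains = device.grayWorldDeviceWhiteBalanceGains;
float maxGain = device.maxWhiteBalanceGain;
gains.redGain   = MAX(1.0f, MIN(gains.redGain,   maxGain));
gains.greenGain = MAX(1.0f, MIN(gains.greenGain, maxGain));
gains.blueGain  = MAX(1.0f, MIN(gains.blueGain,  maxGain));

NSError *error = nil;
if ([device lockForConfiguration:&error]) {
    [device setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:gains
                                               completionHandler:nil];
    [device unlockForConfiguration];
}
```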
 
 /*!
  @method chromaticityValuesForDeviceWhiteBalanceGains:
@@ -1352,18 +1242,13 @@
     Converts device-specific white balance RGB gain values to device-independent chromaticity values.
  
  @param whiteBalanceGains
-    White balance gain values, as described in the documentation for the deviceWhiteBalanceGains property.
-    A value of AVCaptureWhiteBalanceGainsCurrent may not be used in this function.
- @return
+    White balance gain values, as described in the documentation for the deviceWhiteBalanceGains property. A value of AVCaptureWhiteBalanceGainsCurrent may not be used in this function.
+ @result
     A fully populated AVCaptureWhiteBalanceChromaticityValues structure containing device-independent values.
  
  @discussion
-    This method may be called on the receiver to convert device-specific white balance RGB gain values to
-    device-independent chromaticity (little x, little y) values.
- 
-    For each channel in the whiteBalanceGains struct, only values between 1.0 and -maxWhiteBalanceGain are supported.
-    This method throws an NSRangeException if any of the whiteBalanceGains are set to unsupported values.
-*/
+    This method may be called on the receiver to convert device-specific white balance RGB gain values to device-independent chromaticity (little x, little y) values. For each channel in the whiteBalanceGains struct, only values between 1.0 and -maxWhiteBalanceGain are supported. This method throws an NSRangeException if any of the whiteBalanceGains are set to unsupported values.
+ */
 - (AVCaptureWhiteBalanceChromaticityValues)chromaticityValuesForDeviceWhiteBalanceGains:(AVCaptureWhiteBalanceGains)whiteBalanceGains NS_AVAILABLE_IOS(8_0);
 
 /*!
@@ -1373,19 +1258,12 @@
  
  @param chromaticityValues
     Little x, little y chromaticity values as described in the documentation for AVCaptureWhiteBalanceChromaticityValues.
- 
- @return
+ @result
     A fully populated AVCaptureWhiteBalanceGains structure containing device-specific RGB gain values.
  
  @discussion
-    This method may be called on the receiver to convert device-independent chromaticity values to device-specific RGB white
-    balance gain values.
- 
-    This method throws an NSRangeException if any of the chromaticityValues are set outside the range [0,1].
-	Note that some x,y combinations yield out-of-range device RGB values that will cause an exception to be thrown
-    if passed directly to -setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler:.  Be sure to check that 
-    red, green, and blue gain values are within the range of [1.0 - maxWhiteBalanceGain].
-*/
+    This method may be called on the receiver to convert device-independent chromaticity values to device-specific RGB white balance gain values. This method throws an NSRangeException if any of the chromaticityValues are set outside the range [0,1]. Note that some x,y combinations yield out-of-range device RGB values that will cause an exception to be thrown if passed directly to -setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler:. Be sure to check that red, green, and blue gain values are within the range of [1.0, maxWhiteBalanceGain].
+ */
 - (AVCaptureWhiteBalanceGains)deviceWhiteBalanceGainsForChromaticityValues:(AVCaptureWhiteBalanceChromaticityValues)chromaticityValues NS_AVAILABLE_IOS(8_0);
 
 /*!
@@ -1394,18 +1272,13 @@
     Converts device-specific white balance RGB gain values to device-independent temperature and tint values.
  
  @param whiteBalanceGains
-    White balance gain values, as described in the documentation for the deviceWhiteBalanceGains property.
-    A value of AVCaptureWhiteBalanceGainsCurrent may not be used in this function.
- @return
+    White balance gain values, as described in the documentation for the deviceWhiteBalanceGains property. A value of AVCaptureWhiteBalanceGainsCurrent may not be used in this function.
+ @result
     A fully populated AVCaptureWhiteBalanceTemperatureAndTintValues structure containing device-independent values.
  
  @discussion
-    This method may be called on the receiver to convert device-specific white balance RGB gain values to
-    device-independent temperature (in kelvin) and tint values.
- 
-    For each channel in the whiteBalanceGains struct, only values between 1.0 and -maxWhiteBalanceGain are supported.
-    This method throws an NSRangeException if any of the whiteBalanceGains are set to unsupported values.
-*/
+    This method may be called on the receiver to convert device-specific white balance RGB gain values to device-independent temperature (in kelvin) and tint values. For each channel in the whiteBalanceGains struct, only values between 1.0 and -maxWhiteBalanceGain are supported. This method throws an NSRangeException if any of the whiteBalanceGains are set to unsupported values.
+ */
 - (AVCaptureWhiteBalanceTemperatureAndTintValues)temperatureAndTintValuesForDeviceWhiteBalanceGains:(AVCaptureWhiteBalanceGains)whiteBalanceGains NS_AVAILABLE_IOS(8_0);
 
 /*!
@@ -1415,44 +1288,32 @@
  
  @param tempAndTintValues
     Temperature and tint values as described in the documentation for AVCaptureWhiteBalanceTemperatureAndTintValues.
- 
- @return
+ @result
     A fully populated AVCaptureWhiteBalanceGains structure containing device-specific RGB gain values.
  
  @discussion
-    This method may be called on the receiver to convert device-independent temperature and tint values to device-specific RGB white
-    balance gain values.
- 
-    You may pass any temperature and tint values and corresponding white balance gains will be produced. Note though that
-    some temperature and tint combinations yield out-of-range device RGB values that will cause an exception to be thrown
-    if passed directly to -setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler:.  Be sure to check that 
-    red, green, and blue gain values are within the range of [1.0 - maxWhiteBalanceGain].
-*/
+    This method may be called on the receiver to convert device-independent temperature and tint values to device-specific RGB white balance gain values. You may pass any temperature and tint values and corresponding white balance gains will be produced. Note though that some temperature and tint combinations yield out-of-range device RGB values that will cause an exception to be thrown if passed directly to -setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:completionHandler:. Be sure to check that red, green, and blue gain values are within the range of [1.0, maxWhiteBalanceGain].
+ */
 - (AVCaptureWhiteBalanceGains)deviceWhiteBalanceGainsForTemperatureAndTintValues:(AVCaptureWhiteBalanceTemperatureAndTintValues)tempAndTintValues NS_AVAILABLE_IOS(8_0);
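
The conversion methods make a temperature/tint UI straightforward. A sketch targeting 5000 K with neutral tint, clamping before applying since some combinations yield out-of-range gains:

```objc
AVCaptureWhiteBalanceTemperatureAndTintValues tempAndTint = {
    .temperature = 5000.0f, // kelvin; arbitrary target
    .tint        = 0.0f,
};
AVCaptureWhiteBalanceGains gains =
    [device deviceWhiteBalanceGainsForTemperatureAndTintValues:tempAndTint];
float maxGain = device.maxWhiteBalanceGain;
gains.redGain   = MAX(1.0f, MIN(gains.redGain,   maxGain));
gains.greenGain = MAX(1.0f, MIN(gains.greenGain, maxGain));
gains.blueGain  = MAX(1.0f, MIN(gains.blueGain,  maxGain));

NSError *error = nil;
if ([device lockForConfiguration:&error]) {
    [device setWhiteBalanceModeLockedWithDeviceWhiteBalanceGains:gains
                                               completionHandler:nil];
    [device unlockForConfiguration];
}
```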
 
 @end
 
+
 @interface AVCaptureDevice (AVCaptureDeviceSubjectAreaChangeMonitoring)
 
 /*!
  @property subjectAreaChangeMonitoringEnabled
  @abstract
-	Indicates whether the receiver should monitor the subject area for changes.
+    Indicates whether the receiver should monitor the subject area for changes.
  
  @discussion
-	The value of this property is a BOOL indicating whether the receiver should
-	monitor the video subject area for changes, such as lighting changes, substantial
-	movement, etc.  If subject area change monitoring is enabled, the receiver
-	sends an AVCaptureDeviceSubjectAreaDidChangeNotification whenever it detects
-	a change to the subject area, at which time an interested client may wish
-	to re-focus, adjust exposure, white balance, etc.  The receiver must be locked 
-	for configuration using lockForConfiguration: before clients can set
-	the value of this property.
-*/
+    The value of this property is a BOOL indicating whether the receiver should monitor the video subject area for changes, such as lighting changes, substantial movement, etc. If subject area change monitoring is enabled, the receiver sends an AVCaptureDeviceSubjectAreaDidChangeNotification whenever it detects a change to the subject area, at which time an interested client may wish to re-focus, adjust exposure, white balance, etc. The receiver must be locked for configuration using lockForConfiguration: before clients can set the value of this property.
+ */
 @property(nonatomic, getter=isSubjectAreaChangeMonitoringEnabled) BOOL subjectAreaChangeMonitoringEnabled NS_AVAILABLE_IOS(5_0);
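
A sketch of enabling monitoring and reacting to the notification; re-centering a continuous focus/exposure pass is one typical response:

```objc
NSError *error = nil;
if ([device lockForConfiguration:&error]) {
    device.subjectAreaChangeMonitoringEnabled = YES;
    [device unlockForConfiguration];
}

[[NSNotificationCenter defaultCenter]
    addObserverForName:AVCaptureDeviceSubjectAreaDidChangeNotification
                object:device
                 queue:[NSOperationQueue mainQueue]
            usingBlock:^(NSNotification *note) {
        // e.g. reset focusPointOfInterest/exposurePointOfInterest to (0.5, 0.5)
        // and re-apply a continuous auto focus/exposure mode here.
    }];
```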
 
 @end
 
+
 @interface AVCaptureDevice (AVCaptureDeviceLowLightBoost)
 
 /*!
@@ -1462,7 +1323,7 @@
  
  @discussion
     The receiver's automaticallyEnablesLowLightBoostWhenAvailable property can only be set if this property returns YES.
-*/
+ */
 @property(nonatomic, readonly, getter=isLowLightBoostSupported) BOOL lowLightBoostSupported NS_AVAILABLE_IOS(6_0);
 
 /*!
@@ -1471,11 +1332,8 @@
     Indicates whether the receiver's low light boost feature is enabled.
  
  @discussion
-    The value of this property is a BOOL indicating whether the receiver is currently enhancing
-    images to improve quality due to low light conditions. When -isLowLightBoostEnabled returns
-    YES, the receiver has switched into a special mode in which more light can be perceived in images.
-    This property is key-value observable.
-*/
+    The value of this property is a BOOL indicating whether the receiver is currently enhancing images to improve quality due to low light conditions. When -isLowLightBoostEnabled returns YES, the receiver has switched into a special mode in which more light can be perceived in images. This property is key-value observable.
+ */
 @property(nonatomic, readonly, getter=isLowLightBoostEnabled) BOOL lowLightBoostEnabled NS_AVAILABLE_IOS(6_0);
 
 /*!
@@ -1484,95 +1342,80 @@
     Indicates whether the receiver should automatically switch to low light boost mode when necessary.
  
  @discussion
-    On a receiver where -isLowLightBoostSupported returns YES, a special low light boost mode may be
-    engaged to improve image quality. When the automaticallyEnablesLowLightBoostWhenAvailable
-    property is set to YES, the receiver switches at its discretion to a special boost mode under
-    low light, and back to normal operation when the scene becomes sufficiently lit.  An AVCaptureDevice that
-    supports this feature may only engage boost mode for certain source formats or resolutions.
-    Clients may observe changes to the lowLightBoostEnabled property to know when the mode has engaged.
-    The switch between normal operation and low light boost mode may drop one or more video frames.
-    The default value is NO. Setting this property throws an NSInvalidArgumentException if -isLowLightBoostSupported
-    returns NO. The receiver must be locked for configuration using lockForConfiguration: before clients
-    can set this method, otherwise an NSGenericException is thrown.
-*/
+    On a receiver where -isLowLightBoostSupported returns YES, a special low light boost mode may be engaged to improve image quality. When the automaticallyEnablesLowLightBoostWhenAvailable property is set to YES, the receiver switches at its discretion to a special boost mode under low light, and back to normal operation when the scene becomes sufficiently lit. An AVCaptureDevice that supports this feature may only engage boost mode for certain source formats or resolutions. Clients may observe changes to the lowLightBoostEnabled property to know when the mode has engaged. The switch between normal operation and low light boost mode may drop one or more video frames. The default value is NO. Setting this property throws an NSInvalidArgumentException if -isLowLightBoostSupported returns NO. The receiver must be locked for configuration using lockForConfiguration: before clients can set this method, otherwise an NSGenericException is thrown.
+ */
 @property(nonatomic) BOOL automaticallyEnablesLowLightBoostWhenAvailable NS_AVAILABLE_IOS(6_0);
 
 @end
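
// Editor's example (not part of the SDK diff): opting into automatic low light
// boost only where supported, since setting the property on unsupported
// devices throws. A minimal sketch; KVO setup for lowLightBoostEnabled is
// noted but omitted.
#import <AVFoundation/AVFoundation.h>

static void EnableLowLightBoost(AVCaptureDevice *device)
{
    if (!device.lowLightBoostSupported)
        return; // NSInvalidArgumentException otherwise
    NSError *error = nil;
    if ([device lockForConfiguration:&error]) {
        device.automaticallyEnablesLowLightBoostWhenAvailable = YES;
        [device unlockForConfiguration];
    }
    // lowLightBoostEnabled is key-value observable; watch it to learn when
    // the device actually switches into boost mode.
}
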
 
+
 @interface AVCaptureDevice (AVCaptureDeviceVideoZoom)
 
 /*!
  @property videoZoomFactor
  @abstract
- Controls zoom level of image outputs
+    Controls zoom level of image outputs
  
  @discussion
- Applies a centered crop for all image outputs, scaling as necessary to maintain output
- dimensions.  Minimum value of 1.0 yields full field of view, increasing values will increase
- magnification, up to a maximum value specified in the activeFormat's videoMaxZoomFactor property.
- Modifying the zoom factor will cancel any active rampToVideoZoomFactor:withRate:, and snap
- directly to the assigned value.  Assigning values outside the acceptable range will generate
- an NSRangeException.  Clients can key value observe the value of this property.
+    Applies a centered crop for all image outputs, scaling as necessary to maintain output dimensions. Minimum value of 1.0 yields full field of view, increasing values will increase magnification, up to a maximum value specified in the activeFormat's videoMaxZoomFactor property. Modifying the zoom factor will cancel any active rampToVideoZoomFactor:withRate:, and snap directly to the assigned value. Assigning values outside the acceptable range will generate an NSRangeException. Clients can key value observe the value of this property.
  
- -setVideoZoomFactor: throws an NSGenericException if called without first obtaining exclusive
- access to the receiver using lockForConfiguration:.
+    -setVideoZoomFactor: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
  
- @seealso AVCaptureDeviceFormat AVCaptureDeviceFormat - videoMaxZoomFactor and videoZoomFactorUpscaleThreshold
+ @seealso -[AVCaptureDeviceFormat videoMaxZoomFactor] and -[AVCaptureDeviceFormat videoZoomFactorUpscaleThreshold]
  */
 @property(nonatomic) CGFloat videoZoomFactor NS_AVAILABLE_IOS(7_0);
 
 /*!
  @method rampToVideoZoomFactor:withRate:
  @abstract
- Provides smooth changes in zoom factor.
+    Provides smooth changes in zoom factor.
  
  @discussion
- This method provides a change in zoom by compounding magnification at the specified
- rate over time.  Although the zoom factor will grow exponentially, this yields a
- visually linear zoom in the image over time.
- 
- The zoom transition will stop at the specified factor, which must be in the valid range for
- videoZoomFactor.  Assignments to videoZoomFactor while a ramp is in progress will cancel the
- ramp and snap to the assigned value.
- 
- The zoom factor is continuously scaled by pow(2,rate * time).  A rate of 0 causes no
- change in zoom factor, equivalent to calling cancelVideoZoomRamp.  A rate of 1 will
- cause the magnification to double every second (or halve every second if zooming out),
- and similarly larger or smaller values will zoom faster or slower respectively.  Only
- the absolute value of the rate is significant--sign is corrected for the direction
- of the target.  Changes in rate will be smoothed by an internal acceleration limit.
+    This method provides a change in zoom by compounding magnification at the specified rate over time. Although the zoom factor will grow exponentially, this yields a visually linear zoom in the image over time.
+ 
+    The zoom transition will stop at the specified factor, which must be in the valid range for videoZoomFactor. Assignments to videoZoomFactor while a ramp is in progress will cancel the ramp and snap to the assigned value.
  
- -rampToVideoZoomFactor:withRate: throws an NSGenericException if called without first
- obtaining exclusive access to the receiver using lockForConfiguration:.
+    The zoom factor is continuously scaled by pow(2,rate * time). A rate of 0 causes no change in zoom factor, equivalent to calling cancelVideoZoomRamp. A rate of 1 will cause the magnification to double every second (or halve every second if zooming out), and similarly larger or smaller values will zoom faster or slower respectively. Only the absolute value of the rate is significant--sign is corrected for the direction of the target. Changes in rate will be smoothed by an internal acceleration limit.
+ 
+    -rampToVideoZoomFactor:withRate: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
  */
 - (void)rampToVideoZoomFactor:(CGFloat)factor withRate:(float)rate NS_AVAILABLE_IOS(7_0);
 
 /*!
  @property rampingVideoZoom
  @abstract
- Indicates if the zoom factor is transitioning to a value set by rampToVideoZoomFactor:withRate:
+    Indicates if the zoom factor is transitioning to a value set by rampToVideoZoomFactor:withRate:
  
  @discussion
- Clients can observe this value to determine when a ramp begins or completes.
+    Clients can observe this value to determine when a ramp begins or completes.
  */
-@property(nonatomic,readonly,getter=isRampingVideoZoom) BOOL rampingVideoZoom NS_AVAILABLE_IOS(7_0);
+@property(nonatomic, readonly, getter=isRampingVideoZoom) BOOL rampingVideoZoom NS_AVAILABLE_IOS(7_0);
 
 /*!
  @method cancelVideoZoomRamp
  @abstract
- Eases out of any video zoom transitions initiated by rampToVideoZoomFactor:withRate:
+    Eases out of any video zoom transitions initiated by rampToVideoZoomFactor:withRate:
  
  @discussion
- This method is equivalent to calling rampToVideoZoomFactor:withRate: using the current zoom factor
- target and a rate of 0.  This allows a smooth stop to any changes in zoom which were in progress.
+    This method is equivalent to calling rampToVideoZoomFactor:withRate: using the current zoom factor target and a rate of 0. This allows a smooth stop to any changes in zoom which were in progress.
  
- -cancelVideoZoomRamp: throws an NSGenericException if called without first
- obtaining exclusive access to the receiver using lockForConfiguration:.
+    -cancelVideoZoomRamp: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:.
  */
 - (void)cancelVideoZoomRamp NS_AVAILABLE_IOS(7_0);
 
+/*!
+ @property dualCameraSwitchOverVideoZoomFactor
+ @abstract
+    The video zoom factor at or above which a DualCamera can select between its wide angle camera and its telephoto camera.
+ 
+ @discussion
+    This is the zoom factor at which the wide angle camera's field of view matches the telephoto camera's full field of view. On non-DualCamera devices this will return 1.0.
+ */
+@property(atomic, readonly) CGFloat dualCameraSwitchOverVideoZoomFactor NS_AVAILABLE_IOS(11_0);
+
 @end
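
// Editor's example (not part of the SDK diff): ramping zoom at a visually
// linear rate, clamping the target to the active format's limit. A minimal
// sketch; the 2x target and rate of 1 (doubling per second) are arbitrary.
#import <AVFoundation/AVFoundation.h>

static void RampZoom(AVCaptureDevice *device)
{
    CGFloat target = MIN((CGFloat)2.0, device.activeFormat.videoMaxZoomFactor);
    NSError *error = nil;
    if ([device lockForConfiguration:&error]) {
        [device rampToVideoZoomFactor:target withRate:1.0f];
        [device unlockForConfiguration];
    }
    // Observe isRampingVideoZoom to know when the ramp completes, or call
    // -cancelVideoZoomRamp to ease out early.
}
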
-	
+
+
 /*!
  @enum AVAuthorizationStatus
  @abstract
@@ -1581,20 +1424,20 @@
  @constant AVAuthorizationStatusNotDetermined
     Indicates that the user has not yet made a choice regarding whether the client can access the hardware.
  @constant AVAuthorizationStatusRestricted
-    The client is not authorized to access the hardware for the media type. The user cannot change
-    the client's status, possibly due to active restrictions such as parental controls being in place.
+    The client is not authorized to access the hardware for the media type. The user cannot change the client's status, possibly due to active restrictions such as parental controls being in place.
  @constant AVAuthorizationStatusDenied
     The user explicitly denied access to the hardware supporting a media type for the client.
  @constant AVAuthorizationStatusAuthorized
     The client is authorized to access the hardware supporting a media type.
  */
 typedef NS_ENUM(NSInteger, AVAuthorizationStatus) {
-	AVAuthorizationStatusNotDetermined = 0,
-	AVAuthorizationStatusRestricted,
-	AVAuthorizationStatusDenied,
-	AVAuthorizationStatusAuthorized
-} NS_AVAILABLE_IOS(7_0);
-		
+    AVAuthorizationStatusNotDetermined = 0,
+    AVAuthorizationStatusRestricted    = 1,
+    AVAuthorizationStatusDenied        = 2,
+    AVAuthorizationStatusAuthorized    = 3,
+} NS_AVAILABLE_IOS(7_0) __TVOS_PROHIBITED;
+
+
 @interface AVCaptureDevice (AVCaptureDeviceAuthorization)
 
 /*!
@@ -1604,17 +1447,13 @@
  
  @param mediaType
     The media type, either AVMediaTypeVideo or AVMediaTypeAudio
- 
  @result
     The authorization status of the client
  
  @discussion
-    This method returns the AVAuthorizationStatus of the client for accessing the underlying hardware supporting
-    the media type.  Media type constants are defined in AVMediaFormat.h.  If any media type other than AVMediaTypeVideo or
-    AVMediaTypeAudio is supplied, an NSInvalidArgumentException will be thrown.  If the status is AVAuthorizationStatusNotDetermined,
-    you may use the +requestAccessForMediaType:completionHandler: method to request access by prompting the user.
+    This method returns the AVAuthorizationStatus of the client for accessing the underlying hardware supporting the media type. Media type constants are defined in AVMediaFormat.h. If any media type other than AVMediaTypeVideo or AVMediaTypeAudio is supplied, an NSInvalidArgumentException will be thrown. If the status is AVAuthorizationStatusNotDetermined, you may use the +requestAccessForMediaType:completionHandler: method to request access by prompting the user.
  */
-+ (AVAuthorizationStatus)authorizationStatusForMediaType:(NSString *)mediaType NS_AVAILABLE_IOS(7_0);
++ (AVAuthorizationStatus)authorizationStatusForMediaType:(AVMediaType)mediaType NS_AVAILABLE_IOS(7_0);
 
 /*!
  @method requestAccessForMediaType:completionHandler:
@@ -1627,43 +1466,44 @@
     A block called with the result of requesting access
  
  @discussion
-    Use this function to request access to the hardware for a given media type.   Media type constants are defined in AVMediaFormat.h.
-    If any media type other than AVMediaTypeVideo or AVMediaTypeAudio is supplied, an NSInvalidArgumentException will be thrown.
+    Use this function to request access to the hardware for a given media type. Media type constants are defined in AVMediaFormat.h. If any media type other than AVMediaTypeVideo or AVMediaTypeAudio is supplied, an NSInvalidArgumentException will be thrown.
  
-    This call will not block while the user is being asked for access, allowing the client to continue running.  Until access has been granted,
-    any AVCaptureDevices for the media type will vend silent audio samples or black video frames.  The user is only asked for permission
-    the first time the client requests access.  Later calls use the permission granted by the user.
+    This call will not block while the user is being asked for access, allowing the client to continue running. Until access has been granted, any AVCaptureDevices for the media type will vend silent audio samples or black video frames. The user is only asked for permission the first time the client requests access. Later calls use the permission granted by the user.
  
-    Note that the authorization dialog will automatically be shown if the status is AVAuthorizationStatusNotDetermined when
-    creating an AVCaptureDeviceInput.
+    Note that the authorization dialog will automatically be shown if the status is AVAuthorizationStatusNotDetermined when creating an AVCaptureDeviceInput.
  
     Invoking this method with AVMediaTypeAudio is equivalent to calling -[AVAudioSession requestRecordPermission:].
 
-    The completion handler is called on an arbitrary dispatch queue.  Is it the client's responsibility to ensure that
-    any UIKit-related updates are called on the main queue or main thread as a result.
+    The completion handler is called on an arbitrary dispatch queue. It is the client's responsibility to ensure that any UIKit-related updates are called on the main queue or main thread as a result.
  */
-+ (void)requestAccessForMediaType:(NSString *)mediaType completionHandler:(void (^)(BOOL granted))handler NS_AVAILABLE_IOS(7_0);
++ (void)requestAccessForMediaType:(AVMediaType)mediaType completionHandler:(void (^)(BOOL granted))handler NS_AVAILABLE_IOS(7_0);
 
 @end
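
// Editor's example (not part of the SDK diff): checking authorization before
// prompting, and hopping back to the main queue for UI work since the handler
// runs on an arbitrary queue. A minimal sketch.
#import <AVFoundation/AVFoundation.h>

static void RequestCameraAccess(void)
{
    AVAuthorizationStatus status =
        [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (status == AVAuthorizationStatusNotDetermined) {
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                                 completionHandler:^(BOOL granted) {
            dispatch_async(dispatch_get_main_queue(), ^{
                // Update UI for the granted / denied outcome here.
            });
        }];
    }
}
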
 
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
 
-typedef float AVCaptureDeviceTransportControlsSpeed; 
+/*!
+ @typedef
+ @abstract
+    A constant that is used to specify the transport controls' speed.
+ */
+typedef float AVCaptureDeviceTransportControlsSpeed NS_AVAILABLE_MAC(10_7) __TVOS_PROHIBITED;
+
 
 /*!
  @enum AVCaptureDeviceTransportControlsPlaybackMode
  @abstract
     Constants indicating the transport controls' current mode of play back, if it has one.
-
+ 
  @constant AVCaptureDeviceTransportControlsNotPlayingMode
     Indicates that the tape transport is not threaded through the play head.
  @constant AVCaptureDeviceTransportControlsPlayingMode
     Indicates that the tape transport is threaded through the play head.
-*/
+ */
 typedef NS_ENUM(NSInteger, AVCaptureDeviceTransportControlsPlaybackMode) {
-	AVCaptureDeviceTransportControlsNotPlayingMode      = 0,
-	AVCaptureDeviceTransportControlsPlayingMode         = 1
-} NS_AVAILABLE(10_7, NA);
+    AVCaptureDeviceTransportControlsNotPlayingMode = 0,
+    AVCaptureDeviceTransportControlsPlayingMode    = 1,
+} NS_AVAILABLE_MAC(10_7) __TVOS_PROHIBITED;
+
 
 @interface AVCaptureDevice (AVCaptureDeviceTransportControls)
 
@@ -1671,63 +1511,54 @@
  @property transportControlsSupported
  @abstract
     Returns whether the receiver supports transport control commands.
-
+ 
  @discussion
-    For devices with transport controls, such as AVC tape-based camcorders or pro capture devices with
-    RS422 deck control, the value of this property is YES.  If transport controls are not supported,
-    none of the associated transport control methods and properties are available on the receiver.
-*/
-@property(nonatomic, readonly) BOOL transportControlsSupported NS_AVAILABLE(10_7, NA);
+    For devices with transport controls, such as AVC tape-based camcorders or pro capture devices with RS422 deck control, the value of this property is YES. If transport controls are not supported, none of the associated transport control methods and properties are available on the receiver.
+ */
+@property(nonatomic, readonly) BOOL transportControlsSupported NS_AVAILABLE_MAC(10_7);
 
 /*!
  @property transportControlsPlaybackMode
  @abstract
     Returns the receiver's current playback mode.
-
+ 
  @discussion
-    For devices that support transport control, this property may be queried to discover the 
-    current playback mode.
-*/
-@property(nonatomic, readonly) AVCaptureDeviceTransportControlsPlaybackMode transportControlsPlaybackMode NS_AVAILABLE(10_7, NA);
+    For devices that support transport control, this property may be queried to discover the current playback mode.
+ */
+@property(nonatomic, readonly) AVCaptureDeviceTransportControlsPlaybackMode transportControlsPlaybackMode NS_AVAILABLE_MAC(10_7);
 
 /*!
  @property transportControlsSpeed
  @abstract
     Returns the receiver's current playback speed as a floating point value.
-
+ 
  @discussion
-    For devices that support transport control, this property may be queried to discover the 
-    current playback speed of the deck.
+    For devices that support transport control, this property may be queried to discover the current playback speed of the deck.
     0.0 -> stopped.
     1.0 -> forward at normal speed.
     -1.0-> reverse at normal speed.
     2.0 -> forward at 2x normal speed.
     etc.
-*/
-@property(nonatomic, readonly) AVCaptureDeviceTransportControlsSpeed transportControlsSpeed NS_AVAILABLE(10_7, NA);
+ */
+@property(nonatomic, readonly) AVCaptureDeviceTransportControlsSpeed transportControlsSpeed NS_AVAILABLE_MAC(10_7);
 
 /*!
  @method setTransportControlsPlaybackMode:speed:
  @abstract
-    sets both the transport controls playback mode and speed in a single method.
-
+    Sets both the transport controls playback mode and speed in a single method.
+ 
  @param mode
-    A AVCaptureDeviceTransportControlsPlaybackMode indicating whether the deck should be put into
-    play mode.
-@param speed
+    An AVCaptureDeviceTransportControlsPlaybackMode indicating whether the deck should be put into play mode.
+ @param speed
     A AVCaptureDeviceTransportControlsSpeed indicating the speed at which to wind or play the tape.
-
+ 
  @discussion
-    A method for setting the receiver's transport controls playback mode and speed.  The receiver must 
-    be locked for configuration using lockForConfiguration: before clients can set this method, otherwise
-    an NSGenericException is thrown.
-*/
-- (void)setTransportControlsPlaybackMode:(AVCaptureDeviceTransportControlsPlaybackMode)mode speed:(AVCaptureDeviceTransportControlsSpeed)speed NS_AVAILABLE(10_7, NA);
+    A method for setting the receiver's transport controls playback mode and speed. The receiver must be locked for configuration using lockForConfiguration: before clients can set this method, otherwise an NSGenericException is thrown.
+ */
+- (void)setTransportControlsPlaybackMode:(AVCaptureDeviceTransportControlsPlaybackMode)mode speed:(AVCaptureDeviceTransportControlsSpeed)speed NS_AVAILABLE_MAC(10_7);
 
 @end
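
// Editor's example (not part of the SDK diff, macOS only): putting a deck into
// play mode at 2x shuttle speed when transport controls are available. A
// minimal sketch; the speed value is arbitrary.
#import <AVFoundation/AVFoundation.h>

static void ShuttleForward(AVCaptureDevice *device)
{
    if (!device.transportControlsSupported)
        return;
    NSError *error = nil;
    if ([device lockForConfiguration:&error]) {
        [device setTransportControlsPlaybackMode:AVCaptureDeviceTransportControlsPlayingMode
                                           speed:2.0f];
        [device unlockForConfiguration];
    }
}
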
 
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-
 
 @interface AVCaptureDevice (AVCaptureDeviceHighDynamicRangeSupport)
 
@@ -1737,15 +1568,8 @@
     Indicates whether the receiver is allowed to turn high dynamic range streaming on or off.
  
  @discussion
-    The value of this property is a BOOL indicating whether the receiver is free to turn
-    high dynamic range streaming on or off.  This property defaults to YES. By default, AVCaptureDevice
-    always turns off videoHDREnabled when a client uses the -setActiveFormat: API to set a new format.
-    When the client uses AVCaptureSession's setSessionPreset: API instead, AVCaptureDevice turns
-    videoHDR on automatically if it's a good fit for the preset.  -setAutomaticallyAdjustsVideoHDREnabled:
-    throws an NSGenericException if called without first obtaining exclusive access to the receiver using
-    -lockForConfiguration:.  Clients can key-value observe videoHDREnabled to know when the receiver has automatically
-    changed the value.
-*/
+    The value of this property is a BOOL indicating whether the receiver is free to turn high dynamic range streaming on or off. This property defaults to YES. By default, AVCaptureDevice always turns off videoHDREnabled when a client uses the -setActiveFormat: API to set a new format. When the client uses AVCaptureSession's setSessionPreset: API instead, AVCaptureDevice turns videoHDR on automatically if it's a good fit for the preset. -setAutomaticallyAdjustsVideoHDREnabled: throws an NSGenericException if called without first obtaining exclusive access to the receiver using -lockForConfiguration:. Clients can key-value observe videoHDREnabled to know when the receiver has automatically changed the value.
+ */
 @property(nonatomic) BOOL automaticallyAdjustsVideoHDREnabled NS_AVAILABLE_IOS(8_0);
 
 /*!
@@ -1754,89 +1578,191 @@
     Indicates whether the receiver's streaming high dynamic range feature is enabled.
  
  @discussion
-    The value of this property is a BOOL indicating whether the receiver is currently streaming
-    high dynamic range video buffers. The property may only be set if you first set 
-    automaticallyAdjustsVideoHDREnabled to NO, otherwise an NSGenericException is thrown.
-    videoHDREnabled may only be set to YES if the receiver's activeFormat.isVideoHDRSupported property
-    returns YES, otherwise an NSGenericException is thrown.  This property may be key-value observed.
- 
-    Note that setting this property may cause a lengthy reconfiguration of the receiver,
-    similar to setting a new active format or AVCaptureSession sessionPreset.  If you are setting either the
-    active format or the AVCaptureSession's sessionPreset AND this property, you should bracket these operations
-    with [session beginConfiguration] and [session commitConfiguration] to minimize reconfiguration time.
-*/
+    The value of this property is a BOOL indicating whether the receiver is currently streaming high dynamic range video buffers. The property may only be set if you first set automaticallyAdjustsVideoHDREnabled to NO, otherwise an NSGenericException is thrown. videoHDREnabled may only be set to YES if the receiver's activeFormat.isVideoHDRSupported property returns YES, otherwise an NSGenericException is thrown. This property may be key-value observed.
+ 
+    Note that setting this property may cause a lengthy reconfiguration of the receiver, similar to setting a new active format or AVCaptureSession sessionPreset. If you are setting either the active format or the AVCaptureSession's sessionPreset AND this property, you should bracket these operations with [session beginConfiguration] and [session commitConfiguration] to minimize reconfiguration time.
+ */
 @property(nonatomic, getter=isVideoHDREnabled) BOOL videoHDREnabled NS_AVAILABLE_IOS(8_0);
 
 @end
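
// Editor's example (not part of the SDK diff): taking manual control of video
// HDR, bracketed with begin/commitConfiguration as the discussion recommends
// when changing format/preset alongside this property. A minimal sketch
// assuming `session` already contains an input for `device`.
#import <AVFoundation/AVFoundation.h>

static void ForceVideoHDR(AVCaptureSession *session, AVCaptureDevice *device)
{
    if (!device.activeFormat.videoHDRSupported)
        return; // enabling HDR on an unsupported format throws
    [session beginConfiguration];
    NSError *error = nil;
    if ([device lockForConfiguration:&error]) {
        device.automaticallyAdjustsVideoHDREnabled = NO; // must be NO first
        device.videoHDREnabled = YES;
        [device unlockForConfiguration];
    }
    [session commitConfiguration];
}
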
 
 
+/*!
+ @enum AVCaptureColorSpace
+ @abstract
+    Constants indicating active or supported video color space.
+ 
+ @constant AVCaptureColorSpace_sRGB
+    The sRGB color space ( https://www.w3.org/Graphics/Color/srgb )
+ @constant AVCaptureColorSpace_P3_D65
+    The P3 D65 wide color space which uses Illuminant D65 as the white point.
+ */
+typedef NS_ENUM(NSInteger, AVCaptureColorSpace) {
+    AVCaptureColorSpace_sRGB   = 0,
+    AVCaptureColorSpace_P3_D65 = 1,
+} NS_AVAILABLE_IOS(10_0) __TVOS_PROHIBITED;
+
+
+@interface AVCaptureDevice (AVCaptureDeviceColorSpaceSupport)
+
+/*!
+ @property activeColorSpace
+ @abstract
+    Indicates the receiver's current active color space.
+ 
+ @discussion
+    By default, an AVCaptureDevice attached to an AVCaptureSession is automatically configured for wide color by the AVCaptureSession (see AVCaptureSession automaticallyConfiguresCaptureDeviceForWideColor). You may also set the activeColorSpace manually. To prevent the AVCaptureSession from undoing your work, remember to set AVCaptureSession's automaticallyConfiguresCaptureDeviceForWideColor property to NO. Changing the receiver's activeColorSpace while the session is running requires a disruptive reconfiguration of the capture render pipeline. Movie captures in progress will be ended immediately; unfulfilled photo requests will be aborted; video preview will temporarily freeze. -setActiveColorSpace: throws an NSGenericException if called without first obtaining exclusive access to the receiver using -lockForConfiguration:.
+ */
+@property(nonatomic) AVCaptureColorSpace activeColorSpace NS_AVAILABLE_IOS(10_0);
+
+@end
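
// Editor's example (not part of the SDK diff): opting out of automatic wide
// color management and selecting P3 D65 manually when the active format
// supports it. A minimal sketch.
#import <AVFoundation/AVFoundation.h>

static void UseP3D65(AVCaptureSession *session, AVCaptureDevice *device)
{
    // Prevent the session from undoing the manual color space choice.
    session.automaticallyConfiguresCaptureDeviceForWideColor = NO;
    NSArray<NSNumber *> *spaces = device.activeFormat.supportedColorSpaces;
    if (![spaces containsObject:@(AVCaptureColorSpace_P3_D65)])
        return;
    NSError *error = nil;
    if ([device lockForConfiguration:&error]) {
        device.activeColorSpace = AVCaptureColorSpace_P3_D65;
        [device unlockForConfiguration];
    }
}
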
+
+
+@interface AVCaptureDevice (AVCaptureDeviceDepthSupport)
+
+/*!
+ @property activeDepthDataFormat
+ @abstract
+    The currently active depth data format of the receiver.
+
+ @discussion
+    This property can be used to get or set the device's currently active depth data format. -setActiveDepthDataFormat: throws an NSInvalidArgumentException if set to a format not present in the activeFormat's -supportedDepthDataFormats array. -setActiveDepthDataFormat: throws an NSGenericException if called without first obtaining exclusive access to the receiver using lockForConfiguration:. Clients can observe automatic changes to the receiver's activeDepthDataFormat by key value observing this property. On devices where depth data is not supported, this property returns nil.
+ 
+    The frame rate of depth data may not be set directly. Depth data frame rate is synchronized to the device's activeMin/MaxFrameDurations. It may match the device's current frame rate, or lower, if depth data cannot be produced fast enough for the active video frame rate.
+ 
+    Delivery of depth data to a AVCaptureDepthDataOutput may increase the system load, resulting in a reduced video frame rate for thermal sustainability.
+ */
+@property(nonatomic, retain, nullable) AVCaptureDeviceFormat *activeDepthDataFormat NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property minAvailableVideoZoomFactor
+ @abstract
+    Indicates the minimum zoom factor available for the AVCaptureDevice's videoZoomFactor property.
+ 
+ @discussion
+    On non-dual camera devices the minAvailableVideoZoomFactor is always 1.0. On a dual camera device the minAvailableVideoZoomFactor can change when the device is delivering depth data to one or more outputs (see -[AVCaptureDeviceFormat videoMinZoomFactorForDepthDataDelivery]). If the device's videoZoomFactor property is assigned a value smaller than 1.0, an NSRangeException is thrown. Setting the videoZoomFactor to a value greater than or equal to 1.0, but lower than minAvailableVideoZoomFactor results in the value being clamped to the minAvailableVideoZoomFactor. Clients can key value observe the value of this property.
+ */
+@property(nonatomic, readonly) CGFloat minAvailableVideoZoomFactor NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property maxAvailableVideoZoomFactor
+ @abstract
+    Indicates the maximum zoom factor available for the AVCaptureDevice's videoZoomFactor property.
+ 
+ @discussion
+    On non-dual camera devices the maxAvailableVideoZoomFactor is always equal to the activeFormat.videoMaxZoomFactor. On a dual camera device the maxAvailableVideoZoomFactor can change when the device is delivering depth data to one or more outputs (see -[AVCaptureDeviceFormat videoMaxZoomFactorForDepthDataDelivery]). If the device's videoZoomFactor property is assigned a value greater than activeFormat.videoMaxZoomFactor, an NSRangeException is thrown. Setting the videoZoomFactor to a value less than or equal to activeFormat.videoMaxZoomFactor, but greater than maxAvailableVideoZoomFactor results in the value being clamped to the maxAvailableVideoZoomFactor. Clients can key value observe the value of this property.
+ */
+@property(nonatomic, readonly) CGFloat maxAvailableVideoZoomFactor NS_AVAILABLE_IOS(11_0);
+
+@end
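
// Editor's example (not part of the SDK diff): picking a companion depth
// format of the active format. A minimal sketch; choosing the last array
// element as a "highest resolution" heuristic is an assumption, not
// documented ordering.
#import <AVFoundation/AVFoundation.h>

static void SelectDepthFormat(AVCaptureDevice *device)
{
    NSArray<AVCaptureDeviceFormat *> *depthFormats =
        device.activeFormat.supportedDepthDataFormats;
    if (depthFormats.count == 0)
        return; // this format has no companion depth formats
    NSError *error = nil;
    if ([device lockForConfiguration:&error]) {
        device.activeDepthDataFormat = depthFormats.lastObject;
        [device unlockForConfiguration];
    }
}
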
+
+
+#pragma mark - AVCaptureDeviceDiscoverySession
+
+/*!
+ @class AVCaptureDeviceDiscoverySession
+ @abstract
+    The AVCaptureDeviceDiscoverySession allows clients to search for devices by certain criteria.
+ 
+ @discussion
+    This class allows clients to discover devices by providing certain search criteria. The objective of this class is to help find devices by device type and optionally by media type or position and allow you to key-value observe changes to the returned devices list.
+ */
+NS_CLASS_AVAILABLE_IOS(10_0) __TVOS_PROHIBITED
+@interface AVCaptureDeviceDiscoverySession : NSObject
+
+AV_INIT_UNAVAILABLE
+
+/*!
+ @method discoverySessionWithDeviceTypes:
+ @abstract
+    Returns an AVCaptureDeviceDiscoverySession instance for the given device types, media type, and position.
+ 
+ @param deviceTypes
+    An array specifying the device types to include in the list of discovered devices.
+ @param mediaType
+    The media type, such as AVMediaTypeVideo, AVMediaTypeAudio, or AVMediaTypeMuxed, to include in the list of discovered devices. Pass nil to search for devices with any media type.
+ @param position
+    The position to include in the list of discovered devices. Pass AVCaptureDevicePositionUnspecified to search for devices with any position.
+ @result
+    The AVCaptureDeviceDiscoverySession from which the list of devices can be obtained.
+ 
+ @discussion
+    The list of device types is mandatory. This is used to make sure that clients only get access to devices of types they expect. This prevents new device types from automatically being included in the list of devices.
+ */
++ (instancetype)discoverySessionWithDeviceTypes:(NSArray<AVCaptureDeviceType> *)deviceTypes mediaType:(nullable AVMediaType)mediaType position:(AVCaptureDevicePosition)position;
+
+/*!
+ @property devices
+ @abstract
+    The list of devices that comply to the search criteria specified on the discovery session.
+ 
+ @discussion
+    The returned array contains only devices that are available at the time the method is called. Applications can key-value observe this property to be notified when the list of available devices has changed. For apps linked against iOS 10, the devices returned are unsorted. For apps linked against iOS 11 or later, the devices are sorted by AVCaptureDeviceType, matching the order specified in the deviceTypes parameter of +[AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:mediaType:position:]. If a position of AVCaptureDevicePositionUnspecified is specified, the results are further ordered by position in the AVCaptureDevicePosition enum.
+ */
+@property(nonatomic, readonly) NSArray<AVCaptureDevice *> *devices;
+
+@end
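
// Editor's example (not part of the SDK diff): discovering back-facing wide
// angle cameras with a discovery session. A minimal sketch; a real client
// might also key-value observe the devices property for changes.
#import <AVFoundation/AVFoundation.h>

static AVCaptureDevice * _Nullable FindBackCamera(void)
{
    AVCaptureDeviceDiscoverySession *discovery =
        [AVCaptureDeviceDiscoverySession
            discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                  mediaType:AVMediaTypeVideo
                                   position:AVCaptureDevicePositionBack];
    return discovery.devices.firstObject; // nil if nothing matches
}
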
+
+
+#pragma mark - AVFrameRateRange
+
 @class AVFrameRateRangeInternal;
 
 /*!
  @class AVFrameRateRange
  @abstract
-    An AVFrameRateRange expresses a range of valid frame rates as min and max
-    rate and min and max duration.
-
+    An AVFrameRateRange expresses a range of valid frame rates as min and max rate and min and max duration.
+ 
  @discussion
-    An AVCaptureDevice exposes an array of formats, and its current activeFormat may be queried.  The
-    payload for the formats property is an array of AVCaptureDeviceFormat objects and the activeFormat property
-    payload is an AVCaptureDeviceFormat.  AVCaptureDeviceFormat wraps a CMFormatDescription and
-    expresses a range of valid video frame rates as an NSArray of AVFrameRateRange objects.
-    AVFrameRateRange expresses min and max frame rate as a rate in frames per second and
-    duration (CMTime).  An AVFrameRateRange object is immutable.  Its values do not change for the life of the object.
-*/
-NS_CLASS_AVAILABLE(10_7, 7_0)
+    An AVCaptureDevice exposes an array of formats, and its current activeFormat may be queried. The payload for the formats property is an array of AVCaptureDeviceFormat objects and the activeFormat property payload is an AVCaptureDeviceFormat. AVCaptureDeviceFormat wraps a CMFormatDescription and expresses a range of valid video frame rates as an NSArray of AVFrameRateRange objects. AVFrameRateRange expresses min and max frame rate as a rate in frames per second and duration (CMTime). An AVFrameRateRange object is immutable. Its values do not change for the life of the object.
+ */
+NS_CLASS_AVAILABLE(10_7, 7_0) __TVOS_PROHIBITED
 @interface AVFrameRateRange : NSObject
 {
 @private
     AVFrameRateRangeInternal *_internal;
 }
 
+AV_INIT_UNAVAILABLE
+
 /*!
  @property minFrameRate
  @abstract
     A Float64 indicating the minimum frame rate supported by this range.
-
+ 
  @discussion
-    This read-only property indicates the minimum frame rate supported by
-    this range in frames per second.
-*/
+    This read-only property indicates the minimum frame rate supported by this range in frames per second.
+ */
 @property(readonly) Float64 minFrameRate;
 
 /*!
  @property maxFrameRate
  @abstract
     A Float64 indicating the maximum frame rate supported by this range.
-
+ 
  @discussion
-    This read-only property indicates the maximum frame rate supported by
-    this range in frames per second.
-*/
+    This read-only property indicates the maximum frame rate supported by this range in frames per second.
+ */
 @property(readonly) Float64 maxFrameRate;
 
 /*!
  @property maxFrameDuration
  @abstract
     A CMTime indicating the maximum frame duration supported by this range.
-
+ 
  @discussion
-    This read-only property indicates the maximum frame duration supported by
-    this range.  It is the reciprocal of minFrameRate, and expresses minFrameRate
-    as a duration.
-*/
+    This read-only property indicates the maximum frame duration supported by this range. It is the reciprocal of minFrameRate, and expresses minFrameRate as a duration.
+ */
 @property(readonly) CMTime maxFrameDuration;
 
 /*!
  @property minFrameDuration
  @abstract
     A CMTime indicating the minimum frame duration supported by this range.
-
+ 
  @discussion
-    This read-only property indicates the minimum frame duration supported by
-    this range.  It is the reciprocal of maxFrameRate, and expresses maxFrameRate
-    as a duration.
-*/
+    This read-only property indicates the minimum frame duration supported by this range. It is the reciprocal of maxFrameRate, and expresses maxFrameRate as a duration.
+ */
 @property(readonly) CMTime minFrameDuration;
 
 @end
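
// Editor's example (not part of the SDK diff): locking the device to 60 fps
// when the active format advertises a range that includes it, using the
// duration counterparts described above. A minimal sketch; 60 fps is an
// arbitrary target.
#import <AVFoundation/AVFoundation.h>

static void LockTo60FPS(AVCaptureDevice *device)
{
    for (AVFrameRateRange *range in device.activeFormat.videoSupportedFrameRateRanges) {
        if (range.minFrameRate <= 60.0 && 60.0 <= range.maxFrameRate) {
            NSError *error = nil;
            if ([device lockForConfiguration:&error]) {
                device.activeVideoMinFrameDuration = CMTimeMake(1, 60);
                device.activeVideoMaxFrameDuration = CMTimeMake(1, 60);
                [device unlockForConfiguration];
            }
            break;
        }
    }
}
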
@@ -1850,24 +1776,19 @@
  @constant AVCaptureVideoStabilizationModeOff
     Indicates that video should not be stabilized.
  @constant AVCaptureVideoStabilizationModeStandard
-    Indicates that video should be stabilized using the standard video stabilization algorithm introduced with iOS 5.0.
-    Standard video stabilization has a reduced field of view.  Enabling video stabilization may introduce additional
-    latency into the video capture pipeline.
+    Indicates that video should be stabilized using the standard video stabilization algorithm introduced with iOS 5.0. Standard video stabilization has a reduced field of view. Enabling video stabilization may introduce additional latency into the video capture pipeline.
  @constant AVCaptureVideoStabilizationModeCinematic
-    Indicates that video should be stabilized using the cinematic stabilization algorithm for more dramatic results.
-    Cinematic video stabilization has a reduced field of view compared to standard video stabilization.
-    Enabling cinematic video stabilization introduces much more latency into the video capture pipeline than
-    standard video stabilization and consumes significantly more system memory.  Use narrow or identical min and max
-    frame durations in conjunction with this mode.
+    Indicates that video should be stabilized using the cinematic stabilization algorithm for more dramatic results. Cinematic video stabilization has a reduced field of view compared to standard video stabilization. Enabling cinematic video stabilization introduces much more latency into the video capture pipeline than standard video stabilization and consumes significantly more system memory. Use narrow or identical min and max frame durations in conjunction with this mode.
  @constant AVCaptureVideoStabilizationModeAuto
     Indicates that the most appropriate video stabilization mode for the device and format should be chosen.
-*/
+ */
 typedef NS_ENUM(NSInteger, AVCaptureVideoStabilizationMode) {
     AVCaptureVideoStabilizationModeOff       = 0,
-    AVCaptureVideoStabilizationModeStandard	 = 1,
+    AVCaptureVideoStabilizationModeStandard  = 1,
     AVCaptureVideoStabilizationModeCinematic = 2,
     AVCaptureVideoStabilizationModeAuto      = -1,
-} NS_AVAILABLE_IOS(8_0);
+} NS_AVAILABLE_IOS(8_0) __TVOS_PROHIBITED;
+
 
 /*!
  @enum AVCaptureAutoFocusSystem
@@ -1877,75 +1798,68 @@
  @constant AVCaptureAutoFocusSystemNone
     Indicates that autofocus is not available.
  @constant AVCaptureAutoFocusSystemContrastDetection
-    Indicates that autofocus is achieved by contrast detection. 
-    Contrast detection performs a focus scan to find the optimal position.
+    Indicates that autofocus is achieved by contrast detection. Contrast detection performs a focus scan to find the optimal position.
  @constant AVCaptureAutoFocusSystemPhaseDetection
-    Indicates that autofocus is achieved by phase detection. 
-    Phase detection has the ability to achieve focus in many cases without a focus scan.
-    Phase detection autofocus is typically less visually intrusive than contrast detection autofocus.
-*/
+    Indicates that autofocus is achieved by phase detection. Phase detection has the ability to achieve focus in many cases without a focus scan. Phase detection autofocus is typically less visually intrusive than contrast detection autofocus.
+ */
 typedef NS_ENUM(NSInteger, AVCaptureAutoFocusSystem) {
-	AVCaptureAutoFocusSystemNone              = 0,
-	AVCaptureAutoFocusSystemContrastDetection = 1,
-	AVCaptureAutoFocusSystemPhaseDetection    = 2,
-} NS_AVAILABLE_IOS(8_0);
+    AVCaptureAutoFocusSystemNone              = 0,
+    AVCaptureAutoFocusSystemContrastDetection = 1,
+    AVCaptureAutoFocusSystemPhaseDetection    = 2,
+} NS_AVAILABLE_IOS(8_0) __TVOS_PROHIBITED;
+
 
+#pragma mark - AVCaptureDeviceFormat
 
+@class AVCaptureOutput;
 @class AVCaptureDeviceFormatInternal;
 
 /*!
  @class AVCaptureDeviceFormat
  @abstract
-    An AVCaptureDeviceFormat wraps a CMFormatDescription and other format-related information, such
-    as min and max framerate.
-
+    An AVCaptureDeviceFormat wraps a CMFormatDescription and other format-related information, such as min and max framerate.
+ 
  @discussion
-    An AVCaptureDevice exposes an array of formats, and its current activeFormat may be queried.  The
-    payload for the formats property is an array of AVCaptureDeviceFormat objects and the activeFormat property
-    payload is an AVCaptureDeviceFormat.  AVCaptureDeviceFormat is a thin wrapper around a 
-    CMFormatDescription, and can carry associated device format information that doesn't go in a
-    CMFormatDescription, such as min and max frame rate.  An AVCaptureDeviceFormat object is immutable.
-    Its values do not change for the life of the object.
-*/
-NS_CLASS_AVAILABLE(10_7, 7_0)
+    An AVCaptureDevice exposes an array of formats, and its current activeFormat may be queried. The payload for the formats property is an array of AVCaptureDeviceFormat objects and the activeFormat property payload is an AVCaptureDeviceFormat. AVCaptureDeviceFormat is a thin wrapper around a CMFormatDescription, and can carry associated device format information that doesn't go in a CMFormatDescription, such as min and max frame rate. An AVCaptureDeviceFormat object is immutable. Its values do not change for the life of the object.
+ */
+NS_CLASS_AVAILABLE(10_7, 7_0) __TVOS_PROHIBITED
 @interface AVCaptureDeviceFormat : NSObject
 {
 @private
     AVCaptureDeviceFormatInternal *_internal;
 }
 
+AV_INIT_UNAVAILABLE
+
 /*!
  @property mediaType
  @abstract
     An NSString describing the media type of an AVCaptureDevice active or supported format.
-
+ 
  @discussion
-    Supported mediaTypes are listed in AVMediaFormat.h.  This is a read-only
-    property.  The caller assumes no ownership of the returned value and should not CFRelease it.
-*/
-@property(nonatomic, readonly) NSString *mediaType;
+    Supported mediaTypes are listed in AVMediaFormat.h. This is a read-only property. The caller assumes no ownership of the returned value and should not CFRelease it.
+ */
+@property(nonatomic, readonly) AVMediaType mediaType;
 
 /*!
  @property formatDescription
  @abstract
     A CMFormatDescription describing an AVCaptureDevice active or supported format.
-
+ 
  @discussion
-    A CMFormatDescription describing an AVCaptureDevice active or supported format.  This is a read-only
-    property.  The caller assumes no ownership of the returned value and should not CFRelease it.
-*/
+    A CMFormatDescription describing an AVCaptureDevice active or supported format. This is a read-only property. The caller assumes no ownership of the returned value and should not CFRelease it.
+ */
 @property(nonatomic, readonly) CMFormatDescriptionRef formatDescription;
 
 /*!
  @property videoSupportedFrameRateRanges
  @abstract
     A property indicating the format's supported frame rate ranges.
-
+ 
  @discussion
-    videoSupportedFrameRateRanges is an array of AVFrameRateRange objects, one for
-    each of the format's supported video frame rate ranges.
-*/
-@property(nonatomic, readonly) NSArray *videoSupportedFrameRateRanges;
+    videoSupportedFrameRateRanges is an array of AVFrameRateRange objects, one for each of the format's supported video frame rate ranges.
+ */
+@property(nonatomic, readonly) NSArray<AVFrameRateRange *> *videoSupportedFrameRateRanges;
 
 #if TARGET_OS_IPHONE
 
@@ -1953,22 +1867,20 @@
  @property videoFieldOfView
  @abstract
     A property indicating the format's field of view.
-
+ 
  @discussion
-    videoFieldOfView is a float value indicating the receiver's field of view in degrees.
-    If field of view is unknown, a value of 0 is returned.
-*/
+    videoFieldOfView is a float value indicating the receiver's field of view in degrees. If field of view is unknown, a value of 0 is returned.
+ */
 @property(nonatomic, readonly) float videoFieldOfView NS_AVAILABLE_IOS(7_0);
 
 /*!
  @property videoBinned
  @abstract
     A property indicating whether the format is binned.
-
+ 
  @discussion
-    videoBinned is a BOOL indicating whether the format is a binned format.
-    Binning is a pixel-combining process which can result in greater low light sensitivity at the cost of reduced resolution.
-*/
+    videoBinned is a BOOL indicating whether the format is a binned format. Binning is a pixel-combining process which can result in greater low light sensitivity at the cost of reduced resolution.
+ */
 @property(nonatomic, readonly, getter=isVideoBinned) BOOL videoBinned NS_AVAILABLE_IOS(7_0);
 
 /*!
@@ -1980,21 +1892,18 @@
     An AVCaptureVideoStabilizationMode to be checked.
  
  @discussion
-    isVideoStabilizationModeSupported: returns a boolean value indicating whether the format can be stabilized using
-    the given mode with -[AVCaptureConnection setPreferredVideoStabilizationMode:].
-*/
+    isVideoStabilizationModeSupported: returns a boolean value indicating whether the format can be stabilized using the given mode with -[AVCaptureConnection setPreferredVideoStabilizationMode:].
+ */
 - (BOOL)isVideoStabilizationModeSupported:(AVCaptureVideoStabilizationMode)videoStabilizationMode NS_AVAILABLE_IOS(8_0);
 
 /*!
  @property videoStabilizationSupported
  @abstract
     A property indicating whether the format supports video stabilization.
-
+ 
  @discussion
-    videoStabilizationSupported is a BOOL indicating whether the format can be stabilized using 
-    AVCaptureConnection -setEnablesVideoStabilizationWhenAvailable.
-    This property is deprecated.  Use isVideoStabilizationModeSupported: instead.
-*/
+    videoStabilizationSupported is a BOOL indicating whether the format can be stabilized using AVCaptureConnection -setEnablesVideoStabilizationWhenAvailable. This property is deprecated. Use isVideoStabilizationModeSupported: instead.
+ */
 @property(nonatomic, readonly, getter=isVideoStabilizationSupported) BOOL videoStabilizationSupported NS_DEPRECATED_IOS(7_0, 8_0, "Use isVideoStabilizationModeSupported: instead.");
 
 /*!
@@ -2003,24 +1912,17 @@
     Indicates the maximum zoom factor available for the AVCaptureDevice's videoZoomFactor property.
  
  @discussion
-    If the device's videoZoomFactor property is assigned a larger value, an NSRangeException will
-    be thrown. A maximum zoom factor of 1 indicates no zoom is available.
+    If the device's videoZoomFactor property is assigned a larger value, an NSRangeException will be thrown. A maximum zoom factor of 1 indicates no zoom is available.
  */
 @property(nonatomic, readonly) CGFloat videoMaxZoomFactor NS_AVAILABLE_IOS(7_0);
 
 /*!
  @property videoZoomFactorUpscaleThreshold
  @abstract
-    Indicates the value of AVCaptureDevice's videoZoomFactor property at which the image output
-    begins to require upscaling.
+    Indicates the value of AVCaptureDevice's videoZoomFactor property at which the image output begins to require upscaling.
  
  @discussion
-    In some cases the image sensor's dimensions are larger than the dimensions reported by the video
-    AVCaptureDeviceFormat.  As long as the sensor crop is larger than the reported dimensions of the
-    AVCaptureDeviceFormat, the image will be downscaled.  Setting videoZoomFactor to the value of
-    videoZoomFactorUpscalingThreshold will provide a center crop of the sensor image data without
-    any scaling.  If a greater zoom factor is used, then the sensor data will be upscaled to the
-    device format's dimensions.
+    In some cases the image sensor's dimensions are larger than the dimensions reported by the video AVCaptureDeviceFormat. As long as the sensor crop is larger than the reported dimensions of the AVCaptureDeviceFormat, the image will be downscaled. Setting videoZoomFactor to the value of videoZoomFactorUpscaleThreshold will provide a center crop of the sensor image data without any scaling. If a greater zoom factor is used, then the sensor data will be upscaled to the device format's dimensions.
  */
 @property(nonatomic, readonly) CGFloat videoZoomFactorUpscaleThreshold NS_AVAILABLE_IOS(7_0);
 
@@ -2031,7 +1933,7 @@
  
  @discussion
     This read-only property indicates the minimum supported exposure duration.
-*/
+ */
 @property(nonatomic, readonly) CMTime minExposureDuration NS_AVAILABLE_IOS(8_0);
 
 /*!
@@ -2041,7 +1943,7 @@
  
  @discussion
     This read-only property indicates the maximum supported exposure duration.
-*/
+ */
 @property(nonatomic, readonly) CMTime maxExposureDuration NS_AVAILABLE_IOS(8_0);
 
 /*!
@@ -2051,7 +1953,7 @@
  
  @discussion
     This read-only property indicates the minimum supported exposure ISO value.
-*/
+ */
 @property(nonatomic, readonly) float minISO NS_AVAILABLE_IOS(8_0);
 
 /*!
@@ -2061,18 +1963,17 @@
  
  @discussion
     This read-only property indicates the maximum supported exposure ISO value.
-*/
+ */
 @property(nonatomic, readonly) float maxISO NS_AVAILABLE_IOS(8_0);
 
 /*!
  @property videoHDRSupported
  @abstract
     A property indicating whether the format supports high dynamic range streaming.
-
+ 
  @discussion
-    videoHDRSupported is a BOOL indicating whether the format supports
-    high dynamic range streaming.  See AVCaptureDevice's videoHDREnabled property.
-*/
+    videoHDRSupported is a BOOL indicating whether the format supports high dynamic range streaming. See AVCaptureDevice's videoHDREnabled property.
+ */
 @property(nonatomic, readonly, getter=isVideoHDRSupported) BOOL videoHDRSupported NS_AVAILABLE_IOS(8_0);
 
 /*!
@@ -2081,10 +1982,12 @@
     CMVideoDimensions indicating the highest resolution still image that can be produced by this format.
  
  @discussion
-    Normally, AVCaptureStillImageOutput emits images with the same dimensions as its source AVCaptureDevice's
-    activeFormat.  However, if you set highResolutionStillImageOutputEnabled to YES, AVCaptureStillImageOutput
-    emits still images with its source AVCaptureDevice's activeFormat.highResolutionStillImageDimensions.
-*/
+    By default, AVCapturePhotoOutput and AVCaptureStillImageOutput emit images with the same dimensions as their source AVCaptureDevice's activeFormat.formatDescription property. Some device formats support high resolution photo output. That is, they can stream video to an AVCaptureVideoDataOutput or AVCaptureMovieFileOutput at one resolution while outputting photos to AVCapturePhotoOutput at a higher resolution. You may query this property to discover a video format's supported high resolution still image dimensions. See -[AVCapturePhotoOutput highResolutionPhotoEnabled], -[AVCapturePhotoSettings highResolutionPhotoEnabled], and -[AVCaptureStillImageOutput highResolutionStillImageOutputEnabled].
+ 
+    AVCaptureDeviceFormats of type AVMediaTypeDepthData may also support the delivery of a higher resolution depth data map to an AVCapturePhotoOutput. Chief differences are:
+       - Depth data accompanying still images is not supported by AVCaptureStillImageOutput. You must use AVCapturePhotoOutput.
+       - By opting in for depth data ( -[AVCapturePhotoSettings setDepthDataDeliveryEnabled:YES] ), you implicitly opt in for high resolution depth data if it's available. You may query the -[AVCaptureDevice activeDepthDataFormat]'s highResolutionStillImageDimensions to discover the depth data resolution that will be delivered with captured photos.
+ */
 @property(nonatomic, readonly) CMVideoDimensions highResolutionStillImageDimensions NS_AVAILABLE_IOS(8_0);
 
 /*!
@@ -2094,14 +1997,65 @@
  
  @discussion
     This read-only property indicates the autofocus system.
-*/
+ */
 @property(nonatomic, readonly) AVCaptureAutoFocusSystem autoFocusSystem NS_AVAILABLE_IOS(8_0);
 
+/*!
+ @property supportedColorSpaces
+ @abstract
+    A property indicating the receiver's supported color spaces.
+ 
+ @discussion
+    This read-only property indicates the receiver's supported color spaces as an array of AVCaptureColorSpace constants sorted from narrow to wide color.
+ */
+@property(nonatomic, readonly) NSArray<NSNumber *> *supportedColorSpaces NS_AVAILABLE_IOS(10_0);
+
+/*!
+ @property videoMinZoomFactorForDepthDataDelivery
+ @abstract
+    Indicates the minimum zoom factor available for the AVCaptureDevice's videoZoomFactor property when delivering depth data to one or more outputs.
+ 
+ @discussion
+    Dual camera devices support a limited zoom range when delivering depth data to any output. If this device format has no -supportedDepthDataFormats, this property returns 1.0.
+ */
+@property(nonatomic, readonly) CGFloat videoMinZoomFactorForDepthDataDelivery NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property videoMaxZoomFactorForDepthDataDelivery
+ @abstract
+    Indicates the maximum zoom factor available for the AVCaptureDevice's videoZoomFactor property when delivering depth data to one or more outputs.
+ 
+ @discussion
+    Dual camera devices support a limited zoom range when delivering depth data to any output. If this device format has no -supportedDepthDataFormats, this property returns videoMaxZoomFactor.
+ */
+@property(nonatomic, readonly) CGFloat videoMaxZoomFactorForDepthDataDelivery NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property supportedDepthDataFormats
+ @abstract
+    Indicates this format's companion depth data formats.
+ 
+ @discussion
+    If no depth data formats are supported by the receiver, an empty array is returned. On dual camera devices, the supportedDepthDataFormats list items always match the aspect ratio of their paired video format. When the receiver is set as the device's activeFormat, you may set the device's activeDepthDataFormat to one of these supported depth data formats.
+ */
+@property(nonatomic, readonly) NSArray<AVCaptureDeviceFormat *> *supportedDepthDataFormats NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property unsupportedCaptureOutputClasses
+ @abstract
+    A property indicating AVCaptureOutput subclasses the receiver does not support.
+ 
+ @discussion
+    As a rule, AVCaptureDeviceFormats of a given mediaType are available for use with all AVCaptureOutputs that accept that media type, but there are exceptions. For instance, on iOS, the photo resolution video formats may not be used as sources for AVCaptureMovieFileOutput due to bandwidth limitations. On DualCamera devices, AVCaptureDepthDataOutput is not supported by the 12 MP device formats when using the -[AVCaptureDevice setActiveFormat:] API due to bandwidth limitations, though their use with -[AVCaptureSession setSessionPreset:AVCaptureSessionPresetPhoto] is supported. When using the photo preset, video is streamed at preview resolution rather than full sensor resolution.  
+ */
+@property(nonatomic, readonly) NSArray<Class> *unsupportedCaptureOutputClasses NS_AVAILABLE_IOS(11_0);
+
 #endif // TARGET_OS_IPHONE
 
 @end
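
// Editor's example (not part of the SDK diff): scanning a device's formats for
// the largest video dimensions and activating that format. A minimal sketch;
// real code would also weigh frame rate ranges and other format properties.
#import <AVFoundation/AVFoundation.h>

static void SelectLargestFormat(AVCaptureDevice *device)
{
    AVCaptureDeviceFormat *best = nil;
    int32_t bestWidth = 0;
    for (AVCaptureDeviceFormat *format in device.formats) {
        CMVideoDimensions dims =
            CMVideoFormatDescriptionGetDimensions(format.formatDescription);
        if (dims.width > bestWidth) {
            bestWidth = dims.width;
            best = format;
        }
    }
    NSError *error = nil;
    if (best && [device lockForConfiguration:&error]) {
        device.activeFormat = best;
        [device unlockForConfiguration];
    }
}
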
 
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+
+#pragma mark - AVCaptureDeviceInputSource
 
 @class AVCaptureDeviceInputSourceInternal;
 
@@ -2109,41 +2063,39 @@
  @class AVCaptureDeviceInputSource
  @abstract
     An AVCaptureDeviceInputSource represents a distinct input source on an AVCaptureDevice object.
-
+ 
  @discussion
-    An AVCaptureDevice may optionally present an array of inputSources, representing distinct mutually
-    exclusive inputs to the device, for example, an audio AVCaptureDevice might have ADAT optical
-    and analog input sources.  A video AVCaptureDevice might have an HDMI input source, or a component 
-    input source.
-*/
-NS_CLASS_AVAILABLE(10_7, NA)
+    An AVCaptureDevice may optionally present an array of inputSources, representing distinct mutually exclusive inputs to the device, for example, an audio AVCaptureDevice might have ADAT optical and analog input sources. A video AVCaptureDevice might have an HDMI input source, or a component input source.
+ */
+NS_CLASS_AVAILABLE_MAC(10_7) __TVOS_PROHIBITED
 @interface AVCaptureDeviceInputSource : NSObject
 {
 @private
     AVCaptureDeviceInputSourceInternal *_internal;
 }
 
+AV_INIT_UNAVAILABLE
+
 /*!
  @property inputSourceID
  @abstract
     An ID unique among the inputSources exposed by a given AVCaptureDevice.
-
+ 
  @discussion
-    An AVCaptureDevice's inputSources array must contain AVCaptureInputSource objects with unique
-    inputSourceIDs.
-*/
+    An AVCaptureDevice's inputSources array must contain AVCaptureInputSource objects with unique inputSourceIDs.
+ */
 @property(nonatomic, readonly) NSString *inputSourceID;
 
 /*!
  @property localizedName
  @abstract
     A localized human-readable name for the receiver.
-
+ 
  @discussion
     This property can be used for displaying the name of the capture device input source in a user interface.
-*/
+ */
 @property(nonatomic, readonly) NSString *localizedName;
 
 @end
 
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+NS_ASSUME_NONNULL_END
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureFileOutput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureFileOutput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureFileOutput.h	1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureFileOutput.h	2017-05-24 00:37:44.000000000 -0400
@@ -0,0 +1,582 @@
+/*
+    File:  AVCaptureFileOutput.h
+ 
+    Framework:  AVFoundation
+ 
+    Copyright 2010-2017 Apple Inc. All rights reserved.
+*/
+
+#import <AVFoundation/AVCaptureOutputBase.h>
+#import <AVFoundation/AVVideoSettings.h>
+#import <CoreMedia/CMSampleBuffer.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark AVCaptureFileOutput
+
+@class AVCaptureFileOutputInternal;
+@class AVMetadataItem;
+@protocol AVCaptureFileOutputDelegate;
+@protocol AVCaptureFileOutputRecordingDelegate;
+
+/*!
+ @class AVCaptureFileOutput
+ @abstract
+    AVCaptureFileOutput is an abstract subclass of AVCaptureOutput that provides an interface for writing captured media to files.
+ 
+ @discussion
+    This abstract superclass defines the interface for outputs that record media samples to files. File outputs can start recording to a new file using the startRecordingToOutputFileURL:recordingDelegate: method. On successive invocations of this method on Mac OS X, the output file can be changed dynamically without losing media samples. A file output can stop recording using the stopRecording method. Because files are recorded in the background, applications will need to specify a delegate for each new file so that they can be notified when recorded files are finished.
+ 
+    On Mac OS X, clients can also set a delegate on the file output itself that can be used to control recording along exact media sample boundaries using the captureOutput:didOutputSampleBuffer:fromConnection: method.
+ 
+    The concrete subclasses of AVCaptureFileOutput are AVCaptureMovieFileOutput, which records media to a QuickTime movie file, and AVCaptureAudioFileOutput, which writes audio media to a variety of audio file formats.
+ */
+NS_CLASS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED
+@interface AVCaptureFileOutput : AVCaptureOutput 
+{
+@private
+    AVCaptureFileOutputInternal *_fileOutputInternal;
+}
+
+#if TARGET_OS_OSX
+
+/*!
+ @property delegate
+ @abstract
+    The receiver's delegate.
+ 
+ @discussion
+    The value of this property is an object conforming to the AVCaptureFileOutputDelegate protocol that will be able to monitor and control recording along exact sample boundaries.
+ */
+@property(nonatomic, assign, nullable) id<AVCaptureFileOutputDelegate> delegate NS_AVAILABLE_MAC(10_7);
+
+#endif // TARGET_OS_OSX
+
+/*!
+ @property outputFileURL
+ @abstract
+    The file URL of the file to which the receiver is currently recording incoming buffers.
+ 
+ @discussion
+    The value of this property is an NSURL object containing the file URL of the file currently being written by the receiver. Returns nil if the receiver is not recording to any file.
+ */
+@property(nonatomic, readonly, nullable) NSURL *outputFileURL;
+
+/*!
+ @method startRecordingToOutputFileURL:recordingDelegate:
+ @abstract
+    Tells the receiver to start recording to a new file, and specifies a delegate that will be notified when recording is finished.
+ 
+ @param outputFileURL
+    An NSURL object containing the URL of the output file. This method throws an NSInvalidArgumentException if the URL is not a valid file URL.
+ @param delegate
+    An object conforming to the AVCaptureFileOutputRecordingDelegate protocol. Clients must specify a delegate so that they can be notified when recording to the given URL is finished.
+ 
+ @discussion
+    The method sets the file URL to which the receiver is currently writing output media. If a file at the given URL already exists when capturing starts, recording to the new file will fail.
+ 
+    Clients need not call stopRecording before calling this method while another recording is in progress. On Mac OS X, if this method is invoked while an existing output file is already being recorded, no media samples will be discarded between the old file and the new file.
+ 
+    When recording is stopped either by calling stopRecording, by changing files using this method, or because of an error, the remaining data that needs to be included in the file will be written in the background. Therefore, clients must specify a delegate that will be notified when all data has been written to the file using the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: method. The recording delegate can also optionally implement methods that inform it when data starts being written, when recording is paused and resumed, and when recording is about to be finished.
+ 
+    On Mac OS X, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the first samples written to the new file are guaranteed to be those contained in the sample buffer passed to that method.
+ 
+    Note: AVCaptureAudioFileOutput does not support -startRecordingToOutputFileURL:recordingDelegate:. Use -startRecordingToOutputFileURL:outputFileType:recordingDelegate: instead.
+ */
+- (void)startRecordingToOutputFileURL:(NSURL *)outputFileURL recordingDelegate:(id<AVCaptureFileOutputRecordingDelegate>)delegate;
+
+/*!
+ @method stopRecording
+ @abstract
+    Tells the receiver to stop recording to the current file.
+ 
+ @discussion
+    Clients can call this method when they want to stop recording new samples to the current file, and do not want to continue recording to another file. Clients that want to switch from one file to another should not call this method. Instead they should simply call startRecordingToOutputFileURL:recordingDelegate: with the new file URL.
+ 
+    When recording is stopped either by calling this method, by changing files using startRecordingToOutputFileURL:recordingDelegate:, or because of an error, the remaining data that needs to be included in the file will be written in the background. Therefore, before using the file, clients must wait until the delegate that was specified in startRecordingToOutputFileURL:recordingDelegate: is notified when all data has been written to the file using the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: method.
+ 
+    On Mac OS X, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the last samples written to the current file are guaranteed to be those that were output immediately before those in the sample buffer passed to that method.
+ */
+- (void)stopRecording;
+
+/*!
+ @property recording
+ @abstract
+    Indicates whether the receiver is currently recording.
+ 
+ @discussion
+    The value of this property is YES when the receiver currently has a file to which it is writing new samples, NO otherwise.
+ */
+@property(nonatomic, readonly, getter=isRecording) BOOL recording;
+
+#if TARGET_OS_OSX
+
+/*!
+ @property recordingPaused
+ @abstract
+    Indicates whether recording to the current output file is paused.
+ 
+ @discussion
+    This property indicates whether recording to the file returned by outputFileURL has been paused using the pauseRecording method. When a recording is paused, captured samples are not written to the output file, but new samples can be written to the same file in the future by calling resumeRecording.
+ */
+@property(nonatomic, readonly, getter=isRecordingPaused) BOOL recordingPaused NS_AVAILABLE_MAC(10_7);
+
+/*!
+ @method pauseRecording
+ @abstract
+    Pauses recording to the current output file.
+ 
+ @discussion
+    This method causes the receiver to stop writing captured samples to the current output file returned by outputFileURL, but leaves the file open so that samples can be written to it in the future, when resumeRecording is called. This allows clients to record multiple media segments that are not contiguous in time to a single file.
+ 
+    On Mac OS X, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the last samples written to the current file are guaranteed to be those that were output immediately before those in the sample buffer passed to that method.
+ */
+- (void)pauseRecording NS_AVAILABLE_MAC(10_7);
+
+/*!
+ @method resumeRecording
+ @abstract
+    Resumes recording to the current output file after it was previously paused using pauseRecording.
+ 
+ @discussion
+    This method causes the receiver to resume writing captured samples to the current output file returned by outputFileURL, after recording was previously paused using pauseRecording. This allows clients to record multiple media segments that are not contiguous in time to a single file.
+ 
+    On Mac OS X, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the first samples written to the current file are guaranteed to be those contained in the sample buffer passed to that method.
+ */
+- (void)resumeRecording NS_AVAILABLE_MAC(10_7);
+
+#endif // TARGET_OS_OSX
+
+/*!
+ @property recordedDuration
+ @abstract
+    Indicates the duration of the media recorded to the current output file.
+ 
+ @discussion
+    If recording is in progress, this property returns the total time recorded so far.
+ */
+@property(nonatomic, readonly) CMTime recordedDuration;
+
+/*!
+ @property recordedFileSize
+ @abstract
+    Indicates the size, in bytes, of the data recorded to the current output file.
+ 
+ @discussion
+    If a recording is in progress, this property returns the size in bytes of the data recorded so far.
+ */
+@property(nonatomic, readonly) int64_t recordedFileSize;
+
+/*!
+ @property maxRecordedDuration
+ @abstract
+    Specifies the maximum duration of the media that should be recorded by the receiver.
+ 
+ @discussion
+    This property specifies a hard limit on the duration of recorded files. Recording is stopped when the limit is reached and the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: delegate method is invoked with an appropriate error. The default value of this property is kCMTimeInvalid, which indicates no limit.
+ */
+@property(nonatomic) CMTime maxRecordedDuration;
+
+/*!
+ @property maxRecordedFileSize
+ @abstract
+    Specifies the maximum size, in bytes, of the data that should be recorded by the receiver.
+ 
+ @discussion
+    This property specifies a hard limit on the data size of recorded files. Recording is stopped when the limit is reached and the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: delegate method is invoked with an appropriate error. The default value of this property is 0, which indicates no limit.
+ */
+@property(nonatomic) int64_t maxRecordedFileSize;
+
+/*!
+ @property minFreeDiskSpaceLimit
+ @abstract
+    Specifies the minimum amount of free space, in bytes, required for recording to continue on a given volume.
+ 
+ @discussion
+    This property specifies a hard lower limit on the amount of free space that must remain on a target volume for recording to continue. Recording is stopped when the limit is reached and the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: delegate method is invoked with an appropriate error.
+ */
+@property(nonatomic) int64_t minFreeDiskSpaceLimit;
+
+@end
+
+
+/*!
+ @protocol AVCaptureFileOutputRecordingDelegate
+ @abstract
+    Defines an interface for delegates of AVCaptureFileOutput to respond to events that occur in the process of recording a single file.
+ */
+NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED
+@protocol AVCaptureFileOutputRecordingDelegate <NSObject>
+
+@optional
+
+/*!
+ @method captureOutput:didStartRecordingToOutputFileAtURL:fromConnections:
+ @abstract
+    Informs the delegate when the output has started writing to a file.
+ 
+ @param output
+    The capture file output that started writing the file.
+ @param fileURL
+    The file URL of the file that is being written.
+ @param connections
+    An array of AVCaptureConnection objects attached to the file output that provided the data that is being written to the file.
+ 
+ @discussion
+    This method is called when the file output has started writing data to a file. If an error condition prevents any data from being written, this method may not be called. captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error: and captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: will always be called, even if no data is written.
+ 
+    Clients should not assume that this method will be called on a specific thread, and should also try to make this method as efficient as possible.
+ */
+- (void)captureOutput:(AVCaptureFileOutput *)output didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray<AVCaptureConnection *> *)connections;
+
+/*!
+ @method captureOutput:didPauseRecordingToOutputFileAtURL:fromConnections:
+ @abstract
+    Called whenever the output is recording to a file and successfully pauses the recording at the request of the client.
+ 
+ @param output
+    The capture file output that has paused its file recording.
+ @param fileURL
+    The file URL of the file that is being written.
+ @param connections
+    An array of AVCaptureConnection objects attached to the file output that provided the data that is being written to the file.
+ 
+ @discussion
+    Delegates can use this method to be informed when a request to pause recording is actually respected. It is safe for delegates to change what the file output is currently doing (starting a new file, for example) from within this method. If recording to a file is stopped, either manually or due to an error, this method is not guaranteed to be called, even if a previous call to pauseRecording was made.
+ 
+    Clients should not assume that this method will be called on a specific thread, and should also try to make this method as efficient as possible.
+ */
+- (void)captureOutput:(AVCaptureFileOutput *)output didPauseRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray<AVCaptureConnection *> *)connections NS_AVAILABLE_MAC(10_7);
+
+/*!
+ @method captureOutput:didResumeRecordingToOutputFileAtURL:fromConnections:
+ @abstract
+    Called whenever the output, at the request of the client, successfully resumes a file recording that was paused.
+ 
+ @param output
+    The capture file output that has resumed its paused file recording.
+ @param fileURL
+    The file URL of the file that is being written.
+ @param connections
+    An array of AVCaptureConnection objects attached to the file output that provided the data that is being written to the file.
+ 
+ @discussion
+    Delegates can use this method to be informed when a request to resume recording is actually respected. It is safe for delegates to change what the file output is currently doing (starting a new file, for example) from within this method. If recording to a file is stopped, either manually or due to an error, this method is not guaranteed to be called, even if a previous call to resumeRecording was made.
+ 
+    Clients should not assume that this method will be called on a specific thread, and should also try to make this method as efficient as possible.
+ */
+- (void)captureOutput:(AVCaptureFileOutput *)output didResumeRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray<AVCaptureConnection *> *)connections NS_AVAILABLE_MAC(10_7);
+
+/*!
+ @method captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error:
+ @abstract
+    Informs the delegate when the output will stop writing new samples to a file.
+ 
+ @param output
+    The capture file output that will finish writing the file.
+ @param fileURL
+    The file URL of the file that is being written.
+ @param connections
+    An array of AVCaptureConnection objects attached to the file output that provided the data that is being written to the file.
+ @param error
+    An error describing what caused the file to stop recording, or nil if there was no error.
+ 
+ @discussion
+    This method is called when the file output will stop recording new samples to the file at outputFileURL, either because startRecordingToOutputFileURL:recordingDelegate: or stopRecording was called, or because an error, described by the error parameter, occurred (if no error occurred, the error parameter will be nil). This method will always be called for each recording request, even if no data is successfully written to the file.
+ 
+    Clients should not assume that this method will be called on a specific thread, and should also try to make this method as efficient as possible.
+ */
+- (void)captureOutput:(AVCaptureFileOutput *)output willFinishRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray<AVCaptureConnection *> *)connections error:(nullable NSError *)error NS_AVAILABLE_MAC(10_7);
+
+@required
+
+/*!
+ @method captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:
+ @abstract
+    Informs the delegate when all pending data has been written to an output file.
+ 
+ @param output
+    The capture file output that has finished writing the file.
+ @param outputFileURL
+    The file URL of the file that has been written.
+ @param connections
+    An array of AVCaptureConnection objects attached to the file output that provided the data that was written to the file.
+ @param error
+    An error describing what caused the file to stop recording, or nil if there was no error.
+ 
+ @discussion
+    This method is called when the file output has finished writing all data to a file whose recording was stopped, either because startRecordingToOutputFileURL:recordingDelegate: or stopRecording was called, or because an error, described by the error parameter, occurred (if no error occurred, the error parameter will be nil). This method will always be called for each recording request, even if no data is successfully written to the file.
+ 
+    Clients should not assume that this method will be called on a specific thread.
+ 
+    Delegates are required to implement this method.
+ */
+- (void)captureOutput:(AVCaptureFileOutput *)output didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray<AVCaptureConnection *> *)connections error:(nullable NSError *)error;
+
+@end
+
+
+/*!
+ @protocol AVCaptureFileOutputDelegate
+ @abstract
+    Defines an interface for delegates of AVCaptureFileOutput to monitor and control recordings along exact sample boundaries.
+ */
+NS_AVAILABLE_MAC(10_7) __TVOS_PROHIBITED
+@protocol AVCaptureFileOutputDelegate <NSObject>
+
+@required
+
+/*!
+ @method captureOutputShouldProvideSampleAccurateRecordingStart:
+ @abstract
+    Allows a client to opt in to frame-accurate record start in captureOutput:didOutputSampleBuffer:fromConnection:
+ 
+ @param output
+    The AVCaptureFileOutput instance with which the delegate is associated.
+ 
+ @discussion
+    In apps linked before Mac OS X 10.8, delegates that implement the captureOutput:didOutputSampleBuffer:fromConnection: method can ensure frame-accurate start/stop of a recording by calling startRecordingToOutputFileURL:recordingDelegate: from within the callback. Frame-accurate start requires the capture output to apply outputSettings when the session starts running, so it is ready to record on any given frame boundary. Compressing all the time while the session is running has power, thermal, and CPU implications. In apps linked on or after Mac OS X 10.8, delegates must implement captureOutputShouldProvideSampleAccurateRecordingStart: to indicate whether frame-accurate start/stop recording is required (returning YES) or not (returning NO). The output calls this method as soon as the delegate is added, and never again. If your delegate returns NO, the capture output applies compression settings when startRecordingToOutputFileURL:recordingDelegate: is called, and disables compression settings after the recording is stopped.
+ */
+- (BOOL)captureOutputShouldProvideSampleAccurateRecordingStart:(AVCaptureOutput *)output NS_AVAILABLE_MAC(10_8);
+
+@optional
+
+/*!
+ @method captureOutput:didOutputSampleBuffer:fromConnection:
+ @abstract
+    Gives the delegate the opportunity to inspect samples as they are received by the output and optionally start and stop recording at exact times.
+ 
+ @param output
+    The capture file output that is receiving the media data.
+ @param sampleBuffer
+    A CMSampleBuffer object containing the sample data and additional information about the sample, such as its format and presentation time.
+ @param connection
+    The AVCaptureConnection object attached to the file output from which the sample data was received.
+ 
+ @discussion
+    This method is called whenever the file output receives a single sample buffer (a single video frame or audio buffer, for example) from the given connection. This gives delegates an opportunity to start and stop recording or change output files at an exact sample boundary if -captureOutputShouldProvideSampleAccurateRecordingStart: returns YES. If called from within this method, the file output's startRecordingToOutputFileURL:recordingDelegate: and resumeRecording methods are guaranteed to include the received sample buffer in the new file, whereas calls to stopRecording and pauseRecording are guaranteed to include all samples leading up to those in the current sample buffer in the existing file.
+ 
+    Delegates can gather information particular to the samples by inspecting the CMSampleBuffer object. Sample buffers delivered to this method always contain a single frame of video, but may contain multiple samples of audio. For B-frame video formats, samples are always delivered in presentation order.
+ 
+    Clients that need to reference the CMSampleBuffer object outside of the scope of this method must CFRetain it and then CFRelease it when they are finished with it.
+ 
+    Note that to maintain optimal performance, some sample buffers directly reference pools of memory that may need to be reused by the device system and other capture inputs. This is frequently the case for uncompressed device native capture where memory blocks are copied as little as possible. If multiple sample buffers reference such pools of memory for too long, inputs will no longer be able to copy new samples into memory and those samples will be dropped. If your application is causing samples to be dropped by retaining the provided CMSampleBuffer objects for too long, but it needs access to the sample data for a long period of time, consider copying the data into a new buffer and then calling CFRelease on the sample buffer if it was previously retained so that the memory it references can be reused.
+ 
+    Clients should not assume that this method will be called on a specific thread. In addition, this method is called periodically, so it must be efficient to prevent capture performance problems.
+ */
+- (void)captureOutput:(AVCaptureFileOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection;
+
+@end
+
+
+#pragma mark - AVCaptureMovieFileOutput
+
+@class AVCaptureMovieFileOutputInternal;
+
+/*!
+ @class AVCaptureMovieFileOutput
+ @abstract
+    AVCaptureMovieFileOutput is a concrete subclass of AVCaptureFileOutput that writes captured media to QuickTime movie files.
+ 
+ @discussion
+    AVCaptureMovieFileOutput implements the complete file recording interface declared by AVCaptureFileOutput for writing media data to QuickTime movie files. In addition, instances of AVCaptureMovieFileOutput allow clients to configure options specific to the QuickTime file format, including allowing them to write metadata collections to each file, specify media encoding options for each track (Mac OS X), and specify an interval at which movie fragments should be written.
+ */
+NS_CLASS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED
+@interface AVCaptureMovieFileOutput : AVCaptureFileOutput
+{
+@private
+    AVCaptureMovieFileOutputInternal *_internal;
+}
+
+- (instancetype)init;
+
++ (instancetype)new;
+
+/*!
+ @property movieFragmentInterval
+ @abstract
+    Specifies the frequency with which movie fragments should be written.
+ 
+ @discussion
+    When movie fragments are used, a partially written QuickTime movie file whose writing is unexpectedly interrupted can be successfully opened and played up to multiples of the specified time interval. A value of kCMTimeInvalid indicates that movie fragments should not be used, but that only a movie atom describing all of the media in the file should be written. The default value of this property is ten seconds.
+ 
+    Changing the value of this property will not affect the movie fragment interval of the file currently being written, if there is one.
+ */
+@property(nonatomic) CMTime movieFragmentInterval;
+
+/*!
+ @property metadata
+ @abstract
+    A collection of metadata to be written to the receiver's output files.
+ 
+ @discussion
+    The value of this property is an array of AVMetadataItem objects representing the collection of top-level metadata to be written in each output file.
+ */
+@property(nonatomic, copy, nullable) NSArray<AVMetadataItem *> *metadata;
+
+#if TARGET_OS_IPHONE
+
+/*!
+ @property availableVideoCodecTypes
+ @abstract
+    Indicates the supported video codec formats that can be specified in setOutputSettings:forConnection:.
+ 
+ @discussion
+    The value of this property is an NSArray of AVVideoCodecTypes that can be used as values for the AVVideoCodecKey in the receiver's setOutputSettings:forConnection: dictionary. The array of available video codecs may change depending on the current session preset. The first codec in the array is used by default when recording a file.
+ */
+@property(nonatomic, readonly) NSArray<AVVideoCodecType> *availableVideoCodecTypes NS_AVAILABLE_IOS(10_0);
+
+#endif // TARGET_OS_IPHONE
+
+/*!
+ @method outputSettingsForConnection:
+ @abstract
+    Returns the options the receiver uses to encode media from the given connection as it is being recorded.
+ 
+ @param connection
+    The connection delivering the media to be encoded.
+ @result
+    An NSDictionary of output settings.
+ 
+ @discussion
+    See AVAudioSettings.h for audio connections or AVVideoSettings.h for video connections for more information on the structure of an output settings dictionary. If the returned value is an empty dictionary (i.e. [NSDictionary dictionary]), the format of the media from the connection will not be changed before being written to the file. If -setOutputSettings:forConnection: was called with a nil dictionary, this method returns a non-nil dictionary reflecting the settings used by the AVCaptureSession's current sessionPreset.
+ */
+- (NSDictionary<NSString *, id> *)outputSettingsForConnection:(AVCaptureConnection *)connection NS_AVAILABLE(10_7, 10_0);
+
+/*!
+ @method setOutputSettings:forConnection:
+ @abstract
+    Sets the options the receiver uses to encode media from the given connection as it is being recorded.
+ 
+ @param outputSettings
+    An NSDictionary of output settings.
+ @param connection
+    The connection delivering the media to be encoded.
+ 
+ @discussion
+    See AVAudioSettings.h for audio connections or AVVideoSettings.h for video connections for more information on how to construct an output settings dictionary. A value of an empty dictionary (i.e. [NSDictionary dictionary]) means that the format of the media from the connection should not be changed before being written to the file. A value of nil means that the output format will be determined by the session preset. In this case, -outputSettingsForConnection: will return a non-nil dictionary reflecting the settings used by the AVCaptureSession's current sessionPreset.
+ 
+    On iOS, you may only specify the AVVideoCodecKey in the outputSettings. If you specify any other key, an NSInvalidArgumentException will be thrown. See the availableVideoCodecTypes property.
+ */
+- (void)setOutputSettings:(nullable NSDictionary<NSString *, id> *)outputSettings forConnection:(AVCaptureConnection *)connection NS_AVAILABLE(10_7, 10_0);
+
+/*!
+ @method recordsVideoOrientationAndMirroringChangesAsMetadataTrackForConnection:
+ @abstract
+    Returns YES if the movie file output will create a timed metadata track that records samples which reflect changes made to the given connection's videoOrientation and videoMirrored properties during recording.
+ 
+ @param connection
+    A connection delivering video media to the movie file output. This method throws an NSInvalidArgumentException if the connection does not have a mediaType of AVMediaTypeVideo or if the connection does not terminate at the movie file output.
+ 
+ @discussion
+    See setRecordsVideoOrientationAndMirroringChanges:asMetadataTrackForConnection: for details on the behavior controlled by this value. The default value returned is NO.
+ */
+- (BOOL)recordsVideoOrientationAndMirroringChangesAsMetadataTrackForConnection:(AVCaptureConnection *)connection NS_AVAILABLE_IOS(9_0);
+
+/*!
+ @method setRecordsVideoOrientationAndMirroringChanges:asMetadataTrackForConnection:
+ @abstract
+    Controls whether or not the movie file output will create a timed metadata track that records samples which reflect changes made to the given connection's videoOrientation and videoMirrored properties during recording.
+ 
+ @param doRecordChanges
+    If YES, the movie file output will create a timed metadata track that records samples which reflect changes made to the given connection's videoOrientation and videoMirrored properties during recording.
+ @param connection
+    A connection delivering video media to the movie file output. This method throws an NSInvalidArgumentException if the connection does not have a mediaType of AVMediaTypeVideo or if the connection does not terminate at the movie file output.
+ 
+ @discussion
+    When a recording is started, the current state of a video capture connection's videoOrientation and videoMirrored properties is used to build the display matrix for the created video track. The movie file format allows only one display matrix per track, which means that any changes made during a recording to the videoOrientation and videoMirrored properties are not captured. For example, a user starts a recording with their device in the portrait orientation, and then partway through the recording changes the device to a landscape orientation. The landscape orientation requires a different display matrix, but only the initial display matrix (the portrait display matrix) is recorded for the video track.
+ 
+    By invoking this method, the client application directs the movie file output to create an additional track in the captured movie. This track is a timed metadata track that is associated with the video track, and contains one or more samples that contain a Video Orientation value (as defined by the EXIF and TIFF specifications, and enumerated by CGImagePropertyOrientation in <ImageIO/CGImageProperties.h>). The value represents the display matrix corresponding to the AVCaptureConnection's videoOrientation and videoMirrored properties when applied to the input source. The initial sample written to the timed metadata track represents the video track's display matrix. During recording, additional samples will be written to the timed metadata track whenever the client application changes the video connection's videoOrientation or videoMirrored properties. Using the above example, when the client application detects the user changing the device from portrait to landscape orientation, it updates the video connection's videoOrientation property, thus causing the movie file output to add a new sample to the timed metadata track.
+ 
+    After capture, playback and editing applications can use the timed metadata track to enhance their users' experience. For example, when playing back the captured movie, a playback engine can use the samples to adjust the display of the video samples to keep the video properly oriented. Another example is an editing application that uses the sample times to suggest cut points for breaking the captured movie into separate clips, where each clip is properly oriented.
+ 
+    The default behavior is to not create the timed metadata track.
+ 
+    The doRecordChanges value is only observed at the start of recording. Changes to the value will not have any effect until the next recording is started.
+ */
+- (void)setRecordsVideoOrientationAndMirroringChanges:(BOOL)doRecordChanges asMetadataTrackForConnection:(AVCaptureConnection *)connection NS_AVAILABLE_IOS(9_0);
+
+@end
+
+
+#pragma mark - AVCaptureAudioFileOutput
+
+@class AVCaptureAudioFileOutputInternal;
+
+/*!
+ @class AVCaptureAudioFileOutput
+ @abstract
+    AVCaptureAudioFileOutput is a concrete subclass of AVCaptureFileOutput that writes captured audio to any audio file type supported by CoreAudio.
+ 
+ @discussion
+    AVCaptureAudioFileOutput implements the complete file recording interface declared by AVCaptureFileOutput for writing media data to audio files. In addition, instances of AVCaptureAudioFileOutput allow clients to configure options specific to the audio file formats, including allowing them to write metadata collections to each file and specify audio encoding options.
+ */
+NS_CLASS_AVAILABLE_MAC(10_7) __TVOS_PROHIBITED
+@interface AVCaptureAudioFileOutput : AVCaptureFileOutput
+{
+@private
+    AVCaptureAudioFileOutputInternal *_internal;
+}
+
+- (instancetype)init;
+
++ (instancetype)new;
+
+/*!
+ @method availableOutputFileTypes
+ @abstract
+    Provides the file types AVCaptureAudioFileOutput can write.
+ 
+ @result
+    An NSArray of UTIs identifying the file types the AVCaptureAudioFileOutput class can write.
+ */
++ (NSArray<AVFileType> *)availableOutputFileTypes;
+
+/*!
+ @method startRecordingToOutputFileURL:outputFileType:recordingDelegate:
+ @abstract
+    Tells the receiver to start recording to a new file of the specified format, and specifies a delegate that will be notified when recording is finished.
+ 
+ @param outputFileURL
+    An NSURL object containing the URL of the output file. This method throws an NSInvalidArgumentException if the URL is not a valid file URL.
+ @param fileType
+    A UTI indicating the format of the file to be written.
+ @param delegate
+    An object conforming to the AVCaptureFileOutputRecordingDelegate protocol. Clients must specify a delegate so that they can be notified when recording to the given URL is finished.
+ 
+ @discussion
+    The method sets the file URL to which the receiver is currently writing output media. If a file at the given URL already exists when capturing starts, recording to the new file will fail.
+ 
+    The fileType argument is a UTI corresponding to the audio file format that should be written. UTIs for common audio file types are declared in AVMediaFormat.h.
+ 
+    Clients need not call stopRecording before calling this method while another recording is in progress. If this method is invoked while an existing output file is already being recorded, no media samples will be discarded between the old file and the new file.
+ 
+    When recording is stopped either by calling stopRecording, by changing files using this method, or because of an error, the remaining data that needs to be included in the file will be written in the background. Therefore, clients must specify a delegate that will be notified when all data has been written to the file using the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: method. The recording delegate can also optionally implement methods that inform it when data starts being written, when recording is paused and resumed, and when recording is about to be finished.
+ 
+    On Mac OS X, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method, the first samples written to the new file are guaranteed to be those contained in the sample buffer passed to that method.
+ */
+- (void)startRecordingToOutputFileURL:(NSURL *)outputFileURL outputFileType:(AVFileType)fileType recordingDelegate:(id<AVCaptureFileOutputRecordingDelegate>)delegate;
+
+/*!
+ @property metadata
+ @abstract
+    A collection of metadata to be written to the receiver's output files.
+ 
+ @discussion
+    The value of this property is an array of AVMetadataItem objects representing the collection of top-level metadata to be written in each output file. Only ID3 v2.2, v2.3, or v2.4 style metadata items are supported.
+ */
+@property(nonatomic, copy) NSArray<AVMetadataItem *> *metadata;
+
+/*!
+ @property audioSettings
+ @abstract
+    Specifies the options the receiver uses to re-encode audio as it is being recorded.
+ 
+ @discussion
+    The output settings dictionary can contain values for keys from AVAudioSettings.h. A value of nil indicates that the format of the audio should not be changed before being written to the file.
+ */
+@property(nonatomic, copy, nullable) NSDictionary<NSString *, id> *audioSettings;
+
+@end
+
+NS_ASSUME_NONNULL_END
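
For reference, a minimal sketch of the recording flow declared by this new header. It is not part of the diff: the RecordingDelegate class, the output path, and the session/device wiring are illustrative assumptions, while startRecordingToOutputFileURL:recordingDelegate:, maxRecordedDuration, and the required didFinishRecording callback come from the header above.

```objc
#import <AVFoundation/AVFoundation.h>

// Hypothetical delegate type implementing only the required callback.
@interface RecordingDelegate : NSObject <AVCaptureFileOutputRecordingDelegate>
@end

@implementation RecordingDelegate

// Invoked once all pending data has been written to the file, whether the
// recording ended normally or was stopped by a limit or an error.
- (void)captureOutput:(AVCaptureFileOutput *)output
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
      fromConnections:(NSArray<AVCaptureConnection *> *)connections
                error:(nullable NSError *)error
{
    NSLog(@"finished recording %@ (error: %@)", outputFileURL, error);
}

@end

// Starts a recording capped at 30 seconds on an output that is assumed to be
// attached to a running AVCaptureSession. When the cap is reached, recording
// stops and the delegate above is called with an appropriate error.
static void StartCappedRecording(AVCaptureMovieFileOutput *movieOutput,
                                 RecordingDelegate *delegate)
{
    movieOutput.maxRecordedDuration = CMTimeMake(30, 1);

    // Recording fails if a file already exists at this URL.
    NSURL *url = [NSURL fileURLWithPath:@"/tmp/capture.mov"]; // illustrative path
    [movieOutput startRecordingToOutputFileURL:url recordingDelegate:delegate];
}
```

Per the header above, the default movieFragmentInterval of ten seconds applies here, so an unexpectedly interrupted recording remains playable up to the last written fragment.
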
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureInput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureInput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureInput.h	2016-05-03 18:21:22.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureInput.h	2017-05-24 00:41:54.000000000 -0400
@@ -1,19 +1,24 @@
 /*
-	File:  AVCaptureInput.h
-
-	Framework:  AVFoundation
+    File:  AVCaptureInput.h
+ 
+    Framework:  AVFoundation
  
-	Copyright 2010-2015 Apple Inc. All rights reserved.
+    Copyright 2010-2017 Apple Inc. All rights reserved.
 */
 
 #import <AVFoundation/AVBase.h>
+#import <AVFoundation/AVMediaFormat.h>
 #import <Foundation/Foundation.h>
 #import <CoreMedia/CMFormatDescription.h>
 #import <CoreMedia/CMSync.h>
-#if TARGET_OS_MAC && ! (TARGET_OS_EMBEDDED || TARGET_OS_IPHONE || TARGET_OS_WIN32)
-	#import <CoreGraphics/CGDirectDisplay.h>
+#if TARGET_OS_OSX
+    #import <CoreGraphics/CGDirectDisplay.h>
 #endif
 
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark AVCaptureInput
+
 @class AVCaptureInputPort;
 @class AVCaptureInputInternal;
 @class AVTimedMetadataGroup;
@@ -21,34 +26,29 @@
 /*!
  @class AVCaptureInput
  @abstract
-    AVCaptureInput is an abstract class that provides an interface for connecting capture input sources to an
-    AVCaptureSession.
-
+    AVCaptureInput is an abstract class that provides an interface for connecting capture input sources to an AVCaptureSession.
+ 
  @discussion
-    Concrete instances of AVCaptureInput representing input sources such as cameras can be added to instances of
-    AVCaptureSession using the -[AVCaptureSession addInput:] method. An AVCaptureInput vends one or more streams of
-    media data. For example, input devices can provide both audio and video data. Each media stream provided by an input
-    is represented by an AVCaptureInputPort object. Within a capture session, connections are made between
-    AVCaptureInput instances and AVCaptureOutput instances via AVCaptureConnection objects that define the mapping
-    between a set of AVCaptureInputPort objects and a single AVCaptureOutput.
-*/
-NS_CLASS_AVAILABLE(10_7, 4_0)
+    Concrete instances of AVCaptureInput representing input sources such as cameras can be added to instances of AVCaptureSession using the -[AVCaptureSession addInput:] method. An AVCaptureInput vends one or more streams of media data. For example, input devices can provide both audio and video data. Each media stream provided by an input is represented by an AVCaptureInputPort object. Within a capture session, connections are made between AVCaptureInput instances and AVCaptureOutput instances via AVCaptureConnection objects that define the mapping between a set of AVCaptureInputPort objects and a single AVCaptureOutput.
+ */
+NS_CLASS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED
 @interface AVCaptureInput : NSObject 
 {
 @private
-	AVCaptureInputInternal *_inputInternal;
+    AVCaptureInputInternal *_inputInternal;
 }
 
+AV_INIT_UNAVAILABLE
+
 /*!
  @property ports
  @abstract
     The ports owned by the receiver.
-
+ 
  @discussion
-    The value of this property is an array of AVCaptureInputPort objects, each exposing an interface to a single stream
-    of media data provided by an input.
-*/
-@property(nonatomic, readonly) NSArray *ports;
+    The value of this property is an array of AVCaptureInputPort objects, each exposing an interface to a single stream of media data provided by an input.
+ */
+@property(nonatomic, readonly) NSArray<AVCaptureInputPort *> *ports;
 
 @end
 
@@ -57,305 +57,282 @@
  @constant AVCaptureInputPortFormatDescriptionDidChangeNotification
  @abstract
     This notification is posted when the value of an AVCaptureInputPort instance's formatDescription property changes.
-
+ 
  @discussion
     The notification object is the AVCaptureInputPort instance whose format description changed.
-*/
-AVF_EXPORT NSString *const AVCaptureInputPortFormatDescriptionDidChangeNotification NS_AVAILABLE(10_7, 4_0);
+ */
+AVF_EXPORT NSString *const AVCaptureInputPortFormatDescriptionDidChangeNotification NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
+
+
+#pragma mark - AVCaptureInputPort
 
 @class AVCaptureInputPortInternal;
 
 /*!
  @class AVCaptureInputPort
  @abstract
-    An AVCaptureInputPort describes a single stream of media data provided by an AVCaptureInput and provides an
-    interface for connecting that stream to AVCaptureOutput instances via AVCaptureConnection.
-
+    An AVCaptureInputPort describes a single stream of media data provided by an AVCaptureInput and provides an interface for connecting that stream to AVCaptureOutput instances via AVCaptureConnection.
+ 
  @discussion
-    Instances of AVCaptureInputPort cannot be created directly. An AVCaptureInput exposes its input ports via its ports
-    property. Input ports provide information about the format of their media data via the mediaType and
-    formatDescription properties, and allow clients to control the flow of data via the enabled property. Input ports
-    are used by an AVCaptureConnection to define the mapping between inputs and outputs in an AVCaptureSession.
-*/
-NS_CLASS_AVAILABLE(10_7, 4_0)
+    Instances of AVCaptureInputPort cannot be created directly. An AVCaptureInput exposes its input ports via its ports property. Input ports provide information about the format of their media data via the mediaType and formatDescription properties, and allow clients to control the flow of data via the enabled property. Input ports are used by an AVCaptureConnection to define the mapping between inputs and outputs in an AVCaptureSession.
+ */
+NS_CLASS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED
 @interface AVCaptureInputPort : NSObject
 {
 @private
     AVCaptureInputPortInternal *_internal;
 }
 
+AV_INIT_UNAVAILABLE
+
 /*!
  @property input
  @abstract
     The input that owns the receiver.
-
+ 
  @discussion
     The value of this property is an AVCaptureInput instance that owns the receiver.
-*/
+ */
 @property(nonatomic, readonly) AVCaptureInput *input;
 
 /*!
  @property mediaType
  @abstract
     The media type of the data provided by the receiver.
-
+ 
  @discussion
-    The value of this property is a constant describing the type of media, such as AVMediaTypeVideo or AVMediaTypeAudio,
-    provided by the receiver. Media type constants are defined in AVMediaFormat.h.
-*/
-@property(nonatomic, readonly) NSString *mediaType;
+    The value of this property is a constant describing the type of media, such as AVMediaTypeVideo or AVMediaTypeAudio, provided by the receiver. Media type constants are defined in AVMediaFormat.h.
+ */
+@property(nonatomic, readonly) AVMediaType mediaType;
 
 /*!
  @property formatDescription
  @abstract
     The format of the data provided by the receiver.
-
+ 
  @discussion
-    The value of this property is a CMFormatDescription that describes the format of the media data currently provided
-    by the receiver. Clients can be notified of changes to the format by observing the
-    AVCaptureInputPortFormatDescriptionDidChangeNotification.
-*/
-@property(nonatomic, readonly) CMFormatDescriptionRef formatDescription;
+    The value of this property is a CMFormatDescription that describes the format of the media data currently provided by the receiver. Clients can be notified of changes to the format by observing the AVCaptureInputPortFormatDescriptionDidChangeNotification.
+ */
+@property(nonatomic, readonly, nullable) __attribute__((NSObject)) CMFormatDescriptionRef formatDescription;
 
 /*!
  @property enabled
  @abstract
     Whether the receiver should provide data.
-
+ 
  @discussion
-    The value of this property is a BOOL that determines whether the receiver should provide data to outputs when a
-    session is running. Clients can set this property to fine tune which media streams from a given input will be used
-    during capture. The default value is YES.
-*/
+    The value of this property is a BOOL that determines whether the receiver should provide data to outputs when a session is running. Clients can set this property to fine tune which media streams from a given input will be used during capture. The default value is YES.
+ */
 @property(nonatomic, getter=isEnabled) BOOL enabled;
 
 /*!
  @property clock
  @abstract
-	Provides access to the "native" clock used by the input port.
+    Provides access to the "native" clock used by the input port.
+ 
  @discussion
-	The clock is read-only.
+    The clock is read-only.
  */
-@property(nonatomic, readonly) __attribute__((NSObject)) CMClockRef clock NS_AVAILABLE(10_9, 7_0);
+@property(nonatomic, readonly, nullable) __attribute__((NSObject)) CMClockRef clock NS_AVAILABLE(10_9, 7_0);
 
 @end
 
+
+#pragma mark - AVCaptureDeviceInput
+
 @class AVCaptureDevice;
 @class AVCaptureDeviceInputInternal;
 
 /*!
  @class AVCaptureDeviceInput
  @abstract
-    AVCaptureDeviceInput is a concrete subclass of AVCaptureInput that provides an interface for capturing media from an
-    AVCaptureDevice.
-
+    AVCaptureDeviceInput is a concrete subclass of AVCaptureInput that provides an interface for capturing media from an AVCaptureDevice.
+ 
  @discussion
-    Instances of AVCaptureDeviceInput are input sources for AVCaptureSession that provide media data from devices
-    connected to the system, represented by instances of AVCaptureDevice.
-*/
-NS_CLASS_AVAILABLE(10_7, 4_0)
+    Instances of AVCaptureDeviceInput are input sources for AVCaptureSession that provide media data from devices connected to the system, represented by instances of AVCaptureDevice.
+ */
+NS_CLASS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED
 @interface AVCaptureDeviceInput : AVCaptureInput 
 {
 @private
-	AVCaptureDeviceInputInternal *_internal;
+    AVCaptureDeviceInputInternal *_internal;
 }
 
 /*!
  @method deviceInputWithDevice:error:
  @abstract
     Returns an AVCaptureDeviceInput instance that provides media data from the given device.
-
+ 
  @param device
     An AVCaptureDevice instance to be used for capture.
  @param outError
     On return, if the given device cannot be used for capture, points to an NSError describing the problem.
  @result
-    An AVCaptureDeviceInput instance that provides data from the given device, or nil, if the device could not be used
-    for capture.
-
+    An AVCaptureDeviceInput instance that provides data from the given device, or nil, if the device could not be used for capture.
+ 
  @discussion
-    This method returns an instance of AVCaptureDeviceInput that can be used to capture data from an AVCaptureDevice in
-    an AVCaptureSession. This method attempts to open the device for capture, taking exclusive control of it if
-    necessary. If the device cannot be opened because it is no longer available or because it is in use, for example,
-    this method returns nil, and the optional outError parameter points to an NSError describing the problem.
-*/
-+ (instancetype)deviceInputWithDevice:(AVCaptureDevice *)device error:(NSError **)outError;
+    This method returns an instance of AVCaptureDeviceInput that can be used to capture data from an AVCaptureDevice in an AVCaptureSession. This method attempts to open the device for capture, taking exclusive control of it if necessary. If the device cannot be opened because it is no longer available or because it is in use, for example, this method returns nil, and the optional outError parameter points to an NSError describing the problem.
+ */
++ (nullable instancetype)deviceInputWithDevice:(AVCaptureDevice *)device error:(NSError * _Nullable * _Nullable)outError;
 
 /*!
  @method initWithDevice:error:
  @abstract
     Creates an AVCaptureDeviceInput instance that provides media data from the given device.
-
+ 
  @param device
     An AVCaptureDevice instance to be used for capture.
  @param outError
     On return, if the given device cannot be used for capture, points to an NSError describing the problem.
  @result
-    An AVCaptureDeviceInput instance that provides data from the given device, or nil, if the device could not be used
-    for capture.
-
+    An AVCaptureDeviceInput instance that provides data from the given device, or nil, if the device could not be used for capture.
+ 
  @discussion
-    This method creates an instance of AVCaptureDeviceInput that can be used to capture data from an AVCaptureDevice in
-    an AVCaptureSession. This method attempts to open the device for capture, taking exclusive control of it if
-    necessary. If the device cannot be opened because it is no longer available or because it is in use, for example,
-    this method returns nil, and the optional outError parameter points to an NSError describing the problem.
-*/
-- (instancetype)initWithDevice:(AVCaptureDevice *)device error:(NSError **)outError;
+    This method creates an instance of AVCaptureDeviceInput that can be used to capture data from an AVCaptureDevice in an AVCaptureSession. This method attempts to open the device for capture, taking exclusive control of it if necessary. If the device cannot be opened because it is no longer available or because it is in use, for example, this method returns nil, and the optional outError parameter points to an NSError describing the problem.
+ */
+- (nullable instancetype)initWithDevice:(AVCaptureDevice *)device error:(NSError * _Nullable * _Nullable)outError;
 
 /*!
  @property device
  @abstract
     The device from which the receiver provides data.
-
+ 
  @discussion
     The value of this property is the AVCaptureDevice instance that was used to create the receiver.
-*/
+ */
 @property(nonatomic, readonly) AVCaptureDevice *device;
 
 @end
 
 
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
+#pragma mark - AVCaptureScreenInput
 
 @class AVCaptureScreenInputInternal;
 
 /*!
  @class AVCaptureScreenInput
  @abstract
-    AVCaptureScreenInput is a concrete subclass of AVCaptureInput that provides an interface for capturing media from
-    a screen or portion thereof.
-
+    AVCaptureScreenInput is a concrete subclass of AVCaptureInput that provides an interface for capturing media from a screen or portion thereof.
+ 
  @discussion
-    Instances of AVCaptureScreenInput are input sources for AVCaptureSession that provide media data from
-    one of the screens connected to the system, represented by CGDirectDisplayIDs.
-*/
-NS_CLASS_AVAILABLE(10_7, NA)
+    Instances of AVCaptureScreenInput are input sources for AVCaptureSession that provide media data from one of the screens connected to the system, represented by CGDirectDisplayIDs.
+ */
+NS_CLASS_AVAILABLE_MAC(10_7) __TVOS_PROHIBITED
 @interface AVCaptureScreenInput : AVCaptureInput 
 {
 @private
-	AVCaptureScreenInputInternal *_internal;
+    AVCaptureScreenInputInternal *_internal;
 }
 
 /*!
+ @method init
+ @abstract
+    Creates an AVCaptureScreenInput instance that provides media data from the main display.
+ 
+ @discussion
+    This method creates an instance of AVCaptureScreenInput using the main display whose id is returned from CGMainDisplayID().
+ */
+- (instancetype)init;
+
++ (instancetype)new;
+
+#if TARGET_OS_OSX
+
+/*!
  @method initWithDisplayID:
  @abstract
     Creates an AVCaptureScreenInput instance that provides media data from the given display.
  
  @param displayID
-    The id of the display from which to capture video.  CGDirectDisplayID is defined in <CoreGraphics/CGDirectDisplay.h>
+    The id of the display from which to capture video. CGDirectDisplayID is defined in <CoreGraphics/CGDirectDisplay.h>
  @result
-    An AVCaptureScreenInput instance that provides data from the given screen, or nil, if the screen could not be used
-    for capture.
-
+    An AVCaptureScreenInput instance that provides data from the given screen, or nil, if the screen could not be used for capture.
+ 
  @discussion
-    This method creates an instance of AVCaptureScreenInput that can be used to capture data from a display in
-    an AVCaptureSession. This method validates the displayID. If the display cannot be used because it is not available
-    on the system, for example, this method returns nil.
-*/
+    This method creates an instance of AVCaptureScreenInput that can be used to capture data from a display in an AVCaptureSession. This method validates the displayID. If the display cannot be used because it is not available on the system, for example, this method returns nil.
+ */
 - (instancetype)initWithDisplayID:(CGDirectDisplayID)displayID;
 
+#endif // TARGET_OS_OSX
+
 /*!
  @property minFrameDuration
  @abstract
     A property indicating the screen input's minimum frame duration.
-
+ 
  @discussion
-    An AVCaptureScreenInput's minFrameDuration is the reciprocal of its maximum frame rate.  This property
-    may be used to request a maximum frame rate at which the input produces video frames.  The requested
-    rate may not be achievable due to overall bandwidth, so actual frame rates may be lower.
-*/
+    An AVCaptureScreenInput's minFrameDuration is the reciprocal of its maximum frame rate. This property may be used to request a maximum frame rate at which the input produces video frames. The requested rate may not be achievable due to overall bandwidth, so actual frame rates may be lower.
+ */
 @property(nonatomic) CMTime minFrameDuration;
 
 /*!
  @property cropRect
  @abstract
     A property indicating the bounding rectangle of the screen area to be captured in pixels.
-
+ 
  @discussion
-    By default, AVCaptureScreenInput captures the entire area of the displayID with which it is associated.
-    To limit the capture rectangle to a subsection of the screen, set the cropRect property, which
-    defines a smaller section of the screen in the screen's coordinate system.  The origin (0,0) is
-    the bottom-left corner of the screen.
-*/
+    By default, AVCaptureScreenInput captures the entire area of the displayID with which it is associated. To limit the capture rectangle to a subsection of the screen, set the cropRect property, which defines a smaller section of the screen in the screen's coordinate system. The origin (0,0) is the bottom-left corner of the screen.
+ */
 @property(nonatomic) CGRect cropRect;
 
 /*!
  @property scaleFactor
  @abstract
     A property indicating the factor by which video buffers captured from the screen are to be scaled.
-
+ 
  @discussion
-    By default, AVCaptureScreenInput captures the video buffers from the display at a scale factor
-    of 1.0 (no scaling).  Set this property to scale the buffers by a given factor.  For instance,
-    a 320x240 capture area with a scaleFactor of 2.0f produces video buffers at 640x480.
-*/
+    By default, AVCaptureScreenInput captures the video buffers from the display at a scale factor of 1.0 (no scaling). Set this property to scale the buffers by a given factor. For instance, a 320x240 capture area with a scaleFactor of 2.0f produces video buffers at 640x480.
+ */
 @property(nonatomic) CGFloat scaleFactor;
 
 /*!
  @property capturesMouseClicks
  @abstract
     A property indicating whether mouse clicks should be highlighted in the captured output.
-
+ 
  @discussion
-    By default, AVCaptureScreenInput does not highlight mouse clicks in its captured output.  If this
-    property is set to YES, mouse clicks are highlighted (a circle is drawn around the mouse for the
-    duration of the click) in the captured output.
-*/
+    By default, AVCaptureScreenInput does not highlight mouse clicks in its captured output. If this property is set to YES, mouse clicks are highlighted (a circle is drawn around the mouse for the duration of the click) in the captured output.
+ */
 @property(nonatomic) BOOL capturesMouseClicks;
 
 /*!
  @property capturesCursor
  @abstract
     A property indicating whether the cursor should be rendered to the captured output.
-
+ 
  @discussion
-    By default, AVCaptureScreenInput draws the cursor in its captured output.  If this property
-    is set to NO, the captured output contains only the windows on the screen.  Cursor is
-    omitted.  Note that cursor position and mouse button state at the time of capture is
-    preserved in CMSampleBuffers emitted from AVCaptureScreenInput.  See the inline documentation
-    for kCMIOSampleBufferAttachmentKey_MouseAndKeyboardModifiers in <CoreMediaIO/CMIOSampleBuffer.h>
-*/
-@property(nonatomic) BOOL capturesCursor NS_AVAILABLE(10_8, NA);
+    By default, AVCaptureScreenInput draws the cursor in its captured output. If this property is set to NO, the captured output contains only the windows on the screen, and the cursor is omitted. Note that the cursor position and mouse button state at the time of capture are preserved in CMSampleBuffers emitted from AVCaptureScreenInput. See the inline documentation for kCMIOSampleBufferAttachmentKey_MouseAndKeyboardModifiers in <CoreMediaIO/CMIOSampleBuffer.h>.
+ */
+@property(nonatomic) BOOL capturesCursor NS_AVAILABLE_MAC(10_8);
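
Continuing the sketch above, the two cursor-related properties might be set as follows; highlighting clicks while omitting the cursor image is just one illustrative combination:

```objc
// Highlight clicks (a circle is drawn for the duration of each click) but
// omit the cursor image itself from the captured output.
input.capturesMouseClicks = YES;
input.capturesCursor = NO;
```
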
 
 /*!
  @property removesDuplicateFrames
  @abstract
     A property indicating whether duplicate frames should be removed by the input.
-
+ 
  @discussion
-    If this property is set to YES, AVCaptureScreenInput performs frame differencing and when it
-	detects duplicate frames, it drops them.  If set to NO, the captured output receives all frames
-    from the input.  Prior to 10.9 this value defaulted to YES.  In 10.9 and later, it defaults to
-	NO, as modern platforms support frame differencing in hardware-based encoders.
-	
-	As of 10.10, this property has been deprecated and is ignored.  Clients wishing to re-create
-	this functionality can use an AVCaptureVideoDataOutput and compare frame contents in their
-	own code.  If they wish to write a movie file, they can then pass the unique frames to an
-	AVAssetWriterInput.
-*/
-@property(nonatomic) BOOL removesDuplicateFrames NS_DEPRECATED(10_8, 10_10, NA, NA);
+    If this property is set to YES, AVCaptureScreenInput performs frame differencing and when it detects duplicate frames, it drops them. If set to NO, the captured output receives all frames from the input. Prior to 10.9 this value defaulted to YES. In 10.9 and later, it defaults to NO, as modern platforms support frame differencing in hardware-based encoders.
+ 
+    As of 10.10, this property has been deprecated and is ignored. Clients wishing to re-create this functionality can use an AVCaptureVideoDataOutput and compare frame contents in their own code. If they wish to write a movie file, they can then pass the unique frames to an AVAssetWriterInput.
+ */
+@property(nonatomic) BOOL removesDuplicateFrames NS_DEPRECATED_MAC(10_8, 10_10);
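
A rough sketch of the replacement approach the discussion suggests. The -isDuplicateFrame: helper and writerInput property are hypothetical; real frame differencing is left to the client:

```objc
// Inside an AVCaptureVideoDataOutput sample buffer delegate: drop duplicates
// ourselves, then hand unique frames to an AVAssetWriterInput.
- (void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    if ([self isDuplicateFrame:sampleBuffer]) { // hypothetical comparison helper
        return;
    }
    if (self.writerInput.isReadyForMoreMediaData) { // assumed AVAssetWriterInput property
        [self.writerInput appendSampleBuffer:sampleBuffer];
    }
}
```
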
 
 @end
 
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
 
+#pragma mark - AVCaptureMetadataInput
 
 @class AVCaptureMetadataInputInternal;
 
 /*!
  @class AVCaptureMetadataInput
  @abstract
-    AVCaptureMetadataInput is a concrete subclass of AVCaptureInput that provides a way for
-    clients to supply AVMetadataItems to an AVCaptureSession.
-
+    AVCaptureMetadataInput is a concrete subclass of AVCaptureInput that provides a way for clients to supply AVMetadataItems to an AVCaptureSession.
+ 
  @discussion
-    Instances of AVCaptureMetadataInput are input sources for AVCaptureSession that provide
-    AVMetadataItems to an AVCaptureSession.  AVCaptureMetadataInputs present one and only one
-    AVCaptureInputPort, which currently may only be connected to an AVCaptureMovieFileOutput.
-    The metadata supplied over the input port is provided by the client, and must conform to a
-    client-supplied CMFormatDescription.  The AVMetadataItems are supplied in an AVTimedMetadataGroup.
-*/
-NS_CLASS_AVAILABLE(NA, 9_0)
+    Instances of AVCaptureMetadataInput are input sources for AVCaptureSession that provide AVMetadataItems to an AVCaptureSession. AVCaptureMetadataInputs present one and only one AVCaptureInputPort, which currently may only be connected to an AVCaptureMovieFileOutput. The metadata supplied over the input port is provided by the client, and must conform to a client-supplied CMFormatDescription. The AVMetadataItems are supplied in an AVTimedMetadataGroup.
+ */
+NS_CLASS_AVAILABLE_IOS(9_0) __TVOS_PROHIBITED
 @interface AVCaptureMetadataInput : AVCaptureInput 
 {
 @private
@@ -365,62 +342,50 @@
 /*!
  @method metadataInputWithFormatDescription:clock:
  @abstract
-    Returns an AVCaptureMetadataInput instance that allows a client to provide
-    AVTimedMetadataGroups to an AVCaptureSession.
-
+    Returns an AVCaptureMetadataInput instance that allows a client to provide AVTimedMetadataGroups to an AVCaptureSession.
+ 
  @param desc
-    A CMFormatDescription that defines the metadata to be supplied by the client.
-    Throws an NSInvalidArgumentException if NULL is passed.
+    A CMFormatDescription that defines the metadata to be supplied by the client. Throws an NSInvalidArgumentException if NULL is passed.
  @param clock
-    A CMClock that provided the timebase for the supplied samples.
-    Throws an NSInvalidArgumentException if NULL is passed.
+    A CMClock that provides the timebase for the supplied samples. Throws an NSInvalidArgumentException if NULL is passed.
  @result
     An AVCaptureMetadataInput instance.
-
+ 
  @discussion
-    This method returns an instance of AVCaptureMetadataInput that can be used to capture
-    AVTimedMetadataGroups supplied by the client to an AVCaptureSession.
-*/
+    This method returns an instance of AVCaptureMetadataInput that can be used to capture AVTimedMetadataGroups supplied by the client to an AVCaptureSession.
+ */
 + (instancetype)metadataInputWithFormatDescription:(CMMetadataFormatDescriptionRef)desc clock:(CMClockRef)clock;
 
 /*!
  @method initWithFormatDescription:clock:
  @abstract
-    Creates an AVCaptureMetadataInput instance that allows a client to provide
-    AVTimedMetadataGroups to an AVCaptureSession.
-
+    Creates an AVCaptureMetadataInput instance that allows a client to provide AVTimedMetadataGroups to an AVCaptureSession.
+ 
  @param desc
-    A CMFormatDescription that defines the metadata to be supplied by the client.
-    Throws NSInvalidArgumentException if NULL is passed.
+    A CMFormatDescription that defines the metadata to be supplied by the client. Throws NSInvalidArgumentException if NULL is passed.
  @param clock
-    A CMClock that provided the timebase for the supplied samples.
-    Throws NSInvalidArgumentException if NULL is passed.
+    A CMClock that provides the timebase for the supplied samples. Throws NSInvalidArgumentException if NULL is passed.
  @result
-    An AVCaptureMetadataInput instance, or nil, if the device could not be used
-    for capture.
-
+    An AVCaptureMetadataInput instance, or nil if the device could not be used for capture.
+ 
  @discussion
-    This method creates an instance of AVCaptureMetadataInput that can be used to capture
-    AVTimedMetadataGroups supplied by the client to an AVCaptureSession.
-*/
+    This method creates an instance of AVCaptureMetadataInput that can be used to capture AVTimedMetadataGroups supplied by the client to an AVCaptureSession.
+ */
 - (instancetype)initWithFormatDescription:(CMMetadataFormatDescriptionRef)desc clock:(CMClockRef)clock;
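
A sketch of creating such an input for ISO 6709 location strings, timestamped against the host time clock. The metadata specification keys and data types come from CoreMedia's CMMetadata.h; error handling is abbreviated, and this class is iOS-only per the availability macro above:

```objc
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>

static AVCaptureMetadataInput *MakeLocationMetadataInput(void) {
    // Describe one metadata item: a QuickTime location string (ISO 6709).
    NSDictionary *spec = @{
        (__bridge NSString *)kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier :
            AVMetadataIdentifierQuickTimeMetadataLocationISO6709,
        (__bridge NSString *)kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType :
            (__bridge NSString *)kCMMetadataDataType_QuickTimeMetadataLocation_ISO6709,
    };
    CMMetadataFormatDescriptionRef desc = NULL;
    CMMetadataFormatDescriptionCreateWithMetadataSpecifications(
        kCFAllocatorDefault, kCMMetadataFormatType_Boxed,
        (__bridge CFArrayRef)@[ spec ], &desc);
    AVCaptureMetadataInput *input =
        [[AVCaptureMetadataInput alloc] initWithFormatDescription:desc
                                                            clock:CMClockGetHostTimeClock()];
    if (desc) CFRelease(desc);
    return input;
}
```
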
 
 /*!
  @method appendTimedMetadataGroup:
  @abstract
     Provides metadata to the AVCaptureSession.
-
+ 
  @param metadata
-    An AVTimedMetadataGroup of metadata.  Will throw an exception if nil.
-    In order to denote a period of no metadata, an empty AVTimedMetadataGroup should
-    be passed.
-
+    An AVTimedMetadataGroup of metadata. Throws an exception if nil. To denote a period of no metadata, an empty AVTimedMetadataGroup should be passed.
+ 
  @discussion
-    The provided AVTimedMetadataGroup will be provided to the AVCaptureSession.  The group's
-    presentation timestamp is expressed in the context of the clock supplied to the initializer.
-    It is not required that the AVTimedMetadataGroup have a duration;  an empty AVTimedMetadataGroup
-    can be supplied to denote a period of no metadata.
-*/
-- (BOOL)appendTimedMetadataGroup:(AVTimedMetadataGroup *)metadata error:(NSError **)outError;
+    The provided AVTimedMetadataGroup will be provided to the AVCaptureSession. The group's presentation timestamp is expressed in the context of the clock supplied to the initializer. It is not required that the AVTimedMetadataGroup have a duration; an empty AVTimedMetadataGroup can be supplied to denote a period of no metadata.
+ */
+- (BOOL)appendTimedMetadataGroup:(AVTimedMetadataGroup *)metadata error:(NSError * _Nullable * _Nullable)outError;
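
For example, a single location item might be appended like this; the ISO 6709 string is illustrative, and metadataInput is assumed to have been created as sketched above:

```objc
AVMutableMetadataItem *item = [[AVMutableMetadataItem alloc] init];
item.identifier = AVMetadataIdentifierQuickTimeMetadataLocationISO6709;
item.dataType = (__bridge NSString *)kCMMetadataDataType_QuickTimeMetadataLocation_ISO6709;
item.value = @"+34.0522-118.2437/"; // illustrative ISO 6709 location string

// Timestamp in the context of the clock supplied to the initializer; no
// duration is required.
CMTimeRange range = CMTimeRangeMake(CMClockGetTime(CMClockGetHostTimeClock()), kCMTimeInvalid);
AVTimedMetadataGroup *group = [[AVTimedMetadataGroup alloc] initWithItems:@[ item ]
                                                                timeRange:range];
NSError *error = nil;
if (![metadataInput appendTimedMetadataGroup:group error:&error]) {
    NSLog(@"append failed: %@", error);
}
```
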
 
 @end
+
+NS_ASSUME_NONNULL_END
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureMetadataOutput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureMetadataOutput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureMetadataOutput.h	1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureMetadataOutput.h	2017-05-24 00:28:27.000000000 -0400
@@ -0,0 +1,141 @@
+/*
+    File:  AVCaptureMetadataOutput.h
+ 
+    Framework:  AVFoundation
+ 
+    Copyright 2010-2017 Apple Inc. All rights reserved.
+*/
+
+#import <AVFoundation/AVCaptureOutputBase.h>
+#import <AVFoundation/AVMetadataObject.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark AVCaptureMetadataOutput
+
+@class AVCaptureMetadataOutputInternal;
+@protocol AVCaptureMetadataOutputObjectsDelegate;
+
+/*!
+ @class AVCaptureMetadataOutput
+ @abstract
+    AVCaptureMetadataOutput is a concrete subclass of AVCaptureOutput that can be used to process metadata objects from an attached connection.
+ 
+ @discussion
+    Instances of AVCaptureMetadataOutput emit arrays of AVMetadataObject instances (see AVMetadataObject.h), such as detected faces. Applications can access the metadata objects with the captureOutput:didOutputMetadataObjects:fromConnection: delegate method.
+ */
+NS_CLASS_AVAILABLE_IOS(6_0) __TVOS_PROHIBITED
+@interface AVCaptureMetadataOutput : AVCaptureOutput 
+{
+@private
+    AVCaptureMetadataOutputInternal *_internal;
+}
+
+- (instancetype)init;
+
++ (instancetype)new;
+
+/*!
+ @method setMetadataObjectsDelegate:queue:
+ @abstract
+    Sets the receiver's delegate that will accept metadata objects and dispatch queue on which the delegate will be called.
+ 
+ @param objectsDelegate
+    An object conforming to the AVCaptureMetadataOutputObjectsDelegate protocol that will receive metadata objects after they are captured.
+ @param objectsCallbackQueue
+    A dispatch queue on which all delegate methods will be called.
+ 
+ @discussion
+    When new metadata objects are captured in the receiver's connection, they will be vended to the delegate using the captureOutput:didOutputMetadataObjects:fromConnection: delegate method. All delegate methods will be called on the specified dispatch queue.
+ 
+    Clients that need to minimize the chances of metadata being dropped should specify a queue on which a sufficiently small amount of processing is performed along with receiving metadata objects.
+ 
+    A serial dispatch queue must be used to guarantee that metadata objects will be delivered in order. The objectsCallbackQueue parameter may not be NULL, except when setting the objectsDelegate to nil.
+ */
+- (void)setMetadataObjectsDelegate:(nullable id<AVCaptureMetadataOutputObjectsDelegate>)objectsDelegate queue:(nullable dispatch_queue_t)objectsCallbackQueue;
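
A minimal sketch; the queue label and the use of self as delegate are assumptions:

```objc
// A serial queue guarantees in-order delivery of metadata objects.
dispatch_queue_t metadataQueue =
    dispatch_queue_create("com.example.metadata", DISPATCH_QUEUE_SERIAL);
[metadataOutput setMetadataObjectsDelegate:self queue:metadataQueue];
```
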
+
+/*!
+ @property metadataObjectsDelegate
+ @abstract
+    The receiver's delegate.
+ 
+ @discussion
+    The value of this property is an object conforming to the AVCaptureMetadataOutputObjectsDelegate protocol that will receive metadata objects after they are captured. The delegate is set using the setMetadataObjectsDelegate:queue: method.
+ */
+@property(nonatomic, readonly, nullable) id<AVCaptureMetadataOutputObjectsDelegate> metadataObjectsDelegate;
+
+/*!
+ @property metadataObjectsCallbackQueue
+ @abstract
+    The dispatch queue on which all metadata object delegate methods will be called.
+ 
+ @discussion
+    The value of this property is a dispatch_queue_t. The queue is set using the setMetadataObjectsDelegate:queue: method.
+ */
+@property(nonatomic, readonly, nullable) dispatch_queue_t metadataObjectsCallbackQueue;
+
+/*!
+ @property availableMetadataObjectTypes
+ @abstract
+    Indicates the receiver's supported metadata object types.
+ 
+ @discussion
+    The value of this property is an NSArray of NSStrings corresponding to AVMetadataObjectType strings defined in AVMetadataObject.h -- one for each metadata object type supported by the receiver. Available metadata object types are dependent on the capabilities of the AVCaptureInputPort to which this receiver's AVCaptureConnection is connected. Clients may specify the types of objects they would like to process by calling setMetadataObjectTypes:. This property is key-value observable.
+ */
+@property(nonatomic, readonly) NSArray<AVMetadataObjectType> *availableMetadataObjectTypes;
+
+/*!
+ @property metadataObjectTypes
+ @abstract
+    Specifies the types of metadata objects that the receiver should present to the client.
+ 
+ @discussion
+    AVCaptureMetadataOutput may detect and emit multiple metadata object types. For apps linked before iOS 7.0, the receiver defaults to capturing face metadata objects if supported (see -availableMetadataObjectTypes). For apps linked on or after iOS 7.0, the receiver captures no metadata objects by default. -setMetadataObjectTypes: throws an NSInvalidArgumentException if any elements in the array are not present in the -availableMetadataObjectTypes array.
+ */
+@property(nonatomic, copy, null_resettable) NSArray<AVMetadataObjectType> *metadataObjectTypes;
+
+/*!
+ @property rectOfInterest
+ @abstract
+    Specifies a rectangle of interest for limiting the search area for visual metadata.
+ 
+ @discussion
+    The value of this property is a CGRect that determines the receiver's rectangle of interest for each frame of video. The rectangle's origin is top left and is relative to the coordinate space of the device providing the metadata. Specifying a rectOfInterest may improve detection performance for certain types of metadata. The default value of this property is the value CGRectMake(0, 0, 1, 1). Metadata objects whose bounds do not intersect with the rectOfInterest will not be returned.
+ */
+@property(nonatomic) CGRect rectOfInterest NS_AVAILABLE_IOS(7_0);
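
Putting the last two properties together, a configuration sketch; the face type and half-frame rect are illustrative choices:

```objc
// Request face metadata only if the connection supports it, and limit the
// search to the top half of the picture ({0,0} is top-left, {1,1} bottom-right).
if ([metadataOutput.availableMetadataObjectTypes containsObject:AVMetadataObjectTypeFace]) {
    metadataOutput.metadataObjectTypes = @[ AVMetadataObjectTypeFace ];
}
metadataOutput.rectOfInterest = CGRectMake(0.0, 0.0, 1.0, 0.5);
```
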
+
+@end
+
+
+/*!
+ @protocol AVCaptureMetadataOutputObjectsDelegate
+ @abstract
+    Defines an interface for delegates of AVCaptureMetadataOutput to receive emitted objects.
+ */
+NS_AVAILABLE_IOS(6_0) __TVOS_PROHIBITED
+@protocol AVCaptureMetadataOutputObjectsDelegate <NSObject>
+
+@optional
+
+/*!
+ @method captureOutput:didOutputMetadataObjects:fromConnection:
+ @abstract
+    Called whenever an AVCaptureMetadataOutput instance emits new objects through a connection.
+ 
+ @param output
+    The AVCaptureMetadataOutput instance that emitted the objects.
+ @param metadataObjects
+    An array of AVMetadataObject subclasses (see AVMetadataObject.h).
+ @param connection
+    The AVCaptureConnection through which the objects were emitted.
+ 
+ @discussion
+    Delegates receive this message whenever the output captures and emits new objects, as specified by its metadataObjectTypes property. Delegates can use the provided objects in conjunction with other APIs for further processing. This method will be called on the dispatch queue specified by the output's metadataObjectsCallbackQueue property. This method may be called frequently, so it must be efficient to prevent capture performance problems, including dropped metadata objects.
+ 
+    Clients that need to reference metadata objects outside of the scope of this method must retain them and then release them when they are finished with them.
+ */
+- (void)captureOutput:(AVCaptureOutput *)output didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects fromConnection:(AVCaptureConnection *)connection;
+
+@end
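
A sketch of a conforming delegate method; the face-bounds logging is illustrative (NSStringFromCGRect is from UIKit, which fits this iOS-only protocol):

```objc
- (void)captureOutput:(AVCaptureOutput *)output
didOutputMetadataObjects:(NSArray<__kindof AVMetadataObject *> *)metadataObjects
       fromConnection:(AVCaptureConnection *)connection
{
    // Keep this fast: it runs on metadataObjectsCallbackQueue for every batch.
    for (AVMetadataObject *object in metadataObjects) {
        if ([object isKindOfClass:[AVMetadataFaceObject class]]) {
            NSLog(@"face at %@", NSStringFromCGRect(object.bounds));
        }
    }
}
```
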
+
+NS_ASSUME_NONNULL_END
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutput.h	2016-05-03 18:21:22.000000000 -0400
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutput.h	2017-05-23 21:01:45.000000000 -0400
@@ -1,1805 +1,18 @@
 /*
     File:  AVCaptureOutput.h
- 	
- 	Framework:  AVFoundation
  
-	Copyright 2010-2015 Apple Inc. All rights reserved.
-*/
-
-#import <AVFoundation/AVBase.h>
-#import <Foundation/Foundation.h>
-#import <AVFoundation/AVCaptureSession.h>
-#import <CoreMedia/CMSampleBuffer.h>
-#import <QuartzCore/CALayer.h>
-#import <dispatch/dispatch.h>
-
-@class AVMetadataObject;
-@class AVCaptureOutputInternal;
-
-/*!
- @class AVCaptureOutput
- @abstract
-    AVCaptureOutput is an abstract class that defines an interface for an output destination of an AVCaptureSession.
- 
- @discussion
-    AVCaptureOutput provides an abstract interface for connecting capture output destinations, such as files and video
-    previews, to an AVCaptureSession.
-
-    An AVCaptureOutput can have multiple connections represented by AVCaptureConnection objects, one for each stream of
-    media that it receives from an AVCaptureInput. An AVCaptureOutput does not have any connections when it is first
-    created. When an output is added to an AVCaptureSession, connections are created that map media data from that
-    session's inputs to its outputs.
-
-    Concrete AVCaptureOutput instances can be added to an AVCaptureSession using the -[AVCaptureSession addOutput:] and
-    -[AVCaptureSession addOutputWithNoConnections:] methods.
-*/
-NS_CLASS_AVAILABLE(10_7, 4_0)
-@interface AVCaptureOutput : NSObject
-{
-@private
-    AVCaptureOutputInternal *_outputInternal;
-}
-
-/*!
- @property connections
- @abstract
-    The connections that describe the flow of media data to the receiver from AVCaptureInputs.
-
- @discussion
-    The value of this property is an NSArray of AVCaptureConnection objects, each describing the mapping between the
-    receiver and the AVCaptureInputPorts of one or more AVCaptureInputs.
-*/
-@property(nonatomic, readonly) NSArray *connections;
-
-/*!
- @method connectionWithMediaType:
- @abstract
-    Returns the first connection in the connections array with an inputPort of the specified mediaType.
-
- @param mediaType
-    An AVMediaType constant from AVMediaFormat.h, e.g. AVMediaTypeVideo.
-
- @discussion
-    This convenience method returns the first AVCaptureConnection in the receiver's
-    connections array that has an AVCaptureInputPort of the specified mediaType.  If no
-    connection with the specified mediaType is found, nil is returned.
-*/
-- (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType NS_AVAILABLE(10_7, 5_0);
-
-/*!
- @method transformedMetadataObjectForMetadataObject:connection:
- @abstract
-    Converts an AVMetadataObject's visual properties to the receiver's coordinates.
-
- @param metadataObject
-    An AVMetadataObject originating from the same AVCaptureInput as the receiver.
- 
- @param connection
-    The receiver's connection whose AVCaptureInput matches that of the metadata object to be converted.
-
- @result
-    An AVMetadataObject whose properties are in output coordinates.
-
- @discussion
-    AVMetadataObject bounds may be expressed as a rect where {0,0} represents the top left of the picture area,
-    and {1,1} represents the bottom right on an unrotated picture.  Face metadata objects likewise express
-    yaw and roll angles with respect to an unrotated picture.  -transformedMetadataObjectForMetadataObject:connection: 
-	converts the visual properties in the coordinate space of the supplied AVMetadataObject to the coordinate space of 
-    the receiver.  The conversion takes orientation, mirroring, and scaling into consideration.
-    If the provided metadata object originates from an input source other than the receiver's, nil will be returned.
- 
-    If an AVCaptureVideoDataOutput instance's connection's videoOrientation or videoMirrored properties are set to
-    non-default values, the output applies the desired mirroring and orientation by physically rotating and or flipping 
-    sample buffers as they pass through it.  AVCaptureStillImageOutput, on the other hand, does not physically rotate its buffers.
-    It attaches an appropriate kCGImagePropertyOrientation number to captured still image buffers (see ImageIO/CGImageProperties.h)
-    indicating how the image should be displayed on playback.  Likewise, AVCaptureMovieFileOutput does not physically
-    apply orientation/mirroring to its sample buffers -- it uses a QuickTime track matrix to indicate how the buffers
-    should be rotated and/or flipped on playback.
- 
-    transformedMetadataObjectForMetadataObject:connection: alters the visual properties of the provided metadata object 
-    to match the physical rotation / mirroring of the sample buffers provided by the receiver through the indicated 
-    connection.  I.e., for video data output, adjusted metadata object coordinates are rotated/mirrored.  For still image 
-    and movie file output, they are not.
-*/
-- (AVMetadataObject *)transformedMetadataObjectForMetadataObject:(AVMetadataObject *)metadataObject connection:(AVCaptureConnection *)connection NS_AVAILABLE_IOS(6_0);
-
-/*!
- @method metadataOutputRectOfInterestForRect:
- @abstract
-	Converts a rectangle in the receiver's coordinate space to a rectangle of interest in the coordinate space of an AVCaptureMetadataOutput
-	whose capture device is providing input to the receiver.
- 
- @param rectInOutputCoordinates
-	A CGRect in the receiver's coordinates.
- 
- @result
-	A CGRect in the coordinate space of the metadata output whose capture device is providing input to the receiver.
- 
- @discussion
-	AVCaptureMetadataOutput rectOfInterest is expressed as a CGRect where {0,0} represents the top left of the picture area,
-	and {1,1} represents the bottom right on an unrotated picture.  This convenience method converts a rectangle in
-	the coordinate space of the receiver to a rectangle of interest in the coordinate space of an AVCaptureMetadataOutput
-	whose AVCaptureDevice is providing input to the receiver.  The conversion takes orientation, mirroring, and scaling into 
-	consideration.  See -transformedMetadataObjectForMetadataObject:connection: for a full discussion of how orientation and mirroring
-	are applied to sample buffers passing through the output.	
- */
-- (CGRect)metadataOutputRectOfInterestForRect:(CGRect)rectInOutputCoordinates NS_AVAILABLE_IOS(7_0);
-
-/*!
- @method rectForMetadataOutputRectOfInterest:
- @abstract
-	Converts a rectangle of interest in the coordinate space of an AVCaptureMetadataOutput whose capture device is
-	providing input to the receiver to a rectangle in the receiver's coordinates.
- 
- @param rectInMetadataOutputCoordinates
-	A CGRect in the coordinate space of the metadata output whose capture device is providing input to the receiver.
- 
- @result
-	A CGRect in the receiver's coordinates.
- 
- @discussion
-	AVCaptureMetadataOutput rectOfInterest is expressed as a CGRect where {0,0} represents the top left of the picture area,
-	and {1,1} represents the bottom right on an unrotated picture.  This convenience method converts a rectangle in the coordinate 
-	space of an AVCaptureMetadataOutput whose AVCaptureDevice is providing input to the coordinate space of the receiver.  The 
-	conversion takes orientation, mirroring, and scaling into consideration. See -transformedMetadataObjectForMetadataObject:connection: 
-	for a full discussion of how orientation and mirroring are applied to sample buffers passing through the output.
- */
-- (CGRect)rectForMetadataOutputRectOfInterest:(CGRect)rectInMetadataOutputCoordinates NS_AVAILABLE_IOS(7_0);
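
As a sketch of the forward conversion, a rect in output coordinates can seed a metadata output's rectOfInterest; videoOutput and metadataOutput are assumed to share the same capture device input, and the rect values are illustrative:

```objc
// Convert a region of the video output's picture into the normalized,
// top-left-origin space that rectOfInterest expects.
CGRect scanArea = CGRectMake(100.0, 200.0, 400.0, 200.0); // output coordinates
metadataOutput.rectOfInterest = [videoOutput metadataOutputRectOfInterestForRect:scanArea];
```
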
-
-@end
-
-
-@class AVCaptureVideoDataOutputInternal;
-@protocol AVCaptureVideoDataOutputSampleBufferDelegate;
-
-/*!
- @class AVCaptureVideoDataOutput
- @abstract
-    AVCaptureVideoDataOutput is a concrete subclass of AVCaptureOutput that can be used to process uncompressed or
-    compressed frames from the video being captured.
-
- @discussion
-    Instances of AVCaptureVideoDataOutput produce video frames suitable for processing using other media APIs.
-    Applications can access the frames with the captureOutput:didOutputSampleBuffer:fromConnection: delegate method.
-*/
-NS_CLASS_AVAILABLE(10_7, 4_0)
-@interface AVCaptureVideoDataOutput : AVCaptureOutput 
-{
-@private
-	AVCaptureVideoDataOutputInternal *_internal;
-}
-
-/*!
- @method setSampleBufferDelegate:queue:
- @abstract
-    Sets the receiver's delegate that will accept captured buffers and dispatch queue on which the delegate will be
-    called.
-
- @param sampleBufferDelegate
-    An object conforming to the AVCaptureVideoDataOutputSampleBufferDelegate protocol that will receive sample buffers
-    after they are captured.
- @param sampleBufferCallbackQueue
-    A dispatch queue on which all sample buffer delegate methods will be called.
-
- @discussion
-    When a new video sample buffer is captured it will be vended to the sample buffer delegate using the
-    captureOutput:didOutputSampleBuffer:fromConnection: delegate method. All delegate methods will be called on the
-    specified dispatch queue. If the queue is blocked when new frames are captured, those frames will be automatically
-    dropped at a time determined by the value of the alwaysDiscardsLateVideoFrames property. This allows clients to
-    process existing frames on the same queue without having to manage the potential memory usage increases that would
-    otherwise occur when that processing is unable to keep up with the rate of incoming frames. If their frame processing
-    is consistently unable to keep up with the rate of incoming frames, clients should consider using the
-    minFrameDuration property, which will generally yield better performance characteristics and more consistent frame
-    rates than frame dropping alone.
-
-    Clients that need to minimize the chances of frames being dropped should specify a queue on which a sufficiently
-    small amount of processing is being done outside of receiving sample buffers. However, if such clients migrate extra
-    processing to another queue, they are responsible for ensuring that memory usage does not grow without bound from
-    frames that have not been processed.
-
-    A serial dispatch queue must be used to guarantee that video frames will be delivered in order.
-    The sampleBufferCallbackQueue parameter may not be NULL, except when setting the sampleBufferDelegate
-    to nil.
-*/
-- (void)setSampleBufferDelegate:(id<AVCaptureVideoDataOutputSampleBufferDelegate>)sampleBufferDelegate queue:(dispatch_queue_t)sampleBufferCallbackQueue;
-
-/*!
- @property sampleBufferDelegate
- @abstract
-    The receiver's delegate.
-
- @discussion
-    The value of this property is an object conforming to the AVCaptureVideoDataOutputSampleBufferDelegate protocol that
-    will receive sample buffers after they are captured. The delegate is set using the setSampleBufferDelegate:queue:
-    method.
-*/
-@property(nonatomic, readonly) id<AVCaptureVideoDataOutputSampleBufferDelegate> sampleBufferDelegate;
-
-/*!
- @property sampleBufferCallbackQueue
- @abstract
-    The dispatch queue on which all sample buffer delegate methods will be called.
-
- @discussion
-    The value of this property is a dispatch_queue_t. The queue is set using the setSampleBufferDelegate:queue: method.
-*/
-@property(nonatomic, readonly) dispatch_queue_t sampleBufferCallbackQueue;
-
-/*!
- @property videoSettings
- @abstract
-    Specifies the settings used to decode or re-encode video before it is output by the receiver.
-
- @discussion
-    See AVVideoSettings.h for more information on how to construct a video settings dictionary.  To receive samples in their 
-    device native format, set this property to an empty dictionary (i.e. [NSDictionary dictionary]).  To receive samples in
-    a default uncompressed format, set this property to nil.  Note that after this property is set to nil, subsequent
-    querying of this property will yield a non-nil dictionary reflecting the settings used by the AVCaptureSession's current 
-    sessionPreset.
-
-    On iOS, the only supported key is kCVPixelBufferPixelFormatTypeKey. Supported pixel formats are
-    kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, kCVPixelFormatType_420YpCbCr8BiPlanarFullRange and kCVPixelFormatType_32BGRA.
-*/
-@property(nonatomic, copy) NSDictionary *videoSettings;
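
A sketch combining the delegate and settings machinery above; the queue label is an assumption, and kCVPixelFormatType_32BGRA is one of the iOS-supported formats named in the discussion:

```objc
AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
// Ask for BGRA frames rather than the device native format.
videoOutput.videoSettings =
    @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
dispatch_queue_t videoQueue =
    dispatch_queue_create("com.example.video", DISPATCH_QUEUE_SERIAL);
[videoOutput setSampleBufferDelegate:self queue:videoQueue];
```
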
-
-/*!
- @method recommendedVideoSettingsForAssetWriterWithOutputFileType:
- @abstract
-    Specifies the recommended settings for use with an AVAssetWriterInput.
-
- @param outputFileType
-    Specifies the UTI of the file type to be written (see AVMediaFormat.h for a list of file format UTIs).
- 
- @return
-    A fully populated dictionary of keys and values that are compatible with AVAssetWriter.
- 
- @discussion
-    The returned value is an NSDictionary containing values for compression settings keys defined in
-    AVVideoSettings.h.  This dictionary is suitable for use as the "outputSettings" parameter when creating an AVAssetWriterInput, such as,
-        
-       [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings sourceFormatHint:hint];
-    
-	The dictionary returned contains all necessary keys and values needed by AVAssetWriter (see AVAssetWriterInput.h, 
-    -initWithMediaType:outputSettings: for a more in depth discussion). For QuickTime movie and ISO file types,
-    the recommended video settings will produce output comparable to that of AVCaptureMovieFileOutput.
-
-    Note that the dictionary of settings is dependent on the current configuration of the receiver's AVCaptureSession
-    and its inputs.  The settings dictionary may change if the session's configuration changes.  As such, you should
-    configure your session first, then query the recommended video settings.
-*/
-- (NSDictionary *)recommendedVideoSettingsForAssetWriterWithOutputFileType:(NSString *)outputFileType NS_AVAILABLE_IOS(7_0);
-
-/*!
- @property availableVideoCVPixelFormatTypes
- @abstract
-    Indicates the supported video pixel formats that can be specified in videoSettings.
-
- @discussion
-    The value of this property is an NSArray of NSNumbers that can be used as values for the 
-    kCVPixelBufferPixelFormatTypeKey in the receiver's videoSettings property.  The first
-    format in the returned list is the most efficient output format.
-*/
-@property(nonatomic, readonly) NSArray *availableVideoCVPixelFormatTypes NS_AVAILABLE(10_7, 5_0);
-
-/*!
- @property availableVideoCodecTypes
- @abstract
-    Indicates the supported video codec formats that can be specified in videoSettings.
-
- @discussion
-    The value of this property is an NSArray of NSStrings that can be used as values for the 
-    AVVideoCodecKey in the receiver's videoSettings property.
-*/
-@property(nonatomic, readonly) NSArray *availableVideoCodecTypes NS_AVAILABLE(10_7, 5_0);
-
-/*!
- @property minFrameDuration
- @abstract
-    Specifies the minimum time interval between which the receiver should output consecutive video frames.
-
- @discussion
-    The value of this property is a CMTime specifying the minimum duration of each video frame output by the receiver,
-    placing a lower bound on the amount of time that should separate consecutive frames. This is equivalent to the
-    inverse of the maximum frame rate. A value of kCMTimeZero or kCMTimeInvalid indicates an unlimited maximum frame
-    rate. The default value is kCMTimeInvalid.  As of iOS 5.0, minFrameDuration is deprecated.  Use AVCaptureConnection's
-    videoMinFrameDuration property instead.
-*/
-@property(nonatomic) CMTime minFrameDuration NS_DEPRECATED_IOS(4_0, 5_0, "Use AVCaptureConnection's videoMinFrameDuration property instead.");
-
-/*!
- @property alwaysDiscardsLateVideoFrames
- @abstract
-    Specifies whether the receiver should always discard any video frame that is not processed before the next frame is
-    captured.
-
- @discussion
-    When the value of this property is YES, the receiver will immediately discard frames that are captured while the
-    dispatch queue handling existing frames is blocked in the captureOutput:didOutputSampleBuffer:fromConnection:
-    delegate method. When the value of this property is NO, delegates will be allowed more time to process old frames
-    before new frames are discarded, but application memory usage may increase significantly as a result. The default
-    value is YES.
-*/
-@property(nonatomic) BOOL alwaysDiscardsLateVideoFrames;
-
-@end
-
-/*!
- @protocol AVCaptureVideoDataOutputSampleBufferDelegate
- @abstract
-    Defines an interface for delegates of AVCaptureVideoDataOutput to receive captured video sample buffers and be
-    notified of late sample buffers that were dropped.
-*/
-@protocol AVCaptureVideoDataOutputSampleBufferDelegate <NSObject>
-
-@optional
-
-/*!
- @method captureOutput:didOutputSampleBuffer:fromConnection:
- @abstract
-    Called whenever an AVCaptureVideoDataOutput instance outputs a new video frame.
-
- @param captureOutput
-    The AVCaptureVideoDataOutput instance that output the frame.
- @param sampleBuffer
-    A CMSampleBuffer object containing the video frame data and additional information about the frame, such as its
-    format and presentation time.
- @param connection
-    The AVCaptureConnection from which the video was received.
-
- @discussion
-    Delegates receive this message whenever the output captures and outputs a new video frame, decoding or re-encoding it
-    as specified by its videoSettings property. Delegates can use the provided video frame in conjunction with other APIs
-    for further processing. This method will be called on the dispatch queue specified by the output's
-    sampleBufferCallbackQueue property. This method is called periodically, so it must be efficient to prevent capture
-    performance problems, including dropped frames.
-
-    Clients that need to reference the CMSampleBuffer object outside of the scope of this method must CFRetain it and
-    then CFRelease it when they are finished with it.
-
-    Note that to maintain optimal performance, some sample buffers directly reference pools of memory that may need to be
-    reused by the device system and other capture inputs. This is frequently the case for uncompressed device native
-    capture where memory blocks are copied as little as possible. If multiple sample buffers reference such pools of
-    memory for too long, inputs will no longer be able to copy new samples into memory and those samples will be dropped.
-    If your application is causing samples to be dropped by retaining the provided CMSampleBuffer objects for too long,
-    but it needs access to the sample data for a long period of time, consider copying the data into a new buffer and
-    then calling CFRelease on the sample buffer if it was previously retained so that the memory it references can be
-    reused.
-*/
-- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection;
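
A sketch of the retain/release pattern the last paragraph describes; processingQueue is an assumed property of the delegate:

```objc
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    CFRetain(sampleBuffer); // keep the buffer alive beyond this callback
    dispatch_async(self.processingQueue, ^{
        // ... inspect or copy the frame data here, then release promptly so
        // the capture pipeline can reuse the underlying memory ...
        CFRelease(sampleBuffer);
    });
}
```
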
-
-/*!
- @method captureOutput:didDropSampleBuffer:fromConnection:
- @abstract
-    Called once for each frame that is discarded.
-
- @param captureOutput
-    The AVCaptureVideoDataOutput instance that dropped the frame.
- @param sampleBuffer
-    A CMSampleBuffer object containing information about the dropped frame, such as its format and presentation time.
-    This sample buffer will contain none of the original video data.
- @param connection
-    The AVCaptureConnection from which the dropped video frame was received.
-
- @discussion
-    Delegates receive this message whenever a video frame is dropped. This method is called once 
-    for each dropped frame. The CMSampleBuffer object passed to this delegate method will contain metadata 
-    about the dropped video frame, such as its duration and presentation time stamp, but will contain no 
-    actual video data. On iOS, included in the sample buffer attachments is kCMSampleBufferAttachmentKey_DroppedFrameReason,
-    which indicates why the frame was dropped.  This method will be called on the dispatch queue specified by the output's
-    sampleBufferCallbackQueue property. Because this method will be called on the same dispatch queue that is responsible
-    for outputting video frames, it must be efficient to prevent further capture performance problems, such as additional
-    dropped video frames.
- */
-- (void)captureOutput:(AVCaptureOutput *)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection NS_AVAILABLE(10_7, 6_0);
-
-@end
-
-
-@class AVCaptureAudioDataOutputInternal;
-@protocol AVCaptureAudioDataOutputSampleBufferDelegate;
-
-/*!
- @class AVCaptureAudioDataOutput
- @abstract
-    AVCaptureAudioDataOutput is a concrete subclass of AVCaptureOutput that can be used to process uncompressed or
-    compressed samples from the audio being captured.
- 
- @discussion
-    Instances of AVCaptureAudioDataOutput produce audio sample buffers suitable for processing using other media APIs.
-    Applications can access the sample buffers with the captureOutput:didOutputSampleBuffer:fromConnection: delegate
-    method.
-*/
-NS_CLASS_AVAILABLE(10_7, 4_0)
-@interface AVCaptureAudioDataOutput : AVCaptureOutput 
-{
-@private
-	AVCaptureAudioDataOutputInternal *_internal;
-}
-
-/*!
- @method setSampleBufferDelegate:queue:
- @abstract
-    Sets the receiver's delegate that will accept captured buffers and dispatch queue on which the delegate will be
-    called.
-
- @param sampleBufferDelegate
-    An object conforming to the AVCaptureAudioDataOutputSampleBufferDelegate protocol that will receive sample buffers
-    after they are captured.
- @param sampleBufferCallbackQueue
-    A dispatch queue on which all sample buffer delegate methods will be called.
-
- @discussion
-    When a new audio sample buffer is captured it will be vended to the sample buffer delegate using the
-    captureOutput:didOutputSampleBuffer:fromConnection: delegate method. All delegate methods will be called on the
-    specified dispatch queue. If the queue is blocked when new samples are captured, those samples will be automatically
-    dropped when they become sufficiently late. This allows clients to process existing samples on the same queue without
-    having to manage the potential memory usage increases that would otherwise occur when that processing is unable to
-    keep up with the rate of incoming samples.
-
-    Clients that need to minimize the chances of samples being dropped should specify a queue on which a sufficiently
-    small amount of processing is being done outside of receiving sample buffers. However, if such clients migrate extra
-    processing to another queue, they are responsible for ensuring that memory usage does not grow without bound from
-    samples that have not been processed.
-
-    A serial dispatch queue must be used to guarantee that audio samples will be delivered in order.
-    The sampleBufferCallbackQueue parameter may not be NULL, except when setting sampleBufferDelegate to nil.
-*/
-- (void)setSampleBufferDelegate:(id<AVCaptureAudioDataOutputSampleBufferDelegate>)sampleBufferDelegate queue:(dispatch_queue_t)sampleBufferCallbackQueue;
-
-/*!
- @property sampleBufferDelegate
- @abstract
-    The receiver's delegate.
-
- @discussion
-    The value of this property is an object conforming to the AVCaptureAudioDataOutputSampleBufferDelegate protocol that
-    will receive sample buffers after they are captured. The delegate is set using the setSampleBufferDelegate:queue:
-    method.
-*/
-@property(nonatomic, readonly) id<AVCaptureAudioDataOutputSampleBufferDelegate> sampleBufferDelegate;
-
-/*!
- @property sampleBufferCallbackQueue
- @abstract
-    The dispatch queue on which all sample buffer delegate methods will be called.
-
- @discussion
-    The value of this property is a dispatch_queue_t. The queue is set using the setSampleBufferDelegate:queue: method.
-*/
-@property(nonatomic, readonly) dispatch_queue_t sampleBufferCallbackQueue;
-
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-
-/*!
- @property	 audioSettings
- @abstract
-    Specifies the settings used to decode or re-encode audio before it is output by the receiver.
-
- @discussion
-    The value of this property is an NSDictionary containing values for audio settings keys defined 
-    in AVAudioSettings.h.  When audioSettings is set to nil, the AVCaptureAudioDataOutput vends samples
-    in their device native format.
-*/
-@property(nonatomic, copy) NSDictionary *audioSettings NS_AVAILABLE(10_7, NA);
-
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-
-/*!
- @method recommendedAudioSettingsForAssetWriterWithOutputFileType:
- @abstract
-    Specifies the recommended settings for use with an AVAssetWriterInput.
-
- @param outputFileType
-    Specifies the UTI of the file type to be written (see AVMediaFormat.h for a list of file format UTIs).
- 
- @return
-    A fully populated dictionary of keys and values that are compatible with AVAssetWriter.
- 
- @discussion
-    The returned value is an NSDictionary containing values for compression settings keys defined in
-    AVAudioSettings.h.  This dictionary is suitable for use as the "outputSettings" parameter when creating an AVAssetWriterInput, such as,
-        
-       [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:outputSettings sourceFormatHint:hint];
-    
-	The dictionary returned contains all necessary keys and values needed by AVAssetWriter (see AVAssetWriterInput.h, 
-    -initWithMediaType:outputSettings: for a more in depth discussion).  For QuickTime movie and ISO files, the 
-    recommended audio settings will always produce output comparable to that of AVCaptureMovieFileOutput.
-
-	Note that the dictionary of settings is dependent on the current configuration of the receiver's AVCaptureSession
-    and its inputs.  The settings dictionary may change if the session's configuration changes.  As such, you should
-    configure your session first, then query the recommended audio settings.
-*/
-- (NSDictionary *)recommendedAudioSettingsForAssetWriterWithOutputFileType:(NSString *)outputFileType NS_AVAILABLE_IOS(7_0);
-
-@end
-
-/*!
- @protocol AVCaptureAudioDataOutputSampleBufferDelegate
- @abstract
-    Defines an interface for delegates of AVCaptureAudioDataOutput to receive captured audio sample buffers.
-*/
-@protocol AVCaptureAudioDataOutputSampleBufferDelegate <NSObject>
-
-@optional
-
-/*!
- @method captureOutput:didOutputSampleBuffer:fromConnection:
- @abstract
-    Called whenever an AVCaptureAudioDataOutput instance outputs a new audio sample buffer.
-
- @param captureOutput
-    The AVCaptureAudioDataOutput instance that output the samples.
- @param sampleBuffer
-    A CMSampleBuffer object containing the audio samples and additional information about them, such as their format and
-    presentation time.
- @param connection
-    The AVCaptureConnection from which the audio was received.
-
- @discussion
-    Delegates receive this message whenever the output captures and outputs new audio samples, decoding or re-encoding
-    as specified by the audioSettings property. Delegates can use the provided sample buffer in conjunction with other
-    APIs for further processing. This method will be called on the dispatch queue specified by the output's
-    sampleBufferCallbackQueue property. This method is called periodically, so it must be efficient to prevent capture
-    performance problems, including dropped audio samples.
-
-    Clients that need to reference the CMSampleBuffer object outside of the scope of this method must CFRetain it and
-    then CFRelease it when they are finished with it.
-*/
-- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection;
-
-@end
-
-
-@class AVCaptureFileOutputInternal;
-@protocol AVCaptureFileOutputRecordingDelegate;
-
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-@protocol AVCaptureFileOutputDelegate;
-#endif
-
-/*!
- @class AVCaptureFileOutput
- @abstract
-    AVCaptureFileOutput is an abstract subclass of AVCaptureOutput that provides an interface for writing captured media
-    to files.
- 
- @discussion
-    This abstract superclass defines the interface for outputs that record media samples to files. File outputs can start
-    recording to a new file using the startRecordingToOutputFileURL:recordingDelegate: method. On successive invocations of this method on
-    Mac OS X, the output file can be changed dynamically without losing media samples. A file output can stop recording
-    using the stopRecording method. Because files are recorded in the background, applications will need to specify a
-    delegate for each new file so that they can be notified when recorded files are finished.
-
-    On Mac OS X, clients can also set a delegate on the file output itself that can be used to control recording along
-    exact media sample boundaries using the captureOutput:didOutputSampleBuffer:fromConnection: method.
-
-    The concrete subclasses of AVCaptureFileOutput are AVCaptureMovieFileOutput, which records media to a QuickTime movie
-    file, and AVCaptureAudioFileOutput, which writes audio media to a variety of audio file formats.
-*/
-NS_CLASS_AVAILABLE(10_7, 4_0)
-@interface AVCaptureFileOutput : AVCaptureOutput 
-{
-@private
-	AVCaptureFileOutputInternal *_fileOutputInternal;
-}
-
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-
-/*!
- @property delegate
- @abstract
-    The receiver's delegate.
-
- @discussion
-    The value of this property is an object conforming to the AVCaptureFileOutputDelegate protocol that will be able to
-    monitor and control recording along exact sample boundaries.
-*/
-@property(nonatomic, assign) id<AVCaptureFileOutputDelegate> delegate NS_AVAILABLE(10_7, NA);
-
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-
-/*!
- @property outputFileURL
- @abstract
-    The file URL of the file to which the receiver is currently recording incoming buffers.
-
- @discussion
-    The value of this property is an NSURL object containing the file URL of the file currently being written by the
-    receiver. Returns nil if the receiver is not recording to any file.
-*/
-@property(nonatomic, readonly) NSURL *outputFileURL;
-
-/*!
- @method startRecordingToOutputFileURL:recordingDelegate:
- @abstract
-    Tells the receiver to start recording to a new file, and specifies a delegate that will be notified when recording is
-    finished.
- 
- @param outputFileURL
-    An NSURL object containing the URL of the output file. This method throws an NSInvalidArgumentException if the URL is
-    not a valid file URL.
- @param delegate
-    An object conforming to the AVCaptureFileOutputRecordingDelegate protocol. Clients must specify a delegate so that
-    they can be notified when recording to the given URL is finished.
-
- @discussion
-    The method sets the file URL to which the receiver is currently writing output media. If a file at the given URL
-    already exists when capturing starts, recording to the new file will fail.
-
-    Clients need not call stopRecording before calling this method while another recording is in progress. On Mac OS X,
-    if this method is invoked while an existing output file was already being recorded, no media samples will be
-    discarded between the old file and the new file.
-
-    When recording is stopped either by calling stopRecording, by changing files using this method, or because of an
-    error, the remaining data that needs to be included in the file will be written in the background. Therefore, clients
-    must specify a delegate that will be notified when all data has been written to the file using the
-    captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: method. The recording delegate can also
-    optionally implement methods that inform it when data starts being written, when recording is paused and resumed, and
-    when recording is about to be finished.
-
-    On Mac OS X, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method,
-    the first samples written to the new file are guaranteed to be those contained in the sample buffer passed to that
-    method.
-
-    Note: AVCaptureAudioFileOutput does not support -startRecordingToOutputFileURL:recordingDelegate:.  Use
-    -startRecordingToOutputFileURL:outputFileType:recordingDelegate: instead.
-*/
-- (void)startRecordingToOutputFileURL:(NSURL*)outputFileURL recordingDelegate:(id<AVCaptureFileOutputRecordingDelegate>)delegate;
-
-/*!
- @method stopRecording
- @abstract
-    Tells the receiver to stop recording to the current file.
-
- @discussion
-    Clients can call this method when they want to stop recording new samples to the current file, and do not want to
-    continue recording to another file. Clients that want to switch from one file to another should not call this method.
-    Instead they should simply call startRecordingToOutputFileURL:recordingDelegate: with the new file URL.
-
-    When recording is stopped either by calling this method, by changing files using
-    startRecordingToOutputFileURL:recordingDelegate:, or because of an error, the remaining data that needs to be
-    included in the file will be written in the background. Therefore, before using the file, clients must wait until the
-    delegate that was specified in startRecordingToOutputFileURL:recordingDelegate: is notified when all data has been
-    written to the file using the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: method.
-
-    On Mac OS X, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method,
-    the last samples written to the current file are guaranteed to be those that were output immediately before those in
-    the sample buffer passed to that method.
-*/
-- (void)stopRecording;
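
A sketch of the start/stop cycle using the concrete AVCaptureMovieFileOutput subclass; the path is illustrative, and recording fails if a file already exists at the URL:

```objc
AVCaptureMovieFileOutput *movieOutput = [[AVCaptureMovieFileOutput alloc] init];
NSURL *outputURL = [NSURL fileURLWithPath:@"/tmp/capture.mov"]; // illustrative path
[movieOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
// ... later. The remaining data is written in the background; the delegate's
// captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:
// fires once the file is complete.
[movieOutput stopRecording];
```
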
-
-/*!
- @property recording
- @abstract
-    Indicates whether the receiver is currently recording.
-
- @discussion
-    The value of this property is YES when the receiver currently has a file to which it is writing new samples, NO
-    otherwise.
-*/
-@property(nonatomic, readonly, getter=isRecording) BOOL recording;
-
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-
-/*!
- @property recordingPaused
- @abstract
-    Indicates whether recording to the current output file is paused.
-
- @discussion
-    This property indicates recording to the file returned by outputFileURL has been previously paused using the
-    pauseRecording method. When a recording is paused, captured samples are not written to the output file, but new
-    samples can be written to the same file in the future by calling resumeRecording.
-*/
-@property(nonatomic, readonly, getter=isRecordingPaused) BOOL recordingPaused NS_AVAILABLE(10_7, NA);
-
-/*!
- @method pauseRecording
- @abstract
-    Pauses recording to the current output file.
-
- @discussion
-    This method causes the receiver to stop writing captured samples to the current output file returned by
-    outputFileURL, but leaves the file open so that samples can be written to it in the future, when resumeRecording is
-    called. This allows clients to record multiple media segments that are not contiguous in time to a single file.
-
-    On Mac OS X, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method,
-    the last samples written to the current file are guaranteed to be those that were output immediately before those in
-    the sample buffer passed to that method. 
-*/
-- (void)pauseRecording NS_AVAILABLE(10_7, NA);
-
-/*!
- @method resumeRecording
- @abstract
-    Resumes recording to the current output file after it was previously paused using pauseRecording.
-
- @discussion
-    This method causes the receiver to resume writing captured samples to the current output file returned by
-    outputFileURL, after recording was previously paused using pauseRecording. This allows clients to record multiple
-    media segments that are not contiguous in time to a single file. 
-
-    On Mac OS X, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method,
-    the first samples written to the current file are guaranteed to be those contained in the sample buffer passed to
-    that method.
-*/
-- (void)resumeRecording NS_AVAILABLE(10_7, NA);
-
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-
-/*!
- @property recordedDuration
- @abstract
-    Indicates the duration of the media recorded to the current output file.
-
- @discussion
-    If recording is in progress, this property returns the total time recorded so far.
-*/
-@property(nonatomic, readonly) CMTime recordedDuration;
-
-/*!
- @property recordedFileSize
- @abstract
-    Indicates the size, in bytes, of the data recorded to the current output file.
-
- @discussion
-    If a recording is in progress, this property returns the size in bytes of the data recorded so far.
-*/
-@property(nonatomic, readonly) int64_t recordedFileSize;	
-
-/*!
- @property maxRecordedDuration
- @abstract
-    Specifies the maximum duration of the media that should be recorded by the receiver.
-
- @discussion
-    This property specifies a hard limit on the duration of recorded files. Recording is stopped when the limit is
-    reached and the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: delegate method is invoked
-    with an appropriate error. The default value of this property is kCMTimeInvalid, which indicates no limit.
-*/
-@property(nonatomic) CMTime maxRecordedDuration;
-
-/*!
- @property maxRecordedFileSize
- @abstract
-    Specifies the maximum size, in bytes, of the data that should be recorded by the receiver.
+    Framework:  AVFoundation
  
- @discussion
-    This property specifies a hard limit on the data size of recorded files. Recording is stopped when the limit is
-    reached and the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: delegate method is invoked
-    with an appropriate error. The default value of this property is 0, which indicates no limit.
+    Copyright 2010-2017 Apple Inc. All rights reserved.
 */
-@property(nonatomic) int64_t maxRecordedFileSize;
-
-/*!
- @property minFreeDiskSpaceLimit
- @abstract
-    Specifies the minimum amount of free space, in bytes, required for recording to continue on a given volume.
-
- @discussion
-    This property specifies a hard lower limit on the amount of free space that must remain on a target volume for
-    recording to continue. Recording is stopped when the limit is reached and the
-    captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: delegate method is invoked with an
-    appropriate error.
-*/
-@property(nonatomic) int64_t minFreeDiskSpaceLimit;
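
Continuing the movieOutput sketch above, the three limit properties might be combined like this; the specific values are illustrative:

```objc
// Stop after 30 seconds or 100 MB, or if free space drops below 512 MB;
// the recording delegate is then invoked with an appropriate error.
movieOutput.maxRecordedDuration = CMTimeMakeWithSeconds(30.0, 600);
movieOutput.maxRecordedFileSize = 100 * 1024 * 1024;
movieOutput.minFreeDiskSpaceLimit = 512 * 1024 * 1024;
```
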
-
-@end
-
-/*!
- @protocol AVCaptureFileOutputRecordingDelegate
- @abstract
-    Defines an interface for delegates of AVCaptureFileOutput to respond to events that occur in the process of recording
-    a single file.
-*/
-@protocol AVCaptureFileOutputRecordingDelegate <NSObject>
-
-@optional
-
-/*!
- @method captureOutput:didStartRecordingToOutputFileAtURL:fromConnections:
- @abstract
-    Informs the delegate when the output has started writing to a file.
-
- @param captureOutput
-    The capture file output that started writing the file.
- @param fileURL
-    The file URL of the file that is being written.
- @param connections
-    An array of AVCaptureConnection objects attached to the file output that provided the data that is being written to
-    the file.
-
- @discussion
-    This method is called when the file output has started writing data to a file. If an error condition prevents any
-    data from being written, this method may not be called.
-    captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error: and
-    captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: will always be called, even if no data is
-    written.
-
-    Clients should not assume that this method will be called on a specific thread, and should also try to make this
-    method as efficient as possible.
-*/
-- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections;
-
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-
-/*!
- @method captureOutput:didPauseRecordingToOutputFileAtURL:fromConnections:
- @abstract
-    Called whenever the output is recording to a file and successfully pauses the recording at the request of the client.
-
- @param captureOutput
-    The capture file output that has paused its file recording.
- @param fileURL
-    The file URL of the file that is being written.
- @param connections
-    An array of AVCaptureConnection objects attached to the file output that provided the data that is being written to
-    the file.
-
- @discussion
-    Delegates can use this method to be informed when a request to pause recording is actually respected. It is safe for
-    delegates to change what the file output is currently doing (starting a new file, for example) from within this
-    method. If recording to a file is stopped, either manually or due to an error, this method is not guaranteed to be
-    called, even if a previous call to pauseRecording was made.
-
-    Clients should not assume that this method will be called on a specific thread, and should also try to make this
-    method as efficient as possible.
-*/
-- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didPauseRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections NS_AVAILABLE(10_7, NA);
-
-/*!
- @method captureOutput:didResumeRecordingToOutputFileAtURL:fromConnections:
- @abstract
-    Called whenever the output, at the request of the client, successfully resumes a file recording that was paused.
-
- @param captureOutput
-    The capture file output that has resumed its paused file recording.
- @param fileURL
-    The file URL of the file that is being written.
- @param connections
-    An array of AVCaptureConnection objects attached to the file output that provided the data that is being written to
-    the file.
-
- @discussion
-    Delegates can use this method to be informed when a request to resume recording is actually respected. It is safe for
-    delegates to change what the file output is currently doing (starting a new file, for example) from within this
-    method. If recording to a file is stopped, either manually or due to an error, this method is not guaranteed to be
-    called, even if a previous call to resumeRecording was made.
-
-    Clients should not assume that this method will be called on a specific thread, and should also try to make this
-    method as efficient as possible.
-*/
-- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didResumeRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections NS_AVAILABLE(10_7, NA);
-
-/*!
- @method captureOutput:willFinishRecordingToOutputFileAtURL:fromConnections:error:
- @abstract
-    Informs the delegate when the output will stop writing new samples to a file.
-
- @param captureOutput
-    The capture file output that will finish writing the file.
- @param fileURL
-    The file URL of the file that is being written.
- @param connections
-    An array of AVCaptureConnection objects attached to the file output that provided the data that is being written to
-    the file.
- @param error
-    An error describing what caused the file to stop recording, or nil if there was no error.
-
- @discussion
-    This method is called when the file output will stop recording new samples to the file at outputFileURL, either
-    because startRecordingToOutputFileURL:recordingDelegate: or stopRecording were called, or because an error, described
-    by the error parameter, occurred (if no error occurred, the error parameter will be nil). This method will always be
-    called for each recording request, even if no data is successfully written to the file.
-
-    Clients should not assume that this method will be called on a specific thread, and should also try to make this
-    method as efficient as possible.
-*/
-- (void)captureOutput:(AVCaptureFileOutput *)captureOutput willFinishRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections error:(NSError *)error NS_AVAILABLE(10_7, NA);
-
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-
-@required
-
-/*!
- @method captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:
- @abstract
-    Informs the delegate when all pending data has been written to an output file.
-
- @param captureOutput
-    The capture file output that has finished writing the file.
- @param fileURL
-    The file URL of the file that has been written.
- @param connections
-    An array of AVCaptureConnection objects attached to the file output that provided the data that was written to the
-    file.
- @param error
-    An error describing what caused the file to stop recording, or nil if there was no error.
-
- @discussion
-    This method is called when the file output has finished writing all data to a file whose recording was stopped,
-    either because startRecordingToOutputFileURL:recordingDelegate: or stopRecording were called, or because an error,
-    described by the error parameter, occurred (if no error occurred, the error parameter will be nil).  This method will
-    always be called for each recording request, even if no data is successfully written to the file.
 
-    Clients should not assume that this method will be called on a specific thread.
-
-    Delegates are required to implement this method.
-*/
-- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error;
-
-@end
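
> Editor's note: a hedged sketch of the one required method of this protocol. The class name `RecorderDelegate` is hypothetical and error handling is reduced to logging.

```objc
#import <AVFoundation/AVFoundation.h>

@interface RecorderDelegate : NSObject <AVCaptureFileOutputRecordingDelegate>
@end

@implementation RecorderDelegate

// Always called once per recording request, even if no data was written.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
      fromConnections:(NSArray *)connections
                error:(NSError *)error
{
    if (error) {
        NSLog(@"Recording to %@ stopped with error: %@", outputFileURL, error);
    } else {
        NSLog(@"Finished writing %@", outputFileURL);
    }
}

@end
```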
-
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-
-/*!
- @protocol AVCaptureFileOutputDelegate
- @abstract
-    Defines an interface for delegates of AVCaptureFileOutput to monitor and control recordings along exact sample
-    boundaries.
-*/
-@protocol AVCaptureFileOutputDelegate <NSObject>
-
-@required
-
-/*!
- @method captureOutputShouldProvideSampleAccurateRecordingStart:
- @abstract
-    Allows a client to opt in to frame accurate record-start in captureOutput:didOutputSampleBuffer:fromConnection:
-
- @param captureOutput
-    The AVCaptureFileOutput instance with which the delegate is associated.
-
- @discussion
-    In apps linked before Mac OS X 10.8, delegates that implement the captureOutput:didOutputSampleBuffer:fromConnection: 
-    method can ensure frame accurate start / stop of a recording by calling startRecordingToOutputFileURL:recordingDelegate:
-    from within the callback.  Frame accurate start requires the capture output to apply outputSettings
-    when the session starts running, so it is ready to record on any given frame boundary.  Compressing
-    all the time while the session is running has power, thermal, and CPU implications.  In apps linked on or after
-    Mac OS X 10.8, delegates must implement captureOutputShouldProvideSampleAccurateRecordingStart: to indicate
-    whether frame accurate start/stop recording is required (returning YES) or not (returning NO).
-    The output calls this method as soon as the delegate is added, and never again.  If your delegate returns
-    NO, the capture output applies compression settings when startRecordingToOutputFileURL:recordingDelegate: is called, 
-    and disables compression settings after the recording is stopped.
-*/
-- (BOOL)captureOutputShouldProvideSampleAccurateRecordingStart:(AVCaptureOutput *)captureOutput NS_AVAILABLE(10_8, NA);
-
-@optional
-
-/*!
- @method captureOutput:didOutputSampleBuffer:fromConnection:
- @abstract
-    Gives the delegate the opportunity to inspect samples as they are received by the output and optionally start and
-    stop recording at exact times.
-
- @param captureOutput
-    The capture file output that is receiving the media data.
- @param sampleBuffer
-    A CMSampleBuffer object containing the sample data and additional information about the sample, such as its format
-    and presentation time.
- @param connection
-    The AVCaptureConnection object attached to the file output from which the sample data was received.
-
- @discussion
-    This method is called whenever the file output receives a single sample buffer (a single video frame or audio buffer,
-    for example) from the given connection. This gives delegates an opportunity to start and stop recording or change
-    output files at an exact sample boundary if -captureOutputShouldProvideSampleAccurateRecordingStart: returns YES. 
-    If called from within this method, the file output's startRecordingToOutputFileURL:recordingDelegate: and 
-    resumeRecording methods are guaranteed to include the received sample buffer in the new file, whereas calls to 
-    stopRecording and pauseRecording are guaranteed to include all samples leading up to those in the current sample 
-    buffer in the existing file.
-
-    Delegates can gather information particular to the samples by inspecting the CMSampleBuffer object. Sample buffers
-    always contain a single frame of video if called from this method but may also contain multiple samples of audio. For
-    B-frame video formats, samples are always delivered in presentation order.
-
-    Clients that need to reference the CMSampleBuffer object outside of the scope of this method must CFRetain it and
-    then CFRelease it when they are finished with it.
-
-    Note that to maintain optimal performance, some sample buffers directly reference pools of memory that may need to be
-    reused by the device system and other capture inputs. This is frequently the case for uncompressed device native
-    capture where memory blocks are copied as little as possible. If multiple sample buffers reference such pools of
-    memory for too long, inputs will no longer be able to copy new samples into memory and those samples will be dropped.
-    If your application is causing samples to be dropped by retaining the provided CMSampleBuffer objects for too long,
-    but it needs access to the sample data for a long period of time, consider copying the data into a new buffer and
-    then calling CFRelease on the sample buffer if it was previously retained so that the memory it references can be
-    reused. 
- 
-    Clients should not assume that this method will be called on a specific thread. In addition, this method is called
-    periodically, so it must be efficient to prevent capture performance problems.
-*/
-- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection NS_AVAILABLE(10_7, NA);
-
-@end
-
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
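
> Editor's note: a hedged macOS sketch of opting out of sample-accurate start, per the discussion above; `FileOutputDelegate` is a hypothetical class name.

```objc
#import <AVFoundation/AVFoundation.h>

@interface FileOutputDelegate : NSObject <AVCaptureFileOutputDelegate>
@end

@implementation FileOutputDelegate

// Apps linked on or after OS X 10.8 must implement this. Returning NO lets
// the output defer compression setup until a recording actually starts,
// avoiding the power/thermal cost of compressing while idle.
- (BOOL)captureOutputShouldProvideSampleAccurateRecordingStart:(AVCaptureOutput *)captureOutput
{
    return NO;
}

@end
```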
-
-
-@class AVCaptureMovieFileOutputInternal;
-
-/*!
- @class AVCaptureMovieFileOutput
- @abstract
-    AVCaptureMovieFileOutput is a concrete subclass of AVCaptureFileOutput that writes captured media to QuickTime movie
-    files.
-
- @discussion
-    AVCaptureMovieFileOutput implements the complete file recording interface declared by AVCaptureFileOutput for writing
-    media data to QuickTime movie files. In addition, instances of AVCaptureMovieFileOutput allow clients to configure
-    options specific to the QuickTime file format, including allowing them to write metadata collections to each file,
-    specify media encoding options for each track (Mac OS X), and specify an interval at which movie fragments should be written.
-*/
-NS_CLASS_AVAILABLE(10_7, 4_0)
-@interface AVCaptureMovieFileOutput : AVCaptureFileOutput
-{
-@private
-	AVCaptureMovieFileOutputInternal *_internal;
-}
-
-/*!
- @property movieFragmentInterval
- @abstract
-    Specifies the frequency with which movie fragments should be written.
-
- @discussion
-    When movie fragments are used, a partially written QuickTime movie file whose writing is unexpectedly interrupted can
-    be successfully opened and played up to multiples of the specified time interval. A value of kCMTimeInvalid indicates
-    that movie fragments should not be used, but that only a movie atom describing all of the media in the file should be
-    written. The default value of this property is ten seconds.
-
-    Changing the value of this property will not affect the movie fragment interval of the file currently being written,
-    if there is one.
-*/
-@property(nonatomic) CMTime movieFragmentInterval;
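
> Editor's note: the interval is an ordinary CMTime; a hedged sketch reusing the hypothetical `movieOutput` from the earlier note.

```objc
// Write a movie fragment every 5 seconds so an interrupted recording stays
// playable up to the last fragment boundary (the default is 10 seconds).
movieOutput.movieFragmentInterval = CMTimeMakeWithSeconds(5, 600);

// Or write no fragments at all, just a single movie atom at the end:
movieOutput.movieFragmentInterval = kCMTimeInvalid;
```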
-
-/*!
- @property metadata
- @abstract
-    A collection of metadata to be written to the receiver's output files.
-
- @discussion
-    The value of this property is an array of AVMetadataItem objects representing the collection of top-level metadata to
-    be written in each output file.
-*/
-@property(nonatomic, copy) NSArray *metadata;
-
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-
-/*!
- @method outputSettingsForConnection:
- @abstract
-    Returns the options the receiver uses to re-encode media from the given connection as it is being recorded.
-
- @param connection
-    The connection delivering the media to be re-encoded.
- @result
-    An NSDictionary of output settings.
-
- @discussion
-    See AVAudioSettings.h for audio connections or AVVideoSettings.h for video connections for more information on
-    how to construct an output settings dictionary.  If the returned value is an empty dictionary (i.e. [NSDictionary
-    dictionary]), the format of the media from the connection will not be changed before being written to the file.  If
-    -setOutputSettings:forConnection: was called with a nil dictionary, this method returns a non-nil dictionary reflecting
-    the settings used by the AVCaptureSession's current sessionPreset.
-*/
-- (NSDictionary *)outputSettingsForConnection:(AVCaptureConnection *)connection NS_AVAILABLE(10_7, NA);
-
-/*!
- @method setOutputSettings:forConnection:
- @abstract
-    Sets the options the receiver uses to re-encode media from the given connection as it is being recorded.
-
- @param outputSettings
-    An NSDictionary of output settings.
- @param connection
-    The connection delivering the media to be re-encoded.
-
- @discussion
-    See AVAudioSettings.h for audio connections or AVVideoSettings.h for video connections for more information on
-    how to construct an output settings dictionary.  A value of an empty dictionary (i.e. [NSDictionary dictionary]) means
-    that the format of the media from the connection should not be changed before being written to the file.  A value of
-    nil means that the output format will be determined by the session preset.  In this case, -outputSettingsForConnection:
-    will return a non-nil dictionary reflecting the settings used by the AVCaptureSession's current sessionPreset.
-*/
-- (void)setOutputSettings:(NSDictionary *)outputSettings forConnection:(AVCaptureConnection *)connection NS_AVAILABLE(10_7, NA);
-
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
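
> Editor's note: a hedged macOS-only sketch of these two methods. Keys come from AVVideoSettings.h; the connection lookup assumes the hypothetical `movieOutput` is already attached to a session.

```objc
// Re-encode the video connection as 1280x720 H.264.
AVCaptureConnection *videoConnection = [movieOutput connectionWithMediaType:AVMediaTypeVideo];
NSDictionary *settings = @{ AVVideoCodecKey  : AVVideoCodecH264,
                            AVVideoWidthKey  : @1280,
                            AVVideoHeightKey : @720 };
[movieOutput setOutputSettings:settings forConnection:videoConnection];

// @{} would pass the media through unchanged; nil defers to the session preset.
```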
-
-#if TARGET_OS_IPHONE
-
-/*!
- @method recordsVideoOrientationAndMirroringChangesAsMetadataTrackForConnection:
- @abstract
-    Returns YES if the movie file output will create a timed metadata track that records samples which
-	reflect changes made to the given connection's videoOrientation and videoMirrored properties
-	during recording.
-
- @param connection
-    A connection delivering video media to the movie file output. This method throws an NSInvalidArgumentException
-	if the connection does not have a mediaType of AVMediaTypeVideo or if the connection does not terminate at
-	the movie file output.
-
- @discussion
-	See setRecordsVideoOrientationAndMirroringChanges:asMetadataTrackForConnection: for details on the behavior
-	controlled by this value.
-	
-	The default value returned is NO.
-*/
-- (BOOL)recordsVideoOrientationAndMirroringChangesAsMetadataTrackForConnection:(AVCaptureConnection *)connection NS_AVAILABLE_IOS(9_0);
-
-/*!
- @method setRecordsVideoOrientationAndMirroringChanges:asMetadataTrackForConnection:
- @abstract
-    Controls whether or not the movie file output will create a timed metadata track that records samples which
-	reflect changes made to the given connection's videoOrientation and videoMirrored properties during
-	recording.
- 
- @param doRecordChanges
-    If YES, the movie file output will create a timed metadata track that records samples which reflect changes
-	made to the given connection's videoOrientation and videoMirrored properties during recording.
-
- @param connection
-    A connection delivering video media to the movie file output. This method throws an NSInvalidArgumentException
-	if the connection does not have a mediaType of AVMediaTypeVideo or if the connection does not terminate at
-	the movie file output.
-
- @discussion
-    When a recording is started the current state of a video capture connection's videoOrientation and videoMirrored
-	properties are used to build the display matrix for the created video track. The movie file format allows only
-	one display matrix per track, which means that any changes made during a recording to the videoOrientation and
-	videoMirrored properties are not captured.  For example, a user starts a recording with their device in the portrait
-	orientation, and then partway through the recording changes the device to a landscape orientation. The landscape
-	orientation requires a different display matrix, but only the initial display matrix (the portrait display
-	matrix) is recorded for the video track.
-	
-	By invoking this method the client application directs the movie file output to create an additional track in the
-	captured movie. This track is a timed metadata track that is associated with the video track, and contains one or
-	more samples that contain a Video Orientation value (as defined by EXIF and TIFF specifications, which is enumerated
-	by CGImagePropertyOrientation in <ImageIO/CGImageProperties.h>).  The value represents the display matrix corresponding
-	to the AVCaptureConnection's videoOrientation and videoMirrored properties when applied to the input source.  The
-	initial sample written to the timed metadata track represents video track's display matrix. During recording additional
-	samples will be written to the timed metadata track whenever the client application changes the video connection's
-	videoOrientation or videoMirrored properties. Using the above example, when the client application detects the user
-	changing the device from portrait to landscape orientation, it updates the video connection's videoOrientation property,
-	thus causing the movie file output to add a new sample to the timed metadata track.
-	
-	After capture, playback and editing applications can use the timed metadata track to enhance their user's experience.
-	For example, when playing back the captured movie, a playback engine can use the samples to adjust the display of the
-	video samples to keep the video properly oriented.  Another example is an editing application that uses the
-	sample times to suggest cut points for breaking the captured movie into separate clips, where each clip is properly
-	oriented.
-	
-	The default behavior is to not create the timed metadata track.
-	
-	The doRecordChanges value is only observed at the start of recording.  Changes to the value will not have any
-	effect until the next recording is started.
-*/
-- (void)setRecordsVideoOrientationAndMirroringChanges:(BOOL)doRecordChanges asMetadataTrackForConnection:(AVCaptureConnection *)connection NS_AVAILABLE_IOS(9_0);
-
-#endif // TARGET_OS_IPHONE
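
> Editor's note: a hedged iOS sketch of opting in before a recording starts, since the flag is only observed at record-start; names reuse the hypothetical `movieOutput`.

```objc
// Opt in to the timed metadata track so that orientation/mirroring changes
// made mid-recording are captured alongside the video track.
AVCaptureConnection *videoConnection = [movieOutput connectionWithMediaType:AVMediaTypeVideo];
if (![movieOutput recordsVideoOrientationAndMirroringChangesAsMetadataTrackForConnection:videoConnection]) {
    [movieOutput setRecordsVideoOrientationAndMirroringChanges:YES
                                  asMetadataTrackForConnection:videoConnection];
}
```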
-
-@end
-
-
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-
-@class AVCaptureAudioFileOutputInternal;
-
-/*!
- @class AVCaptureAudioFileOutput
- @abstract
-    AVCaptureAudioFileOutput is a concrete subclass of AVCaptureFileOutput that writes captured audio to any audio file
-    type supported by CoreAudio.
- 
- @discussion
-    AVCaptureAudioFileOutput implements the complete file recording interface declared by AVCaptureFileOutput for writing
-    media data to audio files. In addition, instances of AVCaptureAudioFileOutput allow clients to configure options
-    specific to the audio file formats, including allowing them to write metadata collections to each file and specify
-    audio encoding options.
-*/
-NS_CLASS_AVAILABLE(10_7, NA)
-@interface AVCaptureAudioFileOutput : AVCaptureFileOutput
-{
-@private
-	AVCaptureAudioFileOutputInternal *_internal;
-}
-
-/*!
- @method availableOutputFileTypes
- @abstract		
-    Provides the file types AVCaptureAudioFileOutput can write.
- @result
-    An NSArray of UTIs identifying the file types the AVCaptureAudioFileOutput class can write.
-*/
-+ (NSArray *) availableOutputFileTypes;
-
-/*!
- @method startRecordingToOutputFileURL:outputFileType:recordingDelegate:
- @abstract
-    Tells the receiver to start recording to a new file of the specified format, and specifies a delegate that will be
-    notified when recording is finished.
-
- @param outputFileURL
-    An NSURL object containing the URL of the output file. This method throws an NSInvalidArgumentException if the URL is
-    not a valid file URL.
- @param fileType
-    A UTI indicating the format of the file to be written.
- @param delegate
-    An object conforming to the AVCaptureFileOutputRecordingDelegate protocol. Clients must specify a delegate so that they
-    can be notified when recording to the given URL is finished.
-
- @discussion
-    The method sets the file URL to which the receiver is currently writing output media. If a file at the given URL
-    already exists when capturing starts, recording to the new file will fail.
-
-    The fileType argument is a UTI corresponding to the audio file format that should be written. UTIs for common 
-    audio file types are declared in AVMediaFormat.h.
-
-    Clients need not call stopRecording before calling this method while another recording is in progress. If this method
-    is invoked while an existing output file was already being recorded, no media samples will be discarded between the
-    old file and the new file.
-
-    When recording is stopped either by calling stopRecording, by changing files using this method, or because of an
-    error, the remaining data that needs to be included to the file will be written in the background. Therefore, clients
-    must specify a delegate that will be notified when all data has been written to the file using the
-    captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: method. The recording delegate can also
-    optionally implement methods that inform it when data starts being written, when recording is paused and resumed, and
-    when recording is about to be finished.
-
-    On Mac OS X, if this method is called within the captureOutput:didOutputSampleBuffer:fromConnection: delegate method,
-    the first samples written to the new file are guaranteed to be those contained in the sample buffer passed to that
-    method.
-*/
-- (void)startRecordingToOutputFileURL:(NSURL*)outputFileURL outputFileType:(NSString *)fileType recordingDelegate:(id<AVCaptureFileOutputRecordingDelegate>)delegate;
-
-/*!
- @property metadata
- @abstract
-    A collection of metadata to be written to the receiver's output files.
-
- @discussion
-    The value of this property is an array of AVMetadataItem objects representing the collection of top-level metadata to
-    be written in each output file. Only ID3 v2.2, v2.3, or v2.4 style metadata items are supported.
-*/
-@property(nonatomic, copy) NSArray *metadata; 
-
-/*!
- @property audioSettings
- @abstract
-    Specifies the options the receiver uses to re-encode audio as it is being recorded.
-
- @discussion
-    The output settings dictionary can contain values for keys from AVAudioSettings.h. A value of nil indicates that the
-    format of the audio should not be changed before being written to the file.
-*/
-@property(nonatomic, copy) NSDictionary *audioSettings;
-
-@end
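
> Editor's note: a hedged macOS sketch tying the class together: AAC settings from AVAudioSettings.h plus a recording start. The path and `recorderDelegate` (the hypothetical delegate sketched earlier) are illustrative; the target file must not already exist.

```objc
AVCaptureAudioFileOutput *audioOutput = [[AVCaptureAudioFileOutput alloc] init];
audioOutput.audioSettings = @{ AVFormatIDKey         : @(kAudioFormatMPEG4AAC),
                               AVSampleRateKey       : @44100.0,
                               AVNumberOfChannelsKey : @2 };
NSURL *fileURL = [NSURL fileURLWithPath:@"/tmp/capture.m4a"];
[audioOutput startRecordingToOutputFileURL:fileURL
                            outputFileType:AVFileTypeAppleM4A
                         recordingDelegate:recorderDelegate];
```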
-
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-
-
-@class AVCaptureStillImageOutputInternal;
-
-/*!
- @class AVCaptureStillImageOutput
- @abstract
-    AVCaptureStillImageOutput is a concrete subclass of AVCaptureOutput that can be used to capture high-quality still
-    images with accompanying metadata.
-
- @discussion
-    Instances of AVCaptureStillImageOutput can be used to capture, on demand, high quality snapshots from a realtime
-    capture source. Clients can request a still image for the current time using the
-    captureStillImageAsynchronouslyFromConnection:completionHandler: method. Clients can also configure still image
-    outputs to produce still images in specific image formats.
-*/
-NS_CLASS_AVAILABLE(10_7, 4_0)
-@interface AVCaptureStillImageOutput : AVCaptureOutput 
-{
-@private
-	AVCaptureStillImageOutputInternal *_internal;
-}
-
-/*!
- @property outputSettings
- @abstract
-    Specifies the options the receiver uses to encode still images before they are delivered.
-
- @discussion
-    See AVVideoSettings.h for more information on how to construct an output settings dictionary.
-
-    On iOS, the only currently supported keys are AVVideoCodecKey and kCVPixelBufferPixelFormatTypeKey. 
-    Use -availableImageDataCVPixelFormatTypes and -availableImageDataCodecTypes to determine what 
-    codec keys and pixel formats are supported. AVVideoQualityKey is supported on iOS 6.0 and later
-    and may only be used when AVVideoCodecKey is set to AVVideoCodecJPEG.
-*/
-@property(nonatomic, copy) NSDictionary *outputSettings;
-
-/*!
- @property availableImageDataCVPixelFormatTypes
- @abstract
-    Indicates the supported image pixel formats that can be specified in outputSettings.
-
- @discussion
-    The value of this property is an NSArray of NSNumbers that can be used as values for the 
-    kCVPixelBufferPixelFormatTypeKey in the receiver's outputSettings property.  The first
-    format in the returned list is the most efficient output format.
-*/
-@property(nonatomic, readonly) NSArray *availableImageDataCVPixelFormatTypes;
-
-/*!
- @property availableImageDataCodecTypes
- @abstract
-    Indicates the supported image codec formats that can be specified in outputSettings.
-
- @discussion
-    The value of this property is an NSArray of NSStrings that can be used as values for the 
-    AVVideoCodecKey in the receiver's outputSettings property.
-*/
-@property(nonatomic, readonly) NSArray *availableImageDataCodecTypes;
-
-#if TARGET_OS_IPHONE
-
-/*!
- @property stillImageStabilizationSupported
- @abstract
-    Indicates whether the receiver supports still image stabilization.
- 
- @discussion
-    The receiver's automaticallyEnablesStillImageStabilizationWhenAvailable property can only be set 
-    if this property returns YES.  Its value may change as the session's -sessionPreset or input device's
-    -activeFormat changes.
-*/
-@property(nonatomic, readonly, getter=isStillImageStabilizationSupported) BOOL stillImageStabilizationSupported NS_AVAILABLE_IOS(7_0);
-
-/*!
- @property automaticallyEnablesStillImageStabilizationWhenAvailable
- @abstract
-    Indicates whether the receiver should automatically use still image stabilization when necessary.
- 
- @discussion
-    On a receiver where -isStillImageStabilizationSupported returns YES, image stabilization
-    may be applied to reduce blur commonly found in low light photos. When stabilization is enabled, still 
-    image captures incur additional latency. The default value is YES when supported, NO otherwise. Setting 
-    this property throws an NSInvalidArgumentException if -isStillImageStabilizationSupported returns NO.
-*/
-@property(nonatomic) BOOL automaticallyEnablesStillImageStabilizationWhenAvailable NS_AVAILABLE_IOS(7_0);
-
-/*!
- @property stillImageStabilizationActive
- @abstract
-    Indicates whether still image stabilization is in use for the current capture.
- 
- @discussion
-    On a receiver where -isStillImageStabilizationSupported returns YES, and
-    automaticallyEnablesStillImageStabilizationWhenAvailable is set to YES, this property may be key-value
-    observed, or queried from inside your key-value observation callback for the @"capturingStillImage"
-	property, to find out if still image stabilization is being applied to the current capture.
-*/
-@property(nonatomic, readonly, getter=isStillImageStabilizationActive) BOOL stillImageStabilizationActive NS_AVAILABLE_IOS(7_0);
-
-/*!
- @property highResolutionStillImageOutputEnabled
- @abstract
-    Indicates whether the receiver should emit still images at the highest resolution supported
-    by its source AVCaptureDevice's activeFormat.
- 
- @discussion
-    By default, AVCaptureStillImageOutput emits images with the same dimensions as its source AVCaptureDevice's
-    activeFormat.formatDescription.  However, if you set this property to YES, the receiver emits still images at its source
-    AVCaptureDevice's activeFormat.highResolutionStillImageDimensions.  Note that if you enable video stabilization
-    (see AVCaptureConnection's preferredVideoStabilizationMode) for any output, the high resolution still images 
-    emitted by AVCaptureStillImageOutput may be smaller by 10 or more percent.
-*/
-@property(nonatomic, getter=isHighResolutionStillImageOutputEnabled) BOOL highResolutionStillImageOutputEnabled NS_AVAILABLE_IOS(8_0);
-
-#endif // TARGET_OS_IPHONE
-
-/*!
- @property capturingStillImage
- @abstract
-    A boolean value that becomes true when a still image is being captured.
-
- @discussion
-    The value of this property is a BOOL that becomes true when a still image is being
-    captured, and false when no still image capture is underway.  This property is
-    key-value observable.
-*/
-@property(readonly, getter=isCapturingStillImage) BOOL capturingStillImage NS_AVAILABLE(10_8, 5_0);
-
-/*!
- @method captureStillImageAsynchronouslyFromConnection:completionHandler:
- @abstract
-    Initiates an asynchronous still image capture, returning the result to a completion handler.
-
- @param connection
-    The AVCaptureConnection object from which to capture the still image.
- @param handler
-    A block that will be called when the still image capture is complete. The block will be passed a CMSampleBuffer
-    object containing the image data or an NSError object if an image could not be captured.
-
- @discussion
-    This method will return immediately after it is invoked, later calling the provided completion handler block when
-    image data is ready. If the request could not be completed, the error parameter will contain an NSError object
-    describing the failure.
-
-    Attachments to the image data sample buffer may contain metadata appropriate to the image data format. For instance,
-    a sample buffer containing JPEG data may carry a kCGImagePropertyExifDictionary as an attachment. See
-    <ImageIO/CGImageProperties.h> for a list of keys and value types.
-
-    Clients should not assume that the completion handler will be called on a specific thread.
- 
-    Calls to captureStillImageAsynchronouslyFromConnection:completionHandler: are not synchronized with AVCaptureDevice
-    manual control completion handlers. Setting a device manual control, waiting for its completion, then calling
-    captureStillImageAsynchronouslyFromConnection:completionHandler: DOES NOT ensure that the still image returned reflects
-    your manual control change. It may be from an earlier time. You can compare your manual control completion handler sync time
-    to the returned still image's presentation time. You can retrieve the sample buffer's pts using 
-    CMSampleBufferGetPresentationTimestamp(). If the still image has an earlier timestamp, your manual control command 
-    does not apply to it.
-*/
-- (void)captureStillImageAsynchronouslyFromConnection:(AVCaptureConnection *)connection completionHandler:(void (^)(CMSampleBufferRef imageDataSampleBuffer, NSError *error))handler;
-
-/*!
- @method jpegStillImageNSDataRepresentation:
- @abstract
-    Converts the still image data and metadata attachments in a JPEG sample buffer to an NSData representation.
-
- @param jpegSampleBuffer
-    The sample buffer carrying JPEG image data, optionally with Exif metadata sample buffer attachments.
-    This method throws an NSInvalidArgumentException if jpegSampleBuffer is NULL or not in the JPEG format.
-
- @discussion
-    This method returns an NSData representation of a JPEG still image sample buffer, merging the image data and
-    Exif metadata sample buffer attachments without recompressing the image.
-    The returned NSData is suitable for writing to disk.
-*/
-+ (NSData *)jpegStillImageNSDataRepresentation:(CMSampleBufferRef)jpegSampleBuffer;
-
-@end
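
> Editor's note: a hedged sketch combining the capture call and the JPEG conversion above; `stillOutput` is assumed to be an AVCaptureStillImageOutput attached to a running session, and the output path is illustrative.

```objc
AVCaptureConnection *connection = [stillOutput connectionWithMediaType:AVMediaTypeVideo];
[stillOutput captureStillImageAsynchronouslyFromConnection:connection
                                          completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
    if (imageDataSampleBuffer == NULL) {
        NSLog(@"Still image capture failed: %@", error);
        return;
    }
    // Merges the image data and Exif attachments without recompressing.
    NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
    [jpegData writeToFile:@"/tmp/still.jpg" atomically:YES];
}];
```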
-
-#if TARGET_OS_IPHONE
-
-/*!
- @class AVCaptureBracketedStillImageSettings
- @abstract
-    AVCaptureBracketedStillImageSettings is an abstract base class that defines an interface for settings
-	pertaining to a bracketed capture.
- 
- @discussion
-    AVCaptureBracketedStillImageSettings may not be instantiated directly.
-*/
-NS_CLASS_AVAILABLE_IOS(8_0)
-@interface AVCaptureBracketedStillImageSettings : NSObject
-@end
-
-/*!
- @class AVCaptureManualExposureBracketedStillImageSettings
- @abstract
-    AVCaptureManualExposureBracketedStillImageSettings is a concrete subclass of AVCaptureBracketedStillImageSettings
-    to be used when bracketing exposure duration and ISO.
- 
- @discussion
-    An AVCaptureManualExposureBracketedStillImageSettings instance defines the exposure duration and ISO
-    settings that should be applied to one image in a bracket. An array of settings objects is passed to
-    -[AVCaptureStillImageOutput captureStillImageBracketAsynchronouslyFromConnection:withSettingsArray:completionHandler:].
-    Min and max duration and ISO values are queryable properties of the AVCaptureDevice supplying data to
-    an AVCaptureStillImageOutput instance. If you wish to leave exposureDuration unchanged for this bracketed
-    still image, you may pass the special value AVCaptureExposureDurationCurrent. To keep ISO unchanged, you may
-    pass AVCaptureISOCurrent (see AVCaptureDevice.h).
-*/
-NS_CLASS_AVAILABLE_IOS(8_0)
-@interface AVCaptureManualExposureBracketedStillImageSettings : AVCaptureBracketedStillImageSettings
-
-+ (instancetype)manualExposureSettingsWithExposureDuration:(CMTime)duration ISO:(float)ISO;
-
-@property(readonly) CMTime exposureDuration;
-@property(readonly) float ISO;
-
-@end
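
> Editor's note: a hedged iOS sketch of building a settings array for this class; durations are illustrative and ISO is left untouched via AVCaptureISOCurrent, as the discussion describes.

```objc
// A three-shot exposure-duration bracket: 1/120s, 1/60s, 1/30s.
NSArray *bracket = @[
    [AVCaptureManualExposureBracketedStillImageSettings manualExposureSettingsWithExposureDuration:CMTimeMake(1, 120) ISO:AVCaptureISOCurrent],
    [AVCaptureManualExposureBracketedStillImageSettings manualExposureSettingsWithExposureDuration:CMTimeMake(1, 60)  ISO:AVCaptureISOCurrent],
    [AVCaptureManualExposureBracketedStillImageSettings manualExposureSettingsWithExposureDuration:CMTimeMake(1, 30)  ISO:AVCaptureISOCurrent]
];
```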
-
-/*!
- @class AVCaptureAutoExposureBracketedStillImageSettings
- @abstract
-    AVCaptureAutoExposureBracketedStillImageSettings is a concrete subclass of AVCaptureBracketedStillImageSettings
-    to be used when bracketing exposure target bias.
- 
- @discussion
-    An AVCaptureAutoExposureBracketedStillImageSettings instance defines the exposure target bias
-    setting that should be applied to one image in a bracket. An array of settings objects is passed to
-    -[AVCaptureStillImageOutput captureStillImageBracketAsynchronouslyFromConnection:withSettingsArray:completionHandler:].
-    Min and max exposure target bias are queryable properties of the AVCaptureDevice supplying data to
-    an AVCaptureStillImageOutput instance. If you wish to leave exposureTargetBias unchanged for this bracketed
-    still image, you may pass the special value AVCaptureExposureTargetBiasCurrent (see AVCaptureDevice.h).
-*/
-NS_CLASS_AVAILABLE_IOS(8_0)
-@interface AVCaptureAutoExposureBracketedStillImageSettings : AVCaptureBracketedStillImageSettings
-
-+ (instancetype)autoExposureSettingsWithExposureTargetBias:(float)exposureTargetBias;
-
-@property(readonly) float exposureTargetBias;
-
-@end
-
-/*!
- @category AVCaptureStillImageOutput (BracketedCaptureMethods)
- @abstract
-    A category of methods for bracketed still image capture.
- 
- @discussion
-    A "still image bracket" is a batch of images taken as quickly as possible in succession,
-    optionally with different settings from picture to picture.
- 
-    In a bracketed capture, AVCaptureDevice flashMode property is ignored (flash is forced off), as is AVCaptureStillImageOutput's
-    automaticallyEnablesStillImageStabilizationWhenAvailable property (stabilization is forced off).
-*/
-@interface AVCaptureStillImageOutput ( BracketedCaptureMethods )
-
-/*!
- @property maxBracketedCaptureStillImageCount
- @abstract
-    Specifies the maximum number of still images that may be taken in a single bracket.
-
- @discussion
-    AVCaptureStillImageOutput can only satisfy a limited number of image requests in a single bracket without exhausting system
-    resources. The maximum number of still images that may be taken in a single bracket depends on the size of the images being captured,
-    and consequently may vary with AVCaptureSession -sessionPreset and AVCaptureDevice -activeFormat.  Some formats do not support
-    bracketed capture and return a maxBracketedCaptureStillImageCount of 0.  This read-only property is key-value observable.
-	If you exceed -maxBracketedCaptureStillImageCount, then -captureStillImageBracketAsynchronouslyFromConnection:withSettingsArray:completionHandler:
-	fails and the completionHandler is called [settings count] times with a NULL sample buffer and AVErrorMaximumStillImageCaptureRequestsExceeded.
-*/
-@property(nonatomic, readonly) NSUInteger maxBracketedCaptureStillImageCount NS_AVAILABLE_IOS(8_0);
-
-/*!
- @method prepareToCaptureStillImageBracketFromConnection:withSettingsArray:completionHandler:
- @abstract
-    Allows the receiver to prepare resources in advance of capturing a still image bracket.
- 
- @param connection
-    The connection through which the still image bracket should be captured.
- 
- @param settings
-    An array of AVCaptureBracketedStillImageSettings objects. All must be of the same kind of AVCaptureBracketedStillImageSettings
-    subclass, or an NSInvalidArgumentException is thrown.
- 
- @param completionHandler
-    A user provided block that will be called asynchronously once resources have successfully been allocated
-    for the specified bracketed capture operation. If sufficient resources could not be allocated, the
-    "prepared" parameter contains NO, and "error" parameter contains a non-nil error value. If [settings count]
-    exceeds -maxBracketedCaptureStillImageCount, then AVErrorMaximumStillImageCaptureRequestsExceeded is returned.
-    You should not assume that the completion handler will be called on a specific thread.
- 
- @discussion
-    -maxBracketedCaptureStillImageCount tells you the maximum number of images that may be taken in a single
-    bracket given the current AVCaptureDevice/AVCaptureSession/AVCaptureStillImageOutput configuration. But before
-    taking a still image bracket, additional resources may need to be allocated. By calling
-    -prepareToCaptureStillImageBracketFromConnection:withSettingsArray:completionHandler: first, you are able to 
-    deterministically know when the receiver is ready to capture the bracket with the specified settings array.
-
-*/
-- (void)prepareToCaptureStillImageBracketFromConnection:(AVCaptureConnection *)connection withSettingsArray:(NSArray *)settings completionHandler:(void (^)(BOOL prepared, NSError *error))handler NS_AVAILABLE_IOS(8_0);
-
-/*!
- @method captureStillImageBracketAsynchronouslyFromConnection:withSettingsArray:completionHandler:
- @abstract
-    Captures a still image bracket.
- 
- @param connection
-    The connection through which the still image bracket should be captured.
- 
- @param settings
-    An array of AVCaptureBracketedStillImageSettings objects. All must be of the same kind of AVCaptureBracketedStillImageSettings
-    subclass, or an NSInvalidArgumentException is thrown.
- 
- @param completionHandler
-    A user provided block that will be called asynchronously as each still image in the bracket is captured.
-    If the capture request is successful, the "sampleBuffer" parameter contains a valid CMSampleBuffer, the
-    "stillImageSettings" parameter contains the settings object corresponding to this still image, and a nil
-    "error" parameter. If the bracketed capture fails, sample buffer is NULL and error is non-nil.
-    If [settings count] exceeds -maxBracketedCaptureStillImageCount, then AVErrorMaximumStillImageCaptureRequestsExceeded 
-    is returned. You should not assume that the completion handler will be called on a specific thread.
- 
- @discussion
-    If you have not called -prepareToCaptureStillImageBracketFromConnection:withSettingsArray:completionHandler: for this 
-    still image bracket request, the bracket may not be taken immediately, as the receiver may internally need to 
-    prepare resources.
-*/
-- (void)captureStillImageBracketAsynchronouslyFromConnection:(AVCaptureConnection *)connection withSettingsArray:(NSArray *)settings completionHandler:(void (^)(CMSampleBufferRef sampleBuffer, AVCaptureBracketedStillImageSettings *stillImageSettings, NSError *error))handler NS_AVAILABLE_IOS(8_0);
-
-@end
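
> Editor's note: a hedged continuation of the bracket sketch above: prepare resources first, then capture. `stillOutput`, `connection`, and `bracket` are the hypothetical names from the earlier notes; the capture handler fires once per image in the bracket.

```objc
[stillOutput prepareToCaptureStillImageBracketFromConnection:connection
                                           withSettingsArray:bracket
                                           completionHandler:^(BOOL prepared, NSError *error) {
    if (!prepared) {
        NSLog(@"Bracket preparation failed: %@", error);
        return;
    }
    [stillOutput captureStillImageBracketAsynchronouslyFromConnection:connection
                                                    withSettingsArray:bracket
                                                    completionHandler:^(CMSampleBufferRef sampleBuffer, AVCaptureBracketedStillImageSettings *stillImageSettings, NSError *captureError) {
        if (sampleBuffer != NULL) {
            NSLog(@"Captured bracketed image for settings: %@", stillImageSettings);
        }
    }];
}];
```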
-
-#endif // TARGET_OS_IPHONE
-
-
-#if (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-
-@class AVCaptureAudioPreviewOutputInternal;
-
-/*!
- @class AVCaptureAudioPreviewOutput
- @abstract
-    AVCaptureAudioPreviewOutput is a concrete subclass of AVCaptureOutput that can be used to preview the audio being
-    captured.
- 
- @discussion
-    Instances of AVCaptureAudioPreviewOutput have an associated Core Audio output device that can be used to play audio
-    being captured by the capture session. The unique ID of a Core Audio device can be obtained from its
-    kAudioDevicePropertyDeviceUID property.
-*/
-NS_CLASS_AVAILABLE(10_7, NA)
-@interface AVCaptureAudioPreviewOutput : AVCaptureOutput 
-{
-@private
-	AVCaptureAudioPreviewOutputInternal *_internal;
-}
-
-/*!
- @property outputDeviceUniqueID
- @abstract
-    Specifies the unique ID of the Core Audio output device being used to play preview audio.
-
- @discussion
-    The value of this property is an NSString containing the unique ID of the Core Audio device to be used for output, or
-    nil if the default system output should be used.
-*/
-@property(nonatomic, copy) NSString *outputDeviceUniqueID;
-
-/*!
- @property volume
- @abstract
-    Specifies the preview volume of the output.
-
- @discussion
-    The value of this property is the preview volume of the receiver, where 1.0 is the maximum volume and 0.0 is muted. 
-*/
-@property(nonatomic) float volume;
-
-@end
-
-#endif // (TARGET_OS_MAC && !(TARGET_OS_EMBEDDED || TARGET_OS_IPHONE))
-
-
-@class AVCaptureMetadataOutputInternal;
-@protocol AVCaptureMetadataOutputObjectsDelegate;
-
-/*!
- @class AVCaptureMetadataOutput
- @abstract
-    AVCaptureMetadataOutput is a concrete subclass of AVCaptureOutput that can be used to process metadata objects
-    from an attached connection.
-
- @discussion
-    Instances of AVCaptureMetadataOutput emit arrays of AVMetadataObject instances (see AVMetadataObject.h), such 
-    as detected faces. Applications can access the metadata objects with the captureOutput:didOutputMetadataObjects:fromConnection: 
-    delegate method.
-*/
-NS_CLASS_AVAILABLE(NA, 6_0)
-@interface AVCaptureMetadataOutput : AVCaptureOutput 
-{
-@private
-	AVCaptureMetadataOutputInternal *_internal;
-}
-
-/*!
- @method setMetadataObjectsDelegate:queue:
- @abstract
-    Sets the receiver's delegate that will accept metadata objects and dispatch queue on which the delegate will be
-    called.
-
- @param objectsDelegate
-    An object conforming to the AVCaptureMetadataOutputObjectsDelegate protocol that will receive metadata objects
-    after they are captured.
- @param objectsCallbackQueue
-    A dispatch queue on which all delegate methods will be called.
-
- @discussion
-    When new metadata objects are captured in the receiver's connection, they will be vended to the delegate using the
-    captureOutput:didOutputMetadataObjects:fromConnection: delegate method. All delegate methods will be called on the
-    specified dispatch queue.
-
-    Clients that need to minimize the chances of metadata being dropped should specify a queue on which a sufficiently
-    small amount of processing is performed along with receiving metadata objects.
-
-    A serial dispatch queue must be used to guarantee that metadata objects will be delivered in order.
-    The objectsCallbackQueue parameter may not be NULL, except when setting the objectsDelegate
-    to nil.
-*/
-- (void)setMetadataObjectsDelegate:(id<AVCaptureMetadataOutputObjectsDelegate>)objectsDelegate queue:(dispatch_queue_t)objectsCallbackQueue;
-
-/*!
- @property metadataObjectsDelegate
- @abstract
-    The receiver's delegate.
- 
- @discussion
-    The value of this property is an object conforming to the AVCaptureMetadataOutputObjectsDelegate protocol that
-    will receive metadata objects after they are captured. The delegate is set using the setMetadataObjectsDelegate:queue:
-    method.
-*/
-@property(nonatomic, readonly) id<AVCaptureMetadataOutputObjectsDelegate> metadataObjectsDelegate;
-
-/*!
- @property metadataObjectsCallbackQueue
- @abstract
-    The dispatch queue on which all metadata object delegate methods will be called.
-
- @discussion
-    The value of this property is a dispatch_queue_t. The queue is set using the setMetadataObjectsDelegate:queue: method.
-*/
-@property(nonatomic, readonly) dispatch_queue_t metadataObjectsCallbackQueue;
-
-/*!
- @property availableMetadataObjectTypes
- @abstract
-    Indicates the receiver's supported metadata object types.
- 
- @discussion
-    The value of this property is an NSArray of NSStrings corresponding to AVMetadataObjectType strings defined
-    in AVMetadataObject.h -- one for each metadata object type supported by the receiver.  Available 
-    metadata object types are dependent on the capabilities of the AVCaptureInputPort to which this receiver's 
-    AVCaptureConnection is connected.  Clients may specify the types of objects they would like to process
-    by calling setMetadataObjectTypes:.  This property is key-value observable.
-*/
-@property(nonatomic, readonly) NSArray *availableMetadataObjectTypes;
-
-/*!
- @property metadataObjectTypes
- @abstract
-    Specifies the types of metadata objects that the receiver should present to the client.
-
- @discussion
-	AVCaptureMetadataOutput may detect and emit multiple metadata object types.  For apps linked before iOS 7.0, the 
-	receiver defaults to capturing face metadata objects if supported (see -availableMetadataObjectTypes).  For apps 
-	linked on or after iOS 7.0, the receiver captures no metadata objects by default.  -setMetadataObjectTypes: throws 
-	an NSInvalidArgumentException if any elements in the array are not present in the -availableMetadataObjectTypes array.
-*/
-@property(nonatomic, copy) NSArray *metadataObjectTypes;
-
-/*!
- @property rectOfInterest
- @abstract
-	Specifies a rectangle of interest for limiting the search area for visual metadata.
- 
- @discussion
-	The value of this property is a CGRect that determines the receiver's rectangle of interest for each frame of video.  
-	The rectangle's origin is top left and is relative to the coordinate space of the device providing the metadata.  Specifying 
-	a rectOfInterest may improve detection performance for certain types of metadata. The default value of this property is the 
-	value CGRectMake(0, 0, 1, 1).  Metadata objects whose bounds do not intersect with the rectOfInterest will not be returned.
- */
-@property(nonatomic) CGRect rectOfInterest NS_AVAILABLE_IOS(7_0);
-
-@end
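
> Editor's note: a hedged iOS sketch of a typical metadata output setup per the discussion above: a serial delivery queue, an explicit object-type opt-in, and a restricted rect of interest. `self` is assumed to conform to AVCaptureMetadataOutputObjectsDelegate, and the output is assumed to already be attached to a session so availableMetadataObjectTypes is populated.

```objc
AVCaptureMetadataOutput *metadataOutput = [[AVCaptureMetadataOutput alloc] init];
// A serial queue guarantees metadata objects are delivered in order.
dispatch_queue_t metadataQueue = dispatch_queue_create("metadata.queue", DISPATCH_QUEUE_SERIAL);
[metadataOutput setMetadataObjectsDelegate:self queue:metadataQueue];
if ([metadataOutput.availableMetadataObjectTypes containsObject:AVMetadataObjectTypeFace]) {
    metadataOutput.metadataObjectTypes = @[AVMetadataObjectTypeFace];
}
// Only search the center quarter of the picture area.
metadataOutput.rectOfInterest = CGRectMake(0.25, 0.25, 0.5, 0.5);
```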
-
-/*!
- @protocol AVCaptureMetadataOutputObjectsDelegate
- @abstract
-    Defines an interface for delegates of AVCaptureMetadataOutput to receive emitted objects.
-*/
-@protocol AVCaptureMetadataOutputObjectsDelegate <NSObject>
-
-@optional
-
-/*!
- @method captureOutput:didOutputMetadataObjects:fromConnection:
- @abstract
-    Called whenever an AVCaptureMetadataOutput instance emits new objects through a connection.
-
- @param captureOutput
-    The AVCaptureMetadataOutput instance that emitted the objects.
- @param metadataObjects
-    An array of AVMetadataObject subclasses (see AVMetadataObject.h).
- @param connection
-    The AVCaptureConnection through which the objects were emitted.
-
- @discussion
-    Delegates receive this message whenever the output captures and emits new objects, as specified by
-    its metadataObjectTypes property. Delegates can use the provided objects in conjunction with other APIs
-    for further processing. This method will be called on the dispatch queue specified by the output's
-    metadataObjectsCallbackQueue property. This method may be called frequently, so it must be efficient to 
-    prevent capture performance problems, including dropped metadata objects.
-
-    Clients that need to reference metadata objects outside of the scope of this method must retain them and
-    then release them when they are finished with them.
-*/
-- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection;
+#import <AVFoundation/AVBase.h>
 
-@end
+#import <AVFoundation/AVCaptureAudioDataOutput.h>
+#import <AVFoundation/AVCaptureAudioPreviewOutput.h>
+#import <AVFoundation/AVCaptureDepthDataOutput.h>
+#import <AVFoundation/AVCaptureFileOutput.h>
+#import <AVFoundation/AVCaptureMetadataOutput.h>
+#import <AVFoundation/AVCapturePhotoOutput.h>
+#import <AVFoundation/AVCaptureStillImageOutput.h>
+#import <AVFoundation/AVCaptureVideoDataOutput.h>
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutputBase.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutputBase.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutputBase.h	1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureOutputBase.h	2017-05-24 00:28:27.000000000 -0400
@@ -0,0 +1,140 @@
+/*
+    File:  AVCaptureOutputBase.h
+ 
+    Framework:  AVFoundation
+ 
+    Copyright 2017 Apple Inc. All rights reserved.
+*/
+
+#import <AVFoundation/AVBase.h>
+#import <AVFoundation/AVMediaFormat.h>
+#import <Foundation/Foundation.h>
+#import <AVFoundation/AVCaptureSession.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark AVCaptureOutput
+
+@class AVMetadataObject;
+@class AVCaptureOutputInternal;
+
+/*!
+ @class AVCaptureOutput
+ @abstract
+    AVCaptureOutput is an abstract class that defines an interface for an output destination of an AVCaptureSession.
+ 
+ @discussion
+    AVCaptureOutput provides an abstract interface for connecting capture output destinations, such as files and video previews, to an AVCaptureSession.
+
+    An AVCaptureOutput can have multiple connections represented by AVCaptureConnection objects, one for each stream of media that it receives from an AVCaptureInput. An AVCaptureOutput does not have any connections when it is first created. When an output is added to an AVCaptureSession, connections are created that map media data from that session's inputs to its outputs.
+
+    Concrete AVCaptureOutput instances can be added to an AVCaptureSession using the -[AVCaptureSession addOutput:] and -[AVCaptureSession addOutputWithNoConnections:] methods.
+ */
+NS_CLASS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED
+@interface AVCaptureOutput : NSObject
+{
+@private
+    AVCaptureOutputInternal *_outputInternal;
+}
+
+AV_INIT_UNAVAILABLE
+
+/*!
+ @property connections
+ @abstract
+    The connections that describe the flow of media data to the receiver from AVCaptureInputs.
+
+ @discussion
+    The value of this property is an NSArray of AVCaptureConnection objects, each describing the mapping between the receiver and the AVCaptureInputPorts of one or more AVCaptureInputs.
+ */
+@property(nonatomic, readonly) NSArray<AVCaptureConnection *> *connections;
+
+/*!
+ @method connectionWithMediaType:
+ @abstract
+    Returns the first connection in the connections array with an inputPort of the specified mediaType.
+
+ @param mediaType
+    An AVMediaType constant from AVMediaFormat.h, e.g. AVMediaTypeVideo.
+
+ @discussion
+    This convenience method returns the first AVCaptureConnection in the receiver's connections array that has an AVCaptureInputPort of the specified mediaType. If no connection with the specified mediaType is found, nil is returned.
+ */
+- (nullable AVCaptureConnection *)connectionWithMediaType:(AVMediaType)mediaType NS_AVAILABLE(10_7, 5_0);
+
+/*!
+ @method transformedMetadataObjectForMetadataObject:connection:
+ @abstract
+    Converts an AVMetadataObject's visual properties to the receiver's coordinates.
+
+ @param metadataObject
+    An AVMetadataObject originating from the same AVCaptureInput as the receiver.
+ @param connection
+    The receiver's connection whose AVCaptureInput matches that of the metadata object to be converted.
+ @result
+    An AVMetadataObject whose properties are in output coordinates.
+
+ @discussion
+    AVMetadataObject bounds may be expressed as a rect where {0,0} represents the top left of the picture area, and {1,1} represents the bottom right on an unrotated picture. Face metadata objects likewise express yaw and roll angles with respect to an unrotated picture. -transformedMetadataObjectForMetadataObject:connection: converts the visual properties in the coordinate space of the supplied AVMetadataObject to the coordinate space of the receiver. The conversion takes orientation, mirroring, and scaling into consideration. If the provided metadata object originates from an input source other than the preview layer's, nil will be returned.
+ 
+    If an AVCaptureVideoDataOutput instance's connection's videoOrientation or videoMirrored properties are set to non-default values, the output applies the desired mirroring and orientation by physically rotating and/or flipping sample buffers as they pass through it. AVCaptureStillImageOutput, on the other hand, does not physically rotate its buffers. It attaches an appropriate kCGImagePropertyOrientation number to captured still image buffers (see ImageIO/CGImageProperties.h) indicating how the image should be displayed on playback. Likewise, AVCaptureMovieFileOutput does not physically apply orientation/mirroring to its sample buffers -- it uses a QuickTime track matrix to indicate how the buffers should be rotated and/or flipped on playback.
+ 
+    transformedMetadataObjectForMetadataObject:connection: alters the visual properties of the provided metadata object to match the physical rotation / mirroring of the sample buffers provided by the receiver through the indicated connection. I.e., for video data output, adjusted metadata object coordinates are rotated/mirrored. For still image and movie file output, they are not.
+ */
+- (nullable AVMetadataObject *)transformedMetadataObjectForMetadataObject:(AVMetadataObject *)metadataObject connection:(AVCaptureConnection *)connection NS_AVAILABLE_IOS(6_0);
+
+/*!
+ @method metadataOutputRectOfInterestForRect:
+ @abstract
+    Converts a rectangle in the receiver's coordinate space to a rectangle of interest in the coordinate space of an AVCaptureMetadataOutput whose capture device is providing input to the receiver.
+ 
+ @param rectInOutputCoordinates
+    A CGRect in the receiver's coordinates.
+ @result
+    A CGRect in the coordinate space of the metadata output whose capture device is providing input to the receiver.
+ 
+ @discussion
+    AVCaptureMetadataOutput rectOfInterest is expressed as a CGRect where {0,0} represents the top left of the picture area, and {1,1} represents the bottom right on an unrotated picture. This convenience method converts a rectangle in the coordinate space of the receiver to a rectangle of interest in the coordinate space of an AVCaptureMetadataOutput whose AVCaptureDevice is providing input to the receiver. The conversion takes orientation, mirroring, and scaling into consideration. See -transformedMetadataObjectForMetadataObject:connection: for a full discussion of how orientation and mirroring are applied to sample buffers passing through the output.
+ */
+- (CGRect)metadataOutputRectOfInterestForRect:(CGRect)rectInOutputCoordinates NS_AVAILABLE_IOS(7_0);
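+
+// Usage sketch (illustrative only, not part of the SDK header): restricting an
+// AVCaptureMetadataOutput's scanning area using a rect expressed in a video
+// data output's coordinate space. Assumes `videoDataOutput` and `metadataOutput`
+// share the same AVCaptureDevice input on a running session.
+//
+//     // `rectInOutputCoords` is a CGRect in videoDataOutput's coordinate space.
+//     metadataOutput.rectOfInterest =
+//         [videoDataOutput metadataOutputRectOfInterestForRect:rectInOutputCoords];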
+
+/*!
+ @method rectForMetadataOutputRectOfInterest:
+ @abstract
+    Converts a rectangle of interest in the coordinate space of an AVCaptureMetadataOutput whose capture device is providing input to the receiver to a rectangle in the receiver's coordinates.
+ 
+ @param rectInMetadataOutputCoordinates
+    A CGRect in the coordinate space of the metadata output whose capture device is providing input to the receiver.
+ @result
+    A CGRect in the receiver's coordinates.
+ 
+ @discussion
+    AVCaptureMetadataOutput rectOfInterest is expressed as a CGRect where {0,0} represents the top left of the picture area, and {1,1} represents the bottom right on an unrotated picture. This convenience method converts a rectangle in the coordinate space of an AVCaptureMetadataOutput whose AVCaptureDevice is providing input to the coordinate space of the receiver. The conversion takes orientation, mirroring, and scaling into consideration. See -transformedMetadataObjectForMetadataObject:connection: for a full discussion of how orientation and mirroring are applied to sample buffers passing through the output.
+ */
+- (CGRect)rectForMetadataOutputRectOfInterest:(CGRect)rectInMetadataOutputCoordinates NS_AVAILABLE_IOS(7_0);
+
+@end
+
+
+/*!
+ @enum AVCaptureOutputDataDroppedReason
+ @abstract
+    Constants indicating the reason a capture data output dropped data.
+
+ @constant AVCaptureOutputDataDroppedReasonNone
+    No data was dropped.
+ @constant AVCaptureOutputDataDroppedReasonLateData
+    Data was dropped because alwaysDiscardsLate{VideoFrames | DepthData} is YES and the client was still processing previous data when the current data needed to be delivered.
+ @constant AVCaptureOutputDataDroppedReasonOutOfBuffers
+    Data was dropped because its pool of buffers ran dry. This is usually indicative that the client is holding onto data objects too long.
+ @constant AVCaptureOutputDataDroppedReasonDiscontinuity
+    Data was dropped because the device providing the data experienced a discontinuity, and an unknown number of data objects have been lost. This condition is typically caused by the system being too busy.
+ */
+typedef NS_ENUM(NSInteger, AVCaptureOutputDataDroppedReason) {
+    AVCaptureOutputDataDroppedReasonNone          = 0,
+    AVCaptureOutputDataDroppedReasonLateData      = 1,
+    AVCaptureOutputDataDroppedReasonOutOfBuffers  = 2,
+    AVCaptureOutputDataDroppedReasonDiscontinuity = 3,
+} NS_AVAILABLE_IOS(11_0) __TVOS_PROHIBITED __WATCHOS_PROHIBITED;
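+
+// Usage sketch (illustrative only, not part of the SDK header): reacting to a
+// dropped-data reason. Assumes `syncedData` is an
+// AVCaptureSynchronizedSampleBufferData received from an
+// AVCaptureDataOutputSynchronizer delegate callback.
+//
+//     if (syncedData.sampleBufferWasDropped) {
+//         switch (syncedData.droppedReason) {
+//             case AVCaptureOutputDataDroppedReasonLateData:
+//                 break; // Shorten per-frame processing or shed work.
+//             case AVCaptureOutputDataDroppedReasonOutOfBuffers:
+//                 break; // Release retained data objects sooner.
+//             case AVCaptureOutputDataDroppedReasonDiscontinuity:
+//                 break; // An unknown number of frames were lost; resynchronize.
+//             case AVCaptureOutputDataDroppedReasonNone:
+//                 break;
+//         }
+//     }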
+
+NS_ASSUME_NONNULL_END
diff -ruN /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCapturePhotoOutput.h /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCapturePhotoOutput.h
--- /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.12.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCapturePhotoOutput.h	1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode9-beta1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCapturePhotoOutput.h	2017-05-24 00:37:44.000000000 -0400
@@ -0,0 +1,1433 @@
+/*
+    File:  AVCapturePhotoOutput.h
+ 
+    Framework:  AVFoundation
+ 
+    Copyright 2016-2017 Apple Inc. All rights reserved.
+*/
+
+#import <AVFoundation/AVCaptureOutputBase.h>
+#import <AVFoundation/AVVideoSettings.h>
+#import <CoreMedia/CMSampleBuffer.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+#pragma mark AVCapturePhotoOutput
+
+@class AVCapturePhotoSettings;
+@class AVCapturePhotoBracketSettings;
+@class AVCaptureResolvedPhotoSettings;
+@class AVCaptureBracketedStillImageSettings;
+@class AVMetadataItem;
+@class AVCapturePhotoOutputInternal;
+
+@protocol AVCapturePhotoCaptureDelegate;
+
+/*!
+ @class AVCapturePhotoOutput
+ @abstract
+    AVCapturePhotoOutput is a concrete subclass of AVCaptureOutput that supersedes AVCaptureStillImageOutput as the preferred interface for capturing photos. In addition to capturing all flavors of still image supported by AVCaptureStillImageOutput, it supports Live Photo capture, preview-sized image delivery, wide color, RAW, RAW+JPG and RAW+DNG formats.
+ 
+ @discussion
+    Taking a photo is multi-step process. Clients wishing to build a responsive UI need to know about the progress of a photo capture request as it advances from capture to processing to finished delivery. AVCapturePhotoOutput informs clients of photo capture progress through a delegate protocol. To take a picture, a client instantiates and configures an AVCapturePhotoSettings object, then calls AVCapturePhotoOutput's -capturePhotoWithSettings:delegate:, passing a delegate to be informed when events relating to the photo capture occur (e.g., the photo is about to be captured, the photo has been captured but not processed yet, the Live Photo movie is ready, etc.).
+ 
+    Some AVCapturePhotoSettings properties are "Auto", such as autoStillImageStabilizationEnabled. When set to YES, the photo output decides at capture time whether the current scene and lighting conditions require still image stabilization. Thus the client doesn't know with certainty which features are enabled when making the capture request. With the first and each subsequent delegate callback, the client is provided an AVCaptureResolvedPhotoSettings instance that indicates the settings that were applied to the capture. All "Auto" features have now been resolved to on or off. The AVCaptureResolvedPhotoSettings object passed in the client's delegate callbacks has a uniqueID identical to the AVCapturePhotoSettings request. This uniqueID allows clients to pair unresolved and resolved settings objects. See AVCapturePhotoCaptureDelegate below for a detailed discussion of the delegate callbacks.
+ 
+    Enabling certain photo features (Live Photo capture and high resolution capture) requires a reconfiguration of the capture render pipeline. Clients wishing to opt in for these features should call -setLivePhotoCaptureEnabled: and/or -setHighResolutionCaptureEnabled: before calling -startRunning on the AVCaptureSession. Changing any of these properties while the session is running requires a disruptive reconfiguration of the capture render pipeline. Live Photo captures in progress will be ended immediately; unfulfilled photo requests will be aborted; video preview will temporarily freeze. If you wish to capture Live Photos containing sound, you must add an audio AVCaptureDeviceInput to your AVCaptureSession.
+
+    Simultaneous Live Photo capture and movie file output capture is not supported. If an AVCaptureMovieFileOutput is added to your session, AVCapturePhotoOutput's livePhotoCaptureSupported property returns NO. Note that the simultaneous use of Live Photo capture and an AVCaptureVideoDataOutput is supported.
+ 
+    AVCaptureStillImageOutput and AVCapturePhotoOutput may not both be added to a capture session. You must use one or the other. If you add both to a session, an NSInvalidArgumentException is thrown.
+ 
+    AVCapturePhotoOutput implicitly supports wide color photo capture, following the activeColorSpace of the source AVCaptureDevice. If the source device's activeColorSpace is AVCaptureColorSpace_P3_D65, photos are encoded with wide color information, unless you've specified an output format of '420v', which does not support wide color.
+ */
+NS_CLASS_AVAILABLE_IOS(10_0) __TVOS_PROHIBITED
+@interface AVCapturePhotoOutput : AVCaptureOutput
+{
+@private
+    AVCapturePhotoOutputInternal *_internal;
+}
+
+- (instancetype)init;
+
++ (instancetype)new;
+
+/*!
+ @method capturePhotoWithSettings:delegate:
+ @abstract
+    Method for initiating a photo capture request with progress monitoring through the supplied delegate.
+ 
+ @param settings
+    An AVCapturePhotoSettings object you have configured. May not be nil.
+ @param delegate
+    An object conforming to the AVCapturePhotoCaptureDelegate protocol. This object's delegate methods are called back as the photo advances from capture to processing to finished delivery. May not be nil.
+ 
+ @discussion
+    This method initiates a photo capture. The receiver copies your provided settings to prevent unintentional mutation. It is illegal to re-use settings. The receiver throws an NSInvalidArgumentException if your settings.uniqueID matches that of any previously used settings. This method is used to initiate all flavors of photo capture: single photo, RAW capture with or without a processed image (such as a JPEG), bracketed capture, and Live Photo.
+ 
+    Clients need not wait for a photo capture request to complete before issuing another request. This is true for single photo captures as well as Live Photos, where movie complements of adjacent photo captures are allowed to overlap.
+ 
+    This method validates your settings and enforces the following rules in order to ensure deterministic behavior. If any of these rules are violated, an NSInvalidArgumentException is thrown.
+    RAW rules:
+        - If rawPhotoPixelFormatType is non-zero, it must be present in the receiver's -availableRawPhotoPixelFormatTypes array.
+        - If rawPhotoPixelFormatType is non-zero, autoStillImageStabilizationEnabled must be set to NO.
+        - If rawPhotoPixelFormatType is non-zero, your delegate must respond to -captureOutput:didFinishProcessingRawPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:.
+        - If rawPhotoPixelFormatType is non-zero, highResolutionPhotoEnabled may be YES or NO, but the setting only applies to the processed image, if you've specified one.
+        - If rawPhotoPixelFormatType is non-zero, the videoZoomFactor of the source device and the videoScaleAndCropFactor of the photo output's video connection must both be 1.0. Ensure no zoom is applied before requesting a RAW capture, and don't change the zoom during RAW capture.
+        - If rawFileType is specified, it must be present in -availableRawPhotoFileTypes and must support the rawPhotoPixelFormatType specified using -supportedRawPhotoPixelFormatTypesForFileType:.
+    Processed Format rules:
+        - If format is non-nil, it must contain either kCVPixelBufferPixelFormatTypeKey or AVVideoCodecKey, but not both.
+        - If format has a kCVPixelBufferPixelFormatTypeKey, its value must be present in the receiver's -availablePhotoPixelFormatTypes array.
+        - If format has an AVVideoCodecKey, its value must be present in the receiver's -availablePhotoCodecTypes array.
+        - If format is non-nil, your delegate must respond to -captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:.
+        - If processedFileType is specified, it must be present in -availablePhotoFileTypes and must support the format's specified kCVPixelBufferPixelFormatTypeKey (using -supportedPhotoPixelFormatTypesForFileType:) or AVVideoCodecKey (using -supportedPhotoCodecTypesForFileType:).
+    Flash rules:
+        - The specified flashMode must be present in the receiver's -supportedFlashModes array.
+    Live Photo rules:
+        - The receiver's livePhotoCaptureEnabled must be YES if settings.livePhotoMovieURL is non-nil.
+        - If settings.livePhotoMovieURL is non-nil, the receiver's livePhotoCaptureSuspended property must be set to NO.
+        - If settings.livePhotoMovieURL is non-nil, it must be a file URL that's accessible to your app's sandbox.
+        - If settings.livePhotoMovieURL is non-nil, your delegate must respond to -captureOutput:didFinishProcessingLivePhotoToMovieFileAtURL:duration:photoDisplayTime:resolvedSettings:error:.
+    Bracketed capture rules:
+        - bracketedSettings.count must be <= the receiver's maxBracketedCapturePhotoCount property.
+        - For manual exposure brackets, ISO value must be within the source device activeFormat's minISO and maxISO values.
+        - For manual exposure brackets, exposureDuration value must be within the source device activeFormat's minExposureDuration and maxExposureDuration values.
+        - For auto exposure brackets, exposureTargetBias value must be within the source device's minExposureTargetBias and maxExposureTargetBias values.
+ */
+- (void)capturePhotoWithSettings:(AVCapturePhotoSettings *)settings delegate:(id<AVCapturePhotoCaptureDelegate>)delegate;
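+
+// Usage sketch (illustrative only, not part of the SDK header): a minimal JPEG
+// capture honoring the rules above. Assumes `photoOutput` has been added to a
+// running AVCaptureSession and `self` conforms to AVCapturePhotoCaptureDelegate.
+//
+//     AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
+//     if ([photoOutput.supportedFlashModes containsObject:@(AVCaptureFlashModeAuto)]) {
+//         settings.flashMode = AVCaptureFlashModeAuto;
+//     }
+//     [photoOutput capturePhotoWithSettings:settings delegate:self]; // settings may not be re-used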
+
+/*!
+ @property preparedPhotoSettingsArray
+ @abstract
+    An array of AVCapturePhotoSettings instances for which the receiver is prepared to capture.
+
+ @discussion
+    @seealso setPreparedPhotoSettingsArray:completionHandler:
+    Some types of photo capture, such as bracketed captures and RAW captures, require the receiver to allocate additional buffers or prepare other resources. To prevent photo capture requests from executing slowly due to lazy resource allocation, you may call -setPreparedPhotoSettingsArray:completionHandler: with an array of settings objects representative of the types of capture you will be performing (e.g., settings for a bracketed capture, RAW capture, and/or still image stabilization capture). By default, the receiver prepares sufficient resources to capture photos with default settings, +[AVCapturePhotoSettings photoSettings].
+ */
+@property(nonatomic, readonly) NSArray<AVCapturePhotoSettings *> *preparedPhotoSettingsArray;
+
+/*!
+ @method setPreparedPhotoSettingsArray:completionHandler:
+ @abstract
+    Method allowing the receiver to prepare resources in advance for future -capturePhotoWithSettings:delegate: requests.
+ 
+ @param preparedPhotoSettingsArray
+    An array of AVCapturePhotoSettings instances indicating the types of capture for which the receiver should prepare resources.
+ @param completionHandler
+    A completion block to be fired on a serial dispatch queue once the receiver has finished preparing. You may pass nil to indicate you do not wish to be called back when preparation is complete.
+ 
+ @discussion
+    Some types of photo capture, such as bracketed captures and RAW captures, require the receiver to allocate additional buffers or prepare other resources. To prevent photo capture requests from executing slowly due to lazy resource allocation, you may call this method with an array of settings objects representative of the types of capture you will be performing (e.g., settings for a bracketed capture, RAW capture, and/or still image stabilization capture). You may call this method even before calling -[AVCaptureSession startRunning] in order to hint the receiver up front which features you'll be utilizing. Each time you call this method with an array of settings, the receiver evaluates what additional resources it needs to allocate, as well as existing resources that can be reclaimed, and calls back your completionHandler when it has finished preparing (and possibly reclaiming) needed resources. By default, the receiver prepares sufficient resources to capture photos with default settings, +[AVCapturePhotoSettings photoSettings]. If you wish to reclaim all possible resources, you may call this method with an empty array.
+ 
+    Preparation for photo capture is always optional. You may call -capturePhotoWithSettings:delegate: without first calling -setPreparedPhotoSettingsArray:completionHandler:, but be advised that some of your photo captures may execute slowly as additional resources are allocated just-in-time.
+ 
+    If you call this method while your AVCaptureSession is not running, your completionHandler does not fire immediately. It only fires once you've called -[AVCaptureSession startRunning], and the needed resources have actually been prepared. If you call -setPreparedPhotoSettingsArray:completionHandler: with an array of settings, and then call it a second time, your first prepare call's completionHandler fires immediately with prepared == NO.
+ 
+    Prepared settings persist across session starts/stops and committed configuration changes. This property participates in -[AVCaptureSession beginConfiguration] / -[AVCaptureSession commitConfiguration] deferred work behavior. That is, if you call -[AVCaptureSession beginConfiguration], change your session's input/output topology, and call this method, preparation is deferred until you call -[AVCaptureSession commitConfiguration], enabling you to atomically commit a new configuration as well as prepare to take photos in that new configuration.
+ */
+- (void)setPreparedPhotoSettingsArray:(NSArray<AVCapturePhotoSettings *> *)preparedPhotoSettingsArray completionHandler:(nullable void (^)(BOOL prepared, NSError * _Nullable error))completionHandler;
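+
+// Usage sketch (illustrative only, not part of the SDK header): pre-allocating
+// resources for an upcoming RAW+JPEG capture so the capture itself isn't slowed
+// by just-in-time allocation. Assumes the receiver supports RAW capture.
+//
+//     OSType rawFormat =
+//         photoOutput.availableRawPhotoPixelFormatTypes.firstObject.unsignedIntValue;
+//     AVCapturePhotoSettings *rawPlusJPEG = [AVCapturePhotoSettings
+//         photoSettingsWithRawPixelFormatType:rawFormat
+//                             processedFormat:@{ AVVideoCodecKey : AVVideoCodecTypeJPEG }];
+//     [photoOutput setPreparedPhotoSettingsArray:@[ rawPlusJPEG ]
+//                              completionHandler:^(BOOL prepared, NSError * _Nullable error) {
+//         // Once `prepared` is YES, captures of this type incur no allocation penalty.
+//     }];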
+
+/*!
+ @property availablePhotoPixelFormatTypes
+ @abstract
+    An array of kCVPixelBufferPixelFormatTypeKey values that are currently supported by the receiver.
+
+ @discussion
+    If you wish to capture a photo in an uncompressed format, such as 420f, 420v, or BGRA, you must ensure that the format you want is present in the receiver's availablePhotoPixelFormatTypes array. If you've not yet added your receiver to an AVCaptureSession with a video source, no pixel format types are available. This property is key-value observable.
+ */
+@property(nonatomic, readonly) NSArray<NSNumber *> *availablePhotoPixelFormatTypes;
+
+/*!
+ @property availablePhotoCodecTypes
+ @abstract
+    An array of AVVideoCodecKey values that are currently supported by the receiver.
+
+ @discussion
+    If you wish to capture a photo in a compressed format, such as JPEG, you must ensure that the format you want is present in the receiver's availablePhotoCodecTypes array. If you've not yet added your receiver to an AVCaptureSession with a video source, no codec types are available. This property is key-value observable.
+ */
+@property(nonatomic, readonly) NSArray<AVVideoCodecType> *availablePhotoCodecTypes;
+
+/*!
+ @property availableRawPhotoPixelFormatTypes
+ @abstract
+    An array of Bayer RAW CVPixelBufferPixelFormatTypeKey values that are currently supported by the receiver.
+
+ @discussion
+    If you wish to capture a RAW photo, you must ensure that the Bayer RAW format you want is present in the receiver's availableRawPhotoPixelFormatTypes array. If you've not yet added your receiver to an AVCaptureSession with a video source, no RAW formats are available. This property is key-value observable. RAW capture is not supported on all platforms.
+ */
+@property(nonatomic, readonly) NSArray<NSNumber *> *availableRawPhotoPixelFormatTypes;
+
+/*!
+ @property availablePhotoFileTypes
+ @abstract
+    An array of AVFileType values that are currently supported by the receiver.
+
+ @discussion
+    If you wish to capture a photo that is formatted for a particular file container, such as HEIF, you must ensure that the fileType you desire is present in the receiver's availablePhotoFileTypes array. If you've not yet added your receiver to an AVCaptureSession with a video source, no file types are available. This property is key-value observable.
+ */
+@property(nonatomic, readonly) NSArray<AVFileType> *availablePhotoFileTypes NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property availableRawPhotoFileTypes
+ @abstract
+    An array of AVFileType values that are currently supported by the receiver for RAW capture.
+
+ @discussion
+    If you wish to capture a RAW photo that is formatted for a particular file container, such as DNG, you must ensure that the fileType you desire is present in the receiver's availableRawPhotoFileTypes array. If you've not yet added your receiver to an AVCaptureSession with a video source, no file types are available. This property is key-value observable.
+ */
+@property(nonatomic, readonly) NSArray<AVFileType> *availableRawPhotoFileTypes NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @method supportedPhotoPixelFormatTypesForFileType:
+ @abstract
+    An array of pixel format type values that are currently supported by the receiver for a particular file container.
+ 
+ @param fileType
+    The AVFileType container type intended for storage of a photo.
+ @result
+    An array of CVPixelBufferPixelFormatTypeKey values supported by the receiver for the file type in question.
+ 
+ @discussion
+    If you wish to capture a photo for storage in a particular file container, such as TIFF, you must ensure that the photo pixel format type you request is valid for that file type. If no pixel format types are supported for a given fileType, an empty array is returned. If you've not yet added your receiver to an AVCaptureSession with a video source, no pixel format types are supported.
+ */
+- (NSArray<NSNumber *> *)supportedPhotoPixelFormatTypesForFileType:(AVFileType)fileType NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @method supportedPhotoCodecTypesForFileType:
+ @abstract
+    An array of AVVideoCodecKey values that are currently supported by the receiver for a particular file container.
+ 
+ @param fileType
+    The AVFileType container type intended for storage of a photo.
+ @result
+    An array of AVVideoCodecKey values supported by the receiver for the file type in question.
+ 
+ @discussion
+    If you wish to capture a photo for storage in a particular file container, such as HEIF, you must ensure that the photo codec type you request is valid for that file type. If no codec types are supported for a given fileType, an empty array is returned. If you've not yet added your receiver to an AVCaptureSession with a video source, no codec types are supported.
+ */
+- (NSArray<AVVideoCodecType> *)supportedPhotoCodecTypesForFileType:(AVFileType)fileType NS_AVAILABLE_IOS(11_0);
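+
+// Usage sketch (illustrative only, not part of the SDK header): checking whether
+// HEVC-compressed photos can be written to a HEIF container in the current
+// configuration before requesting one.
+//
+//     BOOL hevcInHEIC = [[photoOutput supportedPhotoCodecTypesForFileType:AVFileTypeHEIC]
+//         containsObject:AVVideoCodecTypeHEVC];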
+
+/*!
+ @method supportedRawPhotoPixelFormatTypesForFileType:
+ @abstract
+    An array of CVPixelBufferPixelFormatType values that are currently supported by the receiver for a particular file container.
+ 
+ @param fileType
+    The AVFileType container type intended for storage of a photo.
+ @result
+    An array of CVPixelBufferPixelFormatType values supported by the receiver for the file type in question.
+ 
+ @discussion
+    If you wish to capture a photo for storage in a particular file container, such as DNG, you must ensure that the RAW pixel format type you request is valid for that file type. If no RAW pixel format types are supported for a given fileType, an empty array is returned. If you've not yet added your receiver to an AVCaptureSession with a video source, no pixel format types are supported.
+ */
+- (NSArray<NSNumber *> *)supportedRawPhotoPixelFormatTypesForFileType:(AVFileType)fileType NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property stillImageStabilizationSupported
+ @abstract
+    Indicates whether the still image stabilization feature is supported by the receiver.
+
+ @discussion
+    This property may change as the session's -sessionPreset or source device's -activeFormat change. When still image stabilization is not supported, your capture requests always resolve stillImageStabilizationEnabled to NO. This property is key-value observable.
+ */
+@property(nonatomic, readonly, getter=isStillImageStabilizationSupported) BOOL stillImageStabilizationSupported;
+
+/*!
+ @property isStillImageStabilizationScene
+ @abstract
+    Indicates whether the current scene is dark enough to warrant use of still image stabilization.
+
+ @discussion
+    This property reports whether the current scene being previewed by the camera is dark enough to benefit from still image stabilization. You can influence this property's answers by setting the photoSettingsForSceneMonitoring property, indicating whether autoStillImageStabilization monitoring should be on or off. If you set autoStillImageStabilization to NO, isStillImageStabilizationScene always reports NO. If you set it to YES, this property returns YES or NO depending on the current scene's lighting conditions. Note that some very dark scenes do not benefit from still image stabilization, but do benefit from flash. By default, this property always returns NO unless you set photoSettingsForSceneMonitoring to a non-nil value. This property may be key-value observed.
+ */
+@property(nonatomic, readonly) BOOL isStillImageStabilizationScene;
+
+/*!
+ @property dualCameraFusionSupported
+ @abstract
+    Indicates whether the DualCamera image fusion feature is supported by the receiver.
+
+ @discussion
+    This property may change as the session's -sessionPreset or source device's -activeFormat change. When using the AVCaptureDevice with deviceType AVCaptureDeviceTypeBuiltInDualCamera, the wide-angle and telephoto camera images can be fused together to improve image quality in some configurations. When DualCamera image fusion is not supported by the current configuration, your capture requests always resolve dualCameraFusionEnabled to NO. This property is key-value observable.
+ */
+@property(nonatomic, readonly, getter=isDualCameraFusionSupported) BOOL dualCameraFusionSupported NS_AVAILABLE_IOS(10_2);
+
+/*!
+ @property dualCameraDualPhotoDeliverySupported
+ @abstract
+    Specifies whether the photo output's current configuration supports delivery of both telephoto and wide images from the DualCamera.
+
+ @discussion
+    DualCamera dual photo delivery is only supported for certain AVCaptureSession sessionPresets and AVCaptureDevice activeFormats. When switching cameras or formats this property may change. When this property changes from YES to NO, dualCameraDualPhotoDeliveryEnabled also reverts to NO. If you've previously opted in for DualCamera dual photo delivery and then change configurations, you may need to set dualCameraDualPhotoDeliveryEnabled = YES again. This property is key-value observable.
+ */
+@property(nonatomic, readonly, getter=isDualCameraDualPhotoDeliverySupported) BOOL dualCameraDualPhotoDeliverySupported NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property dualCameraDualPhotoDeliveryEnabled
+ @abstract
+    Indicates whether the photo output is configured for delivery of both the telephoto and wide images from the DualCamera.
+
+ @discussion
+    Default value is NO. This property may only be set to YES if dualCameraDualPhotoDeliverySupported is YES. DualCamera dual photo delivery requires a lengthy reconfiguration of the capture render pipeline, so if you intend to do any dual photo delivery captures, you should set this property to YES before calling -[AVCaptureSession startRunning]. See also -[AVCapturePhotoSettings dualCameraDualPhotoDeliveryEnabled].
+ */
+@property(nonatomic, getter=isDualCameraDualPhotoDeliveryEnabled) BOOL dualCameraDualPhotoDeliveryEnabled NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property cameraCalibrationDataDeliverySupported
+ @abstract
+    Specifies whether the photo output's current configuration supports delivery of AVCameraCalibrationData in the resultant AVCapturePhoto.
+
+ @discussion
+    Camera calibration data delivery (intrinsics, extrinsics, lens distortion characteristics, etc.) is only supported in certain configurations. In iOS 11, its value is only YES if dualCameraDualPhotoDeliveryEnabled is YES. This property is key-value observable.
+ */
+@property(nonatomic, readonly, getter=isCameraCalibrationDataDeliverySupported) BOOL cameraCalibrationDataDeliverySupported NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property supportedFlashModes
+ @abstract
+    An array of AVCaptureFlashMode constants for the current capture session configuration.
+
+ @discussion
+    This property supersedes AVCaptureDevice's -isFlashModeSupported:. It returns an array of AVCaptureFlashMode constants. To test whether a particular flash mode is supported, use NSArray's -containsObject: API: [photoOutput.supportedFlashModes containsObject:@(AVCaptureFlashModeAuto)]. This property is key-value observable.
+ */
+@property(nonatomic, readonly) NSArray<NSNumber *> *supportedFlashModes;
+
+/*!
+ @property isFlashScene
+ @abstract
+    Indicates whether the current scene is dark enough to warrant use of the flash.
+
+ @discussion
+    This property reports whether the current scene being previewed by the camera is dark enough to need the flash. If -supportedFlashModes only contains AVCaptureFlashModeOff, isFlashScene always reports NO. You can influence this property's answers by setting the photoSettingsForSceneMonitoring property, indicating the flashMode you wish to monitor. If you set flashMode to AVCaptureFlashModeOff, isFlashScene always reports NO. If you set it to AVCaptureFlashModeAuto or AVCaptureFlashModeOn, isFlashScene answers YES or NO based on the current scene's lighting conditions. By default, this property always returns NO unless you set photoSettingsForSceneMonitoring to a non-nil value. Note that there is some overlap in the light level ranges that benefit from still image stabilization and flash. If your photoSettingsForSceneMonitoring indicate that both still image stabilization and flash scenes should be monitored, still image stabilization takes precedence, and isFlashScene becomes YES at lower overall light levels. This property may be key-value observed.
+ */
+@property(nonatomic, readonly) BOOL isFlashScene;
+
+/*!
+ @property photoSettingsForSceneMonitoring
+ @abstract
+    Settings that govern the behavior of isFlashScene and isStillImageStabilizationScene.
+
+ @discussion
+    You can influence the return values of isFlashScene and isStillImageStabilizationScene by setting this property, indicating the flashMode and autoStillImageStabilizationEnabled values that should be considered for scene monitoring. For instance, if you set flashMode to AVCaptureFlashModeOff, isFlashScene always reports NO. If you set it to AVCaptureFlashModeAuto or AVCaptureFlashModeOn, isFlashScene answers YES or NO based on the current scene's lighting conditions. Note that there is some overlap in the light level ranges that benefit from still image stabilization and flash. If your photoSettingsForSceneMonitoring indicate that both still image stabilization and flash scenes should be monitored, still image stabilization takes precedence, and isFlashScene becomes YES at lower overall light levels. The default value for this property is nil. See isStillImageStabilizationScene and isFlashScene for further discussion.
+ */
+@property(nonatomic, copy, nullable) AVCapturePhotoSettings *photoSettingsForSceneMonitoring;
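+
+// Usage sketch (illustrative only, not part of the SDK header): opting in to
+// flash and still image stabilization scene monitoring so isFlashScene and
+// isStillImageStabilizationScene track the current lighting conditions.
+//
+//     AVCapturePhotoSettings *monitoringSettings = [AVCapturePhotoSettings photoSettings];
+//     monitoringSettings.flashMode = AVCaptureFlashModeAuto;
+//     monitoringSettings.autoStillImageStabilizationEnabled = YES;
+//     photoOutput.photoSettingsForSceneMonitoring = monitoringSettings;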
+
+/*!
+ @property highResolutionCaptureEnabled
+ @abstract
+    Indicates whether the photo render pipeline should be configured to deliver high resolution still images.
+
+ @discussion
+    Some AVCaptureDeviceFormats support outputting higher resolution stills than their streaming resolution (See AVCaptureDeviceFormat.highResolutionStillImageDimensions). Under some conditions, AVCaptureSession needs to set up the photo render pipeline differently to support high resolution still image capture. If you intend to take high resolution still images at all, you should set this property to YES before calling -[AVCaptureSession startRunning]. Once you've opted in for high resolution capture, you are free to issue photo capture requests with or without highResolutionCaptureEnabled in the AVCapturePhotoSettings. If you have not set this property to YES and call capturePhotoWithSettings:delegate: with settings.highResolutionCaptureEnabled set to YES, an NSInvalidArgumentException will be thrown.
+ */
+@property(nonatomic, getter=isHighResolutionCaptureEnabled) BOOL highResolutionCaptureEnabled;
+
+/*!
+ @property maxBracketedCapturePhotoCount
+ @abstract
+    Specifies the maximum number of photos that may be taken in a single bracket.
+
+ @discussion
+     AVCapturePhotoOutput can only satisfy a limited number of image requests in a single bracket without exhausting system resources. The maximum number of photos that may be taken in a single bracket depends on the size and format of the images being captured, and consequently may vary with AVCaptureSession -sessionPreset and AVCaptureDevice -activeFormat. Some formats do not support bracketed capture at all, and thus this property may return a value of 0. This read-only property is key-value observable. If you call -capturePhotoWithSettings:delegate: with a bracketedSettings whose count exceeds -maxBracketedCapturePhotoCount, an NSInvalidArgumentException is thrown.
+ */
+@property(nonatomic, readonly) NSUInteger maxBracketedCapturePhotoCount;
+
+/*!
+ @property lensStabilizationDuringBracketedCaptureSupported
+ @abstract
+    Indicates whether the receiver supports lens stabilization during bracketed captures.
+
+ @discussion
+    The AVCapturePhotoBracketSettings lensStabilizationEnabled property may only be set if this property returns YES. Its value may change as the session's -sessionPreset or input device's -activeFormat changes. This read-only property is key-value observable.
+ */
+@property(nonatomic, readonly, getter=isLensStabilizationDuringBracketedCaptureSupported) BOOL lensStabilizationDuringBracketedCaptureSupported;
+
+/*!
+ @property livePhotoCaptureSupported
+ @abstract
+    Indicates whether the receiver supports Live Photo capture.
+
+ @discussion
+    Live Photo capture is only supported for certain AVCaptureSession sessionPresets and AVCaptureDevice activeFormats. When switching cameras or formats this property may change. When this property changes from YES to NO, livePhotoCaptureEnabled also reverts to NO. If you've previously opted in for Live Photo capture and then change configurations, you may need to set livePhotoCaptureEnabled = YES again. 
+ */
+@property(nonatomic, readonly, getter=isLivePhotoCaptureSupported) BOOL livePhotoCaptureSupported;
+
+/*!
+ @property livePhotoCaptureEnabled
+ @abstract
+    Indicates whether the receiver is configured for Live Photo capture.
+
+ @discussion
+    Default value is NO. This property may only be set to YES if livePhotoCaptureSupported is YES. Live Photo capture requires a lengthy reconfiguration of the capture render pipeline, so if you intend to do any Live Photo captures at all, you should set livePhotoCaptureEnabled to YES before calling -[AVCaptureSession startRunning].
+ */
+@property(nonatomic, getter=isLivePhotoCaptureEnabled) BOOL livePhotoCaptureEnabled;
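+
+// Usage sketch (illustrative only, not part of the SDK header): opting in to
+// Live Photo capture before the session starts, then requesting a Live Photo.
+// Assumes `session` contains video (and, for sound, audio) inputs.
+//
+//     if (photoOutput.livePhotoCaptureSupported) {
+//         photoOutput.livePhotoCaptureEnabled = YES;
+//     }
+//     [session startRunning];
+//     AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
+//     settings.livePhotoMovieFileURL = [NSURL fileURLWithPath:
+//         [NSTemporaryDirectory() stringByAppendingPathComponent:@"livePhoto.mov"]];
+//     [photoOutput capturePhotoWithSettings:settings delegate:self];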
+
+/*!
+ @property livePhotoCaptureSuspended
+ @abstract
+    Indicates whether Live Photo capture is enabled, but currently suspended.
+
+ @discussion
+    This property allows you to cut current Live Photo movie captures short (for instance, if you suddenly need to do something that you don't want to show up in the Live Photo movie, such as take a non-Live Photo capture that makes a shutter sound). By default, livePhotoCaptureSuspended is NO. When you set livePhotoCaptureSuspended = YES, any Live Photo movie captures in progress are trimmed to the current time. Likewise, when you toggle livePhotoCaptureSuspended from YES to NO, subsequent Live Photo movie captures will not contain any samples earlier than the time you un-suspended Live Photo capture. Setting this property to YES throws an NSInvalidArgumentException if livePhotoCaptureEnabled is NO. This property may only be set while the session is running. Setting this property to YES when the session is not running will fail, resulting in livePhotoCaptureSuspended being reverted to NO.
+ */
+@property(nonatomic, getter=isLivePhotoCaptureSuspended) BOOL livePhotoCaptureSuspended;
+
+/*!
+ @property livePhotoAutoTrimmingEnabled
+ @abstract
+    Indicates whether Live Photo movies are trimmed in real time to avoid excessive movement.
+
+ @discussion
+    This property defaults to YES when livePhotoCaptureSupported is YES. Changing this property's value while your session is running will cause a lengthy reconfiguration of the session. You should set livePhotoAutoTrimmingEnabled to YES or NO before calling -[AVCaptureSession startRunning]. When set to YES, Live Photo movies are analyzed in real time and trimmed if there's excessive movement before or after the photo is taken. Nominally, Live Photos are approximately 3 seconds long. With trimming enabled, they may be shorter, depending on movement. This feature prevents common problems such as Live Photo movies containing shoe or pocket shots.
+ */
+@property(nonatomic, getter=isLivePhotoAutoTrimmingEnabled) BOOL livePhotoAutoTrimmingEnabled;
+
+/*!
+ @property availableLivePhotoVideoCodecTypes
+ @abstract
+    An array of AVVideoCodecKey values that are currently supported by the receiver for use in the movie complement of a Live Photo.
+
+ @discussion
+    Prior to iOS 11, all Live Photo movie video tracks are compressed using H.264. Beginning in iOS 11, you can select the Live Photo movie video compression format using one of the AVVideoCodecKey strings presented in this property. The system's default (preferred) video codec is always presented first in the list. If you've not yet added your receiver to an AVCaptureSession with a video source, no codecs are available. This property is key-value observable.
+ */
+@property(nonatomic, readonly) NSArray<AVVideoCodecType> *availableLivePhotoVideoCodecTypes NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @method JPEGPhotoDataRepresentationForJPEGSampleBuffer:previewPhotoSampleBuffer:
+ @abstract
+    A class method that writes a JPEG sample buffer to an NSData in the JPEG file format.
+ 
+ @param JPEGSampleBuffer
+    A CMSampleBuffer containing JPEG compressed data.
+ @param previewPhotoSampleBuffer
+    An optional CMSampleBuffer containing pixel buffer image data to be written as a thumbnail image.
+ @result
+    An NSData containing bits in the JPEG file format. May return nil if the re-packaging process fails.
+
+ @discussion
+    AVCapturePhotoOutput's deprecated -captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error: callback delivers JPEG photos to clients as CMSampleBuffers. To re-package these buffers in a data format suitable for writing to a JPEG file, you may call this class method, optionally inserting your own metadata into the JPEG CMSampleBuffer first, and optionally passing a preview image to be written to the JPEG file format as a thumbnail image.
+ */
++ (nullable NSData *)JPEGPhotoDataRepresentationForJPEGSampleBuffer:(CMSampleBufferRef)JPEGSampleBuffer previewPhotoSampleBuffer:(nullable CMSampleBufferRef)previewPhotoSampleBuffer NS_DEPRECATED_IOS(10_0, 11_0, "Use -[AVCapturePhoto fileDataRepresentation] instead.");
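+
+// Usage sketch (illustrative only, not part of the SDK header): flattening a
+// JPEG sample buffer delivered by the deprecated callback into file data.
+//
+//     NSData *jpegData = [AVCapturePhotoOutput
+//         JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer
+//                               previewPhotoSampleBuffer:previewPhotoSampleBuffer];
+//     // jpegData may be nil if the re-packaging process fails.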
+
+/*!
+ @method DNGPhotoDataRepresentationForRawSampleBuffer:previewPhotoSampleBuffer:
+ @abstract
+    A class method that writes a RAW sample buffer to an NSData containing bits in the DNG file format.
+ 
+ @param rawSampleBuffer
+    A CMSampleBuffer containing Bayer RAW data.
+ @param previewPhotoSampleBuffer
+    An optional CMSampleBuffer containing pixel buffer image data to be written as a thumbnail image.
+ @result
+    An NSData containing bits in the DNG file format. May return nil if the re-packaging process fails.
+
+ @discussion
+    AVCapturePhotoOutput's deprecated -captureOutput:didFinishProcessingRawPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error: callback delivers RAW photos to clients as CMSampleBuffers. To re-package these buffers in a data format suitable for writing to a DNG file, you may call this class method, optionally inserting your own metadata into the RAW CMSampleBuffer first, and optionally passing a preview image to be written to the DNG file format as a thumbnail image. Only RAW images from Apple built-in cameras are supported.
+ */
++ (nullable NSData *)DNGPhotoDataRepresentationForRawSampleBuffer:(CMSampleBufferRef)rawSampleBuffer previewPhotoSampleBuffer:(nullable CMSampleBufferRef)previewPhotoSampleBuffer NS_DEPRECATED_IOS(10_0, 11_0, "Use -[AVCapturePhoto fileDataRepresentation] instead.");
+
+@end
+
+
+@class AVCapturePhoto;
+
+@interface AVCapturePhotoOutput (AVCapturePhotoOutputDepthDataDeliverySupport)
+
+/*!
+ @property depthDataDeliverySupported
+ @abstract
+    A BOOL value specifying whether depth data delivery is supported.
+
+ @discussion
+    Some cameras and configurations support the delivery of depth data (e.g. disparity maps) along with the photo. This property returns YES if the session's current configuration allows photos to be captured with depth data, from which depth-related filters may be applied. When switching cameras or formats this property may change. When this property changes from YES to NO, depthDataDeliveryEnabled also reverts to NO. If you've previously opted in for depth data delivery and then change configurations, you may need to set depthDataDeliveryEnabled = YES again. This property is key-value observable.
+ */
+@property(nonatomic, readonly, getter=isDepthDataDeliverySupported) BOOL depthDataDeliverySupported NS_AVAILABLE_IOS(11_0);
+
+/*!
+ @property depthDataDeliveryEnabled
+ @abstract
+    A BOOL specifying whether the photo render pipeline is prepared for depth data delivery.
+
+ @discussion
+    Default is NO. Set to YES if you wish depth data to be delivered with your AVCapturePhotos. This property may only be set to YES if depthDataDeliverySupported is YES. Enabling depth data delivery requires a lengthy reconfiguration of the capture render pipeline, so if you intend to capture depth data, you should set this property to YES before calling -[AVCaptureSession startRunning].
+ */
+@property(nonatomic, getter=isDepthDataDeliveryEnabled) BOOL depthDataDeliveryEnabled NS_AVAILABLE_IOS(11_0);
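+
+// Usage sketch (illustrative only, not part of the SDK header): opting in to
+// depth data delivery before the session runs, then requesting a photo with
+// depth. Assumes a depth-capable source such as the DualCamera.
+//
+//     if (photoOutput.depthDataDeliverySupported) {
+//         photoOutput.depthDataDeliveryEnabled = YES;
+//     }
+//     [session startRunning];
+//     AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
+//     settings.depthDataDeliveryEnabled = YES;
+//     [photoOutput capturePhotoWithSettings:settings delegate:self];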
+
+@end
+
+
+/*!
+ @protocol AVCapturePhotoCaptureDelegate
+ @abstract
+    A set of delegate callbacks to be implemented by a client who calls AVCapturePhotoOutput's -capturePhotoWithSettings:delegate:.
+ 
+ @discussion
+    AVCapturePhotoOutput invokes the AVCapturePhotoCaptureDelegate callbacks on a common dispatch queue — not necessarily the main queue. While the -captureOutput:willBeginCaptureForResolvedSettings: callback always comes first and the -captureOutput:didFinishCaptureForResolvedSettings: callback always comes last, none of the other callbacks can be assumed to come in any particular order. The AVCaptureResolvedPhotoSettings instance passed to the client with each callback has the same uniqueID as the AVCapturePhotoSettings instance passed in -capturePhotoWithSettings:delegate:. All callbacks are marked optional, but depending on the features you've specified in your AVCapturePhotoSettings, some callbacks become mandatory and are validated in -capturePhotoWithSettings:delegate:. If your delegate does not implement the mandatory callbacks, an NSInvalidArgumentException is thrown.
+
+    - If you initialize your photo settings with a format dictionary, or use one of the default constructors (that is, if you're not requesting a RAW-only capture), your delegate must respond to either -captureOutput:didFinishProcessingPhoto:error: or the deprecated -captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:. If your delegate responds to both of these callbacks, only the undeprecated variant will be called.
+    - If you initialize your photo settings with a rawPhotoPixelFormatType, your delegate must respond to either -captureOutput:didFinishProcessingPhoto:error: or the deprecated -captureOutput:didFinishProcessingRawPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:. If your delegate responds to both of these callbacks, only the undeprecated variant will be called.
+    - If you set livePhotoMovieFileURL to non-nil, your delegate must respond to -captureOutput:didFinishProcessingLivePhotoToMovieFileAtURL:duration:photoDisplayTime:resolvedSettings:error:.
+ 
+    In the event of an error, all expected callbacks are fired with an appropriate error.
+ */
+NS_AVAILABLE_IOS(10_0) __TVOS_PROHIBITED
+@protocol AVCapturePhotoCaptureDelegate <NSObject>
+
+@optional
+/*!
+ @method captureOutput:willBeginCaptureForResolvedSettings:
+ @abstract
+    A callback fired as soon as the capture settings have been resolved.
+ 
+ @param output
+    The calling instance of AVCapturePhotoOutput.
+ @param resolvedSettings
+    An instance of AVCaptureResolvedPhotoSettings indicating which capture features have been selected.
+
+ @discussion
+    This callback is always delivered first for a particular capture request. It is delivered as soon as possible after you call -capturePhotoWithSettings:delegate:, so you can know what to expect in the remainder of your callbacks.
+ */
+- (void)captureOutput:(AVCapturePhotoOutput *)output willBeginCaptureForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings;
+
+/*!
+ @method captureOutput:willCapturePhotoForResolvedSettings:
+ @abstract
+    A callback fired just as the photo is being taken.
+ 
+ @param output
+    The calling instance of AVCapturePhotoOutput.
+ @param resolvedSettings
+    An instance of AVCaptureResolvedPhotoSettings indicating which capture features have been selected.
+ 
+ @discussion
+    The timing of this callback is analogous to AVCaptureStillImageOutput's capturingStillImage property changing from NO to YES. The callback is delivered right after the shutter sound is heard (note that shutter sounds are suppressed when Live Photos are being captured).
+ */
+- (void)captureOutput:(AVCapturePhotoOutput *)output willCapturePhotoForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings;
+
+/*!
+ @method captureOutput:didCapturePhotoForResolvedSettings:
+ @abstract
+    A callback fired just after the photo is taken.
+ 
+ @param output
+    The calling instance of AVCapturePhotoOutput.
+ @param resolvedSettings
+    An instance of AVCaptureResolvedPhotoSettings indicating which capture features have been selected.
+ 
+ @discussion
+    The timing of this callback is analogous to AVCaptureStillImageOutput's capturingStillImage property changing from YES to NO.
+ */
+- (void)captureOutput:(AVCapturePhotoOutput *)output didCapturePhotoForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings;
+
+/*!
+ @method captureOutput:didFinishProcessingPhoto:error:
+ @abstract
+    A callback fired when photos are ready to be delivered to you (RAW or processed).
+ 
+ @param output
+    The calling instance of AVCapturePhotoOutput.
+ @param photo
+    An instance of AVCapturePhoto.
+ @param error
+    An error indicating what went wrong. If the photo was processed successfully, nil is returned.
+ 
+ @discussion
+    This callback fires resolvedSettings.expectedPhotoCount times for a given capture request. Note that the photo parameter is always non-nil, even if an error is returned. The delivered AVCapturePhoto's rawPhoto property can be queried to know if it's a RAW image or processed image.
+ */
+- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhoto:(AVCapturePhoto *)photo error:(nullable NSError *)error NS_AVAILABLE_IOS(11_0);
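+
+// Usage sketch (illustrative only, not part of the SDK header): a minimal
+// implementation that flattens the finished photo to data for writing. The
+// destination `photoPath` is a hypothetical writable location.
+//
+//     - (void)captureOutput:(AVCapturePhotoOutput *)output
+//         didFinishProcessingPhoto:(AVCapturePhoto *)photo
+//                            error:(NSError *)error {
+//         if (error) { NSLog(@"Capture failed: %@", error); return; }
+//         NSData *data = [photo fileDataRepresentation];
+//         [data writeToFile:photoPath atomically:YES];
+//     }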
+
+/*!
+ @method captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:
+ @abstract
+    A callback fired when the primary processed photo or photos are done.
+ 
+ @param output
+    The calling instance of AVCapturePhotoOutput.
+ @param photoSampleBuffer
+    A CMSampleBuffer containing an uncompressed pixel buffer or compressed data, along with timing information and metadata. May be nil if there was an error.
+ @param previewPhotoSampleBuffer
+    An optional CMSampleBuffer containing an uncompressed, down-scaled preview pixel buffer. Note that the preview sample buffer contains no metadata. Refer to the photoSampleBuffer for metadata (e.g., the orientation). May be nil.
+ @param resolvedSettings
+    An instance of AVCaptureResolvedPhotoSettings indicating which capture features have been selected.
+ @param bracketSettings
+    If this image is being delivered as part of a bracketed capture, the bracketSettings corresponding to this image. Otherwise nil.
+ @param error
+    An error indicating what went wrong if photoSampleBuffer is nil.
+ 
+ @discussion
+    If you've requested a single processed image (uncompressed or compressed) capture, the photo is delivered here. If you've requested a bracketed capture, this callback is fired bracketedSettings.count times (once for each photo in the bracket).
+ */
+- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhotoSampleBuffer:(nullable CMSampleBufferRef)photoSampleBuffer previewPhotoSampleBuffer:(nullable CMSampleBufferRef)previewPhotoSampleBuffer resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings bracketSettings:(nullable AVCaptureBracketedStillImageSettings *)bracketSettings error:(nullable NSError *)error NS_DEPRECATED_IOS(10_0, 11_0, "Use -captureOutput:didFinishProcessingPhoto:error: instead.");
+
+/*!
+ @method captureOutput:didFinishProcessingRawPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:
+ @abstract
+    A callback fired when the RAW photo or photos are done.
+ 
+ @param output
+    The calling instance of AVCapturePhotoOutput.
+ @param rawSampleBuffer
+    A CMSampleBuffer containing Bayer RAW pixel data, along with timing information and metadata. May be nil if there was an error.
+ @param previewPhotoSampleBuffer
+    An optional CMSampleBuffer containing an uncompressed, down-scaled preview pixel buffer. Note that the preview sample buffer contains no metadata. Refer to the rawSampleBuffer for metadata (e.g., the orientation). May be nil.
+ @param resolvedSettings
+    An instance of AVCaptureResolvedPhotoSettings indicating which capture features have been selected.
+ @param bracketSettings
+    If this image is being delivered as part of a bracketed capture, the bracketSettings corresponding to this image. Otherwise nil.
+ @param error
+    An error indicating what went wrong if rawSampleBuffer is nil.
+ 
+ @discussion
+    Single RAW image and bracketed RAW photos are delivered here. If you've requested a RAW bracketed capture, this callback is fired bracketedSettings.count times (once for each photo in the bracket).
+ */
+- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingRawPhotoSampleBuffer:(nullable CMSampleBufferRef)rawSampleBuffer previewPhotoSampleBuffer:(nullable CMSampleBufferRef)previewPhotoSampleBuffer resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings bracketSettings:(nullable AVCaptureBracketedStillImageSettings *)bracketSettings error:(nullable NSError *)error NS_DEPRECATED_IOS(10_0, 11_0, "Use -captureOutput:didFinishProcessingPhoto:error: instead.");
+
+/*!
+ @method captureOutput:didFinishRecordingLivePhotoMovieForEventualFileAtURL:resolvedSettings:
+ @abstract
+    A callback fired when the Live Photo movie has captured all its media data, though all media has not yet been written to file.
+ 
+ @param output
+    The calling instance of AVCapturePhotoOutput.
+ @param outputFileURL
+    The URL to which the movie file will be written. This URL is equal to your AVCapturePhotoSettings.livePhotoMovieURL.
+ @param resolvedSettings
+    An instance of AVCaptureResolvedPhotoSettings indicating which capture features have been selected.
+ 
+ @discussion
+    When this callback fires, no new media is being written to the file. If you are displaying a "Live" badge, this is an appropriate time to dismiss it. The movie file itself is not done being written until the -captureOutput:didFinishProcessingLivePhotoToMovieFileAtURL:duration:photoDisplayTime:resolvedSettings:error: callback fires.
+ */
+- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishRecordingLivePhotoMovieForEventualFileAtURL:(NSURL *)outputFileURL resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings;
+
+/*!
+ @method captureOutput:didFinishProcessingLivePhotoToMovieFileAtURL:duration:photoDisplayTime:resolvedSettings:error:
+ @abstract
+    A callback fired when the Live Photo movie is finished being written to disk.
+ 
+ @param output
+    The calling instance of AVCapturePhotoOutput.
+ @param outputFileURL
+    The URL where the movie file resides. This URL is equal to your AVCapturePhotoSettings.livePhotoMovieURL.
+ @param duration
+    A CMTime indicating the duration of the movie file.
+ @param photoDisplayTime
+    A CMTime indicating the time in the movie at which the still photo should be displayed.
+ @param resolvedSettings
+    An instance of AVCaptureResolvedPhotoSettings indicating which capture features have been selected.
+ @param error
+    An error indicating what went wrong if the outputFileURL is damaged.
+ 
+ @discussion
+    When this callback fires, the movie on disk is fully finished and ready for consumption.
+ */
+- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingLivePhotoToMovieFileAtURL:(NSURL *)outputFileURL duration:(CMTime)duration photoDisplayTime:(CMTime)photoDisplayTime resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings error:(nullable NSError *)error;
+
+/*!
+ @method captureOutput:didFinishCaptureForResolvedSettings:error:
+ @abstract
+    A callback fired when the photo capture is completed and no more callbacks will be fired.
+ 
+ @param output
+    The calling instance of AVCapturePhotoOutput.
+ @param resolvedSettings
+    An instance of AVCaptureResolvedPhotoSettings indicating which capture features were selected.
+ @param error
+    An error indicating whether the capture was unsuccessful. Nil if there were no problems.
+ 
+ @discussion
+    This callback always fires last and when it does, you may clean up any state relating to this photo capture.
+ */
+- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishCaptureForResolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings error:(nullable NSError *)error;
+
+@end
+
+
+#pragma mark - AVCapturePhotoSettings
+
+@class AVCapturePhotoSettingsInternal;
+
+/*!
+ @class AVCapturePhotoSettings
+ @abstract
+    A mutable settings object encapsulating all the desired properties of a photo capture.
+ 
+ @discussion
+    To take a picture, a client instantiates and configures an AVCapturePhotoSettings object, then calls AVCapturePhotoOutput's -capturePhotoWithSettings:delegate:, passing the settings and a delegate to be informed when events relating to the photo capture occur. Since AVCapturePhotoSettings has no reference to the AVCapturePhotoOutput instance with which it will be used, minimal validation occurs while you configure an AVCapturePhotoSettings instance. The bulk of the validation is executed when you call AVCapturePhotoOutput's -capturePhotoWithSettings:delegate:.
+ */
+NS_CLASS_AVAILABLE_IOS(10_0) __TVOS_PROHIBITED
+@interface AVCapturePhotoSettings : NSObject <NSCopying>
+{
+@private
+    AVCapturePhotoSettingsInternal *_internal;
+}
+
+/*!
+ @method photoSettings
+ @abstract
+    Creates a default instance of AVCapturePhotoSettings.
+ 
+ @result
+    An instance of AVCapturePhotoSettings.
+ 
+ @discussion
+    A default AVCapturePhotoSettings object has a format of AVVideoCodecTypeJPEG, a fileType of AVFileTypeJPEG, and autoStillImageStabilizationEnabled set to YES.
+ */
++ (instancetype)photoSettings;
+
+/*!
+ @method photoSettingsWithFormat:
+ @abstract
+    Creates an instance of AVCapturePhotoSettings with a user-specified output format.
+ 
+ @param format
+    A dictionary of Core Video pixel buffer attributes or AVVideoSettings, analogous to AVCaptureStillImageOutput's outputSettings property.
+ @result
+    An instance of AVCapturePhotoSettings.
+ 
+ @discussion
+    If you wish an uncompressed format, your dictionary must contain kCVPixelBufferPixelFormatTypeKey, and the format specified must be present in AVCapturePhotoOutput's -availablePhotoPixelFormatTypes array. kCVPixelBufferPixelFormatTypeKey is the only supported key when expressing uncompressed output. If you wish a compressed format, your dictionary must contain AVVideoCodecKey and the codec specified must be present in AVCapturePhotoOutput's -availablePhotoCodecTypes array.