AVFoundation iOS xcode14.0 beta2


# AVFoundation.framework https://github.com/xamarin/xamarin-macios/pull/16408

diff -ruN /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetExportSession.h /Applications/Xcode_14.0.0-beta2.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetExportSession.h
--- /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetExportSession.h	2022-05-31 14:52:19.000000000 -0400
+++ /Applications/Xcode_14.0.0-beta2.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVAssetExportSession.h	2022-06-17 14:30:11.000000000 -0400
@@ -321,7 +321,7 @@
 	@abstract 					Starts the asynchronous execution of estimating the output file length of the export based on the asset, preset, and timeRange associated with the export session.
 	@discussion 				If timeRange is not set on the export session, timeRange will be assumed to be the full time range of the asset.
 	@param						handler
-								A block called with the estimated output file length in bytes, if it can be accurately determined; 0 otherwise.  The error parameter will be non-nil if an error occurs.
+								A block called with the estimated output file length in bytes, if it can be determined; 0 otherwise.  The error parameter will be non-nil if an error occurs.
  */
 - (void)estimateOutputFileLengthWithCompletionHandler:(void (^)(int64_t estimatedOutputFileLength, NSError * _Nullable error ))handler API_AVAILABLE(macos(10.15), ios(13.0), tvos(13.0)) API_UNAVAILABLE(watchos) NS_SWIFT_ASYNC_NAME(getter:estimatedOutputFileLengthInBytes());
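
For context, here is a minimal Objective-C sketch (not from the PR) of the API this doc-comment change touches; `sourceURL` is a placeholder for any local media file, and beta 2 only softens "accurately determined" to "determined":

```objc
#import <AVFoundation/AVFoundation.h>

static void EstimateExportSize(NSURL *sourceURL)
{
    AVAsset *asset = [AVAsset assetWithURL:sourceURL];
    AVAssetExportSession *session =
        [[AVAssetExportSession alloc] initWithAsset:asset
                                         presetName:AVAssetExportPresetHighestQuality];
    [session estimateOutputFileLengthWithCompletionHandler:^(int64_t estimatedOutputFileLength,
                                                             NSError * _Nullable error) {
        if (error != nil) {
            NSLog(@"Estimate failed: %@", error);
            return;
        }
        // Per the header: 0 means the length could not be determined.
        NSLog(@"Estimated output length: %lld bytes", estimatedOutputFileLength);
    }];
}
```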
 
diff -ruN /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDevice.h /Applications/Xcode_14.0.0-beta2.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDevice.h
--- /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDevice.h	2022-05-31 15:02:34.000000000 -0400
+++ /Applications/Xcode_14.0.0-beta2.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureDevice.h	2022-06-17 11:26:10.000000000 -0400
@@ -522,7 +522,7 @@
 
 /*!
  @constant AVCaptureDeviceTypeBuiltInLiDARDepthCamera
-    A device that consists of two cameras, one YUV and one LiDAR. The LiDAR camera provides high quality, high accuracy depth information by measuring the round trip of an artifical light signal emitted by a laser. The depth is synchronized and perspective corrected to frames produced by the paired YUV camera. While the resolution of the depth data and YUV frames may differ, their field of view and aspect ratio always match. Note that devices of this type may only be discovered using an AVCaptureDeviceDiscoverySession or -[AVCaptureDevice defaultDeviceWithDeviceType:mediaType:position:].
+    A device that consists of two cameras, one YUV and one LiDAR. The LiDAR camera provides high quality, high accuracy depth information by measuring the round trip of an artificial light signal emitted by a laser. The depth is synchronized and perspective corrected to frames produced by the paired YUV camera. While the resolution of the depth data and YUV frames may differ, their field of view and aspect ratio always match. Note that devices of this type may only be discovered using an AVCaptureDeviceDiscoverySession or -[AVCaptureDevice defaultDeviceWithDeviceType:mediaType:position:].
  */
 AVF_EXPORT AVCaptureDeviceType const AVCaptureDeviceTypeBuiltInLiDARDepthCamera API_AVAILABLE(ios(15.4), macCatalyst(15.4)) API_UNAVAILABLE(macos, tvos) API_UNAVAILABLE(watchos);
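
The constant's doc comment notes the device can only be found via a discovery session or the default-device API; a short sketch of the former, assuming it runs on LiDAR-equipped hardware:

```objc
#import <AVFoundation/AVFoundation.h>

static AVCaptureDevice * _Nullable FindLiDARDepthCamera(void)
{
    AVCaptureDeviceDiscoverySession *discovery =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInLiDARDepthCamera ]
                                                               mediaType:AVMediaTypeVideo
                                                                position:AVCaptureDevicePositionBack];
    return discovery.devices.firstObject; // nil on hardware without a LiDAR camera
}
```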
 
diff -ruN /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCapturePhotoOutput.h /Applications/Xcode_14.0.0-beta2.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCapturePhotoOutput.h
--- /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCapturePhotoOutput.h	2022-05-31 14:49:51.000000000 -0400
+++ /Applications/Xcode_14.0.0-beta2.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCapturePhotoOutput.h	2022-06-17 14:49:08.000000000 -0400
@@ -416,7 +416,7 @@
  @discussion
     This property supersedes AVCaptureDevice's isFlashModeSupported: It returns an array of AVCaptureFlashMode constants. To test whether a particular flash mode is supported, use NSArray's containsObject API: [photoOutput.supportedFlashModes containsObject:@(AVCaptureFlashModeAuto)]. This property is key-value observable.
  */
-@property(nonatomic, readonly) NSArray<NSNumber *> *supportedFlashModes API_AVAILABLE(macos(13.0));
+@property(nonatomic, readonly) NSArray<NSNumber *> *supportedFlashModes API_AVAILABLE(macos(13.0), macCatalyst(14.0));
 
 /*!
  @property autoRedEyeReductionSupported
@@ -1011,7 +1011,7 @@
  @discussion
     flashMode takes the place of the deprecated AVCaptureDevice -flashMode API. Setting AVCaptureDevice.flashMode has no effect on AVCapturePhotoOutput, which only pays attention to the flashMode specified in your AVCapturePhotoSettings. The default value is AVCaptureFlashModeOff. Flash modes are defined in AVCaptureDevice.h. If you specify a flashMode of AVCaptureFlashModeOn, it wins over autoStillImageStabilizationEnabled=YES. When the device becomes very hot, the flash becomes temporarily unavailable until the device cools down (see AVCaptureDevice's -flashAvailable). While the flash is unavailable, AVCapturePhotoOutput's -supportedFlashModes property still reports AVCaptureFlashModeOn and AVCaptureFlashModeAuto as being available, thus allowing you to specify a flashMode of AVCaptureModeOn. You should always check the AVCaptureResolvedPhotoSettings provided to you in the AVCapturePhotoCaptureDelegate callbacks, as the resolved flashEnabled property will tell you definitively if the flash is being used.
  */
-@property(nonatomic) AVCaptureFlashMode flashMode API_AVAILABLE(macos(13.0));
+@property(nonatomic) AVCaptureFlashMode flashMode API_AVAILABLE(macos(13.0), macCatalyst(14.0));
 
 /*!
  @property autoRedEyeReductionEnabled
@@ -1095,7 +1095,7 @@
 
     Starting in iOS 14.5 if you disable geometric distortion correction, the high resolution photo emitted by AVCapturePhotoOutput may be is smaller depending on the format.
  */
-@property(nonatomic, getter=isHighResolutionPhotoEnabled) BOOL highResolutionPhotoEnabled API_AVAILABLE(macos(13.0));
+@property(nonatomic, getter=isHighResolutionPhotoEnabled) BOOL highResolutionPhotoEnabled API_AVAILABLE(macos(13.0), macCatalyst(14.0));
 
 /*!
  @property depthDataDeliveryEnabled
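
These three hunks only add Mac Catalyst availability; the usage pattern the doc comments describe is unchanged. A sketch, assuming `photoOutput` is already attached to a configured AVCaptureSession:

```objc
#import <AVFoundation/AVFoundation.h>

static AVCapturePhotoSettings *MakePhotoSettings(AVCapturePhotoOutput *photoOutput)
{
    AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
    // Test membership as the header suggests rather than assuming flash hardware.
    if ([photoOutput.supportedFlashModes containsObject:@(AVCaptureFlashModeAuto)]) {
        settings.flashMode = AVCaptureFlashModeAuto;
    }
    // Per-photo flag; the output's highResolutionCaptureEnabled must also be
    // YES before capture for this to take effect.
    settings.highResolutionPhotoEnabled = YES;
    return settings;
}
```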
diff -ruN /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoDataOutput.h /Applications/Xcode_14.0.0-beta2.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoDataOutput.h
--- /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoDataOutput.h	2022-06-03 18:07:15.000000000 -0400
+++ /Applications/Xcode_14.0.0-beta2.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVCaptureVideoDataOutput.h	2022-06-17 14:51:22.000000000 -0400
@@ -155,7 +155,7 @@
     Indicates the supported video pixel formats that can be specified in videoSettings.
  
  @discussion
-    The value of this property is an NSArray of NSNumbers that can be used as values for the kCVPixelBufferPixelFormatTypeKey in the receiver's videoSettings property. The first format in the returned list is the most efficient output format.
+    The value of this property is an NSArray of NSNumbers that can be used as values for the kCVPixelBufferPixelFormatTypeKey in the receiver's videoSettings property. The formats are listed in an unspecified order. This list can may change if the activeFormat of the AVCaptureDevice connected to the receiver changes.
  */
 @property(nonatomic, readonly) NSArray<NSNumber *> *availableVideoCVPixelFormatTypes API_AVAILABLE(ios(5.0), macCatalyst(14.0)) API_UNAVAILABLE(tvos);
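
The beta 2 wording drops the guarantee that the first array element is the most efficient format, so callers should test for the format they want rather than indexing into the array. A sketch (the chosen pixel format is just an example):

```objc
#import <AVFoundation/AVFoundation.h>
#import <CoreVideo/CVPixelBuffer.h>

static void ConfigurePixelFormat(AVCaptureVideoDataOutput *videoOutput)
{
    NSNumber *preferred = @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
    if ([videoOutput.availableVideoCVPixelFormatTypes containsObject:preferred]) {
        videoOutput.videoSettings =
            @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : preferred };
    }
}
```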
 
diff -ruN /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.apinotes /Applications/Xcode_14.0.0-beta2.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.apinotes
--- /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.apinotes	2022-06-02 20:59:56.000000000 -0400
+++ /Applications/Xcode_14.0.0-beta2.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVFoundation.apinotes	2022-06-16 01:22:33.000000000 -0400
@@ -432,6 +432,11 @@
   - Selector: 'removeMediaDataCollector:'
     SwiftName: remove(_:)
     MethodKind: Instance
+- Name: AVPlayerLayer
+  Methods:
+  - Selector: 'copyDisplayedPixelBuffer'
+    SwiftName: currentlyDisplayedPixelBuffer()
+    MethodKind: Instance
 - Name: AVPlaybackCoordinator
   Methods:
   - Selector: 'beginSuspension:'
diff -ruN /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVMovie.h /Applications/Xcode_14.0.0-beta2.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVMovie.h
--- /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVMovie.h	2022-06-03 18:07:13.000000000 -0400
+++ /Applications/Xcode_14.0.0-beta2.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVMovie.h	2022-06-17 11:26:09.000000000 -0400
@@ -565,7 +565,7 @@
 	@param			mediaType
 					The media type of the new track (e.g. AVMediaTypeVideo for a video track).
 	@param			track
-					If you wish to transfer settings from an existing track, including track userdata and metadata, width, height, preferred volume, etc., pass a reference to an AVAssetTrack representing that track. Otherwise pass nil.
+					If you wish to transfer settings from an existing track, including width, height, preferred volume, etc., pass a reference to an AVAssetTrack representing that track. Otherwise pass nil.
 	@param			options
 					An NSDictionary object that contains keys for specifying options for the initialization of the new AVMutableMovieTrack object. Pass nil for default initialization behavior.
 	@result			An AVMutableMovieTrack object
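
Beta 2 only removes the claim that track user data and metadata are copied; the call itself is unchanged. A sketch, with the source track supplied by the caller (or nil for default settings):

```objc
#import <AVFoundation/AVFoundation.h>

// Adds a video track, copying width, height, preferred volume, etc.
// from sourceTrack when one is provided.
static AVMutableMovieTrack * _Nullable AddVideoTrack(AVMutableMovie *movie,
                                                     AVAssetTrack * _Nullable sourceTrack)
{
    return [movie addMutableTrackWithMediaType:AVMediaTypeVideo
                         copySettingsFromTrack:sourceTrack
                                       options:nil];
}
```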
diff -ruN /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerLayer.h /Applications/Xcode_14.0.0-beta2.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerLayer.h
--- /Applications/Xcode_14.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerLayer.h	2022-05-31 14:52:21.000000000 -0400
+++ /Applications/Xcode_14.0.0-beta2.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFoundation.framework/Headers/AVPlayerLayer.h	2022-06-17 14:28:52.000000000 -0400
@@ -4,7 +4,7 @@
 
 	Framework:  AVFoundation
  
-	Copyright 2010-2017 Apple Inc. All rights reserved.
+	Copyright 2010-2022 Apple Inc. All rights reserved.
 
 */
 
@@ -38,6 +38,7 @@
 
 #import <AVFoundation/AVBase.h>
 #import <AVFoundation/AVAnimation.h>
+#import <CoreVideo/CVPixelBuffer.h>
 
 #if __has_include(<QuartzCore/CoreAnimation.h>)
 #import <QuartzCore/CoreAnimation.h>
@@ -74,7 +75,7 @@
  					and AVLayerVideoGravityResize. AVLayerVideoGravityResizeAspect is default. 
 					See <AVFoundation/AVAnimation.h> for a description of these options.
  */
-@property(copy) AVLayerVideoGravity videoGravity;
+@property (copy) AVLayerVideoGravity videoGravity;
 
 /*!
 	 @property		readyForDisplay
@@ -82,8 +83,9 @@
 	 @discusssion	Use this property as an indicator of when best to show or animate-in an AVPlayerLayer into view. 
 					An AVPlayerLayer may be displayed, or made visible, while this property is NO, however the layer will not have any user-visible content until the value becomes YES. Note that if an animation is added to an AVPlayerLayer before it becomes readyForDisplay the video image displayed inside might not animate with the receiver.
 					This property remains NO for an AVPlayer currentItem whose AVAsset contains no enabled video tracks.
+					This property is key-value observable.
  */
-@property(nonatomic, readonly, getter=isReadyForDisplay) BOOL readyForDisplay;
+@property (nonatomic, readonly, getter=isReadyForDisplay) BOOL readyForDisplay;
 
 /*!
 	@property		videoRect
@@ -95,9 +97,19 @@
 	@property		pixelBufferAttributes
 	@abstract		The client requirements for the visual output displayed in AVPlayerLayer during playback.  	
 	@discussion		Pixel buffer attribute keys are defined in <CoreVideo/CVPixelBuffer.h>
+					This property is key-value observable.
  */
 @property (nonatomic, copy, nullable) NSDictionary<NSString *, id> *pixelBufferAttributes API_AVAILABLE(macos(10.11), ios(9.0), tvos(9.0)) API_UNAVAILABLE(watchos);
 
+/*!
+ 	@method			copyDisplayedPixelBuffer
+ 	@abstract		Returns a retained reference to the pixel buffer currently displayed in this AVPlayerLayer. This will return NULL if the displayed pixel buffer is protected, no image is currently being displayed, if the current player's rate is non-zero or if the image is unavailable.
+ 	@discussion		This will only return the current image while the media is paused, otherwise this will return nil. Clients must release the pixel buffer after use.
+ 			
+ 					Do not write to the returned CVPixelBuffer's attachments or pixel data.
+ */
+- (nullable CVPixelBufferRef)copyDisplayedPixelBuffer CF_RETURNS_RETAINED API_AVAILABLE(macos(13.0), ios(16.0), tvos(16.0)) API_UNAVAILABLE(watchos);
+
 @end
 
 NS_ASSUME_NONNULL_END
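
A sketch of the new method under the constraints its header states (paused player, caller owns the copy); this is the same selector the apinotes hunk above surfaces to Swift as `currentlyDisplayedPixelBuffer()`:

```objc
#import <AVFoundation/AVFoundation.h>
#import <CoreVideo/CVPixelBuffer.h>

// Returns a retained copy of the frame the layer is showing, or NULL.
static CVPixelBufferRef CopyPausedFrame(AVPlayerLayer *playerLayer)
{
    if (playerLayer.player.rate != 0) {
        return NULL; // the method itself returns NULL at a non-zero rate
    }
    // Caller must CVPixelBufferRelease() the result; treat the pixel data
    // and attachments as read-only, as the header requires.
    return [playerLayer copyDisplayedPixelBuffer];
}
```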