AVFAudio iOS xcode13.0 beta2

# AVFAudio.framework https://github.com/xamarin/xamarin-macios/pull/12550

diff -ruN /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioRoutingArbiter.h /Applications/Xcode_13.0.0-beta2.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioRoutingArbiter.h
--- /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioRoutingArbiter.h	1969-12-31 19:00:00.000000000 -0500
+++ /Applications/Xcode_13.0.0-beta2.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioRoutingArbiter.h	2021-06-22 17:34:12.000000000 -0400
@@ -0,0 +1,112 @@
+#if (defined(USE_AVFAUDIO_PUBLIC_HEADERS) && USE_AVFAUDIO_PUBLIC_HEADERS) || !__has_include(<AudioSession/AVAudioRoutingArbiter.h>)
+/*!
+    @file       AVAudioRoutingArbiter.h
+    @framework  AudioSession.framework
+    @copyright  (c) 2020 by Apple Inc. All rights reserved.
+    @abstract   API's to allow macOS apps to participate in audio routing arbitration.
+
+    @discussion
+        Audio routing arbitration is a feature of Apple operating systems for automatically connecting wireless headphones
+        to the best audio source device in a multi-product environment. The supported Apple products are iOS devices,
+        iPadOS devices, watchOS devices, and Mac computers. The best audio source is the device from which the user is
+        most likely to want  audio playing. The operating system tries to determine that device intelligently. For example, if a
+        user was playing a movie with their iPad, then locked the iPad, and started using their iPhone to play music, the iPad
+        would stop being the source audio device and the iPhone would become the source audio. This feature is limited
+        to select Apple and Beats wireless headsets.
+
+        The intention of the API is to give a hint to the operating system that the app is about to start audio and
+        to provide context about what type of audio is to be used. To be a participant of the routing arbitration the app has
+        to begin the arbitration. Once arbitration completes, the application is free to start running audio I/O.  The result of
+        arbitration may change the system default input and/or output audio devices, so the use of this API will affect any applications
+        that are impacted when system default devices change. When the app is no longer interested in using audio I/O it should
+        leave audio routing arbitration.
+
+        Applications that use the AVAudioRoutingArbiter API are encouraged to also use MPRemoteCommandCenter and
+        MPNowPlayingInfoCenter classes provided by Media Player framework. The use of those classes provides the operating system
+        additional context about the content being played and allow the application to respond to remote command events.
+*/
+#ifndef AudioSession_AVAudioRoutingArbiter_h
+#define AudioSession_AVAudioRoutingArbiter_h
+
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+/*!
+    @typedef    AVAudioRoutingArbitrationCategory
+    @abstract   The category describes the general type of audio that the app plans to use.
+    @discussion
+        Provides context to the operating system about the type of audio an application intends to use. The system uses this information
+        when arbitrating between Apple products that want to take ownership of Bluetooth audio routes.
+    @constant   AVAudioRoutingArbitrationCategoryPlayback
+        Used for Audio playback.
+    @constant   AVAudioRoutingArbitrationCategoryPlayAndRecord
+        Used for recording and playing back audio.
+    @constant   AVAudioRoutingArbitrationCategoryPlayAndRecordVoice
+        Appropriate for Voice over IP(VoIP) applications.
+*/
+typedef NS_ENUM(NSInteger, AVAudioRoutingArbitrationCategory) {
+    AVAudioRoutingArbitrationCategoryPlayback           = 0,
+    AVAudioRoutingArbitrationCategoryPlayAndRecord      = 1,
+    AVAudioRoutingArbitrationCategoryPlayAndRecordVoice = 2,
+} NS_SWIFT_NAME(AVAudioRoutingArbiter.Category);
+
+/*!
+    @class      AVAudioRoutingArbiter
+    @brief      The interface to participate in audio routing arbitration.
+*/
+API_AVAILABLE(macos(11.0)) API_UNAVAILABLE(ios, watchos, tvos)
+@interface AVAudioRoutingArbiter : NSObject
+
+- (instancetype)init NS_UNAVAILABLE;
++ (instancetype)new NS_UNAVAILABLE;
+
+/*!
+    @property sharedRoutingArbiter
+    @abstract Returns the singleton AVAudioRoutingArbiter instance.
+*/
+@property (class, readonly, nonatomic) AVAudioRoutingArbiter *sharedRoutingArbiter;
+
+/*!
+    @method        beginArbitrationWithCategory:completionHandler:
+    @abstract      Begin routing arbitration to take ownership of nearby Bluetooth audio routes.
+    @discussion
+       When an app wants to participate in automatic audio arbitration for the wireless headphones route, it has to begin arbitration
+       specifying its arbitration session category. It provides the operating system time to arbitrate with other nearby Apple
+       devices to obtain ownership of supported Bluetooth audio devices. Then upon completion of arbitration, the operating system
+       will automatically determine  whether to route audio to the nearby Bluetooth device. Once arbitration completes, the application
+       is free to start running audio I/O. I/O will be started upon the app request even if the -beginArbitrationWithCategory:completionHandler: fails.
+       This method should also be used whenever restarting audio I/O in order to allow the system to arbitrate for ownership of a Bluetooth
+       device that may have been taken by another nearby Apple device during the time that I/O was stopped.
+    @param         category
+       The category describes the general type of audio that the app plans to use.
+    @param         handler
+        A client-supplied block called asynchronously when audio routing arbitration is completed.
+        This completion handler takes the following parameters:
+        defaultDeviceChanged
+            Indicating that the system default audio device has been changed as a result of the arbitration operation.
+        error
+            An error object that indicates why the request failed, or nil if the request was successful.
+*/
+- (void)beginArbitrationWithCategory:(AVAudioRoutingArbitrationCategory)category completionHandler:(void(^)(BOOL defaultDeviceChanged, NSError * _Nullable error))handler
+NS_SWIFT_NAME(begin(category:completionHandler:));
+
+/*!
+    @method   leaveArbitration
+    @abstract   Stop participating in audio routing arbitration.
+    @discussion
+        When an application has stopped using audio for the foreseeable future, it should notify the system. For example,
+        in Voice over IP (VoIP)  use cases, the application should call -leaveArbitration when the VoIP call has ended.
+        This allows the system to make a better decision when other participating Apple devices would like to take ownership
+        of a nearby Bluetooth device. Applications should not call this API in cases where audio is only momentarily paused.
+*/
+- (void)leaveArbitration NS_SWIFT_NAME(leave());
+
+@end
+
+NS_ASSUME_NONNULL_END
+
+#endif /* AudioSession_AVAudioRoutingArbiter_h */
+#else
+#include <AudioSession/AVAudioRoutingArbiter.h>
+#endif
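
The header above introduces AVAudioRoutingArbiter, a macOS-only singleton for participating in automatic audio routing arbitration. A minimal usage sketch follows, assuming the app starts its own audio I/O from the completion handler; `StartAudioEngine` is a hypothetical helper standing in for whatever starts the app's audio engine.

```objc
#import <AVFAudio/AVFAudio.h>

// A minimal sketch (macOS 11+): begin arbitration before starting audio I/O.
static void StartPlaybackWithArbitration(void)
{
    AVAudioRoutingArbiter *arbiter = AVAudioRoutingArbiter.sharedRoutingArbiter;
    [arbiter beginArbitrationWithCategory:AVAudioRoutingArbitrationCategoryPlayback
                        completionHandler:^(BOOL defaultDeviceChanged, NSError * _Nullable error) {
        if (error != nil) {
            NSLog(@"Routing arbitration failed: %@", error);
        }
        // Arbitration has completed; start audio I/O here. If defaultDeviceChanged
        // is YES, the system default device changed as a result of arbitration.
        // StartAudioEngine();  // hypothetical helper
    }];
}

static void StopPlayback(void)
{
    // Call when audio is done for the foreseeable future (for example a VoIP call
    // ended), not when playback is only momentarily paused.
    [AVAudioRoutingArbiter.sharedRoutingArbiter leaveArbitration];
}
```

Per the header's discussion, I/O may still be started even if -beginArbitrationWithCategory:completionHandler: fails; the error only indicates that the system could not arbitrate on the app's behalf.
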
diff -ruN /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSession.h /Applications/Xcode_13.0.0-beta2.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSession.h
--- /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSession.h	2021-06-02 12:47:32.000000000 -0400
+++ /Applications/Xcode_13.0.0-beta2.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSession.h	2021-06-17 14:34:08.000000000 -0400
@@ -405,6 +405,15 @@
 - (BOOL)setAggregatedIOPreference:(AVAudioSessionIOType)inIOType
 							error:(NSError **)outError API_AVAILABLE(ios(10.0)) API_UNAVAILABLE(tvos, watchos, macos);
 
+/// Set YES to inform the system if the app can supply multichannel audio content.
+/// Default value is NO. This property is intended to be used by 'Now Playing' applications.
+/// See https://developer.apple.com/documentation/mediaplayer/becoming_a_now_playable_app for more information
+/// about what it means to be a 'Now Playing' application. Typically 'Now Playing' applications will also use
+/// AVAudioSessionRouteSharingPolicyLongFormAudio or AVAudioSessionRouteSharingPolicyLongFormVideo.
+- (BOOL)setSupportsMultichannelContent:(BOOL)inValue error:(NSError **)outError API_AVAILABLE(ios(15.0), watchos(8.0), tvos(15.0)) API_UNAVAILABLE(macos);
+
+@property(readonly) BOOL supportsMultichannelContent API_AVAILABLE(ios(15.0), watchos(8.0), tvos(15.0)) API_UNAVAILABLE(macos);
+
 @end // interface for AVAudioSession (RoutingConfiguration)
 
 #pragma mark-- Names for NSNotifications --
@@ -456,9 +465,26 @@
 */
 OS_EXPORT NSNotificationName const  AVAudioSessionSilenceSecondaryAudioHintNotification API_AVAILABLE(ios(8.0), watchos(2.0), tvos(9.0));
 
+/*!
+    @brief  Notification sent to registered listeners when spatial playback capabilities are changed due to a
+    change in user preference.
+
+    Check the notification's userInfo dictionary for AVAudioSessionSpatialAudioEnabledKey to check if spatial
+    audio is enabled.
+    
+    Observers of this notification should also observe AVAudioSessionRouteChangeNotification since a route change
+    may also result in a change in the ability for the system to play spatial audio. Use
+    AVAudioSessionPortDescription's isSpatialAudioEnabled property to check if the current route supports
+    spatialized playback.
+*/
+OS_EXPORT NSNotificationName const  AVAudioSessionSpatialPlaybackCapabilitiesChangedNotification API_AVAILABLE(ios(15.0), watchos(8.0), tvos(15.0)) API_UNAVAILABLE(macos) NS_SWIFT_NAME(AVAudioSession.spatialPlaybackCapabilitiesChangedNotification);
 
 #pragma mark-- Keys for NSNotification userInfo dictionaries --
 
+/// keys for AVAudioSessionSpatialPlaybackCapabilitiesChangedNotification
+/// value is an NSNumber whose boolean value indicates if spatial audio enabled.
+OS_EXPORT NSString *const AVAudioSessionSpatialAudioEnabledKey API_AVAILABLE(ios(15.0), watchos(8.0), tvos(15.0)) API_UNAVAILABLE(macos);
+
 /// keys for AVAudioSessionInterruptionNotification
 /// Value is an NSNumber representing an AVAudioSessionInterruptionType
 OS_EXPORT NSString *const AVAudioSessionInterruptionTypeKey API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0));
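
The AVAudioSession additions above let 'Now Playing' apps declare multichannel support and observe changes in spatial playback capabilities. A hedged sketch of how the new method, notification, and userInfo key might be used together, assuming a long-form video app; the category, mode, and route-sharing policy shown are illustrative choices, not requirements.

```objc
#import <AVFAudio/AVFAudio.h>

// A hedged sketch (iOS 15): declare multichannel support and observe
// spatial-playback capability changes.
static void ConfigureNowPlayingSession(void)
{
    AVAudioSession *session = [AVAudioSession sharedInstance];
    NSError *error = nil;

    // Illustrative configuration for a long-form video 'Now Playing' app.
    [session setCategory:AVAudioSessionCategoryPlayback
                    mode:AVAudioSessionModeMoviePlayback
      routeSharingPolicy:AVAudioSessionRouteSharingPolicyLongFormVideo
                 options:0
                   error:&error];

    // Inform the system that this app can supply multichannel audio content.
    if (![session setSupportsMultichannelContent:YES error:&error]) {
        NSLog(@"setSupportsMultichannelContent failed: %@", error);
    }

    // Observe changes in spatial playback capabilities driven by user preference.
    // Keep the returned token if the observer must be removed later.
    id observer = [[NSNotificationCenter defaultCenter]
        addObserverForName:AVAudioSessionSpatialPlaybackCapabilitiesChangedNotification
                    object:nil
                     queue:nil
                usingBlock:^(NSNotification *note) {
        BOOL spatialEnabled =
            [note.userInfo[AVAudioSessionSpatialAudioEnabledKey] boolValue];
        NSLog(@"Spatial audio enabled: %d", spatialEnabled);
    }];
    (void)observer;
}
```
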
diff -ruN /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSessionRoute.h /Applications/Xcode_13.0.0-beta2.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSessionRoute.h
--- /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSessionRoute.h	2021-06-02 11:07:36.000000000 -0400
+++ /Applications/Xcode_13.0.0-beta2.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVAudioSessionRoute.h	2021-06-17 04:14:06.000000000 -0400
@@ -150,6 +150,25 @@
 */
 @property (readonly) BOOL hasHardwareVoiceCallProcessing API_AVAILABLE(ios(10.0), watchos(3.0), tvos(10.0)) API_UNAVAILABLE(macos);
 
+/*!
+    @brief This property's value will be true if the port supports spatial audio playback and the feature is
+    enabled.
+     
+    'Now Playing' apps should also inform the system if they support multichannel audio content using
+    -setSupportsMultichannelContent:error: method. Apps may also register to receive the
+    AVAudioSessionSpatialPlaybackCapabilitiesChanged notification to detect changes in user preferences that
+    affect spatial audio playback.
+   
+    This property is only relevant in the context of ports that have a small number of hardware channels
+    (typically 2), but have enhanced capabilities for rendering multi-channel content. Note that some port
+    types such as USB and HDMI may support multi-channel playback because they have hardware formats supporting
+    more than 2 channels. For example, many HDMI receivers are connected to multiple speakers and are capable of
+    rendering 5.1, 7.1, or other popular surround sound formats. Applications interested in utilizing multi-channel
+    formats should also query AVAudioSession's maximumOutputNumberOfChannels property and make use of
+    -setPreferredOutputNumberOfChannels:error: to set the preferred number of hardware channels.
+*/
+@property (readonly, getter=isSpatialAudioEnabled) BOOL spatialAudioEnabled API_AVAILABLE(ios(15.0), watchos(8.0), tvos(15.0)) API_UNAVAILABLE(macos);
+
 @property (readonly, nonatomic, nullable) NSArray<AVAudioSessionChannelDescription *> *channels API_AVAILABLE(ios(6.0), watchos(2.0), tvos(9.0)) API_UNAVAILABLE(macos);
 
 /// Will be nil if there are no selectable data sources.
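
The new spatialAudioEnabled property on AVAudioSessionPortDescription reports whether a port supports spatial audio playback with the feature enabled. A small sketch of checking the current route, assuming the shared session:

```objc
#import <AVFAudio/AVFAudio.h>

// A minimal sketch: return YES if any output in the current route has spatial
// audio enabled.
static BOOL CurrentRouteSupportsSpatialPlayback(void)
{
    AVAudioSessionRouteDescription *route = [AVAudioSession sharedInstance].currentRoute;
    for (AVAudioSessionPortDescription *output in route.outputs) {
        if (output.spatialAudioEnabled) {
            return YES;
        }
    }
    return NO;
}
```
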
diff -ruN /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVFAudio.h /Applications/Xcode_13.0.0-beta2.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVFAudio.h
--- /Applications/Xcode_13.0.0-beta.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVFAudio.h	2021-06-01 21:25:41.000000000 -0400
+++ /Applications/Xcode_13.0.0-beta2.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS.sdk/System/Library/Frameworks/AVFAudio.framework/Headers/AVFAudio.h	2021-06-11 13:48:52.000000000 -0400
@@ -19,7 +19,9 @@
 #import <AVFAudio/AVAudioPlayerNode.h>
 #if !0
 #import <AVFAudio/AVAudioRecorder.h>
+#import <AVFAudio/AVAudioRoutingArbiter.h>
 #endif
+
 #import <AVFAudio/AVAudioSequencer.h>
 #import <AVFAudio/AVAudioSettings.h>
 #import <AVFAudio/AVAudioSinkNode.h>
@@ -52,7 +54,5 @@
 #import <AVFAudio/AVAudioSessionDeprecated.h>
 #endif
 
-#if TARGET_OS_OSX
-#import <AVFAudio/AVAudioRoutingArbiter.h>
-#endif
+
 