List available output audio target AVAudioSession
我需要列出可用于iOS应用程序的音频输出。 我的问题与此相似:如何列出iOS上的可用音频输出路线
我尝试了这段代码:
// Configure the shared audio session for playback and log every output
// port on the *current* route.
//
// NOTE(review): -currentRoute reports only the route that is currently
// active, not every reachable device (e.g. an idle AirPlay target) —
// which is exactly the behavior the question observes.  Use MPVolumeView
// to offer the user a device picker (see the edits below).
NSError *setCategoryError = nil;
BOOL success = [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback
                                                      error:&setCategoryError];
if (!success) {
    // Original code ignored the result; surface failures instead.
    NSLog(@"setCategory failed: %@", setCategoryError);
}

NSError *activationError = nil;
if (![[AVAudioSession sharedInstance] setActive:YES error:&activationError]) {
    NSLog(@"setActive failed: %@", activationError);
}

AVAudioSessionRouteDescription *route = [[AVAudioSession sharedInstance] currentRoute];
// -count returns NSUInteger; %d is the wrong specifier on 64-bit, so cast
// and use %lu.
NSLog(@"session.currentRoute.outputs count %lu", (unsigned long)[route.outputs count]);
for (AVAudioSessionPortDescription *portDesc in route.outputs) {
    NSLog(@"-----");
    NSLog(@"portDesc UID %@", portDesc.UID);
    NSLog(@"portDesc portName %@", portDesc.portName);
    NSLog(@"portDesc portType %@", portDesc.portType);
    NSLog(@"portDesc channels %@", portDesc.channels);
}
但是，我总是只看到一个输出端口（计数为1），即使我实际上有两个可用的输出（AirPlay和内置扬声器）也是如此。
如果我使用音乐应用程序,则可以看到两个端口并在它们之间切换。
在我的应用中,我只会看到我选择的那个。
还有什么我需要做的吗?
谢谢
编辑:
我也尝试了这段代码:
// Query the legacy C Audio Session API for the current route description.
//
// kAudioSessionProperty_AudioRouteDescription hands back a CF dictionary
// that the *caller* owns, so ownership must be transferred to ARC with
// CFBridgingRelease; the original plain __bridge cast leaked it.
// NOTE(review): AudioSessionGetProperty has been deprecated since iOS 7 —
// prefer AVAudioSession's currentRoute / availableInputs.
CFDictionaryRef asCFType = NULL;
UInt32 dataSize = sizeof(asCFType);
OSStatus status = AudioSessionGetProperty(kAudioSessionProperty_AudioRouteDescription,
                                          &dataSize,
                                          &asCFType);
if (status == kAudioSessionNoError && asCFType != NULL) {
    // __bridge_transfer: ARC takes ownership and will release it for us.
    NSDictionary *audioRoutesDesc = CFBridgingRelease(asCFType);
    NSLog(@"audioRoutesDesc %@", audioRoutesDesc);
} else {
    // Original code ignored the status; surface failures instead.
    NSLog(@"AudioSessionGetProperty failed: %d", (int)status);
}
但是字典仅列出一个输出目标。 此外,输入源数组为空(我有iPhone 4s)
编辑2:
我使用MPVolumeView可以正常工作。 该组件具有一个按钮,可让您选择输出音频路线,例如在Music App中。
如果需要,可以使用以下方法隐藏滑块(仅包含按钮):
self.myMPVolumeView.showsVolumeSlider = NO;
这将取决于您的AVAudioSession类别。
您可以安全地假设在iPhone上至少有一个麦克风作为输入,并且有一个扬声器作为输出。如果您想获取蓝牙/ AirPlay输出的列表,首先必须确保您的会话类别正在向您报告:
// Swift 2-era API names, kept as in the original answer.
// .AllowBluetooth is what makes the session report Bluetooth HFP routes
// at all — without it they never appear in availableInputs.
do {
    try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord,
                                 withOptions: .AllowBluetooth)
    try audioSession.setActive(true)
} catch let e {
    // The scrape had a doubled backslash here; the interpolation is \(e).
    debugPrint("failed to initialize audio session: \(e)")
}
那么，获取可用输出的一个不太直观的方法是检查当前路由（currentRoute）的 outputs（见下面的代码片段）。
更好的方法是使用MultipleRoute类别,这将为您提供更多访问
尝试这样的事情,它比您需要的更多,但是您可以减少它:
/// Dumps the device's audio input/output configuration — available input
/// ports, their data sources, and the currently selected route — into a
/// human-readable string, logging each line as it goes.
/// @return A multi-line description of ports and routes.
+ (NSString *)demonstrateInputSelection
{
    NSError *theError = nil;
    BOOL result = YES;
    NSMutableString *info = [[NSMutableString alloc] init];
    [info appendString:@" Device Audio Input Hardware\n"];

    NSString *str = nil;
    // availableInputs / data sources require iOS 7+.
    // NOTE(review): iOSMajorVersion is assumed to be defined elsewhere in
    // the project — confirm.
    if (iOSMajorVersion < 7) {
        str = @"No input device information available";
        NSLog(@"%@", str);
        [info appendFormat:@"%@\n", str];
        return info;
    }

    AVAudioSession *myAudioSession = [AVAudioSession sharedInstance];
    result = [myAudioSession setCategory:AVAudioSessionCategoryPlayAndRecord error:&theError];
    if (!result) {
        NSLog(@"setCategory failed");
    }
    result = [myAudioSession setActive:YES error:&theError];
    if (!result) {
        NSLog(@"setActive failed");
    }

    // Get the set of available inputs. If there are no audio accessories
    // attached, there will be only one available input -- the built-in mic.
    NSArray *inputs = [myAudioSession availableInputs];
    // -count is NSUInteger: use %lu with a cast, not %d.
    str = [NSString stringWithFormat:@"\n--- Ports available on %@: %lu ---",
           [UIDevice currentDevice].name, (unsigned long)[inputs count]];
    NSLog(@"%@", str);
    [info appendFormat:@"%@\n", str];

    // Locate the port corresponding to the built-in microphone.
    AVAudioSessionPortDescription *builtInMicPort = nil;
    AVAudioSessionDataSourceDescription *frontDataSource = nil;

    for (AVAudioSessionPortDescription *port in inputs) {
        // Print out a description of each input port and its data sources.
        str = @"\n**********";
        NSLog(@"%@", str);
        [info appendFormat:@"%@\n", str];

        str = [NSString stringWithFormat:@"Port :\"%@\": UID:%@", port.portName, port.UID];
        NSLog(@"%@", str);
        [info appendFormat:@"%@\n", str];

        if ([port.dataSources count]) {
            str = [NSString stringWithFormat:@"Port has %u data sources",
                   (unsigned)[port.dataSources count]];
            NSLog(@"%@", str);
            [info appendFormat:@"%@\n", str];
        }

        str = [NSString stringWithFormat:@">%@", port.dataSources];
        NSLog(@"%@", str);

        if ([port.portType isEqualToString:AVAudioSessionPortLineIn]) {
            str = @"Line Input found";
            NSLog(@"%@", str);
            [info appendFormat:@"%@\n", str];
        } else if ([port.portType isEqualToString:AVAudioSessionPortUSBAudio]) {
            str = @"USB Audio found";
            NSLog(@"%@", str);
            [info appendFormat:@"%@\n", str];
        } else if ([port.portType isEqualToString:AVAudioSessionPortBuiltInMic]) {
            builtInMicPort = port;
            str = @"Built-in Mic found";
            NSLog(@"%@", str);
            [info appendFormat:@"%@\n", str];
        } else if ([port.portType isEqualToString:AVAudioSessionPortHeadsetMic]) {
            builtInMicPort = port;
            str = @"Headset Mic found";
            NSLog(@"%@", str);
            [info appendFormat:@"%@\n", str];
        } else {
            str = @"Other input source found";
            NSLog(@"%@", str);
            [info appendFormat:@"%@\n", str];
        }

        // Loop over the port's data sources.  The original sample also
        // tried to locate the front microphone here, but that code was
        // commented out, so frontDataSource is never set.
        for (AVAudioSessionDataSourceDescription *source in port.dataSources) {
            str = [NSString stringWithFormat:@"\nName:%@ (%d) \nPolar:%@ \nType:%@ \nPatterns:%@",
                   source.dataSourceName, [source.dataSourceID intValue],
                   source.selectedPolarPattern, port.portType, source.supportedPolarPatterns];
            NSLog(@"%@", str);
            [info appendFormat:@"%@\n", str];
            // if ([source.orientation isEqual:AVAudioSessionOrientationFront])
            // {
            //     frontDataSource = source;
            //     break;
            // }
        } // end data source iteration
    }

    str = @"\n---- Current Selected Ports ----\n";
    NSLog(@"%@", str);
    [info appendFormat:@"%@", str];

    NSArray *currentInputs = myAudioSession.currentRoute.inputs;
    for (AVAudioSessionPortDescription *port in currentInputs) {
        str = [NSString stringWithFormat:@"\nInput Port :\"%@\":", port.portName];
        NSLog(@"%@", str);
        [info appendFormat:@"%@\n", str];
        if ([port.dataSources count]) {
            str = [NSString stringWithFormat:@"Port has %u data sources",
                   (unsigned)[port.dataSources count]];
            NSLog(@"%@", str);
            [info appendFormat:@"%@\n", str];

            str = [NSString stringWithFormat:@"Selected data source:%@",
                   port.selectedDataSource.dataSourceName];
            NSLog(@"%@", str);
            [info appendFormat:@"%@\n", str];

            if ([port.selectedDataSource.supportedPolarPatterns count] > 0) {
                str = [NSString stringWithFormat:@"Selected polar pattern:%@",
                       port.selectedDataSource.selectedPolarPattern];
                NSLog(@"%@", str);
                [info appendFormat:@"%@\n", str];
            }
        }
    }

    NSArray *currentOutputs = myAudioSession.currentRoute.outputs;
    for (AVAudioSessionPortDescription *port in currentOutputs) {
        str = [NSString stringWithFormat:@"\nOutput Port :\"%@\":", port.portName];
        NSLog(@"%@", str);
        [info appendFormat:@"%@\n", str];
        if ([port.dataSources count]) {
            str = [NSString stringWithFormat:@"Port has %u data sources",
                   (unsigned)[port.dataSources count]];
            NSLog(@"%@", str);
            [info appendFormat:@"%@\n", str];

            str = [NSString stringWithFormat:@"Selected data source:%@",
                   port.selectedDataSource.dataSourceName];
            NSLog(@"%@", str);
            [info appendFormat:@"%@\n", str];
        }
    }

    if (myAudioSession.preferredInput.portName) {
        str = [NSString stringWithFormat:@"\nPreferred Port: %@ Source:%@\n",
               myAudioSession.preferredInput.portName,
               myAudioSession.preferredInput.selectedDataSource.dataSourceName];
    } else {
        str = @"\nNo Preferred Port set";
    }
    NSLog(@"%@", str);
    [info appendFormat:@"%@\n", str];
    return info;

    // NOTE(review): everything below this return is UNREACHABLE.  It is
    // kept verbatim from the original sample (which selects the front mic
    // as preferred input); delete the return above to enable it.
    if (frontDataSource) {
        NSLog(@"Currently selected source is \"%@\" for port \"%@\"",
              builtInMicPort.selectedDataSource.dataSourceName, builtInMicPort.portName);
        NSLog(@"Attempting to select source \"%@\" on port \"%@\"",
              frontDataSource, builtInMicPort.portName);

        // Set a preference for the front data source.
        theError = nil;
        result = [builtInMicPort setPreferredDataSource:frontDataSource error:&theError];
        if (!result) {
            // an error occurred. Handle it!
            NSLog(@"setPreferredDataSource failed");
        }
    }

    // Make sure the built-in mic is selected for input. This will be a
    // no-op if the built-in mic is already the current input Port.
    theError = nil;
    result = [myAudioSession setPreferredInput:builtInMicPort error:&theError];
    if (!result) {
        // an error occurred. Handle it!
        NSLog(@"setPreferredInput failed");
    }
    return info;
}
// Walk every output port on the audio session's active route.
// (Loop body intentionally left empty in the original answer — fill in
// whatever per-port handling you need.)
AVAudioSessionRouteDescription *route = [[AVAudioSession sharedInstance] currentRoute];
for (AVAudioSessionPortDescription *output in route.outputs) {
}
请检查出完整的工作代码。
音频会话,输出设备句柄的完整代码,并在操作表中显示。
在给定的链接中有以下文件可用。
以下是每个文件的简要说明。
AVAudioSessionHandler.swift->
所有必需的方法都可用,这对于根据选定的输出设备覆盖路由很有用。
AudioOutputDeviceHandler.swift->
所有必需的方法都可用,这对于获取输入设备列表,当前输出设备以及显示所有可用输入设备的操作表很有用。
SpeakerUIHandler.swift->
所有必需的方法都可用,这对于根据所选的输出设备更新Speaker UI很有用。
AudioSession.swift->
所有方法都可用,这对于创建音频会话和设置音频会话的所有必需参数很有用。
请检查以下代码以获取可用输入设备列表的列表。
extension AVAudioSession {

    /// Presents an action sheet listing the selectable audio routes
    /// (Bluetooth, headphones, car audio, receiver, speaker) and applies
    /// the user's choice; when no external device is attached it simply
    /// toggles between receiver and speaker.
    /// - Parameters:
    ///   - presenterViewController: controller that presents the sheet.
    ///   - speakerButton: anchor for the iPad popover presentation.
    @objc func ChangeAudioOutput(_ presenterViewController: UIViewController, _ speakerButton: UIButton) {
        let CHECKED_KEY = "checked"
        var deviceAction = UIAlertAction()
        var headphonesExist = false

        if AudioOutputDeviceHandler.sharedInstance.isDeviceListRequired() {
            let optionMenu = UIAlertController(title: nil, message: nil, preferredStyle: .actionSheet)

            // NOTE(review): force-unwrapping availableInputs will crash if
            // the session reports none — consider `?? []`.
            for audioPort in self.availableInputs! {
                // Swift switch cases do not fall through; the original
                // trailing `break`s were redundant and have been removed.
                switch audioPort.portType {
                case AVAudioSession.Port.bluetoothA2DP,
                     AVAudioSession.Port.bluetoothHFP,
                     AVAudioSession.Port.bluetoothLE:
                    overrideBluetooth(audioPort, optionMenu)
                case AVAudioSession.Port.builtInMic,
                     AVAudioSession.Port.builtInReceiver:
                    deviceAction = overrideBuiltInReceiver(audioPort)
                case AVAudioSession.Port.headphones,
                     AVAudioSession.Port.headsetMic:
                    headphonesExist = true
                    overrideheadphones(audioPort, optionMenu)
                case AVAudioSession.Port.carAudio:
                    overrideCarAudio(port: audioPort, optionMenu: optionMenu)
                default:
                    break
                }
            }

            // The "iPhone" (receiver) entry is hidden while headphones are
            // plugged in, because the receiver is not selectable then.
            if !headphonesExist {
                if self.currentRoute.outputs.contains(where: { $0.portType == AVAudioSession.Port.builtInReceiver })
                    || self.currentRoute.outputs.contains(where: { $0.portType == AVAudioSession.Port.builtInMic }) {
                    deviceAction.setValue(true, forKey: CHECKED_KEY)
                }
                optionMenu.addAction(deviceAction)
            }

            overrideSpeaker(optionMenu)

            let cancelAction = UIAlertAction(title: "Cancel", style: .cancel, handler: { (alert: UIAlertAction!) -> Void in
            })
            optionMenu.addAction(cancelAction)

            alertViewSetupForIpad(optionMenu, speakerButton)
            presenterViewController.present(optionMenu, animated: false, completion: nil)

            // Auto dismiss after 5 seconds.
            DispatchQueue.main.asyncAfter(deadline: .now() + 5.0) {
                optionMenu.dismiss(animated: true, completion: nil)
            }
        } else {
            // No external devices: just toggle receiver <-> speaker.
            if self.isBuiltInSpeaker {
                if AudioOutputDeviceHandler.sharedInstance.isSpeaker {
                    let port = self.currentRoute.inputs.first!
                    setPortToNone(port)
                    AudioOutputDeviceHandler.sharedInstance.isSpeaker = false
                }
            } else if self.isReceiver || self.isBuiltInMic || self.isHeadphonesConnected {
                setPortToSpeaker()
                AudioOutputDeviceHandler.sharedInstance.isSpeaker = true
            }
        }
    }

    /// Adds an action that routes input to a connected car-audio port.
    func overrideCarAudio(port: AVAudioSessionPortDescription, optionMenu: UIAlertController) {
        let action = UIAlertAction(title: port.portName, style: .default) { (action) in
            do {
                // Set new input.
                try self.setPreferredInput(port)
            } catch let error as NSError {
                print("audioSession error change to input: \(port.portName) with error: \(error.localizedDescription)")
            }
        }
        if self.currentRoute.outputs.contains(where: { $0.portType == port.portType }) {
            action.setValue(true, forKey: "checked")
        }
        if let image = UIImage(named: "CarAudio") {
            action.setValue(image, forKey: "image")
        }
        optionMenu.addAction(action)
    }

    /// Adds a "Headphones" action that routes input to the wired headset.
    func overrideheadphones(_ port: AVAudioSessionPortDescription, _ optionMenu: UIAlertController) {
        let CHECKED_KEY = "checked"
        let HEADPHONES_TITLE = "Headphones"
        let action = UIAlertAction(title: HEADPHONES_TITLE, style: .default) { (action) in
            do {
                // Set new input.
                try self.setPreferredInput(port)
            } catch let error as NSError {
                print("audioSession error change to input: \(port.portName) with error: \(error.localizedDescription)")
            }
        }
        if self.currentRoute.outputs.contains(where: { $0.portType == AVAudioSession.Port.headphones })
            || self.currentRoute.outputs.contains(where: { $0.portType == AVAudioSession.Port.headsetMic }) {
            action.setValue(true, forKey: CHECKED_KEY)
        }
        if let image = UIImage(named: "Headphone") {
            action.setValue(image, forKey: "image")
        }
        optionMenu.addAction(action)
    }

    /// Adds a "Speaker" action that overrides output to the loudspeaker.
    func overrideSpeaker(_ optionMenu: UIAlertController) {
        let SPEAKER_TITLE = "Speaker"
        let CHECKED_KEY = "checked"
        let speakerOutput = UIAlertAction(title: SPEAKER_TITLE, style: .default, handler: { [weak self] (alert: UIAlertAction!) -> Void in
            self?.setPortToSpeaker()
            // FIX: the original set this flag while *building* the menu,
            // marking the speaker active even if the user never tapped it;
            // it must only be set when the action actually runs.
            AudioOutputDeviceHandler.sharedInstance.isSpeaker = true
        })
        if self.currentRoute.outputs.contains(where: { $0.portType == AVAudioSession.Port.builtInSpeaker }) {
            speakerOutput.setValue(true, forKey: CHECKED_KEY)
        }
        if let image = UIImage(named: "Speaker") {
            speakerOutput.setValue(image, forKey: "image")
        }
        optionMenu.addAction(speakerOutput)
    }

    /// Adds an action that routes input to a connected Bluetooth port.
    func overrideBluetooth(_ port: AVAudioSessionPortDescription, _ optionMenu: UIAlertController) {
        let CHECKED_KEY = "checked"
        let action = UIAlertAction(title: port.portName, style: .default) { (action) in
            do {
                // Set new input.
                try self.setPreferredInput(port)
            } catch let error as NSError {
                print("audioSession error change to input: \(port.portName) with error: \(error.localizedDescription)")
            }
        }
        if self.currentRoute.outputs.contains(where: { $0.portType == port.portType }) {
            action.setValue(true, forKey: CHECKED_KEY)
        }
        if let image = UIImage(named: "Bluetooth") {
            action.setValue(image, forKey: "image")
        }
        optionMenu.addAction(action)
    }

    /// Builds (but does not add) the "iPhone" receiver action; the caller
    /// decides whether to show it.
    func overrideBuiltInReceiver(_ port: AVAudioSessionPortDescription) -> UIAlertAction {
        let IPHONE_TITLE = "iPhone"
        let deviceAction = UIAlertAction(title: IPHONE_TITLE, style: .default) { [weak self] (action) in
            self?.setPortToNone(port)
        }
        if let image = UIImage(named: "Device") {
            deviceAction.setValue(image, forKey: "image")
        }
        return deviceAction
    }

    /// Forces output to the built-in loudspeaker.
    func setPortToSpeaker() {
        do {
            try self.overrideOutputAudioPort(AVAudioSession.PortOverride.speaker)
        } catch let error as NSError {
            print("audioSession error turning on speaker: \(error.localizedDescription)")
        }
    }

    /// Clears any speaker override and makes `port` the preferred input.
    func setPortToNone(_ port: AVAudioSessionPortDescription) {
        do {
            // Remove speaker override if needed.
            try self.overrideOutputAudioPort(AVAudioSession.PortOverride.none)
            // Set new input.
            try self.setPreferredInput(port)
        } catch let error as NSError {
            print("audioSession error change to input: \(AVAudioSession.PortOverride.none.rawValue) with error: \(error.localizedDescription)")
        }
    }

    /// Anchors the action sheet as a popover on iPad (required there).
    func alertViewSetupForIpad(_ optionMenu: UIAlertController, _ speakerButton: UIButton) {
        optionMenu.modalPresentationStyle = .popover
        if let presenter = optionMenu.popoverPresentationController {
            presenter.sourceView = speakerButton
            presenter.sourceRect = speakerButton.bounds
        }
    }
}

extension AVAudioSession {

    // Static conveniences mirroring the instance queries below.
    static var isHeadphonesConnected: Bool {
        return sharedInstance().isHeadphonesConnected
    }
    static var isBluetoothConnected: Bool {
        return sharedInstance().isBluetoothConnected
    }
    static var isCarAudioConnected: Bool {
        return sharedInstance().isCarAudioConnected
    }
    static var isBuiltInSpeaker: Bool {
        return sharedInstance().isBuiltInSpeaker
    }
    static var isReceiver: Bool {
        return sharedInstance().isReceiver
    }
    static var isBuiltInMic: Bool {
        return sharedInstance().isBuiltInMic
    }

    // `contains(where:)` short-circuits, unlike `!filter{}.isEmpty` which
    // built a throwaway array in the original.
    var isCarAudioConnected: Bool {
        return currentRoute.outputs.contains { $0.isCarAudio }
    }
    var isHeadphonesConnected: Bool {
        return currentRoute.outputs.contains { $0.isHeadphones }
    }
    var isBluetoothConnected: Bool {
        return currentRoute.outputs.contains { $0.isBluetooth }
    }
    var isBuiltInSpeaker: Bool {
        return currentRoute.outputs.contains { $0.isSpeaker }
    }
    var isReceiver: Bool {
        return currentRoute.outputs.contains { $0.isReceiver }
    }
    var isBuiltInMic: Bool {
        return currentRoute.outputs.contains { $0.isBuiltInMic }
    }
}

extension AVAudioSessionPortDescription {

    /// Wired headphones or a headset's microphone.
    var isHeadphones: Bool {
        return portType == AVAudioSession.Port.headphones
            || portType == AVAudioSession.Port.headsetMic
    }
    /// Any Bluetooth transport (HFP, A2DP, LE).
    var isBluetooth: Bool {
        return portType == AVAudioSession.Port.bluetoothHFP
            || portType == AVAudioSession.Port.bluetoothA2DP
            || portType == AVAudioSession.Port.bluetoothLE
    }
    var isCarAudio: Bool {
        return portType == AVAudioSession.Port.carAudio
    }
    var isSpeaker: Bool {
        return portType == AVAudioSession.Port.builtInSpeaker
    }
    var isBuiltInMic: Bool {
        return portType == AVAudioSession.Port.builtInMic
    }
    var isReceiver: Bool {
        return portType == AVAudioSession.Port.builtInReceiver
    }
}