Speaker isn't an available input when AirPods are connected - objective-c

I am trying to override my application's audio to the speaker when the user hits a button, but the speaker doesn't show up as an available audio output when AirPods are connected. What can I do to make the sound go to the speaker? This code works when any other Bluetooth device is connected.
po AVAudioSession.sharedInstance().availableInputs
<AVAudioSessionPortDescription: 0x283dc3410, type = MicrophoneBuiltIn; name = iPhone Microphone; UID = Built-In Microphone; selectedDataSource = (null)>
<AVAudioSessionPortDescription: 0x283dc32b0, type = BluetoothHFP; name = Lindsey’s AirPods; UID = 94:B0:1F:C3:FF:6B-tsco; selectedDataSource = (null)>
let session = AVAudioSession.sharedInstance()
do {
    try session.setCategory(AVAudioSession.Category.playAndRecord, mode: .voiceChat, options: [.allowBluetooth, .allowBluetoothA2DP, .mixWithOthers])
} catch {
    NSLog("Unable to change audio category because: \(error.localizedDescription)")
}
do {
    try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.speaker)
} catch let error as NSError {
    print("audioSession error turning on speaker: \(error.localizedDescription)")
}
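One approach sometimes suggested for this situation (not a confirmed fix) is to pin the preferred input to the built-in microphone before applying the override, so the session is not locked onto the Bluetooth HFP route; a minimal sketch using only standard AVAudioSession calls:

let session = AVAudioSession.sharedInstance()
do {
    // Assumption: selecting the built-in mic first may release the HFP route,
    // letting the speaker override take effect.
    if let builtInMic = session.availableInputs?.first(where: { $0.portType == .builtInMic }) {
        try session.setPreferredInput(builtInMic)
    }
    try session.overrideOutputAudioPort(.speaker)
} catch {
    print("Failed to route audio to the speaker: \(error)")
}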

Related

CallKit + WebRTC: CallKit call is getting disconnected when pressing lock / Power button in iOS

It is a conferencing app, and I initiate an outgoing call so that my VoIP call is treated as high priority and an incoming call doesn't interrupt it while I am in the VoIP call.
I am using WebRTC + CallKit in my app.
I started a call, and when I press the Lock / Power button the CallKit call gets disconnected, and my VoIP call's audio route changes to the receiver and stays there.
Why does locking the iPhone terminate the call?
Here is my code:
var callUUID: UUID?

extension AppDelegate {
    func initiateCallKitCall() {
        let config = CXProviderConfiguration(localizedName: "AppName")
        config.includesCallsInRecents = false
        config.supportsVideo = true
        config.maximumCallsPerCallGroup = 1

        provider = CXProvider(configuration: config)
        guard let provider = provider else { return }
        provider.setDelegate(self, queue: nil)

        callController = CXCallController()
        guard let callController = callController else { return }

        callUUID = UUID()
        let transaction = CXTransaction(action: CXStartCallAction(call: callUUID!, handle: CXHandle(type: .generic, value: "AppName")))
        callController.request(transaction, completion: { error in
            print("Error is : \(String(describing: error))")
        })
    }

    func endCallKitCall(userEnded: Bool) {
        self.userEnded = userEnded
        guard provider != nil else { return }
        guard let callController = callController else { return }
        if let uuid = callUUID {
            let endCallAction = CXEndCallAction(call: uuid)
            callController.request(
                CXTransaction(action: endCallAction),
                completion: { error in
                    if let error = error {
                        print("Error: \(error)")
                    } else {
                        print("Success")
                    }
                })
        }
    }

    func isCallGoing() -> Bool {
        let callController = CXCallController()
        if callController.callObserver.calls.count != 0 {
            return true
        }
        return false
    }
}

extension AppDelegate: CXProviderDelegate {
    func providerDidReset(_ provider: CXProvider) {
        print("-Provider-providerDidReset")
    }

    func provider(_ provider: CXProvider, perform action: CXAnswerCallAction) {
        print("-Provider-perform action: CXAnswerCallAction")
        action.fulfill()
    }

    func provider(_ provider: CXProvider, perform action: CXEndCallAction) {
        action.fulfill()
        print("-Provider: End Call")
    }

    func provider(_ provider: CXProvider, perform action: CXStartCallAction) {
        action.fulfill()
        DispatchQueue.main.asyncAfter(wallDeadline: DispatchWallTime.now() + 3) {
            provider.reportOutgoingCall(with: action.callUUID, startedConnectingAt: Date())
            DispatchQueue.main.asyncAfter(wallDeadline: DispatchWallTime.now() + 1.5) {
                provider.reportOutgoingCall(with: action.callUUID, connectedAt: Date())
            }
        }
    }

    func provider(_ provider: CXProvider, perform action: CXSetHeldCallAction) {
        action.fulfill()
    }

    func provider(_ provider: CXProvider, didActivate audioSession: AVAudioSession) {
        RTCAudioSession.sharedInstance().audioSessionDidActivate(audioSession)
        RTCAudioSession.sharedInstance().isAudioEnabled = true
    }

    func provider(_ provider: CXProvider, didDeactivate audioSession: AVAudioSession) {
        RTCAudioSession.sharedInstance().audioSessionDidDeactivate(audioSession)
        RTCAudioSession.sharedInstance().isAudioEnabled = false
    }
}
The power button ends a call if and only if the call is running through the built-in speaker on top of the screen (receiver). In any other case (i.e. the audio is playing through headphones, Bluetooth or built-in loudspeaker) the power button will not end the call.
The same is true for native phone calls.
I would like to answer all issues related to CallKit here.
The answer to my question is:
You need to set the AVAudioSession mode to .default after your VoIP call is established successfully.
try AVAudioSession.sharedInstance().setMode(.default)
AudioRouteManager.shared.fourceRouteAudioToSpeakers()
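A minimal sketch of where that mode change could go, assuming it is applied once CallKit hands the audio session over (the didActivate delegate method from the code above); AudioRouteManager is the poster's own helper and is kept as-is:

func provider(_ provider: CXProvider, didActivate audioSession: AVAudioSession) {
    RTCAudioSession.sharedInstance().audioSessionDidActivate(audioSession)
    RTCAudioSession.sharedInstance().isAudioEnabled = true
    do {
        // Assumption: switching the mode away from .voiceChat once the call is
        // established is what the answer above refers to.
        try audioSession.setMode(.default)
    } catch {
        print("Unable to set audio session mode: \(error)")
    }
    AudioRouteManager.shared.fourceRouteAudioToSpeakers()
}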

Showing a progress bar when uploading a file to Google Drive with Swift 3

I'm currently developing an application in Swift 3 for a school project that requires uploading files to Google Drive.
The upload is currently working, but I need to inform users with an upload progress bar.
Here is the current code:
let folderId: String = folders[indexPath.row].folderId

let metadata = GTLRDrive_File.init()
metadata.name = "Ruin.mp3"
// metadata.mimeType = "application/vnd.google-apps.photo"
metadata.parents = [folderId]

guard let filePath = Bundle.main.path(forResource: "Ruin", ofType: "mp3") else {
    return
}
guard let fileData = FileManager.default.contents(atPath: filePath) else {
    return
}

let uploadParameters = GTLRUploadParameters(data: fileData, mimeType: "audio/mpeg")
uploadParameters.shouldUploadWithSingleRequest = true

let query = GTLRDriveQuery_FilesCreate.query(withObject: metadata, uploadParameters: uploadParameters)
query.fields = "id"

self.service.executeQuery(query, completionHandler: { (ticket: GTLRServiceTicket, object: Any?, error: Error?) in
    if error == nil {
        // print("File ID \(files.identifier)")
    } else {
        print("An error occurred: \(error)")
    }
})
Thanks in advance.
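For the progress bar itself, GTLRService in GoogleAPIClientForREST has an uploadProgressBlock property (assuming a reasonably recent version of the library) that is called as bytes are sent; a minimal sketch, set before executing the query (the progressView outlet below is assumed, not from the question):

self.service.uploadProgressBlock = { ticket, totalBytesUploaded, totalBytesExpectedToUpload in
    guard totalBytesExpectedToUpload > 0 else { return }
    let fraction = Float(totalBytesUploaded) / Float(totalBytesExpectedToUpload)
    DispatchQueue.main.async {
        // Hypothetical UIProgressView outlet; UI must be updated on the main thread.
        self.progressView.progress = fraction
    }
}
self.service.executeQuery(query, completionHandler: { (ticket, object, error) in
    // same completion handler as in the code above
})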

Google Drive API iOS permissions of GTLRDriveService

I am playing around with the Google Drive API and trying to build a simple app that uploads a picture to my Google Drive. The app is supposed to upload a picture once the user is signed in; however, it gives the following error:
"2017-09-14 00:55:20.342237-0400 driveTest[6705:1647551] An error
occurred: Error Domain=com.google.GTLRErrorObjectDomain Code=403
"Insufficient Permission"
UserInfo={GTLRStructuredError=GTLRErrorObject 0x1c4251d30:
{message:"Insufficient Permission" errors:[1] code:403},
NSLocalizedDescription=Insufficient Permission}"
I have tried passing the service, which is of type GTLRDriveService, to the initSetup() function of the userSetUp class, but to no avail. Could someone please point me in the right direction as to why my permissions are not working even though I have logged in correctly? The part where I pass in the GTLRDriveService is in the code that runs after a successful login.
I instantiate a userSetUp object and call:
let setUpUser = userSetUp()
setUpUser.initSetup(service)
I have userSetUp written in Objective-C as shown below, and it is bridged correctly, as I am able to instantiate it in my view controller file, which is written in Swift.
userSetUp:
#import "userSetUp.h"
#import <GoogleSignIn/GoogleSignIn.h>
#import GoogleAPIClientForREST;
#implementation userSetUp
- (void) initSetup:(GTLRDriveService *) driveService {
printf("heloooooaiosuoiadoidauoalo");
//GTLRDriveService *driveService = [GTLRDriveService new];
//NSData *fileData = [[NSFileManager defaultManager] contentsAtPath:#"files/apple.jpg"];
NSString *filePath = [[NSBundle mainBundle] pathForResource:#"apple" ofType:#"jpg"];
NSData *fileData = [NSData dataWithContentsOfFile:filePath];
GTLRDrive_File *metadata = [GTLRDrive_File object];
metadata.name = #"apple.jpg";
//metadata.mimeType = #"application/vnd.google-apps.document";
GTLRUploadParameters *uploadParameters = [GTLRUploadParameters uploadParametersWithData:fileData
MIMEType:#"image/jpeg"];
uploadParameters.shouldUploadWithSingleRequest = TRUE;
GTLRDriveQuery_FilesCreate *query = [GTLRDriveQuery_FilesCreate queryWithObject:metadata
uploadParameters:uploadParameters];
query.fields = #"id";
[driveService executeQuery:query completionHandler:^(GTLRServiceTicket *ticket,
GTLRDrive_File *file,
NSError *error) {
if (error == nil) {
//NSLog(#"File ID %#", file.identifier);
printf("it worked");
} else {
NSLog(#"An error occurred: %#", error);
}
}];
printf("upload complete!");
}
#end
And ViewController.swift:
import GoogleAPIClientForREST
import GoogleSignIn
import UIKit
class ViewController: UIViewController, GIDSignInDelegate, GIDSignInUIDelegate {
// If modifying these scopes, delete your previously saved credentials by
// resetting the iOS simulator or uninstall the app.
private let scopes = [kGTLRAuthScopeDriveReadonly]
let service = GTLRDriveService()
let signInButton = GIDSignInButton()
let output = UITextView()
override func viewDidLoad() {
super.viewDidLoad()
// Configure Google Sign-in.
GIDSignIn.sharedInstance().delegate = self
GIDSignIn.sharedInstance().uiDelegate = self
GIDSignIn.sharedInstance().scopes = scopes
GIDSignIn.sharedInstance().signInSilently()
signInButton.frame = CGRect(x: view.frame.width/2 - signInButton.frame.width , y: view.frame.height/2, width: signInButton.frame.width, height: signInButton.frame.height)
// Add the sign-in button.
view.addSubview(signInButton)
// Add a UITextView to display output.
output.frame = view.bounds
output.isEditable = false
output.contentInset = UIEdgeInsets(top: 20, left: 0, bottom: 20, right: 0)
output.autoresizingMask = [.flexibleHeight, .flexibleWidth]
output.isHidden = true
view.addSubview(output);
//let itsASetup()
}
func sign(_ signIn: GIDSignIn!, didSignInFor user: GIDGoogleUser!,
withError error: Error!) {
if let error = error {
showAlert(title: "Authentication Error", message: error.localizedDescription)
self.service.authorizer = nil
} else {
self.signInButton.isHidden = true
self.output.isHidden = false
self.service.authorizer = user.authentication.fetcherAuthorizer()
listFiles()
}
}
// List up to 10 files in Drive
func listFiles() {
let query = GTLRDriveQuery_FilesList.query()
query.pageSize = 10
service.executeQuery(query,
delegate: self,
didFinish: #selector(displayResultWithTicket(ticket:finishedWithObject:error:))
)
}
// Process the response and display output
@objc func displayResultWithTicket(ticket: GTLRServiceTicket,
finishedWithObject result : GTLRDrive_FileList,
error : NSError?) {
if let error = error {
showAlert(title: "Error", message: error.localizedDescription)
return
}
var text = "";
if let files = result.files, !files.isEmpty {
text += "Files:\n"
for file in files {
text += "\(file.name!) (\(file.identifier!))\n"
}
} else {
text += "No files found."
}
output.text = text
let setUpUser = userSetUp()
setUpUser.initSetup(service)
}
// Helper for showing an alert
func showAlert(title : String, message: String) {
let alert = UIAlertController(
title: title,
message: message,
preferredStyle: UIAlertControllerStyle.alert
)
let ok = UIAlertAction(
title: "OK",
style: UIAlertActionStyle.default,
handler: nil
)
alert.addAction(ok)
present(alert, animated: true, completion: nil)
}
}
Try changing your scope like this:
class ViewController: UIViewController, GIDSignInDelegate, GIDSignInUIDelegate
{
// If modifying these scopes, delete your previously saved credentials by
private let scopes = ["https://www.googleapis.com/auth/drive"]
...
}
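As a side note (not part of the answer above), GTLRDriveService.h also defines constants for these scope URLs, and after changing scopes the user has to grant consent again, so signing out once forces the new permission screen to appear:

// kGTLRAuthScopeDriveFile grants per-file access to files the app creates,
// which is enough for uploads; kGTLRAuthScopeDrive is the full-drive scope.
private let scopes = [kGTLRAuthScopeDriveFile]

// After changing scopes, sign out once so the next sign-in asks for the new permissions.
GIDSignIn.sharedInstance().signOut()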

Unable to record again after stopping record in Kurento

I am working on a Kurento application, and there is a strange problem I am facing: once I start recording the video and then stop the recording, I can't start another recording again. The event goes to the server but nothing seems to happen. Please find the code below:
room.pipeline.create('WebRtcEndpoint', function (error, outgoingMedia) {
if (error) {
console.error('no participant in room');
// no participants in room yet release pipeline
if (Object.keys(room.participants).length == 0) {
room.pipeline.release();
}
return callback(error);
}
outgoingMedia.setMaxVideoRecvBandwidth(256);
userSession.outgoingMedia = outgoingMedia;
// add ice candidate the get sent before endpoint is established
var iceCandidateQueue = userSession.iceCandidateQueue[socket.id];
if (iceCandidateQueue) {
while (iceCandidateQueue.length) {
var message = iceCandidateQueue.shift();
console.error('user : ' + userSession.id + ' collect candidate for outgoing media');
userSession.outgoingMedia.addIceCandidate(message.candidate);
}
}
userSession.outgoingMedia.on('OnIceCandidate', function (event) {
console.log("generate outgoing candidate : " + userSession.id);
var candidate = kurento.register.complexTypes.IceCandidate(event.candidate);
userSession.sendMessage({
id: 'iceCandidate',
sessionId: userSession.id,
candidate: candidate
});
});
// notify other user that new user is joining
var usersInRoom = room.participants;
var data = {
id: 'newParticipantArrived',
new_user_id: userSession.id,
receiveVid: receiveVid
};
// notify existing user
for (var i in usersInRoom) {
usersInRoom[i].sendMessage(data);
}
var existingUserIds = [];
for (var i in room.participants) {
existingUserIds.push({id: usersInRoom[i].id, receiveVid: usersInRoom[i].receiveVid});
}
// send list of current user in the room to current participant
userSession.sendMessage({
id: 'existingParticipants',
data: existingUserIds,
roomName: room.name,
receiveVid: receiveVid
});
// register user to room
room.participants[userSession.id] = userSession;
var recorderParams = {
mediaProfile: 'WEBM',
uri: "file:///tmp/Room_"+room.name+"_file"+userSession.id +".webm"
};
//make recorder endpoint
room.pipeline.create('RecorderEndpoint', recorderParams, function(error, recorderEndpoint){
userSession.outgoingMedia.recorderEndpoint = recorderEndpoint;
outgoingMedia.connect(recorderEndpoint);
});
When I click the record button on the screen, the function called on the server is:
function startRecord(socket) {
console.log("in func");
var userSession = userRegistry.getById(socket.id);
if (!userSession) {
return;
}
var room = rooms[userSession.roomName];
if(!room){
return;
}
var usersInRoom = room.participants;
var data = {
id: 'startRecording'
};
for (var i in usersInRoom) {
console.log("in loop");
var user = usersInRoom[i];
// release viewer from this
user.outgoingMedia.recorderEndpoint.record();
// notify all user in the room
user.sendMessage(data);
console.log(user.id);
}}
The thing is, the very first time it records properly, i.e. the file is created on the server and video & audio are recorded properly.
When I press stop to stop recording, the intended effect is seen, i.e. the recording stops.
Now, when I press record again, no video file is created, even though the event reaches the server properly (console.log says so).
Can anyone help me, please?
Thanks.

CMTimeGetSeconds crashes app

I use AVPlayer to play a video link and add an observer to update a progress bar. But with videos longer than 10 seconds, my app crashes. Below is my code.
Play video button pressed:
@IBAction func playVideoButtonPressed(sender: AnyObject) {
if let contentUrl = curVideoModel?.video_Content_Url{
player = AVPlayer(URL: NSURL(string: contentUrl))
player?.actionAtItemEnd = AVPlayerActionAtItemEnd.None
playerLayer = AVPlayerLayer(player: player)
playerLayer!.frame = CGRectMake(0, 0, self.imvVideoThumbnail.frame.size.width, self.imvVideoThumbnail.frame.size.height)
playerLayer!.backgroundColor = UIColor.blackColor().CGColor
playerLayer!.videoGravity = AVLayerVideoGravityResizeAspectFill
self.playvideoHolderView.layer.addSublayer(playerLayer)
NSNotificationCenter.defaultCenter().addObserver(self, selector: "itemDidFinishPlaying:", name: AVPlayerItemDidPlayToEndTimeNotification, object: player?.currentItem)
player?.addPeriodicTimeObserverForInterval(CMTimeMakeWithSeconds(1.0/60.0, Int32(NSEC_PER_SEC)), queue: nil, usingBlock: { (time) -> Void in
self.updateProgressbar()
})
player!.play()
}
}
The callback function when the video finishes playing:
NSNotificationCenter.defaultCenter().removeObserver(self, name: AVPlayerItemDidPlayToEndTimeNotification, object: player?.currentItem)
self.progressBar.progress = 0
if notification.name == AVPlayerItemDidPlayToEndTimeNotification{
if player != nil{
player?.pause()
}
if playerLayer != nil{
playerLayer!.removeFromSuperlayer()
}
}
And the function to update the progress bar:
var duration: Double
var time: Double
if player != nil{
duration = CMTimeGetSeconds(player!.currentItem.duration)
time = CMTimeGetSeconds(player!.currentTime())
} else{
duration = 0
time = 0
}
self.lblTotalTime.text = "\(Int(duration))"
self.lblCurrentTime.text = "\(Int(time))"
self.progressBar.progress = Float(time / duration)
P.S.: It crashes on a real device (iPod touch, iOS 8.3) but plays OK on the simulator.
EDIT: Maybe Peter's comment is right. I now check if player!.currentItem.duration.value > 0 before calling duration = CMTimeGetSeconds(player!.currentItem.duration). Then the crash is fixed.
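A minimal guarded version of the progress update, following the check from the edit above (written in the same Swift 2-era style as the rest of the question):

func updateProgressbar() {
    guard let item = player?.currentItem else { return }
    let itemDuration = item.duration
    // CMTimeGetSeconds returns NaN for an indefinite duration (common while the
    // item is still loading); converting NaN to Int traps, which matches the crash.
    guard itemDuration.value > 0 && itemDuration.timescale > 0 else { return }
    let duration = CMTimeGetSeconds(itemDuration)
    let time = CMTimeGetSeconds(item.currentTime())
    self.lblTotalTime.text = "\(Int(duration))"
    self.lblCurrentTime.text = "\(Int(time))"
    self.progressBar.progress = Float(time / duration)
}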