Switch from Built-in Speaker to Bluetooth Speaker - objective-c

I just found some code that switches the output between the built-in speaker and a connected Bluetooth speaker:
changeResult = [[AVAudioSession sharedInstance] overrideOutputAudioPort:AVAudioSessionPortOverrideSpeaker
error:&audioError];
But this is in Obj-C. Would anyone be kind enough to convert it to Swift?
Thanks!
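For reference, here is a direct Swift translation of that call (a sketch in the same Swift 2 syntax as the code below, untested):
// Swift 2 equivalent of the Obj-C overrideOutputAudioPort call above.
do {
    try AVAudioSession.sharedInstance().overrideOutputAudioPort(.Speaker)
} catch let audioError as NSError {
    print("Could not override output port: \(audioError)")
}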
UPDATE: Here's the code I am using to record a video, and here's the scenario: I am playing music while a Bluetooth speaker is connected, so the sound comes out of the external speaker. When I tap the record button, the sound output switches to the built-in speaker. That is my problem; the output should always stay on the external speaker.
@IBAction func record_video(sender: AnyObject) {
var initialOutputURL = NSURL(fileURLWithPath: "")
do
{
initialOutputURL = try NSFileManager.defaultManager().URLForDirectory(.DocumentDirectory, inDomain: .UserDomainMask, appropriateForURL: nil, create: true).URLByAppendingPathComponent("output").URLByAppendingPathExtension("mov")
}catch
{
print(error)
}
if !isRecording
{
isRecording = true
if let outputs = captureSession.outputs as? [AVCaptureOutput] {
for output in outputs {
captureSession.removeOutput(output)
}
}
do
{
try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord, withOptions: [AVAudioSessionCategoryOptions.MixWithOthers, AVAudioSessionCategoryOptions.AllowBluetooth])
//print(audioSession.setOutputDataSource)
}
catch
{
print("Can't Set Audio Session Category: \(error)")
}
do
{
try audioSession.setMode(AVAudioSessionModeVideoRecording)
}
catch
{
print("Can't Set Audio Session Mode: \(error)")
}
// Start Session
do
{
try audioSession.setActive(true)
}
catch
{
print("Can't Start Audio Session: \(error)")
}
UIView.animateWithDuration(0.5, delay: 0.0, options: [.Repeat, .Autoreverse, .AllowUserInteraction], animations: { () -> Void in
self.record.transform = CGAffineTransformMakeScale(0.75, 0.75)
}, completion: nil)
let audioInputDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
AVCaptureDevice.devicesWithMediaType(AVMediaTypeAudio)
do
{
if let availableInputs = audioSession.availableInputs {
for input in availableInputs {
if input.portType == AVAudioSessionPortBuiltInMic ||
input.portType == AVAudioSessionPortHeadsetMic {
inputs.append(input)
try audioSession.setPreferredInput(input)
}
}
}
let audioInput = try AVCaptureDeviceInput(device: audioInputDevice)
// Add Audio Input
if captureSession.canAddInput(audioInput)
{
captureSession.addInput(audioInput)
}
else
{
NSLog("Can't Add Audio Input")
}
/**
let videoInput: AVCaptureDeviceInput
do
{
videoInput = try AVCaptureDeviceInput(device: captureDevice)
// Add Video Input
if captureSession.canAddInput(videoInput)
{
captureSession.addInput(videoInput)
}
else
{
NSLog("ERROR: Can't add video input")
}
}
catch let error
{
NSLog("ERROR: Getting input device: \(error)")
}
*/
try AVAudioSession.sharedInstance().overrideOutputAudioPort(.Speaker)
videoFileOutput = AVCaptureMovieFileOutput()
captureSession.addOutput(videoFileOutput)
captureSession.sessionPreset = AVCaptureSessionPresetHigh
captureSession.automaticallyConfiguresApplicationAudioSession = false
videoFileOutput?.startRecordingToOutputFileURL(initialOutputURL, recordingDelegate: self)
}
catch let error
{
NSLog("Error Getting Input Device: \(error)")
}
}
else
{
isRecording = false
UIView.animateWithDuration(0.5, delay: 0, options: [], animations: { () -> Void in
self.record.transform = CGAffineTransformMakeScale(1.0, 1.0)
}, completion: nil)
record.layer.removeAllAnimations()
videoFileOutput?.stopRecording()
}
}
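Regarding the UPDATE: one thing to try (a minimal sketch in the same Swift 2 style as the code above, using the same audioSession property, not a verified fix) is to keep the .AllowBluetooth category option but override the output port with .None instead of .Speaker, so starting the recording session does not pull the route back to the built-in speaker:
// Sketch only: configure the session without redirecting output to the built-in speaker.
do {
    try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord,
        withOptions: [.MixWithOthers, .AllowBluetooth])
    try audioSession.setMode(AVAudioSessionModeVideoRecording)
    try audioSession.overrideOutputAudioPort(.None) // .None instead of .Speaker
    try audioSession.setActive(true)
} catch {
    print("Audio session setup failed: \(error)")
}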


SwiftUI Share variable Between Struct

import SwiftUI
struct ReserveView: View {
@State var searchT = ""
@State var isSearching = false
@State private var showCheckAlert = false
@Binding var roomnum: Int
@StateObject private var vm = ReserveViewModel(
service: ReserveService()
)
var body: some View {
VStack{
HStack{
TextField("Search", text:$searchT)
.padding(.leading, 30)
}
.padding()
.background(Color.gray.opacity(0.2))
.cornerRadius(6)
.padding(.horizontal)
.onTapGesture(perform: {
isSearching = true
})
.overlay(
HStack {
Image(systemName: "magnifyingglass")
Spacer()
}.padding(.horizontal,32)
.foregroundColor(.white)
)
if isSearching {
Button(action:{
isSearching = false
searchT = ""
UIApplication.shared.sendAction(#selector(UIResponder.resignFirstResponder), to: nil, from: nil, for:nil)
}, label: {
Text("Cancle")
.padding(.trailing)
.padding(.leading,0)
})
.transition(.move(edge: .trailing))
}
switch vm.state{
case .success(let data):
List{
ForEach((data).filter({"\($0)".contains(searchT)||searchT.isEmpty}),
id: \.roomnum){ item in
HStack{
Text("\(item.when) \(item.time) \(item.username)").foregroundColor(Color.black)
}
}
}
.padding(.bottom,15)
//.padding(.top,20)
case .loading:
ProgressView()
default:
EmptyView()
}
}
.task {
await vm.getReserves()
}
}
}
struct ReserveView_Previews: PreviewProvider {
static var previews: some View {
ReserveView(roomnum: .constant(0))
}
}
import Foundation
import SwiftUI
struct ReserveService {
enum ReserveListError: Error {
case failed
case failedToDecode
case invalidStatusCode
}
func fetchReserves() async throws -> [Reserve] {
let url = URL(string: "https://f6d3-119-203-102/roomreserveview?roomnum=\(here i want use variable)")!
let configuration = URLSessionConfiguration.ephemeral
print(url)
let (data, response) = try await URLSession(configuration: configuration).data(from: url)
guard let response = response as? HTTPURLResponse,
response.statusCode == 200 else{
throw ReserveListError.invalidStatusCode
}
let decodedData = try JSONDecoder().decode(ReserveServiceResult.self, from: data)
return decodedData.reserveInfo
}
}
import SwiftUI
import Foundation
@MainActor
class ReserveViewModel: ObservableObject {
enum State {
case na
case loading
case success(data: [Reserve])
case failed(error: Error)
}
@Published private(set) var state: State = .na
@Published var hasError: Bool = false
private let service: ReserveService
init(service: ReserveService) {
self.service = service
}
func getReserves() async {
self.state = .loading
self.hasError = false
do {
let reserves = try await service.fetchReserves()
self.state = .success(data: reserves)
}catch {
self.state = .failed(error: error)
self.hasError = true
print(String(describing: error))
}
}
}
Hello! I'd like to ask a SwiftUI question.
Based on the ReserveService file, I am implementing the part of ReserveView that lists and displays the desired data.
I want to complete the URL in the 'fetchReserves' function by passing the variable 'roomnum' from ReserveView to the ReserveService.
However, Binding does not seem to work because ReserveService is not a view model. Is there any way I can get this variable from the view model?
If you don't understand my explanation, please ask and I will clarify.
This is my first time asking a question, so please forgive me if something is missing.
It is possible to inject it as a function argument, like this:
func fetchReserves(_ roomnum: Int) async throws -> [Reserve] {
let url = URL(string:
"https://f6d3-119-203-102/roomreserveview?roomnum=\(roomnum)")!

CallKit + WebRTC: CallKit call is getting disconnected when pressing lock / Power button in iOS

It is a conferencing app, and I am initiating an outgoing CallKit call so that my VoIP call is treated as high priority and isn't interrupted by an incoming call while I am on the VoIP call.
I am using WebRTC + CallKit in my app.
I start a call, and when I press the Lock/Power button, the CallKit call gets disconnected and my VoIP call's audio route changes to the receiver and stays there.
Why does locking the iPhone terminate the call?
Here is my code:
var callUUID: UUID?
extension AppDelegate {
func initiateCallKitCall() {
let config = CXProviderConfiguration(localizedName: "AppName")
config.includesCallsInRecents = false;
config.supportsVideo = true;
config.maximumCallsPerCallGroup = 1
provider = CXProvider(configuration: config)
guard let provider = provider else { return }
provider.setDelegate(self, queue: nil)
callController = CXCallController()
guard let callController = callController else { return }
callUUID = UUID()
let transaction = CXTransaction(action: CXStartCallAction(call: callUUID!, handle: CXHandle(type: .generic, value: "AppName")))
callController.request(transaction, completion: { error in
print("Error is : \(String(describing: error))")
})
}
func endCallKitCall(userEnded: Bool) {
self.userEnded = userEnded
guard provider != nil else { return }
guard let callController = callController else { return }
if let uuid = callUUID {
let endCallAction = CXEndCallAction(call: uuid)
callController.request(
CXTransaction(action: endCallAction),
completion: { error in
if let error = error {
print("Error: \(error)")
} else {
print("Success")
}
})
}
}
func isCallGoing() -> Bool {
let callController = CXCallController()
if callController.callObserver.calls.count != 0 {
return true
}
return false
}
}
extension AppDelegate: CXProviderDelegate {
func providerDidReset(_ provider: CXProvider) {
print("-Provider-providerDidReset")
}
func provider(_ provider: CXProvider, perform action: CXAnswerCallAction) {
print("-Provider-perform action: CXAnswerCallAction")
action.fulfill()
}
func provider(_ provider: CXProvider, perform action: CXEndCallAction) {
action.fulfill()
print("-Provider: End Call")
}
func provider(_ provider: CXProvider, perform action: CXStartCallAction) {
action.fulfill()
DispatchQueue.main.asyncAfter(wallDeadline: DispatchWallTime.now() + 3) {
provider.reportOutgoingCall(with: action.callUUID, startedConnectingAt: Date())
DispatchQueue.main.asyncAfter(wallDeadline: DispatchWallTime.now() + 1.5) {
provider.reportOutgoingCall(with: action.callUUID, connectedAt: Date())
}
}
}
func provider(_ provider: CXProvider, perform action: CXSetHeldCallAction) {
action.fulfill()
}
func provider(_ provider: CXProvider, didActivate audioSession: AVAudioSession) {
RTCAudioSession.sharedInstance().audioSessionDidActivate(audioSession)
RTCAudioSession.sharedInstance().isAudioEnabled = true
}
func provider(_ provider: CXProvider, didDeactivate audioSession: AVAudioSession) {
RTCAudioSession.sharedInstance().audioSessionDidDeactivate(audioSession)
RTCAudioSession.sharedInstance().isAudioEnabled = false
}
}
The power button ends a call if and only if the call is running through the built-in speaker on top of the screen (receiver). In any other case (i.e. the audio is playing through headphones, Bluetooth or built-in loudspeaker) the power button will not end the call.
The same is true with the native phone calls.
I would like to answer all issues related to CallKit here.
The answer to my question is:
You need to set the audio session mode to .default after your VoIP call is established successfully.
try AVAudioSession.sharedInstance().setMode(.default)
AudioRouteManager.shared.fourceRouteAudioToSpeakers()
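For context, a minimal sketch of where that could go (the hook name voipCallDidConnect is illustrative; AudioRouteManager is a custom class from my project, so the override call below is just the standard-API equivalent):

import AVFoundation

// Call this once the WebRTC connection reports that the call is established.
func voipCallDidConnect() {
    do {
        try AVAudioSession.sharedInstance().setMode(.default)
        try AVAudioSession.sharedInstance().overrideOutputAudioPort(.speaker)
    } catch {
        print("Failed to update audio session after connect: \(error)")
    }
}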

Download APNG File

I am running into an issue with APNG files. The APNG animation works perfectly if I put the APNG files in the resource bundle, but when I download the same APNG file from the assets server, save it into the documents directory, and then load it with MSSticker as shown below, only the first frame is shown. If anyone wants to check the APNG file, please have a look at this.
let imagePath = Bundle.main.path(forResource: imgName, ofType: ".png")
let pathurl = URL(fileURLWithPath: imagePath!)
do {
try cell.stickerview.sticker = MSSticker(contentsOfFileURL: pathurl, localizedDescription: "anything that you want")
}
catch {
fatalError("Failed to create sticker: \(error)")
}
Here I am saving the image and getting the saved image URL from the documents directory:
static func saveImage(image: UIImage , name:String) -> Bool? {
guard let data = UIImagePNGRepresentation(image) else {
return false
}
guard let directory = try? FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: false) as NSURL else {
return false
}
do {
try data.write(to: directory.appendingPathComponent(name)!)
return true
} catch {
print(error.localizedDescription)
return false
}
}
static func getSavedImageUrl(named: String) -> URL? {
if let dir = try? FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: false) {
return URL(fileURLWithPath: dir.absoluteString).appendingPathComponent(named)
}
return nil
}
I have written this extension on MSStickerView:
extension MSStickerView {
func downloadedFrom(url: URL , name: String) {
URLSession.shared.dataTask(with: url) { (data, response, error) in
guard let httpURLResponse = response as? HTTPURLResponse, httpURLResponse.statusCode == 200,
let mimeType = response?.mimeType, mimeType.hasPrefix("image"),
let data = data, error == nil,
let image = UIImage(data: data)
else { return }
DispatchQueue.main.async() { () -> Void in
// self.sticker = image
_ = GameUtil.saveImage(image: image, name: name)
if let pathurl = GameUtil.getSavedImageUrl(named: name) {
do {
try self.sticker = MSSticker(contentsOfFileURL: pathurl, localizedDescription: "Raid")
}
catch {
fatalError("Failed to create sticker: \(error)")
}
}
self.startAnimating()
}
}.resume()
}
func downloadedFrom(link: String , name: String) {
guard let url = URL(string: link) else { return }
downloadedFrom(url: url ,name: name)
}
}
I think the problem is UIImagePNGRepresentation. Why convert the downloaded Data to a UIImage and then re-encode it with UIImagePNGRepresentation? That re-encoding keeps only the first frame of an APNG.
Try saving the data directly.
static func saveData(data: Data , name:String) -> Bool? {
guard let directory = try? FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: false) as NSURL else {
return false
}
do {
try data.write(to: directory.appendingPathComponent(name)!)
return true
} catch {
print(error.localizedDescription)
return false
}
}
And ignore the image; just pass the data:
_ = GameUtil.saveData(data: data, name: name)
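A sketch of how the download closure above changes under this approach (same names as before, untested); the UIImage check stays only as a sanity check, and the raw response data is written so the APNG frames survive:

URLSession.shared.dataTask(with: url) { (data, response, error) in
    guard let httpURLResponse = response as? HTTPURLResponse, httpURLResponse.statusCode == 200,
          let data = data, error == nil,
          UIImage(data: data) != nil // validate only; do not re-encode
    else { return }
    DispatchQueue.main.async {
        _ = GameUtil.saveData(data: data, name: name)
        if let pathurl = GameUtil.getSavedImageUrl(named: name),
           let sticker = try? MSSticker(contentsOfFileURL: pathurl, localizedDescription: "Raid") {
            self.sticker = sticker
            self.startAnimating()
        }
    }
}.resume()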

No Sim Card Installed Pop up showing twice in calling feature

I am using the calling feature in my app. The problem is that if no SIM card is installed in the device, the "No SIM card installed" alert view shows twice. I am using this code:
if ([[UIApplication sharedApplication] canOpenURL:[NSURL URLWithString:phoneNumber]])
{
[[UIApplication sharedApplication] openURL:[NSURL URLWithString:phoneNumber]];
}
Note: the first alert view automatically hides and then the second one appears.
I finally found an alternative solution for this.
Actually this is not an issue; it is a transition effect.
To resolve it, I integrated the code below before invoking the calling feature:
@import CoreTelephony;
CTTelephonyNetworkInfo *networkInfo = [CTTelephonyNetworkInfo new];
CTCarrier *carrier = [networkInfo subscriberCellularProvider];
if (!carrier.isoCountryCode) {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"No SIM Card Installed" message:nil delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil, nil];
[alert show];
}
else{
[[UIApplication sharedApplication] openURL:[NSURL URLWithString:phoneNumber]];
}
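For reference, a rough Swift equivalent of the same check (a sketch, assuming phoneNumber is a "tel://..." string; subscriberCellularProvider is deprecated on recent iOS but matches the Obj-C above):

import CoreTelephony
import UIKit

func dialIfSIMPresent(_ phoneNumber: String) {
    let carrier = CTTelephonyNetworkInfo().subscriberCellularProvider
    if carrier?.isoCountryCode == nil {
        // No SIM: show a single alert of our own instead of the system's double alert.
        let alert = UIAlertController(title: "No SIM Card Installed", message: nil, preferredStyle: .alert)
        alert.addAction(UIAlertAction(title: "OK", style: .default))
        // Present `alert` from the current view controller here.
    } else if let url = URL(string: phoneNumber), UIApplication.shared.canOpenURL(url) {
        UIApplication.shared.open(url)
    }
}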
import UIKit
import Alamofire
import NVActivityIndicatorView
import Reachability
//pod 'NVActivityIndicatorView'
//pod 'Alamofire', '~> 4.0'
//pod 'ReachabilitySwift'
typealias CompletionHandler = (_ success:Bool,_ reponsedata:Data) -> Void
typealias CompletionHandlerJson = (_ success:Bool,_ reponsedata:NSMutableDictionary) -> Void
typealias ResponseHandler = (_ success:Bool,_ data:NSData, _ error : String) -> Void
typealias ConnectivityHandler = (_ success:Bool) -> Void
class Connectivity {
class func internetConnection(completionHandler: @escaping ConnectivityHandler) {
//var Status:Bool = false
let url = NSURL(string: "http://google.com/")
let request = NSMutableURLRequest(url: url! as URL)
request.httpMethod = "HEAD"
request.cachePolicy = NSURLRequest.CachePolicy.reloadIgnoringLocalAndRemoteCacheData
request.timeoutInterval = 1.0
let session = URLSession.shared
session.dataTask(with: request as URLRequest, completionHandler: {(data, response, error) in
if let httpResponse = response as? HTTPURLResponse, httpResponse.statusCode == 200 {
completionHandler(true)
} else {
completionHandler(false)
}
}).resume()
}
class func connetivityAvailable() ->Bool {
return NetworkReachabilityManager()!.isReachable
}
}
class WebserviceHelper: NSObject {
class func postWebServiceCall(urlString:String, parameters:[String:AnyObject], encodingType: String, ShowProgress:Bool, completionHandler: @escaping CompletionHandler){
if Connectivity.connetivityAvailable() == false {
log(message: "internet is not available.")
HUD.hide()
AppDelegate.showMessage(message:NO_INTERNET_AVAILABLE)
completionHandler(false,Data())
return
}
if ShowProgress {
HUD.show(.systemActivity)
}
log(message: "\(urlString): + parametersnew")
let encoding: ParameterEncoding!
if encodingType == DefaultEncoding{
encoding = JSONEncoding.default
}else{
encoding = URLEncoding.httpBody
}
Alamofire.request(urlString, method: HTTPMethod.post, parameters: parameters, encoding: encoding , headers: HelperClass.sharedInstance.getApiHeader()).responseData(completionHandler: { (response) in
if response.result.isSuccess {
if ShowProgress {
HUD.hide()
}
if let result = response.result.value {
completionHandler(true,result)
}
}
if response.result.isFailure {
if ShowProgress{
HUD.hide()
}
print("Lost Connection %#",urlString)
if let responseCode = response.response?.statusCode as Int? {
print("Lost Connection with code \(responseCode)", response.result.value as Any)
}
completionHandler(false,Data())
}
})
}
class func postWebServiceJson(urlString:String, parameters:[String:AnyObject], encodingType: String, ShowProgress:Bool, completionHandler: @escaping CompletionHandlerJson){
if Connectivity.connetivityAvailable() == false {
AppDelegate.showMessage(message:NO_INTERNET_AVAILABLE)
completionHandler(false,NSMutableDictionary())
return
}
if ShowProgress {
HUD.show(.systemActivity)
}
log(message: "\(urlString): + parametersnew")
let encoding: ParameterEncoding!
if encodingType == DefaultEncoding{
encoding = JSONEncoding.default
}else{
encoding = URLEncoding.httpBody
}
Alamofire.request(urlString, method: HTTPMethod.post, parameters: parameters, encoding: encoding , headers: HelperClass.sharedInstance.getApiHeader()).responseJSON { (response) in
if response.result.isSuccess {
if ShowProgress {
HUD.hide()
}
if let result = response.result.value as? NSDictionary {
completionHandler(true,NSMutableDictionary(dictionary: result))
}
}
if response.result.isFailure {
if ShowProgress{
HUD.hide()
}
print("Lost Connection %#",urlString)
if let responseCode = response.response?.statusCode as Int? {
print("Lost Connection with code \(responseCode)", response.result.value as Any)
let responseDic = NSMutableDictionary()
responseDic.setObject(responseCode, forKey: "statusCode" as NSCopying)
completionHandler(false,responseDic)
} else {
completionHandler(false,NSMutableDictionary())
}
}
}
}
class func WebapiGetMethod(strurl:String, ShowProgress: Bool = false, completionHandler: @escaping CompletionHandlerJson){
if Connectivity.connetivityAvailable() == false {
log(message:"internet is not available.")
completionHandler(false,NSMutableDictionary())
//.. AppDelegate .showMessage(message:"Please check your internet connection")
return
}
if ShowProgress{
HUD.show(.systemActivity)
}
Alamofire.request(strurl).responseJSON { response in
if let status = response.response?.statusCode {
switch(status){
case 200:
if let result = response.result.value {
if let JSONdict = result as? NSMutableDictionary{
DispatchQueue.main.async {
completionHandler(true,JSONdict)
}
}
if ShowProgress {
HUD.hide()
}
}else{
if ShowProgress {
HUD.hide()
}
completionHandler(false,NSMutableDictionary())
}
log(message:"example success")
default:
log(message:"error with get response status: \(status)")
DispatchQueue.main.async {
if ShowProgress {
HUD.hide()
}
}
completionHandler(false,NSMutableDictionary())
}
}
DispatchQueue.main.async {
if ShowProgress {
HUD.hide()
}
}
}
}
class func getWebServiceCall(urlString:String,ShowProgress:Bool, completionHandler: @escaping CompletionHandler){
if Connectivity.connetivityAvailable() == false {
log(message: "internet is not available.")
HUD.hide()
AppDelegate.showMessage(message:NO_INTERNET_AVAILABLE)
completionHandler(false,Data())
return
}
if ShowProgress {
HUD.show(.systemActivity)
}
log(message: "\(urlString): + parametersnew")
Alamofire.request(urlString, method: .get, parameters: nil, encoding: URLEncoding.methodDependent, headers: HelperClass.sharedInstance.getApiHeader()).responseData { (response) in
if response.result.isSuccess {
if ShowProgress {
HUD.hide()
}
if let result = response.result.value {
completionHandler(true,result)
}
}
if response.result.isFailure {
if ShowProgress{
HUD.hide()
}
print("Lost Connection %#",urlString)
if let responseCode = response.response?.statusCode as Int? {
print("Lost Connection with code \(responseCode)", response.result.value as Any)
}
completionHandler(false,Data())
}
}
}
}

Using back Camera instead of front camera

How can I force the capture manager to use the back camera, since I can't use CameraCaptureUI in Windows Phone 8.1?
The app launches with the default front camera.
This is my code:
function initCaptureSettings() {
captureInitSettings = null;
captureInitSettings =
new Windows.Media.Capture.MediaCaptureInitializationSettings();
captureInitSettings.photoCaptureSource = Windows.Media.Capture.PhotoCaptureSource.photo;
captureInitSettings.streamingCaptureMode =
Windows.Media.Capture.StreamingCaptureMode.audioAndVideo;
startDevice();
}
function startDevice() {
Debug.writeln("Starting device");
releaseMediaCapture();
mediaCaptureMgr = new Windows.Media.Capture.MediaCapture();
mediaCaptureMgr.initializeAsync(captureInitSettings).done(function (result) {
// do we have a camera and a microphone present?
if (mediaCaptureMgr.mediaCaptureSettings.videoDeviceId && mediaCaptureMgr.mediaCaptureSettings.audioDeviceId) {
// Update the UI
Debug.writeln("Device started");
} else {
Debug.writeln("No capture device was found");
}
startPreview();
}, errorHandler);
}
function startPreview() {
Debug.writeln("Starting preview");
try {
var video = id("photoCapture");
//Windows.Media.Capture.CameraOptionsUI.show(mediaCaptureMgr);
video.src = URL.createObjectURL(mediaCaptureMgr, { oneTimeOnly: true });
video.play();
} catch (e) {
Debug.writeln("Preview failed: " + e.message);
return;
}
Debug.writeln("Preview started");
intervalID = setInterval(function () {
capturePhoto();
}, 1500);
}
I suppose it is something in the settings.
Try this: enumerate the video capture devices and pass the desired camera's Id as the VideoDeviceId (the code below assumes the back camera is the second device in the list):
var devices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
await mediaCaptureMgr.InitializeAsync(new MediaCaptureInitializationSettings
{
VideoDeviceId = devices[1].Id
});