WebRTC mobile cameras not switching - webrtc

I'm building a live-streaming app and I want to be able to switch cameras on smartphones. I followed this article from MDN but couldn't get the expected results.
These are the results I'm getting:
Chrome mobile: the camera switches from front to back, but not from back to front.
Firefox mobile: the stream starts with the rear camera (which is not expected) and never switches to the front camera.
I'm using RTCMultiConnection and RecordRTC for this app.
Code:
function switchCamera(stream) {
    // Replace the outgoing track and update the local preview
    connection.replaceTrack(stream);
    video.pause();
    video.srcObject = stream;
    video.play();
}

var front = true;

$('#switch').on('click', function (e) {
    e.preventDefault();
    e.stopImmediatePropagation();
    front = !front;

    var constraints = { video: { facingMode: front ? "user" : "environment" } };
    var internalRecorder = recorder.getInternalRecorder();

    // Stop the current video tracks before requesting the other camera
    connection.streamEvents.selectFirst({ local: true }).stream.getVideoTracks().forEach(function (track) {
        track.stop();
    });

    navigator.mediaDevices.getUserMedia(constraints).then(function (stream) {
        if (internalRecorder instanceof MultiStreamRecorder) {
            internalRecorder.resetVideoStreams(stream);
        }
        switchCamera(stream);
    });
});
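One thing that may be worth checking: on some mobile browsers a plain facingMode value is treated as a preference rather than a requirement, so the browser can keep handing back the same camera. A minimal sketch of requesting the camera with an exact constraint and falling back if it is rejected; the constraint shape is standard getUserMedia, the fallback behaviour is my assumption about how you might want to handle failure:
function getCamera(front) {
    // { exact: ... } makes the request fail instead of silently
    // falling back to whichever camera the browser prefers.
    var constraints = { video: { facingMode: { exact: front ? "user" : "environment" } } };
    return navigator.mediaDevices.getUserMedia(constraints)
        .catch(function (err) {
            // Fallback: retry with a non-exact constraint (assumed behaviour)
            console.warn("Exact facingMode failed, retrying loosely:", err);
            return navigator.mediaDevices.getUserMedia({
                video: { facingMode: front ? "user" : "environment" }
            });
        });
}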

Related

MediaStream Recording API: ondataavailable is not triggered

I am trying to record media using the MediaRecorder API. Here is my code (just the relevant part). I expect to get a console log in saveChunks, but it appears that the ondataavailable event is never triggered. I am able to see the video in the video element.
var recordedChunks = [];

navigator.mediaDevices.getUserMedia({ video: true, audio: true })
    .then(function (stream) {
        myVideoMedia = document.getElementById("vid1");
        myVideoMedia.srcObject = stream;
        myVideoMedia.onloadedmetadata = function (e) {
            myVideoMedia.play();
            mediaRecorder = new MediaRecorder(stream);
            mediaRecorder.ondataavailable = saveChunks;
            mediaRecorder.start();
            console.log(mediaRecorder);
        };
    });

function saveChunks(event) {
    console.log("Data recorded...");
    //...
}
The console log of mediaRecorder.state is 'recording'.
I did try passing a timeslice of 1000 to start() and it's working now! If no timeslice is passed, the handler is only called once, at the end of the recording.
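As a quick illustration of that fix, here is a minimal sketch; the 1000 ms timeslice and the chunk-collecting logic are just one way to wire it up:
mediaRecorder = new MediaRecorder(stream);

mediaRecorder.ondataavailable = function (event) {
    // With a timeslice, this fires roughly every 1000 ms;
    // without one, it fires only once, when the recorder stops.
    if (event.data && event.data.size > 0) {
        recordedChunks.push(event.data);
    }
};

mediaRecorder.start(1000); // emit a Blob every ~1 second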

Why can't I display the remote stream?

I want to make Alice in navig.html able to call Seb in index.html with a live video stream.
But in index.html I am not able to display Alice's remote live stream; the video player displays nothing. Why?
This is Alice; she has an offer (navig.html):
<video id="video1" controls ></video>
<script>
navigator.getUserMedia({audio:true, video:true}, success, error);
function success(stream) {
var video1 = document.querySelector("#video1");
video1.src = URL.createObjectURL(stream)
video1.play()
//rtcpeer
console.log("1")
var pc1 = new RTCPeerConnection()
pc1.addStream(stream)
pc1.createOffer().then(function(desc) {
pc1.setLocalDescription(desc)
console.log("" + JSON.stringify(desc))
})
}
function error(err) {
console.log(err)
}
</script>
This is Seb; he wants to receive the live stream from Alice using her offer (index.html):
<video id="video1" controls ></video>
<textarea></textarea>
<p onclick="finir()">Fini</p>
<script>
function finir() {
navigator.getUserMedia({audio:true, video:true}, success, error);
}
function success(stream) {
var champ = document.querySelector("textarea").value
var texto = JSON.parse(champ)
console.log(texto)
var vid2 = document.querySelector("#video1");
var pc2 = new RTCPeerConnection()
pc2.setRemoteDescription(texto)
pc2.createAnswer()
pc2.onaddstream = function(e) {
vid2.src = URL.createObjectURL(e.stream);
vid2.play()
}
}
function error(err) {
console.log(err)
}
</script>
Thanks for helping
Oh, if only it were that simple ;).
With WebRTC you also need some kind of signaling protocol to exchange offer/answer between the parties. Please check one of the many WebRTC samples available.
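To make the missing steps concrete, here is a rough sketch of a complete offer/answer flow. How the strings travel between the two pages (WebSocket, copy/paste, etc.) is up to you; pc, sendToOtherPeer and the received* variables are illustrative names, not a specific library:
// --- Caller (Alice) ---
var pc = new RTCPeerConnection();
pc.onicecandidate = function (e) {
    if (e.candidate) sendToOtherPeer({ candidate: e.candidate }); // hypothetical transport
};
pc.addStream(localStream);
pc.createOffer()
    .then(function (offer) { return pc.setLocalDescription(offer); })
    .then(function () { sendToOtherPeer({ sdp: pc.localDescription }); });

// --- Callee (Seb), on receiving the offer ---
pc.setRemoteDescription(new RTCSessionDescription(receivedOffer))
    .then(function () { return pc.createAnswer(); })
    .then(function (answer) { return pc.setLocalDescription(answer); })
    .then(function () { sendToOtherPeer({ sdp: pc.localDescription }); });

// --- Caller, on receiving the answer ---
pc.setRemoteDescription(new RTCSessionDescription(receivedAnswer));

// Both sides: add every candidate the other peer sends
pc.addIceCandidate(new RTCIceCandidate(receivedCandidate));
In the code above, Seb never sends his answer back to Alice and neither side exchanges ICE candidates, which is why the remote video stays empty.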

How to add an audio stream to a canvas stream in WebRTC

I want to add an audio stream obtained via getUserMedia() to the canvas stream and send it to the remote peer.
Looking it up, I found addTrack().
But addTrack() does not seem to work properly. I am running on Chrome.
var audioTracks;

navigator.getUserMedia({ "audio": true, "video": false }, function (stream) {
    audioTracks = stream.getAudioTracks()[0];
}, function (error) { console.log(error); });
.
.
.
var sharestream = canvas2.captureStream(25); // 25 FPS
peerConn.addTrack(audioTracks, sharestream);
peerConn.addStream(sharestream);
This is part of my code. What's wrong?
My full WebRTC source code works fine, but it breaks once I use addTrack().
Updated on 10-29-2018 to replace getAudioTracks with getTracks:
var canvasStream = canvas2d.captureStream(25); // parameter (FPS) is optional

// get the first audio track
// var audioTrack = audioStream.getAudioTracks()[0];
var audioTrack = audioStream.getTracks().filter(function (track) {
    return track.kind === 'audio';
})[0];

// append the audio track to the Canvas2D stream
canvasStream.addTrack(audioTrack);

// now the canvas stream has both audio and video tracks
// peerConnection.addStream( canvasStream );
canvasStream.getTracks().forEach(function (track) {
    peerConnection.addTrack(track, canvasStream);
});

// create offer or answer descriptions
peerConnection.createOffer(hints).then(success).catch(failure);
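One more thing worth checking in the original snippet: getUserMedia is asynchronous, so audioTracks may still be undefined when addTrack() runs. A small sketch of doing everything inside the callback (promise-based here; the variable names mirror the question and are otherwise arbitrary):
var canvasStream = canvas2.captureStream(25);

navigator.mediaDevices.getUserMedia({ audio: true, video: false })
    .then(function (audioStream) {
        // Only add the audio track once the microphone stream actually exists
        canvasStream.addTrack(audioStream.getAudioTracks()[0]);
        canvasStream.getTracks().forEach(function (track) {
            peerConn.addTrack(track, canvasStream);
        });
        // ...create the offer/answer only after the tracks are attached
    })
    .catch(function (err) { console.log(err); });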

Web Audio API and a multi-input mic device

I have an audio device with 4 microphone inputs.
Does anyone know if I can use all these inputs with the Web Audio API?
Thanks!
It should work by calling getUserMedia four times, choosing a different device each time, and using createMediaStreamSource four times, although I haven't tested it.
Yes, you can list all input devices and select one to use.
<html><body>
<select id="devices_list"></select>
<script>
function devices_list() {
    var handleMediaSourcesList = function (list) {
        for (var i = 0; i < list.length; i++) {
            var device = list[i];
            if (device.kind == 'audioinput') {
                document.querySelector('#devices_list').options.add(new Option(device.label, device.deviceId));
            }
        }
    };
    if (navigator["mediaDevices"] && navigator["mediaDevices"]["enumerateDevices"]) {
        navigator["mediaDevices"]["enumerateDevices"]().then(handleMediaSourcesList);
    }
    // Old style API
    else if (window["MediaStreamTrack"] && window["MediaStreamTrack"]["getSources"]) {
        window["MediaStreamTrack"]["getSources"](handleMediaSourcesList);
    }
}

function usemic() {
    navigator.getUserMedia({
        "audio": {
            "optional": [{
                "sourceId": document.querySelector('#devices_list').value
            }]
        }
    }, function (stream) {
        //...some code to use the stream from the mic
    }, function (err) {
        debuginfo('getMedia ERR:' + err.message);
    });
}
</script>
</body></html>
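As a rough modern equivalent of the two answers above, combining standard enumerateDevices / deviceId constraints with Web Audio's createMediaStreamSource; opening all inputs in parallel is an assumption here, not something tested against this particular device:
var audioCtx = new (window.AudioContext || window.webkitAudioContext)();

navigator.mediaDevices.enumerateDevices().then(function (devices) {
    var mics = devices.filter(function (d) { return d.kind === 'audioinput'; });

    // Open each microphone input and feed it into the Web Audio graph
    mics.forEach(function (mic) {
        navigator.mediaDevices.getUserMedia({
            audio: { deviceId: { exact: mic.deviceId } }
        }).then(function (stream) {
            var source = audioCtx.createMediaStreamSource(stream);
            source.connect(audioCtx.destination); // or into your own processing nodes
        });
    });
});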

PhoneGap get camera on BlackBerry not working; also place image inline and save to library

Get camera on BlackBerry and save to library - advice please.
I am using the following code to get pictures from the camera and place them in the app, triggered with onclick="takePicture();".
However, it is not working on BlackBerry (the camera does not open).
I also want to save the images to the user's photo library.
I have found a number of different ways to do this; is the code below any good, or is there a better method?
function takePicture() {
    navigator.camera.getPicture(
        function (uri) {
            var img = document.getElementById('camera_image');
            img.style.visibility = "visible";
            img.style.display = "block";
            img.src = uri;
            document.getElementById('camera_status').innerHTML = "Success";
        },
        function (e) {
            console.log("Error getting picture: " + e);
            document.getElementById('camera_status').innerHTML = "Error getting picture.";
        },
        { quality: 50, destinationType: navigator.camera.DestinationType.FILE_URI });
}