Add video track while navigator.getUserMedia in running - webrtc

I'm trying to use navigator.getUserMedia() api for capturing the audio/video. I've following code.
// Capture camera + microphone and preview the result in the #selfCam element.
var cameraEl;
window.URL = window.URL || window.webkitURL;
navigator.getUserMedia = (navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia);

// Success: attach the captured stream to the <video> element and keep a
// global handle so the track can be stopped later.
function onCaptureSuccess(stream) {
cameraEl = document.getElementById("selfCam");
cameraEl.src = window.URL.createObjectURL(stream);
window.mediaStream = stream;
}

// Failure: surface the error to the user.
function onCaptureError() {
alert('Error: Camera failed!');
}

navigator.getUserMedia({ video: true, audio: true }, onCaptureSuccess, onCaptureError);
Then I stop the video track like this:
// Permanently end the first video track of the captured stream; stop() is
// final (see the answer below) — the track cannot be restarted afterwards.
window.mediaStream.getVideoTracks()[0].stop();
Now I want to start the video track again. How do I start it?

You need to call getUserMedia again. stop() is a final action.

Related

WebRTC onaddstream: remote stream is received, but the video does not show?

I met with difficulties.
// Handler for the remote peer's stream: attach it to the remote <video>
// element and start playback once metadata is available.
var onaddstream = function(event){
var video = $("#chat_dialogForOne video[name='remote']")[0];
var remoteStream = event.stream;
video.srcObject = remoteStream;
video.onloadedmetadata = function(e) {
video.play();
};
}
// Toggle button that opens/closes the local video chat session.
$("#chat_dialogForOne button[name='openVideo']").on("click",function(){
$(this).toggleClass("active");
$(this).data("use",$(this).data("use") ? false : true);
if($(this).data("use")){// start video/audio chat
rtc.openVideoAudioLocal(function(localStream){// create the local video stream and bind it to the element
var video = $("#chat_dialogForOne video[name='video']")[0]; // the element that displays the local video
video.srcObject=localStream;
video.onloadedmetadata = function(e) {
video.play();
};
// NOTE(review): a second capture is opened here and sent to the peer;
// presumably this stream (video+audio) is the one shared remotely — verify.
rtc.openVideoAudioLocal(function(remoteStream){
rtc.sendAddStream(remoteStream);
},true,true);
},true,false);// video only, no audio, so the user does not hear their own voice
$(this).find(" > span").html("结束视频");
$("#chat_dialogForOne button[name='openAudio']").hide();
}else{// close video/audio chat
// closeRemoteChannelStream([oneWebRtc]);
// closeLocalStream();
// resetVideoButton();
}
});
Image 2: the local stream displays successfully.
Image 1: the remote stream fails to display.
The onaddstream handler receives the remote stream, but it is never shown in the video element.
I need your help.
Sorry, it was a mistake of mine. I made a very slight mistake, which resulted in a new PC object being recreated after receiving the offer. Although the remote received the video stream object, the channel had been replaced.

Why does the node-lame module save the mp3 file with a high-pitched/fast playback speed?

I am trying to record audio from the browser and stream it as raw audio (PCM) to my node server where I want to save to it in .mp3 format. I am using node-lame module on my server for creating an mp3 file from the PCM audio stream. The problem here is that the mp3 file is always high pitched and playing at a fast speed. I have tried both sending the data from the browser(client side) as int16 and as float32 and setting the appropriate lame.Encoder for them :
// Variant 1 — float32 PCM: 32-bit float samples, stereo.
stream.pipe(new lame.Encoder({channels:2, bitDepth: 32, float:true,})) //float32
.pipe(fs.createWriteStream(path.resolve(__dirname, 'demo.mp3')))
// Variant 2 — int16 PCM: 16-bit integer samples, stereo, 44.1 kHz.
// NOTE(review): the browser typically captures at 48000 Hz (see the answer
// below); an encoder rate that differs from the capture rate changes
// pitch/speed — confirm the source AudioContext's sampleRate.
stream.pipe(new lame.Encoder({channels:2, bitDepth: 16, sampleRate:44100,})) // int16
.pipe(fs.createWriteStream(path.resolve(__dirname, 'demo.mp3')))
Here is the code for the client side
(function(window) {
// Streams raw PCM from the microphone to the server over a BinaryJS websocket.
var client = new BinaryClient('ws://localhost:9001');
client.on('open', function() {
window.Stream = client.createStream();
if (!navigator.getUserMedia)
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia || navigator.msGetUserMedia;
if (navigator.getUserMedia) {
navigator.getUserMedia({audio:true}, success, function(e) {
alert('Error capturing audio.');
});
} else alert('getUserMedia not supported in this browser.');
var recording = false;
// Flags read by the onaudioprocess callback below.
window.startRecording = function() {
recording = true;
}
window.stopRecording = function() {
recording = false;
window.Stream.end();
}
// Wires the captured MediaStream into a ScriptProcessor that forwards each
// 2048-sample buffer of channel 0 (mono) to the server.
function success(e) {
audioContext = window.AudioContext || window.webkitAudioContext;
context = new audioContext();
// The capture rate is context.sampleRate (commonly 48000 Hz); the server's
// encoder must use the same rate or playback pitch/speed will be wrong.
audioInput = context.createMediaStreamSource(e);
var bufferSize = 2048;
recorder = context.createScriptProcessor(bufferSize, 1, 1);
recorder.onaudioprocess = function(e){
if(!recording) return;
console.log ('recording');
var left = e.inputBuffer.getChannelData(0);
window.Stream.write(left); //trying it with float32
}
audioInput.connect(recorder)
recorder.connect(context.destination);
}
// Converts [-1, 1] float32 PCM samples to signed 16-bit PCM.
// Fixed: the original scaled by 0xFFFF (65535), which overflows Int16Array
// and corrupts the audio; full scale for int16 is 0x7FFF (32767). Samples
// are clamped first so out-of-range input cannot wrap around.
function convertoFloat32ToInt16(buffer) {
var l = buffer.length;
var buf = new Int16Array(l)
while (l--) {
var s = Math.max(-1, Math.min(1, buffer[l]));
buf[l] = s * 0x7FFF; //convert to 16 bit
}
return buf.buffer
}
});
})(this);
Here is the node app
var express = require('express');
var BinaryServer = require('binaryjs').BinaryServer;
var fs = require('fs');
var lame = require("lame");
var path = require('path');
var KalmanFilter = require('kalmanjs').default;
var buffer = [];
var port = 3700;
var outFile = 'demo.mp3';
// The Express app only serves the recording page; the audio data itself
// arrives over the BinaryJS websocket server below (port 9001).
var app = express();
app.set('views', __dirname + '/tpl');
app.set('view engine', 'jade');
app.engine('jade', require('jade').__express);
app.use(express.static(__dirname + '/public'))
app.get('/', function(req, res){
res.render('index');
});
app.listen(port);
console.log('server open on port ' + port);
binaryServer = BinaryServer({port: 9001});
binaryServer.on('connection', function(client) {
console.log('new connection');
client.on('stream', function(stream, meta) {
console.log('new stream');
// Encode incoming raw float32 PCM to MP3. Fixed: sampleRate must match the
// browser's capture rate (typically 48000 Hz, per the answer below);
// omitting it made lame assume 44100 Hz, producing fast/high-pitched audio.
// NOTE(review): the client writes only channel 0, i.e. mono — channels:2
// may also need to be 1 here; verify against the client.
stream.pipe(new lame.Encoder({channels:2, bitDepth: 32, float:true, sampleRate: 48000}))
.pipe(fs.createWriteStream(path.resolve(__dirname, outFile)))
.on('close',function () {
console.log('done?');
})
stream.on('end', function() {
console.log('wrote to file ' + outFile);
});
});
});
Both of these snippets are taken from https://github.com/gabrielpoca/browser-pcm-stream except for the mp3 part.
The default sampling rate in the browser is 48000hz, while you are encoding your mp3 files with a sampling rate of 44100hz. You either need to resample your PCM data or set the recording sampling rate to 44100hz.

Share screen using getScreenId.js in WebRTC for two peers

I am trying to implement share screen function in webrtc video conferencing. From suggestion, I am now following muaz-khan's solution using https://www.webrtc-experiment.com/getScreenId/ . I can easily capture the application images of one peer, and replace the video stream with the capture stream. But it is a video conferencing experiment, so two browsers need to video conference with each other. For example, browser 1, has video streams A (local video), video streams B (remote video); browser 2 has video streams B (local video), video streams A (remote video). So when I am in browser 1 and trying to share the screen, the share screen stream should replace the local video in browser 1, and remote video in browser 2.
But right now, I can only make the shared screen replace the local video in browser 1; browser 2 doesn't see any changes in its remote video (which is the local video in browser 1). I don't know how to trigger the change in browser 2 as well. Do I need to signal the share-screen stream to the server and change the remote stream accordingly?
Here is my code in javascript:
$(function() {
var brokerController, ws, webRTC, localid;
// ws = new XSockets.WebSocket("wss://rtcplaygrouund.azurewebsites.net:443", ["connectionbroker"], {
ws = new XSockets.WebSocket("ws://localhost:4502", ["connectionbroker"], {
ctx: "152300ed-4d84-4e72-bc99-965052dc1e95"
});
// Appends a <video> element for a remote peer, tagged with the peer id (rel
// attribute) so it can be located and removed when that peer disconnects.
var addRemoteVideo = function(peerId,mediaStream) {
var remoteVideo = document.createElement("video");
remoteVideo.setAttribute("autoplay", "true");
remoteVideo.setAttribute("rel",peerId);
attachMediaStream(remoteVideo, mediaStream);
remoteVideo.setAttribute("class", "col-md-3");
remoteVideo.setAttribute("height", $( document ).height() * 0.3);
remoteVideo.setAttribute("id", 'remoteVideo');
$("#videoscreen").append(remoteVideo);
};
// Removes the disconnected peer's video element (matched by rel attribute).
var onConnectionLost = function (remotePeer) {
console.log("onconnectionlost");
var peerId = remotePeer.PeerId;
var videoToRemove = $("video[rel='" + peerId + "']");
videoToRemove.remove();
};
var oncConnectionCreated = function() {
console.log("oncconnectioncreated", arguments);
}
// Fixed: onError was passed to webRTC.getUserMedia below but never defined,
// which would throw a ReferenceError when media capture failed.
var onError = function (err) {
console.error("getUserMedia failed", err);
};
var onGetUerMedia = function(stream) {
console.log("Successfully got some userMedia , hopefully a goat will appear..");
webRTC.connectToContext(); // connect to the current context?
};
var onRemoteStream = function (remotePeer) {
addRemoteVideo(remotePeer.PeerId, remotePeer.stream);
console.log("Opps, we got a remote stream. lets see if its a goat..");
};
// Shows the local camera preview and wires up the "share screen" button.
var onLocalStream = function(mediaStream) {
console.log("Got a localStream", mediaStream.id);
localid = mediaStream.id;
console.log("check this id: meadiastram id ", mediaStream.id);
var video = document.createElement("video");
video.setAttribute("height", "100%");
video.setAttribute("autoplay", "true");
video.setAttribute("id", "localvideo");
video.setAttribute("name", mediaStream.id);
attachMediaStream(video, mediaStream);
$("#videoscreen").append(video);
$('#share').click(function() {
getScreenId(function (error, sourceId, screen_constraints) {
navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
navigator.getUserMedia(screen_constraints, function (stream) {
// NOTE(review): this only swaps the local preview; the screen stream is
// never added to the peer connection, so remote peers keep seeing the
// old camera stream — the core problem described in the question.
$('#localvideo').attr('src', URL.createObjectURL(stream));
}, function (error) {
console.error(error);
});
});
});
};
var onContextCreated = function(ctx) {
console.log("RTC object created, and a context is created - ", ctx);
webRTC.getUserMedia(webRTC.userMediaConstraints.hd(true), onGetUerMedia, onError);
};
// Once the broker controller opens, create the WebRTC wrapper and hook events.
var onOpen = function() {
console.log("Connected to the brokerController - 'connectionBroker'");
webRTC = new XSockets.WebRTC(this);
webRTC.onlocalstream = onLocalStream;
webRTC.oncontextcreated = onContextCreated;
webRTC.onconnectioncreated = oncConnectionCreated;
webRTC.onconnectionlost = onConnectionLost;
webRTC.onremotestream = onRemoteStream;
};
var onConnected = function() {
console.log("connection to the 'broker' server is established");
console.log("Try get the broker controller form server..");
brokerController = ws.controller("connectionbroker");
brokerController.onopen = onOpen;
};
ws.onconnected = onConnected;
});
I am using xsocket as the server, and the codes for click share and change the local stream with the share screen streams are just very simple as this:
// Replace the local preview with the captured screen stream.
$('#share').click(function() {
getScreenId(function (error, sourceId, screen_constraints) {
navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
navigator.getUserMedia(screen_constraints, function (stream) {
$('#localvideo').attr('src', URL.createObjectURL(stream));
}, function (error) {
console.error(error);
});
});
}); // Fixed: the click handler was never closed (missing closing `});`).
Any help or suggestion would be greatly appreciated.
Thanks for pointing out the other post: How to addTrack in MediaStream in WebRTC, but I don't think they are the same. And also I am not sure how to renegotiate the remote connection in this case.
Xsocket.webrtc.js file for webrtc connection:
https://github.com/XSockets/XSockets.WebRTC/blob/master/src/js/XSockets.WebRTC.latest.js
How I could I renegotiate the remote connection in this case?
I figured out a work around solution by myself for this question, do not replace the local stream with the sharescreen stream, instead remove the old local stream from local div, then add the new sharescreen stream to local div. In the meantime, send the old local stream id by datachanel to the other peer, and remove that old remote video as well.
The most important thing is to refresh the streams (renegotiation); then the share-screen stream will display on the remote peer.
Code:
// Share-screen workaround: instead of replacing tracks inside the existing
// local stream, drop the old local stream entirely, publish the screen
// stream as a new local stream, and renegotiate with every remote peer.
$('#share').click(function() {
getScreenId(function (error, sourceId, screen_constraints) {
navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
navigator.getUserMedia(screen_constraints, function (stream) {
// Remove the current camera stream from the connection and the page.
webRTC.removeStream(webRTC.getLocalStreams()[0]);
var id = $('#localvideo').attr('name');
$('#localvideo').remove();
// Tell the other peer (via the broker) to drop the stale remote <video>
// identified by the old stream id.
brokerController.invoke('updateremotevideo', id);
webRTC.addLocalStream(stream);
// Renegotiate so the screen stream reaches every connected peer.
webRTC.getRemotePeers().forEach(function (p) {
webRTC.refreshStreams(p);
});
}, function (error) {
console.error(error);
});
});
});
after get the command to remove that old video stream from the server:
// Server-invoked callback: remove the remote <video> whose DOM id equals the
// peer's old stream id (sent by the other side when it swaps streams).
brokerController.on('updateremotevideo', function(streamid){
var staleVideo = document.getElementById(streamid);
$(staleVideo).remove();
});
This solution works for me. If we only wanted to replace the local video stream with the share-screen stream, we would need to re-create the offer SDP and send it to the remote peer, which is more complicated.
// Capture the screen, mix in a microphone audio track, and record the result
// with MediaStreamRecorder while previewing it in the page's <video> elements.
getScreenId(function (error, sourceId, screen_constraints) {
navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
navigator.getUserMedia(screen_constraints, function (stream) {
navigator.getUserMedia({audio: true}, function (audioStream) {
// Add the mic's audio track so the screen recording has sound.
stream.addTrack(audioStream.getAudioTracks()[0]);
var mediaRecorder = new MediaStreamRecorder(stream);
mediaRecorder.mimeType = 'video/mp4'
mediaRecorder.stream = stream;
// NOTE(review): looks like React component state — confirm `self` is the component.
self.setState({recorder: mediaRecorder, startRecord: true, shareVideo: true, pauseRecord: false, resumeRecord: false, stopRecord: false, downloadRecord: false, updateRecord: false});
document.querySelector('video').src = URL.createObjectURL(stream);
var video = document.getElementById('screen-video')
if (video) {
video.src = URL.createObjectURL(stream);
video.width = 360;
video.height = 300;
}
}, function (error) {
alert(error);
});
}, function (error) {
alert(error);
});
});

How to record a video using webrtc

I need to record a video using the laptop camera on my website built using nodejs. For this I am using webRTC. So far I could take a photo using the laptop camera but I need to record a video. Could some one help as to how the code would go? My current code is as follows:
<video id="video"></video>
<button id="startbutton">Take photo</button>
<button id="pausebutton">Pause</button>
<button id="resumebutton">Resume</button>
<canvas id="canvas"></canvas>
<script type="text/javascript">
(function() {
// Live camera preview with "take photo" (draws the current frame to a
// canvas) plus pause/resume controls for the preview.
var streaming = false,
video = document.querySelector('#video'),
canvas = document.querySelector('#canvas'),
//photo = document.querySelector('#photo'),
startbutton = document.querySelector('#startbutton'),
// Fixed: declare the pause/resume buttons instead of relying on implicit
// window properties created from element ids.
pausebutton = document.querySelector('#pausebutton'),
resumebutton = document.querySelector('#resumebutton'),
width = 620,
height = 50;
navigator.getMedia = ( navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia);
navigator.getMedia(
{
video: true,
audio: false
},
function(stream) {
if (navigator.mozGetUserMedia) {
video.mozSrcObject = stream;
} else {
var vendorURL = window.URL || window.webkitURL;
video.src = vendorURL.createObjectURL(stream);
}
video.play();
},
function(err) {
console.log("An error occured! " + err);
}
);
// Once the video reports its dimensions, size the video and canvas to match,
// preserving the aspect ratio at the fixed display width.
video.addEventListener('canplay', function(ev){
if (!streaming) {
height = video.videoHeight / (video.videoWidth/width);
// Fixed: Firefox can report videoWidth/videoHeight as 0 here, making the
// computed height NaN; fall back to a 4:3 aspect ratio (per MDN's guide).
if (isNaN(height)) {
height = width / (4/3);
}
video.setAttribute('width', width);
video.setAttribute('height', height);
canvas.setAttribute('width', width);
canvas.setAttribute('height', height);
streaming = true;
}
}, false);
// Snapshot the current video frame into the canvas as a PNG data URL.
function takepicture() {
canvas.width = width;
canvas.height = height;
canvas.getContext('2d').drawImage(video, 0, 0, width, height);
var data = canvas.toDataURL('image/png');
// photo.setAttribute('src', data);
}
function pausevideo() {
canvas.width = width;
canvas.height = height;
video.pause();
}
function resumevideo() {
canvas.width = width;
canvas.height = height;
video.play();
}
startbutton.addEventListener('click', function(ev){
takepicture();
ev.preventDefault();
}, false);
pausebutton.addEventListener('click', function(ev){
pausevideo();
ev.preventDefault();
}, false);
resumebutton.addEventListener('click', function(ev){
resumevideo();
ev.preventDefault();
}, false);
})();
</script>
I am not going to write code for you(you seem pretty capable if you have gotten this far) but here are some pointers to get you in the right direction.
Assign a global variable the value of the stream when you grab it(this way you can reuse the same stream in numerous functions
Once you have the stream you can easily follow the tutorials at RTC-Recording, there is a write to disk method that should help you out in downloading the recording
If you have a stream, this is how to start using RecordRTC.
// Minimal RecordRTC usage: record the given MediaStream as video.
var options = {
type: 'video'
};
var recordRTC = RecordRTC(mediaStream, options);
recordRTC.startRecording();
// stopRecording is asynchronous; the callback receives a blob URL of the
// finished recording that can be played directly in a media element.
recordRTC.stopRecording(function(videoURL) {
mediaElement.src = videoURL; //plays the recorded blob url on that src
});

My WebRTC implementation never calls onaddstream

I am using https://github.com/HenrikJoreteg/RTCPeerConnection, which simplifies WebRTC, but I still cannot get it to work. The WebRTC peer connection (offer, answer, ICE) gets established, but onaddstream never gets called.
(I open 2 browser tab with the same html which utilize the following code and I run the code by running start function to get the video and then running peer the start the connection.)
Can anyone help me point me out the bug in this code?
var local_stream;
var localvid = document.getElementById('localvid');
var remotevid = document.getElementById('remotevid');
var servers= {iceServers: [{"url": "stun:stun.l.google.com:19302"}]};
var constraints = {optional: [{"DtlsSrtpKeyAgreement": true}]};
var peerConn = new PeerConnection(servers,constraints);
// Step 1: capture the local camera (video only) and preview it.
function start(){
navigator.getUserMedia ||
(navigator.getUserMedia = navigator.getUserMedia || navigator.mozGetUserMedia ||
  navigator.webkitGetUserMedia || navigator.msGetUserMedia);
if (navigator.getUserMedia) {
navigator.getUserMedia({
      video: true,
      audio: false
    }, onSuccess, function(){});
} else {
    alert('getUserMedia is not supported in this browser.');
}
}
function onSuccess(stream){
localvid.src = window.webkitURL.createObjectURL(stream);
local_stream = stream;
localvid.autoplay = true;
}
// Step 2 (run after start()): add the local stream and create/send the offer.
function peer(){
peerConn.addStream(local_stream);
peerConn.offer({ mandatory: { OfferToReceiveAudio:false, OfferToReceiveVideo: true} },
function( err, offer){
if(!err){
console.log("Creating an offer...");
socket.emit('offer',offer);
}
}
);
}
// Trickle ICE: forward each local candidate to the peer via the signaling socket.
peerConn.on('ice', function(candidate){
if(candidate){
console.log("Sending ice...");console.log(candidate);
socket.emit('ice',candidate);
}else { console.log("End of candidates.");}
});
// Fixed (per the author's own answer below): the library emits 'addStream',
// not 'streamAdded', and the handler receives the event object whose .stream
// property holds the MediaStream — this is why the remote video never showed.
peerConn.on('addStream',function(e){
console.log("Adding stream..");
remotevid.src = window.webkitURL.createObjectURL(e.stream);
remotevid.autoplay = true;
});
socket.on('offer',function(offer){
peerConn.answer(offer,function (err,answer){
if(!err){
console.log("Creating the answer...");console.log(answer);
socket.emit('answer',answer);
}
});
});
socket.on('answer', function(answer){
console.log("Got the answer...");
peerConn.handleAnswer(answer);
});
socket.on('ice', function (candidate){
console.log("Processing ice...");
peerConn.processIce(candidate);
console.log(candidate);
});
OK. I got it to work now.
https://github.com/HenrikJoreteg/RTCPeerConnection
The documentation there is wrong.
There is no 'streamAdded' event — the event is named 'addStream', and the value passed to the handler is not the stream itself but the event object. So I need to do the following instead:
// Correct handler: the 'addStream' event object carries the MediaStream in .stream.
peerConn.on('addStream', function(evt) {
var incoming = evt.stream;
remotevid.src = window.webkitURL.createObjectURL(incoming);
remotevid.autoplay = true;
});