WebRTC: Object #<RTCPeerConnection> has no method 'processSignalingMessage'

I am having problems with WebRTC.
I am using this code, taken from an example about video calls:
if (new_connection) {
    console.log('New Peer Connection');
    var peer_connection = {};
    peer_connection.connection_id = msg.from_connection_id;
    peer_connection.pc = createPeerConnection(peer_connection.connection_id, false);
    peer_connections.push(peer_connection);
    $('#remote').prepend(remoteVideoHtml.replace('remoteVideoId', 'peer' + peer_connection.connection_id));
}
// Now process the SDP JSON blob received
for (var i in peer_connections) {
    if (peer_connections[i].connection_id == msg.from_connection_id) {
        try {
            peer_connections[i].pc.processSignalingMessage(msg.data);
        } catch (e) {
            console.log("Failed to call processSignalingMessage, exception: " + e.message);
        }
    }
}
I need help because I am having a problem with this line:
peer_connections[i].pc.processSignalingMessage(msg.data);
The problem is:
Object #<RTCPeerConnection> has no method 'processSignalingMessage'
I also don't know how these functions work or how they are invoked:
pc.onconnecting = function (msg) {
    console.log('onSessionConnecting');
}
pc.onopen = function (msg) {
    console.log('onSessionOpened');
}
pc.onaddstream = function (event) {
    console.log('onRemoteStreamAdded add the remote peers video stream.');
    var url = webkitURL.createObjectURL(event.stream);
    $('#peer' + connection_id).attr({
        src: url
    });
}
I would appreciate any help.

The initial version of WebRTC in Chrome was based on ROAP, and it used to have a processSignalingMessage() method. The current version is based on JSEP and instead has methods like setRemoteDescription() and setLocalDescription() to inject the local SDP and the SDP received from other users.
You can still find this implementation in old versions of Chrome or in Bowser.
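For comparison, here is a minimal JSEP-style sketch of handling an incoming offer with the current API. The sendMessage() signaling helper and logError() handler are assumptions for illustration only, not part of the original example:
function handleOffer(pc, msg) {
    // Inject the remote SDP received over your signaling channel,
    // then create an answer and send the local SDP back.
    pc.setRemoteDescription(new RTCSessionDescription(msg.data), function () {
        pc.createAnswer(function (answer) {
            pc.setLocalDescription(answer, function () {
                sendMessage({ type: 'answer', data: answer }); // hypothetical signaling helper
            }, logError);
        }, logError);
    }, logError);
}
function logError(err) {
    console.log('Signaling error: ' + err);
}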

Related

Share screen using getScreenId.js in WebRTC for two peers

I am trying to implement a screen-sharing function in WebRTC video conferencing. Following a suggestion, I am now using muaz-khan's solution from https://www.webrtc-experiment.com/getScreenId/ . I can easily capture the application images of one peer and replace the video stream with the captured stream. But this is a video-conferencing experiment, so two browsers need to conference with each other. For example, browser 1 has video stream A (local video) and video stream B (remote video); browser 2 has video stream B (local video) and video stream A (remote video). So when I share the screen in browser 1, the screen-share stream should replace the local video in browser 1 and the remote video in browser 2.
But right now, I can only make the screen share replace the local video in browser 1; nothing changes in browser 2, which can't see any change in its remote video (the local video from browser 1). I don't know how to trigger the change in browser 2 as well. Do I need to signal the screen-share stream to the server and change the remote stream accordingly?
Here is my JavaScript code:
$(function() {
    var brokerController, ws, webRTC, localid;
    // ws = new XSockets.WebSocket("wss://rtcplaygrouund.azurewebsites.net:443", ["connectionbroker"], {
    ws = new XSockets.WebSocket("ws://localhost:4502", ["connectionbroker"], {
        ctx: "152300ed-4d84-4e72-bc99-965052dc1e95"
    });
    var addRemoteVideo = function(peerId, mediaStream) {
        var remoteVideo = document.createElement("video");
        remoteVideo.setAttribute("autoplay", "true");
        remoteVideo.setAttribute("rel", peerId);
        attachMediaStream(remoteVideo, mediaStream);
        remoteVideo.setAttribute("class", "col-md-3");
        remoteVideo.setAttribute("height", $( document ).height() * 0.3);
        remoteVideo.setAttribute("id", 'remoteVideo');
        $("#videoscreen").append(remoteVideo);
    };
    var onConnectionLost = function (remotePeer) {
        console.log("onconnectionlost");
        var peerId = remotePeer.PeerId;
        var videoToRemove = $("video[rel='" + peerId + "']");
        videoToRemove.remove();
    };
    var oncConnectionCreated = function() {
        console.log("oncconnectioncreated", arguments);
    }
    var onGetUerMedia = function(stream) {
        console.log("Successfully got some userMedia , hopefully a goat will appear..");
        webRTC.connectToContext(); // connect to the current context?
    };
    var onRemoteStream = function (remotePeer) {
        addRemoteVideo(remotePeer.PeerId, remotePeer.stream);
        console.log("Opps, we got a remote stream. lets see if its a goat..");
    };
    var onLocalStream = function(mediaStream) {
        console.log("Got a localStream", mediaStream.id);
        localid = mediaStream.id;
        console.log("check this id: meadiastram id ", mediaStream.id);
        var video = document.createElement("video");
        video.setAttribute("height", "100%");
        video.setAttribute("autoplay", "true");
        video.setAttribute("id", "localvideo");
        video.setAttribute("name", mediaStream.id);
        attachMediaStream(video, mediaStream);
        $("#videoscreen").append(video);
        $('#share').click(function() {
            getScreenId(function (error, sourceId, screen_constraints) {
                navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
                navigator.getUserMedia(screen_constraints, function (stream) {
                    $('#localvideo').attr('src', URL.createObjectURL(stream));
                }, function (error) {
                    console.error(error);
                });
            });
        });
    };
    var onContextCreated = function(ctx) {
        console.log("RTC object created, and a context is created - ", ctx);
        webRTC.getUserMedia(webRTC.userMediaConstraints.hd(true), onGetUerMedia, onError);
    };
    var onOpen = function() {
        console.log("Connected to the brokerController - 'connectionBroker'");
        webRTC = new XSockets.WebRTC(this);
        webRTC.onlocalstream = onLocalStream;
        webRTC.oncontextcreated = onContextCreated;
        webRTC.onconnectioncreated = oncConnectionCreated;
        webRTC.onconnectionlost = onConnectionLost;
        webRTC.onremotestream = onRemoteStream;
    };
    var onConnected = function() {
        console.log("connection to the 'broker' server is established");
        console.log("Try get the broker controller form server..");
        brokerController = ws.controller("connectionbroker");
        brokerController.onopen = onOpen;
    };
    ws.onconnected = onConnected;
});
I am using XSockets as the server. The code for the share click handler, which replaces the local stream with the screen-share stream, is simply this:
$('#share').click(function() {
    getScreenId(function (error, sourceId, screen_constraints) {
        navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
        navigator.getUserMedia(screen_constraints, function (stream) {
            $('#localvideo').attr('src', URL.createObjectURL(stream));
        }, function (error) {
            console.error(error);
        });
    });
});
Any help or suggestion would be appreciated.
Thanks for pointing out the other post, How to addTrack in MediaStream in WebRTC, but I don't think it is the same issue. I am also not sure how to renegotiate the remote connection in this case.
The XSockets.WebRTC.js file used for the WebRTC connection:
https://github.com/XSockets/XSockets.WebRTC/blob/master/src/js/XSockets.WebRTC.latest.js
How could I renegotiate the remote connection in this case?
I figured out a workaround for this question myself: do not replace the local stream with the screen-share stream. Instead, remove the old local stream from the local div, then add the new screen-share stream to it. At the same time, send the old local stream id through the data channel to the other peer and remove the old remote video there as well.
The most important thing is to refresh the streams (renegotiation); the screen-share stream will then be displayed on the remote peer.
Code:
$('#share').click(function() {
    getScreenId(function (error, sourceId, screen_constraints) {
        navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
        navigator.getUserMedia(screen_constraints, function (stream) {
            webRTC.removeStream(webRTC.getLocalStreams()[0]);
            var id = $('#localvideo').attr('name');
            $('#localvideo').remove();
            brokerController.invoke('updateremotevideo', id);
            webRTC.addLocalStream(stream);
            webRTC.getRemotePeers().forEach(function (p) {
                webRTC.refreshStreams(p);
            });
        }, function (error) {
            console.error(error);
        });
    });
});
After receiving the command from the server, remove the old video stream on the other peer:
brokerController.on('updateremotevideo', function(streamid) {
    $(document.getElementById(streamid)).remove();
});
This solution works for me. However, if you would rather just replace the local video stream with the screen-share stream, you need to re-create the offer SDP and send it to the remote peer, which is more complicated.
getScreenId(function (error, sourceId, screen_constraints) {
    navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
    navigator.getUserMedia(screen_constraints, function (stream) {
        navigator.getUserMedia({audio: true}, function (audioStream) {
            stream.addTrack(audioStream.getAudioTracks()[0]);
            var mediaRecorder = new MediaStreamRecorder(stream);
            mediaRecorder.mimeType = 'video/mp4'
            mediaRecorder.stream = stream;
            self.setState({recorder: mediaRecorder, startRecord: true, shareVideo: true, pauseRecord: false, resumeRecord: false, stopRecord: false, downloadRecord: false, updateRecord: false});
            document.querySelector('video').src = URL.createObjectURL(stream);
            var video = document.getElementById('screen-video')
            if (video) {
                video.src = URL.createObjectURL(stream);
                video.width = 360;
                video.height = 300;
            }
        }, function (error) {
            alert(error);
        });
    }, function (error) {
        alert(error);
    });
});
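For the offer/SDP renegotiation path mentioned above, a rough sketch with a plain RTCPeerConnection could look like the following. This is only an illustration under assumptions: sendMessage() stands in for whatever signaling you use, and XSockets' refreshStreams() wraps a similar renegotiation for you.
// Rough sketch only (plain WebRTC, legacy stream-based API, not XSockets-specific):
// swap the camera stream for the screen stream and renegotiate by sending a new offer.
// sendMessage() is a placeholder for your own signaling channel.
function shareScreen(pc, cameraStream, screenStream) {
    pc.removeStream(cameraStream);
    pc.addStream(screenStream);
    pc.createOffer(function (offer) {
        pc.setLocalDescription(offer, function () {
            sendMessage({ type: 'offer', sdp: offer });
        }, logError);
    }, logError);
}
function logError(err) {
    console.error('Renegotiation failed:', err);
}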

rtcmulticonnection: webrtc recordrtc not returning a blob

I am trying to simply record the WebRTC video using what I thought was a standard example. The library is here: https://github.com/muaz-khan/RTCMultiConnection
rtcMultiConnection.onstream = function(e) {
    var mediaElement = getMediaElement(e.mediaElement, {
        onRecordingStarted: function(type) {
            // www.RTCMultiConnection.org/docs/startRecording/
            rtcMultiConnection.streams[e.streamid].startRecording();
        },
        onRecordingStopped: function(type) {
            // www.RTCMultiConnection.org/docs/stopRecording/
            rtcMultiConnection.streams[e.streamid].stopRecording(function(blob) {
                console.log("test");
                console.log(blob);
            });
        }
    });
};
I can follow the steps through the function calls; the issue is that the callback is never run from recordrtc.js.
It goes to line 100 of https://github.com/muaz-khan/RecordRTC/blob/master/RecordRTC.js
There it runs:
mediaRecorder.stop(_callback);
This never calls the callback.
Even calling functions directly doesn't work:
console.log(rtcMultiConnection.streams[e.streamid].audioRecorder.getBlob());
console.log(rtcMultiConnection.streams[e.streamid].videoRecorder.save("a.png"));
I am wondering if two different versions of RecordRTC and RTCMultiConnection are interacting. Any ideas? Maybe an older RecordRTC would help, but I can't find an older version.
Please use blob.video:
var stream = connection.streams['stream-id'];
stream.stopRecording(function(blob) {
    var h2;
    if (blob.audio) {
        h2 = document.createElement('h2');
        h2.innerHTML = 'Open recorded ' + blob.audio.type + '';
        div.appendChild(h2);
    }
    if (blob.video) {
        h2 = document.createElement('h2');
        h2.innerHTML = 'Open recorded ' + blob.video.type + '';
        div.appendChild(h2);
    }
});
Updated at March 29, 2016
Here is the actual documentation:
http://www.rtcmulticonnection.org/docs/startRecording/
Please make sure that:
You are using v2.2.2
You called startRecording first
For v3, you can use RecordRTC directly:
connection.onstream = function(event) {
    recordStream(event.stream);
};
function recordStream(stream) {
    if (!!window.recorder) return;
    window.recorder = RecordRTC(stream, {
        type: 'video'
    });
    recorder.startRecording();
}
btnStopRecording.onclick = function() {
    if (!window.recorder) return;
    recorder.stopRecording(function() {
        var blob = recorder.blob;
        // or dataURL
        recorder.getDataURL(func_callback);
    });
};
btnStartRecording.onclick = function() {
    var stream = connection.attachStreams[0];
    recordStream(stream);
    // or
    var stream = connection.streamEvents['stream-id'].stream;
    recordStream(stream);
};
The above snippet can be used with v2.2.2 as well.

limitation in the callback function in nodejs redis?

I am not sure if the issue I am having is a limitation in Redis itself or in the Node.js 'redis' module implementation.
var redis = require('redis');
var client = redis.createClient(6379, '192.168.200.5');
client.on('error', function (error) {
    console.log("** error in connection **");
    process.exit(1);
});
client.on('connect', function () {
    console.log("** connected **");
    client.on('message', function (channel, message) {
        if (channel == 'taskqueue') {
            console.log(channel + ' --> ' + message);
            var params = message.split(' ');
            var inputf = params[0];
            var outputf = params[1];
            var dim = inputf.split('_').slice(-1)[0];
            client.rpush('records', message, function (e, reply) {
            });
        }
    });
    client.subscribe('taskqueue');
});
In the code snippet above, I try to do an RPUSH inside the 'message' subscription event handler. It does not work: the client's 'error' event fires, so it prints "error in connection". What is the correct way to do this?
After further searching, I came across this page https://github.com/phpredis/phpredis/issues/365 which seems to explain the scenario.
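The underlying limitation is that once a Redis connection calls SUBSCRIBE it enters subscriber mode and may no longer issue regular commands such as RPUSH; those have to go through a second client. A minimal sketch of that approach, reusing the host and port from the question:
var redis = require('redis');
// One connection for pub/sub, a separate one for normal commands.
var subscriber = redis.createClient(6379, '192.168.200.5');
var writer = redis.createClient(6379, '192.168.200.5');
subscriber.on('message', function (channel, message) {
    if (channel === 'taskqueue') {
        // RPUSH goes through the non-subscribed client.
        writer.rpush('records', message, function (err, reply) {
            if (err) console.error(err);
        });
    }
});
subscriber.subscribe('taskqueue');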

webrtc: createAnswer works on chrome but fires an error with firefox

function createPeerConnection() {
    try {
        pc = new RTCPeerConnection(null, pc_constraints);
        pc.onicecandidate = handleIceCandidate;
        pc.onaddstream = handleRemoteStreamAdded;
        pc.onremovestream = handleRemoteStreamRemoved;
        console.log('Created RTCPeerConnnection');
    } catch (e) {
        console.log('Failed to create PeerConnection, exception: ' + e.message);
        alert('Cannot create RTCPeerConnection object.');
        return;
    }
    try {
        // Reliable Data Channels not yet supported in Chrome
        sendChannel = pc.createDataChannel("sendDataChannel", {reliable: false});
        sendChannel.onmessage = handleMessage;
        trace('Created send data channel');
    } catch (e) {
        alert('Failed to create data channel. ' +
              'You need Chrome M25 or later with RtpDataChannel enabled');
        trace('createDataChannel() failed with exception: ' + e.message);
    }
    sendChannel.onopen = handleSendChannelStateChange;
    sendChannel.onclose = handleSendChannelStateChange;
    pc.ondatachannel = gotReceiveChannel;
}
function doAnswer() {
    console.log('Sending answer to peer.');
    pc.createAnswer(setLocalAndSendMessage, null, sdpConstraints);
}
I got error:
TypeError: Argument 2 of mozRTCPeerConnection.createAnswer is not an object.
The following code should work in Firefox:
function doAnswer() {
    console.log('Sending answer to peer.');
    pc.createAnswer(setLocalAndSendMessage, handleCreateAnswerError, sdpConstraints);
}
function setLocalAndSendMessage(sessionDescription) {
    sessionDescription.sdp = preferOpus(sessionDescription.sdp);
    pc.setLocalDescription(sessionDescription);
    console.log('setLocalAndSendMessage sending message', sessionDescription);
    sendMessage(sessionDescription);
}
function handleCreateAnswerError(error) {
    console.log('createAnswer() error: ', error);
}
The reason this fails in Firefox can be found in the documentation for createAnswer: Firefox won't let you pass null for the error handler. All this requires is that you write your own error callback and pass it into createAnswer. Don't pass null; you should actually pass a function to do something with the error.
Sorry for the late response, better late than never!
How about this? It's what the guys at XSockets use:
pc.createAnswer(setLocalAndSendMessage,function (ex) { self.onerror(ex); }, sdpConstraints);
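As a side note beyond the original answer, current browsers also expose a promise-based createAnswer(), which avoids the mandatory error-callback argument entirely. A small sketch (the constraints argument is omitted here; sendMessage() is the same signaling helper as in the question):
function doAnswer() {
    console.log('Sending answer to peer.');
    pc.createAnswer()
        .then(function (answer) {
            // Apply the answer locally, then ship it to the remote peer.
            return pc.setLocalDescription(answer);
        })
        .then(function () {
            sendMessage(pc.localDescription);
        })
        .catch(function (err) {
            console.log('createAnswer() error: ', err);
        });
}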

vline add remote stream for callee fail

I am trying to use your API in a custom app with imported users.
Everything works fine (auth_token, login, call initiation), but when the callee should get a response and add the remote stream, nothing happens. No errors are shown in the console.
I would appreciate it if someone could take a look at the code and tell me what I'm missing.
I tried the vLine demo at https://freeofcinema.vline.com and it worked with the same browsers and conditions between the two computers. My app is served over HTTP, but I also tried HTTPS and the same problem came up. This is some simplified code I used to test the API.
var Streams = [];
var Vsession = null;
var Vline = (function(){
    var Client;
    var authToken;
    var service_id = 'freeofcinema';
    var profile = null;
    var Person;
    var Calls = [];
    var onMessage = function(event){
        //alert('message');
        var msg = event.message, sender = msg.getSender();
        console.log(sender.getDisplayName() + ' says: ' + msg.getBody());
        console.log(event);
    }
    var onMediaSession = function(event){
        console.log(event);
        var mediaSession = event.target;
        InitSession(mediaSession);
    }
    function Call(mediaSession) {
        mediaSession.on('change', alert_info);
    }
    function alert_info(b){
        console.log(b);
    }
    function InitSession(mediaSession){
        mediaSession.on('mediaSession:addRemoteStream', function(event) {
            alert('addRemoteStream');
        });
        mediaSession.on('mediaSession:addLocalStream', function(event) {
            alert('addLocalStream');
        });
        mediaSession.on('mediaSession:removeLocalStream mediaSession:removeRemoteStream', function(event) {
            console.log('removedStream');
        });
        Calls.push(new Call(mediaSession));
    }
    return {
        init : function(){
            if(profile){
                return;
            }
            profile = {
                "displayName" : //some getusrname function...
            };
            $.post('vtoken.php', { //get auth token
                id : Comm.Voip_user().id
            }, function(data){
                authToken = data;
                Client = vline.Client.create({
                    "serviceId": service_id,
                    "ui" : true
                });
                Client.on('recv:im', onMessage, this);
                Client.on('add:mediaSession', onMediaSession, this);
                Client.on('login', function(e) {
                    Vsession = e.target;
                    //alert('loged in');
                });
                Client.login(service_id, profile, authToken);
            });
        },
        getPerson : function(id){ //id of user to call
            if(Vsession){
                Vsession.getPerson(id).done(function(person){
                    Person = person;
                    Vsession.startMedia(id);
                });
            }
        }
    }
}());
Thank you for your response.
I tried with one user from the app and another from https://freeofcinema.vline.com, and the same problem occurred. Also, the call (in pending state) gets terminated after a short while.
When passing ui: true when creating the client, you do not have to handle the media sessions yourself. Just comment out the line Client.on('add:mediaSession', onMediaSession, this); and it should just work.
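For clarity, this is what that change looks like inside the init() function from the question (only the add:mediaSession line changes):
Client = vline.Client.create({
    "serviceId": service_id,
    "ui" : true
});
Client.on('recv:im', onMessage, this);
// Client.on('add:mediaSession', onMediaSession, this); // not needed: with ui: true the client handles media sessions itself
Client.on('login', function(e) {
    Vsession = e.target;
});
Client.login(service_id, profile, authToken);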