<!DOCTYPE html>
<html>
<head>
<title>WebRTC Reference App</title>
<meta http-equiv="X-UA-Compatible" content="chrome=1"/>
<link rel="canonical" href="https://apprtc.appspot.com/?r=96443121"/>
<link rel="stylesheet" href="css/main.css" />
<script type="text/javascript" src="_ah/channel/jsapi.js"></script>
<!--<script type="text/javascript" src="/_ah/channel/jsapi"></script>-->
<script src="js/main.js"></script>
<!-- Load the polyfill to switch-hit between Chrome and Firefox -->
<script src="js/adapter.js"></script>
</head>
<body>
<script type="text/javascript">
var errorMessages = [];
var channelToken = 'AHRlWroGHj6YqWMdr7JeFULjqux9vQzrpCOnFM7zD91GLHc4PVW1WDZJI08ptgM_XMj5M22InerI5Lw5QN1rS3rHu2Dlnx3g95ILmzupe2OFB4OLCbZGbAQ';
var me = '49847744';
var roomKey = '96443121';
var roomLink = 'https://apprtc.appspot.com/?r=96443121';
var initiator = 0;
var pcConfig = {"iceServers": [{"url": "stun:stun.l.google.com:19302"}]};
var pcConstraints = {"optional": [{"DtlsSrtpKeyAgreement": true}]};
var offerConstraints = {"optional": [], "mandatory": {}};
var mediaConstraints = {"audio": true, "video": true};
var turnUrl = 'https://computeengineondemand.appspot.com/turn?username=49847744&key=4080218913';
var stereo = false;
var audio_send_codec = '';
var audio_receive_codec = 'opus/48000';
setTimeout(initialize, 1);
</script>
<div id="maindiv">
<div id="container" ondblclick="enterFullScreen()">
<div id="card">
<div id="local">
<video id="localVideo" autoplay muted="true"/>
</div>
<div id="remote">
<video id="remoteVideo" autoplay>
</video>
<div id="mini">
<video id="miniVideo" autoplay muted="true"/>
</div>
</div>
</div>
</div>
<div class="buttons">
<button id="startButton">Start</button>
<button id="callButton">Call</button>
<button id="hangupButton">Hang Up</button>
</div>
</div>
<script>
var localStream, localPeerConnection, remotePeerConnection;
var localVideo = document.getElementById("localVideo");
var remoteVideo = document.getElementById("remoteVideo");
var startButton = document.getElementById("startButton");
var callButton = document.getElementById("callButton");
var hangupButton = document.getElementById("hangupButton");
startButton.disabled = false;
callButton.disabled = true;
hangupButton.disabled = true;
startButton.onclick = start;
callButton.onclick = call;
hangupButton.onclick = hangup;
function trace(text) {
console.log((performance.now() / 1000).toFixed(3) + ": " + text);
}
function gotStream(stream){
trace("Received local stream");
localVideo.src = URL.createObjectURL(stream);
localStream = stream;
callButton.disabled = false;
}
function start() {
trace("Requesting local stream");
startButton.disabled = true;
navigator.getUserMedia = navigator.getUserMedia ||
navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
navigator.getUserMedia({audio:true, video:true}, gotStream,
function(error) {
trace("navigator.getUserMedia error: ", error);
});
}
function call() {
callButton.disabled = true;
hangupButton.disabled = false;
trace("Starting call");
if (localStream.getVideoTracks().length > 0) {
trace('Using video device: ' + localStream.getVideoTracks()[0].label);
}
if (localStream.getAudioTracks().length > 0) {
trace('Using audio device: ' + localStream.getAudioTracks()[0].label);
}
var servers = null;
localPeerConnection = new webkitRTCPeerConnection(servers);
trace("Created local peer connection object localPeerConnection");
localPeerConnection.onicecandidate = gotLocalIceCandidate;
remotePeerConnection = new webkitRTCPeerConnection(servers);
trace("Created remote peer connection object remotePeerConnection");
remotePeerConnection.onicecandidate = gotRemoteIceCandidate;
remotePeerConnection.onaddstream = gotRemoteStream;
localPeerConnection.addStream(localStream);
trace("Added localStream to localPeerConnection");
localPeerConnection.createOffer(gotLocalDescription);
}
function gotLocalDescription(description){
localPeerConnection.setLocalDescription(description);
trace("Offer from localPeerConnection: \n" + description.sdp);
remotePeerConnection.setRemoteDescription(description);
remotePeerConnection.createAnswer(gotRemoteDescription);
}
function gotRemoteDescription(description){
remotePeerConnection.setLocalDescription(description);
trace("Answer from remotePeerConnection: \n" + description.sdp);
localPeerConnection.setRemoteDescription(description);
}
function hangup() {
trace("Ending call");
localPeerConnection.close();
remotePeerConnection.close();
localPeerConnection = null;
remotePeerConnection = null;
hangupButton.disabled = true;
callButton.disabled = false;
}
function gotRemoteStream(event){
remoteVideo.src = URL.createObjectURL(event.stream);
trace("Received remote stream");
}
function gotLocalIceCandidate(event){
if (event.candidate) {
remotePeerConnection.addIceCandidate(new RTCIceCandidate(event.candidate));
trace("Local ICE candidate: \n" + event.candidate.candidate);
}
}
function gotRemoteIceCandidate(event){
if (event.candidate) {
localPeerConnection.addIceCandidate(new RTCIceCandidate(event.candidate));
trace("Remote ICE candidate: \n " + event.candidate.candidate);
}
}
</script>
<footer id="status">
</footer>
<div id="infoDiv"></div>
</body>
</html>
In the code above, how do I connect two different PCs and make a video call? I took it from webrtc.org.
I am getting two videos on the same PC, but I want to connect two different PCs and make a video call between them. An example of this can be seen on vline.com, where a link is auto-generated and the other person just needs to open that link to join the video call.
Please help.
You will need a signaling server to relay all messages between the two peers. You could, for example, do this in JavaScript using Node.js:
var WebSocketServer = require('websocket').server;
var http = require('http');
var clients = [];
var server = http.createServer(function(request, response) {
// process HTTP request. Since we're writing just WebSockets server
// we don't have to implement anything.
});
server.listen(80, function() {
console.log((new Date()) + " Server is listening on port 80");
});
// create the server
var wsServer = new WebSocketServer({
httpServer: server
});
function sendCallback(err) {
if (err) console.error("send() error: " + err);
}
// This callback function is called every time someone
// tries to connect to the WebSocket server
wsServer.on('request', function(request) {
console.log((new Date()) + ' Connection from origin ' + request.origin + '.');
var connection = request.accept(null, request.origin);
console.log(' Connection ' + connection.remoteAddress);
clients.push(connection);
// This is the most important callback for us, we'll handle
// all messages from users here.
connection.on('message', function(message) {
if (message.type === 'utf8') {
// process WebSocket message
console.log((new Date()) + ' Received Message ' + message.utf8Data);
// broadcast message to all connected clients
clients.forEach(function (outputConnection) {
if (outputConnection != connection) {
outputConnection.send(message.utf8Data, sendCallback);
}
});
}
});
connection.on('close', function(reasonCode, description) {
// remove the user's connection from the broadcast list
clients.splice(clients.indexOf(connection), 1);
console.log((new Date()) + " Peer disconnected.");
});
});
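To run this server, install the websocket package it requires and start it with Node (listening on port 80 may need elevated privileges, so change the port if necessary):
$ npm install websocket
$ node server.js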
The peers need to send all signaling messages (offers, answers, and candidates) via this signaling server using socket.send(). A peer example is below:
function connect() {
if (!started && localStream && channelReady) {
createPeerConnection();
started = true;
peerConn.createOffer(setLocalAndSendMessage, errorCallback, mediaConstraints);
} else {
alert("Local stream may not be running yet, or problem with signaling");
}
}
// send SDP via websocket connection
function setLocalAndSendMessage(sessionDescription) {
peerConn.setLocalDescription(sessionDescription);
socket.send(JSON.stringify(sessionDescription));
}
// Signaling messages end up here
function onMessage(evt) {
var descr = JSON.parse(evt.data);
if (descr.type === 'offer') {
if (!started) {
createPeerConnection();
started = true;
}
peerConn.setRemoteDescription(new RTCSessionDescription(descr));
peerConn.createAnswer(setLocalAndSendMessage, errorCallback, mediaConstraints);
} else if (descr.type === 'answer'){
peerConn.setRemoteDescription(new RTCSessionDescription(descr));
} else if (descr.type === 'candidate'){
var candidate = new RTCIceCandidate({sdpMLineIndex: descr.sdpMLineIndex, sdpMid: descr.sdpMid, candidate: descr.candidate});
peerConn.addIceCandidate(candidate);
}
}
//Creates a new RTCPeerConnection
function createPeerConnection() {
peerConn = new webkitRTCPeerConnection({"iceServers": []});
// send any ice candidates to the other peer
peerConn.onicecandidate = function(evt) {
if (evt.candidate) {
socket.send(JSON.stringify({
type: "candidate",
sdpMLineIndex: evt.candidate.sdpMLineIndex,
sdpMid: evt.candidate.sdpMid,
candidate: evt.candidate.candidate}));
} else {
console.log("End of candidates.");
}
};
peerConn.addStream(localStream);
peerConn.addEventListener("addstream", onRemoteStreamAdded, false);
}
function onRemoteStreamAdded(event) {
//peerConn.removeEventListener("addstream", onRemoteStreamAdded, false);
remotevidArr.src = window.URL.createObjectURL(event.stream);
}
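For completeness, the example above assumes a socket, a channelReady flag, an errorCallback and mediaConstraints that are not shown. A minimal sketch of that wiring, with an assumed server URL and legacy-style offer constraints, could look like this:
// Sketch of the wiring the peer example assumes; the URL is a placeholder.
var socket = new WebSocket('ws://your-signaling-server:80');
var started = false;
var channelReady = false;
var localStream = null; // set from getUserMedia elsewhere
var peerConn = null;
// Legacy constraint format matching the callback-style createOffer above.
var mediaConstraints = {
  'mandatory': {'OfferToReceiveAudio': true, 'OfferToReceiveVideo': true}
};
socket.onopen = function() { channelReady = true; };
socket.onmessage = onMessage; // every relayed signaling message lands here
function errorCallback(error) {
  console.error('WebRTC error:', error);
}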
I'm trying to make a video call using WebRTC on the same local machine. I can see the remote video in Firefox, but not in Chrome. When I logged my code to the console, I found that event.candidate is null for both localPeerConnection.onicecandidate and remotePeerConnection.onicecandidate. Testing in Chrome again, I found that the connectionState is "new" and the iceGatheringState is "complete". When I tried Trickle ICE (https://webrtc.github.io/samples/src/content/peerconnection/trickle-ice/), I got no ICE candidates in Chrome, but I did in Firefox.
Thank you :)
Here's my code:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>WebRTC</title>
</head>
<body>
<video id="local" playsinline muted autoplay controls></video>
<video id="remote" playsinline autoplay controls></video>
<button id="startButton">Start</button>
<button id="callButton">Call</button>
<button id="hangupButton">Hang UP</button>
<script type="text/javascript">
// Stream info
var localStream, localPeerConnection, remotePeerConnection;
// Video tags
var localVideo = document.getElementById("local");
var remoteVideo = document.getElementById("remote");
// Buttons
var startButton = document.getElementById("startButton");
var callButton = document.getElementById("callButton");
var hangupButton = document.getElementById("hangupButton");
startButton.disabled = false;
callButton.disabled = true;
hangupButton.disabled = true;
var servers = null;
startButton.onclick = start;
callButton.onclick = call;
hangupButton.onclick = hangup;
function log(text){
console.log("At time " + (performance.now()/1000).toFixed(3) + " --> " + text);
}
function successCallback(stream){
log("Received local stream");
localVideo.srcObject = stream;
localStream = stream;
callButton.disabled = false;
}
var constraints = {audio: true, video: true};
function start(){
log("Requesting local stream");
startButton.disabled = true;
navigator.mediaDevices.getUserMedia(constraints).then(successCallback).catch(function(error){
log("getUserMedia error: " + error.name);
});
}
function call(){
callButton.disabled = true;
hangupButton.disabled = false;
log("Starting Call");
if(localStream.getVideoTracks().length > 0){
log("Using video device: " + localStream.getVideoTracks()[0].label);
}
if(localStream.getAudioTracks().length > 0){
log("Using audio device: " + localStream.getAudioTracks()[0].label);
}
localPeerConnection = new RTCPeerConnection(servers);
log("Created local peer connection object localPeerConnection");
localPeerConnection.onicecandidate = gotLocalIceCandidate;
localPeerConnection.onconnectionstatechange = function(event){
console.log("QWERTY");
}
remotePeerConnection = new RTCPeerConnection(servers);
log("Created remote peer connection object remotePeerConnection");
remotePeerConnection.onicecandidate = gotRemoteIceCandidate;
if (remotePeerConnection.addTrack !== undefined) {
remotePeerConnection.ontrack = ev => {
ev.streams.forEach(stream => doAddStream(stream));
}
} else {
remotePeerConnection.onaddstream = ev => {
doAddStream(ev.stream);
}
}
localStream.getTracks().forEach((track)=>{
localPeerConnection.addTrack(track, localStream);
});
// localPeerConnection.addStream(localStream);
log("Added localStream to localPeerConnection");
localPeerConnection.createOffer(gotLocalDescription, onSignalingError);
}
function gotLocalDescription(description){
localPeerConnection.setLocalDescription(description);
log("Offer from localPeerConnection: "+ description.sdp);
remotePeerConnection.setRemoteDescription(description);
remotePeerConnection.createAnswer(gotRemoteDescription, onSignalingError);
}
function gotRemoteDescription(description){
remotePeerConnection.setLocalDescription(description);
log("Answer from remotePeerConnection:"+description.sdp);
localPeerConnection.setRemoteDescription(description);
}
function hangup(){
log("Ending call");
localPeerConnection.close();
remotePeerConnection.close();
localPeerConnection = null;
remotePeerConnection = null;
hangupButton.disabled = true;
callButton.disabled = false;
}
function onSignalingError(error){
log("Failed to create signaling message: "+ error.name);
}
function gotLocalIceCandidate(event){
if(event.candidate){
remotePeerConnection.addIceCandidate(new RTCIceCandidate(event.candidate));
log("Local ICE candidate: \n" + event.candidate.candidate);
}
}
function gotRemoteIceCandidate(event){
if(event.candidate){
localPeerConnection.addIceCandidate(new RTCIceCandidate(event.candidate));
log("Remote ICE candidate: \n" +event.candidate.candidate);
}
}
function doAddStream(stream){
remoteVideo.srcObject = stream;
log("Received remote stream Do add Stream");
}
</script>
</body>
</html>
I've created a demo to show this; it seems that when I show a visualization, the recorded audio gets really crunchy. Is there any way I can fix this? It works in Chrome and Firefox; Safari is the only offender.
<style>
html, body {
margin: 0!important;
padding: 0!important;
overflow: hidden!important;
width: 100%;
}
</style>
<title>Audio Recording | RecordRTC</title>
<h1>Simple Audio Recording using RecordRTC</h1>
<meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0">
<br>
<button id="btn-start-recording">Start Recording</button>
<button id="btn-stop-recording" disabled>Stop Recording</button>
<label><input id="visualizer" type="checkbox" checked/>Show Visualizer</label>
<hr>
<canvas id="audio-canvas"></canvas>
<div><audio controls autoplay></audio></div>
<script src="https://cdn.webrtc-experiment.com/RecordRTC.js"></script>
<script>
var audio = document.querySelector('audio');
function captureMicrophone(callback) {
if(microphone) {
callback(microphone);
return;
}
if(typeof navigator.mediaDevices === 'undefined' || !navigator.mediaDevices.getUserMedia) {
alert('This browser does not support the WebRTC getUserMedia API.');
if(navigator.getUserMedia) {
alert('This browser seems to support only the deprecated getUserMedia API.');
}
return; // don't fall through to mediaDevices.getUserMedia below
}
navigator.mediaDevices.getUserMedia({
audio: {
echoCancellation: false
}
}).then(function(mic) {
callback(mic);
}).catch(function(error) {
alert('Unable to capture your microphone. Please check console logs.');
console.error(error);
});
}
function replaceAudio(src) {
var newAudio = document.createElement('audio');
newAudio.controls = true;
if(src) {
newAudio.src = src;
}
var parentNode = audio.parentNode;
parentNode.innerHTML = '';
parentNode.appendChild(newAudio);
audio = newAudio;
}
function stopRecordingCallback() {
replaceAudio(URL.createObjectURL(recorder.getBlob()));
btnStartRecording.disabled = false;
setTimeout(function() {
if(!audio.paused) return;
setTimeout(function() {
if(!audio.paused) return;
audio.play();
}, 1000);
audio.play();
}, 300);
audio.play();
if(microphone) {
microphone.stop();
microphone = null;
}
}
var recorder; // globally accessible
var microphone;
var btnStartRecording = document.getElementById('btn-start-recording');
var btnStopRecording = document.getElementById('btn-stop-recording');
var canvas = document.getElementById('audio-canvas');
var visualizerCheckbox = document.getElementById('visualizer');
btnStartRecording.onclick = function() {
this.disabled = true;
this.style.border = '';
this.style.fontSize = '';
if (!microphone) {
captureMicrophone(function(mic) {
microphone = mic;
replaceAudio();
audio.muted = true;
audio.play();
btnStartRecording.disabled = false;
btnStartRecording.style.border = '1px solid red';
btnStartRecording.style.fontSize = '150%';
alert('Please click startRecording button again. First time we tried to access your microphone. Now we will record it.');
});
return;
}
replaceAudio();
audio.muted = true;
audio.play();
if(visualizerCheckbox.checked){
initScope(canvas);
}
var options = {
type: 'audio',
numberOfAudioChannels: 2,
checkForInactiveTracks: true,
bufferSize: 16384,
sampleRate: 48000
};
if(recorder) {
recorder.destroy();
recorder = null;
}
recorder = RecordRTC(microphone, options);
recorder.startRecording();
btnStopRecording.disabled = false;
};
btnStopRecording.onclick = function() {
this.disabled = true;
recorder.stopRecording(stopRecordingCallback);
};
function initScope(audioScopeCanvas) {
if (audioScopeCanvas) {
var analyser;
var mic;
var javascriptNode;
var canvasContext;
var audioContext = new AudioContext();
analyser = audioContext.createAnalyser();
mic = audioContext.createMediaStreamSource(microphone);
javascriptNode = audioContext.createScriptProcessor(2048, 1, 1);
analyser.smoothingTimeConstant = 0.3;
analyser.fftSize = 1024;
mic.connect(analyser);
analyser.connect(javascriptNode);
javascriptNode.connect(audioContext.destination);
//canvasContext = $("#canvas")[0].getContext("2d");
canvasContext = audioScopeCanvas.getContext("2d");
javascriptNode.onaudioprocess = function () {
var array = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteFrequencyData(array);
var values = 0;
var length = array.length;
for (var i = 0; i < length; i++) {
values += array[i];
}
var average = values / length;
canvasContext.clearRect(0, 0, 60, 130);
canvasContext.fillStyle = '#00ff00';
canvasContext.fillRect(0, 130 - average, 25, 130);
};
}
}
</script>
You can play with it here:
http://andrew.colchagoff.com/safari-record.html
I'm using RecordRTC.
I found this link on the internet, which demonstrates how WebRTC works: https://shanetully.com/2014/09/a-dead-simple-webrtc-example/
Its source code is here: https://github.com/shanet/WebRTC-Example
Now I am trying to follow the example, and here is what I did:
1- I created a folder named voicechat
2- I created 2 folders inside voicechat: voicechat\client & voicechat\server
3- I put index.html & webrtc.js into voicechat\client
4- I put server.js into voicechat\server
5- I put the voicechat folder into my Tomcat webapps folder, so the path is C:\apache-tomcat-7.0.53\webapps\ROOT\voicechat
6- I started my Tomcat.
7- I opened http://xxx.xxx.xxx.xxx/voicechat/client/index.html on my PC & the page showed the webcam (webcam 1) of my PC. No problem.
8- I opened http://xxx.xxx.xxx.xxx/voicechat/client/index.html on another PC & the page also showed the webcam (webcam 2) of the other PC. But I could not see webcam 1 of my PC, and when I talked on my PC the person sitting at the other PC could not hear me, and vice versa.
So why didn't it work? What did I do wrong?
Here is the code of 3 files:
index.html
<html>
<head>
<script src="webrtc.js"></script>
</head>
<body>
<video id="localVideo" autoplay muted style="width:40%;"></video>
<video id="remoteVideo" autoplay style="width:40%;"></video>
<br />
<input type="button" id="start" onclick="start(true)" value="Start Video"></input>
<script type="text/javascript">
pageReady();
</script>
</body>
</html>
webrtc.js
var localVideo;
var remoteVideo;
var localStream;
var serverConnection;
var peerConnection;
var peerConnectionConfig = {'iceServers': [{'url': 'stun:stun.services.mozilla.com'}, {'url': 'stun:stun.l.google.com:19302'}]};
navigator.getUserMedia = navigator.getUserMedia || navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
window.RTCPeerConnection = window.RTCPeerConnection || window.mozRTCPeerConnection || window.webkitRTCPeerConnection;
window.RTCIceCandidate = window.RTCIceCandidate || window.mozRTCIceCandidate || window.webkitRTCIceCandidate;
window.RTCSessionDescription = window.RTCSessionDescription || window.mozRTCSessionDescription || window.webkitRTCSessionDescription;
function pageReady() {
localVideo = document.getElementById('localVideo');
remoteVideo = document.getElementById('remoteVideo');
serverConnection = new WebSocket('ws://127.0.0.1:3434');
serverConnection.onmessage = gotMessageFromServer;
var constraints = {
video: true,
audio: true,
};
if(navigator.getUserMedia) {
navigator.getUserMedia(constraints, getUserMediaSuccess, errorHandler);
} else {
alert('Your browser does not support getUserMedia API');
}
}
function getUserMediaSuccess(stream) {
localStream = stream;
localVideo.src = window.URL.createObjectURL(stream);
}
function start(isCaller) {
peerConnection = new RTCPeerConnection(peerConnectionConfig);
peerConnection.onicecandidate = gotIceCandidate;
peerConnection.onaddstream = gotRemoteStream;
peerConnection.addStream(localStream);
if(isCaller) {
peerConnection.createOffer(gotDescription, errorHandler);
}
}
function gotMessageFromServer(message) {
if(!peerConnection) start(false);
var signal = JSON.parse(message.data);
if(signal.sdp) {
peerConnection.setRemoteDescription(new RTCSessionDescription(signal.sdp), function() {
peerConnection.createAnswer(gotDescription, errorHandler);
}, errorHandler);
} else if(signal.ice) {
peerConnection.addIceCandidate(new RTCIceCandidate(signal.ice));
}
}
function gotIceCandidate(event) {
if(event.candidate != null) {
serverConnection.send(JSON.stringify({'ice': event.candidate}));
}
}
function gotDescription(description) {
console.log('got description');
peerConnection.setLocalDescription(description, function () {
serverConnection.send(JSON.stringify({'sdp': description}));
}, function() {console.log('set description error')});
}
function gotRemoteStream(event) {
console.log('got remote stream');
remoteVideo.src = window.URL.createObjectURL(event.stream);
}
function errorHandler(error) {
console.log(error);
}
server.js
var WebSocketServer = require('ws').Server;
var wss = new WebSocketServer({port: 3434});
wss.broadcast = function(data) {
for(var i in this.clients) {
this.clients[i].send(data);
}
};
wss.on('connection', function(ws) {
ws.on('message', function(message) {
console.log('received: %s', message);
wss.broadcast(message);
});
});
server.js is intended to be run as a Node.js server for WebSocket signaling. Run it with node server.js. You shouldn't need Tomcat at all.
From the project readme:
The signaling server uses Node.js and ws and can be started as such:
$ npm install ws
$ node server/server.js
With the server running, open client/index.html in a recent version of either Firefox or Chrome.
You can open index.html with just a file URL.
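Also note that webrtc.js above opens its WebSocket to ws://127.0.0.1:3434, which only reaches the signaling server from the machine the server runs on. On a second PC you would point it at the server's reachable address instead (the IP below is a placeholder):
// In webrtc.js: replace the loopback address with the signaling
// server's LAN address (placeholder IP shown).
serverConnection = new WebSocket('ws://192.168.1.10:3434');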
I changed HTTPS_PORT = 8443 to HTTP_PORT = 8443, and did the same with all the https references, changing them to http. Next, keep only const serverConfig = { }; as the serverConfig, and delete the serverConfig argument in const httpServer = http.createServer(handleRequest);. After these changes, you can run your server with npm start.
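Roughly, the changed lines in server.js would look like this (a sketch based on the description above, not the exact upstream file):
// server.js after switching from HTTPS to plain HTTP (sketch):
const HTTP_PORT = 8443;       // was HTTPS_PORT = 8443
const http = require('http'); // was require('https')
const serverConfig = { };     // no TLS key/cert needed any more
// serverConfig is no longer passed to createServer:
const httpServer = http.createServer(handleRequest);
httpServer.listen(HTTP_PORT);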
This is the simplest code that can do the job, and there is no need to install Node.js at all.
Just put the code below into an index.html file, start your web host, and you're done!
<!DOCTYPE html>
<html>
<head>
<script src="//simplewebrtc.com/latest.js"></script>
</head>
<body>
<div id="localVideo" muted></div>
<div id="remoteVideo"></div>
<script>
var webrtc = new SimpleWebRTC({
localVideoEl: 'localVideo',
remoteVideosEl: 'remoteVideo',
autoRequestMedia: true
});
webrtc.on('readyToCall', function () {
webrtc.joinRoom('My room name');
});
</script>
</body>
</html>
I've updated my code with the complete signaling exchange. The problem now is that, upon completing the exchange, socket.on('receivedAnswer') throws an error. I'm testing all this on my local machine in two browser tabs, so I assume I don't need ICE just yet for this to work...
<html>
<head>
<link rel = 'stylesheet' type = 'text/css' href= 'css.css'>
<script src="node_modules/socket.io/node_modules/socket.io-client/socket.io.js"></script>
</head>
<body>
<div id='video_box'>
<video id= 'video' autoplay="true">
</video>
</div>
<div id='video_box2'>
<video id='video2' autoplay="true">
</video>
</div>
<script>
var local_stream;
var baseURL = getBaseURL();
var socketIOPort = 8999;
var socketIOLocation = baseURL + socketIOPort;
var socket = io(socketIOLocation);
var localvid = document.getElementById('video');
var mediaOptions = { audio: false, video: true };
var pc = new mozRTCPeerConnection( {"iceServers": [{"url": "stun:stun.l.google.com:19302" }] });
var pc2 = new mozRTCPeerConnection();
var PeerConnection = window.RTCPeerConnection || window.mozRTCPeerConnection || window.webkitRTCPeerConnection;
var SessionDescription = window.RTCSessionDescription || window.mozRTCSessionDescription || window.webkitRTCSessionDescription;
var offerConstraints = {OfferToReceiveAudio: true, OfferToReceiveVideo: true }
socket.on('receivedAnswer', function(answerSDP){
pc.setRemoteDescription(new mozRTCSessionDescription(answerSDP), function(){
alert('received the answer');
}, error2);
});
socket.on('getPeer1', function(SDP) {
alert('got peer 1 SDP');
pc.setRemoteDescription(new mozRTCSessionDescription(SDP), function() {
pc.createAnswer(function(answerSDP){
pc.setLocalDescription(answerSDP, function() {
socket.emit('answerSDP', answerSDP);
alert('sending answer');
}, error2);
}, error2);
}, error2);
});
function start(){
checkMedia();
navigator.getUserMedia(mediaOptions, getMediaSuccess, error2);
}
function answer(offeredSDP){
offeredSDP = new SessionDescription(offeredSDP);
}
function peer(){
pc.addStream(local_stream);
pc.createOffer(function(SDP){
socket.emit('sendSDPtoServer', SDP);
}, error2, offerConstraints);
}
function getMediaSuccess(stream){
localvid.src = window.URL.createObjectURL(stream);
local_stream = stream;
peer(); ///////////
}
function error2(){
alert('error');
}
function error3(){
alert('error here');
}
function checkMedia() {
if (!navigator.getUserMedia) {
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
}
if (!navigator.getUserMedia){
alert('getUserMedia not supported in this browser.');
}
}
function getBaseURL(){
baseURL = location.protocol + "//" + location.hostname + ":" + location.port;
return baseURL;
}
start();
//alert('script');
</script>
</body>
</html>
Server
var static = require('node-static');
var express = require('express');
var app = express();
var port = 8999;
var http = require('http');
var file= new(static.Server)();
var io = require('socket.io').listen(app.listen(port));
var connectedClients = {};
var peer1 = 0;
var peer2 = 0;
var peer1sdp = 0;
var peer2sdp = 0;
app.set('views', __dirname)
.engine('html', require('ejs').renderFile)
.use(express.static(__dirname + '/public'))
.get('/', function(req, res) {
//res.render('indexcpy.html');
});
io.on('connection', function(socket) {
console.log('a user connected');
socket.on('disconnect', function(){
console.log('user disconnected');
});
socket.on('answerSDP', function(answerSDP){
console.log('ANSWER SENT');
io.to(peer1).emit('receivedAnswer', answerSDP);
});
socket.on('sendSDPtoServer', function(SDP) {
if(peer1 == 0){
console.log('peer 1 ' + socket.id + ' has sent its SDP to server');
peer1 = socket.id;
peer1sdp = SDP;
} else {
console.log('peer 2 ' + socket.id + ' has been sent to the server');
socket.emit('getPeer1', peer1sdp);
}
});
});
The mistake here is your assumption that pc.addStream(stream) would fire pc.onaddstream(event) on the same peer; it is actually fired on the remote peer.
Reference
Other than that, your WebRTC code is incomplete: while your offer SDP reaches the server, it is not sent to the remote peer, the answer has to be forwarded back to your peer, and so on...
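For example, the server also needs to relay ICE candidates, which the client code above never sends at all. A minimal, illustrative relay using socket.io could look like this; the 'candidate' event name is an assumption, not part of the question's code:
// Inside io.on('connection', function(socket) { ... }):
// relay ICE candidates between the peers; the 'candidate' event
// name is illustrative only.
socket.on('candidate', function(candidate) {
  socket.broadcast.emit('candidate', candidate); // everyone but the sender
});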
You have to make sure that your local stream is added to the peer connection before creating the answer. It would be clearer if you showed where you generate the answer.
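Applied to the question's code, that ordering would look like this (a sketch of the existing 'getPeer1' handler with the one missing line added):
// Sketch: same flow as the question's 'getPeer1' handler, but with the
// local stream added *before* the answer is created, so the answer SDP
// actually contains the local audio/video media lines.
socket.on('getPeer1', function(SDP) {
  pc.addStream(local_stream); // must happen before createAnswer
  pc.setRemoteDescription(new mozRTCSessionDescription(SDP), function() {
    pc.createAnswer(function(answerSDP) {
      pc.setLocalDescription(answerSDP, function() {
        socket.emit('answerSDP', answerSDP);
      }, error2);
    }, error2);
  }, error2);
});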
I have the following code from http://forestmist.org/blog/web-audio-api-loops/
It works well, but I need record functionality that can record the previously clicked buttons and store their audio too. Any help?
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Web Audio API Loops Demo</title>
</head>
<body>
<form>
<button id="button-loop-1" type="button" value="1">Loop 1</button>
<button id="button-loop-2" type="button" value="2">Loop 2</button>
</form>
<script>
//--------------
// Audio Object
//--------------
var audio = {
buffer: {},
compatibility: {},
files: [
'synth.wav',
'beat.wav'
],
proceed: true,
source_loop: {},
source_once: {}
};
//-----------------
// Audio Functions
//-----------------
audio.findSync = function(n) {
var first = 0,
current = 0,
offset = 0;
// Find the audio source with the earliest startTime to sync all others to
for (var i in audio.source_loop) {
current = audio.source_loop[i]._startTime;
if (current > 0) {
if (current < first || first === 0) {
first = current;
}
}
}
if (audio.context.currentTime > first) {
offset = (audio.context.currentTime - first) % audio.buffer[n].duration;
}
return offset;
};
audio.play = function(n) {
if (audio.source_loop[n]._playing) {
audio.stop(n);
} else {
audio.source_loop[n] = audio.context.createBufferSource();
audio.source_loop[n].buffer = audio.buffer[n];
audio.source_loop[n].loop = true;
audio.source_loop[n].connect(audio.context.destination);
var offset = audio.findSync(n);
audio.source_loop[n]._startTime = audio.context.currentTime;
if (audio.compatibility.start === 'noteOn') {
/*
The deprecated noteOn() function does not support offsets.
Compensate by using noteGrainOn() with an offset to play once and then schedule a noteOn() call to loop after that.
*/
audio.source_once[n] = audio.context.createBufferSource();
audio.source_once[n].buffer = audio.buffer[n];
audio.source_once[n].connect(audio.context.destination);
audio.source_once[n].noteGrainOn(0, offset, audio.buffer[n].duration - offset); // currentTime, offset, duration
/*
Note about the third parameter of noteGrainOn().
If your sound is 10 seconds long, your offset 5 and duration 5 then you'll get what you expect.
If your sound is 10 seconds long, your offset 5 and duration 10 then the sound will play from the start instead of the offset.
*/
// Now queue up our looping sound to start immediately after the source_once audio plays.
audio.source_loop[n][audio.compatibility.start](audio.context.currentTime + (audio.buffer[n].duration - offset));
} else {
audio.source_loop[n][audio.compatibility.start](0, offset);
}
audio.source_loop[n]._playing = true;
}
};
audio.stop = function(n) {
if (audio.source_loop[n]._playing) {
audio.source_loop[n][audio.compatibility.stop](0);
audio.source_loop[n]._playing = false;
audio.source_loop[n]._startTime = 0;
if (audio.compatibility.start === 'noteOn') {
audio.source_once[n][audio.compatibility.stop](0);
}
}
};
//-----------------------------
// Check Web Audio API Support
//-----------------------------
try {
// More info at http://caniuse.com/#feat=audio-api
window.AudioContext = window.AudioContext || window.webkitAudioContext;
audio.context = new window.AudioContext();
} catch(e) {
audio.proceed = false;
alert('Web Audio API not supported in this browser.');
}
if (audio.proceed) {
//---------------
// Compatibility
//---------------
(function() {
var start = 'start',
stop = 'stop',
buffer = audio.context.createBufferSource();
if (typeof buffer.start !== 'function') {
start = 'noteOn';
}
audio.compatibility.start = start;
if (typeof buffer.stop !== 'function') {
stop = 'noteOff';
}
audio.compatibility.stop = stop;
})();
//-------------------------------
// Setup Audio Files and Buttons
//-------------------------------
for (var a in audio.files) {
(function() {
var i = parseInt(a) + 1;
var req = new XMLHttpRequest();
req.open('GET', audio.files[i - 1], true); // array starts with 0 hence the -1
req.responseType = 'arraybuffer';
req.onload = function() {
audio.context.decodeAudioData(
req.response,
function(buffer) {
audio.buffer[i] = buffer;
audio.source_loop[i] = {};
var button = document.getElementById('button-loop-' + i);
button.addEventListener('click', function(e) {
e.preventDefault();
audio.play(this.value);
});
},
function() {
console.log('Error decoding audio "' + audio.files[i - 1] + '".');
}
);
};
req.send();
})();
}
}
</script>
</body>
</html>
Check out Recorder.js (https://github.com/mattdiamond/Recorderjs). It should help you out.
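A minimal usage sketch: route the loops through a master gain node and record that node. record(), stop() and exportWAV() are Recorder.js's documented methods; the master node is an assumption layered onto the demo above.
// Sketch: record everything flowing through a master gain node.
var master = audio.context.createGain();
master.connect(audio.context.destination);
// In audio.play(), connect each source to `master` instead of
// audio.context.destination, then:
var recorder = new Recorder(master);
recorder.record(); // start capturing
// later, e.g. on a stop button:
recorder.stop();
recorder.exportWAV(function(blob) {
  var url = URL.createObjectURL(blob); // playable/downloadable WAV
  console.log('Recording ready:', url);
});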