I had earlier posted some questions on this problem. At that time I had two separate programs for the caller and the receiver, and I was using the old-fashioned callback API. Thanks to help from #jib on that post, I understood the need for some fundamental changes. I rewrote the program as a single integrated page for both caller and receiver and switched to the WebRTC promise API. My problem is that I am not getting remote video on either end. One part I understand but cannot solve: the receiver does not create SDP for video in the first place, only for audio. The caller does create SDP for video and audio, but on the receiver end no event is generated for the remote stream.
I have checked, through console logs, that the core functions work: the offer SDP is created, sent out, and received; the answer SDP is created, sent out, and received; and candidates get exchanged and added. But the .onaddstream event handler is never triggered. Local video is shown, but that is trivial.
I have spent a LOT of time on this. I simply need to get that exciting feeling of seeing remote video on both ends which has kept me going. ANY HELP WILL BE SINCERELY APPRECIATED.
<script>
$(document).ready(function () {
var iceCandidates = [], countIceCandidates=0;
var socket = io.connect();
socket.on('connect',function() { console.log("Socket connected"); });
var pc = new RTCPeerConnection({"iceServers":[{"url":"stun:stun.l.google.com:19302"}]});
//If remote video stream comes in, display it in DIV vid2
pc.onaddStream = function (event) {
stream = event.stream;
var video = $('#vid2');
video.attr('src', URL.createObjectURL(stream));
video.onloadedmetadata = function(e) { video.play(); }
}
//Display media in both Caller and Receiver
navigator.mediaDevices.getUserMedia({ audio: true, video: true })
.then(function(stream) {
var video = $('#vid1');
video.attr('src', URL.createObjectURL(stream));
video.onloadedmetadata = function(e) { video.play(); };
pc.addStream(stream);
})
.catch(function(err) { console.log(err);});
//INITIATE CALL
$('#call').click(function() {
pc.createOffer({ offerToReceiveVideo: true, offerToReceiveAudio: true })
.then(function(offer) {
localSessionDescription = new RTCSessionDescription(offer);
pc.setLocalDescription(localSessionDescription)
.then (function() { socket.emit('sdpOffer',localSessionDescription); })
.catch(function(err) { console.log("Error in setLocalDescription"); console.log(err); })
.catch(function(err) { console.log("Error in createOffer"); console.log(err); })
});
})
pc.onicecandidate = function (event) {
socket.emit('candidate',event.candidate);
};
socket.on('candidate',function (data) {
if (data != null) {
pc.addIceCandidate(new RTCIceCandidate(data))
.then(function() { console.log("peer candidate added");})
.catch(function(err) {console.log(err); console.log("Error during peer candidate addition");});
}
});
socket.on('disconnect',function() { alert("Disconnected"); });
function error(err) {
console.log("The following error occurred: " + err.name);
}
socket.on('sdpAnswer',function(data) {
sdpAnswer = new RTCSessionDescription(data.sdpAnswer);
pc.setRemoteDescription(sdpAnswer)
.then(function() { console.log("Answer SDP Set:"); console.log(sdpAnswer); })
.catch(function(err) { console.log("Error enountered when setting remote SDP Answer"); console.log(err)});
});
socket.on('sdpOffer', function(data) {
sdpOffer = new RTCSessionDescription(data.sdpOffer);
pc.setRemoteDescription(sdpOffer)
.then(function() { console.log("Remote SDP set in receiver");
pc.createAnswer()
.then(function(sdpAnswer) {
localSessionDescription = new RTCSessionDescription(sdpAnswer);
socket.emit('sdpAnswer',localSessionDescription);
pc.setLocalDescription(localSessionDescription)
.then(function(){
console.log("Local SDP Description set in receiver:");
})
.catch(function(err) { console.log("Error enountered when setting local SDP in receiver"); console.log(err)});
})
.catch(function(err) { console.log("Error enountered when creating answer SDP in receiver"); console.log(err)});
});
});
}); //End of document.ready function
</script>
ON THE SERVER SIDE (RELEVANT CODE ONLY). I have included this just in case there are any datatype-related issues - object types, etc. - getting changed when sent through the server.
io.sockets.on('connection', function(socket) {
socket.on('sdpOffer', function(data) {
sdpOffer = data.sdp;
socket.broadcast.emit('sdpOffer',{"sdpOffer":data});
});
socket.on('sdpAnswer', function(data) {
sdpAnswer = data.sdp;
socket.broadcast.emit('sdpAnswer',{"sdpAnswer":data});
});
socket.on('candidate', function(data) {
socket.broadcast.emit('candidate',data);
});
});
Rename pc.onaddStream to pc.onaddstream. Event handler properties are all lowercase, so the mixed-case spelling just sets an unused property and your handler never runs.
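In other words, a minimal sketch of the corrected handler, reusing the question's jQuery element (the newer ontrack / srcObject API would also work, but the smallest fix is just the property name):
// Lowercase handler name - otherwise nothing is attached and the
// remote stream arrives with nobody listening.
pc.onaddstream = function (event) {
    var video = $('#vid2');
    video.attr('src', URL.createObjectURL(event.stream));
};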
I am developing a voice call app for Android using peer.js. It works, but there is a lag in the audio (around 1-5 seconds). While searching for a fix I found that I can enable DTX audio to reduce the number of packets sent, but I have no idea how to access and change the SDP object in peer.js. Here is my code:
let audioConstraints = {
channelCount: 1,
sampleRate: 16000,
sampleSize: 8,
volume: 1,
latency: 0.003,
echoCancellation: true,
noiseSuppression: true,
autoGainControl: true,
};
constraints = { audio: audioConstraints, video: false };
let localStream;
//listen for calls
function listen() {
peer.on("call", (call) => {
navigator.mediaDevices
.getUserMedia(constraints)
.then(function (stream) {
localVideo.srcObject = stream;
localStream = stream;
call.answer(stream);
call.on("stream", (remoteStream) => {
Android.onPeerStream();
remoteVideo.srcObject = remoteStream;
});
})
.catch(function (err) {
Android.logEvent("getUserMedia error: " + err);
});
});
}
//start call
function startCall(otherUserId) {
navigator.mediaDevices
.getUserMedia(constraints)
.then(function (stream) {
localVideo.srcObject = stream;
localStream = stream;
const call = peer.call(otherUserId, stream);
call.on("stream", (remoteStream) => {
Android.onPeerStream();
remoteVideo.srcObject = remoteStream;
});
})
.catch(function (err) {
Android.logEvent("getUserMedia error: " + err);
});
}
Could someone please share a working example of how to access and modify the SDP in peer.js? Also, how can I tune the audioConstraints to achieve low network lag while keeping decent voice quality?
UPDATE
Found a way to access the SDP in peer.js but still can't figure out how to enable DTX audio.
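For what it's worth, here is a rough, untested sketch of the direction the question is asking about. It assumes a PeerJS version that supports the sdpTransform option on call()/answer() (the underlying connection is also reachable via call.peerConnection); the usedtx=1 Opus fmtp parameter is what actually enables DTX:
// Sketch only: append usedtx=1 to the Opus fmtp line of the SDP.
function enableOpusDtx(sdp) {
    var match = sdp.match(/a=rtpmap:(\d+) opus\/48000/);
    if (!match) return sdp;              // no Opus codec found, leave SDP untouched
    var pt = match[1];                   // Opus payload type, e.g. 111
    var fmtp = new RegExp('a=fmtp:' + pt + ' (.*)');
    return sdp.replace(fmtp, 'a=fmtp:' + pt + ' $1;usedtx=1');
}

// Caller side (assumes sdpTransform is supported by your PeerJS build):
const call = peer.call(otherUserId, stream, { sdpTransform: enableOpusDtx });
// Callee side:
call.answer(stream, { sdpTransform: enableOpusDtx });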
I'm trying to establish a WebRTC connection between two browsers. I have a Node.js server for them to communicate through, which essentially just forwards messages from one client to the other. I am running the server and both tabs on my laptop, but I have not been able to make a connection. I have been able to exchange the offers and answers between the two tabs successfully, resulting in pc.signalingState === 'stable' in both tabs. I believe that once this is done the RTCPeerConnection objects should start producing icecandidate events, but that is not happening and I do not know why. Here is my code (I've omitted the server code):
'use strict';
// This is mostly copy pasted from webrtc.org/getting-started/peer-connections.
import { io } from 'socket.io-client';
const configuration = {
'iceServers': [
{ 'urls': 'stun:stun4.l.google.com:19302' },
{ 'urls': 'stun:stunserver.stunprotocol.org:3478' },
]
}
// Returns a promise for an RTCDataChannel
function join() {
const socket = io('ws://localhost:8090');
const pc = new RTCPeerConnection(configuration);
socket.on('error', error => {
socket.close();
throw error;
});
pc.addEventListener('signalingstatechange', event => {
// Prints 'have-local-offer' then 'stable' in one tab,
// 'have-remote-offer' then 'stable' in the other.
console.log(pc.signalingState);
})
pc.addEventListener('icegatheringstatechange', event => {
console.log(pc.iceGatheringState); // This line is never reached.
})
// Listen for local ICE candidates on the local RTCPeerConnection
pc.addEventListener('icecandidate', event => {
if (event.candidate) {
console.log('Sending ICE candidate'); // This line is never reached.
socket.emit('icecandidate', event.candidate);
}
});
// Listen for remote ICE candidates and add them to the local RTCPeerConnection
socket.on('icecandidate', async candidate => {
try {
await pc.addIceCandidate(candidate);
} catch (e) {
console.error('Error adding received ice candidate', e);
}
});
// Listen for connectionstatechange on the local RTCPeerConnection
pc.addEventListener('connectionstatechange', event => {
if (pc.connectionState === 'connected') {
socket.close();
}
});
// When both browsers send this signal they will both receive the 'matched' signal,
// one with the payload true and the other with false.
socket.emit('join');
return new Promise((res, rej) => {
socket.on('matched', async first => {
if (first) {
// caller side
socket.on('answer', async answer => {
await pc.setRemoteDescription(new RTCSessionDescription(answer))
.catch(console.error);
});
const offer = await pc.createOffer();
await pc.setLocalDescription(offer)
.catch(console.error);
socket.emit('offer', offer);
// Listen for connectionstatechange on the local RTCPeerConnection
pc.addEventListener('connectionstatechange', event => {
if (pc.connectionState === 'connected') {
res(pc.createDataChannel('data'));
}
});
} else {
// recipient side
socket.on('offer', async offer => {
pc.setRemoteDescription(new RTCSessionDescription(offer))
.catch(console.error);
const answer = await pc.createAnswer();
await pc.setLocalDescription(answer)
.catch(console.error);
socket.emit('answer', answer);
});
pc.addEventListener('datachannel', event => {
res(event.channel);
});
}
});
});
}
join().then(dc => {
dc.addEventListener('open', event => {
dc.send('Hello');
});
dc.addEventListener('message', event => {
console.log(event.data);
});
});
The behavior is the same in both Firefox and Chrome: the offers and answers are signalled successfully, but no ICE candidates are ever created. Does anyone know what I'm missing?
Okay, I found the problem. I have to create the RTCDataChannel before creating the offer. Here's a before and after comparison of the SDP offers:
# offer created before data channel:
{
type: 'offer',
sdp: 'v=0\r\n' +
'o=- 9150577729961293316 2 IN IP4 127.0.0.1\r\n' +
's=-\r\n' +
't=0 0\r\n' +
'a=extmap-allow-mixed\r\n' +
'a=msid-semantic: WMS\r\n'
}
# data channel created before offer:
{
type: 'offer',
sdp: 'v=0\r\n' +
'o=- 1578211649345353372 2 IN IP4 127.0.0.1\r\n' +
's=-\r\n' +
't=0 0\r\n' +
'a=group:BUNDLE 0\r\n' +
'a=extmap-allow-mixed\r\n' +
'a=msid-semantic: WMS\r\n' +
'm=application 9 UDP/DTLS/SCTP webrtc-datachannel\r\n' +
'c=IN IP4 0.0.0.0\r\n' +
'a=ice-ufrag:MZWR\r\n' +
'a=ice-pwd:LfptE6PDVughzmQBPoOtvaU8\r\n' +
'a=ice-options:trickle\r\n' +
'a=fingerprint:sha-256 1B:C4:38:9A:CD:7F:34:20:B8:8D:78:CA:4A:3F:81:AE:C5:55:B3:27:6A:BD:E5:49:5A:F9:07:AE:0C:F6:6F:C8\r\n' +
'a=setup:actpass\r\n' +
'a=mid:0\r\n' +
'a=sctp-port:5000\r\n' +
'a=max-message-size:262144\r\n'
}
In both cases the answer looked similar to the offer. You can see the offer is much longer and mentions webrtc-datachannel in the second case. And sure enough, I started getting icecandidate events and everything is working now.
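For anyone skimming, a minimal sketch of the reordering on the caller side of the code above (inside the existing 'matched' handler, which is async), using the same pc and socket names; the only change is that createDataChannel() runs before createOffer(), so the offer contains an m=application section and ICE gathering has something to do:
// Create the channel first so the offer actually negotiates something.
const dc = pc.createDataChannel('data');

const offer = await pc.createOffer();
await pc.setLocalDescription(offer);
socket.emit('offer', offer);

// Resolve with the already-created channel once connected.
pc.addEventListener('connectionstatechange', event => {
    if (pc.connectionState === 'connected') {
        res(dc);
    }
});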
I want to stream audio from a web page to a local server, using WebRTC. That server will process that audio and will output it immediately to the user. I need real time.
My code is actually working. However I am asking the user for the microphone with getUserMedia, and I don't need that microphone. This is quite annoying. What can I do in order to stream the audio without having to ask the user for the microphone?
Thank you.
Here is a minimal working example (it is highly inspired by https://github.com/aiortc/aiortc/blob/main/examples/server/client.js). Only the last part, with comments, is interesting:
let webSocket = new WebSocket('wss://0.0.0.0:8080/ws');
const config = { sdpSemantics: 'unified-plan' }
const pc = new RTCPeerConnection(config);
webSocket.onmessage = (message) => {
const data = JSON.parse(message.data);
switch(data.type) {
case "answer":
pc.setRemoteDescription(data.answer)
break;
default:
break;
}
};
function negotiate() {
return pc.createOffer()
.then(function(offer) {
return pc.setLocalDescription(offer);
})
.then(function() {
return new Promise(function(resolve) {
if (pc.iceGatheringState === 'complete') {
resolve();
} else {
function checkState() {
if (pc.iceGatheringState === 'complete') {
pc.removeEventListener('icegatheringstatechange', checkState);
resolve();
}
}
pc.addEventListener('icegatheringstatechange', checkState);
}
});
})
.then(function() {
const offer = pc.localDescription;
webSocket.send(
JSON.stringify({
type: "offer",
offer: {
sdp: offer.sdp,
type: offer.type
}
})
);
})
}
// Preparing the oscillator
const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
const oscillator = audioCtx.createOscillator();
const serverDestination = audioCtx.createMediaStreamDestination();
oscillator.connect(serverDestination);
// Asking for useless microphone
navigator.mediaDevices.getUserMedia({audio: true})
.then(() => {
return negotiate();
});
// Actual streaming
const stream = new MediaStream();
serverDestination.stream.getTracks().forEach((track) => {
pc.addTrack(track, stream);
})
// User pushes button to start the oscillator
function play() {
oscillator.start();
};
Just get rid of this:
// Asking for useless microphone
navigator.mediaDevices.getUserMedia({audio: true})
.then(() => {
return negotiate();
});
As you say, it's useless and not necessary. If you don't call getUserMedia(), the user won't be prompted to share their microphone. You can make WebRTC connections without this.
I suspect the problem you're running into is that your audio context is paused. If you call audioCtx.resume() when a user clicks a button, you'll be up and running. This is due to autoplay policy.
If you don't need user media, don't ask for it with getUserMedia in your code.
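A minimal sketch of that suggestion, keeping the question's negotiate(), audioCtx and oscillator names; the only assumption is that play() is wired to a user gesture, which is what satisfies the autoplay policy:
// Negotiate right away; no getUserMedia call, so no permission prompt.
negotiate();

// User pushes a button to start the oscillator.
function play() {
    // The context starts suspended until a user gesture resumes it.
    audioCtx.resume().then(function () {
        oscillator.start();
    });
}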
I have implemented the Agora code in my Angular 5 application using the Agora CDN. The camera opens, but in a one-to-one call the two users cannot see each other's video frames. Why?
var self = this;
var client = AgoraRTC.createClient({ mode: 'rtc', codec: "h264" });
client.init('my key', function () {
console.log("AgoraRTC client initialized");
client.join(null, 'TestChanel', null, function (uid) {
console.log("User " + uid + " join channel successfully");
self.uId = uid;
var localStream = AgoraRTC.createStream({
// streamID: uid,
// audio: true,
// video: true,
// screen: false,
streamID: uid,
audio: true,
cameraId: self.deviceId,
// microphoneId: self.microphone,
video: true,
screen: false,
extensionId: 'minllpmhdgpndnkomcoccfekfegnlikg',
}
);
localStream.init(function () {
console.log("getUserMedia successfully");
localStream.play('agora_local');
// localStream.play('video-caller');
client.publish(localStream, function (err) {
console.log("Publish local stream error: " + err);
});
client.on('stream-published', function (evt) {
console.log("Publish local stream successfully");
});
client.on('stream-added', function (evt) {
var stream = evt.stream;
console.log("New stream added: " + stream.getId());
client.subscribe(stream, function (err) {
console.log("Subscribe stream failed", err);
});
});
client.on('stream-subscribed', function (evt) {
var remoteStream = evt.stream;
console.log("Subscribe remote stream successfully: " + remoteStream.getId());
remoteStream.play('agora_remote' + remoteStream.getId());
})
}, function (err) {
console.log("getUserMedia failed", err);
});
}, function (err) {
console.log("Join channel failed", err);
});
}, function (err) {
console.log("AgoraRTC client init failed", err);
});
There are a few things that might help you resolve this issue:
Take advantage of TypeScript typings to help with debugging. You can install a basic typings library for the SDK with the command npm install --save-dev @types/agora-rtc-sdk, to give you more debugging power.
Set a timeout or use RxJS's timer before you try to play the remote stream; that sometimes helps ensure the stream is actually played (see the sketch below).
Test not assigning a cameraId in the createStream() method, or using this instead of self - you could be having issues with Angular because of it.
Also make sure in future tests that the uid used in join() is of the same type for both users. I've experienced this issue before when one client was passing in a number and the other was passing in a string.
If you're using Chrome to test this between two users, then it's probably not a browser issue, but, depending on the SDK version, there are some known bugs you should look into.
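For the timeout suggestion, a rough sketch reusing the stream-subscribed handler from the question (the 500 ms delay is an arbitrary figure; it just gives the remote container div and the subscription time to settle):
client.on('stream-subscribed', function (evt) {
    var remoteStream = evt.stream;
    console.log("Subscribe remote stream successfully: " + remoteStream.getId());
    // Delay play() slightly so the 'agora_remote<id>' element exists
    // before the SDK tries to render into it.
    setTimeout(function () {
        remoteStream.play('agora_remote' + remoteStream.getId());
    }, 500);
});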
I am trying to make a video-calling web application using WebRTC. I am using AngularJS and express.io. I am getting this error:
DOMException: Failed to set remote offer sdp: Called in wrong state: STATE_SENTOFFER
Some of my code is:
// in controller (socket is already defined in controller)
var videolocal = document.getElementById('videolocal');
var videoremote = document.getElementById('videoremote');
var streamlocal = null;
var pc = null;
window.URL = window.URL || window.webkitURL;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
window.RTCPeerConnection = window.RTCPeerConnection || window.mozRTCPeerConnection || window.webkitRTCPeerConnection;
var configuration = {'iceServers': [
// {'url': 'stun:stun.services.mozilla.com'},
{'url': 'stun:stun.l.google.com:19302'}
]};
// run start(true) to initiate a call
$scope.start = function() {
console.log('start');
// get the local stream, show it in the local video element and send it
navigator.getUserMedia({ "audio": true, "video": true }, function (stream) {
videolocal.src = URL.createObjectURL(stream);
pc = new RTCPeerConnection(configuration);
pc.addStream(stream);
// once remote stream arrives, show it in the remote video element
pc.onaddstream = function (evt) {
console.log('onaddstream');
videoremote.src = URL.createObjectURL(evt.stream);
};
// send any ice candidates to the other peer
pc.onicecandidate = function (evt) {
console.log('onicecandidate');
if(evt.candidate){
socket.emit('video_call',{user:2, type: 'candidate', candidate: evt.candidate});
}
};
// create an offer
pc.createOffer(function (offer) {
socket.emit('video_call', {user:2, type: "offer", offer: offer});
pc.setLocalDescription(offer);
}, function (error) {
alert("Error when creating an offer");
});
}, function () {alert('error in start')});
}
$scope.start();
socket.on('video_call', function (data) {
console.log(data);
//when somebody sends us an offer
function handleOffer(offer) {
// this line is giving error
pc.setRemoteDescription(new RTCSessionDescription(offer), function(){alert('success')}, function(e){ console.log(e); alert(e)});
//create an answer to an offer
pc.createAnswer(function (answer) {
pc.setLocalDescription(answer);
socket.emit('video_call', {user:2, type: "answer", answer: answer});
}, function (error) {
console.log(error);
alert("Error when creating an answer");
});
};
//when we got an answer from a remote user
function handleAnswer(answer) {
pc.setRemoteDescription(new RTCSessionDescription(answer));
};
//when we got an ice candidate from a remote user
function handleCandidate(candidate) {
pc.addIceCandidate(new RTCIceCandidate(candidate));
};
switch(data['type']) {
case "offer":
handleOffer(data["offer"]);
break;
case "answer":
handleAnswer(data['answer']);
break;
//when a remote peer sends an ice candidate to us
case "candidate":
handleCandidate(data['candidate']);
break;
default:
break;
}
});
On server:
// this function is called on video_call event
video_call: function (data) {
var id = data.user;
// if user is active
// users is dict of users (user_id as key)
if(Object.keys(users).indexOf(id.toString()) > -1){
// for each device of the user
users[id].forEach(function(user_socket){
console.log(data);
user_socket.emit('video_call', data);
});
}
}
Can anyone please tell me what is wrong with this code? The local stream is captured properly. I am using the Chromium browser.
Data on server:
I think the problem is that both clients call $scope.start(), so each pc has already sent its own offer (signaling state have-local-offer) when the remote offer arrives - hence the STATE_SENTOFFER error. In your handleOffer() function you need to create another PeerConnection and call setRemoteDescription() on that pc.
// Handle the incoming offer on a second connection, then answer from it.
var remote_pc = new RTCPeerConnection(configuration);
remote_pc.setRemoteDescription(new RTCSessionDescription(offer), function () {
    remote_pc.createAnswer(function (answer) {
        remote_pc.setLocalDescription(answer);
        socket.emit('video_call', {user: 2, type: 'answer', answer: answer});
    }, console.log);
}, console.log);
This is what I have in my code.
EDIT: In the official link, go to chapter 11.7 and check the steps after step 15 (where the offer is sent and the other peer receives it).