pion ion-sdk-js: how to publish a stream from a video file into a PeerConnection (WebRTC)

I’m experimenting with the ion echo-test example, which works for me (Chrome 98).
Instead of getting the stream from getUserMedia() though, I’d like to be able to capture it from a file (e.g. an mp4 file created with ffmpeg).
After associating the file with the local-video element, I replaced this part of main.js:
Ion.LocalStream.getUserMedia({
    resolution: resolutionBox.options[resolutionBox.selectedIndex].value,
    codec: codecBox.options[codecBox.selectedIndex].value,
    simulcast: sc,
    audio: true,
})
    .then((media) => {
        localStream = media;
        localVideo.srcObject = media;
        localVideo.autoplay = true;
        localVideo.controls = true;
        localVideo.muted = true;
        // joinBtns.style.display = "none";
        localRTC.publish(media);
        localDataChannel = localRTC.createDataChannel("data");
    })
    .catch(console.error);
with:
if (localVideo.captureStream) {
    stream = localVideo.captureStream();
    console.log('Captured stream from localVideo with captureStream', stream);
    localVideo.autoplay = true;
    localVideo.controls = true;
    if (stream) {
        localStream = new Ion.LocalStream(stream, {
            resolution: resolutionBox.options[resolutionBox.selectedIndex].value,
            codec: codecBox.options[codecBox.selectedIndex].value,
            simulcast: sc,
            audio: true,
            video: true,
        });
        localRTC.publish(localStream);
        localVideo.play();
    }
    localDataChannel = localRTC.createDataChannel("data");
}
The file plays in localVideo, and I can trace the ICE connectivity checks, which confirm the connection succeeds, but localRTC is not transmitting any video packets (so remoteRTC receives nothing and the ontrack event never fires).
I have compared the stream structure in the unmodified example and in my version, and I cannot see any difference.
No errors are logged in the console.
Is there anything else I should do to create a proper capture stream to pass to localRTC.publish()?
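Update: one variant I am considering (based on the assumption, which I have not verified, that captureStream() tracks stay muted until the element is actually playing) is to start playback first and only capture and publish once play() resolves:

if (localVideo.captureStream) {
    localVideo.muted = true; // allow autoplay without a user gesture
    localVideo.play().then(() => {
        // capture only once the element is actually playing
        const stream = localVideo.captureStream();
        localStream = new Ion.LocalStream(stream, {
            resolution: resolutionBox.options[resolutionBox.selectedIndex].value,
            codec: codecBox.options[codecBox.selectedIndex].value,
            simulcast: sc,
            audio: true,
            video: true,
        });
        localRTC.publish(localStream);
        localDataChannel = localRTC.createDataChannel("data");
    });
}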

Related

No sound in Safari using Web Audio API webkitAudioContext()

I am trying to use the Web Audio API to play sound in my React application.
It's currently playing sound in all browsers except Safari v12.1.
I am aware Safari has restrictions on autoplay and requires user interaction to play sound, so I have a play button which calls the _play() function:
_play = (url, index) => {
    this._getData(url);
    this.source.start(index);
}
It's calling the _getData() function which looks like this:
_getData(url) {
    this.source = this.audioContext.createBufferSource();
    var request = new XMLHttpRequest();
    request.open('GET', url, true);
    request.responseType = 'arraybuffer';
    request.onload = () => {
        var audioData = request.response;
        console.log(this.audioContext);
        this.audioContext.decodeAudioData(audioData, buffer => {
            this.source.buffer = buffer;
            this.source.connect(this.audioContext.destination);
        },
        function (e) { console.log("Error with decoding audio data" + e.err); });
    };
    request.send();
}
this.audioContext is created in the component constructor using:
this.audioContext = new (window.AudioContext || window.webkitAudioContext)();
The console.log(this.audioContext) inside request.onload shows the AudioContext before pressing play and again after pressing play (the two screenshots are not included here).
But no sound is playing (in Safari).
What am I doing wrong?
I think the problem you ran into is that Safari does not allow you to modify the buffer once you have called start().
For example, the following page plays a second of noise in Safari when you press the play button.
<!DOCTYPE html>
<html>
<body>
    <button id="play-button">play</button>
    <script>
        document
            .getElementById('play-button')
            .addEventListener('click', () => {
                const audioContext = new AudioContext();
                const audioBufferSourceNode = audioContext.createBufferSource();
                const sampleRate = audioContext.sampleRate;
                const audioBuffer = audioContext.createBuffer(1, sampleRate, sampleRate);
                const channelData = audioBuffer.getChannelData(0);
                for (let i = 0; i < sampleRate; i += 1) {
                    channelData[i] = (Math.random() * 2) - 1;
                }
                audioBufferSourceNode.buffer = audioBuffer;
                audioBufferSourceNode.connect(audioContext.destination);
                audioBufferSourceNode.start(audioContext.currentTime);
            });
    </script>
</body>
</html>
But it stops working if you modify it slightly: when you start the audioBufferSourceNode before assigning the buffer, there is no output anymore.
audioBufferSourceNode.connect(audioContext.destination);
audioBufferSourceNode.start(audioContext.currentTime);
audioBufferSourceNode.buffer = audioBuffer;
I guess you can get your code working by waiting for the HTTP response and the audio decoding before you start the source. Make sure to execute this.source.buffer = buffer before you execute this.source.start(index).
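A minimal restructuring of the code from the question along those lines could look like this (a sketch: it keeps the original XHR and just defers start() until decoding has finished; the onReady callback parameter is my addition):

_play = (url, index) => {
    // start the source only after the data has been fetched and decoded
    this._getData(url, () => this.source.start(index));
}

_getData(url, onReady) {
    this.source = this.audioContext.createBufferSource();
    var request = new XMLHttpRequest();
    request.open('GET', url, true);
    request.responseType = 'arraybuffer';
    request.onload = () => {
        this.audioContext.decodeAudioData(request.response, buffer => {
            this.source.buffer = buffer;                        // assign the buffer first...
            this.source.connect(this.audioContext.destination);
            onReady();                                          // ...then start the source
        }, e => { console.log("Error with decoding audio data" + e.err); });
    };
    request.send();
}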
I hope this helps.

WebRTC onaddstream: remote stream is received but does not show in video?

I have run into a problem.
var onaddstream = function (event) {
    var video = $("#chat_dialogForOne video[name='remote']")[0];
    var remoteStream = event.stream;
    video.srcObject = remoteStream;
    video.onloadedmetadata = function (e) {
        video.play();
    };
}
$("#chat_dialogForOne button[name='openVideo']").on("click", function () {
    $(this).toggleClass("active");
    $(this).data("use", $(this).data("use") ? false : true);
    if ($(this).data("use")) { // start the video/audio chat
        rtc.openVideoAudioLocal(function (localStream) { // create the local video stream and bind it to the element
            var video = $("#chat_dialogForOne video[name='video']")[0]; // the element that displays the video
            video.srcObject = localStream;
            video.onloadedmetadata = function (e) {
                video.play();
            };
            rtc.openVideoAudioLocal(function (remoteStream) {
                rtc.sendAddStream(remoteStream);
            }, true, true);
        }, true, false); // to avoid hearing my own voice, start video only, not audio
        $(this).find(" > span").html("结束视频"); // button label: "End video"
        $("#chat_dialogForOne button[name='openAudio']").hide();
    } else { // close the video/audio chat
        // closeRemoteChannelStream([oneWebRtc]);
        // closeLocalStream();
        // resetVideoButton();
    }
});
Image 2 (not included here): the local video displays successfully.
Image 1 (not included here): the remote video fails to display.
onaddstream receives the remote stream, but it does not show in the video element.
I need your help.
Sorry, it was a mistake of mine: a very slight error caused a new PC object to be recreated after the offer was received. The remote side did receive the video stream object, but the channel had already been replaced.
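For anyone hitting the same symptom, a minimal guard (a sketch of the fix described above, assuming a single pc variable and a config object; the helper name is mine) is to reuse the existing RTCPeerConnection instead of building a second one once the offer has been applied:

var pc = null;

function getPeerConnection(config) {
    // Reuse the existing connection; creating a fresh RTCPeerConnection after
    // the offer has been applied silently abandons the negotiated channel.
    if (!pc) {
        pc = new RTCPeerConnection(config);
        pc.onaddstream = onaddstream; // the handler from the question
    }
    return pc;
}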

Remote video is black screen or blank in WebRTC

I have a signaling server written in Java using WebSockets.
Local video works well, but the remote video is a black or blank screen.
It is not always blank, though: if I turn the server off and on again, the remote video sometimes shows up.
Why does it appear sometimes and not other times?
this is my code...
navigator.getUserMedia = navigator.getUserMedia || navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
window.RTCPeerConnection = window.RTCPeerConnection || window.mozRTCPeerConnection || window.webkitRTCPeerConnection;
window.RTCIceCandidate = window.RTCIceCandidate || window.mozRTCIceCandidate || window.webkitRTCIceCandidate;
window.RTCSessionDescription = window.RTCSessionDescription || window.mozRTCSessionDescription || window.webkitRTCSessionDescription;
window.SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition || window.mozSpeechRecognition
    || window.msSpeechRecognition || window.oSpeechRecognition;

var localVideoStream = null;
var peerConn = null,
    wsc = new WebSocket("ws://localhost:8080/signaling"),
    peerConnCfg = {
        'iceServers': [{
            'url': 'stun:stun.l.google.com:19302'
        }]
    };

var videoCallButton = document.getElementById("caller");
var endCallButton = document.getElementById("callee");
var localVideo = document.getElementById('localVideo');
var remoteVideo = document.getElementById('remoteVideo');

videoCallButton.addEventListener("click", initiateCall);
endCallButton.addEventListener("click", function (evt) {
    wsc.send(JSON.stringify({ "closeConnection": true }));
});

var sdpConstraints = {
    'mandatory': {
        'OfferToReceiveAudio': true,
        'OfferToReceiveVideo': true
    }
};

function prepareCall() {
    peerConn = new RTCPeerConnection(peerConnCfg);
    // send any ice candidates to the other peer
    peerConn.onicecandidate = onIceCandidateHandler;
    // once remote stream arrives, show it in the remote video element
    peerConn.onaddstream = onAddStreamHandler;
};

// run start(true) to initiate a call
function initiateCall() {
    prepareCall();
    // get the local stream, show it in the local video element and send it
    navigator.getUserMedia({ "audio": true, "video": true }, function (stream) {
        localVideoStream = stream;
        localVideo.src = URL.createObjectURL(localVideoStream);
        peerConn.addStream(localVideoStream);
        createAndSendOffer();
    }, function (error) { console.log(error); });
};

function answerCall() {
    prepareCall();
    // get the local stream, show it in the local video element and send it
    navigator.getUserMedia({ "audio": true, "video": true }, function (stream) {
        localVideoStream = stream;
        localVideo.src = URL.createObjectURL(localVideoStream);
        peerConn.addStream(localVideoStream);
        createAndSendAnswer();
    }, function (error) { console.log(error); });
};

wsc.onmessage = function (evt) {
    var signal = null;
    if (!peerConn) answerCall();
    signal = JSON.parse(evt.data);
    if (signal.sdp) {
        console.log("Received SDP from remote peer.");
        console.log("signal" + signal);
        peerConn.setRemoteDescription(new RTCSessionDescription(signal.sdp));
    } else if (signal.candidate) {
        console.log("signal" + signal.candidate);
        console.log("Received ICECandidate from remote peer.");
        peerConn.addIceCandidate(new RTCIceCandidate(signal.candidate));
    } else if (signal.closeConnection) {
        console.log("Received 'close call' signal from remote peer.");
        endCall();
    } else {
        console.log("signal" + signal.candidate);
    }
};

function createAndSendOffer() {
    peerConn.createOffer(
        function (offer) {
            var off = new RTCSessionDescription(offer);
            peerConn.setLocalDescription(new RTCSessionDescription(off),
                function () {
                    wsc.send(JSON.stringify({ "sdp": off }));
                },
                function (error) { console.log(error); }
            );
        },
        function (error) { console.log(error); }
    );
};

function createAndSendAnswer() {
    peerConn.createAnswer(
        function (answer) {
            var ans = new RTCSessionDescription(answer);
            peerConn.setLocalDescription(ans,
                function () {
                    wsc.send(JSON.stringify({ "sdp": ans }));
                },
                function (error) { console.log(error); }
            );
        },
        function (error) { console.log(error); }
    );
};

function onIceCandidateHandler(evt) {
    if (!evt || !evt.candidate) return;
    wsc.send(JSON.stringify({ "candidate": evt.candidate }));
};

function onAddStreamHandler(evt) {
    videoCallButton.setAttribute("disabled", true);
    endCallButton.removeAttribute("disabled");
    // set remote video stream as source for remote video HTML5 element
    remoteVideo.src = window.URL.createObjectURL(evt.stream);
    remoteVideo.play();
    console.log("remote src : " + remoteVideo.src);
};

function endCall() {
    peerConn.close();
    peerConn = null;
    videoCallButton.removeAttribute("disabled");
    endCallButton.setAttribute("disabled", true);
    if (localVideoStream) {
        localVideoStream.getTracks().forEach(function (track) {
            track.stop();
        });
        localVideo.src = "";
    }
    if (remoteVideo) {
        remoteVideo.src = "";
        window.URL.revokeObjectURL(remoteVideo);
    }
};
One of the reasons for blank/empty WebRTC video is high packet loss. In that scenario, server and client logs will show the connection as successful and the video as playing normally, so you won't see any warning or error.
To check whether there is high packet loss, go to about:webrtc in Firefox or chrome://webrtc-internals in Chrome. In Firefox, navigate to "RTP Stats"; it shows Received: ... packets and Lost: ... packets, and you can calculate the packet loss ratio from these counts. Chrome has a graph of the packet loss ratio. You might find a very high packet loss, such as 70%.
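You can also read the same counters programmatically. A sketch using the standard getStats() API (assuming pc is your RTCPeerConnection; older browsers expose mediaType instead of kind):

// Log the inbound video packet-loss ratio once per second.
setInterval(function () {
    pc.getStats().then(function (stats) {
        stats.forEach(function (report) {
            if (report.type === 'inbound-rtp' && report.kind === 'video') {
                var lost = report.packetsLost;
                var received = report.packetsReceived;
                var ratio = lost / (lost + received);
                console.log('packet loss: ' + (ratio * 100).toFixed(1) + '%');
            }
        });
    });
}, 1000);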
If you have such extremely high packet loss, one possible reason is that the client network interface has a smaller MTU (https://en.wikipedia.org/wiki/Maximum_transmission_unit) than the MTU the server uses. For example, your client network interface can have MTU=1500 bytes when not connected to a VPN and MTU=1250 bytes when connected. If the server sends RTP packets (over UDP) sized for MTU=1400, the client receives them when it is not on the VPN, but packets larger than 1250 bytes get dropped by the client network interface.
If you want to check the client MTU locally, run ifconfig on macOS or Linux.
Mac example, without example vpn:
en0: flags=8863<UP,BROADCAST,SMART,RUNNING,SIMPLEX,MULTICAST> mtu 1500
...
inet SOME_IP netmask 0xffffff00 broadcast 192.168.1.255
media: autoselect
status: active
Mac example, with example vpn:
utun2: flags=80d1<UP,POINTOPOINT,RUNNING,NOARP,MULTICAST> mtu 1250
inet SOME_IP --> SOME_IP netmask 0xffffffff
nd6 options=201<PERFORMNUD,DAD>
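If you also want to probe the usable path MTU rather than just the interface MTU, on Linux you can send a don't-fragment ping just under the suspected limit (a sketch; the address is a placeholder, and 1222 = 1250 bytes minus 28 bytes of IP+ICMP headers):

# A reply means a 1250-byte packet fits; an error means the path MTU is smaller.
ping -M do -s 1222 -c 1 your.server.example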
How to configure the MTU on the server:
If you are using GStreamer for the WebRTC server, the payload generator element, for example rtpvp8pay, has a property for setting the desired MTU.
Running gst-inspect-1.0 rtpvp8pay shows that it defaults to an MTU of 1400, which can be larger than your client network interface can handle (e.g. 1250 in the example above).
Setting a lower MTU on the server's GStreamer pipeline means the client network interface no longer drops the majority of the packets (the packet loss ratio can fall to 0.01% just from this change).
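For illustration, a minimal test pipeline with a lowered MTU might look like this (a sketch; the host, port, and the 1200-byte value are placeholders to adapt to your setup):

# Send a VP8 test stream with RTP packets capped at 1200 bytes.
gst-launch-1.0 videotestsrc ! vp8enc ! rtpvp8pay mtu=1200 ! udpsink host=203.0.113.5 port=5004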
In the VPN scenario above, the incoming video may work for ~10 seconds right after the VPN reconnects, then freeze, and subsequent page refreshes lead to a blank video with 70%+ packet loss.
This is a very specific scenario, but when it happens it is a completely silent/hidden error, so hopefully this helps someone.
Add oniceconnectionstatechange to your prepareCall function and see if there is any ICE failure because of NAT issues:
function prepareCall() {
    peerConn = new RTCPeerConnection(peerConnCfg);
    // send any ice candidates to the other peer
    peerConn.onicecandidate = onIceCandidateHandler;
    // once remote stream arrives, show it in the remote video element
    peerConn.onaddstream = onAddStreamHandler;
    peerConn.oniceconnectionstatechange = function () {
        console.log('ICE state: ', peerConn.iceConnectionState);
    };
};
In the function where you receive the remote video track, start a timer, and in the timer's callback add the track to the view.

Share screen using getScreenId.js in WebRTC for two peers

I am trying to implement a screen-share function in WebRTC video conferencing. Following a suggestion, I am using muaz-khan's solution at https://www.webrtc-experiment.com/getScreenId/. I can easily capture one peer's screen and replace its video stream with the captured stream. But this is a video-conferencing setup, so two browsers are in a call with each other: browser 1 has video stream A (local) and video stream B (remote); browser 2 has video stream B (local) and video stream A (remote). So when I share the screen from browser 1, the screen-share stream should replace the local video in browser 1 and the remote video in browser 2.
Right now, the screen share only replaces the local video in browser 1; browser 2 sees no change in its remote video (which is browser 1's local video). I don't know how to trigger the change in browser 2 as well. Do I need to signal the screen-share stream to the server and change the remote stream accordingly?
Here is my code in javascript:
$(function () {
    var brokerController, ws, webRTC, localid;
    // ws = new XSockets.WebSocket("wss://rtcplaygrouund.azurewebsites.net:443", ["connectionbroker"], {
    ws = new XSockets.WebSocket("ws://localhost:4502", ["connectionbroker"], {
        ctx: "152300ed-4d84-4e72-bc99-965052dc1e95"
    });

    var addRemoteVideo = function (peerId, mediaStream) {
        var remoteVideo = document.createElement("video");
        remoteVideo.setAttribute("autoplay", "true");
        remoteVideo.setAttribute("rel", peerId);
        attachMediaStream(remoteVideo, mediaStream);
        remoteVideo.setAttribute("class", "col-md-3");
        remoteVideo.setAttribute("height", $(document).height() * 0.3);
        remoteVideo.setAttribute("id", 'remoteVideo');
        $("#videoscreen").append(remoteVideo);
    };

    var onConnectionLost = function (remotePeer) {
        console.log("onconnectionlost");
        var peerId = remotePeer.PeerId;
        var videoToRemove = $("video[rel='" + peerId + "']");
        videoToRemove.remove();
    };

    var onConnectionCreated = function () {
        console.log("onconnectioncreated", arguments);
    }

    var onGetUserMedia = function (stream) {
        console.log("Successfully got some userMedia, hopefully a goat will appear..");
        webRTC.connectToContext(); // connect to the current context?
    };

    var onRemoteStream = function (remotePeer) {
        addRemoteVideo(remotePeer.PeerId, remotePeer.stream);
        console.log("Oops, we got a remote stream. Let's see if it's a goat..");
    };

    var onLocalStream = function (mediaStream) {
        console.log("Got a localStream", mediaStream.id);
        localid = mediaStream.id;
        console.log("check this id: mediastream id", mediaStream.id);
        var video = document.createElement("video");
        video.setAttribute("height", "100%");
        video.setAttribute("autoplay", "true");
        video.setAttribute("id", "localvideo");
        video.setAttribute("name", mediaStream.id);
        attachMediaStream(video, mediaStream);
        $("#videoscreen").append(video);
        $('#share').click(function () {
            getScreenId(function (error, sourceId, screen_constraints) {
                navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
                navigator.getUserMedia(screen_constraints, function (stream) {
                    $('#localvideo').attr('src', URL.createObjectURL(stream));
                }, function (error) {
                    console.error(error);
                });
            });
        });
    };

    var onContextCreated = function (ctx) {
        console.log("RTC object created, and a context is created - ", ctx);
        webRTC.getUserMedia(webRTC.userMediaConstraints.hd(true), onGetUserMedia, onError);
    };

    var onOpen = function () {
        console.log("Connected to the brokerController - 'connectionBroker'");
        webRTC = new XSockets.WebRTC(this);
        webRTC.onlocalstream = onLocalStream;
        webRTC.oncontextcreated = onContextCreated;
        webRTC.onconnectioncreated = onConnectionCreated;
        webRTC.onconnectionlost = onConnectionLost;
        webRTC.onremotestream = onRemoteStream;
    };

    var onConnected = function () {
        console.log("connection to the 'broker' server is established");
        console.log("Try to get the broker controller from the server..");
        brokerController = ws.controller("connectionbroker");
        brokerController.onopen = onOpen;
    };

    ws.onconnected = onConnected;
});
I am using XSockets as the server. The code that handles the share click and replaces the local stream with the screen-share stream is just this:
$('#share').click(function () {
    getScreenId(function (error, sourceId, screen_constraints) {
        navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
        navigator.getUserMedia(screen_constraints, function (stream) {
            $('#localvideo').attr('src', URL.createObjectURL(stream));
        }, function (error) {
            console.error(error);
        });
    });
});
Any help or suggestions would be appreciated.
Thanks for pointing out the other post, How to addTrack in MediaStream in WebRTC, but I don't think it covers the same case, and I am also not sure how to renegotiate the remote connection in this situation.
Xsocket.webrtc.js file for webrtc connection:
https://github.com/XSockets/XSockets.WebRTC/blob/master/src/js/XSockets.WebRTC.latest.js
How could I renegotiate the remote connection in this case?
I figured out a workaround for this myself: do not replace the local stream with the screen-share stream; instead, remove the old local stream from the local div and add the new screen-share stream to it. At the same time, send the old local stream's id to the other peer over the data channel so that it removes the old remote video as well.
The most important step is refreshing the streams (renegotiation); after that, the screen-share stream displays at the remote peer.
Code:
$('#share').click(function () {
    getScreenId(function (error, sourceId, screen_constraints) {
        navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
        navigator.getUserMedia(screen_constraints, function (stream) {
            webRTC.removeStream(webRTC.getLocalStreams()[0]);
            var id = $('#localvideo').attr('name');
            $('#localvideo').remove();
            brokerController.invoke('updateremotevideo', id);
            webRTC.addLocalStream(stream);
            webRTC.getRemotePeers().forEach(function (p) {
                webRTC.refreshStreams(p);
            });
        }, function (error) {
            console.error(error);
        });
    });
});
After receiving the command from the server to remove the old video stream:
brokerController.on('updateremotevideo', function (streamid) {
    $(document.getElementById(streamid)).remove();
});
This solution works for me. If instead you only want to replace the local video stream with the screen-share stream, you have to re-create the offer SDP and send it to the remote peer, which is more complicated.
getScreenId(function (error, sourceId, screen_constraints) {
    navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
    navigator.getUserMedia(screen_constraints, function (stream) {
        navigator.getUserMedia({ audio: true }, function (audioStream) {
            stream.addTrack(audioStream.getAudioTracks()[0]);
            var mediaRecorder = new MediaStreamRecorder(stream);
            mediaRecorder.mimeType = 'video/mp4';
            mediaRecorder.stream = stream;
            self.setState({ recorder: mediaRecorder, startRecord: true, shareVideo: true, pauseRecord: false, resumeRecord: false, stopRecord: false, downloadRecord: false, updateRecord: false });
            document.querySelector('video').src = URL.createObjectURL(stream);
            var video = document.getElementById('screen-video');
            if (video) {
                video.src = URL.createObjectURL(stream);
                video.width = 360;
                video.height = 300;
            }
        }, function (error) {
            alert(error);
        });
    }, function (error) {
        alert(error);
    });
});
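As a side note, in browsers that support the modern sender API, the same swap can be done without any renegotiation at all (a sketch, assuming pc is the underlying RTCPeerConnection and screenStream is the screen capture; this API postdates the XSockets wrapper used above):

// Swap the outgoing camera track for the screen-capture track in place;
// RTCRtpSender.replaceTrack() needs no new offer/answer round trip.
var screenTrack = screenStream.getVideoTracks()[0];
var sender = pc.getSenders().find(function (s) {
    return s.track && s.track.kind === 'video';
});
sender.replaceTrack(screenTrack);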

WEBRTC Object #<RTCPeerConnection> has no method 'processSignalingMessage'

I am having problems with WebRTC.
I am using this code from an example about video calls.
if (new_connection) {
    console.log('New Peer Connection');
    var peer_connection = {};
    peer_connection.connection_id = msg.from_connection_id;
    peer_connection.pc = createPeerConnection(peer_connection.connection_id, false);
    peer_connections.push(peer_connection);
    $('#remote').prepend(remoteVideoHtml.replace('remoteVideoId', 'peer' + peer_connection.connection_id));
}
// Now process the SDP JSON blob received
for (var i in peer_connections) {
    if (peer_connections[i].connection_id == msg.from_connection_id) {
        try {
            peer_connections[i].pc.processSignalingMessage(msg.data);
        } catch (e) {
            console.log("Failed to create processSignalingMessage, exception: " + e.message);
        }
    }
}
I need help because I have a problem with this line:
peer_connections[i].pc.processSignalingMessage(msg.data);
The error is:
Object #<RTCPeerConnection> has no method 'processSignalingMessage'
I don't know how the following handlers work or how they are invoked:
pc.onconnecting = function (msg) {
    console.log('onSessionConnecting');
}
pc.onopen = function (msg) {
    console.log('onSessionOpened');
}
pc.onaddstream = function (event) {
    console.log('onRemoteStreamAdded add the remote peers video stream.');
    var url = webkitURL.createObjectURL(event.stream);
    $('#peer' + connection_id).attr({
        src: url
    });
}
I would appreciate any help.
The initial version of WebRTC in Chrome was based on ROAP, and it had a processSignalingMessage() method. The current version is based on JSEP, which has methods like setRemoteDescription() and setLocalDescription() to inject the local SDP and the SDP received from other users.
You can still find this implementation in old versions of Chrome or in Bowser.
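For reference, the JSEP-style flow that replaced processSignalingMessage looks roughly like this (a sketch using the callback-style API of that era; sendToPeer and onSignalingMessage stand in for whatever signaling channel you use):

var pc = new RTCPeerConnection(config);

// Caller side: create an offer and hand the local SDP to the signaling channel.
pc.createOffer(function (offer) {
    pc.setLocalDescription(offer, function () {
        sendToPeer(JSON.stringify({ sdp: offer })); // hypothetical signaling helper
    }, function (error) { console.log(error); });
}, function (error) { console.log(error); });

// Both sides: apply the SDP received from the other peer.
function onSignalingMessage(msg) {
    pc.setRemoteDescription(new RTCSessionDescription(msg.sdp),
        function () { console.log('remote description set'); },
        function (error) { console.log(error); });
}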