I’m experimenting with the ion echo-test example, which works for me (Chrome 98).
Instead of getting the stream from getUserMedia() though, I’d like to be able to capture it from a file (e.g. an mp4 file created with ffmpeg).
After associating the file with the local-video element, I replaced this part of main.js:
Ion.LocalStream.getUserMedia({
  resolution: resolutionBox.options[resolutionBox.selectedIndex].value,
  codec: codecBox.options[codecBox.selectedIndex].value,
  simulcast: sc,
  audio: true,
})
  .then((media) => {
    localStream = media;
    localVideo.srcObject = media;
    localVideo.autoplay = true;
    localVideo.controls = true;
    localVideo.muted = true;
    // joinBtns.style.display = "none";
    localRTC.publish(media);
    localDataChannel = localRTC.createDataChannel("data");
  })
  .catch(console.error);
with:
if (localVideo.captureStream) {
  const stream = localVideo.captureStream();
  console.log('Captured stream from localVideo with captureStream', stream);
  localVideo.autoplay = true;
  localVideo.controls = true;
  if (stream) {
    localStream = new Ion.LocalStream(stream, {
      resolution: resolutionBox.options[resolutionBox.selectedIndex].value,
      codec: codecBox.options[codecBox.selectedIndex].value,
      simulcast: sc,
      audio: true,
      video: true,
    });
    localRTC.publish(localStream);
    localVideo.play();
  }
  localDataChannel = localRTC.createDataChannel("data");
}
The file plays in localVideo, and I can trace the ICE connectivity checks, which confirm that the connection is successful, but localRTC is not transmitting any video packets (so remoteRTC receives nothing and the ontrack event never fires).
I have compared the stream structure in the unmodified example and in my version, and I cannot see any difference.
No errors are logged in the console.
Is there anything else I should do to create a proper capture stream to pass to localRTC.publish()?
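For reference, one variant I still plan to try defers the capture until the element is actually rendering frames, on the assumption (unconfirmed) that tracks returned by captureStream() can start out muted until playback has really begun:
localVideo.autoplay = true;
localVideo.controls = true;
localVideo.muted = true;
localVideo.addEventListener('playing', () => {
  // Untested sketch: only capture once frames are flowing.
  const stream = localVideo.captureStream();
  localStream = new Ion.LocalStream(stream, {
    resolution: resolutionBox.options[resolutionBox.selectedIndex].value,
    codec: codecBox.options[codecBox.selectedIndex].value,
    simulcast: sc,
    audio: true,
    video: true,
  });
  localRTC.publish(localStream);
  localDataChannel = localRTC.createDataChannel("data");
}, { once: true });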
I am trying to use the Web Audio API to play sound in my React application.
It's currently playing sound in all browsers except Safari v12.1.
I am aware Safari has restrictions on autoplay and requires user interaction to play sound, so I have a play button which calls the _play() function:
_play = (url, index) => {
  this._getData(url);
  this.source.start(index);
}
It calls the _getData() function, which looks like this:
_getData(url) {
  this.source = this.audioContext.createBufferSource();
  var request = new XMLHttpRequest();
  request.open('GET', url, true);
  request.responseType = 'arraybuffer';
  request.onload = () => {
    var audioData = request.response;
    console.log(this.audioContext)
    this.audioContext.decodeAudioData(audioData, buffer => {
      this.source.buffer = buffer;
      this.source.connect(this.audioContext.destination);
    },
    function(e) { console.log("Error with decoding audio data" + e.err); });
  }
  request.send();
}
this.audioContext is created in the component constructor using:
this.audioContext = new (window.AudioContext || window.webkitAudioContext)();
The console.log(this.audioContext) inside the request.onload outputs this before pressing play:
...and this after pressing play:
But no sound is playing (in Safari).
What am I doing wrong?
I think the problem you ran into is that Safari does not allow you to modify the buffer anymore once you have called start().
For example, the following page plays a second of noise in Safari when you press the play button.
<!DOCTYPE html>
<html>
<body>
  <button id="play-button">play</button>
  <script>
    document
      .getElementById('play-button')
      .addEventListener('click', () => {
        // Use the prefixed constructor as a fallback for older Safari.
        const audioContext = new (window.AudioContext || window.webkitAudioContext)();
        const audioBufferSourceNode = audioContext.createBufferSource();
        const sampleRate = audioContext.sampleRate;
        // One second of noise: 1 channel, sampleRate frames, at sampleRate Hz.
        const audioBuffer = audioContext.createBuffer(1, sampleRate, sampleRate);
        const channelData = audioBuffer.getChannelData(0);

        for (let i = 0; i < sampleRate; i += 1) {
          channelData[i] = (Math.random() * 2) - 1;
        }

        audioBufferSourceNode.buffer = audioBuffer;
        audioBufferSourceNode.connect(audioContext.destination);
        audioBufferSourceNode.start(audioContext.currentTime);
      });
  </script>
</body>
</html>
But it stops working if you modify it slightly: when you start the audioBufferSourceNode before assigning the buffer, there is no output anymore.
audioBufferSourceNode.connect(audioContext.destination);
audioBufferSourceNode.start(audioContext.currentTime);
audioBufferSourceNode.buffer = audioBuffer;
I guess you can get your code working by waiting for the HTTP response and the audio decoding to finish before you start the source. Make sure to execute this.source.buffer = buffer before you execute this.source.start(index).
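Concretely, that could look like this (a sketch reusing your names; I'm assuming playback should begin as soon as decoding finishes, which moves start() into the decode callback):
_getData(url) {
  this.source = this.audioContext.createBufferSource();
  var request = new XMLHttpRequest();
  request.open('GET', url, true);
  request.responseType = 'arraybuffer';
  request.onload = () => {
    this.audioContext.decodeAudioData(request.response, buffer => {
      // Assign the buffer and wire up the graph first ...
      this.source.buffer = buffer;
      this.source.connect(this.audioContext.destination);
      // ... and only call start() once the data is actually in place.
      this.source.start(0);
    },
    function(e) { console.log("Error with decoding audio data" + e.err); });
  }
  request.send();
}

// _play() then just kicks off the request:
_play = (url) => {
  this._getData(url);
}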
I hope this helps.
I have trawled the internet looking at why blob videos are used, but I am not tech-savvy enough to really understand it. Can someone explain simply why a blob URL for my video is better (if it is) than loading the src as /video/intro.mp4?
Here is the code that I have used. Which one is better for my use case?
<video rel='preload' as='video' id='bgvid'>
<source type='video/mp4' src='/video/intro.mp4' />
</video>
or
var req = new XMLHttpRequest();
req.open('GET', videoURL, true);
req.responseType = 'blob';

req.onload = function() {
  if (this.status === 200) {
    var videoBlob = this.response;
    var vid = URL.createObjectURL(videoBlob); // IE10+
    var video = document.getElementById('bgvid');
    video.autoplay = true;
    video.src = vid;
  }
}
req.onerror = function() {
  // Error
}
Thanks
I would say the regular HTML way: with a plain src the browser can start playback while the file is still downloading (and can use range requests to fetch only what it needs), whereas the blob approach forces the entire file to be downloaded via XHR and held in memory before playback can begin.
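As a side note, the rel='preload' as='video' attributes in your markup belong on a <link> tag, not on <video>. If the intent was a preload hint, it would look like this (illustration only; it does not change which loading approach is better):
<!-- In the <head>: an early-fetch hint for the file. -->
<link rel='preload' as='video' href='/video/intro.mp4' />

<!-- On the element itself, the buffering hint is the preload attribute. -->
<video preload='auto' id='bgvid'>
  <source type='video/mp4' src='/video/intro.mp4' />
</video>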
I am trying to implement a screen-share function in WebRTC video conferencing. Following a suggestion, I am using muaz-khan's solution from https://www.webrtc-experiment.com/getScreenId/ . I can easily capture the application image of one peer and replace the video stream with the capture stream. But this is a video-conferencing experiment, so two browsers need to conference with each other. For example, browser 1 has video stream A (local video) and video stream B (remote video); browser 2 has video stream B (local video) and video stream A (remote video). So when I share the screen from browser 1, the screen-share stream should replace the local video in browser 1 and the remote video in browser 2.
Right now, the screen share only replaces the local video in browser 1; browser 2 sees no change in its remote video (which is the local video of browser 1). I don't know how to trigger the change in browser 2 as well. Do I need to signal the screen-share stream to the server and change the remote stream accordingly?
Here is my JavaScript code:
$(function() {
    var brokerController, ws, webRTC, localid;

    // ws = new XSockets.WebSocket("wss://rtcplaygrouund.azurewebsites.net:443", ["connectionbroker"], {
    ws = new XSockets.WebSocket("ws://localhost:4502", ["connectionbroker"], {
        ctx: "152300ed-4d84-4e72-bc99-965052dc1e95"
    });

    var addRemoteVideo = function(peerId, mediaStream) {
        var remoteVideo = document.createElement("video");
        remoteVideo.setAttribute("autoplay", "true");
        remoteVideo.setAttribute("rel", peerId);
        attachMediaStream(remoteVideo, mediaStream);
        remoteVideo.setAttribute("class", "col-md-3");
        remoteVideo.setAttribute("height", $(document).height() * 0.3);
        remoteVideo.setAttribute("id", 'remoteVideo');
        $("#videoscreen").append(remoteVideo);
    };

    var onConnectionLost = function(remotePeer) {
        console.log("onconnectionlost");
        var peerId = remotePeer.PeerId;
        var videoToRemove = $("video[rel='" + peerId + "']");
        videoToRemove.remove();
    };

    var oncConnectionCreated = function() {
        console.log("oncconnectioncreated", arguments);
    }

    var onGetUerMedia = function(stream) {
        console.log("Successfully got some userMedia , hopefully a goat will appear..");
        webRTC.connectToContext(); // connect to the current context?
    };

    var onRemoteStream = function(remotePeer) {
        addRemoteVideo(remotePeer.PeerId, remotePeer.stream);
        console.log("Opps, we got a remote stream. lets see if its a goat..");
    };

    var onLocalStream = function(mediaStream) {
        console.log("Got a localStream", mediaStream.id);
        localid = mediaStream.id;
        console.log("check this id: meadiastram id ", mediaStream.id);
        var video = document.createElement("video");
        video.setAttribute("height", "100%");
        video.setAttribute("autoplay", "true");
        video.setAttribute("id", "localvideo");
        video.setAttribute("name", mediaStream.id);
        attachMediaStream(video, mediaStream);
        $("#videoscreen").append(video);

        $('#share').click(function() {
            getScreenId(function(error, sourceId, screen_constraints) {
                navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
                navigator.getUserMedia(screen_constraints, function(stream) {
                    $('#localvideo').attr('src', URL.createObjectURL(stream));
                }, function(error) {
                    console.error(error);
                });
            });
        });
    };

    var onContextCreated = function(ctx) {
        console.log("RTC object created, and a context is created - ", ctx);
        webRTC.getUserMedia(webRTC.userMediaConstraints.hd(true), onGetUerMedia, onError);
    };

    var onOpen = function() {
        console.log("Connected to the brokerController - 'connectionBroker'");
        webRTC = new XSockets.WebRTC(this);
        webRTC.onlocalstream = onLocalStream;
        webRTC.oncontextcreated = onContextCreated;
        webRTC.onconnectioncreated = oncConnectionCreated;
        webRTC.onconnectionlost = onConnectionLost;
        webRTC.onremotestream = onRemoteStream;
    };

    var onConnected = function() {
        console.log("connection to the 'broker' server is established");
        console.log("Try get the broker controller form server..");
        brokerController = ws.controller("connectionbroker");
        brokerController.onopen = onOpen;
    };

    ws.onconnected = onConnected;
});
I am using XSockets as the server. The code for the share click, which replaces the local stream with the screen-share stream, is as simple as this:
$('#share').click(function() {
    getScreenId(function(error, sourceId, screen_constraints) {
        navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
        navigator.getUserMedia(screen_constraints, function(stream) {
            $('#localvideo').attr('src', URL.createObjectURL(stream));
        }, function(error) {
            console.error(error);
        });
    });
});
Any help or suggestions would be appreciated.
Thanks for pointing out the other post, How to addTrack in MediaStream in WebRTC, but I don't think it covers the same problem, and I am also not sure how to renegotiate the remote connection in this case.
The XSockets WebRTC js file used for the connection:
https://github.com/XSockets/XSockets.WebRTC/blob/master/src/js/XSockets.WebRTC.latest.js
How could I renegotiate the remote connection in this case?
I figured out a workaround for this myself: do not replace the local stream with the screen-share stream; instead, remove the old local stream from the local div and add the new screen-share stream to it. At the same time, send the old local stream's id over the data channel to the other peer so it removes the corresponding old remote video as well.
The most important step is to refresh the streams (renegotiation); after that, the screen-share stream shows up on the remote peer.
Code:
$('#share').click(function() {
    getScreenId(function(error, sourceId, screen_constraints) {
        navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
        navigator.getUserMedia(screen_constraints, function(stream) {
            webRTC.removeStream(webRTC.getLocalStreams()[0]);
            var id = $('#localvideo').attr('name');
            $('#localvideo').remove();
            brokerController.invoke('updateremotevideo', id);
            webRTC.addLocalStream(stream);
            webRTC.getRemotePeers().forEach(function(p) {
                webRTC.refreshStreams(p);
            });
        }, function(error) {
            console.error(error);
        });
    });
});
After receiving the command from the server to remove the old video stream:
brokerController.on('updateremotevideo', function(streamid) {
    $(document.getElementById(streamid)).remove();
});
This solution works for me. If instead you only want to replace the local video stream with the screen-share stream, you need to recreate the offer SDP and send it to the remote peer, which is more complicated.
getScreenId(function(error, sourceId, screen_constraints) {
    navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
    navigator.getUserMedia(screen_constraints, function(stream) {
        navigator.getUserMedia({ audio: true }, function(audioStream) {
            stream.addTrack(audioStream.getAudioTracks()[0]);
            var mediaRecorder = new MediaStreamRecorder(stream);
            mediaRecorder.mimeType = 'video/mp4';
            mediaRecorder.stream = stream;
            self.setState({ recorder: mediaRecorder, startRecord: true, shareVideo: true, pauseRecord: false, resumeRecord: false, stopRecord: false, downloadRecord: false, updateRecord: false });
            document.querySelector('video').src = URL.createObjectURL(stream);
            var video = document.getElementById('screen-video');
            if (video) {
                video.src = URL.createObjectURL(stream);
                video.width = 360;
                video.height = 300;
            }
        }, function(error) {
            alert(error);
        });
    }, function(error) {
        alert(error);
    });
});
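As an aside: outside the XSockets wrapper, a plain RTCPeerConnection can swap the outgoing video track without any renegotiation via RTCRtpSender.replaceTrack(). A minimal sketch, assuming pc is your RTCPeerConnection (a hypothetical name, not part of the code above):
getScreenId(function(error, sourceId, screen_constraints) {
    navigator.mediaDevices.getUserMedia(screen_constraints).then(function(stream) {
        var screenTrack = stream.getVideoTracks()[0];
        // Find the sender that currently carries the camera's video track ...
        var sender = pc.getSenders().find(function(s) {
            return s.track && s.track.kind === 'video';
        });
        // ... and swap the track in place; no new offer/answer is required.
        return sender.replaceTrack(screenTrack);
    }).catch(function(error) {
        console.error(error);
    });
});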
I am trying to use your API in a custom app with imported users.
Everything works fine (auth_token, login, call initiation), but when the callee should get a response and add the remote stream, nothing happens. No errors are shown in the console.
I would appreciate it if someone took a look at the code and told me what I am missing.
I tried the vLine demo at https://freeofcinema.vline.com and it worked with the same browsers and conditions between the two computers. My app runs over HTTP, but I also tried HTTPS and the same problem came up. This is some simplified code I used to test the API:
var Streams = [];
var Vsession = null;

var Vline = (function() {
    var Client;
    var authToken;
    var service_id = 'freeofcinema';
    var profile = null;
    var Person;
    var Calls = [];

    var onMessage = function(event) {
        //alert('message');
        var msg = event.message, sender = msg.getSender();
        console.log(sender.getDisplayName() + ' says: ' + msg.getBody());
        console.log(event);
    }

    var onMediaSession = function(event) {
        console.log(event);
        var mediaSession = event.target;
        InitSession(mediaSession);
    }

    function Call(mediaSession) {
        mediaSession.on('change', alert_info);
    }

    function alert_info(b) {
        console.log(b);
    }

    function InitSession(mediaSession) {
        mediaSession.on('mediaSession:addRemoteStream', function(event) {
            alert('addRemoteStream');
        });
        mediaSession.on('mediaSession:addLocalStream', function(event) {
            alert('addLocalStream');
        });
        mediaSession.on('mediaSession:removeLocalStream mediaSession:removeRemoteStream', function(event) {
            console.log('removedStream');
        });
        Calls.push(new Call(mediaSession));
    }

    return {
        init: function() {
            if (profile) {
                return;
            }
            profile = {
                "displayName": //some getusrname function...
            };
            $.post('vtoken.php', { //get auth token
                id: Comm.Voip_user().id
            }, function(data) {
                authToken = data;
                Client = vline.Client.create({
                    "serviceId": service_id,
                    "ui": true
                });
                Client.on('recv:im', onMessage, this);
                Client.on('add:mediaSession', onMediaSession, this);
                Client.on('login', function(e) {
                    Vsession = e.target;
                    //alert('logged in');
                });
                Client.login(service_id, profile, authToken);
            });
        },
        getPerson: function(id) { //id of user to call
            if (Vsession) {
                Vsession.getPerson(id).done(function(person) {
                    Person = person;
                    Vsession.startMedia(id);
                });
            }
        }
    }
}());
Thank you for your response.
I tried with one user from the app and another from https://freeofcinema.vline.com, and the same problem occurred. Also, the call (in pending state) gets terminated after a short while.
When you pass ui:true while creating the client, you do not have to handle the media sessions yourself. Just comment out the line Client.on('add:mediaSession', onMediaSession, this); and it should just work.
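With that change, the client setup inside init() reduces to this (same names as in your code; the commented line is the only difference):
Client = vline.Client.create({
    "serviceId": service_id,
    "ui": true // vLine renders its own call UI and manages media sessions
});
Client.on('recv:im', onMessage, this);
// Client.on('add:mediaSession', onMediaSession, this); // not needed with ui:true
Client.on('login', function(e) {
    Vsession = e.target;
});
Client.login(service_id, profile, authToken);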