I need to play multiple videos to test a video server. I'm using Lubuntu 14.04 and have installed v4l2loopback to create the device file (/dev/videoN).
I am using mplayer to play video from this device as described in mplayer cam.
I have modified the source code and successfully played the video, viewing it with xawtv and with Flash Player (in Firefox 28). I have tried to view it with WebRTC, but it does not work.
Do you have any idea how to do this? Is there a particular pixel format to define in examples/yuv4mpeg_to_v4l2.c?
I'm trying to narrow down the problem by accessing the device directly with this script:
<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<title>Test rtc</title>
<script type="text/javascript" charset="utf-8">
navigator.getUserMedia =
(
    navigator.getUserMedia ||
    navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia ||
    navigator.msGetUserMedia
);
// Note: an object literal may only contain one "video" key; the mandatory
// resolution constraints replace the plain "video: true" entry.
var constraints =
{
    audio: true,
    video:
    {
        mandatory:
        {
            minWidth: 640,
            minHeight: 360
        }
    }
};
if( navigator.getUserMedia )
{
    navigator.getUserMedia(
        // constraints
        constraints,
        // successCallback
        function(localMediaStream)
        {
            var video = document.querySelector('video');
            video.src = window.URL.createObjectURL(localMediaStream);
            video.play();
            console.log( video );
            console.log( localMediaStream );
        },
        // errorCallback
        function(err)
        {
            console.log("The following error occurred: " + err);
        }
    );
}
else
{
    console.log("getUserMedia not supported");
}
</script>
</head>
<body>
<video></video>
</body>
</html>
The video constraints are taken from the mplayer output:
VIDEO: 640x360 25.000 fps 555.0 kbps (69.4 kB/s)
[swscaler # 0x7f83633f3640]BICUBIC scaler, from yuv420p to yuv420p using MMXEXT
VO: [yuv4mpeg] 480x360 => 640x360 Planar YV12
But the problem persists: "The following error occurred: Starting video failed".
The video plays correctly and is visible both with xawtv and with the Flash player.
There are two things:
You need a recent enough v4l2loopback module; IIRC you have to use at least 0.7.1:
$ dmesg | grep v4l2loopback
[0000123.456] v4l2loopback driver version 0.8.0 loaded
The v4l2loopback device will only appear as a proper webcam if some (other) application is writing video data to it, e.g.:
gst-launch videotestsrc ! v4l2sink device=/dev/video0
I was able to get this to work using ffmpeg. This was the command I used:
ffmpeg -re -f lavfi -i "movie=my_video_file.mp4" -f v4l2 /dev/video0
After doing that, I was able to access this virtual webcam, which was looping a video file infinitely, from my WebRTC app.
Not sure if this will help or not, but you could try using WebcamStudio. It also creates a loopback device and can do the source mixing as well. As I remember, it uses ffmpeg as its backend, so you should be able to tweak it into any format you like.
let localStream;
let peerConnection;

navigator.mediaDevices.getUserMedia({
    audio: true,
    video: true
}).then(function(stream) {
    createPeerConnection();
    localStream = stream;
    peerConnection.addStream(localStream);
});
So when I want to stop the stream, I stop all of its tracks, which does stop the video:
localStream.getTracks().forEach(track => track.stop());
But the browser tab still says that it is accessing the camera or microphone, with a red dot beside it. I do not want to have to reload the page just to make that indicator go away.
Note: this happens after establishing a peer connection using WebRTC; after the peers disconnect, the camera light stays on.
Is there any way to do that? Thanks for your help in advance.
You can use a boolean flag (or some condition) that controls whether the tab should access the camera. After track.stop(), set that flag to false so the camera is not accessed again. (P.S.: try it and see if it works.)
<!DOCTYPE html>
<html>
<head>
    <title>Web Client</title>
    <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
</head>
<body>
    <div id="callerIDContainer">
        <button onclick="call_user();">Call User</button>
    </div>
    <div class="video-container">
        <video autoplay muted class="local-video" id="local-video"></video>
    </div>
    <div>
        <button onclick="hangup();">Hangup</button>
    </div>
</body>
<script>
var localStream;
var accessRequired = true;

function call_user() // your function
{
    if (accessRequired)
    {
        navigator.mediaDevices.getUserMedia({
            audio: true,
            video: true
        }).then(function(stream) {
            localStream = stream;
            const localVideo = document.getElementById("local-video");
            if (localVideo) {
                localVideo.srcObject = localStream;
            }
        });
    }
}

function hangup() {
    // forEach() returns undefined, so it cannot be chained with .then();
    // stop the tracks first, then clear the flag.
    localStream.getTracks().forEach(track => track.stop());
    accessRequired = false;
}
</script>
</html>
Try this: call the user, then hang up. It works.
The sample code in your question looks like it uses gUM() to create an audio-only stream ({video: false, audio:true}).
It would be strange if calling .stop() on all the tracks of your audio-only stream also stopped the video track of some other stream. If you want to turn off your camera's on-the-air light, you'll need to stop the video track you used in peerConnection.addTrack(videoTrack). You probably also need to tear down the call using peerConnection.close().
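To make that concrete, here is a minimal sketch of the teardown described above, reusing the localStream and peerConnection variables from the question (treat it as an illustration under those assumptions, not a drop-in fix):

// Stop capture and tear down the call; stopping the video track is what
// turns the camera's on-the-air light off.
function endCall() {
    if (localStream) {
        localStream.getTracks().forEach(track => track.stop()); // releases camera and mic
    }
    if (peerConnection) {
        peerConnection.close(); // tear down the RTCPeerConnection itself
        peerConnection = null;
    }
}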
I had the same issue with WebRTC and React. I had stopped the tracks of the remote stream, but I forgot to stop the local stream:
window.localStream.getTracks().forEach((track) => {
    track.stop();
});
I am using Video.js version 7.6.6. It will not play an HTML5 video if the src is a blob URL. It does load the video duration, but it will not play. I get this warning, and then it loads forever:
VIDEOJS: WARN: Problem encountered with the current HLS playlist. Trying again since it is the only playlist.
This is the way my code runs:
<video id="my_video" class="video-js vjs-matrix vjs-default-skin vjs-big-play-centered" controls
preload="none" width="640" height="268" data-setup="{}"></video>
<script type="text/javascript" src="/js/video-766.min.js"></script>
<script>
fetch("https://server/hls/index.m3u8").then(result => result.blob())
.then(blob => {
var blobURL = URL.createObjectURL(blob);
var player = videojs("my_video");
player.src({ src: blobURL, type: "application/x-mpegURL" });
}
);
</script>
If I try it without a blob, just a regular URL to the index.m3u8 file, then it works, so I think the problem is with the creation of the blob URL. This works, and the video starts playing:
<video id="my_video" class="video-js vjs-default-skin" height="360" width="640" controls preload="none">
<source src="https://server/hls/index.m3u8" type="application/x-mpegURL" />
</video>
<script>
var player = videojs('my_video');
</script>
I have searched for this issue and found a lot, but none of it helps me. Am I creating the blob wrong?
The object URL generated for the blob will start with the file:// protocol, if I'm not wrong, and browsers don't let you load data from a file:// URL. I ran into a similar problem, so I created a simple server in my app which returns the requested file over https://.
The reason your index.m3u8 works directly is that it is served over the https protocol.
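The answer does not show what such a server looks like. As a rough sketch of the idea (assuming plain Node.js and that index.m3u8 and its .ts segments sit in an ./hls folder next to the script; in production you would put this behind https), it could be as simple as:

// Tiny static server for the HLS files, so video.js can fetch the playlist and
// segments by URL instead of going through a blob.
var http = require('http');
var fs = require('fs');
var path = require('path');

var types = { '.m3u8': 'application/x-mpegURL', '.ts': 'video/mp2t' };

http.createServer(function (req, res) {
    var file = path.join(__dirname, 'hls', path.basename(req.url));
    fs.readFile(file, function (err, data) {
        if (err) { res.writeHead(404); res.end(); return; }
        res.writeHead(200, { 'Content-Type': types[path.extname(file)] || 'application/octet-stream' });
        res.end(data);
    });
}).listen(8000);

The player would then point at http://localhost:8000/index.m3u8 (or its https equivalent) rather than at a blob URL.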
I am trying to play back a video (currently hosted on S3 with public access) by creating a blob URL.
I have used Elastic Transcoder to encode the video, since it is supposed to put the MOOV atom at the top (beginning) of the file.
I am unable to get my code to work, but I also found a working example: link here
Here is my code:
<!DOCTYPE html>
<html>
<head>
    <meta charset="utf-8"/>
</head>
<body>
    <video controls></video>
    <script>
    var video = document.querySelector('video');
    var assetURL = 'https://ovation-blob-url-test.s3.amazonaws.com/AdobeStock_116640093_Video_WM_NEW.mp4';
    // Need to be specific for Blink regarding codecs
    // ./mp4info frag_bunny.mp4 | grep Codec
    var mimeCodec = 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"';

    if ('MediaSource' in window && MediaSource.isTypeSupported(mimeCodec)) {
        var mediaSource = new MediaSource;
        //console.log(mediaSource.readyState); // closed
        video.src = URL.createObjectURL(mediaSource);
        mediaSource.addEventListener('sourceopen', sourceOpen);
    } else {
        console.error('Unsupported MIME type or codec: ', mimeCodec);
    }

    function sourceOpen (_) {
        //console.log(this.readyState); // open
        var mediaSource = this;
        var sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
        fetchAB(assetURL, function (buf) {
            sourceBuffer.addEventListener('updateend', function (_) {
                mediaSource.endOfStream();
                video.play();
                //console.log(mediaSource.readyState); // ended
            });
            sourceBuffer.appendBuffer(buf);
        });
    };

    function fetchAB (url, cb) {
        console.log(url);
        var xhr = new XMLHttpRequest;
        xhr.open('get', url);
        xhr.responseType = 'arraybuffer';
        xhr.onload = function () {
            cb(xhr.response);
        };
        xhr.send();
    };
    </script>
</body>
</html>
What am I doing wrong? I looked at tools like MP4Box or QT-FastStart, but they seem to be somewhat old school. I would also be willing to change from MP4 to an M3U8 playlist, but then I don't know what MIME types to use.
At the end of the day I am trying to play back a video/stream and hide the URL (origin), potentially using a blob.
Thank you guys!
So, first, even though this code seems to be taken from the Mozilla documentation site, there are a few issues: you are not checking the readyState before calling endOfStream, so the error you get is valid; secondly, the play() call is blocked by the autoplay policy changes. If you add an error handler, you will actually see that the appendBuffer fails. Here is the updated snippet:
<!DOCTYPE html>
<html>
<head>
    <meta charset="utf-8"/>
</head>
<body>
    <video controls></video>
    <script>
    var video = document.querySelector('video');
    var assetURL = 'https://ovation-blob-url-test.s3.amazonaws.com/AdobeStock_116640093_Video_WM_NEW.mp4';
    // Need to be specific for Blink regarding codecs
    // ./mp4info frag_bunny.mp4 | grep Codec
    var mimeCodec = 'video/mp4; codecs="avc1.42E01E, mp4a.40.2"';

    if ('MediaSource' in window && MediaSource.isTypeSupported(mimeCodec)) {
        var mediaSource = new MediaSource;
        //console.log(mediaSource.readyState); // closed
        video.src = URL.createObjectURL(mediaSource);
        mediaSource.addEventListener('sourceopen', sourceOpen);
    } else {
        console.error('Unsupported MIME type or codec: ', mimeCodec);
    }

    function sourceOpen (_) {
        //console.log(this.readyState); // open
        var mediaSource = this;
        var sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
        fetchAB(assetURL, function (buf) {
            sourceBuffer.addEventListener('updateend', function (_) {
                // console.log(mediaSource.readyState); // ended
                if (mediaSource.readyState === "open") {
                    mediaSource.endOfStream();
                    video.play();
                }
            });
            sourceBuffer.addEventListener('error', function (event) {
                console.log('an error encountered while trying to append buffer');
            });
            sourceBuffer.appendBuffer(buf);
        });
    };

    function fetchAB (url, cb) {
        console.log(url);
        var xhr = new XMLHttpRequest;
        xhr.open('get', url);
        xhr.responseType = 'arraybuffer';
        xhr.onload = function () {
            cb(xhr.response);
        };
        xhr.send();
    };
    </script>
</body>
</html>
So let's advance to the next issue: the actual error. Using chrome://media-internals/ we can see that the video actually fails to load due to an incompatibility with the ISOBMFF format.
I am not familiar with Elastic Transcoder, but it seems that it is not producing an MP4 file suitable for live streaming. Also, if using MSE, putting the moov at the beginning is not enough; the video actually has to meet all of the ISOBMFF requirements (see chapters 3 and 4).
The working sample you mentioned is not a valid comparison since it uses the blob for the src, where the ISOBMFF rules do not apply. If it is fine for you to go that way, don't use MSE and put the blob directly in the src. If you need MSE, you have to mux it correctly.
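For completeness, here is a minimal sketch of that non-MSE route, reusing the same assetURL as in the snippets above (it assumes the whole file can be downloaded up front and that the S3 bucket allows the cross-origin request):

var video = document.querySelector('video');
var assetURL = 'https://ovation-blob-url-test.s3.amazonaws.com/AdobeStock_116640093_Video_WM_NEW.mp4';

fetch(assetURL)
    .then(function (response) { return response.blob(); })
    .then(function (blob) {
        // No MediaSource involved: the blob URL is handed straight to the element,
        // so the ISOBMFF fragmentation rules do not apply.
        video.src = URL.createObjectURL(blob);
        return video.play();
    })
    .catch(function (err) { console.error(err); });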
Ok, so I got the original code example to work by encoding my MP4 videos with ffmpeg:
ffmpeg -i input.mp4 -vf scale=1920:1080,setsar=1:1 -c:v libx264 -preset medium -c:a aac -movflags empty_moov+default_base_moof+frag_keyframe output.mp4 -hide_banner
The important part is: -movflags empty_moov+default_base_moof+frag_keyframe
This setup also scales the video to 1920x1080 (disregarding any aspect ratio of the input video)
However, based on the comments on the original post, I believe there might be a more efficient way to generate the blob URL and ingest it into a video tag. This example was copied straight from https://developer.mozilla.org.
If anyone comes up with a better script (not over-engineered), please post it here.
Thank you @Rudolfs Bundulis for all your help!
I have written an application in Sencha Touch 2.1; I embed a package build of it into Cordova/PhoneGap 2.5.0 and compile it in Xcode to run on the iOS Simulator / iOS. I have added the PGSQLite plugin to PhoneGap and built my own PhoneGap/SQLite proxy for Sencha, which I use on a few of my Stores.*
Problem: When I embed a package build into PhoneGap and run in iOS Simulator, I see that Cordova does not load before Sencha initializes. I see this because my calls in my Sencha app to Cordova.exec that I make in my Proxy initialization result in an error telling me that the Cordova object cannot be found.
I do successfully use Cordova.exec later in my application to run things like the Childbrowser plugin for PhoneGap, and it works. But using Cordova.exec at an early stage in the app's execution, i.e., initialization, is too soon to guarantee that the Cordova object will have been instantiated.
Already tried: I have tried the following approaches:
I tried simply embedding the developer build of my Sencha app into PhoneGap. Although this worked, I don't want to deploy my development build as my released app because it is inefficient and takes up a lot of space. I have learned from this experiment, however, that the way the Sencha Touch microloader works on package and production builds loads PhoneGap after Sencha. This can be clearly seen when inspecting the DOM after Sencha loads in a package build.
I have already configured my app.json file to include PhoneGap and my plugins before app.js and the Sencha Touch framework. Playing with the order of my JS file references in my app.json did not seem to affect the load order.

I also tried creating a script loader, as described here (StackOverflow). I then ran the script loader for Cordova, and in the callback for that, ran the script loader for my plugin, and then, finally, in the callback for that, ran the Sencha Touch microloader. This resulted in an error. Additionally, I had to manually set that up in my index.html file after Sencha built my package. This seems unacceptable.
What I am looking for: I am looking for answers to the following:
Is there a way to configure Sencha's microloader or my Sencha app in general so that Cordova is ensured to have loaded before Sencha's microloader runs?
Is there a way to set this up so that using Sencha Cmd still works, and I don't have to hack around in my index.html file after I build the app?
Note:
*Please don't suggest I use the existing, so-called SQLite Proxy for Sencha. I specifically chose my approach because, though I appreciate the existing work on a SQLite proxy for Sencha Touch 2 (namely, this), it is actually a WebSQL proxy that does not store natively in SQLite on iOS. My proxy uses the PGSQLite plugin for PhoneGap to store data natively in SQLite on iOS. I plan to open-source it when I have an opportunity to clean it up and untangle it from my code.
I ended up solving this myself by building a custom loader. I am not sure if there is a more Sencha-ish way to do it, but here are the details of what I did, which does work, in case anyone else wants to ensure that PhoneGap is completely loaded in package and production builds before running anything in Sencha. (That would probably be the case in all scenarios in which PhoneGap is packaging a Sencha app).
My index.html file:
<!DOCTYPE HTML>
<html manifest="" lang="en-US">
<head>
<!-- Load Cordova first. Replace with whatever version you are using -->
<script type="text/javascript" src="cordova.js"></script>
<script type="text/javascript" charset="utf-8">
function onBodyLoad() {
// Check for whatever mobile you will run your PhoneGap app
// on. Below is a list of iOS devices. If you have a ton of
// devices, you can probably do this more elegantly.
// The goal here is to only listen to the onDeviceReady event
// to continue the load process on devices. Otherwise you will
// be waiting forever (literally) on Desktops.
if ((navigator.platform == 'iPad') ||
(navigator.platform == 'iPhone') ||
(navigator.platform == 'iPod') ||
(navigator.platform == 'iPhone Simulator') ||
(navigator.platform == 'iPad Simulator')
) {
// Listening for this event to continue the load process ensures
// that Cordova is loaded.
document.addEventListener("deviceready", onDeviceReady, false);
} else {
// If we're on Desktops, just proceed with loading Sencha.
loadScript('loader.js', function() {
console.log('Finished loading scripts.');
});
}
};
// This function is a modified version of the one found on
// StackOverflow, here: http://stackoverflow.com/questions/756382/bookmarklet-wait-until-javascript-is-loaded#answer-756526
// Using this allows you to wait to load another script by
// putting the call to load it in a callback, which is
// executed only when the script that loadScript is loading has
// been loaded.
function loadScript(url, callback)
{
var head = document.getElementsByTagName("head")[0];
var script = document.createElement("script");
script.src = url;
// Attach handlers for all browsers
var done = false;
script.onload = script.onreadystatechange = function()
{
if( !done && ( !this.readyState
|| this.readyState == "loaded"
|| this.readyState == "complete") )
{
done = true;
// Continue your code
callback();
}
};
head.appendChild(script);
}
function onDeviceReady() {
console.log("[PhoneGap] Device initialized.");
console.log("[PhoneGap] Loading plugins.");
// You can load whatever PhoneGap plugins you want by daisy-chaining
// callbacks together like I did with pgsqlite and Sencha.
loadScript('pgsqlite_plugin.js', function() {
console.log("[Sencha] Adding loader.");
// The last one to load is the custom Sencha loader.
loadScript('loader.js', function() {
console.log('Finished loading scripts.');
});
});
};
</script>
<meta charset="UTF-8">
<title>Sencha App</title>
</head>
<!-- Don't forget to call onBodyLoad() in onLoad -->
<body onLoad="onBodyLoad();">
</body>
</html>
Next, create a custom loader in loader.js in your document root, alongside your index.html. This one is heavily based on the development microloader that comes with Sencha. Much props to them:
console.log("Loader included.");
(function() {
function write(content) {
document.write(content);
}
function meta(name, content) {
write('<meta name="' + name + '" content="' + content + '">');
}
var global = this;
if (typeof Ext === 'undefined') {
var Ext = global.Ext = {};
}
var head = document.getElementsByTagName("head")[0];
var xhr = new XMLHttpRequest();
xhr.open('GET', 'app.json', false);
xhr.send(null);
var options = eval("(" + xhr.responseText + ")"),
scripts = options.js || [],
styleSheets = options.css || [],
i, ln, path;
meta('viewport', 'width=device-width, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0, user-scalable=no');
meta('apple-mobile-web-app-capable', 'yes');
meta('apple-touch-fullscreen', 'yes');
console.log("Loading stylesheets");
for (i = 0,ln = styleSheets.length; i < ln; i++) {
path = styleSheets[i];
if (typeof path != 'string') {
path = path.path;
}
var stylesheet = document.createElement("link");
stylesheet.rel = "stylesheet";
stylesheet.href = path;
head.appendChild(stylesheet);
}
for (i = 0,ln = scripts.length; i < ln; i++) {
path = scripts[i];
if (typeof path != 'string') {
path = path.path;
}
var script = document.createElement("script");
script.src = path;
head.appendChild(script);
}
})();
Notice that your index.html file does not contain a #microloader script element. That's because you should take it out and use your custom loader.
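(For reference, the microloader element that Sencha Cmd normally puts into index.html looks roughly like the line below; the exact src path depends on your SDK version and project layout, so check your own generated file rather than copying this verbatim.)

<script id="microloader" type="text/javascript" src="touch/microloader/development.js"></script>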
With all that in place, you will be able to sail smoothly, knowing that the whole PhoneGap environment is in place before your Sencha JavaScript starts doing things.
I started to learn WebRTC when I tried to implement this basic sample application:
<html>
<head>
</head>
<body>
<script type="text/javascript">
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
                         navigator.mozGetUserMedia || navigator.msGetUserMedia;
window.URL = window.URL || window.webkitURL;

navigator.getUserMedia({video: true}, function(localMediaStream) {
    var video = document.createElement("video");
    video.autoplay = true;
    video.src = window.URL.createObjectURL(localMediaStream);
    document.body.appendChild(video);
}, function(error) {
    console.log(error);
});
</script>
</body>
</html>
I ran this code in my local browser, Google Chrome Canary. I enabled PeerConnection in the flags; I did not find a MediaStream flag in my browser, but I think it might be enabled by default.
The problem is that this code results in a NavigatorUserMediaError in the console, and I am not finding a way to get past this problem.
Does anyone have an idea where I went wrong in my code?
Did you run this from a web server?
If you run it from a file:// URL, you'll get a NavigatorUserMediaError.
I just tried your code from localhost in Chrome 22.0 and it works fine.
Note that this example does not use RTCPeerConnection and you don't have to enable any flags now in Chrome.