I've written an MSE video player that loads WebMs. These load well; however, I have a problem with video files that have no audio track.
I've tried changing the codec string depending on whether there is audio:
mediaSource.addSourceBuffer(`video/webm; ${videoHasAudio(asset) ? 'codecs="vp9,vorbis"' : 'codecs="vp9"'}`)
I thought this was working, but now it isn't. How do I play silent WebMs in MSE?
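Here's a minimal sketch of how I'm validating the codec string before adding the buffer, using the standard MediaSource.isTypeSupported (videoHasAudio is my own helper):

const mimeType = videoHasAudio(asset)
  ? 'video/webm; codecs="vp9,vorbis"'
  : 'video/webm; codecs="vp9"';

// Guard against unsupported codec strings before creating the buffer.
let sourceBuffer;
if (MediaSource.isTypeSupported(mimeType)) {
  sourceBuffer = mediaSource.addSourceBuffer(mimeType);
} else {
  console.error("Unsupported MIME type or codec: " + mimeType);
}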
I have added a sample MSE project here:
https://github.com/thowfeeq178/MediaSourceExtention
Check out the example in the GitHub repo.
Overview: we need to add one SourceBuffer for video and one for audio, like below:
// BBB : https://dash.akamaized.net/akamai/bbb_30fps/bbb_30fps.mpd
var baseUrl = "https://dash.akamaized.net/akamai/bbb_30fps/";
var initUrl = baseUrl + "bbb_30fps_480x270_600k/bbb_30fps_480x270_600k_0.m4v";
var initAudioUrl = baseUrl + "bbb_a64k/bbb_a64k_0.m4a";
var templateUrl =
baseUrl + "bbb_30fps_480x270_600k/bbb_30fps_480x270_600k_$Number$.m4v";
var templateUrlForAudio = baseUrl + "bbb_a64k/bbb_a64k_$Number$.m4a";
var sourceBuffer;
var audioSourceBuffer;
var index = 0;
var audioIndex = 0;
var numberOfChunks = 159;
var video = document.querySelector("video");
var ms = new MediaSource();
function onPageLoad() {
console.log("page loaded ..");
if (!window.MediaSource) {
console.error("No Media Source API available");
return;
}
// making source controlled by JS using MS
video.src = window.URL.createObjectURL(ms);
ms.addEventListener("sourceopen", onMediaSourceOpen);
}
function onMediaSourceOpen() {
// create source buffer
sourceBuffer = ms.addSourceBuffer('video/mp4; codecs="avc1.4d401f"');
audioSourceBuffer = ms.addSourceBuffer('audio/mp4; codecs="mp4a.40.5"');
// whenever one segment is loaded, go for the next
sourceBuffer.addEventListener("updateend", nextSegment);
audioSourceBuffer.addEventListener("updateend", nextAudioSegment);
// fetch the init segments
GET(initUrl, appendToBuffer);
GET(initAudioUrl, appendToAudioBuffer);
// play
video.play();
}
// get the next segment based on index and append it; once everything is loaded, remove the listener
function nextSegment() {
var url = templateUrl.replace("$Number$", index);
GET(url, appendToBuffer);
index++;
if (index > numberOfChunks) {
sourceBuffer.removeEventListener("updateend", nextSegment);
}
}
// get the next audio segment based on index and append it; once everything is loaded, remove the listener
function nextAudioSegment() {
var audioUrl = templateUrlForAudio.replace("$Number$", audioIndex);
GET(audioUrl, appendToAudioBuffer);
audioIndex++;
if (audioIndex > numberOfChunks) {
audioSourceBuffer.removeEventListener("updateend", nextAudioSegment);
}
}
// add to existing source
function appendToBuffer(videoChunk) {
if (videoChunk) {
sourceBuffer.appendBuffer(new Uint8Array(videoChunk));
}
}
function appendToAudioBuffer(audioChunk) {
if (audioChunk) {
audioSourceBuffer.appendBuffer(new Uint8Array(audioChunk));
}
}
// simple network helper
function GET(url, callback) {
var xhr = new XMLHttpRequest();
xhr.open("GET", url);
xhr.responseType = "arraybuffer";
xhr.onload = function(e) {
if (xhr.status != 200) {
console.warn("Unexpected status code " + xhr.status + " for " + url);
return false;
}
callback(xhr.response);
};
xhr.send();
}
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<title>MSE Demo</title>
</head>
<body onload="onPageLoad()">
<h1>MSE Demo</h1>
<div>
<video muted controls width="80%"></video>
</div>
</body>
</html>
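For the original silent-WebM question, the same structure applies: only create the SourceBuffers the asset actually has. A minimal sketch, reusing videoHasAudio and asset from the question (the WebM codec strings are assumptions):

function onMediaSourceOpen() {
  // Always create the video buffer.
  sourceBuffer = ms.addSourceBuffer('video/webm; codecs="vp9"');
  sourceBuffer.addEventListener("updateend", nextSegment);
  // Only create the audio buffer when the asset really has an audio track,
  // so playback never stalls waiting for audio data that will not arrive.
  if (videoHasAudio(asset)) {
    audioSourceBuffer = ms.addSourceBuffer('audio/webm; codecs="vorbis"');
    audioSourceBuffer.addEventListener("updateend", nextAudioSegment);
    GET(initAudioUrl, appendToAudioBuffer);
  }
  GET(initUrl, appendToBuffer);
  video.play();
}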
I'm trying to make a video call using WebRTC on the same local machine. I can see the remote video in Firefox, but I can't see it in Chrome. When I logged my code to the console, I found that event.candidate is null for both localPeerConnection.onicecandidate and remotePeerConnection.onicecandidate. So I tested in Chrome again and found that the connectionState is "new" while the iceGatheringState is "complete". Trying Trickle ICE (https://webrtc.github.io/samples/src/content/peerconnection/trickle-ice/), I got no ICE candidates in Chrome, but I did in Firefox.
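Here is roughly how I'm observing those events (a minimal sketch; pc stands for either peer connection):

pc.onicecandidate = function(event) {
  if (event.candidate) {
    console.log("candidate:", event.candidate.candidate);
  } else {
    console.log("ICE gathering finished"); // a null candidate marks the end
  }
};
pc.onicegatheringstatechange = function() {
  console.log("iceGatheringState:", pc.iceGatheringState);
};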
Thank you :)
Here's my code
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>WebRTC</title>
</head>
<body>
<video id="local" playsinline muted autoplay controls></video>
<video id="remote" playsinline autoplay controls></video>
<button id="startButton">Start</button>
<button id="callButton">Call</button>
<button id="hangupButton">Hang UP</button>
<script type="text/javascript">
// Stream info
var localStream, localPeerConnection, remotePeerConnection;
// Video tags
var localVideo = document.getElementById("local");
var remoteVideo = document.getElementById("remote");
// Buttons
var startButton = document.getElementById("startButton");
var callButton = document.getElementById("callButton");
var hangupButton = document.getElementById("hangupButton");
startButton.disabled = false;
callButton.disabled = true;
hangupButton.disabled = true;
var servers = null;
startButton.onclick = start;
callButton.onclick = call;
hangupButton.onclick = hangup;
function log(text){
console.log("At time"+(performance.now()/1000).toFixed(3)+"-->\n"+ text);
}
function successCallback(stream){
log("Received local stream");
localVideo.srcObject = stream;
localStream = stream;
callButton.disabled = false;
}
var constraints = {audio: true, video: true};
function start(){
log("Requesting local stream");
startButton.disabled = true;
navigator.mediaDevices.getUserMedia(constraints).then(successCallback).catch(function(error){ console.error("getUserMedia error:", error); });
}
function call(){
callButton.disabled = true;
hangupButton.disabled = false;
log("Starting Call");
if(navigator.mediaDevices.getUserMedia){
if(localStream.getVideoTracks().length > 0){
log("Using video device:" + localStream.getVideoTracks()[0].label);
}
if(localStream.getAudioTracks().length > 0){
log("Using audio device:" + localStream.getAudioTracks()[0].label);
}
}
localPeerConnection = new RTCPeerConnection(servers);
log("Created local peer connection object localPeerConnection");
localPeerConnection.onicecandidate = gotLocalIceCandidate;
localPeerConnection.onconnectionstatechange = function(event){
console.log("QWERTY");
}
remotePeerConnection = new RTCPeerConnection(servers);
log("Created remote peer connection object remotePeerConnection");
remotePeerConnection.onicecandidate = gotRemoteIceCandidate;
if (remotePeerConnection.addTrack !== undefined) {
remotePeerConnection.ontrack = ev => {
ev.streams.forEach(stream => doAddStream(stream));
}
} else {
remotePeerConnection.onaddstream = ev => {
doAddStream(ev.stream);
}
}
localStream.getTracks().forEach((track)=>{
localPeerConnection.addTrack(track, localStream);
});
// localPeerConnection.addStream(localStream);
log("Added localStream to localPeerConnection");
localPeerConnection.createOffer(gotLocalDescription, onSignalingError);
}
function gotLocalDescription(description){
localPeerConnection.setLocalDescription(description);
log("Offer from localPeerConnection: "+ description.sdp);
remotePeerConnection.setRemoteDescription(description);
remotePeerConnection.createAnswer(gotRemoteDescription, onSignalingError);
}
function gotRemoteDescription(description){
remotePeerConnection.setLocalDescription(description);
log("Answer from remotePeerConnection:"+description.sdp);
localPeerConnection.setRemoteDescription(description);
}
function hangup(){
log("Ending call");
localPeerConnection.close();
remotePeerConnection.close();
localPeerConnection = null;
remotePeerConnection = null;
hangupButton.disabled = true;
callButton.disabled = false;
}
function onSignalingError(error){
log("Failed to create signaling message: "+ error.name);
}
function gotLocalIceCandidate(event){
if(event.candidate){
remotePeerConnection.addIceCandidate(new RTCIceCandidate(event.candidate));
log("Local ICE candidate: \n" + event.candidate.candidate);
}
}
function gotRemoteIceCandidate(event){
if(event.candidate){
localPeerConnection.addIceCandidate(new RTCIceCandidate(event.candidate));
log("Remote ICE candidate: \n" +event.candidate.candidate);
}
}
function doAddStream(stream){
remoteVideo.srcObject = stream;
log("Received remote stream Do add Stream");
}
</script>
</body>
</html>
I am trying to use the Web Audio API to play sound in my React application.
It's currently playing sound in all browsers except Safari v12.1.
I am aware Safari has restrictions on autoplay and requires user interaction to play sound, so I have a play button which calls the _play() function:
_play = (url, index) => {
this._getData(url);
this.source.start(index)
}
It's calling the _getData() function which looks like this:
_getData(url) {
this.source = this.audioContext.createBufferSource();
var request = new XMLHttpRequest();
request.open('GET', url, true);
request.responseType = 'arraybuffer';
request.onload = () => {
var audioData = request.response;
console.log(this.audioContext)
this.audioContext.decodeAudioData(audioData, buffer => {
this.source.buffer = buffer;
this.source.connect(this.audioContext.destination);
},
function(e){ console.log("Error with decoding audio data" + e.err); });
}
request.send();
}
this.audioContext is created in the component constructor using:
this.audioContext = new (window.AudioContext || window.webkitAudioContext)();
The console.log(this.audioContext) inside request.onload shows the AudioContext both before and after pressing play (screenshots omitted), but no sound is playing (in Safari).
What am I doing wrong?
I think the problem you ran into is that Safari does not allow you to modify the buffer anymore once start() has been called.
For example, the following page plays a second of noise in Safari when you press the play button.
<!DOCTYPE html>
<html>
<body>
<button id="play-button">play</button>
<script>
document
.getElementById('play-button')
.addEventListener('click', () => {
const audioContext = new AudioContext();
const audioBufferSourceNode = audioContext.createBufferSource();
const sampleRate = audioContext.sampleRate;
const audioBuffer = audioContext.createBuffer(1, sampleRate, sampleRate);
const channelData = audioBuffer.getChannelData(0);
for (let i = 0; i < sampleRate; i += 1) {
channelData[i] = (Math.random() * 2) - 1;
}
audioBufferSourceNode.buffer = audioBuffer;
audioBufferSourceNode.connect(audioContext.destination);
audioBufferSourceNode.start(audioContext.currentTime);
});
</script>
</body>
</html>
But it stops working if you modify it slightly. If you start the audioBufferSourceNode before assigning the buffer, there is no output anymore.
audioBufferSourceNode.connect(audioContext.destination);
audioBufferSourceNode.start(audioContext.currentTime);
audioBufferSourceNode.buffer = audioBuffer;
I guess you can get your code working by waiting for the HTTP response and the audio decoding before you start the source. Make sure to execute this.source.buffer = buffer before you execute this.source.start(index).
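A minimal sketch of that change, reusing your property names (it uses the callback form of decodeAudioData, since older Safari versions don't return a promise from it):

_play = (url, index) => {
    // Wait for the download and the decode to finish before starting.
    this._getData(url).then(() => this.source.start(index));
}

_getData(url) {
    this.source = this.audioContext.createBufferSource();
    return fetch(url)
        .then(response => response.arrayBuffer())
        .then(audioData => new Promise((resolve, reject) => {
            this.audioContext.decodeAudioData(audioData, resolve, reject);
        }))
        .then(buffer => {
            this.source.buffer = buffer; // assign the buffer first ...
            this.source.connect(this.audioContext.destination);
        }); // ... _play() calls start() only after this resolves
}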
I hope this helps.
I have 4 iframes, and I want to retrieve them by their ids when they are clicked.
I walk over my iframes using the Google Analytics tracking code below and put their ids in an array.
Then I create a YT.Player object for each.
Problem: the onPlayerStateChange method never runs.
Here is my code:
<script type="text/javascript">
/*
YouTube Analytics
Code adapted from:
http://www.lunametrics.com/blog/2012/10/22/automatically-track-youtube-videos-events-google-analytics/
http://lunametrics.wpengine.netdna-cdn.com/js/lunametrics-youtube.js
Code adapted by Alex Mueller for ISITE Design http://isitedesign.com
*/
// enable cross-domain scripting in IE < 10 for the YouTube Data API
// https://github.com/jaubourg/ajaxHooks/blob/master/src/xdr.js
if(window.XDomainRequest){jQuery.ajaxTransport(function(e){if(e.crossDomain&&e.async){if(e.timeout){e.xdrTimeout=e.timeout;delete e.timeout}var t;return{send:function(n,r){function i(e,n,i,s){t.onload=t.onerror=t.ontimeout=jQuery.noop;t=undefined;r(e,n,i,s)}t=new XDomainRequest;t.onload=function(){i(200,"OK",{text:t.responseText},"Content-Type: "+t.contentType)};t.onerror=function(){i(404,"Not Found")};t.onprogress=jQuery.noop;t.ontimeout=function(){i(0,"timeout")};t.timeout=e.xdrTimeout||Number.MAX_VALUE;t.open(e.type,e.url);t.send(e.hasContent&&e.data||null)},abort:function(){if(t){t.onerror=jQuery.noop;t.abort()}}}}})}
// load the YouTube iframe API
var tag = document.createElement('script');
tag.src = "//www.youtube.com/iframe_api";
var firstScriptTag = document.getElementsByTagName('script')[0];
firstScriptTag.parentNode.insertBefore(tag, firstScriptTag);
// initialize our arrays to hold video and player information
var playerArray = [],
videoArray = [];
// safely pass the jQuery object as $
(function($) {
// enables tracking of all YouTube videos on the page
function trackYouTube() {
// iterate through every iframe on the page
$('iframe').each(function(i) {
// grab the video source and other properties
var baseUrlLength,
$iframe = $(this),
iframeSrc = $iframe.attr('src'),
isYouTubeVideo = false,
videoID,
url;
// if the video uses the http protocol
if (iframeSrc.substr(0,25) == "http://www.youtube.com/v/") {
baseUrlLength = 25;
isYouTubeVideo = true;
}
// otherwise if the video uses the https protocol
else if (iframeSrc.substr(0,26) == "https://www.youtube.com/v/") {
baseUrlLength = 26;
isYouTubeVideo = true;
}
// if we're dealing with a YouTube video, store its information in our arrays
if (isYouTubeVideo) {
// grab the videoID
videoID = iframeSrc.substr(baseUrlLength);
url = '//gdata.youtube.com/feeds/api/videos/' + videoID + '?v=2&alt=json';
// if the ID ends with extra characters...
if (videoID.indexOf('&') > -1) {
// ...remove the extra characters
videoID = videoID.substr(0, videoID.indexOf('&'));
}
// put an object in our array with the videoID...
videoArray[i] = {};
videoArray[i].id = videoID;
// put the videoID on the iframe as its id
$iframe.attr('id', videoID);
}
});
}
$(function() {
// initiate tracking on document ready
trackYouTube();
onYouTubeIframeAPIReady();
});
})(jQuery);
function onYouTubeIframeAPIReady() {
// insert YouTube Player objects into our playerArray
for (var i = 0; i < videoArray.length; i++) {
playerArray[i] = new YT.Player(videoArray[i].id, {
events: {
'onStateChange': onPlayerStateChange
}
});
}
}
// when the player changes states
function onPlayerStateChange(event) {
// if the video begins playing, send the event
if (event.data == YT.PlayerState.PLAYING) {
alert();
}
// if the video ends, send the event
if (event.data == YT.PlayerState.ENDED) {
alert();
}
}
</script>
You have to set the enablejsapi parameter to 1 in your iframe embed URL.
By default the parameter is 0, and unless you set it to 1 the callbacks won't fire.
Reference: https://developers.google.com/youtube/js_api_reference
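For example, an embed that allows the API callbacks could look like this (the video ID is just a placeholder):

<iframe id="player"
        src="https://www.youtube.com/embed/VIDEO_ID?enablejsapi=1"
        width="640" height="360" frameborder="0"></iframe>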
I have the following code from http://forestmist.org/blog/web-audio-api-loops/.
It works well, but I need record functionality that can record which buttons were previously clicked and store their audio too. Any help?
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Web Audio API Loops Demo</title>
</head>
<body>
<form>
<button id="button-loop-1" type="button" value="1">Loop 1</button>
<button id="button-loop-2" type="button" value="2">Loop 2</button>
</form>
<script>
//--------------
// Audio Object
//--------------
var audio = {
buffer: {},
compatibility: {},
files: [
'synth.wav',
'beat.wav'
],
proceed: true,
source_loop: {},
source_once: {}
};
//-----------------
// Audio Functions
//-----------------
audio.findSync = function(n) {
var first = 0,
current = 0,
offset = 0;
// Find the audio source with the earliest startTime to sync all others to
for (var i in audio.source_loop) {
current = audio.source_loop[i]._startTime;
if (current > 0) {
if (current < first || first === 0) {
first = current;
}
}
}
if (audio.context.currentTime > first) {
offset = (audio.context.currentTime - first) % audio.buffer[n].duration;
}
return offset;
};
audio.play = function(n) {
if (audio.source_loop[n]._playing) {
audio.stop(n);
} else {
audio.source_loop[n] = audio.context.createBufferSource();
audio.source_loop[n].buffer = audio.buffer[n];
audio.source_loop[n].loop = true;
audio.source_loop[n].connect(audio.context.destination);
var offset = audio.findSync(n);
audio.source_loop[n]._startTime = audio.context.currentTime;
if (audio.compatibility.start === 'noteOn') {
/*
The deprecated noteOn() function does not support offsets.
Compensate by using noteGrainOn() with an offset to play once and then schedule a noteOn() call to loop after that.
*/
audio.source_once[n] = audio.context.createBufferSource();
audio.source_once[n].buffer = audio.buffer[n];
audio.source_once[n].connect(audio.context.destination);
audio.source_once[n].noteGrainOn(0, offset, audio.buffer[n].duration - offset); // currentTime, offset, duration
/*
Note about the third parameter of noteGrainOn().
If your sound is 10 seconds long, your offset 5 and duration 5 then you'll get what you expect.
If your sound is 10 seconds long, your offset 5 and duration 10 then the sound will play from the start instead of the offset.
*/
// Now queue up our looping sound to start immediately after the source_once audio plays.
audio.source_loop[n][audio.compatibility.start](audio.context.currentTime + (audio.buffer[n].duration - offset));
} else {
audio.source_loop[n][audio.compatibility.start](0, offset);
}
audio.source_loop[n]._playing = true;
}
};
audio.stop = function(n) {
if (audio.source_loop[n]._playing) {
audio.source_loop[n][audio.compatibility.stop](0);
audio.source_loop[n]._playing = false;
audio.source_loop[n]._startTime = 0;
if (audio.compatibility.start === 'noteOn') {
audio.source_once[n][audio.compatibility.stop](0);
}
}
};
//-----------------------------
// Check Web Audio API Support
//-----------------------------
try {
// More info at http://caniuse.com/#feat=audio-api
window.AudioContext = window.AudioContext || window.webkitAudioContext;
audio.context = new window.AudioContext();
} catch(e) {
audio.proceed = false;
alert('Web Audio API not supported in this browser.');
}
if (audio.proceed) {
//---------------
// Compatibility
//---------------
(function() {
var start = 'start',
stop = 'stop',
buffer = audio.context.createBufferSource();
if (typeof buffer.start !== 'function') {
start = 'noteOn';
}
audio.compatibility.start = start;
if (typeof buffer.stop !== 'function') {
stop = 'noteOff';
}
audio.compatibility.stop = stop;
})();
//-------------------------------
// Setup Audio Files and Buttons
//-------------------------------
for (var a in audio.files) {
(function() {
var i = parseInt(a) + 1;
var req = new XMLHttpRequest();
req.open('GET', audio.files[i - 1], true); // array starts with 0 hence the -1
req.responseType = 'arraybuffer';
req.onload = function() {
audio.context.decodeAudioData(
req.response,
function(buffer) {
audio.buffer[i] = buffer;
audio.source_loop[i] = {};
var button = document.getElementById('button-loop-' + i);
button.addEventListener('click', function(e) {
e.preventDefault();
audio.play(this.value);
});
},
function() {
console.log('Error decoding audio "' + audio.files[i - 1] + '".');
}
);
};
req.send();
})();
}
}
</script>
</body>
</html>
Check out Recorder.js (https://github.com/mattdiamond/Recorderjs). It should help you out.
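A minimal sketch of how it could be wired into this page (assuming recorder.js is loaded; the loops are routed through a shared gain node so the recorder hears everything):

var mix = audio.context.createGain();
mix.connect(audio.context.destination);
// In audio.play(), connect sources to the mix instead of the destination:
// audio.source_loop[n].connect(mix);

var recorder = new Recorder(mix); // from mattdiamond/Recorderjs
recorder.record();                // start capturing the mixed output
// ... the user toggles loops for a while ...
recorder.stop();
recorder.exportWAV(function(blob) {
  // Store or download the recorded audio.
  console.log("Recorded WAV available at", URL.createObjectURL(blob));
});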
I'm displaying a Bing Map (v7) in my WebMatrix 2 website with a series of pushpins and infoboxes drawn from a SQL Server Express database via a JSON query.
While the map appears in all 3 browsers I'm testing (IE, FF & Chrome), the pushpins sometimes do not show in FF & Chrome, particularly if I refresh with Ctrl+F5.
This is my first JSON and Bing Maps app, so I expect there are a few mistakes.
Any suggestions on how to improve the code and get display consistency?
@{
Layout = "~/_MapLayout.cshtml";
}
<script type="text/javascript" src="~/Scripts/jquery-1.9.1.min.js"></script>
<script type="text/javascript" src="http://ecn.dev.virtualearth.net/mapcontrol/mapcontrol.ashx?v=7.0"></script>
<link rel="StyleSheet" href="infoboxStyles.css" type="text/css">
<script type="text/javascript">
var map = null;
var pinLayer, pinInfobox;
var mouseover;
var pushpinFrameHTML = '<div class="infobox"><a class="infobox_close" href="javascript:closeInfobox()"><img src="/Images/close2.jpg" /></a><div class="infobox_content">{content}</div></div><div class="infobox_pointer"><img src="images/pointer_shadow.png"></div>';
var pinLayer = new Microsoft.Maps.EntityCollection();
var infoboxLayer = new Microsoft.Maps.EntityCollection();
function getMap() {
map = new Microsoft.Maps.Map(document.getElementById('map'), {
credentials: "my-key",
zoom: 4,
center: new Microsoft.Maps.Location(-25, 135),
mapTypeId: Microsoft.Maps.MapTypeId.road
});
pinInfobox = new Microsoft.Maps.Infobox(new Microsoft.Maps.Location(0, 0), { visible: false });
AddData();
}
$(function AddData() {
$.getJSON('/ListSchools', function (data) {
var schools = data;
$.each(schools, function (index, school) {
for (var i = 0; i < schools.length; i++) {
var pinLocation = new Microsoft.Maps.Location(school.SchoolLat, school.SchoolLon);
var NewPin = new Microsoft.Maps.Pushpin(pinLocation);
NewPin.title = school.SchoolName;
NewPin.description = "-- Learn More --";
pinLayer.push(NewPin); //add pushpin to pinLayer
Microsoft.Maps.Events.addHandler(NewPin, 'mouseover', displayInfobox);
}
});
infoboxLayer.push(pinInfobox);
map.entities.push(pinLayer);
map.entities.push(infoboxLayer);
});
})
function displayInfobox(e) {
if (e.targetType == "pushpin") {
var pin = e.target;
var html = "<span class='infobox_title'>" + pin.title + "</span><br/>" + pin.description;
pinInfobox.setOptions({
visible: true,
offset: new Microsoft.Maps.Point(-33, 20),
htmlContent: pushpinFrameHTML.replace('{content}', html)
});
//set location of infobox
pinInfobox.setLocation(pin.getLocation());
}
}
function closeInfobox() {
pinInfobox.setOptions({ visible: false });
}
function getCurrentLocation() {
var geoLocationProvider = new Microsoft.Maps.GeoLocationProvider(map);
geoLocationProvider.getCurrentPosition();
}
</script>
<body onload="getMap();">
<div id="map" style="position:relative; width:800px; height:600px;"></div>
<div>
<input type="button" value="Find Nearest Schools" onclick="getCurrentLocation();" />
</div>
</body>
The JSON file is simply
@{
var db = Database.Open("StarterSite");
var sql = @"SELECT * FROM Schools WHERE SchoolLon != ' ' AND SchoolLon != 'null'";
var data = db.Query(sql);
Json.Write(data, Response.Output);
}
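For reference, each row the endpoint returns is shaped roughly like this (field names taken from the map script; the values are made up):

[
  { "SchoolName": "Example High", "SchoolLat": -33.87, "SchoolLon": 151.21 }
]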
Add your pinLayer, infobox, and infoboxLayer to the map before calling the AddData function and see if that makes a difference. Also verify that school.SchoolLat and school.SchoolLon are numbers and not string versions of numbers; if they are strings, use parseFloat to convert them. Other than that, everything looks fine.
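A minimal sketch of those two changes, reusing the question's variable names:

function getMap() {
    map = new Microsoft.Maps.Map(document.getElementById('map'), { /* same options as before */ });
    pinInfobox = new Microsoft.Maps.Infobox(new Microsoft.Maps.Location(0, 0), { visible: false });
    // Attach the layers up front, before any data arrives.
    infoboxLayer.push(pinInfobox);
    map.entities.push(pinLayer);
    map.entities.push(infoboxLayer);
    AddData();
}

// Inside the $.each loop, coerce the coordinates in case the JSON
// serialized them as strings:
var pinLocation = new Microsoft.Maps.Location(
    parseFloat(school.SchoolLat),
    parseFloat(school.SchoolLon)
);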