I have the following code from http://forestmist.org/blog/web-audio-api-loops/
It works well, but I need record functionality that can record which loop buttons were clicked and store that audio too. Any help?
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Web Audio API Loops Demo</title>
</head>
<body>
<form>
<button id="button-loop-1" type="button" value="1">Loop 1</button>
<button id="button-loop-2" type="button" value="2">Loop 2</button>
</form>
<script>
//--------------
// Audio Object
//--------------
var audio = {
buffer: {},
compatibility: {},
files: [
'synth.wav',
'beat.wav'
],
proceed: true,
source_loop: {},
source_once: {}
};
//-----------------
// Audio Functions
//-----------------
audio.findSync = function(n) {
var first = 0,
current = 0,
offset = 0;
// Find the audio source with the earliest startTime to sync all others to
for (var i in audio.source_loop) {
current = audio.source_loop[i]._startTime;
if (current > 0) {
if (current < first || first === 0) {
first = current;
}
}
}
if (audio.context.currentTime > first) {
offset = (audio.context.currentTime - first) % audio.buffer[n].duration;
}
return offset;
};
audio.play = function(n) {
if (audio.source_loop[n]._playing) {
audio.stop(n);
} else {
audio.source_loop[n] = audio.context.createBufferSource();
audio.source_loop[n].buffer = audio.buffer[n];
audio.source_loop[n].loop = true;
audio.source_loop[n].connect(audio.context.destination);
var offset = audio.findSync(n);
audio.source_loop[n]._startTime = audio.context.currentTime;
if (audio.compatibility.start === 'noteOn') {
/*
The deprecated noteOn() function does not support offsets.
Compensate by using noteGrainOn() with an offset to play once and then schedule a noteOn() call to loop after that.
*/
audio.source_once[n] = audio.context.createBufferSource();
audio.source_once[n].buffer = audio.buffer[n];
audio.source_once[n].connect(audio.context.destination);
audio.source_once[n].noteGrainOn(0, offset, audio.buffer[n].duration - offset); // currentTime, offset, duration
/*
Note about the third parameter of noteGrainOn().
If your sound is 10 seconds long, your offset 5 and duration 5 then you'll get what you expect.
If your sound is 10 seconds long, your offset 5 and duration 10 then the sound will play from the start instead of the offset.
*/
// Now queue up our looping sound to start immediately after the source_once audio plays.
audio.source_loop[n][audio.compatibility.start](audio.context.currentTime + (audio.buffer[n].duration - offset));
} else {
audio.source_loop[n][audio.compatibility.start](0, offset);
}
audio.source_loop[n]._playing = true;
}
};
audio.stop = function(n) {
if (audio.source_loop[n]._playing) {
audio.source_loop[n][audio.compatibility.stop](0);
audio.source_loop[n]._playing = false;
audio.source_loop[n]._startTime = 0;
if (audio.compatibility.start === 'noteOn') {
audio.source_once[n][audio.compatibility.stop](0);
}
}
};
//-----------------------------
// Check Web Audio API Support
//-----------------------------
try {
// More info at http://caniuse.com/#feat=audio-api
window.AudioContext = window.AudioContext || window.webkitAudioContext;
audio.context = new window.AudioContext();
} catch(e) {
audio.proceed = false;
alert('Web Audio API not supported in this browser.');
}
if (audio.proceed) {
//---------------
// Compatibility
//---------------
(function() {
var start = 'start',
stop = 'stop',
buffer = audio.context.createBufferSource();
if (typeof buffer.start !== 'function') {
start = 'noteOn';
}
audio.compatibility.start = start;
if (typeof buffer.stop !== 'function') {
stop = 'noteOff';
}
audio.compatibility.stop = stop;
})();
//-------------------------------
// Setup Audio Files and Buttons
//-------------------------------
for (var a in audio.files) {
(function() {
var i = parseInt(a) + 1;
var req = new XMLHttpRequest();
req.open('GET', audio.files[i - 1], true); // array starts with 0 hence the -1
req.responseType = 'arraybuffer';
req.onload = function() {
audio.context.decodeAudioData(
req.response,
function(buffer) {
audio.buffer[i] = buffer;
audio.source_loop[i] = {};
var button = document.getElementById('button-loop-' + i);
button.addEventListener('click', function(e) {
e.preventDefault();
audio.play(this.value);
});
},
function() {
console.log('Error decoding audio "' + audio.files[i - 1] + '".');
}
);
};
req.send();
})();
}
}
</script>
</body>
</html>
Check out Recorder.js (https://github.com/mattdiamond/Recorderjs). It should help you out.
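For the looper above, a minimal sketch of how Recorder.js could be wired in (an assumption, not a tested drop-in: it presumes recorder.js is included on the page and workerPath matches your layout). The idea is to route every loop through a shared GainNode and record that node instead of connecting sources straight to the destination:
var mix = audio.context.createGain();
mix.connect(audio.context.destination);
// in audio.play(), connect sources to `mix` instead of audio.context.destination:
// audio.source_loop[n].connect(mix);
var recorder = new Recorder(mix, { workerPath: 'recorderWorker.js' });
function startRecording() {
    recorder.record(); // captures whatever loops are playing through `mix`
}
function stopRecording() {
    recorder.stop();
    recorder.exportWAV(function(blob) {
        var url = URL.createObjectURL(blob); // e.g. set as the src of an <audio> element
        console.log('Recorded WAV available at ' + url);
    });
    recorder.clear();
}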
Related
I've written an MSE video player and it's loading WebMs. These load well; however, I have a problem with video files that have no audio track.
I've tried changing the codecs string depending on whether there is audio:
mediaSource.addSourceBuffer(`video/webm; ${videoHasAudio(asset) ? 'codecs="vp9,vorbis"' : 'codecs="vp9"'}`)
I thought this was working, but now it isn't. How do I play silent WebMs in MSE?
I have added a sample MSE project here:
https://github.com/thowfeeq178/MediaSourceExtention
Check out the example in the GitHub repo.
Overview:
we need to add one SourceBuffer for video and one for audio, like below:
// BBB : https://dash.akamaized.net/akamai/bbb_30fps/bbb_30fps.mpd
var baseUrl = "https://dash.akamaized.net/akamai/bbb_30fps/";
var initUrl = baseUrl + "bbb_30fps_480x270_600k/bbb_30fps_480x270_600k_0.m4v";
var initAudioUrl = baseUrl + "bbb_a64k/bbb_a64k_0.m4a";
var templateUrl =
baseUrl + "bbb_30fps_480x270_600k/bbb_30fps_480x270_600k_$Number$.m4v";
var templateUrlForAudio = baseUrl + "bbb_a64k/bbb_a64k_$Number$.m4a";
var sourceBuffer;
var audioSourceBuffer;
var index = 0;
var audioIndex = 0;
var numberOfChunks = 159;
var video = document.querySelector("video");
var ms = new MediaSource();
function onPageLoad() {
console.log("page loaded ..");
if (!window.MediaSource) {
console.error("No Media Source API available");
return;
}
// making source controlled by JS using MS
video.src = window.URL.createObjectURL(ms);
ms.addEventListener("sourceopen", onMediaSourceOpen);
}
function onMediaSourceOpen() {
// create source buffer
sourceBuffer = ms.addSourceBuffer('video/mp4; codecs="avc1.4d401f"');
audioSourceBuffer = ms.addSourceBuffer('audio/mp4; codecs="mp4a.40.5"');
// whenever one segment is loaded, go for the next
sourceBuffer.addEventListener("updateend", nextSegment);
audioSourceBuffer.addEventListener("updateend", nextAudioSegment);
// fetch the init segments
GET(initUrl, appendToBuffer);
GET(initAudioUrl, appendToAudioBuffer);
// play
video.play();
}
// get the next segment based on index and append it; once everything is loaded, remove the listener
function nextSegment() {
var url = templateUrl.replace("$Number$", index);
GET(url, appendToBuffer);
index++;
if (index > numberOfChunks) {
sourceBuffer.removeEventListener("updateend", nextSegment);
}
}
// get the next audio segment based on index and append it; once everything is loaded, remove the listener
function nextAudioSegment() {
var audioUrl = templateUrlForAudio.replace("$Number$", audioIndex);
GET(audioUrl, appendToAudioBuffer);
audioIndex++;
if (audioIndex > numberOfChunks) {
audioSourceBuffer.removeEventListener("updateend", nextAudioSegment);
}
}
// add to existing source
function appendToBuffer(videoChunk) {
if (videoChunk) {
sourceBuffer.appendBuffer(new Uint8Array(videoChunk));
}
}
function appendToAudioBuffer(audioChunk) {
if (audioChunk) {
audioSourceBuffer.appendBuffer(new Uint8Array(audioChunk));
}
}
// just network thing
function GET(url, callback) {
var xhr = new XMLHttpRequest();
xhr.open("GET", url);
xhr.responseType = "arraybuffer";
xhr.onload = function(e) {
if (xhr.status != 200) {
console.warn("Unexpected status code " + xhr.status + " for " + url);
return false;
}
callback(xhr.response);
};
xhr.send();
}
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<title>MSE Demo</title>
</head>
<body onload="onPageLoad()">
<h1>MSE Demo</h1>
<div>
<video muted controls width="80%"></video>
</div>
</body>
</html>
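For the silent-WebM case in the original question, the same pattern should work with a single, video-only SourceBuffer; as a sketch (codecs assumed to be vp9, as in the question):
// with no audio track, create only the video SourceBuffer in onMediaSourceOpen
// and skip the audio buffer and the audio segment loop entirely
sourceBuffer = ms.addSourceBuffer('video/webm; codecs="vp9"');
sourceBuffer.addEventListener("updateend", nextSegment);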
I've created a demo to show this; it seems that when I show a visualization, the recorded audio gets really crunchy. Is there any way I can fix this? It works in Chrome and Firefox; Safari is the only offender.
<style>
html, body {
margin: 0!important;
padding: 0!important;
overflow: hidden!important;
width: 100%;
}
</style>
<title>Audio Recording | RecordRTC</title>
<h1>Simple Audio Recording using RecordRTC</h1>
<meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0">
<br>
<button id="btn-start-recording">Start Recording</button>
<button id="btn-stop-recording" disabled>Stop Recording</button>
<label><input id="visualizer" type="checkbox" checked/>Show Visualizer</label>
<hr>
<canvas id="audio-canvas"></canvas>
<div><audio controls autoplay></audio></div>
<script src="https://cdn.webrtc-experiment.com/RecordRTC.js"></script>
<script>
var audio = document.querySelector('audio');
function captureMicrophone(callback) {
if(microphone) {
callback(microphone);
return;
}
if(typeof navigator.mediaDevices === 'undefined' || !navigator.mediaDevices.getUserMedia) {
alert('This browser does not support the WebRTC getUserMedia API.');
if(!!navigator.getUserMedia) {
alert('This browser seems to support only the deprecated getUserMedia API.');
}
return; // bail out; the navigator.mediaDevices.getUserMedia call below would throw
}
navigator.mediaDevices.getUserMedia({
audio: {
echoCancellation: false
}
}).then(function(mic) {
callback(mic);
}).catch(function(error) {
alert('Unable to capture your microphone. Please check console logs.');
console.error(error);
});
}
function replaceAudio(src) {
var newAudio = document.createElement('audio');
newAudio.controls = true;
if(src) {
newAudio.src = src;
}
var parentNode = audio.parentNode;
parentNode.innerHTML = '';
parentNode.appendChild(newAudio);
audio = newAudio;
}
function stopRecordingCallback() {
replaceAudio(URL.createObjectURL(recorder.getBlob()));
btnStartRecording.disabled = false;
setTimeout(function() {
if(!audio.paused) return;
setTimeout(function() {
if(!audio.paused) return;
audio.play();
}, 1000);
audio.play();
}, 300);
audio.play();
if(microphone) {
microphone.stop();
microphone = null;
}
}
var recorder; // globally accessible
var microphone;
var btnStartRecording = document.getElementById('btn-start-recording');
var btnStopRecording = document.getElementById('btn-stop-recording');
var canvas = document.getElementById('audio-canvas');
var visualizerCheckbox = document.getElementById('visualizer');
btnStartRecording.onclick = function() {
this.disabled = true;
this.style.border = '';
this.style.fontSize = '';
if (!microphone) {
captureMicrophone(function(mic) {
microphone = mic;
replaceAudio();
audio.muted = true;
audio.play();
btnStartRecording.disabled = false;
btnStartRecording.style.border = '1px solid red';
btnStartRecording.style.fontSize = '150%';
alert('Please click the Start Recording button again. The first click only requested microphone access; the next click will record.');
});
return;
}
replaceAudio();
audio.muted = true;
audio.play();
if(visualizerCheckbox.checked){
initScope(canvas);
}
var options = {
type: 'audio',
numberOfAudioChannels: 2,
checkForInactiveTracks: true,
bufferSize: 16384,
sampleRate: 48000
};
if(recorder) {
recorder.destroy();
recorder = null;
}
recorder = RecordRTC(microphone, options);
recorder.startRecording();
btnStopRecording.disabled = false;
};
btnStopRecording.onclick = function() {
this.disabled = true;
recorder.stopRecording(stopRecordingCallback);
};
function initScope(audioScopeCanvas) {
if (audioScopeCanvas) {
var analyser;
var mic;
var javascriptNode;
var canvasContext;
var audioContext = new AudioContext(); // separate context just for the visualizer
analyser = audioContext.createAnalyser();
mic = audioContext.createMediaStreamSource(microphone);
javascriptNode = audioContext.createScriptProcessor(2048, 1, 1);
analyser.smoothingTimeConstant = 0.3;
analyser.fftSize = 1024;
mic.connect(analyser);
analyser.connect(javascriptNode);
javascriptNode.connect(audioContext.destination);
//canvasContext = $("#canvas")[0].getContext("2d");
canvasContext = audioScopeCanvas.getContext("2d");
javascriptNode.onaudioprocess = function () {
var array = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteFrequencyData(array);
var values = 0;
var length = array.length;
for (var i = 0; i < length; i++) {
values += array[i];
}
var average = values / length;
canvasContext.clearRect(0, 0, 60, 130);
canvasContext.fillStyle = '#00ff00';
canvasContext.fillRect(0, 130 - average, 25, 130);
};
}
}
</script>
You can play with it here:
http://andrew.colchagoff.com/safari-record.html
I'm using RecordRTC.
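One thing worth trying, offered only as a sketch and not a confirmed Safari fix: poll an AnalyserNode from requestAnimationFrame instead of using a ScriptProcessorNode, so the drawing work never runs on the audio path (variable names reuse the demo's canvas and microphone):
var scopeCtx = new (window.AudioContext || window.webkitAudioContext)();
var analyser = scopeCtx.createAnalyser();
analyser.smoothingTimeConstant = 0.3;
analyser.fftSize = 1024;
scopeCtx.createMediaStreamSource(microphone).connect(analyser);
var canvasContext = canvas.getContext('2d');
var data = new Uint8Array(analyser.frequencyBinCount);
(function draw() {
    analyser.getByteFrequencyData(data);
    var sum = 0;
    for (var i = 0; i < data.length; i++) sum += data[i];
    var average = sum / data.length;
    canvasContext.clearRect(0, 0, 60, 130);
    canvasContext.fillStyle = '#00ff00';
    canvasContext.fillRect(0, 130 - average, 25, 130);
    requestAnimationFrame(draw); // repaint on the rendering loop, not the audio thread
})();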
The JSFiddle (https://jsfiddle.net/kalyansai99/mm1b74uy/22/) contains code where the user can toggle between the front and back camera of a mobile device.
On a few phones it works fine (Moto G5 Plus, Moto E3 and so on - Chrome browser), and on a few others (Xiaomi Redmi Note 4 - Chrome browser), when I switch to the back camera, the stream initially loads with a track whose "readyState" is "live". But when I am about to play the stream in the video player, the "readyState" changes to "ended" and a black screen is shown on the video tag.
Not sure what's happening. Any clues?
JSFiddle Code
var player = document.getElementById('player');
var flipBtn = document.getElementById('flipBtn');
var deviceIdMap = {};
var front;
var constraints = {
audio: false,
video: {
frameRate: 1000
}
};
var gotDevices = function (deviceList) {
var length = deviceList.length;
console.log(deviceList);
for (var i = 0; i < length; i++) {
var deviceInfo = deviceList[i];
if (deviceInfo.kind === 'videoinput') {
if (deviceInfo.label.indexOf('front') !== -1) {
deviceIdMap.front = deviceInfo.deviceId;
} else if (deviceInfo.label.indexOf('back') !== -1) {
deviceIdMap.back = deviceInfo.deviceId;
}
}
}
if (deviceIdMap.front) {
constraints.video.deviceId = {exact: deviceIdMap.front};
front = true;
} else if (deviceIdMap.back) {
constraints.video.deviceId = {exact: deviceIdMap.back};
front = false;
}
console.log('deviceIdMap - ', deviceIdMap);
};
var handleError = function (error) {
console.log('navigator.getUserMedia error: ', error);
};
function handleSuccess(stream) {
window.stream = stream;
// this is a video track as there is no audio track
console.log("Track - ", window.stream.getTracks()[0]);
console.log('Ready State - ', window.stream.getTracks()[0].readyState);
if (window.URL) {
player.src = window.URL.createObjectURL(stream);
} else {
player.src = stream;
}
player.onloadedmetadata = function (e) {
console.log('Ready State - 3', window.stream.getTracks()[0].readyState);
player.play();
console.log('Ready State - 4', window.stream.getTracks()[0].readyState);
}
console.log('Ready State - 2', window.stream.getTracks()[0].readyState);
}
navigator.mediaDevices.enumerateDevices().then(gotDevices).catch(handleError);
flipBtn.addEventListener('click', function () {
if (window.stream) {
window.stream.getTracks().forEach(function(track) {
track.stop();
});
}
if (front) {
constraints.video.deviceId = {exact: deviceIdMap.back};
} else {
constraints.video.deviceId = {exact: deviceIdMap.front};
}
front = !front;
navigator.getUserMedia(constraints, handleSuccess, handleError);
}, false);
console.log(constraints);
navigator.getUserMedia(constraints, handleSuccess, handleError);
#player {
width: 320px;
}
#flipBtn {
width: 150px;
height: 50px;
}
<video id="player" autoplay></video>
<div>
<button id="flipBtn">
Flip Camera
</button>
</div>
Replace track.stop() with track.enabled = false, and when adding the track back to the stream, enable it again using track.enabled = true.
The MediaStreamTrack.readyState property changes to "ended" when we stop the track, and the track can never be used again. Therefore it's not wise to use stop(). For more reference:
https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack/readyState
https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack/stop
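Applied to the flip handler in the question, a minimal sketch (same variables as the fiddle):
flipBtn.addEventListener('click', function () {
    if (window.stream) {
        window.stream.getTracks().forEach(function (track) {
            track.enabled = false; // was track.stop(), which ends the track for good
        });
    }
    // ...then switch constraints.video.deviceId and re-acquire as before,
    // setting track.enabled = true on any track you want live again
}, false);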
I have multiple YouTube players on a single page inside a banner slider, and I want to use the YouTube Player API to control them and do other things based on the state of the videos. I have the code below, which I'm pretty sure used to work fine, where any state changes were registered. But it doesn't work for me anymore. The YouTube object is still there and I can use it to start and stop a video, but the onStateChange event never gets triggered. What is wrong with this code?
var YT_ready = (function() {
var onReady_funcs = [],
api_isReady = false;
return function(func, b_before) {
if (func === true) {
api_isReady = true;
for (var i = 0; i < onReady_funcs.length; i++) {
onReady_funcs.shift()();
}
}
else if (typeof func == "function") {
if (api_isReady) func();
else onReady_funcs[b_before ? "unshift" : "push"](func);
}
}
})();
function onYouTubePlayerAPIReady() {
YT_ready(true);
}
var players_homepage = {};
YT_ready(function() {
$("li.video iframe.youtube").each(function(event) {
var frameID_homepage = $(this).attr('id');
if (frameID_homepage) {
players_homepage[frameID_homepage] = new YT.Player(frameID_homepage, {
events: {
'onStateChange': onPlayerStateChange_homepage
}
});
}
});
});
(function(){
var tag = document.createElement('script');
tag.src = "//www.youtube.com/iframe_api";
var firstScriptTag = document.getElementsByTagName('script')[0];
firstScriptTag.parentNode.insertBefore(tag, firstScriptTag);
})();
function onPlayerStateChange_homepage(event) {
if (event.data === 0) {
// do something on end
} else if (event.data === 1) {
// do something on play
} else if (event.data === 2) {
// do something on pause
}
}
function pauseVideo_homepage(previousVideo) {
players_homepage[previousVideo].pauseVideo();
}
function playVideo_homepage(currentVideo) {
players_homepage[currentVideo].playVideo();
}
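One detail that often causes exactly this symptom, noted here as an assumption since the iframe markup isn't shown: for onStateChange to fire, each embedded iframe's src must opt in to the JS API with enablejsapi=1, e.g.:
<li class="video">
    <iframe id="yt-video-1" class="youtube" src="https://www.youtube.com/embed/VIDEO_ID?enablejsapi=1"></iframe>
</li>
(VIDEO_ID and the id value are placeholders; the id just has to match what the YT.Player constructor receives.)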
I need to record a video using the laptop camera on my website built with Node.js. For this I am using WebRTC. So far I can take a photo using the laptop camera, but I need to record a video. Could someone help as to how the code would go? My current code is as follows:
<video id="video"></video>
<button id="startbutton">Take photo</button>
<button id="pausebutton">Pause</button>
<button id="resumebutton">Resume</button>
<canvas id="canvas"></canvas>
<script type="text/javascript">
(function() {
var streaming = false,
video = document.querySelector('#video'),
canvas = document.querySelector('#canvas'),
//photo = document.querySelector('#photo'),
startbutton = document.querySelector('#startbutton'),
width = 620,
height = 50;
navigator.getMedia = ( navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia);
navigator.getMedia(
{
video: true,
audio: false
},
function(stream) {
if (navigator.mozGetUserMedia) {
video.mozSrcObject = stream;
} else {
var vendorURL = window.URL || window.webkitURL;
video.src = vendorURL.createObjectURL(stream);
}
video.play();
},
function(err) {
console.log("An error occured! " + err);
}
);
video.addEventListener('canplay', function(ev){
if (!streaming) {
height = video.videoHeight / (video.videoWidth/width);
video.setAttribute('width', width);
video.setAttribute('height', height);
canvas.setAttribute('width', width);
canvas.setAttribute('height', height);
streaming = true;
}
}, false);
function takepicture() {
canvas.width = width;
canvas.height = height;
canvas.getContext('2d').drawImage(video, 0, 0, width, height);
var data = canvas.toDataURL('image/png');
// photo.setAttribute('src', data);
}
function pausevideo() {
canvas.width = width;
canvas.height = height;
video.pause();
}
function resumevideo() {
canvas.width = width;
canvas.height = height;
video.play();
}
startbutton.addEventListener('click', function(ev){
takepicture();
ev.preventDefault();
}, false);
pausebutton.addEventListener('click', function(ev){
pausevideo();
ev.preventDefault();
}, false);
resumebutton.addEventListener('click', function(ev){
resumevideo();
ev.preventDefault();
}, false);
})();
</script>
I am not going to write code for you (you seem pretty capable if you have gotten this far), but here are some pointers to get you in the right direction.
Assign a global variable the value of the stream when you grab it (this way you can reuse the same stream in numerous functions).
Once you have the stream you can easily follow the tutorials at RecordRTC; there is a write-to-disk method that should help you out in downloading the recording.
If you have a stream, this is how to start using RecordRTC.
var options = {
type: 'video'
};
var recordRTC = RecordRTC(mediaStream, options);
recordRTC.startRecording();
recordRTC.stopRecording(function(videoURL) {
    mediaElement.src = videoURL; // plays the recorded blob URL in your <video> element
});
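To tie that into the snippet in the question, a sketch (assuming the RecordRTC script is loaded and the getUserMedia success callback stores the stream in a global, as suggested above):
var recorder; // global, next to the stream
function startRecording() {
    recorder = RecordRTC(window.stream, { type: 'video' });
    recorder.startRecording();
}
function stopRecording() {
    recorder.stopRecording(function() {
        var blob = recorder.getBlob();         // the recording as a Blob
        video.src = URL.createObjectURL(blob); // play it back in the existing <video>
        // RecordRTC's invokeSaveAsDialog(blob) can be used to offer a download
    });
}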