Recorded audio distorted in Safari 11 when using visualization with WebRTC

I've created a demo to show this: it seems that when I show a visualization, the recorded audio gets really crunchy. Is there any way I can fix this? It works in Chrome and Firefox; Safari is the only offender.
<style>
html, body {
margin: 0!important;
padding: 0!important;
overflow: hidden!important;
width: 100%;
}
</style>
<title>Audio Recording | RecordRTC</title>
<h1>Simple Audio Recording using RecordRTC</h1>
<meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0">
<br>
<button id="btn-start-recording">Start Recording</button>
<button id="btn-stop-recording" disabled>Stop Recording</button>
<label><input id="visualizer" type="checkbox" checked/>Show Visualizer</label>
<hr>
<canvas id="audio-canvas"></canvas>
<div><audio controls autoplay></audio></div>
<script src="https://cdn.webrtc-experiment.com/RecordRTC.js"></script>
<script>
var audio = document.querySelector('audio');
function captureMicrophone(callback) {
if(microphone) {
callback(microphone);
return;
}
if(typeof navigator.mediaDevices === 'undefined' || !navigator.mediaDevices.getUserMedia) {
alert('This browser does not support the WebRTC getUserMedia API.');
if(!!navigator.getUserMedia) {
alert('This browser seems to support only the deprecated getUserMedia API.');
}
return; // bail out: the promise-based API used below is unavailable
}
navigator.mediaDevices.getUserMedia({
audio: {
echoCancellation: false
}
}).then(function(mic) {
callback(mic);
}).catch(function(error) {
alert('Unable to capture your microphone. Please check console logs.');
console.error(error);
});
}
function replaceAudio(src) {
var newAudio = document.createElement('audio');
newAudio.controls = true;
if(src) {
newAudio.src = src;
}
var parentNode = audio.parentNode;
parentNode.innerHTML = '';
parentNode.appendChild(newAudio);
audio = newAudio;
}
function stopRecordingCallback() {
replaceAudio(URL.createObjectURL(recorder.getBlob()));
btnStartRecording.disabled = false;
setTimeout(function() {
if(!audio.paused) return;
setTimeout(function() {
if(!audio.paused) return;
audio.play();
}, 1000);
audio.play();
}, 300);
audio.play();
if(microphone) {
microphone.stop();
microphone = null;
}
}
var recorder; // globally accessible
var microphone;
var btnStartRecording = document.getElementById('btn-start-recording');
var btnStopRecording = document.getElementById('btn-stop-recording');
var canvas = document.getElementById('audio-canvas');
var visualizerCheckbox = document.getElementById('visualizer');
btnStartRecording.onclick = function() {
this.disabled = true;
this.style.border = '';
this.style.fontSize = '';
if (!microphone) {
captureMicrophone(function(mic) {
microphone = mic;
replaceAudio();
audio.muted = true;
audio.play();
btnStartRecording.disabled = false;
btnStartRecording.style.border = '1px solid red';
btnStartRecording.style.fontSize = '150%';
alert('Please click the Start Recording button again. The first click was only used to gain access to your microphone; the next one will record it.');
});
return;
}
replaceAudio();
audio.muted = true;
audio.play();
if(visualizerCheckbox.checked){
initScope(canvas);
}
var options = {
type: 'audio',
numberOfAudioChannels: 2,
checkForInactiveTracks: true,
bufferSize: 16384,
sampleRate: 48000
};
if(recorder) {
recorder.destroy();
recorder = null;
}
recorder = RecordRTC(microphone, options);
recorder.startRecording();
btnStopRecording.disabled = false;
};
btnStopRecording.onclick = function() {
this.disabled = true;
recorder.stopRecording(stopRecordingCallback);
};
function initScope(audioScopeCanvas) {
if (audioScopeCanvas) {
var analyser;
var mic;
var javascriptNode;
var canvasContext;
var audioContext = new (window.AudioContext || window.webkitAudioContext)(); // Safari still needs the webkit prefix
analyser = audioContext.createAnalyser();
mic = audioContext.createMediaStreamSource(microphone);
javascriptNode = audioContext.createScriptProcessor(2048, 1, 1);
analyser.smoothingTimeConstant = 0.3;
analyser.fftSize = 1024;
mic.connect(analyser);
analyser.connect(javascriptNode);
javascriptNode.connect(audioContext.destination);
//canvasContext = $("#canvas")[0].getContext("2d");
canvasContext = audioScopeCanvas.getContext("2d");
javascriptNode.onaudioprocess = function () {
var array = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteFrequencyData(array);
var values = 0;
var length = array.length;
for (var i = 0; i < length; i++) {
values += array[i];
}
var average = values / length;
canvasContext.clearRect(0, 0, 60, 130);
canvasContext.fillStyle = '#00ff00';
canvasContext.fillRect(0, 130 - average, 25, 130);
};
}
}
</script>
You can play with it here:
http://andrew.colchagoff.com/safari-record.html
I'm using RecordRTC.

Related

Fabric JS 2.4.1 ClipPath Crop Not Working with a Dynamically Created rect Mask

I am making an editor using Fabric.js 2.4.1 and have completed all functionality except for dynamic image cropping. The functionality involves drawing a rectangle with the mouse over an image and clicking a crop button.
I have successfully done a proof of concept with a statically created rectangle, but I can't get it to render in my dynamic code. I don't think the problem has to do with the dynamically created rect, but I can't seem to isolate it. It has to be something simple that I'm overlooking, and I think the problem might be in my crop button code.
document.getElementById("crop").addEventListener("click", function() {
if (target !== null && mask !== null) {
mask.setCoords();
target.clipPath = mask; // THIS LINE IS NOT WORKING!!!
//target.selectable = true;
target.setCoords();
console.log(target);
canvas.renderAll();
//canvas.remove(mask);
}
});
Here is a fiddle to the dynamic code that has the problem:
https://jsfiddle.net/Larry_Robertson/mqrv5fnt/
Here is a fiddle to the static code that I gained proof of concept from:
https://jsfiddle.net/Larry_Robertson/f34q67op/
Source code of Dynamic Version:
HTML
<canvas id="c" width="500" height="500" style="border:1px solid #ccc"></canvas>
<button id="crop">Crop</button>
JS
var canvas = new fabric.Canvas('c', {
selection: true
});
var rect, isDown, origX, origY, done, object, mask, target;
var src = "http://fabricjs.com/lib/pug.jpg";
fabric.Image.fromURL(src, function(img) {
img.selectable = false;
img.id = 'image';
object = img;
canvas.add(img);
});
canvas.on('object:added', function(e) {
target = null;
mask = null;
canvas.forEachObject(function(obj) {
//alert(obj.get('id'));
var id = obj.get('id');
if (id === 'image') {
target = obj;
}
if (id === 'mask') {
//alert('mask');
mask = obj;
}
});
});
document.getElementById("crop").addEventListener("click", function() {
if (target !== null && mask !== null) {
mask.setCoords();
target.clipPath = mask; // THIS LINE IS NOT WORKING!!!
//target.selectable = true;
target.setCoords();
console.log(target);
canvas.renderAll();
//canvas.remove(mask);
}
});
canvas.on('mouse:down', function(o) {
if (done) {
canvas.renderAll();
return;
}
isDown = true;
var pointer = canvas.getPointer(o.e);
origX = pointer.x;
origY = pointer.y;
rect = new fabric.Rect({
left: origX,
top: origY,
//originX: 'left',
//originY: 'top',
width: pointer.x - origX,
height: pointer.y - origY,
//angle: 0,
fill: 'rgba(255,0,0,0.3)',
transparentCorners: false,
//selectable: true,
id: 'mask'
});
canvas.add(rect);
canvas.renderAll();
});
canvas.on('mouse:move', function(o) {
if (done) {
canvas.renderAll();
return;
}
if (!isDown) return;
var pointer = canvas.getPointer(o.e);
if (origX > pointer.x) {
rect.set({
left: Math.abs(pointer.x)
});
}
if (origY > pointer.y) {
rect.set({
top: Math.abs(pointer.y)
});
}
rect.set({
width: Math.abs(origX - pointer.x)
});
rect.set({
height: Math.abs(origY - pointer.y)
});
canvas.renderAll();
});
canvas.on('mouse:up', function(o) {
if (done) {
canvas.renderAll();
return;
}
isDown = false;
//rect.selectable = true;
rect.set({
selectable: true
});
rect.setCoords();
canvas.setActiveObject(rect);
canvas.bringToFront(rect);
canvas.renderAll();
//alert(rect);
rect.setCoords();
object.clipPath = rect;
object.selectable = true;
object.setCoords();
canvas.renderAll();
//canvas.remove(rect);
done = true;
});
You need to set the dirty property on the image to true, so the object's cache will be re-rendered on the next render call.
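In isolation, the change to the crop handler is just the dirty flag (plus removing the mask rectangle once it has been applied); the full code follows below:
document.getElementById("crop").addEventListener("click", function() {
    if (target !== null && mask !== null) {
        mask.setCoords();
        target.clipPath = mask;
        target.dirty = true; // invalidate the image's object cache
        target.setCoords();
        canvas.remove(mask);
        canvas.renderAll();
    }
});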
Here is the fiddle:
https://jsfiddle.net/mqrv5fnt/115/
var canvas = new fabric.Canvas('c', {
selection: true
});
var rect, isDown, origX, origY, done, object, mask, target;
var src = "http://fabricjs.com/lib/pug.jpg";
fabric.Image.fromURL(src, function(img) {
img.selectable = false;
img.id = 'image';
object = img;
canvas.add(img);
});
canvas.on('object:added', function(e) {
target = null;
mask = null;
canvas.forEachObject(function(obj) {
//alert(obj.get('id'));
var id = obj.get('id');
if (id === 'image') {
target = obj;
}
if (id === 'mask') {
//alert('mask');
mask = obj;
}
});
});
document.getElementById("crop").addEventListener("click", function() {
if (target !== null && mask !== null) {
mask.setCoords();
target.clipPath = mask;
target.dirty = true; // invalidate the image's cache so the clip is rendered
//target.selectable = true;
target.setCoords();
canvas.remove(mask);
canvas.renderAll();
//canvas.remove(mask);
}
});
canvas.on('mouse:down', function(o) {
if (done) {
canvas.renderAll();
return;
}
isDown = true;
var pointer = canvas.getPointer(o.e);
origX = pointer.x;
origY = pointer.y;
rect = new fabric.Rect({
left: origX,
top: origY,
//originX: 'left',
//originY: 'top',
width: pointer.x - origX,
height: pointer.y - origY,
//angle: 0,
fill: 'rgba(255,0,0,0.3)',
transparentCorners: false,
//selectable: true,
id: 'mask'
});
canvas.add(rect);
canvas.renderAll();
});
canvas.on('mouse:move', function(o) {
if (done) {
canvas.renderAll();
return;
}
if (!isDown) return;
var pointer = canvas.getPointer(o.e);
if (origX > pointer.x) {
rect.set({
left: Math.abs(pointer.x)
});
}
if (origY > pointer.y) {
rect.set({
top: Math.abs(pointer.y)
});
}
rect.set({
width: Math.abs(origX - pointer.x)
});
rect.set({
height: Math.abs(origY - pointer.y)
});
canvas.renderAll();
});
canvas.on('mouse:up', function(o) {
if (done) {
canvas.renderAll();
return;
}
isDown = false;
//rect.selectable = true;
rect.set({
selectable: true
});
rect.setCoords();
canvas.setActiveObject(rect);
canvas.bringToFront(rect);
canvas.renderAll();
//alert(rect);
rect.setCoords();
object.clipPath = rect;
object.selectable = true;
object.setCoords();
canvas.renderAll();
//canvas.remove(rect);
done = true;
});

MediaStreamTrack's readyState is getting changed to "ended" just before playing the stream (MediaStream - MediaStreamTrack - WebRTC)

The jsfiddle (https://jsfiddle.net/kalyansai99/mm1b74uy/22/) contains code where the user can toggle between the front and back camera of a mobile device.
On some phones (Moto G5 Plus, Moto E3, and so on - Chrome browser) it works fine, but on others (Mi Redmi Note 4 - Chrome browser), when I switch to the back camera the stream initially loads with a track whose readyState is "live". But when I am about to play the stream in the video player, the readyState changes to "ended" and a black screen is shown on the video tag.
Not sure what's happening. Any clues?
JSFiddle Code
var player = document.getElementById('player');
var flipBtn = document.getElementById('flipBtn');
var deviceIdMap = {};
var front;
var constraints = {
audio: false,
video: {
frameRate: 1000
}
};
var gotDevices = function (deviceList) {
var length = deviceList.length;
console.log(deviceList);
for (var i = 0; i < length; i++) {
var deviceInfo = deviceList[i];
if (deviceInfo.kind === 'videoinput') {
if (deviceInfo.label.indexOf('front') !== -1) {
deviceIdMap.front = deviceInfo.deviceId;
} else if (deviceInfo.label.indexOf('back') !== -1) {
deviceIdMap.back = deviceInfo.deviceId;
}
}
}
if (deviceIdMap.front) {
constraints.video.deviceId = {exact: deviceIdMap.front};
front = true;
} else if (deviceIdMap.back) {
constraints.video.deviceId = {exact: deviceIdMap.back};
front = false;
}
console.log('deviceIdMap - ', deviceIdMap);
};
var handleError = function (error) {
console.log('navigator.getUserMedia error: ', error);
};
function handleSuccess(stream) {
window.stream = stream;
// this is a video track as there is no audio track
console.log("Track - ", window.stream.getTracks()[0]);
console.log('Ready State - ', window.stream.getTracks()[0].readyState);
if (window.URL) {
player.src = window.URL.createObjectURL(stream);
} else {
player.src = stream;
}
player.onloadedmetadata = function (e) {
console.log('Ready State - 3', window.stream.getTracks()[0].readyState);
player.play();
console.log('Ready State - 4', window.stream.getTracks()[0].readyState);
}
console.log('Ready State - 2', window.stream.getTracks()[0].readyState);
}
navigator.mediaDevices.enumerateDevices().then(gotDevices).catch(handleError);
flipBtn.addEventListener('click', function () {
if (window.stream) {
window.stream.getTracks().forEach(function(track) {
track.stop();
});
}
if (front) {
constraints.video.deviceId = {exact: deviceIdMap.back};
} else {
constraints.video.deviceId = {exact: deviceIdMap.front};
}
front = !front;
navigator.getUserMedia(constraints, handleSuccess, handleError);
}, false);
console.log(constraints);
navigator.getUserMedia(constraints, handleSuccess, handleError);
#player {
width: 320px;
}
#flipBtn {
width: 150px;
height: 50px;
}
<video id="player" autoplay></video>
<div>
<button id="flipBtn">
Flip Camera
</button>
</div>
Replace track.stop() with track.enabled = false and, when you want the track active again, re-enable it with track.enabled = true.
A MediaStreamTrack's readyState property changes to "ended" when you stop the track, and an ended track can never be used again, so it's not wise to call stop() here. For more reference:
https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack/readyState
https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack/stop
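A minimal sketch of that suggestion, applied to the fiddle's flip handler (the helper name is mine, not from the original code):
function setVideoEnabled(stream, enabled) {
    stream.getVideoTracks().forEach(function(track) {
        // A disabled track renders black frames, but its readyState stays
        // "live", so it can be re-enabled later with track.enabled = true.
        track.enabled = enabled;
    });
}
setVideoEnabled(window.stream, false); // instead of track.stop() in the flip handler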

How to record a video using WebRTC

I need to record a video using the laptop camera on my website, which is built with Node.js. For this I am using WebRTC. So far I can take a photo using the laptop camera, but I need to record a video. Could someone help with how the code should go? My current code is as follows:
<video id="video"></video>
<button id="startbutton">Take photo</button>
<button id="pausebutton">Pause</button>
<button id="resumebutton">Resume</button>
<canvas id="canvas"></canvas>
<script type="text/javascript">
(function() {
var streaming = false,
video = document.querySelector('#video'),
canvas = document.querySelector('#canvas'),
//photo = document.querySelector('#photo'),
startbutton = document.querySelector('#startbutton'),
width = 620,
height = 50;
navigator.getMedia = ( navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia);
navigator.getMedia(
{
video: true,
audio: false
},
function(stream) {
if (navigator.mozGetUserMedia) {
video.mozSrcObject = stream;
} else {
var vendorURL = window.URL || window.webkitURL;
video.src = vendorURL.createObjectURL(stream);
}
video.play();
},
function(err) {
console.log("An error occured! " + err);
}
);
video.addEventListener('canplay', function(ev){
if (!streaming) {
height = video.videoHeight / (video.videoWidth/width);
video.setAttribute('width', width);
video.setAttribute('height', height);
canvas.setAttribute('width', width);
canvas.setAttribute('height', height);
streaming = true;
}
}, false);
function takepicture() {
canvas.width = width;
canvas.height = height;
canvas.getContext('2d').drawImage(video, 0, 0, width, height);
var data = canvas.toDataURL('image/png');
// photo.setAttribute('src', data);
}
function pausevideo() {
canvas.width = width;
canvas.height = height;
video.pause();
}
function resumevideo() {
canvas.width = width;
canvas.height = height;
video.play();
}
startbutton.addEventListener('click', function(ev){
takepicture();
ev.preventDefault();
}, false);
pausebutton.addEventListener('click', function(ev){
pausevideo();
ev.preventDefault();
}, false);
resumebutton.addEventListener('click', function(ev){
resumevideo();
ev.preventDefault();
}, false);
})();
</script>
I am not going to write code for you (you seem pretty capable if you have gotten this far), but here are some pointers to get you in the right direction.
Assign the stream to a global variable when you grab it (this way you can reuse the same stream in numerous functions); see the sketch below.
Once you have the stream you can easily follow the tutorials at RTC-Recording; there is a write-to-disk method that should help you out in downloading the recording.
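For the first pointer, a rough sketch using the promise-based getUserMedia (the video variable refers to the question's <video id="video"> element):
var mediaStream; // global, shared by the preview, snapshot and recording code
navigator.mediaDevices.getUserMedia({ video: true, audio: false })
    .then(function(stream) {
        mediaStream = stream;
        video.srcObject = stream; // modern replacement for createObjectURL(stream)
        video.play();
    })
    .catch(function(err) {
        console.log("An error occurred! " + err);
    });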
If you have a stream, this is how to start using RecordRTC.
var options = {
type: 'video'
};
var recordRTC = RecordRTC(mediaStream, options);
recordRTC.startRecording();
recordRTC.stopRecording(function(videoURL) {
mediaElement.src = videoURL; //plays the recorded blob url on that src
});
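If you need the recording as data rather than just a playback URL, RecordRTC also exposes getBlob() (used in the first question above), which returns the recording as a Blob you can upload or download; the .webm filename below is an assumption about the container the browser produced:
recordRTC.stopRecording(function() {
    var blob = recordRTC.getBlob(); // the recorded video as a Blob
    var a = document.createElement('a');
    a.href = URL.createObjectURL(blob);
    a.download = 'recording.webm'; // assumed container; adjust to the blob's type
    document.body.appendChild(a);
    a.click();
});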

Web Audio API record function

I have the following code from http://forestmist.org/blog/web-audio-api-loops/
It works well, but I need record functionality that will capture whatever loops were previously clicked and store that audio too. Any help?
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Web Audio API Loops Demo</title>
</head>
<body>
<form>
<button id="button-loop-1" type="button" value="1">Loop 1</button>
<button id="button-loop-2" type="button" value="2">Loop 2</button>
</form>
<script>
//--------------
// Audio Object
//--------------
var audio = {
buffer: {},
compatibility: {},
files: [
'synth.wav',
'beat.wav'
],
proceed: true,
source_loop: {},
source_once: {}
};
//-----------------
// Audio Functions
//-----------------
audio.findSync = function(n) {
var first = 0,
current = 0,
offset = 0;
// Find the audio source with the earliest startTime to sync all others to
for (var i in audio.source_loop) {
current = audio.source_loop[i]._startTime;
if (current > 0) {
if (current < first || first === 0) {
first = current;
}
}
}
if (audio.context.currentTime > first) {
offset = (audio.context.currentTime - first) % audio.buffer[n].duration;
}
return offset;
};
audio.play = function(n) {
if (audio.source_loop[n]._playing) {
audio.stop(n);
} else {
audio.source_loop[n] = audio.context.createBufferSource();
audio.source_loop[n].buffer = audio.buffer[n];
audio.source_loop[n].loop = true;
audio.source_loop[n].connect(audio.context.destination);
var offset = audio.findSync(n);
audio.source_loop[n]._startTime = audio.context.currentTime;
if (audio.compatibility.start === 'noteOn') {
/*
The deprecated noteOn() function does not support offsets.
Compensate by using noteGrainOn() with an offset to play once and then schedule a noteOn() call to loop after that.
*/
audio.source_once[n] = audio.context.createBufferSource();
audio.source_once[n].buffer = audio.buffer[n];
audio.source_once[n].connect(audio.context.destination);
audio.source_once[n].noteGrainOn(0, offset, audio.buffer[n].duration - offset); // currentTime, offset, duration
/*
Note about the third parameter of noteGrainOn().
If your sound is 10 seconds long, your offset 5 and duration 5 then you'll get what you expect.
If your sound is 10 seconds long, your offset 5 and duration 10 then the sound will play from the start instead of the offset.
*/
// Now queue up our looping sound to start immediately after the source_once audio plays.
audio.source_loop[n][audio.compatibility.start](audio.context.currentTime + (audio.buffer[n].duration - offset));
} else {
audio.source_loop[n][audio.compatibility.start](0, offset);
}
audio.source_loop[n]._playing = true;
}
};
audio.stop = function(n) {
if (audio.source_loop[n]._playing) {
audio.source_loop[n][audio.compatibility.stop](0);
audio.source_loop[n]._playing = false;
audio.source_loop[n]._startTime = 0;
if (audio.compatibility.start === 'noteOn') {
audio.source_once[n][audio.compatibility.stop](0);
}
}
};
//-----------------------------
// Check Web Audio API Support
//-----------------------------
try {
// More info at http://caniuse.com/#feat=audio-api
window.AudioContext = window.AudioContext || window.webkitAudioContext;
audio.context = new window.AudioContext();
} catch(e) {
audio.proceed = false;
alert('Web Audio API not supported in this browser.');
}
if (audio.proceed) {
//---------------
// Compatibility
//---------------
(function() {
var start = 'start',
stop = 'stop',
buffer = audio.context.createBufferSource();
if (typeof buffer.start !== 'function') {
start = 'noteOn';
}
audio.compatibility.start = start;
if (typeof buffer.stop !== 'function') {
stop = 'noteOff';
}
audio.compatibility.stop = stop;
})();
//-------------------------------
// Setup Audio Files and Buttons
//-------------------------------
for (var a in audio.files) {
(function() {
var i = parseInt(a) + 1;
var req = new XMLHttpRequest();
req.open('GET', audio.files[i - 1], true); // array starts with 0 hence the -1
req.responseType = 'arraybuffer';
req.onload = function() {
audio.context.decodeAudioData(
req.response,
function(buffer) {
audio.buffer[i] = buffer;
audio.source_loop[i] = {};
var button = document.getElementById('button-loop-' + i);
button.addEventListener('click', function(e) {
e.preventDefault();
audio.play(this.value);
});
},
function() {
console.log('Error decoding audio "' + audio.files[i - 1] + '".');
}
);
};
req.send();
})();
}
}
</script>
</body>
</html>
Check out Recorder.js (https://github.com/mattdiamond/Recorderjs). It should help you out.
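A minimal sketch of wiring Recorder.js into the loop demo above, assuming recorder.js and recorderWorker.js are served alongside the page: route every loop through a shared GainNode and record that node, instead of connecting the sources straight to audio.context.destination.
// One-time setup after audio.context is created:
audio.recordNode = audio.context.createGain();
audio.recordNode.connect(audio.context.destination);
var recorder = new Recorder(audio.recordNode, { workerPath: 'recorderWorker.js' });

// In audio.play(), connect sources to the tap instead of the destination:
//   audio.source_loop[n].connect(audio.recordNode);

recorder.record(); // start capturing everything routed through recordNode
// ... user toggles loops ...
recorder.stop();
recorder.exportWAV(function(blob) {
    var url = URL.createObjectURL(blob); // WAV of the mixed loops
    console.log('Recording ready:', url);
});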

YouTube in UIWebView only plays once

I have a UIWebView with some YouTube videos embedded via the iframe code:
<iframe width="190" height="102" src="http://www.youtube.com/embed/...?showinfo=0" frameborder="0" allowfullscreen></iframe>
When it first loads, it is possible to view each video exactly once. After viewing a video, its area is just black with a white "YouTube" in it.
Any ideas? Of course reloading the UIWebView after watching a video fixes it, but I don't like that...
I did it! The following JavaScript did the job: it loads the YouTube iframe API, registers an onStateChange handler for every embedded player, and reloads the iframe's contentWindow once a video reaches YT.PlayerState.ENDED, so the embed can be watched again.
<script>
VideoIDs = new Array(...some ids here...);
function getFrameID(id){
var elem = document.getElementById(id);
if (elem) {
if(/^iframe$/i.test(elem.tagName))
return id;
var elems = elem.getElementsByTagName("iframe");
if (!elems.length)
return null;
for (var i=0; i<elems.length; i++) {
if (/^https?:\/\/(?:www\.)?youtube(?:-nocookie)?\.com(\/|$)/i.test(elems[i].src))
break;
}
elem = elems[i];
if (elem.id)
return elem.id;
do {
id += "-frame";
} while (document.getElementById(id));
elem.id = id;
return id;
}
return null;
}
var YT_ready = (function(){
var onReady_funcs = [], api_isReady = false;
return function(func, b_before){
if (func === true) {
api_isReady = true;
for (var i=0; i<onReady_funcs.length; i++){
onReady_funcs.shift()();
}
} else if(typeof func == "function") {
if (api_isReady)
func();
else
onReady_funcs[b_before?"unshift":"push"](func);
}
}
})();
function onYouTubePlayerAPIReady() {
YT_ready(true)
}
(function(){
var s = document.createElement("script");
s.src = "http://www.youtube.com/player_api";
var before = document.getElementsByTagName("script")[0];
before.parentNode.insertBefore(s, before);
})();
var players = new Array();
YT_ready(function() {
for(index in VideoIDs) {
var frameID = getFrameID(VideoIDs[index]);
if (frameID) {
players[frameID] = new YT.Player(frameID, {
events: {
"onStateChange": stateChange
}
});
}
}
});
function youtube_parser(url){
var regExp = /^.*((youtu.be\/)|(v\/)|(\/u\/\w\/)|(embed\/)|(watch\?))\??v?=?([^#\&\?]*).*/;
var match = url.match(regExp);
if (match&&match[7].length==11){
return match[7];
}
}
function stateChange(event) {
if(event.data == YT.PlayerState.ENDED){
document.location = 'callback:finished';
document.getElementById(youtube_parser(event.target.getVideoUrl())).contentWindow.location.reload(true);
}
}
</script>