Using MediaPipe Selfie Segmentation (JavaScript) to pass a user's selfie as a texture in Networked-Aframe for multiplayer experiences

My goal is a multiplayer 3D environment where each person is represented by a cube textured with their selfie, background removed: a kind of cheap virtual production without a chroma-key background. The background is removed with MediaPipe Selfie Segmentation. The issue is that instead of showing the other player's texture on the cube (P1 should see P2, and P2 should see P1), each player sees their own selfie. That is, P1 sees P1 and P2 sees P2, which is wrong.
Live demo: https://vrodos-multiplaying.iti.gr/plain_aframe_mediapipe_testbed.html
Instructions: You need two machines to test it (desktops, laptops, or mobiles). Use only Chrome, as MediaPipe does not work in other browsers. If the webpage jams, reload it (MediaPipe sometimes gets stuck). At least two machines must load the webpage in order to start the multiplayer environment.
Code:
<html>
<head>
<script src="https://aframe.io/releases/1.2.0/aframe.min.js"></script>
<!-- Selfie Segmentation of Mediapipe -->
<link rel="stylesheet" type="text/css" href="https://cdn.jsdelivr.net/npm/#mediapipe/control_utils#0.6/control_utils.css" crossorigin="anonymous">
<script src="https://cdn.jsdelivr.net/npm/#mediapipe/camera_utils#0.3/camera_utils.js" crossorigin="anonymous"></script>
<script src="https://cdn.jsdelivr.net/npm/#mediapipe/control_utils#0.6/control_utils.js" crossorigin="anonymous"></script>
<script src="https://cdn.jsdelivr.net/npm/#mediapipe/drawing_utils#0.3/drawing_utils.js" crossorigin="anonymous"></script>
<script src="https://cdn.jsdelivr.net/npm/#mediapipe/selfie_segmentation#0.1/selfie_segmentation.js" crossorigin="anonymous"></script>
<!-- Networked A-frame -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/socket.io/2.3.0/socket.io.slim.js"></script>
<script src="/easyrtc/easyrtc.js"></script>
<script src="https://unpkg.com/networked-aframe/dist/networked-aframe.min.js"></script>
</head>
<body>
<a-scene networked-scene="
adapter: easyrtc;
video: true;
debug: true;
connectOnLoad: true;">
<a-assets>
<template id="avatar-template">
<a-box material="alphaTest: 0.5; transparent: true; side: double;"
width="1"
height="1"
position="0 0 0" rotation="0 0 0"
networked-video-source-mediapiped></a-box>
</template>
</a-assets>
<a-entity id="player"
networked="template:#avatar-template;attachTemplateToLocal:false;"
camera wasd-controls look-controls>
</a-entity>
<a-plane position="0 -2 -4" rotation="-90 0 0" width="4" height="4" color="#7BC8A4" ></a-plane>
<a-sky color="#777"></a-sky>
</a-scene>
</body>
<script>
// Networked-Aframe: register a new component for streaming the Selfie Segmentation video
AFRAME.registerComponent('networked-video-source-mediapiped', {
schema: {
},
dependencies: ['material'],
init: function () {
this.videoTexture = null;
this.video = null;
this.stream = null;
this._setMediaStream = this._setMediaStream.bind(this);
NAF.utils.getNetworkedEntity(this.el).then((networkedEl) => {
const ownerId = networkedEl.components.networked.data.owner;
if (ownerId) {
NAF.connection.adapter.getMediaStream(ownerId, "video")
.then(this._setMediaStream)
.catch((e) => NAF.log.error(`Error getting media stream for ${ownerId}`, e));
} else {
// Correctly configured local entity, perhaps do something here for enabling debug audio loopback
}
});
},
_setMediaStream(newStream) {
if(!this.video) {
this.setupVideo();
}
if(newStream != this.stream) {
if (this.stream) {
this._clearMediaStream();
}
if (newStream) {
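// NOTE: the next line assigns the local segmentation canvas, not
// newStream -- which is why each player ends up seeing their own selfie.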
this.video.srcObject = canvasElement.captureStream(30);
this.videoTexture = new THREE.VideoTexture(this.video);
this.videoTexture.format = THREE.RGBAFormat;
// Mesh to send
const mesh = this.el.getObject3D('mesh');
mesh.material.map = this.videoTexture;
mesh.material.needsUpdate = true;
}
this.stream = newStream;
}
},
_clearMediaStream() {
this.stream = null;
if (this.videoTexture) {
if (this.videoTexture.image instanceof HTMLVideoElement) {
// Note: this.videoTexture.image === this.video
const video = this.videoTexture.image;
video.pause();
video.srcObject = null;
video.load();
}
this.videoTexture.dispose();
this.videoTexture = null;
}
},
remove: function() {
this._clearMediaStream();
},
setupVideo: function() {
if (!this.video) {
const video = document.createElement('video');
video.setAttribute('autoplay', true);
video.setAttribute('playsinline', true);
video.setAttribute('muted', true);
this.video = video;
}
}
});
// ----- Mediapipe ------
const controls = window;
//const mpSelfieSegmentation = window;
const examples = {
images: [],
// {name: 'name', src: 'https://url.com'},
videos: [],
};
const fpsControl = new controls.FPS();
let activeEffect = 'background';
const controlsElement = document.createElement('control-panel');
var canvasElement = document.createElement('canvas');
canvasElement.height= 1000;
canvasElement.width = 1000;
var canvasCtx = canvasElement.getContext('2d');
// --------
function drawResults(results) {
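// Draw the person mask first, then composite the camera frame through it
// with 'source-in' so only the person's pixels remain (background removed).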
canvasCtx.clearRect(0, 0, canvasElement.width, canvasElement.height);
canvasCtx.drawImage(results.segmentationMask, 0, 0, canvasElement.width, canvasElement.height);
canvasCtx.globalCompositeOperation = 'source-in';
canvasCtx.drawImage(results.image, 0, 0, canvasElement.width, canvasElement.height);
}
const selfieSegmentation = new SelfieSegmentation({
locateFile: (file) => {
console.log(file);
return `https://cdn.jsdelivr.net/npm/@mediapipe/selfie_segmentation@0.1/${file}`;
}
});
selfieSegmentation.onResults(drawResults);
// -------------
new controls
.ControlPanel(controlsElement, {
selfieMode: true,
modelSelection: 1,
effect: 'background',
})
.add([
new controls.StaticText({title: 'MediaPipe Selfie Segmentation'}),
fpsControl,
new controls.Toggle({title: 'Selfie Mode', field: 'selfieMode'}),
new controls.SourcePicker({
onSourceChanged: () => {
selfieSegmentation.reset();
},
onFrame: async (input, size) => {
const aspect = size.height / size.width;
let width, height;
if (window.innerWidth > window.innerHeight) {
height = window.innerHeight;
width = height / aspect;
} else {
width = window.innerWidth;
height = width * aspect;
}
canvasElement.width = width;
canvasElement.height = height;
await selfieSegmentation.send({image: input});
},
examples: examples
}),
new controls.Slider({
title: 'Model Selection',
field: 'modelSelection',
discrete: ['General', 'Landscape'],
}),
new controls.Slider({
title: 'Effect',
field: 'effect',
discrete: {'background': 'Background', 'mask': 'Foreground'},
}),
])
.on(x => {
const options = x;
//videoElement.classList.toggle('selfie', options.selfieMode);
activeEffect = x['effect'];
selfieSegmentation.setOptions(options);
});
</script>
</html>
Screenshot:
Here Player 1 sees Player 1's stream instead of Player 2's stream (Grrrrrr):

Well, I found it. The problem was that a MediaStream can have many video tracks. See my answer here:
https://github.com/networked-aframe/networked-aframe/issues/269
Unfortunately networked-aframe's EasyRtcAdapter does not support multiple MediaStreams, but it is easy to add a second video track and then read videoTracks[1] instead of videoTracks[0]. I should write a custom EasyRtcAdapter to avoid sending two video tracks and overstressing the bandwidth.
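A minimal sketch of that workaround, assuming the camera stream is the one EasyRTC already shares; the setLocalMediaStream hook below is an assumption, since the exact way to hand the combined stream to the adapter depends on the adapter version:

// Sender: append the segmented canvas as a second video track, so the raw
// camera stays at videoTracks[0] and the selfie cut-out rides along at [1].
const canvasTrack = canvasElement.captureStream(30).getVideoTracks()[0];
navigator.mediaDevices.getUserMedia({video: true, audio: true}).then((localStream) => {
  localStream.addTrack(canvasTrack); // videoTracks[1] = segmented selfie
  NAF.connection.adapter.setLocalMediaStream(localStream); // hypothetical hook
});

// Receiver: inside _setMediaStream, wrap the second track in its own stream
// instead of assigning the local canvas capture:
const segmentedTrack = newStream.getVideoTracks()[1];
this.video.srcObject = new MediaStream([segmentedTrack]);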

Related

Recorded audio distorted in Safari 11 when using visualization with WebRTC

I've created a demo to show this: it seems that when I show a visualization, the recorded audio gets really crunchy. Is there any way I can fix this? It works in Chrome and Firefox; Safari is the only offender.
<style>
html, body {
margin: 0!important;
padding: 0!important;
overflow: hidden!important;
width: 100%;
}
</style>
<title>Audio Recording | RecordRTC</title>
<h1>Simple Audio Recording using RecordRTC</h1>
<meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0">
<br>
<button id="btn-start-recording">Start Recording</button>
<button id="btn-stop-recording" disabled>Stop Recording</button>
<label><input id="visualizer" type="checkbox" checked/>Show Visualizer</label>
<hr>
<canvas id="audio-canvas"></canvas>
<div><audio controls autoplay></audio></div>
<script src="https://cdn.webrtc-experiment.com/RecordRTC.js"></script>
<script>
var audio = document.querySelector('audio');
function captureMicrophone(callback) {
if(microphone) {
callback(microphone);
return;
}
if(typeof navigator.mediaDevices === 'undefined' || !navigator.mediaDevices.getUserMedia) {
alert('This browser does not support the WebRTC getUserMedia API.');
if(!!navigator.getUserMedia) {
alert('This browser seems to support the deprecated getUserMedia API.');
}
}
navigator.mediaDevices.getUserMedia({
audio: {
echoCancellation: false
}
}).then(function(mic) {
callback(mic);
}).catch(function(error) {
alert('Unable to capture your microphone. Please check console logs.');
console.error(error);
});
}
function replaceAudio(src) {
var newAudio = document.createElement('audio');
newAudio.controls = true;
if(src) {
newAudio.src = src;
}
var parentNode = audio.parentNode;
parentNode.innerHTML = '';
parentNode.appendChild(newAudio);
audio = newAudio;
}
function stopRecordingCallback() {
replaceAudio(URL.createObjectURL(recorder.getBlob()));
btnStartRecording.disabled = false;
setTimeout(function() {
if(!audio.paused) return;
setTimeout(function() {
if(!audio.paused) return;
audio.play();
}, 1000);
audio.play();
}, 300);
audio.play();
if(microphone) {
microphone.stop();
microphone = null;
}
}
var recorder; // globally accessible
var microphone;
var btnStartRecording = document.getElementById('btn-start-recording');
var btnStopRecording = document.getElementById('btn-stop-recording');
var canvas = document.getElementById('audio-canvas');
var visualizerCheckbox = document.getElementById('visualizer');
btnStartRecording.onclick = function() {
this.disabled = true;
this.style.border = '';
this.style.fontSize = '';
if (!microphone) {
captureMicrophone(function(mic) {
microphone = mic;
replaceAudio();
audio.muted = true;
audio.play();
btnStartRecording.disabled = false;
btnStartRecording.style.border = '1px solid red';
btnStartRecording.style.fontSize = '150%';
alert('Please click startRecording button again. First time we tried to access your microphone. Now we will record it.');
});
return;
}
replaceAudio();
audio.muted = true;
audio.play();
if(visualizerCheckbox.checked){
initScope(canvas);
}
var options = {
type: 'audio',
numberOfAudioChannels: 2,
checkForInactiveTracks: true,
bufferSize: 16384,
sampleRate: 48000
};
if(recorder) {
recorder.destroy();
recorder = null;
}
recorder = RecordRTC(microphone, options);
recorder.startRecording();
btnStopRecording.disabled = false;
};
btnStopRecording.onclick = function() {
this.disabled = true;
recorder.stopRecording(stopRecordingCallback);
};
function initScope(audioScopeCanvas) {
if (audioScopeCanvas) {
var analyser;
var mic;
var javascriptNode;
var canvasContext;
var audioContext = new (window.AudioContext || window.webkitAudioContext)(); // Safari needs the webkit prefix
analyser = audioContext.createAnalyser();
mic = audioContext.createMediaStreamSource(microphone);
javascriptNode = audioContext.createScriptProcessor(2048, 1, 1);
analyser.smoothingTimeConstant = 0.3;
analyser.fftSize = 1024;
mic.connect(analyser);
analyser.connect(javascriptNode);
javascriptNode.connect(audioContext.destination);
//canvasContext = $("#canvas")[0].getContext("2d");
canvasContext = audioScopeCanvas.getContext("2d");
javascriptNode.onaudioprocess = function () {
var array = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteFrequencyData(array);
var values = 0;
var length = array.length;
for (var i = 0; i < length; i++) {
values += array[i];
}
var average = values / length;
canvasContext.clearRect(0, 0, 60, 130);
canvasContext.fillStyle = '#00ff00';
canvasContext.fillRect(0, 130 - average, 25, 130);
};
}
}
</script>
You can play with it here:
http://andrew.colchagoff.com/safari-record.html
I'm using RecordRTC.
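One direction worth trying (an assumption on my part, not something confirmed for Safari 11): drive the visualizer with requestAnimationFrame and a plain AnalyserNode, so no ScriptProcessorNode is attached to the audio graph while recording:

function initScope(audioScopeCanvas) {
  var audioContext = new (window.AudioContext || window.webkitAudioContext)();
  var analyser = audioContext.createAnalyser();
  analyser.smoothingTimeConstant = 0.3;
  analyser.fftSize = 1024;
  audioContext.createMediaStreamSource(microphone).connect(analyser); // nothing connected to destination
  var canvasContext = audioScopeCanvas.getContext('2d');
  var data = new Uint8Array(analyser.frequencyBinCount);
  (function draw() {
    analyser.getByteFrequencyData(data);
    var sum = 0;
    for (var i = 0; i < data.length; i++) sum += data[i];
    var average = sum / data.length;
    canvasContext.clearRect(0, 0, 60, 130);
    canvasContext.fillStyle = '#00ff00';
    canvasContext.fillRect(0, 130 - average, 25, 130);
    requestAnimationFrame(draw); // repaint on the render loop, not the audio thread
  })();
}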

Object not moving properly with mouse movement on mobile devices

I am trying to move an object with the mouse, using the pressmove event on the object. On PC and tablet it works well, but on mobile devices it does not, as those have a different width and height.
You can view the following URL:
http://quirktools.com/screenfly/#u=http://saurabhysecit.byethost15.com/scratchGame_Canvas.html&w=320&h=568&a=37&s=1
The direct URL is http://saurabhysecit.byethost15.com/scratchGame_Canvas.html
It has been generated from Animate CC.
The code is below.
JS code:
(function (lib, img, cjs, ss, an) {
var p; // shortcut to reference prototypes
lib.ssMetadata = [];
function mc_symbol_clone() {
var clone = this._cloneProps(new this.constructor(this.mode, this.startPosition, this.loop));
clone.gotoAndStop(this.currentFrame);
clone.paused = this.paused;
clone.framerate = this.framerate;
return clone;
}
function getMCSymbolPrototype(symbol, nominalBounds, frameBounds) {
var prototype = cjs.extend(symbol, cjs.MovieClip);
prototype.clone = mc_symbol_clone;
prototype.nominalBounds = nominalBounds;
prototype.frameBounds = frameBounds;
return prototype;
}
(lib.coin = function(mode,startPosition,loop) {
this.initialize(mode,startPosition,loop,{});
// Layer 1
this.shape = new cjs.Shape();
this.shape.graphics.f().s("#AA8801").ss(1,2,1).p("AH5AAQAACrhhCBQgIAKgIAKQgQAUgTASQiKCLjAAKQgNAAgOAAQi5AAiKh2QgRgOgQgRQiUiTAAjTQAAjDCBiOIAAgBQAJgKAKgKQAUgUAVgSQB9hlCigIQAOgBAOAAQAaAAAZACQAHAAAHABQCoAUB8B9QATASAQAUQBtCEAECvQAAADAAACQAAAFAAADg");
this.shape.setTransform(-4.8,0.1);
this.shape_1 = new cjs.Shape();
this.shape_1.graphics.f("#D9AE01").s().p("AlbGiQgTgQgSgSQigieAAjiQAAjhCgifQAWgWAXgTQCRh2DCgBQAdABAbACQC+ARCLCMQAUATASAWQB6CTAADEQAADGh6CSQgSAVgUATQiUCWjQAKIgdAAQjHAAiUh+gAgqnnQijAIh8BmQgVARgUAUIgUAUIAAABQiBCOAADEQAADSCVCTQAQARARAOQCJB2C6AAIAbAAQDAgJCJiMQATgSAQgUIAQgUQBhiBAAiqIAAgJIAAgGQgEithtiFQgQgTgTgTQh7h9ipgTIAAgDQgYgCgaAAQgVAAgVACg");
this.shape_1.setTransform(-3.3,-1.7);
this.shape_2 = new cjs.Shape();
this.shape_2.graphics.f("#FFCC00").s().p("AlDGFQgQgOgQgQQiViUAAjSQAAjDCBiPIAAAAIAUgVQAUgUAUgRQB9hmCigIQAWgCAVAAQAZAAAZACIAAADIgNgCQgZgCgbAAIgcABIAcgBQAbAAAZACIANACQCoATB8B9QATATAQAUQBsCEAFCvIAAAFIAAAIQAACqhhCCIgQATQgQAUgTATQiKCLjAAJIgbABQi5AAiKh3g");
this.shape_2.setTransform(-4.8,0.1);
this.shape_3 = new cjs.Shape();
this.shape_3.graphics.f("#695400").s().p("Ah6I1IgBAAQjHgKiUiJQCUB+DIAAIAeAAQDOgKCUiWQAUgTASgWQB6iSAAjFQAAjEh6iTQgSgWgUgUQiKiLi+gRQgbgDgdAAQjCAAiSB3QCSiBDBgKIABAAIAfAAIAbAAIAAAAQDXAJCbCcIAaAbIACAEQCJCcAADUIAAACQgBDViICaIgCAEQgMANgOANQikCmjpAAIgfAAg");
this.shape_3.setTransform(9,-1.7);
this.timeline.addTween(cjs.Tween.get({}).to({state:[{t:this.shape_3},{t:this.shape_2},{t:this.shape_1},{t:this.shape}]}).wait(1));
}).prototype = getMCSymbolPrototype(lib.coin, new cjs.Rectangle(-57.8,-58.2,113.9,113.1), null);
// stage content:
(lib.scratchGame_Canvas = function(mode,startPosition,loop) {
if (loop == null) { loop = false; } this.initialize(mode,startPosition,loop,{});
// timeline functions:
this.frame_0 = function() {
var pressBool=false;
var mask_mc = new createjs.Shape();
var bg_mc = new createjs.Bitmap("images/YLogo1.jpg");
//var coin_mc = new lib.coin();
var coin_mc = new createjs.Shape(new createjs.Graphics().beginFill("#FFFFFF").drawCircle(0, 0, 50));
coin_mc.x = stage.canvas.width/2;
coin_mc.y = stage.canvas.width/2;
stage.addChild(bg_mc, coin_mc);
createjs.Touch.enable(stage, false, true);
if(sRatio<1){
coin_mc.scaleX = coin_mc.scaleY = sRatio;
}
coin_mc.addEventListener('pressmove', moveCoin.bind(this));
coin_mc.addEventListener('mouseup', releaseCoin.bind(this));
updateCacheImage(false);
function moveCoin(e){
e.currentTarget.x = e.stageX;
e.currentTarget.y = e.stageY;
stage.update();
createMask(e);
}
function createMask(e) {
if(!pressBool)return;
var xLoc = stage.mouseX-20;
var yLoc = stage.mouseY-30;
mask_mc.graphics.beginFill("#FFFFFF").drawEllipse(xLoc, yLoc, 40, 60);
updateCacheImage(true);
}
function updateCacheImage(update){
var w = 361;
if (update) {
mask_mc.updateCache();
} else {
mask_mc.cache(0, 0, w, w);
}
maskFilter = new createjs.AlphaMaskFilter(mask_mc.cacheCanvas);
bg_mc.filters = [maskFilter];
if (update) {
bg_mc.updateCache(0, 0, w, w);
} else {
bg_mc.cache(0, 0, w, w);
}
}
function releaseCoin(e) {
//stage.canvas.style.cursor = "default";
pressBool = false;
updateCacheImage(true);
}
}
// actions tween:
this.timeline.addTween(cjs.Tween.get(this).call(this.frame_0).wait(1));
}).prototype = p = new cjs.MovieClip();
p.nominalBounds = new cjs.Rectangle(385.9,359.4,113.9,113);
// library properties:
lib.properties = {
width: 550,
height: 400,
fps: 24,
color: "#999999",
opacity: 1.00,
manifest: [],
preloads: []
};
})(lib = lib||{}, images = images||{}, createjs = createjs||{}, ss = ss||{}, AdobeAn = AdobeAn||{});
var lib, images, createjs, ss, AdobeAn;
HTML Code:-
<!DOCTYPE html>
<!--
NOTES:
1. All tokens are represented by '$' sign in the template.
2. You can write your code only wherever mentioned.
3. All occurrences of existing tokens will be replaced by their appropriate values.
4. Blank lines will be removed automatically.
5. Remove unnecessary comments before creating your template.
-->
<html>
<head>
<meta charset="UTF-8">
<meta name="authoring-tool" content="Adobe_Animate_CC">
<meta name="viewport" content="width=device-width, user-scalable=no, initial-scale=1.0, minimum-scale=1.0, maximum-scale=1.0, minimal-ui" />
<meta name="msapplication-tap-highlight" content="no"/>
<title>scratchGame_Canvas</title>
<!-- write your code here -->
<script src="jquery-3.2.1.slim.min.js"></script>
<script src="createjs-2015.11.26.min.js"></script>
<script src="scratchGame_Canvas.js?1497868121984"></script>
<script>
var canvas, stage, exportRoot, anim_container, dom_overlay_container, fnStartAnimation;
var pRatio = window.devicePixelRatio || 1, xRatio, yRatio, sRatio=1;
function init() {
canvas = document.getElementById("canvas");
anim_container = document.getElementById("animation_container");
dom_overlay_container = document.getElementById("dom_overlay_container");
handleComplete();
}
function handleComplete() {
//This function is always called, irrespective of the content. You can use the variable "stage" after it is created in token create_stage.
exportRoot = new lib.scratchGame_Canvas();
stage = new createjs.Stage(canvas);
stage.addChild(exportRoot);
//Registers the "tick" event listener.
fnStartAnimation = function() {
createjs.Ticker.setFPS(lib.properties.fps);
createjs.Ticker.addEventListener("tick", stage);
}
//Code to support hidpi screens and responsive scaling.
function makeResponsive(isResp, respDim, isScale, scaleType) {
var lastW, lastH, lastS=1;
window.addEventListener('resize', resizeCanvas);
resizeCanvas();
function resizeCanvas() {
var w = lib.properties.width, h = lib.properties.height;
var iw = window.innerWidth, ih=window.innerHeight;
pRatio = window.devicePixelRatio || 1, xRatio=iw/w, yRatio=ih/h, sRatio=1;
if(isResp) {
if((respDim=='width'&&lastW==iw) || (respDim=='height'&&lastH==ih)) {
sRatio = lastS;
}
else if(!isScale) {
if(iw<w || ih<h)
sRatio = Math.min(xRatio, yRatio);
}
else if(scaleType==1) {
sRatio = Math.min(xRatio, yRatio);
}
else if(scaleType==2) {
sRatio = Math.max(xRatio, yRatio);
}
}
canvas.width = w*pRatio*sRatio;
canvas.height = h*pRatio*sRatio;
canvas.style.width = dom_overlay_container.style.width = anim_container.style.width = w*sRatio+'px';
canvas.style.height = anim_container.style.height = dom_overlay_container.style.height = h*sRatio+'px';
stage.scaleX = pRatio*sRatio;
stage.scaleY = pRatio*sRatio;
lastW = iw; lastH = ih; lastS = sRatio;
}
}
makeResponsive(true,'both',false,1);
fnStartAnimation();
}
</script>
<!-- write your code here -->
</head>
<body onload="init();" style="margin:0px;">
<div id="animation_container" style="background-color:rgba(153, 153, 153, 1.00); width:550px; height:400px">
<canvas id="canvas" width="550" height="400" style="position: absolute; display: block; background-color:rgba(153, 153, 153, 1.00);"></canvas>
<div id="dom_overlay_container" style="pointer-events:none; overflow:hidden; width:550px; height:400px; position: absolute; left: 0px; top: 0px; display: block;">
</div>
</div>
</body>
</html>
Hope to get a response soon.
Thanks.
As I mentioned in the comments, you can transform the coordinates so your x/y mouse position is not affected by the responsive canvas scaling. One alternative is to use the globalToLocal() method in the function bound to the pressmove event (in your case, moveCoin()).
scratchGame_Canvas.js
Function moveCoin()
function moveCoin(e){
var point;
point = stage.globalToLocal(e.stageX, e.stageY);
e.currentTarget.x = point.x;
e.currentTarget.y = point.y;
stage.update();
createMask(e);
}
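As a follow-up, createMask() reads stage.mouseX/stage.mouseY directly, so it presumably needs the same transform once the stage is scaled; a sketch:

function createMask(e) {
  if (!pressBool) return;
  // Convert the raw canvas coordinates into the scaled stage's local space,
  // just like moveCoin(), before drawing the reveal ellipse.
  var p = stage.globalToLocal(stage.mouseX, stage.mouseY);
  mask_mc.graphics.beginFill("#FFFFFF").drawEllipse(p.x - 20, p.y - 30, 40, 60);
  updateCacheImage(true);
}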
See also: Drag and drop with easeljs in Animate CC 2017
& MovieClip class (EaselJS API)

How to record a video using WebRTC

I need to record a video using the laptop camera on my website, which is built using Node.js. For this I am using WebRTC. So far I can take a photo using the laptop camera, but I need to record a video. Could someone help with how the code would go? My current code is as follows:
<video id="video"></video>
<button id="startbutton">Take photo</button>
<button id="pausebutton">Pause</button>
<button id="resumebutton">Resume</button>
<canvas id="canvas"></canvas>
<script type="text/javascript">
(function() {
var streaming = false,
video = document.querySelector('#video'),
canvas = document.querySelector('#canvas'),
//photo = document.querySelector('#photo'),
startbutton = document.querySelector('#startbutton'),
width = 620,
height = 50;
navigator.getMedia = ( navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia);
navigator.getMedia(
{
video: true,
audio: false
},
function(stream) {
if (navigator.mozGetUserMedia) {
video.mozSrcObject = stream;
} else {
var vendorURL = window.URL || window.webkitURL;
video.src = vendorURL.createObjectURL(stream);
}
video.play();
},
function(err) {
console.log("An error occured! " + err);
}
);
video.addEventListener('canplay', function(ev){
if (!streaming) {
height = video.videoHeight / (video.videoWidth/width);
video.setAttribute('width', width);
video.setAttribute('height', height);
canvas.setAttribute('width', width);
canvas.setAttribute('height', height);
streaming = true;
}
}, false);
function takepicture() {
canvas.width = width;
canvas.height = height;
canvas.getContext('2d').drawImage(video, 0, 0, width, height);
var data = canvas.toDataURL('image/png');
// photo.setAttribute('src', data);
}
function pausevideo() {
canvas.width = width;
canvas.height = height;
video.pause();
}
function resumevideo() {
canvas.width = width;
canvas.height = height;
video.play();
}
startbutton.addEventListener('click', function(ev){
takepicture();
ev.preventDefault();
}, false);
pausebutton.addEventListener('click', function(ev){
pausevideo();
ev.preventDefault();
}, false);
resumebutton.addEventListener('click', function(ev){
resumevideo();
ev.preventDefault();
}, false);
})();
</script>
I am not going to write code for you (you seem pretty capable if you have gotten this far), but here are some pointers to get you in the right direction.
Assign a global variable the value of the stream when you grab it (this way you can reuse the same stream in numerous functions); see the sketch below.
Once you have the stream, you can easily follow the tutorials at RTC-Recording; there is a write-to-disk method that should help you with downloading the recording.
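For the first pointer, a minimal sketch (using the modern srcObject assignment instead of the deprecated createObjectURL(stream) from the question's code):

var mediaStream = null; // one global stream, reused by preview, photos and recording

navigator.mediaDevices.getUserMedia({ video: true, audio: false })
  .then(function(stream) {
    mediaStream = stream;
    video.srcObject = stream; // replaces vendorURL.createObjectURL(stream)
    video.play();
  })
  .catch(function(err) {
    console.log("An error occurred! " + err);
  });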
If you have a stream, this is how to start using RecordRTC.
var options = {
type: 'video'
};
var recordRTC = RecordRTC(mediaStream, options);
recordRTC.startRecording();
recordRTC.stopRecording(function(videoURL) {
mediaElement.src = videoURL; //plays the recorded blob url on that src
});

Web Audio API record function

I have the following code from http://forestmist.org/blog/web-audio-api-loops/.
It works well, but I need record functionality that can capture the previously clicked buttons and store that audio too. Any help?
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Web Audio API Loops Demo</title>
</head>
<body>
<form>
<button id="button-loop-1" type="button" value="1">Loop 1</button>
<button id="button-loop-2" type="button" value="2">Loop 2</button>
</form>
<script>
//--------------
// Audio Object
//--------------
var audio = {
buffer: {},
compatibility: {},
files: [
'synth.wav',
'beat.wav'
],
proceed: true,
source_loop: {},
source_once: {}
};
//-----------------
// Audio Functions
//-----------------
audio.findSync = function(n) {
var first = 0,
current = 0,
offset = 0;
// Find the audio source with the earliest startTime to sync all others to
for (var i in audio.source_loop) {
current = audio.source_loop[i]._startTime;
if (current > 0) {
if (current < first || first === 0) {
first = current;
}
}
}
if (audio.context.currentTime > first) {
offset = (audio.context.currentTime - first) % audio.buffer[n].duration;
}
return offset;
};
audio.play = function(n) {
if (audio.source_loop[n]._playing) {
audio.stop(n);
} else {
audio.source_loop[n] = audio.context.createBufferSource();
audio.source_loop[n].buffer = audio.buffer[n];
audio.source_loop[n].loop = true;
audio.source_loop[n].connect(audio.context.destination);
var offset = audio.findSync(n);
audio.source_loop[n]._startTime = audio.context.currentTime;
if (audio.compatibility.start === 'noteOn') {
/*
The deprecated noteOn() function does not support offsets.
Compensate by using noteGrainOn() with an offset to play once and then schedule a noteOn() call to loop after that.
*/
audio.source_once[n] = audio.context.createBufferSource();
audio.source_once[n].buffer = audio.buffer[n];
audio.source_once[n].connect(audio.context.destination);
audio.source_once[n].noteGrainOn(0, offset, audio.buffer[n].duration - offset); // currentTime, offset, duration
/*
Note about the third parameter of noteGrainOn().
If your sound is 10 seconds long, your offset 5 and duration 5 then you'll get what you expect.
If your sound is 10 seconds long, your offset 5 and duration 10 then the sound will play from the start instead of the offset.
*/
// Now queue up our looping sound to start immediately after the source_once audio plays.
audio.source_loop[n][audio.compatibility.start](audio.context.currentTime + (audio.buffer[n].duration - offset));
} else {
audio.source_loop[n][audio.compatibility.start](0, offset);
}
audio.source_loop[n]._playing = true;
}
};
audio.stop = function(n) {
if (audio.source_loop[n]._playing) {
audio.source_loop[n][audio.compatibility.stop](0);
audio.source_loop[n]._playing = false;
audio.source_loop[n]._startTime = 0;
if (audio.compatibility.start === 'noteOn') {
audio.source_once[n][audio.compatibility.stop](0);
}
}
};
//-----------------------------
// Check Web Audio API Support
//-----------------------------
try {
// More info at http://caniuse.com/#feat=audio-api
window.AudioContext = window.AudioContext || window.webkitAudioContext;
audio.context = new window.AudioContext();
} catch(e) {
audio.proceed = false;
alert('Web Audio API not supported in this browser.');
}
if (audio.proceed) {
//---------------
// Compatibility
//---------------
(function() {
var start = 'start',
stop = 'stop',
buffer = audio.context.createBufferSource();
if (typeof buffer.start !== 'function') {
start = 'noteOn';
}
audio.compatibility.start = start;
if (typeof buffer.stop !== 'function') {
stop = 'noteOff';
}
audio.compatibility.stop = stop;
})();
//-------------------------------
// Setup Audio Files and Buttons
//-------------------------------
for (var a in audio.files) {
(function() {
var i = parseInt(a) + 1;
var req = new XMLHttpRequest();
req.open('GET', audio.files[i - 1], true); // array starts with 0 hence the -1
req.responseType = 'arraybuffer';
req.onload = function() {
audio.context.decodeAudioData(
req.response,
function(buffer) {
audio.buffer[i] = buffer;
audio.source_loop[i] = {};
var button = document.getElementById('button-loop-' + i);
button.addEventListener('click', function(e) {
e.preventDefault();
audio.play(this.value);
});
},
function() {
console.log('Error decoding audio "' + audio.files[i - 1] + '".');
}
);
};
req.send();
})();
}
}
</script>
</body>
</html>
Check out Recorder.js (https://github.com/mattdiamond/Recorderjs). It should help you out.
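A minimal sketch of how that could look here, assuming you route the loops through one master GainNode that Recorder.js can tap (the workerPath value is an assumption about where you host the library):

// Route every loop through a master gain instead of audio.context.destination,
// then point Recorder.js at that node.
var master = audio.context.createGain();
master.connect(audio.context.destination);
// In audio.play(), use: audio.source_loop[n].connect(master);

var rec = new Recorder(master, { workerPath: 'recorderWorker.js' }); // hypothetical path
rec.record(); // start capturing whatever the loops play
// ... later:
rec.stop();
rec.exportWAV(function(blob) {
  var url = URL.createObjectURL(blob); // WAV of the recorded mix
  console.log('Recording ready: ' + url);
});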

Multiple Bing Maps pushpins from SQL not showing in Firefox & Chrome but do in IE

I'm displaying a Bing Map (v7) in my WebMatrix 2 website with a series of pushpins & infoboxes drawn from a SQL Express database using a JSON query.
While the map appears in all 3 browsers I'm testing (IE, FF & Chrome), the pushpins sometimes do not show in FF & Chrome, particularly if I refresh with Ctrl+F5.
This is my first JSON and Bing Maps app, so I expect there are a few mistakes.
Any suggestions on how to improve the code and get display consistency?
@{
Layout = "~/_MapLayout.cshtml";
}
<script type="text/javascript" src="~/Scripts/jquery-1.9.1.min.js"></script>
<script type="text/javascript" src="http://ecn.dev.virtualearth.net/mapcontrol/mapcontrol.ashx?v=7.0"></script>
<link rel="StyleSheet" href="infoboxStyles.css" type="text/css">
<script type="text/javascript">
var map = null;
var pinLayer, pinInfobox;
var mouseover;
var pushpinFrameHTML = '<div class="infobox"><a class="infobox_close" href="javascript:closeInfobox()"><img src="/Images/close2.jpg" /></a><div class="infobox_content">{content}</div></div><div class="infobox_pointer"><img src="images/pointer_shadow.png"></div>';
var pinLayer = new Microsoft.Maps.EntityCollection();
var infoboxLayer = new Microsoft.Maps.EntityCollection();
function getMap() {
map = new Microsoft.Maps.Map(document.getElementById('map'), {
credentials: "my-key",
zoom: 4,
center: new Microsoft.Maps.Location(-25, 135),
mapTypeId: Microsoft.Maps.MapTypeId.road
});
pinInfobox = new Microsoft.Maps.Infobox(new Microsoft.Maps.Location(0, 0), { visible: false });
AddData();
}
$(function AddData() {
$.getJSON('/ListSchools', function (data) {
var schools = data;
$.each(schools, function (index, school) {
for (var i = 0; i < schools.length; i++) {
var pinLocation = new Microsoft.Maps.Location(school.SchoolLat, school.SchoolLon);
var NewPin = new Microsoft.Maps.Pushpin(pinLocation);
NewPin.title = school.SchoolName;
NewPin.description = "-- Learn More --";
pinLayer.push(NewPin); //add pushpin to pinLayer
Microsoft.Maps.Events.addHandler(NewPin, 'mouseover', displayInfobox);
}
});
infoboxLayer.push(pinInfobox);
map.entities.push(pinLayer);
map.entities.push(infoboxLayer);
});
})
function displayInfobox(e) {
if (e.targetType == "pushpin") {
var pin = e.target;
var html = "<span class='infobox_title'>" + pin.title + "</span><br/>" + pin.description;
pinInfobox.setOptions({
visible: true,
offset: new Microsoft.Maps.Point(-33, 20),
htmlContent: pushpinFrameHTML.replace('{content}', html)
});
//set location of infobox
pinInfobox.setLocation(pin.getLocation());
}
}
function closeInfobox() {
pinInfobox.setOptions({ visible: false });
}
function getCurrentLocation() {
var geoLocationProvider = new Microsoft.Maps.GeoLocationProvider(map);
geoLocationProvider.getCurrentPosition();
}
</script>
<body onload="getMap();">
<div id="map" style="position:relative; width:800px; height:600px;"></div>
<div>
<input type="button" value="Find Nearest Schools" onclick="getCurrentLocation();" />
</div>
</body>
The JSON file is simply:
@{
var db = Database.Open("StarterSite");
var sql = @"SELECT * FROM Schools WHERE SchoolLon != ' ' AND SchoolLon != 'null' ";
var data = db.Query(sql);
Json.Write(data, Response.Output);
}
Add your pinLayer, infobox, and infoboxLayer before calling the AddData function and see if that makes a difference. Also verify that school.SchoolLat and school.SchoolLon are numbers and not string versions of numbers; if they are strings, use parseFloat to turn them into numbers. Other than that, everything looks fine.
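A sketch of both suggestions combined, keeping the rest of the page as-is:

function getMap() {
  map = new Microsoft.Maps.Map(document.getElementById('map'), { /* same options as above */ });
  pinInfobox = new Microsoft.Maps.Infobox(new Microsoft.Maps.Location(0, 0), { visible: false });
  // Attach the layers up front so they already exist when the JSON arrives.
  infoboxLayer.push(pinInfobox);
  map.entities.push(pinLayer);
  map.entities.push(infoboxLayer);
  AddData();
}

// Inside the $.getJSON callback, coerce coordinates in case SQL returns strings:
var pinLocation = new Microsoft.Maps.Location(
  parseFloat(school.SchoolLat),
  parseFloat(school.SchoolLon));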