Why does the node-lame module save an mp3 file with high-pitched/fast playback? - express

I am trying to record audio in the browser and stream it as raw PCM audio to my Node server, where I want to save it in .mp3 format. I am using the node-lame module on the server to create the mp3 file from the PCM stream. The problem is that the resulting mp3 is always high pitched and plays back too fast. I have tried sending the data from the browser (client side) both as int16 and as float32, setting the appropriate lame.Encoder options for each:
stream.pipe(new lame.Encoder({ channels: 2, bitDepth: 32, float: true })) // float32
  .pipe(fs.createWriteStream(path.resolve(__dirname, 'demo.mp3')));

stream.pipe(new lame.Encoder({ channels: 2, bitDepth: 16, sampleRate: 44100 })) // int16
  .pipe(fs.createWriteStream(path.resolve(__dirname, 'demo.mp3')));
Here is the code for the client side
(function(window) {
  var client = new BinaryClient('ws://localhost:9001');
  client.on('open', function() {
    window.Stream = client.createStream();

    if (!navigator.getUserMedia)
      navigator.getUserMedia = navigator.webkitGetUserMedia ||
        navigator.mozGetUserMedia || navigator.msGetUserMedia;

    if (navigator.getUserMedia) {
      navigator.getUserMedia({ audio: true }, success, function(e) {
        alert('Error capturing audio.');
      });
    } else alert('getUserMedia not supported in this browser.');

    var recording = false;

    window.startRecording = function() {
      recording = true;
    };

    window.stopRecording = function() {
      recording = false;
      window.Stream.end();
    };

    function success(e) {
      audioContext = window.AudioContext || window.webkitAudioContext;
      context = new audioContext();
      // the sample rate is in context.sampleRate
      audioInput = context.createMediaStreamSource(e);
      var bufferSize = 2048;
      recorder = context.createScriptProcessor(bufferSize, 1, 1);
      recorder.onaudioprocess = function(e) {
        if (!recording) return;
        console.log('recording');
        var left = e.inputBuffer.getChannelData(0);
        window.Stream.write(left); // trying it with float32
      };
      audioInput.connect(recorder);
      recorder.connect(context.destination);
    }

    function convertFloat32ToInt16(buffer) {
      var l = buffer.length;
      var buf = new Int16Array(l);
      while (l--) {
        buf[l] = buffer[l] * 0x7FFF; // scale [-1, 1] floats to the signed 16-bit range
      }
      return buf.buffer;
    }
  });
})(this);
Here is the node app
var express = require('express');
var BinaryServer = require('binaryjs').BinaryServer;
var fs = require('fs');
var lame = require("lame");
var path = require('path');
var KalmanFilter = require('kalmanjs').default;

var buffer = [];
var port = 3700;
var outFile = 'demo.mp3';

var app = express();
app.set('views', __dirname + '/tpl');
app.set('view engine', 'jade');
app.engine('jade', require('jade').__express);
app.use(express.static(__dirname + '/public'));

app.get('/', function(req, res) {
  res.render('index');
});

app.listen(port);
console.log('server open on port ' + port);

binaryServer = BinaryServer({ port: 9001 });

binaryServer.on('connection', function(client) {
  console.log('new connection');
  client.on('stream', function(stream, meta) {
    console.log('new stream');
    stream.pipe(new lame.Encoder({ channels: 2, bitDepth: 32, float: true }))
      .pipe(fs.createWriteStream(path.resolve(__dirname, 'demo.mp3')))
      .on('close', function() {
        console.log('done?');
      });
    stream.on('end', function() {
      console.log('wrote to file ' + outFile);
    });
  });
});
Both of these snippets are taken from https://github.com/gabrielpoca/browser-pcm-stream, except for the mp3 part.

The default sampling rate in the browser is 48000 Hz, while you are encoding your mp3 file at a sampling rate of 44100 Hz. You either need to resample your PCM data or make the recording and encoding rates match.
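For example, a minimal sketch of matching the rates by sending the browser's actual sample rate along with the stream, assuming BinaryJS's createStream(meta) is used to carry it (and noting that the client above writes only one channel):

// Client: create the stream once the AudioContext exists, so the rate is known
window.Stream = client.createStream({ sampleRate: context.sampleRate });

// Server: configure the encoder from the stream's meta instead of assuming defaults
client.on('stream', function(stream, meta) {
  stream.pipe(new lame.Encoder({
    channels: 1,                  // the client sends only the left channel
    bitDepth: 32,
    float: true,
    sampleRate: meta.sampleRate   // typically 48000 in current browsers
  }))
  .pipe(fs.createWriteStream(path.resolve(__dirname, 'demo.mp3')));
});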

Related

startRecording not working using RecordRTC with RTCMultiConnection

I am trying to record every new session/user added to RTCMultiConnection.
I am using the following demo URL in the application:
https://rtcmulticonnection.herokuapp.com/demos/Audio+Video+TextChat+FileSharing.html
I have added the following CDN reference to the code:
https://cdn.webrtc-experiment.com/RecordRTC.js
This is the code I am working with, but connection.streams[event.streamid].startRecording(); is not working:
// ..................RTCMultiConnection Code.............
// ......................................................
var connection = new RTCMultiConnection();
var btnStopRec = document.getElementById("btnStopRecording");

connection.socketURL = 'https://rtcmulticonnection.herokuapp.com:443/';
connection.enableFileSharing = true;

connection.session = {
  audio: true,
  video: true,
  data: true,
};

connection.sdpConstraints.mandatory = {
  OfferToReceiveAudio: true,
  OfferToReceiveVideo: true,
};

connection.onstream = function (event) {
  document.body.appendChild(event.mediaElement);
  console.log("stream recording starts");
  connection.streams[event.streamid].startRecording();
  console.log("stream recording started");
};
I included all possible situations in a single snippet, below. Please take only the code that you need:
// global object that contains multiple recorders
var recorders = {};

// auto start a recorder as soon as a stream starts/begins
connection.onstream = function(event) {
  document.body.appendChild(event.mediaElement);
  recorders[event.streamid] = RecordRTC(event.stream, {
    type: 'video'
  });
  recorders[event.streamid].startRecording();
};

// auto stop the recorder as soon as the stream stops/ends
connection.onstreamended = function(event) {
  if (recorders[event.streamid]) {
    recorders[event.streamid].stopRecording(function() {
      var blob = recorders[event.streamid].getBlob();
      var url = URL.createObjectURL(blob);
      window.open(url);
      delete recorders[event.streamid]; // clear
    });
  }
  if (event.mediaElement.parentNode) {
    event.mediaElement.parentNode.removeChild(event.mediaElement);
  }
};
// stop a single recorder
document.getElementById('manually-stop-single-recording').onclick = function() {
  var streamid = prompt('Enter streamid');
  recorders[streamid].stopRecording(function() {
    var blob = recorders[streamid].getBlob();
    var url = URL.createObjectURL(blob);
    window.open(url);
    delete recorders[streamid]; // clear
  });
};

// stop all recorders
document.getElementById('manually-stop-all-recordings').onclick = function() {
  Object.keys(recorders).forEach(function(streamid) {
    recorders[streamid].stopRecording(function() {
      var blob = recorders[streamid].getBlob();
      var url = URL.createObjectURL(blob);
      window.open(url);
      delete recorders[streamid]; // clear
    });
  });
};

// record outside the onstream event,
// i.e. start recording manually at any time
document.getElementById('record-stream-outside-the-onstream-event').onclick = function() {
  var streamid = prompt('Enter streamid');
  var stream = connection.streamEvents[streamid].stream;
  recorders[streamid] = RecordRTC(stream, {
    type: 'video'
  });
  recorders[streamid].startRecording();
};
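As a usage note: instead of opening each blob in a new tab, you could upload it to your server. A sketch, where the '/upload' endpoint and field name are hypothetical:

function uploadRecording(streamid) {
  recorders[streamid].stopRecording(function() {
    var blob = recorders[streamid].getBlob();
    var formData = new FormData();
    formData.append('recording', blob, streamid + '.webm');
    var xhr = new XMLHttpRequest();
    xhr.open('POST', '/upload', true);
    xhr.send(formData);          // the server decides where to store the file
    delete recorders[streamid];  // clear
  });
}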

Share screen using getScreenId.js in WebRTC for two peers

I am trying to implement a share-screen function in WebRTC video conferencing. Following a suggestion, I am now using muaz-khan's solution at https://www.webrtc-experiment.com/getScreenId/. I can easily capture the application images of one peer and replace the video stream with the captured stream. But this is a video-conferencing experiment, so two browsers need to conference with each other. For example, browser 1 has video stream A (local video) and video stream B (remote video); browser 2 has video stream B (local video) and video stream A (remote video). So when I share the screen from browser 1, the screen-share stream should replace the local video in browser 1 and the remote video in browser 2.
Right now the screen share only replaces the local video in browser 1; browser 2 sees no change in its remote video (which is the local video of browser 1). I don't know how to trigger the change in browser 2 as well. Do I need to signal the screen-share stream to the server and change the remote stream accordingly?
Here is my code in javascript:
$(function() {
  var brokerController, ws, webRTC, localid;
  // ws = new XSockets.WebSocket("wss://rtcplaygrouund.azurewebsites.net:443", ["connectionbroker"], {
  ws = new XSockets.WebSocket("ws://localhost:4502", ["connectionbroker"], {
    ctx: "152300ed-4d84-4e72-bc99-965052dc1e95"
  });

  var addRemoteVideo = function(peerId, mediaStream) {
    var remoteVideo = document.createElement("video");
    remoteVideo.setAttribute("autoplay", "true");
    remoteVideo.setAttribute("rel", peerId);
    attachMediaStream(remoteVideo, mediaStream);
    remoteVideo.setAttribute("class", "col-md-3");
    remoteVideo.setAttribute("height", $(document).height() * 0.3);
    remoteVideo.setAttribute("id", 'remoteVideo');
    $("#videoscreen").append(remoteVideo);
  };

  var onConnectionLost = function(remotePeer) {
    console.log("onconnectionlost");
    var peerId = remotePeer.PeerId;
    var videoToRemove = $("video[rel='" + peerId + "']");
    videoToRemove.remove();
  };

  var oncConnectionCreated = function() {
    console.log("oncconnectioncreated", arguments);
  };

  var onGetUerMedia = function(stream) {
    console.log("Successfully got some userMedia , hopefully a goat will appear..");
    webRTC.connectToContext(); // connect to the current context?
  };

  var onRemoteStream = function(remotePeer) {
    addRemoteVideo(remotePeer.PeerId, remotePeer.stream);
    console.log("Opps, we got a remote stream. lets see if its a goat..");
  };

  var onLocalStream = function(mediaStream) {
    console.log("Got a localStream", mediaStream.id);
    localid = mediaStream.id;
    console.log("check this id: meadiastram id ", mediaStream.id);
    var video = document.createElement("video");
    video.setAttribute("height", "100%");
    video.setAttribute("autoplay", "true");
    video.setAttribute("id", "localvideo");
    video.setAttribute("name", mediaStream.id);
    attachMediaStream(video, mediaStream);
    $("#videoscreen").append(video);

    $('#share').click(function() {
      getScreenId(function(error, sourceId, screen_constraints) {
        navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
        navigator.getUserMedia(screen_constraints, function(stream) {
          $('#localvideo').attr('src', URL.createObjectURL(stream));
        }, function(error) {
          console.error(error);
        });
      });
    });
  };

  var onContextCreated = function(ctx) {
    console.log("RTC object created, and a context is created - ", ctx);
    webRTC.getUserMedia(webRTC.userMediaConstraints.hd(true), onGetUerMedia, onError);
  };

  var onOpen = function() {
    console.log("Connected to the brokerController - 'connectionBroker'");
    webRTC = new XSockets.WebRTC(this);
    webRTC.onlocalstream = onLocalStream;
    webRTC.oncontextcreated = onContextCreated;
    webRTC.onconnectioncreated = oncConnectionCreated;
    webRTC.onconnectionlost = onConnectionLost;
    webRTC.onremotestream = onRemoteStream;
  };

  var onConnected = function() {
    console.log("connection to the 'broker' server is established");
    console.log("Try get the broker controller form server..");
    brokerController = ws.controller("connectionbroker");
    brokerController.onopen = onOpen;
  };

  ws.onconnected = onConnected;
});
I am using XSockets as the server, and the code for clicking share and replacing the local stream with the screen-share stream is just this simple:
$('#share').click(function() {
  getScreenId(function(error, sourceId, screen_constraints) {
    navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
    navigator.getUserMedia(screen_constraints, function(stream) {
      $('#localvideo').attr('src', URL.createObjectURL(stream));
    }, function(error) {
      console.error(error);
    });
  });
});
Any help or suggestion would be appreciated.
Thanks for pointing out the other post: How to addTrack in MediaStream in WebRTC, but I don't think they are the same problem. I am also not sure how to renegotiate the remote connection in this case.
Xsocket.webrtc.js file for webrtc connection:
https://github.com/XSockets/XSockets.WebRTC/blob/master/src/js/XSockets.WebRTC.latest.js
How could I renegotiate the remote connection in this case?
I figured out a workaround for this question myself: do not replace the local stream with the screen-share stream; instead, remove the old local stream from the local div and add the new screen-share stream to it. In the meantime, send the old local stream's id over the data channel to the other peer and remove the corresponding old remote video there as well.
The most important thing is to refresh the streams (renegotiation); then the screen-share stream is displayed on the remote peer.
Code:
$('#share').click(function() {
  getScreenId(function(error, sourceId, screen_constraints) {
    navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
    navigator.getUserMedia(screen_constraints, function(stream) {
      webRTC.removeStream(webRTC.getLocalStreams()[0]);
      var id = $('#localvideo').attr('name');
      $('#localvideo').remove();
      brokerController.invoke('updateremotevideo', id);
      webRTC.addLocalStream(stream);
      webRTC.getRemotePeers().forEach(function(p) {
        webRTC.refreshStreams(p);
      });
    }, function(error) {
      console.error(error);
    });
  });
});
After the other peer gets the command from the server to remove the old video stream:
brokerController.on('updateremotevideo', function(streamid) {
  $(document.getElementById(streamid)).remove();
});
This solution works for me. If you only want to replace the local video stream with the screen-share stream, though, you need to re-create the offer with the new SDP and send it to the remote peer, which is more complicated.
A related snippet that also mixes an audio track into the captured screen stream before recording it:
getScreenId(function(error, sourceId, screen_constraints) {
  navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
  navigator.getUserMedia(screen_constraints, function(stream) {
    navigator.getUserMedia({ audio: true }, function(audioStream) {
      stream.addTrack(audioStream.getAudioTracks()[0]);
      var mediaRecorder = new MediaStreamRecorder(stream);
      mediaRecorder.mimeType = 'video/mp4';
      mediaRecorder.stream = stream;
      self.setState({ recorder: mediaRecorder, startRecord: true, shareVideo: true, pauseRecord: false, resumeRecord: false, stopRecord: false, downloadRecord: false, updateRecord: false });
      document.querySelector('video').src = URL.createObjectURL(stream);
      var video = document.getElementById('screen-video');
      if (video) {
        video.src = URL.createObjectURL(stream);
        video.width = 360;
        video.height = 300;
      }
    }, function(error) {
      alert(error);
    });
  }, function(error) {
    alert(error);
  });
});
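For the renegotiation case mentioned above, modern browsers also offer RTCRtpSender.replaceTrack, which swaps the outgoing video track without a full offer/answer round. A minimal sketch, assuming you can reach the underlying RTCPeerConnection (the XSockets wrapper may not expose it directly):

// 'pc' is assumed to be the RTCPeerConnection for the remote peer
function swapToScreenShare(pc, screenStream) {
  var screenTrack = screenStream.getVideoTracks()[0];
  var sender = pc.getSenders().find(function(s) {
    return s.track && s.track.kind === 'video';
  });
  if (sender) {
    sender.replaceTrack(screenTrack); // no SDP exchange needed if codecs match
  }
  // show the screen share locally as well
  document.getElementById('localvideo').srcObject = screenStream;
}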

Backbone - Test method in view that uses ReadFile

I have written a Backbone view which takes a file object or blob as an option on instantiation and then checks that file for EXIF data, corrects the orientation, and resizes the image if necessary, depending on the options passed in.
Within the view there is a function mainFn which takes the file object and calls all the other functions.
My issue is: how do I test mainFn, which uses FileReader and an Image constructor?
For my test setup I am using mocha, chai, sinon and phantomjs.
In my sample code I have removed all the other functions so as not to add unnecessary clutter. If you wish to see the whole view, visit its GitHub repository.
var imageUpLoad = Backbone.View.extend({
  template: _.template(document.getElementById("file-uploader-template").innerHTML),
  // global variables passed in through options - required
  _file: null, // our target file
  cb: null,
  maxFileSize: null, // megabytes
  maxHeight: null, // pixels - resize target
  maxWidth: null, // pixels - resize target
  minWidth: null, // pixels
  maxAllowedHeight: null, // pixels
  maxAllowedWidth: null, // pixels
  // globals determined through function
  sourceWidth: null,
  sourceHeight: null,
  initialize: function (options) {
    this._file = options.file;
    this.cb = options.cb;
    this.maxHeight = options.maxHeight;
    this.maxWidth = options.maxWidth;
    this.maxFileSize = options.maxFileSize;
    this.minWidth = options.minWidth;
    this.maxAllowedHeight = options.maxAllowedHeight;
    this.maxAllowedWidth = options.maxAllowedWidth;
  },
  render: function () {
    this.setElement(this.template());
    this.mainFn(this._file);
    return this;
  },
  // returns the width and height of the source file and calls the transform function
  mainFn: function (file) {
    var fr = new FileReader();
    var that = this;
    fr.onloadend = function () {
      var _img = new Image();
      // image width and height can only be determined once the image has loaded
      _img.onload = function () {
        that.sourceWidth = _img.width;
        that.sourceHeight = _img.height;
        that.transformImg(file);
      };
      _img.src = fr.result;
    };
    fr.readAsDataURL(file);
  }
});
My test set-up
describe("image-upload view", function () {
before(function () {
// create test fixture
this.$fixture = $('<div id="image-view-fixture"></div><div>');
});
beforeEach(function () {
// fake image
this.b64DataJPG = '/9j/4AAQSkZJRgABAQEAYABgAAD/4QAiRXhpZgAASUkqAAgAAA' +
'ABABIBAwABAAAABgASAAAAAAD/2wBDAAEBAQEBAQEBAQEBAQEB' +
'AQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQ' +
'EBAQEBAQEBAQEBAQH/2wBDAQEBAQEBAQEBAQEBAQEBAQEBAQEB' +
'AQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQ' +
'EBAQEBAQH/wAARCAABAAIDASIAAhEBAxEB/8QAHwAAAQUBAQEB' +
'AQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBA' +
'QAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAk' +
'M2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1' +
'hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKj' +
'pKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+' +
'Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAA' +
'AAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAx' +
'EEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl' +
'8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2' +
'hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmq' +
'srO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8v' +
'P09fb3+Pn6/9oADAMBAAIRAxEAPwD+/iiiigD/2Q==';
    var b64toBlob = function (b64Data, contentType, sliceSize) {
      contentType = contentType || '';
      sliceSize = sliceSize || 512;
      var input = b64Data.replace(/\s/g, '');
      var byteCharacters = atob(input);
      var byteArrays = [];
      for (var offset = 0; offset < byteCharacters.length; offset += sliceSize) {
        var slice = byteCharacters.slice(offset, offset + sliceSize);
        var byteNumbers = new Array(slice.length);
        for (var i = 0; i < slice.length; i++) {
          byteNumbers[i] = slice.charCodeAt(i);
        }
        var byteArray = new Uint8Array(byteNumbers);
        byteArrays.push(byteArray);
      }
      try {
        var blob = new Blob(byteArrays, { type: contentType });
      } catch (e) {
        // TypeError: old Chrome and Firefox
        window.BlobBuilder = window.BlobBuilder ||
          window.WebKitBlobBuilder ||
          window.MozBlobBuilder ||
          window.MSBlobBuilder;
        if (e.name == 'TypeError' && window.BlobBuilder) {
          var bb = new BlobBuilder();
          bb.append(byteArrays);
          blob = bb.getBlob(contentType);
        } else if (e.name == "InvalidStateError") {
          // InvalidStateError (tested on FF13 WinXP)
          blob = new Blob(byteArrays, { type: contentType });
        } else {
          // blob constructor unsupported entirely
        }
      }
      return blob;
    };
    this.blobJPG = b64toBlob(this.b64DataJPG, "image/jpg");
    /* **************** */
    this.$fixture.empty().appendTo($("#fixtures"));
    this.view = new imageUpLoad({
      file: this.blobJPG,
      cb: function (url) { console.log(url); },
      maxFileSize: 500000,
      minWidth: 200,
      maxHeight: 900,
      maxWidth: 1000,
      maxAllowedHeight: 4300,
      maxAllowedWidth: 1000
    });
    this.renderSpy = sinon.spy(this.view, "render");
    this.readFileDataStub = sinon.stub(this.view, 'readFileData');
    this.resizeImageStub = sinon.stub(this.view, 'resizeImage');
    this.returnDataUrlStub = sinon.stub(this.view, 'returnDataUrl');
    this.mainFnSpy = sinon.spy(this.view, 'mainFn');
    this.transformImgStub = sinon.stub(this.view, 'transformImg');
    this.sizeConfigStub = sinon.stub(this.view, 'sizeConfig');
    this.resizeConfStub = sinon.stub(this.view, 'resizeConf');
    this.callbackSpy = sinon.spy();
  });
  afterEach(function () {
    this.renderSpy.restore();
    this.readFileDataStub.restore();
    this.resizeImageStub.restore();
    this.returnDataUrlStub.restore();
    this.mainFnSpy.restore();
    this.sizeConfigStub.restore();
    this.resizeConfStub.restore();
    this.transformImgStub.restore();
  });
  after(function () {
    $("#fixtures").empty();
  });
  it("can render", function () {
    var _view = this.view.render();
    expect(this.renderSpy).to.have.been.called;
    expect(this.view).to.equal(_view);
  });
});
You could either mock the FileReader / Image on the window, e.g.
// beforeEach
var _FileReader = window.FileReader;
window.FileReader = sinon.stub().returns('whatever');

// afterEach
window.FileReader = _FileReader;
Or reference the constructor on the instance, e.g.
// view.js
var View = Backbone.View.extend({
  FileReader: window.FileReader,
  mainFn: function() {
    var fileReader = new this.FileReader();
  }
});

// view.spec.js
sinon.stub(this.view, 'FileReader').returns('whatever');
Personally I'd prefer the latter as there's no risk of breaking the global reference if, for example, you forget to reassign the original value.
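For instance, a sketch of a test against the instance-level reference; the fake reader object and the test name are illustrative, and sinon-chai is assumed, as in the setup above:

it('reads the file with FileReader', function () {
  var fakeReader = { readAsDataURL: sinon.spy() };
  // 'new this.FileReader()' yields fakeReader, because a constructor
  // that returns an object returns that object instead of 'this'
  var frStub = sinon.stub(this.view, 'FileReader').returns(fakeReader);
  this.view.mainFn(this.blobJPG);
  expect(fakeReader.readAsDataURL).to.have.been.calledWith(this.blobJPG);
  frStub.restore();
});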

WebRTC/Socket.io Connecting two Clients / Signaling

I've updated my code with the complete signaling exchange. The problem now is that upon completing the exchange, socket.on('receivedAnswer') throws an error. I'm testing all this on my local machine in two browser tabs, so I assume I don't need ICE just yet for this to work...
<html>
<head>
  <link rel='stylesheet' type='text/css' href='css.css'>
  <script src="node_modules/socket.io/node_modules/socket.io-client/socket.io.js"></script>
</head>
<body>
  <div id='video_box'>
    <video id='video' autoplay="true">
    </video>
  </div>
  <div id='video_box2'>
    <video id='video2' autoplay="true">
    </video>
  </div>
  <script>
    var local_stream;
    var baseURL = getBaseURL();
    var socketIOPort = 8999;
    var socketIOLocation = baseURL + socketIOPort;
    var socket = io(socketIOLocation);
    var localvid = document.getElementById('video');
    var mediaOptions = { audio: false, video: true };
    var pc = new mozRTCPeerConnection({ "iceServers": [{ "url": "stun:stun.1.google.com:19302" }] });
    var pc2 = new mozRTCPeerConnection();
    var PeerConnection = window.RTCPeerConnection || window.mozRTCPeerConnection || window.webkitRTCPeerConnection;
    var SessionDescription = window.RTCSessionDescription || window.mozRTCSessionDescription || window.webkitRTCSessionDescription;
    var offerConstraints = { OfferToReceiveAudio: true, OfferToReceiveVideo: true };

    socket.on('receivedAnswer', function(answerSDP) {
      pc.setRemoteDescription(new mozRTCSessionDescription(answerSDP), function() {
        alert('received the answer');
      }, error2);
    });

    socket.on('getPeer1', function(SDP) {
      alert('got peer 1 SDP');
      pc.setRemoteDescription(new mozRTCSessionDescription(SDP), function() {
        pc.createAnswer(function(answerSDP) {
          pc.setLocalDescription(answerSDP, function() {
            socket.emit('answerSDP', answerSDP);
            alert('sending answer');
          }, error2);
        }, error2);
      }, error2);
    });

    function start() {
      checkMedia();
      navigator.getUserMedia(mediaOptions, getMediaSuccess, error2);
    }

    function answer(offeredSDP) {
      offeredSDP = new SessionDescription(offeredSDP);
    }

    function peer() {
      pc.addStream(local_stream);
      pc.createOffer(function(SDP) {
        socket.emit('sendSDPtoServer', SDP);
      }, error2, offerConstraints);
    }

    function getMediaSuccess(stream) {
      localvid.src = window.URL.createObjectURL(stream);
      local_stream = stream;
      peer(); ///////////
    }

    function error2() {
      alert('error');
    }

    function error3() {
      alert('error here');
    }

    function checkMedia() {
      if (!navigator.getUserMedia) {
        navigator.getUserMedia = navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
      }
      if (!navigator.getUserMedia) {
        alert('getUserMedia not supported in this browser.');
      }
    }

    function getBaseURL() {
      baseURL = location.protocol + "//" + location.hostname + ":" + location.port;
      return baseURL;
    }

    start();
    //alert('script');
  </script>
</body>
</html>
Server
var static = require('node-static');
var express = require('express');
var app = express();
var port = 8999;
var http = require('http');
var file = new (static.Server)();
var io = require('socket.io').listen(app.listen(port));

var connectedClients = {};
var peer1 = 0;
var peer2 = 0;
var peer1sdp = 0;
var peer2sdp = 0;

app.set('views', __dirname)
  .engine('html', require('ejs').renderFile)
  .use(express.static(__dirname + '/public'))
  .get('/', function(req, res) {
    //res.render('indexcpy.html');
  });

io.on('connection', function(socket) {
  console.log('a user connected');
  socket.on('disconnect', function() {
    console.log('user disconnected');
  });
  socket.on('answerSDP', function(answerSDP) {
    console.log('ANSWER SENT');
    io.to(peer1).emit('receivedAnswer', answerSDP);
  });
  socket.on('sendSDPtoServer', function(SDP) {
    if (peer1 == 0) {
      console.log('peer 1 ' + socket.id + ' has sent its SDP to server');
      peer1 = socket.id;
      peer1sdp = SDP;
    } else {
      console.log('peer 2 ' + socket.id + ' has been sent to the server');
      socket.emit('getPeer1', peer1sdp);
    }
  });
});
The mistake here is your assumption that pc.addStream(stream) would fire pc.onstream(event) on the same peer; it is actually fired on the remote peer.
Reference
Other than that, your WebRTC code is incomplete: your offer SDP reaches the server, but it is not sent on to the remote peer, the answer has to be forwarded back to the offering peer, and so on...
You have to make sure that your local stream is added to the peer connection before creating the answer. It would be clearer if you showed where you generate the answer.
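To illustrate what "and so on" involves, here is a sketch of the remaining pieces in the question's own style. The 'iceCandidate' event name is an assumption; note also that peer() never calls pc.setLocalDescription before emitting the offer, which on its own will make the later setRemoteDescription of the answer fail:

// Client: set the local description before shipping the offer
function peer() {
  pc.addStream(local_stream);
  pc.createOffer(function(SDP) {
    pc.setLocalDescription(SDP, function() {
      socket.emit('sendSDPtoServer', SDP);
    }, error2);
  }, error2, offerConstraints);
}

// Client: exchange ICE candidates (required even between two local tabs)
pc.onicecandidate = function(event) {
  if (event.candidate) socket.emit('iceCandidate', event.candidate);
};
socket.on('iceCandidate', function(candidate) {
  pc.addIceCandidate(new mozRTCIceCandidate(candidate));
});

// Server: relay candidates to the other peer
// (assumes peer2 = socket.id is recorded in the else branch of 'sendSDPtoServer')
socket.on('iceCandidate', function(candidate) {
  var target = (socket.id === peer1) ? peer2 : peer1;
  io.to(target).emit('iceCandidate', candidate);
});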

WebRTC Play Audio Input as Microphone

I want to play an audio file as microphone input (sending my audio file rather than my live voice) to the connected WebRTC user. Can anybody tell me how this could be done?
I have tried the following approaches in the JS code:
1. base64 Audio
<script>
  var base64string = "T2dnUwACAAAAAAA..";
  var snd = new Audio("data:audio/wav;base64," + base64string);
  snd.play();

  var Sound = (function () {
    var df = document.createDocumentFragment();
    return function Sound(src) {
      var snd = new Audio(src);
      df.appendChild(snd);
      snd.addEventListener('ended', function () { df.removeChild(snd); });
      snd.play();
      return snd;
    }
  }());
  var snd = Sound("data:audio/wav;base64," + base64string);
</script>
2. AudioBuffer
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var audioContext = new AudioContext();
var isPlaying = false;
var sourceNode = null;
var theBuffer = null;

window.onload = function() {
  var request = new XMLHttpRequest();
  request.open("GET", "sounds/DEMO_positive_resp.wav", true);
  request.responseType = "arraybuffer";
  request.onload = function() {
    audioContext.decodeAudioData(request.response, function(buffer) {
      theBuffer = buffer;
    });
  };
  request.send();
};

function togglePlayback() {
  var now = audioContext.currentTime;
  if (isPlaying) {
    // stop playing and return
    sourceNode.stop(now);
    sourceNode = null;
    analyser = null;
    isPlaying = false;
    if (!window.cancelAnimationFrame)
      window.cancelAnimationFrame = window.webkitCancelAnimationFrame;
    //window.cancelAnimationFrame( rafID );
    return "start";
  }
  sourceNode = audioContext.createBufferSource();
  sourceNode.buffer = theBuffer;
  sourceNode.loop = true;
  analyser = audioContext.createAnalyser();
  analyser.fftSize = 2048;
  sourceNode.connect(analyser);
  analyser.connect(audioContext.destination);
  sourceNode.start(now);
  isPlaying = true;
  isLiveInput = true;
  return "stop";
}
Please help me out in this case. It would be highly appreciated.
Here is a demo that may help you stream an mp3 or wav in Chrome:
https://www.webrtc-experiment.com/RTCMultiConnection/stream-mp3-live.html
Here is how it is written:
http://www.rtcmulticonnection.org/docs/getting-started/#stream-mp3-live
And source code of the demo:
https://github.com/muaz-khan/RTCMultiConnection/blob/master/demos/stream-mp3-live.html
https://github.com/muaz-khan/WebRTC-Experiment/issues/222
Use in 3rd party WebRTC applications
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new AudioContext();
var gainNode = context.createGain();
gainNode.connect(context.destination);
// don't play for self
gainNode.gain.value = 0;

document.querySelector('input[type=file]').onchange = function() {
  this.disabled = true;
  var reader = new FileReader();
  reader.onload = (function(e) {
    // import callback function that provides PCM audio data decoded as an audio buffer
    context.decodeAudioData(e.target.result, function(buffer) {
      // create the sound source
      var soundSource = context.createBufferSource();
      soundSource.buffer = buffer;
      soundSource.start(0, 0 / 1000);
      soundSource.connect(gainNode);

      var destination = context.createMediaStreamDestination();
      soundSource.connect(destination);
      createPeerConnection(destination.stream);
    });
  });
  reader.readAsArrayBuffer(this.files[0]);
};

function createPeerConnection(mp3Stream) {
  // you need to place 3rd party WebRTC code here
}
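For illustration, a sketch of what createPeerConnection might do with a bare RTCPeerConnection; the signaling exchange itself is application-specific and only hinted at here:

function createPeerConnection(mp3Stream) {
  var pc = new RTCPeerConnection({
    iceServers: [{ urls: 'stun:stun.l.google.com:19302' }]
  });
  mp3Stream.getTracks().forEach(function(track) {
    pc.addTrack(track, mp3Stream); // the file's audio is sent instead of the mic
  });
  // ...then create an offer and exchange SDP/ICE over your own signaling channel
  return pc;
}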
Updated at: 5:55 PM - Thursday, August 28, 2014
Here is how to get mp3 from server:
function HTTP_GET(url, callback) {
  var xhr = new XMLHttpRequest();
  xhr.open('GET', url, true);
  xhr.responseType = 'arraybuffer';
  xhr.send();

  xhr.onload = function(e) {
    if (xhr.status != 200) {
      alert("Unexpected status code " + xhr.status + " for " + url);
      return false;
    }
    callback(xhr.response); // return array-buffer
  };
}

// invoke the "HTTP_GET" method above
// to load the mp3 as an array-buffer
HTTP_GET('http://domain.com/file.mp3', function(array_buffer) {
  // import callback function that provides PCM audio data decoded as an audio buffer
  context.decodeAudioData(array_buffer, function(buffer) {
    // create the sound source
    var soundSource = context.createBufferSource();
    soundSource.buffer = buffer;
    soundSource.start(0, 0 / 1000);
    soundSource.connect(gainNode);

    var destination = context.createMediaStreamDestination();
    soundSource.connect(destination);
    createPeerConnection(destination.stream);
  });
});