Can't play generated WebAudio MediaStream - webrtc

I have a problem generating a MediaStream from an AudioContext with Web Audio and sending it to another peer with WebRTC. It turns out that when I generate the MediaStream and try to play it locally or remotely with an Audio HTML tag or JS, I don't hear anything, even though the stream itself is received fine.
This is my code snippet:
CreateTone = function () {
    var ringing = 'assets/tones/ringing.wav';
    // Download the tone as an ArrayBuffer and decode it into an AudioBuffer.
    var request = new XMLHttpRequest();
    request.open('GET', ringing, true);
    request.responseType = 'arraybuffer';
    request.onload = function () {
        context.decodeAudioData(request.response, function (buffer) {
            bufferAudio = buffer;
            SendTone();
        });
    };
    request.send();
};
SendTone = function () {
    var source = context.createBufferSource();
    source.buffer = bufferAudio;
    source.loop = true;
    source.connect(context.destination);
    // If I call source.start(0); here, it plays
    var remote = context.createMediaStreamDestination();
    source.connect(remote);
    var streamToSend = remote.stream;
    // If I try to play this stream with an Audio element, I can't hear anything.
    // When I send it with pc.addMediaStream(streamToSend) and it's received by
    // the other peer, it's still silent
};
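For reference, playing the generated stream locally would look something like this (a sketch; it assumes an Audio object or <audio> element is available):
var audio = new Audio();
audio.srcObject = streamToSend; // in older browsers: audio.src = URL.createObjectURL(streamToSend)
audio.play();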
Am I getting the MediaStream the wrong way? Thanks in advance

Related

Send image in attachments by URL in Circuit JS SDK

I'm using the Circuit JS SDK and want to send a message with an attached image. I found in the documentation that I should set item.attachments to a File[] object. But how can I do that if I only have the image URL (like https://abc.cde/fgh.png)?
To be able to post an image in a conversation, the image needs to be uploaded to Circuit, which is done internally in the addTextItem API as you already found out. And yes, this API takes an array of File objects.
You will need to download the image via XMLHttpRequest as a blob and then construct a File object from it.
const xhr = new XMLHttpRequest();
xhr.responseType = 'blob';
xhr.open('GET', <url of image>, true);
xhr.onreadystatechange = async () => {
    if (xhr.readyState == xhr.DONE) {
        // Wrap the downloaded blob in a File object and attach it.
        const file = new File([xhr.response], 'image.jpg', { lastModified: Date.now() });
        const item = await client.addTextItem(convId.value, {
            attachments: [file]
        });
    }
};
xhr.send();
Here is a jsbin https://output.jsbin.com/sumarub
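The same download can also be written with fetch (a sketch; sendImageByUrl is an illustrative name, and client/convId are the same objects as above):
async function sendImageByUrl(url) {
    // Download the image as a blob, wrap it in a File, then attach it.
    const blob = await (await fetch(url)).blob();
    const file = new File([blob], 'image.png', { type: blob.type, lastModified: Date.now() });
    return client.addTextItem(convId.value, { attachments: [file] });
}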

Can fetch() do responseType=document?

XHR's responseType='document' is awesome because it hands you back a DOM document that you can use querySelector, etc. on:
var xhr = new XMLHttpRequest();
xhr.open('GET', '/', true);
xhr.responseType = 'document';
xhr.onload = function(e) {
    var document = e.target.response;
    var h2headings = document.querySelectorAll('h2');
    // ...
};
Is this possible with the fetch method?
It's not natively supported in fetch, as the API is purely a network-layer API with no dependencies on being in a web browser (see discussion), but it's not too hard to pull off:
fetch('/').then(res => res.text())
    .then(text => new DOMParser().parseFromString(text, 'text/html'))
    .then(document => {
        const h2headings = document.querySelectorAll('h2');
        // ...
    });
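If you need this in several places, the pattern wraps naturally into a small helper (a sketch; fetchDocument is an illustrative name):
function fetchDocument(url) {
    return fetch(url)
        .then(res => res.text())
        .then(text => new DOMParser().parseFromString(text, 'text/html'));
}

fetchDocument('/').then(doc => {
    const h2headings = doc.querySelectorAll('h2');
    // ...
});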

webrtc and peerjs: how to play a stream without starting your own stream?

I am using peerjs
Media call
var getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
getUserMedia({video: true, audio: true}, function(stream) {
    var call = peer.call('another-peers-id', stream);
    call.on('stream', function(remoteStream) {
        // Show stream in some video/canvas element.
    });
}, function(err) {
    console.log('Failed to get local stream', err);
});
Answer
var getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
peer.on('call', function(call) {
    getUserMedia({video: true, audio: true}, function(stream) {
        call.answer(stream); // Answer the call with an A/V stream.
        call.on('stream', function(remoteStream) {
            // Show stream in some video/canvas element.
        });
    }, function(err) {
        console.log('Failed to get local stream', err);
    });
});
The problem is: to get the remoteStream, I need to show my own stream:
var call = peer.call('another-peers-id', stream);
How can I play someone else's stream without having to show my own stream?
This is NOT a bug; you just got confused about how the WebRTC API works. We first create a variable named getUserMedia which is mapped to the native WebRTC method provided by the browser (for example, if the browser is Chrome then navigator.getUserMedia is assigned to this variable and the rest are null; similarly, if the browser is Firefox then the native method navigator.mozGetUserMedia is assigned and the rest are null, and so on).
Once the appropriate native method is assigned to the variable getUserMedia, we call it with the appropriate arguments. The first argument is the type of call we want to create (i.e. the media constraints), e.g. audio only, audio + video, and so on. The second argument is a callback for the success case (i.e. when the browser was able to access and connect to the local mic and/or camera). An example of this callback function would be,
function(stream) {
    var video = document.querySelector('video');
    video.srcObject = stream;
    video.onloadedmetadata = function(e) {
        // Do something with the video here.
    };
}
There is a third argument for the getUserMedia method which is optional and missing in your example code; this argument is a callback that kicks in if getUserMedia fails. There can be many reasons for failure, e.g. the user disallowed the browser from accessing the local mic or camera. Using this callback you can inform the user about call failure issues.
Here is the full example code (Ref. https://developer.mozilla.org/en-US/docs/Web/API/Navigator/getUserMedia):
var getUserMedia = navigator.getUserMedia ||
    navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia;

if (getUserMedia) {
    getUserMedia({ audio: true, video: { width: 1280, height: 720 } },
        function(stream) {
            var video = document.querySelector('video');
            video.srcObject = stream;
            video.onloadedmetadata = function(e) {
                video.play();
            };
        },
        function(err) {
            console.log("The following error occurred: " + err.name);
        }
    );
} else {
    console.log("getUserMedia not supported");
}
If you do not want to send your own audio and/or video, simply set it to false in the media constraints.
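For example, to answer a call with audio but no video, the same shim can be called with video disabled (a sketch based on the answer code above):
peer.on('call', function(call) {
    getUserMedia({video: false, audio: true}, function(stream) {
        call.answer(stream); // Answer with audio only; no camera stream is shown.
        call.on('stream', function(remoteStream) {
            // Play the remote stream in some video/audio element.
        });
    }, function(err) {
        console.log('Failed to get local stream', err);
    });
});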
Hope this helps.

ICE failed, PeerJS call started but no video/audio through

I am trying to get a simple video chat working with PeerJS. I want to send audio between Firefox on a PC and Firefox on Android. I can get the call running (call.on) and I can view local video, but for some reason the media just doesn't come through to the other user. Currently I am getting the error:
ICE failed, see about:webrtc for more details
I have a server which in its simple version is as such:
var ip = require('ip');
var PeerServer = require('peer').PeerServer;
var port = 9000;
var server = new PeerServer({port: port, allow_discovery: true});
Then I have two clients, one for the pc that makes the call:
var SERVER_IP = window.location.hostname;
var SERVER_PORT = 9000;
var localStream = "";
var peerID = "pc";
var peerConnectionID = "and";
var remoteVideo = document.querySelector('#rremote-video');
var localVideo = document.querySelector('#llocal-video');
var peer = new Peer(peerID, {host: SERVER_IP, port: SERVER_PORT});
var conn = peer.connect(peerConnectionID);
var getUserMedia = navigator.mediaDevices.getUserMedia({ video: true, audio: true })
    .then(stream => localVideo.srcObject = stream)
    .then(stream => localStream = stream)
    .catch(e => console.log(e.name + ": " + e.message));

waitForElement();

function waitForElement() {
    if (localStream != "") {
        conn.on('open', function() {
            conn.send('hi from PC!');
        });
        peer.on('connection', function(conn) {
            conn.on('data', function(data) {
                console.log(data);
            });
        });
        console.log("we have a stream: " + localStream);
        var call = peer.call(peerConnectionID, localStream);
        console.log("Calling " + peerConnectionID);
        call.on('stream', function(remotestream) {
            console.log("Call on.");
            remoteVideo.srcObject = remotestream;
        });
    } else {
        setTimeout(function() {
            waitForElement();
        }, 750);
    }
}
And the one that answers the call is:
var SERVER_IP = window.location.hostname;
var SERVER_PORT = 9000;
var localStream = "";
var peerID = "and";
var peerConnectionID = "pc";
var remoteVideo = document.querySelector('#rremote-video');
var localVideo = document.querySelector('#llocal-video');
var remoteAudio = document.querySelector('#remote-audio');
var localAudio = document.querySelector('#local-audio');
var peer = new Peer(peerID, {host: SERVER_IP, port: SERVER_PORT});
var conn = peer.connect(peerConnectionID);
var getUserMedia = navigator.mediaDevices.getUserMedia({ video: true, audio: true })
    .then(stream => localAudio.srcObject = stream)
    .then(stream => localVideo.srcObject = stream)
    .then(stream => localStream = stream)
    .catch(e => console.log(e.name + ": " + e.message));

waitForElement();

function waitForElement() {
    if (localStream != "") {
        conn.on('open', function() {
            conn.send('hi from android!');
        });
        peer.on('connection', function(conn) {
            conn.on('data', function(data) {
                console.log(data);
            });
        });
        peer.on('call', function(call) {
            console.log("Picking up call.");
            call.answer(localStream);
            call.on('stream', function(remotestream) {
                console.log("Call on.");
                remoteVideo.srcObject = remotestream;
            });
        });
    } else {
        setTimeout(function() {
            waitForElement();
        }, 750);
    }
}
I think it is some little tweak that I'm getting wrong; I have mainly followed the instructions on the PeerJS website: http://peerjs.com/ Please, if anyone can see something that needs to change, any help is welcome!
Are you using https? Browsers no longer allow media capture on insecure origins, so calls involving non-local machines will fail without it.
To test this out, run both sets of code on your local machine. If that connection works, it means your code is OK.
To do a remote connection you will unfortunately need https. This means you will also need your own PeerJS server (to run as https).
The other option is to use port forwarding to make one of the machines think it is talking to localhost.
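For reference, running your own PeerJS server over https (as mentioned above) would look something like this (a sketch; the ssl option and the certificate paths are assumptions that depend on your peer server version):
var fs = require('fs');
var PeerServer = require('peer').PeerServer;
var server = new PeerServer({
    port: 9000,
    ssl: {
        // Hypothetical paths; point these at your own key/certificate files.
        key: fs.readFileSync('/path/to/ssl/key.pem'),
        cert: fs.readFileSync('/path/to/ssl/cert.pem')
    }
});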
It sounds like your ICE candidates cannot communicate with each other. You will have to use a STUN server and, if it still doesn't work, you will need a TURN server.
From the PeerJS documentation:
var peer = new Peer({
    config: {'iceServers': [
        { url: 'stun:stun.l.google.com:19302' },
        { url: 'turn:homeo@turn.bistri.com:80', credential: 'homeo' }
    ]} /* Sample servers, please use appropriate ones */
});
This link describes a method to deploy your own TURN server.
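Note that the url field in that sample is the legacy spelling; current browsers expect urls (and a separate username for TURN). A modern equivalent would be (a sketch; the TURN host and credentials are placeholders):
var peer = new Peer({
    config: {'iceServers': [
        { urls: 'stun:stun.l.google.com:19302' },
        // Hypothetical TURN server; substitute your own host and credentials.
        { urls: 'turn:turn.example.com:3478', username: 'user', credential: 'pass' }
    ]}
});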

Slice ArrayBuffer with Safari and play it

I need to load an mp3, slice it, and play it using Web Audio. On Firefox, slicing the mp3 anywhere and then decoding works fine, but on Safari decoding fails with a null value error. Is there a trick or a way to slice the ArrayBuffer on Safari?
player.loadMp3 = function(url, callback) {
    var request = new XMLHttpRequest();
    request.open('GET', url, true);
    request.responseType = 'arraybuffer';
    request.onload = function() {
        var mp3slice = request.response.slice(1000, 100000);
        player.context.decodeAudioData(mp3slice); // context is webkitAudioContext on safari
        callback();
    };
    request.send();
};
I need to create an mp3 player with some special features:
Time-shift the music (like http://codepen.io/eranshapira/pen/mnuoB)
Remove the gap between tracks (I got this working by slicing ArrayBuffers and joining them with a Blob, but on Safari/iPad it doesn't work).
Cross-platform (iPad and Android. I'm using Apache Cordova for that).
Solution
player.loadMp3 = function(url, callback) {
    console.log("loading " + url);
    var request = new XMLHttpRequest();
    request.open('GET', url, true);
    request.responseType = 'arraybuffer';
    request.onload = function() {
        console.log("loaded");
        console.log("decoding...");
        player.context.decodeAudioData(request.response, function(buffer) {
            console.log("decoded");
            player.buffer = player.joinAudioBuffers(player.buffer, buffer, 2000000);
            player.duration += player.buffer.duration;
            player.time = minsSecs(player.buffer.duration);
            console.log("concatenated");
            callback();
        }, function() {
            // Error callback for decodeAudioData (originally misplaced
            // after the onload handler, where it never ran).
            alert("decode failure");
        });
    };
    request.send();
};
The code you've shown shouldn't work on any browser. For one thing, you need to provide a callback function to decodeAudioData. You also need to slice the decoded data after decoding it, not the raw mp3-encoded data before decoding it; some browsers might be able to decode a slice of the mp3 file, but it's not to be expected. Something like this:
player.loadMp3 = function(url, callback) {
    var request = new XMLHttpRequest();
    request.open('GET', url, true);
    request.responseType = 'arraybuffer';
    request.onload = function() {
        // Decode the whole file first, then slice the decoded PCM data.
        player.context.decodeAudioData(request.response, function(decoded) {
            // AudioBuffer has no built-in slice(); see the helper sketched below.
            var pcmSlice = sliceAudioBuffer(player.context, decoded, 1000, 100000);
            callback(pcmSlice);
        });
    };
    request.send();
};
I haven't tested this code.
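Since AudioBuffer has no built-in slice() method, the sliceAudioBuffer helper used above would have to copy the channel data by hand; a minimal sketch (frame indices, not bytes, and equally untested):
function sliceAudioBuffer(context, buffer, startFrame, endFrame) {
    var frames = endFrame - startFrame;
    // Allocate a new buffer with the same channel count and sample rate.
    var sliced = context.createBuffer(buffer.numberOfChannels, frames, buffer.sampleRate);
    for (var ch = 0; ch < buffer.numberOfChannels; ch++) {
        // Copy the requested frame range of each channel into the new buffer.
        sliced.getChannelData(ch).set(buffer.getChannelData(ch).subarray(startFrame, endFrame));
    }
    return sliced;
}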