I want to stream audio from a web page to a local server using WebRTC. That server will process the audio and output it back to the user immediately; I need real time.
My code is actually working. However, I am asking the user for the microphone with getUserMedia, and I don't need that microphone at all, which is quite annoying. What can I do to stream the audio without having to ask the user for the microphone?
Thank you.
Here is a minimal working example (highly inspired by https://github.com/aiortc/aiortc/blob/main/examples/server/client.js). Only the last part, with the comments, is interesting:
let webSocket = new WebSocket('wss://0.0.0.0:8080/ws');
const config = { sdpSemantics: 'unified-plan' }
const pc = new RTCPeerConnection(config);
webSocket.onmessage = (message) => {
const data = JSON.parse(message.data);
switch(data.type) {
case "answer":
pc.setRemoteDescription(data.answer)
break;
default:
break;
}
};
function negotiate() {
return pc.createOffer()
.then(function(offer) {
return pc.setLocalDescription(offer);
})
.then(function() {
return new Promise(function(resolve) {
if (pc.iceGatheringState === 'complete') {
resolve();
} else {
function checkState() {
if (pc.iceGatheringState === 'complete') {
pc.removeEventListener('icegatheringstatechange', checkState);
resolve();
}
}
pc.addEventListener('icegatheringstatechange', checkState);
}
});
})
.then(function() {
const offer = pc.localDescription;
webSocket.send(
JSON.stringify({
type: "offer",
offer: {
sdp: offer.sdp,
type: offer.type
}
})
);
})
}
// Preparing the oscillator
const audioCtx = new (window.AudioContext || window.webkitAudioContext)();
const oscillator = audioCtx.createOscillator();
const serverDestination = audioCtx.createMediaStreamDestination();
oscillator.connect(serverDestination);
// Asking for useless microphone
navigator.mediaDevices.getUserMedia({audio: true})
.then(() => {
return negotiate();
});
// Actual streaming
const stream = new MediaStream();
serverDestination.stream.getTracks().forEach((track) => {
pc.addTrack(track, stream);
})
// User pushes button to start the oscillator
function play() {
oscillator.start();
};
Just get rid of this:
// Asking for useless microphone
navigator.mediaDevices.getUserMedia({audio: true})
.then(() => {
return negotiate();
});
As you say, it's useless and not necessary. If you don't call getUserMedia(), the user won't be prompted to share their microphone, and WebRTC connections work fine without it; if you don't need user media, simply don't ask for it in your code.
I suspect the problem you're running into is that your AudioContext is suspended. This is due to autoplay policy: a freshly created AudioContext stays suspended until a user gesture. If you call audioCtx.resume() when the user clicks a button, you'll be up and running.
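For example, a minimal sketch of the tail end of the code above with getUserMedia removed (add the Web Audio tracks, then negotiate, and resume the AudioContext from the user's click):
// Add the Web Audio track first, then negotiate; no microphone prompt is
// needed for audio that comes from the Web Audio graph.
const stream = new MediaStream();
serverDestination.stream.getTracks().forEach((track) => {
    pc.addTrack(track, stream);
});
negotiate();
// User pushes button to start the oscillator
function play() {
    // The AudioContext starts out suspended under autoplay policy;
    // resuming it inside a user gesture lets audio reach the peer connection.
    audioCtx.resume().then(() => {
        oscillator.start();
    });
}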
I'm trying to establish a WebRTC connection between two browsers. I have a Node.js server for them to communicate through, which essentially just forwards the messages from one client to the other. I am running the server and two tabs all on my laptop, but I have not been able to make a connection. I have been able to send the offers and answers between the two tabs successfully, resulting in pc.signalingState = 'stable' in both tabs. I believe that once this is done, the RTCPeerConnection objects should start producing icecandidate events, but this is not happening and I do not know why. Here is my code (I've omitted the server code):
'use strict';
// This is mostly copy pasted from webrtc.org/getting-started/peer-connections.
import { io } from 'socket.io-client';
const configuration = {
'iceServers': [
{ 'urls': 'stun:stun4.l.google.com:19302' },
{ 'urls': 'stun:stunserver.stunprotocol.org:3478' },
]
}
// Returns a promise for an RTCDataChannel
function join() {
const socket = io('ws://localhost:8090');
const pc = new RTCPeerConnection(configuration);
socket.on('error', error => {
socket.close();
throw error;
});
pc.addEventListener('signalingstatechange', event => {
// Prints 'have-local-offer' then 'stable' in one tab,
// 'have-remote-offer' then 'stable' in the other.
console.log(pc.signalingState);
})
pc.addEventListener('icegatheringstatechange', event => {
console.log(pc.iceGatheringState); // This line is never reached.
})
// Listen for local ICE candidates on the local RTCPeerConnection
pc.addEventListener('icecandidate', event => {
if (event.candidate) {
console.log('Sending ICE candidate'); // This line is never reached.
socket.emit('icecandidate', event.candidate);
}
});
// Listen for remote ICE candidates and add them to the local RTCPeerConnection
socket.on('icecandidate', async candidate => {
try {
await pc.addIceCandidate(candidate);
} catch (e) {
console.error('Error adding received ice candidate', e);
}
});
// Listen for connectionstatechange on the local RTCPeerConnection
pc.addEventListener('connectionstatechange', event => {
if (pc.connectionState === 'connected') {
socket.close();
}
});
// When both browsers send this signal they will both receive the 'matched' signal,
// one with the payload true and the other with false.
socket.emit('join');
return new Promise((res, rej) => {
socket.on('matched', async first => {
if (first) {
// caller side
socket.on('answer', async answer => {
await pc.setRemoteDescription(new RTCSessionDescription(answer))
.catch(console.error);
});
const offer = await pc.createOffer();
await pc.setLocalDescription(offer)
.catch(console.error);
socket.emit('offer', offer);
// Listen for connectionstatechange on the local RTCPeerConnection
pc.addEventListener('connectionstatechange', event => {
if (pc.connectionState === 'connected') {
res(pc.createDataChannel('data'));
}
});
} else {
// recipient side
socket.on('offer', async offer => {
pc.setRemoteDescription(new RTCSessionDescription(offer))
.catch(console.error);
const answer = await pc.createAnswer();
await pc.setLocalDescription(answer)
.catch(console.error);
socket.emit('answer', answer);
});
pc.addEventListener('datachannel', event => {
res(event.channel);
});
}
});
});
}
join().then(dc => {
dc.addEventListener('open', event => {
dc.send('Hello');
});
dc.addEventListener('message', event => {
console.log(event.data);
});
});
The behavior is the same in both Firefox and Chrome. That behavior is, again, that the offers and answers are signalled successfully, but no ICE candidates are ever created. Does anyone know what I'm missing?
Okay, I found the problem. I have to create the RTCDataChannel before creating the offer. Here's a before and after comparison of the SDP offers:
# offer created before data channel:
{
type: 'offer',
sdp: 'v=0\r\n' +
'o=- 9150577729961293316 2 IN IP4 127.0.0.1\r\n' +
's=-\r\n' +
't=0 0\r\n' +
'a=extmap-allow-mixed\r\n' +
'a=msid-semantic: WMS\r\n'
}
# data channel created before offer:
{
type: 'offer',
sdp: 'v=0\r\n' +
'o=- 1578211649345353372 2 IN IP4 127.0.0.1\r\n' +
's=-\r\n' +
't=0 0\r\n' +
'a=group:BUNDLE 0\r\n' +
'a=extmap-allow-mixed\r\n' +
'a=msid-semantic: WMS\r\n' +
'm=application 9 UDP/DTLS/SCTP webrtc-datachannel\r\n' +
'c=IN IP4 0.0.0.0\r\n' +
'a=ice-ufrag:MZWR\r\n' +
'a=ice-pwd:LfptE6PDVughzmQBPoOtvaU8\r\n' +
'a=ice-options:trickle\r\n' +
'a=fingerprint:sha-256 1B:C4:38:9A:CD:7F:34:20:B8:8D:78:CA:4A:3F:81:AE:C5:55:B3:27:6A:BD:E5:49:5A:F9:07:AE:0C:F6:6F:C8\r\n' +
'a=setup:actpass\r\n' +
'a=mid:0\r\n' +
'a=sctp-port:5000\r\n' +
'a=max-message-size:262144\r\n'
}
In both cases the answer looked similar to the offer. You can see the offer is much longer and mentions webrtc-datachannel in the second case. And sure enough, I started getting icecandidate events and everything is working now.
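For reference, here is a minimal sketch of the fix on the caller side, reusing the names from the question (socket, pc, res, inside the async 'matched' handler); the channel is created before the offer, and the promise resolves with that same channel once connected:
// Caller side: create the data channel BEFORE createOffer, so the offer
// contains the m=application webrtc-datachannel section and ICE gathering starts.
const dc = pc.createDataChannel('data');
socket.on('answer', async answer => {
    await pc.setRemoteDescription(new RTCSessionDescription(answer))
        .catch(console.error);
});
const offer = await pc.createOffer();
await pc.setLocalDescription(offer).catch(console.error);
socket.emit('offer', offer);
pc.addEventListener('connectionstatechange', event => {
    if (pc.connectionState === 'connected') {
        res(dc); // resolve with the pre-created channel instead of creating one here
    }
});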
I've implemented the Agora SDK 3.0 for video calls.
Now I'm trying to get screen sharing to work, but I keep getting the error shown in the console while sharing a screen: Join failed: NO_AUTHORIZED.
Screen sharing code sample:
async shareScreen() {
this.shareClient = AgoraRTC.createClient({
mode: 'rtc',
codec: 'vp8'
})
this.shareClient.init('xxxxxxxxxxxxxx', () => {
this.shareClient.join('same token video call started with', 'same room name of current outgoing video call', null, (uid) => {
const streamSpec = {
streamID: uid,
audio: false,
video: false,
screen: true
}
if (isFirefox()) {
streamSpec.mediaSource = 'window';
} else if (!isCompatibleChrome()) {
streamSpec.extensionId = 'minllpmhdgpndnkomcoccfekfegnlikg';
}
this.shareScreenStream = AgoraRTC.createStream(streamSpec);
// Initialize the stream.
this.shareScreenStream.init(() => {
// Play the stream.
this.shareScreenStream.play('renderer');
// Publish the stream.
this.shareClient.publish(this.shareScreenStream);
}, function(err) {
console.log(err);
});
}, function(err) {
console.log(err);
})
});
},
The screen-sharing client should use a unique token based on the UID and channel name, not the one the main user is using.
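For example (a sketch, not a drop-in: channelName, screenUid and fetchScreenShareToken stand for whatever your token server and app actually provide), generate a second token for a dedicated screen-share UID and join with that:
// Join the same channel, but with a token generated for the screen-share UID.
fetchScreenShareToken(channelName, screenUid).then((screenToken) => {
    this.shareClient.init('xxxxxxxxxxxxxx', () => {
        this.shareClient.join(screenToken, channelName, screenUid, (uid) => {
            // create, init and publish the screen stream exactly as before
        }, (err) => console.log(err));
    });
});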
I'm really new to this, but I already know (from searching other projects on the Internet) that the Mi Bands have an authentication process.
The thing is, I have tried writing both with and without response to the only service UUID I got through the connection, and it always says the band does not have that service UUID.
I am using the react-native-ble-plx library.
As can be seen below, I use the device's service UUID, since I cannot get any other service, but it always says that it does not exist.
search(){
this.manager = new BleManager();
this.manager.startDeviceScan(null, null, (error, device) => {
if (error) {
console.log(error.message);
return;
}
if (device.name == 'Mi Band 3') {
this.manager.stopDeviceScan();
this.device = device;
this.connect();
}
});
}
connect() {
console.log("CONNECTING...");
this.device.connect()
.then(async (device) => {
console.log("CONNECTED!!!");
console.log("DEVICE CONNECTED:\n");
console.log(device);
this.auth(device);
// return this.manager.discoverAllServicesAndCharacteristicsForDevice(device.id)
})
// .then((device) => {
// console.log(device);
// this.send(device);
// })
// .catch((error) => {
// console.log("ERROR: ");
// console.log(error);
// });
}
async auth(device) {
console.log("DEVICE: \n");console.log(this.device);
console.log("DEVICE'S SERVICE UUID: \n" +this.device.serviceUUIDs[0]);
console.log("TRYING");
this.manager.writeCharacteristicWithoutResponseForDevice('D7:2D:F8:F2:24:3F', '0000fee0-0000-1000-8000-00805f9b34fb', '00000009-0000-3512-2118-0009af100700', 0x01 + 0x00 + new Buffer("OLA MUNDO"))
.then((device) => {
console.log("STUFF GOING ON:\n");
console.log(device);
})
.catch((error) => {
throw error;
});
}
I really need help, so thanks in advance.
If there is something I need to describe in more detail, please just tell me.
Directly after getting connected, you must first discover the services and characteristics. After that you can start the authentication part. However, your authentication part is totally wrong. Do a bit of Googling to find out how to do it properly...
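For example, a minimal sketch of that flow with react-native-ble-plx (the Mi Band authentication handshake itself is omitted here, since it is device specific):
this.device.connect()
    .then((device) => device.discoverAllServicesAndCharacteristics())
    .then(async (device) => {
        // Services and characteristics are only usable after discovery.
        const services = await device.services();
        services.forEach((s) => console.log(s.uuid));
        // Only now start the Mi Band authentication handshake (write to the
        // auth characteristic, listen for its notifications, and so on).
    })
    .catch((error) => console.log(error.message));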
I would like to connect 2 devices with WebRTC on localhost. The devices have no internet access; they are connected to the same local wifi.
I am trying this in a React Native app.
In this offline context, do I need to trickle ICE candidates and call addIceCandidate? If I understand correctly, ICE candidates are for the iceServers. But in my case, iceServers is empty (because I'm offline only, connected on the same local wifi):
const configuration = { iceServers: [{ urls: [] }] };
So currently I exchange the offer and answer, but after setRemoteDescription of the answer, the connectionState stays on "checking".
You can see my React component:
constructor(props) {
super(props);
this.pc = new RTCPeerConnection(configuration);
}
state = initialState;
componentDidMount() {
const { pc } = this;
if (pc) {
this.setState({
peerCreated: true
});
}
this.setConnectionState();
pc.oniceconnectionstatechange = () => this.setConnectionState();
pc.onaddstream = ({ stream }) => {
if (stream) {
this.setState({
receiverVideoURL: stream.toURL()
});
}
};
pc.onnegotiationneeded = () => {
if (this.state.initiator) {
this.createOffer();
}
};
pc.onicecandidate = ({ candidate }) => {
if (candidate === null) {
const { offer } = this.state;
const field = !offer ? 'offer' : 'data';
setTimeout(() => {
alert('setTimeout started');
this.setState({
[field]: JSON.stringify(pc.localDescription)
});
}, 2000);
}
};
}
@autobind
setConnectionState() {
this.setState({
connectionState: this.pc.iceConnectionState
});
}
getUserMedia() {
MediaStreamTrack.getSources(() => {
getUserMedia(
{
audio: false,
video: true
},
this.getUserMediaSuccess,
this.getUserMediaError
);
});
}
@autobind
async getUserMediaSuccess(stream) {
const { pc } = this;
pc.addStream(stream);
await this.setState({ videoURL: stream.toURL() });
if (this.state.initiator) {
return this.createOffer();
}
return this.createAnswer();
}
getUserMediaError(error) {
console.log(error);
}
@autobind
logError(error) {
const errorArray = [...this.state.error, error];
return this.setState({
error: errorArray
});
}
/**
* Create offer
*
* @memberof HomeScreen
*/
@autobind
createOffer() {
const { pc } = this;
pc.createOffer()
.then(offer => pc.setLocalDescription(offer))
.then(() => {
this.setState({
offerCreated: true
});
})
.catch(this.logError);
}
/**
* Create answer
*
* @memberof HomeScreen
*/
@autobind
async createAnswer() {
const { pc } = this;
const { data } = this.state;
if (data) {
const sd = new RTCSessionDescription(JSON.parse(data));
await this.setState({
offerImported: true
});
pc.setRemoteDescription(sd)
.then(() => pc.createAnswer())
.then(answer => pc.setLocalDescription(answer))
.then(() => {
this.setState({
answerCreated: true
});
})
.catch(this.logError);
}
}
@autobind
receiveAnswer() {
const { pc } = this;
const { data } = this.state;
const sd = new RTCSessionDescription(JSON.parse(data));
return pc
.setRemoteDescription(sd)
.then(() => {
this.setState({
answerImported: true
});
})
.catch(this.logError);
}
/**
* Start communication
*
* @param {boolean} [initiator=true]
* @returns
* @memberof HomeScreen
*/
@autobind
async start(initiator = true) {
if (!initiator) {
await this.setState({
initiator: false
});
}
return this.getUserMedia();
}
Can anyone help me?
No iceServers is fine on a LAN, but peers must still exchange at least one candidate: their host candidate (based on their machine's LAN IP address).
Either:
Trickle candidates using onicecandidate -> signaling -> addIceCandidate as usual, or
Out-wait the ICE process (a few seconds) before exchanging pc.localDescription.
It looks like you're attempting the latter. This approach works because...
Trickle ICE is an optimization.
The signaling (trickling) of individual ICE candidates using onicecandidate is an optimization meant to speed up negotiation. Once setLocalDescription succeeds, the browser's internal ICE agent starts, inserting ICE candidates into the localDescription itself as they're discovered. Wait a few seconds before exchanging descriptions, and trickling isn't necessary at all: all ICE candidates will already be in the offer and answer you transmit.
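For example, a minimal sketch of that non-trickle approach (the same idea your onicecandidate handler implements):
// Resolve once every candidate has been gathered into localDescription.
function waitForIceGathering(pc) {
    return new Promise((resolve) => {
        if (pc.iceGatheringState === 'complete') return resolve();
        pc.addEventListener('icegatheringstatechange', function check() {
            if (pc.iceGatheringState === 'complete') {
                pc.removeEventListener('icegatheringstatechange', check);
                resolve();
            }
        });
    });
}
// Usage on either end, after setLocalDescription(...):
// await waitForIceGathering(pc);
// signal(JSON.stringify(pc.localDescription)); // now includes the host candidates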
Your code
From your onicecandidate code it looks like you're already attempting to gather the localDescription after ICE completion (and you've written it to work from both ends):
pc.onicecandidate = ({ candidate }) => {
if (!candidate) {
const { offer } = this.state;
const field = !offer ? 'offer' : 'data';
this.setState({
[field]: JSON.stringify(pc.localDescription)
});
}
};
On the offerer side, you've correctly commented out the equivalent code in createOffer:
pc.createOffer()
.then(offer => pc.setLocalDescription(offer))
.catch(this.logError);
// .then(() => {
// this.setState({
// offer: JSON.stringify(pc.localDescription)
// });
// });
But on the answerer side, you have not, and that's likely the problem:
createAnswer() {
const { pc } = this;
const { data } = this.state;
if (data) {
const sd = new RTCSessionDescription(JSON.parse(data));
pc.setRemoteDescription(sd)
.then(() => pc.createAnswer())
.then(answer => pc.setLocalDescription(answer))
.then(() => {
this.setState({
offer: JSON.stringify(pc.localDescription)
});
})
.catch(this.logError);
}
}
This means it sends an answer back immediately, before the answerer's ICE agent has had time to insert any candidates into the answer. This is probably why it fails.
On a side note: nothing appears to wait for getUserMedia to finish either, so answers likely won't contain any video, depending on the timing of your getUserMediaSuccess function, which would then fail to add any tracks or streams to the connection in time. But assuming you're just doing data channels, this should work with my recommended fixes.
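A sketch of an answerer side following the same pattern (don't emit the answer right away; let your onicecandidate handler publish pc.localDescription once gathering completes):
async createAnswer() {
    const { pc } = this;
    const { data } = this.state;
    if (!data) return;
    const sd = new RTCSessionDescription(JSON.parse(data));
    await pc.setRemoteDescription(sd);
    const answer = await pc.createAnswer();
    await pc.setLocalDescription(answer);
    // Send nothing here: when gathering finishes, onicecandidate fires with
    // candidate === null and publishes pc.localDescription, which by then
    // contains the answerer's host candidates.
}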
I had earlier posted some questions on this problem. At that time I had two separate programs for caller and receiver, and I was also using the old-fashioned callback API. Thanks to help from @jib on that post, I was able to understand the need for some fundamental changes. I rewrote the program to make it a single integrated page for both caller and receiver, and I have used the WebRTC promises API. My problem is that I am not getting remote video on either end. One part I understand but do not know the solution to: the receiver does not create SDP for video in the first place, only for audio. The caller does create SDP for video and audio, but on the receiver end there is no event generated for the remote stream.
I have checked, through console logs, that the core functions work. Offer SDP is created, sent out, received, answer SDP created, sent out, received, etc. Candidates get exchanged and added too. But the .onaddstream event handler is never triggered. Local video is shown but that is trivial.
I have spent a LOT of time on this. I simply need to get that exciting feeling of seeing remote video on both ends which has kept me going. ANY HELP WILL BE SINCERELY APPRECIATED.
<script>
$(document).ready(function () {
var iceCandidates = [], countIceCandidates=0;
var socket = io.connect();
socket.on('connect',function() { console.log("Socket connected"); });
var pc = new RTCPeerConnection({"iceServers":[{"url":"stun:stun.l.google.com:19302"}]});
//If remote video stream comes in, display it in DIV vid2
pc.onaddStream = function (event) {
stream = event.stream;
var video = $('#vid2');
video.attr('src', URL.createObjectURL(stream));
video.onloadedmetadata = function(e) { video.play(); }
}
//Display media in both Caller and Receiver
navigator.mediaDevices.getUserMedia({ audio: true, video: true })
.then(function(stream) {
var video = $('#vid1');
video.attr('src', URL.createObjectURL(stream));
video.onloadedmetadata = function(e) { video.play(); };
pc.addStream(stream);
})
.catch(function(err) { console.log(err);});
//INITIATE CALL
$('#call').click(function() {
pc.createOffer({ offerToReceiveVideo: true, offerToReceiveAudio: true })
.then(function(offer) {
localSessionDescription = new RTCSessionDescription(offer);
pc.setLocalDescription(localSessionDescription)
.then (function() { socket.emit('sdpOffer',localSessionDescription); })
.catch(function(err) { console.log("Error in setLocalDescription"); console.log(err); })
.catch(function(err) { console.log("Error in createOffer"); console.log(err); })
});
})
pc.onicecandidate = function (event) {
socket.emit('candidate',event.candidate);
};
socket.on('candidate',function (data) {
if (data != null) {
pc.addIceCandidate(new RTCIceCandidate(data))
.then(function() { console.log("peer candidate added");})
.catch(function(err) {console.log(err); console.log("Error during peer candidate addition");});
}
});
socket.on('disconnect',function() { alert("Disconnected"); });
function error(err) {
console.log("The following error occurred: " + err.name);
}
socket.on('sdpAnswer',function(data) {
sdpAnswer = new RTCSessionDescription(data.sdpAnswer);
pc.setRemoteDescription(sdpAnswer)
.then(function() { console.log("Answer SDP Set:"); console.log(sdpAnswer); })
.catch(function(err) { console.log("Error enountered when setting remote SDP Answer"); console.log(err)});
});
socket.on('sdpOffer', function(data) {
sdpOffer = new RTCSessionDescription(data.sdpOffer);
pc.setRemoteDescription(sdpOffer)
.then(function() { console.log("Remote SDP set in receiver");
pc.createAnswer()
.then(function(sdpAnswer) {
localSessionDescription = new RTCSessionDescription(sdpAnswer);
socket.emit('sdpAnswer',localSessionDescription);
pc.setLocalDescription(localSessionDescription)
.then(function(){
console.log("Local SDP Description set in receiver:");
})
.catch(function(err) { console.log("Error enountered when setting local SDP in receiver"); console.log(err)});
})
.catch(function(err) { console.log("Error enountered when creating answer SDP in receiver"); console.log(err)});
});
});
}); //End of document.ready function
</script>
ON THE SERVER SIDE (RELEVANT CODE ONLY). I have included here just in case there are any datatype related issues - object types, etc. getting changed when sent thru the server.
io.sockets.on('connection', function(socket) {
socket.on('sdpOffer', function(data) {
sdpOffer = data.sdp;
socket.broadcast.emit('sdpOffer',{"sdpOffer":data});
});
socket.on('sdpAnswer', function(data) {
sdpAnswer = data.sdp;
socket.broadcast.emit('sdpAnswer',{"sdpAnswer":data});
});
socket.on('candidate', function(data) {
socket.broadcast.emit('candidate',data);
});
});
Rename pc.onaddStream to pc.onaddstream. Event handler properties are all lowercase, so the camelCased version is never invoked by the browser.
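Applied to the handler from the question, that becomes:
pc.onaddstream = function (event) {
    var stream = event.stream;
    var video = $('#vid2');
    video.attr('src', URL.createObjectURL(stream));
    video.onloadedmetadata = function (e) { video.play(); };
};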