MediaStream Recording API: ondataavailable is not triggered - webrtc

I am trying to record media using the MediaRecorder API. Here is my code (just the relevant part). I expect to get a console log in saveChunks, but it appears that the ondataavailable event is never triggered. I am able to see the video in the video element.
recordedChunks = [];
navigator.mediaDevices.getUserMedia({ video: true, audio: true })
  .then(function(stream) {
    myVideoMedia = document.getElementById("vid1");
    myVideoMedia.srcObject = stream;
    myVideoMedia.onloadedmetadata = function(e) {
      myVideoMedia.play();
      mediaRecorder = new MediaRecorder(stream);
      mediaRecorder.ondataavailable = saveChunks;
      mediaRecorder.start();
      console.log(mediaRecorder);
    };
  });

function saveChunks(event) {
  console.log("Data recorded...");
  //...
}
The console log of mediaRecorder.state is 'recording'.
I did try passing a timeslice of 1000 to start(), and it's working now! If no timeslice is passed, the function is called only once, at the end of the recording.
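A minimal sketch of the working version: the timeslice argument makes the recorder emit a dataavailable event roughly every N milliseconds. The Blob assembly in onstop is an assumption about how the chunks would typically be consumed, not part of the original code:
mediaRecorder = new MediaRecorder(stream);
mediaRecorder.ondataavailable = function(event) {
  // Chunks can be empty, so only keep the ones with data
  if (event.data && event.data.size > 0) {
    recordedChunks.push(event.data);
  }
};
mediaRecorder.onstop = function() {
  // Assemble the chunks into a single Blob for playback or download
  var blob = new Blob(recordedChunks, { type: 'video/webm' });
  console.log('Recorded blob size:', blob.size);
};
mediaRecorder.start(1000); // emit a chunk every 1000 ms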

Related

WebRTC mobile cameras not switching

I'm trying to make a live stream app and I want the functionality of switching cameras in smartphones. I've followed this article from MDN but I couldn't get the expected results.
The following are the results I'm getting:
Chrome mobile results: the camera switches from front to back, but not back to front.
Firefox mobile results: the stream starts with the rear camera (which is not expected) and never switches to the front camera.
I'm using RTCMultiConnection and RecordRTC for this app.
Code:
function switchCamera(stream) {
  connection.replaceTrack(stream);
  video.pause();
  video.srcObject = stream;
  video.play();
}

var front = true;
$('#switch').on('click', function(e) {
  e.preventDefault();
  e.stopImmediatePropagation();
  front = !front;
  var constraints = { video: { facingMode: (front ? "user" : "environment") } };
  var internalRecorder = recorder.getInternalRecorder();
  // Stop the current video tracks before requesting the other camera
  connection.streamEvents.selectFirst({ local: true }).stream.getVideoTracks().forEach(function(track) {
    track.stop();
  });
  navigator.mediaDevices.getUserMedia(constraints).then(function(stream) {
    if (internalRecorder instanceof MultiStreamRecorder) {
      internalRecorder.resetVideoStreams(stream);
    }
    switchCamera(stream);
  });
});
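One thing worth checking: a bare facingMode value is only a hint that the browser may ignore, which can produce exactly this kind of one-way switching. Requesting the camera with an exact constraint makes getUserMedia fail loudly instead of silently keeping the wrong camera. A sketch, not tested against RTCMultiConnection:
var constraints = {
  video: { facingMode: { exact: (front ? "user" : "environment") } }
};
navigator.mediaDevices.getUserMedia(constraints)
  .then(switchCamera)
  .catch(function(err) {
    // An OverconstrainedError here means no camera on the device
    // matches the requested facingMode
    console.error(err.name, err.message);
  });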

How to add an audio stream to a canvas stream in WebRTC

I want to add an audio stream obtained via getUserMedia() to the canvas stream and send it to the remote peer...
So I looked it up, and found addTrack().
But addTrack() does not seem to work properly. I am running on Chrome.
var audioTracks;
navigator.getUserMedia({ "audio": true, "video": false }, function(stream) {
  audioTracks = stream.getAudioTracks()[0];
}, function(error) { console.log(error); });
.
.
.
var sharestream = canvas2.captureStream(25); // 25 FPS
peerConn.addTrack(audioTracks, sharestream);
peerConn.addStream(sharestream);
It's part of my code. What's wrong?
My full WebRTC source code works fine, but it does not work with addTrack().
Updated on 10-29-2018 to replace getAudioTracks with getTracks:
var canvasStream = canvas2d.captureStream(25); // parameter is optional

// get first audio track
// var audioTrack = audioStream.getAudioTracks()[0];
var audioTrack = audioStream.getTracks().filter(function(track) {
  return track.kind === 'audio';
})[0];

// append audio track into Canvas2D stream
canvasStream.addTrack(audioTrack);

// now canvasStream has both audio and video tracks
// peerConnection.addStream( canvasStream );
canvasStream.getTracks().forEach(function(track) {
  peerConnection.addTrack(track, canvasStream);
});

// create offer or answer descriptions
peerConnection.createOffer(hints).then(success).catch(failure);
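Note that audioStream above has to be acquired before the tracks are combined; the callback-style getUserMedia in the question can leave audioTracks undefined at the moment addTrack() runs. A minimal sketch of the full sequence using the promise-based API (canvas2d, peerConnection, success, and failure are assumed to exist as in the snippets above):
navigator.mediaDevices.getUserMedia({ audio: true, video: false })
  .then(function(audioStream) {
    var canvasStream = canvas2d.captureStream(25);
    // getUserMedia has resolved, so the audio track definitely exists here
    canvasStream.addTrack(audioStream.getAudioTracks()[0]);
    canvasStream.getTracks().forEach(function(track) {
      peerConnection.addTrack(track, canvasStream);
    });
    return peerConnection.createOffer();
  })
  .then(success)
  .catch(failure);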

RTCPeerConnection.iceConnectionState changed from checking to closed

Following the method here, I'm trying to answer an audio call initiated from a Chrome browser on an iPhone simulator (with React Native).
A summary of the event sequence:
1. received call signal
2. got local stream
3. sent join call signal
4. received remote description (offer)
5. created PeerConnection
6. added local stream
7. received candidate
8. added candidate
9. steps 7 and 8 repeated 15 times (that is, 16 times in total)
10. onnegotiationneeded triggered
11. signalingState changed to have-remote-offer
12. onaddstream triggered
13. the callback function of setRemoteDescription was triggered, created answer
14. signalingState changed to stable
15. iceConnectionState changed to checking
16. onicecandidate triggered for the first time
17. emitted the candidate from step 16
18. onicecandidate triggered for the second time; the candidate is null
19. iceConnectionState changed to closed
Steps 7, 8, and 9 may appear at different places after step 6 and before step 19.
I have been stuck on this problem for quite a while, and I don't even know what to debug at this point. What are the possible causes of the connection closing? I can post more logs if needed.
One observation is that the two RTCEvents corresponding to iceconnectionstatechange have the following property:
isTrusted: false
The target RTCPeerConnection has:
iceConnectionState: "closed"
iceGatheringState: "complete"
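Since the state changes are the symptom here, it may help to log every transition in one place. A generic sketch using the standard handler properties (peerConnection is the connection created in step 5):
peerConnection.onsignalingstatechange = function() {
  console.log('signalingState:', peerConnection.signalingState);
};
peerConnection.oniceconnectionstatechange = function() {
  console.log('iceConnectionState:', peerConnection.iceConnectionState);
};
peerConnection.onicegatheringstatechange = function() {
  console.log('iceGatheringState:', peerConnection.iceGatheringState);
};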
Here are my functions to handle remoteOffer and remoteCandidates:
WebRTCClass.prototype.onRemoteOffer = function(data) {
  if (this.active !== true) {
    return;
  }
  var peerConnection = this.getPeerConnection(data.from);
  console.log('onRemoteOffer', data, peerConnection.signalingState);
  if (peerConnection.iceConnectionState !== 'new') {
    return;
  }
  var onSuccess = (function(_this) {
    return function() {
      console.log("setRemoteDescription onSuccess function");
      _this.getLocalUserMedia((function(_this) {
        return function(onSuccess, stream) {
          peerConnection.addStream(_this.localStream);
          var onAnswer = (function(_this) {
            return function(answer) {
              var onLocalDescription = function() {
                return _this.transport.sendDescription({
                  to: data.from,
                  type: 'answer',
                  ts: peerConnection.createdAt,
                  description: {
                    sdp: answer.sdp,
                    type: answer.type
                  }
                });
              };
              return peerConnection.setLocalDescription(new RTCSessionDescription(answer), onLocalDescription, _this.onError);
            };
          })(_this);
          return peerConnection.createAnswer(onAnswer, _this.onError);
        };
      })(_this));
    };
  })(this);
  return peerConnection.setRemoteDescription(new RTCSessionDescription(data.description), onSuccess, console.warn);
};
WebRTCClass.prototype.onRemoteCandidate = function(data) {
  var peerConnection, ref;
  if (this.active !== true) {
    return;
  }
  if (data.to !== this.selfId) {
    return;
  }
  console.log('onRemoteCandidate', data);
  peerConnection = this.getPeerConnection(data.from);
  if ((ref = peerConnection.iceConnectionState) !== "closed" && ref !== "failed" && ref !== "disconnected" && ref !== "completed") {
    peerConnection.addIceCandidate(new RTCIceCandidate(data.candidate));
  }
};
I found that if I call the following two functions one after the other, it works:
peerConnection.setRemoteDescription(new RTCSessionDescription(data.description), onSuccess, console.warn);
(...definition of onAnswer ...)
peerConnection.createAnswer(onAnswer, this.onError);
My previous code called createAnswer within the onSuccess callback of setRemoteDescription. That worked for the react-native-webrtc demo, but not with Rocket.Chat. I still don't fully understand why, but my project can move on now.
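For reference, the same answer flow expressed with the standard promise-based API, which avoids the nested-callback ordering problem entirely (a sketch; the signaling send is left abstract):
peerConnection.setRemoteDescription(new RTCSessionDescription(data.description))
  .then(function() {
    return peerConnection.createAnswer();
  })
  .then(function(answer) {
    return peerConnection.setLocalDescription(answer);
  })
  .then(function() {
    // send peerConnection.localDescription to the caller via signaling
  })
  .catch(console.warn);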

currentTime() in video.js

The following is my code, which is not working. Any help would be appreciated.
var myPlayer;
videojs("example_video_1").ready(function() {
  myPlayer = this;
  if (myPlayer.currentTime() > 3) {
    alert("STARTED");
  }
});
The ready event only occurs once, when the video initially loads. At that point, the current time is likely 0.
console.log(myPlayer.currentTime()); // 0
To continue checking the time as it changes, you should be able to use the timeupdate event.
myPlayer = this;
myPlayer.on('timeupdate', function() {
  // ...
});
Note, though, that this event fires multiple times per second. So, to avoid spamming yourself with alerts, you'll probably want to keep track of whether the threshold has already been passed.
var threshold = 3;
var thresholdReached = false;
myPlayer = this;
myPlayer.on('timeupdate', function() {
  if (myPlayer.currentTime() >= threshold && !thresholdReached) {
    thresholdReached = true;
    alert('Started');
  }
});

Titanium HttpClient cache for Android

I need to cache my HTTPClient response on Android devices. The example given in the official documentation applies only to iPhone and iPad.
Ok, here is one approach to achieve this:
Within your HTTPClient's success handler, attach a timestamp to your response:
// Assuming you are working with JSON data
var response = JSON.parse(this.responseText);
response.timestamp = new Date();
// For the purpose of this example, we persist our response to properties
Ti.App.Properties.setObject('cachedResponse', response);
In your button's event listener, check how much time has passed:
button.addEventListener('click', function() {
  var cachedResponse = Ti.App.Properties.getObject('cachedResponse', { timestamp: false });
  if (cachedResponse.timestamp) {
    // The stored timestamp is a string, so revive it into a Date first
    if (getHoursDiff(new Date(cachedResponse.timestamp), new Date()) > 24) {
      // Last request older than 24 hours, reload data
    } else {
      // Last request was within 24 hours, use cached data
    }
  } else {
    // No data has been saved yet, load data
  }
});
This function calculates the time difference in hours:
// http://blogs.digitss.com/javascript/calculate-datetime-difference-simple-javascript-code-snippet/
function getHoursDiff(earlierDate, laterDate) {
  var nTotalDiff = laterDate.getTime() - earlierDate.getTime();
  var hours = Math.floor(nTotalDiff / 1000 / 60 / 60);
  return hours;
}
Note: this is not a complete example; you will have to optimize and adapt the code to your needs.
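For completeness, a minimal sketch of the request side that would feed the cache above (the URL is hypothetical; onload is Titanium's success callback):
var client = Ti.Network.createHTTPClient({
  onload: function(e) {
    // Success handler: stamp and persist the response as shown above
    var response = JSON.parse(this.responseText);
    response.timestamp = new Date();
    Ti.App.Properties.setObject('cachedResponse', response);
  },
  onerror: function(e) {
    Ti.API.error(e.error);
  },
  timeout: 5000
});
client.open('GET', 'https://example.com/api/data'); // hypothetical endpoint
client.send();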