I can't set the video size with video.js
I made a video node:
videoNode = $('<video>');
videoNode.attr('id', 'simulationVideo');
$('#' + _config.videoTargetId).append(videoNode);
I set up the player:
//create new player
myPlayer = _V_("simulationVideo", {
'id': _config.id,
'preload': "auto",
'controls': false,
'autoplay': _config.autoplay
}, function () {
//set listeners for our controlbar
//if (_config.controls) {
controls = new Doczero.VideoPlayer.Controlbar();
//$('#' + _config.controlbarTargetId).append(controls.html());
//put listeners in place
this.addEvent('timeupdate', controls.onTimeUpdate);
this.addEvent('progress', controls.onBufferUpdate);
this.addEvent('play', controls.onPlay);
this.addEvent('pause', controls.onPause);
//listen to controlls
controls.CustomEvents.AddListener('onPressPlay', _self);
//}
});
I play a track:
//set sources to play
this.play = function (srcArray, newCallback) {
//set source
myPlayer.src(srcArray);
myPlayer.play();
//remove old callback
if (callback != null) {
myPlayer.removeEvent('ended', callback);
}
//add new ended callback and remember it so it can be removed next time
myPlayer.addEvent('ended', newCallback);
callback = newCallback;
}
Everything seems to work nicely, until the video pops up three times bigger than it should be (it stays at its original size).
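A minimal sketch of one thing worth trying, assuming the video.js 3.x options and <video> attributes behave the usual way (640×360 are placeholder values): give the element explicit dimensions before or while creating the player.
// Hypothetical sizes; replace with the dimensions the layout actually needs.
videoNode.attr('width', 640);
videoNode.attr('height', 360);
// Width/height can presumably also be passed as player options:
myPlayer = _V_("simulationVideo", {
    'width': 640,
    'height': 360,
    'preload': "auto",
    'controls': false,
    'autoplay': _config.autoplay
});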
I am trying to use the Web Audio API to play sound in my React application.
It's currently playing sound in all browsers except Safari v12.1.
I am aware Safari has restrictions on autoplay and requires user interaction to play sound, so I have a play button which calls the _play() function:
_play = (url, index) => {
this._getData(url);
this.source.start(index)
}
It's calling the _getData() function which looks like this:
_getData(url) {
this.source = this.audioContext.createBufferSource();
var request = new XMLHttpRequest();
request.open('GET', url, true);
request.responseType = 'arraybuffer';
request.onload = () => {
var audioData = request.response;
console.log(this.audioContext)
this.audioContext.decodeAudioData(audioData, buffer => {
this.source.buffer = buffer;
this.source.connect(this.audioContext.destination);
},
function(e){ console.log("Error with decoding audio data" + e.err); });
}
request.send();
}
this.audioContext is created in the component constructor using:
this.audioContext = new (window.AudioContext || window.webkitAudioContext)();
The console.log(this.audioContext) inside the request.onload outputs this before pressing play:
...and this after pressing play:
But no sound is playing (in Safari).
What am I doing wrong?
I think the problem you ran into is that Safari does not allow you to modify the buffer anymore once you have called start().
The following page, for example, plays a second of noise in Safari when you press the play button.
<!DOCTYPE html>
<html>
<body>
<button id="play-button">play</button>
<script>
document
.getElementById('play-button')
.addEventListener('click', () => {
const audioContext = new AudioContext();
const audioBufferSourceNode = audioContext.createBufferSource();
const sampleRate = audioContext.sampleRate;
const audioBuffer = audioContext.createBuffer(1, sampleRate, sampleRate);
const channelData = audioBuffer.getChannelData(0);
for (let i = 0; i < sampleRate; i += 1) {
channelData[i] = (Math.random() * 2) - 1;
}
audioBufferSourceNode.buffer = audioBuffer;
audioBufferSourceNode.connect(audioContext.destination);
audioBufferSourceNode.start(audioContext.currentTime);
});
</script>
</body>
</html>
But it no longer works if you modify it slightly: when the audioBufferSourceNode is started before the buffer is assigned, there will be no output anymore.
audioBufferSourceNode.connect(audioContext.destination);
audioBufferSourceNode.start(audioContext.currentTime);
audioBufferSourceNode.buffer = audioBuffer;
I guess you can get your code working by waiting for the HTTP response and the audio decoding before you start the source. Make sure to execute this.source.buffer = buffer before you execute this.source.start(index).
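A rough sketch of how that could look in the component from the question (keeping the XMLHttpRequest approach; the promise wrapper and error handling are my additions, not the original code):
_getData(url) {
    return new Promise((resolve, reject) => {
        const request = new XMLHttpRequest();
        request.open('GET', url, true);
        request.responseType = 'arraybuffer';
        request.onload = () => {
            // The callback form of decodeAudioData also works in Safari.
            this.audioContext.decodeAudioData(request.response, resolve, reject);
        };
        request.onerror = reject;
        request.send();
    }).then(buffer => {
        this.source = this.audioContext.createBufferSource();
        this.source.buffer = buffer;                       // assign the buffer first...
        this.source.connect(this.audioContext.destination);
    });
}

_play = (url, index) => {
    this._getData(url).then(() => {
        this.source.start(index);                          // ...and only then start the source
    });
}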
I hope this helps.
I have run into difficulties.
var onaddstream = function(event){
    var video = $("#chat_dialogForOne video[name='remote']")[0];
    var remoteStream = event.stream;
    video.srcObject = remoteStream;
    video.onloadedmetadata = function(e) {
        video.play();
    };
}
$("#chat_dialogForOne button[name='openVideo']").on("click",function(){
    $(this).toggleClass("active");
    $(this).data("use",$(this).data("use") ? false : true);
    if($(this).data("use")){ // start the video/audio chat
        rtc.openVideoAudioLocal(function(localStream){ // create the local video stream and bind it to the element
            var video = $("#chat_dialogForOne video[name='video']")[0]; // get the element that displays the video
            video.srcObject=localStream;
            video.onloadedmetadata = function(e) {
                video.play();
            };
            rtc.openVideoAudioLocal(function(remoteStream){
                rtc.sendAddStream(remoteStream);
            },true,true);
        },true,false); // only start video, not audio, so I don't hear my own voice echoed back
        $(this).find(" > span").html("结束视频"); // button label: "end video"
        $("#chat_dialogForOne button[name='openAudio']").hide();
    }else{ // close the video/audio chat
        // closeRemoteChannelStream([oneWebRtc]);
        // closeLocalStream();
        // resetVideoButton();
    }
});
Image 2: the local video is displayed successfully.
Image 1: the remote video fails to display.
In onaddstream I receive the remote stream, but it is never shown in the video element.
I need your help.
Sorry, it was a mistake of mine. A very small error caused a new PeerConnection object to be recreated after receiving the offer, so although the remote side received the video stream object, the channel it belonged to had already been replaced.
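For anyone hitting the same symptom, a minimal sketch of the idea behind the fix, using placeholder names (pc, sendToSignalingServer) rather than the original code: create the RTCPeerConnection once and reuse it when the offer arrives, instead of constructing a new one inside the offer handler.
var pc = null;

function getPeerConnection() {
    // Reuse the existing connection; only create it the first time.
    if (!pc) {
        pc = new RTCPeerConnection({ iceServers: [{ urls: "stun:stun.l.google.com:19302" }] });
        pc.onaddstream = onaddstream; // the handler from the question above
    }
    return pc;
}

function handleOffer(offer) {
    var connection = getPeerConnection(); // do NOT create a new RTCPeerConnection here
    connection.setRemoteDescription(new RTCSessionDescription(offer))
        .then(function () { return connection.createAnswer(); })
        .then(function (answer) { return connection.setLocalDescription(answer); })
        .then(function () { sendToSignalingServer(connection.localDescription); }); // placeholder signaling call
}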
I am trying to implement a screen-share function in a WebRTC video conference. Following a suggestion, I am now using muaz-khan's solution at https://www.webrtc-experiment.com/getScreenId/ . I can easily capture the application images of one peer and replace the video stream with the captured stream. But this is a video conferencing experiment, so two browsers need to conference with each other. For example, browser 1 has video stream A (local video) and video stream B (remote video); browser 2 has video stream B (local video) and video stream A (remote video). So when I am in browser 1 and try to share the screen, the screen-share stream should replace the local video in browser 1 and the remote video in browser 2.
Right now, I can only make the screen share replace the local video in browser 1; browser 2 doesn't see any change in its remote video (which is the local video of browser 1). I don't know how to trigger the change in browser 2 as well. Do I need to signal the screen-share stream to the server and change the remote stream accordingly?
Here is my code in JavaScript:
$(function() {
var brokerController, ws, webRTC, localid;
// ws = new XSockets.WebSocket("wss://rtcplaygrouund.azurewebsites.net:443", ["connectionbroker"], {
ws = new XSockets.WebSocket("ws://localhost:4502", ["connectionbroker"], {
ctx: "152300ed-4d84-4e72-bc99-965052dc1e95"
});
var addRemoteVideo = function(peerId,mediaStream) {
var remoteVideo = document.createElement("video");
remoteVideo.setAttribute("autoplay", "true");
remoteVideo.setAttribute("rel",peerId);
attachMediaStream(remoteVideo, mediaStream);
remoteVideo.setAttribute("class", "col-md-3");
remoteVideo.setAttribute("height", $( document ).height() * 0.3);
remoteVideo.setAttribute("id", 'remoteVideo');
$("#videoscreen").append(remoteVideo);
};
var onConnectionLost = function (remotePeer) {
console.log("onconnectionlost");
var peerId = remotePeer.PeerId;
var videoToRemove = $("video[rel='" + peerId + "']");
videoToRemove.remove();
};
var oncConnectionCreated = function() {
console.log("oncconnectioncreated", arguments);
}
var onGetUerMedia = function(stream) {
console.log("Successfully got some userMedia , hopefully a goat will appear..");
webRTC.connectToContext(); // connect to the current context?
};
var onRemoteStream = function (remotePeer) {
addRemoteVideo(remotePeer.PeerId, remotePeer.stream);
console.log("Opps, we got a remote stream. lets see if its a goat..");
};
var onLocalStream = function(mediaStream) {
console.log("Got a localStream", mediaStream.id);
localid = mediaStream.id;
console.log("check this id: meadiastram id ", mediaStream.id);
var video = document.createElement("video");
video.setAttribute("height", "100%");
video.setAttribute("autoplay", "true");
video.setAttribute("id", "localvideo");
video.setAttribute("name", mediaStream.id);
attachMediaStream(video, mediaStream);
$("#videoscreen").append(video);
$('#share').click(function() {
getScreenId(function (error, sourceId, screen_constraints) {
navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
navigator.getUserMedia(screen_constraints, function (stream) {
$('#localvideo').attr('src', URL.createObjectURL(stream));
}, function (error) {
console.error(error);
});
});
});
};
var onContextCreated = function(ctx) {
console.log("RTC object created, and a context is created - ", ctx);
webRTC.getUserMedia(webRTC.userMediaConstraints.hd(true), onGetUerMedia, onError);
};
var onOpen = function() {
console.log("Connected to the brokerController - 'connectionBroker'");
webRTC = new XSockets.WebRTC(this);
webRTC.onlocalstream = onLocalStream;
webRTC.oncontextcreated = onContextCreated;
webRTC.onconnectioncreated = oncConnectionCreated;
webRTC.onconnectionlost = onConnectionLost;
webRTC.onremotestream = onRemoteStream;
};
var onConnected = function() {
console.log("connection to the 'broker' server is established");
console.log("Try get the broker controller form server..");
brokerController = ws.controller("connectionbroker");
brokerController.onopen = onOpen;
};
ws.onconnected = onConnected;
});
I am using XSockets as the server, and the code for clicking share and replacing the local stream with the screen-share stream is as simple as this:
$('#share').click(function() {
    getScreenId(function (error, sourceId, screen_constraints) {
        navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
        navigator.getUserMedia(screen_constraints, function (stream) {
            $('#localvideo').attr('src', URL.createObjectURL(stream));
        }, function (error) {
            console.error(error);
        });
    });
});
Any help or suggestion would be appreciated.
Thanks for pointing out the other post, How to addTrack in MediaStream in WebRTC, but I don't think it is the same issue. I am also not sure how to renegotiate the remote connection in this case.
The XSockets.WebRTC.js file used for the WebRTC connection:
https://github.com/XSockets/XSockets.WebRTC/blob/master/src/js/XSockets.WebRTC.latest.js
How could I renegotiate the remote connection in this case?
I figured out a workaround for this question myself: do not replace the local stream with the screen-share stream. Instead, remove the old local stream from the local div and add the new screen-share stream to it. At the same time, send the old local stream's id over the data channel to the other peer so it can remove the old remote video as well.
The most important thing is to refresh the streams (renegotiation); the screen-share stream will then display on the remote peer.
Code:
$('#share').click(function() {
getScreenId(function (error, sourceId, screen_constraints) {
navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
navigator.getUserMedia(screen_constraints, function (stream) {
webRTC.removeStream(webRTC.getLocalStreams()[0]);
var id = $('#localvideo').attr('name');
$('#localvideo').remove();
brokerController.invoke('updateremotevideo', id);
webRTC.addLocalStream(stream);
webRTC.getRemotePeers().forEach(function (p) {
webRTC.refreshStreams(p);
});
}, function (error) {
console.error(error);
});
});
});
After receiving the command from the server to remove the old video stream:
brokerController.on('updateremotevideo', function(streamid){
$(document.getElementById(streamid)).remove();
});
This solution works for me. If you would rather replace the local video stream with the screen-share stream in place, you need to re-create the offer SDP and send it to the remote peer, which is more complicated.
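For reference, a rough sketch of that renegotiation step using the plain WebRTC API of that era rather than the XSockets wrapper (pc and signalingChannel are placeholders here, not part of the library):
// Swap the outgoing stream, then re-create and send the offer SDP to the other peer.
function shareScreen(pc, oldLocalStream, screenStream, signalingChannel) {
    pc.removeStream(oldLocalStream);          // stop sending the camera stream
    pc.addStream(screenStream);               // send the screen-share stream instead
    // Renegotiate: create a new offer and push its SDP through the signaling channel.
    pc.createOffer(function (offer) {
        pc.setLocalDescription(offer, function () {
            signalingChannel.send(JSON.stringify({ sdp: pc.localDescription }));
        }, console.error);
    }, console.error);
}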
getScreenId(function (error, sourceId, screen_constraints) {
navigator.getUserMedia = navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
navigator.getUserMedia(screen_constraints, function (stream) {
navigator.getUserMedia({audio: true}, function (audioStream) {
stream.addTrack(audioStream.getAudioTracks()[0]);
var mediaRecorder = new MediaStreamRecorder(stream);
mediaRecorder.mimeType = 'video/mp4'
mediaRecorder.stream = stream;
self.setState({recorder: mediaRecorder, startRecord: true, shareVideo: true, pauseRecord: false, resumeRecord: false, stopRecord: false, downloadRecord: false, updateRecord: false});
document.querySelector('video').src = URL.createObjectURL(stream);
var video = document.getElementById('screen-video')
if (video) {
video.src = URL.createObjectURL(stream);
video.width = 360;
video.height = 300;
}
}, function (error) {
alert(error);
});
}, function (error) {
alert(error);
});
});
I have 4 iframes and I want to get hold of each of them by its id.
I walk through my iframes with the Google Analytics tracking code below and store their ids in an array.
Then I create a YT.Player object for each one.
Problem: the onPlayerStateChange method never runs.
Here is my code:
<script type="text/javascript">
/*
YouTube Analytics
Code adapted from:
http://www.lunametrics.com/blog/2012/10/22/automatically-track-youtube-videos-events-google-analytics/
http://lunametrics.wpengine.netdna-cdn.com/js/lunametrics-youtube.js
Code adapted by Alex Mueller for ISITE Design http://isitedesign.com
*/
// enable cross-domain scripting in IE < 10 for the YouTube Data API
// https://github.com/jaubourg/ajaxHooks/blob/master/src/xdr.js
if(window.XDomainRequest){jQuery.ajaxTransport(function(e){if(e.crossDomain&&e.async){if(e.timeout){e.xdrTimeout=e.timeout;delete e.timeout}var t;return{send:function(n,r){function i(e,n,i,s){t.onload=t.onerror=t.ontimeout=jQuery.noop;t=undefined;r(e,n,i,s)}t=new XDomainRequest;t.onload=function(){i(200,"OK",{text:t.responseText},"Content-Type: "+t.contentType)};t.onerror=function(){i(404,"Not Found")};t.onprogress=jQuery.noop;t.ontimeout=function(){i(0,"timeout")};t.timeout=e.xdrTimeout||Number.MAX_VALUE;t.open(e.type,e.url);t.send(e.hasContent&&e.data||null)},abort:function(){if(t){t.onerror=jQuery.noop;t.abort()}}}}})}
// load the YouTube iframe API
var tag = document.createElement('script');
tag.src = "//www.youtube.com/iframe_api";
var firstScriptTag = document.getElementsByTagName('script')[0];
firstScriptTag.parentNode.insertBefore(tag, firstScriptTag);
// initialize our arrays to hold video and player information
var playerArray = [],
videoArray = [];
// safely pass the jQuery object as $
(function($) {
// enables tracking of all YouTube videos on the page
function trackYouTube() {
// iterate through every iframe on the page
$('iframe').each(function(i) {
// grab the video source and other properties
var baseUrlLength,
$iframe = $(this),
iframeSrc = $iframe.attr('src'),
isYouTubeVideo = false,
videoID,
url;
// if the video uses the http protocol
if (iframeSrc.substr(0,25) == "http://www.youtube.com/v/") {
baseUrlLength = 25;
isYouTubeVideo = true;
}
// otherwise if the video uses the https protocol
else if (iframeSrc.substr(0,26) == "https://www.youtube.com/v/") {
baseUrlLength = 26;
isYouTubeVideo = true;
}
// if we're dealing with a YouTube video, store its information in our arrays
if (isYouTubeVideo) {
// grab the videoID
videoID = iframeSrc.substr(baseUrlLength);
url = '//gdata.youtube.com/feeds/api/videos/' + videoID + '?v=2&alt=json';
// if the ID ends with extra characters...
if (videoID.indexOf('&') > -1) {
// ...remove the extra characters
videoID = videoID.substr(0, videoID.indexOf('&'));
}
// put an object in our array with the videoID...
videoArray[i] = {};
videoArray[i].id = videoID;
// put the videoID on the iframe as its id
$iframe.attr('id', videoID);
}
});
}
$(function() {
// initiate tracking on document ready
trackYouTube();
onYouTubeIframeAPIReady();
});
})(jQuery);
function onYouTubeIframeAPIReady() {
// insert YouTube Player objects into our playerArray
for (var i = 0; i < videoArray.length; i++) {
playerArray[i] = new YT.Player(videoArray[i].id, {
events: {
'onStateChange': onPlayerStateChange
}
});
}
}
// when the player changes states
function onPlayerStateChange(event) {
// if the video begins playing, send the event
if (event.data == YT.PlayerState.PLAYING) {
alert();
}
// if the video ends, send the event
if (event.data == YT.PlayerState.ENDED) {
alert();
}
}
</script>
You have to set the enablejsapi parameter to 1 in your iframe embed link.
By default, the parameter is set to 0 and unless you set it to 1, the callbacks won't work.
Reference: https://developers.google.com/youtube/js_api_reference
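For example, the embed would look roughly like this (VIDEO_ID is a placeholder; adjust the path to whatever embed form you already use):
<iframe id="VIDEO_ID" width="560" height="315"
        src="https://www.youtube.com/embed/VIDEO_ID?enablejsapi=1"
        frameborder="0" allowfullscreen></iframe>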
I'm coding a basic video marquee and one of the key requirements is that the videos need to be able to advance while keeping the player in full screen.
Using Video.js (4.1.0) I have been able to get everything work correctly except that I cannot get the captions to change when switching to another video.
Either inserting a "track" tag when the player HTML is first created or adding a track to the 'options' object when the player is initialized are the only ways I can get the player to display the "CC" button and show captions. However, I cannot re-initialize the player while in full screen so changing the track that way will not work.
I have tried addTextTrack and addTextTracks and both show that the tracks have been added - using something like console.log(videoObject.textTracks()) - but the player never shows them or the "CC" button.
Here is my code, any help is greatly appreciated:
;(function(window,undefined) {
// VIDEOS OBJECT
var videos = [
{"volume":"70","title":"TEST 1","url":"test1.mp4","type":"mp4"},
{"volume":"80","title":"TEST 2","url":"test2.mp4","type":"mp4"},
{"volume":"90","title":"TEST 3","url":"test3.mp4","type":"mp4"}
];
// CONSTANTS
var VIDEO_BOX_ID = "jbunow_marquee_video_box", NAV_TEXT_ID = "jbunow_marquee_nav_text", NAV_ARROWS_ID = "jbunow_marquee_nav_arrows", VIDEO_OBJ_ID = "jbunow_marquee_video", NAV_PREV_ID = "jbunow_nav_prev", NAV_NEXT_ID = "jbunow_nav_next";
// GLOBAL VARIABLES
var videoObject;
var currentTrack = 0;
var videoObjectCreated = false;
var controlBarHideTimeout;
jQuery(document).ready(function(){
// CREATE NAV ARROWS AND LISTENERS, THEN START MARQUEE
var navArrowsHtml = "<div id='" + NAV_PREV_ID + "' title='Play Previous Video'></div>";
navArrowsHtml += "<div id='" + NAV_NEXT_ID + "' title='Play Next Video'></div>";
jQuery('#' + NAV_ARROWS_ID).html(navArrowsHtml);
jQuery('#' + NAV_PREV_ID).on('click',function() { ChangeVideo(GetPrevVideo()); });
jQuery('#' + NAV_NEXT_ID).on('click',function() { ChangeVideo(GetNextVideo()); });
ChangeVideo(currentTrack);
});
var ChangeVideo = function(newIndex) {
var videoBox = jQuery('#' + VIDEO_BOX_ID);
if (!videoObjectCreated) {
// LOAD PLAYER HTML
videoBox.html(GetPlayerHtml());
// INITIALIZE VIDEO-JS
videojs(VIDEO_OBJ_ID, {}, function(){
videoObject = this;
// LISTENERS
videoObject.on("ended", function() { ChangeVideo(GetNextVideo()); });
videoObject.on("loadeddata", function () { videoObject.play(); });
videoObjectCreated = true;
PlayVideo(newIndex);
});
} else { PlayVideo(newIndex); }
}
var PlayVideo = function(newIndex) {
// TRY ADDING MULTIPLE TRACKS
videoObject.addTextTracks([{ kind: 'captions', label: 'English2', language: 'en', srclang: 'en', src: 'track2.vtt' }]);
// TRY ADDING HTML
//jQuery('#' + VIDEO_OBJ_ID + ' video').eq(0).append("<track kind='captions' src='track2.vtt' srclang='en' label='English' default />");
// TRY ADDING SINGLE TRACK THEN SHOWING USING RETURNED ID
//var newTrack = videoObject.addTextTrack('captions', 'English2', 'en', { kind: 'captions', label: 'English2', language: 'en', srclang: 'en', src: 'track2.vtt' });
//videoObject.showTextTrack(newTrack.id_, newTrack.kind_);
videoObject.volume(parseFloat(videos[newIndex]["volume"]) / 100); // SET START VOLUME
videoObject.src({ type: "video/" + videos[newIndex]["type"], src: videos[newIndex]["url"] }); // SET NEW SRC
videoObject.load();
videoObject.ready(function () {
videoObject.play();
clearTimeout(controlBarHideTimeout);
controlBarHideTimeout = setTimeout(function() { videoObject.controlBar.fadeOut(); }, 2000);
jQuery('#' + NAV_TEXT_ID).fadeOut(150, function() {
currentTrack = newIndex;
var navHtml = "";
navHtml += "<h1>Now Playing</h1><h2>" + videos[newIndex]["title"] + "</h2>";
if (videos.length > 1) { navHtml += "<h1>Up Next</h1><h2>" + videos[GetNextVideo()]["title"] + "</h2>"; }
jQuery('#' + NAV_TEXT_ID).html(navHtml).fadeIn(250);
});
});
}
var GetPlayerHtml = function() {
var playerHtml = "";
playerHtml += "<video id='" + VIDEO_OBJ_ID + "' class='video-js vjs-default-skin' controls='controls' preload='auto' width='560' height='315'>";
playerHtml += "<source src='' type='video/mp4' />";
//playerHtml += "<track kind='captions' src='track.vtt' srclang='en' label='English' default='default' />";
playerHtml += "</video>";
return playerHtml;
}
var GetNextVideo = function() {
if (currentTrack >= videos.length - 1) { return 0; }
else { return (currentTrack + 1); }
}
var GetPrevVideo = function() {
if (currentTrack <= 0) { return videos.length - 1; }
else { return (currentTrack - 1); }
}
})(window);
The current Video.js implementation (4.4.2) loads every kind of text track (subtitles, captions, chapters) at player initialization time, so it only correctly picks up the tracks defined between the <video> tags.
EDIT: I mean that it does load tracks added later via addTextTrack, but the player UI never updates after initialization time and keeps showing the initialization-time text tracks.
One possible workaround is to destroy the complete videojs player and re-create it on every video source change, after refreshing the content between the <video> tags. This way you don't update the source through the videojs player but by dynamically adding the required DOM elements and initializing a new player on them. This solution will probably cause some UI flashes and is quite non-optimal for the problem. Here is a link about destroying the videojs player
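A minimal sketch of that first workaround, assuming video.js 4.x and reusing the VIDEO_OBJ_ID constant from the question (containerId, nextSource, and nextTrackSrc are hypothetical parameters):
function reloadPlayer(containerId, nextSource, nextTrackSrc) {
    videojs(VIDEO_OBJ_ID).dispose();                       // tear down the old player completely
    // Rebuild the <video> markup, including the new <track>, then initialize a fresh player on it.
    jQuery('#' + containerId).html(
        "<video id='" + VIDEO_OBJ_ID + "' class='video-js vjs-default-skin' controls preload='auto' width='560' height='315'>" +
        "<source src='" + nextSource.src + "' type='" + nextSource.type + "' />" +
        "<track kind='captions' src='" + nextTrackSrc + "' srclang='en' label='English' default='default' />" +
        "</video>"
    );
    videojs(VIDEO_OBJ_ID, {}, function () {
        this.play();
    });
}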
The second option is to add dynamic text track handling to the existing code, which is not as hard as it sounds once you know where to look (I only did it for chapters, but it should be similar for the other text track kinds). The code below works with the latest official build, 4.4.2. Note that I'm using jQuery for removing the text track elements, so if anyone applies these changes as-is, jQuery needs to be loaded before videojs.
Edit the video.dev.js file as follows:
1: Add a clearTextTracks function to the Player
vjs.Player.prototype.clearTextTracks = function() {
var tracks = this.textTracks_ = this.textTracks_ || [];
for (var i = 0; i != tracks.length; ++i)
$(tracks[i].el()).remove();
tracks.splice(0, tracks.length);
this.trigger("textTracksChanged");
};
2: Add the new 'textTracksChanged' event trigger to the end of the existing addTextTrack method
vjs.Player.prototype.addTextTrack = function(kind, label, language, options) {
...
this.trigger("textTracksChanged");
}
3: Handle the new event in the TextTrackButton constructor function
vjs.TextTrackButton = vjs.MenuButton.extend({
/** @constructor */
init: function(player, options) {
vjs.MenuButton.call(this, player, options);
if (this.items.length <= 1) {
this.hide();
}
player.on('textTracksChanged', vjs.bind(this, this.refresh));
}
});
4: Implement the refresh method on the TextTrackButton
// removes and recreates the texttrack menu
vjs.TextTrackButton.prototype.refresh = function () {
this.removeChild(this.menu);
this.menu = this.createMenu();
this.addChild(this.menu);
if (this.items && this.items.length <= (this.kind_ == "chapters" ? 0 : 1)) {
this.hide();
} else
this.show();
};
Sorry, but for now I cannot link to a real working example; I hope the snippets above are enough as a starting point for anyone interested in this.
You can use this code when you update the source to a new video: just call the clearTextTracks method and add the new text tracks with the addTextTrack method, and the menus should now update themselves.
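With those patches in place, a source change could then look roughly like this (the file names are stand-ins, and videoObject is the player instance from the question above):
// Switch to a new video and swap its captions without re-creating the player.
videoObject.clearTextTracks();                                   // from patch 1 above
videoObject.src({ type: "video/mp4", src: "test2.mp4" });
videoObject.addTextTrack("captions", "English", "en", { src: "track2.vtt", srclang: "en" });
// The 'textTracksChanged' event fired by the patched methods refreshes the CC menu.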
I'm trying to do the exact same thing (or rather, NOT managing to do it)... I really need to figure out how to dynamically change / add a caption track.
This works to get it playing via the underlying HTML5, but it does not show the videojs CC button:
document.getElementById("HtmlFiveMediaPlayer_html5_api").innerHTML = '<track label="English Captions" srclang="en" kind="captions" src="http://localhost/media/captiontest/demo_Brian/demo_h264_1.vtt" type="text/vtt" default />';