Image sequence which plays in sync with a sound file on a play/pause button click. Both start as paused and when clicked play in a loop - sequence

the problem with the script I have is it starts out playing and then pauses when button is pressed I need the opposite. Plus I need the sound to be in sync with the length of the images in the array and both the image sequence and the sound file should play in a loop and start out as paused and play when the button is pressed.
HTML
<div>
<button id="bt1" onclick="togglePlay();toggle()"></button>
</div>
<script>
var myAudio = document.getElementById("myAudio");
var isPlaying = false;
let
loop,
index = 0,
images = [
"cat/1.svg", "cat/2.svg", "cat/3.svg", "cat/4.svg", "cat/5.svg", "cat/6.svg", "cat/7.svg", "cat/8.svg",
"cat/9.svg", "cat/10.svg", "cat/11.svg", "cat/12.svg", "cat/13.svg", "cat/14.svg", "cat/15.svg", "cat/16.svg",
"cat/17.svg", "cat/18.svg"
];
function startLoop() {
loop = setInterval(() => {
document.querySelector('img').src = images[index];
index++;
// Loop images array
if (index % images.length === 0) {
index = 0;
}
}, 100);
}
function toggle() {
if (loop) {
clearInterval(loop);
loop = null;
} else {
startLoop();
}
}
startLoop();
function togglePlay() {
isPlaying ? myAudio.pause() : myAudio.play();
};
myAudio.onplaying = function() {
isPlaying = true;
};
myAudio.onpause = function() {
isPlaying = false;
};
</script>

Related

Jerky Animations After Awhile ThreeJs

At first, my animation seems to work fine. However, after a few seconds, the animations become very jerky and laggy, I'm not sure why.
At first I thought it was due to the animation button I had which allows the user to start and stop the animation at will. However, even after I commented out the button, the animation continued to be laggy.
let camera, scene, renderer;
const loader = new GLTFLoader();
let mixer = null;
let controls;
const clock = new THREE.Clock();
let previousTime = 0;
//start and stop button
let runAnim = false;
let isPlay = true;
//animation
function animation() {
if (!isPlay) return;
const elapsedTime = clock.getElapsedTime();
const deltaTime = elapsedTime - previousTime;
previousTime = elapsedTime;
//Update mixer
if (mixer !== null) {
mixer.update(deltaTime);
}
// Update controls
controls.update();
window.requestAnimationFrame(animation);
render();
}
function render() {
renderer.render(scene, camera);
}
module.exports = function getImage() {
const mountRef = useRef(null);
useEffect(() => {
//Model
loader.load(`/gltf/1.gltf`);
mixer = new THREE.AnimationMixer(gltf.scene);
const action = mixer.clipAction(gltf.animations[0]);
action.play();
animation();
//Camera
camera = new THREE.PerspectiveCamera(
70,
window.innerWidth / window.innerHeight,
0.1,
100
);
camera.position.set(4, 0, 5);
scene = new THREE.Scene();
// Controls
controls = new OrbitControls(camera, renderer.domElement);
controls.update();
controls.enableDamping = true;
// Animation button
const animateButton = document.getElementById('animate-button');
const stopAnimation = (e) => {
if (runAnim) {
runAnim = false;
isPlay = true;
animation();
console.log('animation starts');
} else {
runAnim = true;
isPlay = false;
console.log('animation stops');
}
};
animateButton.addEventListener('click', stopAnimation);
return () => mountRef.removeChild(renderer.domElement);
}, []);
return (
<div>
<div ref={mountRef}>
<AnimationButton />
</div>
</div>
);
};

StreamTrack's readyState is getting changed to ended, just before playing the stream (MediaStream - MediaStreamTrack - WebRTC)

The jsfiddle (https://jsfiddle.net/kalyansai99/mm1b74uy/22/) contains code where the user can toggle between front and back camera of the mobile.
In few mobiles its working fine (Moto g5 plus, Moto E3 and so on - Chrome Browser) and in few mobiles (Mi Redimi Note 4 - Chrome Browser) when I am switching to back camera, initially the stream is loading with a track of "readyState" as "live". But when i am about to play the stream in video player, the "readyState" is getting changed to "ended" and black screen is been shown on the video tag.
Not sure whats happening. Any clues?
JSFiddle Code
var player = document.getElementById('player');
var flipBtn = document.getElementById('flipBtn');
var deviceIdMap = {};
var front;
var constraints = {
audio: false,
video: {
frameRate: 1000
}
};
var gotDevices = function (deviceList) {
var length = deviceList.length;
console.log(deviceList);
for (var i = 0; i < length; i++) {
var deviceInfo = deviceList[i];
if (deviceInfo.kind === 'videoinput') {
if (deviceInfo.label.indexOf('front') !== -1) {
deviceIdMap.front = deviceInfo.deviceId;
} else if (deviceInfo.label.indexOf('back') !== -1) {
deviceIdMap.back = deviceInfo.deviceId;
}
}
}
if (deviceIdMap.front) {
constraints.video.deviceId = {exact: deviceIdMap.front};
front = true;
} else if (deviceIdMap.back) {
constraints.video.deviceId = {exact: deviceIdMap.back};
front = false;
}
console.log('deviceIdMap - ', deviceIdMap);
};
var handleError = function (error) {
console.log('navigator.getUserMedia error: ', error);
};
function handleSuccess(stream) {
window.stream = stream;
// this is a video track as there is no audio track
console.log("Track - ", window.stream.getTracks()[0]);
console.log('Ready State - ', window.stream.getTracks()[0].readyState);
if (window.URL) {
player.src = window.URL.createObjectURL(stream);
} else {
player.src = stream;
}
player.onloadedmetadata = function (e) {
console.log('Ready State - 3', window.stream.getTracks()[0].readyState);
player.play();
console.log('Ready State - 4', window.stream.getTracks()[0].readyState);
}
console.log('Ready State - 2', window.stream.getTracks()[0].readyState);
}
navigator.mediaDevices.enumerateDevices().then(gotDevices).catch(handleError);
flipBtn.addEventListener('click', function () {
if (window.stream) {
window.stream.getTracks().forEach(function(track) {
track.stop();
});
}
if (front) {
constraints.video.deviceId = {exact: deviceIdMap.back};
} else {
constraints.video.deviceId = {exact: deviceIdMap.front};
}
front = !front;
navigator.getUserMedia(constraints, handleSuccess, handleError);
}, false);
console.log(constraints);
navigator.getUserMedia(constraints, handleSuccess, handleError);
#player {
width: 320px;
}
#flipBtn {
width: 150px;
height: 50px;
}
<video id="player" autoplay></video>
<div>
<button id="flipBtn">
Flip Camera
</button>
</div>
Replace track.stop() to track.enabled=false and when adding track to the stream, enable it back using track.enabled=true
The MediaStream.readyState property is changed to "ended" when we stop the track and can never be used again. Therefore its not wise to use stop. For more reference:
https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack/readyState
https://developer.mozilla.org/en-US/docs/Web/API/MediaStreamTrack/stop

Swiper JS return to first slide

I have slider with swiper js, how it can return to first slide when we click Next Button on end of slide ?
http://idangero.us
You can set the loop parameter. For example:
var mySwiper = new Swiper ('.swiper-header', {
loop: true, //this should allow the last next button to return to the beginning
nextButton: '.swiper-button-next',
prevButton: '.swiper-button-prev',
});
i made function like this:
.swiper-next is: navigation - nextEl
var changed = false;
$(".swiper-next").on('click', function () {
if (changed === true) {
changed = false;
faqSwiper.slideTo(0);
}
if (faqSwiper.isEnd) changed = true;
})
You may use "click" event from Swiper API.
Note: setTimeout is needed to correctly determine slide index
const swiper_instance = new Swiper('.swiper-container', {
// ...
});
swiper_instance.on('click', function (swiper, event) {
const is_next_click = event.target === swiper.navigation.nextEl,
is_prev_click = event.target === swiper.navigation.prevEl,
is_end = this.isEnd,
is_beginning = this.isBeginning;
// slide to begin
if (is_next_click && is_end) {
setTimeout(() => swiper.slideTo(0))
}
// slide to end
if (is_prev_click && is_beginning) {
setTimeout(() => swiper.slideTo(swiper.slides.length))
}
});

YouTube OnStateChange with multiple players on the same page

I have multiple YouTube players on a single page inside a banner slider and I want to use the YouTube Player API to control them and do other stuff based on the state of the video's. I have the code below which I'm pretty sure of used to work fine where any state changes where registered. But it doesnt work for me anymore. The YouTube object is still there and I can use it to start and stop a video but the onStateChange event never gets triggered. What is wrong with this code?
var YT_ready = (function() {
var onReady_funcs = [],
api_isReady = false;
return function(func, b_before) {
if (func === true) {
api_isReady = true;
for (var i = 0; i < onReady_funcs.length; i++) {
onReady_funcs.shift()();
}
}
else if (typeof func == "function") {
if (api_isReady) func();
else onReady_funcs[b_before ? "unshift" : "push"](func);
}
}
})();
function onYouTubePlayerAPIReady() {
YT_ready(true);
}
var players_homepage = {};
YT_ready(function() {
$("li.video iframe.youtube").each(function(event) {
var frameID_homepage = $(this).attr('id');
if (frameID_homepage) {
players_homepage[frameID_homepage] = new YT.Player(frameID_homepage, {
events: {
'onStateChange': onPlayerStateChange_homepage
}
});
}
});
});
(function(){
var tag = document.createElement('script');
tag.src = "//www.youtube.com/iframe_api";
var firstScriptTag = document.getElementsByTagName('script')[0];
firstScriptTag.parentNode.insertBefore(tag, firstScriptTag);
})();
function onPlayerStateChange_homepage(event) {
if (event.data === 0) {
// do something on end
} else if (event.data === 1) {
// do something on play
} else if (event.data === 2) {
// do something on pause
}
}
function pauseVideo_homepage(previousVideo) {
players_homepage[previousVideo].pauseVideo();
}
function playVideo_homepage(currentVideo) {
players_homepage[currentVideo].playVideo();
}

web audio api record function

i have following code ... from site http://forestmist.org/blog/web-audio-api-loops/
it works good... but i need record functionality which will help to record previously clicked button and store there audio too... any help
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Web Audio API Loops Demo</title>
</head>
<body>
<form>
<button id="button-loop-1" type="button" value="1">Loop 1</button>
<button id="button-loop-2" type="button" value="2">Loop 2</button>
</form>
<script>
//--------------
// Audio Object
//--------------
var audio = {
buffer: {},
compatibility: {},
files: [
'synth.wav',
'beat.wav'
],
proceed: true,
source_loop: {},
source_once: {}
};
//-----------------
// Audio Functions
//-----------------
audio.findSync = function(n) {
var first = 0,
current = 0,
offset = 0;
// Find the audio source with the earliest startTime to sync all others to
for (var i in audio.source_loop) {
current = audio.source_loop[i]._startTime;
if (current > 0) {
if (current < first || first === 0) {
first = current;
}
}
}
if (audio.context.currentTime > first) {
offset = (audio.context.currentTime - first) % audio.buffer[n].duration;
}
return offset;
};
audio.play = function(n) {
if (audio.source_loop[n]._playing) {
audio.stop(n);
} else {
audio.source_loop[n] = audio.context.createBufferSource();
audio.source_loop[n].buffer = audio.buffer[n];
audio.source_loop[n].loop = true;
audio.source_loop[n].connect(audio.context.destination);
var offset = audio.findSync(n);
audio.source_loop[n]._startTime = audio.context.currentTime;
if (audio.compatibility.start === 'noteOn') {
/*
The depreciated noteOn() function does not support offsets.
Compensate by using noteGrainOn() with an offset to play once and then schedule a noteOn() call to loop after that.
*/
audio.source_once[n] = audio.context.createBufferSource();
audio.source_once[n].buffer = audio.buffer[n];
audio.source_once[n].connect(audio.context.destination);
audio.source_once[n].noteGrainOn(0, offset, audio.buffer[n].duration - offset); // currentTime, offset, duration
/*
Note about the third parameter of noteGrainOn().
If your sound is 10 seconds long, your offset 5 and duration 5 then you'll get what you expect.
If your sound is 10 seconds long, your offset 5 and duration 10 then the sound will play from the start instead of the offset.
*/
// Now queue up our looping sound to start immediatly after the source_once audio plays.
audio.source_loop[n][audio.compatibility.start](audio.context.currentTime + (audio.buffer[n].duration - offset));
} else {
audio.source_loop[n][audio.compatibility.start](0, offset);
}
audio.source_loop[n]._playing = true;
}
};
audio.stop = function(n) {
if (audio.source_loop[n]._playing) {
audio.source_loop[n][audio.compatibility.stop](0);
audio.source_loop[n]._playing = false;
audio.source_loop[n]._startTime = 0;
if (audio.compatibility.start === 'noteOn') {
audio.source_once[n][audio.compatibility.stop](0);
}
}
};
//-----------------------------
// Check Web Audio API Support
//-----------------------------
try {
// More info at http://caniuse.com/#feat=audio-api
window.AudioContext = window.AudioContext || window.webkitAudioContext;
audio.context = new window.AudioContext();
} catch(e) {
audio.proceed = false;
alert('Web Audio API not supported in this browser.');
}
if (audio.proceed) {
//---------------
// Compatibility
//---------------
(function() {
var start = 'start',
stop = 'stop',
buffer = audio.context.createBufferSource();
if (typeof buffer.start !== 'function') {
start = 'noteOn';
}
audio.compatibility.start = start;
if (typeof buffer.stop !== 'function') {
stop = 'noteOff';
}
audio.compatibility.stop = stop;
})();
//-------------------------------
// Setup Audio Files and Buttons
//-------------------------------
for (var a in audio.files) {
(function() {
var i = parseInt(a) + 1;
var req = new XMLHttpRequest();
req.open('GET', audio.files[i - 1], true); // array starts with 0 hence the -1
req.responseType = 'arraybuffer';
req.onload = function() {
audio.context.decodeAudioData(
req.response,
function(buffer) {
audio.buffer[i] = buffer;
audio.source_loop[i] = {};
var button = document.getElementById('button-loop-' + i);
button.addEventListener('click', function(e) {
e.preventDefault();
audio.play(this.value);
});
},
function() {
console.log('Error decoding audio "' + audio.files[i - 1] + '".');
}
);
};
req.send();
})();
}
}
</script>
</body>
</html>
Check out RecordJS (https://github.com/mattdiamond/Recorderjs). It should help you out.