Slice ArrayBuffer with Safari and play it

I need to load an MP3, slice it, and play it using Web Audio. On Firefox I can slice the MP3 anywhere and decode it fine, but on Safari I get an error with a null value. Is there a trick, or some way to slice the ArrayBuffer on Safari?
player.loadMp3 = function(url, callback) {
    var request = new XMLHttpRequest();
    request.open('GET', url, true);
    request.responseType = 'arraybuffer';
    request.onload = function() {
        var mp3slice = request.response.slice(1000, 100000);
        player.context.decodeAudioData(mp3slice); // context is webkitAudioContext on Safari
        callback();
    };
    request.send();
};
I need to create an MP3 player with some special features:
Time-shift the music (like http://codepen.io/eranshapira/pen/mnuoB)
Remove the gap between tracks (I got this working by slicing ArrayBuffers and joining them with a Blob, but it doesn't work on Safari/iPad).
Cross-platform (iPad and Android; I'm using Apache Cordova for that).
Solution
player.loadMp3 = function(url, callback) {
    console.log("loading " + url);
    var request = new XMLHttpRequest();
    request.open('GET', url, true);
    request.responseType = 'arraybuffer';
    request.onload = function() {
        console.log("loaded");
        console.log("decoding...");
        player.context.decodeAudioData(request.response, function(buffer) {
            console.log("decoded");
            player.buffer = player.joinAudioBuffers(player.buffer, buffer, 2000000);
            player.duration += player.buffer.duration;
            player.time = minsSecs(player.buffer.duration);
            console.log("concatenated");
            callback();
        }, function() {
            alert("decode failure"); // error callback for decodeAudioData
        });
    };
    request.send();
};
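The solution calls player.joinAudioBuffers, which isn't shown. A hypothetical sketch, assuming both buffers share a sample rate and channel count, with the third argument capping the joined length in sample frames:

player.joinAudioBuffers = function(buffer1, buffer2, maxFrames) {
    if (!buffer1) return buffer2; // first chunk: nothing to join yet
    var channels = Math.min(buffer1.numberOfChannels, buffer2.numberOfChannels);
    var frames = Math.min(buffer1.length + buffer2.length, maxFrames);
    var joined = player.context.createBuffer(channels, frames, buffer1.sampleRate);
    for (var ch = 0; ch < channels; ch++) {
        var out = joined.getChannelData(ch);
        // copy the first buffer, then append the second up to the frame cap
        out.set(buffer1.getChannelData(ch).subarray(0, Math.min(buffer1.length, frames)), 0);
        if (frames > buffer1.length) {
            out.set(buffer2.getChannelData(ch).subarray(0, frames - buffer1.length), buffer1.length);
        }
    }
    return joined;
};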

The code you've shown shouldn't work on any browser. For one thing, you need to provide a callback function to decodeAudioData. You also need to slice the decoded data after decoding it, not the raw MP3-encoded data before decoding it. Some browsers might manage to decode a slice of an MP3 file, but you can't rely on it. Something like this:
player.loadMp3 = function(url, callback) {
    var request = new XMLHttpRequest();
    request.open('GET', url, true);
    request.responseType = 'arraybuffer';
    request.onload = function() {
        var mp3slice = request.response.slice(1000, 100000);
        player.context.decodeAudioData(mp3slice, function(decoded) {
            var pcmSlice = decoded.slice(1000, 100000);
            callback(pcmSlice);
        });
    };
    request.send();
};
I haven't tested this code.
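One caveat: AudioBuffer has no built-in slice() method, so the decoded.slice(...) line above is effectively pseudocode. A minimal sketch of slicing decoded PCM by copying channel data, where start and end are sample-frame offsets rather than bytes:

// Sketch: copy frames [start, end) out of a decoded AudioBuffer,
// since AudioBuffer itself has no slice() method.
function sliceAudioBuffer(context, decoded, start, end) {
    var out = context.createBuffer(decoded.numberOfChannels, end - start, decoded.sampleRate);
    for (var ch = 0; ch < decoded.numberOfChannels; ch++) {
        out.getChannelData(ch).set(decoded.getChannelData(ch).subarray(start, end));
    }
    return out;
}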

Related

Shutdown Kodi with API (BlackBerry QML)

Hello, I'm trying to shut down Kodi (on a Raspberry Pi) from a mobile app (BlackBerry QML), but I don't know how.
I used this code (in the browser):
"http://[myip]:[myport]/jsonrpc?request={"jsonrpc":"2.0","method":"System.Suspend","id":1}"
I used this code (in the app):
function sendRequest() {
    var xhr = new XMLHttpRequest();
    var url = "http://[myip]:[myport]/jsonrpc?request={\"jsonrpc\":\"2.0\",\"method\": \"System.Suspend\",\"id\":1}"
    xhr.onreadystatechange = function() {
        if (xhr.readyState === XMLHttpRequest.DONE) {
            if (xhr.status === 200) {
                console.log(xhr.responseText);
                textArea.text = xhr.responseText;
            }
        }
    };
    xhr.open("GET", url, true); // with "POST" I got the same problem.
    xhr.send();
}
I got:
{"error":{"code":-32700,"message":"Parse error."},"id":null,"jsonrpc":"2.0"}
Remote from web browser works fine (http://[myip]:[myport])
Thank you for your answers.
********** Update: 21.10.2020 **********
I'm making progress, but I don't know what to do next.
I found some information about why I get the error, but I don't know how to implement it in my code.
Can you help me?
Thank you so much.
https://github.com/xbmc/xbmc/pull/12281
https://forum.kodi.tv/showthread.php?tid=324598&highlight=json
Here's how to do it, but I can't understand it:
https://retifrav.github.io/blog/2018/09/01/kodi-remote-control-app/
This is my function (it works on Kodi 17.6 but not on Kodi 18):
function sendRequest() {
    var xhr = new XMLHttpRequest();
    xhr.onreadystatechange = function() {
        if (xhr.readyState === XMLHttpRequest.DONE) {
            if (xhr.status === 200) {
                text.text = xhr.responseText
            }
        }
    };
    var url = 'http://<IP:PORT>/jsonrpc?request={"jsonrpc": "2.0", "id": 1, "method": "System.Shutdown"}'
    xhr.open("GET", url, true) // when I write "POST" - nothing happens
    xhr.send()
}
You should URL-encode the JSON in your URL before it's passed to the open method; use encodeURIComponent() to do that. Your browser is changing:
{"jsonrpc":"2.0","method": "System.Suspend","id":1}"
To:
%7B%22jsonrpc%22%3A%222.0%22%2C%22method%22%3A%20%22System.Suspend%22%2C%22id%22%3A1%7D%22
But your code is not.
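For example, a minimal sketch of the question's request with the payload encoded first (the [myip]:[myport] placeholder and the textArea element are carried over from the question):

function sendRequest() {
    var payload = JSON.stringify({ "jsonrpc": "2.0", "method": "System.Suspend", "id": 1 });
    // encodeURIComponent escapes the braces, quotes and spaces that the
    // server otherwise fails to parse.
    var url = "http://[myip]:[myport]/jsonrpc?request=" + encodeURIComponent(payload);
    var xhr = new XMLHttpRequest();
    xhr.onreadystatechange = function() {
        if (xhr.readyState === XMLHttpRequest.DONE && xhr.status === 200) {
            textArea.text = xhr.responseText;
        }
    };
    xhr.open("GET", url, true);
    xhr.send();
}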

How do I convert base64 string image to blob in React Native?

I am trying to use React-Native-Camera to capture an image and upload it to the server, but the capture response only provides a base64 image and a relative URI path to the system's cache. I used to turn images into blobs on websites using packages like blob-util, which doesn't work on React Native.
As I was searching around, I saw that most people upload the base64 strings directly to the server, but I can't find anything about blobs. How can I get a blob from a base64 image string?
I have a function in my project to convert an image to a blob. Here it is. The first function handles the camera; the second function creates a blob and an image name.
addPicture = () => {
    ImagePicker.showImagePicker({ title: "Pick an Image", maxWidth: 800, maxHeight: 600 }, res => {
        if (res.didCancel) {
            console.log("User cancelled!");
        } else if (res.error) {
            console.log("Error", res.error);
        } else {
            this.updateProfilePicture(res.uri)
        }
    });
}
addPicture launches the image picker. In the function above, res is the result that comes back from showImagePicker. I pass the uri prop of the result (res.uri) to the function below in order to create the blob file.
In the function below, I name the image with the userId; you can use anything you like.
updateProfilePicture = async (uri) => {
    var that = this;
    var userId = this.state.user.uid
    var re = /(?:\.([^.]+))?$/;
    var ext = re.exec(uri)[1]; // extract the file extension from the uri
    this.setState({
        currentFileType: ext
    });
    const blob = await new Promise((resolve, reject) => {
        const xhr = new XMLHttpRequest();
        xhr.onload = function () {
            resolve(xhr.response); // the response is already a Blob
        };
        xhr.onerror = function (e) {
            console.log(e);
            reject(new TypeError('Network request failed'));
        };
        xhr.responseType = 'blob';
        xhr.open('GET', uri, true);
        xhr.send(null);
    });
    var filePath = userId + '.' + that.state.currentFileType;
}
There is some other code in the function above that uploads the image to Firebase Storage; I did not include it here.
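As an aside, on recent React Native versions the same conversion can often be done with fetch instead of the XHR dance; a short sketch under that assumption:

// Sketch: fetch the local file uri and read the response as a blob directly.
const uriToBlob = async (uri) => {
    const response = await fetch(uri);
    return await response.blob();
};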

Can fetch() do responseType=document?

XHR's responseType='document' is awesome because it hands you back a DOM document that you can use querySelector, etc. on:
var xhr = new XMLHttpRequest();
xhr.open('GET', '/', true);
xhr.responseType = 'document';
xhr.onload = function(e) {
    var document = e.target.response;
    var h2headings = document.querySelectorAll('h2');
    // ...
};
Is this possible with the fetch method?
It's not natively supported in fetch, since fetch is a purely network-layer API with no dependency on being in a web browser (see discussion), but it's not too hard to pull off:
fetch('/')
    .then(res => res.text())
    .then(text => new DOMParser().parseFromString(text, 'text/html'))
    .then(document => {
        const h2headings = document.querySelectorAll('h2');
        // ...
    });

Loading local JSON file with Safari Extension

Trying to load a JSON file with a Safari Extension.
var xhr = new XMLHttpRequest();
xhr.open("GET", safari.extension.baseURI +'js/data.json', true);
It gives the error "Cross origin requests are only supported for HTTP."
For example, it is possible with a Chrome extension:
var xhr = new XMLHttpRequest();
xhr.open("GET", chrome.extension.getURL('/js/data.json'), true);
There you need to declare it in the manifest:
"web_accessible_resources": ["/js/data.json"]
Is there a similar way in Safari?
EDIT
Found a solution.
It is possible through the global page.
global.html
function handleMessage(event) {
    if (event.name === "requestParagraphs") {
        var xhr = new XMLHttpRequest();
        xhr.open("GET", safari.extension.baseURI + 'js/data.json', true);
        xhr.onreadystatechange = function () {
            if (xhr.readyState == 4) {
                var articlesJSON = JSON.parse(xhr.responseText);
                event.target.page.dispatchMessage('paragraphs', articlesJSON);
            }
        };
        xhr.send();
    }
}
safari.application.addEventListener("message", handleMessage, false);
injected.js
function handleMessage(msgEvent) {
    var messageName = msgEvent.name;
    var messageData = msgEvent.message;
    if (messageName === "paragraphs") {
        // ...
    }
}
safari.self.addEventListener("message", handleMessage, false); // listen for the response
safari.self.tab.dispatchMessage('requestParagraphs'); // call the global page

Can't play generated WebAudio MediaStream

I have a problem generating a MediaStream from an AudioContext with Web Audio in order to send it to another peer over WebRTC. When I generate the MediaStream and try to play it locally or remotely with an HTML Audio tag or JS, I don't hear anything, even though the stream itself is received fine.
This is my code snippet:
CreateTone = function () {
    var ringing = 'assets/tones/ringing.wav';
    var request = new XMLHttpRequest();
    request.open('GET', ringing, true);
    request.responseType = 'arraybuffer';
    request.onload = function () {
        context.decodeAudioData(request.response, function (buffer) {
            bufferAudio = buffer;
            SendTone();
        });
    }
    request.send();
};

SendTone = function () {
    var source = context.createBufferSource();
    source.buffer = bufferAudio;
    source.loop = true;
    source.connect(context.destination);
    // If I use source.start(0) here, it will play
    var remote = context.createMediaStreamDestination();
    source.connect(remote);
    var streamToSend = remote.stream;
    // If I try to play this stream with an Audio element, I can't hear anything.
    // When I send it with pc.addMediaStream(streamToSend) and it's received by
    // the other peer, it's still silent.
};
Am I getting the MediaStream the wrong way? Thanks in advance.
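A hedged sketch of SendTone under the assumption that the missing step is the one the inline comment hints at, starting the source (the srcObject playback is also an assumption):

SendTone = function () {
    var source = context.createBufferSource();
    source.buffer = bufferAudio;
    source.loop = true;
    var remote = context.createMediaStreamDestination();
    source.connect(remote);
    source.start(0); // a buffer source stays silent until started, even when routed to a MediaStreamDestination
    // Assumption: monitor the stream locally through an audio element.
    var audio = new Audio();
    audio.srcObject = remote.stream;
    audio.play(); // may require a prior user gesture due to autoplay policies
};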