Base64 string to PDF in React Native (Expo), then download - react-native

I have a base64 string as follows:
I would like this base64 string (or any base64 string) to be turned into a PDF and then downloaded in React Native. I am using Expo for this. I have looked all over the place and could not find an answer.

If you just want to show the PDF in React Native, the react-native-pdf package can do the job. Use a source object like this:
const source = {uri:"data:application/pdf;base64,JVBERi0xLjcKJc..."};
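A minimal render sketch, assuming react-native-pdf is installed (note it is a native module, so it will not run inside stock Expo Go without a dev build):
import React from 'react';
import { StyleSheet } from 'react-native';
import Pdf from 'react-native-pdf';

export default function PdfViewer() {
  // truncated sample string, as in the question
  const source = { uri: "data:application/pdf;base64,JVBERi0xLjcKJc..." };
  return <Pdf source={source} style={styles.pdf} />;
}

const styles = StyleSheet.create({
  pdf: { flex: 1 },
});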
If you want to download the base64 string to a PDF file, that is also simple (the react-native-fetch-blob package has since been republished as rn-fetch-blob, but the API is the same):
var RNFetchBlob = require('react-native-fetch-blob').default;

const DocumentDir = RNFetchBlob.fs.dirs.DocumentDir;
let pdfLocation = DocumentDir + '/' + 'test.pdf';
// write the base64 payload straight to disk; no manual decoding needed
RNFetchBlob.fs.writeFile(pdfLocation, pdf_base64Str, 'base64');
// getPdfBinary and base64_encode are defined further down in this answer.
export async function downloadFile(url) {
  let binary = await getPdfBinary(url);
  const base64Str = base64_encode(binary);
  const DocumentDir = RNFetchBlob.fs.dirs.DocumentDir;
  let pdfLocation = DocumentDir + '/' + 'test.pdf';
  // return the promise so callers can await completion
  return RNFetchBlob.fs.writeFile(pdfLocation, base64Str, 'base64');
}
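For reference, a usage sketch with a hypothetical URL; DocumentDir is the app's private sandbox, so no storage permission is required:
// Hypothetical call; the PDF lands in the app's private documents folder.
downloadFile("https://example.com/sample.pdf")
  .then(() => console.log("saved test.pdf"));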
Sorry, I didn't notice at first that you are using Expo; I had never written file-system code in Expo before. After a lot of trying, it finally works, but only for Android. I assume you want the PDF saved to an external storage path so the user can see it in the file app; for iOS that is hard to achieve, and a possible workaround is the share dialog. If you just want to save the PDF to the app's own document folder (not visible to the user), FileSystem.writeAsStringAsync is enough. Hope this is helpful!
import * as FileSystem from 'expo-file-system';
import { Platform } from 'react-native';
async function getPdfBinary(url) {
  return new Promise((resolve, reject) => {
    var xhr = new XMLHttpRequest();
    xhr.open("GET", url, true);
    xhr.responseType = "arraybuffer"; // get the binary
    xhr.onload = function (event) {
      var arrayBuffer = xhr.response;
      var byteArray = new Uint8Array(arrayBuffer);
      var len = byteArray.byteLength;
      var binary = "";
      // build a binary string, one char per byte
      for (var i = 0; i < len; i++) {
        binary += String.fromCharCode(byteArray[i]);
      }
      resolve(binary);
    };
    // was missing: reject so the promise doesn't hang on network errors
    xhr.onerror = () => reject(new Error("request failed"));
    xhr.send();
  });
}
function base64_encode(str) {
  var c1, c2, c3;
  var base64EncodeChars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
  var i = 0,
    len = str.length,
    string = '';
  while (i < len) {
    c1 = str.charCodeAt(i++) & 0xff;
    if (i == len) {
      // one byte left: emit two chars and pad with "=="
      string += base64EncodeChars.charAt(c1 >> 2);
      string += base64EncodeChars.charAt((c1 & 0x3) << 4);
      string += "==";
      break;
    }
    c2 = str.charCodeAt(i++);
    if (i == len) {
      // two bytes left: emit three chars and pad with "="
      string += base64EncodeChars.charAt(c1 >> 2);
      string += base64EncodeChars.charAt(((c1 & 0x3) << 4) | ((c2 & 0xF0) >> 4));
      string += base64EncodeChars.charAt((c2 & 0xF) << 2);
      string += "=";
      break;
    }
    c3 = str.charCodeAt(i++);
    // full 3-byte group -> 4 base64 chars
    string += base64EncodeChars.charAt(c1 >> 2);
    string += base64EncodeChars.charAt(((c1 & 0x3) << 4) | ((c2 & 0xF0) >> 4));
    string += base64EncodeChars.charAt(((c2 & 0xF) << 2) | ((c3 & 0xC0) >> 6));
    string += base64EncodeChars.charAt(c3 & 0x3F);
  }
  return string;
}
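As an aside, if the goal is only to save a remote PDF into the app's sandbox (no Storage Access Framework), expo-file-system can download straight to disk, and the whole binary-to-base64 round trip above can be skipped; a sketch with a hypothetical URL:
// Downloads directly to the app's private documents folder.
const downloadDirect = async (url) => {
  const target = FileSystem.documentDirectory + "test.pdf";
  const { uri } = await FileSystem.downloadAsync(url, target);
  return uri;
};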
const downloadForAos = async (pdfBase64Str) => {
  // ask the user (via the Storage Access Framework) for a directory to save into
  const folder = FileSystem.StorageAccessFramework.getUriForDirectoryInRoot("test");
  const permissions = await FileSystem.StorageAccessFramework.requestDirectoryPermissionsAsync(folder);
  if (!permissions.granted) return;
  let filePath = await FileSystem.StorageAccessFramework.createFileAsync(permissions.directoryUri, "test.pdf", "application/pdf");
  // let filePath = "content://com.android.externalstorage.documents/tree/primary%3Atest/document/primary%3Atest%2Ftest.txt";
  try {
    await FileSystem.StorageAccessFramework.writeAsStringAsync(filePath, pdfBase64Str, { encoding: FileSystem.EncodingType.Base64 });
    alert("download success!");
  } catch (err) {
    console.log(err);
  }
};
const downloadForIos = async () => {
  // not implemented here; see the share-sheet sketch below
  alert("try to do it by yourself");
};
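For iOS, a possible route is the share dialog mentioned above; a sketch assuming the expo-sharing package is installed: write the PDF into the app sandbox, then hand the file to the share sheet so the user can save it to Files or another app.
import * as Sharing from 'expo-sharing'; // assumption: expo-sharing is installed

const shareOnIos = async (pdfBase64Str) => {
  const fileUri = FileSystem.documentDirectory + "test.pdf";
  await FileSystem.writeAsStringAsync(fileUri, pdfBase64Str, {
    encoding: FileSystem.EncodingType.Base64,
  });
  // the share sheet lets the user pick Files, Mail, etc.
  if (await Sharing.isAvailableAsync()) {
    await Sharing.shareAsync(fileUri, { mimeType: "application/pdf" });
  }
};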
export async function downloadPdf(url) {
  let binary = await getPdfBinary(url);
  const base64Str = base64_encode(binary);
  if (Platform.OS === "ios") {
    downloadForIos(base64Str);
  } else {
    downloadForAos(base64Str);
  }
}
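If you already have the base64 string (as in the original question), you can skip the download-and-encode steps and call the platform writers directly; a small sketch:
// Hypothetical entry point for an in-hand base64 string.
export async function savePdfFromBase64(pdfBase64Str) {
  if (Platform.OS === "ios") {
    await downloadForIos(pdfBase64Str);
  } else {
    await downloadForAos(pdfBase64Str);
  }
}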

Related

ethers.js: swap on Uniswap V3 fails with a failed tx

I'm trying to use the exactInput() function of the UniV3 interface, but the transaction fails when the code executes: https://goerli.etherscan.io/tx/0xb0d5e4b491610b9db8d98cc938008ba2a4e1a06e67b05ed87ac6c0ca3ad61dab
I know the ETH sent shows 0 in that one, but it fails even when specifying an amount, and I don't know what to change.
I have checked many code samples out there and can't spot the mistake; could someone please give me some advice?
const {abi: V3SwapRouterABI} = require('@uniswap/v3-periphery/artifacts/contracts/interfaces/ISwapRouter.sol/ISwapRouter.json')
const { ethers } = require("ethers")
require("dotenv").config()
const INFURA_URL_TESTNET = process.env.INFURA_URL_TESTNET
const PRIVATE_KEY = process.env.PRIVATE_KEY
const WALLET_ADDRESS = process.env.WALLET_ADDRESS
// now you can call sendTransaction
const wethToken= "0xB4FBF271143F4FBf7B91A5ded31805e42b2208d6"
const Uni= "0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984"
const UniswapRouter="0x68b3465833fb72A70ecDF485E0e4C7bD8665Fc45"
const UniV3Contract = new ethers.Contract(
  UniswapRouter,
  V3SwapRouterABI
)
const provider = new ethers.providers.JsonRpcProvider(INFURA_URL_TESTNET)
const wallet = new ethers.Wallet(PRIVATE_KEY)
const signer = wallet.connect(provider)
const FEE_SIZE = 3

function encodePath(path, fees) {
  if (path.length != fees.length + 1) {
    throw new Error('path/fee lengths do not match')
  }
  let encoded = '0x'
  for (let i = 0; i < fees.length; i++) {
    // 20 byte encoding of the address
    encoded += path[i].slice(2)
    // 3 byte encoding of the fee
    encoded += fees[i].toString(16).padStart(2 * FEE_SIZE, '0')
  }
  // encode the final token
  encoded += path[path.length - 1].slice(2)
  return encoded.toLowerCase()
}
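As a worked example of the byte layout, derived from the constants above: each hop is 20 bytes of token address followed by 3 bytes of fee.
// encodePath([wethToken, Uni], [3000]) yields (lowercased):
// '0x'
//   + 'b4fbf271143f4fbf7b91a5ded31805e42b2208d6' // WETH, 20 bytes
//   + '000bb8'                                   // fee 3000 = 0x0bb8, 3 bytes
//   + '1f9840a85d5af5bf1d1762f925bdaddc4201f984' // UNI, 20 bytes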
async function getToken() {
  const path = encodePath([wethToken, Uni], [3000])
  const deadline = Math.floor(Date.now() / 1000) + (60 * 10)
  const params = {
    path: path,
    recipient: WALLET_ADDRESS,
    deadline: deadline,
    amountIn: ethers.utils.parseEther('0.01'),
    amountOutMinimum: 0
  }
  const encodedData = UniV3Contract.interface.encodeFunctionData("exactInput", [params])
  const txArg = {
    to: UniswapRouter,
    from: WALLET_ADDRESS,
    data: encodedData,
    gasLimit: ethers.utils.hexlify(1000000)
  }
  const tx = await signer.sendTransaction(txArg)
  console.log('tx: ', tx)
  const receipt = await tx.wait() // wait() returns a promise; await it
  console.log('receipt: ', receipt)
}

module.exports = { getToken }
You will need to remove the deadline. The new router (0x68b3465833fb72A70ecDF485E0e4C7bD8665Fc45) moved the deadline out of the params struct and into the multicall entry point, since the router is designed to be multicalled.
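A hedged sketch of the corrected call: SwapRouter02's ExactInputParams struct has no deadline field, so the params drop it. Note this also implies encoding against a SwapRouter02 ABI (e.g. IV3SwapRouter from @uniswap/swap-router-contracts) rather than the v1 ISwapRouter ABI imported above, which still expects a deadline.
// Same call as in the question, minus deadline
// (SwapRouter02 moved it into multicall(deadline, data)).
const params = {
  path: path,
  recipient: WALLET_ADDRESS,
  amountIn: ethers.utils.parseEther('0.01'),
  amountOutMinimum: 0
}
const encodedData = UniV3Contract.interface.encodeFunctionData("exactInput", [params])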

Cloudflare R2 Worker throwing 'Network Connection Lost' error

In my worker I am converting a base64 string I get from the request into a blob with the function below. However, when I try to PUT the blob into my bucket, I get a "Network Connection Lost" error. I can successfully PUT just the base64 string, or any other string, but not a blob. Here is my worker:
// Function to convert b64 to blob (working fine)
function b64toBlob(b64Data, contentType, sliceSize = 512) {
  const byteCharacters = atob(b64Data);
  const byteArrays = [];
  for (let offset = 0; offset < byteCharacters.length; offset += sliceSize) {
    const slice = byteCharacters.slice(offset, offset + sliceSize);
    const byteNumbers = new Array(slice.length);
    for (let i = 0; i < slice.length; i++) {
      byteNumbers[i] = slice.charCodeAt(i);
    }
    const byteArray = new Uint8Array(byteNumbers);
    byteArrays.push(byteArray);
  }
  const blob = new Blob(byteArrays, { type: contentType });
  return blob;
}
export default {
  async fetch(request, env) {
    const url = new URL(request.url);
    const key = url.pathname.slice(1);
    switch (request.method) {
      case 'PUT':
        const contentType = 'application/pdf';
        const b64Data = request.body;
        const blob = b64toBlob(b64Data, contentType);
        try {
          await env.qa_sub_agreements_bucket.put(key, blob, { // Failing here
            httpMetadata: request.headers,
          })
          return new Response(blob) // Successfully returns the blob when the PUT above is commented out
        } catch (e) {
          console.error(e.message, e.stack); // Logs out "Error: Network Connection Lost"
        }
Hard to say definitively, because the worker posted doesn't appear to be complete. An eagle-eyed coworker spotted that the problem is likely that you're invoking atob on a ReadableStream (request.body is a stream, not a string), and that conversion is what's throwing the exception.
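A minimal sketch of that fix, assuming the client PUTs the raw base64 string as the request body: drain the stream to a string first, then decode. (R2's put() also accepts an ArrayBuffer or ReadableStream directly, so the Blob detour is optional.)
// Inside the 'PUT' case: request.body is a ReadableStream, so read it
// out as text before handing it to the atob()-based converter.
const b64Data = await request.text();
const blob = b64toBlob(b64Data, 'application/pdf');
await env.qa_sub_agreements_bucket.put(key, blob, {
  httpMetadata: request.headers,
});
return new Response('ok');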

Upload large file to Azure blob storage via REST API Put Block Blob

I am using React Native to build a mobile application for Android and iOS.
Since no framework exists that supports the Azure Storage API in React Native (the existing ones all require browser APIs that React Native does not provide), I use the REST API to interact with Azure Storage, and it works fine for e.g. list containers, list blobs, get blob, and put blob.
To upload a large file I tried to use the same mechanism with the 'Put Block' API (as described here: https://learn.microsoft.com/en-us/rest/api/storageservices/put-block), without success: it fails with error code 403.
I would appreciate your assistance.
Thank you.
My code for uploading a single block:
private createAuthorizationHeader(canonicalizedString: string) {
  const str = CryptoJS.HmacSHA256(canonicalizedString, CryptoJS.enc.Base64.parse(this.config.accountKey));
  const sig = CryptoJS.enc.Base64.stringify(str);
  const authorizationHeader = `SharedKey ${this.config.accountName}:${sig}`;
  return authorizationHeader;
}

async putBlockBlob(containerName: string, blobPath: string, blobContent: string, blockIndex: number) {
  const requestMethod = 'PUT';
  const urlPath = `${containerName}/${blobPath}`;
  const dateInRfc1123Format = new Date(Date.now()).toUTCString();
  const storageServiceVersion = '2019-12-12';
  const blobLength: number = blobContent.length;
  const blockId = Buffer.from(`block-${blockIndex}`).toString('base64');
  const blobType = 'BlockBlob';
  // StringToSign =
  //   VERB + "\n" +
  //   Content-Encoding + "\n" +
  //   Content-Language + "\n" +
  //   Content-Length + "\n" +
  //   Content-MD5 + "\n" +
  //   Content-Type + "\n" +
  //   Date + "\n" +
  //   If-Modified-Since + "\n" +
  //   If-Match + "\n" +
  //   If-None-Match + "\n" +
  //   If-Unmodified-Since + "\n" +
  //   Range + "\n" +
  //   CanonicalizedHeaders +
  //   CanonicalizedResource;
  const canonicalizedHeaders = `x-ms-date:${dateInRfc1123Format}\nx-ms-version:${storageServiceVersion}`;
  const canonicalizedResource = `/${this.config.accountName}/${urlPath}\nblockid:${blockId}\ncomp:block`;
  const stringToSign = `${requestMethod}\n\n\n${blobLength}\n\napplication/octet-stream\n\n\n\n\n\n\n${canonicalizedHeaders}\n${canonicalizedResource}`;
  const uriStr = `${urlPath}?comp=block&blockid=${blockId}`;
  const authorizationHeader = this.createAuthorizationHeader(stringToSign);
  const header = {
    'cache-control': 'no-cache',
    'x-ms-date': dateInRfc1123Format,
    'x-ms-version': storageServiceVersion,
    Authorization: authorizationHeader,
    'Content-Length': `${blobLength}`,
    'Content-Type': 'application/octet-stream',
  };
  try {
    return axios
      .create({ baseURL: `https://${this.config.accountName}.blob.core.windows.net/` })
      .request({
        method: requestMethod,
        url: uriStr,
        data: blobContent,
        headers: header,
      })
      .then((response) => response.data)
      .catch((err) => {
        throw err;
      });
  } catch (err) {
    console.log(err);
    throw err;
  }
}
I believe the issue is a missing newline character between Range and CanonicalizedHeaders.
Can you try changing the following line of code:
const stringToSign = `${requestMethod}\n\n\n${blobLength}\n\napplication/octet-stream\n\n\n\n\n\n\n${canonicalizedHeaders}\n${canonicalizedResource}`;
to:
const stringToSign = `${requestMethod}\n\n\n${blobLength}\n\napplication/octet-stream\n\n\n\n\n\n\n\n${canonicalizedHeaders}\n${canonicalizedResource}`;
That extra newline should let you upload the data to the Azure Storage server.
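Beyond the signature fix, a hedged usage sketch (the block size, the service instance name, and the commit step are my assumptions, not part of the original answer): split the content into fixed-size blocks, upload each with putBlockBlob, and remember that a final Put Block List request is still required to commit the blocks into the blob.
// Hypothetical driver, assuming `service` is the class instance that
// exposes putBlockBlob(). A Put Block List call (not shown in the
// question) must still follow to commit the uploaded block ids.
const BLOCK_SIZE = 4 * 1024 * 1024; // 4 MB per block (assumption)
async function uploadInBlocks(service, containerName, blobPath, content) {
  for (let i = 0, blockIndex = 0; i < content.length; i += BLOCK_SIZE, blockIndex++) {
    await service.putBlockBlob(containerName, blobPath, content.slice(i, i + BLOCK_SIZE), blockIndex);
  }
}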
Alternatively, upload the file to the server using a SAS token:
export const uploadMedia = async (params: any, callBack: any) => {
  const SAS_URL: any = `https://${blobUrl}.blob.core.windows.net`; // backticks so ${blobUrl} interpolates
  const CONTAINER: any = "";
  const SAS_TOKEN: any = "";
  const { fileType, localUri } = params;
  const userId = "set user ID here";
  const fileName = String(fileType).concat(customIdGenerator(7));
  const assetPath = `${SAS_URL}/${CONTAINER}/${userId}/${fileName}`;
  HEADER["x-ms-blob-content-type"] = CONST_HEADER(fileType);
  return await RNFetchBlob.fetch(
    "PUT",
    `${assetPath}?${SAS_TOKEN}`,
    HEADER,
    RNFetchBlob.wrap(localUri)
  )
    ?.uploadProgress(callBack)
    .then(() => {
      return assetPath;
    });
};
fileType is one of 'video' | 'image' | 'pdf':
let params: any = {
  fileType: 'image',
  localUri: image,
};
Generate a custom id for uniqueness (you can also use a UUID):
const customIdGenerator = (length: any) => {
  var result = "";
  var characters =
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
  var charactersLength = characters.length;
  for (var i = 0; i < length; i++) {
    result += characters.charAt(Math.floor(Math.random() * charactersLength));
  }
  return result;
};
Set the blob content type for the different file types:
const CONST_HEADER = (type: any) => {
  return type == 'image'
    ? `image/png`
    : type == 'video'
    ? 'video/mp4'
    : type == 'pdf' && 'application/pdf';
};
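Note that the snippet above writes into a HEADER object it never defines; a minimal sketch of what it presumably contains (my assumption, not the answerer's actual code):
// Hypothetical HEADER skeleton for the SAS PUT above.
// x-ms-blob-type is mandatory for Put Blob; x-ms-blob-content-type
// is filled in per upload by CONST_HEADER().
const HEADER = {
  "x-ms-blob-type": "BlockBlob",
};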

Create PDF from Google Sheets

function createBulkPDFs() {
  const docFile = DriveApp.getFileById("id");
  const tempFolder = DriveApp.getFolderById("id");
  const pdfFolder = DriveApp.getFolderById("id");
  const currentSheet = SpreadsheetApp.getActiveSpreadsheet().getSheetByName("Sheet1")
  const data = currentSheet.getRange(2, 1, currentSheet.getLastRow() - 1, 15).getDisplayValues();
  let errors = [];
  data.forEach(row => {
    try {
      createpdf(row[0], row[1], row[5], row[6], row[7], row[8], row[9], row[0] + " " + row[1], docFile, tempFolder, pdfFolder);
      errors.push("");
    } catch (err) {
      errors.push("Failed");
    }
  }); //close forEach
  currentSheet.getRange(2, 15, currentSheet.getLastRow() - 1, 1).setValues(errors);
}
function createpdf(First_name, Last_name, Description, Address, Location, Date_of_letter, Date_of_Def, pdfname, docFile, tempFolder, pdfFolder) {
  const tempFile = docFile.makeCopy(tempFolder);
  const tempDocFile = DocumentApp.openById(tempFile.getId());
  const body = tempDocFile.getBody();
  body.replaceText("{First name}", First_name);
  body.replaceText("{Last name}", Last_name);
  body.replaceText("{Description}", Description);
  body.replaceText("{Address}", Address);
  body.replaceText("{Location}", Location);
  body.replaceText("{Date of letter}", Date_of_letter);
  body.replaceText("{Date of Def}", Date_of_Def);
  tempDocFile.saveAndClose();
  const pdfContentBlob = tempFile.getAs(MineType.Pdf);
  pdfFolder.createFile(pdfContentBlob).setName("pdfname");
}
I tried to replicate your code and found some issues.
setValues() expects a two-dimensional array: each sub-array represents one row of data, so the flat errors array must be wrapped before writing it back.
The value passed to getAs() should be 'application/pdf'; MineType.Pdf is a misspelling (the actual enum is MimeType.PDF).
Code:
function createBulkPDFs() {
  const docFile = DriveApp.getFileById("id");
  const tempFolder = DriveApp.getFolderById("id");
  const pdfFolder = DriveApp.getFolderById("id");
  const currentSheet = SpreadsheetApp.getActiveSpreadsheet().getSheetByName("Sheet1")
  const data = currentSheet.getRange(1, 1, currentSheet.getLastRow(), 11).getDisplayValues();
  let errors = [];
  data.forEach(row => {
    try {
      createpdf(row[0], row[1], row[5], row[6], row[7], row[8], row[9], row[0] + " " + row[1], docFile, tempFolder, pdfFolder);
      errors.push("Success");
    } catch (err) {
      errors.push("Failed");
    }
  }); //close forEach
  // wrap each status string in its own sub-array: setValues() needs a 2-D array
  let newArr = [];
  while (errors.length > 0) {
    newArr.push(errors.splice(0, 1));
  }
  currentSheet.getRange(1, 12, currentSheet.getLastRow(), 1).setValues(newArr);
}
function createpdf(First_name, Last_name, Description, Address, Location, Date_of_letter, Date_of_Def, pdfname, docFile, tempFolder, pdfFolder) {
  const tempFile = docFile.makeCopy(tempFolder);
  const tempDocFile = DocumentApp.openById(tempFile.getId());
  const body = tempDocFile.getBody();
  body.replaceText("{First name}", First_name);
  body.replaceText("{Last name}", Last_name);
  body.replaceText("{Description}", Description);
  body.replaceText("{Address}", Address);
  body.replaceText("{Location}", Location);
  body.replaceText("{Date of letter}", Date_of_letter);
  body.replaceText("{Date of Def}", Date_of_Def);
  tempDocFile.saveAndClose();
  const pdfContentBlob = tempFile.getAs('application/pdf');
  pdfFolder.createFile(pdfContentBlob).setName(pdfname);
}
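Incidentally, the enum the question was reaching for does exist in Apps Script as MimeType, so this line is equivalent:
// Equivalent to getAs('application/pdf'), using the built-in enum:
const pdfContentBlob = tempFile.getAs(MimeType.PDF);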
References:
getAs()
setValues()

Large file upload to Office 365 (StartUpload, ContinueUpload, FinishUpload) not working as expected - SharePoint

When I upload a large file in chunks using the three new methods (StartUpload, ContinueUpload, FinishUpload), the final uploaded file is corrupt and its size differs from the actual file. I used the REST API to upload the chunks.
The steps I followed are as follows:
Create the HTML file input:
<input name="FileUpload" type="file" id="uploadInput" class="inputFile" multiple="false" onchange="upload(this.files[0])" />
The method below is the starting point of the code.
Create a global variable for the site URL:
var Tasks = {
  urlName: window.location.origin + "/",
  siteName: '/sites/ABC',
};
Calling the upload() method:
First, create a dummy file of size 0 in the folder so the large-file upload can proceed.
Create a FileReader object, then build the chunks of the file, each with three parameters (offset, length, method, i.e. start/continue/finishupload), and push the chunks into an array.
Create a unique id for the upload, i.e. uploadID.
Call the uploadFile method.
function upload(file) {
  var docLibraryName = "/sites/ABC/Shared Documents";
  var fileName = $("#uploadInput").val().replace(/C:\\fakepath\\/i, '');
  var folderName = "";
  createDummyFile(docLibraryName, fileName, folderName);
  var fr = new FileReader();
  var offset = 0;
  var total = file.size;
  var length = 1000000 > total ? total : 1000000;
  var chunks = [];
  fr.onload = evt => {
    while (offset < total) {
      if (offset + length > total)
        length = total - offset;
      chunks.push({
        offset,
        length,
        method: getUploadMethod(offset, length, total)
      });
      offset += length;
    }
    for (var i = 0; i < chunks.length; i++)
      console.log(chunks[i]);
    if (chunks.length > 0) {
      const id = getGuid();
      uploadFile(evt.target.result, id, docLibraryName, fileName, chunks, 0);
    }
  };
  fr.readAsArrayBuffer(file);
}
function createDummyFile(libraryName, fileName, folderName) {
  return new Promise((resolve, reject) => {
    var endpoint = Tasks.urlName + Tasks.siteName + "/_api/web/GetFolderByServerRelativeUrl('" + libraryName + "/" + folderName + "')/Files/add(url=@TargetFileName,overwrite='true')?" +
      "&@TargetFileName='" + fileName + "'";
    var url;
    const headers = {
      "accept": "application/json;odata=verbose"
    };
    performUpload(endpoint, headers, libraryName, fileName, folderName, convertDataBinaryString(0));
  });
}
function S4() {
  return (((1 + Math.random()) * 0x10000) | 0).toString(16).substring(1);
}

function getGuid() {
  return (S4() + S4() + "-" + S4() + "-4" + S4().substr(0, 3) + "-" + S4() + "-" + S4() + S4() + S4()).toLowerCase();
}

// check position to select the method
function getUploadMethod(offset, length, total) {
  if (offset + length + 1 > total) {
    return 'finishupload';
  } else if (offset === 0) {
    return 'startupload';
  } else if (offset < total) {
    return 'continueupload';
  }
  return null;
}
The uploadFile method:
Convert the ArrayBuffer into blob chunks to start uploading the file.
Start the actual chunk upload using the methods and the 1 MB offsets created earlier (the uploadFileChunk method).
Loop over the chunks, calling the same method for each.
function uploadFile(result, id, libraryPath, fileName, chunks, index) {
  const data = convertFileToBlobChunks(result, chunks[index]);
  var response = uploadFileChunk(id, libraryPath, fileName, chunks[index], data);
  index += 1;
  if (index < chunks.length)
    uploadFile(result, id, libraryPath, fileName, chunks, index, chunks[index].offset);
}

function convertFileToBlobChunks(result, chunkInfo) {
  var arrayBuffer = chunkInfo.method === 'finishupload' ? result.slice(chunkInfo.offset) : result.slice(chunkInfo.offset, chunkInfo.offset + chunkInfo.length);
  return convertDataBinaryString(arrayBuffer);
}

function convertDataBinaryString(data) {
  var fileData = '';
  var byteArray = new Uint8Array(data);
  for (var i = 0; i < byteArray.byteLength; i++) {
    fileData += String.fromCharCode(byteArray[i]);
  }
  return fileData;
}
The uploadFileChunk method actually starts uploading the file chunks:
Form the endpoint string: for startupload there is no fileOffset, while continueupload and finishupload include one.
Call the performUpload method to upload via the REST API.
function uploadFileChunk(id, libraryPath, fileName, chunk, data) {
  new Promise((resolve, reject) => {
    var offset = chunk.offset === 0 ? '' : ',fileOffset=' + chunk.offset;
    var folderName = "";
    var endpoint = Tasks.urlName + Tasks.siteName + "/_api/web/getfilebyserverrelativeurl('" + libraryPath + "/" + fileName + "')/" + chunk.method + "(uploadId=guid'" + id + "'" + offset + ")";
    const headers = {
      "Accept": "application/json; odata=verbose",
      "Content-Type": "application/octet-stream"
    };
    performUpload(endpoint, headers, libraryPath, fileName, folderName, data);
  });
}

function performUpload(endpoint, headers, libraryName, fileName, folderName, fileData) {
  new Promise((resolve, reject) => {
    var digest = $("#__REQUESTDIGEST").val();
    $.ajax({
      url: endpoint,
      async: false,
      method: "POST",
      headers: headers,
      data: fileData,
      binaryStringRequestBody: true,
      success: function (data) {},
      error: err => reject(err.responseText)
    });
  });
}
Can anyone suggest why the uploaded file is corrupted, with a size smaller or greater than the actual file?
Thanks in advance.
I had the same problem with this code. I changed convertFileToBlobChunks to just return the ArrayBuffer:
function convertFileToBlobChunks(result, chunkInfo) {
  var arrayBuffer = chunkInfo.method === 'finishupload' ?
    result.slice(chunkInfo.offset) : result.slice(chunkInfo.offset, chunkInfo.offset + chunkInfo.length);
  return arrayBuffer;
}
I also removed "Content-Type": "application/octet-stream" from the headers.
After doing that, it uploaded fine.
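One extra hedged caveat: jQuery form-encodes non-string data by default, so if the raw ArrayBuffer still arrives mangled, adding processData: false to the $.ajax call may also be needed (an assumption on my part; the answer above worked without mentioning it):
$.ajax({
  url: endpoint,
  async: false,
  method: "POST",
  headers: headers,
  data: fileData,       // now an ArrayBuffer
  processData: false,   // keep jQuery from form-encoding the buffer
  binaryStringRequestBody: true,
  success: function (data) {},
  error: err => reject(err.responseText)
});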