Jsreport Malformed URI error when rendering Async - pdf

Since I am sending lots of data with the request, I have to use renderAsync so that it uses POST. When the stream comes back, I use the following JS code to open it:
jsreport.renderAsync(request).then(function(arrayBuffer) {
    window.open("data:application/pdf;base64," + arrayBuffer);
});
But then the error showed up. Is there an alternative way to do it?

This seems to work
<script>
jsreport.renderAsync(request).then(function(response) {
    // The response is an ArrayBuffer; convert it to a binary string first
    var uInt8Array = new Uint8Array(response);
    var i = uInt8Array.length;
    var binaryString = new Array(i);
    while (i--)
    {
        binaryString[i] = String.fromCharCode(uInt8Array[i]);
    }
    var data = binaryString.join('');
    // Base64-encode the binary string and open it as a data URI
    var base64 = window.btoa(data);
    window.open("data:application/pdf;base64, " + base64);
})
</script>
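An alternative that skips the base64 conversion entirely is to wrap the ArrayBuffer in a Blob and open an object URL. A minimal sketch, assuming the response is the raw PDF bytes returned by renderAsync:

jsreport.renderAsync(request).then(function(response) {
    // Wrap the raw bytes in a Blob so the browser can open them directly
    var blob = new Blob([response], { type: 'application/pdf' });
    var blobUrl = URL.createObjectURL(blob);
    window.open(blobUrl);
});

This also avoids the length limits some browsers place on data: URIs.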


Write rows to BigQuery via nodejs BigQuery Storage Write API

It seems quite new, but I'm hoping someone here has been able to use Node.js to write directly to BigQuery storage using @google-cloud/bigquery-storage.
There is an explanation of how the overall backend API works and how to write a collection of rows atomically using the BigQuery Write API, but there is no such documentation for Node.js yet. A recent release, 2.7.0, notes the addition of this feature, but there is no documentation and the code is not easily understood.
There is an open issue requesting an example, but I thought I'd try my luck and see if anyone has been able to use this API yet.
Suppose you have a BigQuery table called student with three columns: id, name, and age. The following steps will let you load data into the table with the Node.js Storage Write API.
Define a student.proto file as follows:
syntax = "proto2";
message Student {
    required int64 id = 1;
    optional string name = 2;
    optional int64 age = 3;
}
Run the following at the command prompt
protoc --js_out=import_style=commonjs,binary:. student.proto
It should generate a student_pb.js file in the current directory.
Write the following JS code in the current directory and run it:
const {BigQueryWriteClient} = require('@google-cloud/bigquery-storage').v1;
const st = require('./student_pb.js');
const type = require('@google-cloud/bigquery-storage').protos.google.protobuf.FieldDescriptorProto.Type;
const mode = require('@google-cloud/bigquery-storage').protos.google.cloud.bigquery.storage.v1.WriteStream.Type;

const storageClient = new BigQueryWriteClient();
const parent = `projects/${project}/datasets/${dataset}/tables/student`;
var writeStream = {type: mode.PENDING};
var student = new st.Student();
var protoDescriptor = {};
protoDescriptor.name = 'student';
protoDescriptor.field = [
    {'name': 'id', 'number': 1, 'type': type.TYPE_INT64},
    {'name': 'name', 'number': 2, 'type': type.TYPE_STRING},
    {'name': 'age', 'number': 3, 'type': type.TYPE_INT64}
];

async function run() {
    try {
        var request = {
            parent,
            writeStream
        };
        var response = await storageClient.createWriteStream(request);
        writeStream = response[0].name;
        var serializedRows = [];
        //Row 1
        student.setId(1);
        student.setName('st1');
        student.setAge(15);
        serializedRows.push(student.serializeBinary());
        //Row 2
        student.setId(2);
        student.setName('st2');
        student.setAge(15);
        serializedRows.push(student.serializeBinary());
        var protoRows = {
            serializedRows
        };
        var proto_data = {
            writerSchema: {protoDescriptor},
            rows: protoRows
        };
        // Construct request
        request = {
            writeStream,
            protoRows: proto_data
        };
        // Insert rows
        const stream = await storageClient.appendRows();
        stream.on('data', response => {
            console.log(response);
        });
        stream.on('error', err => {
            throw err;
        });
        stream.on('end', async () => {
            /* API call completed */
            try {
                var response = await storageClient.finalizeWriteStream({name: writeStream});
                response = await storageClient.batchCommitWriteStreams({parent, writeStreams: [writeStream]});
            } catch (err) {
                console.log(err);
            }
        });
        stream.write(request);
        stream.end();
    } catch (err) {
        console.log(err);
    }
}
run();
Make sure your environment variables are set correctly to point to the file containing your Google Cloud credentials.
Change the project and dataset values accordingly.

How to upload static json file into indexedDB ONLY when an upgrade is needed (i.e. onupgradeneeded)

I'm trying to upload a static JSON file into an indexedDB ONLY when an upgrade is needed (i.e. onupgradeneeded). I've searched for answers to this repeatedly but have yet to see code examples of how to approach this.
My current code below gets the JSON file every time the page opens, which is of course inefficient, since I only need to get the JSON file if the indexedDB has not yet been created or needs upgrading.
I tried putting the xhr.onload section at the end of the .onupgradeneeded function, but as many have noted, .onsuccess gets called before xhr.onload has completed.
var jsonUrl = '/path/to/hcLookup.json';
var req, db, hcObjectStore, objectStore, data, dataArr, trans, addreq, key;

var xhr = new XMLHttpRequest();
xhr.open("GET", jsonUrl, true);
xhr.type = 'json';
xhr.send();

xhr.onload = function(msg) {
    data = msg.target.response;
    req = window.indexedDB.open("hcLookup", 1);
    req.onerror = function(event){console.log("onerror: " + event.target.errorCode)};
    req.onsuccess = function(event){
        console.log("ready.");
    };
    req.onupgradeneeded = function(event){
        db = event.target.result;
        objectStore = db.createObjectStore("hcLookup", {autoIncrement: true});
        objectStore.createIndex("S", "S", {unique: false});
        // make sure the objectStore creation is finished before adding data into it
        objectStore.transaction.oncomplete = function (event) {
            // Store values in the newly created objectStore.
            trans = db.transaction(["hcLookup"], "readwrite");
            hcObjectStore = trans.objectStore("hcLookup");
            // Do something when all the data is added to the database.
            trans.oncomplete = function (event) {
                console.log("upgrading done!");
            };
            trans.onerror = function (event) {
                console.log("bulk add onerror: " + event.target.errorCode)
            };
            // convert the JSON into an array in order to add the data to the objectStore
            dataArr = JSON.parse(data);
            for (var i in dataArr) {
                addreq = hcObjectStore.add(dataArr[i]);
            }
        };
    };
};
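One way to avoid downloading the JSON on every page load is to turn the flow around: open the database first, set a flag in onupgradeneeded, and only fetch and insert the data from onsuccess when that flag was set. A minimal sketch of that approach (illustrative only, using fetch instead of XHR and the same store names as above):
var needsLoad = false;
var req = window.indexedDB.open("hcLookup", 1);
req.onupgradeneeded = function(event) {
    // Runs only when the DB is created or its version is bumped
    var db = event.target.result;
    var store = db.createObjectStore("hcLookup", {autoIncrement: true});
    store.createIndex("S", "S", {unique: false});
    needsLoad = true; // remember that the new store still needs data
};
req.onsuccess = function(event) {
    var db = event.target.result;
    if (!needsLoad) { return; } // DB already existed, skip the download
    fetch('/path/to/hcLookup.json')
        .then(function(res) { return res.json(); })
        .then(function(dataArr) {
            var trans = db.transaction(["hcLookup"], "readwrite");
            var store = trans.objectStore("hcLookup");
            dataArr.forEach(function(item) { store.add(item); });
            trans.oncomplete = function() { console.log("upgrading done!"); };
        });
};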

Open pdf from bytes array in angular 5

I was following the links below to display a PDF page in a new tab in my Angular 5 application, but I was unable to achieve the result.
I am consuming the byte array from a Spring controller API.
PDF Blob is not showing content, Angular 2
PDF Blob - Pop up window not showing content
Angular2 Displaying PDF
I tried the options below, but none of them worked.
Trial 1
Consumed the response as json
component.ts
clickEvent(){
    this.service.getPDF().subscribe((response)=>{
        let file = new Blob([response.byteString], { type: 'application/pdf' });
        var fileURL = URL.createObjectURL(file);
        window.open(fileURL);
    })
}
service.ts
getPDF(){
    const url = `${this.serviceUrl}/pdf`;
    const httpOptions = {
        headers: new HttpHeaders(
            {
                'Accept': 'application/json',
                'responseType': 'blob'
            }
        )
    };
    return this.http.get<any>(url, httpOptions);
}
Trial 2
Consumed the response as json
component.ts
clickEvent(){
    this.service.getPDF().subscribe((response)=>{
        let file = new Blob([response.byteArray], { type: 'application/pdf' });
        var fileURL = URL.createObjectURL(file);
        window.open(fileURL);
    })
}
service.ts
getPDF(){
    const url = `${this.serviceUrl}/pdf`;
    const httpOptions = {
        headers: new HttpHeaders(
            {
                'Accept': 'application/json',
                'responseType': 'arraybuffer'
            }
        )
    };
    return this.http.get<any>(url, httpOptions);
}
Trial 3
Consumed the response as bytes
component.ts
clickEvent(){
    this.service.getPDF().subscribe((response)=>{
        let file = new Blob([response], { type: 'application/pdf' });
        var fileURL = URL.createObjectURL(file);
        window.open(fileURL);
    })
}
service.ts
getPDF(){
    const url = `${this.serviceUrl}/pdf`;
    const httpOptions = {
        headers: new HttpHeaders(
            {
                'responseType': 'blob' //both combinations tried
                //'responseType' : 'arraybuffer'
            }
        )
    };
    return this.http.get<any>(url, httpOptions);
}
With all these combinations I only get two results: an empty PDF document, or "Failed to load PDF document."
For reference, here is the Java Spring controller code.
controller.java
@GetMapping(value = "/pdf")
public ResTest generatePDF(HttpServletResponse response) throws IOException {
    ResTest test = new ResTest();
    ByteArrayOutputStream baos = docTypeService.createPdf();
    test.setByteArray(baos.toByteArray());
    test.setByteString(new String(baos.toByteArray()));
    return test;
}
At last, I was able to render the PDF. There were two small mistakes on my side.
The first problem was that I put 'responseType' inside HttpHeaders, which was wrong.
It should be outside, as below.
The second problem was that even if you specify responseType: 'arraybuffer', it wasn't accepted; you need to write it as responseType: 'arraybuffer' as 'json'. (Reference)
The corrected and working code is below.
Trial 3
component.ts (no changes)
clickEvent(){
    this.service.getPDF().subscribe((response)=>{
        let file = new Blob([response], { type: 'application/pdf' });
        var fileURL = URL.createObjectURL(file);
        window.open(fileURL);
    })
}
service.ts
getPDF(){
    const url = `${this.serviceUrl}/pdf`;
    const httpOptions = {
        'responseType': 'arraybuffer' as 'json'
        //'responseType': 'blob' as 'json' //This also worked
    };
    return this.http.get<any>(url, httpOptions);
}
Referred from the link below:
https://github.com/angular/angular/issues/18586
I had the same problem with Angular and PDF display. I will describe my solution: use a base64-encoded string. All modern browsers support base64.
Use java.util.Base64 to encode your byte array:
byte[] bytes = baos.toByteArray();
String string = Base64.getEncoder().encodeToString(bytes);
test.setByteString(string);
On the frontend side, use the standard MIME type for PDF and indicate that you are using base64: data:application/pdf;base64,.
Ref. to mime types: https://en.wikipedia.org/wiki/Media_type
If you need to open document in a new window:
let newPdfWindow = window.open("","Print");
let content = encodeURIComponent(response.byteString);
let iframeStart = "<iframe width='100%' height='100%' src='data:application/pdf;base64, ";
let iframeEnd = "'><\/iframe>";
newPdfWindow.document.write(iframeStart + content + iframeEnd);
If you need to open it in a new tab, you can simply provide it to your HTML href:
let pdfHref = this.sanitizer.bypassSecurityTrustUrl('data:application/octet-stream;base64,' + content);
bypassSecurityTrustUrl tells Angular to trust the URL (it bypasses URL sanitization). As I remember, there was some problem with Angular security that prevented me from seeing the content otherwise.
PS: before checking how it works with Angular, I would recommend saving the PDF file to disk and trying to open it, so you can be sure the file itself is valid and opens in a normal reader.
Update: the simplest solution is to use the pdf.js library: https://github.com/mozilla/pdf.js
Have you looked for an angular component to wrap pdf.js?
https://github.com/VadimDez/ng2-pdf-viewer
Sample usage:
<pdf-viewer [src]="pdfSrc"
            [render-text]="true"
            style="display: block;">
</pdf-viewer>
pdfSrc can be a URL string or a Uint8Array.
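For example, a rough sketch of feeding the viewer a Uint8Array, using the browser fetch API and a placeholder /api/pdf endpoint rather than Angular's HttpClient:

// '/api/pdf' is a hypothetical endpoint returning the raw PDF bytes
fetch('/api/pdf')
    .then(function(res) { return res.arrayBuffer(); })
    .then(function(buffer) {
        // In the component, assign this to the property bound to [src], e.g. this.pdfSrc
        var pdfSrc = new Uint8Array(buffer);
    });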
When you make an AJAX call to get the PDF/file stream:
var req = this.getYourPDFRequest(fd);
this.postData(environment.previewPDFRFR, req).then(res => {
    res.blob().then(blob => {
        const fileURL = URL.createObjectURL(blob);
        window.open(fileURL, '', 'height=650,width=840');
    })
});
If your byte array comes from a .NET backend, you have to return
return File(doc.BinaryData, "application/pdf"); // page visible in typescript
and not this:
return Ok(doc.BinaryData); // page blank in typescript

How to manage depending functions in nodejs

I am trying to teach myself Node.js and Express; however, coming from Java and C++, this is proving difficult to get used to.
I made a simple and messy module that is supposed to return a weather forecast for a given zip code.
It works by taking the user's zip code and using a Google API to generate the geo coordinates for that zip code. I get the coordinates from the JSON response and then provide them to the next API call, this time to the forecast.io API, where the weather data for the location is again taken from a JSON response.
Coming from Java, and with a not-so-solid background in JavaScript, I am having a hard time making these two functions wait for one another: I need the Google API call to finish first because the coordinates it provides are needed for the second API call. Can someone take a look at this code and tell me if the strategy I used is correct, or suggest how situations like this are handled in JavaScript?
Here is the code:
// The required modules.
var http = require("http");
var https = require("https");
//result object
var resultSet = {
    latitude: "",
    longitude: "",
    localInfo: "",
    weather: "",
    humidity: "",
    pressure: "",
    time: ""
};

//print out error messages
function printError(error){
    console.error(error.message);
}

//Forecast API required information:
//key for the forecast IO app
var forecast_IO_Key = "this is my key, not publishing for security reasons";
var forecast_IO_Web_Adress = "https://api.forecast.io/forecast/";

//Create Forecast request string function
function createForecastRequest(latitude, longitude){
    var request = forecast_IO_Web_Adress + forecast_IO_Key + "/"
        + latitude + "," + longitude;
    return request;
}

//Google GEO API required information:
//Create Google Geo Request
var google_GEO_Web_Adress = "https://maps.googleapis.com/maps/api/geocode/json?address=";
function createGoogleGeoMapRequest(zipCode){
    var request = google_GEO_Web_Adress + zipCode + "&sensor=false";
    return request;
}

function get(zipCode){
    // 1- Need to request google for geo locations using a given zip
    var googleRequest = https.get(createGoogleGeoMapRequest(zipCode), function(response){
        //console.log(createGoogleGeoMapRequest(zipCode));
        var body = "";
        var status = response.statusCode;
        //a- Read the data.
        response.on("data", function(chunk){
            body += chunk;
        });
        //b- Parse the data.
        response.on("end", function(){
            if(status === 200){
                try{
                    var coordinates = JSON.parse(body);
                    resultSet.latitude = coordinates.results[0].geometry.location.lat;
                    resultSet.longitude = coordinates.results[0].geometry.location.lng;
                    resultSet.localInfo = coordinates.results[0].address_components[0].long_name + ", " +
                        coordinates.results[0].address_components[1].long_name + ", " +
                        coordinates.results[0].address_components[2].long_name + ", " +
                        coordinates.results[0].address_components[3].long_name + ". ";
                }catch(error){
                    printError(error.message);
                }finally{
                    connectToForecastIO(resultSet.latitude, resultSet.longitude);
                }
            }else{
                printError({message: "Error with GEO API" + http.STATUS_CODES[response.statusCode]})
            }
        });
    });

    function connectToForecastIO(latitude, longitude){
        var forecastRequest = https.get(createForecastRequest(latitude, longitude), function(response){
            // console.log(createForecastRequest(latitude,longitude));
            var body = "";
            var status = response.statusCode;
            //read the data
            response.on("data", function(chunk){
                body += chunk;
            });
            //parse the data
            response.on("end", function(){
                try{
                    var weatherReport = JSON.parse(body);
                    resultSet.weather = weatherReport.currently.summary;
                    resultSet.humidity = weatherReport.currently.humidity;
                    resultSet.temperature = weatherReport.currently.temperature;
                    resultSet.pressure = weatherReport.currently.pressure;
                    resultSet.time = weatherReport.currently.time;
                }catch(error){
                    printError(error.message);
                }finally{
                    return resultSet;
                }
            });
        });
    }
}
//define the name of the outer module.
module.exports.get = get;
Is the return statement properly placed? Is my use of finally proper here? Please note that I come from a Java background, where it is perfectly fine to use try{}, catch(){}, and finally{} blocks to execute cleanup code; it was the only way I managed to get this module to work. But now that I have incorporated some Express and I try to execute this module's method from another module, all I am getting is an undefined return.
You could use the Promise API, kind of like Futures in Java: wrap both functions in promises, and then you can wait for the first to resolve before executing the next function.
var googleRequest = function(zipCode) {
    return new Promise(function(resolve, reject) {
        var request = https.get(createGoogleGeoMapRequest(zipCode), function(response) {
            if (response.statusCode !== 200) {
                reject(new Error('Failed to get request status:' + response.statusCode));
            }
            var body = "";
            //a- Read the data.
            response.on("data", function(chunk) {
                body += chunk;
            });
            //b- Parse the data.
            response.on("end", function() {
                var coordinates = JSON.parse(body);
                resultSet.latitude = coordinates.results[0].geometry.location.lat;
                resultSet.longitude = coordinates.results[0].geometry.location.lng;
                resultSet.localInfo = coordinates.results[0].address_components[0].long_name + ", " +
                    coordinates.results[0].address_components[1].long_name + ", " +
                    coordinates.results[0].address_components[2].long_name + ", " +
                    coordinates.results[0].address_components[3].long_name + ". ";
                resolve(resultSet);
            })
        });
        request.on('error', function(err) {
            reject(err);
        });
    });
}
After that you could just do
googleRequest(90210).then(function(result) {
    connectToForecastIO(result.latitude, result.longitude);
});
You can find out more about Promise usage in the Promise API docs.
You should also note that there are several libraries available that provide promise-based HTTP requests, such as fetch.
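As a sketch of how the two calls could then chain together, assuming connectToForecastIO is wrapped in a promise the same way (the version below is illustrative only, reusing the helpers and resultSet from the question):

function connectToForecastIO(latitude, longitude) {
    return new Promise(function(resolve, reject) {
        https.get(createForecastRequest(latitude, longitude), function(response) {
            var body = "";
            response.on("data", function(chunk) { body += chunk; });
            response.on("end", function() {
                try {
                    var weatherReport = JSON.parse(body);
                    resultSet.weather = weatherReport.currently.summary;
                    resultSet.humidity = weatherReport.currently.humidity;
                    resultSet.temperature = weatherReport.currently.temperature;
                    resultSet.pressure = weatherReport.currently.pressure;
                    resultSet.time = weatherReport.currently.time;
                    resolve(resultSet);
                } catch (err) {
                    reject(err);
                }
            });
        }).on('error', reject);
    });
}

googleRequest(90210)
    .then(function(result) {
        // Returning the second promise makes the chain wait for it
        return connectToForecastIO(result.latitude, result.longitude);
    })
    .then(function(finalResult) {
        console.log(finalResult); // the fully populated resultSet
    })
    .catch(printError);

Returning the second promise from the first .then() is what makes the chain wait; the final .then() only runs once both requests have finished.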

Reading binary data from a child process in Node.js

When trying to read data in Node.js from an ImageMagick child process, it comes out corrupted.
A simple test case would be the following:
var fs = require('fs');
var exec = require('child_process').exec;
var cmd = 'convert ./test.jpg -';
exec(cmd, {encoding: 'binary', maxBuffer: 5000*1024}, function(error, stdout) {
    fs.writeFileSync('test2.jpg', stdout);
});
I would expect that to be the equivalent of the command line convert ./test.jpg - > test2.jpg, which does write the binary file correctly.
Originally there was a problem with the maxBuffer option being too small, resulting in a truncated file. After increasing it, the file now appears slightly larger than expected and is still corrupted.
The data from stdout needs to be sent over HTTP.
What would be the correct way to read this data from the ImageMagick stdout?
There were two problems with the initial approach.
The maxBuffer needs to be high enough to handle the whole response from the child process.
Binary encoding needs to be properly set everywhere.
A full working example would be the following:
var fs = require('fs');
var exec = require('child_process').exec;
var cmd = 'convert ./test.jpg -';
exec(cmd, {encoding: 'binary', maxBuffer: 5000*1024}, function(error, stdout) {
    fs.writeFileSync('test2.jpg', stdout, 'binary');
});
Another example, sending the data in an HTTP response using the Express web framework, would look like this:
var express = require('express');
var exec = require('child_process').exec;
var app = express.createServer();

app.get('/myfile', function(req, res) {
    var cmd = 'convert ./test.jpg -';
    exec(cmd, {encoding: 'binary', maxBuffer: 5000*1024}, function(error, stdout) {
        res.send(new Buffer(stdout, 'binary'));
    });
});
Ah, problem is:
If timeout is greater than 0, then it will kill the child process if it runs longer than timeout milliseconds. The child process is killed with killSignal (default: 'SIGTERM').
maxBuffer specifies the largest amount of data allowed on stdout or stderr - if this value is exceeded then the child process is killed.
Source: http://nodejs.org/docs/v0.4.8/api/child_processes.html#child_process.exec
So if your image is over the default buffer size of 200*1024 bytes, your image is going to be corrupted as you mentioned. I was able to get it to work with the following code:
var fs = require('fs');
var spawn = require('child_process').spawn;
var util = require('util');
var output_file = fs.createWriteStream('test2.jpg', {encoding: 'binary'});
var convert = spawn('convert', ['test.jpg', '-']);
convert.stdout.on('data', function(data) {
    output_file.write(data);
});
convert.on('exit', function(code) {
    output_file.end();
});
Here I used spawn to get a streamable stdout, then used a writable stream to write the data in binary format. I just tested it and was able to open the resulting test2.jpg image.
EDIT: Yes you can use this to send the result over HTTP. Here's an example of me downsizing an image with convert, then posting the result to the glowfoto API:
var fs = require('fs');
var http = require('http');
var util = require('util');
var spawn = require('child_process').spawn;
var url = require('url');
// Technically the only reason I'm using this
// is to get the XML parsed from the first call
// you probably don't need this, but just in case:
//
// npm install xml2js
var xml = require('xml2js');
var post_url;
var input_filename = 'giant_image.jpg';
var output_filename = 'giant_image2.jpg';
// The general format of a multipart/form-data part looks something like:
// --[boundary]\r\n
// Content-Disposition: form-data; name="fieldname"\r\n
// \r\n
// field value
function EncodeFieldPart(boundary, name, value) {
    var return_part = "--" + boundary + "\r\n";
    return_part += "Content-Disposition: form-data; name=\"" + name + "\"\r\n\r\n";
    return_part += value + "\r\n";
    return return_part;
}
// Same as EncodeFieldPart except that it adds a filename,
// as well as sets the content type (mime) for the part
function EncodeFilePart(boundary, type, name, filename) {
    var return_part = "--" + boundary + "\r\n";
    return_part += "Content-Disposition: form-data; name=\"" + name + "\"; filename=\"" + filename + "\"\r\n";
    return_part += "Content-Type: " + type + "\r\n\r\n";
    return return_part;
}
// We could use Transfer-Encoding: Chunked in the headers
// but not every server supports this. Instead we're going
// to build our post data, then create a buffer from it to
// pass to our MakePost() function. This means you'll have
// 2 copies of the post data sitting around
function PreparePost() {
    // Just a random string I copied from a packet sniff of a mozilla post
    // This can be anything you want really
    var boundary = "---------------------------168072824752491622650073";
    var post_data = '';
    post_data += EncodeFieldPart(boundary, 'type', 'file');
    post_data += EncodeFieldPart(boundary, 'thumbnail', '400');
    post_data += EncodeFilePart(boundary, 'image/jpeg', 'image', output_filename);
    fs.readFile(output_filename, 'binary', function(err, data){
        post_data += data;
        // This terminates our multi-part data
        post_data += "\r\n--" + boundary + "--";
        // We need to have our network transfer in binary
        // Buffer is a global object
        MakePost(new Buffer(post_data, 'binary'));
    });
}
function MakePost(post_data) {
    var parsed_url = url.parse(post_url);
    var post_options = {
        host: parsed_url.hostname,
        port: '80',
        path: parsed_url.pathname,
        method: 'POST',
        headers: {
            'Content-Type': 'multipart/form-data; boundary=---------------------------168072824752491622650073',
            'Content-Length': post_data.length
        }
    };
    var post_request = http.request(post_options, function(response){
        response.setEncoding('utf8');
        response.on('data', function(chunk){
            console.log(chunk);
        });
    });
    post_request.write(post_data);
    post_request.end();
}
// Glowfoto first makes you get the url of the server
// to upload
function GetServerURL() {
    var response = '';
    var post_options = {
        host: 'www.glowfoto.com',
        port: '80',
        path: '/getserverxml.php'
    };
    var post_req = http.request(post_options, function(res) {
        res.setEncoding('utf8');
        // Here we buildup the xml
        res.on('data', function (chunk) {
            response += chunk;
        });
        // When we're done, we parse the xml
        // Could probably just do string manipulation instead,
        // but just to be safe
        res.on('end', function(){
            var parser = new xml.Parser();
            parser.addListener('end', function(result){
                // Grab the uploadform element value and prepare our post
                post_url = result.uploadform;
                PreparePost();
            });
            // This parses an XML string into a JS object
            var xml_object = parser.parseString(response);
        });
    });
    post_req.end();
}
// We use spawn here to get a streaming stdout
// This will use imagemagick to downsize the full image to 30%
var convert = spawn('convert', ['-resize', '30%', input_filename, '-']);
// Create a binary write stream for the resulting file
var output_file = fs.createWriteStream(output_filename, {encoding: 'binary'});
// This just writes to the file and builds the data
convert.stdout.on('data', function(data){
    output_file.write(data);
});
// When the process is done, we close off the file stream
// Then trigger off our POST code
convert.on('exit', function(code){
    output_file.end();
    GetServerURL();
});
Sample result:
$ node test.js
<?xml version="1.0" encoding="utf-8"?>
<upload>
<thumburl>http://img4.glowfoto.com/images/2011/05/29-0939312591T.jpg</thumburl>
<imageurl>http://www.glowfoto.com/static_image/29-093931L/2591/jpg/05/2011/img4/glowfoto</imageurl>
<codes>http://www.glowfoto.com/getcode.php?srv=img4&img=29-093931L&t=jpg&rand=2591&m=05&y=2011</codes>
</upload>
You can also take advantage of I/O pipes in Node.js:
var file = fs.createWriteStream("path-to-file", {encoding: 'binary'});
converter = spawn(cmd, ['parameters omitted']);
converter.stdout.pipe(file); // this pipes stdout straight into your file
converter.on('exit', function(){ file.end(); });
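The same idea extends to the HTTP case: since stdout is a readable stream, it can be piped straight into the response object instead of being buffered. A rough sketch using the modern express() API (the route name and headers are just placeholders):

var express = require('express');
var spawn = require('child_process').spawn;

var app = express();

app.get('/myfile', function(req, res) {
    var convert = spawn('convert', ['./test.jpg', '-']);
    res.setHeader('Content-Type', 'image/jpeg');
    // Stream the binary stdout directly into the HTTP response; no maxBuffer, no encoding juggling
    convert.stdout.pipe(res);
    convert.on('error', function(err) {
        res.statusCode = 500;
        res.end();
    });
});

app.listen(3000);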