image in image (overlay image) with gm (GraphicsMagick) - watermark

I want to put a watermark image (logo) on my resized pictures, as you can see in the code below.
Can somebody tell me how to place the watermark in the lower-right corner with an opacity of 50%?
var fs = require('fs')
  , gm = require('gm');

function walk(currentDirPath, callback) {
    var fs = require('fs'), path = require('path');
    fs.readdirSync(currentDirPath).forEach(function (name) {
        var filePath = path.join(currentDirPath, name);
        var stat = fs.statSync(filePath);
        if (stat.isFile()) {
            callback(filePath, stat);
        } else if (stat.isDirectory()) {
            walk(filePath, callback);
        }
    });
}

var inputDir = "/Users/USER/Desktop/src/";
var outputDir = "/Users/USER/Desktop/target/";

walk(inputDir, function (filePath, stat) {
    // match filenames like IMG_1234.JPG
    var filename = filePath.match(/IMG_\d{4}\.JPG/gmi).toString();
    console.log(filename);
    var outputfile = outputDir + filename;
    var readStream = fs.createReadStream(filePath);
    gm(readStream, filename)
        .size({bufferStream: true}, function (err, size) {
            this.resize(size.width / 2, size.height / 2);
            this.write(outputfile, function (err) {
                if (!err) console.log('done');
            });
        });
});

Here is the solution:
.draw(['image Over 0,0 0,0 /Users/USER/Desktop/target/nike-global-diversity-logo.png'])
Put this line of code after the gm() call and before the resizing.
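For context, a minimal sketch (untested) of how that line fits into the chain above. The logo size of 200x100 is an assumption you would replace with your real logo dimensions; since 0,0 draws at the top-left, the lower-right offset is computed from the size returned by .size(). For the 50% opacity, one simple route is to save the logo PNG itself with ~50% alpha, because 'image Over' just composites the logo with whatever alpha it already carries:

gm(readStream, filename)
    .size({bufferStream: true}, function (err, size) {
        if (err) return console.error(err);
        // assumed logo dimensions - replace with the real size of your logo
        var logoWidth = 200, logoHeight = 100;
        var x = size.width - logoWidth;
        var y = size.height - logoHeight;
        // draw the logo before resizing, as suggested above
        this.draw(['image Over ' + x + ',' + y + ' 0,0 /Users/USER/Desktop/target/nike-global-diversity-logo.png'])
            .resize(size.width / 2, size.height / 2)
            .write(outputfile, function (err) {
                if (!err) console.log('done');
            });
    });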

Related

html2canvas: add multiple pages using jsPDF?

Below is my snippet; right now it prints all the canvases on a single page, but I want to print each canvas on a new page.
I use Fabric.js to render the canvases from JSON. Other PDF libraries were not able to print the canvas (they downloaded an empty PDF), so I tried jsPDF but got stuck at this point.
DEMO
<script>
var jsPDF = window.jspdf.jsPDF;
var html2canvas = window.html2canvas;

function downloadpdf() {
    console.log('Inside downloadpdf ');
    var quotes = document.getElementById('generatePDF');
    html2canvas(quotes, {
        onrendered: function (canvas) {
            canvas.getContext('2d');
            var HTML_Width = canvas.width;
            var HTML_Height = canvas.height;
            var top_left_margin = 15;
            var PDF_Width = HTML_Width + parseInt(top_left_margin * 2);
            var PDF_Height = parseInt(PDF_Width * 1.5) + parseInt(top_left_margin * 2);
            var canvas_image_width = HTML_Width;
            var canvas_image_height = HTML_Height;
            var totalPDFPages = Math.ceil(HTML_Height / PDF_Height) - 1;
            var pages = $('#generatePDF .canvas-container').length;
            console.log('height => ' + canvas.height + " width => " + canvas.width + 'totalpage => ' + pages);
            var imgData = canvas.toDataURL("image/jpeg", 1.0);
            var pdf = new jsPDF('p', 'pt', [PDF_Width, PDF_Height]);
            pdf.addImage(imgData, 'JPG', top_left_margin, top_left_margin, canvas_image_width, canvas_image_height);
            for (var i = 1; i <= pages; i++) {
                //pdf.addPage(PDF_Width, PDF_Height);
                pdf.addPage();
                let margin = -parseInt(PDF_Height * i) + parseInt(top_left_margin * 4);
                if (i > 1) {
                    margin = parseInt(margin + i * 8);
                }
                pdf.addImage(imgData, 'JPG', top_left_margin, margin, canvas_image_width, canvas_image_height);
            }
            pdf.save("HTML-Document.pdf");
        }
    });
}
</script>
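One possible approach, shown here only as an untested sketch: instead of rendering the whole #generatePDF container once, render each Fabric.js .canvas-container element separately and start a new PDF page for each one. This assumes the promise-based html2canvas API and reuses the selectors from the snippet above:

async function downloadEachCanvasOnItsOwnPage() {
    var jsPDF = window.jspdf.jsPDF;
    var containers = document.querySelectorAll('#generatePDF .canvas-container');
    var pdf = new jsPDF('p', 'pt', 'a4');
    var margin = 15;
    for (var i = 0; i < containers.length; i++) {
        // render one canvas container at a time
        var canvas = await html2canvas(containers[i]);
        var imgData = canvas.toDataURL('image/jpeg', 1.0);
        // scale the image to the page width, preserving aspect ratio
        var pageWidth = pdf.internal.pageSize.getWidth();
        var imgWidth = pageWidth - margin * 2;
        var imgHeight = canvas.height * imgWidth / canvas.width;
        if (i > 0) pdf.addPage(); // every canvas after the first starts a new page
        pdf.addImage(imgData, 'JPEG', margin, margin, imgWidth, imgHeight);
    }
    pdf.save('HTML-Document.pdf');
}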

How to upload multiple images to a REST API in Flutter using HTTP?

I want to upload multiple images to the REST API. I tried the code below to upload a single image, and that works fine. For multiple image selection I'm using multi_image_picker (link). How can I modify the code below to upload multiple images? Thank you.
Future<String> uploadSingleImage(File file, String userid) async {
  final prefs = await SharedPreferences.getInstance();
  final key = 'token';
  final value = prefs.get(key) ?? 0;
  String fileName = file.path.split("/").last;
  var stream = new http.ByteStream(DelegatingStream.typed(file.openRead()));
  // get file length
  var length = await file.length(); // imageFile is your image file
  Map<String, String> headers = {
    "Accept": "application/json",
    "Authorization": "Bearer $value"
  }; // ignore this headers if there is no authentication
  // string to uri
  var uri = Uri.parse(serverUrl + "/api/v1/upload_parent_image");
  // create multipart request
  var request = new http.MultipartRequest("POST", uri);
  // multipart that takes file
  var multipartFileSign = new http.MultipartFile('photo', stream, length, filename: fileName);
  // add file to multipart
  request.files.add(multipartFileSign);
  // add headers
  request.headers.addAll(headers);
  // adding params
  request.fields['id'] = userid;
  // request.fields['firstName'] = 'abc';
  // request.fields['lastName'] = 'efg';
  // send
  var response = await request.send();
  print(response.statusCode);
  // listen for response
  response.stream.transform(utf8.decoder).listen((value) {
    print(value);
  });
}
Your image list:
List<String> photos = ["path of image1", "path of image2", "path of image3"];
List<http.MultipartFile> newList = [];
for (var img in photos) {
  if (img != "") {
    var multipartFile = await http.MultipartFile.fromPath(
      'Photos',
      File(img).path,
      filename: img.split('/').last,
    );
    newList.add(multipartFile);
  }
}
request.files.addAll(newList);
You could pass a list of files to your method, loop over it to build each MultipartFile object, and add them to your MultipartRequest:
Future<String> uploadMultipleImage(List<File> files, String userid) async {
  final prefs = await SharedPreferences.getInstance();
  final key = 'token';
  final value = prefs.get(key) ?? 0;
  // string to uri
  var uri = Uri.parse(serverUrl + "/api/v1/upload_parent_image");
  // create multipart request
  var request = new http.MultipartRequest("POST", uri);
  for (var file in files) {
    String fileName = file.path.split("/").last;
    var stream = new http.ByteStream(DelegatingStream.typed(file.openRead()));
    // get file length
    var length = await file.length(); // imageFile is your image file
    // multipart that takes file
    var multipartFileSign = new http.MultipartFile('photo', stream, length, filename: fileName);
    request.files.add(multipartFileSign);
  }
  Map<String, String> headers = {
    "Accept": "application/json",
    "Authorization": "Bearer $value"
  }; // ignore this headers if there is no authentication
  // add headers
  request.headers.addAll(headers);
  // adding params
  request.fields['id'] = userid;
  // request.fields['firstName'] = 'abc';
  // request.fields['lastName'] = 'efg';
  // send
  var response = await request.send();
  print(response.statusCode);
  // listen for response
  response.stream.transform(utf8.decoder).listen((value) {
    print(value);
  });
}
Well, you are almost there for sending multiple files at a time; let me post some code:
Future<String> uploadSingleImage(File file, File file2, String userid) async {
  final prefs = await SharedPreferences.getInstance();
  final key = 'token';
  final value = prefs.get(key) ?? 0;
  String fileName = file.path.split("/").last;
  var stream = new http.ByteStream(DelegatingStream.typed(file.openRead()));
  // get file length
  var length = await file.length(); // imageFile is your image file
  Map<String, String> headers = {
    "Accept": "application/json",
    "Authorization": "Bearer $value"
  }; // ignore this headers if there is no authentication
  // string to uri
  var uri = Uri.parse(serverUrl + "/api/v1/upload_parent_image");
  // create multipart request
  var request = new http.MultipartRequest("POST", uri);
  // multipart that takes file
  var multipartFileSign = new http.MultipartFile('photo', stream, length, filename: fileName);
  // add file to multipart
  request.files.add(multipartFileSign);
  // Now adding file 2 in request
  String fileName2 = file2.path.split("/").last;
  var stream2 = new http.ByteStream(DelegatingStream.typed(file2.openRead()));
  var lengthOfFile2 = await file2.length();
  // multipart that takes file
  var multipartFile2 = new http.MultipartFile('file2_key_here', stream2, lengthOfFile2, filename: fileName2);
  // add file2 to multipart
  request.files.add(multipartFile2);
  // add headers
  request.headers.addAll(headers);
  // adding params
  request.fields['id'] = userid;
  // request.fields['firstName'] = 'abc';
  // request.fields['lastName'] = 'efg';
  // send
  var response = await request.send();
  print(response.statusCode);
  // listen for response
  response.stream.transform(utf8.decoder).listen((value) {
    print(value);
  });
}
I have shared the code that I used to upload multiple images with these packages:
multi_image_picker: ^4.8.0
flutter_absolute_path: ^1.0.6
flutter_image_compress:
path_provider
http
flutter_image_compress and path_provider are used to compress the images (path_provider supplies the temporary directory for the compressed files).
HTTP request code:
static Future<String> uploadMultipleImage({List<File> files}) async {
  // string to uri
  var uri = Uri.parse("your api url");
  print("image upload URL - $uri");
  // create multipart request
  var request = new http.MultipartRequest("POST", uri);
  for (var file in files) {
    String fileName = file.path.split("/").last;
    var stream = new http.ByteStream(DelegatingStream.typed(file.openRead()));
    // get file length
    var length = await file.length(); // imageFile is your image file
    print("File lenght - $length");
    print("fileName - $fileName");
    // multipart that takes file
    var multipartFileSign = new http.MultipartFile('images[]', stream, length, filename: fileName);
    request.files.add(multipartFileSign);
  }
  Map<String, String> headers = {
    "Accept": "application/json",
    "Authorization":
        "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyX2lkIjoxMiwiZXhwIjoxNjE3NTQyNDE0LCJpc3MiOiJsb2NhbGhvc3QiLCJpYXQiOjE2MTcxODI0MTR9.dGRbINOdx_tf417fpsjdQ5CR7uGULs98FjLGm2w4kRY"
  }; // ignore this headers if there is no authentication
  print("headers - $headers}");
  // add headers
  request.headers.addAll(headers);
  // adding params
  request.fields['heading'] = "heading";
  request.fields['description'] = "description";
  request.fields['mobile'] = "mobile";
  request.fields['email'] = "email";
  request.fields['category'] = "1";
  request.fields['location_type'] = "1";
  request.fields['location'] = "location";
  request.fields['lat'] = "12";
  request.fields['lng'] = "123";
  request.fields['price'] = "1231";
  request.fields['sub_category'] = "3";
  // send
  var response = await request.send();
  print(response.statusCode);
  var res = await http.Response.fromStream(response);
  if (response.statusCode == 200 || response.statusCode == 201) {
    print("Item form is statuscode 200");
    print(res.body);
    var responseDecode = json.decode(res.body);
    if (responseDecode['status'] == true) {
      return res.body;
    } else {
      return res.body;
    }
  }
}
My UI screen where I call the method:
import 'dart:convert';
import 'dart:io';
import 'package:flutter/material.dart';
import 'package:flutter_absolute_path/flutter_absolute_path.dart';
import 'package:flutter_image_compress/flutter_image_compress.dart';
import 'package:lookapt_olx_app/service/ApiService.dart';
import 'package:lookapt_olx_app/utils/Utils.dart';
import 'package:lookapt_olx_app/utils/colorUtils.dart';
import 'package:lookapt_olx_app/utils/fontUtils.dart';
import 'package:lookapt_olx_app/widgets/appbar_widget.dart';
import 'package:lookapt_olx_app/widgets/commonWidget.dart';
import 'package:lookapt_olx_app/widgets/textFieldWidget.dart';
import 'package:lookapt_olx_app/widgets/textWidgets.dart';
import 'package:multi_image_picker/multi_image_picker.dart';
import 'package:rounded_loading_button/rounded_loading_button.dart';
import 'addNewPostController.dart';
import 'multiImagePicker.dart';
import 'package:path_provider/path_provider.dart' as path_provider;
class AddNewPostScreen extends StatefulWidget {
Map<String, dynamic> parameters;
String categoryId;
AddNewPostScreen({this.parameters, this.categoryId = ""});
@override
_AddNewPostScreenState createState() => _AddNewPostScreenState();
}
class _AddNewPostScreenState extends State<AddNewPostScreen> {
@override
void initState() {
super.initState();
print("add new post");
print(widget.parameters['name']);
print(widget.categoryId.toString());
}
List<Asset> images = [];
String _error = "";
Widget buildGridView() {
if (images != null)
return GridView.count(
crossAxisCount: 3,
crossAxisSpacing: 10,
children: List.generate(images.length, (index) {
Asset asset = images[index];
return AssetThumb(
asset: asset,
width: 300,
height: 300,
);
}),
);
else
return Container(color: Colors.white);
}
Future<void> loadAssets() async {
setState(() {
images = List<Asset>();
});
List<Asset> resultList;
String error;
try {
resultList = await MultiImagePicker.pickImages(
maxImages: 3,
);
} on Exception catch (e) {
error = e.toString();
}
// If the widget was removed from the tree while the asynchronous platform
// message was in flight, we want to discard the reply rather than calling
// setState to update our non-existent appearance.
if (!mounted) return;
setState(() {
images = resultList;
if (error == null) _error = 'Selected images';
});
}
/*
Usage
final dir = await path_provider.getTemporaryDirectory();
final targetPath = dir.absolute.path + "/temp.jpg";
File imgFile = await testCompressAndGetFile(
File(_capturedImage.path), targetPath);
* */
Future<File> testCompressAndGetFile(File file, String targetPath) async {
print("testCompressAndGetFile");
final result = await FlutterImageCompress.compressAndGetFile(
file.absolute.path,
targetPath,
quality: 30,
minWidth: 1024,
minHeight: 1024,
// rotate: 90,
);
print(file.lengthSync());
print(result.lengthSync());
return result;
}
_uploadImageFun() async {
print("Note - _getImagePaths called");
List<File> fileImageArray = [];
images.forEach((imageAsset) async {
final filePath =
await FlutterAbsolutePath.getAbsolutePath(imageAsset.identifier);
File tempFile = File(filePath);
print(filePath);
print("filePath.length - ${filePath.length}");
print(tempFile);
print("tempFile.length() - ${tempFile.lengthSync()}");
if (tempFile.existsSync()) {
DateTime now = DateTime.now();
final dir = await path_provider.getTemporaryDirectory();
final targetPath =
dir.absolute.path + "/lookaptPostImage${now.microsecond}.jpg";
File imgFile =
await testCompressAndGetFile(File(tempFile.path), targetPath);
print("Compressed image");
print(imgFile.lengthSync());
fileImageArray.add(imgFile); //with image compress
}
if (fileImageArray.length == images.length) {
var res = await ApiService.uploadMultipleImage(files: fileImageArray);
print("image upload response");
print(res);
var resp = json.decode(res);
if (resp['status'] == true) {
SuccessToastWidget(context, message: resp['message']);
} else {
FailedToastWidget(context, message: resp['message']);
}
}
});
print("Test Prints");
print(fileImageArray.length);
return fileImageArray;
}
final RoundedLoadingButtonController _loginBtnController =
new RoundedLoadingButtonController();
@override
Widget build(BuildContext context) {
return Scaffold(
appBar: CommonAppBarWidget(title: widget.parameters['name'] ?? ""),
body: _body(),
);
}
AddNEwPostController _addNEwPostController = new AddNEwPostController();
Widget _body() {
return Padding(
padding: const EdgeInsets.only(left: 20, right: 20, top: 10),
child: ListView(
children: [
InkWell(
onTap: loadAssets,
child: ClipRRect(
borderRadius: BorderRadius.circular(10),
child: Container(
color: Colors.grey.shade400,
child: ListTile(
leading: Icon(
Icons.add_box_outlined,
size: 30,
color: Colors.black,
),
trailing: MyTextWidgets.textWidgetSemiBold(
str: "Pick Images", fontSize: 20),
),
),
),
),
RoundedLoadingButton(
child: MyTextWidgets.textWidgetBold(
fontSize: 16, str: "Next", color: MyColors.white.redC),
controller: _loginBtnController,
onPressed: () {
_uploadImageFun();
},
width: MediaQuery.of(context).size.width,
borderRadius: 10,
color: MyColors.appGreenColor.redC,
height: 44,
),
Center(
child: _error == ""
? Container()
: MyTextWidgets.textWidgetLight(str: _error)),
Container(
child: buildGridView(),
height: 100,
width: MediaQuery.of(context).size.width - 100,
),
],
),
);
}
}
NOTE:
My UI code may not run in your project, so only copy the required parts from the screen code.
The HTTP request code will work fine; just copy and paste it.
Thanks for your support!
I am posting this solution using the dio and image_picker dependencies. It definitely works; I spent two days on this solution.
// dio client (from the dio package)
var dio = Dio();

FormData formData = new FormData.fromMap({
  "name": "Max",
  "location": "Paris",
  "age": 21,
  "image[]": [
    await MultipartFile.fromFile(
      _imageFile.path,
    ),
    await MultipartFile.fromFile(
      _imageFile.path,
    ),
  ],
});

var response = await dio.post(
  "http://143.110.244.110/radius/frontuser/eventsubmitbutton",
  data: formData,
  onSendProgress: (received, total) {
    if (total != -1) {
      print((received / total * 100).toStringAsFixed(0) + '%');
    }
  },
);
print(response);

How to convert Assets Images & Icons to PdfImage in flutter using dart_pdf

Used library: dart_pdf. After searching I found the same issue on GitHub but was unable to resolve it. I tried the code below, but a blurry image appears. Please help!
ByteData data = await rootBundle.load('assets/test.jpg');
var codec = await instantiateImageCodec(data.buffer.asUint8List());
var frame = await codec.getNextFrame();
var imageBytes = await frame.image.toByteData();
PdfImage assetImage = PdfImage(pdf.document,
image: imageBytes.buffer.asUint8List(), width: 86, height: 80);
Rendered Image:
Use this instead:
final PdfImage assetImage = await pdfImageFromImageProvider(
  pdf: pdf.document,
  image: const AssetImage('assets/test.jpg'),
);
This function will create your PDF with an image and custom data:
var pdf = new pw.Document();

Future<pw.Document> createPDF() async {
  var assetImage = pw.MemoryImage(
    (await rootBundle.load('assets/images/delivery.png')).buffer.asUint8List(),
  );
  pdf.addPage(pw.Page(
      pageFormat: PdfPageFormat.a4,
      build: (pw.Context context) {
        var width = MediaQuery.of(this.context).size.width;
        var height = MediaQuery.of(this.context).size.height;
        return pw.Container(
          margin: pw.EdgeInsets.only(top: height * 0.1),
          child: pw.ListView(
            children: [
              // your image here
              pw.Container(
                  height: height * 0.25, child: pw.Image(assetImage)),
              // other contents
              pw.Row(
                mainAxisAlignment: pw.MainAxisAlignment.spaceAround,
                children: [
                  pw.Text("order Id:"),
                  pw.Text(widget.doc['orderId']),
                ],
              ),
            ],
          ),
        );
      }));
  return pdf;
}
Use this function to save it:
Future savePdf(pw.Document pdfnew) async {
  String pdfName;
  File file;
  try {
    var documentDirectory = await AndroidPathProvider.downloadsPath; // for the Android downloads folder
    // var localDirectory = await getApplicationDocumentsDirectory(); // for a local directory
    setState(() {
      pdfName = "your_pdf_name";
    });
    file = File("$documentDirectory/$pdfName.pdf");
    await file.writeAsBytes(await pdfnew.save());
    return file.path;
  } catch (e) {
    print(e);
  }
}

How to get a File object in TypeScript from an HTML file input?

public UploadFile() {
    // File data
    this.filePath = $("#inputFile").val();
    var file = $("#inputFile").get(0).files[0];
    var reader = new FileReader();
    reader.onload = function (evt) {
        var fileContent = reader.result;
        var x = fileContent.bytes;
    };
}
Your question isn't completely clear, but here's some sample code that may help. This should be valid TypeScript code, which reads a file from input element #inputFile and displays the text from it in a div with id #divMain.
$("#inputFile").on('change', null, (e) => {
var input = <HTMLInputElement>e.target;
var files = input.files;
var f:File = files[0];
var reader = new FileReader();
var name = f.name;
console.log("File name: " + name);
reader.onload = function (e) {
var target: any = e.target;
var data = target.result;
$("#divMain").text(data);
};
reader.readAsText(f);
});
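If the goal is the raw bytes rather than the text (the fileContent.bytes property in the question is not a real FileReader API), a small variation of the same handler, shown here only as a sketch, reads the file as an ArrayBuffer instead:

$("#inputFile").on('change', null, (e) => {
    var input = <HTMLInputElement>e.target;
    var f: File = input.files[0];
    var reader = new FileReader();
    reader.onload = function () {
        // reader.result is an ArrayBuffer here; wrap it for byte-level access
        var bytes = new Uint8Array(<ArrayBuffer>reader.result);
        console.log("Read " + bytes.length + " bytes, first byte: " + bytes[0]);
    };
    reader.readAsArrayBuffer(f);
});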

WebRTC Play Audio Input as Microphone

I want to play an audio file as the microphone input (sending my audio file instead of my live voice) to the connected WebRTC user. Can anybody tell me how this could be done?
I have made the following attempts in JS code:
1. base64 Audio
<script>
var base64string = "T2dnUwACAAAAAAA..";
var snd = new Audio("data:audio/wav;base64," + base64string);
snd.play();

var Sound = (function () {
    var df = document.createDocumentFragment();
    return function Sound(src) {
        var snd = new Audio(src);
        df.appendChild(snd);
        snd.addEventListener('ended', function () { df.removeChild(snd); });
        snd.play();
        return snd;
    }
}());

var snd = Sound("data:audio/wav;base64," + base64string);
</script>
2. AudioBuffer
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var audioContext = new AudioContext();
var isPlaying = false;
var sourceNode = null;
var theBuffer = null;

window.onload = function () {
    var request = new XMLHttpRequest();
    request.open("GET", "sounds/DEMO_positive_resp.wav", true);
    request.responseType = "arraybuffer";
    request.onload = function () {
        audioContext.decodeAudioData(request.response, function (buffer) {
            theBuffer = buffer;
        });
    };
    request.send();
};

function togglePlayback() {
    var now = audioContext.currentTime;
    if (isPlaying) {
        // stop playing and return
        sourceNode.stop(now);
        sourceNode = null;
        analyser = null;
        isPlaying = false;
        if (!window.cancelAnimationFrame)
            window.cancelAnimationFrame = window.webkitCancelAnimationFrame;
        //window.cancelAnimationFrame( rafID );
        return "start";
    }
    sourceNode = audioContext.createBufferSource();
    sourceNode.buffer = theBuffer;
    sourceNode.loop = true;
    analyser = audioContext.createAnalyser();
    analyser.fftSize = 2048;
    sourceNode.connect(analyser);
    analyser.connect(audioContext.destination);
    sourceNode.start(now);
    isPlaying = true;
    isLiveInput = true;
    return "stop";
}
Please help me out with this; it would be highly appreciated.
Here is a demo that may help you stream MP3 or WAV in Chrome:
https://www.webrtc-experiment.com/RTCMultiConnection/stream-mp3-live.html
Here is how it is written:
http://www.rtcmulticonnection.org/docs/getting-started/#stream-mp3-live
And the source code of the demo:
https://github.com/muaz-khan/RTCMultiConnection/blob/master/demos/stream-mp3-live.html
https://github.com/muaz-khan/WebRTC-Experiment/issues/222
Use in 3rd-party WebRTC applications:
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new AudioContext();
var gainNode = context.createGain();
gainNode.connect(context.destination);
// don't play for self
gainNode.gain.value = 0;

document.querySelector('input[type=file]').onchange = function () {
    this.disabled = true;
    var reader = new FileReader();
    reader.onload = (function (e) {
        // Import callback function that provides PCM audio data decoded as an audio buffer
        context.decodeAudioData(e.target.result, function (buffer) {
            // Create the sound source
            var soundSource = context.createBufferSource();
            soundSource.buffer = buffer;
            soundSource.start(0, 0 / 1000);
            soundSource.connect(gainNode);

            var destination = context.createMediaStreamDestination();
            soundSource.connect(destination);
            createPeerConnection(destination.stream);
        });
    });
    reader.readAsArrayBuffer(this.files[0]);
};

function createPeerConnection(mp3Stream) {
    // you need to place 3rd party WebRTC code here
}
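As a rough sketch of what could go inside createPeerConnection (signaling is assumed to be handled elsewhere; only attaching the WebAudio-generated stream to the peer connection is shown, and the STUN server is just an example):

function createPeerConnection(mp3Stream) {
    var pc = new RTCPeerConnection({
        iceServers: [{ urls: 'stun:stun.l.google.com:19302' }]
    });
    // send the decoded file audio instead of the microphone
    mp3Stream.getAudioTracks().forEach(function (track) {
        pc.addTrack(track, mp3Stream);
    });
    // ...create an offer/answer and exchange it over your signaling channel...
    return pc;
}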
Updated at: 5:55 PM - Thursday, August 28, 2014
Here is how to get the MP3 from a server:
function HTTP_GET(url, callback) {
    var xhr = new XMLHttpRequest();
    xhr.open('GET', url, true);
    xhr.responseType = 'arraybuffer';
    xhr.send();
    xhr.onload = function (e) {
        if (xhr.status != 200) {
            alert("Unexpected status code " + xhr.status + " for " + url);
            return false;
        }
        callback(xhr.response); // return array-buffer
    };
}

// invoke the above "HTTP_GET" method
// to load the mp3 as an array-buffer
HTTP_GET('http://domain.com/file.mp3', function (array_buffer) {
    // Import callback function that provides PCM audio data decoded as an audio buffer
    context.decodeAudioData(array_buffer, function (buffer) {
        // Create the sound source
        var soundSource = context.createBufferSource();
        soundSource.buffer = buffer;
        soundSource.start(0, 0 / 1000);
        soundSource.connect(gainNode);

        var destination = context.createMediaStreamDestination();
        soundSource.connect(destination);
        createPeerConnection(destination.stream);
    });
});