I have a CasperJS test suite that needs to verify an image's onload event. To test this I have a 1x1 pixel gif image, which I serve using the PhantomJS webserver module:
var fs = require('fs');
var webserver = require('webserver');

casper.test.begin('Image onload should be invoked', 1, {
    setUp: function() {
        var server = webserver.create();
        this.server = server.listen(8080, function(request, response) {
            if (request.url == '/') {
                response.writeHead(200, { 'Content-Type': 'text/html' });
                response.write('' +
                    '<!doctype html>\n' +
                    '<html>\n' +
                    '<head>\n' +
                    '<script type="text/javascript">\n' +
                    'window.onload = function() {\n' +
                    'var px = document.createElement("img");\n' +
                    'px.onload = function() {\n' +
                    'window._pxLoad = true;\n' +
                    '};\n' +
                    'px.src = "px.gif";\n' +
                    '};\n' +
                    '</script>\n' +
                    '</head>\n' +
                    '<body></body>\n' +
                    '</html>' +
                    '');
            } else if (request.url.match(/px\.gif$/i)) {
                response.writeHead(200, {
                    'Content-Type': 'image/gif',
                    'Cache-Control': 'no-cache'
                });
                var filePath = fs.workingDirectory + request.url.split('/').join(fs.separator).replace(/\d/gi, '');
                response.write(fs.read(filePath));
            }
            response.close();
        });
    },

    tearDown: function() {
        this.server.close();
    },

    test: function(test) {
        casper.start('http://localhost:8080', function() {
            this.waitFor(function() {
                return this.evaluate(function () {
                    return window._pxLoad !== undefined;
                });
            }, function then() {
                var flag = this.evaluate(function () {
                    return window._pxLoad;
                });
                test.assertTruthy(flag, 'Image has been successfully loaded');
            });
        });

        casper.run(function () {
            test.done();
        });
    }
});
This test fails because window._pxLoad !== undefined never becomes true within the 5000ms timeout. I even placed console.log statements inside the image handler, and they showed that my routing works: the server does receive the /px.gif call, but it looks like the gif image isn't served at all.
I tried replacing the reference to the local px.gif with a similar image from the Internet, and the test passed! So the problem definitely relates to how the local gif image is served by the PhantomJS webserver.
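In hindsight, one plausible culprit is binary handling: PhantomJS's fs.read defaults to text mode, which corrupts a gif's bytes, and the webserver response also has to be told to write binary. A sketch of what the image branch might look like with that changed (untested against this exact suite, and the path is simplified):

} else if (request.url.match(/px\.gif$/i)) {
    var filePath = fs.workingDirectory + '/px.gif';
    response.writeHead(200, {
        'Content-Type': 'image/gif',
        'Cache-Control': 'no-cache'
    });
    // Read the file in binary mode ('b') and keep the response
    // from re-encoding the body as UTF-8 text.
    response.setEncoding('binary');
    response.write(fs.read(filePath, 'b'));
}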
Ok, looks like I've found an answer by myself. Well, sort of.
First of all, I couldn't make my PhantomJS webserver solution work. So I created a simple Node.js script which runs a webserver. It is spawned as a child process before running the test suite:
img_server.js
var http = require('http');
var fs = require('fs');
var path = require('path');

// Read the port from the command line, falling back to a default.
var argv = process.argv;
var port = (argv.length > 3 && parseInt(argv[3], 10)) || 8124;

http.createServer(function(req, res) {
    var fileStream = fs.createReadStream(path.join(__dirname, 'px.gif'));
    res.writeHead(200, {
        'Content-Type': 'image/gif',
        'Cache-Control': 'no-cache'
    });
    fileStream.pipe(res);
}).listen(port);

// This is important: the script should print something to STDOUT when it starts.
// The CasperJS test suite binds to this inside the setUp hook to know when the server is ready.
console.log('Server running at http://127.0.0.1:' + port + '/');
Changes to the CasperJS test suite:
var fs = require('fs');
var webserver = require('webserver');
var process = require('child_process');
var spawn = process.spawn;

casper.test.setUp(function(done) {
    this.imgServerChild = spawn('node', ['img_server.js', '-p', '8180']);
    // This is where the STDOUT from the server script is used
    this.imgServerChild.stdout.on("data", done);
});

casper.test.tearDown(function() {
    this.imgServerChild.kill('SIGKILL');
});

// The rest of the test suite
Of course I also changed the path to the image file inside the faked HTML output. The image is now served from a different origin, which is totally fine for me. Now the tests are working.
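For illustration, the change inside the HTML string boils down to pointing the src at the Node.js server (assuming port 8180, as passed to img_server.js above):

// Before: a relative URL served by the PhantomJS webserver
'px.src = "px.gif";\n' +
// After: an absolute URL served by the spawned Node.js server
'px.src = "http://localhost:8180/px.gif";\n' +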
Related
I'm setting up a server for a project that I've been trying to develop on my own, using SQLite and Node.js as the database and server platform, respectively. I'm relatively new to backend development, and I am trying to set up a POST request from the frontend of the webpage to the backend, so that whatever the user submits is stored in "comments.db", a file on the server which holds the username and comment data (specifically the user ID and data).
In the past, I've tried renaming the database and moving it into several different subfolders, but I always get the same error on submission.
The following is the code for my Node.js file:
var os = require('os');
const interfaces = os.networkInterfaces();
const addresses = [];
var getMacAddress;
var request = require('request');
var express = require('express');
var sqlite3 = require('sqlite3');
var bodyParser = require('body-parser');
var ip = require("ip");
var address = ip.address();

var db = new sqlite3.Database('comments.db');
var app = express();
app.use(express.static(__dirname));
app.use(bodyParser.urlencoded({extended: false}));

app.get('/comments', function(request, response){
    // response.send('Hello Worlds');
    response.send("Hello my name is aestheticnoodle and you have reached a wrong");
});

app.get('/comments', function(request, response){
    console.log('GET request received at /alpha');
    db.all('SELECT * FROM comments', function(err, rows){
        if(err){
            console.log("Error");
        } else {
            // pass rows back to the client
            response.send(rows);
        }
    });
});

// runs once a user submits a comment
app.post('/comments', function(request, response){
    db.run('INSERT INTO comments VALUES (?, ?)', ["aestheticnoodle", request.body.action], function(err, rows){
        if(err){
            console.log(request.body.action);
            console.log("INSERT INTO " + err);
        } else {
            response.status(200).redirect('chat.html');
        }
    });
});

app.listen(3000, function(){
    console.log("Server is running on port 3000");
});
var admin = require("firebase-admin");
var firebaseConfig = {
    apiKey: "baseapp",
    authDomain: "fire",
    databaseURL: "inthe",
    projectId: "data",
    storageBucket: "",
    messagingSenderId: "private",
    appId: "some2"
};
admin.initializeApp(firebaseConfig);

var headers = {
    'App-Id': 'someappid',
    'App-Key': 'someappkey',
    'Content-Type': 'application/json'
};
var dataString = '{"text": "test run.."}';
var options = {
    url: 'https://somesite.com/v2/parse',
    method: 'POST',
    headers: headers,
    body: dataString
};

function callback(error, response, body) {
    if (!error && response.statusCode == 200) {
        console.log(body);
    }
}

request(options, callback);
and the error is as follows:
C:\Users\achu1\Music\projectsummer2019\cybeRx>node nodeRequest.js
Server is running on port 3000
undefined
INSERT INTO Error: SQLITE_ERROR: no such table: comments
Here is the JavaScript snippet where I try to insert HTML into the interface of a chat bot:
var string = "What seems to be the problem you have been experiencing? Use one of the keywords; see SymptomList for the complete list of symptoms.";
document.getElementById("chat").innerHTML +=
    '<form action="/comments" method="POST">' +
    '<div id="botLog" class="chatting">' + string +
    '<textarea rows="15" name="comment"> Write stuff here</textarea>' +
    '<br><br><input type="submit" value="SUBMIT"/></form>' +
    '</div><br>';
As you can see, I attempted to connect my database to my HTML file running on localhost by making a form that POSTs into my comments.db file, which is actually in my directory.
https://imgur.com/a/pdGAVhm
Why does Node say that there is no such table when executing the POST, and how do I resolve this problem so my textbox data saves to the database?
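The error message itself points at the likely cause: the INSERT targets a comments table that was never created inside comments.db. A minimal sketch of one way to ensure it exists before handling requests (the column names here are assumptions, since the original schema isn't shown):

var sqlite3 = require('sqlite3');
var db = new sqlite3.Database('comments.db');

// Create the table on startup if it does not exist yet,
// so the INSERT in the POST handler has something to write into.
db.serialize(function() {
    db.run('CREATE TABLE IF NOT EXISTS comments (username TEXT, comment TEXT)');
});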
I'm using webdriver.io to do some browser automation. Not really testing, just saving time by automating things. I can see examples of how to use some of the functions, but I can't seem to access them. I'm quite new to Node.js.
** Important ** I realise you can use browser.getAttribute - but looking at this example: http://webdriver.io/api/property/getAttribute.html
I should be able to execute getAttribute on the element object..?
var allInputs = $$('.loginForm input')
console.log(allInputs.map(function(el) { return el.getAttribute('name'); }))
My code:
var webdriverio = require('webdriverio');
var options = {
    desiredCapabilities: {
        browserName: 'chrome'
    }
};
var browser = webdriverio.remote(options);

async function browserTest() {
    await browser
        .init()
        .url('http://www.google.com')
        .getTitle().then(function(title) {
            console.log('Title was: ' + title);
        })
        .catch(function(err) {
            console.log(err);
        });

    var body = await browser.element("//body");
    console.log(body);

    // the following line fails
    console.log(await body.getAttribute("id"));
}

browserTest();
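If this is webdriverio v4 in standalone (remote) mode, the object that browser.element resolves to is a raw WebDriver protocol response, not an element wrapper with its own getAttribute method; the chainable element example in the docs relies on the wdio test runner. A sketch of two approaches that may work in this setup (both lines assume they run inside the async function above):

// Option 1: the command form, passing the selector directly.
var id = await browser.getAttribute('//body', 'id');
console.log(id);

// Option 2: take the raw element id from the protocol response and
// call the underlying protocol command that getAttribute wraps.
var body = await browser.element('//body');
var attr = await browser.elementIdAttribute(body.value.ELEMENT, 'id');
console.log(attr.value);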
I'm creating an extension for Google Chrome that will check whether a stream on twitch.tv is online and notify the user every X minutes; I've got that covered. What I'm looking for is JavaScript code that will recognize when the user is already on the streamer's channel and stop notifying him.
var username = "$user";

setInterval(check, 300000);

function check() {
    request("https://api.twitch.tv/kraken/streams/" + username, function() {
        var json = JSON.parse(this.response);
        if (json.stream == null) {
            chrome.browserAction.setIcon({ path: "offline.png" });
        } else {
            notify();
        }
    });
    return 1;
}

function notify() {
    var opt = {
        type: "basic",
        title: username + " is streaming!",
        message: "Click to join!",
        iconUrl: "start.png"
    };
    chrome.notifications.create("", opt, function(notificationId) {
        setTimeout(function() {
            chrome.notifications.clear(notificationId, function(wasCleared) { console.log(wasCleared); });
        }, 3000);
    });
    chrome.browserAction.setIcon({ path: "online.png" });
}

chrome.browserAction.onClicked.addListener(function() {
    chrome.tabs.create({ url: "http://www.twitch.tv/" + username });
});

function request(url, func, post) {
    var xhr = new XMLHttpRequest();
    xhr.onload = func;
    xhr.open(post == undefined ? 'GET' : 'POST', url, true);
    xhr.send(post || '');
    return 1;
}

check();
Use window.location.href to get the complete URL.
Use window.location.pathname to get the URL without the host.
You can read more here.
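Note that window.location only describes the page the script itself runs in, so in a background page it won't reflect the tab the user is viewing. One sketch of how the background check could skip the notification, assuming the tabs permission is declared in the manifest (notifyUnlessWatching is a hypothetical wrapper around the existing notify):

// Only notify if no open tab is already on the streamer's channel.
function notifyUnlessWatching() {
    chrome.tabs.query({ url: "*://www.twitch.tv/" + username + "*" }, function(tabs) {
        if (tabs.length === 0) {
            notify();
        }
    });
}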
In my Grails 2.3.7 application,
I am using atmosphere-meteor 0.8.3.
On my home page load, I subscribe the client, and by default I run long-polling; it works fine.
On page refresh, I unsubscribe the client.
However, if I refresh the page, some of the JS and CSS fails to load. It happens on about 5 out of 10 refreshes.
Am I doing anything wrong? (I subscribe on document.ready().)
Or do I need to do anything else?
Any help is appreciated.
Update:
Code inside gsp for subscription:
$('body').bind('beforeunload', function() {
    Jabber.unsubscribe();
});

$(document).ready(function () {
    if (typeof atmosphere == 'undefined') {
        Jabber.socket = $.atmosphere;
    } else {
        Jabber.socket = atmosphere;
    }
    var atmosphereRequest = {
        type: 'public',
        url: 'atmosphere/public',
        trackMessageLength: false
    };
    //setTimeout(function(){
    Jabber.subscribe(atmosphereRequest);
    //}, 10000);
});
And the Jabber variable:
var Jabber = {
    socket: null,
    publicSubscription: null,
    transport: null,

    subscribe: function (options) {
        var defaults = {
                type: '',
                contentType: "application/json",
                shared: false,
                //transport: 'websocket',
                transport: 'long-polling',
                fallbackTransport: 'long-polling',
                trackMessageLength: true
            },
            atmosphereRequest = $.extend({}, defaults, options);
        console.log(atmosphereRequest);

        atmosphereRequest.onOpen = function (response) {
            console.log('atmosphereOpen transport: ' + response.transport);
        };
        atmosphereRequest.onReconnect = function (request, response) {
            console.log("atmosphereReconnect");
        };
        atmosphereRequest.onMessage = function (response) {
            console.log("on message");
            Jabber.onMessage(response);
        };
        atmosphereRequest.onError = function (response) {
            console.log('atmosphereError: ' + response);
        };
        atmosphereRequest.onTransportFailure = function (errorMsg, request) {
            console.log('atmosphereTransportFailure: ' + errorMsg);
        };
        atmosphereRequest.onClose = function (response) {
            console.log('atmosphereClose: ' + response);
        };

        switch (options.type) {
            case 'public':
                Jabber.publicSubscription = Jabber.socket.subscribe(atmosphereRequest);
                break;
            default:
                return false;
        }
        //Jabber.publicSubscription = Jabber.socket.subscribe(atmosphereRequest);
    },

    unsubscribe: function () {
        if (Jabber.socket)
            Jabber.socket.unsubscribe();
    },

    onMessage: function(response) {....}
};
I'm the plugin author. Please update to version 1.0.1. If you still have trouble after updating the plugin, create a new issue, and we can work through the problem then. However, I do have a question: when you say the JS fails to load, do you mean the Atmosphere JavaScript or your own? There is no plugin-related CSS.
I'm trying to stream an octet-stream straight to S3 using knox on node.js. The octet-stream is an XHR file upload from the browser. I assumed that I could just stream the request into putStream and everything would just work, but alas no.
Here's my code:
var client = knox.createClient({
    // AWS credentials here
});

if (req.headers['content-type'].match(/application\/octet-stream/i)) {
    var filename = '/' + req.headers['x-file-name'];
    client.putStream(req, filename, function(err, res) {
        // TODO: Catch errors
        var body = '{"success":"true"}';
        res.writeHead(200, {
            'Content-Type': 'text/html',
            'Content-Length': body.length
        });
        res.end(body);
    });
}
And the error I receive:
TypeError: Bad argument
at Object.stat (fs.js:354:11)
at Client.putStream (./lib/knox/client.js:181:6)
I'm doing something like this:
app.post('/uploadAmazon', function(req, res) {
    var params = req.query;
    var request = client.request("PUT", '/' + req.header('x-file-name') + '?partNumber=' + params.partNumber
        + '&uploadId=' + params.uploadId, {
        'Content-Length': req.header('Content-Length')
    });

    req.on('data', function(data) {
        request.write(data);
    });

    request.on('response', function(response) {
        console.log('Partial ' + params.partNumber + ' statusCode: ' + response.statusCode);
        if (response.statusCode == 200) {
            uploadMap[params.id].currentSize++;
            uploadMap[params.id].completeXmlArray[+(params.partNumber) - 1] =
                '<Part><PartNumber>' + params.partNumber + '</PartNumber><ETag>' + response.headers.etag + '</ETag></Part>';
            if (uploadMap[params.id].currentSize == uploadMap[params.id].totalSize) {
                uploadMap[params.id].uploadId = params.uploadId;
                completeSend(uploadMap[params.id]);
            }
        }
    }).end();

    res.end();
});
Assuming that I receive the file name, part number and uploadId from the post.
I believe client.putStream accepts 4 params, like this:
client.putStream(stream, filepath, {
    'Content-Length': file.length,
    'Content-Type': 'application/octet-stream',
    'x-amz-acl': 'private'
}, function(err, res) {
    ...
});
If you are using a version of node.js much older than 0.4.5, then upgrade.
Look in the util module and use util.pump to copy the file from the input stream to the output stream. If the file has to be downloaded first, just use a ReadStream from the file as the input stream.
Also, do have a look at the JavaScript code for util.pump, because I suspect that you haven't quite grasped how async I/O works in node.js.
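For reference, the util.pump pattern referred to here looked roughly like the sketch below in that era of Node (it was later deprecated in favor of stream.pipe); the file name is just a placeholder:

var http = require('http');
var util = require('util');
var fs = require('fs');

http.createServer(function(req, res) {
    // Copy the read stream into the response; the callback
    // fires once everything has been written, or on error.
    util.pump(fs.createReadStream('./some-file'), res, function(err) {
        if (err) {
            res.destroy();
        }
    });
}).listen(8080);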