Looping through links using PhantomJS

There is a problem. I have some URLs, and on each of these URLs there is a list of links that I want to visit, each of them. There is no problem with looping through the URLs, but I have problems with the links. Here is my code...
var urls = [];
var TEMPLATE = 'https://example.com/page/';
for (var i = 1; i > 0; i--) {
    urls.push(TEMPLATE + i);
}
var page = require('webpage').create();

// Here is the loop through the urls
function process() {
    if (urls.length == 0) {
        phantom.exit();
    } else {
        url = urls.pop();
        page = require('webpage').create();
        page.open(url, onFinishedLoading);
    }
}

function onFinishedLoading(status) {
    var links = page.evaluate(function() {
        var arr = [];
        // Here we grab the links inside the page
        $('some.selector').each(function() {
            arr.push($('a', $(this)).attr("href"));
        });
        return arr;
    });

    // And this is just my attempt to visit these links
    link = links.pop();
    // Just fine. Got the link
    console.log(link);
    parse(link);
    setTimeout(function parse(link) {
        page.open(link, function(status) {
            var parsing = page.evaluate(function() {
                return link + status;
            });
            // Doesn't work :(
            console.log(parsing);
        });
    }, 1500);
    page.release();
    process();
    // return links;
}

process();
Sorry for my silly question, I know only a little about PhantomJS and JS.
I hope you can help me.
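One way to approach this (just a sketch, not the original poster's code; it assumes the same 'some.selector' markup, that jQuery is already loaded on the pages, and that the scraped hrefs are absolute URLs) is to treat the scraped links as a queue and open them one at a time, only continuing once the previous page.open callback has fired, because page.open is asynchronous and a single page object can only load one URL at a time:

var page = require('webpage').create();
var urls = ['https://example.com/page/1'];   // hypothetical start pages

function processUrls() {
    if (urls.length === 0) {
        phantom.exit();
        return;
    }
    var url = urls.pop();
    page.open(url, function (status) {
        var links = page.evaluate(function () {
            var arr = [];
            // assumes this selector and jQuery exist on the page
            $('some.selector').each(function () {
                arr.push($('a', $(this)).attr('href'));
            });
            return arr;
        }) || [];
        processLinks(links, function () {
            processUrls();   // next start page only after all its links are done
        });
    });
}

// Open the scraped links one by one, with a pause between visits
function processLinks(links, done) {
    if (links.length === 0) {
        done();
        return;
    }
    var link = links.pop();
    setTimeout(function () {
        page.open(link, function (status) {
            console.log(link + ' -> ' + status);
            processLinks(links, done);   // recurse only after this link has finished loading
        });
    }, 1500);
}

processUrls();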

Related

Using Mongoose query outside exec function

I am trying to use the result of a query outside the exec function, but I can't seem to get it working.
This is my function in Express:
getUrlsFromDatabase = function() {
    blog.find()
        .select('url')
        .exec(function(err, docs) {
            var sitemap = [];
            for (var i = 0; i < docs.length; i++) {
                sitemap.push(docs[i].url);
            }
            return sitemap;
        });
    console.log("Trying to get result docs here");
}
Try using async/await
getUrlsFromDatabase = async function() {
    var blogs = await blog.find().select('url');
    var sitemap = [];
    for (var i = 0; i < blogs.length; i++) {
        sitemap.push(blogs[i].url);
    }
    console.log("Result docs are available here", sitemap);
    return sitemap;
}
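Since an async function always returns a promise, the caller has to await it (or use .then). A minimal caller sketch, assuming a hypothetical Express app object and route:

app.get('/sitemap', async function(req, res) {
    var sitemap = await getUrlsFromDatabase();   // resolves to the array of urls
    res.json(sitemap);
});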
Update:
Or just add a return to get some result back from the function:
getUrlsFromDatabase = function() {
    return blog.find() // <- add return here
        .select('url')
        .exec(function(err, docs) {
            var sitemap = [];
            for (var i = 0; i < docs.length; i++) {
                sitemap.push(docs[i].url);
            }
            return sitemap;
        });
}
Or just save it in a variable:
getUrlsFromDatabase = function() {
    const result = blog.find() // <- add variable here
        .select('url')
        .exec(function(err, docs) {
            var sitemap = [];
            for (var i = 0; i < docs.length; i++) {
                sitemap.push(docs[i].url);
            }
            return sitemap;
        });
    return result; // to return it outbound
}

How can I scrape images from a website

Hi, I am trying to scrape images from a website. It all seems easy when I scrape images using the src attribute, but when using srcset it returns undefined and doesn't work.
I tried it with:
var page = require('webpage').create();
page.onLoadFinished = function() {
    var urls = page.evaluate(function() {
        var image_urls = new Array();
        var images = document.getElementsByTagName("img");
        for (var q = 0; q < images.length; q++) {
            images[q].srcset = images[q].src;
            image_urls.push(images[q].src);
        }
        return image_urls;
    });
    console.log(urls.length);
    console.log(urls[0]);
    phantom.exit();
};
page.open('https://www.example.com/Food/Pears/Anjou-Pears');
All I want is to be able to input a URL and have the script extract and download the images from it.
Update: I also tried the following code to get the image URL, and PhantomJS crashed with: "PhantomJS has crashed. Please read the bug reporting guide at
http://phantomjs.org/bug-reporting.html and file a bug report.
Segmentation fault: 11".
var url = "https://www.example.com/Food/Fruits/Pears/Anjou-Pears/p/20174514001_KG";
var page = require('webpage').create();
page.open(url, function(status) {
    if (status === 'success') {
        page.includeJs('https://ajax.googleapis.com/ajax/libs/jquery/3.2.1/jquery.min.js', function() {
            var link = page.evaluate(function() {
                if ($('img').length != 0) {
                    return $('img').attr('srcset');
                }
            });
            console.log(link);
            phantom.exit();
        });
    } else {
        console.log('FAIL');
    }
});
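For reference, a sketch of how one might read srcset without jQuery (not from the original post; the URL is a placeholder): srcset is an ordinary attribute, so getAttribute('srcset') returns the raw candidate list, which can be split into individual image URLs:

var page = require('webpage').create();

page.open('https://www.example.com/some-product-page', function (status) {   // placeholder URL
    if (status !== 'success') {
        console.log('FAIL');
        phantom.exit(1);
        return;
    }
    var urls = page.evaluate(function () {
        var results = [];
        var images = document.querySelectorAll('img');
        for (var i = 0; i < images.length; i++) {
            // Prefer srcset when present, otherwise fall back to src
            var srcset = images[i].getAttribute('srcset');
            if (srcset) {
                // srcset looks like "a.jpg 1x, b.jpg 2x" - keep only the URL parts
                srcset.split(',').forEach(function (candidate) {
                    results.push(candidate.trim().split(/\s+/)[0]);
                });
            } else if (images[i].src) {
                results.push(images[i].src);
            }
        }
        return results;
    });
    console.log(urls.length + ' image urls found');
    urls.forEach(function (u) { console.log(u); });
    phantom.exit();
});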

Visiting multiple urls using PhantomJS evaluating Error

I have this beautiful code; all I want is to add some pause between visits, so I added a setInterval, but it doesn't work:
var page = require('webpage').create();
// the urls to navigate to
var urls = [
    'http://blogger.com/',
    'https://github.com/',
    'http://reddit.com/'
];
var i = 0;

// the recursion function
var genericCallback = setInterval(function () {
    return function (status) {
        console.log("URL: " + urls[i]);
        console.log("Status: " + status);
        // exit if there was a problem with the navigation
        if (!status || status === 'fail') phantom.exit();
        i++;
        if (status === "success") {
            //-- YOUR STUFF HERE ----------------------
            // do your stuff here... I'm taking a picture of the page
            page.render('example' + i + '.png');
            //-----------------------------------------
            if (i < urls.length) {
                // navigate to the next url and the callback is this function (recursion)
                page.open(urls[i], genericCallback());
            } else {
                // try to navigate to the next url (it is undefined because it is past the last element) so the callback is exit
                page.open(urls[i], function () {
                    phantom.exit();
                });
            }
        }
    };
}, 2000);

// start from the first url
page.open(urls[i], genericCallback());
Here is the screenshot with the error I get:
Maybe someone could help me fix this code? I'm new to JS and to PhantomJS, so any help will be appreciated.
I got this code from another Stack Overflow answer here - Using Multiple page.open in Single Script - but I can't comment to ask the author, because I don't have 50 reputation.
It should rather be something like this:
var page = require('webpage').create();
var urls = ['http://blogger.com/', 'https://github.com/', 'http://reddit.com/'];
var i = 0;

function OpenPage() {
    setTimeout(function() {
        page.open(urls[i], function(status) {
            if (status == 'success') {
                page.render('example' + i + '.png');
            }
            i++;
            if (i <= urls.length - 1) {
                OpenPage();
            } else {
                phantom.exit();
            }
        });
    }, 2000);
}

OpenPage();

Phantomjs Automation of a website leads me to getting IP blocked

I'm using PhantomJS to automate a page. What I do is:
do {
    console.log(i);
    i++;
    page.open(url);
    do { phantom.page.sendEvent('mousemove'); } while (page.loading);
    if (page.injectJs('./Search.js') == false) {
        console.log("Search.js Failed");
    }
    var links = page.evaluate(function(json) {
        return search(json);
    }, json);
    console.log(links);
} while (links == "")
So this leads to opening the website repeatedly until what I'm looking for appears. But it also gets my IP banned. What can I do to get around this?
Your IP is probably getting banned because the script generates too many requests to the website in very little time. So you need to throttle the requests, i.e. apply a pause between them.
I would rewrite your script like this:
var page = require('webpage').create();
var url = "http://www.website.tld/";
var json = {"some": "json"};
var i = 0;
var links;

// We abstract the main code into a function so that we can call it
// again and again from itself
function getlinks(url, json) {
    i++;
    console.log(i);
    page.open(url);
    do { phantom.page.sendEvent('mousemove'); } while (page.loading);
    if (page.injectJs('./Search.js') == false) {
        console.log("Search.js Failed");
    }
    var links = page.evaluate(function(json) {
        return search(json);
    }, json);
    if (links == "") {
        // No links scraped yet, so we wait for 3 seconds and try again
        setTimeout(function() {
            getlinks(url, json);
        }, 3000);
    } else {
        console.log(links);
        phantom.exit();
    }
}

getlinks(url, json);

CasperJS looking for 404 errors on a site's links

I'm a beginner programmer. I found a nice script:
http://planzero.org/blog/2013/03/07/spidering_the_web_with_casperjs
I tried to rewrite this script with the CasperJS test framework.
I would like to get an xunit report from this code:
var startUrl = 'http://yoursite.foo';
var visitedUrls = [], pendingUrls = [];

var casper = require('casper').create({
    pageSettings: {
        loadImages: false,
        loadPlugins: false
    }
});
var utils = require('utils');
var helpers = require('helpers');

// Spider from the given URL
casper.test.begin('href', function(test) {
    casper.start(startUrl, function() {
        function spider(url) {
            // Add the URL to the visited stack
            visitedUrls.push(url);
            // Open the URL
            casper.open(url).then(function() {
                test.assertHttpStatus(200, ":" + url);
                // Find links present on this page
                var links = this.evaluate(function() {
                    var links = [];
                    Array.prototype.forEach.call(__utils__.findAll('a'), function(e) {
                        links.push(e.getAttribute('href'));
                    });
                    return links;
                });
                // Add newly found URLs to the stack
                var baseUrl = this.getGlobal('location').origin;
                Array.prototype.forEach.call(links, function(link) {
                    var newUrl = helpers.absoluteUri(baseUrl, link);
                    if (pendingUrls.indexOf(newUrl) == -1 && visitedUrls.indexOf(newUrl) == -1 && !(link.search(startUrl) == -1)) {
                        pendingUrls.push(newUrl);
                    }
                });
                // If there are URLs to be processed
                if (pendingUrls.length > 0) {
                    var nextUrl = pendingUrls.shift();
                    spider(nextUrl);
                } else {
                    console.log('links ended');
                    this.break;
                }
            });
        }
        spider(startUrl);
    }).run(function() {
        test.done();
    });
});
The script runs, but when it finishes the job I can't get the report.
If you're trying to learn how to use CasperJS, you need to start with a smaller example than that. That script is a mess, and it goes after a site named yoursite.foo (maybe you put that name in there?).
I would take small steps. I have a video which may help explain how to use CasperJS.
http://www.youtube.com/watch?v=Kefil5tCL9o
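For example, a minimal CasperJS test sketch (the URLs are placeholders) that checks a few pages for HTTP 200; running it through the test runner with the --xunit flag, e.g. casperjs test check-links.js --xunit=report.xml, writes the xunit report:

var casper = require('casper').create();
var urlsToCheck = ['http://example.com/', 'http://example.com/about'];   // placeholder URLs

casper.test.begin('pages respond with 200', urlsToCheck.length, function(test) {
    casper.start();
    urlsToCheck.forEach(function(url) {
        casper.thenOpen(url, function() {
            // one assertion per URL, so the planned test count above matches
            test.assertHttpStatus(200, url + ' returned 200');
        });
    });
    casper.run(function() {
        test.done();
    });
});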