Dynamic paging with Nightmare / Electron (page scrape)

I'm attempting to scrape dynamically paged websites with Nightmare / Electron. I don't see a way to perform a do...until loop with Nightmare functions, or a way to chain evaluate calls with logic.

Here's a simple code example that merely Googles a phrase and returns the result hrefs from page 1. I'd like this code to continue for each page in the results.

var Nightmare = require('nightmare');
var vo = require('vo');

vo(function* () {
  var nightmare = Nightmare({ show: true });
  // Search Google and collect the result hrefs from page 1 only.
  var links = yield nightmare
    .goto('http://www.google.com')
    .wait('input[title="Search"]')
    .click('input[title="Search"]')
    .type('input[title="Search"]', 'Anequim Project')
    .click('input[name="btnK"]')
    .wait(600)
    .evaluate(function () {
      var linkArray = [];
      var links = document.querySelectorAll('h3.r a');
      for (var i = 0; i < links.length; ++i) {
        linkArray.push(links[i].getAttribute('href'));
      }
      return linkArray;
    });
  yield nightmare.end();
  return links;
})(function (err, result) {
  if (err) return console.log(err);
  console.log(result);
});
Fulmis asked 4/1/2016 at 17:45

The following code example is a modified version of a solution provided by rosshinkley of the segmentio/nightmare project. It still needs some work: in my tests with Nightmare version 2.1.2 it is not 100% reliable, but it is an excellent starting point.

Note: when testing, if you run it repeatedly, Google will start requiring a CAPTCHA.

var Nightmare = require('nightmare');
var vo = require('vo');

vo(run)(function (err, result) {
    if (err) throw err;
});

function* run() {
    var nightmare = Nightmare({ show: true }),
        MAX_PAGE = 100,
        currentPage = 0,
        nextExists = true,
        links = [];

    // Perform the initial search.
    yield nightmare
        .goto('http://www.google.com')
        .wait('input[title="Search"]')
        .click('input[title="Search"]')
        .type('input[title="Search"]', 'Anequim Project')
        .click('input[name="btnK"]')
        .wait(2000);

    // '#pnnext' is Google's "Next" pagination link.
    nextExists = yield nightmare.visible('#pnnext');

    while (nextExists && currentPage < MAX_PAGE) {
        // For brevity, grab only the first result link on each page.
        links.push(yield nightmare
            .evaluate(function () {
                var links = document.querySelectorAll('h3.r a');
                return links[0].href;
            }));

        yield nightmare
            .click('#pnnext')
            .wait(2000);

        currentPage++;
        nextExists = yield nightmare.visible('#pnnext');
    }

    console.dir(links);
    yield nightmare.end();
}
Fulmis answered 5/1/2016 at 19:35. Comments (3):
Alt: What's with the links[0]? Why only the first link?
Fulmis: @Alt For brevity, the example returns the first link of each page returned by Google.
Alt: Alright, I thought it was a mistake of sorts. There is a problem with this solution, though: the last page doesn't get scraped. Anyone who stumbles upon this issue should copy the links.push() call to outside the while loop so it runs once more to compensate for the last page.
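To make the last comment's fix concrete, here is a hedged, untested sketch of the modified generator. The extractLinks helper name is my own; the selectors (h3.r a, #pnnext) and the Nightmare 2.x / vo setup come from the answer above and may no longer match Google's current markup. It collects every result href per page rather than just the first, and runs the extraction once more after the loop so the last page is included:

// Runs inside the page via nightmare.evaluate(), so it may only use DOM APIs.
function extractLinks() {
    var anchors = document.querySelectorAll('h3.r a');
    return Array.prototype.map.call(anchors, function (a) {
        return a.href;
    });
}

function* run() {
    var nightmare = Nightmare({ show: true }),
        MAX_PAGE = 100,
        currentPage = 0,
        nextExists = true,
        links = [];

    yield nightmare
        .goto('http://www.google.com')
        .wait('input[title="Search"]')
        .click('input[title="Search"]')
        .type('input[title="Search"]', 'Anequim Project')
        .click('input[name="btnK"]')
        .wait(2000);

    nextExists = yield nightmare.visible('#pnnext');

    while (nextExists && currentPage < MAX_PAGE) {
        // Collect all result links on the current page.
        links = links.concat(yield nightmare.evaluate(extractLinks));

        yield nightmare
            .click('#pnnext')
            .wait(2000);

        currentPage++;
        nextExists = yield nightmare.visible('#pnnext');
    }

    // One more extraction after the loop so the final page is scraped too.
    links = links.concat(yield nightmare.evaluate(extractLinks));

    console.dir(links);
    yield nightmare.end();
}

Replacing the fixed .wait(2000) delays with a selector wait such as .wait('#pnnext') would likely make the loop more reliable, since Nightmare's wait() also accepts a CSS selector.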
