I'm running a nightmare.js script where where I'm trying to take a screenshot of multiple elements on a page.
The first element is captured just fine, but every other element that is below the fold is captured with a zero length. I am struggling to debug this issue. Any help would be incredibly appreciated.
Basically this script walks through a page and selects all the elements on the page that match a selector. Then, using async
it collects the responses and returns a buffer of objects. The issue is that the elements below the fold do not get screenshotted (buffer length ends up at zero). I tried to wait()
and scroll to the element, but I have not had any success as of yet.
import * as Nightmare from 'nightmare'
import * as vo from 'vo'
import * as async from 'async'
import * as fs from 'fs'
const urls:String[] = [
'https://yahoo.com/'
]
Nightmare.action('snap', function(selector:String, done:Function) {
const self = this;
this.evaluate_now(function (selector) {
return Array.from(document.querySelectorAll(selector))
.map((ele:Element) => {
if (ele) {
const rect = ele.getBoundingClientRect()
const r:Function = Math.round
return {
x: r(rect.left),
y: r(rect.top),
width: r(rect.width),
height: r(rect.height)
}
}
})
}, function(err, clips) {
if (err) return done(err)
if (!clips) return done(new Error(`Selector not found`))
let snaps = []
const snap = (clip, cb) => {
self
.scrollTo(clip.y - clip.height, clip.x)
.screenshot(clip, cb)
.run()
}
async.mapSeries(clips.reverse(), snap, (err, res) => {
done(err, res)
})
}, selector)
})
const scrape = (url) => {
const nightmare = Nightmare({
show: true
});
nightmare
.goto(url)
.snap('.navbar')
.end()
.then((buffers:Buffer[]) => {
buffers.forEach((data, index) => {
fs.writeFileSync(`images/navbar-${index}.png`, data)
})
})
}
urls.forEach(scrape)