I have about 5000 links and I need to crawl all of them. I'm wondering whether there is a better approach than this. Here is my code.
```js
const request = require('request');

// crawl a single url, capturing its cookie jar;
// resolves instead of rejecting on error so one failed
// request doesn't abort the whole Promise.all
const getSiteCrawlPromise = (url) => {
  return new Promise((resolve) => {
    let j = request.jar();
    request.get({ url: url, jar: j }, function (err, response, body) {
      if (err) return resolve({ body: null, jar: j, error: err });
      return resolve({ body: body, jar: j, error: null });
    });
  });
};

// must be async, since it uses await
const getDoms = async (urls) => {
  let data = await Promise.all(urls.map(url => getSiteCrawlPromise(url)));
  return data;
};

let urls = [ /* 5000 urls go here */ ];

(async () => {
  const doms = await getDoms(urls);
  // processing and storing the doms
})();
```

Is there a mechanism implemented in promises that can divide the jobs across multiple threads, process them, and then return the output as a whole? And I don't want to divide the URLs into smaller fragments and process those fragments one at a time.
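To be clear about what I'm trying to avoid: I know I could split the list into chunks myself and crawl them batch by batch, roughly like the sketch below (`crawlInChunks` and `chunkSize` are just illustrative names, and the chunk size would be something I'd have to tune by hand). I'd rather not manage that batching manually.

```js
// what I want to avoid: manually batching the 5000 urls
// and waiting for each whole batch before starting the next
const crawlInChunks = async (urls, chunkSize) => {
  let results = [];
  for (let i = 0; i < urls.length; i += chunkSize) {
    const chunk = urls.slice(i, i + chunkSize);
    results = results.concat(
      await Promise.all(chunk.map(url => getSiteCrawlPromise(url)))
    );
  }
  return results;
};
```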