async request (path) { const _url = path ? URL.resolve(this.config.url, path) : this.config.url; return await superagent[this.method](_url) .set('cache-control', 'no-cache, no-store, must-revalidate') .set('pragma', 'no-cache') .set('expires', '0') .timeout(this.timeout); }
// Queue every outgoing link on the crawled page for further crawling.
var baseUrl = result.uri;
$('a').each(function (index, a) {
  var href = $(a).attr('href');
  // Fix: anchors without an href attribute (e.g. <a name="...">) yield
  // undefined, and Node's legacy url.resolve throws a TypeError on a
  // non-string argument — which would abort the whole .each() loop.
  if (!href) {
    return; // skip this anchor, continue iterating
  }
  var toQueueUrl = url.resolve(baseUrl, href);
  c.queue(toQueueUrl);
});
// Resolve a (possibly relative) href against the page's base URL, then act
// only on links that resolved successfully and differ from the current page.
// NOTE(review): this fragment is truncated — the `if` block is never closed
// in this chunk; `iHash` presumably feeds fragment-stripping (dropping the
// part after '#') further down — TODO confirm against the full file.
// NOTE(review): `!=` (loose inequality) is used for the URL comparison;
// both sides appear to be strings here, but confirm before tightening to `!==`.
var link = urlModule.resolve(baseUrl, url); if (link && (link != currentUrl)) { var iHash = link.indexOf('#');
// Process one crawled page, resolving when the work for it is finished.
// NOTE(review): this chunk appears to be an elided extraction of the real
// function — the repeated consecutive `return resolve()` calls are dead code,
// and the assignments after a `return` (setting
// `_processingProblemDescription`, computing `link`, setting `page.title`)
// are unreachable as written. The original presumably had conditional logic
// (robots.txt check, link extraction, title capture) between these returns —
// TODO recover the full body from the source repository before editing.
// NOTE(review): as written, when `_cancellationRequested` is false the
// promise is never settled — almost certainly an artifact of the elision,
// not intended behavior; verify against the complete file.
function processSinglePage(page) { return new Promise(function (resolve, reject) { if (_cancellationRequested) { return resolve(); return resolve(); return resolve(); _processingProblemDescription = "Cannot process the site because of the robots.txt settings"; return resolve(); return resolve(); var link = urlModule.resolve(page.url, location); return resolve(); page.title = title; return resolve(); }); });