
Commit

fixed bug
pbvahlst committed Apr 13, 2020
1 parent 0fdc084 · commit 6845152
Showing 2 changed files with 16 additions and 22 deletions.
29 changes: 15 additions & 14 deletions src/model/page-analyzer.js
@@ -65,22 +65,23 @@ class PageAnalyzer {
*/
async extractData(browser, screenshotOptions = undefined) {
return new Promise(async (resolve, reject) => { // we use an explicit promise so we can force reject if it hangs
let result = {
name: undefined,
data: undefined,
requestStrategy: undefined,
afterExtractAbortSave: false
};

let page;
this._errorCaught = false;
this._extractPromiseReject = reject;

if (screenshotOptions && screenshotOptions.resetCounter) {
this._screenshotCounter = 1;
}

try {
this._extractPromiseReject = reject;
this._resetActionTimerAndThrowIfErrorCaught();
let result = {
name: undefined,
data: undefined,
requestStrategy: undefined,
afterExtractAbortSave: false
};

let page;
this._errorCaught = false;
if (screenshotOptions && screenshotOptions.resetCounter) {
this._screenshotCounter = 1;
}

this._browserContext = await browser.createIncognitoBrowserContext();
page = await this._browserContext.newPage();
this._page = page;
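Note: the hunk above centres on where the per-call state (result, page, this._errorCaught, this._extractPromiseReject) is initialised relative to the try block. For context, here is a minimal sketch of the pattern the Promise comment describes: an explicit Promise whose reject is stored on the instance so something external can force-reject a hung extraction. The names _forceRejectOnHang and _doWork are illustrative only and do not exist in page-analyzer.js.

    class PageAnalyzerSketch {
        async extractData(input) {
            // Explicit promise (as in page-analyzer.js) so the stored reject can be
            // invoked from outside the normal control flow if extraction hangs.
            return new Promise(async (resolve, reject) => {
                this._extractPromiseReject = reject; // set before anything that can throw
                try {
                    const result = await this._doWork(input);
                    resolve(result);
                } catch (e) {
                    reject(e);
                }
            });
        }

        // Illustrative watchdog hook: whatever detects a hang calls this to
        // reject the still-pending extractData() promise.
        _forceRejectOnHang(reason) {
            if (this._extractPromiseReject) {
                this._extractPromiseReject(new Error(reason));
            }
        }

        // Stand-in for the real extraction steps.
        async _doWork(input) {
            return { name: input, data: undefined, requestStrategy: undefined, afterExtractAbortSave: false };
        }
    }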
9 changes: 1 addition & 8 deletions x_dev/api-demo.js
@@ -7,13 +7,6 @@ async function run() {
let destDir = "d:/temp/cmp-temp-test";
let rule = {
extractor: {
beforeExtract: async (page, url) => {
//if (url === 'google.com') {
console.log("Going to sleep for: " + url);
await delay(300000000); // a very long time
console.log("waking up: " + url);
//}
},
extract: () => {
return true; // we accept all pages as a match
}
@@ -23,7 +16,7 @@ async function run() {

for (let i = 0; i < 50; i++) { // try to force an error, delete loop again before commit
console.log("Iteration: " + (i + 1));
let webExtractor = new WebExtractorApi(urlsPath, rule, destDir, {maxConcurrency: 2, printProgression: false});
let webExtractor = new WebExtractorApi(urlsPath, rule, destDir, {maxConcurrency: 15, printProgression: false});
try {
await webExtractor.execute();
} catch (e) {
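For reference, the demo in x_dev/api-demo.js boils down to the following use of WebExtractorApi. This is a condensed sketch: only destDir, the rule shape, the constructor options, and execute() come from the file itself; the require path, the urls path, and the catch body are placeholders.

    const { WebExtractorApi } = require("../index"); // illustrative require path

    async function run() {
        const urlsPath = "d:/temp/urls.txt";      // placeholder; the demo defines its own urls file
        const destDir = "d:/temp/cmp-temp-test";  // as in the demo
        const rule = {
            extractor: {
                extract: () => true // accept every page as a match
            }
        };

        // The demo toggles maxConcurrency (2 vs. 15 in this diff) to stress-test the extractor.
        const webExtractor = new WebExtractorApi(urlsPath, rule, destDir, {
            maxConcurrency: 2,
            printProgression: false
        });

        try {
            await webExtractor.execute();
        } catch (e) {
            console.error(e); // placeholder; the demo's catch body is not shown in the diff
        }
    }

    run();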
