packages/basic-crawler/src/internals/basic-crawler.ts (10 changes: 0 additions & 10 deletions)

@@ -1511,23 +1511,13 @@ export class BasicCrawler<Context extends CrawlingContext = BasicCrawlingContext
             return false;
         }
 
-        if (source instanceof RequestQueueV1) {
-            // eslint-disable-next-line dot-notation
-            source['inProgress']?.delete(request.id!);
-        }
-
         const delay = lastAccessTime + this.sameDomainDelayMillis - now;
         this.log.debug(
             `Request ${request.url} (${request.id}) will be reclaimed after ${delay} milliseconds due to same domain delay`,
         );
         setTimeout(async () => {
             this.log.debug(`Adding request ${request.url} (${request.id}) back to the queue`);
 
-            if (source instanceof RequestQueueV1) {
-                // eslint-disable-next-line dot-notation
-                source['inProgress'].add(request.id!);
-            }
-
             await source.reclaimRequest(request, { forefront: request.userData?.__crawlee?.forefront });
         }, delay);
 
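The deleted lines above are the heart of the fix: previously, delayRequest() removed the delayed request from the queue's private inProgress set for the duration of the same-domain delay, opening a window in which a concurrent fetchNextRequest() could hand the same request to another worker; the timer callback then re-added it and reclaimed it, so the request could be processed more than once. After this change the request simply stays in progress until reclaimRequest() runs. A minimal sketch of the resulting flow (simplified: the signature, the RequestProvider source type, and the domainAccessedTime bookkeeping are inferred from this diff and its tests, not copied verbatim from the file):

// Sketch only: the post-change delayRequest() flow, not the literal method body.
// Assumes the BasicCrawler members visible in this diff (sameDomainDelayMillis,
// domainAccessedTime, log) and a delay configured via sameDomainDelaySecs.
protected delayRequest(request: Request, source: RequestProvider): boolean {
    const domain = new URL(request.url).hostname;
    const now = Date.now();
    const lastAccessTime = this.domainAccessedTime.get(domain);

    // First hit on this domain, or the delay has already elapsed: no need to wait.
    if (lastAccessTime === undefined || now - lastAccessTime >= this.sameDomainDelayMillis) {
        this.domainAccessedTime.set(domain, now);
        return false;
    }

    const delay = lastAccessTime + this.sameDomainDelayMillis - now;
    this.log.debug(
        `Request ${request.url} (${request.id}) will be reclaimed after ${delay} milliseconds due to same domain delay`,
    );
    setTimeout(async () => {
        // The request was never removed from the queue's in-progress set, so a
        // concurrent fetchNextRequest() cannot return it during this window;
        // reclaimRequest() alone makes it available again.
        await source.reclaimRequest(request, { forefront: request.userData?.__crawlee?.forefront });
    }, delay);

    return true; // caller skips this request for now
}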
test/core/crawlers/basic_crawler.test.ts (50 changes: 49 additions & 1 deletion)

@@ -23,7 +23,7 @@ import {
     RequestList,
     RequestQueue,
 } from '@crawlee/basic';
-import { RequestState } from '@crawlee/core';
+import { RequestState, RequestQueueV1 } from '@crawlee/core';
 import type { Dictionary } from '@crawlee/utils';
 import { RobotsTxtFile, sleep } from '@crawlee/utils';
 import express from 'express';
@@ -2090,4 +2090,52 @@ describe('BasicCrawler', () => {
             expect(crawlerB.requestQueue?.config).toBe(configB);
         });
     });
+
+    describe('sameDomainDelaySecs race condition', () => {
+        test('delayRequest should keep request in inProgress during delay', async () => {
+            const requestQueue = await RequestQueueV1.open();
+            await requestQueue.addRequest({ url: 'http://example.com/a', uniqueKey: 'a' });
+
+            const request = await requestQueue.fetchNextRequest();
+            expect(request).not.toBeNull();
+            expect(requestQueue.inProgressCount()).toBe(1);
+
+            const crawler = new BasicCrawler({
+                requestQueue,
+                sameDomainDelaySecs: 10,
+                requestHandler: async () => {},
+            });
+
+            // Set domain access time to force delay trigger
+            (crawler as any).domainAccessedTime.set('example.com', Date.now());
+
+            const delayed = (crawler as any).delayRequest(request, requestQueue);
+            expect(delayed).toBe(true);
+
+            // Request must remain in inProgress to prevent duplicate fetching
+            expect(requestQueue.inProgressCount()).toBe(1);
+        });
+
+        test('second fetchNextRequest should not return the same request after delayRequest', async () => {
+            const requestQueue = await RequestQueueV1.open();
+            await requestQueue.addRequest({ url: 'http://example.com/a', uniqueKey: 'a' });
+
+            const r1 = await requestQueue.fetchNextRequest();
+            expect(r1).not.toBeNull();
+
+            const crawler = new BasicCrawler({
+                requestQueue,
+                sameDomainDelaySecs: 10,
+                requestHandler: async () => {},
+            });
+            (crawler as any).domainAccessedTime.set('example.com', Date.now());
+
+            const delayed = (crawler as any).delayRequest(r1, requestQueue);
+            expect(delayed).toBe(true);
+
+            // Another worker must not get the same request during the delay window
+            const r1Again = await requestQueue.fetchNextRequest();
+            expect(r1Again).toBeNull();
+        });
+    });
 });
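For reference, the interleaving these tests guard against, written out as a hypothetical trace of the pre-fix behavior (worker names and step numbers are illustrative, not taken from the code):

// 1. Worker A: fetchNextRequest() returns request R, and R enters the inProgress set.
// 2. Worker A: delayRequest(R, queue) hits the same-domain delay; the old code
//    deleted R from inProgress while the setTimeout() callback was still pending.
// 3. Worker B: fetchNextRequest() no longer sees R as in progress, so it can
//    return R a second time and the same URL gets processed twice.
// 4. Timer fires: the old code re-added R to inProgress and reclaimed it, on top
//    of the duplicate handed out in step 3. Keeping R in inProgress for the whole
//    delay window (the new behavior) closes the window exploited in step 3.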