diff options
| author | MohamedBassem <me@mbassem.com> | 2024-02-07 18:29:52 +0000 |
|---|---|---|
| committer | MohamedBassem <me@mbassem.com> | 2024-02-07 18:37:20 +0000 |
| commit | 3ec45e8bbb8285b17c703907d4c161b633663096 (patch) | |
| tree | ee52a753740a4a45e0ffe34840fc878ac383a5e2 /crawler/index.ts | |
| parent | b12b964e0617f410b5c7b0989754cf94d01177cf (diff) | |
| download | karakeep-3ec45e8bbb8285b17c703907d4c161b633663096.tar.zst | |
[refactor] Rename the crawlers package to workers
Diffstat (limited to 'crawler/index.ts')
| -rw-r--r-- | crawler/index.ts | 32 |
1 file changed, 0 insertions, 32 deletions
diff --git a/crawler/index.ts b/crawler/index.ts
deleted file mode 100644
index 76c6f03f..00000000
--- a/crawler/index.ts
+++ /dev/null
@@ -1,32 +0,0 @@
-import { Worker } from "bullmq";
-
-import {
-  LinkCrawlerQueue,
-  ZCrawlLinkRequest,
-  queueConnectionDetails,
-} from "@remember/shared/queues";
-import logger from "@remember/shared/logger";
-import runCrawler from "./crawler";
-
-logger.info("Starting crawler worker ...");
-
-const crawlerWorker = new Worker<ZCrawlLinkRequest, void>(
-  LinkCrawlerQueue.name,
-  runCrawler,
-  {
-    connection: queueConnectionDetails,
-    autorun: false,
-  },
-);
-
-crawlerWorker.on("completed", (job) => {
-  const jobId = job?.id || "unknown";
-  logger.info(`[Crawler][${jobId}] Completed successfully`);
-});
-
-crawlerWorker.on("failed", (job, error) => {
-  const jobId = job?.id || "unknown";
-  logger.error(`[Crawler][${jobId}] Crawling job failed: ${error}`);
-});
-
-await Promise.all([crawlerWorker.run()]);
