diff options
| author | MohamedBassem <me@mbassem.com> | 2024-02-07 18:29:52 +0000 |
|---|---|---|
| committer | MohamedBassem <me@mbassem.com> | 2024-02-07 18:37:20 +0000 |
| commit | 3ec45e8bbb8285b17c703907d4c161b633663096 (patch) | |
| tree | ee52a753740a4a45e0ffe34840fc878ac383a5e2 /workers/index.ts | |
| parent | b12b964e0617f410b5c7b0989754cf94d01177cf (diff) | |
| download | karakeep-3ec45e8bbb8285b17c703907d4c161b633663096.tar.zst | |
[refactor] Rename the crawlers package to workers
Diffstat (limited to 'workers/index.ts')
| -rw-r--r-- | workers/index.ts | 32 |
1 file changed, 32 insertions, 0 deletions
diff --git a/workers/index.ts b/workers/index.ts new file mode 100644 index 00000000..76c6f03f --- /dev/null +++ b/workers/index.ts @@ -0,0 +1,32 @@ +import { Worker } from "bullmq"; + +import { + LinkCrawlerQueue, + ZCrawlLinkRequest, + queueConnectionDetails, +} from "@remember/shared/queues"; +import logger from "@remember/shared/logger"; +import runCrawler from "./crawler"; + +logger.info("Starting crawler worker ..."); + +const crawlerWorker = new Worker<ZCrawlLinkRequest, void>( + LinkCrawlerQueue.name, + runCrawler, + { + connection: queueConnectionDetails, + autorun: false, + }, +); + +crawlerWorker.on("completed", (job) => { + const jobId = job?.id || "unknown"; + logger.info(`[Crawler][${jobId}] Completed successfully`); +}); + +crawlerWorker.on("failed", (job, error) => { + const jobId = job?.id || "unknown"; + logger.error(`[Crawler][${jobId}] Crawling job failed: ${error}`); +}); + +await Promise.all([crawlerWorker.run()]); |
