diff options
| author | Ahmad Mujahid <55625580+AhmadMuj@users.noreply.github.com> | 2024-04-12 00:52:53 +0400 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2024-04-11 23:52:53 +0300 |
| commit | 95cf8f47300d6eb6efe36d44bcab0f44a8e27585 (patch) | |
| tree | dcc64d69e11c80d4b84e977e5b27d135a8d28918 /apps/workers/crawlerWorker.ts | |
| parent | 238c2967b269ca0f66d8e759c6a0234107e1fd1e (diff) | |
| download | karakeep-95cf8f47300d6eb6efe36d44bcab0f44a8e27585.tar.zst | |
feature: Recrawl failed links from admin UI (#95)
* feature: Retry failed crawling URLs
* fix: Enhancing visuals and some minor changes.
Diffstat (limited to 'apps/workers/crawlerWorker.ts')
| -rw-r--r-- | apps/workers/crawlerWorker.ts | 20 |
1 file changed, 20 insertions, 0 deletions
diff --git a/apps/workers/crawlerWorker.ts b/apps/workers/crawlerWorker.ts index eec8cd98..91b0a03f 100644 --- a/apps/workers/crawlerWorker.ts +++ b/apps/workers/crawlerWorker.ts @@ -124,17 +124,37 @@ export class CrawlerWorker { worker.on("completed", (job) => { const jobId = job?.id ?? "unknown"; logger.info(`[Crawler][${jobId}] Completed successfully`); + const bookmarkId = job?.data.bookmarkId; + if (bookmarkId) { + changeBookmarkStatus(bookmarkId, "success"); + } }); worker.on("failed", (job, error) => { const jobId = job?.id ?? "unknown"; logger.error(`[Crawler][${jobId}] Crawling job failed: ${error}`); + const bookmarkId = job?.data.bookmarkId; + if (bookmarkId) { + changeBookmarkStatus(bookmarkId, "failure"); + } }); return worker; } } +async function changeBookmarkStatus( + bookmarkId: string, + crawlStatus: "success" | "failure", +) { + await db + .update(bookmarkLinks) + .set({ + crawlStatus, + }) + .where(eq(bookmarkLinks.id, bookmarkId)); +} + async function getBookmarkUrl(bookmarkId: string) { const bookmark = await db.query.bookmarkLinks.findFirst({ where: eq(bookmarkLinks.id, bookmarkId), |
