about summary refs log tree commit diff stats
path: root/apps/workers
diff options
context:
space:
mode:
author: MohamedBassem <me@mbassem.com> 2024-03-30 16:26:16 +0000
committer: MohamedBassem <me@mbassem.com> 2024-03-30 16:26:16 +0000
commit: 46b78eaac30be26fe40520e97786563344af8403 (patch)
tree: c4c0e1ae1d3d21a6f1fbf5f44f68e99243bbb5d3 /apps/workers
parent: 853ed13450b3a0d92cba144cc0dfd0696e7c810c (diff)
download: karakeep-46b78eaac30be26fe40520e97786563344af8403.tar.zst
format: Add missing lint and format, and format the entire repo
Diffstat (limited to 'apps/workers')
-rw-r--r--  apps/workers/crawlerWorker.ts | 11
-rw-r--r--  apps/workers/exit.ts          |  2
-rw-r--r--  apps/workers/index.ts         |  7
-rw-r--r--  apps/workers/inference.ts     | 10
-rw-r--r--  apps/workers/openaiWorker.ts  | 28
-rw-r--r--  apps/workers/package.json     |  2
-rw-r--r--  apps/workers/searchWorker.ts  |  2
7 files changed, 37 insertions, 25 deletions
diff --git a/apps/workers/crawlerWorker.ts b/apps/workers/crawlerWorker.ts
index 3f7bff94..d1ebbdaa 100644
--- a/apps/workers/crawlerWorker.ts
+++ b/apps/workers/crawlerWorker.ts
@@ -1,8 +1,10 @@
import assert from "assert";
import * as dns from "dns";
+import type { Job } from "bullmq";
+import type { Browser } from "puppeteer";
import { Readability } from "@mozilla/readability";
import { Mutex } from "async-mutex";
-import { Job, Worker } from "bullmq";
+import { Worker } from "bullmq";
import DOMPurify from "dompurify";
import { eq } from "drizzle-orm";
import { isShuttingDown } from "exit";
@@ -15,11 +17,11 @@ import metascraperReadability from "metascraper-readability";
import metascraperTitle from "metascraper-title";
import metascraperTwitter from "metascraper-twitter";
import metascraperUrl from "metascraper-url";
-import { Browser } from "puppeteer";
import puppeteer from "puppeteer-extra";
import AdblockerPlugin from "puppeteer-extra-plugin-adblocker";
import StealthPlugin from "puppeteer-extra-plugin-stealth";
+import type { ZCrawlLinkRequest } from "@hoarder/shared/queues";
import { db } from "@hoarder/db";
import { bookmarkLinks } from "@hoarder/db/schema";
import serverConfig from "@hoarder/shared/config";
@@ -29,7 +31,6 @@ import {
OpenAIQueue,
queueConnectionDetails,
SearchIndexingQueue,
- ZCrawlLinkRequest,
zCrawlLinkRequestSchema,
} from "@hoarder/shared/queues";
@@ -83,7 +84,7 @@ async function launchBrowser() {
}, 5000);
return;
}
- browser.on("disconnected", async (): Promise<void> => {
+ browser.on("disconnected", () => {
if (isShuttingDown) {
logger.info(
"The puppeteer browser got disconnected. But we're shutting down so won't restart it.",
@@ -93,7 +94,7 @@ async function launchBrowser() {
logger.info(
"The puppeteer browser got disconnected. Will attempt to launch it again.",
);
- await launchBrowser();
+ launchBrowser();
});
});
}
diff --git a/apps/workers/exit.ts b/apps/workers/exit.ts
index 156cf2c6..29bfa5ef 100644
--- a/apps/workers/exit.ts
+++ b/apps/workers/exit.ts
@@ -3,7 +3,7 @@ import logger from "@hoarder/shared/logger";
export let isShuttingDown = false;
export const shutdownPromise = new Promise((resolve) => {
- process.on('SIGTERM', () => {
+ process.on("SIGTERM", () => {
logger.info("Received SIGTERM, shutting down ...");
isShuttingDown = true;
resolve("");
diff --git a/apps/workers/index.ts b/apps/workers/index.ts
index 4db524ef..24bdc67b 100644
--- a/apps/workers/index.ts
+++ b/apps/workers/index.ts
@@ -1,14 +1,15 @@
import "dotenv/config";
+
import { CrawlerWorker } from "./crawlerWorker";
+import { shutdownPromise } from "./exit";
import { OpenAiWorker } from "./openaiWorker";
import { SearchIndexingWorker } from "./searchWorker";
-import { shutdownPromise } from "./exit";
async function main() {
const [crawler, openai, search] = [
await CrawlerWorker.build(),
- await OpenAiWorker.build(),
- await SearchIndexingWorker.build(),
+ OpenAiWorker.build(),
+ SearchIndexingWorker.build(),
];
await Promise.any([
diff --git a/apps/workers/inference.ts b/apps/workers/inference.ts
index 3b0b5943..13b10aba 100644
--- a/apps/workers/inference.ts
+++ b/apps/workers/inference.ts
@@ -124,7 +124,9 @@ class OllamaInferenceClient implements InferenceClient {
// Using stream + accumulating the response so far is a workaround.
// https://github.com/ollama/ollama-js/issues/72
totalTokens = NaN;
- logger.warn(`Got an exception from ollama, will still attempt to deserialize the response we got so far: ${e}`)
+ logger.warn(
+ `Got an exception from ollama, will still attempt to deserialize the response we got so far: ${e}`,
+ );
}
return { response, totalTokens };
@@ -139,6 +141,10 @@ class OllamaInferenceClient implements InferenceClient {
_contentType: string,
image: string,
): Promise<InferenceResponse> {
- return await this.runModel(serverConfig.inference.imageModel, prompt, image);
+ return await this.runModel(
+ serverConfig.inference.imageModel,
+ prompt,
+ image,
+ );
}
}
diff --git a/apps/workers/openaiWorker.ts b/apps/workers/openaiWorker.ts
index ee48d148..bb8015a5 100644
--- a/apps/workers/openaiWorker.ts
+++ b/apps/workers/openaiWorker.ts
@@ -1,7 +1,9 @@
-import { Job, Worker } from "bullmq";
+import type { Job } from "bullmq";
+import { Worker } from "bullmq";
import { and, eq, inArray } from "drizzle-orm";
import { z } from "zod";
+import type { ZOpenAIRequest } from "@hoarder/shared/queues";
import { db } from "@hoarder/db";
import { bookmarks, bookmarkTags, tagsOnBookmarks } from "@hoarder/db/schema";
import { readAsset } from "@hoarder/shared/assetdb";
@@ -10,11 +12,11 @@ import {
OpenAIQueue,
queueConnectionDetails,
SearchIndexingQueue,
- ZOpenAIRequest,
zOpenAIRequestSchema,
} from "@hoarder/shared/queues";
-import { InferenceClient, InferenceClientFactory } from "./inference";
+import type { InferenceClient } from "./inference";
+import { InferenceClientFactory } from "./inference";
const openAIResponseSchema = z.object({
tags: z.array(z.string()),
@@ -41,7 +43,7 @@ async function attemptMarkTaggingStatus(
}
export class OpenAiWorker {
- static async build() {
+ static build() {
logger.info("Starting inference worker ...");
const worker = new Worker<ZOpenAIRequest, void>(
OpenAIQueue.name,
@@ -52,16 +54,16 @@ export class OpenAiWorker {
},
);
- worker.on("completed", async (job): Promise<void> => {
+ worker.on("completed", (job) => {
const jobId = job?.id ?? "unknown";
logger.info(`[inference][${jobId}] Completed successfully`);
- await attemptMarkTaggingStatus(job?.data, "success");
+ attemptMarkTaggingStatus(job?.data, "success");
});
- worker.on("failed", async (job, error): Promise<void> => {
+ worker.on("failed", (job, error) => {
const jobId = job?.id ?? "unknown";
logger.error(`[inference][${jobId}] inference job failed: ${error}`);
- await attemptMarkTaggingStatus(job?.data, "failure");
+ attemptMarkTaggingStatus(job?.data, "failure");
});
return worker;
@@ -90,11 +92,11 @@ function buildPrompt(
bookmark: NonNullable<Awaited<ReturnType<typeof fetchBookmark>>>,
) {
const truncateContent = (content: string) => {
- let words = content.split(" ");
- if (words.length > 1500) {
- words = words.slice(1500);
- content = words.join(" ");
- }
+ let words = content.split(" ");
+ if (words.length > 1500) {
+ words = words.slice(1500);
+ content = words.join(" ");
+ }
return content;
};
if (bookmark.link) {
diff --git a/apps/workers/package.json b/apps/workers/package.json
index 27a02f88..c9de43a4 100644
--- a/apps/workers/package.json
+++ b/apps/workers/package.json
@@ -44,6 +44,8 @@
"scripts": {
"start": "tsx watch index.ts",
"start:prod": "tsx index.ts",
+ "lint": "eslint .",
+ "format": "prettier . --ignore-path ../../.prettierignore",
"typecheck": "tsc --noEmit"
},
"eslintConfig": {
diff --git a/apps/workers/searchWorker.ts b/apps/workers/searchWorker.ts
index ae916441..79b0c8c1 100644
--- a/apps/workers/searchWorker.ts
+++ b/apps/workers/searchWorker.ts
@@ -14,7 +14,7 @@ import {
import { getSearchIdxClient } from "@hoarder/shared/search";
export class SearchIndexingWorker {
- static async build() {
+ static build() {
logger.info("Starting search indexing worker ...");
const worker = new Worker<ZSearchIndexingRequest, void>(
SearchIndexingQueue.name,