Diffstat
 apps/workers/exit.ts                              |  4
 apps/workers/index.ts                             |  7
 apps/workers/metrics.ts                           | 17
 apps/workers/package.json                         |  4
 apps/workers/server.ts                            | 51
 apps/workers/workers/assetPreprocessingWorker.ts  |  3
 apps/workers/workers/crawlerWorker.ts             |  9
 apps/workers/workers/feedWorker.ts                |  3
 apps/workers/workers/inference/inferenceWorker.ts |  3
 apps/workers/workers/ruleEngineWorker.ts          |  3
 apps/workers/workers/searchWorker.ts              |  3
 apps/workers/workers/tidyAssetsWorker.ts          |  3
 apps/workers/workers/videoWorker.ts               |  3
 apps/workers/workers/webhookWorker.ts             |  3
 docs/docs/03-configuration.md                     | 36
 packages/shared/config.ts                         |  8
 pnpm-lock.yaml                                    | 55
 17 files changed, 181 insertions(+), 34 deletions(-)
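The headline change: the workers process now starts a small Hono HTTP server (apps/workers/server.ts) that exposes `/health` and a bearer-token-protected Prometheus `/metrics` endpoint on `WORKERS_HOST`/`WORKERS_PORT`, and each queue worker reports completed/failed counts through the `karakeep_worker_stats` counter. A minimal smoke-test sketch of scraping that endpoint, assuming the port has been pinned (the default is 0, i.e. a random port) and that the existing `PROMETHEUS_AUTH_TOKEN` is the value behind `serverConfig.prometheus.metricsToken` — both assumptions, not defaults introduced here:

```ts
// Sketch: smoke-test the new workers metrics endpoint (Node 18+ global fetch).
// Port 9464 is illustrative; WORKERS_PORT defaults to 0 (random port).
const port = process.env.WORKERS_PORT ?? "9464";
const token = process.env.PROMETHEUS_AUTH_TOKEN ?? "";

async function checkWorkerMetrics(): Promise<void> {
  const res = await fetch(`http://127.0.0.1:${port}/metrics`, {
    headers: { Authorization: `Bearer ${token}` },
  });
  if (!res.ok) {
    throw new Error(`metrics endpoint returned ${res.status}`);
  }
  const body = await res.text();
  // The counter registered in apps/workers/metrics.ts shows up once at least
  // one job has completed or failed.
  console.log(
    body.split("\n").filter((line) => line.startsWith("karakeep_worker_stats")),
  );
}

checkWorkerMetrics().catch((err) => {
  console.error(err);
  process.exit(1);
});
```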
diff --git a/apps/workers/exit.ts b/apps/workers/exit.ts
index cd50a323..d4bc84f1 100644
--- a/apps/workers/exit.ts
+++ b/apps/workers/exit.ts
@@ -1,11 +1,11 @@
 import logger from "@karakeep/shared/logger";
 
-export let isShuttingDown = false;
+export const exitAbortController = new AbortController();
 
 export const shutdownPromise = new Promise((resolve) => {
   process.on("SIGTERM", () => {
     logger.info("Received SIGTERM, shutting down ...");
-    isShuttingDown = true;
+    exitAbortController.abort();
     resolve("");
   });
 });
diff --git a/apps/workers/index.ts b/apps/workers/index.ts
index a21b9c2d..f34e4722 100644
--- a/apps/workers/index.ts
+++ b/apps/workers/index.ts
@@ -1,5 +1,7 @@
 import "dotenv/config";
 
+import { buildServer } from "server";
+
 import { loadAllPlugins } from "@karakeep/shared-server";
 import serverConfig from "@karakeep/shared/config";
 import logger from "@karakeep/shared/logger";
@@ -31,6 +33,7 @@ async function main() {
     assetPreprocessing,
     webhook,
     ruleEngine,
+    httpServer,
   ] = [
     await CrawlerWorker.build(),
     OpenAiWorker.build(),
@@ -41,6 +44,7 @@ async function main() {
     AssetPreprocessingWorker.build(),
     WebhookWorker.build(),
     RuleEngineWorker.build(),
+    buildServer(),
   ];
 
   FeedRefreshingWorker.start();
@@ -55,6 +59,7 @@ async function main() {
       assetPreprocessing.run(),
       webhook.run(),
       ruleEngine.run(),
+      httpServer.serve(),
     ]),
     shutdownPromise,
   ]);
@@ -72,6 +77,8 @@ async function main() {
   assetPreprocessing.stop();
   webhook.stop();
   ruleEngine.stop();
+  await httpServer.stop();
+  process.exit(0);
 }
 
 main();
diff --git a/apps/workers/metrics.ts b/apps/workers/metrics.ts
new file mode 100644
index 00000000..04eec1fb
--- /dev/null
+++ b/apps/workers/metrics.ts
@@ -0,0 +1,17 @@
+import { prometheus } from "@hono/prometheus";
+import { Counter, Registry } from "prom-client";
+
+const registry = new Registry();
+
+export const { printMetrics } = prometheus({
+  registry: registry,
+  prefix: "karakeep_",
+});
+
+export const workerStatsCounter = new Counter({
+  name: "karakeep_worker_stats",
+  help: "Stats for each worker",
+  labelNames: ["worker_name", "status"],
+});
+
+registry.registerMetric(workerStatsCounter);
diff --git a/apps/workers/package.json b/apps/workers/package.json
index a771c710..2c0b9a77 100644
--- a/apps/workers/package.json
+++ b/apps/workers/package.json
@@ -6,6 +6,8 @@
   "type": "module",
   "dependencies": {
     "@ghostery/adblocker-playwright": "^2.5.1",
+    "@hono/node-server": "^1.19.0",
+    "@hono/prometheus": "^1.0.2",
     "@karakeep/db": "workspace:^0.1.0",
     "@karakeep/shared": "workspace:^0.1.0",
     "@karakeep/shared-server": "workspace:^0.1.0",
@@ -18,6 +20,7 @@
     "dotenv": "^16.4.1",
     "drizzle-orm": "^0.44.2",
     "execa": "9.3.1",
+    "hono": "^4.7.10",
     "http-proxy-agent": "^7.0.2",
     "https-proxy-agent": "^7.0.6",
     "jsdom": "^24.0.0",
@@ -42,6 +45,7 @@
     "pdfjs-dist": "^4.2.67",
     "playwright": "^1.42.1",
     "playwright-extra": "^4.3.6",
+    "prom-client": "^15.1.3",
     "puppeteer-extra-plugin-stealth": "^2.11.2",
     "rss-parser": "^3.13.0",
     "tesseract.js": "^5.1.1",
diff --git a/apps/workers/server.ts b/apps/workers/server.ts
new file mode 100644
index 00000000..f1b8a11d
--- /dev/null
+++ b/apps/workers/server.ts
@@ -0,0 +1,51 @@
+import { serve } from "@hono/node-server";
+import { Hono } from "hono";
+import { bearerAuth } from "hono/bearer-auth";
+
+import serverConfig from "@karakeep/shared/config";
+import logger from "@karakeep/shared/logger";
+
+import { printMetrics } from "./metrics";
+
+const app = new Hono()
+  .get("/health", (c) =>
+    c.json({ status: "ok", timestamp: new Date().toISOString() }),
+  )
+  .get(
+    "/metrics",
+    bearerAuth({ token: serverConfig.prometheus.metricsToken }),
+    printMetrics,
+  );
+
+export function buildServer() {
+  const server = serve(
+    {
+      fetch: app.fetch,
+      port: serverConfig.workers.port,
+      hostname: serverConfig.workers.host,
+    },
+    (info) => {
+      logger.info(`Listening on http://${info.address}:${info.port}`);
+    },
+  );
+  return {
+    _server: server,
+    stop: () =>
+      new Promise<void>((resolve, reject) => {
+        server.close((err) => {
+          if (err) {
+            reject(err);
+          } else {
+            resolve();
+          }
+        });
+      }),
+    serve: () =>
+      new Promise<void>((resolve, reject) => {
+        server.on("error", reject);
+        server.on("close", () => resolve());
+      }),
+  };
+}
+
+export default app;
diff --git a/apps/workers/workers/assetPreprocessingWorker.ts b/apps/workers/workers/assetPreprocessingWorker.ts
index c3ecd1e0..73cf8bb5 100644
--- a/apps/workers/workers/assetPreprocessingWorker.ts
+++ b/apps/workers/workers/assetPreprocessingWorker.ts
@@ -1,6 +1,7 @@
 import os from "os";
 import { eq } from "drizzle-orm";
 import { DequeuedJob, EnqueueOptions, Runner } from "liteque";
+import { workerStatsCounter } from "metrics";
 import PDFParser from "pdf2json";
 import { fromBuffer } from "pdf2pic";
 import { createWorker } from "tesseract.js";
@@ -34,11 +35,13 @@ export class AssetPreprocessingWorker {
       {
         run: run,
         onComplete: async (job) => {
+          workerStatsCounter.labels("assetPreprocessing", "completed").inc();
           const jobId = job.id;
           logger.info(`[assetPreprocessing][${jobId}] Completed successfully`);
           return Promise.resolve();
         },
         onError: async (job) => {
+          workerStatsCounter.labels("assetPreprocessing", "failed").inc();
           const jobId = job.id;
           logger.error(
             `[assetPreprocessing][${jobId}] Asset preprocessing failed: ${job.error}\n${job.error.stack}`,
diff --git a/apps/workers/workers/crawlerWorker.ts b/apps/workers/workers/crawlerWorker.ts
index f22a68d6..e032ab16 100644
--- a/apps/workers/workers/crawlerWorker.ts
+++ b/apps/workers/workers/crawlerWorker.ts
@@ -8,7 +8,7 @@ import { Mutex } from "async-mutex";
 import DOMPurify from "dompurify";
 import { eq } from "drizzle-orm";
 import { execa } from "execa";
-import { isShuttingDown } from "exit";
+import { exitAbortController } from "exit";
 import { HttpProxyAgent } from "http-proxy-agent";
 import { HttpsProxyAgent } from "https-proxy-agent";
 import { JSDOM, VirtualConsole } from "jsdom";
@@ -24,6 +24,7 @@ import metascraperPublisher from "metascraper-publisher";
 import metascraperTitle from "metascraper-title";
 import metascraperTwitter from "metascraper-twitter";
 import metascraperUrl from "metascraper-url";
+import { workerStatsCounter } from "metrics";
 import fetch from "node-fetch";
 import { Browser, BrowserContextOptions } from "playwright";
 import { chromium } from "playwright-extra";
@@ -203,7 +204,7 @@ async function launchBrowser() {
       logger.error(
         `[Crawler] Failed to connect to the browser instance, will retry in 5 secs: ${globalBrowserResult.error.stack}`,
       );
-      if (isShuttingDown) {
+      if (exitAbortController.signal.aborted) {
        logger.info("[Crawler] We're shutting down so won't retry.");
        return;
      }
@@ -214,7 +215,7 @@
   }
   globalBrowser = globalBrowserResult.data;
   globalBrowser?.on("disconnected", () => {
-    if (isShuttingDown) {
+    if (exitAbortController.signal.aborted) {
       logger.info(
         "[Crawler] The Playwright browser got disconnected. But we're shutting down so won't restart it.",
       );
@@ -265,6 +266,7 @@ export class CrawlerWorker {
        /* timeoutSec */ serverConfig.crawler.jobTimeoutSec,
      ),
      onComplete: async (job) => {
+       workerStatsCounter.labels("crawler", "completed").inc();
        const jobId = job.id;
        logger.info(`[Crawler][${jobId}] Completed successfully`);
        const bookmarkId = job.data.bookmarkId;
@@ -273,6 +275,7 @@ export class CrawlerWorker {
        }
      },
      onError: async (job) => {
+       workerStatsCounter.labels("crawler", "failed").inc();
        const jobId = job.id;
        logger.error(
          `[Crawler][${jobId}] Crawling job failed: ${job.error}\n${job.error.stack}`,
diff --git a/apps/workers/workers/feedWorker.ts b/apps/workers/workers/feedWorker.ts
index 682889f0..62106fc8 100644
--- a/apps/workers/workers/feedWorker.ts
+++ b/apps/workers/workers/feedWorker.ts
@@ -1,5 +1,6 @@
 import { and, eq, inArray } from "drizzle-orm";
 import { DequeuedJob, Runner } from "liteque";
+import { workerStatsCounter } from "metrics";
 import cron from "node-cron";
 import Parser from "rss-parser";
 import { buildImpersonatingTRPCClient } from "trpc";
@@ -50,6 +51,7 @@ export class FeedWorker {
      {
        run: run,
        onComplete: async (job) => {
+         workerStatsCounter.labels("feed", "completed").inc();
          const jobId = job.id;
          logger.info(`[feed][${jobId}] Completed successfully`);
          await db
@@ -58,6 +60,7 @@ export class FeedWorker {
            .update(rssFeedsTable)
            .set({ lastFetchedAt: new Date(), lastFetchedStatus: "success" })
            .where(eq(rssFeedsTable.id, job.data?.feedId));
        },
        onError: async (job) => {
+         workerStatsCounter.labels("feed", "failed").inc();
          const jobId = job.id;
          logger.error(
            `[feed][${jobId}] Feed fetch job failed: ${job.error}\n${job.error.stack}`,
diff --git a/apps/workers/workers/inference/inferenceWorker.ts b/apps/workers/workers/inference/inferenceWorker.ts
index 0dba6f58..32de3806 100644
--- a/apps/workers/workers/inference/inferenceWorker.ts
+++ b/apps/workers/workers/inference/inferenceWorker.ts
@@ -1,5 +1,6 @@
 import { eq } from "drizzle-orm";
 import { DequeuedJob, Runner } from "liteque";
+import { workerStatsCounter } from "metrics";
 
 import type { ZOpenAIRequest } from "@karakeep/shared/queues";
 import { db } from "@karakeep/db";
@@ -43,11 +44,13 @@ export class OpenAiWorker {
       {
         run: runOpenAI,
         onComplete: async (job) => {
+          workerStatsCounter.labels("inference", "completed").inc();
           const jobId = job.id;
           logger.info(`[inference][${jobId}] Completed successfully`);
           await attemptMarkStatus(job.data, "success");
         },
         onError: async (job) => {
+          workerStatsCounter.labels("inference", "failed").inc();
           const jobId = job.id;
           logger.error(
             `[inference][${jobId}] inference job failed: ${job.error}\n${job.error.stack}`,
diff --git a/apps/workers/workers/ruleEngineWorker.ts b/apps/workers/workers/ruleEngineWorker.ts
index 39f0a523..2a4fbb1a 100644
--- a/apps/workers/workers/ruleEngineWorker.ts
+++ b/apps/workers/workers/ruleEngineWorker.ts
@@ -1,5 +1,6 @@
 import { eq } from "drizzle-orm";
 import { DequeuedJob, Runner } from "liteque";
+import { workerStatsCounter } from "metrics";
 import { buildImpersonatingAuthedContext } from "trpc";
 
 import type { ZRuleEngineRequest } from "@karakeep/shared/queues";
@@ -21,11 +22,13 @@ export class RuleEngineWorker {
       {
         run: runRuleEngine,
         onComplete: (job) => {
+          workerStatsCounter.labels("ruleEngine", "completed").inc();
           const jobId = job.id;
           logger.info(`[ruleEngine][${jobId}] Completed successfully`);
           return Promise.resolve();
         },
         onError: (job) => {
+          workerStatsCounter.labels("ruleEngine", "failed").inc();
           const jobId = job.id;
           logger.error(
             `[ruleEngine][${jobId}] rule engine job failed: ${job.error}\n${job.error.stack}`,
diff --git a/apps/workers/workers/searchWorker.ts b/apps/workers/workers/searchWorker.ts
index 4c924ceb..7dd25ee8 100644
--- a/apps/workers/workers/searchWorker.ts
+++ b/apps/workers/workers/searchWorker.ts
@@ -1,5 +1,6 @@
 import { eq } from "drizzle-orm";
 import { DequeuedJob, Runner } from "liteque";
+import { workerStatsCounter } from "metrics";
 
 import type { ZSearchIndexingRequest } from "@karakeep/shared/queues";
 import { db } from "@karakeep/db";
@@ -25,11 +26,13 @@ export class SearchIndexingWorker {
       {
         run: runSearchIndexing,
         onComplete: (job) => {
+          workerStatsCounter.labels("search", "completed").inc();
           const jobId = job.id;
           logger.info(`[search][${jobId}] Completed successfully`);
           return Promise.resolve();
         },
         onError: (job) => {
+          workerStatsCounter.labels("search", "failed").inc();
           const jobId = job.id;
           logger.error(
             `[search][${jobId}] search job failed: ${job.error}\n${job.error.stack}`,
diff --git a/apps/workers/workers/tidyAssetsWorker.ts b/apps/workers/workers/tidyAssetsWorker.ts
index d4c8abdb..cf3e33b6 100644
--- a/apps/workers/workers/tidyAssetsWorker.ts
+++ b/apps/workers/workers/tidyAssetsWorker.ts
@@ -1,5 +1,6 @@
 import { eq } from "drizzle-orm";
 import { DequeuedJob, Runner } from "liteque";
+import { workerStatsCounter } from "metrics";
 
 import { db } from "@karakeep/db";
 import { assets } from "@karakeep/db/schema";
@@ -19,11 +20,13 @@ export class TidyAssetsWorker {
       {
         run: runTidyAssets,
         onComplete: (job) => {
+          workerStatsCounter.labels("tidyAssets", "completed").inc();
           const jobId = job.id;
           logger.info(`[tidyAssets][${jobId}] Completed successfully`);
           return Promise.resolve();
         },
         onError: (job) => {
+          workerStatsCounter.labels("tidyAssets", "failed").inc();
           const jobId = job.id;
           logger.error(
             `[tidyAssets][${jobId}] tidy assets job failed: ${job.error}\n${job.error.stack}`,
diff --git a/apps/workers/workers/videoWorker.ts b/apps/workers/workers/videoWorker.ts
index d25c1948..ca46fcee 100644
--- a/apps/workers/workers/videoWorker.ts
+++ b/apps/workers/workers/videoWorker.ts
@@ -3,6 +3,7 @@
 import * as os from "os";
 import path from "path";
import { execa } from "execa";
import { DequeuedJob, Runner } from "liteque";
+import { workerStatsCounter } from "metrics";
import { db } from "@karakeep/db";
import { AssetTypes } from "@karakeep/db/schema";
@@ -41,6 +42,7 @@ export class VideoWorker {
/* timeoutSec */ serverConfig.crawler.downloadVideoTimeout,
),
onComplete: async (job) => {
+ workerStatsCounter.labels("video", "completed").inc();
const jobId = job.id;
logger.info(
`[VideoCrawler][${jobId}] Video Download Completed successfully`,
@@ -48,6 +50,7 @@ export class VideoWorker {
return Promise.resolve();
},
onError: async (job) => {
+ workerStatsCounter.labels("video", "failed").inc();
const jobId = job.id;
logger.error(
`[VideoCrawler][${jobId}] Video Download job failed: ${job.error}`,
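Every worker above gets the same two-line instrumentation: `workerStatsCounter.labels(<worker_name>, "completed" | "failed").inc()` in its `onComplete`/`onError` callbacks. A small sketch of how that repetition could be centralized; the `recordWorkerResult` helper is hypothetical and not part of this change:

```ts
import { workerStatsCounter } from "metrics";

// Hypothetical helper (not in this diff): one place for the label values,
// which also avoids casing drift between the "completed" and "failed" paths.
export function recordWorkerResult(
  workerName: string,
  status: "completed" | "failed",
): void {
  workerStatsCounter.labels(workerName, status).inc();
}

// Usage inside a runner definition, mirroring the pattern added above:
//   onComplete: async (job) => { recordWorkerResult("feed", "completed"); ... },
//   onError: async (job) => { recordWorkerResult("feed", "failed"); ... },
```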
diff --git a/apps/workers/workers/webhookWorker.ts b/apps/workers/workers/webhookWorker.ts index 504f7f9b..96070e22 100644 --- a/apps/workers/workers/webhookWorker.ts +++ b/apps/workers/workers/webhookWorker.ts @@ -1,5 +1,6 @@ import { eq } from "drizzle-orm"; import { DequeuedJob, Runner } from "liteque"; +import { workerStatsCounter } from "metrics"; import fetch from "node-fetch"; import { db } from "@karakeep/db"; @@ -20,11 +21,13 @@ export class WebhookWorker { { run: runWebhook, onComplete: async (job) => { + workerStatsCounter.labels("webhook", "completed").inc(); const jobId = job.id; logger.info(`[webhook][${jobId}] Completed successfully`); return Promise.resolve(); }, onError: async (job) => { + workerStatsCounter.labels("webhook", "failed").inc(); const jobId = job.id; logger.error( `[webhook][${jobId}] webhook job failed: ${job.error}\n${job.error.stack}`, diff --git a/docs/docs/03-configuration.md b/docs/docs/03-configuration.md index 17191752..bd53f1bb 100644 --- a/docs/docs/03-configuration.md +++ b/docs/docs/03-configuration.md @@ -2,23 +2,25 @@ The app is mainly configured by environment variables. All the used environment variables are listed in [packages/shared/config.ts](https://github.com/karakeep-app/karakeep/blob/main/packages/shared/config.ts). The most important ones are: -| Name | Required | Default | Description | -| ------------------------------- | ------------------------------------- | ------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| DATA_DIR | Yes | Not set | The path for the persistent data directory. This is where the db lives. Assets are stored here by default unless `ASSETS_DIR` is set. | -| ASSETS_DIR | No | Not set | The path where crawled assets will be stored. If not set, defaults to `${DATA_DIR}/assets`. | -| NEXTAUTH_URL | Yes | Not set | Should point to the address of your server. The app will function without it, but will redirect you to wrong addresses on signout for example. | -| NEXTAUTH_SECRET | Yes | Not set | Random string used to sign the JWT tokens. Generate one with `openssl rand -base64 36`. | -| MEILI_ADDR | No | Not set | The address of meilisearch. If not set, Search will be disabled. E.g. (`http://meilisearch:7700`) | -| MEILI_MASTER_KEY | Only in Prod and if search is enabled | Not set | The master key configured for meilisearch. Not needed in development environment. Generate one with `openssl rand -base64 36 \| tr -dc 'A-Za-z0-9'` | -| MAX_ASSET_SIZE_MB | No | 50 | Sets the maximum allowed asset size (in MB) to be uploaded | -| DISABLE_NEW_RELEASE_CHECK | No | false | If set to true, latest release check will be disabled in the admin panel. | -| PROMETHEUS_AUTH_TOKEN | No | Random | Enable a prometheus metrics endpoint at `/api/metrics`. This endpoint will require this token being passed in the Authorization header as a Bearer token. If not set, a new random token is generated everytime at startup. | -| RATE_LIMITING_ENABLED | No | false | If set to true, API rate limiting will be enabled. | -| DB_WAL_MODE | No | false | Enables WAL mode for the sqlite database. This should improve the performance of the database. There's no reason why you shouldn't set this to true unless you're running the db on a network attached drive. This will become the default at some time in the future. 
| -| SEARCH_NUM_WORKERS | No | 1 | Number of concurrent workers for search indexing tasks. Increase this if you have a high volume of content being indexed for search. | -| WEBHOOK_NUM_WORKERS | No | 1 | Number of concurrent workers for webhook delivery. Increase this if you have multiple webhook endpoints or high webhook traffic. | -| ASSET_PREPROCESSING_NUM_WORKERS | No | 1 | Number of concurrent workers for asset preprocessing tasks (image processing, OCR, etc.). Increase this if you have many images or documents that need processing. | -| RULE_ENGINE_NUM_WORKERS | No | 1 | Number of concurrent workers for rule engine processing. Increase this if you have complex automation rules that need to be processed quickly. | +| Name | Required | Default | Description | +| ------------------------------- | ------------------------------------- | --------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| PORT | No | 3000 | The port on which the web server will listen. DON'T CHANGE THIS IF YOU'RE USING DOCKER, instead changed the docker bound external port. | +| WORKERS_PORT | No | 0 (Random Port) | The port on which the worker will export its prometheus metrics on `/metrics`. By default it's a random unused port. If you want to utilize those metrics, fix the port to a value (and export it in docker if you're using docker). | +| DATA_DIR | Yes | Not set | The path for the persistent data directory. This is where the db lives. Assets are stored here by default unless `ASSETS_DIR` is set. | +| ASSETS_DIR | No | Not set | The path where crawled assets will be stored. If not set, defaults to `${DATA_DIR}/assets`. | +| NEXTAUTH_URL | Yes | Not set | Should point to the address of your server. The app will function without it, but will redirect you to wrong addresses on signout for example. | +| NEXTAUTH_SECRET | Yes | Not set | Random string used to sign the JWT tokens. Generate one with `openssl rand -base64 36`. | +| MEILI_ADDR | No | Not set | The address of meilisearch. If not set, Search will be disabled. E.g. (`http://meilisearch:7700`) | +| MEILI_MASTER_KEY | Only in Prod and if search is enabled | Not set | The master key configured for meilisearch. Not needed in development environment. Generate one with `openssl rand -base64 36 \| tr -dc 'A-Za-z0-9'` | +| MAX_ASSET_SIZE_MB | No | 50 | Sets the maximum allowed asset size (in MB) to be uploaded | +| DISABLE_NEW_RELEASE_CHECK | No | false | If set to true, latest release check will be disabled in the admin panel. | +| PROMETHEUS_AUTH_TOKEN | No | Random | Enable a prometheus metrics endpoint at `/api/metrics`. This endpoint will require this token being passed in the Authorization header as a Bearer token. If not set, a new random token is generated everytime at startup. | +| RATE_LIMITING_ENABLED | No | false | If set to true, API rate limiting will be enabled. | +| DB_WAL_MODE | No | false | Enables WAL mode for the sqlite database. This should improve the performance of the database. There's no reason why you shouldn't set this to true unless you're running the db on a network attached drive. This will become the default at some time in the future. | +| SEARCH_NUM_WORKERS | No | 1 | Number of concurrent workers for search indexing tasks. Increase this if you have a high volume of content being indexed for search. 
| +| WEBHOOK_NUM_WORKERS | No | 1 | Number of concurrent workers for webhook delivery. Increase this if you have multiple webhook endpoints or high webhook traffic. | +| ASSET_PREPROCESSING_NUM_WORKERS | No | 1 | Number of concurrent workers for asset preprocessing tasks (image processing, OCR, etc.). Increase this if you have many images or documents that need processing. | +| RULE_ENGINE_NUM_WORKERS | No | 1 | Number of concurrent workers for rule engine processing. Increase this if you have complex automation rules that need to be processed quickly. | ## Asset Storage diff --git a/packages/shared/config.ts b/packages/shared/config.ts index 4ba3978a..15c2b4a5 100644 --- a/packages/shared/config.ts +++ b/packages/shared/config.ts @@ -17,6 +17,9 @@ const optionalStringBool = () => .optional(); const allEnv = z.object({ + PORT: z.coerce.number().default(3000), + WORKERS_HOST: z.string().default("127.0.0.1"), + WORKERS_PORT: z.coerce.number().default(0), API_URL: z.string().url().default("http://localhost:3000"), NEXTAUTH_URL: z .string() @@ -146,6 +149,11 @@ const allEnv = z.object({ const serverConfigSchema = allEnv.transform((val, ctx) => { const obj = { + port: val.PORT, + workers: { + host: val.WORKERS_HOST, + port: val.WORKERS_PORT, + }, apiUrl: val.API_URL, publicUrl: val.NEXTAUTH_URL, publicApiUrl: `${val.NEXTAUTH_URL}/api`, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index dd1cb7bb..4eca42eb 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -611,7 +611,7 @@ importers: version: 1.11.10 drizzle-orm: specifier: ^0.44.2 - version: 0.44.2(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(better-sqlite3@11.3.0)(gel@2.1.0) + version: 0.44.2(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(better-sqlite3@11.3.0)(gel@2.1.0)(kysely@0.28.5) fastest-levenshtein: specifier: ^1.0.16 version: 1.0.16 @@ -763,6 +763,12 @@ importers: '@ghostery/adblocker-playwright': specifier: ^2.5.1 version: 2.7.0(playwright@1.53.1) + '@hono/node-server': + specifier: ^1.19.0 + version: 1.19.0(hono@4.7.11) + '@hono/prometheus': + specifier: ^1.0.2 + version: 1.0.2(hono@4.7.11)(prom-client@15.1.3) '@karakeep/db': specifier: workspace:^0.1.0 version: link:../../packages/db @@ -795,10 +801,13 @@ importers: version: 16.4.5 drizzle-orm: specifier: ^0.44.2 - version: 0.44.2(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(better-sqlite3@11.3.0)(gel@2.1.0) + version: 0.44.2(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(better-sqlite3@11.3.0)(gel@2.1.0)(kysely@0.28.5) execa: specifier: 9.3.1 version: 9.3.1 + hono: + specifier: ^4.7.10 + version: 4.7.11 http-proxy-agent: specifier: ^7.0.2 version: 7.0.2 @@ -810,7 +819,7 @@ importers: version: 24.1.3 liteque: specifier: ^0.5.0 - version: 0.5.0(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/react@19.1.8)(better-sqlite3@11.3.0)(react@19.1.0) + version: 0.5.0(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/react@19.1.8)(better-sqlite3@11.3.0)(kysely@0.28.5)(react@19.1.0) metascraper: specifier: ^5.46.18 version: 5.47.1 @@ -871,6 +880,9 @@ importers: playwright-extra: specifier: ^4.3.6 version: 4.3.6(playwright-core@1.53.1)(playwright@1.53.1) + prom-client: + specifier: ^15.1.3 + version: 15.1.3 puppeteer-extra-plugin-stealth: specifier: ^2.11.2 version: 2.11.2(playwright-extra@4.3.6(playwright-core@1.53.1)(playwright@1.53.1)) @@ -977,7 +989,7 @@ importers: version: 11.4.3(typescript@5.8.3) drizzle-orm: specifier: ^0.44.2 - version: 
0.44.2(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(better-sqlite3@11.3.0)(gel@2.1.0) + version: 0.44.2(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(better-sqlite3@11.3.0)(gel@2.1.0)(kysely@0.28.5) hono: specifier: ^4.7.10 version: 4.7.11 @@ -1035,7 +1047,7 @@ importers: version: 0.31.4 drizzle-orm: specifier: ^0.44.2 - version: 0.44.2(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(better-sqlite3@11.3.0)(gel@2.1.0) + version: 0.44.2(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(better-sqlite3@11.3.0)(gel@2.1.0)(kysely@0.28.5) tsx: specifier: ^4.8.1 version: 4.20.3 @@ -1178,7 +1190,7 @@ importers: version: 1.0.20 liteque: specifier: ^0.5.0 - version: 0.5.0(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/react@19.1.8)(better-sqlite3@11.3.0)(react@19.1.0) + version: 0.5.0(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/react@19.1.8)(better-sqlite3@11.3.0)(kysely@0.28.5)(react@19.1.0) nodemailer: specifier: ^7.0.4 version: 7.0.4 @@ -1289,10 +1301,10 @@ importers: version: 2.2.3 drizzle-orm: specifier: ^0.44.2 - version: 0.44.2(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(better-sqlite3@11.3.0)(gel@2.1.0) + version: 0.44.2(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(better-sqlite3@11.3.0)(gel@2.1.0)(kysely@0.28.5) liteque: specifier: ^0.5.0 - version: 0.5.0(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/react@19.1.8)(better-sqlite3@11.3.0)(react@19.1.0) + version: 0.5.0(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/react@19.1.8)(better-sqlite3@11.3.0)(kysely@0.28.5)(react@19.1.0) nodemailer: specifier: ^7.0.4 version: 7.0.4 @@ -3364,6 +3376,12 @@ packages: '@hapi/topo@5.1.0': resolution: {integrity: sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg==} + '@hono/node-server@1.19.0': + resolution: {integrity: sha512-1k8/8OHf5VIymJEcJyVksFpT+AQ5euY0VA5hUkCnlKpD4mr8FSbvXaHblxeTTEr90OaqWzAkQaqD80qHZQKxBA==} + engines: {node: '>=18.14.1'} + peerDependencies: + hono: ^4 + '@hono/prometheus@1.0.2': resolution: {integrity: sha512-7z2nBMaiHEaAFfNWfIV2H5/HRezv9kLH0jDY6ZotQQAr3QR7cIYAd6FGiyTIng4GUAw6ZWeX3C0Y4QS36SLRjg==} peerDependencies: @@ -9515,6 +9533,10 @@ packages: kuler@2.0.0: resolution: {integrity: sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==} + kysely@0.28.5: + resolution: {integrity: sha512-rlB0I/c6FBDWPcQoDtkxi9zIvpmnV5xoIalfCMSMCa7nuA6VGA3F54TW9mEgX4DVf10sXAWCF5fDbamI/5ZpKA==} + engines: {node: '>=20.0.0'} + lan-network@0.1.7: resolution: {integrity: sha512-mnIlAEMu4OyEvUNdzco9xpuB9YVcPkQec+QsgycBCtPZvEqWPCDPfbAE4OJMdBBWpZWtpCn1xw9jJYlwjWI5zQ==} hasBin: true @@ -17933,6 +17955,10 @@ snapshots: dependencies: '@hapi/hoek': 9.3.0 + '@hono/node-server@1.19.0(hono@4.7.11)': + dependencies: + hono: 4.7.11 + '@hono/prometheus@1.0.2(hono@4.7.11)(prom-client@15.1.3)': dependencies: hono: 4.7.11 @@ -22614,20 +22640,22 @@ snapshots: transitivePeerDependencies: - supports-color - drizzle-orm@0.33.0(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/react@19.1.8)(better-sqlite3@11.3.0)(react@19.1.0): + drizzle-orm@0.33.0(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/react@19.1.8)(better-sqlite3@11.3.0)(kysely@0.28.5)(react@19.1.0): optionalDependencies: '@opentelemetry/api': 1.9.0 '@types/better-sqlite3': 7.6.13 '@types/react': 19.1.8 better-sqlite3: 11.3.0 + kysely: 0.28.5 react: 19.1.0 - 
drizzle-orm@0.44.2(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(better-sqlite3@11.3.0)(gel@2.1.0): + drizzle-orm@0.44.2(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(better-sqlite3@11.3.0)(gel@2.1.0)(kysely@0.28.5): optionalDependencies: '@opentelemetry/api': 1.9.0 '@types/better-sqlite3': 7.6.13 better-sqlite3: 11.3.0 gel: 2.1.0 + kysely: 0.28.5 dts-resolver@2.1.1: {} @@ -24895,6 +24923,9 @@ snapshots: kuler@2.0.0: {} + kysely@0.28.5: + optional: true + lan-network@0.1.7: {} latest-version@7.0.0: @@ -25029,11 +25060,11 @@ snapshots: liquid-json@0.3.1: {} - liteque@0.5.0(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/react@19.1.8)(better-sqlite3@11.3.0)(react@19.1.0): + liteque@0.5.0(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/react@19.1.8)(better-sqlite3@11.3.0)(kysely@0.28.5)(react@19.1.0): dependencies: async-mutex: 0.4.1 better-sqlite3: 11.3.0 - drizzle-orm: 0.33.0(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/react@19.1.8)(better-sqlite3@11.3.0)(react@19.1.0) + drizzle-orm: 0.33.0(@opentelemetry/api@1.9.0)(@types/better-sqlite3@7.6.13)(@types/react@19.1.8)(better-sqlite3@11.3.0)(kysely@0.28.5)(react@19.1.0) zod: 3.24.2 transitivePeerDependencies: - '@aws-sdk/client-rds-data' |
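On the shutdown side, `exit.ts` replaces the `isShuttingDown` boolean with an exported `AbortController`, and the crawler worker now checks `exitAbortController.signal.aborted` before retrying or relaunching the browser. A sketch of how other retry or sleep loops in the workers process could consume the same signal; `delayUnlessShuttingDown` is an illustrative helper under that assumption, not something this diff adds:

```ts
import { exitAbortController } from "exit"; // apps/workers/exit.ts, as changed above

// Sketch: a delay that resolves early on SIGTERM, so a retry loop does not
// keep the process alive once exitAbortController.abort() has been called.
export function delayUnlessShuttingDown(ms: number): Promise<void> {
  return new Promise((resolve) => {
    if (exitAbortController.signal.aborted) {
      resolve();
      return;
    }
    const timer = setTimeout(onDone, ms);
    exitAbortController.signal.addEventListener("abort", onDone, { once: true });
    function onDone() {
      clearTimeout(timer);
      exitAbortController.signal.removeEventListener("abort", onDone);
      resolve();
    }
  });
}
```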
