path: root/tools/compare-models/src/bookmarkProcessor.ts
author     Mohamed Bassem <me@mbassem.com>  2025-12-26 11:14:17 +0000
committer  Mohamed Bassem <me@mbassem.com>  2025-12-26 11:14:17 +0000
commit     1dfa5d12f6af6ca964bdfa911809a061ffdf36c2 (patch)
tree       87c734eaa5395051a0a46972ca575f2866c73dd5 /tools/compare-models/src/bookmarkProcessor.ts
parent     ecb7a710ca7ec22aa3304b8d1f6b603bb60874bc (diff)
download   karakeep-1dfa5d12f6af6ca964bdfa911809a061ffdf36c2.tar.zst
chore: add a tool for comparing perf of different models
Diffstat (limited to 'tools/compare-models/src/bookmarkProcessor.ts')
-rw-r--r--  tools/compare-models/src/bookmarkProcessor.ts  56
1 file changed, 56 insertions, 0 deletions
diff --git a/tools/compare-models/src/bookmarkProcessor.ts b/tools/compare-models/src/bookmarkProcessor.ts
new file mode 100644
index 00000000..910957fe
--- /dev/null
+++ b/tools/compare-models/src/bookmarkProcessor.ts
@@ -0,0 +1,56 @@
+import type { InferenceClient } from "./inferenceClient";
+import type { Bookmark } from "./types";
+
+export async function extractBookmarkContent(
+ bookmark: Bookmark,
+): Promise<string> {
+ if (bookmark.content.type === "link") {
+ const parts = [];
+
+ if (bookmark.content.url) {
+ parts.push(`URL: ${bookmark.content.url}`);
+ }
+
+ if (bookmark.title) {
+ parts.push(`Title: ${bookmark.title}`);
+ }
+
+ if (bookmark.content.description) {
+ parts.push(`Description: ${bookmark.content.description}`);
+ }
+
+ if (bookmark.content.htmlContent) {
+ parts.push(`Content: ${bookmark.content.htmlContent}`);
+ }
+
+ return parts.join("\n");
+ }
+
+ if (bookmark.content.type === "text" && bookmark.content.text) {
+ return bookmark.content.text;
+ }
+
+ return "";
+}
+
+export async function runTaggingForModel(
+ bookmark: Bookmark,
+ model: string,
+ inferenceClient: InferenceClient,
+ lang: string = "english",
+): Promise<string[]> {
+ const content = await extractBookmarkContent(bookmark);
+
+ if (!content) {
+ return [];
+ }
+
+ try {
+ const tags = await inferenceClient.inferTags(content, model, lang, []);
+ return tags;
+ } catch (error) {
+ throw new Error(
+ `Failed to generate tags with ${model}: ${error instanceof Error ? error.message : String(error)}`,
+ );
+ }
+}
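
For context, here is a minimal sketch of how the comparison tool might drive these two helpers. The placeholder model names, the way the InferenceClient instance is obtained, and the compareTagsAcrossModels wrapper are assumptions for illustration; the actual types.ts and inferenceClient.ts are not part of this diff.

import type { InferenceClient } from "./inferenceClient";
import type { Bookmark } from "./types";
import { runTaggingForModel } from "./bookmarkProcessor";

// Assumption: the client is constructed elsewhere in the tool; only its type is known here.
declare const inferenceClient: InferenceClient;

async function compareTagsAcrossModels(bookmark: Bookmark): Promise<void> {
  // Placeholder model identifiers; the real tool presumably takes these from its config or CLI.
  const models = ["model-a", "model-b"];

  for (const model of models) {
    // runTaggingForModel extracts the bookmark's text and asks the given model for tags.
    const tags = await runTaggingForModel(bookmark, model, inferenceClient);
    console.log(`${model}: ${tags.join(", ")}`);
  }
}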