author    Mohamed Bassem <me@mbassem.com>  2025-12-29 23:35:28 +0000
committer Mohamed Bassem <me@mbassem.com>  2025-12-29 23:38:21 +0000
commit    f00287ede0675521c783c1199675538571f977d6 (patch)
tree      2d04b983fa514f4c62a3695c0a521fb50de24eef /tools/compare-models/src/bookmarkProcessor.ts
parent    ba8d84a555f9e6cf209c826b97a124f0539739eb (diff)
refactor: reduce duplication in compare-models tool
Diffstat (limited to 'tools/compare-models/src/bookmarkProcessor.ts')
-rw-r--r--  tools/compare-models/src/bookmarkProcessor.ts  20
1 file changed, 16 insertions(+), 4 deletions(-)
diff --git a/tools/compare-models/src/bookmarkProcessor.ts b/tools/compare-models/src/bookmarkProcessor.ts
index 910957fe..21280b97 100644
--- a/tools/compare-models/src/bookmarkProcessor.ts
+++ b/tools/compare-models/src/bookmarkProcessor.ts
@@ -1,4 +1,7 @@
-import type { InferenceClient } from "./inferenceClient";
+import type { InferenceClient } from "@karakeep/shared/inference";
+import { buildTextPrompt } from "@karakeep/shared/prompts";
+
+import { inferTags } from "./inferenceClient";
 import type { Bookmark } from "./types";
 
 export async function extractBookmarkContent(
@@ -35,9 +38,9 @@ export async function extractBookmarkContent(
 
 export async function runTaggingForModel(
   bookmark: Bookmark,
-  model: string,
   inferenceClient: InferenceClient,
   lang: string = "english",
+  contextLength: number = 8000,
 ): Promise<string[]> {
   const content = await extractBookmarkContent(bookmark);
 
@@ -46,11 +49,20 @@
   }
 
   try {
-    const tags = await inferenceClient.inferTags(content, model, lang, []);
+    // Use the shared prompt builder with empty custom prompts and default tag style
+    const prompt = await buildTextPrompt(
+      lang,
+      [], // No custom prompts for comparison tool
+      content,
+      contextLength,
+      "as-generated", // Use tags as generated by the model
+    );
+
+    const tags = await inferTags(inferenceClient, prompt);
     return tags;
   } catch (error) {
     throw new Error(
-      `Failed to generate tags with ${model}: ${error instanceof Error ? error.message : String(error)}`,
+      `Failed to generate tags: ${error instanceof Error ? error.message : String(error)}`,
     );
   }
 }
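
For context, the diff changes the shape of `runTaggingForModel`: callers no longer pass a model name (the model is implied by the `InferenceClient`), and an optional `contextLength` controls prompt truncation via the shared `buildTextPrompt` helper. The sketch below shows how a call site might look after this change; it is a minimal illustration assuming the function is imported from `./bookmarkProcessor`, and the helper name `tagWithClient` is hypothetical, not part of this commit.

```ts
import type { InferenceClient } from "@karakeep/shared/inference";

import { runTaggingForModel } from "./bookmarkProcessor";
import type { Bookmark } from "./types";

// Hypothetical call site: the client already knows which model it talks to,
// so only the bookmark, client, language, and context length are passed.
async function tagWithClient(
  bookmark: Bookmark,
  client: InferenceClient,
): Promise<string[]> {
  // lang defaults to "english" and contextLength to 8000 if omitted.
  return runTaggingForModel(bookmark, client, "english", 8000);
}
```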