diff options
| author | MohamedBassem <me@mbassem.com> | 2024-10-12 17:25:01 +0000 |
|---|---|---|
| committer | MohamedBassem <me@mbassem.com> | 2024-10-12 17:37:42 +0000 |
| commit | 1b09682685f54f29957163be9b9f9fc2de3b49cc (patch) | |
| tree | 7f10a7635cf984acd45147c24ec3e1d35798e8ba /apps/workers/openaiWorker.ts | |
| parent | c16173ea0fdbf6cc47b13756c0a77e8399669055 (diff) | |
| download | karakeep-1b09682685f54f29957163be9b9f9fc2de3b49cc.tar.zst | |
feature: Allow customizing the inference's context length
Diffstat (limited to 'apps/workers/openaiWorker.ts')
| -rw-r--r-- | apps/workers/openaiWorker.ts | 16 |
1 file changed, 7 insertions, 9 deletions
diff --git a/apps/workers/openaiWorker.ts b/apps/workers/openaiWorker.ts index 6c6104f3..d51771b2 100644 --- a/apps/workers/openaiWorker.ts +++ b/apps/workers/openaiWorker.ts @@ -23,7 +23,7 @@ import { import type { InferenceClient } from "./inference"; import { InferenceClientFactory } from "./inference"; -import { readPDFText, truncateContent } from "./utils"; +import { readPDFText } from "./utils"; const openAIResponseSchema = z.object({ tags: z.array(z.string()), @@ -102,10 +102,7 @@ async function buildPrompt( ); } - let content = bookmark.link.content; - if (content) { - content = truncateContent(content); - } + const content = bookmark.link.content; return buildTextPrompt( serverConfig.inference.inferredTagLang, prompts, @@ -113,16 +110,16 @@ async function buildPrompt( Title: ${bookmark.link.title ?? ""} Description: ${bookmark.link.description ?? ""} Content: ${content ?? ""}`, + serverConfig.inference.contextLength, ); } if (bookmark.text) { - const content = truncateContent(bookmark.text.text ?? ""); - // TODO: Ensure that the content doesn't exceed the context length of openai return buildTextPrompt( serverConfig.inference.inferredTagLang, prompts, - content, + bookmark.text.text ?? "", + serverConfig.inference.contextLength, ); } @@ -215,7 +212,8 @@ async function inferTagsFromPDF( const prompt = buildTextPrompt( serverConfig.inference.inferredTagLang, await fetchCustomPrompts(bookmark.userId, "text"), - `Content: ${truncateContent(pdfParse.text)}`, + `Content: ${pdfParse.text}`, + serverConfig.inference.contextLength, ); return inferenceClient.inferFromText(prompt); } |
