From 70d572096706cb2d7f5d3f29b51e6ba1d86578c6 Mon Sep 17 00:00:00 2001
From: Mohamed Bassem
Date: Sun, 18 May 2025 13:43:48 +0000
Subject: fix: Use proper tokenizer when truncating for inference.

Fixes #1405
---
 packages/shared/prompts.ts | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

(limited to 'packages/shared/prompts.ts')

diff --git a/packages/shared/prompts.ts b/packages/shared/prompts.ts
index 40e706b2..b7957dd4 100644
--- a/packages/shared/prompts.ts
+++ b/packages/shared/prompts.ts
@@ -1,15 +1,15 @@
-// TODO: Use a proper tokenizer
+import { getEncoding } from "js-tiktoken";
+
+const encoding = getEncoding("o200k_base");
+
 function calculateNumTokens(text: string) {
-  return text.split(" ").length;
+  return encoding.encode(text).length;
 }
 
 function truncateContent(content: string, length: number) {
-  let words = content.split(" ");
-  if (words.length > length) {
-    words = words.slice(0, length);
-    content = words.join(" ");
-  }
-  return content;
+  const tokens = encoding.encode(content);
+  const truncatedTokens = tokens.slice(0, length);
+  return encoding.decode(truncatedTokens);
 }
 
 export function buildImagePrompt(lang: string, customPrompts: string[]) {
-- 
cgit v1.2.3-70-g09d2
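
For reference, a small self-contained sketch of the before/after behavior of the
patched helpers. It uses the same js-tiktoken calls as the diff above; the
MAX_TOKENS budget and the sample string are invented for illustration and are
not part of the actual codebase.

// sketch.ts -- illustrative only; run with js-tiktoken installed.
import { getEncoding } from "js-tiktoken";

const encoding = getEncoding("o200k_base");

// Hypothetical token budget for an inference prompt.
const MAX_TOKENS = 8;

const content =
  "Tokenizers split text into subword units, not whitespace-separated words.";

// Old behavior: content.split(" ").length counted whitespace-separated words,
// which can badly underestimate the token count for long or unusual words.
console.log("words :", content.split(" ").length);

// New behavior: encode with the model's tokenizer and count actual tokens.
const tokens = encoding.encode(content);
console.log("tokens:", tokens.length);

// Truncation now slices the token array and decodes back to a string, so the
// result is bounded by the token budget the model actually sees.
const truncated = encoding.decode(tokens.slice(0, MAX_TOKENS));
console.log("truncated:", truncated);

The trade-off is that decoding a truncated token slice can cut off mid-word,
but unlike the old word-count heuristic, it guarantees the truncated content
stays within the requested token budget.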