From 9986746aa890f2490ff18fd4fc79be4de0e4dbe2 Mon Sep 17 00:00:00 2001 From: MohamedBassem Date: Wed, 27 Mar 2024 16:30:27 +0000 Subject: fix: Attempt to increase the reliability of the ollama inference --- apps/workers/openaiWorker.ts | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) (limited to 'apps/workers/openaiWorker.ts') diff --git a/apps/workers/openaiWorker.ts b/apps/workers/openaiWorker.ts index b706fb90..9b2934e3 100644 --- a/apps/workers/openaiWorker.ts +++ b/apps/workers/openaiWorker.ts @@ -14,7 +14,7 @@ import { zOpenAIRequestSchema, } from "@hoarder/shared/queues"; -import { InferenceClientFactory, InferenceClient } from "./inference"; +import { InferenceClient, InferenceClientFactory } from "./inference"; const openAIResponseSchema = z.object({ tags: z.array(z.string()), @@ -36,7 +36,7 @@ async function attemptMarkTaggingStatus( }) .where(eq(bookmarks.id, request.bookmarkId)); } catch (e) { - console.log(`Something went wrong when marking the tagging status: ${e}`); + logger.error(`Something went wrong when marking the tagging status: ${e}`); } } @@ -196,8 +196,9 @@ async function inferTags( return tags; } catch (e) { + const responseSneak = response.response.substr(0, 20); throw new Error( - `[inference][${jobId}] Failed to parse JSON response from inference client: ${e}`, + `[inference][${jobId}] The model ignored our prompt and didn't respond with the expected JSON: ${JSON.stringify(e)}. Here's a sneak peek from the response: ${responseSneak}`, ); } } @@ -285,6 +286,10 @@ async function runOpenAI(job: Job) { ); } + logger.info( + `[inference][${jobId}] Starting an inference job for bookmark with id "${bookmark.id}"`, + ); + const tags = await inferTags(jobId, bookmark, inferenceClient); await connectTags(bookmarkId, tags, bookmark.userId); -- cgit v1.2.3-70-g09d2