about summary refs log tree commit diff stats
path: root/apps/workers/openaiWorker.ts
diff options
context:
space:
mode:
Diffstat (limited to 'apps/workers/openaiWorker.ts')
-rw-r--r--apps/workers/openaiWorker.ts11
1 file changed, 8 insertions, 3 deletions
diff --git a/apps/workers/openaiWorker.ts b/apps/workers/openaiWorker.ts
index b706fb90..9b2934e3 100644
--- a/apps/workers/openaiWorker.ts
+++ b/apps/workers/openaiWorker.ts
@@ -14,7 +14,7 @@ import {
zOpenAIRequestSchema,
} from "@hoarder/shared/queues";
-import { InferenceClientFactory, InferenceClient } from "./inference";
+import { InferenceClient, InferenceClientFactory } from "./inference";
const openAIResponseSchema = z.object({
tags: z.array(z.string()),
@@ -36,7 +36,7 @@ async function attemptMarkTaggingStatus(
})
.where(eq(bookmarks.id, request.bookmarkId));
} catch (e) {
- console.log(`Something went wrong when marking the tagging status: ${e}`);
+ logger.error(`Something went wrong when marking the tagging status: ${e}`);
}
}
@@ -196,8 +196,9 @@ async function inferTags(
return tags;
} catch (e) {
+ const responseSneak = response.response.substr(0, 20);
throw new Error(
- `[inference][${jobId}] Failed to parse JSON response from inference client: ${e}`,
+ `[inference][${jobId}] The model ignored our prompt and didn't respond with the expected JSON: ${JSON.stringify(e)}. Here's a sneak peak from the response: ${responseSneak}`,
);
}
}
@@ -285,6 +286,10 @@ async function runOpenAI(job: Job<ZOpenAIRequest, void>) {
);
}
+ logger.info(
+ `[inference][${jobId}] Starting an inference job for bookmark with id "${bookmark.id}"`,
+ );
+
const tags = await inferTags(jobId, bookmark, inferenceClient);
await connectTags(bookmarkId, tags, bookmark.userId);