diff options
| author | MohamedBassem <me@mbassem.com> | 2024-03-30 16:26:16 +0000 |
|---|---|---|
| committer | MohamedBassem <me@mbassem.com> | 2024-03-30 16:26:16 +0000 |
| commit | 46b78eaac30be26fe40520e97786563344af8403 (patch) | |
| tree | c4c0e1ae1d3d21a6f1fbf5f44f68e99243bbb5d3 /apps/workers/inference.ts | |
| parent | 853ed13450b3a0d92cba144cc0dfd0696e7c810c (diff) | |
| download | karakeep-46b78eaac30be26fe40520e97786563344af8403.tar.zst | |
format: Add missing lint and format, and format the entire repo
Diffstat (limited to 'apps/workers/inference.ts')
| -rw-r--r-- | apps/workers/inference.ts | 10 |
1 file changed, 8 insertions, 2 deletions
diff --git a/apps/workers/inference.ts b/apps/workers/inference.ts index 3b0b5943..13b10aba 100644 --- a/apps/workers/inference.ts +++ b/apps/workers/inference.ts @@ -124,7 +124,9 @@ class OllamaInferenceClient implements InferenceClient { // Using stream + accumulating the response so far is a workaround. // https://github.com/ollama/ollama-js/issues/72 totalTokens = NaN; - logger.warn(`Got an exception from ollama, will still attempt to deserialize the response we got so far: ${e}`) + logger.warn( + `Got an exception from ollama, will still attempt to deserialize the response we got so far: ${e}`, + ); } return { response, totalTokens }; @@ -139,6 +141,10 @@ class OllamaInferenceClient implements InferenceClient { _contentType: string, image: string, ): Promise<InferenceResponse> { - return await this.runModel(serverConfig.inference.imageModel, prompt, image); + return await this.runModel( + serverConfig.inference.imageModel, + prompt, + image, + ); } } |
