| author | Gavin Mogan <github@gavinmogan.com> | 2025-02-16 09:10:52 -0800 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2025-02-16 17:10:52 +0000 |
| commit | 0621cd920a6461b46778fc5dfc1b02014c494517 (patch) | |
| tree | db31239c2d403fe097fa988b842cfe2a70d45b96 /packages/shared/inference.ts | |
| parent | 602e938d32ae2920e7928377cde11eee4de921dd (diff) | |
| download | karakeep-0621cd920a6461b46778fc5dfc1b02014c494517.tar.zst | |
fix: custom fetch wrapper for ollama inference. Fixes #656 (#1032)
* Add configurable fetch timeout for Ollama client
* Worker service needs access to the .env file
* Repair TypeScript types
* Update customFetch.ts
* Update the config docs
---------
Co-authored-by: sbarbett <shane@barbetta.me>
Co-authored-by: Mohamed Bassem <me@mbassem.com>
Diffstat (limited to 'packages/shared/inference.ts')
| -rw-r--r-- | packages/shared/inference.ts | 2 |
1 file changed, 2 insertions, 0 deletions
```diff
diff --git a/packages/shared/inference.ts b/packages/shared/inference.ts
index e5ddf5ca..92d9dd94 100644
--- a/packages/shared/inference.ts
+++ b/packages/shared/inference.ts
@@ -2,6 +2,7 @@ import { Ollama } from "ollama";
 import OpenAI from "openai";
 
 import serverConfig from "./config";
+import { customFetch } from "./customFetch";
 import logger from "./logger";
 
 export interface InferenceResponse {
@@ -153,6 +154,7 @@ class OllamaInferenceClient implements InferenceClient {
   constructor() {
     this.ollama = new Ollama({
       host: serverConfig.inference.ollamaBaseUrl,
+      fetch: customFetch, // Use the custom fetch with configurable timeout
     });
   }
 
```
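The `customFetch.ts` module it imports is not part of this diff (the diffstat is limited to `inference.ts`), but based on the commit message ("Add configurable fetch timeout for Ollama client") it is presumably a thin wrapper around the global `fetch` that aborts requests after a configurable duration. A minimal sketch of such a wrapper follows; the env var name `INFERENCE_FETCH_TIMEOUT_SEC` and its default are illustrative assumptions, and the real implementation may read its value from `serverConfig` instead:

```ts
// customFetch.ts (illustrative sketch, not the actual file from the PR)

// Hypothetical config source: a timeout in seconds taken from the environment.
const timeoutMs =
  Number(process.env.INFERENCE_FETCH_TIMEOUT_SEC ?? "300") * 1000;

// A drop-in replacement for fetch that aborts slow requests.
export const customFetch: typeof fetch = (input, init) =>
  fetch(input, {
    ...init,
    // Abort the request once the configured timeout elapses.
    signal: AbortSignal.timeout(timeoutMs),
  });
```

Passing a wrapper like this through the `fetch` option of the `Ollama` constructor, as the diff above does, makes every request issued by the Ollama client honor the configured timeout, which matters for long-running inference on slow local models.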
