about summary refs log tree commit diff stats
path: root/packages
diff options
context:
space:
mode:
author: rzxczxc <32613327+rzxczxc@users.noreply.github.com> 2025-12-27 12:24:52 +0300
committer: GitHub <noreply@github.com> 2025-12-27 09:24:52 +0000
commit: bb6b742a040a70478d276529774bde67b8f93648 (patch)
tree: 46c0c9c09a8a59b0e5655fa6446027c32ff1413d /packages
parent: e82694debf3fce51e2825221f13395ab4509945b (diff)
downloadkarakeep-bb6b742a040a70478d276529774bde67b8f93648.tar.zst
feat: add OPENAI_PROXY_URL configuration and support for proxy in OpenAI client (#2231)
* Add OPENAI_PROXY_URL configuration and support for proxy in OpenAIInferenceClient
* docs: add OPENAI_PROXY_URL configuration for proxy support in OpenAI API requests
* format
---------
Co-authored-by: Mohamed Bassem <me@mbassem.com>
Diffstat (limited to 'packages')
-rw-r--r--  packages/shared/config.ts     2
-rw-r--r--  packages/shared/inference.ts  10
2 files changed, 12 insertions, 0 deletions
diff --git a/packages/shared/config.ts b/packages/shared/config.ts
index 52dd2cf2..e956c0bc 100644
--- a/packages/shared/config.ts
+++ b/packages/shared/config.ts
@@ -58,6 +58,7 @@ const allEnv = z.object({
TURNSTILE_SECRET_KEY: z.string().optional(),
OPENAI_API_KEY: z.string().optional(),
OPENAI_BASE_URL: z.string().url().optional(),
+ OPENAI_PROXY_URL: z.string().url().optional(),
OLLAMA_BASE_URL: z.string().url().optional(),
OLLAMA_KEEP_ALIVE: z.string().optional(),
INFERENCE_JOB_TIMEOUT_SEC: z.coerce.number().default(30),
@@ -267,6 +268,7 @@ const serverConfigSchema = allEnv.transform((val, ctx) => {
fetchTimeoutSec: val.INFERENCE_FETCH_TIMEOUT_SEC,
openAIApiKey: val.OPENAI_API_KEY,
openAIBaseUrl: val.OPENAI_BASE_URL,
+ openAIProxyUrl: val.OPENAI_PROXY_URL,
ollamaBaseUrl: val.OLLAMA_BASE_URL,
ollamaKeepAlive: val.OLLAMA_KEEP_ALIVE,
textModel: val.INFERENCE_TEXT_MODEL,
diff --git a/packages/shared/inference.ts b/packages/shared/inference.ts
index 7689f4f4..fb9fce09 100644
--- a/packages/shared/inference.ts
+++ b/packages/shared/inference.ts
@@ -1,6 +1,7 @@
import { Ollama } from "ollama";
import OpenAI from "openai";
import { zodResponseFormat } from "openai/helpers/zod";
+import * as undici from "undici";
import { z } from "zod";
import { zodToJsonSchema } from "zod-to-json-schema";
@@ -68,9 +69,18 @@ class OpenAIInferenceClient implements InferenceClient {
openAI: OpenAI;
constructor() {
+ const fetchOptions = serverConfig.inference.openAIProxyUrl
+ ? {
+ dispatcher: new undici.ProxyAgent(
+ serverConfig.inference.openAIProxyUrl,
+ ),
+ }
+ : undefined;
+
this.openAI = new OpenAI({
apiKey: serverConfig.inference.openAIApiKey,
baseURL: serverConfig.inference.openAIBaseUrl,
+ ...(fetchOptions ? { fetchOptions } : {}),
defaultHeaders: {
"X-Title": "Karakeep",
"HTTP-Referer": "https://karakeep.app",