From bb6b742a040a70478d276529774bde67b8f93648 Mon Sep 17 00:00:00 2001
From: rzxczxc <32613327+rzxczxc@users.noreply.github.com>
Date: Sat, 27 Dec 2025 12:24:52 +0300
Subject: feat: add OPENAI_PROXY_URL configuration and support for proxy in
 OpenAI client (#2231)

* Add OPENAI_PROXY_URL configuration and support for proxy in OpenAIInferenceClient

* docs: add OPENAI_PROXY_URL configuration for proxy support in OpenAI API requests

* format

---------

Co-authored-by: Mohamed Bassem
---
 packages/shared/config.ts    |  2 ++
 packages/shared/inference.ts | 10 ++++++++++
 2 files changed, 12 insertions(+)

(limited to 'packages/shared')

diff --git a/packages/shared/config.ts b/packages/shared/config.ts
index 52dd2cf2..e956c0bc 100644
--- a/packages/shared/config.ts
+++ b/packages/shared/config.ts
@@ -58,6 +58,7 @@ const allEnv = z.object({
   TURNSTILE_SECRET_KEY: z.string().optional(),
   OPENAI_API_KEY: z.string().optional(),
   OPENAI_BASE_URL: z.string().url().optional(),
+  OPENAI_PROXY_URL: z.string().url().optional(),
   OLLAMA_BASE_URL: z.string().url().optional(),
   OLLAMA_KEEP_ALIVE: z.string().optional(),
   INFERENCE_JOB_TIMEOUT_SEC: z.coerce.number().default(30),
@@ -267,6 +268,7 @@ const serverConfigSchema = allEnv.transform((val, ctx) => {
     fetchTimeoutSec: val.INFERENCE_FETCH_TIMEOUT_SEC,
     openAIApiKey: val.OPENAI_API_KEY,
     openAIBaseUrl: val.OPENAI_BASE_URL,
+    openAIProxyUrl: val.OPENAI_PROXY_URL,
     ollamaBaseUrl: val.OLLAMA_BASE_URL,
     ollamaKeepAlive: val.OLLAMA_KEEP_ALIVE,
     textModel: val.INFERENCE_TEXT_MODEL,
diff --git a/packages/shared/inference.ts b/packages/shared/inference.ts
index 7689f4f4..fb9fce09 100644
--- a/packages/shared/inference.ts
+++ b/packages/shared/inference.ts
@@ -1,6 +1,7 @@
 import { Ollama } from "ollama";
 import OpenAI from "openai";
 import { zodResponseFormat } from "openai/helpers/zod";
+import * as undici from "undici";
 import { z } from "zod";
 import { zodToJsonSchema } from "zod-to-json-schema";
 
@@ -68,9 +69,18 @@ class OpenAIInferenceClient implements InferenceClient {
   openAI: OpenAI;
 
   constructor() {
+    const fetchOptions = serverConfig.inference.openAIProxyUrl
+      ? {
+          dispatcher: new undici.ProxyAgent(
+            serverConfig.inference.openAIProxyUrl,
+          ),
+        }
+      : undefined;
+
     this.openAI = new OpenAI({
       apiKey: serverConfig.inference.openAIApiKey,
       baseURL: serverConfig.inference.openAIBaseUrl,
+      ...(fetchOptions ? { fetchOptions } : {}),
       defaultHeaders: {
         "X-Title": "Karakeep",
         "HTTP-Referer": "https://karakeep.app",
--
cgit v1.2.3-70-g09d2
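
For context, a minimal standalone sketch of what the patched constructor does, using the same "openai" and "undici" packages as the diff; the environment variable values shown are illustrative, not part of the patch:

// sketch.ts — illustrative reproduction of the proxy wiring above.
// Assumes OPENAI_API_KEY and optionally OPENAI_PROXY_URL are set, e.g.
//   OPENAI_PROXY_URL=http://127.0.0.1:8080
import OpenAI from "openai";
import { ProxyAgent } from "undici";

const proxyUrl = process.env.OPENAI_PROXY_URL;

// When a proxy URL is configured, an undici ProxyAgent is handed to the
// OpenAI client as the fetch dispatcher, so every API request is tunneled
// through the proxy; otherwise the client connects directly.
const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
  baseURL: process.env.OPENAI_BASE_URL,
  ...(proxyUrl
    ? { fetchOptions: { dispatcher: new ProxyAgent(proxyUrl) } }
    : {}),
});

Note the conditional spread: when no proxy is configured, no fetchOptions key is passed at all, which leaves the SDK's default fetch behavior untouched instead of overriding it with an undefined dispatcher.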