From bb6b742a040a70478d276529774bde67b8f93648 Mon Sep 17 00:00:00 2001
From: rzxczxc <32613327+rzxczxc@users.noreply.github.com>
Date: Sat, 27 Dec 2025 12:24:52 +0300
Subject: feat: add OPENAI_PROXY_URL configuration and support for proxy in
 OpenAI client (#2231)

* Add OPENAI_PROXY_URL configuration and support for proxy in OpenAIInferenceClient

* docs: add OPENAI_PROXY_URL configuration for proxy support in OpenAI API requests

* format

---------

Co-authored-by: Mohamed Bassem
---
 packages/shared/inference.ts | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/packages/shared/inference.ts b/packages/shared/inference.ts
index 7689f4f4..fb9fce09 100644
--- a/packages/shared/inference.ts
+++ b/packages/shared/inference.ts
@@ -1,6 +1,7 @@
 import { Ollama } from "ollama";
 import OpenAI from "openai";
 import { zodResponseFormat } from "openai/helpers/zod";
+import * as undici from "undici";
 import { z } from "zod";
 import { zodToJsonSchema } from "zod-to-json-schema";
 
@@ -68,9 +69,18 @@ class OpenAIInferenceClient implements InferenceClient {
   openAI: OpenAI;
 
   constructor() {
+    const fetchOptions = serverConfig.inference.openAIProxyUrl
+      ? {
+          dispatcher: new undici.ProxyAgent(
+            serverConfig.inference.openAIProxyUrl,
+          ),
+        }
+      : undefined;
+
     this.openAI = new OpenAI({
       apiKey: serverConfig.inference.openAIApiKey,
       baseURL: serverConfig.inference.openAIBaseUrl,
+      ...(fetchOptions ? { fetchOptions } : {}),
       defaultHeaders: {
         "X-Title": "Karakeep",
         "HTTP-Referer": "https://karakeep.app",
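
For context: the diff references serverConfig.inference.openAIProxyUrl but does not show where that value is defined; per the commit message it is backed by a new OPENAI_PROXY_URL setting declared elsewhere in the repo. Below is a minimal, self-contained sketch of the same proxy wiring, assuming only the env-var name OPENAI_PROXY_URL from the commit message and reading settings straight from the environment in place of Karakeep's real serverConfig:

// Hypothetical, self-contained sketch -- not part of this diff. It mirrors the
// change above, but reads settings from the environment instead of going
// through Karakeep's serverConfig.
import OpenAI from "openai";
import * as undici from "undici";

// OPENAI_PROXY_URL is the new setting named in the commit message,
// e.g. "http://127.0.0.1:8080"; the other two already existed.
const openAIApiKey = process.env.OPENAI_API_KEY;
const openAIBaseUrl = process.env.OPENAI_BASE_URL;
const openAIProxyUrl = process.env.OPENAI_PROXY_URL;

// When a proxy URL is configured, route the client's HTTP traffic through it
// via undici's ProxyAgent; otherwise keep the default dispatcher.
const fetchOptions = openAIProxyUrl
  ? { dispatcher: new undici.ProxyAgent(openAIProxyUrl) }
  : undefined;

const openAI = new OpenAI({
  apiKey: openAIApiKey,
  baseURL: openAIBaseUrl,
  // fetchOptions is merged into the fetch() calls the SDK makes; undici
  // honors the non-standard `dispatcher` option, which is what makes the
  // proxy take effect.
  ...(fetchOptions ? { fetchOptions } : {}),
});

With this in place, setting OPENAI_PROXY_URL to an HTTP forward proxy (for example OPENAI_PROXY_URL=http://127.0.0.1:8080) routes the OpenAI API requests through that proxy, while leaving behavior unchanged when the variable is unset.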