diff --git a/.env.example b/.env.example
index 15978eeb9..d5cfc5cb3 100644
--- a/.env.example
+++ b/.env.example
@@ -29,6 +29,12 @@ GOOGLE_GENERATIVE_AI_API_KEY=
# EXAMPLE http://localhost:11434
OLLAMA_API_BASE_URL=
+# You only need this environment variable set if you want to use OpenAI Like models
+# (set it to the service's OpenAI-compatible base URL, e.g. https://api.together.xyz/v1)
+OPENAI_LIKE_API_BASE_URL=
+
+# Get your OpenAI Like API Key
+OPENAI_LIKE_API_KEY=
+
# Get your Mistral API Key by following these instructions -
# https://console.mistral.ai/api-keys/
# You only need this environment variable set if you want to use Mistral models
diff --git a/app/components/chat/BaseChat.tsx b/app/components/chat/BaseChat.tsx
index b7421349e..c1175f700 100644
--- a/app/components/chat/BaseChat.tsx
+++ b/app/components/chat/BaseChat.tsx
@@ -28,7 +28,7 @@ const ModelSelector = ({ model, setModel, modelList, providerList }) => {
const [provider, setProvider] = useState(DEFAULT_PROVIDER);
return (
- {
setProvider(e.target.value);
@@ -42,9 +42,12 @@ const ModelSelector = ({ model, setModel, modelList, providerList }) => {
{provider}
))}
-
- Ollama
-
+
+ Ollama
+
+
+ OpenAILike
+
(
);
},
-);
\ No newline at end of file
+);
diff --git a/app/lib/.server/llm/api-key.ts b/app/lib/.server/llm/api-key.ts
index bbf5e5b87..a1fd3a022 100644
--- a/app/lib/.server/llm/api-key.ts
+++ b/app/lib/.server/llm/api-key.ts
@@ -22,7 +22,18 @@ export function getAPIKey(cloudflareEnv: Env, provider: string) {
case 'Deepseek':
return env.DEEPSEEK_API_KEY || cloudflareEnv.DEEPSEEK_API_KEY
case 'Mistral':
- return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY;
+ return env.MISTRAL_API_KEY || cloudflareEnv.MISTRAL_API_KEY;
+    case 'OpenAILike':
+ return env.OPENAI_LIKE_API_KEY || cloudflareEnv.OPENAI_LIKE_API_KEY;
+ default:
+ return "";
+ }
+}
+
+export function getBaseURL(cloudflareEnv: Env, provider: string) {
+ switch (provider) {
+ case 'OpenAILike':
+ return env.OPENAI_LIKE_API_BASE_URL || cloudflareEnv.OPENAI_LIKE_API_BASE_URL;
default:
return "";
}
diff --git a/app/lib/.server/llm/model.ts b/app/lib/.server/llm/model.ts
index 3cf80487b..9c792893a 100644
--- a/app/lib/.server/llm/model.ts
+++ b/app/lib/.server/llm/model.ts
@@ -1,6 +1,6 @@
// @ts-nocheck
// Preventing TS checks with files presented in the video for a better presentation.
-import { getAPIKey } from '~/lib/.server/llm/api-key';
+import { getAPIKey, getBaseURL } from '~/lib/.server/llm/api-key';
import { createAnthropic } from '@ai-sdk/anthropic';
import { createOpenAI } from '@ai-sdk/openai';
import { createGoogleGenerativeAI } from '@ai-sdk/google';
@@ -16,7 +16,14 @@ export function getAnthropicModel(apiKey: string, model: string) {
return anthropic(model);
}
+export function getOpenAILikeModel(baseURL: string, apiKey: string, model: string) {
+ const openai = createOpenAI({
+ baseURL,
+ apiKey,
+ });
+ return openai(model);
+}
export function getOpenAIModel(apiKey: string, model: string) {
const openai = createOpenAI({
apiKey,
@@ -72,7 +79,7 @@ export function getOpenRouterModel(apiKey: string, model: string) {
export function getModel(provider: string, model: string, env: Env) {
const apiKey = getAPIKey(env, provider);
-
+ const baseURL = getBaseURL(env, provider);
switch (provider) {
case 'Anthropic':
@@ -85,6 +92,8 @@ export function getModel(provider: string, model: string, env: Env) {
return getOpenRouterModel(apiKey, model);
case 'Google':
return getGoogleModel(apiKey, model)
+ case 'OpenAILike':
+      return getOpenAILikeModel(baseURL, apiKey, model);
case 'Deepseek':
return getDeepseekModel(apiKey, model)
case 'Mistral':
diff --git a/app/utils/constants.ts b/app/utils/constants.ts
index b4227c0bf..cb638ba32 100644
--- a/app/utils/constants.ts
+++ b/app/utils/constants.ts
@@ -49,7 +49,9 @@ export let MODEL_LIST: ModelInfo[] = [...staticModels];
async function getOllamaModels(): Promise {
try {
- const response = await fetch(`http://localhost:11434/api/tags`);
+ const base_url = import.meta.env.OLLAMA_API_BASE_URL || "http://localhost:11434";
+    const url = new URL(base_url).toString().replace(/\/+$/, '');
+ const response = await fetch(`${url}/api/tags`);
const data = await response.json() as OllamaApiResponse;
return data.models.map((model: OllamaModel) => ({
@@ -62,9 +64,36 @@ async function getOllamaModels(): Promise {
}
}
+async function getOpenAILikeModels(): Promise<ModelInfo[]> {
+
+ try {
+    const base_url = import.meta.env.OPENAI_LIKE_API_BASE_URL || "";
+ if (!base_url) {
+ return [];
+ }
+    const url = new URL(base_url).toString().replace(/\/+$/, '');
+ const api_key = import.meta.env.OPENAI_LIKE_API_KEY ?? "";
+ const response = await fetch(`${url}/models`, {
+ headers: {
+ Authorization: `Bearer ${api_key}`,
+ }
+ });
+    const res = await response.json() as { data: Array<{ id: string }> };
+    return res.data.map((model) => ({
+ name: model.id,
+ label: model.id,
+ provider: 'OpenAILike',
+ }));
+  } catch (e) {
+    return [];
+ }
+
+}
async function initializeModelList(): Promise {
const ollamaModels = await getOllamaModels();
- MODEL_LIST = [...ollamaModels, ...staticModels];
+ const openAiLikeModels = await getOpenAILikeModels();
+
+  MODEL_LIST = [...ollamaModels, ...openAiLikeModels, ...staticModels];
}
initializeModelList().then();
export { getOllamaModels, initializeModelList };
diff --git a/vite.config.ts b/vite.config.ts
index 1c5a0a6ac..625390702 100644
--- a/vite.config.ts
+++ b/vite.config.ts
@@ -27,6 +27,7 @@ export default defineConfig((config) => {
chrome129IssuePlugin(),
config.mode === 'production' && optimizeCssModules({ apply: 'build' }),
],
+    envPrefix: ['VITE_', 'OPENAI_LIKE_API_', 'OLLAMA_API_BASE_URL'],
css: {
preprocessorOptions: {
scss: {
diff --git a/worker-configuration.d.ts b/worker-configuration.d.ts
index 2f7f64234..82961ecd6 100644
--- a/worker-configuration.d.ts
+++ b/worker-configuration.d.ts
@@ -4,5 +4,7 @@ interface Env {
GROQ_API_KEY: string;
OPEN_ROUTER_API_KEY: string;
OLLAMA_API_BASE_URL: string;
+ OPENAI_LIKE_API_KEY: string;
+ OPENAI_LIKE_API_BASE_URL: string;
DEEPSEEK_API_KEY: string;
}