diff --git a/bun.lock b/bun.lock index 5021c1d9138..5a64e8f0fa3 100644 --- a/bun.lock +++ b/bun.lock @@ -329,6 +329,7 @@ "@effect/platform-node": "catalog:", "@hono/standard-validator": "0.1.5", "@hono/zod-validator": "catalog:", + "@llmgateway/ai-sdk-provider": "2.5.1", "@modelcontextprotocol/sdk": "1.27.1", "@octokit/graphql": "9.0.2", "@octokit/rest": "catalog:", @@ -1325,6 +1326,8 @@ "@leichtgewicht/ip-codec": ["@leichtgewicht/ip-codec@2.0.5", "", {}, "sha512-Vo+PSpZG2/fmgmiNzYK9qWRh8h/CHrwD0mo1h1DzL4yzHNSfWYujGTYsWGreD000gcgmZ7K4Ys6Tx9TxtsKdDw=="], + "@llmgateway/ai-sdk-provider": ["@llmgateway/ai-sdk-provider@2.5.1", "", { "peerDependencies": { "ai": "^5.0.0-beta.12", "zod": "^3.24.1 || ^v4" } }, "sha512-UlmqsLr4Vsgd+mbEULFRyMStRS5DEd4SMUVU6Iu+n+uauKryJXilU/BXbAjHF7y8f595xHcYnpmPUocofN8xvQ=="], + "@lukeed/ms": ["@lukeed/ms@2.0.2", "", {}, "sha512-9I2Zn6+NJLfaGoz9jN3lpwDgAYvfGeNYdbAIjJOqzs4Tpc+VU3Jqq4IofSUBKajiDS8k9fZIg18/z13mpk1bsA=="], "@malept/cross-spawn-promise": ["@malept/cross-spawn-promise@2.0.0", "", { "dependencies": { "cross-spawn": "^7.0.1" } }, "sha512-1DpKU0Z5ThltBwjNySMC14g0CkbyhCaz9FkhxqNsZI6uAPJXFS8cMXlBKo26FJ8ZuW6S9GCMcR9IO5k2X5/9Fg=="], diff --git a/packages/opencode/package.json b/packages/opencode/package.json index dd6a0497e16..f7438d78366 100644 --- a/packages/opencode/package.json +++ b/packages/opencode/package.json @@ -93,6 +93,7 @@ "@effect/platform-node": "catalog:", "@hono/standard-validator": "0.1.5", "@hono/zod-validator": "catalog:", + "@llmgateway/ai-sdk-provider": "2.5.1", "@modelcontextprotocol/sdk": "1.27.1", "@octokit/graphql": "9.0.2", "@octokit/rest": "catalog:", diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts index 7fb3166284b..08e109e5e6a 100644 --- a/packages/opencode/src/provider/provider.ts +++ b/packages/opencode/src/provider/provider.ts @@ -47,6 +47,7 @@ import { isWorkflowModel, discoverWorkflowModels, } from "gitlab-ai-provider" +import { createLLMGateway } 
from "@llmgateway/ai-sdk-provider" import { fromNodeProviderChain } from "@aws-sdk/credential-providers" import { GoogleAuth } from "google-auth-library" import { ProviderTransform } from "./transform" @@ -114,6 +115,15 @@ export namespace Provider { languageModel(modelId: string): LanguageModelV3 } + // @llmgateway/ai-sdk-provider's return type doesn't currently satisfy BundledSDK, + // so we wrap it with an adapter that delegates to the underlying provider. + const createLLMGatewayAdapter: (options: any) => BundledSDK = (options) => { + const llmgw: any = createLLMGateway(options) + return { + languageModel: (modelId: string) => llmgw.languageModel(modelId), + } + } + + const BUNDLED_PROVIDERS: Record<string, (options: any) => BundledSDK> = { "@ai-sdk/amazon-bedrock": createAmazonBedrock, "@ai-sdk/anthropic": createAnthropic, @@ -124,6 +134,7 @@ "@ai-sdk/openai": createOpenAI, "@ai-sdk/openai-compatible": createOpenAICompatible, "@openrouter/ai-sdk-provider": createOpenRouter, + "@llmgateway/ai-sdk-provider": createLLMGatewayAdapter, "@ai-sdk/xai": createXai, "@ai-sdk/mistral": createMistral, "@ai-sdk/groq": createGroq, @@ -415,6 +426,18 @@ export namespace Provider { }, } }, + llmgateway: async () => { + return { + autoload: false, + options: { + headers: { + "HTTP-Referer": "https://opencode.ai/", + "X-Title": "opencode", + "X-Source": "opencode", + }, + }, + } + }, vercel: async () => { return { autoload: false, diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts index f651a5b91aa..42787016330 100644 --- a/packages/opencode/src/provider/transform.ts +++ b/packages/opencode/src/provider/transform.ts @@ -758,7 +758,7 @@ export namespace ProviderTransform { result["store"] = false } - if (input.model.api.npm === "@openrouter/ai-sdk-provider") { + if (input.model.api.npm === "@openrouter/ai-sdk-provider" || input.model.api.npm === "@llmgateway/ai-sdk-provider") { result["usage"] = { include: true, } @@ -883,7 
+883,7 @@ export namespace ProviderTransform { } return { thinkingConfig: { thinkingBudget: 0 } } } - if (model.providerID === "openrouter") { + if (model.providerID === "openrouter" || model.providerID === "llmgateway") { if (model.api.id.includes("google")) { return { reasoning: { enabled: false } } } diff --git a/packages/opencode/test/preload.ts b/packages/opencode/test/preload.ts index 0ddc797faf7..60b25b2302f 100644 --- a/packages/opencode/test/preload.ts +++ b/packages/opencode/test/preload.ts @@ -62,6 +62,7 @@ delete process.env["AWS_PROFILE"] delete process.env["AWS_REGION"] delete process.env["AWS_BEARER_TOKEN_BEDROCK"] delete process.env["OPENROUTER_API_KEY"] +delete process.env["LLM_GATEWAY_API_KEY"] delete process.env["GROQ_API_KEY"] delete process.env["MISTRAL_API_KEY"] delete process.env["PERPLEXITY_API_KEY"] diff --git a/packages/ui/src/components/provider-icons/types.ts b/packages/ui/src/components/provider-icons/types.ts index f9ddfdf0e92..5a97287509e 100644 --- a/packages/ui/src/components/provider-icons/types.ts +++ b/packages/ui/src/components/provider-icons/types.ts @@ -32,6 +32,7 @@ export const iconNames = [ "perplexity", "ovhcloud", "openrouter", + "llmgateway", "opencode", "opencode-go", "openai", diff --git a/packages/web/src/content/docs/providers.mdx b/packages/web/src/content/docs/providers.mdx index b14c8ab10a8..fad794bec23 100644 --- a/packages/web/src/content/docs/providers.mdx +++ b/packages/web/src/content/docs/providers.mdx @@ -1533,6 +1533,74 @@ OpenCode Zen is a list of tested and verified models provided by the OpenCode te --- +### LLM Gateway + +1. Head over to the [LLM Gateway dashboard](https://llmgateway.io/dashboard), click **Create API Key**, and copy the key. + +2. Run the `/connect` command and search for LLM Gateway. + + ```txt + /connect + ``` + +3. Enter the API key for the provider. + + ```txt + ┌ API key + │ + │ + └ enter + ``` + +4. 
Many LLM Gateway models are preloaded by default, run the `/models` command to select the one you want. + + ```txt + /models + ``` + + You can also add additional models through your opencode config. + + ```json title="opencode.json" {6} + { + "$schema": "https://opencode.ai/config.json", + "provider": { + "llmgateway": { + "models": { + "somecoolnewmodel": {} + } + } + } + } + ``` + +5. You can also customize them through your opencode config. Here's an example of specifying a provider + + ```json title="opencode.json" + { + "$schema": "https://opencode.ai/config.json", + "provider": { + "llmgateway": { + "models": { + "glm-4.7": { + "name": "GLM 4.7" + }, + "gpt-5.2": { + "name": "GPT-5.2" + }, + "gemini-2.5-pro": { + "name": "Gemini 2.5 Pro" + }, + "claude-3-5-sonnet-20241022": { + "name": "Claude 3.5 Sonnet" + } + } + } + } + } + ``` + +--- + ### SAP AI Core SAP AI Core provides access to 40+ models from OpenAI, Anthropic, Google, Amazon, Meta, Mistral, and AI21 through a unified platform.