Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
19 commits
Select commit Hold shift + click to select a range
34c3ee4
Add LLM Gateway provider
smakosh Jan 11, 2026
ed1ee14
feat(llmgateway): add comprehensive model definitions
smakosh Feb 5, 2026
0d8f6d7
chore: update llmgateway provider docs
smakosh Feb 5, 2026
588a0e4
Merge parent/dev into feat/llmgateway-provider
smakosh Mar 17, 2026
91124c5
Merge branch 'dev' into feat/llmgateway-provider
smakosh Mar 17, 2026
64c4a2e
feat: filter llmgateway models to coding-only and add X-Source header
smakosh Mar 23, 2026
2846dd9
feat: include all tool-calling text models from llmgateway API
smakosh Mar 23, 2026
d815e2d
Merge parent/dev into feat/llmgateway-provider
smakosh Mar 23, 2026
6770c16
refactor: remove hardcoded llmgateway model list in favor of models.dev
smakosh Mar 23, 2026
eda1b69
Merge remote-tracking branch 'parent/dev' into feat/llmgateway-provider
smakosh Mar 26, 2026
bcf1d3f
Merge branch 'dev' into feat/llmgateway-provider
smakosh Mar 26, 2026
b05bcc2
refactor: address PR review feedback for llmgateway provider
smakosh Mar 26, 2026
b448db8
Merge branch 'dev' into feat/llmgateway-provider
smakosh Mar 27, 2026
6aa43ad
Merge branch 'dev' into feat/llmgateway-provider
smakosh Mar 27, 2026
f3b825e
Merge branch 'dev' into feat/llmgateway-provider
smakosh Mar 27, 2026
8b42079
merge: resolve conflict with parent/dev in provider.ts
smakosh Mar 27, 2026
57c364e
Merge branch 'dev' into feat/llmgateway-provider
smakosh Mar 27, 2026
62ad05d
Merge branch 'dev' into feat/llmgateway-provider
smakosh Mar 27, 2026
e1cf762
Merge branch 'dev' into feat/llmgateway-provider
smakosh Mar 28, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions bun.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions packages/opencode/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,7 @@
"@effect/platform-node": "catalog:",
"@hono/standard-validator": "0.1.5",
"@hono/zod-validator": "catalog:",
"@llmgateway/ai-sdk-provider": "2.5.1",
"@modelcontextprotocol/sdk": "1.27.1",
"@octokit/graphql": "9.0.2",
"@octokit/rest": "catalog:",
Expand Down
23 changes: 23 additions & 0 deletions packages/opencode/src/provider/provider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ import {
isWorkflowModel,
discoverWorkflowModels,
} from "gitlab-ai-provider"
import { createLLMGateway } from "@llmgateway/ai-sdk-provider"
import { fromNodeProviderChain } from "@aws-sdk/credential-providers"
import { GoogleAuth } from "google-auth-library"
import { ProviderTransform } from "./transform"
Expand Down Expand Up @@ -114,6 +115,15 @@ export namespace Provider {
languageModel(modelId: string): LanguageModelV3
}

// @llmgateway/ai-sdk-provider's return type doesn't currently satisfy BundledSDK,
// so we wrap it with an adapter that delegates to the underlying provider.
const createLLMGatewayAdapter: (options: any) => BundledSDK = (options) => {
  // Instantiate the real provider once, then expose only the surface BundledSDK needs.
  const provider: any = createLLMGateway(options)
  return {
    languageModel(modelId: string) {
      return provider.languageModel(modelId)
    },
  }
}

const BUNDLED_PROVIDERS: Record<string, (options: any) => BundledSDK> = {
"@ai-sdk/amazon-bedrock": createAmazonBedrock,
"@ai-sdk/anthropic": createAnthropic,
Expand All @@ -124,6 +134,7 @@ export namespace Provider {
"@ai-sdk/openai": createOpenAI,
"@ai-sdk/openai-compatible": createOpenAICompatible,
"@openrouter/ai-sdk-provider": createOpenRouter,
"@llmgateway/ai-sdk-provider": createLLMGatewayAdapter,
"@ai-sdk/xai": createXai,
"@ai-sdk/mistral": createMistral,
"@ai-sdk/groq": createGroq,
Expand Down Expand Up @@ -415,6 +426,18 @@ export namespace Provider {
},
}
},
// Default config for the LLM Gateway provider: never autoloaded, and every
// request carries attribution headers identifying opencode as the source.
llmgateway: async () => ({
  autoload: false,
  options: {
    headers: {
      "HTTP-Referer": "https://opencode.ai/",
      "X-Title": "opencode",
      "X-Source": "opencode",
    },
  },
}),
vercel: async () => {
return {
autoload: false,
Expand Down
4 changes: 2 additions & 2 deletions packages/opencode/src/provider/transform.ts
Original file line number Diff line number Diff line change
Expand Up @@ -758,7 +758,7 @@ export namespace ProviderTransform {
result["store"] = false
}

if (input.model.api.npm === "@openrouter/ai-sdk-provider") {
if (input.model.api.npm === "@openrouter/ai-sdk-provider" || input.model.api.npm === "@llmgateway/ai-sdk-provider") {
result["usage"] = {
include: true,
}
Expand Down Expand Up @@ -883,7 +883,7 @@ export namespace ProviderTransform {
}
return { thinkingConfig: { thinkingBudget: 0 } }
}
if (model.providerID === "openrouter") {
if (model.providerID === "openrouter" || model.providerID === "llmgateway") {
if (model.api.id.includes("google")) {
return { reasoning: { enabled: false } }
}
Expand Down
1 change: 1 addition & 0 deletions packages/opencode/test/preload.ts
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@ delete process.env["AWS_PROFILE"]
delete process.env["AWS_REGION"]
delete process.env["AWS_BEARER_TOKEN_BEDROCK"]
delete process.env["OPENROUTER_API_KEY"]
delete process.env["LLM_GATEWAY_API_KEY"]
delete process.env["GROQ_API_KEY"]
delete process.env["MISTRAL_API_KEY"]
delete process.env["PERPLEXITY_API_KEY"]
Expand Down
1 change: 1 addition & 0 deletions packages/ui/src/components/provider-icons/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ export const iconNames = [
"perplexity",
"ovhcloud",
"openrouter",
"llmgateway",
"opencode",
"opencode-go",
"openai",
Expand Down
68 changes: 68 additions & 0 deletions packages/web/src/content/docs/providers.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -1533,6 +1533,74 @@ OpenCode Zen is a list of tested and verified models provided by the OpenCode te

---

### LLM Gateway

1. Head over to the [LLM Gateway dashboard](https://llmgateway.io/dashboard), click **Create API Key**, and copy the key.

2. Run the `/connect` command and search for LLM Gateway.

```txt
/connect
```

3. Enter the API key for the provider.

```txt
┌ API key
└ enter
```

4. Many LLM Gateway models are preloaded by default. Run the `/models` command to select the one you want.

```txt
/models
```

You can also add additional models through your opencode config.

```json title="opencode.json" {6}
{
"$schema": "https://opencode.ai/config.json",
"provider": {
"llmgateway": {
"models": {
"somecoolnewmodel": {}
}
}
}
}
```

5. You can also customize these models through your opencode config. Here's an example of customizing the display names of several models:

```json title="opencode.json"
{
"$schema": "https://opencode.ai/config.json",
"provider": {
"llmgateway": {
"models": {
"glm-4.7": {
"name": "GLM 4.7"
},
"gpt-5.2": {
"name": "GPT-5.2"
},
"gemini-2.5-pro": {
"name": "Gemini 2.5 Pro"
},
"claude-3-5-sonnet-20241022": {
"name": "Claude 3.5 Sonnet"
}
}
}
}
}
```

---

### SAP AI Core

SAP AI Core provides access to 40+ models from OpenAI, Anthropic, Google, Amazon, Meta, Mistral, and AI21 through a unified platform.
Expand Down
Loading