diff --git a/packages/opencode/src/provider/transform.ts b/packages/opencode/src/provider/transform.ts index c983bf32c4f..02fdffa00d4 100644 --- a/packages/opencode/src/provider/transform.ts +++ b/packages/opencode/src/provider/transform.ts @@ -248,6 +248,8 @@ export namespace ProviderTransform { model.providerID === "anthropic" || model.api.id.includes("anthropic") || model.api.id.includes("claude") || + model.id.includes("anthropic") || + model.id.includes("claude") || model.api.npm === "@ai-sdk/anthropic" ) { msgs = applyCaching(msgs, model.providerID) diff --git a/packages/opencode/test/provider/transform.test.ts b/packages/opencode/test/provider/transform.test.ts index 2b8f1872f56..6ebac7d28b9 100644 --- a/packages/opencode/test/provider/transform.test.ts +++ b/packages/opencode/test/provider/transform.test.ts @@ -996,6 +996,41 @@ describe("ProviderTransform.message - providerOptions key remapping", () => { }) }) +describe("ProviderTransform.message - claude w/bedrock custom inference profile", () => { + test("adds cachePoint", () => { + const model = { + id: "amazon-bedrock/custom-claude-sonnet-4.5", + providerID: "amazon-bedrock", + api: { + id: "arn:aws:bedrock:xxx:yyy:application-inference-profile/zzz", + url: "https://api.test.com", + npm: "@ai-sdk/amazon-bedrock", + }, + name: "Custom inference profile", + capabilities: {}, + options: {}, + headers: {}, + } as any + + const msgs = [ + { + role: "user", + content: "Hello", + }, + ] as any[] + + const result = ProviderTransform.message(msgs, model, {}) + + expect(result[0].providerOptions?.bedrock).toEqual( + expect.objectContaining({ + cachePoint: { + type: "ephemeral", + }, + }), + ) + }) +}) + describe("ProviderTransform.variants", () => { const createMockModel = (overrides: Partial = {}): any => ({ id: "test/test-model", diff --git a/packages/web/src/content/docs/providers.mdx b/packages/web/src/content/docs/providers.mdx index a5ae45815e5..8e1d0b4ed5c 100644 --- 
a/packages/web/src/content/docs/providers.mdx +++ b/packages/web/src/content/docs/providers.mdx @@ -229,6 +229,27 @@ To use Amazon Bedrock with OpenCode: /models ``` +:::note +For custom inference profiles, include `anthropic` or `claude` in the model key and set the `id` property to the ARN. This ensures prompt caching is applied correctly: + +```json title="opencode.json" +{ + "$schema": "https://opencode.ai/config.json", + "provider": { + "amazon-bedrock": { + // ... + "models": { + "anthropic-claude-sonnet-4.5": { + "id": "arn:aws:bedrock:us-east-1:xxx:application-inference-profile/yyy" + } + } + } + } +} +``` + +::: + --- ### Anthropic