Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 11 additions & 9 deletions providers/anthropic/anthropic.go
Original file line number Diff line number Diff line change
Expand Up @@ -263,17 +263,19 @@ func (a languageModel) prepareParams(call fantasy.Call) (*anthropic.MessageNewPa
params.TopP = param.NewOpt(*call.TopP)
}

isThinking := false
var thinkingBudget int64
if providerOptions.Thinking != nil {
isThinking = true
thinkingBudget = providerOptions.Thinking.BudgetTokens
}
if isThinking {
if thinkingBudget == 0 {
switch {
case providerOptions.Effort != nil:
effort := *providerOptions.Effort
params.OutputConfig = anthropic.OutputConfigParam{
Effort: anthropic.OutputConfigEffort(effort),
}
adaptive := anthropic.NewThinkingConfigAdaptiveParam()
params.Thinking.OfAdaptive = &adaptive
case providerOptions.Thinking != nil:
if providerOptions.Thinking.BudgetTokens == 0 {
return nil, nil, &fantasy.Error{Title: "no budget", Message: "thinking requires budget"}
}
params.Thinking = anthropic.ThinkingConfigParamOfEnabled(thinkingBudget)
params.Thinking = anthropic.ThinkingConfigParamOfEnabled(providerOptions.Thinking.BudgetTokens)
if call.Temperature != nil {
params.Temperature = param.Opt[float64]{}
warnings = append(warnings, fantasy.CallWarning{
Expand Down
226 changes: 226 additions & 0 deletions providers/anthropic/anthropic_test.go
Original file line number Diff line number Diff line change
@@ -1,8 +1,14 @@
package anthropic

import (
"context"
"encoding/json"
"errors"
"fmt"
"net/http"
"net/http/httptest"
"testing"
"time"

"charm.land/fantasy"
"github.com/stretchr/testify/require"
Expand Down Expand Up @@ -401,3 +407,223 @@ func TestParseContextTooLargeError(t *testing.T) {
})
}
}

// TestParseOptions_Effort verifies that ParseOptions decodes the
// send_reasoning, thinking, effort, and disable_parallel_tool_use keys
// into the corresponding ProviderOptions fields.
func TestParseOptions_Effort(t *testing.T) {
	t.Parallel()

	raw := map[string]any{
		"send_reasoning":            true,
		"thinking":                  map[string]any{"budget_tokens": int64(2048)},
		"effort":                    "medium",
		"disable_parallel_tool_use": true,
	}

	parsed, err := ParseOptions(raw)
	require.NoError(t, err)

	require.NotNil(t, parsed.SendReasoning)
	require.True(t, *parsed.SendReasoning)

	require.NotNil(t, parsed.Thinking)
	require.Equal(t, int64(2048), parsed.Thinking.BudgetTokens)

	require.NotNil(t, parsed.Effort)
	require.Equal(t, EffortMedium, *parsed.Effort)

	require.NotNil(t, parsed.DisableParallelToolUse)
	require.True(t, *parsed.DisableParallelToolUse)
}

// TestGenerate_SendsOutputConfigEffort checks that a Generate call with an
// Effort provider option produces a POST /v1/messages request whose body
// carries output_config.effort and adaptive thinking.
func TestGenerate_SendsOutputConfigEffort(t *testing.T) {
	t.Parallel()

	server, calls := newAnthropicJSONServer(mockAnthropicGenerateResponse())
	defer server.Close()

	provider, err := New(WithAPIKey("test-api-key"), WithBaseURL(server.URL))
	require.NoError(t, err)

	model, err := provider.LanguageModel(context.Background(), "claude-sonnet-4-20250514")
	require.NoError(t, err)

	level := EffortMedium
	_, err = model.Generate(context.Background(), fantasy.Call{
		Prompt: testPrompt(),
		ProviderOptions: NewProviderOptions(&ProviderOptions{
			Effort: &level,
		}),
	})
	require.NoError(t, err)

	got := awaitAnthropicCall(t, calls)
	require.Equal(t, http.MethodPost, got.method)
	require.Equal(t, "/v1/messages", got.path)
	requireAnthropicEffort(t, got.body, EffortMedium)
}

// TestStream_SendsOutputConfigEffort checks that a streaming call with an
// Effort provider option produces a POST /v1/messages request whose body
// carries output_config.effort and adaptive thinking.
func TestStream_SendsOutputConfigEffort(t *testing.T) {
	t.Parallel()

	events := []string{
		"event: message_start\n",
		"data: {\"type\":\"message_start\",\"message\":{}}\n\n",
		"event: message_stop\n",
		"data: {\"type\":\"message_stop\"}\n\n",
	}
	server, calls := newAnthropicStreamingServer(events)
	defer server.Close()

	provider, err := New(WithAPIKey("test-api-key"), WithBaseURL(server.URL))
	require.NoError(t, err)

	model, err := provider.LanguageModel(context.Background(), "claude-sonnet-4-20250514")
	require.NoError(t, err)

	level := EffortHigh
	stream, err := model.Stream(context.Background(), fantasy.Call{
		Prompt: testPrompt(),
		ProviderOptions: NewProviderOptions(&ProviderOptions{
			Effort: &level,
		}),
	})
	require.NoError(t, err)

	// Drain the stream so the request is fully issued.
	stream(func(fantasy.StreamPart) bool { return true })

	got := awaitAnthropicCall(t, calls)
	require.Equal(t, http.MethodPost, got.method)
	require.Equal(t, "/v1/messages", got.path)
	requireAnthropicEffort(t, got.body, EffortHigh)
}

// anthropicCall captures one HTTP request received by a test server:
// the HTTP method, the URL path, and the JSON-decoded request body.
type anthropicCall struct {
	method string
	path   string
	body   map[string]any
}

// newAnthropicJSONServer starts a test HTTP server that records every
// incoming request on the returned channel (buffered to 4) and replies
// with the given map encoded as a JSON body.
func newAnthropicJSONServer(response map[string]any) (*httptest.Server, <-chan anthropicCall) {
	recorded := make(chan anthropicCall, 4)

	handler := func(w http.ResponseWriter, r *http.Request) {
		var payload map[string]any
		if r.Body != nil {
			// Best-effort decode; tests inspect whatever was parseable.
			_ = json.NewDecoder(r.Body).Decode(&payload)
		}

		recorded <- anthropicCall{
			method: r.Method,
			path:   r.URL.Path,
			body:   payload,
		}

		w.Header().Set("Content-Type", "application/json")
		_ = json.NewEncoder(w).Encode(response)
	}

	return httptest.NewServer(http.HandlerFunc(handler)), recorded
}

// newAnthropicStreamingServer starts a test HTTP server that records every
// incoming request on the returned channel (buffered to 4) and replies with
// the given SSE chunks, flushing after each one when the writer supports it.
func newAnthropicStreamingServer(chunks []string) (*httptest.Server, <-chan anthropicCall) {
	recorded := make(chan anthropicCall, 4)

	handler := func(w http.ResponseWriter, r *http.Request) {
		var payload map[string]any
		if r.Body != nil {
			// Best-effort decode; tests inspect whatever was parseable.
			_ = json.NewDecoder(r.Body).Decode(&payload)
		}

		recorded <- anthropicCall{
			method: r.Method,
			path:   r.URL.Path,
			body:   payload,
		}

		w.Header().Set("Content-Type", "text/event-stream")
		w.Header().Set("Cache-Control", "no-cache")
		w.Header().Set("Connection", "keep-alive")
		w.WriteHeader(http.StatusOK)

		// The type assertion is loop-invariant; hoist it out.
		flusher, canFlush := w.(http.Flusher)
		for _, chunk := range chunks {
			_, _ = fmt.Fprint(w, chunk)
			if canFlush {
				flusher.Flush()
			}
		}
	}

	return httptest.NewServer(http.HandlerFunc(handler)), recorded
}

// awaitAnthropicCall returns the next recorded request, failing the test
// if none arrives within two seconds.
func awaitAnthropicCall(t *testing.T, calls <-chan anthropicCall) anthropicCall {
	t.Helper()

	deadline := time.NewTimer(2 * time.Second)
	defer deadline.Stop()

	select {
	case got := <-calls:
		return got
	case <-deadline.C:
		t.Fatal("timed out waiting for Anthropic request")
	}
	return anthropicCall{} // unreachable; t.Fatal stops the test
}

// assertNoAnthropicCall fails the test if any request is recorded within a
// short (200ms) grace period.
func assertNoAnthropicCall(t *testing.T, calls <-chan anthropicCall) {
	t.Helper()

	quiet := time.NewTimer(200 * time.Millisecond)
	defer quiet.Stop()

	select {
	case got := <-calls:
		t.Fatalf("expected no Anthropic API call, but got %s %s", got.method, got.path)
	case <-quiet.C:
		// No request observed: success.
	}
}

// requireAnthropicEffort asserts that the captured request body sets
// output_config.effort to the expected level and enables adaptive thinking.
//
// Bug fix: the original reused `ok` across two type assertions, so the
// result of the output_config assertion was overwritten before it was ever
// checked — a missing or malformed output_config went undetected (a nil-map
// lookup silently yields nil). Each assertion is now checked independently.
func requireAnthropicEffort(t *testing.T, body map[string]any, expected Effort) {
	t.Helper()

	outputConfig, ok := body["output_config"].(map[string]any)
	require.True(t, ok, "request body missing output_config object")
	require.Equal(t, string(expected), outputConfig["effort"])

	thinking, ok := body["thinking"].(map[string]any)
	require.True(t, ok, "request body missing thinking object")
	require.Equal(t, "adaptive", thinking["type"])
}

// testPrompt returns a minimal single-turn user prompt ("Hello") for use
// in request-shape tests.
func testPrompt() fantasy.Prompt {
	return fantasy.Prompt{
		{
			Role: fantasy.MessageRoleUser,
			Content: []fantasy.MessagePart{
				fantasy.TextPart{Text: "Hello"},
			},
		},
	}
}

// mockAnthropicGenerateResponse returns a canned Messages API response body
// (a single text block ending with end_turn) for the JSON test server.
// The usage sub-object mirrors the fields the real API reports so response
// parsing exercises the full shape — values are arbitrary but stable.
func mockAnthropicGenerateResponse() map[string]any {
	return map[string]any{
		"id":    "msg_01Test",
		"type":  "message",
		"role":  "assistant",
		"model": "claude-sonnet-4-20250514",
		"content": []any{
			map[string]any{
				"type": "text",
				"text": "Hi there",
			},
		},
		"stop_reason":   "end_turn",
		"stop_sequence": "",
		"usage": map[string]any{
			"cache_creation": map[string]any{
				"ephemeral_1h_input_tokens": 0,
				"ephemeral_5m_input_tokens": 0,
			},
			"cache_creation_input_tokens": 0,
			"cache_read_input_tokens":     0,
			"input_tokens":                5,
			"output_tokens":               2,
			"server_tool_use": map[string]any{
				"web_search_requests": 0,
			},
			"service_tier": "standard",
		},
	}
}
17 changes: 17 additions & 0 deletions providers/anthropic/provider_options.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,22 @@ import (
"charm.land/fantasy"
)

// Effort represents the output effort level for Anthropic models.
//
// This maps to Messages API `output_config.effort`. The declared levels are
// "low", "medium", "high", and "max".
type Effort string

const (
	// EffortLow represents low output effort.
	EffortLow Effort = "low"
	// EffortMedium represents medium output effort.
	EffortMedium Effort = "medium"
	// EffortHigh represents high output effort.
	EffortHigh Effort = "high"
	// EffortMax represents maximum output effort.
	EffortMax Effort = "max"
)

// Global type identifiers for Anthropic-specific provider data.
const (
TypeProviderOptions = Name + ".options"
Expand Down Expand Up @@ -43,6 +59,7 @@ func init() {
// ProviderOptions holds Anthropic-specific options attached to a call.
type ProviderOptions struct {
	// SendReasoning — presumably controls whether reasoning content is
	// forwarded with the request; TODO confirm against prepareParams.
	SendReasoning *bool `json:"send_reasoning"`
	// Thinking enables extended thinking with an explicit token budget.
	Thinking *ThinkingProviderOption `json:"thinking"`
	// Effort selects the output effort level (maps to the Messages API
	// `output_config.effort`); when set it takes precedence over Thinking.
	Effort *Effort `json:"effort"`
	// DisableParallelToolUse — presumably disables parallel tool calls
	// when true; TODO confirm against request construction.
	DisableParallelToolUse *bool `json:"disable_parallel_tool_use"`
}

Expand Down
Loading