Lower max tokens for anthropic (#367)

Will Chen
2025-06-09 13:44:13 -07:00
committed by GitHub
parent 1a39238ecf
commit ddce3c65d3


@@ -58,7 +58,8 @@ export const MODEL_OPTIONS: Record<string, ModelOption[]> = {
       name: "claude-sonnet-4-20250514",
       displayName: "Claude 4 Sonnet",
       description: "Excellent coder",
-      maxOutputTokens: 64_000,
+      // See comment below for Claude 3.7 Sonnet for why we set this to 16k
+      maxOutputTokens: 16_000,
       contextWindow: 200_000,
       supportsTurboEdits: true,
     },
@@ -66,7 +67,11 @@ export const MODEL_OPTIONS: Record<string, ModelOption[]> = {
       name: "claude-3-7-sonnet-latest",
       displayName: "Claude 3.7 Sonnet",
       description: "Excellent coder",
-      maxOutputTokens: 64_000,
+      // Technically the max output tokens is 64k, *however* if the user has a lot of input tokens,
+      // then setting a high max output token will cause the request to fail because
+      // the max output tokens is *included* in the context window limit, see:
+      // https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking#max-tokens-and-context-window-size-with-extended-thinking
+      maxOutputTokens: 16_000,
       contextWindow: 200_000,
       supportsTurboEdits: true,
     },
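
The rationale in the new comment is that Anthropic counts max output tokens against the 200k context window, so a prompt that is already large can make a request fail if max_tokens is set to the model's theoretical 64k ceiling. A minimal sketch of how a caller could clamp the output budget to whatever remains of the window; the clampMaxOutputTokens and estimatedInputTokens names are hypothetical and not part of this repo:

// Hypothetical helper: keep (input tokens + max output tokens) within the
// model's context window, since Anthropic includes max_tokens in that limit.
interface ModelLimits {
  maxOutputTokens: number; // e.g. 16_000 after this commit
  contextWindow: number;   // e.g. 200_000 for the Claude Sonnet models
}

function clampMaxOutputTokens(
  limits: ModelLimits,
  estimatedInputTokens: number,
): number {
  // Tokens left in the window once the prompt is accounted for.
  const remaining = limits.contextWindow - estimatedInputTokens;
  // Never ask for more than the configured ceiling, and never a negative budget.
  return Math.max(0, Math.min(limits.maxOutputTokens, remaining));
}

// Usage: a 190k-token prompt against a 200k window leaves only 10k for output,
// so the request should ask for 10_000 rather than the configured 16_000.
const budget = clampMaxOutputTokens(
  { maxOutputTokens: 16_000, contextWindow: 200_000 },
  190_000,
);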