Improve model picker UX (#1180)

1. Show less common (secondary) AI providers in a submenu
2. Show $ signs as a rough cost guide
3. Show "Pro" for supported AI providers when Pro is enabled

    
<!-- This is an auto-generated description by cubic. -->
---

## Summary by cubic
Improves the Model Picker UX by grouping less-used providers under an
“Other AI providers” submenu and adding clear cost and Pro indicators.
This makes picking models faster and more informative.

- **New Features**
  - Grouped secondary providers under “Other AI providers” using a new `provider.secondary` flag (Azure marked secondary).
  - Added rough cost hints: models can set `dollarSigns` and the UI shows a “$” badge accordingly.
  - Added a “Pro” badge on supported cloud providers when Pro is enabled, plus a “Custom” badge for custom providers.
  - Extended types: `LanguageModelProvider.secondary` and `LanguageModel.dollarSigns`; populated values across OpenAI, Anthropic, Google, and OpenRouter.

<!-- End of auto-generated description by cubic. -->
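
The picker component itself is not shown in the hunks below, but the badges described above can be derived from the new fields roughly as follows. This is a sketch only: `isProEnabled`, both helper names, and the import path are assumptions; only `dollarSigns`, `secondary`, and the provider `type` field come from this commit.

```ts
// Sketch, not the actual picker code from this PR.
import type { LanguageModel, LanguageModelProvider } from "./schemas"; // path assumed

// Rough cost guide: dollarSigns: 3 -> "$$$"
export function costBadge(model: LanguageModel): string | undefined {
  return model.dollarSigns ? "$".repeat(model.dollarSigns) : undefined;
}

// "Pro" on supported cloud providers when Pro is enabled; "Custom" for custom providers.
export function providerBadge(
  provider: LanguageModelProvider,
  isProEnabled: boolean, // assumed flag; Pro gating is not defined in this diff
): string | undefined {
  if (provider.type === "custom") return "Custom";
  if (provider.type === "cloud" && isProEnabled) return "Pro";
  return undefined;
}
```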
Author: Will Chen
Date: 2025-09-03 15:36:54 -07:00
Committed by: GitHub
Commit: 2842c61f7c (parent: b5b637f73b)
6 changed files with 153 additions and 43 deletions


@@ -173,6 +173,7 @@ export interface LanguageModelProvider {
hasFreeTier?: boolean;
websiteUrl?: string;
gatewayPrefix?: string;
+secondary?: boolean;
envVarName?: string;
apiBaseUrl?: string;
type: "custom" | "local" | "cloud";
@@ -188,6 +189,7 @@ export type LanguageModel =
maxOutputTokens?: number;
contextWindow?: number;
temperature?: number;
+dollarSigns?: number;
type: "custom";
}
| {
@@ -198,6 +200,7 @@ export type LanguageModel =
maxOutputTokens?: number;
contextWindow?: number;
temperature?: number;
+dollarSigns?: number;
type: "local" | "cloud";
};
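
For reference, values using the fields added above might look roughly like this. The literals are illustrative only (not entries added by this commit), the import path is a placeholder, and `Partial<>` keeps the example self-contained because required fields defined outside these hunks are not shown.

```ts
import type { LanguageModel, LanguageModelProvider } from "./schemas"; // path assumed

// A cloud provider tucked under the "Other AI providers" submenu.
const secondaryProvider: Partial<LanguageModelProvider> = {
  gatewayPrefix: "",
  secondary: true,
  type: "cloud",
};

// A cloud model whose rough cost would render as a "$$$" badge.
const pricedModel: Partial<LanguageModel> = {
  contextWindow: 400_000,
  temperature: 1,
  dollarSigns: 3,
  type: "cloud",
};
```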


@@ -15,6 +15,7 @@ export interface ModelOption {
name: string;
displayName: string;
description: string;
+dollarSigns?: number;
temperature?: number;
tag?: string;
maxOutputTokens?: number;
@@ -33,6 +34,7 @@ export const MODEL_OPTIONS: Record<string, ModelOption[]> = {
contextWindow: 400_000,
// Requires temperature to be default value (1)
temperature: 1,
+dollarSigns: 3,
},
// https://platform.openai.com/docs/models/gpt-5-mini
{
@@ -44,6 +46,7 @@ export const MODEL_OPTIONS: Record<string, ModelOption[]> = {
contextWindow: 400_000,
// Requires temperature to be default value (1)
temperature: 1,
+dollarSigns: 2,
},
// https://platform.openai.com/docs/models/gpt-5-nano
{
@@ -55,34 +58,7 @@ export const MODEL_OPTIONS: Record<string, ModelOption[]> = {
contextWindow: 400_000,
// Requires temperature to be default value (1)
temperature: 1,
-},
-// https://platform.openai.com/docs/models/gpt-4.1
-{
-name: "gpt-4.1",
-displayName: "GPT 4.1",
-description: "OpenAI's flagship model",
-maxOutputTokens: 32_768,
-contextWindow: 1_047_576,
-temperature: 0,
-},
-// https://platform.openai.com/docs/models/gpt-4.1-mini
-{
-name: "gpt-4.1-mini",
-displayName: "GPT 4.1 Mini",
-description: "OpenAI's lightweight, but intelligent model",
-maxOutputTokens: 32_768,
-contextWindow: 1_047_576,
-temperature: 0,
-},
-// https://platform.openai.com/docs/models/o3-mini
-{
-name: "o3-mini",
-displayName: "o3 mini",
-description: "Reasoning model",
-// See o4-mini comment below for why we set this to 32k
-maxOutputTokens: 32_000,
-contextWindow: 200_000,
-temperature: 0,
+dollarSigns: 1,
},
// https://platform.openai.com/docs/models/o4-mini
{
@@ -95,6 +71,7 @@ export const MODEL_OPTIONS: Record<string, ModelOption[]> = {
maxOutputTokens: 32_000,
contextWindow: 200_000,
temperature: 0,
+dollarSigns: 2,
},
],
// https://docs.anthropic.com/en/docs/about-claude/models/all-models#model-comparison-table
@@ -107,6 +84,7 @@ export const MODEL_OPTIONS: Record<string, ModelOption[]> = {
maxOutputTokens: 16_000,
contextWindow: 200_000,
temperature: 0,
+dollarSigns: 4,
},
{
name: "claude-3-7-sonnet-latest",
@@ -119,6 +97,7 @@ export const MODEL_OPTIONS: Record<string, ModelOption[]> = {
maxOutputTokens: 16_000,
contextWindow: 200_000,
temperature: 0,
+dollarSigns: 4,
},
{
name: "claude-3-5-sonnet-20241022",
@@ -127,6 +106,7 @@ export const MODEL_OPTIONS: Record<string, ModelOption[]> = {
maxOutputTokens: 8_000,
contextWindow: 200_000,
temperature: 0,
+dollarSigns: 4,
},
{
name: "claude-3-5-haiku-20241022",
@@ -135,6 +115,7 @@ export const MODEL_OPTIONS: Record<string, ModelOption[]> = {
maxOutputTokens: 8_000,
contextWindow: 200_000,
temperature: 0,
+dollarSigns: 2,
},
],
google: [
@@ -148,6 +129,7 @@ export const MODEL_OPTIONS: Record<string, ModelOption[]> = {
// Gemini context window = input token + output token
contextWindow: 1_048_576,
temperature: 0,
+dollarSigns: 3,
},
// https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview
{
@@ -159,6 +141,7 @@ export const MODEL_OPTIONS: Record<string, ModelOption[]> = {
// Gemini context window = input token + output token
contextWindow: 1_048_576,
temperature: 0,
+dollarSigns: 2,
},
],
openrouter: [
@@ -169,6 +152,7 @@ export const MODEL_OPTIONS: Record<string, ModelOption[]> = {
maxOutputTokens: 32_000,
contextWindow: 262_000,
temperature: 0,
+dollarSigns: 2,
},
// https://openrouter.ai/deepseek/deepseek-chat-v3-0324:free
{
@@ -178,6 +162,7 @@ export const MODEL_OPTIONS: Record<string, ModelOption[]> = {
maxOutputTokens: 32_000,
contextWindow: 128_000,
temperature: 0,
+dollarSigns: 2,
},
// https://openrouter.ai/moonshotai/kimi-k2
{
@@ -187,6 +172,7 @@ export const MODEL_OPTIONS: Record<string, ModelOption[]> = {
maxOutputTokens: 32_000,
contextWindow: 131_000,
temperature: 0,
+dollarSigns: 2,
},
{
name: "deepseek/deepseek-r1-0528",
@@ -195,6 +181,7 @@ export const MODEL_OPTIONS: Record<string, ModelOption[]> = {
maxOutputTokens: 32_000,
contextWindow: 128_000,
temperature: 0,
+dollarSigns: 2,
},
],
auto: [
@@ -262,6 +249,7 @@ export const CLOUD_PROVIDERS: Record<
hasFreeTier?: boolean;
websiteUrl?: string;
gatewayPrefix: string;
+secondary?: boolean;
}
> = {
openai: {
@@ -298,6 +286,7 @@ export const CLOUD_PROVIDERS: Record<
hasFreeTier: false,
websiteUrl: "https://portal.azure.com/",
gatewayPrefix: "",
+secondary: true,
},
};
@@ -359,6 +348,7 @@ export async function getLanguageModelProviders(): Promise<
hasFreeTier: providerDetails.hasFreeTier,
websiteUrl: providerDetails.websiteUrl,
gatewayPrefix: providerDetails.gatewayPrefix,
+secondary: providerDetails.secondary,
envVarName: PROVIDER_TO_ENV_VAR[key] ?? undefined,
type: "cloud",
// apiBaseUrl is not directly in PROVIDERS
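
Downstream, the picker can split the providers returned by `getLanguageModelProviders()` into the primary list and the “Other AI providers” submenu. A minimal consumption sketch, assuming an ES-module context with top-level await and a placeholder import path:

```ts
import { getLanguageModelProviders } from "./language_model_helpers"; // path assumed

const providers = await getLanguageModelProviders();

// Primary providers render at the top level of the model picker;
// providers flagged secondary (e.g. Azure above) go into the submenu.
const primaryProviders = providers.filter((p) => !p.secondary);
const otherProviders = providers.filter((p) => p.secondary);
```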