feat: rename AI model IDs to match the naming convention (#2121)

This commit is contained in:
Daniel Salazar
2025-12-09 16:38:34 -08:00
committed by GitHub
parent 2aa4cd857d
commit 9da8aa0849
15 changed files with 174 additions and 102 deletions

View File

@@ -55,7 +55,7 @@ export class ClaudeProvider implements IChatProvider {
});
}
getDefaultModel () {
return 'claude-haiku-4-5-20251001';
return 'claude/claude-haiku-4-5-20251001';
}
async list () {

View File

@@ -2,8 +2,8 @@ import { IChatModel } from '../types';
export const CLAUDE_MODELS: IChatModel[] = [
{
id: 'claude-opus-4-5-20251101',
aliases: ['claude-opus-4-5-latest', 'claude-opus-4-5', 'claude-opus-4.5'],
id: 'claude/claude-opus-4-5-20251101',
aliases: ['claude-opus-4-5-20251101', 'claude-opus-4-5-latest', 'claude-opus-4-5', 'claude-opus-4.5'],
name: 'Claude Opus 4.5',
costs_currency: 'usd-cents',
input_cost_key: 'input_tokens',
@@ -20,8 +20,8 @@ export const CLAUDE_MODELS: IChatModel[] = [
max_tokens: 64000,
},
{
id: 'claude-haiku-4-5-20251001',
aliases: ['claude-haiku-4.5', 'claude-haiku-4-5', 'claude-4-5-haiku'],
id: 'claude/claude-haiku-4-5-20251001',
aliases: ['claude-haiku-4-5-20251001', 'claude-haiku-4.5', 'claude-haiku-4-5', 'claude-4-5-haiku'],
name: 'Claude Haiku 4.5',
costs_currency: 'usd-cents',
input_cost_key: 'input_tokens',
@@ -38,8 +38,8 @@ export const CLAUDE_MODELS: IChatModel[] = [
max_tokens: 64000,
},
{
id: 'claude-sonnet-4-5-20250929',
aliases: ['claude-sonnet-4.5', 'claude-sonnet-4-5'],
id: 'claude/claude-sonnet-4-5-20250929',
aliases: ['claude-sonnet-4-5-20250929', 'claude-sonnet-4.5', 'claude-sonnet-4-5'],
name: 'Claude Sonnet 4.5',
costs_currency: 'usd-cents',
input_cost_key: 'input_tokens',
@@ -56,8 +56,8 @@ export const CLAUDE_MODELS: IChatModel[] = [
max_tokens: 64000,
},
{
id: 'claude-opus-4-1-20250805',
aliases: ['claude-opus-4-1'],
id: 'claude/claude-opus-4-1-20250805',
aliases: ['claude-opus-4-1-20250805', 'claude-opus-4-1'],
name: 'Claude Opus 4.1',
costs_currency: 'usd-cents',
input_cost_key: 'input_tokens',
@@ -74,8 +74,8 @@ export const CLAUDE_MODELS: IChatModel[] = [
max_tokens: 32000,
},
{
id: 'claude-opus-4-20250514',
aliases: ['claude-opus-4', 'claude-opus-4-latest'],
id: 'claude/claude-opus-4-20250514',
aliases: ['claude-opus-4-20250514', 'claude-opus-4', 'claude-opus-4-latest'],
name: 'Claude Opus 4',
costs_currency: 'usd-cents',
input_cost_key: 'input_tokens',
@@ -92,8 +92,8 @@ export const CLAUDE_MODELS: IChatModel[] = [
max_tokens: 32000,
},
{
id: 'claude-sonnet-4-20250514',
aliases: ['claude-sonnet-4', 'claude-sonnet-4-latest'],
id: 'claude/claude-sonnet-4-20250514',
aliases: ['claude-sonnet-4-20250514', 'claude-sonnet-4', 'claude-sonnet-4-latest'],
name: 'Claude Sonnet 4',
costs_currency: 'usd-cents',
input_cost_key: 'input_tokens',
@@ -110,8 +110,8 @@ export const CLAUDE_MODELS: IChatModel[] = [
max_tokens: 64000,
},
{
id: 'claude-3-7-sonnet-20250219',
aliases: ['claude-3-7-sonnet-latest'],
id: 'claude/claude-3-7-sonnet-20250219',
aliases: ['claude-3-7-sonnet-20250219', 'claude-3-7-sonnet-latest'],
succeeded_by: 'claude-sonnet-4-20250514',
costs_currency: 'usd-cents',
input_cost_key: 'input_tokens',
@@ -128,9 +128,9 @@ export const CLAUDE_MODELS: IChatModel[] = [
max_tokens: 8192,
},
{
id: 'claude-3-5-sonnet-20241022',
id: 'claude/claude-3-5-sonnet-20241022',
name: 'Claude 3.5 Sonnet',
aliases: ['claude-3-5-sonnet-latest'],
aliases: ['claude-3-5-sonnet-20241022', 'claude-3-5-sonnet-latest'],
costs_currency: 'usd-cents',
input_cost_key: 'input_tokens',
output_cost_key: 'output_tokens',
@@ -148,8 +148,9 @@ export const CLAUDE_MODELS: IChatModel[] = [
max_tokens: 8192,
},
{
id: 'claude-3-5-sonnet-20240620',
id: 'claude/claude-3-5-sonnet-20240620',
succeeded_by: 'claude-3-5-sonnet-20241022',
aliases: ['claude-3-5-sonnet-20240620'],
costs_currency: 'usd-cents',
input_cost_key: 'input_tokens',
output_cost_key: 'output_tokens',
@@ -165,7 +166,8 @@ export const CLAUDE_MODELS: IChatModel[] = [
max_tokens: 8192,
},
{
id: 'claude-3-haiku-20240307',
id: 'claude/claude-3-haiku-20240307',
aliases: ['claude-3-haiku-20240307'],
costs_currency: 'usd-cents',
input_cost_key: 'input_tokens',
output_cost_key: 'output_tokens',

View File

@@ -40,7 +40,7 @@ export class DeepSeekProvider implements IChatProvider {
}
getDefaultModel () {
return 'deepseek-chat';
return 'deepseek/deepseek-chat';
}
models () {

View File

@@ -2,9 +2,9 @@ import { IChatModel } from '../types.js';
export const DEEPSEEK_MODELS: IChatModel[] = [
{
id: 'deepseek-chat',
id: 'deepseek/deepseek-chat',
name: 'DeepSeek Chat',
aliases: [],
aliases: ['deepseek-chat'],
context: 128000,
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
@@ -18,9 +18,9 @@ export const DEEPSEEK_MODELS: IChatModel[] = [
max_tokens: 8000,
},
{
id: 'deepseek-reasoner',
id: 'deepseek/deepseek-reasoner',
name: 'DeepSeek Reasoner',
aliases: [],
aliases: ['deepseek-reasoner'],
context: 128000,
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',

View File

@@ -13,7 +13,7 @@ export class GeminiChatProvider implements IChatProvider {
meteringService: MeteringService;
openai: OpenAI;
defaultModel = 'gemini-2.5-flash';
defaultModel = 'gemini/gemini-2.5-flash';
constructor ( meteringService: MeteringService, config: { apiKey: string })
{

View File

@@ -2,8 +2,9 @@ import { IChatModel } from '../types';
export const GEMINI_MODELS: IChatModel[] = [
{
id: 'gemini-2.0-flash',
id: 'gemini/gemini-2.0-flash',
name: 'Gemini 2.0 Flash',
aliases: ['gemini-2.0-flash'],
context: 131072,
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
@@ -18,8 +19,9 @@ export const GEMINI_MODELS: IChatModel[] = [
max_tokens: 8192,
},
{
id: 'gemini-2.0-flash-lite',
id: 'gemini/gemini-2.0-flash-lite',
name: 'Gemini 2.0 Flash-Lite',
aliases: ['gemini-2.0-flash-lite'],
context: 1_048_576,
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
@@ -32,8 +34,9 @@ export const GEMINI_MODELS: IChatModel[] = [
max_tokens: 8192,
},
{
id: 'gemini-2.5-flash',
id: 'gemini/gemini-2.5-flash',
name: 'Gemini 2.5 Flash',
aliases: ['gemini-2.5-flash'],
context: 1_048_576,
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
@@ -47,8 +50,9 @@ export const GEMINI_MODELS: IChatModel[] = [
max_tokens: 65536,
},
{
id: 'gemini-2.5-flash-lite',
id: 'gemini/gemini-2.5-flash-lite',
name: 'Gemini 2.5 Flash-Lite',
aliases: ['gemini-2.5-flash-lite'],
context: 1_048_576,
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
@@ -62,8 +66,9 @@ export const GEMINI_MODELS: IChatModel[] = [
max_tokens: 65536,
},
{
id: 'gemini-2.5-pro',
id: 'gemini/gemini-2.5-pro',
name: 'Gemini 2.5 Pro',
aliases: ['gemini-2.5-pro'],
context: 1_048_576,
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
@@ -77,8 +82,9 @@ export const GEMINI_MODELS: IChatModel[] = [
max_tokens: 200_000,
},
{
id: 'gemini-3-pro-preview',
id: 'gemini/gemini-3-pro-preview',
name: 'Gemini 3 Pro',
aliases: ['gemini-3-pro-preview'],
context: 1_048_576,
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',

View File

@@ -39,7 +39,7 @@ export class GroqAIProvider implements IChatProvider {
}
getDefaultModel () {
return 'llama-3.1-8b-instant';
return 'groq/llama-3.1-8b-instant';
}
models () {

View File

@@ -6,6 +6,7 @@ const makeModel = ({
context,
input,
output,
aliases,
max_tokens,
}: {
id: string;
@@ -13,10 +14,12 @@ const makeModel = ({
context?: number;
input: number;
output: number;
aliases?: string[];
max_tokens?: number;
}): IChatModel => ({
id,
name,
aliases,
context,
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
@@ -32,49 +35,56 @@ const makeModel = ({
export const GROQ_MODELS: IChatModel[] = [
makeModel({
id: 'gemma2-9b-it',
id: 'groq/gemma2-9b-it',
aliases: ['gemma2-9b-it'],
name: 'Gemma 2 9B 8k',
context: 8192,
input: 20,
output: 20,
}),
makeModel({
id: 'gemma-7b-it',
id: 'groq/gemma-7b-it',
aliases: ['gemma-7b-it'],
name: 'Gemma 7B 8k Instruct',
context: 8192,
input: 7,
output: 7,
}),
makeModel({
id: 'llama3-groq-70b-8192-tool-use-preview',
id: 'groq/llama3-groq-70b-8192-tool-use-preview',
aliases: ['llama3-groq-70b-8192-tool-use-preview'],
name: 'Llama 3 Groq 70B Tool Use Preview 8k',
context: 8192,
input: 89,
output: 89,
}),
makeModel({
id: 'llama3-groq-8b-8192-tool-use-preview',
id: 'groq/llama3-groq-8b-8192-tool-use-preview',
aliases: ['llama3-groq-8b-8192-tool-use-preview'],
name: 'Llama 3 Groq 8B Tool Use Preview 8k',
context: 8192,
input: 19,
output: 19,
}),
makeModel({
id: 'llama-3.1-70b-versatile',
id: 'groq/llama-3.1-70b-versatile',
aliases: ['llama-3.1-70b-versatile'],
name: 'Llama 3.1 70B Versatile 128k',
context: 128000,
input: 59,
output: 79,
}),
makeModel({
id: 'llama-3.1-70b-specdec',
id: 'groq/llama-3.1-70b-specdec',
aliases: ['llama-3.1-70b-specdec'],
name: 'Llama 3.1 70B SpecDec 128k',
context: 128000,
input: 59,
output: 99,
}),
makeModel({
id: 'llama-3.1-8b-instant',
id: 'groq/llama-3.1-8b-instant',
aliases: ['llama-3.1-8b-instant'],
name: 'Llama 3.1 8B Instant 128k',
context: 131072,
input: 5,
@@ -82,7 +92,8 @@ export const GROQ_MODELS: IChatModel[] = [
max_tokens: 131072,
}),
makeModel({
id: 'meta-llama/llama-guard-4-12b',
id: 'groq/meta-llama/llama-guard-4-12b',
aliases: ['meta-llama/llama-guard-4-12b'],
name: 'Llama Guard 4 12B',
context: 131072,
input: 20,
@@ -90,7 +101,8 @@ export const GROQ_MODELS: IChatModel[] = [
max_tokens: 1024,
}),
makeModel({
id: 'meta-llama/llama-prompt-guard-2-86m',
id: 'groq/meta-llama/llama-prompt-guard-2-86m',
aliases: ['meta-llama/llama-prompt-guard-2-86m'],
name: 'Prompt Guard 2 86M',
context: 512,
input: 4,
@@ -98,56 +110,64 @@ export const GROQ_MODELS: IChatModel[] = [
max_tokens: 512,
}),
makeModel({
id: 'llama-3.2-1b-preview',
id: 'groq/llama-3.2-1b-preview',
aliases: ['llama-3.2-1b-preview'],
name: 'Llama 3.2 1B (Preview) 8k',
context: 128000,
input: 4,
output: 4,
}),
makeModel({
id: 'llama-3.2-3b-preview',
id: 'groq/llama-3.2-3b-preview',
aliases: ['llama-3.2-3b-preview'],
name: 'Llama 3.2 3B (Preview) 8k',
context: 128000,
input: 6,
output: 6,
}),
makeModel({
id: 'llama-3.2-11b-vision-preview',
id: 'groq/llama-3.2-11b-vision-preview',
aliases: ['llama-3.2-11b-vision-preview'],
name: 'Llama 3.2 11B Vision 8k (Preview)',
context: 8000,
input: 18,
output: 18,
}),
makeModel({
id: 'llama-3.2-90b-vision-preview',
id: 'groq/llama-3.2-90b-vision-preview',
aliases: ['llama-3.2-90b-vision-preview'],
name: 'Llama 3.2 90B Vision 8k (Preview)',
context: 8000,
input: 90,
output: 90,
}),
makeModel({
id: 'llama3-70b-8192',
id: 'groq/llama3-70b-8192',
aliases: ['llama3-70b-8192'],
name: 'Llama 3 70B 8k',
context: 8192,
input: 59,
output: 79,
}),
makeModel({
id: 'llama3-8b-8192',
id: 'groq/llama3-8b-8192',
aliases: ['llama3-8b-8192'],
name: 'Llama 3 8B 8k',
context: 8192,
input: 5,
output: 8,
}),
makeModel({
id: 'mixtral-8x7b-32768',
id: 'groq/mixtral-8x7b-32768',
aliases: ['mixtral-8x7b-32768'],
name: 'Mixtral 8x7B Instruct 32k',
context: 32768,
input: 24,
output: 24,
}),
makeModel({
id: 'llama-guard-3-8b',
id: 'groq/llama-guard-3-8b',
aliases: ['llama-guard-3-8b'],
name: 'Llama Guard 3 8B 8k',
context: 8192,
input: 20,

View File

@@ -38,7 +38,7 @@ export class MistralAIProvider implements IChatProvider {
}
getDefaultModel () {
return 'mistral-small-2506';
return 'mistral/mistral-small-2506';
}
async models () {

View File

@@ -2,9 +2,10 @@ import { IChatModel } from '../types';
export const MISTRAL_MODELS: IChatModel[] = [
{
id: 'mistral-medium-2508',
id: 'mistral/mistral-medium-2508',
name: 'mistral-medium-2508',
aliases: [
'mistral-medium-2508',
'mistral-medium-latest',
'mistral-medium',
],
@@ -21,9 +22,10 @@ export const MISTRAL_MODELS: IChatModel[] = [
},
},
{
id: 'open-mistral-7b',
id: 'mistral/open-mistral-7b',
name: 'open-mistral-7b',
aliases: [
'open-mistral-7b',
'mistral-tiny',
'mistral-tiny-2312',
],
@@ -40,9 +42,10 @@ export const MISTRAL_MODELS: IChatModel[] = [
},
},
{
id: 'open-mistral-nemo',
id: 'mistral/open-mistral-nemo',
name: 'open-mistral-nemo',
aliases: [
'open-mistral-nemo',
'open-mistral-nemo-2407',
'mistral-tiny-2407',
'mistral-tiny-latest',
@@ -60,9 +63,10 @@ export const MISTRAL_MODELS: IChatModel[] = [
},
},
{
id: 'pixtral-large-2411',
id: 'mistral/pixtral-large-2411',
name: 'pixtral-large-2411',
aliases: [
'pixtral-large-2411',
'pixtral-large-latest',
'mistral-large-pixtral-2411',
],
@@ -79,9 +83,10 @@ export const MISTRAL_MODELS: IChatModel[] = [
},
},
{
id: 'codestral-2508',
id: 'mistral/codestral-2508',
name: 'codestral-2508',
aliases: [
'codestral-2508',
'codestral-latest',
],
max_tokens: 256000,
@@ -97,9 +102,10 @@ export const MISTRAL_MODELS: IChatModel[] = [
},
},
{
id: 'devstral-small-2507',
id: 'mistral/devstral-small-2507',
name: 'devstral-small-2507',
aliases: [
'devstral-small-2507',
'devstral-small-latest',
],
max_tokens: 131072,
@@ -116,9 +122,10 @@ export const MISTRAL_MODELS: IChatModel[] = [
},
},
{
id: 'devstral-medium-2507',
id: 'mistral/devstral-medium-2507',
name: 'devstral-medium-2507',
aliases: [
'devstral-medium-2507',
'devstral-medium-latest',
],
max_tokens: 131072,
@@ -135,9 +142,10 @@ export const MISTRAL_MODELS: IChatModel[] = [
},
},
{
id: 'mistral-small-2506',
id: 'mistral/mistral-small-2506',
name: 'mistral-small-2506',
aliases: [
'mistral-small-2506',
'mistral-small-latest',
],
max_tokens: 131072,
@@ -153,9 +161,10 @@ export const MISTRAL_MODELS: IChatModel[] = [
},
},
{
id: 'magistral-medium-2509',
id: 'mistral/magistral-medium-2509',
name: 'magistral-medium-2509',
aliases: [
'magistral-medium-2509',
'magistral-medium-latest',
],
max_tokens: 131072,
@@ -171,9 +180,10 @@ export const MISTRAL_MODELS: IChatModel[] = [
},
},
{
id: 'magistral-small-2509',
id: 'mistral/magistral-small-2509',
name: 'magistral-small-2509',
aliases: [
'magistral-small-2509',
'magistral-small-latest',
],
max_tokens: 131072,
@@ -189,9 +199,10 @@ export const MISTRAL_MODELS: IChatModel[] = [
},
},
{
id: 'voxtral-mini-2507',
id: 'mistral/voxtral-mini-2507',
name: 'voxtral-mini-2507',
aliases: [
'voxtral-mini-2507',
'voxtral-mini-latest',
],
max_tokens: 32768,
@@ -207,9 +218,10 @@ export const MISTRAL_MODELS: IChatModel[] = [
},
},
{
id: 'voxtral-small-2507',
id: 'mistral/voxtral-small-2507',
name: 'voxtral-small-2507',
aliases: [
'voxtral-small-2507',
'voxtral-small-latest',
],
max_tokens: 32768,
@@ -225,9 +237,10 @@ export const MISTRAL_MODELS: IChatModel[] = [
},
},
{
id: 'mistral-large-latest',
id: 'mistral/mistral-large-latest',
name: 'mistral-large-2512',
aliases: [
'mistral-large-latest',
'mistral-large-2512',
],
max_tokens: 262144,
@@ -243,9 +256,10 @@ export const MISTRAL_MODELS: IChatModel[] = [
},
},
{
id: 'ministral-3b-2512',
id: 'mistral/ministral-3b-2512',
name: 'ministral-3b-2512',
aliases: [
'ministral-3b-2512',
'ministral-3b-latest',
],
max_tokens: 131072,
@@ -261,9 +275,10 @@ export const MISTRAL_MODELS: IChatModel[] = [
},
},
{
id: 'ministral-8b-2512',
id: 'mistral/ministral-8b-2512',
name: 'ministral-8b-2512',
aliases: [
'ministral-8b-2512',
'ministral-8b-latest',
],
max_tokens: 262144,
@@ -279,9 +294,10 @@ export const MISTRAL_MODELS: IChatModel[] = [
},
},
{
id: 'ministral-14b-2512',
id: 'mistral/ministral-14b-2512',
name: 'ministral-14b-2512',
aliases: [
'ministral-14b-2512',
'ministral-14b-latest',
],
max_tokens: 262144,

View File

@@ -50,7 +50,7 @@ export class OpenAiChatProvider implements IChatProvider {
*/
#openAi: OpenAI;
#defaultModel = 'gpt-5-nano';
#defaultModel = 'openai/gpt-5-nano';
#meteringService: MeteringService;

View File

@@ -4,7 +4,8 @@ import { IChatModel } from '../types';
export const OPEN_AI_MODELS: IChatModel[] = [
{
id: 'gpt-5.1',
id: 'openai/gpt-5.1',
aliases: ['gpt-5.1'],
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
output_cost_key: 'completion_tokens',
@@ -17,7 +18,8 @@ export const OPEN_AI_MODELS: IChatModel[] = [
max_tokens: 128000,
},
{
id: 'gpt-5.1-codex',
id: 'openai/gpt-5.1-codex',
aliases: ['gpt-5.1-codex'],
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
output_cost_key: 'completion_tokens',
@@ -30,7 +32,8 @@ export const OPEN_AI_MODELS: IChatModel[] = [
max_tokens: 128000,
},
{
id: 'gpt-5.1-codex-mini',
id: 'openai/gpt-5.1-codex-mini',
aliases: ['gpt-5.1-codex-mini'],
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
output_cost_key: 'completion_tokens',
@@ -43,7 +46,8 @@ export const OPEN_AI_MODELS: IChatModel[] = [
max_tokens: 128000,
},
{
id: 'gpt-5.1-chat-latest',
id: 'openai/gpt-5.1-chat-latest',
aliases: ['gpt-5.1-chat-latest'],
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
output_cost_key: 'completion_tokens',
@@ -56,8 +60,8 @@ export const OPEN_AI_MODELS: IChatModel[] = [
max_tokens: 16384,
},
{
id: 'gpt-5-2025-08-07',
aliases: ['gpt-5'],
id: 'openai/gpt-5-2025-08-07',
aliases: ['gpt-5-2025-08-07', 'gpt-5'],
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
output_cost_key: 'completion_tokens',
@@ -70,8 +74,8 @@ export const OPEN_AI_MODELS: IChatModel[] = [
max_tokens: 128000,
},
{
id: 'gpt-5-mini-2025-08-07',
aliases: ['gpt-5-mini'],
id: 'openai/gpt-5-mini-2025-08-07',
aliases: ['gpt-5-mini-2025-08-07', 'gpt-5-mini'],
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
output_cost_key: 'completion_tokens',
@@ -84,8 +88,8 @@ export const OPEN_AI_MODELS: IChatModel[] = [
max_tokens: 128000,
},
{
id: 'gpt-5-nano-2025-08-07',
aliases: ['gpt-5-nano'],
id: 'openai/gpt-5-nano-2025-08-07',
aliases: ['gpt-5-nano-2025-08-07', 'gpt-5-nano'],
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
output_cost_key: 'completion_tokens',
@@ -98,7 +102,8 @@ export const OPEN_AI_MODELS: IChatModel[] = [
max_tokens: 128000,
},
{
id: 'gpt-5-chat-latest',
id: 'openai/gpt-5-chat-latest',
aliases: ['gpt-5-chat-latest'],
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
output_cost_key: 'completion_tokens',
@@ -111,7 +116,8 @@ export const OPEN_AI_MODELS: IChatModel[] = [
max_tokens: 16384,
},
{
id: 'gpt-4o',
id: 'openai/gpt-4o',
aliases: ['gpt-4o'],
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
output_cost_key: 'completion_tokens',
@@ -124,7 +130,8 @@ export const OPEN_AI_MODELS: IChatModel[] = [
max_tokens: 16384,
},
{
id: 'gpt-4o-mini',
id: 'openai/gpt-4o-mini',
aliases: ['gpt-4o-mini'],
max_tokens: 16384,
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
@@ -137,7 +144,8 @@ export const OPEN_AI_MODELS: IChatModel[] = [
},
},
{
id: 'o1',
id: 'openai/o1',
aliases: ['o1'],
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
output_cost_key: 'completion_tokens',
@@ -150,7 +158,8 @@ export const OPEN_AI_MODELS: IChatModel[] = [
max_tokens: 100000,
},
{
id: 'o1-mini',
id: 'openai/o1-mini',
aliases: ['o1-mini'],
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
output_cost_key: 'completion_tokens',
@@ -162,7 +171,8 @@ export const OPEN_AI_MODELS: IChatModel[] = [
max_tokens: 65536,
},
{
id: 'o1-pro',
id: 'openai/o1-pro',
aliases: ['o1-pro'],
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
output_cost_key: 'completion_tokens',
@@ -174,7 +184,8 @@ export const OPEN_AI_MODELS: IChatModel[] = [
max_tokens: 100000,
},
{
id: 'o3',
id: 'openai/o3',
aliases: ['o3'],
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
output_cost_key: 'completion_tokens',
@@ -187,7 +198,8 @@ export const OPEN_AI_MODELS: IChatModel[] = [
max_tokens: 100000,
},
{
id: 'o3-mini',
id: 'openai/o3-mini',
aliases: ['o3-mini'],
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
output_cost_key: 'completion_tokens',
@@ -200,7 +212,8 @@ export const OPEN_AI_MODELS: IChatModel[] = [
max_tokens: 100000,
},
{
id: 'o4-mini',
id: 'openai/o4-mini',
aliases: ['o4-mini'],
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
output_cost_key: 'completion_tokens',
@@ -212,7 +225,8 @@ export const OPEN_AI_MODELS: IChatModel[] = [
max_tokens: 100000,
},
{
id: 'gpt-4.1',
id: 'openai/gpt-4.1',
aliases: ['gpt-4.1'],
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
output_cost_key: 'completion_tokens',
@@ -225,7 +239,8 @@ export const OPEN_AI_MODELS: IChatModel[] = [
max_tokens: 32768,
},
{
id: 'gpt-4.1-mini',
id: 'openai/gpt-4.1-mini',
aliases: ['gpt-4.1-mini'],
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
output_cost_key: 'completion_tokens',
@@ -238,7 +253,8 @@ export const OPEN_AI_MODELS: IChatModel[] = [
max_tokens: 32768,
},
{
id: 'gpt-4.1-nano',
id: 'openai/gpt-4.1-nano',
aliases: ['gpt-4.1-nano'],
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
output_cost_key: 'completion_tokens',
@@ -251,7 +267,8 @@ export const OPEN_AI_MODELS: IChatModel[] = [
max_tokens: 32768,
},
{
id: 'gpt-4.5-preview',
id: 'openai/gpt-4.5-preview',
aliases: ['gpt-4.5-preview'],
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
output_cost_key: 'completion_tokens',

View File

@@ -138,7 +138,7 @@ export class OpenRouterProvider implements IChatProvider {
coerced_models.push({
id: `openrouter:${model.id}`,
name: `${model.name} (OpenRouter)`,
aliases: [model.id, model.name, `openrouter/${model.id}`, model.id.split('/').slice(1).join('/')],
aliases: [model.id, `openrouter/${model.id}`, model.id.split('/').slice(1).join('/')],
max_tokens: model.top_provider.max_completion_tokens,
costs_currency: 'usd-cents',
input_cost_key: 'prompt',

View File

@@ -39,7 +39,7 @@ export class XAIProvider implements IChatProvider {
}
getDefaultModel () {
return 'grok-beta';
return 'xai/grok-beta';
}
models () {

View File

@@ -6,15 +6,18 @@ const makeModel = ({
context,
input,
output,
aliases,
}: {
id: string;
name: string;
context: number;
input: number;
output: number;
aliases?: string[];
}): IChatModel => ({
id,
name,
aliases,
context,
costs_currency: 'usd-cents',
input_cost_key: 'prompt_tokens',
@@ -29,56 +32,64 @@ const makeModel = ({
export const XAI_MODELS: IChatModel[] = [
makeModel({
id: 'grok-beta',
id: 'xai/grok-beta',
aliases: ['grok-beta'],
name: 'Grok Beta',
context: 131072,
input: 500,
output: 1500,
}),
makeModel({
id: 'grok-vision-beta',
id: 'xai/grok-vision-beta',
aliases: ['grok-vision-beta'],
name: 'Grok Vision Beta',
context: 8192,
input: 500,
output: 1500,
}),
makeModel({
id: 'grok-3',
id: 'xai/grok-3',
aliases: ['grok-3'],
name: 'Grok 3',
context: 131072,
input: 300,
output: 1500,
}),
makeModel({
id: 'grok-3-fast',
id: 'xai/grok-3-fast',
aliases: ['grok-3-fast'],
name: 'Grok 3 Fast',
context: 131072,
input: 500,
output: 2500,
}),
makeModel({
id: 'grok-3-mini',
id: 'xai/grok-3-mini',
aliases: ['grok-3-mini'],
name: 'Grok 3 Mini',
context: 131072,
input: 30,
output: 50,
}),
makeModel({
id: 'grok-3-mini-fast',
id: 'xai/grok-3-mini-fast',
aliases: ['grok-3-mini-fast'],
name: 'Grok 3 Mini Fast',
context: 131072,
input: 60,
output: 400,
}),
makeModel({
id: 'grok-2-vision',
id: 'xai/grok-2-vision',
aliases: ['grok-2-vision'],
name: 'Grok 2 Vision',
context: 8192,
input: 200,
output: 1000,
}),
makeModel({
id: 'grok-2',
id: 'xai/grok-2',
aliases: ['grok-2'],
name: 'Grok 2',
context: 131072,
input: 200,