From dfabc7d2efbfb9bf11673a9f9259fe901e3f41e8 Mon Sep 17 00:00:00 2001
From: Riccardo Giorato
Date: Tue, 27 Jan 2026 14:45:29 +0100
Subject: [PATCH] Remove dynamic Together AI model discovery to prevent API timeouts

---
 src/agents/models-config.providers.ts |  15 +--
 src/agents/together-models.ts         | 133 --------------------------
 2 files changed, 5 insertions(+), 143 deletions(-)

diff --git a/src/agents/models-config.providers.ts b/src/agents/models-config.providers.ts
index cf9445c59..ec3b96bd9 100644
--- a/src/agents/models-config.providers.ts
+++ b/src/agents/models-config.providers.ts
@@ -13,11 +13,7 @@ import {
   SYNTHETIC_MODEL_CATALOG,
 } from "./synthetic-models.js";
 import { discoverVeniceModels, VENICE_BASE_URL } from "./venice-models.js";
-import {
-  discoverTogetherModels,
-  TOGETHER_BASE_URL,
-  TOGETHER_MODEL_CATALOG,
-} from "./together-models.js";
+import { TOGETHER_BASE_URL, TOGETHER_MODEL_CATALOG } from "./together-models.js";
 
 type ModelsConfig = NonNullable<ClawdbotConfig["models"]>;
 export type ProviderConfig = NonNullable<ModelsConfig["providers"]>[string];
@@ -364,12 +360,11 @@ async function buildOllamaProvider(): Promise<ProviderConfig> {
   };
 }
 
-async function buildTogetherProvider(apiKey?: string): Promise<ProviderConfig> {
-  // Only discover models if we have an API key, otherwise use static catalog
-  const models = apiKey ? await discoverTogetherModels(apiKey) : TOGETHER_MODEL_CATALOG;
+async function buildTogetherProvider(_apiKey?: string): Promise<ProviderConfig> {
+  // Always use static catalog instead of dynamic discovery
+  // This prevents timeout issues with the Together AI API
+  const models = TOGETHER_MODEL_CATALOG;
 
-  // If we successfully discovered models, return them and let the merge logic handle conflicts
-  // If discovery failed, return empty array to fallback to static catalog
   return {
     baseUrl: TOGETHER_BASE_URL,
     api: "openai-completions",
diff --git a/src/agents/together-models.ts b/src/agents/together-models.ts
index 16803f50d..ccd887546 100644
--- a/src/agents/together-models.ts
+++ b/src/agents/together-models.ts
@@ -126,136 +126,3 @@ export function buildTogetherModelDefinition(
     maxTokens: model.maxTokens,
   };
 }
-
-// Together AI API response types
-interface TogetherModel {
-  id: string;
-  name?: string;
-  display_name?: string;
-  description?: string;
-  context_length?: number;
-  tokenizer?: string;
-  type?: string;
-  capabilities?: {
-    vision?: boolean;
-    function_calling?: boolean;
-    tool_use?: boolean;
-  };
-  pricing?: {
-    input?: number;
-    output?: number;
-  };
-}
-
-/**
- * Discover models from Together AI API.
- * The /models endpoint requires authentication via API key.
- */
-export async function discoverTogetherModels(apiKey?: string): Promise<ModelDefinitionConfig[]> {
-  // Skip API discovery in test environment
-  if (process.env.NODE_ENV === "test" || process.env.VITEST) {
-    return [];
-  }
-
-  try {
-    // Together AI requires authentication for /models endpoint
-    const headers: Record<string, string> = {
-      "Content-Type": "application/json",
-    };
-
-    if (apiKey) {
-      headers["Authorization"] = `Bearer ${apiKey}`;
-    }
-
-    const response = await fetch(`${TOGETHER_BASE_URL}/models`, {
-      signal: AbortSignal.timeout(5000),
-      headers,
-    });
-
-    if (!response.ok) {
-      // Try to get error details from response
-      try {
-        const errorText = await response.text();
-        console.warn(
-          `[together-models] Failed to discover models: HTTP ${response.status}`,
-          errorText,
-        );
-      } catch (e) {
-        console.warn(`[together-models] Could not read error response body: ${String(e)}`);
-      }
-
-      return [];
-    }
-
-    const rawResponse = await response.text();
-
-    let models: TogetherModel[];
-    try {
-      const parsed = JSON.parse(rawResponse);
-
-      // Together AI returns array directly, not { data: array }
-      if (Array.isArray(parsed)) {
-        models = parsed as TogetherModel[];
-      } else if (parsed.data && Array.isArray(parsed.data)) {
-        models = parsed.data as TogetherModel[];
-      } else {
-        console.error(`[together-models] Unexpected response format:`, parsed);
-        return [];
-      }
-    } catch (e) {
-      console.error(`[together-models] Failed to parse JSON: ${String(e)}`);
-      return [];
-    }
-
-    if (!Array.isArray(models) || models.length === 0) {
-      return [];
-    }
-
-    // Filter for chat models only and map to ModelDefinitionConfig
-    const chatModels = models.filter((model) => model.type === "chat");
-
-    return chatModels.map((model: TogetherModel) => {
-      const modelId = model.id;
-      const displayName = model.display_name || model.name || modelId;
-
-      // Determine if model supports reasoning
-      const isReasoning =
-        modelId.toLowerCase().includes("reason") ||
-        modelId.toLowerCase().includes("r1") ||
-        modelId.toLowerCase().includes("thinking") ||
-        model.description?.toLowerCase().includes("reasoning") ||
-        false;
-
-      // Determine input types
-      const hasVision =
-        model.capabilities?.vision ||
-        modelId.toLowerCase().includes("vision") ||
-        modelId.toLowerCase().includes("vl") ||
-        model.description?.toLowerCase().includes("vision") ||
-        false;
-
-      // Use pricing from API if available, otherwise use defaults
-      const cost = model.pricing
-        ? {
-            input: model.pricing.input || TOGETHER_DEFAULT_COST.input,
-            output: model.pricing.output || TOGETHER_DEFAULT_COST.output,
-            cacheRead: model.pricing.input || TOGETHER_DEFAULT_COST.cacheRead,
-            cacheWrite: model.pricing.output || TOGETHER_DEFAULT_COST.cacheWrite,
-          }
-        : TOGETHER_DEFAULT_COST;
-
-      return {
-        id: modelId,
-        name: displayName,
-        reasoning: isReasoning,
-        input: hasVision ? ["text", "image"] : ["text"],
-        cost,
-        contextWindow: model.context_length || 131072,
-        maxTokens: 8192, // Default max tokens for most models
-      };
-    });
-  } catch (error) {
-    console.warn(`[together-models] Discovery failed: ${String(error)}`);
-    return [];
-  }
-}