export const getBestProvider = async (model: string, provider?: string) => {
  const lower = model?.toLowerCase?.() ?? "";

  // Handle Google models without querying HF router
  if (
    lower.startsWith("gemini-") ||
    lower.startsWith("gemma-") ||
    lower.startsWith("learnlm-")
  ) {
    return {
      provider: provider && provider !== "auto" ? provider : "google",
      status: "live",
      // Generous context length for Gemini models; exact value varies by SKU
      context_length: 1_000_000,
    };
  }

  try {
    const response = await fetch(`https://router.huggingface.co/v1/models/${model}`);
    const { data } = await response.json();

    let bestProvider = null;
    if (provider === "auto") {
      // Prefer live providers, then sort by combined input + output price (ascending)
      const sortedProviders = [...data.providers].sort((a: any, b: any) => {
        if (a.status === "live" && b.status !== "live") return -1;
        if (a.status !== "live" && b.status === "live") return 1;
        return (
          (a?.pricing?.output ?? 0) + (a?.pricing?.input ?? 0) -
          ((b?.pricing?.output ?? 0) + (b?.pricing?.input ?? 0))
        );
      });
      bestProvider = sortedProviders[0];
    } else {
      // Use the requested provider if it is live; otherwise fall back to any live provider
      const providerData = data.providers.find((p: any) => p.provider === provider);
      if (providerData?.status === "live") {
        bestProvider = providerData;
      } else {
        bestProvider = data.providers?.find((p: any) => p.status === "live");
      }
    }

    return bestProvider;
  } catch {
    // Graceful fallback when the router is unreachable or returns an unexpected payload
    return {
      provider: provider || "auto",
      status: "unknown",
      context_length: 4096,
    };
  }
};
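
// Minimal usage sketch (the model id and call site below are illustrative
// assumptions, not part of this module):
//
//   const best = await getBestProvider("meta-llama/Llama-3.1-8B-Instruct", "auto");
//   console.log(best?.provider, best?.status, best?.context_length);
//
// With "auto", the cheapest live provider wins; with an explicit provider name,
// it is used only if live, otherwise any live provider is returned.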