OpenRouter acts as a unified interface for accessing multiple LLM providers through a single API, so you can switch between models by changing a model ID instead of integrating each provider separately.
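The server action below imports an `openrouter` client from `@/lib/openrouter`, which isn't shown in this section. A minimal sketch of that module, assuming the official `@openrouter/ai-sdk-provider` package and an `OPENROUTER_API_KEY` environment variable, might look like this:

```ts
// lib/openrouter.ts (sketch; adjust to your own setup)
import { createOpenRouter } from '@openrouter/ai-sdk-provider';

// createOpenRouter returns a provider instance: calling openrouter("provider/model")
// gives back a model object that the AI SDK functions can use.
export const openrouter = createOpenRouter({
  apiKey: process.env.OPENROUTER_API_KEY,
});
```

With that in place, the action itself streams a completion from an OpenRouter-hosted model: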
```ts
// app/actions/ai.ts
'use server';

import { streamText } from 'ai';
import { openrouter } from '@/lib/openrouter';

export async function TextStream(prompt: string) {
  // streamText (from the ai package) starts the model call and returns a
  // result object whose textStream yields tokens as they are generated.
  const result = streamText({
    // OpenRouter model IDs use the "provider/model" slug format.
    model: openrouter('google/gemini-flash-1.5'),
    messages: [
      {
        role: 'user',
        content: prompt,
      },
    ],
  });

  return result;
}
```
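Note that the `streamText` result object contains streams and functions, so it cannot be returned across the server-action boundary to a client component, which only accepts serializable values; returning it directly only works when the action is called from other server-side code. For client-side consumption, a common pattern wraps the text stream in a streamable value. The sketch below assumes the RSC helpers exported from `ai/rsc` (AI SDK 3/4) and uses a hypothetical `TextStreamToClient` action name:

```ts
// app/actions/ai-stream.ts (hypothetical variant for calling from client components)
'use server';

import { streamText } from 'ai';
import { createStreamableValue } from 'ai/rsc';
import { openrouter } from '@/lib/openrouter';

export async function TextStreamToClient(prompt: string) {
  const stream = createStreamableValue('');

  // Run the model call in the background, pushing each text delta into the
  // streamable value as it arrives.
  (async () => {
    const { textStream } = streamText({
      model: openrouter('google/gemini-flash-1.5'),
      messages: [{ role: 'user', content: prompt }],
    });

    for await (const delta of textStream) {
      stream.update(delta);
    }

    stream.done();
  })();

  // stream.value is serializable; a client component can iterate the deltas
  // with readStreamableValue(output) from 'ai/rsc'.
  return { output: stream.value };
}
```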