Skip to content
'; user_status_content.firstChild.appendChild(avatarContainer); } else { // Placeholder for LoggedOutUserMenu let loggedOutContainer = document.createElement('div'); // if LoggedOutUserMenu fallback let userBtn = document.createElement('button'); userBtn.style.width = "33px"; userBtn.style.height = "33px"; userBtn.style.display = "flex"; userBtn.style.alignItems = "center"; userBtn.style.justifyContent = "center"; userBtn.style.color = "var(--ds-gray-900)"; userBtn.style.border = "1px solid var(--ds-gray-300)"; userBtn.style.borderRadius = "100%"; userBtn.style.cursor = "pointer"; userBtn.style.background = "transparent"; userBtn.style.padding = "0"; // user icon ( from geist) let svg = document.createElementNS('http://www.w3.org/2000/svg', 'svg'); svg.setAttribute('data-testid', 'geist-icon'); svg.setAttribute('height', '16'); svg.setAttribute('stroke-linejoin', 'round'); svg.setAttribute('style', 'color:currentColor'); svg.setAttribute('viewBox', '0 0 16 16'); svg.setAttribute('width', '16'); let path = document.createElementNS('http://www.w3.org/2000/svg', 'path'); path.setAttribute('fill-rule', 'evenodd'); path.setAttribute('clip-rule', 'evenodd'); path.setAttribute('d', 'M7.75 0C5.95507 0 4.5 1.45507 4.5 3.25V3.75C4.5 5.54493 5.95507 7 7.75 7H8.25C10.0449 7 11.5 5.54493 11.5 3.75V3.25C11.5 1.45507 10.0449 0 8.25 0H7.75ZM6 3.25C6 2.2835 6.7835 1.5 7.75 1.5H8.25C9.2165 1.5 10 2.2835 10 3.25V3.75C10 4.7165 9.2165 5.5 8.25 5.5H7.75C6.7835 5.5 6 4.7165 6 3.75V3.25ZM2.5 14.5V13.1709C3.31958 11.5377 4.99308 10.5 6.82945 10.5H9.17055C11.0069 10.5 12.6804 11.5377 13.5 13.1709V14.5H2.5ZM6.82945 9C4.35483 9 2.10604 10.4388 1.06903 12.6857L1 12.8353V13V15.25V16H1.75H14.25H15V15.25V13V12.8353L14.931 12.6857C13.894 10.4388 11.6452 9 9.17055 9H6.82945Z'); path.setAttribute('fill', 'currentColor'); svg.appendChild(path); userBtn.appendChild(svg); loggedOutContainer.appendChild(userBtn); loggedOutContainer.style.display = 'flex'; loggedOutContainer.style.gap = '8px'; 
loggedOutContainer.style.alignItems = 'center'; user_status_content.firstChild.appendChild(loggedOutContainer); } })();
Menu

Text Generation Quickstart

Last updated March 7, 2026

This quickstart walks you through making your first text generation request with AI Gateway.

AI Assistance

Set up AI Gateway for text generation. First, make sure the Vercel CLI is installed (`npm i -g vercel`). If I'm using Claude Code or Cursor, install the Vercel Plugin (`npx plugins add vercel/vercel-plugin`). For other agents, install Vercel Skills (`npx skills add vercel-labs/agent-skills`). Then: 1. Initialize a Node.js project and install the `ai`, `dotenv`, `@types/node`, `tsx`, and `typescript` packages. 2. Save my AI_GATEWAY_API_KEY in .env.local. 3. Create an index.ts that uses the AI SDK streamText function with the model 'openai/gpt-5.4' to stream a response and log token usage. 4. Run it with tsx to verify it works.

  1. Create a new directory and initialize a Node.js project:

    Terminal
    mkdir ai-text-demo
    cd ai-text-demo
    pnpm init
  2. Install the AI SDK and development dependencies:

    Terminal
    npm install ai dotenv @types/node tsx typescript
    Terminal
    yarn add ai dotenv @types/node tsx typescript
    Terminal
    pnpm add ai dotenv @types/node tsx typescript
    Terminal
    bun add ai dotenv @types/node tsx typescript
  3. Go to the AI Gateway API Keys page in your Vercel dashboard and click Create key to generate a new API key.

    Create a .env.local file and save your API key:

    .env.local
    AI_GATEWAY_API_KEY=your_ai_gateway_api_key

    Instead of using an API key, you can use OIDC tokens to authenticate your requests.

  4. Create an index.ts file:

    index.ts
    // Streams a model response through AI Gateway using the AI SDK.
    // The SDK picks up AI_GATEWAY_API_KEY from the environment (loaded by dotenv).
    import { streamText } from 'ai';
    import 'dotenv/config';
     
    async function main() {
      // streamText returns immediately; tokens arrive via result.textStream.
      const result = streamText({
        model: 'openai/gpt-5.4', // 'provider/model' routing through AI Gateway
        prompt: 'Invent a new holiday and describe its traditions.',
      });
     
      // Print each chunk as it arrives rather than waiting for the full reply.
      for await (const textPart of result.textStream) {
        process.stdout.write(textPart);
      }
     
      console.log();
      // usage and finishReason are promises that resolve once the stream ends.
      console.log('Token usage:', await result.usage);
      console.log('Finish reason:', await result.finishReason);
    }
     
    main().catch(console.error);

    Run your script:

    Terminal
    pnpm tsx index.ts

    You should see the AI model's response stream to your terminal.

Use any OpenAI SDK or HTTP client with AI Gateway:

index.ts
// Call AI Gateway's OpenAI-compatible endpoint with the official OpenAI SDK.
import OpenAI from 'openai';
import 'dotenv/config'; // loads AI_GATEWAY_API_KEY from .env.local
 
const client = new OpenAI({
  apiKey: process.env.AI_GATEWAY_API_KEY,
  // Point the SDK at AI Gateway instead of api.openai.com.
  baseURL: 'https://ai-gateway.vercel.sh/v1',
});
 
async function main() {
  const response = await client.chat.completions.create({
    model: 'anthropic/claude-opus-4.6', // any 'provider/model' the gateway routes
    messages: [
      {
        role: 'user',
        content: 'Invent a new holiday and describe its traditions.',
      },
    ],
  });
 
  console.log(response.choices[0].message.content);
}
 
main().catch(console.error);
main.py
# Call AI Gateway's OpenAI-compatible endpoint with the official OpenAI SDK.
import os
from openai import OpenAI
from dotenv import load_dotenv
 
# Read AI_GATEWAY_API_KEY from .env.local into the process environment.
load_dotenv()
 
client = OpenAI(
    api_key=os.getenv('AI_GATEWAY_API_KEY'),
    # Point the SDK at AI Gateway instead of api.openai.com.
    base_url='https://ai-gateway.vercel.sh/v1',
)
 
response = client.chat.completions.create(
    # Any 'provider/model' identifier the gateway routes.
    model='anthropic/claude-opus-4.6',
    messages=[
        {
            'role': 'user',
            'content': 'Invent a new holiday and describe its traditions.',
        },
    ],
)
 
print(response.choices[0].message.content)

Learn more in the OpenAI Chat Completions API docs.

Use any Anthropic SDK or HTTP client with AI Gateway:

index.ts
// Call AI Gateway with the official Anthropic SDK.
import Anthropic from '@anthropic-ai/sdk';
import 'dotenv/config'; // loads AI_GATEWAY_API_KEY from .env.local
 
const client = new Anthropic({
  apiKey: process.env.AI_GATEWAY_API_KEY,
  // Note: no '/v1' suffix here, unlike the OpenAI-compatible examples —
  // presumably the Anthropic SDK appends its own path; confirm against SDK docs.
  baseURL: 'https://ai-gateway.vercel.sh',
});
 
async function main() {
  const message = await client.messages.create({
    model: 'anthropic/claude-opus-4.6',
    max_tokens: 1024, // Messages API requires an explicit token cap
    messages: [
      {
        role: 'user',
        content: 'Invent a new holiday and describe its traditions.',
      },
    ],
  });
 
  console.log(message.content[0].text);
}
 
main().catch(console.error);
main.py
# Call AI Gateway with the official Anthropic Python SDK.
import os
import anthropic
from dotenv import load_dotenv
 
# Read AI_GATEWAY_API_KEY from .env.local into the process environment.
load_dotenv()
 
client = anthropic.Anthropic(
    api_key=os.getenv('AI_GATEWAY_API_KEY'),
    # Note: no '/v1' suffix here, unlike the OpenAI-compatible examples —
    # presumably the Anthropic SDK appends its own path; confirm against SDK docs.
    base_url='https://ai-gateway.vercel.sh',
)
 
message = client.messages.create(
    model='anthropic/claude-opus-4.6',
    max_tokens=1024,  # Messages API requires an explicit token cap
    messages=[
        {
            'role': 'user',
            'content': 'Invent a new holiday and describe its traditions.',
        },
    ],
)
 
print(message.content[0].text)

Learn more in the Anthropic Messages API docs.

Use the OpenResponses API, an open standard for AI model interactions:

index.ts
// Call AI Gateway's OpenResponses endpoint directly over HTTP with fetch.
import 'dotenv/config'; // loads AI_GATEWAY_API_KEY from .env.local

async function main() {
  const response = await fetch('https://ai-gateway.vercel.sh/v1/responses', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${process.env.AI_GATEWAY_API_KEY}`,
    },
    body: JSON.stringify({
      model: 'anthropic/claude-opus-4.6',
      input: [
        {
          type: 'message',
          role: 'user',
          content: 'Invent a new holiday and describe its traditions.',
        },
      ],
    }),
  });

  // fetch resolves (does not reject) on HTTP error statuses, so check
  // explicitly; otherwise a 4xx/5xx fails later with a confusing
  // "cannot read properties of undefined" when indexing result.output.
  if (!response.ok) {
    throw new Error(`Request failed (${response.status}): ${await response.text()}`);
  }

  const result = await response.json();
  console.log(result.output[0].content[0].text);
}

main().catch(console.error);
main.py
# Call AI Gateway's OpenResponses endpoint directly over HTTP with requests.
import os
import requests
from dotenv import load_dotenv

# Read AI_GATEWAY_API_KEY from .env.local into the process environment.
load_dotenv()

response = requests.post(
    'https://ai-gateway.vercel.sh/v1/responses',
    headers={
        'Content-Type': 'application/json',
        'Authorization': f'Bearer {os.getenv("AI_GATEWAY_API_KEY")}',
    },
    json={
        'model': 'anthropic/claude-opus-4.6',
        'input': [
            {
                'type': 'message',
                'role': 'user',
                'content': 'Invent a new holiday and describe its traditions.',
            },
        ],
    },
)

# requests does not raise on HTTP error statuses by default; fail loudly here
# rather than with a KeyError when indexing the (error) body below.
response.raise_for_status()

result = response.json()
print(result['output'][0]['content'][0]['text'])
Terminal
# Call the OpenResponses endpoint directly with curl.
# Expects AI_GATEWAY_API_KEY to be exported in the current shell.
curl -X POST "https://ai-gateway.vercel.sh/v1/responses" \
  -H "Authorization: Bearer $AI_GATEWAY_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{
    "model": "anthropic/claude-opus-4.6",
    "input": [
      {
        "type": "message",
        "role": "user",
        "content": "Invent a new holiday and describe its traditions."
      }
    ]
  }'

Learn more in the OpenResponses API docs.


Was this helpful?

supported.