Skip to content
'; user_status_content.firstChild.appendChild(avatarContainer); } else { // Placeholder for LoggedOutUserMenu let loggedOutContainer = document.createElement('div'); // if LoggedOutUserMenu fallback let userBtn = document.createElement('button'); userBtn.style.width = "33px"; userBtn.style.height = "33px"; userBtn.style.display = "flex"; userBtn.style.alignItems = "center"; userBtn.style.justifyContent = "center"; userBtn.style.color = "var(--ds-gray-900)"; userBtn.style.border = "1px solid var(--ds-gray-300)"; userBtn.style.borderRadius = "100%"; userBtn.style.cursor = "pointer"; userBtn.style.background = "transparent"; userBtn.style.padding = "0"; // user icon ( from geist) let svg = document.createElementNS('http://www.w3.org/2000/svg', 'svg'); svg.setAttribute('data-testid', 'geist-icon'); svg.setAttribute('height', '16'); svg.setAttribute('stroke-linejoin', 'round'); svg.setAttribute('style', 'color:currentColor'); svg.setAttribute('viewBox', '0 0 16 16'); svg.setAttribute('width', '16'); let path = document.createElementNS('http://www.w3.org/2000/svg', 'path'); path.setAttribute('fill-rule', 'evenodd'); path.setAttribute('clip-rule', 'evenodd'); path.setAttribute('d', 'M7.75 0C5.95507 0 4.5 1.45507 4.5 3.25V3.75C4.5 5.54493 5.95507 7 7.75 7H8.25C10.0449 7 11.5 5.54493 11.5 3.75V3.25C11.5 1.45507 10.0449 0 8.25 0H7.75ZM6 3.25C6 2.2835 6.7835 1.5 7.75 1.5H8.25C9.2165 1.5 10 2.2835 10 3.25V3.75C10 4.7165 9.2165 5.5 8.25 5.5H7.75C6.7835 5.5 6 4.7165 6 3.75V3.25ZM2.5 14.5V13.1709C3.31958 11.5377 4.99308 10.5 6.82945 10.5H9.17055C11.0069 10.5 12.6804 11.5377 13.5 13.1709V14.5H2.5ZM6.82945 9C4.35483 9 2.10604 10.4388 1.06903 12.6857L1 12.8353V13V15.25V16H1.75H14.25H15V15.25V13V12.8353L14.931 12.6857C13.894 10.4388 11.6452 9 9.17055 9H6.82945Z'); path.setAttribute('fill', 'currentColor'); svg.appendChild(path); userBtn.appendChild(svg); loggedOutContainer.appendChild(userBtn); loggedOutContainer.style.display = 'flex'; loggedOutContainer.style.gap = '8px'; 
loggedOutContainer.style.alignItems = 'center'; user_status_content.firstChild.appendChild(loggedOutContainer); } })();
Menu

OpenAI Codex

Last updated March 16, 2026

OpenAI Codex is OpenAI's agentic coding tool. You can configure it to use Vercel AI Gateway, enabling you to:

  • Route requests through multiple AI providers
  • Monitor traffic and spend in your AI Gateway Overview
  • View detailed traces in Vercel Observability under AI
  • Use any model available through the gateway

To make these settings persistent, configure Codex to use AI Gateway through its configuration file.

  1. Follow the installation instructions on the OpenAI Codex repository to install the Codex CLI tool.

  2. Set your AI Gateway API key in your shell configuration file, for example in ~/.zshrc or ~/.bashrc:

    export AI_GATEWAY_API_KEY="your-ai-gateway-api-key"

    After adding this, reload your shell configuration:

    source ~/.zshrc  # or source ~/.bashrc
  3. Open ~/.codex/config.toml and add the following:

    ~/.codex/config.toml
    [model_providers.vercel]
    name = "Vercel AI Gateway"
    base_url = "https://ai-gateway.vercel.sh/v1"
    env_key = "AI_GATEWAY_API_KEY"
    wire_api = "responses"
     
    [profiles.vercel]
    model_provider = "vercel"
    model = "openai/gpt-5.2-codex"

    The configuration above:

    • Sets up a model provider named vercel that points to the AI Gateway
    • References your AI_GATEWAY_API_KEY environment variable
    • Creates a vercel profile that uses the vercel model provider
    • Specifies openai/gpt-5.2-codex as the default model
    • Uses wire_api = "responses" for the OpenAI Responses API format
  4. Start Codex with the vercel profile:

    codex --profile vercel

    Vercel AI Gateway routes your requests. To confirm, check your AI Gateway Overview in the Vercel dashboard.

  5. To use a different model, update the model field in your config:

    ~/.codex/config.toml
    [profiles.vercel]
    model_provider = "vercel"
    model = "anthropic/claude-sonnet-4.5"
    # Or try other models:
    # model = "google/gemini-3-flash"
    # model = "openai/o3"

    When using non-OpenAI models through the gateway, you may see warnings about model metadata not being found. These warnings are safe to ignore since the gateway handles model routing.

  6. To switch quickly between models, you can define multiple profiles. Add each profile to your config file:

    ~/.codex/config.toml
    [model_providers.vercel]
    name = "Vercel AI Gateway"
    base_url = "https://ai-gateway.vercel.sh/v1"
    env_key = "AI_GATEWAY_API_KEY"
    wire_api = "responses"
     
    [profiles.vercel]
    model_provider = "vercel"
    model = "openai/gpt-5.2-codex"
     
    [profiles.fast]
    model_provider = "vercel"
    model = "openai/gpt-4o-mini"
     
    [profiles.reasoning]
    model_provider = "vercel"
    model = "openai/o3"
     
    [profiles.claude]
    model_provider = "vercel"
    model = "anthropic/claude-sonnet-4.5"

    Switch between profiles using the --profile flag:

    codex --profile vercel
    codex --profile claude

Was this helpful?

supported.