
LangChain

Last updated March 7, 2026

LangChain gives you tools for every step of the agent development lifecycle. This guide shows how to integrate the Vercel AI Gateway with LangChain so you can access models from multiple AI providers through a single OpenAI-compatible endpoint.

  1. First, create a new directory for your project and initialize it:

    Terminal
    mkdir langchain-ai-gateway
    cd langchain-ai-gateway
    pnpm init
  2. Install the required LangChain packages, along with dotenv and @types/node:

    Terminal
    pnpm i langchain @langchain/core @langchain/openai dotenv @types/node
  3. Create a .env file with your Vercel AI Gateway API key:

    .env
    AI_GATEWAY_API_KEY=your-api-key-here

    If you're using the AI Gateway from within a Vercel deployment, you can instead rely on the VERCEL_OIDC_TOKEN environment variable, which is provided automatically.

  4. Create a new file called index.ts with the following code:

    index.ts
    import 'dotenv/config';
    import { ChatOpenAI } from '@langchain/openai';
    import { HumanMessage } from '@langchain/core/messages';
     
    async function main() {
      console.log('=== LangChain Chat Completion with AI Gateway ===');
     
      const apiKey =
        process.env.AI_GATEWAY_API_KEY || process.env.VERCEL_OIDC_TOKEN;
     
      const chat = new ChatOpenAI({
        apiKey: apiKey,
        modelName: 'openai/gpt-5.4',
        temperature: 0.7,
        configuration: {
          baseURL: 'https://ai-gateway.vercel.sh/v1',
        },
      });
     
      try {
        const response = await chat.invoke([
          new HumanMessage('Write a one-sentence bedtime story about a unicorn.'),
        ]);
     
        console.log('Response:', response.content);
      } catch (error) {
        console.error('Error:', error);
      }
    }
     
    main().catch(console.error);

    This code:

    • Reads the API key from AI_GATEWAY_API_KEY, with VERCEL_OIDC_TOKEN as a fallback
    • Initializes a ChatOpenAI instance pointed at the AI Gateway's base URL
    • Sets the model temperature to 0.7
    • Makes a chat completion request
    • Handles any potential errors

    Two optional variations, streaming and a prompt-template chain, are sketched after these steps.
  5. Run your application with tsx, a TypeScript runner for Node.js:

    Terminal
    pnpm dlx tsx index.ts

    You should see a response from the AI model in your console.
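
As an optional variation on step 4, you can stream the response token by token instead of waiting for the full completion. The sketch below is illustrative rather than part of the guide: it reuses the model slug, base URL, and environment variables from the steps above, uses LangChain's standard stream() method, and assumes the gateway's OpenAI-compatible endpoint supports streaming. The file name streaming.ts is made up for this example.

streaming.ts
import 'dotenv/config';
import { ChatOpenAI } from '@langchain/openai';
import { HumanMessage } from '@langchain/core/messages';

async function main() {
  const chat = new ChatOpenAI({
    apiKey: process.env.AI_GATEWAY_API_KEY || process.env.VERCEL_OIDC_TOKEN,
    modelName: 'openai/gpt-5.4',
    temperature: 0.7,
    configuration: {
      baseURL: 'https://ai-gateway.vercel.sh/v1',
    },
  });

  // stream() returns an async iterable of AIMessageChunk objects,
  // so each token can be printed as it arrives.
  const stream = await chat.stream([
    new HumanMessage('Write a one-sentence bedtime story about a unicorn.'),
  ]);

  for await (const chunk of stream) {
    process.stdout.write(String(chunk.content));
  }
  process.stdout.write('\n');
}

main().catch(console.error);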


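Because LangChain models compose with prompts and parsers as runnables, the gateway-backed ChatOpenAI instance also drops into a chain unchanged. The following is a minimal illustrative sketch, not from the original guide: the file name chain.ts, the prompt wording, and the {animal} input variable are made up, while ChatPromptTemplate, StringOutputParser, and pipe() are standard @langchain/core APIs.

chain.ts
import 'dotenv/config';
import { ChatOpenAI } from '@langchain/openai';
import { ChatPromptTemplate } from '@langchain/core/prompts';
import { StringOutputParser } from '@langchain/core/output_parsers';

async function main() {
  const chat = new ChatOpenAI({
    apiKey: process.env.AI_GATEWAY_API_KEY || process.env.VERCEL_OIDC_TOKEN,
    modelName: 'openai/gpt-5.4',
    configuration: {
      baseURL: 'https://ai-gateway.vercel.sh/v1',
    },
  });

  // A reusable prompt with an {animal} placeholder.
  const prompt = ChatPromptTemplate.fromMessages([
    ['system', 'You are a concise storyteller.'],
    ['human', 'Write a one-sentence bedtime story about a {animal}.'],
  ]);

  // pipe() composes prompt -> model -> parser into a single runnable.
  const chain = prompt.pipe(chat).pipe(new StringOutputParser());

  const story = await chain.invoke({ animal: 'unicorn' });
  console.log(story);
}

main().catch(console.error);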