To use the Parallel MCP servers programmatically, see your LLM provider’s documentation for how to specify MCP servers in a request. The code below shows our Search MCP used from OpenAI- and Anthropic-compatible LLM requests. To authenticate, either complete the OAuth flow or pass your Parallel API key directly as a Bearer token in the Authorization header.
from openai import OpenAI
from openai.types import responses as openai_responses

parallel_api_key = "PARALLEL_API_KEY"  # Your Parallel API key
openai_api_key = "YOUR_OPENAI_API_KEY"  # Your OpenAI API key

# Register the Parallel Search MCP server as a remote MCP tool,
# authenticating with the Parallel API key as a Bearer token.
tools = [
    openai_responses.tool_param.Mcp(
        server_label="parallel_web_search",
        server_url="https://search-mcp.parallel.ai/mcp",
        headers={"Authorization": "Bearer " + parallel_api_key},
        type="mcp",
        require_approval="never",
    )
]

# Create a response that is required to call the MCP tool.
response = OpenAI(
    api_key=openai_api_key
).responses.create(
    model="gpt-4.1",
    input="Who is the CEO of Apple?",
    tools=tools,
    tool_choice="required",
)

print(response)
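
For Anthropic-compatible requests, the Messages API’s MCP connector accepts remote MCP servers through the mcp_servers parameter. The sketch below assumes the anthropic Python SDK with the mcp-client-2025-04-04 beta flag documented by Anthropic; the model name is illustrative, and the server URL and label mirror the OpenAI example above.

import anthropic

parallel_api_key = "PARALLEL_API_KEY"  # Your Parallel API key
anthropic_api_key = "YOUR_ANTHROPIC_API_KEY"  # Your Anthropic API key

client = anthropic.Anthropic(api_key=anthropic_api_key)

# Attach the Parallel Search MCP server via Anthropic's MCP connector (beta).
# The beta flag and model name below are assumptions based on Anthropic's docs.
response = client.beta.messages.create(
    model="claude-sonnet-4-20250514",  # Illustrative model name
    max_tokens=1024,
    messages=[{"role": "user", "content": "Who is the CEO of Apple?"}],
    mcp_servers=[
        {
            "type": "url",
            "url": "https://search-mcp.parallel.ai/mcp",
            "name": "parallel_web_search",
            "authorization_token": parallel_api_key,
        }
    ],
    betas=["mcp-client-2025-04-04"],
)

print(response)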