Skip to main content
Give your LLM access to real-time web information. Octen Web Search can be used as a tool with any LLM provider that supports function calling.

Get API Key

Get your API key from the API Platform

API Reference

View the full Web Search API reference

Install

pip install octen

OpenAI

Define Octen Web Search as a tool and let GPT decide when to search.
import json
from openai import OpenAI
from octen import Octen

# LLM client and Octen Web Search client.
openai = OpenAI(
    api_key="your-openai-api-key",
    base_url="https://api.openai.com/v1",  # or your custom endpoint
)
octen = Octen(api_key="your-octen-api-key")

# Tool schema advertised to the model; GPT decides when to invoke it.
tools = [
    {
        "type": "function",
        "function": {
            "name": "web_search",
            "description": "Search the web for real-time information.",
            "parameters": {
                "type": "object",
                "properties": {
                    "query": {
                        "type": "string",
                        "description": "The search query"
                    }
                },
                "required": ["query"]
            }
        }
    }
]

messages = [{"role": "user", "content": "What are the latest AI news today?"}]

response = openai.chat.completions.create(
    model="gpt-5.4",
    messages=messages,
    tools=tools,
)

# Handle tool calls
if response.choices[0].message.tool_calls:
    # The assistant message goes back first, and EVERY tool_call in it must
    # receive a matching "tool" message — otherwise the follow-up request is
    # rejected by the API. Loop instead of handling only tool_calls[0].
    messages.append(response.choices[0].message)

    for tool_call in response.choices[0].message.tool_calls:
        args = json.loads(tool_call.function.arguments)

        # Call Octen Web Search
        search_results = octen.search.search(query=args["query"], count=5)

        # Feed results back to GPT
        messages.append({
            "role": "tool",
            "tool_call_id": tool_call.id,
            "content": json.dumps(search_results.results)
        })

    final = openai.chat.completions.create(
        model="gpt-5.4",
        messages=messages,
        tools=tools,
    )
    print(final.choices[0].message.content)

Anthropic

Use Octen Web Search as a tool with Claude.
import json
import anthropic
from octen import Octen

# LLM client and Octen Web Search client.
client = anthropic.Anthropic(
    api_key="your-anthropic-api-key",
    base_url="https://api.anthropic.com",  # or your custom endpoint
)
octen = Octen(api_key="your-octen-api-key")

# Tool schema advertised to Claude (Anthropic uses "input_schema").
tools = [
    {
        "name": "web_search",
        "description": "Search the web for real-time information.",
        "input_schema": {
            "type": "object",
            "properties": {
                "query": {
                    "type": "string",
                    "description": "The search query"
                }
            },
            "required": ["query"]
        }
    }
]

messages = [{"role": "user", "content": "What are the latest AI news today?"}]

response = client.messages.create(
    model="claude-opus-4-6",
    max_tokens=4096,
    messages=messages,
    tools=tools,
)

# Handle tool use
if response.stop_reason == "tool_use":
    # Echo the assistant turn back first; every tool_use block in it needs a
    # corresponding tool_result in the next user turn, or the API rejects the
    # follow-up request. Loop instead of handling only the first block.
    messages.append({"role": "assistant", "content": response.content})

    tool_results = []
    for block in response.content:
        if block.type != "tool_use":
            continue

        # Call Octen Web Search
        search_results = octen.search.search(
            query=block.input["query"], count=5
        )

        tool_results.append({
            "type": "tool_result",
            "tool_use_id": block.id,
            "content": json.dumps(search_results.results)
        })

    # Feed all results back to Claude in a single user turn.
    messages.append({"role": "user", "content": tool_results})

    final = client.messages.create(
        model="claude-opus-4-6",
        max_tokens=4096,
        messages=messages,
        tools=tools,
    )
    print(final.content[0].text)

Google Gemini

Use Octen Web Search as a tool with Gemini.
from google import genai
from google.genai import types
from octen import Octen

# LLM client and Octen Web Search client.
client = genai.Client(api_key="your-gemini-api-key")
octen = Octen(api_key="your-octen-api-key")

# Function declaration advertised to Gemini; the model decides when to call it.
web_search_tool = types.Tool(
    function_declarations=[
        types.FunctionDeclaration(
            name="web_search",
            description="Search the web for real-time information.",
            parameters=types.Schema(
                type="OBJECT",
                properties={
                    "query": types.Schema(
                        type="STRING",
                        description="The search query",
                    ),
                },
                required=["query"],
            ),
        )
    ]
)

user_prompt = "What are the latest AI news today?"

response = client.models.generate_content(
    model="gemini-3.1-pro-preview",
    contents=user_prompt,
    config=types.GenerateContentConfig(tools=[web_search_tool]),
)

# Handle function call
part = response.candidates[0].content.parts[0]
if part.function_call:
    query = part.function_call.args["query"]

    # Call Octen Web Search
    search_results = octen.search.search(query=query, count=5)

    # Feed results back to Gemini. Part.from_text takes a keyword-only
    # `text=` argument in the google-genai SDK; a positional call raises
    # TypeError.
    response = client.models.generate_content(
        model="gemini-3.1-pro-preview",
        contents=[
            types.Content(
                role="user",
                parts=[types.Part.from_text(text=user_prompt)],
            ),
            response.candidates[0].content,
            types.Content(
                role="tool",
                parts=[
                    types.Part.from_function_response(
                        name="web_search",
                        response={"results": search_results.results},
                    )
                ],
            ),
        ],
        config=types.GenerateContentConfig(tools=[web_search_tool]),
    )
    print(response.text)

LangChain

Use Octen Web Search as a LangChain tool.
from langchain_core.tools import tool
from langchain_openai import ChatOpenAI
from langchain_core.messages import HumanMessage
from langgraph.prebuilt import create_react_agent
from octen import Octen

octen = Octen(api_key="your-octen-api-key")

@tool
def web_search(query: str) -> str:
    """Search the web for real-time information."""
    results = octen.search.search(query=query, count=5)
    return str(results.results)

# Plain model id, matching the other examples: the official OpenAI endpoint
# does not accept provider-prefixed ids like "openai/gpt-5.4" (that form is
# for aggregator gateways — keep it only if you point base_url at one).
llm = ChatOpenAI(
    model="gpt-5.4",
    api_key="your-api-key",
    base_url="https://api.openai.com/v1",  # or your custom endpoint
)
agent = create_react_agent(llm, [web_search])

response = agent.invoke(
    {"messages": [HumanMessage(content="What are the latest AI news today?")]}
)

# Last message in the agent transcript is the final model answer.
print(response["messages"][-1].content)