CrewAI integration with the Telnyx Inference API
The Telnyx Inference API is OpenAI-compatible, so it can serve as the LLM backend for CrewAI agents.

Setup

pip install crewai

Usage

Set these environment variables to route all OpenAI-style traffic through Telnyx globally:
export TELNYX_API_KEY=your_telnyx_api_key
export OPENAI_BASE_URL=https://api.telnyx.com/v2/ai/openai
Alternatively, configure the LLM explicitly for each agent:
import os
from crewai import Agent, Task, Crew, LLM

# LLM routed to the Telnyx OpenAI-compatible endpoint.
# The "openai/" model prefix presumably tells CrewAI to speak the OpenAI
# protocol against the custom base_url — confirm against CrewAI/LiteLLM docs.
llm = LLM(
    model="openai/moonshotai/Kimi-K2.5",
    base_url="https://api.telnyx.com/v2/ai/openai",
    # Read the API key from the environment; avoids hard-coding secrets.
    api_key=os.getenv("TELNYX_API_KEY"),
)

# Agent that gathers information; uses the Telnyx-backed LLM defined above.
researcher = Agent(
    role="Research Analyst",
    goal="Find and analyze information",
    backstory="You are an experienced research analyst.",
    llm=llm,
)

# Agent that turns research output into a written report; shares the same LLM.
writer = Agent(
    role="Technical Writer",
    goal="Write clear, accurate reports",
    backstory="You are a skilled technical writer.",
    llm=llm,
)

# Each Task binds a description to the agent responsible for executing it.
# NOTE(review): many CrewAI versions require an `expected_output` argument on
# Task — confirm against the installed crewai version.
research_task = Task(
    description="Research the latest trends in AI infrastructure",
    agent=researcher,
)

write_task = Task(
    description="Write a summary report based on the research findings",
    agent=writer,
)

# Assemble agents and tasks into a crew and run it. Tasks are listed in the
# order they should run (presumably the default sequential process — verify).
crew = Crew(agents=[researcher, writer], tasks=[research_task, write_task])
result = crew.kickoff()
print(result)

Tool Calling

from crewai.tools import tool

@tool("Search the web")
def search_web(query: str) -> str:
    """Perform a (stubbed) web search and return a result string for *query*."""
    # Placeholder implementation: echoes the query back as a result.
    return "Results for: {}".format(query)

# Same researcher agent as before, now equipped with a custom tool so the
# LLM can invoke `search_web` while working on its task.
researcher = Agent(
    role="Research Analyst",
    goal="Find and analyze information",
    backstory="You are an experienced research analyst.",
    llm=llm,
    tools=[search_web],
)