Skip to main content

Documentation Index

Fetch the complete documentation index at: https://developers.telnyx.com/llms.txt

Use this file to discover all available pages before exploring further.

The Telnyx Inference API is OpenAI-compatible: use LangChain's ChatOpenAI class as-is, swapping only the base_url to point at the Telnyx endpoint.

Setup

pip install langchain-openai

Usage

import os
from langchain_openai import ChatOpenAI

# Telnyx exposes an OpenAI-compatible endpoint, so LangChain's ChatOpenAI
# works unmodified once base_url points at the Telnyx API.
llm = ChatOpenAI(
    model="moonshotai/Kimi-K2.6",
    api_key=os.getenv("TELNYX_API_KEY"),
    base_url="https://api.telnyx.com/v2/ai/openai",
)

# Stream tokens as they arrive instead of waiting for the full completion.
for piece in llm.stream("Help me plan my vacation"):
    print(piece.content, end="", flush=True)

Function Calling

import os
from langchain_openai import ChatOpenAI
from langchain_core.tools import tool

@tool
def get_weather(location: str) -> str:
    """Get the current weather for a location."""
    # Stubbed response for the example; a real tool would call a weather API.
    # NOTE: the docstring above doubles as the tool description sent to the model.
    return f"The weather in {location} is sunny and 72°F."

# Build the client, then attach the tool schema; the model decides at
# inference time whether to emit a tool call.
chat = ChatOpenAI(
    model="moonshotai/Kimi-K2.6",
    api_key=os.getenv("TELNYX_API_KEY"),
    base_url="https://api.telnyx.com/v2/ai/openai",
)
llm_with_tools = chat.bind_tools([get_weather])

# The response carries structured tool-call requests rather than plain text.
result = llm_with_tools.invoke("What's the weather in Chicago?")
print(result.tool_calls)

Streaming

from langchain_core.messages import HumanMessage

# Reuses the `llm` client configured in the Usage section above.
# Passing a message list (rather than a bare string) is the explicit form.
prompt = [HumanMessage(content="Explain quantum computing in 3 sentences")]
for token in llm.stream(prompt):
    print(token.content, end="", flush=True)