From cf5cd6f7975372d5f222958d8a31c9bf759c2946 Mon Sep 17 00:00:00 2001 From: Richard Solomou Date: Fri, 20 Mar 2026 11:57:17 +0200 Subject: [PATCH 1/7] feat: Add AI provider integration examples Add self-contained examples for each AI provider supported by posthog.ai: - Anthropic (chat, streaming, extended thinking) - OpenAI (Chat Completions, Responses, streaming, embeddings, transcription, image generation) - Google Gemini (chat, streaming, image generation) - LangChain (callback handler, OTEL) - LiteLLM (chat, streaming) - Pydantic AI (agent with OTEL) - OpenAI Agents SDK (multi-agent, single agent, guardrails, custom spans) Each example directory is self-contained with its own requirements.txt, .env.example, and README. Files are designed to be copy-pasted by users as starting points for their own integrations. --- examples/example-ai-anthropic/.env.example | 3 + examples/example-ai-anthropic/README.md | 26 +++++++ examples/example-ai-anthropic/chat.py | 53 +++++++++++++++ .../example-ai-anthropic/extended_thinking.py | 27 ++++++++ .../example-ai-anthropic/requirements.txt | 2 + examples/example-ai-anthropic/streaming.py | 24 +++++++ examples/example-ai-gemini/.env.example | 3 + examples/example-ai-gemini/README.md | 25 +++++++ examples/example-ai-gemini/chat.py | 61 +++++++++++++++++ .../example-ai-gemini/image_generation.py | 23 +++++++ examples/example-ai-gemini/requirements.txt | 2 + examples/example-ai-gemini/streaming.py | 23 +++++++ examples/example-ai-langchain/.env.example | 3 + examples/example-ai-langchain/README.md | 30 +++++++++ .../example-ai-langchain/callback_handler.py | 50 ++++++++++++++ examples/example-ai-langchain/otel.py | 31 +++++++++ .../example-ai-langchain/requirements.txt | 3 + examples/example-ai-litellm/.env.example | 3 + examples/example-ai-litellm/README.md | 24 +++++++ examples/example-ai-litellm/chat.py | 63 +++++++++++++++++ examples/example-ai-litellm/requirements.txt | 2 + examples/example-ai-litellm/streaming.py | 25 +++++++ 
.../example-ai-openai-agents/.env.example | 3 + examples/example-ai-openai-agents/README.md | 28 ++++++++ .../example-ai-openai-agents/custom_spans.py | 38 +++++++++++ .../example-ai-openai-agents/guardrails.py | 67 +++++++++++++++++++ .../example-ai-openai-agents/multi_agent.py | 63 +++++++++++++++++ .../example-ai-openai-agents/requirements.txt | 2 + .../example-ai-openai-agents/single_agent.py | 43 ++++++++++++ examples/example-ai-openai/.env.example | 3 + examples/example-ai-openai/README.md | 29 ++++++++ .../example-ai-openai/chat_completions.py | 63 +++++++++++++++++ .../chat_completions_streaming.py | 26 +++++++ examples/example-ai-openai/embeddings.py | 20 ++++++ .../example-ai-openai/image_generation.py | 22 ++++++ examples/example-ai-openai/requirements.txt | 2 + examples/example-ai-openai/responses.py | 57 ++++++++++++++++ .../example-ai-openai/responses_streaming.py | 24 +++++++ examples/example-ai-openai/transcription.py | 22 ++++++ examples/example-ai-pydantic-ai/.env.example | 3 + examples/example-ai-pydantic-ai/README.md | 22 ++++++ .../example-ai-pydantic-ai/agent_with_otel.py | 48 +++++++++++++ .../example-ai-pydantic-ai/requirements.txt | 4 ++ 43 files changed, 1095 insertions(+) create mode 100644 examples/example-ai-anthropic/.env.example create mode 100644 examples/example-ai-anthropic/README.md create mode 100644 examples/example-ai-anthropic/chat.py create mode 100644 examples/example-ai-anthropic/extended_thinking.py create mode 100644 examples/example-ai-anthropic/requirements.txt create mode 100644 examples/example-ai-anthropic/streaming.py create mode 100644 examples/example-ai-gemini/.env.example create mode 100644 examples/example-ai-gemini/README.md create mode 100644 examples/example-ai-gemini/chat.py create mode 100644 examples/example-ai-gemini/image_generation.py create mode 100644 examples/example-ai-gemini/requirements.txt create mode 100644 examples/example-ai-gemini/streaming.py create mode 100644 
examples/example-ai-langchain/.env.example create mode 100644 examples/example-ai-langchain/README.md create mode 100644 examples/example-ai-langchain/callback_handler.py create mode 100644 examples/example-ai-langchain/otel.py create mode 100644 examples/example-ai-langchain/requirements.txt create mode 100644 examples/example-ai-litellm/.env.example create mode 100644 examples/example-ai-litellm/README.md create mode 100644 examples/example-ai-litellm/chat.py create mode 100644 examples/example-ai-litellm/requirements.txt create mode 100644 examples/example-ai-litellm/streaming.py create mode 100644 examples/example-ai-openai-agents/.env.example create mode 100644 examples/example-ai-openai-agents/README.md create mode 100644 examples/example-ai-openai-agents/custom_spans.py create mode 100644 examples/example-ai-openai-agents/guardrails.py create mode 100644 examples/example-ai-openai-agents/multi_agent.py create mode 100644 examples/example-ai-openai-agents/requirements.txt create mode 100644 examples/example-ai-openai-agents/single_agent.py create mode 100644 examples/example-ai-openai/.env.example create mode 100644 examples/example-ai-openai/README.md create mode 100644 examples/example-ai-openai/chat_completions.py create mode 100644 examples/example-ai-openai/chat_completions_streaming.py create mode 100644 examples/example-ai-openai/embeddings.py create mode 100644 examples/example-ai-openai/image_generation.py create mode 100644 examples/example-ai-openai/requirements.txt create mode 100644 examples/example-ai-openai/responses.py create mode 100644 examples/example-ai-openai/responses_streaming.py create mode 100644 examples/example-ai-openai/transcription.py create mode 100644 examples/example-ai-pydantic-ai/.env.example create mode 100644 examples/example-ai-pydantic-ai/README.md create mode 100644 examples/example-ai-pydantic-ai/agent_with_otel.py create mode 100644 examples/example-ai-pydantic-ai/requirements.txt diff --git 
a/examples/example-ai-anthropic/.env.example b/examples/example-ai-anthropic/.env.example new file mode 100644 index 00000000..8979a330 --- /dev/null +++ b/examples/example-ai-anthropic/.env.example @@ -0,0 +1,3 @@ +POSTHOG_API_KEY=phc_your_project_api_key +POSTHOG_HOST=https://us.i.posthog.com +ANTHROPIC_API_KEY=sk-ant-your_api_key diff --git a/examples/example-ai-anthropic/README.md b/examples/example-ai-anthropic/README.md new file mode 100644 index 00000000..bbdfb53d --- /dev/null +++ b/examples/example-ai-anthropic/README.md @@ -0,0 +1,26 @@ +# Anthropic + PostHog AI Examples + +Track Anthropic Claude API calls with PostHog. + +## Setup + +```bash +pip install -r requirements.txt +cp .env.example .env +# Fill in your API keys in .env +``` + +## Examples + +- **chat.py** - Basic chat with tool calling +- **streaming.py** - Streaming responses +- **extended_thinking.py** - Claude's extended thinking feature + +## Run + +```bash +source .env +python chat.py +python streaming.py +python extended_thinking.py +``` diff --git a/examples/example-ai-anthropic/chat.py b/examples/example-ai-anthropic/chat.py new file mode 100644 index 00000000..4fa9aaab --- /dev/null +++ b/examples/example-ai-anthropic/chat.py @@ -0,0 +1,53 @@ +"""Anthropic chat with tool calling, tracked by PostHog.""" + +import os +import json +import urllib.request +from posthog import Posthog +from posthog.ai.anthropic import Anthropic + +posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +client = Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"], posthog_client=posthog) + +tools = [ + { + "name": "get_weather", + "description": "Get current weather for a location", + "input_schema": { + "type": "object", + "properties": { + "latitude": {"type": "number"}, + "longitude": {"type": "number"}, + "location_name": {"type": "string"}, + }, + "required": ["latitude", "longitude", "location_name"], + }, + } +] + + +def get_weather(latitude: 
float, longitude: float, location_name: str) -> str: + url = f"https://api.open-meteo.com/v1/forecast?latitude={latitude}&longitude={longitude}&current=temperature_2m,relative_humidity_2m,wind_speed_10m" + with urllib.request.urlopen(url) as resp: + data = json.loads(resp.read()) + current = data["current"] + return f"Weather in {location_name}: {current['temperature_2m']}°C, humidity {current['relative_humidity_2m']}%, wind {current['wind_speed_10m']} km/h" + + +message = client.messages.create( + model="claude-sonnet-4-5-20250929", + max_tokens=1024, + posthog_distinct_id="example-user", + tools=tools, + messages=[{"role": "user", "content": "What's the weather like in San Francisco?"}], +) + +# Handle tool use if the model requests it +for block in message.content: + if block.type == "text": + print(block.text) + elif block.type == "tool_use": + result = get_weather(**block.input) + print(result) + +posthog.shutdown() diff --git a/examples/example-ai-anthropic/extended_thinking.py b/examples/example-ai-anthropic/extended_thinking.py new file mode 100644 index 00000000..45d6931e --- /dev/null +++ b/examples/example-ai-anthropic/extended_thinking.py @@ -0,0 +1,27 @@ +"""Anthropic extended thinking, tracked by PostHog. + +Extended thinking lets Claude show its reasoning process before responding. 
+""" + +import os +from posthog import Posthog +from posthog.ai.anthropic import Anthropic + +posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +client = Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"], posthog_client=posthog) + +message = client.messages.create( + model="claude-sonnet-4-5-20250929", + max_tokens=16000, + posthog_distinct_id="example-user", + thinking={"type": "enabled", "budget_tokens": 10000}, + messages=[{"role": "user", "content": "What is the probability of rolling at least one six in four rolls of a fair die?"}], +) + +for block in message.content: + if block.type == "thinking": + print(f"Thinking: {block.thinking}\n") + elif block.type == "text": + print(f"Answer: {block.text}") + +posthog.shutdown() diff --git a/examples/example-ai-anthropic/requirements.txt b/examples/example-ai-anthropic/requirements.txt new file mode 100644 index 00000000..8eb8ac34 --- /dev/null +++ b/examples/example-ai-anthropic/requirements.txt @@ -0,0 +1,2 @@ +posthog>=6.6.1 +anthropic diff --git a/examples/example-ai-anthropic/streaming.py b/examples/example-ai-anthropic/streaming.py new file mode 100644 index 00000000..4c21c6a4 --- /dev/null +++ b/examples/example-ai-anthropic/streaming.py @@ -0,0 +1,24 @@ +"""Anthropic streaming chat, tracked by PostHog.""" + +import os +from posthog import Posthog +from posthog.ai.anthropic import Anthropic + +posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +client = Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"], posthog_client=posthog) + +stream = client.messages.create( + model="claude-sonnet-4-5-20250929", + max_tokens=1024, + posthog_distinct_id="example-user", + messages=[{"role": "user", "content": "Write a haiku about observability."}], + stream=True, +) + +for event in stream: + if hasattr(event, "type"): + if event.type == "content_block_delta" and hasattr(event.delta, "text"): + 
print(event.delta.text, end="", flush=True) + +print() +posthog.shutdown() diff --git a/examples/example-ai-gemini/.env.example b/examples/example-ai-gemini/.env.example new file mode 100644 index 00000000..821414cc --- /dev/null +++ b/examples/example-ai-gemini/.env.example @@ -0,0 +1,3 @@ +POSTHOG_API_KEY=phc_your_project_api_key +POSTHOG_HOST=https://us.i.posthog.com +GEMINI_API_KEY=your_gemini_api_key diff --git a/examples/example-ai-gemini/README.md b/examples/example-ai-gemini/README.md new file mode 100644 index 00000000..df0af9ae --- /dev/null +++ b/examples/example-ai-gemini/README.md @@ -0,0 +1,25 @@ +# Google Gemini + PostHog AI Examples + +Track Google Gemini API calls with PostHog. + +## Setup + +```bash +pip install -r requirements.txt +cp .env.example .env +# Fill in your API keys in .env +``` + +## Examples + +- **chat.py** - Chat with tool calling +- **streaming.py** - Streaming responses +- **image_generation.py** - Image generation + +## Run + +```bash +source .env +python chat.py +python streaming.py +``` diff --git a/examples/example-ai-gemini/chat.py b/examples/example-ai-gemini/chat.py new file mode 100644 index 00000000..3e169717 --- /dev/null +++ b/examples/example-ai-gemini/chat.py @@ -0,0 +1,61 @@ +"""Google Gemini chat with tool calling, tracked by PostHog.""" + +import os +import json +import urllib.request +from google.genai import types +from posthog import Posthog +from posthog.ai.gemini import Client + +posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +client = Client(api_key=os.environ["GEMINI_API_KEY"], posthog_client=posthog) + +tool_declarations = [ + { + "name": "get_weather", + "description": "Get current weather for a location", + "parameters": { + "type": "object", + "properties": { + "latitude": {"type": "number"}, + "longitude": {"type": "number"}, + "location_name": {"type": "string"}, + }, + "required": ["latitude", "longitude", "location_name"], + }, + } 
+] + + +def get_weather(latitude: float, longitude: float, location_name: str) -> str: + url = f"https://api.open-meteo.com/v1/forecast?latitude={latitude}&longitude={longitude}&current=temperature_2m,relative_humidity_2m,wind_speed_10m" + with urllib.request.urlopen(url) as resp: + data = json.loads(resp.read()) + current = data["current"] + return f"Weather in {location_name}: {current['temperature_2m']}°C, humidity {current['relative_humidity_2m']}%, wind {current['wind_speed_10m']} km/h" + + +config = types.GenerateContentConfig( + tools=[types.Tool(function_declarations=tool_declarations)] +) + +response = client.models.generate_content( + model="gemini-2.5-flash", + posthog_distinct_id="example-user", + contents=[{"role": "user", "parts": [{"text": "What's the weather in London?"}]}], + config=config, +) + +for candidate in response.candidates: + for part in candidate.content.parts: + if hasattr(part, "function_call") and part.function_call: + result = get_weather( + latitude=part.function_call.args["latitude"], + longitude=part.function_call.args["longitude"], + location_name=part.function_call.args["location_name"], + ) + print(result) + elif hasattr(part, "text"): + print(part.text) + +posthog.shutdown() diff --git a/examples/example-ai-gemini/image_generation.py b/examples/example-ai-gemini/image_generation.py new file mode 100644 index 00000000..dcdb6f8d --- /dev/null +++ b/examples/example-ai-gemini/image_generation.py @@ -0,0 +1,23 @@ +"""Google Gemini image generation, tracked by PostHog.""" + +import os +from posthog import Posthog +from posthog.ai.gemini import Client + +posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +client = Client(api_key=os.environ["GEMINI_API_KEY"], posthog_client=posthog) + +response = client.models.generate_content( + model="gemini-2.5-flash-image", + posthog_distinct_id="example-user", + contents=[{"role": "user", "parts": [{"text": "Generate a pixel art 
hedgehog"}]}], +) + +for candidate in response.candidates: + for part in candidate.content.parts: + if hasattr(part, "inline_data") and part.inline_data: + print(f"Generated image: {part.inline_data.mime_type}, {len(part.inline_data.data)} bytes") + elif hasattr(part, "text"): + print(part.text) + +posthog.shutdown() diff --git a/examples/example-ai-gemini/requirements.txt b/examples/example-ai-gemini/requirements.txt new file mode 100644 index 00000000..6d0a0446 --- /dev/null +++ b/examples/example-ai-gemini/requirements.txt @@ -0,0 +1,2 @@ +posthog>=6.6.1 +google-genai diff --git a/examples/example-ai-gemini/streaming.py b/examples/example-ai-gemini/streaming.py new file mode 100644 index 00000000..87d4b97a --- /dev/null +++ b/examples/example-ai-gemini/streaming.py @@ -0,0 +1,23 @@ +"""Google Gemini streaming chat, tracked by PostHog.""" + +import os +from posthog import Posthog +from posthog.ai.gemini import Client + +posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +client = Client(api_key=os.environ["GEMINI_API_KEY"], posthog_client=posthog) + +stream = client.models.generate_content_stream( + model="gemini-2.5-flash", + posthog_distinct_id="example-user", + contents=[{"role": "user", "parts": [{"text": "Explain product analytics in three sentences."}]}], +) + +for chunk in stream: + for candidate in chunk.candidates: + for part in candidate.content.parts: + if hasattr(part, "text"): + print(part.text, end="", flush=True) + +print() +posthog.shutdown() diff --git a/examples/example-ai-langchain/.env.example b/examples/example-ai-langchain/.env.example new file mode 100644 index 00000000..3d1dd067 --- /dev/null +++ b/examples/example-ai-langchain/.env.example @@ -0,0 +1,3 @@ +POSTHOG_API_KEY=phc_your_project_api_key +POSTHOG_HOST=https://us.i.posthog.com +OPENAI_API_KEY=sk-your_api_key diff --git a/examples/example-ai-langchain/README.md b/examples/example-ai-langchain/README.md new file mode 
100644 index 00000000..f6dae0fb --- /dev/null +++ b/examples/example-ai-langchain/README.md @@ -0,0 +1,30 @@ +# LangChain + PostHog AI Examples + +Track LangChain LLM calls with PostHog. + +## Setup + +```bash +pip install -r requirements.txt +cp .env.example .env +# Fill in your API keys in .env +``` + +For the OTEL example, also install: + +```bash +pip install opentelemetry-sdk opentelemetry-exporter-otlp-proto-http +``` + +## Examples + +- **callback_handler.py** - PostHog callback handler with tool calling +- **otel.py** - OpenTelemetry instrumentation exporting to PostHog + +## Run + +```bash +source .env +python callback_handler.py +python otel.py +``` diff --git a/examples/example-ai-langchain/callback_handler.py b/examples/example-ai-langchain/callback_handler.py new file mode 100644 index 00000000..1c0f5935 --- /dev/null +++ b/examples/example-ai-langchain/callback_handler.py @@ -0,0 +1,50 @@ +"""LangChain with PostHog callback handler for automatic tracking.""" + +import os +import json +import urllib.request +from langchain_openai import ChatOpenAI +from langchain_core.tools import tool +from langchain_core.messages import HumanMessage, ToolMessage +from posthog import Posthog +from posthog.ai.langchain import CallbackHandler + +posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +callback_handler = CallbackHandler(client=posthog) + + +@tool +def get_weather(latitude: float, longitude: float, location_name: str) -> str: + """Get current weather for a location. 
+ + Args: + latitude: The latitude of the location + longitude: The longitude of the location + location_name: A human-readable name for the location + """ + url = f"https://api.open-meteo.com/v1/forecast?latitude={latitude}&longitude={longitude}&current=temperature_2m,relative_humidity_2m,wind_speed_10m" + with urllib.request.urlopen(url) as resp: + data = json.loads(resp.read()) + current = data["current"] + return f"Weather in {location_name}: {current['temperature_2m']}°C, humidity {current['relative_humidity_2m']}%, wind {current['wind_speed_10m']} km/h" + + +tools = [get_weather] +tool_map = {t.name: t for t in tools} + +model = ChatOpenAI(openai_api_key=os.environ["OPENAI_API_KEY"], temperature=0) +model_with_tools = model.bind_tools(tools) + +messages = [HumanMessage(content="What's the weather in Berlin?")] + +response = model_with_tools.invoke(messages, config={"callbacks": [callback_handler]}) + +if response.content: + print(response.content) + +if response.tool_calls: + for tool_call in response.tool_calls: + result = tool_map[tool_call["name"]].invoke(tool_call["args"]) + print(result) + +posthog.shutdown() diff --git a/examples/example-ai-langchain/otel.py b/examples/example-ai-langchain/otel.py new file mode 100644 index 00000000..73f896ce --- /dev/null +++ b/examples/example-ai-langchain/otel.py @@ -0,0 +1,31 @@ +"""LangChain with OpenTelemetry instrumentation, exporting to PostHog.""" + +import os +from langchain_openai import ChatOpenAI +from langchain_core.messages import HumanMessage +from opentelemetry import trace +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor +from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter + +# Configure OTEL to export traces to PostHog +posthog_api_key = os.environ["POSTHOG_API_KEY"] +posthog_host = os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com") + 
+os.environ["OTEL_EXPORTER_OTLP_TRACES_ENDPOINT"] = f"{posthog_host}/i/v0/ai/otel" +os.environ["OTEL_EXPORTER_OTLP_HEADERS"] = f"Authorization=Bearer {posthog_api_key}" + +tracer_provider = TracerProvider( + resource=Resource.create({"service.name": "langchain-example", "user.id": "example-user"}) +) +tracer_provider.add_span_processor(BatchSpanProcessor(OTLPSpanExporter())) +trace.set_tracer_provider(tracer_provider) + +# Use LangChain as normal — OTEL captures the traces automatically +model = ChatOpenAI(openai_api_key=os.environ["OPENAI_API_KEY"], temperature=0) + +response = model.invoke([HumanMessage(content="What is product analytics?")]) +print(response.content) + +tracer_provider.shutdown() diff --git a/examples/example-ai-langchain/requirements.txt b/examples/example-ai-langchain/requirements.txt new file mode 100644 index 00000000..b48c05ff --- /dev/null +++ b/examples/example-ai-langchain/requirements.txt @@ -0,0 +1,3 @@ +posthog>=6.6.1 +langchain +langchain-openai diff --git a/examples/example-ai-litellm/.env.example b/examples/example-ai-litellm/.env.example new file mode 100644 index 00000000..3d1dd067 --- /dev/null +++ b/examples/example-ai-litellm/.env.example @@ -0,0 +1,3 @@ +POSTHOG_API_KEY=phc_your_project_api_key +POSTHOG_HOST=https://us.i.posthog.com +OPENAI_API_KEY=sk-your_api_key diff --git a/examples/example-ai-litellm/README.md b/examples/example-ai-litellm/README.md new file mode 100644 index 00000000..9421428c --- /dev/null +++ b/examples/example-ai-litellm/README.md @@ -0,0 +1,24 @@ +# LiteLLM + PostHog AI Examples + +Track LiteLLM calls with PostHog using the built-in callback integration. 
 + +## Setup + +```bash +pip install -r requirements.txt +cp .env.example .env +# Fill in your API keys in .env +``` + +## Examples + +- **chat.py** - Chat with tool calling (works with any LiteLLM-supported model) +- **streaming.py** - Streaming responses + +## Run + +```bash +source .env +python chat.py +python streaming.py +``` diff --git a/examples/example-ai-litellm/chat.py b/examples/example-ai-litellm/chat.py new file mode 100644 index 00000000..1d4fd789 --- /dev/null +++ b/examples/example-ai-litellm/chat.py @@ -0,0 +1,63 @@ +"""LiteLLM chat with PostHog tracking via built-in callback.""" + +import os +import json +import urllib.request +import litellm + +# Enable PostHog callbacks — LiteLLM has built-in PostHog support +os.environ["POSTHOG_API_KEY"] = os.environ.get("POSTHOG_API_KEY", "") +os.environ["POSTHOG_API_URL"] = os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com") +litellm.success_callback = ["posthog"] +litellm.failure_callback = ["posthog"] + +tools = [ + { + "type": "function", + "function": { + "name": "get_weather", + "description": "Get current weather for a location", + "parameters": { + "type": "object", + "properties": { + "latitude": {"type": "number"}, + "longitude": {"type": "number"}, + "location_name": {"type": "string"}, + }, + "required": ["latitude", "longitude", "location_name"], + }, + }, + } +] + + +def get_weather(latitude: float, longitude: float, location_name: str) -> str: + url = f"https://api.open-meteo.com/v1/forecast?latitude={latitude}&longitude={longitude}&current=temperature_2m,relative_humidity_2m,wind_speed_10m" + with urllib.request.urlopen(url) as resp: + data = json.loads(resp.read()) + current = data["current"] + return f"Weather in {location_name}: {current['temperature_2m']}°C, humidity {current['relative_humidity_2m']}%, wind {current['wind_speed_10m']} km/h" + + +# LiteLLM supports any model — just change the model string +response = litellm.completion( + model="gpt-4o-mini", + messages=[ + {"role": 
"system", "content": "You are a helpful assistant with access to weather data."}, + {"role": "user", "content": "What's the weather in Paris?"}, + ], + tools=tools, + tool_choice="auto", + metadata={"distinct_id": "example-user"}, +) + +message = response.choices[0].message + +if message.content: + print(message.content) + +if hasattr(message, "tool_calls") and message.tool_calls: + for tool_call in message.tool_calls: + args = json.loads(tool_call.function.arguments) + result = get_weather(**args) + print(result) diff --git a/examples/example-ai-litellm/requirements.txt b/examples/example-ai-litellm/requirements.txt new file mode 100644 index 00000000..0fc073c3 --- /dev/null +++ b/examples/example-ai-litellm/requirements.txt @@ -0,0 +1,2 @@ +posthog>=6.6.1 +litellm diff --git a/examples/example-ai-litellm/streaming.py b/examples/example-ai-litellm/streaming.py new file mode 100644 index 00000000..4a6a2cb1 --- /dev/null +++ b/examples/example-ai-litellm/streaming.py @@ -0,0 +1,25 @@ +"""LiteLLM streaming chat with PostHog tracking.""" + +import os +import litellm + +os.environ["POSTHOG_API_KEY"] = os.environ.get("POSTHOG_API_KEY", "") +os.environ["POSTHOG_API_URL"] = os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com") +litellm.success_callback = ["posthog"] +litellm.failure_callback = ["posthog"] + +response = litellm.completion( + model="gpt-4o-mini", + messages=[ + {"role": "system", "content": "You are a helpful assistant."}, + {"role": "user", "content": "Explain feature flags in three sentences."}, + ], + stream=True, + metadata={"distinct_id": "example-user"}, +) + +for chunk in response: + if chunk.choices and chunk.choices[0].delta.content: + print(chunk.choices[0].delta.content, end="", flush=True) + +print() diff --git a/examples/example-ai-openai-agents/.env.example b/examples/example-ai-openai-agents/.env.example new file mode 100644 index 00000000..3d1dd067 --- /dev/null +++ b/examples/example-ai-openai-agents/.env.example @@ -0,0 +1,3 @@ 
+POSTHOG_API_KEY=phc_your_project_api_key +POSTHOG_HOST=https://us.i.posthog.com +OPENAI_API_KEY=sk-your_api_key diff --git a/examples/example-ai-openai-agents/README.md b/examples/example-ai-openai-agents/README.md new file mode 100644 index 00000000..dd8f89f1 --- /dev/null +++ b/examples/example-ai-openai-agents/README.md @@ -0,0 +1,28 @@ +# OpenAI Agents SDK + PostHog AI Examples + +Track OpenAI Agents SDK calls with PostHog. + +## Setup + +```bash +pip install -r requirements.txt +cp .env.example .env +# Fill in your API keys in .env +``` + +## Examples + +- **multi_agent.py** - Triage agent routing to specialist agents via handoffs +- **single_agent.py** - Single agent with weather and math tools +- **guardrails.py** - Input/output guardrails for content filtering +- **custom_spans.py** - Custom spans for tracking non-LLM operations within a trace + +## Run + +```bash +source .env +python multi_agent.py +python single_agent.py +python guardrails.py +python custom_spans.py +``` diff --git a/examples/example-ai-openai-agents/custom_spans.py b/examples/example-ai-openai-agents/custom_spans.py new file mode 100644 index 00000000..2627d42f --- /dev/null +++ b/examples/example-ai-openai-agents/custom_spans.py @@ -0,0 +1,38 @@ +"""OpenAI Agents SDK with custom spans for tracking custom operations, traced by PostHog.""" + +import asyncio +import os +from agents import Agent, Runner, trace +from agents.tracing import custom_span +from posthog import Posthog +from posthog.ai.openai_agents import instrument + +posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +instrument(posthog, distinct_id="example-user") + +agent = Agent( + name="Assistant", + instructions="You are a helpful assistant.", + model="gpt-4o-mini", +) + + +async def main(): + user_input = "Summarize the benefits of product analytics" + + # Wrap the workflow in a trace with custom spans for each stage + with trace("processing_pipeline"): + with 
custom_span(name="preprocess", data={"input_length": len(user_input)}): + processed = user_input.strip().lower() + + with custom_span(name="validate", data={"input": processed}): + is_valid = 0 < len(processed) < 1000 + + if is_valid: + with custom_span(name="llm_call"): + result = await Runner.run(agent, user_input) + print(result.final_output) + + +asyncio.run(main()) +posthog.shutdown() diff --git a/examples/example-ai-openai-agents/guardrails.py b/examples/example-ai-openai-agents/guardrails.py new file mode 100644 index 00000000..f68d5117 --- /dev/null +++ b/examples/example-ai-openai-agents/guardrails.py @@ -0,0 +1,67 @@ +"""OpenAI Agents SDK with input/output guardrails, tracked by PostHog.""" + +import asyncio +import os +from agents import Agent, Runner, input_guardrail, output_guardrail, GuardrailFunctionOutput, RunContextWrapper, TResponseInputItem +from posthog import Posthog +from posthog.ai.openai_agents import instrument + +posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +instrument(posthog, distinct_id="example-user") + +BLOCKED_INPUT_WORDS = ["hack", "exploit", "bypass"] +BLOCKED_OUTPUT_WORDS = ["confidential", "secret", "classified"] + + +@input_guardrail +async def content_filter( + ctx: RunContextWrapper[None], agent: Agent, input: str | list[TResponseInputItem] +) -> GuardrailFunctionOutput: + """Block requests containing prohibited words.""" + text = str(input).lower() if isinstance(input, str) else " ".join(str(i) for i in input).lower() + for word in BLOCKED_INPUT_WORDS: + if word in text: + return GuardrailFunctionOutput( + output_info={"blocked_word": word}, + tripwire_triggered=True, + ) + return GuardrailFunctionOutput(output_info={"status": "passed"}, tripwire_triggered=False) + + +@output_guardrail +async def sensitive_data_filter( + ctx: RunContextWrapper[None], agent: Agent, output: str +) -> GuardrailFunctionOutput: + """Prevent sensitive information from being 
returned.""" + for word in BLOCKED_OUTPUT_WORDS: + if word in output.lower(): + return GuardrailFunctionOutput( + output_info={"blocked_word": word}, + tripwire_triggered=True, + ) + return GuardrailFunctionOutput(output_info={"status": "passed"}, tripwire_triggered=False) + + +guarded_agent = Agent( + name="GuardedAgent", + instructions="You are a helpful assistant. Be informative but avoid sensitive topics.", + model="gpt-4o-mini", + input_guardrails=[content_filter], + output_guardrails=[sensitive_data_filter], +) + + +async def main(): + # This should pass guardrails + result = await Runner.run(guarded_agent, "What is product analytics?") + print(f"Passed: {result.final_output}") + + # This should trigger the input guardrail + try: + await Runner.run(guarded_agent, "How do I hack into a system?") + except Exception as e: + print(f"Blocked: {e}") + + +asyncio.run(main()) +posthog.shutdown() diff --git a/examples/example-ai-openai-agents/multi_agent.py b/examples/example-ai-openai-agents/multi_agent.py new file mode 100644 index 00000000..64817d82 --- /dev/null +++ b/examples/example-ai-openai-agents/multi_agent.py @@ -0,0 +1,63 @@ +"""OpenAI Agents SDK multi-agent with handoffs, tracked by PostHog.""" + +import asyncio +import os +from typing import Annotated +from agents import Agent, Runner, function_tool +from posthog import Posthog +from posthog.ai.openai_agents import instrument + +posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +instrument(posthog, distinct_id="example-user") + + +@function_tool +def get_weather(city: Annotated[str, "The city to get weather for"]) -> str: + """Get current weather for a city.""" + return f"Weather in {city}: 18°C, partly cloudy, humidity 65%" + + +@function_tool +def calculate(expression: Annotated[str, "A math expression to evaluate"]) -> str: + """Evaluate a mathematical expression.""" + allowed = set("0123456789+-*/().^ ") + if not all(c in allowed for c 
in expression): + return "Error: invalid characters" + return f"Result: {eval(expression.replace('^', '**'))}" + + +weather_agent = Agent( + name="WeatherAgent", + instructions="You handle weather queries. Use the get_weather tool.", + model="gpt-4o-mini", + tools=[get_weather], +) + +math_agent = Agent( + name="MathAgent", + instructions="You handle math problems. Use the calculate tool.", + model="gpt-4o-mini", + tools=[calculate], +) + +general_agent = Agent( + name="GeneralAgent", + instructions="You handle general questions and conversation.", + model="gpt-4o-mini", +) + +triage_agent = Agent( + name="TriageAgent", + instructions="Route to WeatherAgent for weather, MathAgent for math, GeneralAgent for everything else.", + model="gpt-4o-mini", + handoffs=[weather_agent, math_agent, general_agent], +) + + +async def main(): + result = await Runner.run(triage_agent, "What's the weather in Tokyo?") + print(result.final_output) + + +asyncio.run(main()) +posthog.shutdown() diff --git a/examples/example-ai-openai-agents/requirements.txt b/examples/example-ai-openai-agents/requirements.txt new file mode 100644 index 00000000..be0e3cd2 --- /dev/null +++ b/examples/example-ai-openai-agents/requirements.txt @@ -0,0 +1,2 @@ +posthog>=6.6.1 +openai-agents diff --git a/examples/example-ai-openai-agents/single_agent.py b/examples/example-ai-openai-agents/single_agent.py new file mode 100644 index 00000000..01b611c7 --- /dev/null +++ b/examples/example-ai-openai-agents/single_agent.py @@ -0,0 +1,43 @@ +"""OpenAI Agents SDK single agent with tools, tracked by PostHog.""" + +import asyncio +import os +from typing import Annotated +from agents import Agent, Runner, function_tool +from posthog import Posthog +from posthog.ai.openai_agents import instrument + +posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +instrument(posthog, distinct_id="example-user") + + +@function_tool +def get_weather(city: Annotated[str, 
"The city to get weather for"]) -> str: + """Get current weather for a city.""" + return f"Weather in {city}: 22°C, clear skies, humidity 45%" + + +@function_tool +def calculate(expression: Annotated[str, "A math expression to evaluate"]) -> str: + """Evaluate a mathematical expression.""" + allowed = set("0123456789+-*/().^ ") + if not all(c in allowed for c in expression): + return "Error: invalid characters" + return f"Result: {eval(expression.replace('^', '**'))}" + + +agent = Agent( + name="Assistant", + instructions="You are a helpful assistant with weather and math tools.", + model="gpt-4o-mini", + tools=[get_weather, calculate], +) + + +async def main(): + result = await Runner.run(agent, "What's 15% of 280?") + print(result.final_output) + + +asyncio.run(main()) +posthog.shutdown() diff --git a/examples/example-ai-openai/.env.example b/examples/example-ai-openai/.env.example new file mode 100644 index 00000000..3d1dd067 --- /dev/null +++ b/examples/example-ai-openai/.env.example @@ -0,0 +1,3 @@ +POSTHOG_API_KEY=phc_your_project_api_key +POSTHOG_HOST=https://us.i.posthog.com +OPENAI_API_KEY=sk-your_api_key diff --git a/examples/example-ai-openai/README.md b/examples/example-ai-openai/README.md new file mode 100644 index 00000000..2da15076 --- /dev/null +++ b/examples/example-ai-openai/README.md @@ -0,0 +1,29 @@ +# OpenAI + PostHog AI Examples + +Track OpenAI API calls with PostHog. 
+ +## Setup + +```bash +pip install -r requirements.txt +cp .env.example .env +# Fill in your API keys in .env +``` + +## Examples + +- **chat_completions.py** - Chat Completions API with tool calling +- **chat_completions_streaming.py** - Chat Completions with streaming +- **responses.py** - Responses API with tool calling +- **responses_streaming.py** - Responses API with streaming +- **embeddings.py** - Text embeddings +- **transcription.py** - Audio transcription (Whisper) +- **image_generation.py** - Image generation via Responses API + +## Run + +```bash +source .env +python chat_completions.py +python responses_streaming.py +``` diff --git a/examples/example-ai-openai/chat_completions.py b/examples/example-ai-openai/chat_completions.py new file mode 100644 index 00000000..74e52790 --- /dev/null +++ b/examples/example-ai-openai/chat_completions.py @@ -0,0 +1,63 @@ +"""OpenAI Chat Completions API with tool calling, tracked by PostHog.""" + +import os +import json +import urllib.request +from posthog import Posthog +from posthog.ai.openai import OpenAI + +posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +client = OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) + +tools = [ + { + "type": "function", + "function": { + "name": "get_weather", + "description": "Get current weather for a location", + "parameters": { + "type": "object", + "properties": { + "latitude": {"type": "number"}, + "longitude": {"type": "number"}, + "location_name": {"type": "string"}, + }, + "required": ["latitude", "longitude", "location_name"], + }, + }, + } +] + + +def get_weather(latitude: float, longitude: float, location_name: str) -> str: + url = f"https://api.open-meteo.com/v1/forecast?latitude={latitude}&longitude={longitude}&current=temperature_2m,relative_humidity_2m,wind_speed_10m" + with urllib.request.urlopen(url) as resp: + data = json.loads(resp.read()) + current = data["current"] + return 
f"Weather in {location_name}: {current['temperature_2m']}°C, humidity {current['relative_humidity_2m']}%, wind {current['wind_speed_10m']} km/h" + + +response = client.chat.completions.create( + model="gpt-4o-mini", + max_completion_tokens=1024, + posthog_distinct_id="example-user", + tools=tools, + tool_choice="auto", + messages=[ + {"role": "system", "content": "You are a helpful assistant with access to weather data."}, + {"role": "user", "content": "What's the weather like in Dublin, Ireland?"}, + ], +) + +message = response.choices[0].message + +if message.content: + print(message.content) + +if message.tool_calls: + for tool_call in message.tool_calls: + args = json.loads(tool_call.function.arguments) + result = get_weather(**args) + print(result) + +posthog.shutdown() diff --git a/examples/example-ai-openai/chat_completions_streaming.py b/examples/example-ai-openai/chat_completions_streaming.py new file mode 100644 index 00000000..6256622e --- /dev/null +++ b/examples/example-ai-openai/chat_completions_streaming.py @@ -0,0 +1,26 @@ +"""OpenAI Chat Completions API with streaming, tracked by PostHog.""" + +import os +from posthog import Posthog +from posthog.ai.openai import OpenAI + +posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +client = OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) + +stream = client.chat.completions.create( + model="gpt-4o-mini", + max_completion_tokens=1024, + posthog_distinct_id="example-user", + stream=True, + messages=[ + {"role": "system", "content": "You are a helpful assistant."}, + {"role": "user", "content": "Explain observability in three sentences."}, + ], +) + +for chunk in stream: + if chunk.choices and chunk.choices[0].delta.content: + print(chunk.choices[0].delta.content, end="", flush=True) + +print() +posthog.shutdown() diff --git a/examples/example-ai-openai/embeddings.py b/examples/example-ai-openai/embeddings.py new file mode 
100644 index 00000000..9f60f9c1 --- /dev/null +++ b/examples/example-ai-openai/embeddings.py @@ -0,0 +1,20 @@ +"""OpenAI embeddings, tracked by PostHog.""" + +import os +from posthog import Posthog +from posthog.ai.openai import OpenAI + +posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +client = OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) + +response = client.embeddings.create( + model="text-embedding-3-small", + input="PostHog is an open-source product analytics platform.", + posthog_distinct_id="example-user", +) + +embedding = response.data[0].embedding +print(f"Embedding dimensions: {len(embedding)}") +print(f"First 5 values: {embedding[:5]}") + +posthog.shutdown() diff --git a/examples/example-ai-openai/image_generation.py b/examples/example-ai-openai/image_generation.py new file mode 100644 index 00000000..5f21aee7 --- /dev/null +++ b/examples/example-ai-openai/image_generation.py @@ -0,0 +1,22 @@ +"""OpenAI image generation via Responses API, tracked by PostHog.""" + +import os +from posthog import Posthog +from posthog.ai.openai import OpenAI + +posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +client = OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) + +response = client.responses.create( + model="gpt-image-1-mini", + input="A hedgehog wearing a PostHog t-shirt, pixel art style", + tools=[{"type": "image_generation"}], + posthog_distinct_id="example-user", +) + +for output_item in response.output: + if hasattr(output_item, "type") and output_item.type == "image_generation_call": + image_base64 = output_item.result + print(f"Generated image: {len(image_base64)} chars of base64 data") + +posthog.shutdown() diff --git a/examples/example-ai-openai/requirements.txt b/examples/example-ai-openai/requirements.txt new file mode 100644 index 00000000..b01b2c58 --- /dev/null +++ 
b/examples/example-ai-openai/requirements.txt @@ -0,0 +1,2 @@ +posthog>=6.6.1 +openai diff --git a/examples/example-ai-openai/responses.py b/examples/example-ai-openai/responses.py new file mode 100644 index 00000000..50d9d51d --- /dev/null +++ b/examples/example-ai-openai/responses.py @@ -0,0 +1,57 @@ +"""OpenAI Responses API with tool calling, tracked by PostHog.""" + +import os +import json +import urllib.request +from posthog import Posthog +from posthog.ai.openai import OpenAI + +posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +client = OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) + +tools = [ + { + "type": "function", + "name": "get_weather", + "description": "Get current weather for a location", + "parameters": { + "type": "object", + "properties": { + "latitude": {"type": "number"}, + "longitude": {"type": "number"}, + "location_name": {"type": "string"}, + }, + "required": ["latitude", "longitude", "location_name"], + }, + } +] + + +def get_weather(latitude: float, longitude: float, location_name: str) -> str: + url = f"https://api.open-meteo.com/v1/forecast?latitude={latitude}&longitude={longitude}&current=temperature_2m,relative_humidity_2m,wind_speed_10m" + with urllib.request.urlopen(url) as resp: + data = json.loads(resp.read()) + current = data["current"] + return f"Weather in {location_name}: {current['temperature_2m']}°C, humidity {current['relative_humidity_2m']}%, wind {current['wind_speed_10m']} km/h" + + +response = client.responses.create( + model="gpt-4o-mini", + max_output_tokens=1024, + posthog_distinct_id="example-user", + tools=tools, + instructions="You are a helpful assistant with access to weather data.", + input=[{"role": "user", "content": "What's the weather like in Tokyo?"}], +) + +for output_item in response.output: + if hasattr(output_item, "content"): + for content_item in output_item.content: + if hasattr(content_item, "text"): + 
print(content_item.text) + elif hasattr(output_item, "name"): + args = json.loads(output_item.arguments) + result = get_weather(**args) + print(result) + +posthog.shutdown() diff --git a/examples/example-ai-openai/responses_streaming.py b/examples/example-ai-openai/responses_streaming.py new file mode 100644 index 00000000..3569c032 --- /dev/null +++ b/examples/example-ai-openai/responses_streaming.py @@ -0,0 +1,24 @@ +"""OpenAI Responses API with streaming, tracked by PostHog.""" + +import os +from posthog import Posthog +from posthog.ai.openai import OpenAI + +posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +client = OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) + +stream = client.responses.create( + model="gpt-4o-mini", + max_output_tokens=1024, + posthog_distinct_id="example-user", + stream=True, + instructions="You are a helpful assistant.", + input=[{"role": "user", "content": "Write a haiku about product analytics."}], +) + +for event in stream: + if hasattr(event, "type") and event.type == "response.output_text.delta": + print(event.delta, end="", flush=True) + +print() +posthog.shutdown() diff --git a/examples/example-ai-openai/transcription.py b/examples/example-ai-openai/transcription.py new file mode 100644 index 00000000..cd27dced --- /dev/null +++ b/examples/example-ai-openai/transcription.py @@ -0,0 +1,22 @@ +"""OpenAI audio transcription (Whisper), tracked by PostHog.""" + +import os +from posthog import Posthog +from posthog.ai.openai import OpenAI + +posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +client = OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) + +# Replace with the path to your audio file +audio_path = "audio.mp3" + +with open(audio_path, "rb") as audio_file: + transcription = client.audio.transcriptions.create( + file=audio_file, + model="whisper-1", + 
posthog_distinct_id="example-user", + ) + +print(f"Transcription: {transcription.text}") + +posthog.shutdown() diff --git a/examples/example-ai-pydantic-ai/.env.example b/examples/example-ai-pydantic-ai/.env.example new file mode 100644 index 00000000..3d1dd067 --- /dev/null +++ b/examples/example-ai-pydantic-ai/.env.example @@ -0,0 +1,3 @@ +POSTHOG_API_KEY=phc_your_project_api_key +POSTHOG_HOST=https://us.i.posthog.com +OPENAI_API_KEY=sk-your_api_key diff --git a/examples/example-ai-pydantic-ai/README.md b/examples/example-ai-pydantic-ai/README.md new file mode 100644 index 00000000..012f0fee --- /dev/null +++ b/examples/example-ai-pydantic-ai/README.md @@ -0,0 +1,22 @@ +# Pydantic AI + PostHog AI Examples + +Track Pydantic AI agent calls with PostHog via OpenTelemetry. + +## Setup + +```bash +pip install -r requirements.txt +cp .env.example .env +# Fill in your API keys in .env +``` + +## Examples + +- **agent_with_otel.py** - Agent with tool calling, instrumented via OTEL + +## Run + +```bash +source .env +python agent_with_otel.py +``` diff --git a/examples/example-ai-pydantic-ai/agent_with_otel.py b/examples/example-ai-pydantic-ai/agent_with_otel.py new file mode 100644 index 00000000..32b06038 --- /dev/null +++ b/examples/example-ai-pydantic-ai/agent_with_otel.py @@ -0,0 +1,48 @@ +"""Pydantic AI agent with OpenTelemetry instrumentation, exporting to PostHog.""" + +import os +import json +import urllib.request +from pydantic_ai import Agent, RunContext +from pydantic_ai.models.openai import OpenAIModel +from opentelemetry import trace +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor +from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter + +# Configure OTEL to export traces to PostHog +posthog_api_key = os.environ["POSTHOG_API_KEY"] +posthog_host = os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com") + 
+os.environ["OTEL_EXPORTER_OTLP_TRACES_ENDPOINT"] = f"{posthog_host}/i/v0/ai/otel" +os.environ["OTEL_EXPORTER_OTLP_HEADERS"] = f"Authorization=Bearer {posthog_api_key}" + +tracer_provider = TracerProvider( + resource=Resource.create({"service.name": "pydantic-ai-example", "user.id": "example-user"}) +) +tracer_provider.add_span_processor(BatchSpanProcessor(OTLPSpanExporter())) +trace.set_tracer_provider(tracer_provider) + +# Create an agent with a tool +model = OpenAIModel("gpt-4o-mini") +agent = Agent(model, system_prompt="You are a helpful assistant with access to weather data.") + + +@agent.tool +def get_weather(ctx: RunContext[None], latitude: float, longitude: float, location_name: str) -> str: + """Get current weather for a location.""" + url = f"https://api.open-meteo.com/v1/forecast?latitude={latitude}&longitude={longitude}&current=temperature_2m,relative_humidity_2m,wind_speed_10m" + with urllib.request.urlopen(url) as resp: + data = json.loads(resp.read()) + current = data["current"] + return f"Weather in {location_name}: {current['temperature_2m']}°C, humidity {current['relative_humidity_2m']}%, wind {current['wind_speed_10m']} km/h" + + +# Enable automatic OTEL instrumentation for all agents +Agent.instrument_all() + +result = agent.run_sync("What's the weather in Amsterdam?") +print(result.output) + +tracer_provider.shutdown() diff --git a/examples/example-ai-pydantic-ai/requirements.txt b/examples/example-ai-pydantic-ai/requirements.txt new file mode 100644 index 00000000..b5e997a2 --- /dev/null +++ b/examples/example-ai-pydantic-ai/requirements.txt @@ -0,0 +1,4 @@ +posthog>=6.6.1 +pydantic-ai +opentelemetry-sdk +opentelemetry-exporter-otlp-proto-http From b9643239f5007b825094dcd9fbe78bd376f2ceb3 Mon Sep 17 00:00:00 2001 From: Richard Solomou Date: Fri, 20 Mar 2026 12:27:08 +0200 Subject: [PATCH 2/7] style: apply ruff formatting to AI examples --- examples/example-ai-anthropic/chat.py | 5 +++- .../example-ai-anthropic/extended_thinking.py | 12 ++++++--
examples/example-ai-anthropic/streaming.py | 5 +++- examples/example-ai-gemini/chat.py | 5 +++- .../example-ai-gemini/image_generation.py | 9 ++++-- examples/example-ai-gemini/streaming.py | 12 ++++++-- .../example-ai-langchain/callback_handler.py | 7 +++-- examples/example-ai-langchain/otel.py | 4 ++- examples/example-ai-litellm/chat.py | 9 ++++-- examples/example-ai-litellm/streaming.py | 4 ++- .../example-ai-openai-agents/custom_spans.py | 5 +++- .../example-ai-openai-agents/guardrails.py | 29 +++++++++++++++---- .../example-ai-openai-agents/multi_agent.py | 5 +++- .../example-ai-openai-agents/single_agent.py | 5 +++- .../example-ai-openai/chat_completions.py | 10 +++++-- .../chat_completions_streaming.py | 5 +++- examples/example-ai-openai/embeddings.py | 5 +++- .../example-ai-openai/image_generation.py | 5 +++- examples/example-ai-openai/responses.py | 5 +++- .../example-ai-openai/responses_streaming.py | 5 +++- examples/example-ai-openai/transcription.py | 5 +++- .../example-ai-pydantic-ai/agent_with_otel.py | 12 ++++++-- 22 files changed, 134 insertions(+), 34 deletions(-) diff --git a/examples/example-ai-anthropic/chat.py b/examples/example-ai-anthropic/chat.py index 4fa9aaab..fb8ca253 100644 --- a/examples/example-ai-anthropic/chat.py +++ b/examples/example-ai-anthropic/chat.py @@ -6,7 +6,10 @@ from posthog import Posthog from posthog.ai.anthropic import Anthropic -posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) client = Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"], posthog_client=posthog) tools = [ diff --git a/examples/example-ai-anthropic/extended_thinking.py b/examples/example-ai-anthropic/extended_thinking.py index 45d6931e..e4f50381 100644 --- a/examples/example-ai-anthropic/extended_thinking.py +++ b/examples/example-ai-anthropic/extended_thinking.py @@ -7,7 
+7,10 @@ from posthog import Posthog from posthog.ai.anthropic import Anthropic -posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) client = Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"], posthog_client=posthog) message = client.messages.create( @@ -15,7 +18,12 @@ max_tokens=16000, posthog_distinct_id="example-user", thinking={"type": "enabled", "budget_tokens": 10000}, - messages=[{"role": "user", "content": "What is the probability of rolling at least one six in four rolls of a fair die?"}], + messages=[ + { + "role": "user", + "content": "What is the probability of rolling at least one six in four rolls of a fair die?", + } + ], ) for block in message.content: diff --git a/examples/example-ai-anthropic/streaming.py b/examples/example-ai-anthropic/streaming.py index 4c21c6a4..001a8bed 100644 --- a/examples/example-ai-anthropic/streaming.py +++ b/examples/example-ai-anthropic/streaming.py @@ -4,7 +4,10 @@ from posthog import Posthog from posthog.ai.anthropic import Anthropic -posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) client = Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"], posthog_client=posthog) stream = client.messages.create( diff --git a/examples/example-ai-gemini/chat.py b/examples/example-ai-gemini/chat.py index 3e169717..736293aa 100644 --- a/examples/example-ai-gemini/chat.py +++ b/examples/example-ai-gemini/chat.py @@ -7,7 +7,10 @@ from posthog import Posthog from posthog.ai.gemini import Client -posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + 
host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) client = Client(api_key=os.environ["GEMINI_API_KEY"], posthog_client=posthog) tool_declarations = [ diff --git a/examples/example-ai-gemini/image_generation.py b/examples/example-ai-gemini/image_generation.py index dcdb6f8d..42cc741e 100644 --- a/examples/example-ai-gemini/image_generation.py +++ b/examples/example-ai-gemini/image_generation.py @@ -4,7 +4,10 @@ from posthog import Posthog from posthog.ai.gemini import Client -posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) client = Client(api_key=os.environ["GEMINI_API_KEY"], posthog_client=posthog) response = client.models.generate_content( @@ -16,7 +19,9 @@ for candidate in response.candidates: for part in candidate.content.parts: if hasattr(part, "inline_data") and part.inline_data: - print(f"Generated image: {part.inline_data.mime_type}, {len(part.inline_data.data)} bytes") + print( + f"Generated image: {part.inline_data.mime_type}, {len(part.inline_data.data)} bytes" + ) elif hasattr(part, "text"): print(part.text) diff --git a/examples/example-ai-gemini/streaming.py b/examples/example-ai-gemini/streaming.py index 87d4b97a..dde9696f 100644 --- a/examples/example-ai-gemini/streaming.py +++ b/examples/example-ai-gemini/streaming.py @@ -4,13 +4,21 @@ from posthog import Posthog from posthog.ai.gemini import Client -posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) client = Client(api_key=os.environ["GEMINI_API_KEY"], posthog_client=posthog) stream = client.models.generate_content_stream( model="gemini-2.5-flash", posthog_distinct_id="example-user", - contents=[{"role": "user", 
"parts": [{"text": "Explain product analytics in three sentences."}]}], + contents=[ + { + "role": "user", + "parts": [{"text": "Explain product analytics in three sentences."}], + } + ], ) for chunk in stream: diff --git a/examples/example-ai-langchain/callback_handler.py b/examples/example-ai-langchain/callback_handler.py index 1c0f5935..ac2d4309 100644 --- a/examples/example-ai-langchain/callback_handler.py +++ b/examples/example-ai-langchain/callback_handler.py @@ -5,11 +5,14 @@ import urllib.request from langchain_openai import ChatOpenAI from langchain_core.tools import tool -from langchain_core.messages import HumanMessage, ToolMessage +from langchain_core.messages import HumanMessage from posthog import Posthog from posthog.ai.langchain import CallbackHandler -posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) callback_handler = CallbackHandler(client=posthog) diff --git a/examples/example-ai-langchain/otel.py b/examples/example-ai-langchain/otel.py index 73f896ce..bf18b38b 100644 --- a/examples/example-ai-langchain/otel.py +++ b/examples/example-ai-langchain/otel.py @@ -17,7 +17,9 @@ os.environ["OTEL_EXPORTER_OTLP_HEADERS"] = f"Authorization=Bearer {posthog_api_key}" tracer_provider = TracerProvider( - resource=Resource.create({"service.name": "langchain-example", "user.id": "example-user"}) + resource=Resource.create( + {"service.name": "langchain-example", "user.id": "example-user"} + ) ) tracer_provider.add_span_processor(BatchSpanProcessor(OTLPSpanExporter())) trace.set_tracer_provider(tracer_provider) diff --git a/examples/example-ai-litellm/chat.py b/examples/example-ai-litellm/chat.py index 1d4fd789..253082aa 100644 --- a/examples/example-ai-litellm/chat.py +++ b/examples/example-ai-litellm/chat.py @@ -7,7 +7,9 @@ # Enable PostHog callbacks — LiteLLM has built-in 
PostHog support os.environ["POSTHOG_API_KEY"] = os.environ.get("POSTHOG_API_KEY", "") -os.environ["POSTHOG_API_URL"] = os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com") +os.environ["POSTHOG_API_URL"] = os.environ.get( + "POSTHOG_HOST", "https://us.i.posthog.com" +) litellm.success_callback = ["posthog"] litellm.failure_callback = ["posthog"] @@ -43,7 +45,10 @@ def get_weather(latitude: float, longitude: float, location_name: str) -> str: response = litellm.completion( model="gpt-4o-mini", messages=[ - {"role": "system", "content": "You are a helpful assistant with access to weather data."}, + { + "role": "system", + "content": "You are a helpful assistant with access to weather data.", + }, {"role": "user", "content": "What's the weather in Paris?"}, ], tools=tools, diff --git a/examples/example-ai-litellm/streaming.py b/examples/example-ai-litellm/streaming.py index 4a6a2cb1..f82e0080 100644 --- a/examples/example-ai-litellm/streaming.py +++ b/examples/example-ai-litellm/streaming.py @@ -4,7 +4,9 @@ import litellm os.environ["POSTHOG_API_KEY"] = os.environ.get("POSTHOG_API_KEY", "") -os.environ["POSTHOG_API_URL"] = os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com") +os.environ["POSTHOG_API_URL"] = os.environ.get( + "POSTHOG_HOST", "https://us.i.posthog.com" +) litellm.success_callback = ["posthog"] litellm.failure_callback = ["posthog"] diff --git a/examples/example-ai-openai-agents/custom_spans.py b/examples/example-ai-openai-agents/custom_spans.py index 2627d42f..c766c467 100644 --- a/examples/example-ai-openai-agents/custom_spans.py +++ b/examples/example-ai-openai-agents/custom_spans.py @@ -7,7 +7,10 @@ from posthog import Posthog from posthog.ai.openai_agents import instrument -posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) instrument(posthog, 
distinct_id="example-user") agent = Agent( diff --git a/examples/example-ai-openai-agents/guardrails.py b/examples/example-ai-openai-agents/guardrails.py index f68d5117..bc155f80 100644 --- a/examples/example-ai-openai-agents/guardrails.py +++ b/examples/example-ai-openai-agents/guardrails.py @@ -2,11 +2,22 @@ import asyncio import os -from agents import Agent, Runner, input_guardrail, output_guardrail, GuardrailFunctionOutput, RunContextWrapper, TResponseInputItem +from agents import ( + Agent, + Runner, + input_guardrail, + output_guardrail, + GuardrailFunctionOutput, + RunContextWrapper, + TResponseInputItem, +) from posthog import Posthog from posthog.ai.openai_agents import instrument -posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) instrument(posthog, distinct_id="example-user") BLOCKED_INPUT_WORDS = ["hack", "exploit", "bypass"] @@ -18,14 +29,20 @@ async def content_filter( ctx: RunContextWrapper[None], agent: Agent, input: str | list[TResponseInputItem] ) -> GuardrailFunctionOutput: """Block requests containing prohibited words.""" - text = str(input).lower() if isinstance(input, str) else " ".join(str(i) for i in input).lower() + text = ( + str(input).lower() + if isinstance(input, str) + else " ".join(str(i) for i in input).lower() + ) for word in BLOCKED_INPUT_WORDS: if word in text: return GuardrailFunctionOutput( output_info={"blocked_word": word}, tripwire_triggered=True, ) - return GuardrailFunctionOutput(output_info={"status": "passed"}, tripwire_triggered=False) + return GuardrailFunctionOutput( + output_info={"status": "passed"}, tripwire_triggered=False + ) @output_guardrail @@ -39,7 +56,9 @@ async def sensitive_data_filter( output_info={"blocked_word": word}, tripwire_triggered=True, ) - return GuardrailFunctionOutput(output_info={"status": "passed"}, 
tripwire_triggered=False) + return GuardrailFunctionOutput( + output_info={"status": "passed"}, tripwire_triggered=False + ) guarded_agent = Agent( diff --git a/examples/example-ai-openai-agents/multi_agent.py b/examples/example-ai-openai-agents/multi_agent.py index 64817d82..554ac846 100644 --- a/examples/example-ai-openai-agents/multi_agent.py +++ b/examples/example-ai-openai-agents/multi_agent.py @@ -7,7 +7,10 @@ from posthog import Posthog from posthog.ai.openai_agents import instrument -posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) instrument(posthog, distinct_id="example-user") diff --git a/examples/example-ai-openai-agents/single_agent.py b/examples/example-ai-openai-agents/single_agent.py index 01b611c7..9423d800 100644 --- a/examples/example-ai-openai-agents/single_agent.py +++ b/examples/example-ai-openai-agents/single_agent.py @@ -7,7 +7,10 @@ from posthog import Posthog from posthog.ai.openai_agents import instrument -posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) instrument(posthog, distinct_id="example-user") diff --git a/examples/example-ai-openai/chat_completions.py b/examples/example-ai-openai/chat_completions.py index 74e52790..2f73bc5e 100644 --- a/examples/example-ai-openai/chat_completions.py +++ b/examples/example-ai-openai/chat_completions.py @@ -6,7 +6,10 @@ from posthog import Posthog from posthog.ai.openai import OpenAI -posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) client = 
OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) tools = [ @@ -44,7 +47,10 @@ def get_weather(latitude: float, longitude: float, location_name: str) -> str: tools=tools, tool_choice="auto", messages=[ - {"role": "system", "content": "You are a helpful assistant with access to weather data."}, + { + "role": "system", + "content": "You are a helpful assistant with access to weather data.", + }, {"role": "user", "content": "What's the weather like in Dublin, Ireland?"}, ], ) diff --git a/examples/example-ai-openai/chat_completions_streaming.py b/examples/example-ai-openai/chat_completions_streaming.py index 6256622e..60721d49 100644 --- a/examples/example-ai-openai/chat_completions_streaming.py +++ b/examples/example-ai-openai/chat_completions_streaming.py @@ -4,7 +4,10 @@ from posthog import Posthog from posthog.ai.openai import OpenAI -posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) client = OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) stream = client.chat.completions.create( diff --git a/examples/example-ai-openai/embeddings.py b/examples/example-ai-openai/embeddings.py index 9f60f9c1..826acfcb 100644 --- a/examples/example-ai-openai/embeddings.py +++ b/examples/example-ai-openai/embeddings.py @@ -4,7 +4,10 @@ from posthog import Posthog from posthog.ai.openai import OpenAI -posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) client = OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) response = client.embeddings.create( diff --git a/examples/example-ai-openai/image_generation.py b/examples/example-ai-openai/image_generation.py index 
5f21aee7..1cdf1f7b 100644 --- a/examples/example-ai-openai/image_generation.py +++ b/examples/example-ai-openai/image_generation.py @@ -4,7 +4,10 @@ from posthog import Posthog from posthog.ai.openai import OpenAI -posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) client = OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) response = client.responses.create( diff --git a/examples/example-ai-openai/responses.py b/examples/example-ai-openai/responses.py index 50d9d51d..82b138aa 100644 --- a/examples/example-ai-openai/responses.py +++ b/examples/example-ai-openai/responses.py @@ -6,7 +6,10 @@ from posthog import Posthog from posthog.ai.openai import OpenAI -posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) client = OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) tools = [ diff --git a/examples/example-ai-openai/responses_streaming.py b/examples/example-ai-openai/responses_streaming.py index 3569c032..a0f92865 100644 --- a/examples/example-ai-openai/responses_streaming.py +++ b/examples/example-ai-openai/responses_streaming.py @@ -4,7 +4,10 @@ from posthog import Posthog from posthog.ai.openai import OpenAI -posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) client = OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) stream = client.responses.create( diff --git a/examples/example-ai-openai/transcription.py b/examples/example-ai-openai/transcription.py index 
cd27dced..61928368 100644 --- a/examples/example-ai-openai/transcription.py +++ b/examples/example-ai-openai/transcription.py @@ -4,7 +4,10 @@ from posthog import Posthog from posthog.ai.openai import OpenAI -posthog = Posthog(os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com")) +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) client = OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) # Replace with the path to your audio file diff --git a/examples/example-ai-pydantic-ai/agent_with_otel.py b/examples/example-ai-pydantic-ai/agent_with_otel.py index 32b06038..802fba7d 100644 --- a/examples/example-ai-pydantic-ai/agent_with_otel.py +++ b/examples/example-ai-pydantic-ai/agent_with_otel.py @@ -19,18 +19,24 @@ os.environ["OTEL_EXPORTER_OTLP_HEADERS"] = f"Authorization=Bearer {posthog_api_key}" tracer_provider = TracerProvider( - resource=Resource.create({"service.name": "pydantic-ai-example", "user.id": "example-user"}) + resource=Resource.create( + {"service.name": "pydantic-ai-example", "user.id": "example-user"} + ) ) tracer_provider.add_span_processor(BatchSpanProcessor(OTLPSpanExporter())) trace.set_tracer_provider(tracer_provider) # Create an agent with a tool model = OpenAIModel("gpt-4o-mini") -agent = Agent(model, system_prompt="You are a helpful assistant with access to weather data.") +agent = Agent( + model, system_prompt="You are a helpful assistant with access to weather data." 
+) @agent.tool -def get_weather( + ctx: RunContext[None], latitude: float, longitude: float, location_name: str +) -> str: """Get current weather for a location.""" url = f"https://api.open-meteo.com/v1/forecast?latitude={latitude}&longitude={longitude}&current=temperature_2m,relative_humidity_2m,wind_speed_10m" with urllib.request.urlopen(url) as resp: From 2c60a69b799dcfb8464b70ab91a2833b56fc7b57 Mon Sep 17 00:00:00 2001 From: Richard Solomou Date: Fri, 20 Mar 2026 12:51:56 +0200 Subject: [PATCH 3/7] fix: add __init__.py to example directories for mypy Mypy reports duplicate module names when multiple example directories contain files with the same name (e.g. chat.py). Adding __init__.py makes each directory a proper package so mypy can disambiguate them. --- examples/example-ai-anthropic/__init__.py | 0 examples/example-ai-gemini/__init__.py | 0 examples/example-ai-langchain/__init__.py | 0 examples/example-ai-litellm/__init__.py | 0 examples/example-ai-openai-agents/__init__.py | 0 examples/example-ai-openai/__init__.py | 0 examples/example-ai-pydantic-ai/__init__.py | 0 7 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 examples/example-ai-anthropic/__init__.py create mode 100644 examples/example-ai-gemini/__init__.py create mode 100644 examples/example-ai-langchain/__init__.py create mode 100644 examples/example-ai-litellm/__init__.py create mode 100644 examples/example-ai-openai-agents/__init__.py create mode 100644 examples/example-ai-openai/__init__.py create mode 100644 examples/example-ai-pydantic-ai/__init__.py diff --git a/examples/example-ai-anthropic/__init__.py b/examples/example-ai-anthropic/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/examples/example-ai-gemini/__init__.py b/examples/example-ai-gemini/__init__.py new file mode 100644 index 00000000..e69de29b diff --git
a/examples/example-ai-langchain/__init__.py b/examples/example-ai-langchain/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/examples/example-ai-litellm/__init__.py b/examples/example-ai-litellm/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/examples/example-ai-openai-agents/__init__.py b/examples/example-ai-openai-agents/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/examples/example-ai-openai/__init__.py b/examples/example-ai-openai/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/examples/example-ai-pydantic-ai/__init__.py b/examples/example-ai-pydantic-ai/__init__.py new file mode 100644 index 00000000..e69de29b From 4bba7f4453b606df17f6b7312abeca075ae1d6e8 Mon Sep 17 00:00:00 2001 From: Richard Solomou Date: Fri, 20 Mar 2026 12:57:09 +0200 Subject: [PATCH 4/7] fix: exclude examples from mypy and sync baseline Example directory names contain hyphens (e.g. example-ai-anthropic) which aren't valid Python package names, so __init__.py doesn't help. Exclude them from mypy instead and sync the baseline. 
--- examples/example-ai-anthropic/__init__.py | 0 examples/example-ai-gemini/__init__.py | 0 examples/example-ai-langchain/__init__.py | 0 examples/example-ai-litellm/__init__.py | 0 examples/example-ai-openai-agents/__init__.py | 0 examples/example-ai-openai/__init__.py | 0 examples/example-ai-pydantic-ai/__init__.py | 0 mypy-baseline.txt | 2 +- mypy.ini | 2 +- 9 files changed, 2 insertions(+), 2 deletions(-) delete mode 100644 examples/example-ai-anthropic/__init__.py delete mode 100644 examples/example-ai-gemini/__init__.py delete mode 100644 examples/example-ai-langchain/__init__.py delete mode 100644 examples/example-ai-litellm/__init__.py delete mode 100644 examples/example-ai-openai-agents/__init__.py delete mode 100644 examples/example-ai-openai/__init__.py delete mode 100644 examples/example-ai-pydantic-ai/__init__.py diff --git a/examples/example-ai-anthropic/__init__.py b/examples/example-ai-anthropic/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/examples/example-ai-gemini/__init__.py b/examples/example-ai-gemini/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/examples/example-ai-langchain/__init__.py b/examples/example-ai-langchain/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/examples/example-ai-litellm/__init__.py b/examples/example-ai-litellm/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/examples/example-ai-openai-agents/__init__.py b/examples/example-ai-openai-agents/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/examples/example-ai-openai/__init__.py b/examples/example-ai-openai/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/examples/example-ai-pydantic-ai/__init__.py b/examples/example-ai-pydantic-ai/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/mypy-baseline.txt b/mypy-baseline.txt index 232ce8be..46d23dbf 100644 --- a/mypy-baseline.txt +++ 
b/mypy-baseline.txt @@ -31,5 +31,5 @@ posthog/client.py:0: error: Incompatible types in assignment (expression has typ posthog/client.py:0: error: "None" has no attribute "start" [attr-defined] posthog/client.py:0: error: Statement is unreachable [unreachable] posthog/client.py:0: error: Statement is unreachable [unreachable] -posthog/client.py:0: error: Name "urlparse" already defined (possibly by an import) [no-redef] posthog/client.py:0: error: Name "parse_qs" already defined (possibly by an import) [no-redef] +posthog/client.py:0: error: Name "urlparse" already defined (possibly by an import) [no-redef] diff --git a/mypy.ini b/mypy.ini index 16f71be5..bd8447d9 100644 --- a/mypy.ini +++ b/mypy.ini @@ -9,7 +9,7 @@ check_untyped_defs = True warn_unreachable = True strict_equality = True ignore_missing_imports = True -exclude = env/.*|venv/.*|build/.* +exclude = env/.*|venv/.*|build/.*|examples/example-.* [mypy-django.*] ignore_missing_imports = True From 0246254ef6a3c8e8a18aab3b9a0aa48848cc50f2 Mon Sep 17 00:00:00 2001 From: Richard Solomou Date: Fri, 20 Mar 2026 13:36:15 +0200 Subject: [PATCH 5/7] fix: fix image gen, transcription, and gemini logging in examples - Use images.generate API instead of responses API for image generation - Gracefully skip transcription when audio file is missing - Use privacy mode for Gemini image gen to avoid huge event payloads - Suppress Gemini SDK base64 logging --- .../example-ai-gemini/image_generation.py | 6 ++++++ .../example-ai-openai/image_generation.py | 19 +++++++++---------- examples/example-ai-openai/transcription.py | 9 ++++++++- 3 files changed, 23 insertions(+), 11 deletions(-) diff --git a/examples/example-ai-gemini/image_generation.py b/examples/example-ai-gemini/image_generation.py index 42cc741e..3300586a 100644 --- a/examples/example-ai-gemini/image_generation.py +++ b/examples/example-ai-gemini/image_generation.py @@ -1,9 +1,14 @@ """Google Gemini image generation, tracked by PostHog.""" +import logging import 
os + from posthog import Posthog from posthog.ai.gemini import Client +# Suppress verbose Gemini SDK logging of base64 image data +logging.getLogger("google.genai").setLevel(logging.WARNING) + posthog = Posthog( os.environ["POSTHOG_API_KEY"], host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), @@ -13,6 +18,7 @@ response = client.models.generate_content( model="gemini-2.5-flash-image", posthog_distinct_id="example-user", + posthog_privacy_mode=True, # Redact base64 image data from the PostHog event contents=[{"role": "user", "parts": [{"text": "Generate a pixel art hedgehog"}]}], ) diff --git a/examples/example-ai-openai/image_generation.py b/examples/example-ai-openai/image_generation.py index 1cdf1f7b..f2a37cdc 100644 --- a/examples/example-ai-openai/image_generation.py +++ b/examples/example-ai-openai/image_generation.py @@ -1,4 +1,4 @@ -"""OpenAI image generation via Responses API, tracked by PostHog.""" +"""OpenAI image generation, tracked by PostHog.""" import os from posthog import Posthog @@ -10,16 +10,15 @@ ) client = OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) -response = client.responses.create( - model="gpt-image-1-mini", - input="A hedgehog wearing a PostHog t-shirt, pixel art style", - tools=[{"type": "image_generation"}], - posthog_distinct_id="example-user", +# Note: posthog.ai does not wrap images.generate yet, +# so this call is not automatically tracked. 
+response = client.images.generate( + model="gpt-image-1", + prompt="A hedgehog wearing a PostHog t-shirt, pixel art style", + size="1024x1024", ) -for output_item in response.output: - if hasattr(output_item, "type") and output_item.type == "image_generation_call": - image_base64 = output_item.result - print(f"Generated image: {len(image_base64)} chars of base64 data") +image_base64 = response.data[0].b64_json +print(f"Generated image: {len(image_base64)} chars of base64 data") posthog.shutdown() diff --git a/examples/example-ai-openai/transcription.py b/examples/example-ai-openai/transcription.py index 61928368..708209a0 100644 --- a/examples/example-ai-openai/transcription.py +++ b/examples/example-ai-openai/transcription.py @@ -1,6 +1,7 @@ """OpenAI audio transcription (Whisper), tracked by PostHog.""" import os +import sys from posthog import Posthog from posthog.ai.openai import OpenAI @@ -11,7 +12,13 @@ client = OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) # Replace with the path to your audio file -audio_path = "audio.mp3" +audio_path = os.environ.get("AUDIO_PATH", "audio.mp3") + +if not os.path.exists(audio_path): + print(f"Skipping: audio file not found at '{audio_path}'") + print("Set AUDIO_PATH to a valid audio file (mp3, wav, m4a, etc.)") + posthog.shutdown() + sys.exit(0) with open(audio_path, "rb") as audio_file: transcription = client.audio.transcriptions.create( From 67007295b1291ef303f35085eba08e87488f2312 Mon Sep 17 00:00:00 2001 From: Richard Solomou Date: Fri, 20 Mar 2026 14:09:46 +0200 Subject: [PATCH 6/7] fix: add missing run commands to example READMEs --- examples/example-ai-gemini/README.md | 1 + examples/example-ai-openai/README.md | 5 +++++ 2 files changed, 6 insertions(+) diff --git a/examples/example-ai-gemini/README.md b/examples/example-ai-gemini/README.md index df0af9ae..daa40403 100644 --- a/examples/example-ai-gemini/README.md +++ b/examples/example-ai-gemini/README.md @@ -22,4 +22,5 @@ cp .env.example .env 
source .env python chat.py python streaming.py +python image_generation.py ``` diff --git a/examples/example-ai-openai/README.md b/examples/example-ai-openai/README.md index 2da15076..f766e360 100644 --- a/examples/example-ai-openai/README.md +++ b/examples/example-ai-openai/README.md @@ -25,5 +25,10 @@ cp .env.example .env ```bash source .env python chat_completions.py +python chat_completions_streaming.py +python responses.py python responses_streaming.py +python embeddings.py +python transcription.py +python image_generation.py ``` From 3d5e958ddddbf07af8738068b07978ac59753711 Mon Sep 17 00:00:00 2001 From: Richard Solomou Date: Fri, 20 Mar 2026 14:11:52 +0200 Subject: [PATCH 7/7] Add comment about incomplete tool-calling loop in examples --- examples/example-ai-anthropic/chat.py | 3 ++- examples/example-ai-gemini/chat.py | 1 + examples/example-ai-langchain/callback_handler.py | 1 + examples/example-ai-litellm/chat.py | 1 + examples/example-ai-openai/chat_completions.py | 1 + 5 files changed, 6 insertions(+), 1 deletion(-) diff --git a/examples/example-ai-anthropic/chat.py b/examples/example-ai-anthropic/chat.py index fb8ca253..99726046 100644 --- a/examples/example-ai-anthropic/chat.py +++ b/examples/example-ai-anthropic/chat.py @@ -45,7 +45,8 @@ def get_weather(latitude: float, longitude: float, location_name: str) -> str: messages=[{"role": "user", "content": "What's the weather like in San Francisco?"}], ) -# Handle tool use if the model requests it +# Handle tool use if the model requests it. +# In production, send tool results back to the model for a final response. 
for block in message.content: if block.type == "text": print(block.text) diff --git a/examples/example-ai-gemini/chat.py b/examples/example-ai-gemini/chat.py index 736293aa..ac10be71 100644 --- a/examples/example-ai-gemini/chat.py +++ b/examples/example-ai-gemini/chat.py @@ -49,6 +49,7 @@ def get_weather(latitude: float, longitude: float, location_name: str) -> str: config=config, ) +# In production, send tool results back to the model for a final response. for candidate in response.candidates: for part in candidate.content.parts: if hasattr(part, "function_call") and part.function_call: diff --git a/examples/example-ai-langchain/callback_handler.py b/examples/example-ai-langchain/callback_handler.py index ac2d4309..3259fc01 100644 --- a/examples/example-ai-langchain/callback_handler.py +++ b/examples/example-ai-langchain/callback_handler.py @@ -45,6 +45,7 @@ def get_weather(latitude: float, longitude: float, location_name: str) -> str: if response.content: print(response.content) +# In production, send tool results back to the model for a final response. if response.tool_calls: for tool_call in response.tool_calls: result = tool_map[tool_call["name"]].invoke(tool_call["args"]) diff --git a/examples/example-ai-litellm/chat.py b/examples/example-ai-litellm/chat.py index 253082aa..13cf5a13 100644 --- a/examples/example-ai-litellm/chat.py +++ b/examples/example-ai-litellm/chat.py @@ -61,6 +61,7 @@ def get_weather(latitude: float, longitude: float, location_name: str) -> str: if message.content: print(message.content) +# In production, send tool results back to the model for a final response. 
if hasattr(message, "tool_calls") and message.tool_calls: for tool_call in message.tool_calls: args = json.loads(tool_call.function.arguments) diff --git a/examples/example-ai-openai/chat_completions.py b/examples/example-ai-openai/chat_completions.py index 2f73bc5e..32238ca0 100644 --- a/examples/example-ai-openai/chat_completions.py +++ b/examples/example-ai-openai/chat_completions.py @@ -60,6 +60,7 @@ def get_weather(latitude: float, longitude: float, location_name: str) -> str: if message.content: print(message.content) +# In production, send tool results back to the model for a final response. if message.tool_calls: for tool_call in message.tool_calls: args = json.loads(tool_call.function.arguments)