diff --git a/examples/example-ai-anthropic/.env.example b/examples/example-ai-anthropic/.env.example new file mode 100644 index 00000000..8979a330 --- /dev/null +++ b/examples/example-ai-anthropic/.env.example @@ -0,0 +1,3 @@ +POSTHOG_API_KEY=phc_your_project_api_key +POSTHOG_HOST=https://us.i.posthog.com +ANTHROPIC_API_KEY=sk-ant-your_api_key diff --git a/examples/example-ai-anthropic/README.md b/examples/example-ai-anthropic/README.md new file mode 100644 index 00000000..bbdfb53d --- /dev/null +++ b/examples/example-ai-anthropic/README.md @@ -0,0 +1,26 @@ +# Anthropic + PostHog AI Examples + +Track Anthropic Claude API calls with PostHog. + +## Setup + +```bash +pip install -r requirements.txt +cp .env.example .env +# Fill in your API keys in .env +``` + +## Examples + +- **chat.py** - Basic chat with tool calling +- **streaming.py** - Streaming responses +- **extended_thinking.py** - Claude's extended thinking feature + +## Run + +```bash +source .env +python chat.py +python streaming.py +python extended_thinking.py +``` diff --git a/examples/example-ai-anthropic/chat.py b/examples/example-ai-anthropic/chat.py new file mode 100644 index 00000000..99726046 --- /dev/null +++ b/examples/example-ai-anthropic/chat.py @@ -0,0 +1,57 @@ +"""Anthropic chat with tool calling, tracked by PostHog.""" + +import os +import json +import urllib.request +from posthog import Posthog +from posthog.ai.anthropic import Anthropic + +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) +client = Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"], posthog_client=posthog) + +tools = [ + { + "name": "get_weather", + "description": "Get current weather for a location", + "input_schema": { + "type": "object", + "properties": { + "latitude": {"type": "number"}, + "longitude": {"type": "number"}, + "location_name": {"type": "string"}, + }, + "required": ["latitude", "longitude", "location_name"], + }, + } +] + + +def 
get_weather(latitude: float, longitude: float, location_name: str) -> str: + url = f"https://api.open-meteo.com/v1/forecast?latitude={latitude}&longitude={longitude}&current=temperature_2m,relative_humidity_2m,wind_speed_10m" + with urllib.request.urlopen(url) as resp: + data = json.loads(resp.read()) + current = data["current"] + return f"Weather in {location_name}: {current['temperature_2m']}°C, humidity {current['relative_humidity_2m']}%, wind {current['wind_speed_10m']} km/h" + + +message = client.messages.create( + model="claude-sonnet-4-5-20250929", + max_tokens=1024, + posthog_distinct_id="example-user", + tools=tools, + messages=[{"role": "user", "content": "What's the weather like in San Francisco?"}], +) + +# Handle tool use if the model requests it. +# In production, send tool results back to the model for a final response. +for block in message.content: + if block.type == "text": + print(block.text) + elif block.type == "tool_use": + result = get_weather(**block.input) + print(result) + +posthog.shutdown() diff --git a/examples/example-ai-anthropic/extended_thinking.py b/examples/example-ai-anthropic/extended_thinking.py new file mode 100644 index 00000000..e4f50381 --- /dev/null +++ b/examples/example-ai-anthropic/extended_thinking.py @@ -0,0 +1,35 @@ +"""Anthropic extended thinking, tracked by PostHog. + +Extended thinking lets Claude show its reasoning process before responding. 
+""" + +import os +from posthog import Posthog +from posthog.ai.anthropic import Anthropic + +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) +client = Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"], posthog_client=posthog) + +message = client.messages.create( + model="claude-sonnet-4-5-20250929", + max_tokens=16000, + posthog_distinct_id="example-user", + thinking={"type": "enabled", "budget_tokens": 10000}, + messages=[ + { + "role": "user", + "content": "What is the probability of rolling at least one six in four rolls of a fair die?", + } + ], +) + +for block in message.content: + if block.type == "thinking": + print(f"Thinking: {block.thinking}\n") + elif block.type == "text": + print(f"Answer: {block.text}") + +posthog.shutdown() diff --git a/examples/example-ai-anthropic/requirements.txt b/examples/example-ai-anthropic/requirements.txt new file mode 100644 index 00000000..8eb8ac34 --- /dev/null +++ b/examples/example-ai-anthropic/requirements.txt @@ -0,0 +1,2 @@ +posthog>=6.6.1 +anthropic diff --git a/examples/example-ai-anthropic/streaming.py b/examples/example-ai-anthropic/streaming.py new file mode 100644 index 00000000..001a8bed --- /dev/null +++ b/examples/example-ai-anthropic/streaming.py @@ -0,0 +1,27 @@ +"""Anthropic streaming chat, tracked by PostHog.""" + +import os +from posthog import Posthog +from posthog.ai.anthropic import Anthropic + +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) +client = Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"], posthog_client=posthog) + +stream = client.messages.create( + model="claude-sonnet-4-5-20250929", + max_tokens=1024, + posthog_distinct_id="example-user", + messages=[{"role": "user", "content": "Write a haiku about observability."}], + stream=True, +) + +for event in stream: + if hasattr(event, "type"): + if event.type == "content_block_delta" and 
hasattr(event.delta, "text"): + print(event.delta.text, end="", flush=True) + +print() +posthog.shutdown() diff --git a/examples/example-ai-gemini/.env.example b/examples/example-ai-gemini/.env.example new file mode 100644 index 00000000..821414cc --- /dev/null +++ b/examples/example-ai-gemini/.env.example @@ -0,0 +1,3 @@ +POSTHOG_API_KEY=phc_your_project_api_key +POSTHOG_HOST=https://us.i.posthog.com +GEMINI_API_KEY=your_gemini_api_key diff --git a/examples/example-ai-gemini/README.md b/examples/example-ai-gemini/README.md new file mode 100644 index 00000000..daa40403 --- /dev/null +++ b/examples/example-ai-gemini/README.md @@ -0,0 +1,26 @@ +# Google Gemini + PostHog AI Examples + +Track Google Gemini API calls with PostHog. + +## Setup + +```bash +pip install -r requirements.txt +cp .env.example .env +# Fill in your API keys in .env +``` + +## Examples + +- **chat.py** - Chat with tool calling +- **streaming.py** - Streaming responses +- **image_generation.py** - Image generation + +## Run + +```bash +source .env +python chat.py +python streaming.py +python image_generation.py +``` diff --git a/examples/example-ai-gemini/chat.py b/examples/example-ai-gemini/chat.py new file mode 100644 index 00000000..ac10be71 --- /dev/null +++ b/examples/example-ai-gemini/chat.py @@ -0,0 +1,65 @@ +"""Google Gemini chat with tool calling, tracked by PostHog.""" + +import os +import json +import urllib.request +from google.genai import types +from posthog import Posthog +from posthog.ai.gemini import Client + +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) +client = Client(api_key=os.environ["GEMINI_API_KEY"], posthog_client=posthog) + +tool_declarations = [ + { + "name": "get_weather", + "description": "Get current weather for a location", + "parameters": { + "type": "object", + "properties": { + "latitude": {"type": "number"}, + "longitude": {"type": "number"}, + "location_name": {"type": "string"}, + }, 
+ "required": ["latitude", "longitude", "location_name"], + }, + } +] + + +def get_weather(latitude: float, longitude: float, location_name: str) -> str: + url = f"https://api.open-meteo.com/v1/forecast?latitude={latitude}&longitude={longitude}&current=temperature_2m,relative_humidity_2m,wind_speed_10m" + with urllib.request.urlopen(url) as resp: + data = json.loads(resp.read()) + current = data["current"] + return f"Weather in {location_name}: {current['temperature_2m']}°C, humidity {current['relative_humidity_2m']}%, wind {current['wind_speed_10m']} km/h" + + +config = types.GenerateContentConfig( + tools=[types.Tool(function_declarations=tool_declarations)] +) + +response = client.models.generate_content( + model="gemini-2.5-flash", + posthog_distinct_id="example-user", + contents=[{"role": "user", "parts": [{"text": "What's the weather in London?"}]}], + config=config, +) + +# In production, send tool results back to the model for a final response. +for candidate in response.candidates: + for part in candidate.content.parts: + if hasattr(part, "function_call") and part.function_call: + result = get_weather( + latitude=part.function_call.args["latitude"], + longitude=part.function_call.args["longitude"], + location_name=part.function_call.args["location_name"], + ) + print(result) + elif hasattr(part, "text"): + print(part.text) + +posthog.shutdown() diff --git a/examples/example-ai-gemini/image_generation.py b/examples/example-ai-gemini/image_generation.py new file mode 100644 index 00000000..3300586a --- /dev/null +++ b/examples/example-ai-gemini/image_generation.py @@ -0,0 +1,34 @@ +"""Google Gemini image generation, tracked by PostHog.""" + +import logging +import os + +from posthog import Posthog +from posthog.ai.gemini import Client + +# Suppress verbose Gemini SDK logging of base64 image data +logging.getLogger("google.genai").setLevel(logging.WARNING) + +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", 
"https://us.i.posthog.com"), +) +client = Client(api_key=os.environ["GEMINI_API_KEY"], posthog_client=posthog) + +response = client.models.generate_content( + model="gemini-2.5-flash-image", + posthog_distinct_id="example-user", + posthog_privacy_mode=True, # Redact base64 image data from the PostHog event + contents=[{"role": "user", "parts": [{"text": "Generate a pixel art hedgehog"}]}], +) + +for candidate in response.candidates: + for part in candidate.content.parts: + if hasattr(part, "inline_data") and part.inline_data: + print( + f"Generated image: {part.inline_data.mime_type}, {len(part.inline_data.data)} bytes" + ) + elif hasattr(part, "text"): + print(part.text) + +posthog.shutdown() diff --git a/examples/example-ai-gemini/requirements.txt b/examples/example-ai-gemini/requirements.txt new file mode 100644 index 00000000..6d0a0446 --- /dev/null +++ b/examples/example-ai-gemini/requirements.txt @@ -0,0 +1,2 @@ +posthog>=6.6.1 +google-genai diff --git a/examples/example-ai-gemini/streaming.py b/examples/example-ai-gemini/streaming.py new file mode 100644 index 00000000..dde9696f --- /dev/null +++ b/examples/example-ai-gemini/streaming.py @@ -0,0 +1,31 @@ +"""Google Gemini streaming chat, tracked by PostHog.""" + +import os +from posthog import Posthog +from posthog.ai.gemini import Client + +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) +client = Client(api_key=os.environ["GEMINI_API_KEY"], posthog_client=posthog) + +stream = client.models.generate_content_stream( + model="gemini-2.5-flash", + posthog_distinct_id="example-user", + contents=[ + { + "role": "user", + "parts": [{"text": "Explain product analytics in three sentences."}], + } + ], +) + +for chunk in stream: + for candidate in chunk.candidates: + for part in candidate.content.parts: + if hasattr(part, "text"): + print(part.text, end="", flush=True) + +print() +posthog.shutdown() diff --git 
a/examples/example-ai-langchain/.env.example b/examples/example-ai-langchain/.env.example new file mode 100644 index 00000000..3d1dd067 --- /dev/null +++ b/examples/example-ai-langchain/.env.example @@ -0,0 +1,3 @@ +POSTHOG_API_KEY=phc_your_project_api_key +POSTHOG_HOST=https://us.i.posthog.com +OPENAI_API_KEY=sk-your_api_key diff --git a/examples/example-ai-langchain/README.md b/examples/example-ai-langchain/README.md new file mode 100644 index 00000000..f6dae0fb --- /dev/null +++ b/examples/example-ai-langchain/README.md @@ -0,0 +1,30 @@ +# LangChain + PostHog AI Examples + +Track LangChain LLM calls with PostHog. + +## Setup + +```bash +pip install -r requirements.txt +cp .env.example .env +# Fill in your API keys in .env +``` + +For the OTEL example, also install: + +```bash +pip install opentelemetry-sdk opentelemetry-exporter-otlp-proto-http +``` + +## Examples + +- **callback_handler.py** - PostHog callback handler with tool calling +- **otel.py** - OpenTelemetry instrumentation exporting to PostHog + +## Run + +```bash +source .env +python callback_handler.py +python otel.py +``` diff --git a/examples/example-ai-langchain/callback_handler.py b/examples/example-ai-langchain/callback_handler.py new file mode 100644 index 00000000..3259fc01 --- /dev/null +++ b/examples/example-ai-langchain/callback_handler.py @@ -0,0 +1,54 @@ +"""LangChain with PostHog callback handler for automatic tracking.""" + +import os +import json +import urllib.request +from langchain_openai import ChatOpenAI +from langchain_core.tools import tool +from langchain_core.messages import HumanMessage +from posthog import Posthog +from posthog.ai.langchain import CallbackHandler + +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) +callback_handler = CallbackHandler(client=posthog) + + +@tool +def get_weather(latitude: float, longitude: float, location_name: str) -> str: + """Get current weather for a location. 
+ + Args: + latitude: The latitude of the location + longitude: The longitude of the location + location_name: A human-readable name for the location + """ + url = f"https://api.open-meteo.com/v1/forecast?latitude={latitude}&longitude={longitude}&current=temperature_2m,relative_humidity_2m,wind_speed_10m" + with urllib.request.urlopen(url) as resp: + data = json.loads(resp.read()) + current = data["current"] + return f"Weather in {location_name}: {current['temperature_2m']}°C, humidity {current['relative_humidity_2m']}%, wind {current['wind_speed_10m']} km/h" + + +tools = [get_weather] +tool_map = {t.name: t for t in tools} + +model = ChatOpenAI(openai_api_key=os.environ["OPENAI_API_KEY"], temperature=0) +model_with_tools = model.bind_tools(tools) + +messages = [HumanMessage(content="What's the weather in Berlin?")] + +response = model_with_tools.invoke(messages, config={"callbacks": [callback_handler]}) + +if response.content: + print(response.content) + +# In production, send tool results back to the model for a final response. 
+if response.tool_calls: + for tool_call in response.tool_calls: + result = tool_map[tool_call["name"]].invoke(tool_call["args"]) + print(result) + +posthog.shutdown() diff --git a/examples/example-ai-langchain/otel.py b/examples/example-ai-langchain/otel.py new file mode 100644 index 00000000..bf18b38b --- /dev/null +++ b/examples/example-ai-langchain/otel.py @@ -0,0 +1,33 @@ +"""LangChain with OpenTelemetry instrumentation, exporting to PostHog.""" + +import os +from langchain_openai import ChatOpenAI +from langchain_core.messages import HumanMessage +from opentelemetry import trace +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor +from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter + +# Configure OTEL to export traces to PostHog +posthog_api_key = os.environ["POSTHOG_API_KEY"] +posthog_host = os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com") + +os.environ["OTEL_EXPORTER_OTLP_TRACES_ENDPOINT"] = f"{posthog_host}/i/v0/ai/otel" +os.environ["OTEL_EXPORTER_OTLP_HEADERS"] = f"Authorization=Bearer {posthog_api_key}" + +tracer_provider = TracerProvider( + resource=Resource.create( + {"service.name": "langchain-example", "user.id": "example-user"} + ) +) +tracer_provider.add_span_processor(BatchSpanProcessor(OTLPSpanExporter())) +trace.set_tracer_provider(tracer_provider) + +# Use LangChain as normal — OTEL captures the traces automatically +model = ChatOpenAI(openai_api_key=os.environ["OPENAI_API_KEY"], temperature=0) + +response = model.invoke([HumanMessage(content="What is product analytics?")]) +print(response.content) + +tracer_provider.shutdown() diff --git a/examples/example-ai-langchain/requirements.txt b/examples/example-ai-langchain/requirements.txt new file mode 100644 index 00000000..b48c05ff --- /dev/null +++ b/examples/example-ai-langchain/requirements.txt @@ -0,0 +1,3 @@ +posthog>=6.6.1 +langchain 
+langchain-openai diff --git a/examples/example-ai-litellm/.env.example b/examples/example-ai-litellm/.env.example new file mode 100644 index 00000000..3d1dd067 --- /dev/null +++ b/examples/example-ai-litellm/.env.example @@ -0,0 +1,3 @@ +POSTHOG_API_KEY=phc_your_project_api_key +POSTHOG_HOST=https://us.i.posthog.com +OPENAI_API_KEY=sk-your_api_key diff --git a/examples/example-ai-litellm/README.md b/examples/example-ai-litellm/README.md new file mode 100644 index 00000000..9421428c --- /dev/null +++ b/examples/example-ai-litellm/README.md @@ -0,0 +1,24 @@ +# LiteLLM + PostHog AI Examples + +Track LiteLLM calls with PostHog using the built-in callback integration. + +## Setup + +```bash +pip install -r requirements.txt +cp .env.example .env +# Fill in your API keys in .env +``` + +## Examples + +- **chat.py** - Chat with tool calling (works with any LiteLLM-supported model) +- **streaming.py** - Streaming responses + +## Run + +```bash +source .env +python chat.py +python streaming.py +``` diff --git a/examples/example-ai-litellm/chat.py b/examples/example-ai-litellm/chat.py new file mode 100644 index 00000000..13cf5a13 --- /dev/null +++ b/examples/example-ai-litellm/chat.py @@ -0,0 +1,69 @@ +"""LiteLLM chat with PostHog tracking via built-in callback.""" + +import os +import json +import urllib.request +import litellm + +# Enable PostHog callbacks — LiteLLM has built-in PostHog support +os.environ["POSTHOG_API_KEY"] = os.environ.get("POSTHOG_API_KEY", "") +os.environ["POSTHOG_API_URL"] = os.environ.get( + "POSTHOG_HOST", "https://us.i.posthog.com" +) +litellm.success_callback = ["posthog"] +litellm.failure_callback = ["posthog"] + +tools = [ + { + "type": "function", + "function": { + "name": "get_weather", + "description": "Get current weather for a location", + "parameters": { + "type": "object", + "properties": { + "latitude": {"type": "number"}, + "longitude": {"type": "number"}, + "location_name": {"type": "string"}, + }, + "required": ["latitude", 
"longitude", "location_name"], + }, + }, + } +] + + +def get_weather(latitude: float, longitude: float, location_name: str) -> str: + url = f"https://api.open-meteo.com/v1/forecast?latitude={latitude}&longitude={longitude}&current=temperature_2m,relative_humidity_2m,wind_speed_10m" + with urllib.request.urlopen(url) as resp: + data = json.loads(resp.read()) + current = data["current"] + return f"Weather in {location_name}: {current['temperature_2m']}°C, humidity {current['relative_humidity_2m']}%, wind {current['wind_speed_10m']} km/h" + + +# LiteLLM supports any model — just change the model string +response = litellm.completion( + model="gpt-4o-mini", + messages=[ + { + "role": "system", + "content": "You are a helpful assistant with access to weather data.", + }, + {"role": "user", "content": "What's the weather in Paris?"}, + ], + tools=tools, + tool_choice="auto", + metadata={"distinct_id": "example-user"}, +) + +message = response.choices[0].message + +if message.content: + print(message.content) + +# In production, send tool results back to the model for a final response. 
+if hasattr(message, "tool_calls") and message.tool_calls: + for tool_call in message.tool_calls: + args = json.loads(tool_call.function.arguments) + result = get_weather(**args) + print(result) diff --git a/examples/example-ai-litellm/requirements.txt b/examples/example-ai-litellm/requirements.txt new file mode 100644 index 00000000..0fc073c3 --- /dev/null +++ b/examples/example-ai-litellm/requirements.txt @@ -0,0 +1,2 @@ +posthog>=6.6.1 +litellm diff --git a/examples/example-ai-litellm/streaming.py b/examples/example-ai-litellm/streaming.py new file mode 100644 index 00000000..f82e0080 --- /dev/null +++ b/examples/example-ai-litellm/streaming.py @@ -0,0 +1,27 @@ +"""LiteLLM streaming chat with PostHog tracking.""" + +import os +import litellm + +os.environ["POSTHOG_API_KEY"] = os.environ.get("POSTHOG_API_KEY", "") +os.environ["POSTHOG_API_URL"] = os.environ.get( + "POSTHOG_HOST", "https://us.i.posthog.com" +) +litellm.success_callback = ["posthog"] +litellm.failure_callback = ["posthog"] + +response = litellm.completion( + model="gpt-4o-mini", + messages=[ + {"role": "system", "content": "You are a helpful assistant."}, + {"role": "user", "content": "Explain feature flags in three sentences."}, + ], + stream=True, + metadata={"distinct_id": "example-user"}, +) + +for chunk in response: + if chunk.choices and chunk.choices[0].delta.content: + print(chunk.choices[0].delta.content, end="", flush=True) + +print() diff --git a/examples/example-ai-openai-agents/.env.example b/examples/example-ai-openai-agents/.env.example new file mode 100644 index 00000000..3d1dd067 --- /dev/null +++ b/examples/example-ai-openai-agents/.env.example @@ -0,0 +1,3 @@ +POSTHOG_API_KEY=phc_your_project_api_key +POSTHOG_HOST=https://us.i.posthog.com +OPENAI_API_KEY=sk-your_api_key diff --git a/examples/example-ai-openai-agents/README.md b/examples/example-ai-openai-agents/README.md new file mode 100644 index 00000000..dd8f89f1 --- /dev/null +++ b/examples/example-ai-openai-agents/README.md 
@@ -0,0 +1,28 @@ +# OpenAI Agents SDK + PostHog AI Examples + +Track OpenAI Agents SDK calls with PostHog. + +## Setup + +```bash +pip install -r requirements.txt +cp .env.example .env +# Fill in your API keys in .env +``` + +## Examples + +- **multi_agent.py** - Triage agent routing to specialist agents via handoffs +- **single_agent.py** - Single agent with weather and math tools +- **guardrails.py** - Input/output guardrails for content filtering +- **custom_spans.py** - Custom spans for tracking non-LLM operations within a trace + +## Run + +```bash +source .env +python multi_agent.py +python single_agent.py +python guardrails.py +python custom_spans.py +``` diff --git a/examples/example-ai-openai-agents/custom_spans.py b/examples/example-ai-openai-agents/custom_spans.py new file mode 100644 index 00000000..c766c467 --- /dev/null +++ b/examples/example-ai-openai-agents/custom_spans.py @@ -0,0 +1,41 @@ +"""OpenAI Agents SDK with custom spans for tracking custom operations, traced by PostHog.""" + +import asyncio +import os +from agents import Agent, Runner, trace +from agents.tracing import custom_span +from posthog import Posthog +from posthog.ai.openai_agents import instrument + +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) +instrument(posthog, distinct_id="example-user") + +agent = Agent( + name="Assistant", + instructions="You are a helpful assistant.", + model="gpt-4o-mini", +) + + +async def main(): + user_input = "Summarize the benefits of product analytics" + + # Wrap the workflow in a trace with custom spans for each stage + with trace("processing_pipeline"): + with custom_span(name="preprocess", data={"input_length": len(user_input)}): + processed = user_input.strip().lower() + + with custom_span(name="validate", data={"input": processed}): + is_valid = 0 < len(processed) < 1000 + + if is_valid: + with custom_span(name="llm_call"): + result = await Runner.run(agent, 
user_input) + print(result.final_output) + + +asyncio.run(main()) +posthog.shutdown() diff --git a/examples/example-ai-openai-agents/guardrails.py b/examples/example-ai-openai-agents/guardrails.py new file mode 100644 index 00000000..bc155f80 --- /dev/null +++ b/examples/example-ai-openai-agents/guardrails.py @@ -0,0 +1,86 @@ +"""OpenAI Agents SDK with input/output guardrails, tracked by PostHog.""" + +import asyncio +import os +from agents import ( + Agent, + Runner, + input_guardrail, + output_guardrail, + GuardrailFunctionOutput, + RunContextWrapper, + TResponseInputItem, +) +from posthog import Posthog +from posthog.ai.openai_agents import instrument + +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) +instrument(posthog, distinct_id="example-user") + +BLOCKED_INPUT_WORDS = ["hack", "exploit", "bypass"] +BLOCKED_OUTPUT_WORDS = ["confidential", "secret", "classified"] + + +@input_guardrail +async def content_filter( + ctx: RunContextWrapper[None], agent: Agent, input: str | list[TResponseInputItem] +) -> GuardrailFunctionOutput: + """Block requests containing prohibited words.""" + text = ( + str(input).lower() + if isinstance(input, str) + else " ".join(str(i) for i in input).lower() + ) + for word in BLOCKED_INPUT_WORDS: + if word in text: + return GuardrailFunctionOutput( + output_info={"blocked_word": word}, + tripwire_triggered=True, + ) + return GuardrailFunctionOutput( + output_info={"status": "passed"}, tripwire_triggered=False + ) + + +@output_guardrail +async def sensitive_data_filter( + ctx: RunContextWrapper[None], agent: Agent, output: str +) -> GuardrailFunctionOutput: + """Prevent sensitive information from being returned.""" + for word in BLOCKED_OUTPUT_WORDS: + if word in output.lower(): + return GuardrailFunctionOutput( + output_info={"blocked_word": word}, + tripwire_triggered=True, + ) + return GuardrailFunctionOutput( + output_info={"status": "passed"}, 
tripwire_triggered=False + ) + + +guarded_agent = Agent( + name="GuardedAgent", + instructions="You are a helpful assistant. Be informative but avoid sensitive topics.", + model="gpt-4o-mini", + input_guardrails=[content_filter], + output_guardrails=[sensitive_data_filter], +) + + +async def main(): + # This should pass guardrails + result = await Runner.run(guarded_agent, "What is product analytics?") + print(f"Passed: {result.final_output}") + + # This should trigger the input guardrail + try: + await Runner.run(guarded_agent, "How do I hack into a system?") + except Exception as e: + print(f"Blocked: {e}") + + +asyncio.run(main()) +posthog.shutdown() diff --git a/examples/example-ai-openai-agents/multi_agent.py b/examples/example-ai-openai-agents/multi_agent.py new file mode 100644 index 00000000..554ac846 --- /dev/null +++ b/examples/example-ai-openai-agents/multi_agent.py @@ -0,0 +1,66 @@ +"""OpenAI Agents SDK multi-agent with handoffs, tracked by PostHog.""" + +import asyncio +import os +from typing import Annotated +from agents import Agent, Runner, function_tool +from posthog import Posthog +from posthog.ai.openai_agents import instrument + +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) +instrument(posthog, distinct_id="example-user") + + +@function_tool +def get_weather(city: Annotated[str, "The city to get weather for"]) -> str: + """Get current weather for a city.""" + return f"Weather in {city}: 18°C, partly cloudy, humidity 65%" + + +@function_tool +def calculate(expression: Annotated[str, "A math expression to evaluate"]) -> str: + """Evaluate a mathematical expression.""" + allowed = set("0123456789+-*/().^ ") + if not all(c in allowed for c in expression): + return "Error: invalid characters" + return f"Result: {eval(expression.replace('^', '**'))}" + + +weather_agent = Agent( + name="WeatherAgent", + instructions="You handle weather queries. 
Use the get_weather tool.", + model="gpt-4o-mini", + tools=[get_weather], +) + +math_agent = Agent( + name="MathAgent", + instructions="You handle math problems. Use the calculate tool.", + model="gpt-4o-mini", + tools=[calculate], +) + +general_agent = Agent( + name="GeneralAgent", + instructions="You handle general questions and conversation.", + model="gpt-4o-mini", +) + +triage_agent = Agent( + name="TriageAgent", + instructions="Route to WeatherAgent for weather, MathAgent for math, GeneralAgent for everything else.", + model="gpt-4o-mini", + handoffs=[weather_agent, math_agent, general_agent], +) + + +async def main(): + result = await Runner.run(triage_agent, "What's the weather in Tokyo?") + print(result.final_output) + + +asyncio.run(main()) +posthog.shutdown() diff --git a/examples/example-ai-openai-agents/requirements.txt b/examples/example-ai-openai-agents/requirements.txt new file mode 100644 index 00000000..be0e3cd2 --- /dev/null +++ b/examples/example-ai-openai-agents/requirements.txt @@ -0,0 +1,2 @@ +posthog>=6.6.1 +openai-agents diff --git a/examples/example-ai-openai-agents/single_agent.py b/examples/example-ai-openai-agents/single_agent.py new file mode 100644 index 00000000..9423d800 --- /dev/null +++ b/examples/example-ai-openai-agents/single_agent.py @@ -0,0 +1,46 @@ +"""OpenAI Agents SDK single agent with tools, tracked by PostHog.""" + +import asyncio +import os +from typing import Annotated +from agents import Agent, Runner, function_tool +from posthog import Posthog +from posthog.ai.openai_agents import instrument + +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) +instrument(posthog, distinct_id="example-user") + + +@function_tool +def get_weather(city: Annotated[str, "The city to get weather for"]) -> str: + """Get current weather for a city.""" + return f"Weather in {city}: 22°C, clear skies, humidity 45%" + + +@function_tool +def calculate(expression: 
Annotated[str, "A math expression to evaluate"]) -> str: + """Evaluate a mathematical expression.""" + allowed = set("0123456789+-*/().^ ") + if not all(c in allowed for c in expression): + return "Error: invalid characters" + return f"Result: {eval(expression.replace('^', '**'))}" + + +agent = Agent( + name="Assistant", + instructions="You are a helpful assistant with weather and math tools.", + model="gpt-4o-mini", + tools=[get_weather, calculate], +) + + +async def main(): + result = await Runner.run(agent, "What's 15% of 280?") + print(result.final_output) + + +asyncio.run(main()) +posthog.shutdown() diff --git a/examples/example-ai-openai/.env.example b/examples/example-ai-openai/.env.example new file mode 100644 index 00000000..3d1dd067 --- /dev/null +++ b/examples/example-ai-openai/.env.example @@ -0,0 +1,3 @@ +POSTHOG_API_KEY=phc_your_project_api_key +POSTHOG_HOST=https://us.i.posthog.com +OPENAI_API_KEY=sk-your_api_key diff --git a/examples/example-ai-openai/README.md b/examples/example-ai-openai/README.md new file mode 100644 index 00000000..f766e360 --- /dev/null +++ b/examples/example-ai-openai/README.md @@ -0,0 +1,34 @@ +# OpenAI + PostHog AI Examples + +Track OpenAI API calls with PostHog. 
+ +## Setup + +```bash +pip install -r requirements.txt +cp .env.example .env +# Fill in your API keys in .env +``` + +## Examples + +- **chat_completions.py** - Chat Completions API with tool calling +- **chat_completions_streaming.py** - Chat Completions with streaming +- **responses.py** - Responses API with tool calling +- **responses_streaming.py** - Responses API with streaming +- **embeddings.py** - Text embeddings +- **transcription.py** - Audio transcription (Whisper) +- **image_generation.py** - Image generation via Responses API + +## Run + +```bash +source .env +python chat_completions.py +python chat_completions_streaming.py +python responses.py +python responses_streaming.py +python embeddings.py +python transcription.py +python image_generation.py +``` diff --git a/examples/example-ai-openai/chat_completions.py b/examples/example-ai-openai/chat_completions.py new file mode 100644 index 00000000..32238ca0 --- /dev/null +++ b/examples/example-ai-openai/chat_completions.py @@ -0,0 +1,70 @@ +"""OpenAI Chat Completions API with tool calling, tracked by PostHog.""" + +import os +import json +import urllib.request +from posthog import Posthog +from posthog.ai.openai import OpenAI + +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) +client = OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) + +tools = [ + { + "type": "function", + "function": { + "name": "get_weather", + "description": "Get current weather for a location", + "parameters": { + "type": "object", + "properties": { + "latitude": {"type": "number"}, + "longitude": {"type": "number"}, + "location_name": {"type": "string"}, + }, + "required": ["latitude", "longitude", "location_name"], + }, + }, + } +] + + +def get_weather(latitude: float, longitude: float, location_name: str) -> str: + url = 
f"https://api.open-meteo.com/v1/forecast?latitude={latitude}&longitude={longitude}&current=temperature_2m,relative_humidity_2m,wind_speed_10m" + with urllib.request.urlopen(url) as resp: + data = json.loads(resp.read()) + current = data["current"] + return f"Weather in {location_name}: {current['temperature_2m']}°C, humidity {current['relative_humidity_2m']}%, wind {current['wind_speed_10m']} km/h" + + +response = client.chat.completions.create( + model="gpt-4o-mini", + max_completion_tokens=1024, + posthog_distinct_id="example-user", + tools=tools, + tool_choice="auto", + messages=[ + { + "role": "system", + "content": "You are a helpful assistant with access to weather data.", + }, + {"role": "user", "content": "What's the weather like in Dublin, Ireland?"}, + ], +) + +message = response.choices[0].message + +if message.content: + print(message.content) + +# In production, send tool results back to the model for a final response. +if message.tool_calls: + for tool_call in message.tool_calls: + args = json.loads(tool_call.function.arguments) + result = get_weather(**args) + print(result) + +posthog.shutdown() diff --git a/examples/example-ai-openai/chat_completions_streaming.py b/examples/example-ai-openai/chat_completions_streaming.py new file mode 100644 index 00000000..60721d49 --- /dev/null +++ b/examples/example-ai-openai/chat_completions_streaming.py @@ -0,0 +1,29 @@ +"""OpenAI Chat Completions API with streaming, tracked by PostHog.""" + +import os +from posthog import Posthog +from posthog.ai.openai import OpenAI + +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) +client = OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) + +stream = client.chat.completions.create( + model="gpt-4o-mini", + max_completion_tokens=1024, + posthog_distinct_id="example-user", + stream=True, + messages=[ + {"role": "system", "content": "You are a helpful assistant."}, + {"role": "user", 
"content": "Explain observability in three sentences."}, + ], +) + +for chunk in stream: + if chunk.choices and chunk.choices[0].delta.content: + print(chunk.choices[0].delta.content, end="", flush=True) + +print() +posthog.shutdown() diff --git a/examples/example-ai-openai/embeddings.py b/examples/example-ai-openai/embeddings.py new file mode 100644 index 00000000..826acfcb --- /dev/null +++ b/examples/example-ai-openai/embeddings.py @@ -0,0 +1,23 @@ +"""OpenAI embeddings, tracked by PostHog.""" + +import os +from posthog import Posthog +from posthog.ai.openai import OpenAI + +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) +client = OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) + +response = client.embeddings.create( + model="text-embedding-3-small", + input="PostHog is an open-source product analytics platform.", + posthog_distinct_id="example-user", +) + +embedding = response.data[0].embedding +print(f"Embedding dimensions: {len(embedding)}") +print(f"First 5 values: {embedding[:5]}") + +posthog.shutdown() diff --git a/examples/example-ai-openai/image_generation.py b/examples/example-ai-openai/image_generation.py new file mode 100644 index 00000000..f2a37cdc --- /dev/null +++ b/examples/example-ai-openai/image_generation.py @@ -0,0 +1,24 @@ +"""OpenAI image generation, tracked by PostHog.""" + +import os +from posthog import Posthog +from posthog.ai.openai import OpenAI + +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) +client = OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) + +# Note: posthog.ai does not wrap images.generate yet, +# so this call is not automatically tracked. 
+response = client.images.generate( + model="gpt-image-1", + prompt="A hedgehog wearing a PostHog t-shirt, pixel art style", + size="1024x1024", +) + +image_base64 = response.data[0].b64_json +print(f"Generated image: {len(image_base64)} chars of base64 data") + +posthog.shutdown() diff --git a/examples/example-ai-openai/requirements.txt b/examples/example-ai-openai/requirements.txt new file mode 100644 index 00000000..b01b2c58 --- /dev/null +++ b/examples/example-ai-openai/requirements.txt @@ -0,0 +1,2 @@ +posthog>=6.6.1 +openai diff --git a/examples/example-ai-openai/responses.py b/examples/example-ai-openai/responses.py new file mode 100644 index 00000000..82b138aa --- /dev/null +++ b/examples/example-ai-openai/responses.py @@ -0,0 +1,60 @@ +"""OpenAI Responses API with tool calling, tracked by PostHog.""" + +import os +import json +import urllib.request +from posthog import Posthog +from posthog.ai.openai import OpenAI + +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) +client = OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) + +tools = [ + { + "type": "function", + "name": "get_weather", + "description": "Get current weather for a location", + "parameters": { + "type": "object", + "properties": { + "latitude": {"type": "number"}, + "longitude": {"type": "number"}, + "location_name": {"type": "string"}, + }, + "required": ["latitude", "longitude", "location_name"], + }, + } +] + + +def get_weather(latitude: float, longitude: float, location_name: str) -> str: + url = f"https://api.open-meteo.com/v1/forecast?latitude={latitude}&longitude={longitude}&current=temperature_2m,relative_humidity_2m,wind_speed_10m" + with urllib.request.urlopen(url) as resp: + data = json.loads(resp.read()) + current = data["current"] + return f"Weather in {location_name}: {current['temperature_2m']}°C, humidity {current['relative_humidity_2m']}%, wind {current['wind_speed_10m']} km/h" + + 
+response = client.responses.create( + model="gpt-4o-mini", + max_output_tokens=1024, + posthog_distinct_id="example-user", + tools=tools, + instructions="You are a helpful assistant with access to weather data.", + input=[{"role": "user", "content": "What's the weather like in Tokyo?"}], +) + +for output_item in response.output: + if hasattr(output_item, "content"): + for content_item in output_item.content: + if hasattr(content_item, "text"): + print(content_item.text) + elif hasattr(output_item, "name"): + args = json.loads(output_item.arguments) + result = get_weather(**args) + print(result) + +posthog.shutdown() diff --git a/examples/example-ai-openai/responses_streaming.py b/examples/example-ai-openai/responses_streaming.py new file mode 100644 index 00000000..a0f92865 --- /dev/null +++ b/examples/example-ai-openai/responses_streaming.py @@ -0,0 +1,27 @@ +"""OpenAI Responses API with streaming, tracked by PostHog.""" + +import os +from posthog import Posthog +from posthog.ai.openai import OpenAI + +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) +client = OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) + +stream = client.responses.create( + model="gpt-4o-mini", + max_output_tokens=1024, + posthog_distinct_id="example-user", + stream=True, + instructions="You are a helpful assistant.", + input=[{"role": "user", "content": "Write a haiku about product analytics."}], +) + +for event in stream: + if hasattr(event, "type") and event.type == "response.output_text.delta": + print(event.delta, end="", flush=True) + +print() +posthog.shutdown() diff --git a/examples/example-ai-openai/transcription.py b/examples/example-ai-openai/transcription.py new file mode 100644 index 00000000..708209a0 --- /dev/null +++ b/examples/example-ai-openai/transcription.py @@ -0,0 +1,32 @@ +"""OpenAI audio transcription (Whisper), tracked by PostHog.""" + +import os +import sys +from posthog 
import Posthog +from posthog.ai.openai import OpenAI + +posthog = Posthog( + os.environ["POSTHOG_API_KEY"], + host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"), +) +client = OpenAI(api_key=os.environ["OPENAI_API_KEY"], posthog_client=posthog) + +# Replace with the path to your audio file +audio_path = os.environ.get("AUDIO_PATH", "audio.mp3") + +if not os.path.exists(audio_path): + print(f"Skipping: audio file not found at '{audio_path}'") + print("Set AUDIO_PATH to a valid audio file (mp3, wav, m4a, etc.)") + posthog.shutdown() + sys.exit(0) + +with open(audio_path, "rb") as audio_file: + transcription = client.audio.transcriptions.create( + file=audio_file, + model="whisper-1", + posthog_distinct_id="example-user", + ) + +print(f"Transcription: {transcription.text}") + +posthog.shutdown() diff --git a/examples/example-ai-pydantic-ai/.env.example b/examples/example-ai-pydantic-ai/.env.example new file mode 100644 index 00000000..3d1dd067 --- /dev/null +++ b/examples/example-ai-pydantic-ai/.env.example @@ -0,0 +1,3 @@ +POSTHOG_API_KEY=phc_your_project_api_key +POSTHOG_HOST=https://us.i.posthog.com +OPENAI_API_KEY=sk-your_api_key diff --git a/examples/example-ai-pydantic-ai/README.md b/examples/example-ai-pydantic-ai/README.md new file mode 100644 index 00000000..012f0fee --- /dev/null +++ b/examples/example-ai-pydantic-ai/README.md @@ -0,0 +1,22 @@ +# Pydantic AI + PostHog AI Examples + +Track Pydantic AI agent calls with PostHog via OpenTelemetry. 
+ +## Setup + +```bash +pip install -r requirements.txt +cp .env.example .env +# Fill in your API keys in .env +``` + +## Examples + +- **agent_with_otel.py** - Agent with tool calling, instrumented via OTEL + +## Run + +```bash +source .env +python agent_with_otel.py +``` diff --git a/examples/example-ai-pydantic-ai/agent_with_otel.py b/examples/example-ai-pydantic-ai/agent_with_otel.py new file mode 100644 index 00000000..802fba7d --- /dev/null +++ b/examples/example-ai-pydantic-ai/agent_with_otel.py @@ -0,0 +1,54 @@ +"""Pydantic AI agent with OpenTelemetry instrumentation, exporting to PostHog.""" + +import os +import json +import urllib.request +from pydantic_ai import Agent, RunContext +from pydantic_ai.models.openai import OpenAIModel +from opentelemetry import trace +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor +from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter + +# Configure OTEL to export traces to PostHog +posthog_api_key = os.environ["POSTHOG_API_KEY"] +posthog_host = os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com") + +os.environ["OTEL_EXPORTER_OTLP_TRACES_ENDPOINT"] = f"{posthog_host}/i/v0/ai/otel" +os.environ["OTEL_EXPORTER_OTLP_HEADERS"] = f"Authorization=Bearer {posthog_api_key}" + +tracer_provider = TracerProvider( + resource=Resource.create( + {"service.name": "pydantic-ai-example", "user.id": "example-user"} + ) +) +tracer_provider.add_span_processor(BatchSpanProcessor(OTLPSpanExporter())) +trace.set_tracer_provider(tracer_provider) + +# Create an agent with a tool +model = OpenAIModel("gpt-4o-mini") +agent = Agent( + model, system_prompt="You are a helpful assistant with access to weather data." 
+) + + +@agent.tool +def get_weather( + ctx: RunContext[None], latitude: float, longitude: float, location_name: str +) -> str: + """Get current weather for a location.""" + url = f"https://api.open-meteo.com/v1/forecast?latitude={latitude}&longitude={longitude}&current=temperature_2m,relative_humidity_2m,wind_speed_10m" + with urllib.request.urlopen(url) as resp: + data = json.loads(resp.read()) + current = data["current"] + return f"Weather in {location_name}: {current['temperature_2m']}°C, humidity {current['relative_humidity_2m']}%, wind {current['wind_speed_10m']} km/h" + + +# Enable automatic OTEL instrumentation for all agents +Agent.instrument_all() + +result = agent.run_sync("What's the weather in Amsterdam?") +print(result.output) + +tracer_provider.shutdown() diff --git a/examples/example-ai-pydantic-ai/requirements.txt b/examples/example-ai-pydantic-ai/requirements.txt new file mode 100644 index 00000000..b5e997a2 --- /dev/null +++ b/examples/example-ai-pydantic-ai/requirements.txt @@ -0,0 +1,4 @@ +posthog>=6.6.1 +pydantic-ai +opentelemetry-sdk +opentelemetry-exporter-otlp-proto-http diff --git a/mypy-baseline.txt b/mypy-baseline.txt index 232ce8be..46d23dbf 100644 --- a/mypy-baseline.txt +++ b/mypy-baseline.txt @@ -31,5 +31,5 @@ posthog/client.py:0: error: Incompatible types in assignment (expression has typ posthog/client.py:0: error: "None" has no attribute "start" [attr-defined] posthog/client.py:0: error: Statement is unreachable [unreachable] posthog/client.py:0: error: Statement is unreachable [unreachable] -posthog/client.py:0: error: Name "urlparse" already defined (possibly by an import) [no-redef] posthog/client.py:0: error: Name "parse_qs" already defined (possibly by an import) [no-redef] +posthog/client.py:0: error: Name "urlparse" already defined (possibly by an import) [no-redef] diff --git a/mypy.ini b/mypy.ini index 16f71be5..bd8447d9 100644 --- a/mypy.ini +++ b/mypy.ini @@ -9,7 +9,7 @@ check_untyped_defs = True warn_unreachable = True 
strict_equality = True ignore_missing_imports = True -exclude = env/.*|venv/.*|build/.* +exclude = env/.*|venv/.*|build/.*|examples/example-.* [mypy-django.*] ignore_missing_imports = True