Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions examples/example-ai-anthropic/.env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
POSTHOG_API_KEY=phc_your_project_api_key
POSTHOG_HOST=https://us.i.posthog.com
ANTHROPIC_API_KEY=sk-ant-your_api_key
26 changes: 26 additions & 0 deletions examples/example-ai-anthropic/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# Anthropic + PostHog AI Examples

Track Anthropic Claude API calls with PostHog.

## Setup

```bash
pip install -r requirements.txt
cp .env.example .env
# Fill in your API keys in .env
```

## Examples

- **chat.py** - Basic chat with tool calling
- **streaming.py** - Streaming responses
- **extended_thinking.py** - Claude's extended thinking feature

## Run

```bash
source .env
python chat.py
python streaming.py
python extended_thinking.py
```
57 changes: 57 additions & 0 deletions examples/example-ai-anthropic/chat.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
"""Anthropic chat with tool calling, tracked by PostHog."""

import os
import json
import urllib.request
from posthog import Posthog
from posthog.ai.anthropic import Anthropic

# PostHog client used to capture AI generation events for this example.
posthog = Posthog(
    os.environ["POSTHOG_API_KEY"],
    host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"),
)
# Drop-in Anthropic client wrapper from posthog.ai — same Messages API,
# but each call is reported to the given PostHog client.
client = Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"], posthog_client=posthog)

# Tool schema advertised to Claude (Anthropic "tool use" format).
# The input_schema mirrors get_weather's keyword arguments so the model's
# tool_use input can be unpacked directly into the function call below.
tools = [
    {
        "name": "get_weather",
        "description": "Get current weather for a location",
        "input_schema": {
            "type": "object",
            "properties": {
                "latitude": {"type": "number"},
                "longitude": {"type": "number"},
                "location_name": {"type": "string"},
            },
            "required": ["latitude", "longitude", "location_name"],
        },
    }
]


def get_weather(latitude: float, longitude: float, location_name: str) -> str:
    """Fetch current conditions from the Open-Meteo API and format a summary.

    Args:
        latitude: Latitude of the location in decimal degrees.
        longitude: Longitude of the location in decimal degrees.
        location_name: Human-readable name used in the returned summary.

    Returns:
        A one-line summary of temperature, humidity, and wind speed.

    Raises:
        urllib.error.URLError: If the HTTP request fails or times out.
    """
    url = (
        "https://api.open-meteo.com/v1/forecast"
        f"?latitude={latitude}&longitude={longitude}"
        "&current=temperature_2m,relative_humidity_2m,wind_speed_10m"
    )
    # Timeout so the example cannot hang indefinitely on a stalled connection.
    with urllib.request.urlopen(url, timeout=10) as resp:
        data = json.loads(resp.read())
    current = data["current"]
    return (
        f"Weather in {location_name}: {current['temperature_2m']}°C, "
        f"humidity {current['relative_humidity_2m']}%, "
        f"wind {current['wind_speed_10m']} km/h"
    )


# posthog_distinct_id ties the captured generation event to a specific
# user in PostHog; the remaining kwargs are standard Anthropic arguments.
message = client.messages.create(
    model="claude-sonnet-4-5-20250929",
    max_tokens=1024,
    posthog_distinct_id="example-user",
    tools=tools,
    messages=[{"role": "user", "content": "What's the weather like in San Francisco?"}],
)

# Handle tool use if the model requests it.
# In production, send tool results back to the model for a final response.
for block in message.content:
    if block.type == "text":
        print(block.text)
    elif block.type == "tool_use":
        # block.input follows the tool's input_schema, so it unpacks
        # directly into get_weather's keyword arguments.
        result = get_weather(**block.input)
        print(result)

# Flush any queued PostHog events before the process exits.
posthog.shutdown()
35 changes: 35 additions & 0 deletions examples/example-ai-anthropic/extended_thinking.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
"""Anthropic extended thinking, tracked by PostHog.

Extended thinking lets Claude show its reasoning process before responding.
"""

import os
from posthog import Posthog
from posthog.ai.anthropic import Anthropic

# PostHog client used to capture AI generation events for this example.
posthog = Posthog(
    os.environ["POSTHOG_API_KEY"],
    host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"),
)
# Drop-in Anthropic client wrapper that reports calls to PostHog.
client = Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"], posthog_client=posthog)

# budget_tokens limits how much Claude may spend on the thinking phase;
# presumably it must stay below max_tokens — see Anthropic's
# extended-thinking docs for the exact relationship.
message = client.messages.create(
    model="claude-sonnet-4-5-20250929",
    max_tokens=16000,
    posthog_distinct_id="example-user",
    thinking={"type": "enabled", "budget_tokens": 10000},
    messages=[
        {
            "role": "user",
            "content": "What is the probability of rolling at least one six in four rolls of a fair die?",
        }
    ],
)

# The response interleaves "thinking" blocks (the reasoning trace) with
# the final "text" answer.
for block in message.content:
    if block.type == "thinking":
        print(f"Thinking: {block.thinking}\n")
    elif block.type == "text":
        print(f"Answer: {block.text}")

# Flush any queued PostHog events before the process exits.
posthog.shutdown()
2 changes: 2 additions & 0 deletions examples/example-ai-anthropic/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
posthog>=6.6.1
anthropic
27 changes: 27 additions & 0 deletions examples/example-ai-anthropic/streaming.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
"""Anthropic streaming chat, tracked by PostHog."""

import os
from posthog import Posthog
from posthog.ai.anthropic import Anthropic

# PostHog client used to capture AI generation events for this example.
posthog = Posthog(
    os.environ["POSTHOG_API_KEY"],
    host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"),
)
# Drop-in Anthropic client wrapper that reports calls to PostHog.
client = Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"], posthog_client=posthog)

# stream=True makes create() return an iterator of server-sent events
# instead of a complete Message.
stream = client.messages.create(
    model="claude-sonnet-4-5-20250929",
    max_tokens=1024,
    posthog_distinct_id="example-user",
    messages=[{"role": "user", "content": "Write a haiku about observability."}],
    stream=True,
)

# Text arrives incrementally in content_block_delta events; other event
# types (message_start, content_block_stop, ...) are skipped.
for event in stream:
    if hasattr(event, "type"):
        if event.type == "content_block_delta" and hasattr(event.delta, "text"):
            print(event.delta.text, end="", flush=True)

# Terminate the streamed line, then flush queued PostHog events.
print()
posthog.shutdown()
3 changes: 3 additions & 0 deletions examples/example-ai-gemini/.env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
POSTHOG_API_KEY=phc_your_project_api_key
POSTHOG_HOST=https://us.i.posthog.com
GEMINI_API_KEY=your_gemini_api_key
26 changes: 26 additions & 0 deletions examples/example-ai-gemini/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# Google Gemini + PostHog AI Examples

Track Google Gemini API calls with PostHog.

## Setup

```bash
pip install -r requirements.txt
cp .env.example .env
# Fill in your API keys in .env
```

## Examples

- **chat.py** - Chat with tool calling
- **streaming.py** - Streaming responses
- **image_generation.py** - Image generation

## Run

```bash
source .env
python chat.py
python streaming.py
python image_generation.py
```
65 changes: 65 additions & 0 deletions examples/example-ai-gemini/chat.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
"""Google Gemini chat with tool calling, tracked by PostHog."""

import os
import json
import urllib.request
from google.genai import types
from posthog import Posthog
from posthog.ai.gemini import Client

# PostHog client used to capture AI generation events for this example.
posthog = Posthog(
    os.environ["POSTHOG_API_KEY"],
    host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"),
)
# PostHog wrapper around the google-genai client; calls made through it
# are reported to the given PostHog client.
client = Client(api_key=os.environ["GEMINI_API_KEY"], posthog_client=posthog)

# Function declaration in Gemini's tool-calling schema. The parameters
# mirror get_weather's keyword arguments so the model's function_call
# args map directly onto the local function below.
tool_declarations = [
    {
        "name": "get_weather",
        "description": "Get current weather for a location",
        "parameters": {
            "type": "object",
            "properties": {
                "latitude": {"type": "number"},
                "longitude": {"type": "number"},
                "location_name": {"type": "string"},
            },
            "required": ["latitude", "longitude", "location_name"],
        },
    }
]


def get_weather(latitude: float, longitude: float, location_name: str) -> str:
    """Fetch current conditions from the Open-Meteo API and format a summary.

    Args:
        latitude: Latitude of the location in decimal degrees.
        longitude: Longitude of the location in decimal degrees.
        location_name: Human-readable name used in the returned summary.

    Returns:
        A one-line summary of temperature, humidity, and wind speed.

    Raises:
        urllib.error.URLError: If the HTTP request fails or times out.
    """
    url = (
        "https://api.open-meteo.com/v1/forecast"
        f"?latitude={latitude}&longitude={longitude}"
        "&current=temperature_2m,relative_humidity_2m,wind_speed_10m"
    )
    # Timeout so the example cannot hang indefinitely on a stalled connection.
    with urllib.request.urlopen(url, timeout=10) as resp:
        data = json.loads(resp.read())
    current = data["current"]
    return (
        f"Weather in {location_name}: {current['temperature_2m']}°C, "
        f"humidity {current['relative_humidity_2m']}%, "
        f"wind {current['wind_speed_10m']} km/h"
    )


# Register the declared tool with the model via GenerateContentConfig.
config = types.GenerateContentConfig(
    tools=[types.Tool(function_declarations=tool_declarations)]
)

# posthog_distinct_id ties the captured generation event to a specific
# user in PostHog; the remaining kwargs are standard google-genai arguments.
response = client.models.generate_content(
    model="gemini-2.5-flash",
    posthog_distinct_id="example-user",
    contents=[{"role": "user", "parts": [{"text": "What's the weather in London?"}]}],
    config=config,
)

# In production, send tool results back to the model for a final response.
for candidate in response.candidates:
    for part in candidate.content.parts:
        if hasattr(part, "function_call") and part.function_call:
            # The model chose to call the tool; execute it locally with
            # the arguments it supplied.
            result = get_weather(
                latitude=part.function_call.args["latitude"],
                longitude=part.function_call.args["longitude"],
                location_name=part.function_call.args["location_name"],
            )
            print(result)
        elif hasattr(part, "text"):
            # NOTE(review): part.text may be None for non-text parts and
            # would print "None" here — confirm against the SDK's Part model.
            print(part.text)

# Flush any queued PostHog events before the process exits.
posthog.shutdown()
34 changes: 34 additions & 0 deletions examples/example-ai-gemini/image_generation.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
"""Google Gemini image generation, tracked by PostHog."""

import logging
import os

from posthog import Posthog
from posthog.ai.gemini import Client

# Suppress verbose Gemini SDK logging of base64 image data
logging.getLogger("google.genai").setLevel(logging.WARNING)

# PostHog client used to capture AI generation events for this example.
posthog = Posthog(
    os.environ["POSTHOG_API_KEY"],
    host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"),
)
# PostHog wrapper around the google-genai client.
client = Client(api_key=os.environ["GEMINI_API_KEY"], posthog_client=posthog)

response = client.models.generate_content(
    model="gemini-2.5-flash-image",
    posthog_distinct_id="example-user",
    posthog_privacy_mode=True,  # Redact base64 image data from the PostHog event
    contents=[{"role": "user", "parts": [{"text": "Generate a pixel art hedgehog"}]}],
)

# Image bytes arrive as inline_data parts; any textual commentary arrives
# as text parts.
for candidate in response.candidates:
    for part in candidate.content.parts:
        if hasattr(part, "inline_data") and part.inline_data:
            # Print metadata only — the raw image bytes are not saved.
            print(
                f"Generated image: {part.inline_data.mime_type}, {len(part.inline_data.data)} bytes"
            )
        elif hasattr(part, "text"):
            print(part.text)

# Flush any queued PostHog events before the process exits.
posthog.shutdown()
2 changes: 2 additions & 0 deletions examples/example-ai-gemini/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
posthog>=6.6.1
google-genai
31 changes: 31 additions & 0 deletions examples/example-ai-gemini/streaming.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
"""Google Gemini streaming chat, tracked by PostHog."""

import os
from posthog import Posthog
from posthog.ai.gemini import Client

# PostHog client used to capture AI generation events for this example.
posthog = Posthog(
    os.environ["POSTHOG_API_KEY"],
    host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"),
)
# PostHog wrapper around the google-genai client.
client = Client(api_key=os.environ["GEMINI_API_KEY"], posthog_client=posthog)

# generate_content_stream yields partial responses as the model produces
# them, instead of one complete response.
stream = client.models.generate_content_stream(
    model="gemini-2.5-flash",
    posthog_distinct_id="example-user",
    contents=[
        {
            "role": "user",
            "parts": [{"text": "Explain product analytics in three sentences."}],
        }
    ],
)

# Print each text fragment as soon as it arrives.
for chunk in stream:
    for candidate in chunk.candidates:
        for part in candidate.content.parts:
            if hasattr(part, "text"):
                print(part.text, end="", flush=True)

# Terminate the streamed line, then flush queued PostHog events.
print()
posthog.shutdown()
3 changes: 3 additions & 0 deletions examples/example-ai-langchain/.env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
POSTHOG_API_KEY=phc_your_project_api_key
POSTHOG_HOST=https://us.i.posthog.com
OPENAI_API_KEY=sk-your_api_key
30 changes: 30 additions & 0 deletions examples/example-ai-langchain/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
# LangChain + PostHog AI Examples

Track LangChain LLM calls with PostHog.

## Setup

```bash
pip install -r requirements.txt
cp .env.example .env
# Fill in your API keys in .env
```

For the OTEL example, also install:

```bash
pip install opentelemetry-sdk opentelemetry-exporter-otlp-proto-http
```

## Examples

- **callback_handler.py** - PostHog callback handler with tool calling
- **otel.py** - OpenTelemetry instrumentation exporting to PostHog

## Run

```bash
source .env
python callback_handler.py
python otel.py
```
Loading
Loading