Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,8 @@
# 7.2.0 - 2025-11-28

Capture LangChain, OpenAI, and Anthropic errors as exceptions (if exception autocapture is enabled)
Add reference to exception in LLMA trace and span events

# 7.1.0 - 2025-11-26

Add support for the async version of Gemini.
Expand Down
55 changes: 46 additions & 9 deletions posthog/ai/langchain/callbacks.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,8 @@

try:
# LangChain 1.0+ and modern 0.x with langchain-core
from langchain_core.callbacks.base import BaseCallbackHandler
from langchain_core.agents import AgentAction, AgentFinish
from langchain_core.callbacks.base import BaseCallbackHandler
except (ImportError, ModuleNotFoundError):
# Fallback for older LangChain versions
from langchain.callbacks.base import BaseCallbackHandler
Expand All @@ -35,15 +35,15 @@
FunctionMessage,
HumanMessage,
SystemMessage,
ToolMessage,
ToolCall,
ToolMessage,
)
from langchain_core.outputs import ChatGeneration, LLMResult
from pydantic import BaseModel

from posthog import setup
from posthog.ai.utils import get_model_params, with_privacy_mode
from posthog.ai.sanitization import sanitize_langchain
from posthog.ai.utils import get_model_params, with_privacy_mode
from posthog.client import Client

log = logging.getLogger("posthog")
Expand Down Expand Up @@ -506,6 +506,14 @@ def _capture_trace_or_span(
if isinstance(outputs, BaseException):
event_properties["$ai_error"] = _stringify_exception(outputs)
event_properties["$ai_is_error"] = True
event_properties = _capture_exception_and_update_properties(
self._ph_client,
outputs,
self._distinct_id,
self._groups,
event_properties,
)

elif outputs is not None:
event_properties["$ai_output_state"] = with_privacy_mode(
self._ph_client, self._privacy_mode, outputs
Expand Down Expand Up @@ -576,10 +584,24 @@ def _capture_generation(
if run.tools:
event_properties["$ai_tools"] = run.tools

if self._properties:
event_properties.update(self._properties)

if self._distinct_id is None:
event_properties["$process_person_profile"] = False

if isinstance(output, BaseException):
event_properties["$ai_http_status"] = _get_http_status(output)
event_properties["$ai_error"] = _stringify_exception(output)
event_properties["$ai_is_error"] = True

event_properties = _capture_exception_and_update_properties(
self._ph_client,
output,
self._distinct_id,
self._groups,
event_properties,
)
else:
# Add usage
usage = _parse_usage(output, run.provider, run.model)
Expand Down Expand Up @@ -607,12 +629,6 @@ def _capture_generation(
self._ph_client, self._privacy_mode, completions
)

if self._properties:
event_properties.update(self._properties)

if self._distinct_id is None:
event_properties["$process_person_profile"] = False

self._ph_client.capture(
distinct_id=self._distinct_id or trace_id,
event="$ai_generation",
Expand Down Expand Up @@ -861,6 +877,27 @@ def _parse_usage(
return llm_usage


def _capture_exception_and_update_properties(
client: Client,
exception: BaseException,
distinct_id: Optional[Union[str, int, UUID]],
groups: Optional[Dict[str, Any]],
event_properties: Dict[str, Any],
):
if client.enable_exception_autocapture:
exception_id = client.capture_exception(
exception,
distinct_id=distinct_id,
groups=groups,
properties=event_properties,
)

if exception_id:
event_properties["$exception_event_id"] = exception_id

return event_properties


def _get_http_status(error: BaseException) -> int:
# OpenAI: https://github.com/openai/openai-python/blob/main/src/openai/_exceptions.py
# Anthropic: https://github.com/anthropics/anthropic-sdk-python/blob/main/src/anthropic/_exceptions.py
Expand Down
Loading
Loading