posthog 7.4.3__tar.gz → 7.5.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {posthog-7.4.3/posthog.egg-info → posthog-7.5.1}/PKG-INFO +9 -1
- {posthog-7.4.3 → posthog-7.5.1}/README.md +8 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/__init__.py +16 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/ai/langchain/callbacks.py +46 -9
- {posthog-7.4.3 → posthog-7.5.1}/posthog/ai/utils.py +202 -173
- {posthog-7.4.3 → posthog-7.5.1}/posthog/consumer.py +6 -2
- {posthog-7.4.3 → posthog-7.5.1}/posthog/contexts.py +5 -6
- {posthog-7.4.3 → posthog-7.5.1}/posthog/test/test_consumer.py +86 -83
- {posthog-7.4.3 → posthog-7.5.1}/posthog/test/test_contexts.py +26 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/version.py +1 -1
- {posthog-7.4.3 → posthog-7.5.1/posthog.egg-info}/PKG-INFO +9 -1
- {posthog-7.4.3 → posthog-7.5.1}/LICENSE +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/MANIFEST.in +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/ai/__init__.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/ai/anthropic/__init__.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/ai/anthropic/anthropic.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/ai/anthropic/anthropic_async.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/ai/anthropic/anthropic_converter.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/ai/anthropic/anthropic_providers.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/ai/gemini/__init__.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/ai/gemini/gemini.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/ai/gemini/gemini_async.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/ai/gemini/gemini_converter.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/ai/langchain/__init__.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/ai/openai/__init__.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/ai/openai/openai.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/ai/openai/openai_async.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/ai/openai/openai_converter.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/ai/openai/openai_providers.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/ai/sanitization.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/ai/types.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/args.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/client.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/exception_capture.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/exception_utils.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/feature_flags.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/flag_definition_cache.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/integrations/__init__.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/integrations/django.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/poller.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/py.typed +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/request.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/test/__init__.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/test/test_before_send.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/test/test_client.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/test/test_exception_capture.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/test/test_feature_flag.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/test/test_feature_flag_result.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/test/test_feature_flags.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/test/test_flag_definition_cache.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/test/test_module.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/test/test_request.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/test/test_size_limited_dict.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/test/test_types.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/test/test_utils.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/types.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog/utils.py +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog.egg-info/SOURCES.txt +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog.egg-info/dependency_links.txt +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog.egg-info/requires.txt +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/posthog.egg-info/top_level.txt +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/pyproject.toml +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/setup.cfg +0 -0
- {posthog-7.4.3 → posthog-7.5.1}/setup.py +0 -0

{posthog-7.4.3/posthog.egg-info → posthog-7.5.1}/PKG-INFO +9 -1

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: posthog
-Version: 7.4.3
+Version: 7.5.1
 Summary: Integrate PostHog into any python application.
 Home-page: https://github.com/posthog/posthog-python
 Author: Posthog
@@ -87,6 +87,14 @@ Dynamic: maintainer
 
 Please see the [Python integration docs](https://posthog.com/docs/integrations/python-integration) for details.
 
+## Python Version Support
+
+| SDK Version | Python Versions Supported | Notes |
+|-------------|---------------------------|-------|
+| 7.3.1+ | 3.10, 3.11, 3.12, 3.13, 3.14 | Added Python 3.14 support |
+| 7.0.0 - 7.0.1 | 3.10, 3.11, 3.12, 3.13 | Dropped Python 3.9 support |
+| 4.0.1 - 6.x | 3.9, 3.10, 3.11, 3.12, 3.13 | Python 3.9+ required |
+
 ## Development
 
 ### Testing Locally

{posthog-7.4.3 → posthog-7.5.1}/README.md +8 -0

@@ -12,6 +12,14 @@
 
 Please see the [Python integration docs](https://posthog.com/docs/integrations/python-integration) for details.
 
+## Python Version Support
+
+| SDK Version | Python Versions Supported | Notes |
+|-------------|---------------------------|-------|
+| 7.3.1+ | 3.10, 3.11, 3.12, 3.13, 3.14 | Added Python 3.14 support |
+| 7.0.0 - 7.0.1 | 3.10, 3.11, 3.12, 3.13 | Dropped Python 3.9 support |
+| 4.0.1 - 6.x | 3.9, 3.10, 3.11, 3.12, 3.13 | Python 3.9+ required |
+
 ## Development
 
 ### Testing Locally

{posthog-7.4.3 → posthog-7.5.1}/posthog/__init__.py +16 -0

@@ -29,6 +29,9 @@ from posthog.contexts import (
 from posthog.contexts import (
     tag as inner_tag,
 )
+from posthog.contexts import (
+    get_tags as inner_get_tags,
+)
 from posthog.exception_utils import (
     DEFAULT_CODE_VARIABLES_IGNORE_PATTERNS,
     DEFAULT_CODE_VARIABLES_MASK_PATTERNS,
@@ -190,6 +193,19 @@ def tag(name: str, value: Any):
     return inner_tag(name, value)
 
 
+def get_tags() -> Dict[str, Any]:
+    """
+    Get all tags from the current context.
+
+    Returns:
+        Dict of all tags in the current context
+
+    Category:
+        Contexts
+    """
+    return inner_get_tags()
+
+
 """Settings."""
 api_key = None  # type: Optional[str]
 host = None  # type: Optional[str]
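
The practical effect of this change is that `posthog.get_tags()` is now part of the public module-level API, alongside `new_context()` and `tag()`. A minimal usage sketch (tag names and values below are illustrative):

```python
import posthog

with posthog.new_context():
    posthog.tag("plan", "pro")
    posthog.tag("region", "eu")
    # get_tags() returns the tags collected in the active context
    print(posthog.get_tags())  # {'plan': 'pro', 'region': 'eu'}
```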

{posthog-7.4.3 → posthog-7.5.1}/posthog/ai/langchain/callbacks.py +46 -9

@@ -22,8 +22,8 @@ from uuid import UUID
 
 try:
     # LangChain 1.0+ and modern 0.x with langchain-core
-    from langchain_core.callbacks.base import BaseCallbackHandler
     from langchain_core.agents import AgentAction, AgentFinish
+    from langchain_core.callbacks.base import BaseCallbackHandler
 except (ImportError, ModuleNotFoundError):
     # Fallback for older LangChain versions
     from langchain.callbacks.base import BaseCallbackHandler
@@ -35,15 +35,15 @@ from langchain_core.messages import (
     FunctionMessage,
     HumanMessage,
     SystemMessage,
-    ToolMessage,
     ToolCall,
+    ToolMessage,
 )
 from langchain_core.outputs import ChatGeneration, LLMResult
 from pydantic import BaseModel
 
 from posthog import setup
-from posthog.ai.utils import get_model_params, with_privacy_mode
 from posthog.ai.sanitization import sanitize_langchain
+from posthog.ai.utils import get_model_params, with_privacy_mode
 from posthog.client import Client
 
 log = logging.getLogger("posthog")
@@ -506,6 +506,14 @@ class CallbackHandler(BaseCallbackHandler):
         if isinstance(outputs, BaseException):
             event_properties["$ai_error"] = _stringify_exception(outputs)
             event_properties["$ai_is_error"] = True
+            event_properties = _capture_exception_and_update_properties(
+                self._ph_client,
+                outputs,
+                self._distinct_id,
+                self._groups,
+                event_properties,
+            )
+
         elif outputs is not None:
             event_properties["$ai_output_state"] = with_privacy_mode(
                 self._ph_client, self._privacy_mode, outputs
@@ -576,10 +584,24 @@ class CallbackHandler(BaseCallbackHandler):
         if run.tools:
             event_properties["$ai_tools"] = run.tools
 
+        if self._properties:
+            event_properties.update(self._properties)
+
+        if self._distinct_id is None:
+            event_properties["$process_person_profile"] = False
+
         if isinstance(output, BaseException):
             event_properties["$ai_http_status"] = _get_http_status(output)
             event_properties["$ai_error"] = _stringify_exception(output)
             event_properties["$ai_is_error"] = True
+
+            event_properties = _capture_exception_and_update_properties(
+                self._ph_client,
+                output,
+                self._distinct_id,
+                self._groups,
+                event_properties,
+            )
         else:
             # Add usage
             usage = _parse_usage(output, run.provider, run.model)
@@ -607,12 +629,6 @@ class CallbackHandler(BaseCallbackHandler):
                 self._ph_client, self._privacy_mode, completions
             )
 
-        if self._properties:
-            event_properties.update(self._properties)
-
-        if self._distinct_id is None:
-            event_properties["$process_person_profile"] = False
-
         self._ph_client.capture(
             distinct_id=self._distinct_id or trace_id,
             event="$ai_generation",
@@ -863,6 +879,27 @@ def _parse_usage(
     return llm_usage
 
 
+def _capture_exception_and_update_properties(
+    client: Client,
+    exception: BaseException,
+    distinct_id: Optional[Union[str, int, UUID]],
+    groups: Optional[Dict[str, Any]],
+    event_properties: Dict[str, Any],
+):
+    if client.enable_exception_autocapture:
+        exception_id = client.capture_exception(
+            exception,
+            distinct_id=distinct_id,
+            groups=groups,
+            properties=event_properties,
+        )
+
+        if exception_id:
+            event_properties["$exception_event_id"] = exception_id
+
+    return event_properties
+
+
 def _get_http_status(error: BaseException) -> int:
     # OpenAI: https://github.com/openai/openai-python/blob/main/src/openai/_exceptions.py
     # Anthropic: https://github.com/anthropics/anthropic-sdk-python/blob/main/src/anthropic/_exceptions.py
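
The net effect: when the client was constructed with `enable_exception_autocapture=True`, a LangChain run that raises now also produces a captured exception, and the related `$ai_generation`/trace event carries its id as `$exception_event_id`. A sketch of the setup, assuming the handler accepts a `client` keyword argument (its constructor is not shown in this diff):

```python
from posthog import Posthog
from posthog.ai.langchain.callbacks import CallbackHandler

posthog_client = Posthog(
    "<project API key>",              # placeholder
    host="https://us.i.posthog.com",  # placeholder host
    enable_exception_autocapture=True,
)

# Assumed constructor usage: the handler only needs a reference to the client.
handler = CallbackHandler(client=posthog_client)

# chain.invoke({"input": "..."}, config={"callbacks": [handler]})
# If the run raises, _capture_exception_and_update_properties() calls
# client.capture_exception() and adds $exception_event_id to the event properties.
```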

{posthog-7.4.3 → posthog-7.5.1}/posthog/ai/utils.py +202 -173

@@ -2,14 +2,15 @@ import time
 import uuid
 from typing import Any, Callable, Dict, List, Optional, cast
 
-from posthog …
-from posthog.ai.types import FormattedMessage, StreamingEventData, TokenUsage
+from posthog import get_tags, identify_context, new_context, tag
 from posthog.ai.sanitization import (
-    sanitize_openai,
     sanitize_anthropic,
     sanitize_gemini,
     sanitize_langchain,
+    sanitize_openai,
 )
+from posthog.ai.types import FormattedMessage, StreamingEventData, TokenUsage
+from posthog.client import Client as PostHogClient
 
 
 def merge_usage_stats(
@@ -256,94 +257,108 @@ def call_llm_and_track_usage(
     usage: TokenUsage = TokenUsage()
     error_params: Dict[str, Any] = {}
 
- … (deleted lines not captured in this diff view)
-        "$ai_latency": latency,
-        "$ai_trace_id": posthog_trace_id,
-        "$ai_base_url": str(base_url),
-        **(posthog_properties or {}),
-        **(error_params or {}),
-    }
-
-    available_tool_calls = extract_available_tool_calls(provider, kwargs)
-
-    if available_tool_calls:
-        event_properties["$ai_tools"] = available_tool_calls
-
-    cache_read = usage.get("cache_read_input_tokens")
-    if cache_read is not None and cache_read > 0:
-        event_properties["$ai_cache_read_input_tokens"] = cache_read
-
-    cache_creation = usage.get("cache_creation_input_tokens")
-    if cache_creation is not None and cache_creation > 0:
-        event_properties["$ai_cache_creation_input_tokens"] = cache_creation
-
-    reasoning = usage.get("reasoning_tokens")
-    if reasoning is not None and reasoning > 0:
-        event_properties["$ai_reasoning_tokens"] = reasoning
-
-    web_search_count = usage.get("web_search_count")
-    if web_search_count is not None and web_search_count > 0:
-        event_properties["$ai_web_search_count"] = web_search_count
-
-    if posthog_distinct_id is None:
-        event_properties["$process_person_profile"] = False
-
-    # Process instructions for Responses API
-    if provider == "openai" and kwargs.get("instructions") is not None:
-        event_properties["$ai_instructions"] = with_privacy_mode(
-            ph_client, posthog_privacy_mode, kwargs.get("instructions")
+    with new_context(client=ph_client, capture_exceptions=False):
+        if posthog_distinct_id:
+            identify_context(posthog_distinct_id)
+
+        try:
+            response = call_method(**kwargs)
+        except Exception as exc:
+            error = exc
+            http_status = getattr(
+                exc, "status_code", 0
+            )  # default to 0 becuase its likely an SDK error
+            error_params = {
+                "$ai_is_error": True,
+                "$ai_error": exc.__str__(),
+            }
+            # TODO: Add exception capture for OpenAI/Anthropic/Gemini wrappers when
+            # enable_exception_autocapture is True, similar to LangChain callbacks.
+            # See _capture_exception_and_update_properties in langchain/callbacks.py
+        finally:
+            end_time = time.time()
+            latency = end_time - start_time
+
+        if posthog_trace_id is None:
+            posthog_trace_id = str(uuid.uuid4())
+
+        if response and (
+            hasattr(response, "usage")
+            or (provider == "gemini" and hasattr(response, "usage_metadata"))
+        ):
+            usage = get_usage(response, provider)
+
+        messages = merge_system_prompt(kwargs, provider)
+        sanitized_messages = sanitize_messages(messages, provider)
+
+        tag("$ai_provider", provider)
+        tag("$ai_model", kwargs.get("model") or getattr(response, "model", None))
+        tag("$ai_model_parameters", get_model_params(kwargs))
+        tag(
+            "$ai_input",
+            with_privacy_mode(ph_client, posthog_privacy_mode, sanitized_messages),
        )
- …
-            event="$ai_generation",
-            properties=event_properties,
-            groups=posthog_groups,
+        tag(
+            "$ai_output_choices",
+            with_privacy_mode(
+                ph_client, posthog_privacy_mode, format_response(response, provider)
+            ),
         )
+        tag("$ai_http_status", http_status)
+        tag("$ai_input_tokens", usage.get("input_tokens", 0))
+        tag("$ai_output_tokens", usage.get("output_tokens", 0))
+        tag("$ai_latency", latency)
+        tag("$ai_trace_id", posthog_trace_id)
+        tag("$ai_base_url", str(base_url))
+
+        available_tool_calls = extract_available_tool_calls(provider, kwargs)
+
+        if available_tool_calls:
+            tag("$ai_tools", available_tool_calls)
+
+        cache_read = usage.get("cache_read_input_tokens")
+        if cache_read is not None and cache_read > 0:
+            tag("$ai_cache_read_input_tokens", cache_read)
+
+        cache_creation = usage.get("cache_creation_input_tokens")
+        if cache_creation is not None and cache_creation > 0:
+            tag("$ai_cache_creation_input_tokens", cache_creation)
+
+        reasoning = usage.get("reasoning_tokens")
+        if reasoning is not None and reasoning > 0:
+            tag("$ai_reasoning_tokens", reasoning)
+
+        web_search_count = usage.get("web_search_count")
+        if web_search_count is not None and web_search_count > 0:
+            tag("$ai_web_search_count", web_search_count)
+
+        if posthog_distinct_id is None:
+            tag("$process_person_profile", False)
+
+        # Process instructions for Responses API
+        if provider == "openai" and kwargs.get("instructions") is not None:
+            tag(
+                "$ai_instructions",
+                with_privacy_mode(
+                    ph_client, posthog_privacy_mode, kwargs.get("instructions")
+                ),
+            )
+
+        # send the event to posthog
+        if hasattr(ph_client, "capture") and callable(ph_client.capture):
+            ph_client.capture(
+                distinct_id=posthog_distinct_id or posthog_trace_id,
+                event="$ai_generation",
+                properties={
+                    **get_tags(),
+                    **(posthog_properties or {}),
+                    **(error_params or {}),
+                },
+                groups=posthog_groups,
+            )
 
- …
+    if error:
+        raise error
 
     return response
 
@@ -367,94 +382,108 @@ async def call_llm_and_track_usage_async(
     usage: TokenUsage = TokenUsage()
     error_params: Dict[str, Any] = {}
 
- …
-        "$ai_latency": latency,
-        "$ai_trace_id": posthog_trace_id,
-        "$ai_base_url": str(base_url),
-        **(posthog_properties or {}),
-        **(error_params or {}),
-    }
-
-    available_tool_calls = extract_available_tool_calls(provider, kwargs)
-
-    if available_tool_calls:
-        event_properties["$ai_tools"] = available_tool_calls
-
-    cache_read = usage.get("cache_read_input_tokens")
-    if cache_read is not None and cache_read > 0:
-        event_properties["$ai_cache_read_input_tokens"] = cache_read
-
-    cache_creation = usage.get("cache_creation_input_tokens")
-    if cache_creation is not None and cache_creation > 0:
-        event_properties["$ai_cache_creation_input_tokens"] = cache_creation
-
-    reasoning = usage.get("reasoning_tokens")
-    if reasoning is not None and reasoning > 0:
-        event_properties["$ai_reasoning_tokens"] = reasoning
-
-    web_search_count = usage.get("web_search_count")
-    if web_search_count is not None and web_search_count > 0:
-        event_properties["$ai_web_search_count"] = web_search_count
-
-    if posthog_distinct_id is None:
-        event_properties["$process_person_profile"] = False
-
-    # Process instructions for Responses API
-    if provider == "openai" and kwargs.get("instructions") is not None:
-        event_properties["$ai_instructions"] = with_privacy_mode(
-            ph_client, posthog_privacy_mode, kwargs.get("instructions")
+    with new_context(client=ph_client, capture_exceptions=False):
+        if posthog_distinct_id:
+            identify_context(posthog_distinct_id)
+
+        try:
+            response = await call_async_method(**kwargs)
+        except Exception as exc:
+            error = exc
+            http_status = getattr(
+                exc, "status_code", 0
+            )  # default to 0 because its likely an SDK error
+            error_params = {
+                "$ai_is_error": True,
+                "$ai_error": exc.__str__(),
+            }
+            # TODO: Add exception capture for OpenAI/Anthropic/Gemini wrappers when
+            # enable_exception_autocapture is True, similar to LangChain callbacks.
+            # See _capture_exception_and_update_properties in langchain/callbacks.py
+        finally:
+            end_time = time.time()
+            latency = end_time - start_time
+
+        if posthog_trace_id is None:
+            posthog_trace_id = str(uuid.uuid4())
+
+        if response and (
+            hasattr(response, "usage")
+            or (provider == "gemini" and hasattr(response, "usage_metadata"))
+        ):
+            usage = get_usage(response, provider)
+
+        messages = merge_system_prompt(kwargs, provider)
+        sanitized_messages = sanitize_messages(messages, provider)
+
+        tag("$ai_provider", provider)
+        tag("$ai_model", kwargs.get("model") or getattr(response, "model", None))
+        tag("$ai_model_parameters", get_model_params(kwargs))
+        tag(
+            "$ai_input",
+            with_privacy_mode(ph_client, posthog_privacy_mode, sanitized_messages),
        )
- …
-            event="$ai_generation",
-            properties=event_properties,
-            groups=posthog_groups,
+        tag(
+            "$ai_output_choices",
+            with_privacy_mode(
+                ph_client, posthog_privacy_mode, format_response(response, provider)
+            ),
         )
+        tag("$ai_http_status", http_status)
+        tag("$ai_input_tokens", usage.get("input_tokens", 0))
+        tag("$ai_output_tokens", usage.get("output_tokens", 0))
+        tag("$ai_latency", latency)
+        tag("$ai_trace_id", posthog_trace_id)
+        tag("$ai_base_url", str(base_url))
+
+        available_tool_calls = extract_available_tool_calls(provider, kwargs)
+
+        if available_tool_calls:
+            tag("$ai_tools", available_tool_calls)
+
+        cache_read = usage.get("cache_read_input_tokens")
+        if cache_read is not None and cache_read > 0:
+            tag("$ai_cache_read_input_tokens", cache_read)
+
+        cache_creation = usage.get("cache_creation_input_tokens")
+        if cache_creation is not None and cache_creation > 0:
+            tag("$ai_cache_creation_input_tokens", cache_creation)
+
+        reasoning = usage.get("reasoning_tokens")
+        if reasoning is not None and reasoning > 0:
+            tag("$ai_reasoning_tokens", reasoning)
+
+        web_search_count = usage.get("web_search_count")
+        if web_search_count is not None and web_search_count > 0:
+            tag("$ai_web_search_count", web_search_count)
+
+        if posthog_distinct_id is None:
+            tag("$process_person_profile", False)
+
+        # Process instructions for Responses API
+        if provider == "openai" and kwargs.get("instructions") is not None:
+            tag(
+                "$ai_instructions",
+                with_privacy_mode(
+                    ph_client, posthog_privacy_mode, kwargs.get("instructions")
+                ),
+            )
+
+        # send the event to posthog
+        if hasattr(ph_client, "capture") and callable(ph_client.capture):
+            ph_client.capture(
+                distinct_id=posthog_distinct_id or posthog_trace_id,
+                event="$ai_generation",
+                properties={
+                    **get_tags(),
+                    **(posthog_properties or {}),
+                    **(error_params or {}),
+                },
+                groups=posthog_groups,
+            )
 
- …
+    if error:
+        raise error
 
     return response
 
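
The refactor replaces the hand-built `event_properties` dict with context tags: attributes are recorded via `tag()` inside `new_context(client=ph_client, capture_exceptions=False)` and merged back with `get_tags()` when the `$ai_generation` event is captured. Because `posthog_properties` and `error_params` are spread after `get_tags()`, caller-supplied values still override the auto-collected ones. A condensed sketch of that pattern, with illustrative tag values:

```python
from posthog import Posthog, get_tags, new_context, tag

ph_client = Posthog("<project API key>", host="https://us.i.posthog.com")  # placeholders

with new_context(client=ph_client, capture_exceptions=False):
    tag("$ai_provider", "openai")  # illustrative; normally set by the LLM wrapper
    tag("$ai_latency", 0.42)
    ph_client.capture(
        distinct_id="trace-or-user-id",
        event="$ai_generation",
        properties={
            **get_tags(),      # auto-collected tags from the context
            "plan": "pro",     # properties spread afterwards take precedence
        },
    )
```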

{posthog-7.4.3 → posthog-7.5.1}/posthog/consumer.py +6 -2

@@ -84,12 +84,16 @@ class Consumer(Thread):
             self.log.error("error uploading: %s", e)
             success = False
             if self.on_error:
- …
+                try:
+                    self.on_error(e, batch)
+                except Exception as e:
+                    self.log.error("on_error handler failed: %s", e)
         finally:
             # mark items as acknowledged from queue
             for item in batch:
                 self.queue.task_done()
- …
+
+        return success
 
     def next(self):
         """Return the next batch of items to upload."""
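
For users this means a custom `on_error` callback can no longer break the consumer's upload loop: if the callback itself raises, the error is logged and the batch is still acknowledged. A sketch, assuming the client continues to forward an `on_error` callable to its consumers as in earlier releases:

```python
from posthog import Posthog

def on_error(error, batch):
    # Called when a batch upload fails. As of 7.5.1, an exception raised in
    # this handler is logged ("on_error handler failed: ...") instead of propagating.
    print(f"PostHog upload failed for {len(batch)} events: {error}")

posthog_client = Posthog(
    "<project API key>",              # placeholder
    host="https://us.i.posthog.com",  # placeholder host
    on_error=on_error,                # assumed keyword, forwarded to the Consumer
)
```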

{posthog-7.4.3 → posthog-7.5.1}/posthog/contexts.py +5 -6

@@ -62,14 +62,13 @@ class ContextScope:
         return None
 
     def collect_tags(self) -> Dict[str, Any]:
-        tags = self.tags.copy()
         if self.parent and not self.fresh:
             # We want child tags to take precedence over parent tags,
-            # so …
- …
-            tags …
-            return tags
+            # so collect parent tags first, then update with child tags.
+            tags = self.parent.collect_tags()
+            tags.update(self.tags)
+            return tags
+        return self.tags.copy()
 
     def get_capture_exception_code_variables(self) -> Optional[bool]:
         if self.capture_exception_code_variables is not None:
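
With the corrected `collect_tags()`, a nested (non-fresh) context now inherits its parent's tags and its own values win on key collisions, while the parent's tag set is left untouched. A small sketch mirroring the new test added below:

```python
from posthog import get_tags, new_context, tag

with new_context(fresh=True):
    tag("shared_key", "parent_value")
    with new_context(fresh=False):
        tag("shared_key", "child_value")
        tag("child_only", "child")
        # the child overrides the shared key but still inherits the rest
        assert get_tags() == {"shared_key": "child_value", "child_only": "child"}
    # the parent context is unchanged
    assert get_tags() == {"shared_key": "parent_value"}
```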

{posthog-7.4.3 → posthog-7.5.1}/posthog/test/test_consumer.py +86 -83

@@ -1,8 +1,10 @@
 import json
 import time
 import unittest
+from typing import Any
 
 import mock
+from parameterized import parameterized
 
 try:
     from queue import Queue
@@ -14,15 +16,19 @@ from posthog.request import APIError
 from posthog.test.test_utils import TEST_API_KEY
 
 
+def _track_event(event_name: str = "python event") -> dict[str, str]:
+    return {"type": "track", "event": event_name, "distinct_id": "distinct_id"}
+
+
 class TestConsumer(unittest.TestCase):
-    def test_next(self):
+    def test_next(self) -> None:
         q = Queue()
         consumer = Consumer(q, "")
         q.put(1)
         next = consumer.next()
         self.assertEqual(next, [1])
 
-    def test_next_limit(self):
+    def test_next_limit(self) -> None:
         q = Queue()
         flush_at = 50
         consumer = Consumer(q, "", flush_at)
@@ -31,7 +37,7 @@ class TestConsumer(unittest.TestCase):
         next = consumer.next()
         self.assertEqual(next, list(range(flush_at)))
 
-    def test_dropping_oversize_msg(self):
+    def test_dropping_oversize_msg(self) -> None:
         q = Queue()
         consumer = Consumer(q, "")
         oversize_msg = {"m": "x" * MAX_MSG_SIZE}
@@ -40,15 +46,14 @@ class TestConsumer(unittest.TestCase):
         self.assertEqual(next, [])
         self.assertTrue(q.empty())
 
-    def test_upload(self):
+    def test_upload(self) -> None:
         q = Queue()
         consumer = Consumer(q, TEST_API_KEY)
- …
-        q.put(track)
+        q.put(_track_event())
         success = consumer.upload()
         self.assertTrue(success)
 
-    def test_flush_interval(self):
+    def test_flush_interval(self) -> None:
         # Put _n_ items in the queue, pausing a little bit more than
         # _flush_interval_ after each one.
         # The consumer should upload _n_ times.
@@ -57,17 +62,12 @@ class TestConsumer(unittest.TestCase):
         consumer = Consumer(q, TEST_API_KEY, flush_at=10, flush_interval=flush_interval)
         with mock.patch("posthog.consumer.batch_post") as mock_post:
             consumer.start()
-            for i in range(…
- …
-                    "type": "track",
-                    "event": "python event %d" % i,
-                    "distinct_id": "distinct_id",
-                }
-                q.put(track)
+            for i in range(3):
+                q.put(_track_event("python event %d" % i))
                 time.sleep(flush_interval * 1.1)
             self.assertEqual(mock_post.call_count, 3)
 
-    def test_multiple_uploads_per_interval(self):
+    def test_multiple_uploads_per_interval(self) -> None:
         # Put _flush_at*2_ items in the queue at once, then pause for
         # _flush_interval_. The consumer should upload 2 times.
         q = Queue()
@@ -78,88 +78,60 @@ class TestConsumer(unittest.TestCase):
         )
         with mock.patch("posthog.consumer.batch_post") as mock_post:
             consumer.start()
-            for i in range(…
- …
-                    "type": "track",
-                    "event": "python event %d" % i,
-                    "distinct_id": "distinct_id",
-                }
-                q.put(track)
+            for i in range(flush_at * 2):
+                q.put(_track_event("python event %d" % i))
             time.sleep(flush_interval * 1.1)
             self.assertEqual(mock_post.call_count, 2)
 
-    def test_request(self):
+    def test_request(self) -> None:
         consumer = Consumer(None, TEST_API_KEY)
- …
-        consumer.request([track])
+        consumer.request([_track_event()])
 
-    def …
- …
-            raise expected_exception
+    def _run_retry_test(
+        self, exception: Exception, exception_count: int, retries: int = 10
+    ) -> None:
+        call_count = [0]
 
-        mock_post …
+        def mock_post(*args: Any, **kwargs: Any) -> None:
+            call_count[0] += 1
+            if call_count[0] <= exception_count:
+                raise exception
 
+        consumer = Consumer(None, TEST_API_KEY, retries=retries)
         with mock.patch(
             "posthog.consumer.batch_post", mock.Mock(side_effect=mock_post)
         ):
- …
-                "event": "python event",
-                "distinct_id": "distinct_id",
-            }
-            # request() should succeed if the number of exceptions raised is
-            # less than the retries paramater.
-            if exception_count <= consumer.retries:
-                consumer.request([track])
+            if exception_count <= retries:
+                consumer.request([_track_event()])
             else:
- …
-        consumer = Consumer(None, TEST_API_KEY)
-        self._test_request_retry(consumer, APIError(429, "Too Many Requests"), 2)
-
-        # we should NOT retry on other client errors
-        consumer = Consumer(None, TEST_API_KEY)
-        api_error = APIError(400, "Client Errors")
-        try:
-            self._test_request_retry(consumer, api_error, 1)
-        except APIError:
-            pass
-        else:
-            self.fail("request() should not retry on client errors")
-
-        # test for number of exceptions raise > retries value
-        consumer = Consumer(None, TEST_API_KEY, retries=3)
-        self._test_request_retry(consumer, APIError(500, "Internal Server Error"), 3)
-
-    def test_pause(self):
+                with self.assertRaises(type(exception)):
+                    consumer.request([_track_event()])
+
+    @parameterized.expand(
+        [
+            ("general_errors", Exception("generic exception"), 2),
+            ("server_errors", APIError(500, "Internal Server Error"), 2),
+            ("rate_limit_errors", APIError(429, "Too Many Requests"), 2),
+        ]
+    )
+    def test_request_retries_on_retriable_errors(
+        self, _name: str, exception: Exception, exception_count: int
+    ) -> None:
+        self._run_retry_test(exception, exception_count)
+
+    def test_request_does_not_retry_client_errors(self) -> None:
+        with self.assertRaises(APIError):
+            self._run_retry_test(APIError(400, "Client Errors"), 1)
+
+    def test_request_fails_when_exceptions_exceed_retries(self) -> None:
+        self._run_retry_test(APIError(500, "Internal Server Error"), 4, retries=3)
+
+    def test_pause(self) -> None:
         consumer = Consumer(None, TEST_API_KEY)
         consumer.pause()
         self.assertFalse(consumer.running)
 
-    def test_max_batch_size(self):
+    def test_max_batch_size(self) -> None:
         q = Queue()
         consumer = Consumer(q, TEST_API_KEY, flush_at=100000, flush_interval=3)
         properties = {}
@@ -175,7 +147,7 @@ class TestConsumer(unittest.TestCase):
         # Let's capture 8MB of data to trigger two batches
         n_msgs = int(8_000_000 / msg_size)
 
-        def mock_post_fn(_, data, **kwargs):
+        def mock_post_fn(_: str, data: str, **kwargs: Any) -> mock.Mock:
             res = mock.Mock()
             res.status_code = 200
             request_size = len(data.encode())
@@ -194,3 +166,34 @@ class TestConsumer(unittest.TestCase):
             q.put(track)
         q.join()
         self.assertEqual(mock_post.call_count, 2)
+
+    @parameterized.expand(
+        [
+            ("on_error_succeeds", False),
+            ("on_error_raises", True),
+        ]
+    )
+    def test_upload_exception_calls_on_error_and_does_not_raise(
+        self, _name: str, on_error_raises: bool
+    ) -> None:
+        on_error_called: list[tuple[Exception, list[dict[str, str]]]] = []
+
+        def on_error(e: Exception, batch: list[dict[str, str]]) -> None:
+            on_error_called.append((e, batch))
+            if on_error_raises:
+                raise Exception("on_error failed")
+
+        q = Queue()
+        consumer = Consumer(q, TEST_API_KEY, on_error=on_error)
+        track = _track_event()
+        q.put(track)
+
+        with mock.patch.object(
+            consumer, "request", side_effect=Exception("request failed")
+        ):
+            result = consumer.upload()
+
+        self.assertFalse(result)
+        self.assertEqual(len(on_error_called), 1)
+        self.assertEqual(str(on_error_called[0][0]), "request failed")
+        self.assertEqual(on_error_called[0][1], [track])

{posthog-7.4.3 → posthog-7.5.1}/posthog/test/test_contexts.py +26 -0

@@ -191,6 +191,32 @@ class TestContexts(unittest.TestCase):
         assert get_context_distinct_id() == "user123"
         assert get_context_session_id() == "session456"
 
+    def test_child_tags_override_parent_tags_in_non_fresh_context(self):
+        with new_context(fresh=True):
+            tag("shared_key", "parent_value")
+            tag("parent_only", "parent")
+
+            with new_context(fresh=False):
+                # Child should inherit parent tags
+                assert get_tags()["parent_only"] == "parent"
+
+                # Child sets same key - should override parent
+                tag("shared_key", "child_value")
+                tag("child_only", "child")
+
+                tags = get_tags()
+                # Child value should win for shared key
+                assert tags["shared_key"] == "child_value"
+                # Both parent and child tags should be present
+                assert tags["parent_only"] == "parent"
+                assert tags["child_only"] == "child"
+
+            # Parent context should be unchanged
+            parent_tags = get_tags()
+            assert parent_tags["shared_key"] == "parent_value"
+            assert parent_tags["parent_only"] == "parent"
+            assert "child_only" not in parent_tags
+
     def test_scoped_decorator_with_context_ids(self):
         @scoped()
         def function_with_context():

{posthog-7.4.3 → posthog-7.5.1/posthog.egg-info}/PKG-INFO +9 -1

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: posthog
-Version: 7.4.3
+Version: 7.5.1
 Summary: Integrate PostHog into any python application.
 Home-page: https://github.com/posthog/posthog-python
 Author: Posthog
@@ -87,6 +87,14 @@ Dynamic: maintainer
 
 Please see the [Python integration docs](https://posthog.com/docs/integrations/python-integration) for details.
 
+## Python Version Support
+
+| SDK Version | Python Versions Supported | Notes |
+|-------------|---------------------------|-------|
+| 7.3.1+ | 3.10, 3.11, 3.12, 3.13, 3.14 | Added Python 3.14 support |
+| 7.0.0 - 7.0.1 | 3.10, 3.11, 3.12, 3.13 | Dropped Python 3.9 support |
+| 4.0.1 - 6.x | 3.9, 3.10, 3.11, 3.12, 3.13 | Python 3.9+ required |
+
 ## Development
 
 ### Testing Locally

The remaining files listed above changed by +0 -0 and are unchanged between the two versions.