posthoganalytics 6.9.0__py3-none-any.whl → 6.9.2__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to their public registry. It is provided for informational purposes only.
@@ -14,8 +14,19 @@ from posthoganalytics.contexts import (
     set_code_variables_mask_patterns_context as inner_set_code_variables_mask_patterns_context,
     set_code_variables_ignore_patterns_context as inner_set_code_variables_ignore_patterns_context,
 )
-from posthoganalytics.feature_flags import InconclusiveMatchError, RequiresServerEvaluation
-from posthoganalytics.types import FeatureFlag, FlagsAndPayloads, FeatureFlagResult
+from posthoganalytics.exception_utils import (
+    DEFAULT_CODE_VARIABLES_IGNORE_PATTERNS,
+    DEFAULT_CODE_VARIABLES_MASK_PATTERNS,
+)
+from posthoganalytics.feature_flags import (
+    InconclusiveMatchError as InconclusiveMatchError,
+    RequiresServerEvaluation as RequiresServerEvaluation,
+)
+from posthoganalytics.types import (
+    FeatureFlag,
+    FlagsAndPayloads,
+    FeatureFlagResult as FeatureFlagResult,
+)
 from posthoganalytics.version import VERSION
 
 __version__ = VERSION
@@ -177,6 +188,10 @@ enable_local_evaluation = True # type: bool
 
 default_client = None # type: Optional[Client]
 
+capture_exception_code_variables = False
+code_variables_mask_patterns = DEFAULT_CODE_VARIABLES_MASK_PATTERNS
+code_variables_ignore_patterns = DEFAULT_CODE_VARIABLES_IGNORE_PATTERNS
+
 
 # NOTE - this and following functions take unpacked kwargs because we needed to make
 # it impossible to write `posthog.capture(distinct-id, event-name)` - basically, to enforce
@@ -771,6 +786,9 @@ def setup() -> Client:
         enable_exception_autocapture=enable_exception_autocapture,
         log_captured_exceptions=log_captured_exceptions,
         enable_local_evaluation=enable_local_evaluation,
+        capture_exception_code_variables=capture_exception_code_variables,
+        code_variables_mask_patterns=code_variables_mask_patterns,
+        code_variables_ignore_patterns=code_variables_ignore_patterns,
     )
 
     # always set incase user changes it
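The three new module-level settings default to off and to the shared `DEFAULT_CODE_VARIABLES_*` constants from `exception_utils`, and `setup()` now forwards them to the default `Client`. Below is a minimal usage sketch, not taken from the diff: the attribute names come from the hunks above, but the regex values are purely illustrative, and the behaviour of `capture_exception_code_variables` (presumably attaching local variables to captured exceptions) is inferred from its name.

```python
import posthoganalytics as posthog

# Attribute names are taken from the diff above; values here are illustrative only.
posthog.enable_exception_autocapture = True
posthog.capture_exception_code_variables = True  # new in 6.9.x (defaults to False)

# Override the shipped DEFAULT_CODE_VARIABLES_MASK_PATTERNS /
# DEFAULT_CODE_VARIABLES_IGNORE_PATTERNS with your own regex strings if needed
# (hypothetical patterns shown here).
posthog.code_variables_mask_patterns = [r"(?i)password", r"(?i)secret", r"(?i)api_key"]
posthog.code_variables_ignore_patterns = [r"(?i)_cache$"]

# The next call that needs the default client triggers setup(), which passes
# these module-level values through to Client(...) as shown in the hunk above.
```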
@@ -575,7 +575,7 @@ class CallbackHandler(BaseCallbackHandler):
             event_properties["$ai_is_error"] = True
         else:
             # Add usage
-            usage = _parse_usage(output)
+            usage = _parse_usage(output, run.provider, run.model)
             event_properties["$ai_input_tokens"] = usage.input_tokens
             event_properties["$ai_output_tokens"] = usage.output_tokens
             event_properties["$ai_cache_creation_input_tokens"] = (
@@ -696,6 +696,8 @@ class ModelUsage:
 
 def _parse_usage_model(
     usage: Union[BaseModel, dict],
+    provider: Optional[str] = None,
+    model: Optional[str] = None,
 ) -> ModelUsage:
     if isinstance(usage, BaseModel):
         usage = usage.__dict__
@@ -764,16 +766,30 @@ def _parse_usage_model(
             for mapped_key, dataclass_key in field_mapping.items()
         },
     )
-    # In LangChain, input_tokens is the sum of input and cache read tokens.
-    # Our cost calculation expects them to be separate, for Anthropic.
-    if normalized_usage.input_tokens and normalized_usage.cache_read_tokens:
+    # For Anthropic providers, LangChain reports input_tokens as the sum of input and cache read tokens.
+    # Our cost calculation expects them to be separate for Anthropic, so we subtract cache tokens.
+    # For other providers (OpenAI, etc.), input_tokens already includes cache tokens as expected.
+    # Match logic consistent with plugin-server: exact match on provider OR substring match on model
+    is_anthropic = False
+    if provider and provider.lower() == "anthropic":
+        is_anthropic = True
+    elif model and "anthropic" in model.lower():
+        is_anthropic = True
+
+    if (
+        is_anthropic
+        and normalized_usage.input_tokens
+        and normalized_usage.cache_read_tokens
+    ):
         normalized_usage.input_tokens = max(
             normalized_usage.input_tokens - normalized_usage.cache_read_tokens, 0
         )
     return normalized_usage
 
 
-def _parse_usage(response: LLMResult) -> ModelUsage:
+def _parse_usage(
+    response: LLMResult, provider: Optional[str] = None, model: Optional[str] = None
+) -> ModelUsage:
     # langchain-anthropic uses the usage field
     llm_usage_keys = ["token_usage", "usage"]
     llm_usage: ModelUsage = ModelUsage(
@@ -787,13 +803,15 @@ def _parse_usage(response: LLMResult) -> ModelUsage:
     if response.llm_output is not None:
         for key in llm_usage_keys:
             if response.llm_output.get(key):
-                llm_usage = _parse_usage_model(response.llm_output[key])
+                llm_usage = _parse_usage_model(
+                    response.llm_output[key], provider, model
+                )
                 break
 
     if hasattr(response, "generations"):
         for generation in response.generations:
             if "usage" in generation:
-                llm_usage = _parse_usage_model(generation["usage"])
+                llm_usage = _parse_usage_model(generation["usage"], provider, model)
                 break
 
             for generation_chunk in generation:
@@ -801,7 +819,9 @@ def _parse_usage(response: LLMResult) -> ModelUsage:
                     "usage_metadata" in generation_chunk.generation_info
                 ):
                     llm_usage = _parse_usage_model(
-                        generation_chunk.generation_info["usage_metadata"]
+                        generation_chunk.generation_info["usage_metadata"],
+                        provider,
+                        model,
                     )
                     break
 
@@ -828,7 +848,7 @@ def _parse_usage(response: LLMResult) -> ModelUsage:
                     bedrock_anthropic_usage or bedrock_titan_usage or ollama_usage
                 )
                 if chunk_usage:
-                    llm_usage = _parse_usage_model(chunk_usage)
+                    llm_usage = _parse_usage_model(chunk_usage, provider, model)
                     break
 
     return llm_usage
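The provider and model are now threaded through every `_parse_usage_model` call so that the cache-read subtraction only fires for Anthropic. The sketch below restates the matching rule and the arithmetic it gates; `is_anthropic_usage` is a hypothetical helper name (the diff inlines this logic in `_parse_usage_model`), and the model ids are illustrative.

```python
from typing import Optional


def is_anthropic_usage(provider: Optional[str], model: Optional[str]) -> bool:
    # Same rule as the inlined logic above: exact (case-insensitive) match on
    # the provider, or "anthropic" appearing anywhere in the model id.
    if provider and provider.lower() == "anthropic":
        return True
    if model and "anthropic" in model.lower():
        return True
    return False


assert is_anthropic_usage("Anthropic", None)                       # provider match
assert is_anthropic_usage(None, "us.anthropic.claude-3-5-sonnet")  # model substring match (illustrative Bedrock-style id)
assert not is_anthropic_usage("openai", "gpt-4o")                  # other providers keep LangChain's summed input_tokens

# When the rule matches, cache-read tokens are subtracted back out of LangChain's
# summed input_tokens so the cost calculation can price them separately:
input_tokens, cache_read_tokens = 1200, 1000
billable_input = max(input_tokens - cache_read_tokens, 0)  # -> 200
```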
@@ -725,21 +725,7 @@ class Client(object):
         Examples:
             ```python
             # Set with distinct id
-            posthog.capture(
-                'event_name',
-                distinct_id='user-distinct-id',
-                properties={
-                    '$set': {'name': 'Max Hedgehog'},
-                    '$set_once': {'initial_url': '/blog'}
-                }
-            )
-            ```
-            ```python
-            # Set using context
-            from posthoganalytics import new_context, identify_context
-            with new_context():
-                identify_context('user-distinct-id')
-                posthog.capture('event_name')
+            posthog.set(distinct_id='user123', properties={'name': 'Max Hedgehog'})
             ```
 
         Category:
@@ -929,7 +929,7 @@ def _compile_patterns(patterns):
     for pattern in patterns:
         try:
             compiled.append(re.compile(pattern))
-        except:
+        except Exception:
            pass
     return compiled
 
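`_compile_patterns` appears to be the helper that compiles the mask/ignore pattern lists; the change narrows the bare `except` so that only ordinary exceptions from an invalid pattern are swallowed, while `BaseException` subclasses such as `KeyboardInterrupt` and `SystemExit` still propagate. A small runnable sketch of the resulting behaviour, reconstructed from the hunk above with an illustrative call:

```python
import re


def _compile_patterns(patterns):
    compiled = []
    for pattern in patterns:
        try:
            compiled.append(re.compile(pattern))
        except Exception:  # invalid regexes are skipped rather than raised
            pass
    return compiled


# The unbalanced "(" pattern fails to compile and is silently dropped.
compiled = _compile_patterns([r"(?i)password", r"("])
assert len(compiled) == 1
```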
@@ -1,4 +1,4 @@
-VERSION = "6.9.0"
+VERSION = "6.9.2"
 
 if __name__ == "__main__":
     print(VERSION, end="") # noqa: T201
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: posthoganalytics
-Version: 6.9.0
+Version: 6.9.2
 Summary: Integrate PostHog into any python application.
 Home-page: https://github.com/posthog/posthog-python
 Author: Posthog
@@ -1,17 +1,17 @@
-posthoganalytics/__init__.py,sha256=_zAqJWGGR_saWREp2GioAaZM99qoZyXPB8GXct-z_3U,27256
+posthoganalytics/__init__.py,sha256=cEfWzFRmhHhtQs4Tl3yZoV0um7Wr-pkQzzfttuh4nYQ,27903
 posthoganalytics/args.py,sha256=iZ2JWeANiAREJKhS-Qls9tIngjJOSfAVR8C4xFT5sHw,3307
-posthoganalytics/client.py,sha256=8QTaZN84U_NyWCpNMnO7yQtxYw4vfMJ0ixXVikA6FCg,74670
+posthoganalytics/client.py,sha256=BrVlQWWqh7h-9MtIFFeHvDGWx5X4wVk5vkzkqsTWnHU,74202
 posthoganalytics/consumer.py,sha256=CiNbJBdyW9jER3ZYCKbX-JFmEDXlE1lbDy1MSl43-a0,4617
 posthoganalytics/contexts.py,sha256=Qj8eprL71IVGo4nMtHCs7kIEhezOmxfkYpiPTg-rWjU,12618
 posthoganalytics/exception_capture.py,sha256=1VHBfffrXXrkK0PT8iVgKPpj_R1pGAzG5f3Qw0WF79w,1783
-posthoganalytics/exception_utils.py,sha256=DrKNkZdgYXOPtEG5qPvqlPE86nefBc9yazSS5akA34Y,31857
+posthoganalytics/exception_utils.py,sha256=uBSm03agMQW0fr4v5CXvcGTkc-QFOHK-cuIrlE4ClJ4,31867
 posthoganalytics/feature_flags.py,sha256=yHjiH6LSvhQgurbsPCHUdGakZKvkzOLdqB8vL3iyhmw,22544
 posthoganalytics/poller.py,sha256=jBz5rfH_kn_bBz7wCB46Fpvso4ttx4uzqIZWvXBCFmQ,595
 posthoganalytics/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 posthoganalytics/request.py,sha256=Bsl2c5WwONKPQzwWMmKPX5VgOlwSiIcSNfhXgoz62Y8,6186
 posthoganalytics/types.py,sha256=Dl3aFGX9XUR0wMmK12r2s5Hjan9jL4HpQ9GHpVcEq5U,10207
 posthoganalytics/utils.py,sha256=-0w-OLcCaoldkbBebPzQyBzLJSo9G9yBOg8NDVz7La8,16088
-posthoganalytics/version.py,sha256=qTlEuCWb-slrdA36sMFDZPyA5fyBwCpQaHtWJmuEIgI,87
+posthoganalytics/version.py,sha256=zvqnhz0fFaGVjl6Hb6DGlJacDLQ1AwGr9KdJwg82zOA,87
 posthoganalytics/ai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 posthoganalytics/ai/sanitization.py,sha256=owipZ4eJYtd4JTI-CM_klatclXaeaIec3XJBOUfsOnQ,5770
 posthoganalytics/ai/types.py,sha256=arX98hR1PIPeJ3vFikxTlACIh1xPp6aEUw1gBLcKoB0,3273
@@ -25,7 +25,7 @@ posthoganalytics/ai/gemini/__init__.py,sha256=JV_9-gBR87leHgZW4XAYZP7LSl4YaXeuhq
 posthoganalytics/ai/gemini/gemini.py,sha256=A2acjT_m8ru2YwgIk15aN21CRVEl2jh8pbqjmHplMC8,15035
 posthoganalytics/ai/gemini/gemini_converter.py,sha256=lfd-AqBYdM3_OJtuvkFb9AlSba1gQt4K5TpKqzXykdk,18749
 posthoganalytics/ai/langchain/__init__.py,sha256=9CqAwLynTGj3ASAR80C3PmdTdrYGmu99tz0JL-HPFgI,70
-posthoganalytics/ai/langchain/callbacks.py,sha256=syDeSb4hOrwxjEtlmRodVhdgVAQi8iwg1Z63YHNUhvA,30297
+posthoganalytics/ai/langchain/callbacks.py,sha256=5PQHBt9ddp_HBF3lPpW119mCMEGuuL5Be3p-uBsxDRk,31102
 posthoganalytics/ai/openai/__init__.py,sha256=u4OuUT7k1NgFj0TrxjuyegOg7a_UA8nAU6a-Hszr0OM,490
 posthoganalytics/ai/openai/openai.py,sha256=I05NruE9grWezM_EgOZBiG5Ej_gABsDcYKN0pRQWvzU,20235
 posthoganalytics/ai/openai/openai_async.py,sha256=mIxFZykDgMi3ws_fNWikEhwvkZmKqfYgeeB2yhxlZjQ,22490
@@ -47,8 +47,8 @@ posthoganalytics/test/test_request.py,sha256=Zc0VbkjpVmj8mKokQm9rzdgTr0b1U44vvMY
 posthoganalytics/test/test_size_limited_dict.py,sha256=-5IQjIEr_-Dql24M0HusdR_XroOMrtgiT0v6ZQCRvzo,774
 posthoganalytics/test/test_types.py,sha256=bRPHdwVpP7hu7emsplU8UVyzSQptv6PaG5lAoOD_BtM,7595
 posthoganalytics/test/test_utils.py,sha256=sqUTbfweVcxxFRd3WDMFXqPMyU6DvzOBeAOc68Py9aw,9620
-posthoganalytics-6.9.0.dist-info/licenses/LICENSE,sha256=wGf9JBotDkSygFj43m49oiKlFnpMnn97keiZKF-40vE,2450
-posthoganalytics-6.9.0.dist-info/METADATA,sha256=eVZTY6z46Bgtxy3p_6YtoAGXX7Mot6YfEdEKouixpew,6024
-posthoganalytics-6.9.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-posthoganalytics-6.9.0.dist-info/top_level.txt,sha256=8QsNIqIkBh1p2TXvKp0Em9ZLZKwe3uIqCETyW4s1GOE,17
-posthoganalytics-6.9.0.dist-info/RECORD,,
+posthoganalytics-6.9.2.dist-info/licenses/LICENSE,sha256=wGf9JBotDkSygFj43m49oiKlFnpMnn97keiZKF-40vE,2450
+posthoganalytics-6.9.2.dist-info/METADATA,sha256=J98kRSXrzBD4ISmrdueN7DWccefvIZaqnYAtlZhyG1c,6024
+posthoganalytics-6.9.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+posthoganalytics-6.9.2.dist-info/top_level.txt,sha256=8QsNIqIkBh1p2TXvKp0Em9ZLZKwe3uIqCETyW4s1GOE,17
+posthoganalytics-6.9.2.dist-info/RECORD,,