langfun 0.1.2.dev202510220805__py3-none-any.whl → 0.1.2.dev202510240805__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

@@ -101,7 +101,7 @@ class EvaluationTest(unittest.TestCase):
  self.assertEqual(s.dir, os.path.join(s.root_dir, s.id))
  self.assertEqual(s.hash, s.clone().hash)
  # Test persistent hash.
- self.assertEqual(s.hash, 'e43392e4')
+ self.assertEqual(s.hash, 'ee958159')
  self.assertEqual(
  s.hash, s.clone(override={'max_workers': 2, 'lm.timeout': 20}).hash
  )
@@ -211,7 +211,7 @@ class EvaluationTest(unittest.TestCase):
  s.result,
  dict(
  experiment_setup=dict(
- id='Evaluation@2fbf1b05',
+ id='Evaluation@27a702cb',
  dir=s.dir,
  model='StaticSequence',
  prompt_template='{{example.question}}',
@@ -376,7 +376,7 @@ class EvaluationTest(unittest.TestCase):
  s.children[0].dir, os.path.join(s.root_dir, s.children[0].id)
  )
  # Test persistent hash.
- self.assertEqual(s.hash, 'de23bf31')
+ self.assertEqual(s.hash, 'f47532a7')

  summary = s.run(verbose=True)
  self.assertEqual(len(summary.evaluations), 2)
@@ -526,7 +526,7 @@ class SuiteTest(unittest.TestCase):
  lm=lm
  )
  # Test for persistent hash.
- self.assertEqual(s.hash, '1c42f93e')
+ self.assertEqual(s.hash, '4bd6a2f5')
  s.run()
  expected = {
  s.children[0].id: dict(
@@ -107,8 +107,8 @@ class LangFuncCallTest(unittest.TestCase):
  ' lm=ExcitedEchoer(sampling_options=LMSamplingOptions(temperature=None,'
  ' max_tokens=None, n=1, top_k=40, top_p=None, stop=None,'
  ' random_seed=None, logprobs=False, top_logprobs=None,'
- ' max_thinking_tokens=None, reasoning_effort=None), cache=None,'
- ' max_concurrency=None, timeout=120.0, max_attempts=5,'
+ ' max_thinking_tokens=None, reasoning_effort=None, extras={}),'
+ ' cache=None, max_concurrency=None, timeout=120.0, max_attempts=5,'
  ' retry_interval=(5, 60), exponential_backoff=True,'
  ' max_retry_interval=300, debug=False))',
  )
@@ -584,6 +584,15 @@ class LMSamplingOptions(component.Component):
  ),
  ] = None

+ extras: Annotated[
+ dict[str, Any],
+ (
+ 'Extra arguments (e.g. configuration for tool calls) to pass to '
+ 'the model. This is model-specific, please check model '
+ 'implementation to see how to use this.'
+ ),
+ ] = {}
+
  def cache_key(self) -> tuple[Any, ...]:
  """Returns a tuple of current values as cache key."""
  return (
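The new `extras` field gives callers a generic hook for provider-specific request arguments; the backend changes further down merge it into the outgoing request. A minimal usage sketch, with an illustrative key that is not part of this diff:

```python
import langfun.core as lf

# Illustrative sketch only: `extras` carries provider-specific request fields
# alongside the standard sampling options. Each backend merges the dict into
# its request (see the Anthropic, Gemini and OpenAI-compatible hunks below),
# so valid keys depend on the model implementation.
options = lf.LMSamplingOptions(
    temperature=0.0,
    max_tokens=1024,
    extras={'tools': [{'google_search': {}}]},  # hypothetical, model-specific
)
```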
@@ -151,6 +151,9 @@ from langfun.core.llms.openai import Gpt35

  # Anthropic models.

+ from langfun.core.llms.anthropic import Claude45
+ from langfun.core.llms.anthropic import Claude45Haiku_20251001
+ from langfun.core.llms.anthropic import Claude45Sonnet_20250929
  from langfun.core.llms.anthropic import Claude4
  from langfun.core.llms.anthropic import Claude4Sonnet_20250514
  from langfun.core.llms.anthropic import Claude4Opus_20250514
@@ -168,6 +171,8 @@ from langfun.core.llms.anthropic import Claude3Haiku
  from langfun.core.llms.anthropic import Claude3Haiku_20240307

  from langfun.core.llms.vertexai import VertexAIAnthropic
+ from langfun.core.llms.vertexai import VertexAIClaude45Haiku_20251001
+ from langfun.core.llms.vertexai import VertexAIClaude45Sonnet_20250929
  from langfun.core.llms.vertexai import VertexAIClaude4Opus_20250514
  from langfun.core.llms.vertexai import VertexAIClaude4Sonnet_20250514
  from langfun.core.llms.vertexai import VertexAIClaude37Sonnet_20250219
@@ -59,6 +59,60 @@ class AnthropicModelInfo(lf.ModelInfo):


  SUPPORTED_MODELS = [
+ AnthropicModelInfo(
+ model_id='claude-haiku-4-5-20251001',
+ provider='Anthropic',
+ in_service=True,
+ description='Claude 4.5 Haiku model (10/15/2025).',
+ release_date=datetime.datetime(2025, 10, 15),
+ input_modalities=(
+ AnthropicModelInfo.INPUT_IMAGE_TYPES
+ + AnthropicModelInfo.INPUT_DOC_TYPES
+ ),
+ context_length=lf.ModelInfo.ContextLength(
+ max_input_tokens=200_000,
+ max_output_tokens=64_000,
+ ),
+ pricing=lf.ModelInfo.Pricing(
+ cost_per_1m_cached_input_tokens=0.1,
+ cost_per_1m_input_tokens=1,
+ cost_per_1m_output_tokens=5,
+ ),
+ rate_limits=AnthropicModelInfo.RateLimits(
+ # Tier 4 rate limits
+ max_requests_per_minute=4000,
+ max_input_tokens_per_minute=4_000_000,
+ max_output_tokens_per_minute=800_000,
+ ),
+ ),
+ AnthropicModelInfo(
+ model_id='claude-sonnet-4-5-20250929',
+ provider='Anthropic',
+ in_service=True,
+ description='Claude 4.5 Sonnet model (9/29/2025).',
+ release_date=datetime.datetime(2025, 9, 29),
+ input_modalities=(
+ AnthropicModelInfo.INPUT_IMAGE_TYPES
+ + AnthropicModelInfo.INPUT_DOC_TYPES
+ ),
+ context_length=lf.ModelInfo.ContextLength(
+ max_input_tokens=200_000,
+ max_output_tokens=64_000,
+ ),
+ pricing=lf.ModelInfo.Pricing(
+ cost_per_1m_cached_input_tokens=0.3,
+ cost_per_1m_input_tokens=3,
+ cost_per_1m_output_tokens=15,
+ ),
+ rate_limits=AnthropicModelInfo.RateLimits(
+ # Tier 4 rate limits
+ # This rate limit is a total limit that applies to combined traffic
+ # across both Sonnet 4 and Sonnet 4.5.
+ max_requests_per_minute=4000,
+ max_input_tokens_per_minute=2_000_000,
+ max_output_tokens_per_minute=400_000,
+ ),
+ ),
  AnthropicModelInfo(
  model_id='claude-4-opus-20250514',
  provider='Anthropic',
@@ -190,6 +244,62 @@ SUPPORTED_MODELS = [
  max_output_tokens_per_minute=80_000,
  ),
  ),
+ AnthropicModelInfo(
+ model_id='claude-haiku-4-5@20251001',
+ alias_for='claude-haiku-4-5-20251001',
+ provider='VertexAI',
+ in_service=True,
+ description='Claude 4.5 Haiku model served on VertexAI (10/15/2025).',
+ release_date=datetime.datetime(2025, 10, 15),
+ input_modalities=(
+ AnthropicModelInfo.INPUT_IMAGE_TYPES
+ + AnthropicModelInfo.INPUT_DOC_TYPES
+ ),
+ context_length=lf.ModelInfo.ContextLength(
+ max_input_tokens=200_000,
+ max_output_tokens=64_000,
+ ),
+ pricing=lf.ModelInfo.Pricing(
+ # For global endpoint
+ cost_per_1m_cached_input_tokens=0.1,
+ cost_per_1m_input_tokens=1,
+ cost_per_1m_output_tokens=5,
+ ),
+ rate_limits=AnthropicModelInfo.RateLimits(
+ # For global endpoint
+ max_requests_per_minute=2500,
+ max_input_tokens_per_minute=200_000,
+ max_output_tokens_per_minute=0,
+ ),
+ ),
+ AnthropicModelInfo(
+ model_id='claude-sonnet-4-5@20250929',
+ alias_for='claude-sonnet-4-5-20250929',
+ provider='VertexAI',
+ in_service=True,
+ description='Claude 4.5 Sonnet model (9/29/2025).',
+ release_date=datetime.datetime(2025, 9, 29),
+ input_modalities=(
+ AnthropicModelInfo.INPUT_IMAGE_TYPES
+ + AnthropicModelInfo.INPUT_DOC_TYPES
+ ),
+ context_length=lf.ModelInfo.ContextLength(
+ max_input_tokens=200_000,
+ max_output_tokens=64_000,
+ ),
+ pricing=lf.ModelInfo.Pricing(
+ # For global endpoint
+ cost_per_1m_cached_input_tokens=0.3,
+ cost_per_1m_input_tokens=3,
+ cost_per_1m_output_tokens=15,
+ ),
+ rate_limits=AnthropicModelInfo.RateLimits(
+ # For global endpoint
+ max_requests_per_minute=1500,
+ max_input_tokens_per_minute=200_000,
+ max_output_tokens_per_minute=0,
+ ),
+ ),
  AnthropicModelInfo(
  model_id='claude-opus-4@20250514',
  alias_for='claude-opus-4-20250514',
@@ -658,6 +768,8 @@ class Anthropic(rest.REST):
  args.pop('temperature', None)
  args.pop('top_k', None)
  args.pop('top_p', None)
+ if options.extras:
+ args.update(options.extras)
  return args

  def result(self, json: dict[str, Any]) -> lf.LMSamplingResult:
@@ -679,6 +791,24 @@ class Anthropic(rest.REST):
  return super()._error(status_code, content)


+ class Claude45(Anthropic):
+ """Base class for Claude 4.5 models."""
+
+
+ # pylint: disable=invalid-name
+ class Claude45Haiku_20251001(Claude45):
+ """Claude 4.5 Haiku model 20251001."""
+
+ model = 'claude-haiku-4-5-20251001'
+
+
+ # pylint: disable=invalid-name
+ class Claude45Sonnet_20250929(Claude45):
+ """Claude 4.5 Sonnet model 20250929."""
+
+ model = 'claude-sonnet-4-5-20250929'
+
+
  class Claude4(Anthropic):
  """Base class for Claude 4 models."""

@@ -752,11 +752,8 @@ class Gemini(rest.REST):
  prompt.as_format('gemini', chunk_preprocessor=modality_conversion)
  )
  request['contents'] = contents
- # Users could use `metadata_gemini_tools` to pass Gemini tools. For example,
- # for enabling Search Grounding, users could pass:
- # metadata_gemini_tools=[{'google_search': {}}]
- if tools := prompt.metadata.get('gemini_tools'):
-   request['tools'] = tools
+ if sampling_options.extras:
+   request.update(sampling_options.extras)
  return request

  def _generation_config(
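With this change the `gemini_tools` metadata path is gone; the whole `extras` dict is merged into the request instead, so the Search-Grounding example from the removed comment would presumably move to sampling options. A hedged sketch; the model class name is a placeholder and not taken from this diff:

```python
import langfun.core as lf
from langfun.core import llms

# Sketch under assumptions: since `request.update(sampling_options.extras)`
# copies keys verbatim into the request body, `extras={'tools': [...]}` sets
# the same `tools` field that `prompt.metadata['gemini_tools']` used to set.
lm = llms.GeminiPro(  # placeholder class name; use any Gemini-backed model
    sampling_options=lf.LMSamplingOptions(
        extras={'tools': [{'google_search': {}}]},
    ),
)
```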
@@ -62,6 +62,8 @@ class OpenAICompatible(rest.REST):
  args['seed'] = options.random_seed
  if options.reasoning_effort is not None:
  args['reasoning_effort'] = options.reasoning_effort
+ if options.extras:
+   args.update(options.extras)
  return args

  def request(
@@ -369,6 +369,16 @@ class VertexAIAnthropic(VertexAI, anthropic.Anthropic):
  # pylint: disable=invalid-name


+ class VertexAIClaude45Haiku_20251001(VertexAIAnthropic):
+ """Anthropic's Claude 4.5 Haiku model on VertexAI."""
+ model = 'claude-haiku-4-5@20251001'
+
+
+ class VertexAIClaude45Sonnet_20250929(VertexAIAnthropic):
+ """Anthropic's Claude 4.5 Sonnet model on VertexAI."""
+ model = 'claude-sonnet-4-5@20250929'
+
+
  class VertexAIClaude4Opus_20250514(VertexAIAnthropic):
  """Anthropic's Claude 4 Opus model on VertexAI."""
  model = 'claude-opus-4@20250514'
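The newly exported Claude 4.5 classes plug in like the existing Claude 4 ones, so switching models should only require the new class name. A usage sketch; the constructor arguments follow the existing Anthropic and VertexAI base-class interfaces and are assumptions, not part of this diff:

```python
from langfun.core import llms

# Direct Anthropic API (api_key handling follows the existing Anthropic base
# class; the value shown is a placeholder).
lm = llms.Claude45Sonnet_20250929(api_key='<ANTHROPIC_API_KEY>')

# Same model served on Vertex AI (project/location follow the existing
# VertexAIAnthropic interface; the values are placeholders).
vertex_lm = llms.VertexAIClaude45Sonnet_20250929(
    project='<GCP_PROJECT>', location='us-east5'
)
```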
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: langfun
- Version: 0.1.2.dev202510220805
+ Version: 0.1.2.dev202510240805
  Summary: Langfun: Language as Functions.
  Home-page: https://github.com/google/langfun
  Author: Langfun Authors
@@ -16,8 +16,8 @@ langfun/core/concurrent_test.py,sha256=KzXOlfR3i_-s_GKBLYrO5-ETCvHoFbFY2o9FEeOeX
  langfun/core/console.py,sha256=cLQEf84aDxItA9fStJV22xJch0TqFLNf9hLqwJ0RHmU,2652
  langfun/core/console_test.py,sha256=pBOcuNMJdVELywvroptfcRtJMsegMm3wSlHAL2TdxVk,1679
  langfun/core/langfunc.py,sha256=G50YgoVZ0y1GFw2ev41MlOqr6qa8YakbvNC0h_E0PiA,11140
- langfun/core/langfunc_test.py,sha256=CDn-gJCa5EnjN7cotAVCfSCbuzddq2o-HzEt7kV8HbY,8882
- langfun/core/language_model.py,sha256=xrSaO7KLBRf0rEjnFHBH9fZ0oGfmWhlA0ezAlWEMnTc,52107
+ langfun/core/langfunc_test.py,sha256=S6iM9YKLd23gw_t2Mn3UtM1Qz7wsXym1PndZSqPDom8,8893
+ langfun/core/language_model.py,sha256=vZ2KLxJRxloEAY3Ig1B9yY5hVkEUgYPm3xy4kKOo0jY,52371
  langfun/core/language_model_test.py,sha256=aJWn3UJm_S6U7VhU7EVXdJHPe1xza5glngPkRGtx280,38426
  langfun/core/logging.py,sha256=7IGAhp7mGokZxxqtL-XZvFLKaZ5k3F5_Xp2NUtR4GwE,9136
  langfun/core/logging_test.py,sha256=vbVGOQxwMmVSiFfbt2897gUt-8nqDpV64jCAeUG_q5U,6924
@@ -61,7 +61,7 @@ langfun/core/data/conversion/openai.py,sha256=sSpkDSxMJWJ3I1dNICBCzvLsJv4iiLg8FP
  langfun/core/data/conversion/openai_test.py,sha256=38WV_3ofFZiUF10bTKnZp4VyuDP5-81aR3h3Q0HlBm0,5283
  langfun/core/eval/__init__.py,sha256=OEXr1ZRuvLuhJJfuQ1ZWQ-SvYzjyrtiAAEogYaB7E6o,1933
  langfun/core/eval/base.py,sha256=g2qnOg9zyU20coNTCwX6Fbi0pPFEUoOxHY5uw98LcNg,75810
- langfun/core/eval/base_test.py,sha256=q4wEd2KDUxzUkeELwof0HXBKe9TMQYUq84ddA043VPg,27191
+ langfun/core/eval/base_test.py,sha256=jbl5NMKk9QUsp8R-OeTJ4dEefQdK8JL1lIouuihglbc,27191
  langfun/core/eval/matching.py,sha256=AVKkGoc-BaHEzgSBamaAk3194TgqckDe_dinpS6LrXI,9323
  langfun/core/eval/matching_test.py,sha256=2xtwsTi-UzLTt0QnXl3u_eAG3fFjCG2tsae7YkcQTB0,5312
  langfun/core/eval/patching.py,sha256=R0s2eAd1m97exQt06dmUL0V_MBG0W2Hxg7fhNB7cXW0,3866
@@ -90,8 +90,8 @@ langfun/core/eval/v2/reporting.py,sha256=yUIPCAMnp7InIzpv1DDWrcLO-75iiOUTpscj7sm
  langfun/core/eval/v2/reporting_test.py,sha256=CMK-vwho8cNRJwlbkCqm_v5fykE7Y3V6SaIOCY0CDyA,5671
  langfun/core/eval/v2/runners.py,sha256=bEniZDNu44AQgvqpwLsvBU4V_7WltAe-NPhYgIsLj1E,16848
  langfun/core/eval/v2/runners_test.py,sha256=spjkmqlls_vyERdZMdjv6dhIN9ZfxsDDvIQAWTj2kMk,11954
- langfun/core/llms/__init__.py,sha256=SViaAza5E00WG_vdsB69FF1n1vTm5pQrqR6eTfjXlhE,9793
- langfun/core/llms/anthropic.py,sha256=YcQ2VG8iOfXtry_tTpAukmiwXa2hK_9LkpkmXk41Nm0,26226
+ langfun/core/llms/__init__.py,sha256=g2OQWlgmgblE2wps8mLUcBhr0IP2bylwMhOjGOI2ze0,10110
+ langfun/core/llms/anthropic.py,sha256=O_OuBiFhHou1Y15W2GwYFY1gV3FUsSUBOsAJp9UORqI,30710
  langfun/core/llms/anthropic_test.py,sha256=qA9vByp_cwwXNlXzcwHpPWFnO9lfFo8NKfDi5nBNqgI,9052
  langfun/core/llms/azure_openai.py,sha256=-KkSLaR54MlsIqz_XIwv0TnsBnvNTAxnjA2Q2O2u5KM,2733
  langfun/core/llms/azure_openai_test.py,sha256=lkMZkQdJBV97fTM4C4z8qNfvr6spgiN5G4hvVUIVr0M,1735
@@ -101,7 +101,7 @@ langfun/core/llms/deepseek.py,sha256=jvTxdXPr-vH6HNakn_Ootx1heDg8Fen2FUkUW36bpCs
  langfun/core/llms/deepseek_test.py,sha256=DvROWPlDuow5E1lfoSkhyGt_ELA19JoQoDsTnRgDtTg,1847
  langfun/core/llms/fake.py,sha256=bDk_4u7V2LmYUotyOaicwzi0-lnWOIIBbR3-Bil1P3o,3481
  langfun/core/llms/fake_test.py,sha256=lC-C2TpEsnf2kmZpa3OiH2H944I4hMWTAaHEXzRj1DU,7855
- langfun/core/llms/gemini.py,sha256=qR_rBdkFO6z9MRFoXhq3jiJjKo1tFi4yLkcD2wXzJlY,30337
+ langfun/core/llms/gemini.py,sha256=-DL5PebzaTjz7rTFw_1RC5O1aE4EYSv3oNsM65YKCoo,30143
  langfun/core/llms/gemini_test.py,sha256=y1s0W65SrdepbZxzgIeoTB2MI7sXnfBDf1NsGn57LbM,7617
  langfun/core/llms/google_genai.py,sha256=ogyoOUK4s1OcSFKun0YK5xBRDVyxmvz9WsYNKAwuB0g,5918
  langfun/core/llms/google_genai_test.py,sha256=NKNtpebArQ9ZR7Qsnhd2prFIpMjleojy6o6VMXkJ1zY,1502
@@ -110,12 +110,12 @@ langfun/core/llms/groq_test.py,sha256=P4EgexCqsh4K2x11w0UL_vz-YYNaPdQU0WsDAdnTRQ
  langfun/core/llms/llama_cpp.py,sha256=Z7P3gc4xeIjc2bX0Ey1y5EUYJVMnMa2Q67PZ9iye9sE,1409
  langfun/core/llms/llama_cpp_test.py,sha256=wfTO7nmUwL65U2kK9P9fcMt92JjNDuVia4G1E7znf_4,1086
  langfun/core/llms/openai.py,sha256=UZM0j3BHRz5NVLs8q7YYRkneM1CwuGQSt7sFaM4IAPU,44164
- langfun/core/llms/openai_compatible.py,sha256=JlFUTiK4e3ox2DGeGBcAD-cXkxmBdx5g6LrYkyMIaps,5777
+ langfun/core/llms/openai_compatible.py,sha256=PIQw8jnToqfPwEDVHBdJO5eWZ77ZHhc0kvhCHuGAfQk,5834
  langfun/core/llms/openai_compatible_test.py,sha256=KwOMA7tsmOxFBjezltkBDSU77AvOQkI23dO2nHLAlB4,17689
  langfun/core/llms/openai_test.py,sha256=gwuO6aoa296iM2welWV9ua4KF8gEVGsEPakgbtkWkFQ,2687
  langfun/core/llms/rest.py,sha256=YXzSjr7YgtZ5zKDgjA-3D-sabo5wTjpLQDHUIcEPPgg,4773
  langfun/core/llms/rest_test.py,sha256=_zM7nV8DEVyoXNiQOnuwJ917mWjki0614H88rNmDboE,5020
- langfun/core/llms/vertexai.py,sha256=M0GySUbTukyfc6Pwo7i6AX4uapYyshY49VfPsTU8xac,20245
+ langfun/core/llms/vertexai.py,sha256=SeUDcH9mis5e7XsdBOCLIH6AxqT1-TpnTKRWNPuRXOY,20554
  langfun/core/llms/vertexai_test.py,sha256=_e-acnNBAf9C3WO6i1b2J_mhRzdDdYQTorD9hIVZKOg,5034
  langfun/core/llms/cache/__init__.py,sha256=QAo3InUMDM_YpteNnVCSejI4zOsnjSMWKJKzkb3VY64,993
  langfun/core/llms/cache/base.py,sha256=rt3zwmyw0y9jsSGW-ZbV1vAfLxQ7_3AVk0l2EySlse4,3918
@@ -188,8 +188,8 @@ langfun/env/event_handlers/event_logger.py,sha256=3dbPjBe53dBgntYHlyLlj_77hVecPS
  langfun/env/event_handlers/event_logger_test.py,sha256=PGof3rPllNnyzs3Yp8kaOHLeTkVrzUgCJwlODTrVRKI,9111
  langfun/env/event_handlers/metric_writer.py,sha256=NgJKsd6xWOtEd0IjYi7coGEaqGYkkPcDjXN9CQ3vxPU,18043
  langfun/env/event_handlers/metric_writer_test.py,sha256=flRqK10wonhJk4idGD_8jjEjrfjgH0R-qcu-7Bj1G5s,5335
- langfun-0.1.2.dev202510220805.dist-info/licenses/LICENSE,sha256=WNHhf_5RCaeuKWyq_K39vmp9F28LxKsB4SpomwSZ2L0,11357
- langfun-0.1.2.dev202510220805.dist-info/METADATA,sha256=zlMkgvV6eEicxrri6y7EWd3D07jX8N7Q8WDAZytXQ8w,7522
- langfun-0.1.2.dev202510220805.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- langfun-0.1.2.dev202510220805.dist-info/top_level.txt,sha256=RhlEkHxs1qtzmmtWSwYoLVJAc1YrbPtxQ52uh8Z9VvY,8
- langfun-0.1.2.dev202510220805.dist-info/RECORD,,
+ langfun-0.1.2.dev202510240805.dist-info/licenses/LICENSE,sha256=WNHhf_5RCaeuKWyq_K39vmp9F28LxKsB4SpomwSZ2L0,11357
+ langfun-0.1.2.dev202510240805.dist-info/METADATA,sha256=74oMU7_OzCL7m-XDTqb9L2LeRY24jjoZmoXozLX5akE,7522
+ langfun-0.1.2.dev202510240805.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ langfun-0.1.2.dev202510240805.dist-info/top_level.txt,sha256=RhlEkHxs1qtzmmtWSwYoLVJAc1YrbPtxQ52uh8Z9VvY,8
+ langfun-0.1.2.dev202510240805.dist-info/RECORD,,