lionagi 0.5.5__py3-none-any.whl → 0.6.1__py3-none-any.whl

Sign up to get free protection for your applications and to get access to all the features.
Files changed (433) hide show
  1. lionagi/__init__.py +18 -24
  2. lionagi/{core/_class_registry.py → _class_registry.py} +51 -10
  3. lionagi/_errors.py +35 -0
  4. lionagi/libs/__init__.py +3 -0
  5. lionagi/libs/compress/__init__.py +3 -0
  6. lionagi/libs/compress/models.py +6 -2
  7. lionagi/libs/compress/utils.py +4 -16
  8. lionagi/libs/file/__init__.py +3 -0
  9. lionagi/libs/file/chunk.py +4 -0
  10. lionagi/libs/file/file_ops.py +4 -0
  11. lionagi/libs/file/params.py +4 -41
  12. lionagi/libs/file/process.py +4 -0
  13. lionagi/libs/file/save.py +5 -1
  14. lionagi/libs/{parse/flatten → nested}/flatten.py +4 -0
  15. lionagi/libs/{parse/nested → nested}/nfilter.py +4 -0
  16. lionagi/libs/{parse/nested → nested}/nget.py +6 -1
  17. lionagi/libs/{parse/nested → nested}/ninsert.py +5 -1
  18. lionagi/libs/{parse/nested → nested}/nmerge.py +4 -0
  19. lionagi/libs/{parse/nested → nested}/npop.py +5 -2
  20. lionagi/libs/{parse/nested → nested}/nset.py +6 -1
  21. lionagi/libs/{parse/flatten → nested}/unflatten.py +4 -0
  22. lionagi/libs/{parse/nested → nested}/utils.py +5 -1
  23. lionagi/libs/package/__init__.py +3 -0
  24. lionagi/libs/package/imports.py +6 -2
  25. lionagi/libs/package/management.py +7 -3
  26. lionagi/libs/package/params.py +4 -0
  27. lionagi/libs/package/system.py +4 -0
  28. lionagi/libs/parse.py +30 -0
  29. lionagi/libs/{parse/json → schema}/as_readable.py +10 -4
  30. lionagi/libs/{parse/string_parse/code_block.py → schema/extract_code_block.py} +4 -0
  31. lionagi/libs/{parse/string_parse/docstring.py → schema/extract_docstring.py} +4 -0
  32. lionagi/libs/{parse/string_parse/function_.py → schema/function_to_schema.py} +21 -9
  33. lionagi/libs/{parse/json/schema.py → schema/json_schema.py} +5 -1
  34. lionagi/libs/validate/common_field_validators.py +170 -0
  35. lionagi/libs/{parse/validate/keys.py → validate/fuzzy_match_keys.py} +42 -8
  36. lionagi/libs/{parse/validate/mapping.py → validate/fuzzy_validate_mapping.py} +41 -6
  37. lionagi/libs/{string_similarity/algorithms.py → validate/string_similarity.py} +115 -1
  38. lionagi/libs/{parse/validate/boolean.py → validate/validate_boolean.py} +42 -3
  39. lionagi/operations/__init__.py +13 -3
  40. lionagi/operations/brainstorm/__init__.py +3 -3
  41. lionagi/operations/brainstorm/brainstorm.py +33 -19
  42. lionagi/operations/brainstorm/prompt.py +4 -0
  43. lionagi/operations/plan/__init__.py +4 -0
  44. lionagi/operations/plan/plan.py +19 -16
  45. lionagi/operations/plan/prompt.py +4 -0
  46. lionagi/operations/select/__init__.py +4 -0
  47. lionagi/operations/select/prompt.py +4 -0
  48. lionagi/operations/select/select.py +2 -2
  49. lionagi/operations/select/utils.py +4 -4
  50. lionagi/{strategies → operations/strategies}/base.py +6 -2
  51. lionagi/{strategies → operations/strategies}/concurrent.py +8 -5
  52. lionagi/{strategies → operations/strategies}/concurrent_chunk.py +6 -3
  53. lionagi/{strategies → operations/strategies}/concurrent_sequential_chunk.py +8 -4
  54. lionagi/{strategies → operations/strategies}/params.py +26 -6
  55. lionagi/{strategies → operations/strategies}/sequential.py +6 -2
  56. lionagi/{strategies → operations/strategies}/sequential_chunk.py +7 -3
  57. lionagi/{strategies → operations/strategies}/sequential_concurrent_chunk.py +9 -4
  58. lionagi/{strategies → operations/strategies}/utils.py +6 -3
  59. lionagi/operations/types.py +13 -0
  60. lionagi/operations/utils.py +6 -3
  61. lionagi/operatives/action/function_calling.py +136 -0
  62. lionagi/operatives/action/manager.py +236 -0
  63. lionagi/operatives/action/request_response_model.py +90 -0
  64. lionagi/operatives/action/tool.py +141 -0
  65. lionagi/{protocols/operatives/action.py → operatives/action/utils.py} +52 -90
  66. lionagi/{core → operatives}/forms/base.py +9 -4
  67. lionagi/{core → operatives}/forms/form.py +8 -13
  68. lionagi/{core → operatives}/forms/report.py +5 -3
  69. lionagi/operatives/instruct/base.py +79 -0
  70. lionagi/operatives/instruct/instruct.py +105 -0
  71. lionagi/operatives/instruct/instruct_collection.py +52 -0
  72. lionagi/operatives/instruct/node.py +13 -0
  73. lionagi/{protocols/operatives → operatives/instruct}/prompts.py +0 -34
  74. lionagi/{protocols/operatives → operatives/instruct}/reason.py +14 -7
  75. lionagi/{core/models/__init__.py → operatives/manager.py} +5 -1
  76. lionagi/operatives/models/field_model.py +194 -0
  77. lionagi/operatives/models/model_params.py +307 -0
  78. lionagi/{core → operatives}/models/note.py +20 -28
  79. lionagi/{core → operatives}/models/operable_model.py +153 -71
  80. lionagi/{core → operatives}/models/schema_model.py +4 -3
  81. lionagi/{protocols/operatives → operatives}/operative.py +10 -7
  82. lionagi/{protocols/operatives → operatives}/step.py +67 -26
  83. lionagi/operatives/types.py +69 -0
  84. lionagi/protocols/_concepts.py +94 -0
  85. lionagi/protocols/adapters/adapter.py +23 -7
  86. lionagi/protocols/adapters/json_adapter.py +72 -14
  87. lionagi/protocols/adapters/pandas_/csv_adapter.py +50 -0
  88. lionagi/protocols/adapters/pandas_/excel_adapter.py +52 -0
  89. lionagi/protocols/adapters/pandas_/pd_dataframe_adapter.py +31 -0
  90. lionagi/protocols/adapters/pandas_/pd_series_adapter.py +17 -0
  91. lionagi/protocols/adapters/types.py +18 -0
  92. lionagi/protocols/generic/element.py +460 -0
  93. lionagi/protocols/generic/event.py +177 -0
  94. lionagi/protocols/generic/log.py +237 -0
  95. lionagi/{core → protocols}/generic/pile.py +193 -131
  96. lionagi/protocols/generic/processor.py +316 -0
  97. lionagi/protocols/generic/progression.py +500 -0
  98. lionagi/protocols/graph/edge.py +166 -0
  99. lionagi/protocols/graph/graph.py +290 -0
  100. lionagi/protocols/graph/node.py +125 -0
  101. lionagi/protocols/mail/exchange.py +116 -0
  102. lionagi/protocols/mail/mail.py +25 -0
  103. lionagi/protocols/mail/mailbox.py +47 -0
  104. lionagi/protocols/mail/manager.py +168 -0
  105. lionagi/protocols/mail/package.py +55 -0
  106. lionagi/protocols/messages/action_request.py +165 -0
  107. lionagi/protocols/messages/action_response.py +132 -0
  108. lionagi/{core/communication → protocols/messages}/assistant_response.py +55 -79
  109. lionagi/protocols/messages/base.py +73 -0
  110. lionagi/protocols/messages/instruction.py +582 -0
  111. lionagi/protocols/messages/manager.py +429 -0
  112. lionagi/protocols/messages/message.py +216 -0
  113. lionagi/protocols/messages/system.py +115 -0
  114. lionagi/protocols/messages/templates/assistant_response.jinja2 +6 -0
  115. lionagi/{core/communication → protocols/messages}/templates/instruction_message.jinja2 +2 -2
  116. lionagi/protocols/types.py +96 -0
  117. lionagi/service/__init__.py +1 -16
  118. lionagi/service/endpoints/base.py +517 -0
  119. lionagi/service/endpoints/chat_completion.py +102 -0
  120. lionagi/service/endpoints/match_endpoint.py +60 -0
  121. lionagi/service/endpoints/rate_limited_processor.py +146 -0
  122. lionagi/service/endpoints/token_calculator.py +209 -0
  123. lionagi/service/imodel.py +263 -96
  124. lionagi/service/manager.py +45 -0
  125. lionagi/service/providers/anthropic_/messages.py +64 -0
  126. lionagi/service/providers/groq_/chat_completions.py +56 -0
  127. lionagi/service/providers/openai_/chat_completions.py +62 -0
  128. lionagi/service/providers/openrouter_/chat_completions.py +62 -0
  129. lionagi/service/providers/perplexity_/__init__.py +3 -0
  130. lionagi/service/providers/perplexity_/chat_completions.py +40 -0
  131. lionagi/service/types.py +18 -0
  132. lionagi/session/__init__.py +3 -0
  133. lionagi/session/branch.py +1287 -0
  134. lionagi/session/session.py +296 -0
  135. lionagi/settings.py +62 -118
  136. lionagi/utils.py +2386 -0
  137. lionagi/version.py +1 -1
  138. {lionagi-0.5.5.dist-info → lionagi-0.6.1.dist-info}/METADATA +10 -9
  139. lionagi-0.6.1.dist-info/RECORD +169 -0
  140. lionagi/core/action/action_manager.py +0 -289
  141. lionagi/core/action/base.py +0 -109
  142. lionagi/core/action/function_calling.py +0 -153
  143. lionagi/core/action/tool.py +0 -202
  144. lionagi/core/action/types.py +0 -16
  145. lionagi/core/communication/action_request.py +0 -163
  146. lionagi/core/communication/action_response.py +0 -149
  147. lionagi/core/communication/base_mail.py +0 -49
  148. lionagi/core/communication/instruction.py +0 -376
  149. lionagi/core/communication/message.py +0 -286
  150. lionagi/core/communication/message_manager.py +0 -543
  151. lionagi/core/communication/system.py +0 -116
  152. lionagi/core/communication/templates/assistant_response.jinja2 +0 -2
  153. lionagi/core/communication/types.py +0 -27
  154. lionagi/core/communication/utils.py +0 -256
  155. lionagi/core/forms/types.py +0 -13
  156. lionagi/core/generic/component.py +0 -422
  157. lionagi/core/generic/edge.py +0 -163
  158. lionagi/core/generic/element.py +0 -199
  159. lionagi/core/generic/graph.py +0 -377
  160. lionagi/core/generic/log.py +0 -151
  161. lionagi/core/generic/log_manager.py +0 -320
  162. lionagi/core/generic/node.py +0 -11
  163. lionagi/core/generic/progression.py +0 -395
  164. lionagi/core/generic/types.py +0 -23
  165. lionagi/core/generic/utils.py +0 -53
  166. lionagi/core/models/base.py +0 -28
  167. lionagi/core/models/field_model.py +0 -145
  168. lionagi/core/models/model_params.py +0 -194
  169. lionagi/core/models/types.py +0 -19
  170. lionagi/core/session/branch.py +0 -130
  171. lionagi/core/session/branch_mixins.py +0 -581
  172. lionagi/core/session/session.py +0 -163
  173. lionagi/core/session/types.py +0 -8
  174. lionagi/core/typing/__init__.py +0 -9
  175. lionagi/core/typing/_concepts.py +0 -173
  176. lionagi/core/typing/_id.py +0 -104
  177. lionagi/core/typing/_pydantic.py +0 -33
  178. lionagi/core/typing/_typing.py +0 -54
  179. lionagi/integrations/_services.py +0 -17
  180. lionagi/integrations/anthropic_/AnthropicModel.py +0 -268
  181. lionagi/integrations/anthropic_/AnthropicService.py +0 -127
  182. lionagi/integrations/anthropic_/anthropic_max_output_token_data.yaml +0 -12
  183. lionagi/integrations/anthropic_/anthropic_price_data.yaml +0 -34
  184. lionagi/integrations/anthropic_/api_endpoints/api_request.py +0 -277
  185. lionagi/integrations/anthropic_/api_endpoints/data_models.py +0 -40
  186. lionagi/integrations/anthropic_/api_endpoints/match_response.py +0 -119
  187. lionagi/integrations/anthropic_/api_endpoints/messages/request/message_models.py +0 -14
  188. lionagi/integrations/anthropic_/api_endpoints/messages/request/request_body.py +0 -74
  189. lionagi/integrations/anthropic_/api_endpoints/messages/response/__init__.py +0 -0
  190. lionagi/integrations/anthropic_/api_endpoints/messages/response/content_models.py +0 -32
  191. lionagi/integrations/anthropic_/api_endpoints/messages/response/response_body.py +0 -101
  192. lionagi/integrations/anthropic_/api_endpoints/messages/response/usage_models.py +0 -25
  193. lionagi/integrations/anthropic_/version.py +0 -5
  194. lionagi/integrations/groq_/GroqModel.py +0 -325
  195. lionagi/integrations/groq_/GroqService.py +0 -156
  196. lionagi/integrations/groq_/api_endpoints/__init__.py +0 -0
  197. lionagi/integrations/groq_/api_endpoints/data_models.py +0 -187
  198. lionagi/integrations/groq_/api_endpoints/groq_request.py +0 -288
  199. lionagi/integrations/groq_/api_endpoints/match_response.py +0 -106
  200. lionagi/integrations/groq_/api_endpoints/response_utils.py +0 -105
  201. lionagi/integrations/groq_/groq_max_output_token_data.yaml +0 -21
  202. lionagi/integrations/groq_/groq_price_data.yaml +0 -58
  203. lionagi/integrations/groq_/groq_rate_limits.yaml +0 -105
  204. lionagi/integrations/groq_/version.py +0 -5
  205. lionagi/integrations/litellm_/imodel.py +0 -76
  206. lionagi/integrations/ollama_/OllamaModel.py +0 -244
  207. lionagi/integrations/ollama_/OllamaService.py +0 -142
  208. lionagi/integrations/ollama_/api_endpoints/api_request.py +0 -179
  209. lionagi/integrations/ollama_/api_endpoints/chat_completion/message_models.py +0 -31
  210. lionagi/integrations/ollama_/api_endpoints/chat_completion/request_body.py +0 -46
  211. lionagi/integrations/ollama_/api_endpoints/chat_completion/response_body.py +0 -67
  212. lionagi/integrations/ollama_/api_endpoints/chat_completion/tool_models.py +0 -49
  213. lionagi/integrations/ollama_/api_endpoints/completion/__init__.py +0 -0
  214. lionagi/integrations/ollama_/api_endpoints/completion/request_body.py +0 -72
  215. lionagi/integrations/ollama_/api_endpoints/completion/response_body.py +0 -59
  216. lionagi/integrations/ollama_/api_endpoints/data_models.py +0 -15
  217. lionagi/integrations/ollama_/api_endpoints/embedding/__init__.py +0 -0
  218. lionagi/integrations/ollama_/api_endpoints/embedding/request_body.py +0 -33
  219. lionagi/integrations/ollama_/api_endpoints/embedding/response_body.py +0 -29
  220. lionagi/integrations/ollama_/api_endpoints/match_data_model.py +0 -62
  221. lionagi/integrations/ollama_/api_endpoints/match_response.py +0 -190
  222. lionagi/integrations/ollama_/api_endpoints/model/copy_model.py +0 -13
  223. lionagi/integrations/ollama_/api_endpoints/model/create_model.py +0 -28
  224. lionagi/integrations/ollama_/api_endpoints/model/delete_model.py +0 -11
  225. lionagi/integrations/ollama_/api_endpoints/model/list_model.py +0 -60
  226. lionagi/integrations/ollama_/api_endpoints/model/pull_model.py +0 -34
  227. lionagi/integrations/ollama_/api_endpoints/model/push_model.py +0 -35
  228. lionagi/integrations/ollama_/api_endpoints/model/show_model.py +0 -36
  229. lionagi/integrations/ollama_/api_endpoints/option_models.py +0 -68
  230. lionagi/integrations/openai_/OpenAIModel.py +0 -419
  231. lionagi/integrations/openai_/OpenAIService.py +0 -435
  232. lionagi/integrations/openai_/__init__.py +0 -0
  233. lionagi/integrations/openai_/api_endpoints/__init__.py +0 -3
  234. lionagi/integrations/openai_/api_endpoints/api_request.py +0 -277
  235. lionagi/integrations/openai_/api_endpoints/audio/__init__.py +0 -9
  236. lionagi/integrations/openai_/api_endpoints/audio/speech_models.py +0 -34
  237. lionagi/integrations/openai_/api_endpoints/audio/transcription_models.py +0 -136
  238. lionagi/integrations/openai_/api_endpoints/audio/translation_models.py +0 -41
  239. lionagi/integrations/openai_/api_endpoints/audio/types.py +0 -41
  240. lionagi/integrations/openai_/api_endpoints/batch/__init__.py +0 -17
  241. lionagi/integrations/openai_/api_endpoints/batch/batch_models.py +0 -146
  242. lionagi/integrations/openai_/api_endpoints/batch/cancel_batch.py +0 -7
  243. lionagi/integrations/openai_/api_endpoints/batch/create_batch.py +0 -26
  244. lionagi/integrations/openai_/api_endpoints/batch/list_batch.py +0 -37
  245. lionagi/integrations/openai_/api_endpoints/batch/request_object_models.py +0 -65
  246. lionagi/integrations/openai_/api_endpoints/batch/retrieve_batch.py +0 -7
  247. lionagi/integrations/openai_/api_endpoints/batch/types.py +0 -4
  248. lionagi/integrations/openai_/api_endpoints/chat_completions/__init__.py +0 -1
  249. lionagi/integrations/openai_/api_endpoints/chat_completions/request/__init__.py +0 -39
  250. lionagi/integrations/openai_/api_endpoints/chat_completions/request/message_models.py +0 -121
  251. lionagi/integrations/openai_/api_endpoints/chat_completions/request/request_body.py +0 -221
  252. lionagi/integrations/openai_/api_endpoints/chat_completions/request/response_format.py +0 -71
  253. lionagi/integrations/openai_/api_endpoints/chat_completions/request/stream_options.py +0 -14
  254. lionagi/integrations/openai_/api_endpoints/chat_completions/request/tool_choice_models.py +0 -17
  255. lionagi/integrations/openai_/api_endpoints/chat_completions/request/tool_models.py +0 -54
  256. lionagi/integrations/openai_/api_endpoints/chat_completions/request/types.py +0 -18
  257. lionagi/integrations/openai_/api_endpoints/chat_completions/response/__init__.py +0 -0
  258. lionagi/integrations/openai_/api_endpoints/chat_completions/response/choice_models.py +0 -62
  259. lionagi/integrations/openai_/api_endpoints/chat_completions/response/function_models.py +0 -16
  260. lionagi/integrations/openai_/api_endpoints/chat_completions/response/log_prob_models.py +0 -47
  261. lionagi/integrations/openai_/api_endpoints/chat_completions/response/message_models.py +0 -25
  262. lionagi/integrations/openai_/api_endpoints/chat_completions/response/response_body.py +0 -99
  263. lionagi/integrations/openai_/api_endpoints/chat_completions/response/types.py +0 -8
  264. lionagi/integrations/openai_/api_endpoints/chat_completions/response/usage_models.py +0 -24
  265. lionagi/integrations/openai_/api_endpoints/chat_completions/util.py +0 -46
  266. lionagi/integrations/openai_/api_endpoints/data_models.py +0 -23
  267. lionagi/integrations/openai_/api_endpoints/embeddings/__init__.py +0 -3
  268. lionagi/integrations/openai_/api_endpoints/embeddings/request_body.py +0 -79
  269. lionagi/integrations/openai_/api_endpoints/embeddings/response_body.py +0 -67
  270. lionagi/integrations/openai_/api_endpoints/files/__init__.py +0 -11
  271. lionagi/integrations/openai_/api_endpoints/files/delete_file.py +0 -20
  272. lionagi/integrations/openai_/api_endpoints/files/file_models.py +0 -56
  273. lionagi/integrations/openai_/api_endpoints/files/list_files.py +0 -27
  274. lionagi/integrations/openai_/api_endpoints/files/retrieve_file.py +0 -9
  275. lionagi/integrations/openai_/api_endpoints/files/upload_file.py +0 -38
  276. lionagi/integrations/openai_/api_endpoints/fine_tuning/__init__.py +0 -37
  277. lionagi/integrations/openai_/api_endpoints/fine_tuning/cancel_jobs.py +0 -9
  278. lionagi/integrations/openai_/api_endpoints/fine_tuning/create_jobs.py +0 -133
  279. lionagi/integrations/openai_/api_endpoints/fine_tuning/fine_tuning_job_checkpoint_models.py +0 -58
  280. lionagi/integrations/openai_/api_endpoints/fine_tuning/fine_tuning_job_event_models.py +0 -31
  281. lionagi/integrations/openai_/api_endpoints/fine_tuning/fine_tuning_job_models.py +0 -140
  282. lionagi/integrations/openai_/api_endpoints/fine_tuning/list_fine_tuning_checkpoints.py +0 -51
  283. lionagi/integrations/openai_/api_endpoints/fine_tuning/list_fine_tuning_events.py +0 -42
  284. lionagi/integrations/openai_/api_endpoints/fine_tuning/list_fine_tuning_jobs.py +0 -31
  285. lionagi/integrations/openai_/api_endpoints/fine_tuning/retrieve_jobs.py +0 -9
  286. lionagi/integrations/openai_/api_endpoints/fine_tuning/training_format.py +0 -30
  287. lionagi/integrations/openai_/api_endpoints/images/__init__.py +0 -9
  288. lionagi/integrations/openai_/api_endpoints/images/image_edit_models.py +0 -69
  289. lionagi/integrations/openai_/api_endpoints/images/image_models.py +0 -56
  290. lionagi/integrations/openai_/api_endpoints/images/image_variation_models.py +0 -56
  291. lionagi/integrations/openai_/api_endpoints/images/response_body.py +0 -30
  292. lionagi/integrations/openai_/api_endpoints/match_data_model.py +0 -197
  293. lionagi/integrations/openai_/api_endpoints/match_response.py +0 -336
  294. lionagi/integrations/openai_/api_endpoints/models/__init__.py +0 -7
  295. lionagi/integrations/openai_/api_endpoints/models/delete_fine_tuned_model.py +0 -17
  296. lionagi/integrations/openai_/api_endpoints/models/models_models.py +0 -31
  297. lionagi/integrations/openai_/api_endpoints/models/retrieve_model.py +0 -9
  298. lionagi/integrations/openai_/api_endpoints/moderations/__init__.py +0 -3
  299. lionagi/integrations/openai_/api_endpoints/moderations/request_body.py +0 -20
  300. lionagi/integrations/openai_/api_endpoints/moderations/response_body.py +0 -139
  301. lionagi/integrations/openai_/api_endpoints/uploads/__init__.py +0 -19
  302. lionagi/integrations/openai_/api_endpoints/uploads/add_upload_part.py +0 -11
  303. lionagi/integrations/openai_/api_endpoints/uploads/cancel_upload.py +0 -7
  304. lionagi/integrations/openai_/api_endpoints/uploads/complete_upload.py +0 -18
  305. lionagi/integrations/openai_/api_endpoints/uploads/create_upload.py +0 -17
  306. lionagi/integrations/openai_/api_endpoints/uploads/uploads_models.py +0 -52
  307. lionagi/integrations/openai_/image_token_calculator/__init__.py +0 -0
  308. lionagi/integrations/openai_/image_token_calculator/image_token_calculator.py +0 -98
  309. lionagi/integrations/openai_/image_token_calculator/openai_image_token_data.yaml +0 -15
  310. lionagi/integrations/openai_/openai_max_output_token_data.yaml +0 -12
  311. lionagi/integrations/openai_/openai_price_data.yaml +0 -26
  312. lionagi/integrations/openai_/version.py +0 -1
  313. lionagi/integrations/pandas_/__init__.py +0 -24
  314. lionagi/integrations/pandas_/extend_df.py +0 -61
  315. lionagi/integrations/pandas_/read.py +0 -103
  316. lionagi/integrations/pandas_/remove_rows.py +0 -61
  317. lionagi/integrations/pandas_/replace_keywords.py +0 -65
  318. lionagi/integrations/pandas_/save.py +0 -131
  319. lionagi/integrations/pandas_/search_keywords.py +0 -69
  320. lionagi/integrations/pandas_/to_df.py +0 -196
  321. lionagi/integrations/pandas_/update_cells.py +0 -54
  322. lionagi/integrations/perplexity_/PerplexityModel.py +0 -274
  323. lionagi/integrations/perplexity_/PerplexityService.py +0 -118
  324. lionagi/integrations/perplexity_/api_endpoints/__init__.py +0 -0
  325. lionagi/integrations/perplexity_/api_endpoints/api_request.py +0 -171
  326. lionagi/integrations/perplexity_/api_endpoints/chat_completions/__init__.py +0 -0
  327. lionagi/integrations/perplexity_/api_endpoints/chat_completions/request/__init__.py +0 -0
  328. lionagi/integrations/perplexity_/api_endpoints/chat_completions/request/request_body.py +0 -121
  329. lionagi/integrations/perplexity_/api_endpoints/chat_completions/response/__init__.py +0 -0
  330. lionagi/integrations/perplexity_/api_endpoints/chat_completions/response/response_body.py +0 -146
  331. lionagi/integrations/perplexity_/api_endpoints/data_models.py +0 -63
  332. lionagi/integrations/perplexity_/api_endpoints/match_response.py +0 -26
  333. lionagi/integrations/perplexity_/perplexity_max_output_token_data.yaml +0 -3
  334. lionagi/integrations/perplexity_/perplexity_price_data.yaml +0 -10
  335. lionagi/integrations/perplexity_/version.py +0 -1
  336. lionagi/integrations/pydantic_/__init__.py +0 -8
  337. lionagi/integrations/pydantic_/break_down_annotation.py +0 -81
  338. lionagi/integrations/pydantic_/new_model.py +0 -208
  339. lionagi/libs/constants.py +0 -98
  340. lionagi/libs/file/path.py +0 -301
  341. lionagi/libs/file/types.py +0 -22
  342. lionagi/libs/func/__init__.py +0 -0
  343. lionagi/libs/func/async_calls/__init__.py +0 -24
  344. lionagi/libs/func/async_calls/alcall.py +0 -210
  345. lionagi/libs/func/async_calls/bcall.py +0 -130
  346. lionagi/libs/func/async_calls/mcall.py +0 -134
  347. lionagi/libs/func/async_calls/pcall.py +0 -149
  348. lionagi/libs/func/async_calls/rcall.py +0 -217
  349. lionagi/libs/func/async_calls/tcall.py +0 -114
  350. lionagi/libs/func/async_calls/ucall.py +0 -85
  351. lionagi/libs/func/decorators.py +0 -277
  352. lionagi/libs/func/lcall.py +0 -57
  353. lionagi/libs/func/params.py +0 -64
  354. lionagi/libs/func/throttle.py +0 -119
  355. lionagi/libs/func/types.py +0 -39
  356. lionagi/libs/func/utils.py +0 -96
  357. lionagi/libs/package/types.py +0 -26
  358. lionagi/libs/parse/__init__.py +0 -1
  359. lionagi/libs/parse/flatten/__init__.py +0 -9
  360. lionagi/libs/parse/flatten/params.py +0 -52
  361. lionagi/libs/parse/json/__init__.py +0 -27
  362. lionagi/libs/parse/json/extract.py +0 -102
  363. lionagi/libs/parse/json/parse.py +0 -179
  364. lionagi/libs/parse/json/to_json.py +0 -71
  365. lionagi/libs/parse/nested/__init__.py +0 -33
  366. lionagi/libs/parse/nested/to_flat_list.py +0 -64
  367. lionagi/libs/parse/params.py +0 -0
  368. lionagi/libs/parse/string_parse/__init__.py +0 -11
  369. lionagi/libs/parse/type_convert/__init__.py +0 -19
  370. lionagi/libs/parse/type_convert/params.py +0 -145
  371. lionagi/libs/parse/type_convert/to_dict.py +0 -333
  372. lionagi/libs/parse/type_convert/to_list.py +0 -186
  373. lionagi/libs/parse/type_convert/to_num.py +0 -358
  374. lionagi/libs/parse/type_convert/to_str.py +0 -195
  375. lionagi/libs/parse/types.py +0 -9
  376. lionagi/libs/parse/validate/__init__.py +0 -14
  377. lionagi/libs/parse/validate/params.py +0 -62
  378. lionagi/libs/parse/xml/__init__.py +0 -10
  379. lionagi/libs/parse/xml/convert.py +0 -56
  380. lionagi/libs/parse/xml/parser.py +0 -93
  381. lionagi/libs/string_similarity/__init__.py +0 -32
  382. lionagi/libs/string_similarity/matcher.py +0 -102
  383. lionagi/libs/string_similarity/utils.py +0 -15
  384. lionagi/libs/utils.py +0 -266
  385. lionagi/protocols/adapters/pandas_adapter.py +0 -96
  386. lionagi/protocols/configs/__init__.py +0 -0
  387. lionagi/protocols/configs/branch_config.py +0 -86
  388. lionagi/protocols/configs/id_config.py +0 -15
  389. lionagi/protocols/configs/imodel_config.py +0 -73
  390. lionagi/protocols/configs/log_config.py +0 -93
  391. lionagi/protocols/configs/retry_config.py +0 -29
  392. lionagi/protocols/configs/types.py +0 -15
  393. lionagi/protocols/operatives/instruct.py +0 -194
  394. lionagi/protocols/operatives/types.py +0 -19
  395. lionagi/protocols/registries/_component_registry.py +0 -23
  396. lionagi/protocols/registries/_pile_registry.py +0 -30
  397. lionagi/service/complete_request_info.py +0 -11
  398. lionagi/service/rate_limiter.py +0 -108
  399. lionagi/service/service.py +0 -41
  400. lionagi/service/service_match_util.py +0 -131
  401. lionagi/service/service_util.py +0 -72
  402. lionagi/service/token_calculator.py +0 -51
  403. lionagi/strategies/__init__.py +0 -0
  404. lionagi/strategies/types.py +0 -21
  405. lionagi-0.5.5.dist-info/RECORD +0 -374
  406. /lionagi/{core → libs/nested}/__init__.py +0 -0
  407. /lionagi/{core/action → libs/schema}/__init__.py +0 -0
  408. /lionagi/{core/communication → libs/validate}/__init__.py +0 -0
  409. /lionagi/{core/forms → operations/strategies}/__init__.py +0 -0
  410. /lionagi/{core/generic → operatives}/__init__.py +0 -0
  411. /lionagi/{core/session → operatives/action}/__init__.py +0 -0
  412. /lionagi/{integrations/anthropic_ → operatives/forms}/__init__.py +0 -0
  413. /lionagi/{core → operatives}/forms/utils.py +0 -0
  414. /lionagi/{integrations/anthropic_/api_endpoints → operatives/instruct}/__init__.py +0 -0
  415. /lionagi/{integrations/anthropic_/api_endpoints/messages → operatives/models}/__init__.py +0 -0
  416. /lionagi/{integrations → protocols/adapters/pandas_}/__init__.py +0 -0
  417. /lionagi/{integrations/anthropic_/api_endpoints/messages/request → protocols/generic}/__init__.py +0 -0
  418. /lionagi/{integrations/groq_ → protocols/graph}/__init__.py +0 -0
  419. /lionagi/{integrations/litellm_ → protocols/mail}/__init__.py +0 -0
  420. /lionagi/{integrations/ollama_ → protocols/messages}/__init__.py +0 -0
  421. /lionagi/{core/communication → protocols/messages}/templates/README.md +0 -0
  422. /lionagi/{core/communication → protocols/messages}/templates/action_request.jinja2 +0 -0
  423. /lionagi/{core/communication → protocols/messages}/templates/action_response.jinja2 +0 -0
  424. /lionagi/{core/communication → protocols/messages}/templates/system_message.jinja2 +0 -0
  425. /lionagi/{core/communication → protocols/messages}/templates/tool_schemas.jinja2 +0 -0
  426. /lionagi/{integrations/ollama_/api_endpoints → service/endpoints}/__init__.py +0 -0
  427. /lionagi/{integrations/ollama_/api_endpoints/chat_completion → service/providers}/__init__.py +0 -0
  428. /lionagi/{integrations/ollama_/api_endpoints/model → service/providers/anthropic_}/__init__.py +0 -0
  429. /lionagi/{integrations/perplexity_ → service/providers/groq_}/__init__.py +0 -0
  430. /lionagi/{protocols/operatives → service/providers/openai_}/__init__.py +0 -0
  431. /lionagi/{protocols/registries → service/providers/openrouter_}/__init__.py +0 -0
  432. {lionagi-0.5.5.dist-info → lionagi-0.6.1.dist-info}/WHEEL +0 -0
  433. {lionagi-0.5.5.dist-info → lionagi-0.6.1.dist-info}/licenses/LICENSE +0 -0
lionagi/utils.py ADDED
@@ -0,0 +1,2386 @@
1
+ # Copyright (c) 2023 - 2024, HaiyangLi <quantocean.li at gmail dot com>
2
+ #
3
+ # SPDX-License-Identifier: Apache-2.0
4
+
5
+ import asyncio
6
+ import contextlib
7
+ import copy as _copy
8
+ import functools
9
+ import json
10
+ import logging
11
+ import re
12
+ import shutil
13
+ import subprocess
14
+ import sys
15
+ import time as t_
16
+ import uuid
17
+ import xml.etree.ElementTree as ET
18
+ from abc import ABC
19
+ from collections.abc import (
20
+ AsyncGenerator,
21
+ Callable,
22
+ Iterable,
23
+ Mapping,
24
+ Sequence,
25
+ )
26
+ from concurrent.futures import ThreadPoolExecutor
27
+ from datetime import datetime, timezone
28
+ from decimal import Decimal
29
+ from enum import Enum
30
+ from functools import lru_cache, partial
31
+ from inspect import isclass
32
+ from pathlib import Path
33
+ from typing import (
34
+ Any,
35
+ Literal,
36
+ Self,
37
+ TypedDict,
38
+ TypeVar,
39
+ get_args,
40
+ get_origin,
41
+ overload,
42
+ )
43
+
44
+ from pydantic import BaseModel, model_validator
45
+ from pydantic_core import PydanticUndefinedType
46
+
47
+ from .settings import Settings
48
+
49
+ R = TypeVar("R")
50
+ T = TypeVar("T")
51
+ B = TypeVar("B", bound=BaseModel)
52
+
53
+ logger = logging.getLogger(__name__)
54
+
55
+
56
+ __all__ = (
57
+ "UndefinedType",
58
+ "KeysDict",
59
+ "HashableModel",
60
+ "Params",
61
+ "DataClass",
62
+ "UNDEFINED",
63
+ "copy",
64
+ "is_same_dtype",
65
+ "get_file_classes",
66
+ "get_class_file_registry",
67
+ "get_class_objects",
68
+ "is_coro_func",
69
+ "custom_error_handler",
70
+ "to_list",
71
+ "lcall",
72
+ "alcall",
73
+ "bcall",
74
+ "create_path",
75
+ "time",
76
+ "fuzzy_parse_json",
77
+ "fix_json_string",
78
+ "ToListParams",
79
+ "LCallParams",
80
+ "ALCallParams",
81
+ "BCallParams",
82
+ "CreatePathParams",
83
+ "get_bins",
84
+ "EventStatus",
85
+ "logger",
86
+ "throttle",
87
+ "max_concurrent",
88
+ "force_async",
89
+ "to_num",
90
+ "breakdown_pydantic_annotation",
91
+ "run_package_manager_command",
92
+ )
93
+
94
+
95
+ # --- General Global Utilities Types ---
96
+
97
+
98
class UndefinedType:
    """Sentinel type marking an absent / undefined value.

    Instances are always falsy, survive ``deepcopy`` as the same object
    (so the module-level ``UNDEFINED`` stays universal), and print as
    ``UNDEFINED``.
    """

    __slots__ = ["undefined"]

    def __init__(self) -> None:
        self.undefined = True

    def __bool__(self) -> Literal[False]:
        # A sentinel must always test false.
        return False

    def __deepcopy__(self, memo):
        # Return self so deepcopy never forks the sentinel into a new object.
        return self

    def __repr__(self) -> Literal["UNDEFINED"]:
        return "UNDEFINED"
113
+
114
+
115
class KeysDict(TypedDict, total=False):
    """TypedDict for keys dictionary."""

    # Placeholder field: concrete key names/types are supplied by callers.
    key: Any  # Represents any key-type pair
119
+
120
+
121
class HashableModel(BaseModel):
    """Pydantic base model that is hashable and dict round-trippable."""

    def to_dict(self, **kwargs) -> dict:
        """Dump the model to a dict, dropping UNDEFINED-valued fields.

        Keyword arguments are forwarded to pydantic ``model_dump``.
        Subclasses may override for specialized serialization.
        """
        dumped = self.model_dump(**kwargs)
        return {
            field: value
            for field, value in dumped.items()
            if value is not UNDEFINED
        }

    @classmethod
    def from_dict(cls, data: dict, **kwargs) -> Self:
        """Build an instance from a dict.

        Keyword arguments are forwarded to pydantic ``model_validate``.
        Subclasses may override for specialized deserialization.
        """
        return cls.model_validate(data, **kwargs)

    def __hash__(self):
        # Hash the UNDEFINED-filtered dump so equal payloads hash equally;
        # hash_dict serializes unhashable values before hashing.
        return hash_dict(self.to_dict())
139
+
140
+
141
def hash_dict(data) -> int:
    """Hash a dict (or BaseModel) by freezing unhashable values first.

    Lists and dicts are canonicalized via sorted-key JSON; other unhashable
    values fall back to ``str``. The frozen (key, value) pairs are hashed
    as a frozenset, so ordering does not matter.
    """
    if isinstance(data, BaseModel):
        data = data.model_dump()

    def _freeze(value):
        # Map each value to a hashable stand-in.
        if isinstance(value, (list, dict)):
            return json.dumps(value, sort_keys=True)
        if isinstance(value, (str, int, float, bool, type(None))):
            return value
        return str(value)

    return hash(frozenset((key, _freeze(val)) for key, val in data.items()))
154
+
155
+
156
class Params(BaseModel):
    """Base class for reusable parameter bundles.

    Subclasses declare fields mirroring a target function's keyword
    arguments and implement ``__call__`` to forward them.
    """

    def keys(self):
        """Return the declared field names.

        ``model_fields`` is read from the class rather than the instance:
        instance-level access is deprecated in Pydantic 2.11+.
        """
        return type(self).model_fields.keys()

    def __call__(self, *args, **kwargs):
        raise NotImplementedError(
            "This method should be implemented in a subclass"
        )
165
+
166
+
167
class DataClass(ABC):
    """Abstract marker base for plain data containers (defines no behavior)."""

    pass
169
+
170
+
171
+ # --- Create a global UNDEFINED object ---
172
+ UNDEFINED = UndefinedType()
173
+
174
+
175
+ # --- General Global Utilities Functions ---
176
+
177
+
178
+ def time(
179
+ *,
180
+ tz: timezone = Settings.Config.TIMEZONE,
181
+ type_: Literal["timestamp", "datetime", "iso", "custom"] = "timestamp",
182
+ sep: str | None = "T",
183
+ timespec: str | None = "auto",
184
+ custom_format: str | None = None,
185
+ custom_sep: str | None = None,
186
+ ) -> float | str | datetime:
187
+ """
188
+ Get current time in various formats.
189
+
190
+ Args:
191
+ tz: Timezone for the time (default: utc).
192
+ type_: Type of time to return (default: "timestamp").
193
+ Options: "timestamp", "datetime", "iso", "custom".
194
+ sep: Separator for ISO format (default: "T").
195
+ timespec: Timespec for ISO format (default: "auto").
196
+ custom_format: Custom strftime format string for
197
+ type_="custom".
198
+ custom_sep: Custom separator for type_="custom",
199
+ replaces "-", ":", ".".
200
+
201
+ Returns:
202
+ Current time in the specified format.
203
+
204
+ Raises:
205
+ ValueError: If an invalid type_ is provided or if custom_format
206
+ is not provided when type_="custom".
207
+ """
208
+ now = datetime.now(tz=tz)
209
+
210
+ if type_ == "iso":
211
+ return now.isoformat(sep=sep, timespec=timespec)
212
+ elif type_ == "timestamp":
213
+ return now.timestamp()
214
+ elif type_ == "datetime":
215
+ return now
216
+ elif type_ == "custom":
217
+ if not custom_format:
218
+ raise ValueError(
219
+ "custom_format must be provided when type_='custom'"
220
+ )
221
+ formatted_time = now.strftime(custom_format)
222
+ if custom_sep is not None:
223
+ for old_sep in ("-", ":", "."):
224
+ formatted_time = formatted_time.replace(old_sep, custom_sep)
225
+ return formatted_time
226
+
227
+ raise ValueError(
228
+ f"Invalid value <{type_}> for `type_`, must be"
229
+ " one of 'timestamp', 'datetime', 'iso', or 'custom'."
230
+ )
231
+
232
+
233
def copy(obj: T, /, *, deep: bool = True, num: int = 1) -> T | list[T]:
    """Copy ``obj`` one or more times.

    Args:
        obj: The object to duplicate.
        deep: Use ``copy.deepcopy`` when True, shallow ``copy.copy`` otherwise.
        num: Number of independent copies to produce (must be >= 1).

    Returns:
        A single copy when ``num == 1``, otherwise a list of ``num`` copies.

    Raises:
        ValueError: If ``num`` is less than 1.
    """
    if num < 1:
        raise ValueError("Number of copies must be at least 1")

    duplicate = _copy.deepcopy if deep else _copy.copy
    if num == 1:
        return duplicate(obj)
    return [duplicate(obj) for _ in range(num)]
239
+
240
+
241
def is_same_dtype(
    input_: list[T] | dict[Any, T],
    dtype: type | None = None,
    return_dtype: bool = False,
) -> bool | tuple[bool, type | None]:
    """Check whether all elements (or dict values) share one type.

    Args:
        input_: A list of items, or a dict whose *values* are checked.
        dtype: Expected type; inferred from the first element when None.
        return_dtype: If True, return ``(result, dtype)`` instead of
            just ``result``.

    Returns:
        Whether every element is an instance of ``dtype`` (optionally
        paired with the dtype used). Empty input is trivially True with
        dtype None.
    """
    if not input_:
        # If empty, trivially true. dtype is None since no elements exist.
        return (True, None) if return_dtype else True

    # Dicts are compared on their values; anything else on its elements.
    values = input_.values() if isinstance(input_, Mapping) else input_
    iterator = iter(values)
    first_val = next(iterator)

    if dtype is None:
        # Infer from the first element. Using type(first_val) even when it
        # is None (-> NoneType) fixes the TypeError that isinstance(x, None)
        # previously raised on inputs like [None, None].
        dtype = type(first_val)

    result = isinstance(first_val, dtype) and all(
        isinstance(v, dtype) for v in iterator
    )

    return (result, dtype) if return_dtype else result
269
+
270
+
271
@functools.cache
def is_coro_func(func: Callable[..., Any]) -> bool:
    """Cached check for whether ``func`` is a coroutine (async) function."""
    return asyncio.iscoroutinefunction(func)
274
+
275
+
276
async def custom_error_handler(
    error: Exception, error_map: dict[type, Callable[[Exception], None]]
) -> None:
    """Dispatch ``error`` to the handler registered for its exact type.

    The handler may be sync or async; its return value is propagated.
    Lookup is by exact type (no subclass matching). When no handler is
    registered, the error is logged and re-raised.

    Args:
        error: The exception instance to handle.
        error_map: Maps exception types to handler callables taking the
            exception as their only argument.

    Raises:
        The original ``error`` when its type has no registered handler.
    """
    # Sentinel-based get(): one lookup instead of three, and a mapping whose
    # value is literally None is still treated as registered.
    _missing = object()
    handler = error_map.get(type(error), _missing)
    if handler is not _missing:
        if is_coro_func(handler):
            return await handler(error)
        return handler(error)
    # Use the module logger with lazy %-formatting (was root logging.error).
    logger.error("Unhandled error: %s", error)
    raise error
285
+
286
+
287
# Typing overloads for to_list; the runtime implementation follows below.


@overload
def to_list(
    input_: None | UndefinedType | PydanticUndefinedType,
    /,
) -> list: ...


@overload
def to_list(
    input_: str | bytes | bytearray,
    /,
    use_values: bool = False,
) -> list[str | bytes | bytearray]: ...


@overload
def to_list(
    input_: Mapping,
    /,
    use_values: bool = False,
) -> list[Any]: ...


@overload
def to_list(
    input_: Any,
    /,
    *,
    flatten: bool = False,
    dropna: bool = False,
    unique: bool = False,
    use_values: bool = False,
    flatten_tuple_set: bool = False,
) -> list: ...
321
+
322
+
323
def to_list(
    input_: Any,
    /,
    *,
    flatten: bool = False,
    dropna: bool = False,
    unique: bool = False,
    use_values: bool = False,
    flatten_tuple_set: bool = False,
) -> list:
    """Convert input to a list with optional transformations.

    Transforms various input types into a list with configurable processing
    options for flattening, filtering, and value extraction.

    Args:
        input_: Value to convert to list.
        flatten: If True, recursively flatten nested iterables.
        dropna: If True, remove None and undefined values.
        unique: If True, remove duplicates (requires flatten=True).
        use_values: If True, extract values from enums/mappings.
        flatten_tuple_set: If True, tuples, sets and frozensets are also
            flattened (by default they are kept as single items).

    Returns:
        list: Processed list based on input and specified options.

    Raises:
        ValueError: If unique=True is used without flatten=True, or if an
            unhashable item is met while deduplicating.

    Examples:
        >>> to_list([1, [2, 3], 4], flatten=True)
        [1, 2, 3, 4]
        >>> to_list([1, None, 2], dropna=True)
        [1, 2]
    """

    def _process_list(
        lst: list[Any],
        flatten: bool,
        dropna: bool,
    ) -> list[Any]:
        """Apply flatten/dropna to a list, recursing into nested iterables."""
        result = []
        # These container types are treated as scalar items, never flattened.
        skip_types = (str, bytes, bytearray, Mapping, BaseModel, Enum)

        if not flatten_tuple_set:
            skip_types += (tuple, set, frozenset)

        for item in lst:
            if dropna and (
                item is None
                or isinstance(item, (UndefinedType, PydanticUndefinedType))
            ):
                continue

            is_iterable = isinstance(item, Iterable)
            should_skip = isinstance(item, skip_types)

            if is_iterable and not should_skip:
                item_list = list(item)
                if flatten:
                    result.extend(
                        _process_list(
                            item_list, flatten=flatten, dropna=dropna
                        )
                    )
                else:
                    # Keep nesting, but still recurse so dropna applies.
                    result.append(
                        _process_list(
                            item_list, flatten=flatten, dropna=dropna
                        )
                    )
            else:
                result.append(item)

        return result

    def _to_list_type(input_: Any, use_values: bool) -> list[Any]:
        """Produce the initial list for ``input_`` based on its type."""
        if input_ is None or isinstance(
            input_, (UndefinedType, PydanticUndefinedType)
        ):
            return []

        if isinstance(input_, list):
            return input_

        if isinstance(input_, type) and issubclass(input_, Enum):
            members = input_.__members__.values()
            return (
                [member.value for member in members]
                if use_values
                else list(members)
            )

        if isinstance(input_, (str, bytes, bytearray)):
            return list(input_) if use_values else [input_]

        if isinstance(input_, Mapping):
            return (
                list(input_.values())
                if use_values and hasattr(input_, "values")
                else [input_]
            )

        if isinstance(input_, BaseModel):
            return [input_]

        if isinstance(input_, Iterable) and not isinstance(
            input_, (str, bytes, bytearray)
        ):
            return list(input_)

        return [input_]

    if unique and not flatten:
        raise ValueError("unique=True requires flatten=True")

    initial_list = _to_list_type(input_, use_values=use_values)
    processed = _process_list(initial_list, flatten=flatten, dropna=dropna)

    if unique:
        seen = set()
        out = []
        try:
            # Fast path: valid only while every element is hashable.
            return [x for x in processed if not (x in seen or seen.add(x))]
        except TypeError:
            # Fallback for unhashable elements. Reset the bookkeeping: the
            # fast path stored *raw values* in `seen`, while this path
            # stores *hashes* — mixing the two silently dropped items whose
            # hash equals an already-seen value (e.g. small ints).
            seen = set()
            for i in processed:
                hash_value = None
                try:
                    hash_value = hash(i)
                except TypeError:
                    if isinstance(i, (BaseModel, Mapping)):
                        hash_value = hash_dict(i)
                    else:
                        raise ValueError(
                            "Unhashable type encountered in list unique value processing."
                        )
                if hash_value not in seen:
                    seen.add(hash_value)
                    out.append(i)
            return out

    return processed
485
+
486
+
487
class ToListParams(Params):
    """Reusable parameter bundle for ``to_list``."""

    # Flags mirror the keyword arguments of to_list (see its docstring).
    flatten: bool = False
    dropna: bool = False
    unique: bool = False
    use_values: bool = False
    flatten_tuple_set: bool = False

    def __call__(self, input_: Any):
        # Apply to_list to `input_` using the stored options.
        return to_list(
            input_,
            flatten=self.flatten,
            dropna=self.dropna,
            unique=self.unique,
            use_values=self.use_values,
            flatten_tuple_set=self.flatten_tuple_set,
        )
503
+
504
+
505
def lcall(
    input_: Iterable[T] | T,
    func: Callable[[T], R] | Iterable[Callable[[T], R]],
    /,
    *args: Any,
    sanitize_input: bool = False,
    unique_input: bool = False,
    flatten: bool = False,
    dropna: bool = False,
    unique_output: bool = False,
    flatten_tuple_set: bool = False,
    use_input_values: bool = False,
    **kwargs: Any,
) -> list[R]:
    """Apply function to each element in input list with optional processing.

    Maps a function over input elements and processes results. Can sanitize
    input and output using various filtering options.

    Args:
        input_: Single item or iterable to process.
        func: Function to apply or single-item iterable containing function.
        *args: Additional positional arguments passed to func.
        sanitize_input: If True, sanitize input using to_list.
        unique_input: If True with sanitize_input, remove input duplicates.
        flatten: If True, flatten output nested structures.
        dropna: If True, remove None values from output.
        unique_output: If True with flatten/dropna, remove output duplicates.
        flatten_tuple_set: If True, flatten tuples and sets.
        use_input_values: If True with sanitize_input, extract values from
            enums/mappings in the input (forwarded to to_list).
        **kwargs: Additional keyword arguments passed to func.

    Returns:
        list: Results of applying func to each input element.

    Raises:
        ValueError: If func is not callable or unique_output used incorrectly.
        TypeError: If func or input processing fails.

    Examples:
        >>> lcall([1, 2, 3], str)
        ['1', '2', '3']
        >>> lcall([1, [2, 3]], str, flatten=True)
        ['1', '2', '3']
    """
    # Validate and extract callable function
    if not callable(func):
        try:
            func_list = list(func)
            if len(func_list) != 1 or not callable(func_list[0]):
                raise ValueError(
                    "func must contain exactly one callable function."
                )
            func = func_list[0]
        except TypeError as e:
            raise ValueError(
                "func must be callable or iterable with one callable."
            ) from e

    # Process input based on sanitization flag
    if sanitize_input:
        input_ = to_list(
            input_,
            flatten=True,
            dropna=True,
            unique=unique_input,
            flatten_tuple_set=flatten_tuple_set,
            use_values=use_input_values,
        )
    else:
        if not isinstance(input_, list):
            try:
                input_ = list(input_)
            except TypeError:
                # Not iterable: wrap the scalar.
                input_ = [input_]

    # Validate output processing options
    if unique_output and not (flatten or dropna):
        raise ValueError(
            "unique_output requires flatten or dropna for post-processing."
        )

    # Process elements and collect results
    out = []
    append = out.append

    for item in input_:
        try:
            result = func(item, *args, **kwargs)
            append(result)
        except InterruptedError:
            # Deliberate early exit: return what has been produced so far.
            return out
        # (Removed a dead `except Exception: raise` — re-raising unchanged
        # is already the default behavior.)

    # Apply output processing if requested
    if flatten or dropna:
        out = to_list(
            out,
            flatten=flatten,
            dropna=dropna,
            unique=unique_output,
            flatten_tuple_set=flatten_tuple_set,
        )

    return out
610
+
611
+
612
class CallParams(Params):
    """Parameter bundle for higher-order call helpers.

    Declared fields capture the wrapper's own options; any *extra* keyword
    arguments passed at construction are collected into ``kwargs`` and
    forwarded to the wrapped function. Positional arguments for the wrapped
    function must be supplied as ``args=[...]``.
    """

    args: list = []
    kwargs: dict = {}

    @model_validator(mode="before")
    def _validate_data(cls, data: dict):
        """Split incoming data into declared fields vs. pass-through kwargs."""
        _d = {}
        for k in list(data.keys()):
            # Membership test against the declared fields of the class.
            # (The previous `cls.keys()` invoked an instance method on the
            # class object, raising TypeError for a missing `self`.)
            if k in cls.model_fields:
                _d[k] = data.pop(k)
        _d.setdefault("args", [])
        _d.setdefault("kwargs", {})
        # Whatever remains is destined for the wrapped function.
        _d["kwargs"].update(data)
        return _d

    def __call__(self, *args, **kwargs):
        raise NotImplementedError(
            "This method should be implemented in a subclass"
        )
633
+
634
+
635
class LCallParams(CallParams):
    """Reusable parameter bundle for ``lcall`` (see lcall for semantics)."""

    # The sync callable to apply; may instead be supplied at call time.
    func: Any = None
    sanitize_input: bool = False
    unique_input: bool = False
    flatten: bool = False
    dropna: bool = False
    unique_output: bool = False
    flatten_tuple_set: bool = False

    def __call__(self, input_: Any, func=None):
        # A callable must come from either the field or the call argument;
        # the call-time argument takes precedence.
        if self.func is None and func is None:
            raise ValueError("a sync func must be provided")
        return lcall(
            input_,
            func or self.func,
            *self.args,
            sanitize_input=self.sanitize_input,
            unique_input=self.unique_input,
            flatten=self.flatten,
            dropna=self.dropna,
            unique_output=self.unique_output,
            flatten_tuple_set=self.flatten_tuple_set,
            **self.kwargs,
        )
659
+
660
+
661
async def alcall(
    input_: list[Any],
    func: Callable[..., T],
    /,
    *,
    sanitize_input: bool = False,
    unique_input: bool = False,
    num_retries: int = 0,
    initial_delay: float = 0,
    retry_delay: float = 0,
    backoff_factor: float = 1,
    retry_default: Any = UNDEFINED,
    retry_timeout: float | None = None,
    retry_timing: bool = False,
    max_concurrent: int | None = None,
    throttle_period: float | None = None,
    flatten: bool = False,
    dropna: bool = False,
    unique_output: bool = False,
    flatten_tuple_set: bool = False,
    **kwargs: Any,
) -> list[T] | list[tuple[T, float]]:
    """
    Asynchronously apply a function to each element of a list, with optional input sanitization,
    retries, timeout, and output processing. Results are returned in input order.

    Args:
        input_ (list[Any]): The list of inputs to process.
        func (Callable[..., T]): The function to apply (async or sync).
        sanitize_input (bool): If True, input is flattened, dropna applied, and made unique if unique_input.
        unique_input (bool): If True and sanitize_input is True, input is made unique.
        num_retries (int): Number of retry attempts on exception.
        initial_delay (float): Initial delay before starting executions.
        retry_delay (float): Delay between retries.
        backoff_factor (float): Multiplier for delay after each retry.
        retry_default (Any): Default value if all retries fail.
        retry_timeout (float | None): Timeout for each function call.
        retry_timing (bool): If True, return (result, duration) tuples.
        max_concurrent (int | None): Maximum number of concurrent operations.
        throttle_period (float | None): Delay after each completed operation.
        flatten (bool): Flatten the final result if True.
        dropna (bool): Remove None values from the final result if True.
        unique_output (bool): Deduplicate the output if True.
        flatten_tuple_set (bool): If True, tuples/sets are also flattened in the output.
        **kwargs: Additional arguments passed to func.

    Returns:
        list[T] or list[tuple[T, float]]: The processed results, or results with timing if retry_timing is True.

    Raises:
        asyncio.TimeoutError: If a call times out and no default is provided.
        Exception: If retries are exhausted and no default is provided.
    """

    # Validate func is a single callable
    if not callable(func):
        # If func is not callable, maybe it's an iterable. Extract one callable if possible.
        try:
            func_list = list(func)  # Convert iterable to list
        except TypeError:
            raise ValueError(
                "func must be callable or an iterable containing one callable."
            )

        # Ensure exactly one callable is present
        if len(func_list) != 1 or not callable(func_list[0]):
            raise ValueError("Only one callable function is allowed.")

        func = func_list[0]

    # Process input if requested
    if sanitize_input:
        input_ = to_list(
            input_,
            flatten=True,
            dropna=True,
            unique=unique_input,
            flatten_tuple_set=flatten_tuple_set,
        )
    else:
        if not isinstance(input_, list):
            # Attempt to iterate
            if isinstance(input_, BaseModel):
                # Pydantic model, convert to list
                input_ = [input_]
            else:
                try:
                    iter(input_)
                    # It's iterable (tuple), convert to list of its contents
                    input_ = list(input_)
                except TypeError:
                    # Not iterable, just wrap in a list
                    input_ = [input_]

    # Optional initial delay before processing
    if initial_delay:
        await asyncio.sleep(initial_delay)

    # Concurrency limit and per-completion throttle (0 disables throttling).
    semaphore = asyncio.Semaphore(max_concurrent) if max_concurrent else None
    throttle_delay = throttle_period or 0
    coro_func = is_coro_func(func)

    async def call_func(item: Any) -> T:
        # Invoke func once for `item`, honoring retry_timeout when set.
        if coro_func:
            # Async function
            if retry_timeout is not None:
                return await asyncio.wait_for(
                    func(item, **kwargs), timeout=retry_timeout
                )
            else:
                return await func(item, **kwargs)
        else:
            # Sync function
            if retry_timeout is not None:
                # Run in a thread so wait_for can actually cancel on timeout.
                return await asyncio.wait_for(
                    asyncio.to_thread(func, item, **kwargs),
                    timeout=retry_timeout,
                )
            else:
                # NOTE(review): without a timeout, a sync func runs directly
                # on the event loop and blocks it — confirm this is intended.
                return func(item, **kwargs)

    async def execute_task(i: Any, index: int) -> Any:
        # Run one item with retry/backoff. Returns (index, result) or
        # (index, result, duration) when retry_timing is on; the index is
        # used later to restore input order.
        start_time = asyncio.get_running_loop().time()
        attempts = 0
        current_delay = retry_delay
        while True:
            try:
                result = await call_func(i)
                if retry_timing:
                    end_time = asyncio.get_running_loop().time()
                    return index, result, end_time - start_time
                else:
                    return index, result
            except asyncio.CancelledError as e:
                # Never swallow cancellation.
                raise e

            except Exception:
                attempts += 1
                if attempts <= num_retries:
                    if current_delay:
                        await asyncio.sleep(current_delay)
                        current_delay *= backoff_factor
                    # Retry loop continues
                else:
                    # Exhausted retries
                    if retry_default is not UNDEFINED:
                        # Return default if provided
                        if retry_timing:
                            end_time = asyncio.get_running_loop().time()
                            duration = end_time - (start_time or end_time)
                            return index, retry_default, duration
                        else:
                            return index, retry_default
                    # No default, re-raise
                    raise

    async def task_wrapper(item: Any, idx: int) -> Any:
        # Apply the concurrency limit, if one was configured.
        if semaphore:
            async with semaphore:
                return await execute_task(item, idx)
        else:
            return await execute_task(item, idx)

    # Create tasks
    tasks = [task_wrapper(item, idx) for idx, item in enumerate(input_)]

    # Collect results as they complete
    results = []
    for coro in asyncio.as_completed(tasks):
        res = await coro
        results.append(res)
        if throttle_delay:
            await asyncio.sleep(throttle_delay)

    # Sort by original index
    results.sort(key=lambda x: x[0])

    if retry_timing:
        # (index, result, duration) — only dropna applies on this path;
        # flatten/unique_output are not applied to timed results.
        filtered = [
            (r[1], r[2]) for r in results if not dropna or r[1] is not None
        ]
        return filtered
    else:
        # (index, result)
        output_list = [r[1] for r in results]
        return to_list(
            output_list,
            flatten=flatten,
            dropna=dropna,
            unique=unique_output,
            flatten_tuple_set=flatten_tuple_set,
        )
853
+
854
+
855
class ALCallParams(CallParams):
    """Reusable parameter bundle for ``alcall`` (see alcall for semantics)."""

    # Async or sync callable; may instead be supplied at call time.
    func: Any = None
    sanitize_input: bool = False
    unique_input: bool = False
    num_retries: int = 0
    initial_delay: float = 0
    retry_delay: float = 0
    backoff_factor: float = 1
    retry_default: Any = UNDEFINED
    retry_timeout: float | None = None
    retry_timing: bool = False
    max_concurrent: int | None = None
    throttle_period: float | None = None
    flatten: bool = False
    dropna: bool = False
    unique_output: bool = False
    flatten_tuple_set: bool = False

    async def __call__(self, input_: Any, func=None):
        # A callable must come from either the field or the call argument;
        # the call-time argument takes precedence.
        if self.func is None and func is None:
            raise ValueError("a sync/async func must be provided")
        return await alcall(
            input_,
            func or self.func,
            *self.args,
            sanitize_input=self.sanitize_input,
            unique_input=self.unique_input,
            num_retries=self.num_retries,
            initial_delay=self.initial_delay,
            retry_delay=self.retry_delay,
            backoff_factor=self.backoff_factor,
            retry_default=self.retry_default,
            retry_timeout=self.retry_timeout,
            retry_timing=self.retry_timing,
            max_concurrent=self.max_concurrent,
            throttle_period=self.throttle_period,
            flatten=self.flatten,
            dropna=self.dropna,
            unique_output=self.unique_output,
            flatten_tuple_set=self.flatten_tuple_set,
            **self.kwargs,
        )
897
+
898
+
899
async def bcall(
    input_: Any,
    func: Callable[..., T],
    /,
    batch_size: int,
    *,
    sanitize_input: bool = False,
    unique_input: bool = False,
    num_retries: int = 0,
    initial_delay: float = 0,
    retry_delay: float = 0,
    backoff_factor: float = 1,
    retry_default: Any = UNDEFINED,
    retry_timeout: float | None = None,
    retry_timing: bool = False,
    max_concurrent: int | None = None,
    throttle_period: float | None = None,
    flatten: bool = False,
    dropna: bool = False,
    unique_output: bool = False,
    flatten_tuple_set: bool = False,
    **kwargs: Any,
) -> AsyncGenerator[list[T | tuple[T, float]], None]:
    """Process ``input_`` in batches of ``batch_size`` via ``alcall``.

    Async generator: yields each batch's result list as it completes.
    All other keyword arguments are forwarded to ``alcall`` unchanged
    (see alcall for their semantics).
    """
    # NOTE(review): the input is always flattened with dropna here,
    # regardless of sanitize_input — alcall may then sanitize each batch
    # again when sanitize_input=True. Confirm the double pass is intended.
    input_ = to_list(input_, flatten=True, dropna=True)

    # Slice the flattened input into consecutive batches.
    for i in range(0, len(input_), batch_size):
        batch = input_[i : i + batch_size]  # noqa: E203
        yield await alcall(
            batch,
            func,
            sanitize_input=sanitize_input,
            unique_input=unique_input,
            num_retries=num_retries,
            initial_delay=initial_delay,
            retry_delay=retry_delay,
            backoff_factor=backoff_factor,
            retry_default=retry_default,
            retry_timeout=retry_timeout,
            retry_timing=retry_timing,
            max_concurrent=max_concurrent,
            throttle_period=throttle_period,
            flatten=flatten,
            dropna=dropna,
            unique_output=unique_output,
            flatten_tuple_set=flatten_tuple_set,
            **kwargs,
        )
947
+
948
+
949
class BCallParams(CallParams):
    """Reusable parameter bundle for ``bcall`` (see bcall for semantics)."""

    # Async or sync callable; may instead be supplied at call time.
    func: Any = None
    # Required: number of items processed per yielded batch.
    batch_size: int
    sanitize_input: bool = False
    unique_input: bool = False
    num_retries: int = 0
    initial_delay: float = 0
    retry_delay: float = 0
    backoff_factor: float = 1
    retry_default: Any = UNDEFINED
    retry_timeout: float | None = None
    retry_timing: bool = False
    max_concurrent: int | None = None
    throttle_period: float | None = None
    flatten: bool = False
    dropna: bool = False
    unique_output: bool = False
    flatten_tuple_set: bool = False

    async def __call__(self, input_, func=None):
        # A callable must come from either the field or the call argument;
        # the call-time argument takes precedence.
        if self.func is None and func is None:
            raise ValueError("a sync/async func must be provided")
        return await bcall(
            input_,
            func or self.func,
            *self.args,
            batch_size=self.batch_size,
            sanitize_input=self.sanitize_input,
            unique_input=self.unique_input,
            num_retries=self.num_retries,
            initial_delay=self.initial_delay,
            retry_delay=self.retry_delay,
            backoff_factor=self.backoff_factor,
            retry_default=self.retry_default,
            retry_timeout=self.retry_timeout,
            retry_timing=self.retry_timing,
            max_concurrent=self.max_concurrent,
            throttle_period=self.throttle_period,
            flatten=self.flatten,
            dropna=self.dropna,
            unique_output=self.unique_output,
            flatten_tuple_set=self.flatten_tuple_set,
            **self.kwargs,
        )
993
+
994
+
995
def create_path(
    directory: Path | str,
    filename: str,
    extension: str = None,
    timestamp: bool = False,
    dir_exist_ok: bool = True,
    file_exist_ok: bool = False,
    time_prefix: bool = False,
    timestamp_format: str | None = None,
    random_hash_digits: int = 0,
) -> Path:
    """
    Generate a new file path with optional timestamp and a random suffix.

    Args:
        directory: The directory where the file will be created.
        filename: The base name of the file to create.
        extension: The file extension, if not part of filename.
        timestamp: If True, add a timestamp to the filename.
        dir_exist_ok: If True, don't error if directory exists.
        file_exist_ok: If True, allow overwriting existing files.
        time_prefix: If True, timestamp is prefixed instead of suffixed.
        timestamp_format: Custom format for timestamp (default: "%Y%m%d%H%M%S").
        random_hash_digits: Number of hex digits for a random suffix.

    Returns:
        The full Path to the new or existing file. Parent directories are
        created; the file itself is not.

    Raises:
        ValueError: If no extension can be determined or filename contains
            directory separators.
        FileExistsError: If file exists and file_exist_ok=False.
    """
    if "/" in filename or "\\" in filename:
        raise ValueError("Filename cannot contain directory separators.")

    directory = Path(directory)

    # Prefer an extension embedded in the filename; otherwise fall back to
    # the `extension` argument. The `or` fallback also covers a trailing-dot
    # filename like "name." (which previously raised even when `extension`
    # was supplied).
    if "." in filename:
        name, ext = filename.rsplit(".", 1)
    else:
        name, ext = filename, extension
    ext = ext or extension

    if not ext:
        raise ValueError("No extension provided for filename.")

    # Normalize to a single leading dot.
    ext = f".{ext.lstrip('.')}"

    # Add timestamp if requested
    if timestamp:
        ts_str = datetime.now().strftime(timestamp_format or "%Y%m%d%H%M%S")
        name = f"{ts_str}_{name}" if time_prefix else f"{name}_{ts_str}"

    # Add random suffix if requested
    if random_hash_digits > 0:
        # Use UUID4 and truncate its hex for the random suffix.
        random_suffix = uuid.uuid4().hex[:random_hash_digits]
        name = f"{name}-{random_suffix}"

    full_path = directory / f"{name}{ext}"

    # Create parent directories, then enforce the file-existence policy.
    full_path.parent.mkdir(parents=True, exist_ok=dir_exist_ok)
    if full_path.exists() and not file_exist_ok:
        raise FileExistsError(
            f"File {full_path} already exists and file_exist_ok is False."
        )

    return full_path
1065
+
1066
+
1067
class CreatePathParams(Params):
    """Reusable parameter bundle for `create_path`.

    Stores the defaults once; calling the instance forwards them to
    `create_path`, optionally overriding `directory` and/or `filename`.
    """

    directory: Path | str
    filename: str
    # FIX: default is None, so the annotation must be Optional — a bare
    # `str` annotation with a None default fails strict validation.
    extension: str | None = None
    timestamp: bool = False
    dir_exist_ok: bool = True
    file_exist_ok: bool = False
    time_prefix: bool = False
    timestamp_format: str | None = None
    random_hash_digits: int = 0

    def __call__(
        self, directory: Path | str = None, filename: str = None
    ) -> Path:
        """Build the path; `directory`/`filename` override the stored values."""
        return create_path(
            directory or self.directory,
            filename or self.filename,
            extension=self.extension,
            timestamp=self.timestamp,
            dir_exist_ok=self.dir_exist_ok,
            file_exist_ok=self.file_exist_ok,
            time_prefix=self.time_prefix,
            timestamp_format=self.timestamp_format,
            random_hash_digits=self.random_hash_digits,
        )
1092
+
1093
+
1094
+ # --- JSON and XML Conversion ---
1095
+
1096
+
1097
+ def to_xml(
1098
+ obj: dict | list | str | int | float | bool | None,
1099
+ root_name: str = "root",
1100
+ ) -> str:
1101
+ """
1102
+ Convert a dictionary into an XML formatted string.
1103
+
1104
+ Rules:
1105
+ - A dictionary key becomes an XML tag.
1106
+ - If the dictionary value is:
1107
+ - A primitive type (str, int, float, bool, None): it becomes the text content of the tag.
1108
+ - A list: each element of the list will repeat the same tag.
1109
+ - Another dictionary: it is recursively converted to nested XML.
1110
+ - root_name sets the top-level XML element name.
1111
+
1112
+ Args:
1113
+ obj: The Python object to convert (typically a dictionary).
1114
+ root_name: The name of the root XML element.
1115
+
1116
+ Returns:
1117
+ A string representing the XML.
1118
+
1119
+ Examples:
1120
+ >>> to_xml({"a": 1, "b": {"c": "hello", "d": [10, 20]}}, root_name="data")
1121
+ '<data><a>1</a><b><c>hello</c><d>10</d><d>20</d></b></data>'
1122
+ """
1123
+
1124
+ def _convert(value: Any, tag_name: str) -> str:
1125
+ # If value is a dict, recursively convert its keys
1126
+ if isinstance(value, dict):
1127
+ inner = "".join(_convert(v, k) for k, v in value.items())
1128
+ return f"<{tag_name}>{inner}</{tag_name}>"
1129
+ # If value is a list, repeat the same tag for each element
1130
+ elif isinstance(value, list):
1131
+ return "".join(_convert(item, tag_name) for item in value)
1132
+ # If value is a primitive, convert to string and place inside tag
1133
+ else:
1134
+ text = "" if value is None else str(value)
1135
+ # Escape special XML characters if needed (minimal)
1136
+ text = (
1137
+ text.replace("&", "&amp;")
1138
+ .replace("<", "&lt;")
1139
+ .replace(">", "&gt;")
1140
+ .replace('"', "&quot;")
1141
+ .replace("'", "&apos;")
1142
+ )
1143
+ return f"<{tag_name}>{text}</{tag_name}>"
1144
+
1145
+ # If top-level obj is not a dict, wrap it in one
1146
+ if not isinstance(obj, dict):
1147
+ obj = {root_name: obj}
1148
+
1149
+ inner_xml = "".join(_convert(v, k) for k, v in obj.items())
1150
+ return f"<{root_name}>{inner_xml}</{root_name}>"
1151
+
1152
+
1153
def fuzzy_parse_json(
    str_to_parse: str, /
) -> dict[str, Any] | list[dict[str, Any]]:
    """Parse a JSON string, applying progressively aggressive repairs.

    Strategy (first success wins):
      1. plain ``json.loads`` on the raw input;
      2. quote/key normalization via ``_clean_json_string`` and retry;
      3. bracket repair via ``fix_json_string`` and retry.

    Args:
        str_to_parse: Candidate JSON text.

    Returns:
        The parsed value (dict or list of dicts).

    Raises:
        TypeError: If the input is not a string.
        ValueError: If the input is blank or no repair yields valid JSON.
    """
    _check_valid_str(str_to_parse)

    normalized = _clean_json_string(str_to_parse.replace("'", '"'))

    for candidate in (str_to_parse, normalized):
        try:
            return json.loads(candidate)
        except Exception:
            continue

    # Bracket repair may itself raise ValueError (extra/mismatched
    # brackets); that error is allowed to propagate, as before.
    repaired = fix_json_string(normalized)
    try:
        return json.loads(repaired)
    except Exception:
        raise ValueError("Invalid JSON string") from None
1193
+
1194
+
1195
+ def _check_valid_str(str_to_parse: str, /):
1196
+ if not isinstance(str_to_parse, str):
1197
+ raise TypeError("Input must be a string")
1198
+ if not str_to_parse.strip():
1199
+ raise ValueError("Input string is empty")
1200
+
1201
+
1202
+ def _clean_json_string(s: str) -> str:
1203
+ """Basic normalization: replace unescaped single quotes, trim spaces, ensure keys are quoted."""
1204
+ # Replace unescaped single quotes with double quotes
1205
+ # '(?<!\\)'" means a single quote not preceded by a backslash
1206
+ s = re.sub(r"(?<!\\)'", '"', s)
1207
+ # Collapse multiple whitespaces
1208
+ s = re.sub(r"\s+", " ", s)
1209
+ # Ensure keys are quoted
1210
+ # This attempts to find patterns like { key: value } and turn them into {"key": value}
1211
+ s = re.sub(r'([{,])\s*([^"\s]+)\s*:', r'\1"\2":', s)
1212
+ return s.strip()
1213
+
1214
+
1215
def fix_json_string(str_to_parse: str, /) -> str:
    """Append any missing closing brackets to a truncated JSON string.

    Scans the text outside string literals (honoring backslash escapes) and
    tracks bracket nesting; unclosed openers are closed at the end in
    reverse order.

    Raises:
        ValueError: On empty input, an extra closing bracket, or a
            mismatched bracket pair.
    """
    if not str_to_parse:
        raise ValueError("Input string is empty")

    openers = {"{": "}", "[": "]"}
    closers = {"}", "]"}
    expected: list[str] = []  # stack of closers we still owe
    i, n = 0, len(str_to_parse)

    while i < n:
        ch = str_to_parse[i]

        if ch == "\\":
            i += 2  # escaped character outside a string: consume both
            continue

        if ch == '"':
            # Fast-forward over the whole string literal.
            i += 1
            while i < n:
                if str_to_parse[i] == "\\":
                    i += 2
                    continue
                if str_to_parse[i] == '"':
                    i += 1
                    break
                i += 1
            continue

        if ch in openers:
            expected.append(openers[ch])
        elif ch in closers:
            if not expected:
                raise ValueError("Extra closing bracket found.")
            if expected[-1] != ch:
                raise ValueError("Mismatched brackets.")
            expected.pop()

        i += 1

    if expected:
        return str_to_parse + "".join(reversed(expected))
    return str_to_parse
1264
+
1265
+
1266
class XMLParser:
    """Minimal recursive-descent XML parser producing nested dicts.

    Limitations visible in the implementation: no namespaces, comments,
    CDATA or processing instructions; attribute values must be
    double-quoted; entities are not decoded.
    """

    def __init__(self, xml_string: str):
        # `index` is the current parse position into the stripped text.
        self.xml_string = xml_string.strip()
        self.index = 0

    def parse(self) -> dict[str, Any]:
        """Parse the XML string and return the root element as a dictionary."""
        return self._parse_element()

    def _parse_element(self) -> dict[str, Any]:
        """Parse one element (and its subtree) starting at `self.index`.

        Returns a one-entry dict ``{tag: payload}`` where payload is a dict
        of attributes/children, or the stripped text for a text-only leaf.
        """
        self._skip_whitespace()
        if self.xml_string[self.index] != "<":
            raise ValueError(
                f"Expected '<', found '{self.xml_string[self.index]}'"
            )

        tag, attributes = self._parse_opening_tag()
        children: dict[str, str | list | dict] = {}
        text = ""

        # Consume content until the matching closing tag (or end of input).
        while self.index < len(self.xml_string):
            self._skip_whitespace()
            if self.xml_string.startswith("</", self.index):
                closing_tag = self._parse_closing_tag()
                if closing_tag != tag:
                    raise ValueError(
                        f"Mismatched tags: '{tag}' and '{closing_tag}'"
                    )
                break
            elif self.xml_string.startswith("<", self.index):
                child = self._parse_element()
                child_tag, child_data = next(iter(child.items()))
                # Repeated sibling tags are collected into a list.
                if child_tag in children:
                    if not isinstance(children[child_tag], list):
                        children[child_tag] = [children[child_tag]]
                    children[child_tag].append(child_data)
                else:
                    children[child_tag] = child_data
            else:
                text += self._parse_text()

        result: dict[str, Any] = {}
        if attributes:
            result["@attributes"] = attributes
        if children:
            result.update(children)
        elif text.strip():
            # NOTE(review): a text-only element replaces `result` entirely,
            # dropping any "@attributes" collected above — confirm intended.
            result = text.strip()

        return {tag: result}

    def _parse_opening_tag(self) -> tuple[str, dict[str, str]]:
        """Parse an opening tag; returns (tag, {attr: value}).

        The regex tolerates a trailing "/" (self-closing form), but no
        special empty-element handling follows — the caller still scans for
        a closing tag.
        """
        match = re.match(
            r'<(\w+)((?:\s+\w+="[^"]*")*)\s*/?>',
            self.xml_string[self.index :],  # noqa
        )
        if not match:
            raise ValueError("Invalid opening tag")
        self.index += match.end()
        tag = match.group(1)
        attributes = dict(re.findall(r'(\w+)="([^"]*)"', match.group(2)))
        return tag, attributes

    def _parse_closing_tag(self) -> str:
        """Parse a closing tag and return its name."""
        match = re.match(r"</(\w+)>", self.xml_string[self.index :])  # noqa
        if not match:
            raise ValueError("Invalid closing tag")
        self.index += match.end()
        return match.group(1)

    def _parse_text(self) -> str:
        """Consume and return raw text up to the next '<' (or end of input)."""
        start = self.index
        while (
            self.index < len(self.xml_string)
            and self.xml_string[self.index] != "<"
        ):
            self.index += 1
        return self.xml_string[start : self.index]  # noqa

    def _skip_whitespace(self) -> None:
        """Advance `index` past leading whitespace at the current position."""
        # Difference between the remaining text and its lstripped form is
        # exactly the number of whitespace characters to skip.
        p_ = len(self.xml_string[self.index :])  # noqa
        m_ = len(self.xml_string[self.index :].lstrip())  # noqa

        self.index += p_ - m_
1355
+
1356
+
1357
def xml_to_dict(
    xml_string: str,
    /,
    suppress=False,
    remove_root: bool = True,
    root_tag: str = None,
) -> dict[str, Any]:
    """Parse XML text into a nested dict via :class:`XMLParser`.

    Tags become keys, attributes land under '@attributes', repeated sibling
    tags become lists, and text-only leaves become plain strings.

    Args:
        xml_string: XML document text.
        suppress: When True, swallow parse errors (returns None).
        remove_root: Strip the outermost element when its tag matches.
        root_tag: Expected root tag name (defaults to "root").

    Returns:
        Dict representation of the XML.

    Raises:
        ValueError: On malformed XML when ``suppress`` is False.
    """
    try:
        parsed = XMLParser(xml_string).parse()
        key = root_tag or "root"
        if remove_root and key in parsed:
            parsed = parsed[key]
        return parsed
    except ValueError:
        if not suppress:
            raise
1390
+
1391
+
1392
def dict_to_xml(data: dict, /, root_tag: str = "root") -> str:
    """Serialize a (possibly nested) dict to an XML string.

    Each key becomes a child element; dict values recurse, everything else
    is stringified into element text.
    """
    root = ET.Element(root_tag)

    def _fill(parent: Any, mapping: dict) -> None:
        # One sub-element per key; nested dicts recurse, leaves get text.
        for key, value in mapping.items():
            node = ET.SubElement(parent, key)
            if isinstance(value, dict):
                _fill(node, value)
            else:
                node.text = str(value)

    _fill(root, data)
    return ET.tostring(root, encoding="unicode")
1407
+
1408
+
1409
def to_dict(
    input_: Any,
    /,
    *,
    use_model_dump: bool = True,
    fuzzy_parse: bool = False,
    suppress: bool = False,
    str_type: Literal["json", "xml"] | None = "json",
    parser: Callable[[str], Any] | None = None,
    recursive: bool = False,
    max_recursive_depth: int = None,
    recursive_python_only: bool = True,
    use_enum_values: bool = False,
    **kwargs: Any,
) -> dict[str, Any]:
    """
    Convert various input types to a dictionary, with optional recursion.

    Args:
        input_: The input to convert.
        use_model_dump: Use model_dump() for Pydantic models if available.
        fuzzy_parse: Use fuzzy parsing for string inputs.
        suppress: Return empty dict on errors if True.
        str_type: Input string type ("json" or "xml").
        parser: Custom parser function for string inputs.
        recursive: Enable recursive conversion of nested structures.
        max_recursive_depth: Maximum recursion depth (default 5, max 10).
        recursive_python_only: If False, also converts custom types recursively.
        use_enum_values: Use enum values instead of names.
        **kwargs: Additional arguments for parsing functions.

    Returns:
        dict[str, Any]: A dictionary derived from the input.

    Raises:
        ValueError: If parsing fails and suppress is False.

    Examples:
        >>> to_dict({"a": 1, "b": [2, 3]})
        {'a': 1, 'b': [2, 3]}
        >>> to_dict('{"x": 10}', str_type="json")
        {'x': 10}
    """
    try:
        candidate = input_
        if recursive:
            # Pre-pass: walk nested containers/strings before final dispatch.
            candidate = recursive_to_dict(
                candidate,
                use_model_dump=use_model_dump,
                fuzzy_parse=fuzzy_parse,
                str_type=str_type,
                parser=parser,
                max_recursive_depth=max_recursive_depth,
                recursive_custom_types=not recursive_python_only,
                use_enum_values=use_enum_values,
                **kwargs,
            )

        return _to_dict(
            candidate,
            fuzzy_parse=fuzzy_parse,
            parser=parser,
            str_type=str_type,
            use_model_dump=use_model_dump,
            use_enum_values=use_enum_values,
            **kwargs,
        )
    except Exception:
        # Empty-string input is treated as "nothing to convert".
        if suppress or input_ == "":
            return {}
        raise
1482
+
1483
+
1484
def recursive_to_dict(
    input_: Any,
    /,
    *,
    max_recursive_depth: int = None,
    recursive_custom_types: bool = False,
    **kwargs: Any,
) -> Any:
    """Validate the depth limit, then delegate to the recursive worker.

    Depth defaults to 5 when not an int and must lie in [0, 10]; a depth of
    0 returns the input unchanged.

    Raises:
        ValueError: When the depth is negative or greater than 10.
    """
    if not isinstance(max_recursive_depth, int):
        depth = 5
    else:
        if max_recursive_depth < 0:
            raise ValueError(
                "max_recursive_depth must be a non-negative integer"
            )
        if max_recursive_depth == 0:
            return input_
        if max_recursive_depth > 10:
            raise ValueError(
                "max_recursive_depth must be less than or equal to 10"
            )
        depth = max_recursive_depth

    return _recur_to_dict(
        input_,
        max_recursive_depth=depth,
        current_depth=0,
        recursive_custom_types=recursive_custom_types,
        **kwargs,
    )
1514
+
1515
+
1516
def _recur_to_dict(
    input_: Any,
    /,
    *,
    max_recursive_depth: int,
    current_depth: int = 0,
    recursive_custom_types: bool = False,
    **kwargs: Any,
) -> Any:
    """Depth-limited recursive conversion worker behind `recursive_to_dict`.

    Strings are parsed (best-effort) and their result recursed; dicts,
    lists/tuples/sets recurse element-wise preserving container type; Enum
    classes and (optionally) custom objects are converted via `_to_dict`
    then recursed. Anything else — or any failure — returns the input as-is.
    """

    if current_depth >= max_recursive_depth:
        return input_

    if isinstance(input_, str):
        try:
            # Attempt to parse the string
            parsed = _to_dict(input_, **kwargs)
            # Recursively process the parsed result
            return _recur_to_dict(
                parsed,
                max_recursive_depth=max_recursive_depth,
                current_depth=current_depth + 1,
                recursive_custom_types=recursive_custom_types,
                **kwargs,
            )
        except Exception:
            # Return the original string if parsing fails
            return input_

    elif isinstance(input_, dict):
        # Recursively process dictionary values (keys are left untouched)
        return {
            key: _recur_to_dict(
                value,
                max_recursive_depth=max_recursive_depth,
                current_depth=current_depth + 1,
                recursive_custom_types=recursive_custom_types,
                **kwargs,
            )
            for key, value in input_.items()
        }

    elif isinstance(input_, (list, tuple, set)):
        # Recursively process elements, then rebuild the original container type
        processed = [
            _recur_to_dict(
                element,
                max_recursive_depth=max_recursive_depth,
                current_depth=current_depth + 1,
                recursive_custom_types=recursive_custom_types,
                **kwargs,
            )
            for element in input_
        ]
        return type(input_)(processed)

    elif isinstance(input_, type) and issubclass(input_, Enum):
        try:
            obj_dict = _to_dict(input_, **kwargs)
            # NOTE(review): this branch does not forward
            # recursive_custom_types to the recursive call (the custom-types
            # branch below does) — confirm whether that is intentional.
            return _recur_to_dict(
                obj_dict,
                max_recursive_depth=max_recursive_depth,
                current_depth=current_depth + 1,
                **kwargs,
            )
        except Exception:
            return input_

    elif recursive_custom_types:
        # Process custom classes if enabled
        try:
            obj_dict = _to_dict(input_, **kwargs)
            return _recur_to_dict(
                obj_dict,
                max_recursive_depth=max_recursive_depth,
                current_depth=current_depth + 1,
                recursive_custom_types=recursive_custom_types,
                **kwargs,
            )
        except Exception:
            return input_

    else:
        # Return the input as is for other data types
        return input_
1601
+
1602
+
1603
+ def _enum_to_dict(input_, /, use_enum_values: bool = True):
1604
+ dict_ = dict(input_.__members__).copy()
1605
+ if use_enum_values:
1606
+ return {key: value.value for key, value in dict_.items()}
1607
+ return dict_
1608
+
1609
+
1610
+ def _str_to_dict(
1611
+ input_: str,
1612
+ /,
1613
+ fuzzy_parse: bool = False,
1614
+ str_type: Literal["json", "xml"] | None = "json",
1615
+ parser: Callable[[str], Any] | None = None,
1616
+ remove_root: bool = False,
1617
+ root_tag: str = "root",
1618
+ **kwargs: Any,
1619
+ ):
1620
+ """
1621
+ kwargs for parser
1622
+ """
1623
+ if not parser:
1624
+ if str_type == "xml" and not parser:
1625
+ parser = partial(
1626
+ xml_to_dict, remove_root=remove_root, root_tag=root_tag
1627
+ )
1628
+
1629
+ elif fuzzy_parse:
1630
+ parser = fuzzy_parse_json
1631
+ else:
1632
+ parser = json.loads
1633
+
1634
+ return parser(input_, **kwargs)
1635
+
1636
+
1637
def _na_to_dict(input_: type[None] | UndefinedType | PydanticUndefinedType, /):
    """Map a 'not available' sentinel (None / undefined markers) to {}."""
    return {}
1639
+
1640
+
1641
+ def _model_to_dict(input_: Any, /, use_model_dump=True, **kwargs):
1642
+ """
1643
+ kwargs: built-in serialization methods kwargs
1644
+ accepted built-in serialization methods:
1645
+ - mdoel_dump
1646
+ - to_dict
1647
+ - to_json
1648
+ - dict
1649
+ - json
1650
+ """
1651
+
1652
+ if use_model_dump and hasattr(input_, "model_dump"):
1653
+ return input_.model_dump(**kwargs)
1654
+
1655
+ methods = (
1656
+ "to_dict",
1657
+ "to_json",
1658
+ "json",
1659
+ "dict",
1660
+ )
1661
+ for method in methods:
1662
+ if hasattr(input_, method):
1663
+ result = getattr(input_, method)(**kwargs)
1664
+ return json.loads(result) if isinstance(result, str) else result
1665
+
1666
+ if hasattr(input_, "__dict__"):
1667
+ return input_.__dict__
1668
+
1669
+ try:
1670
+ return dict(input_)
1671
+ except Exception as e:
1672
+ raise ValueError(f"Unable to convert input to dictionary: {e}")
1673
+
1674
+
1675
+ def _set_to_dict(input_: set, /) -> dict:
1676
+ return {v: v for v in input_}
1677
+
1678
+
1679
+ def _iterable_to_dict(input_: Iterable, /) -> dict:
1680
+ return {idx: v for idx, v in enumerate(input_)}
1681
+
1682
+
1683
def _to_dict(
    input_: Any,
    /,
    *,
    fuzzy_parse: bool = False,
    str_type: Literal["json", "xml"] | None = "json",
    parser: Callable[[str], Any] | None = None,
    remove_root: bool = False,
    root_tag: str = "root",
    use_model_dump: bool = True,
    use_enum_values: bool = True,
    **kwargs: Any,
) -> dict[str, Any]:
    """Single-level dispatcher: route ``input_`` to the matching converter.

    The order of the isinstance checks is the logic: set -> Enum class ->
    Mapping -> None/undefined sentinels -> str -> model-like (any BaseModel,
    or any non-Sequence object) -> other Iterable -> plain dict().
    """

    if isinstance(input_, set):
        return _set_to_dict(input_)

    if isinstance(input_, type) and issubclass(input_, Enum):
        return _enum_to_dict(input_, use_enum_values=use_enum_values)

    if isinstance(input_, Mapping):
        return dict(input_)

    if isinstance(input_, type(None) | UndefinedType | PydanticUndefinedType):
        return _na_to_dict(input_)

    if isinstance(input_, str):
        return _str_to_dict(
            input_,
            fuzzy_parse=fuzzy_parse,
            str_type=str_type,
            parser=parser,
            remove_root=remove_root,
            root_tag=root_tag,
            **kwargs,
        )

    # Any BaseModel — and any non-Sequence object — is treated as model-like.
    if isinstance(input_, BaseModel) or not isinstance(input_, Sequence):
        return _model_to_dict(input_, use_model_dump=use_model_dump, **kwargs)

    if isinstance(input_, Iterable):
        return _iterable_to_dict(input_)

    return dict(input_)
1727
+
1728
+
1729
+ # Precompile the regex for extracting JSON code blocks
1730
+ _JSON_BLOCK_PATTERN = re.compile(r"```json\s*(.*?)\s*```", re.DOTALL)
1731
+
1732
+
1733
+ def to_json(
1734
+ input_data: str | list[str], /, *, fuzzy_parse: bool = False
1735
+ ) -> dict[str, Any] | list[dict[str, Any]]:
1736
+ """
1737
+ Extract and parse JSON content from a string or markdown code blocks.
1738
+
1739
+ Attempts direct JSON parsing first. If that fails, looks for JSON content
1740
+ within markdown code blocks denoted by ```json.
1741
+
1742
+ Args:
1743
+ input_data (str | list[str]): The input string or list of strings to parse.
1744
+ fuzzy_parse (bool): If True, attempts fuzzy JSON parsing on failed attempts.
1745
+
1746
+ Returns:
1747
+ dict or list of dicts:
1748
+ - If a single JSON object is found: returns a dict.
1749
+ - If multiple JSON objects are found: returns a list of dicts.
1750
+ - If no valid JSON found: returns an empty list.
1751
+ """
1752
+
1753
+ # If input_data is a list, join into a single string
1754
+ if isinstance(input_data, list):
1755
+ input_str = "\n".join(input_data)
1756
+ else:
1757
+ input_str = input_data
1758
+
1759
+ # 1. Try direct parsing
1760
+ try:
1761
+ if fuzzy_parse:
1762
+ return fuzzy_parse_json(input_str)
1763
+ return json.loads(input_str)
1764
+ except Exception:
1765
+ pass
1766
+
1767
+ # 2. Attempt extracting JSON blocks from markdown
1768
+ matches = _JSON_BLOCK_PATTERN.findall(input_str)
1769
+ if not matches:
1770
+ return []
1771
+
1772
+ # If only one match, return single dict; if multiple, return list of dicts
1773
+ if len(matches) == 1:
1774
+ data_str = matches[0]
1775
+ return (
1776
+ fuzzy_parse_json(data_str) if fuzzy_parse else json.loads(data_str)
1777
+ )
1778
+
1779
+ # Multiple matches
1780
+ if fuzzy_parse:
1781
+ return [fuzzy_parse_json(m) for m in matches]
1782
+ else:
1783
+ return [json.loads(m) for m in matches]
1784
+
1785
+
1786
def get_bins(input_: list[str], upper: int) -> list[list[int]]:
    """Group indices of strings into bins by cumulative length.

    A new bin starts whenever adding the next string would make the running
    total reach or exceed ``upper`` (strict ``<`` comparison).

    Args:
        input_: The strings to bin.
        upper: Cumulative length limit per bin (exclusive).

    Returns:
        List of bins; each bin is a list of indices into ``input_``.
    """
    bins: list[list[int]] = []
    bucket: list[int] = []
    total = 0
    for idx, text in enumerate(input_):
        if total + len(text) < upper:
            bucket.append(idx)
            total += len(text)
        else:
            # Flush even an empty bucket (e.g. first item already too big),
            # matching the established behavior.
            bins.append(bucket)
            bucket = [idx]
            total = len(text)
    if bucket:
        bins.append(bucket)
    return bins
1810
+
1811
+
1812
class Throttle:
    """
    Rate-limit callable invocations to at most one per ``period`` seconds.

    Use an instance as a decorator for synchronous callables; use
    ``__call_async__`` to wrap coroutine functions. Early calls are delayed
    until the period has elapsed since the previous call.

    Attributes:
        period: Minimum seconds between successive calls.
    """

    def __init__(self, period: float) -> None:
        """
        Args:
            period: Minimum time (in seconds) between successive calls.
        """
        self.period = period
        # Wall-clock timestamp of the most recent call's start.
        self.last_called = 0

    def __call__(self, func: Callable[..., T]) -> Callable[..., T]:
        """Wrap a synchronous function so calls are spaced by ``period``."""

        @functools.wraps(func)
        def throttled(*args, **kwargs) -> Any:
            wait = self.period - (time() - self.last_called)
            if wait > 0:
                t_.sleep(wait)  # blocking sleep for the sync path
            self.last_called = time()
            return func(*args, **kwargs)

        return throttled

    def __call_async__(
        self, func: Callable[..., Callable[..., T]]
    ) -> Callable[..., Callable[..., T]]:
        """Wrap an asynchronous function so calls are spaced by ``period``."""

        @functools.wraps(func)
        async def throttled(*args, **kwargs) -> Any:
            wait = self.period - (time() - self.last_called)
            if wait > 0:
                await asyncio.sleep(wait)  # non-blocking for the async path
            self.last_called = time()
            return await func(*args, **kwargs)

        return throttled
1878
+
1879
+
1880
def force_async(fn: Callable[..., T]) -> Callable[..., Callable[..., T]]:
    """
    Wrap a blocking function so its result can be awaited.

    Each call is submitted to a thread pool and the resulting future is
    exposed as an awaitable.

    Args:
        fn: The synchronous function to adapt.

    Returns:
        A function returning an awaitable of ``fn``'s result.
    """
    # NOTE(review): one pool per wrapped function, never shut down explicitly.
    pool = ThreadPoolExecutor()

    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        # Submit to the pool and bridge the concurrent future to asyncio.
        return asyncio.wrap_future(pool.submit(fn, *args, **kwargs))

    return wrapper
1899
+
1900
+
1901
def throttle(
    func: Callable[..., T], period: float
) -> Callable[..., Callable[..., T]]:
    """
    Throttle function execution to limit the rate of calls.

    The callable (coerced to async if needed) runs at most once per
    ``period`` seconds; early calls are delayed with a non-blocking sleep.

    Args:
        func: The function to throttle (sync functions are made awaitable).
        period: The minimum time interval between consecutive calls.

    Returns:
        The throttled async function.
    """
    if not is_coro_func(func):
        func = force_async(func)
    # Use the async-aware wrapper so the delay does not block the event loop.
    throttled = Throttle(period).__call_async__(func)

    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        # BUG FIX: the previous implementation awaited the throttled call
        # and then awaited ``func`` again, executing it TWICE per call (and
        # used the blocking sync wrapper for the first invocation).
        return await throttled(*args, **kwargs)

    return wrapper
1924
+
1925
+
1926
def max_concurrent(
    func: Callable[..., T], limit: int
) -> Callable[..., Callable[..., T]]:
    """
    Cap the number of simultaneous executions of ``func``.

    Args:
        func: The callable to guard (sync callables are made awaitable).
        limit: The maximum number of concurrent executions.

    Returns:
        An async wrapper gated by a semaphore of size ``limit``.
    """
    if not is_coro_func(func):
        func = force_async(func)
    gate = asyncio.Semaphore(limit)

    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        async with gate:
            return await func(*args, **kwargs)

    return wrapper
1949
+
1950
+
1951
# Type definitions: accepted numeric-type specs (type objects or their names)
NUM_TYPE_LITERAL = Literal["int", "float", "complex"]
NUM_TYPES = type[int] | type[float] | type[complex] | NUM_TYPE_LITERAL
NumericType = TypeVar("NumericType", int, float, complex)

# Type mapping from string spec to the concrete Python type
TYPE_MAP = {"int": int, "float": float, "complex": complex}

# Regex patterns for different numeric formats.
# Order matters: more specific forms (scientific, complex, percentage,
# fraction) are listed before plain "decimal" so alternation and
# classification pick the richest match first.
PATTERNS = {
    "scientific": r"[-+]?(?:\d*\.)?\d+[eE][-+]?\d+",
    "complex_sci": r"[-+]?(?:\d*\.)?\d+(?:[eE][-+]?\d+)?[-+](?:\d*\.)?\d+(?:[eE][-+]?\d+)?[jJ]",
    "complex": r"[-+]?(?:\d*\.)?\d+[-+](?:\d*\.)?\d+[jJ]",
    "pure_imaginary": r"[-+]?(?:\d*\.)?\d*[jJ]",
    "percentage": r"[-+]?(?:\d*\.)?\d+%",
    "fraction": r"[-+]?\d+/\d+",
    "decimal": r"[-+]?(?:\d*\.)?\d+",
    "special": r"[-+]?(?:inf|infinity|nan)",
}
1970
+
1971
+
1972
def to_num(
    input_: Any,
    /,
    *,
    upper_bound: int | float | None = None,
    lower_bound: int | float | None = None,
    num_type: NUM_TYPES = float,
    precision: int | None = None,
    num_count: int = 1,
) -> int | float | complex | list[int | float | complex]:
    """Convert input to numeric type(s) with validation and bounds checking.

    Args:
        input_: The input to convert to number(s).
        upper_bound: Maximum allowed value (inclusive).
        lower_bound: Minimum allowed value (inclusive).
        num_type: Target numeric type ('int', 'float', 'complex' or type objects).
        precision: Number of decimal places for rounding (float only).
        num_count: Number of numeric values to extract.

    Returns:
        Converted number(s). Single value if num_count=1, else list.

    Raises:
        ValueError: For invalid input or out of bounds values.
        TypeError: For invalid input types or invalid type conversions.
    """
    # Sequences are rejected outright — callers must pass scalars or text.
    if isinstance(input_, (list, tuple)):
        raise TypeError("Input cannot be a sequence")

    # bool is an int subclass; handle it before the numeric branch so it is
    # coerced through the requested target type directly.
    if isinstance(input_, bool):
        return validate_num_type(num_type)(input_)

    # Direct numeric input: bound/round/convert without regex extraction.
    if isinstance(input_, (int, float, complex, Decimal)):
        inferred_type = type(input_)
        if isinstance(input_, Decimal):
            inferred_type = float
        value = float(input_) if not isinstance(input_, complex) else input_
        value = apply_bounds(value, upper_bound, lower_bound)
        value = apply_precision(value, precision)
        return convert_type(value, validate_num_type(num_type), inferred_type)

    # Anything else: stringify and extract numeric tokens via PATTERNS.
    input_str = str(input_)
    number_matches = extract_numbers(input_str)

    if not number_matches:
        raise ValueError(f"No valid numbers found in: {input_str}")

    # Process numbers
    results = []
    target_type = validate_num_type(num_type)

    # Keep at most num_count matches, in order of appearance.
    number_matches = (
        number_matches[:num_count]
        if num_count < len(number_matches)
        else number_matches
    )

    for type_and_value in number_matches:
        try:
            # Infer appropriate type from the matched pattern name
            inferred_type = infer_type(type_and_value)

            # Parse to numeric value
            value = parse_number(type_and_value)

            # NOTE(review): apply_bounds is invoked for complex values too —
            # presumably it handles/ignores complex internally; confirm.
            value = apply_bounds(value, upper_bound, lower_bound)

            # Apply precision
            value = apply_precision(value, precision)

            # Convert to target type if different from inferred
            value = convert_type(value, target_type, inferred_type)

            results.append(value)

        except Exception as e:
            # Re-raise with context, preserving the original exception type.
            if len(type_and_value) == 2:
                raise type(e)(
                    f"Error processing {type_and_value[1]}: {str(e)}"
                )
            raise type(e)(f"Error processing {type_and_value}: {str(e)}")

    # Single requested value unwraps to a scalar; otherwise return the list.
    if results and num_count == 1:
        return results[0]
    return results
2063
+
2064
+
2065
def extract_numbers(text: str) -> list[tuple[str, str]]:
    """Scan text for numeric literals using the ordered PATTERNS table.

    Args:
        text: The text to scan.

    Returns:
        ``(pattern_name, matched_text)`` tuples in order of appearance.
    """
    alternation = "|".join(PATTERNS.values())
    found: list[tuple[str, str]] = []
    for match in re.finditer(alternation, text, re.IGNORECASE):
        token = match.group()
        # Classify by the first pattern that matches the token in full;
        # PATTERNS order puts richer formats before plain decimals.
        for name, pattern in PATTERNS.items():
            if re.fullmatch(pattern, token, re.IGNORECASE):
                found.append((name, token))
                break
    return found
2087
+
2088
+
2089
def validate_num_type(num_type: NUM_TYPES) -> type:
    """Resolve a numeric-type specification to a Python type object.

    Args:
        num_type: Either a type-name string (a key of TYPE_MAP) or one of
            the types int, float, complex.

    Returns:
        The resolved Python type.

    Raises:
        ValueError: If the specification is neither a known name nor an
            accepted type.
    """
    if isinstance(num_type, str):
        if num_type in TYPE_MAP:
            return TYPE_MAP[num_type]
        raise ValueError(f"Invalid number type: {num_type}")
    if num_type in (int, float, complex):
        return num_type
    raise ValueError(f"Invalid number type: {num_type}")
2109
+
2110
+
2111
def infer_type(value: tuple[str, str]) -> type:
    """Deduce the natural Python type for an extracted number.

    Args:
        value: (pattern_name, matched_text) pair from extract_numbers.

    Returns:
        complex for complex-valued pattern names; float for everything else.
    """
    kind = value[0]
    is_complex = kind in {"complex", "complex_sci", "pure_imaginary"}
    return complex if is_complex else float
2124
+
2125
+
2126
def convert_special(value: str) -> float:
    """Translate special float spellings (inf, infinity, nan) to floats.

    Args:
        value: Text such as "inf", "-Infinity", or "nan".

    Returns:
        float('inf'), float('-inf'), or float('nan') as appropriate.
    """
    lowered = value.lower()
    if "inf" not in lowered:
        # Anything routed here that is not an infinity is treated as NaN.
        return float("nan")
    # Sign is determined solely by a leading minus on the token.
    return float("-inf") if lowered.startswith("-") else float("inf")
2139
+
2140
+
2141
def convert_percentage(value: str) -> float:
    """Parse a percentage token (e.g. "45%") into its fractional value.

    Args:
        value: Percentage string, optionally ending in '%'.

    Returns:
        The numeric value divided by 100.

    Raises:
        ValueError: If the numeric portion cannot be parsed.
    """
    stripped = value.rstrip("%")
    try:
        numerator = float(stripped)
    except ValueError as e:
        raise ValueError(f"Invalid percentage value: {value}") from e
    return numerator / 100
2157
+
2158
+
2159
def convert_complex(value: str) -> complex:
    """Parse a complex-number token into a Python complex.

    Handles bare imaginary units ("j", "+j", "-j"), pure-imaginary values
    ("3.5j"), and full complex forms ("2+3j", including spaced variants).

    Args:
        value: The complex number string to convert.

    Returns:
        The parsed complex value.

    Raises:
        ValueError: If the string is not a valid complex number.
    """
    try:
        if value.endswith(("j", "J")):
            unit_signs = {"j": 1, "J": 1, "+j": 1, "+J": 1, "-j": -1, "-J": -1}
            if value in unit_signs:
                return complex(0, unit_signs[value])
            # Pure imaginary: no '+' anywhere and no '-' past the first
            # character means there is no real part to parse.
            if "+" not in value and "-" not in value[1:]:
                return complex(0, float(value[:-1] or "1"))
        # General case: drop spaces so forms like "2 + 3j" parse.
        return complex(value.replace(" ", ""))
    except ValueError as e:
        raise ValueError(f"Invalid complex number: {value}") from e
2188
+
2189
+
2190
+ def convert_type(
2191
+ value: float | complex,
2192
+ target_type: type,
2193
+ inferred_type: type,
2194
+ ) -> int | float | complex:
2195
+ """Convert value to target type if specified, otherwise use inferred type.
2196
+
2197
+ Args:
2198
+ value: The value to convert.
2199
+ target_type: The requested target type.
2200
+ inferred_type: The inferred type from the value.
2201
+
2202
+ Returns:
2203
+ The converted value.
2204
+
2205
+ Raises:
2206
+ TypeError: If the conversion is not possible.
2207
+ """
2208
+ try:
2209
+ # If no specific type requested, use inferred type
2210
+ if target_type is float and inferred_type is complex:
2211
+ return value
2212
+
2213
+ # Handle explicit type conversions
2214
+ if target_type is int and isinstance(value, complex):
2215
+ raise TypeError("Cannot convert complex number to int")
2216
+ return target_type(value)
2217
+ except (ValueError, TypeError) as e:
2218
+ raise TypeError(
2219
+ f"Cannot convert {value} to {target_type.__name__}"
2220
+ ) from e
2221
+
2222
+
2223
+ def apply_bounds(
2224
+ value: float | complex,
2225
+ upper_bound: float | None = None,
2226
+ lower_bound: float | None = None,
2227
+ ) -> float | complex:
2228
+ """Apply bounds checking to numeric value.
2229
+
2230
+ Args:
2231
+ value: The value to check.
2232
+ upper_bound: Maximum allowed value (inclusive).
2233
+ lower_bound: Minimum allowed value (inclusive).
2234
+
2235
+ Returns:
2236
+ The validated value.
2237
+
2238
+ Raises:
2239
+ ValueError: If the value is outside bounds.
2240
+ """
2241
+ if isinstance(value, complex):
2242
+ return value
2243
+
2244
+ if upper_bound is not None and value > upper_bound:
2245
+ raise ValueError(f"Value {value} exceeds upper bound {upper_bound}")
2246
+ if lower_bound is not None and value < lower_bound:
2247
+ raise ValueError(f"Value {value} below lower bound {lower_bound}")
2248
+ return value
2249
+
2250
+
2251
+ def apply_precision(
2252
+ value: float | complex,
2253
+ precision: int | None,
2254
+ ) -> float | complex:
2255
+ """Apply precision rounding to numeric value.
2256
+
2257
+ Args:
2258
+ value: The value to round.
2259
+ precision: Number of decimal places.
2260
+
2261
+ Returns:
2262
+ The rounded value.
2263
+ """
2264
+ if precision is None or isinstance(value, complex):
2265
+ return value
2266
+ if isinstance(value, float):
2267
+ return round(value, precision)
2268
+ return value
2269
+
2270
+
2271
+ def parse_number(type_and_value: tuple[str, str]) -> float | complex:
2272
+ """Parse string to numeric value based on pattern type.
2273
+
2274
+ Args:
2275
+ type_and_value: Tuple of (pattern_type, matched_value).
2276
+
2277
+ Returns:
2278
+ The parsed numeric value.
2279
+
2280
+ Raises:
2281
+ ValueError: If parsing fails.
2282
+ """
2283
+ num_type, value = type_and_value
2284
+ value = value.strip()
2285
+
2286
+ try:
2287
+ if num_type == "special":
2288
+ return convert_special(value)
2289
+
2290
+ if num_type == "percentage":
2291
+ return convert_percentage(value)
2292
+
2293
+ if num_type == "fraction":
2294
+ if "/" not in value:
2295
+ raise ValueError(f"Invalid fraction: {value}")
2296
+ if value.count("/") > 1:
2297
+ raise ValueError(f"Invalid fraction: {value}")
2298
+ num, denom = value.split("/")
2299
+ if not (num.strip("-").isdigit() and denom.isdigit()):
2300
+ raise ValueError(f"Invalid fraction: {value}")
2301
+ denom_val = float(denom)
2302
+ if denom_val == 0:
2303
+ raise ValueError("Division by zero")
2304
+ return float(num) / denom_val
2305
+ if num_type in ("complex", "complex_sci", "pure_imaginary"):
2306
+ return convert_complex(value)
2307
+ if num_type == "scientific":
2308
+ if "e" not in value.lower():
2309
+ raise ValueError(f"Invalid scientific notation: {value}")
2310
+ parts = value.lower().split("e")
2311
+ if len(parts) != 2:
2312
+ raise ValueError(f"Invalid scientific notation: {value}")
2313
+ if not (parts[1].lstrip("+-").isdigit()):
2314
+ raise ValueError(f"Invalid scientific notation: {value}")
2315
+ return float(value)
2316
+ if num_type == "decimal":
2317
+ return float(value)
2318
+
2319
+ raise ValueError(f"Unknown number type: {num_type}")
2320
+ except Exception as e:
2321
+ # Preserve the specific error type but wrap with more context
2322
+ raise type(e)(f"Failed to parse {value} as {num_type}: {str(e)}")
2323
+
2324
+
2325
def breakdown_pydantic_annotation(
    model: type[B], max_depth: int | None = None, current_depth: int = 0
) -> dict[str, Any]:
    """Recursively expand a Pydantic model's annotations into a plain dict.

    Nested model annotations become nested dicts; list-of-model
    annotations become single-element lists holding the expanded dict.

    Args:
        model: Pydantic model class to expand.
        max_depth: Optional recursion ceiling; None means unlimited.
        current_depth: Depth of the current call (internal use).

    Returns:
        Mapping of field name to expanded annotation.

    Raises:
        TypeError: If *model* is not a Pydantic model class.
        RecursionError: If the recursion ceiling is reached.
    """
    if not _is_pydantic_model(model):
        raise TypeError("Input must be a Pydantic model")

    if max_depth is not None and current_depth >= max_depth:
        raise RecursionError("Maximum recursion depth reached")

    expanded: dict[str, Any] = {}
    for field_name, annotation in model.__annotations__.items():
        if _is_pydantic_model(annotation):
            expanded[field_name] = breakdown_pydantic_annotation(
                annotation, max_depth, current_depth + 1
            )
            continue
        if get_origin(annotation) is list:
            args = get_args(annotation)
            if args and _is_pydantic_model(args[0]):
                inner = breakdown_pydantic_annotation(
                    args[0], max_depth, current_depth + 1
                )
                expanded[field_name] = [inner]
            else:
                # Keep the element type if present, otherwise fall back
                # to Any for a bare `list` annotation.
                expanded[field_name] = [args[0] if args else Any]
            continue
        expanded[field_name] = annotation

    return expanded
2356
+
2357
+
2358
def _is_pydantic_model(x: Any) -> bool:
    """Return True if *x* is a class that subclasses pydantic's BaseModel."""
    return isinstance(x, type) and issubclass(x, BaseModel)
2360
+
2361
+
2362
def run_package_manager_command(
    args: Sequence[str],
) -> subprocess.CompletedProcess[bytes]:
    """Run a package-manager command, preferring ``uv`` over ``pip``.

    When ``uv`` is found on PATH it is tried first; if it is absent or its
    invocation fails, the same arguments are retried via ``python -m pip``.

    Args:
        args: Package-manager arguments (e.g. ["install", "somepkg"]).

    Returns:
        The completed subprocess for whichever tool ran successfully.

    Raises:
        subprocess.CalledProcessError: If the pip fallback also fails
            (check=True).
    """
    command = list(args)
    uv_executable = shutil.which("uv")

    if uv_executable is not None:
        try:
            return subprocess.run(
                [uv_executable, *command],
                check=True,
                capture_output=True,
            )
        except subprocess.CalledProcessError:
            # uv was present but the command failed; retry with pip below.
            print("uv command failed, falling back to pip...")

    return subprocess.run(
        [sys.executable, "-m", "pip", *command],
        check=True,
        capture_output=True,
    )