UncountablePythonSDK 0.0.55__py3-none-any.whl → 0.0.57__py3-none-any.whl

This diff shows the changes between publicly released versions of this package, as published to the supported public registries. It is provided for informational purposes only.

This version of UncountablePythonSDK has been flagged as a potentially problematic release; details are available on the package's registry page.

Files changed (38)
  1. {UncountablePythonSDK-0.0.55.dist-info → UncountablePythonSDK-0.0.57.dist-info}/METADATA +1 -1
  2. {UncountablePythonSDK-0.0.55.dist-info → UncountablePythonSDK-0.0.57.dist-info}/RECORD +38 -37
  3. {UncountablePythonSDK-0.0.55.dist-info → UncountablePythonSDK-0.0.57.dist-info}/WHEEL +1 -1
  4. examples/create_entity.py +3 -1
  5. examples/edit_recipe_inputs.py +3 -1
  6. pkgs/argument_parser/argument_parser.py +4 -2
  7. pkgs/filesystem_utils/_gdrive_session.py +5 -2
  8. pkgs/filesystem_utils/_s3_session.py +2 -1
  9. pkgs/filesystem_utils/_sftp_session.py +5 -4
  10. pkgs/serialization/serial_class.py +6 -2
  11. pkgs/serialization/yaml.py +4 -1
  12. pkgs/type_spec/actions_registry/emit_typescript.py +3 -1
  13. pkgs/type_spec/builder.py +16 -6
  14. pkgs/type_spec/config.py +3 -1
  15. pkgs/type_spec/emit_io_ts.py +5 -5
  16. pkgs/type_spec/emit_open_api.py +10 -6
  17. pkgs/type_spec/emit_open_api_util.py +3 -4
  18. pkgs/type_spec/emit_python.py +9 -5
  19. pkgs/type_spec/emit_typescript.py +17 -8
  20. pkgs/type_spec/type_info/emit_type_info.py +5 -3
  21. pkgs/type_spec/value_spec/convert_type.py +3 -1
  22. pkgs/type_spec/value_spec/emit_python.py +12 -4
  23. uncountable/core/client.py +6 -2
  24. uncountable/core/file_upload.py +15 -3
  25. uncountable/integration/construct_client.py +2 -1
  26. uncountable/integration/executors/generic_upload_executor.py +9 -7
  27. uncountable/integration/secret_retrieval/retrieve_secret.py +1 -3
  28. uncountable/integration/telemetry.py +12 -4
  29. uncountable/types/__init__.py +2 -0
  30. uncountable/types/api/entity/create_entities.py +1 -1
  31. uncountable/types/api/entity/create_entity.py +1 -1
  32. uncountable/types/api/recipes/clear_recipe_outputs.py +35 -0
  33. uncountable/types/async_batch.py +1 -0
  34. uncountable/types/async_batch_processor.py +72 -0
  35. uncountable/types/async_batch_t.py +9 -0
  36. uncountable/types/client_base.py +22 -2
  37. uncountable/types/entity_t.py +2 -0
  38. {UncountablePythonSDK-0.0.55.dist-info → UncountablePythonSDK-0.0.57.dist-info}/top_level.txt +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: UncountablePythonSDK
-Version: 0.0.55
+Version: 0.0.57
 Summary: Uncountable SDK
 Project-URL: Homepage, https://github.com/uncountableinc/uncountable-python-sdk
 Project-URL: Repository, https://github.com/uncountableinc/uncountable-python-sdk.git
@@ -15,29 +15,29 @@ docs/static/favicons/manifest.json,sha256=6q_3nZkcg_x0xut4eE-xpdeMY1TydwiZIcbXlL
15
15
  docs/static/favicons/mstile-150x150.png,sha256=eAK4QdEofhdLtfmjuPTpnX3MJqYnvGXsHYUjlcQekyY,1035
16
16
  docs/static/favicons/safari-pinned-tab.svg,sha256=S84fRnz0ZxLnQrKtmmFZytiRyu1xLtMR_RVy5jmwU7k,1926
17
17
  examples/async_batch.py,sha256=CffQ8O9drJ-Mdd6S5DnMIOBsHv5aVkTZrD3l3xBnB4s,1094
18
- examples/create_entity.py,sha256=noZdtJ5f9Wfiob3zUH-8bDVbrCPJnFtXFk_W9pSjvUA,664
19
- examples/edit_recipe_inputs.py,sha256=9ZtpdeMSO5HGsVBQtftc_PI7-qpafY-YMWoYbtcUXvo,1649
18
+ examples/create_entity.py,sha256=t6WBZsWRDbWZgFCWXKGgKL5LAB6-38oaiNYGxMAa2No,686
19
+ examples/edit_recipe_inputs.py,sha256=mtk_oSkN-OT2hKkb1XKXrRiUaGYTJstXuOKyTR51Fjo,1663
20
20
  examples/invoke_uploader.py,sha256=rEvmVY5TjigN_-4PTQdkjY-bC5DrYMcJgquyZ4Tt5FM,748
21
21
  examples/upload_files.py,sha256=tUfKFqiqwnw08OL5Y8_e4j5pSRhp94cFex8XTuVa_ig,487
22
22
  pkgs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
23
23
  pkgs/argument_parser/__init__.py,sha256=CsQ6QoPKSLLRVl-z6URAmPkiUL9ZPZoV4rJHgy_-RjA,385
24
24
  pkgs/argument_parser/_is_enum.py,sha256=Gw6jJa8nBwYGqXwwCZbSnWL8Rvr5alkg5lSVAqXtOZM,257
25
25
  pkgs/argument_parser/_is_namedtuple.py,sha256=Rjc1bKanIPPogl3qG5JPBxglG1TqWYOo1nxxhBASQWY,265
26
- pkgs/argument_parser/argument_parser.py,sha256=S5x4yDpaBqTRkmcOyX2UuBWw9iCE4j2Po5LZPg9jhe4,17308
26
+ pkgs/argument_parser/argument_parser.py,sha256=uI_fSs7XmiQf9kt4ztWqqF9v8w4dZmQRy3tssPYVRDE,17330
27
27
  pkgs/argument_parser/case_convert.py,sha256=NuJLJUJRbyVb6_Slen4uqaStEHbcOS1d-hBBfDrrw-c,605
28
28
  pkgs/filesystem_utils/__init__.py,sha256=NSsQrUCoGISBCqCCyq6_583sYHTVEQeDjDO8hvZn3ag,1261
29
- pkgs/filesystem_utils/_gdrive_session.py,sha256=OZudNoP2HikolnpurVJhJdh5fgzqbaZQvn53ReGGXx4,11015
29
+ pkgs/filesystem_utils/_gdrive_session.py,sha256=GJuZYJq1W4QQ_7OLvZIMK99FgRq8FxJHg6cMUx9prtA,11077
30
30
  pkgs/filesystem_utils/_local_session.py,sha256=xFEYhAvNqrOYqwt4jrEYOuYkjJn0zclZhTelW_Q1-rw,2325
31
- pkgs/filesystem_utils/_s3_session.py,sha256=UbVTUM5olc2Kq_1TX7e5rI3UD5w49ko7CWjsMnSJVmg,3946
32
- pkgs/filesystem_utils/_sftp_session.py,sha256=gNoUD_b4MuVqWj31nU-FpfpXZlyWkwdEHtX1S8W6gpQ,4727
31
+ pkgs/filesystem_utils/_s3_session.py,sha256=FSdCZBmargKgOWDfyvlsgVNJxwYHBrmHscD9pjN6Bgk,3962
32
+ pkgs/filesystem_utils/_sftp_session.py,sha256=6zoF7YsEUp0GpyFb-BeIhUAWvbTK7IUjvPNJ1B0vEyI,4743
33
33
  pkgs/filesystem_utils/file_type_utils.py,sha256=Xd-mg35mAENUgNJVz5uK8nEfrUp-NQld_gnXFEq3K-8,1487
34
34
  pkgs/filesystem_utils/filesystem_session.py,sha256=BQ2Go8Mu9-GcnaWh2Pm4x7ugLVsres6XrOQ8RoiEpcE,1045
35
35
  pkgs/serialization/__init__.py,sha256=LifasRW0a50A3qRFmo2bf3FQ6TXhZWOTz2-CVTgPjcQ,753
36
36
  pkgs/serialization/missing_sentry.py,sha256=aM_9KxbCk9dVvXvcOKgkIQBqFWvLhv8QlIUCiuFEXMo,806
37
37
  pkgs/serialization/opaque_key.py,sha256=FIfXEE0DA1U8R_taFbQ1RCoTSgehrPjP06-qvo-GeNQ,177
38
- pkgs/serialization/serial_class.py,sha256=r0hrQdIbJA_X0W0_jKEVrxi_JzVRT9qHCjsUgGu3cCI,5290
38
+ pkgs/serialization/serial_class.py,sha256=2oH5XZ04R14knr7eTB3afPQwpuaW0qUvt3woS5Q0y5Y,5370
39
39
  pkgs/serialization/serial_union.py,sha256=z8Ptj4bVHyb1ROfg0UPTwZ6Ef6iXLr0YJfAH5o_PU9A,2601
40
- pkgs/serialization/yaml.py,sha256=t-31FS0pL1YbhfspK88iQEGa1WDn60Fw16CpoLSClJM,1453
40
+ pkgs/serialization/yaml.py,sha256=yoJtu7_ixnJV6uTxA_U1PpK5F_ixT08AKVh5ocyYwXM,1466
41
41
  pkgs/serialization_util/__init__.py,sha256=MVKqHTUl2YnWZAFG9xCxu1SgmkQ5xPofrAGlYg6h7rI,330
42
42
  pkgs/serialization_util/_get_type_for_serialization.py,sha256=dW5_W9MFd6wgWfW5qlWork-GBb-QFLtiOZkjk2Zqn2M,1177
43
43
  pkgs/serialization_util/convert_to_snakecase.py,sha256=H2BAo5ZdcCDN77RpLb-uP0s7-FQ5Ukwnsd3VYc1vD0M,583
@@ -46,13 +46,13 @@ pkgs/strenum_compat/__init__.py,sha256=wXRFeNvBm8RU6dy1PFJ5sRLgUIEeH_DVR95Sv5qpG
46
46
  pkgs/strenum_compat/strenum_compat.py,sha256=uOUAgpYTjHs1MX8dG81jRlyTkt3KNbkV_25zp7xTX2s,36
47
47
  pkgs/type_spec/__init__.py,sha256=h5DmJTca4QVV10sZR1x0-MlkZfuGYDfapR3zHvXfzto,19
48
48
  pkgs/type_spec/__main__.py,sha256=5bJaX9Y_-FavP0qwzhk-z-V97UY7uaezJTa1zhO_HHQ,1048
49
- pkgs/type_spec/builder.py,sha256=xQcY2HcQTI2FSOMycgx3yD23_Oz3_LfWdyW65pDaHoc,46667
50
- pkgs/type_spec/config.py,sha256=IQyo2Vj11uNt7_d6jQxvominAOU-oPB8ldEmuGzJLpU,4644
51
- pkgs/type_spec/emit_io_ts.py,sha256=Ghd8XYqyNYldHQDepwa9GLfHXcoi48ztBw84K28ETic,5707
52
- pkgs/type_spec/emit_open_api.py,sha256=92POd3j4nrrROrw9M-bgEmK4ukYbI0TW6E9IigJzoTE,24512
53
- pkgs/type_spec/emit_open_api_util.py,sha256=y2slouAflUJmyTPH_d4CbXql9zpOoD1uTOELqL3NX-M,2448
54
- pkgs/type_spec/emit_python.py,sha256=bQUqVmfVgAXw8rGcqlg93YbUsUvawgR2o_NrV_c9Zio,46873
55
- pkgs/type_spec/emit_typescript.py,sha256=cdr5h8N70PuwORcvhURUujzwH9r1LVwJB8V2EoipGkw,17917
49
+ pkgs/type_spec/builder.py,sha256=dC-TLQ6_nJfDUKYPNHkqwP_9e1RanZMeK8PX-jvXpe4,46825
50
+ pkgs/type_spec/config.py,sha256=yq4pxv6huIXd-GAGEicSK07rPTJQtgutC7oECxliHZU,4660
51
+ pkgs/type_spec/emit_io_ts.py,sha256=U03sQBpgRqYOaMKrPCRnYb70YboiCgaZfseCXSzW5NY,5707
52
+ pkgs/type_spec/emit_open_api.py,sha256=5a0iAHBbgFD4wfKuyjPvxCYYHNTjKxEHA0aYjMGSqe4,24596
53
+ pkgs/type_spec/emit_open_api_util.py,sha256=x4GCiZSGdypJ9Qtm6I5W_3UvwdJyMs8_OGhJ8_THznA,2401
54
+ pkgs/type_spec/emit_python.py,sha256=SB0-KOuqo-PxZPFKvMfzZoSp71CGEBX3ZNE8f3HlkYA,46973
55
+ pkgs/type_spec/emit_typescript.py,sha256=Tv8EbXBQewZN9q3zRKfTy9lWwElBmIV6fprIjA4RmJQ,18010
56
56
  pkgs/type_spec/emit_typescript_util.py,sha256=sR7ys3Ilnh6SQiXJbfYk4pxfOu0bDjbUFTEYEW-ud6c,863
57
57
  pkgs/type_spec/load_types.py,sha256=BOLyndtxPqqhUqZAh-lIbN5IZBaW_m-bdYpKGsbPyXM,3654
58
58
  pkgs/type_spec/open_api_util.py,sha256=IGh-_snGPST_P_8FdYtO8MTEa9PUxRW6Rzg9X9EgQik,7114
@@ -60,56 +60,56 @@ pkgs/type_spec/test.py,sha256=4ueujBq-pEgnX3Z69HyPmD-bullFXmpixcpVzfOkhP4,489
60
60
  pkgs/type_spec/util.py,sha256=6m6MPfY-SwjyZf2FWQKclswWB5o7gcdd-3tdpViPYOQ,4844
61
61
  pkgs/type_spec/actions_registry/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
62
62
  pkgs/type_spec/actions_registry/__main__.py,sha256=JGwKxcAmrQdbpVR2vwknoimN1Q-r5h4SADw1cYLYzgk,4331
63
- pkgs/type_spec/actions_registry/emit_typescript.py,sha256=ben0W7qwaVCzLO-t3NEJPPNGEE_6sKLdJMuAh8aMBnw,6044
63
+ pkgs/type_spec/actions_registry/emit_typescript.py,sha256=Z1ZM4zOw26tvLspvW6Emg79-jxjhNBse-8yaionbmeo,6066
64
64
  pkgs/type_spec/parts/base.py.prepart,sha256=wGNoDyQnLolHRZGRwHQX5TrPfKnu558NXCocYvqyroc,2174
65
65
  pkgs/type_spec/parts/base.ts.prepart,sha256=2FJJvpg2olCcavxj0nbYWdwKl6KeScour2JjSvN42l8,1001
66
66
  pkgs/type_spec/type_info/__main__.py,sha256=pmVjVqXyVh8vKTNCTFgz80Sg74C5BKToP3E6GS-X_So,857
67
- pkgs/type_spec/type_info/emit_type_info.py,sha256=7FtMm_WOhxoT8Hy2DDorexIccwCNVZ9mJTBURD8l9Rk,13282
67
+ pkgs/type_spec/type_info/emit_type_info.py,sha256=1R1ygKbGBIrKDCh3NDiBB6w9ofRCoCjujhHZx9A4_Wc,13289
68
68
  pkgs/type_spec/value_spec/__init__.py,sha256=Z-grlcZtxAfEXhPHsK0nD7PFLGsv4eqvunaPN7_TA84,83
69
69
  pkgs/type_spec/value_spec/__main__.py,sha256=6bzP85p_Cm4bPp5tXz8D_4p64wMn5SKsXC7SqSZquYc,8318
70
- pkgs/type_spec/value_spec/convert_type.py,sha256=SAYyEV6orQJJbkXSE4hhtOQJ2vKUXJCKPeYPrB8G9oA,2272
71
- pkgs/type_spec/value_spec/emit_python.py,sha256=rjg6LIGYdaagrZ19XpDfW_Z7LPNwCMDceBje5dsMFbw,6959
70
+ pkgs/type_spec/value_spec/convert_type.py,sha256=Tg5YsYOwvmf_EqbCAtCmqy3-dud8OwdbEOzAaRN7cCs,2286
71
+ pkgs/type_spec/value_spec/emit_python.py,sha256=KXZqEw7ZNoDk2i77UV7jljiKuE_kgmp7oRyKRIxYUhY,7007
72
72
  pkgs/type_spec/value_spec/types.py,sha256=a2zxbbCRWepY1l8OtjeCDKgBKFPFHVgV99oP6pTtaro,441
73
73
  uncountable/__init__.py,sha256=8l8XWNCKsu7TG94c-xa2KHpDegvxDC2FyQISdWC763Y,89
74
74
  uncountable/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
75
75
  uncountable/core/__init__.py,sha256=RFv0kO6rKFf1PtBPu83hCGmxqkJamRtsgQ9_-ztw7tA,341
76
76
  uncountable/core/async_batch.py,sha256=Gur0VOS0AH2ugwvk65hwoX-iqwQAAyJaejY_LyAZZPo,1210
77
- uncountable/core/client.py,sha256=C0hJ0_SGL5WEhPuAWDSj4ShjjIiQasxpfpnisTi-Uag,10554
78
- uncountable/core/file_upload.py,sha256=TkQ0fKbbYrPgns1Jh51JU35DUqZHB3ljOaVgjSlBx9Y,3149
77
+ uncountable/core/client.py,sha256=kKd9MvvlSKDWh69iZ6K2IfbLxMMQ8l0tFLk6p0YG6GM,10622
78
+ uncountable/core/file_upload.py,sha256=qR7BBBWVxFNrb1_WICreo3dkZygE9lcE1fmZCQrDZU0,3469
79
79
  uncountable/core/types.py,sha256=s2CjqYJpsmbC7xMwxxT7kJ_V9bwokrjjWVVjpMcQpKI,333
80
80
  uncountable/core/version.py,sha256=SqQIHLhiVZXQBeOwygS2FRZ4WEO27JmWhse0lKm7fgU,274
81
81
  uncountable/integration/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
82
- uncountable/integration/construct_client.py,sha256=e1uAMVp4FTbValCJ3gfaZFuObKxHbTXq496-T-KMOG4,1899
82
+ uncountable/integration/construct_client.py,sha256=I2XTamht13vs-JYkV4PpNS_Pc4FJm-KVYqNNvxI4qNk,1916
83
83
  uncountable/integration/cron.py,sha256=e5456IYJF2ipiSsd1R2T334lfe7mtp-gwP7JpS645L0,1858
84
84
  uncountable/integration/entrypoint.py,sha256=9rk06gBTsCqytIs8Shsnlf6ir_4Uq5d5rfP1veiSLzc,1437
85
85
  uncountable/integration/job.py,sha256=UTzcMes2KrBBRLOM3u94imMKLLnv50glqOkNf8-JOZw,1022
86
86
  uncountable/integration/server.py,sha256=bmX-ukLiNDq0ThVB2lUyXl-vtID5HI4gqJHxhsVNG3w,4440
87
- uncountable/integration/telemetry.py,sha256=wgQnaWZFmUM_4lwyghOmL4Pd8DU0qUJvxXcrgBekAKQ,5261
87
+ uncountable/integration/telemetry.py,sha256=LAXKVqq96h2pbEwUODIXKhfSt8GVqTDpDiGvAiBQhMk,5341
88
88
  uncountable/integration/db/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
89
89
  uncountable/integration/db/connect.py,sha256=YtQHJ1DBGPhxKFRCfiXqohOYUceKSxMVOJ88aPI48Ug,181
90
90
  uncountable/integration/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
91
91
  uncountable/integration/executors/executors.py,sha256=v5ClGVUlvrZcMdmGQa8Ll668G_HGTnKpGOnTM7UMZCQ,956
92
- uncountable/integration/executors/generic_upload_executor.py,sha256=Tl7Aw908CJzH3Sv-mXRAvDVHbVgghMd2WGJH-JXmQ10,10251
92
+ uncountable/integration/executors/generic_upload_executor.py,sha256=nB-kgLWXePbl8u6UwCKFXRrXnFpguMR-91ylnfeWPSA,10280
93
93
  uncountable/integration/executors/script_executor.py,sha256=OmSBOtU48G3mqza9c2lCm84pGGyaDk-ZBJCx3RsdJXc,846
94
94
  uncountable/integration/secret_retrieval/__init__.py,sha256=3QXVj35w8rRMxVvmmsViFYDi3lcb3g70incfalOEm6o,87
95
- uncountable/integration/secret_retrieval/retrieve_secret.py,sha256=BS1dXVaChgpgBDKWkcLrl9hBDQIASRQMr3l41ytfbEc,3036
96
- uncountable/types/__init__.py,sha256=UuLBpo8Uxv9k-UZS8DINg3y-LPrs2dC7Qg83BB8p2_k,8222
97
- uncountable/types/async_batch.py,sha256=ihCv5XWSTTPmuO-GMPn1EACGI2CBUIJTATZ3aPgsNBA,523
98
- uncountable/types/async_batch_processor.py,sha256=rU9s_EiaoHFp0dEL0f2tmNM0bMa01Loqv4px5SKnhj4,8595
99
- uncountable/types/async_batch_t.py,sha256=9jp9rOyetRdD5aQVyijzQggTyYU4021PBVGXk0ooBCQ,1911
95
+ uncountable/integration/secret_retrieval/retrieve_secret.py,sha256=eoPWbkUtCn_63A4TFlK_nvEDvfm4u2fiOoglmAkBG3U,3004
96
+ uncountable/types/__init__.py,sha256=0KN0QKnwQgEHP90-BfnW67OF77L4Ino-hN4ea-Tx1M0,8324
97
+ uncountable/types/async_batch.py,sha256=_OhT25_dEVts_z_n1kqfJH3xlZg3btLqR6TNkfFLlXE,609
98
+ uncountable/types/async_batch_processor.py,sha256=obVzN-PcYLV2pHScszfCGjSq6-Xc34WM1ysx6Fv6tZk,11293
99
+ uncountable/types/async_batch_t.py,sha256=ipSGz93O1KB-WE2dvlvflTKS51rJrf3bJkUojyxos7I,2193
100
100
  uncountable/types/base.py,sha256=xVSjWvA_fUUnkCg83EjoYEFvAfmskinKFMeYFOxNc9E,359
101
101
  uncountable/types/base_t.py,sha256=XXjZXexx0xWFUxMMhW8i9nIL6n8dsZVsHwdgnhZ0zJ4,2714
102
102
  uncountable/types/calculations.py,sha256=FFO_D3BbKoGDZnqWvTKpW4KF359i2vrKjpdFCLYzJC0,284
103
103
  uncountable/types/calculations_t.py,sha256=7GTSi2L8NYjzjUJJx3cmtVkK9uD-uhfYvIFK-ffQj-8,556
104
104
  uncountable/types/chemical_structure.py,sha256=E-LnikTFDoVQ1b2zKaVUIO_PAKm-7aZZYJi8I8SDSic,302
105
105
  uncountable/types/chemical_structure_t.py,sha256=aFsTkkbzy6Gvyde3qrrEYD95gcYhxkgKMiDRaRE0o-Y,760
106
- uncountable/types/client_base.py,sha256=qu1g7K5eLirZbAXRLJ_QDaVAbotC4tDW5-HZ_mkSHwE,65355
106
+ uncountable/types/client_base.py,sha256=my-n1qrfXM6TwIYw0hzp-2W-rH8FVFg1657DeYLreGU,66315
107
107
  uncountable/types/client_config.py,sha256=4h5Liko9uKCo9_0gdbPhoK6Jr2Kv7tioLiQ8iKeq-_4,301
108
108
  uncountable/types/client_config_t.py,sha256=_HdS37gMSTIiD4qLnW9dIgt8_Rt5A6xhwMGGga7vnLg,625
109
109
  uncountable/types/curves.py,sha256=W6uMpG5SyW1MS82szNpxkFEn1MnxNpBFyFbQb2Ysfng,366
110
110
  uncountable/types/curves_t.py,sha256=TDpsThz4lKmiBmS9b4ItUSCp64TGv8-qDkxb4B2RoTo,1314
111
111
  uncountable/types/entity.py,sha256=3XhLteFDRDZvHejDuYh-KvB65hpwrBygljFfiUcOAM8,315
112
- uncountable/types/entity_t.py,sha256=HhKmcH5Op8rEkDhpw7SXQ7QakbFdvG5FpB6O4e7cMY0,14439
112
+ uncountable/types/entity_t.py,sha256=kH3RyuAXEs6moMKwDGSQokzHaRBDcfuO-tHkFbre2zk,14537
113
113
  uncountable/types/experiment_groups.py,sha256=_0OXcPzSAbkE-rfKt5tPx178YJ4pcEKZvrCxUHgDnvw,309
114
114
  uncountable/types/experiment_groups_t.py,sha256=0IGAXwkYiwdjj6aFjLMihxwauACQTyuRU_1usJTeUg4,593
115
115
  uncountable/types/field_values.py,sha256=uuIWX-xmfvcinYPdfkWJeb56zzQY01mc9rmotMPMh24,503
@@ -171,8 +171,8 @@ uncountable/types/api/batch/execute_batch_load_async.py,sha256=3ptjtokj7eZ_A7OUX
171
171
  uncountable/types/api/chemical/__init__.py,sha256=gCgbynxG3jA8FQHzercKtrHKHkiIKr8APdZYUniAor8,55
172
172
  uncountable/types/api/chemical/convert_chemical_formats.py,sha256=-FKBOcg1jteFu920NM-0lBk90pfucpcg2WAsaddfDc8,1323
173
173
  uncountable/types/api/entity/__init__.py,sha256=gCgbynxG3jA8FQHzercKtrHKHkiIKr8APdZYUniAor8,55
174
- uncountable/types/api/entity/create_entities.py,sha256=qvra6BustbxWmt7_c83mCMkfxaBJ4B8xayJUvk6Ab9E,1780
175
- uncountable/types/api/entity/create_entity.py,sha256=vfbJiHOMEEzJFZNNiHbIBqisytYdKa7rTOraWdZrXpw,1950
174
+ uncountable/types/api/entity/create_entities.py,sha256=1WK3HTKR5D5Y3yFkaa5jJNourGC-IE45yRkfulty75U,1888
175
+ uncountable/types/api/entity/create_entity.py,sha256=HhR8GBSvqMdMXhm8aUtvkUPrW9bCPIfejD0QOUX4aYE,2058
176
176
  uncountable/types/api/entity/get_entities_data.py,sha256=_dyDJ8Aukeijj2U3ZEQYNECoI3TC4phn8RhgJQP1e1s,1162
177
177
  uncountable/types/api/entity/list_entities.py,sha256=93J8jbHOdBL7Ee2_z_M57JpIVsmI2RddDMpprTakYks,1717
178
178
  uncountable/types/api/entity/lock_entity.py,sha256=twQ-f61AKS_NrKP-TzqHkMmzTJGMc4yxZFl-2JctOg4,1004
@@ -219,6 +219,7 @@ uncountable/types/api/recipes/add_recipe_to_project.py,sha256=JPYk25a5JSqM2X_93W
219
219
  uncountable/types/api/recipes/archive_recipes.py,sha256=w8r7-NpWhW5oE-K6PGf0Th0T1-HkdHjjVmk5AcxUrio,852
220
220
  uncountable/types/api/recipes/associate_recipe_as_input.py,sha256=NGZ3RH2-sDcqzoVKuqnAWFmkDszvCXKLTLhs-YZe0x0,1012
221
221
  uncountable/types/api/recipes/associate_recipe_as_lot.py,sha256=iwRDSje33GaDpOVhQrFXcJjBax9k80gwbs8IUkFNKpU,940
222
+ uncountable/types/api/recipes/clear_recipe_outputs.py,sha256=htm2MsFRWvlPsIyogSiVHzQ02_ZtAOCyV5M6AMh_FHQ,890
222
223
  uncountable/types/api/recipes/create_recipe.py,sha256=-ybqIZ3LH5018g0fQ07XWq9L6OL3qkiYkXnH02kxnL0,1371
223
224
  uncountable/types/api/recipes/create_recipes.py,sha256=Cr7hXRAMnFeBNxOBv7_j-K5nkaC67cu3vFLqMBH3yVc,1555
224
225
  uncountable/types/api/recipes/disassociate_recipe_as_input.py,sha256=EreRtLP4o29RazwbM5qb1PJZqLf4Ii8xJ9NT7ciKfKo,898
@@ -242,7 +243,7 @@ uncountable/types/api/triggers/__init__.py,sha256=gCgbynxG3jA8FQHzercKtrHKHkiIKr
242
243
  uncountable/types/api/triggers/run_trigger.py,sha256=_Rpha9nxXI3Xr17CrGDtofg4HZ81x2lt0rMZ6As0qfE,893
243
244
  uncountable/types/api/uploader/__init__.py,sha256=gCgbynxG3jA8FQHzercKtrHKHkiIKr8APdZYUniAor8,55
244
245
  uncountable/types/api/uploader/invoke_uploader.py,sha256=Rc77y5q-3R9-SNQgm8P35zKaW2D1Hbtm7PDixnOn1G0,1025
245
- UncountablePythonSDK-0.0.55.dist-info/METADATA,sha256=yp10_5j-eNLvAsecV4ydazDBKLEnYB0N8-62LPziMDE,1934
246
- UncountablePythonSDK-0.0.55.dist-info/WHEEL,sha256=Mdi9PDNwEZptOjTlUcAth7XJDFtKrHYaQMPulZeBCiQ,91
247
- UncountablePythonSDK-0.0.55.dist-info/top_level.txt,sha256=1UVGjAU-6hJY9qw2iJ7nCBeEwZ793AEN5ZfKX9A1uj4,31
248
- UncountablePythonSDK-0.0.55.dist-info/RECORD,,
246
+ UncountablePythonSDK-0.0.57.dist-info/METADATA,sha256=Ay-ZqQ6b7KqlG96Kv9ZJjBheNvYMCQBgfhkbaRCQLas,1934
247
+ UncountablePythonSDK-0.0.57.dist-info/WHEEL,sha256=uCRv0ZEik_232NlR4YDw4Pv3Ajt5bKvMH13NUU7hFuI,91
248
+ UncountablePythonSDK-0.0.57.dist-info/top_level.txt,sha256=1UVGjAU-6hJY9qw2iJ7nCBeEwZ793AEN5ZfKX9A1uj4,31
249
+ UncountablePythonSDK-0.0.57.dist-info/RECORD,,
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (73.0.1)
+Generator: setuptools (74.1.1)
 Root-Is-Purelib: true
 Tag: py3-none-any
examples/create_entity.py CHANGED
@@ -14,7 +14,9 @@ entities = client.create_entity(
     definition_id=24,
     entity_type=entity_t.EntityType.LAB_REQUEST,
     field_values=[
-        field_values_t.FieldRefNameValue(field_ref_name="name", value="SDK Lab Request"),
+        field_values_t.FieldRefNameValue(
+            field_ref_name="name", value="SDK Lab Request"
+        ),
         field_values_t.FieldRefNameValue(field_ref_name="materialFamilyId", value=1),
     ],
 )
examples/edit_recipe_inputs.py CHANGED
@@ -38,7 +38,9 @@ edits.append(
     )
 )
 edits.append(
-    edit_recipe_inputs_t.RecipeInputEditAddInstructions(instructions="Mix for 3 minutes")
+    edit_recipe_inputs_t.RecipeInputEditAddInstructions(
+        instructions="Mix for 3 minutes"
+    )
 )
 batch_loader.edit_recipe_inputs(
     recipe_key=IdentifierKeyBatchReference(reference=created_recipe_reference),
pkgs/argument_parser/argument_parser.py CHANGED
@@ -48,7 +48,7 @@ class ParserExtraFieldsError(ParserError):
         self.extra_fields = extra_fields
 
     def __str__(self) -> str:
-        return f"extra fields were provided: {', '.join(self.extra_fields)}"
+        return f"extra fields were provided: {", ".join(self.extra_fields)}"
 
 
 def is_optional(field_type: typing.Any) -> bool:
@@ -250,7 +250,9 @@ def _build_parser_inner(
             convert_string_to_snake_case=context.options.convert_to_snake_case,
         )
         v_parser = _build_parser_inner(args[1], context)
-        return lambda value: origin((k_parser(k), v_parser(v)) for k, v in value.items())
+        return lambda value: origin(
+            (k_parser(k), v_parser(v)) for k, v in value.items()
+        )
 
     if origin == typing.Literal:
         valid_values: set[T] = set(typing.get_args(parsed_type))
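A side note on the quoting churn in this hunk and in many of the emitter diffs below: the new style nests double quotes directly inside double-quoted f-string expressions, which is PEP 701 syntax and only parses on Python 3.12 or newer. A minimal sketch of the two spellings (the variable name is illustrative, not from the SDK):

    # The pre-3.12 spelling mixes quote styles and works on any supported Python.
    fields = ["alpha", "beta"]
    old = f"extra fields were provided: {', '.join(fields)}"

    # The spelling emitted in 0.0.57 reuses double quotes inside the expression;
    # it is a SyntaxError before Python 3.12 (PEP 701).
    new = f"extra fields were provided: {", ".join(fields)}"

    assert old == new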
@@ -212,7 +212,8 @@ class GDriveSession(FileSystemSession):
212
212
  for file_context in files:
213
213
  if (
214
214
  valid_file_extensions is not None
215
- and os.path.splitext(file_context["name"])[1] not in valid_file_extensions
215
+ and os.path.splitext(file_context["name"])[1]
216
+ not in valid_file_extensions
216
217
  ):
217
218
  continue
218
219
  gdrive_files.append(
@@ -256,7 +257,9 @@ class GDriveSession(FileSystemSession):
256
257
  dest_filename=new_filename,
257
258
  )
258
259
  else:
259
- move_gdrive_file(self.connection, src_file.file_id, dest_file.file_id)
260
+ move_gdrive_file(
261
+ self.connection, src_file.file_id, dest_file.file_id
262
+ )
260
263
  elif isinstance(src_file, FileObjectData):
261
264
  if src_file.mime_type is None:
262
265
  raise IncompatibleFileReference(
@@ -64,7 +64,8 @@ class S3Session(FileSystemSession):
64
64
  if not recursive and (obj.key == prefix or "/" in obj.key[len(prefix) :]):
65
65
  continue
66
66
  if valid_extensions is None or any(
67
- obj.key.endswith(valid_extension) for valid_extension in valid_extensions
67
+ obj.key.endswith(valid_extension)
68
+ for valid_extension in valid_extensions
68
69
  ):
69
70
  filesystem_references.append(FileSystemFileReference(obj.key))
70
71
 
@@ -42,7 +42,8 @@ def list_sftp_files(
42
42
 
43
43
  def _add_file(path: str) -> None:
44
44
  if (
45
- valid_extensions is None or os.path.splitext(path)[1] in valid_extensions
45
+ valid_extensions is None
46
+ or os.path.splitext(path)[1] in valid_extensions
46
47
  ) and (parent_dir_path is None or os.path.dirname(path) == parent_dir_path):
47
48
  file_paths.append(path)
48
49
 
@@ -106,9 +107,9 @@ class SFTPSession(FileSystemSession):
106
107
  recursive: bool = True,
107
108
  valid_extensions: list[str] | None = None,
108
109
  ) -> list[FileSystemObject]:
109
- if not isinstance(dir_path, FileSystemFileReference) or not self.connection.isdir(
110
- dir_path.filepath
111
- ):
110
+ if not isinstance(
111
+ dir_path, FileSystemFileReference
112
+ ) or not self.connection.isdir(dir_path.filepath):
112
113
  raise IncompatibleFileReference()
113
114
 
114
115
  return [
@@ -91,11 +91,15 @@ def _get_merged_serial_class_data(type_class: type[Any]) -> _SerialClassData | N
91
91
  if curr_base_class_data is not None:
92
92
  if base_class_data is None:
93
93
  base_class_data = _SerialClassData()
94
- base_class_data.unconverted_keys |= curr_base_class_data.unconverted_keys
94
+ base_class_data.unconverted_keys |= (
95
+ curr_base_class_data.unconverted_keys
96
+ )
95
97
  base_class_data.unconverted_values |= (
96
98
  curr_base_class_data.unconverted_values
97
99
  )
98
- base_class_data.to_string_values |= curr_base_class_data.to_string_values
100
+ base_class_data.to_string_values |= (
101
+ curr_base_class_data.to_string_values
102
+ )
99
103
  base_class_data.parse_require |= curr_base_class_data.parse_require
100
104
  return base_class_data
101
105
 
@@ -45,7 +45,10 @@ def safe_load(src: str | bytes | SupportsRead) -> Any:
45
45
 
46
46
 
47
47
  def safe_dump(
48
- obj: Any, sort_keys: bool = False, indent: int | None = None, width: int | None = None
48
+ obj: Any,
49
+ sort_keys: bool = False,
50
+ indent: int | None = None,
51
+ width: int | None = None,
49
52
  ) -> str:
50
53
  return yaml.safe_dump(obj, sort_keys=sort_keys, indent=indent, width=width)
51
54
 
@@ -129,7 +129,9 @@ def _emit_action_definition(
129
129
  out.write(f"{indent}{_action_symbol_name(action_definition)}: {{\n")
130
130
  out.write(f"{sub_indent}name: {encode_common_string(action_definition.name)},\n")
131
131
  if action_definition.icon is not None:
132
- out.write(f"{sub_indent}icon: {encode_common_string(action_definition.icon)},\n")
132
+ out.write(
133
+ f"{sub_indent}icon: {encode_common_string(action_definition.icon)},\n"
134
+ )
133
135
  out.write(
134
136
  f"{sub_indent}shortDescription: {encode_common_string(action_definition.short_description)},\n"
135
137
  )
pkgs/type_spec/builder.py CHANGED
@@ -712,7 +712,9 @@ class SpecTypeDefnStringEnum(SpecTypeDefn):
712
712
  elif isinstance(data_values, list):
713
713
  for value in data_values:
714
714
  if value in self.values:
715
- raise Exception("duplicate value in typespec enum", self.name, value)
715
+ raise Exception(
716
+ "duplicate value in typespec enum", self.name, value
717
+ )
716
718
  self.values[value] = StringEnumEntry(name=value, value=value)
717
719
  else:
718
720
  raise Exception("unsupported values type")
@@ -972,7 +974,9 @@ def _parse_const(
972
974
  return value
973
975
 
974
976
  if const_type.name == BaseTypeName.s_boolean:
975
- builder.ensure(isinstance(value, bool), "invalid value for boolean constant")
977
+ builder.ensure(
978
+ isinstance(value, bool), "invalid value for boolean constant"
979
+ )
976
980
  return value
977
981
 
978
982
  raise Exception("unsupported-const-scalar-type", const_type)
@@ -1004,7 +1008,9 @@ class SpecConstant:
1004
1008
  assert isinstance(self.value, dict)
1005
1009
  # the parsing checks that the values are correct, so a simple length check
1006
1010
  # should be enough to check completeness
1007
- builder.ensure(len(key_type.values) == len(self.value), "incomplete-enum-map")
1011
+ builder.ensure(
1012
+ len(key_type.values) == len(self.value), "incomplete-enum-map"
1013
+ )
1008
1014
 
1009
1015
 
1010
1016
  class SpecNamespace:
@@ -1335,7 +1341,9 @@ class SpecBuilder:
1335
1341
  ) -> SpecType:
1336
1342
  self.push_where(spec)
1337
1343
  parsed_type = util.parse_type_str(spec)
1338
- result = self._convert_parsed_type(parsed_type, namespace, top=True, scope=scope)
1344
+ result = self._convert_parsed_type(
1345
+ parsed_type, namespace, top=True, scope=scope
1346
+ )
1339
1347
  self.pop_where()
1340
1348
  return result
1341
1349
 
@@ -1385,7 +1393,9 @@ class SpecBuilder:
1385
1393
  path_meta: list[str] | None = meta.get("path")
1386
1394
  guide_key: SpecGuideKey = RootGuideKey()
1387
1395
  if path_meta is not None:
1388
- path_details = _resolve_endpoint_path("".join(path_meta), self.api_endpoints)
1396
+ path_details = _resolve_endpoint_path(
1397
+ "".join(path_meta), self.api_endpoints
1398
+ )
1389
1399
  guide_key = EndpointGuideKey(path=path_details.resolved_path)
1390
1400
 
1391
1401
  self.guides[guide_key].append(
@@ -1398,4 +1408,4 @@ class SpecBuilder:
1398
1408
  )
1399
1409
 
1400
1410
  def resolve_proper_name(self, stype: SpecTypeDefn) -> str:
1401
- return f"{'.'.join(stype.namespace.path)}.{stype.name}"
1411
+ return f"{".".join(stype.namespace.path)}.{stype.name}"
pkgs/type_spec/config.py CHANGED
@@ -125,7 +125,9 @@ def parse_yaml_config(config_file: str) -> Config:
125
125
  python = _parse_language(PythonConfig, raw_config["python"])
126
126
  raw_open_api = raw_config.get("open_api")
127
127
  open_api = (
128
- _parse_language(OpenAPIConfig, raw_open_api) if raw_open_api is not None else None
128
+ _parse_language(OpenAPIConfig, raw_open_api)
129
+ if raw_open_api is not None
130
+ else None
129
131
  )
130
132
 
131
133
  return Config(
@@ -100,14 +100,14 @@ def _emit_type_io_ts_impl(ctx: EmitTypescriptContext, stype: builder.SpecType) -
100
100
  else:
101
101
  assert len(missable_lines) > 0 and len(required_lines) > 0
102
102
  ctx.out.write("IO.intersection([\n")
103
- ctx.out.write(f"{INDENT}IO.partial({'{'}\n")
103
+ ctx.out.write(f"{INDENT}IO.partial({"{"}\n")
104
104
  for line in missable_lines:
105
105
  ctx.out.write(f"{INDENT}{line}")
106
- ctx.out.write(f"{INDENT}{'}'}),\n")
107
- ctx.out.write(f"{INDENT}IO.type({'{'}\n")
106
+ ctx.out.write(f"{INDENT}{"}"}),\n")
107
+ ctx.out.write(f"{INDENT}IO.type({"{"}\n")
108
108
  for line in required_lines:
109
109
  ctx.out.write(f"{INDENT}{line}")
110
- ctx.out.write(f"{INDENT}{'}'}),\n")
110
+ ctx.out.write(f"{INDENT}{"}"}),\n")
111
111
  ctx.out.write("])\n")
112
112
 
113
113
  ctx.out.write("\n")
@@ -129,7 +129,7 @@ def refer_to_io_ts(
129
129
  if stype.defn_type.name == builder.BaseTypeName.s_optional:
130
130
  return f"IO.optional({refer_to_io_ts(ctx, stype.parameters[0])})"
131
131
  if stype.defn_type.name == builder.BaseTypeName.s_tuple:
132
- return f"IO.tuple([{', '.join([refer_to_io_ts(ctx, p) for p in stype.parameters])}])"
132
+ return f"IO.tuple([{", ".join([refer_to_io_ts(ctx, p) for p in stype.parameters])}])"
133
133
  return refer_to_io_ts(ctx, stype.defn_type)
134
134
 
135
135
  assert isinstance(stype, builder.SpecTypeDefn)
@@ -286,7 +286,9 @@ def _emit_endpoint_request_body(
286
286
  "type": "object",
287
287
  "title": "Body",
288
288
  "required": ["data"],
289
- "properties": {"data": {"$ref": "#/components/schema/Arguments"}},
289
+ "properties": {
290
+ "data": {"$ref": "#/components/schema/Arguments"}
291
+ },
290
292
  }
291
293
  }
292
294
  | _emit_endpoint_argument_examples(examples)
@@ -361,7 +363,9 @@ def _emit_namespace(
361
363
  | _emit_is_beta(endpoint.is_beta)
362
364
  | _emit_stability_level(endpoint.stability_level)
363
365
  | _emit_endpoint_parameters(endpoint, argument_type, ctx.endpoint.examples)
364
- | _emit_endpoint_request_body(endpoint, argument_type, ctx.endpoint.examples)
366
+ | _emit_endpoint_request_body(
367
+ endpoint, argument_type, ctx.endpoint.examples
368
+ )
365
369
  | {
366
370
  "responses": {
367
371
  "200": {
@@ -412,7 +416,7 @@ def _emit_namespace(
412
416
  {name: value.asdict() for name, value in types.items()},
413
417
  )
414
418
 
415
- path = f"{config.types_output}/common/{'/'.join(namespace.path)}.yaml"
419
+ path = f"{config.types_output}/common/{"/".join(namespace.path)}.yaml"
416
420
  oa_namespace = {"components": oa_components}
417
421
  _rewrite_with_notice(path, yaml.dumps(oa_namespace, sort_keys=False))
418
422
 
@@ -555,7 +559,7 @@ def _emit_endpoint(
555
559
  gctx.tags.add(EmitOpenAPITag(name=tag_name, description=""))
556
560
  gctx.tag_groups[tag_group].add(tag_name)
557
561
 
558
- ref_path = f"common/{'/'.join(namespace.path)}.yaml#/components/endpoint"
562
+ ref_path = f"common/{"/".join(namespace.path)}.yaml#/components/endpoint"
559
563
  ep = namespace.endpoint
560
564
  gctx.paths.append(
561
565
  EmitOpenAPIPath(
@@ -573,7 +577,7 @@ def _emit_endpoint(
573
577
  ctx.endpoint = EmitOpenAPIEndpoint(
574
578
  method=namespace.endpoint.method.lower(),
575
579
  tags=[tag_name],
576
- summary=f"{'/'.join(namespace.path[path_cutoff:])}",
580
+ summary=f"{"/".join(namespace.path[path_cutoff:])}",
577
581
  description=description,
578
582
  is_beta=namespace.endpoint.is_beta,
579
583
  stability_level=namespace.endpoint.stability_level,
@@ -690,5 +694,5 @@ def open_api_type(
690
694
  ctx.namespaces.add(stype.namespace)
691
695
  # external namespace resolution
692
696
  return OpenAPIRefType(
693
- source=f"{resolve_namespace_ref(source_path=ctx.namespace.path, ref_path=stype.namespace.path, ref='/components/schema')}/{stype.name}"
697
+ source=f"{resolve_namespace_ref(source_path=ctx.namespace.path, ref_path=stype.namespace.path, ref="/components/schema")}/{stype.name}"
694
698
  )
@@ -6,7 +6,6 @@ WORK-IN-PROGRESS, DON'T USE!
6
6
 
7
7
  from collections import defaultdict
8
8
  from dataclasses import dataclass, field
9
- from typing import TypeAlias
10
9
 
11
10
  from pkgs.serialization_util.serialization_helpers import JsonValue
12
11
 
@@ -15,9 +14,9 @@ from .open_api_util import OpenAPIType
15
14
 
16
15
  MODIFY_NOTICE = "# DO NOT MODIFY -- This file is generated by type_spec"
17
16
 
18
- GlobalContextInfo: TypeAlias = dict[str, str | dict[str, str]]
19
- TagGroupToNamedTags: TypeAlias = dict[str, str | list[str]]
20
- TagPathsToRef: TypeAlias = dict[str, dict[str, str]]
17
+ type GlobalContextInfo = dict[str, str | dict[str, str]]
18
+ type TagGroupToNamedTags = dict[str, str | list[str]]
19
+ type TagPathsToRef = dict[str, dict[str, str]]
21
20
 
22
21
 
23
22
  @dataclass
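The replacement of typing.TypeAlias annotations with `type` statements here (and again in pkgs/type_spec/value_spec/emit_python.py further down) is likewise a Python 3.12+ feature, PEP 695. A rough sketch of the equivalence, reusing one of the aliases from this file with placeholder suffixes so both forms can sit side by side:

    from typing import TypeAlias

    # Pre-3.12 spelling: an explicit TypeAlias annotation.
    GlobalContextInfoOld: TypeAlias = dict[str, str | dict[str, str]]

    # 3.12+ spelling (PEP 695): the `type` statement creates a lazily evaluated
    # alias and needs no import from typing.
    type GlobalContextInfoNew = dict[str, str | dict[str, str]]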
@@ -296,7 +296,7 @@ def _emit_types(*, builder: builder.SpecBuilder, config: PythonConfig) -> None:
296
296
  full.write(f"# === END section from {namespace.name}.part.py ===\n")
297
297
 
298
298
  basename = "/".join(namespace.path)
299
- filename = f"{config.types_output}/{basename}{'' if len(namespace.path) > 1 else '_t'}.py"
299
+ filename = f"{config.types_output}/{basename}{"" if len(namespace.path) > 1 else "_t"}.py"
300
300
  util.rewrite_file(filename, full.getvalue())
301
301
 
302
302
  # Deprecated SDK support
@@ -621,7 +621,9 @@ def _emit_string_enum(ctx: Context, stype: builder.SpecTypeDefnStringEnum) -> No
621
621
  ctx.out.write(f"{INDENT}labels={{\n")
622
622
  for entry in stype.values.values():
623
623
  if entry.label is not None:
624
- ctx.out.write(f'{INDENT}{INDENT}"{entry.value}": "{entry.label}",\n')
624
+ ctx.out.write(
625
+ f'{INDENT}{INDENT}"{entry.value}": "{entry.label}",\n'
626
+ )
625
627
 
626
628
  ctx.out.write(f"{INDENT}}},\n")
627
629
  if need_deprecated:
@@ -722,7 +724,9 @@ def _emit_properties(
722
724
  if (
723
725
  isinstance(prop.spec_type, builder.SpecTypeInstance)
724
726
  and (
725
- prop.spec_type.defn_type.is_base_type(builder.BaseTypeName.s_list)
727
+ prop.spec_type.defn_type.is_base_type(
728
+ builder.BaseTypeName.s_list
729
+ )
726
730
  )
727
731
  and default == "[]"
728
732
  ):
@@ -1026,7 +1030,7 @@ def _emit_namespace_imports(
1026
1030
  if ns.endpoint is not None:
1027
1031
  import_alias = "_".join(ns.path[2:]) + "_t"
1028
1032
  out.write(
1029
- f"import {config.types_package}.{'.'.join(ns.path)} as {import_alias}\n"
1033
+ f"import {config.types_package}.{".".join(ns.path)} as {import_alias}\n"
1030
1034
  )
1031
1035
  continue
1032
1036
  elif from_namespace is not None:
@@ -1184,7 +1188,7 @@ def _emit_api_argument_lookup(
1184
1188
  continue
1185
1189
 
1186
1190
  import_alias = "_".join(namespace.path[1:])
1187
- api_import = f"{config.types_package}.{'.'.join(namespace.path)}"
1191
+ api_import = f"{config.types_package}.{".".join(namespace.path)}"
1188
1192
  imports.append(f"import {api_import} as {import_alias}")
1189
1193
 
1190
1194
  route_group = (
@@ -92,7 +92,9 @@ def _emit_types(builder: builder.SpecBuilder, config: TypeScriptConfig) -> None:
92
92
  builder.namespaces.values(),
93
93
  key=lambda ns: _resolve_namespace_name(ns),
94
94
  ):
95
- ctx = EmitTypescriptContext(out=io.StringIO(), namespace=namespace, config=config)
95
+ ctx = EmitTypescriptContext(
96
+ out=io.StringIO(), namespace=namespace, config=config
97
+ )
96
98
 
97
99
  _emit_namespace(ctx, namespace)
98
100
 
@@ -109,7 +111,7 @@ def _emit_types(builder: builder.SpecBuilder, config: TypeScriptConfig) -> None:
109
111
  # Try to capture some common incompleteness errors
110
112
  if namespace.endpoint is None or namespace.endpoint.function is None:
111
113
  raise Exception(
112
- f"Namespace {'/'.join(namespace.path)} is incomplete. It should have an endpoint with function, types, and/or constants"
114
+ f"Namespace {"/".join(namespace.path)} is incomplete. It should have an endpoint with function, types, and/or constants"
113
115
  )
114
116
  continue
115
117
 
@@ -161,7 +163,9 @@ def _emit_types(builder: builder.SpecBuilder, config: TypeScriptConfig) -> None:
161
163
  util.rewrite_file(f"{config.types_output}/index.ts", index_out.getvalue())
162
164
 
163
165
 
164
- def _emit_namespace(ctx: EmitTypescriptContext, namespace: builder.SpecNamespace) -> None:
166
+ def _emit_namespace(
167
+ ctx: EmitTypescriptContext, namespace: builder.SpecNamespace
168
+ ) -> None:
165
169
  for stype in namespace.types.values():
166
170
  if namespace.emit_io_ts:
167
171
  emit_type_io_ts(ctx, stype, namespace.derive_types_from_io_ts)
@@ -222,7 +226,7 @@ def _emit_endpoint(
222
226
  wrap_call = (
223
227
  f"{wrap_name}<Arguments>" if is_binary else f"{wrap_name}<Arguments, Response>"
224
228
  )
225
- type_path = f"unc_mat/types/{'/'.join(namespace.path)}"
229
+ type_path = f"unc_mat/types/{"/".join(namespace.path)}"
226
230
 
227
231
  if is_binary:
228
232
  tsx_response_part = f"""import {{ {wrap_name} }} from "unc_base/api"
@@ -261,12 +265,14 @@ export const apiCall = {wrap_call}(
261
265
  )
262
266
  {data_loader_body}"""
263
267
 
264
- output = f"{ctx.config.routes_output}/{'/'.join(namespace.path)}.tsx"
268
+ output = f"{ctx.config.routes_output}/{"/".join(namespace.path)}.tsx"
265
269
  util.rewrite_file(output, tsx_api)
266
270
 
267
271
  # Hacky index support, until enough is migrated to regen entirely
268
272
  # Emits the import into the UI API index file
269
- index_path = f"{ctx.config.routes_output}/{'/'.join(namespace.path[0:-1])}/index.tsx"
273
+ index_path = (
274
+ f"{ctx.config.routes_output}/{"/".join(namespace.path[0:-1])}/index.tsx"
275
+ )
270
276
  api_name = f"Api{ts_type_name(namespace.path[0 - 1])}"
271
277
  if os.path.exists(index_path):
272
278
  with open(index_path) as index:
@@ -404,7 +410,10 @@ def refer_to_impl(
404
410
  spec, multi = refer_to_impl(ctx, stype.parameters[0])
405
411
  return f"readonly ({spec})[]" if multi else f"readonly {spec}[]", False
406
412
  if stype.defn_type.name == builder.BaseTypeName.s_union:
407
- return f'({" | ".join([refer_to(ctx, p) for p in stype.parameters])})', False
413
+ return (
414
+ f'({" | ".join([refer_to(ctx, p) for p in stype.parameters])})',
415
+ False,
416
+ )
408
417
  if stype.defn_type.name == builder.BaseTypeName.s_literal:
409
418
  parts = []
410
419
  for parameter in stype.parameters:
@@ -414,7 +423,7 @@ def refer_to_impl(
414
423
  if stype.defn_type.name == builder.BaseTypeName.s_optional:
415
424
  return f"{refer_to(ctx, stype.parameters[0])} | null", True
416
425
  if stype.defn_type.name == builder.BaseTypeName.s_tuple:
417
- return f"[{', '.join([refer_to(ctx, p) for p in stype.parameters])}]", False
426
+ return f"[{", ".join([refer_to(ctx, p) for p in stype.parameters])}]", False
418
427
  params = ", ".join([refer_to(ctx, p) for p in stype.parameters])
419
428
  return f"{refer_to(ctx, stype.defn_type)}<{params}>", False
420
429
 
@@ -3,7 +3,7 @@ import dataclasses
3
3
  import decimal
4
4
  import io
5
5
  import json
6
- from typing import Any, Optional, TypeAlias, Union, cast
6
+ from typing import Any, Optional, Union, cast
7
7
 
8
8
  from main.base.types import data_t
9
9
  from main.base.types.base_t import PureJsonValue
@@ -134,7 +134,7 @@ class MapStringEnum(MapTypeBase):
134
134
  values: dict[str, str]
135
135
 
136
136
 
137
- MapType: TypeAlias = Union[MapTypeObject, MapTypeAlias, MapStringEnum]
137
+ type MapType = Union[MapTypeObject, MapTypeAlias, MapStringEnum]
138
138
 
139
139
 
140
140
  @dataclasses.dataclass
@@ -255,7 +255,9 @@ def _extract_and_validate_layout(
255
255
  assert group_ref_name in layout, f"missing-base-group:{group_ref_name}"
256
256
 
257
257
  for prop_ref_name in stype.properties:
258
- assert prop_ref_name in all_fields_group, f"layout-missing-field:{prop_ref_name}"
258
+ assert (
259
+ prop_ref_name in all_fields_group
260
+ ), f"layout-missing-field:{prop_ref_name}"
259
261
 
260
262
  return layout
261
263
 
@@ -25,7 +25,9 @@ TYPE_MAP = {
25
25
  "List": MappedType(base_type=value_spec_t.BaseType.LIST, param_count=1),
26
26
  "Optional": MappedType(base_type=value_spec_t.BaseType.OPTIONAL, param_count=1),
27
27
  "String": MappedType(base_type=value_spec_t.BaseType.STRING),
28
- "Union": MappedType(base_type=value_spec_t.BaseType.UNION, variable_param_count=True),
28
+ "Union": MappedType(
29
+ base_type=value_spec_t.BaseType.UNION, variable_param_count=True
30
+ ),
29
31
  # not part of type_spec's types now
30
32
  "Symbol": MappedType(base_type=value_spec_t.BaseType.SYMBOL),
31
33
  "Any": MappedType(base_type=value_spec_t.BaseType.ANY),
@@ -160,7 +160,9 @@ def _emit_function(function: value_spec_t.Function, indent: str) -> str:
160
160
  sub_indent = indent + INDENT
161
161
  out.write(f"{_function_symbol_name(function)} = value_spec_t.Function(\n")
162
162
  out.write(f"{sub_indent}name={encode_common_string(function.name)},\n")
163
- out.write(f"{sub_indent}description={encode_common_string(function.description)},\n")
163
+ out.write(
164
+ f"{sub_indent}description={encode_common_string(function.description)},\n"
165
+ )
164
166
  out.write(f"{sub_indent}brief={encode_common_string(function.brief)},\n")
165
167
  out.write(
166
168
  f"{sub_indent}return_value={_emit_function_return(function.return_value, sub_indent)},\n"
@@ -184,16 +186,22 @@ def _emit_argument(argument: value_spec_t.FunctionArgument, indent: str) -> str:
184
186
  out.write("value_spec_t.FunctionArgument(\n")
185
187
  out.write(f"{sub_indent}ref_name={encode_common_string(argument.ref_name)},\n")
186
188
  out.write(f"{sub_indent}name={encode_common_string(argument.name)},\n")
187
- out.write(f"{sub_indent}description={encode_common_string(argument.description)},\n")
189
+ out.write(
190
+ f"{sub_indent}description={encode_common_string(argument.description)},\n"
191
+ )
188
192
  out.write(f"{sub_indent}pass_null={str(argument.pass_null)},\n")
189
- out.write(f"{sub_indent}extant=value_spec_t.ArgumentExtant.{argument.extant.name},\n")
193
+ out.write(
194
+ f"{sub_indent}extant=value_spec_t.ArgumentExtant.{argument.extant.name},\n"
195
+ )
190
196
  out.write(f"{sub_indent}type={_emit_type(argument.type, sub_indent)},\n")
191
197
  out.write(f"{indent})")
192
198
 
193
199
  return out.getvalue()
194
200
 
195
201
 
196
- def _emit_function_return(return_value: value_spec_t.FunctionReturn, indent: str) -> str:
202
+ def _emit_function_return(
203
+ return_value: value_spec_t.FunctionReturn, indent: str
204
+ ) -> str:
197
205
  out = io.StringIO()
198
206
 
199
207
  sub_indent = indent + INDENT
uncountable/core/client.py CHANGED
@@ -56,7 +56,9 @@ HTTPRequest = HTTPPostRequest | HTTPGetRequest
 
 @dataclass(kw_only=True)
 class ClientConfig(ClientConfigOptions):
-    transform_request: typing.Callable[[requests.Request], requests.Request] | None = None
+    transform_request: typing.Callable[[requests.Request], requests.Request] | None = (
+        None
+    )
     job_logger: typing.Optional[JobLogger] = None
 
 
@@ -153,10 +155,12 @@ class Client(ClientMethods):
     ):
         self._auth_details = auth_details
         self._base_url = base_url
-        self._file_uploader = FileUploader(self._base_url, self._auth_details)
         self._cfg = config or ClientConfig()
         self._session = requests.Session()
         self._session.verify = not self._cfg.allow_insecure_tls
+        self._file_uploader = FileUploader(
+            self._base_url, self._auth_details, self._cfg.allow_insecure_tls
+        )
 
     def _get_response_json(
         self, response: requests.Response, request_id: str
uncountable/core/file_upload.py CHANGED
@@ -68,10 +68,17 @@ class UploadFailed(Exception):
 class FileUploader:
     _auth_details: AuthDetailsAll
     _base_url: str
-
-    def __init__(self: Self, base_url: str, auth_details: AuthDetailsAll) -> None:
+    _allow_insecure_tls: bool
+
+    def __init__(
+        self: Self,
+        base_url: str,
+        auth_details: AuthDetailsAll,
+        allow_insecure_tls: bool = False,
+    ) -> None:
         self._base_url = base_url
         self._auth_details = auth_details
+        self._allow_insecure_tls = allow_insecure_tls
 
     async def _upload_file(self: Self, file_upload: FileUpload) -> UploadedFile:
         creation_url = f"{self._base_url}/api/external/file_upload/files"
@@ -92,6 +99,9 @@ class FileUploader:
                 file_bytes.bytes_data,
                 {"filename": file_bytes.name.encode()},
                 client_session=session,
+                config=aiotus.RetryConfiguration(
+                    ssl=False if self._allow_insecure_tls else None
+                ),
                 chunksize=_CHUNK_SIZE,
             )
             if location is None:
@@ -100,7 +110,9 @@ class FileUploader:
                 name=file_bytes.name, file_id=int(location.path.split("/")[-1])
             )
 
-    def upload_files(self: Self, *, file_uploads: list[FileUpload]) -> list[UploadedFile]:
+    def upload_files(
+        self: Self, *, file_uploads: list[FileUpload]
+    ) -> list[UploadedFile]:
         return [
             asyncio.run(self._upload_file(file_upload)) for file_upload in file_uploads
         ]
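Taken together with the client.py hunk above, allow_insecure_tls now covers the chunked tus uploads as well: Client builds its FileUploader after the config is resolved and passes the flag through, and FileUploader forwards it to aiotus as RetryConfiguration(ssl=False). A hedged constructor sketch; the import paths, the DataFileUpload name, the URL, and the credentials are assumptions for illustration, not verified API:

    import io

    # Assumed import locations -- check them against the installed package.
    from uncountable.core.file_upload import FileUploader
    from uncountable.core.types import AuthDetailsApiKey, DataFileUpload

    uploader = FileUploader(
        "https://example.uncountable.com",                      # placeholder base URL
        AuthDetailsApiKey(api_id="...", api_secret_key="..."),  # placeholder credentials
        allow_insecure_tls=True,  # new in 0.0.57; disables TLS verification for uploads
    )
    uploaded = uploader.upload_files(
        file_uploads=[DataFileUpload(data=io.BytesIO(b"hello"), name="hello.txt")]
    )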
@@ -23,7 +23,8 @@ def _construct_auth_details(profile_meta: ProfileMetadata) -> AuthDetailsAll:
23
23
  profile_meta.auth_retrieval.api_id_secret, profile_metadata=profile_meta
24
24
  )
25
25
  api_key = retrieve_secret(
26
- profile_meta.auth_retrieval.api_key_secret, profile_metadata=profile_meta
26
+ profile_meta.auth_retrieval.api_key_secret,
27
+ profile_metadata=profile_meta,
27
28
  )
28
29
 
29
30
  return AuthDetailsApiKey(api_id=api_id, api_secret_key=api_key)
@@ -68,7 +68,8 @@ def _filter_by_file_extension(
68
68
  file
69
69
  for file in files
70
70
  if file.filename is not None
71
- and os.path.splitext(file.filename)[-1] in remote_directory.valid_file_extensions
71
+ and os.path.splitext(file.filename)[-1]
72
+ in remote_directory.valid_file_extensions
72
73
  ]
73
74
 
74
75
 
@@ -102,7 +103,7 @@ def _pull_remote_directory_data(
102
103
  files_to_pull = _filter_by_max_files(remote_directory, files_to_pull)
103
104
 
104
105
  logger.log_info(
105
- f"Accessing SFTP directory: {remote_directory.src_path} and pulling files: {', '.join([f.filename for f in files_to_pull if f.filename is not None])}",
106
+ f"Accessing SFTP directory: {remote_directory.src_path} and pulling files: {", ".join([f.filename for f in files_to_pull if f.filename is not None])}",
106
107
  )
107
108
  return filesystem_session.download_files(files_to_pull)
108
109
 
@@ -210,9 +211,7 @@ class GenericUploadJob(Job):
210
211
  assert (
211
212
  self.data_source.region_name is not None
212
213
  ), "region_name must be specified for cloud_provider OVH"
213
- endpoint_url = (
214
- f"https://s3.{self.data_source.region_name}.cloud.ovh.net"
215
- )
214
+ endpoint_url = f"https://s3.{self.data_source.region_name}.cloud.ovh.net"
216
215
  else:
217
216
  endpoint_url = self.data_source.endpoint_url
218
217
 
@@ -248,7 +247,8 @@ class GenericUploadJob(Job):
248
247
  for file_data in filtered_file_data:
249
248
  files_to_upload.append(
250
249
  DataFileUpload(
251
- data=io.BytesIO(file_data.file_data), name=file_data.filename
250
+ data=io.BytesIO(file_data.file_data),
251
+ name=file_data.filename,
252
252
  )
253
253
  )
254
254
  if not self.upload_strategy.skip_moving_files:
@@ -256,7 +256,9 @@ class GenericUploadJob(Job):
256
256
  filesystem_session=filesystem_session,
257
257
  remote_directory_scope=remote_directory,
258
258
  success_file_paths=[
259
- file.filepath if file.filepath is not None else file.filename
259
+ file.filepath
260
+ if file.filepath is not None
261
+ else file.filename
260
262
  for file in filtered_file_data
261
263
  ],
262
264
  # IMPROVE: use triggers/webhooks to mark failed files as failed
@@ -75,9 +75,7 @@ def retrieve_secret(
75
75
 
76
76
  match secret_retrieval:
77
77
  case SecretRetrievalEnv():
78
- env_name = (
79
- f"UNC_{profile_metadata.name.upper()}_{secret_retrieval.env_key.upper()}"
80
- )
78
+ env_name = f"UNC_{profile_metadata.name.upper()}_{secret_retrieval.env_key.upper()}"
81
79
  secret = os.environ.get(env_name)
82
80
  if secret is None:
83
81
  raise SecretRetrievalError(
@@ -72,13 +72,21 @@ class Logger:
72
72
  log_file.flush()
73
73
 
74
74
  def log_info(self, message: str, *, attributes: Attributes | None = None) -> None:
75
- self._emit_log(message=message, severity=LogSeverity.INFO, attributes=attributes)
75
+ self._emit_log(
76
+ message=message, severity=LogSeverity.INFO, attributes=attributes
77
+ )
76
78
 
77
- def log_warning(self, message: str, *, attributes: Attributes | None = None) -> None:
78
- self._emit_log(message=message, severity=LogSeverity.WARN, attributes=attributes)
79
+ def log_warning(
80
+ self, message: str, *, attributes: Attributes | None = None
81
+ ) -> None:
82
+ self._emit_log(
83
+ message=message, severity=LogSeverity.WARN, attributes=attributes
84
+ )
79
85
 
80
86
  def log_error(self, message: str, *, attributes: Attributes | None = None) -> None:
81
- self._emit_log(message=message, severity=LogSeverity.ERROR, attributes=attributes)
87
+ self._emit_log(
88
+ message=message, severity=LogSeverity.ERROR, attributes=attributes
89
+ )
82
90
 
83
91
 
84
92
  class JobLogger(Logger):
uncountable/types/__init__.py CHANGED
@@ -12,6 +12,7 @@ from . import async_batch_t as async_batch_t
 from . import base_t as base_t
 from . import calculations_t as calculations_t
 from . import chemical_structure_t as chemical_structure_t
+from .api.recipes import clear_recipe_outputs as clear_recipe_outputs_t
 from . import client_config_t as client_config_t
 from .api.chemical import convert_chemical_formats as convert_chemical_formats_t
 from .api.entity import create_entities as create_entities_t
@@ -109,6 +110,7 @@ __all__: list[str] = [
     "base_t",
     "calculations_t",
     "chemical_structure_t",
+    "clear_recipe_outputs_t",
     "client_config_t",
     "convert_chemical_formats_t",
     "create_entities_t",
@@ -34,7 +34,7 @@ class EntityToCreate:
34
34
  @dataclasses.dataclass(kw_only=True)
35
35
  class Arguments:
36
36
  definition_id: base_t.ObjectId
37
- entity_type: typing.Union[typing.Literal[entity_t.EntityType.LAB_REQUEST], typing.Literal[entity_t.EntityType.APPROVAL], typing.Literal[entity_t.EntityType.CUSTOM_ENTITY], typing.Literal[entity_t.EntityType.INVENTORY_AMOUNT], typing.Literal[entity_t.EntityType.TASK], typing.Literal[entity_t.EntityType.PROJECT], typing.Literal[entity_t.EntityType.EQUIPMENT], typing.Literal[entity_t.EntityType.INV_LOCAL_LOCATIONS], typing.Literal[entity_t.EntityType.FIELD_OPTION_SET], typing.Literal[entity_t.EntityType.WEBHOOK], typing.Literal[entity_t.EntityType.SPECS], typing.Literal[entity_t.EntityType.GOAL]]
37
+ entity_type: typing.Union[typing.Literal[entity_t.EntityType.LAB_REQUEST], typing.Literal[entity_t.EntityType.APPROVAL], typing.Literal[entity_t.EntityType.CUSTOM_ENTITY], typing.Literal[entity_t.EntityType.INVENTORY_AMOUNT], typing.Literal[entity_t.EntityType.TASK], typing.Literal[entity_t.EntityType.PROJECT], typing.Literal[entity_t.EntityType.EQUIPMENT], typing.Literal[entity_t.EntityType.INV_LOCAL_LOCATIONS], typing.Literal[entity_t.EntityType.FIELD_OPTION_SET], typing.Literal[entity_t.EntityType.WEBHOOK], typing.Literal[entity_t.EntityType.SPECS], typing.Literal[entity_t.EntityType.GOAL], typing.Literal[entity_t.EntityType.INGREDIENT_TAG_MAP], typing.Literal[entity_t.EntityType.INGREDIENT_TAG]]
38
38
  entities_to_create: list[EntityToCreate]
39
39
 
40
40
 
@@ -40,7 +40,7 @@ class EntityFieldInitialValue:
40
40
  @dataclasses.dataclass(kw_only=True)
41
41
  class Arguments:
42
42
  definition_id: base_t.ObjectId
43
- entity_type: typing.Union[typing.Literal[entity_t.EntityType.LAB_REQUEST], typing.Literal[entity_t.EntityType.APPROVAL], typing.Literal[entity_t.EntityType.CUSTOM_ENTITY], typing.Literal[entity_t.EntityType.INVENTORY_AMOUNT], typing.Literal[entity_t.EntityType.TASK], typing.Literal[entity_t.EntityType.PROJECT], typing.Literal[entity_t.EntityType.EQUIPMENT], typing.Literal[entity_t.EntityType.INV_LOCAL_LOCATIONS], typing.Literal[entity_t.EntityType.FIELD_OPTION_SET], typing.Literal[entity_t.EntityType.WEBHOOK], typing.Literal[entity_t.EntityType.SPECS], typing.Literal[entity_t.EntityType.GOAL]]
43
+ entity_type: typing.Union[typing.Literal[entity_t.EntityType.LAB_REQUEST], typing.Literal[entity_t.EntityType.APPROVAL], typing.Literal[entity_t.EntityType.CUSTOM_ENTITY], typing.Literal[entity_t.EntityType.INVENTORY_AMOUNT], typing.Literal[entity_t.EntityType.TASK], typing.Literal[entity_t.EntityType.PROJECT], typing.Literal[entity_t.EntityType.EQUIPMENT], typing.Literal[entity_t.EntityType.INV_LOCAL_LOCATIONS], typing.Literal[entity_t.EntityType.FIELD_OPTION_SET], typing.Literal[entity_t.EntityType.WEBHOOK], typing.Literal[entity_t.EntityType.SPECS], typing.Literal[entity_t.EntityType.GOAL], typing.Literal[entity_t.EntityType.INGREDIENT_TAG_MAP], typing.Literal[entity_t.EntityType.INGREDIENT_TAG]]
44
44
  field_values: typing.Optional[typing.Optional[list[field_values_t.FieldRefNameValue]]] = None
45
45
 
46
46
 
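The two Arguments unions above gain typing.Literal[entity_t.EntityType.INGREDIENT_TAG_MAP] and typing.Literal[entity_t.EntityType.INGREDIENT_TAG], so ingredient-tag entities can now be created through create_entity and create_entities. A sketch in the style of examples/create_entity.py; the definition id and field values are placeholders, and `client` is assumed to be an already constructed Client:

    from uncountable.types import entity_t, field_values_t

    created = client.create_entity(
        definition_id=42,  # placeholder definition id
        entity_type=entity_t.EntityType.INGREDIENT_TAG,  # newly accepted in 0.0.57
        field_values=[
            field_values_t.FieldRefNameValue(field_ref_name="name", value="SDK Ingredient Tag"),
        ],
    )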
uncountable/types/api/recipes/clear_recipe_outputs.py ADDED
@@ -0,0 +1,35 @@
+# DO NOT MODIFY -- This file is generated by type_spec
+# flake8: noqa: F821
+# ruff: noqa: E402 Q003
+# fmt: off
+# isort: skip_file
+from __future__ import annotations
+import typing # noqa: F401
+import datetime # noqa: F401
+from decimal import Decimal # noqa: F401
+import dataclasses
+from ... import async_batch_t
+from ... import identifier_t
+
+__all__: list[str] = [
+    "Arguments",
+    "Data",
+    "ENDPOINT_METHOD",
+    "ENDPOINT_PATH",
+]
+
+ENDPOINT_METHOD = "POST"
+ENDPOINT_PATH = "api/external/recipes/clear_recipe_outputs"
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@dataclasses.dataclass(kw_only=True)
+class Arguments:
+    recipe_key: identifier_t.IdentifierKey
+
+
+# DO NOT MODIFY -- This file is generated by type_spec
+@dataclasses.dataclass(kw_only=True)
+class Data(async_batch_t.AsyncBatchActionReturn):
+    pass
+# DO NOT MODIFY -- This file is generated by type_spec
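Together with the client_base.py and async_batch additions below, this new module backs a clear_recipe_outputs endpoint on the client. A hedged call sketch; IdentifierKeyId is an assumed variant name, so check uncountable.types.identifier_t for the concrete IdentifierKey constructors:

    from uncountable.types import clear_recipe_outputs_t, identifier_t

    # `client` is an already constructed Client; the recipe id is a placeholder.
    result: clear_recipe_outputs_t.Data = client.clear_recipe_outputs(
        recipe_key=identifier_t.IdentifierKeyId(id=12345),  # assumed constructor name
    )
    # Data subclasses AsyncBatchActionReturn, which carries an optional result_id.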
uncountable/types/async_batch.py CHANGED
@@ -7,5 +7,6 @@
 from .async_batch_t import AsyncBatchRequestPath as AsyncBatchRequestPath
 from .async_batch_t import AsyncBatchRequest as AsyncBatchRequest
 from .async_batch_t import AsyncBatchActionReturn as AsyncBatchActionReturn
+from .async_batch_t import SavedAsyncBatchActionReturn as SavedAsyncBatchActionReturn
 from .async_batch_t import QueuedAsyncBatchRequest as QueuedAsyncBatchRequest
 # DO NOT MODIFY -- This file is generated by type_spec
@@ -9,8 +9,10 @@ import typing # noqa: F401
9
9
  import datetime # noqa: F401
10
10
  from decimal import Decimal # noqa: F401
11
11
  import uncountable.types.api.equipment.associate_equipment_input as associate_equipment_input_t
12
+ import uncountable.types.api.recipes.associate_recipe_as_input as associate_recipe_as_input_t
12
13
  from uncountable.types import async_batch_t
13
14
  from uncountable.types import base_t
15
+ import uncountable.types.api.recipes.clear_recipe_outputs as clear_recipe_outputs_t
14
16
  import uncountable.types.api.recipes.create_recipe as create_recipe_t
15
17
  import uncountable.types.api.recipes.edit_recipe_inputs as edit_recipe_inputs_t
16
18
  from uncountable.types import generic_upload_t
@@ -69,6 +71,76 @@ class AsyncBatchProcessorBase(ABC):
              batch_reference=req.batch_reference,
          )
 
+     def associate_recipe_as_input(
+         self,
+         *,
+         recipe_key: identifier_t.IdentifierKey,
+         input_key: typing.Optional[identifier_t.IdentifierKey] = None,
+         show_in_listings: typing.Optional[bool] = None,
+         depends_on: typing.Optional[list[str]] = None,
+     ) -> async_batch_t.QueuedAsyncBatchRequest:
+         """Create or return the input association for a recipe
+
+         :param recipe_key: Identifier for the recipe
+         :param input_key: Identifier for an input to use for the association. Optionally supplied. If not supplied, one is created
+         :param show_in_listings: After associating the input should it be present in listings
+         :param depends_on: A list of batch reference keys to process before processing this request
+         """
+         args = associate_recipe_as_input_t.Arguments(
+             recipe_key=recipe_key,
+             input_key=input_key,
+             show_in_listings=show_in_listings,
+         )
+         json_data = serialize_for_api(args)
+
+         batch_reference = str(uuid.uuid4())
+
+         req = async_batch_t.AsyncBatchRequest(
+             path=async_batch_t.AsyncBatchRequestPath.ASSOCIATE_RECIPE_AS_INPUT,
+             data=json_data,
+             depends_on=depends_on,
+             batch_reference=batch_reference,
+         )
+
+         self._enqueue(req)
+
+         return async_batch_t.QueuedAsyncBatchRequest(
+             path=req.path,
+             batch_reference=req.batch_reference,
+         )
+
+     def clear_recipe_outputs(
+         self,
+         *,
+         recipe_key: identifier_t.IdentifierKey,
+         depends_on: typing.Optional[list[str]] = None,
+     ) -> async_batch_t.QueuedAsyncBatchRequest:
+         """Clears all output values & output metadata for a given recipe
+
+         :param recipe_key: The identifier of the recipe
+         :param depends_on: A list of batch reference keys to process before processing this request
+         """
+         args = clear_recipe_outputs_t.Arguments(
+             recipe_key=recipe_key,
+         )
+         json_data = serialize_for_api(args)
+
+         batch_reference = str(uuid.uuid4())
+
+         req = async_batch_t.AsyncBatchRequest(
+             path=async_batch_t.AsyncBatchRequestPath.CLEAR_RECIPE_OUTPUTS,
+             data=json_data,
+             depends_on=depends_on,
+             batch_reference=batch_reference,
+         )
+
+         self._enqueue(req)
+
+         return async_batch_t.QueuedAsyncBatchRequest(
+             path=req.path,
+             batch_reference=req.batch_reference,
+         )
+
      def create_recipe(
          self,
          *,
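Both new methods only enqueue work; nothing is sent until the batch is processed, and depends_on lets a later request wait on an earlier one by its batch reference. A minimal sketch of chaining them, where the processor instance and recipe_key are assumptions taken as parameters because their construction is not shown in this diff:

    # Sketch only (not part of the diff): chaining the two new batch methods.
    from uncountable.types import async_batch_t, identifier_t

    def queue_recipe_reset(
        processor,  # assumed: a concrete AsyncBatchProcessor-style instance
        recipe_key: identifier_t.IdentifierKey,
    ) -> async_batch_t.QueuedAsyncBatchRequest:
        assoc = processor.associate_recipe_as_input(
            recipe_key=recipe_key,
            show_in_listings=True,
        )
        # depends_on makes the second request wait for the first by batch reference.
        return processor.clear_recipe_outputs(
            recipe_key=recipe_key,
            depends_on=[assoc.batch_reference],
        )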
@@ -17,6 +17,7 @@ __all__: list[str] = [
      "AsyncBatchRequest",
      "AsyncBatchRequestPath",
      "QueuedAsyncBatchRequest",
+     "SavedAsyncBatchActionReturn",
  ]
 
 
@@ -32,6 +33,7 @@ class AsyncBatchRequestPath(StrEnum):
      ASSOCIATE_EQUIPMENT_INPUT = "equipment/associate_equipment_input"
      INVOKE_UPLOADER = "uploader/invoke_uploader"
      ASSOCIATE_RECIPE_AS_INPUT = "recipes/associate_recipe_as_input"
+     CLEAR_RECIPE_OUTPUTS = "recipes/clear_recipe_outputs"
 
 
  # DO NOT MODIFY -- This file is generated by type_spec
@@ -53,6 +55,13 @@ class AsyncBatchActionReturn:
      result_id: typing.Optional[base_t.ObjectId] = None
 
 
+ # DO NOT MODIFY -- This file is generated by type_spec
+ @dataclasses.dataclass(kw_only=True)
+ class SavedAsyncBatchActionReturn:
+     identifier: str
+     result_data: AsyncBatchActionReturn
+
+
  # DO NOT MODIFY -- This file is generated by type_spec
  @dataclasses.dataclass(kw_only=True, frozen=True, eq=True)
  class QueuedAsyncBatchRequest:
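SavedAsyncBatchActionReturn simply pairs a saved batch action's identifier with its result payload. An illustrative accessor using only the fields visible in this hunk:

    # Illustrative only: relies just on identifier, result_data, and the
    # result_id field shown on AsyncBatchActionReturn above.
    from uncountable.types import async_batch_t

    def describe_saved_result(saved: async_batch_t.SavedAsyncBatchActionReturn) -> str:
        return f"{saved.identifier}: result_id={saved.result_data.result_id}"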
@@ -16,6 +16,7 @@ import uncountable.types.api.recipes.associate_recipe_as_input as associate_reci
  import uncountable.types.api.recipes.associate_recipe_as_lot as associate_recipe_as_lot_t
  from uncountable.types import async_batch_t
  from uncountable.types import base_t
+ import uncountable.types.api.recipes.clear_recipe_outputs as clear_recipe_outputs_t
  import uncountable.types.api.chemical.convert_chemical_formats as convert_chemical_formats_t
  import uncountable.types.api.entity.create_entities as create_entities_t
  import uncountable.types.api.entity.create_entity as create_entity_t
@@ -214,6 +215,25 @@ class ClientMethods(ABC):
          )
          return self.do_request(api_request=api_request, return_type=associate_recipe_as_lot_t.Data)
 
+     def clear_recipe_outputs(
+         self,
+         *,
+         recipe_key: identifier_t.IdentifierKey,
+     ) -> clear_recipe_outputs_t.Data:
+         """Clears all output values & output metadata for a given recipe
+
+         :param recipe_key: The identifier of the recipe
+         """
+         args = clear_recipe_outputs_t.Arguments(
+             recipe_key=recipe_key,
+         )
+         api_request = APIRequest(
+             method=clear_recipe_outputs_t.ENDPOINT_METHOD,
+             endpoint=clear_recipe_outputs_t.ENDPOINT_PATH,
+             args=args,
+         )
+         return self.do_request(api_request=api_request, return_type=clear_recipe_outputs_t.Data)
+
      def convert_chemical_formats(
          self,
          *,
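Unlike the batch-processor variant, this ClientMethods wrapper issues the request immediately via do_request and returns the endpoint's Data payload. A minimal usage sketch; the client instance is an assumption passed in as a parameter because its construction is not part of this diff:

    # Sketch only (not part of the diff): calling the new synchronous method.
    from uncountable.types import identifier_t
    from uncountable.types.api.recipes import clear_recipe_outputs as clear_recipe_outputs_t

    def reset_recipe_outputs(client, recipe_key: identifier_t.IdentifierKey) -> clear_recipe_outputs_t.Data:
        result = client.clear_recipe_outputs(recipe_key=recipe_key)
        # Data subclasses AsyncBatchActionReturn, so result_id (Optional[ObjectId]) is available.
        print(result.result_id)
        return result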
@@ -236,7 +256,7 @@ class ClientMethods(ABC):
          self,
          *,
          definition_id: base_t.ObjectId,
-         entity_type: typing.Union[typing.Literal[entity_t.EntityType.LAB_REQUEST], typing.Literal[entity_t.EntityType.APPROVAL], typing.Literal[entity_t.EntityType.CUSTOM_ENTITY], typing.Literal[entity_t.EntityType.INVENTORY_AMOUNT], typing.Literal[entity_t.EntityType.TASK], typing.Literal[entity_t.EntityType.PROJECT], typing.Literal[entity_t.EntityType.EQUIPMENT], typing.Literal[entity_t.EntityType.INV_LOCAL_LOCATIONS], typing.Literal[entity_t.EntityType.FIELD_OPTION_SET], typing.Literal[entity_t.EntityType.WEBHOOK], typing.Literal[entity_t.EntityType.SPECS], typing.Literal[entity_t.EntityType.GOAL]],
+         entity_type: typing.Union[typing.Literal[entity_t.EntityType.LAB_REQUEST], typing.Literal[entity_t.EntityType.APPROVAL], typing.Literal[entity_t.EntityType.CUSTOM_ENTITY], typing.Literal[entity_t.EntityType.INVENTORY_AMOUNT], typing.Literal[entity_t.EntityType.TASK], typing.Literal[entity_t.EntityType.PROJECT], typing.Literal[entity_t.EntityType.EQUIPMENT], typing.Literal[entity_t.EntityType.INV_LOCAL_LOCATIONS], typing.Literal[entity_t.EntityType.FIELD_OPTION_SET], typing.Literal[entity_t.EntityType.WEBHOOK], typing.Literal[entity_t.EntityType.SPECS], typing.Literal[entity_t.EntityType.GOAL], typing.Literal[entity_t.EntityType.INGREDIENT_TAG_MAP], typing.Literal[entity_t.EntityType.INGREDIENT_TAG]],
          entities_to_create: list[create_entities_t.EntityToCreate],
      ) -> create_entities_t.Data:
          """Creates new Uncountable entities
@@ -261,7 +281,7 @@ class ClientMethods(ABC):
          self,
          *,
          definition_id: base_t.ObjectId,
-         entity_type: typing.Union[typing.Literal[entity_t.EntityType.LAB_REQUEST], typing.Literal[entity_t.EntityType.APPROVAL], typing.Literal[entity_t.EntityType.CUSTOM_ENTITY], typing.Literal[entity_t.EntityType.INVENTORY_AMOUNT], typing.Literal[entity_t.EntityType.TASK], typing.Literal[entity_t.EntityType.PROJECT], typing.Literal[entity_t.EntityType.EQUIPMENT], typing.Literal[entity_t.EntityType.INV_LOCAL_LOCATIONS], typing.Literal[entity_t.EntityType.FIELD_OPTION_SET], typing.Literal[entity_t.EntityType.WEBHOOK], typing.Literal[entity_t.EntityType.SPECS], typing.Literal[entity_t.EntityType.GOAL]],
+         entity_type: typing.Union[typing.Literal[entity_t.EntityType.LAB_REQUEST], typing.Literal[entity_t.EntityType.APPROVAL], typing.Literal[entity_t.EntityType.CUSTOM_ENTITY], typing.Literal[entity_t.EntityType.INVENTORY_AMOUNT], typing.Literal[entity_t.EntityType.TASK], typing.Literal[entity_t.EntityType.PROJECT], typing.Literal[entity_t.EntityType.EQUIPMENT], typing.Literal[entity_t.EntityType.INV_LOCAL_LOCATIONS], typing.Literal[entity_t.EntityType.FIELD_OPTION_SET], typing.Literal[entity_t.EntityType.WEBHOOK], typing.Literal[entity_t.EntityType.SPECS], typing.Literal[entity_t.EntityType.GOAL], typing.Literal[entity_t.EntityType.INGREDIENT_TAG_MAP], typing.Literal[entity_t.EntityType.INGREDIENT_TAG]],
          field_values: typing.Optional[typing.Optional[list[field_values_t.FieldRefNameValue]]] = None,
      ) -> create_entity_t.Data:
          """Creates a new Uncountable entity
@@ -71,6 +71,7 @@ __all__: list[str] = [
      "ingredient_lot": "Ingredient Lot",
      "ingredient_role": "Ingredient Role",
      "ingredient_tag": "Ingredient Subcategory",
+     "ingredient_tag_map": "Ingredient Tag Map",
      "input_group": "Input Group",
      "inv_local_locations": "Inventory Location",
      "inventory_amount": "Inventory Amount",
@@ -221,6 +222,7 @@ class EntityType(StrEnum):
      INGREDIENT_LOT = "ingredient_lot"
      INGREDIENT_ROLE = "ingredient_role"
      INGREDIENT_TAG = "ingredient_tag"
+     INGREDIENT_TAG_MAP = "ingredient_tag_map"
      INPUT_GROUP = "input_group"
      INV_LOCAL_LOCATIONS = "inv_local_locations"
      INVENTORY_AMOUNT = "inventory_amount"
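The new INGREDIENT_TAG_MAP member, together with the widened entity_type unions in client_base.py above, means ingredient tags and ingredient tag maps can now be targeted by the entity-creation methods. A short sketch; the client instance and definition_id are assumptions taken as parameters, not values from this diff:

    # Sketch only: "client" and "definition_id" are placeholders supplied by the
    # caller; field_values is omitted since it defaults to None.
    from uncountable.types import base_t, entity_t

    def create_ingredient_tag(client, definition_id: base_t.ObjectId):
        assert entity_t.EntityType.INGREDIENT_TAG_MAP.value == "ingredient_tag_map"
        return client.create_entity(
            definition_id=definition_id,
            entity_type=entity_t.EntityType.INGREDIENT_TAG,
        )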