UncountablePythonSDK 0.0.48__py3-none-any.whl → 0.0.50__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This release has been flagged as potentially problematic.


This version of UncountablePythonSDK might be problematic; see the package registry's advisory page for more details.

@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: UncountablePythonSDK
3
- Version: 0.0.48
3
+ Version: 0.0.50
4
4
  Summary: Uncountable SDK
5
5
  Project-URL: Homepage, https://github.com/uncountableinc/uncountable-python-sdk
6
6
  Project-URL: Repository, https://github.com/uncountableinc/uncountable-python-sdk.git
@@ -27,6 +27,10 @@ Requires-Dist: PyYAML ==6.*
27
27
  Requires-Dist: google-api-python-client ==2.*
28
28
  Requires-Dist: tqdm ==4.*
29
29
  Requires-Dist: pysftp ==0.*
30
+ Requires-Dist: opentelemetry-api ==1.*
31
+ Requires-Dist: opentelemetry-exporter-otlp-proto-common ==1.*
32
+ Requires-Dist: opentelemetry-exporter-otlp-proto-http ==1.*
33
+ Requires-Dist: opentelemetry-sdk ==1.*
30
34
  Requires-Dist: paramiko ==3.*
31
35
  Requires-Dist: boto3 ==1.*
32
36
  Provides-Extra: test
@@ -3,7 +3,7 @@ docs/conf.py,sha256=YF5J-9g_Wg8wXmyHsGaE8xYlDEzqocNl3UWUmP0CwBg,1702
3
3
  docs/index.md,sha256=eEdirX_Ds6ICTRtIS5iT4irCquHcQyKN7E4M5QP9T8A,257
4
4
  docs/justfile,sha256=cvNcpb-ByPOF2aCrFlg3DDZBoYMx5W8xGdr13m9HcnI,215
5
5
  docs/quickstart.md,sha256=3GuJ0MB1O5kjlsrgAmdSkDq0rYqATrYy-tzEHDy8H-c,422
6
- docs/requirements.txt,sha256=Y6wdRPGnrd-HMShTgy9Fgydz868Bnyl007P9HCUKXhw,139
6
+ docs/requirements.txt,sha256=YDDAaHfuLxkdLhrjEUJeHDE-NSmD5chTgVTIO7BEeto,139
7
7
  docs/static/logo_blue.png,sha256=SyYpMTVhhBbhF5Wl8lWaVwz-_p1MIR6dW6bVhufQRME,46708
8
8
  docs/static/favicons/android-chrome-192x192.png,sha256=XoF-AhD55JlSBDGsEPJKfT_VeXT-awhwKyZnxLhrwvk,1369
9
9
  docs/static/favicons/android-chrome-512x512.png,sha256=1S4xwY9YtJQ5ifFsZ-DOzssoyBYs0t9uwdOUmYx0Xso,3888
@@ -25,7 +25,7 @@ pkgs/argument_parser/_is_enum.py,sha256=Gw6jJa8nBwYGqXwwCZbSnWL8Rvr5alkg5lSVAqXt
25
25
  pkgs/argument_parser/_is_namedtuple.py,sha256=Rjc1bKanIPPogl3qG5JPBxglG1TqWYOo1nxxhBASQWY,265
26
26
  pkgs/argument_parser/argument_parser.py,sha256=S5x4yDpaBqTRkmcOyX2UuBWw9iCE4j2Po5LZPg9jhe4,17308
27
27
  pkgs/argument_parser/case_convert.py,sha256=NuJLJUJRbyVb6_Slen4uqaStEHbcOS1d-hBBfDrrw-c,605
28
- pkgs/filesystem_utils/__init__.py,sha256=yxfwtYFvqq_fjMl-tg2jwa6eNPNZUF4ykqsSzALyNdw,1143
28
+ pkgs/filesystem_utils/__init__.py,sha256=NSsQrUCoGISBCqCCyq6_583sYHTVEQeDjDO8hvZn3ag,1261
29
29
  pkgs/filesystem_utils/_gdrive_session.py,sha256=OZudNoP2HikolnpurVJhJdh5fgzqbaZQvn53ReGGXx4,11015
30
30
  pkgs/filesystem_utils/_local_session.py,sha256=xFEYhAvNqrOYqwt4jrEYOuYkjJn0zclZhTelW_Q1-rw,2325
31
31
  pkgs/filesystem_utils/_s3_session.py,sha256=q4q0MTWXWu5RNRVZ5ibv4M4UXXxWl_J6xCnitvngIMM,3957
@@ -74,26 +74,28 @@ uncountable/__init__.py,sha256=8l8XWNCKsu7TG94c-xa2KHpDegvxDC2FyQISdWC763Y,89
74
74
  uncountable/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
75
75
  uncountable/core/__init__.py,sha256=RFv0kO6rKFf1PtBPu83hCGmxqkJamRtsgQ9_-ztw7tA,341
76
76
  uncountable/core/async_batch.py,sha256=Gur0VOS0AH2ugwvk65hwoX-iqwQAAyJaejY_LyAZZPo,1210
77
- uncountable/core/client.py,sha256=2QRLy0GcSamQDqUABQq8R7D_wGofYewSoTQiZzOyZXk,10168
77
+ uncountable/core/client.py,sha256=C0hJ0_SGL5WEhPuAWDSj4ShjjIiQasxpfpnisTi-Uag,10554
78
78
  uncountable/core/file_upload.py,sha256=TkQ0fKbbYrPgns1Jh51JU35DUqZHB3ljOaVgjSlBx9Y,3149
79
79
  uncountable/core/types.py,sha256=s2CjqYJpsmbC7xMwxxT7kJ_V9bwokrjjWVVjpMcQpKI,333
80
+ uncountable/core/version.py,sha256=SqQIHLhiVZXQBeOwygS2FRZ4WEO27JmWhse0lKm7fgU,274
80
81
  uncountable/integration/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
81
- uncountable/integration/construct_client.py,sha256=9ZsfkA0oZIE04mPnwGlt6EcrlnpaUPA93K100FS1F7A,1546
82
- uncountable/integration/cron.py,sha256=n4HjW-KNruYQ_09iwNBd9i4-Juy_9ddzbf71_wnydHA,1674
82
+ uncountable/integration/construct_client.py,sha256=m6iFAhyAkD3WETLGayv5t47WpwLP1bct4k1z2a2GPh0,1703
83
+ uncountable/integration/cron.py,sha256=e5456IYJF2ipiSsd1R2T334lfe7mtp-gwP7JpS645L0,1858
83
84
  uncountable/integration/entrypoint.py,sha256=9rk06gBTsCqytIs8Shsnlf6ir_4Uq5d5rfP1veiSLzc,1437
84
- uncountable/integration/job.py,sha256=5HJcjtwj_pGFYAZ6SK7xa9RqDXCoBb5rAb_OHdTR61Q,1463
85
- uncountable/integration/server.py,sha256=85yzQrrpp8OEG37CTN9wtgB9UrzWnDLuDkY0nWcp--k,3548
85
+ uncountable/integration/job.py,sha256=UTzcMes2KrBBRLOM3u94imMKLLnv50glqOkNf8-JOZw,1022
86
+ uncountable/integration/server.py,sha256=JlnbidtiSLCEod0DzLKIzTCxP1qgaHBIBolUvnihJn8,3980
87
+ uncountable/integration/telemetry.py,sha256=H5XQnRTszDL6Nl_oQYuWRr_oNYMocDaCCpKlwll-qZI,5127
86
88
  uncountable/integration/db/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
87
89
  uncountable/integration/db/connect.py,sha256=YtQHJ1DBGPhxKFRCfiXqohOYUceKSxMVOJ88aPI48Ug,181
88
90
  uncountable/integration/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
89
91
  uncountable/integration/executors/executors.py,sha256=v5ClGVUlvrZcMdmGQa8Ll668G_HGTnKpGOnTM7UMZCQ,956
90
- uncountable/integration/executors/generic_upload_executor.py,sha256=Ixxg01LLzc9pBpptBqJG6-jG82eltZTF7igFqORujyo,8393
92
+ uncountable/integration/executors/generic_upload_executor.py,sha256=wafNY_gpbUiQhvkFPDw-GGiJLmDVtTRgH_5jwMLy2Z4,10283
91
93
  uncountable/integration/executors/script_executor.py,sha256=OmSBOtU48G3mqza9c2lCm84pGGyaDk-ZBJCx3RsdJXc,846
92
94
  uncountable/integration/secret_retrieval/__init__.py,sha256=3QXVj35w8rRMxVvmmsViFYDi3lcb3g70incfalOEm6o,87
93
95
  uncountable/integration/secret_retrieval/retrieve_secret.py,sha256=M0qXVJpD8hMYIFypHFeyh598sqmIDX8ZOyXK23CluF0,1323
94
96
  uncountable/types/__init__.py,sha256=95iOd3WXWoI_4a461IS2ieWRic3zRyNaCYzfTpX764o,8162
95
97
  uncountable/types/async_batch.py,sha256=ihCv5XWSTTPmuO-GMPn1EACGI2CBUIJTATZ3aPgsNBA,523
96
- uncountable/types/async_batch_processor.py,sha256=VO0P-oSLAvzBK_Rk-KPS74AhSyrkj5ILZrAu-gs7Lpc,8564
98
+ uncountable/types/async_batch_processor.py,sha256=R--exgi4Gw0HWCnh8M-3_2PqG2ByTBtdyuSQ2eYtYn8,8671
97
99
  uncountable/types/async_batch_t.py,sha256=9jp9rOyetRdD5aQVyijzQggTyYU4021PBVGXk0ooBCQ,1911
98
100
  uncountable/types/base.py,sha256=xVSjWvA_fUUnkCg83EjoYEFvAfmskinKFMeYFOxNc9E,359
99
101
  uncountable/types/base_t.py,sha256=XXjZXexx0xWFUxMMhW8i9nIL6n8dsZVsHwdgnhZ0zJ4,2714
@@ -101,7 +103,7 @@ uncountable/types/calculations.py,sha256=FFO_D3BbKoGDZnqWvTKpW4KF359i2vrKjpdFCLY
101
103
  uncountable/types/calculations_t.py,sha256=7GTSi2L8NYjzjUJJx3cmtVkK9uD-uhfYvIFK-ffQj-8,556
102
104
  uncountable/types/chemical_structure.py,sha256=E-LnikTFDoVQ1b2zKaVUIO_PAKm-7aZZYJi8I8SDSic,302
103
105
  uncountable/types/chemical_structure_t.py,sha256=aFsTkkbzy6Gvyde3qrrEYD95gcYhxkgKMiDRaRE0o-Y,760
104
- uncountable/types/client_base.py,sha256=3CKr_v3bXez7ZjUg41q09pju5tgYGMsbgJAoYMffh-c,65240
106
+ uncountable/types/client_base.py,sha256=XjgzUcLIE-Q-Tc30IQ8hv7qw4LF7yQlKS0H_130ruwA,65347
105
107
  uncountable/types/client_config.py,sha256=4h5Liko9uKCo9_0gdbPhoK6Jr2Kv7tioLiQ8iKeq-_4,301
106
108
  uncountable/types/client_config_t.py,sha256=_HdS37gMSTIiD4qLnW9dIgt8_Rt5A6xhwMGGga7vnLg,625
107
109
  uncountable/types/curves.py,sha256=W6uMpG5SyW1MS82szNpxkFEn1MnxNpBFyFbQb2Ysfng,366
@@ -124,8 +126,8 @@ uncountable/types/input_attributes.py,sha256=IrIKQnHqHdS1Utdfzr9GnOe17a8riaqYcO1
124
126
  uncountable/types/input_attributes_t.py,sha256=wE1ekiQfb72Z9VpF5SHipKJkgaJFUHJrNkkJdmuhF9w,820
125
127
  uncountable/types/inputs.py,sha256=6RIEFfCxLqpeHEGOpu63O4i8zPogjGeB7wiV_rPBw_g,404
126
128
  uncountable/types/inputs_t.py,sha256=RW7gF9zTOwByu-nMTMVuBabLOuWKx4O1nvfgvx_R55o,1611
127
- uncountable/types/job_definition.py,sha256=W2wth9vPRBVL0temwnSmvfxxPMKFxZqbQtsMXn8BU3U,1637
128
- uncountable/types/job_definition_t.py,sha256=1VZO3l5oGnvtG-ikQB6YuukblHhEwtmJfQpGmo0Gi40,5735
129
+ uncountable/types/job_definition.py,sha256=sCQqtyHI3hksc5pVpk5tqbG55F91ST4FoDwr2TmTOuQ,1787
130
+ uncountable/types/job_definition_t.py,sha256=oDHLHCophse2x8vZOYagQ49yz7GTnCKwY6ecpDQsfRk,6616
129
131
  uncountable/types/outputs.py,sha256=sUZx_X-TKCZtLm1YCEH8OISX9DdPlv9ZuUfM3-askCc,281
130
132
  uncountable/types/outputs_t.py,sha256=2aORUOr0ls1ZYo-ddkWax3D1ZndmQsWtHfJxpYozlhg,656
131
133
  uncountable/types/permissions.py,sha256=1mRnSsmRgjuLgp6pylTwwACD_YRIcmlqxHkufwZtMns,297
@@ -237,8 +239,8 @@ uncountable/types/api/recipes/unlock_recipes.py,sha256=AvzQeZCLs9i7CuhMs3Xltdi4n
237
239
  uncountable/types/api/triggers/__init__.py,sha256=gCgbynxG3jA8FQHzercKtrHKHkiIKr8APdZYUniAor8,55
238
240
  uncountable/types/api/triggers/run_trigger.py,sha256=_Rpha9nxXI3Xr17CrGDtofg4HZ81x2lt0rMZ6As0qfE,893
239
241
  uncountable/types/api/uploader/__init__.py,sha256=gCgbynxG3jA8FQHzercKtrHKHkiIKr8APdZYUniAor8,55
240
- uncountable/types/api/uploader/invoke_uploader.py,sha256=w7NVSpTpKxsF2TgLzVrji1Ql5Z8QWTz6d5OvXpRtyDo,992
241
- UncountablePythonSDK-0.0.48.dist-info/METADATA,sha256=j72rX6737WefzshiwZNBrM8nqCM55gDs2DH6dH79C24,1734
242
- UncountablePythonSDK-0.0.48.dist-info/WHEEL,sha256=HiCZjzuy6Dw0hdX5R3LCFPDmFS4BWl8H-8W39XfmgX4,91
243
- UncountablePythonSDK-0.0.48.dist-info/top_level.txt,sha256=1UVGjAU-6hJY9qw2iJ7nCBeEwZ793AEN5ZfKX9A1uj4,31
244
- UncountablePythonSDK-0.0.48.dist-info/RECORD,,
242
+ uncountable/types/api/uploader/invoke_uploader.py,sha256=4zOcB_38uT73Jm3-XqkG40fBM1R5vpvPpGAg-U4lzxY,1059
243
+ UncountablePythonSDK-0.0.50.dist-info/METADATA,sha256=h6s5vQQCK3UNrcaHgQxLbyyskpShrzKpEToBx0Rx5EQ,1934
244
+ UncountablePythonSDK-0.0.50.dist-info/WHEEL,sha256=Mdi9PDNwEZptOjTlUcAth7XJDFtKrHYaQMPulZeBCiQ,91
245
+ UncountablePythonSDK-0.0.50.dist-info/top_level.txt,sha256=1UVGjAU-6hJY9qw2iJ7nCBeEwZ793AEN5ZfKX9A1uj4,31
246
+ UncountablePythonSDK-0.0.50.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (72.2.0)
2
+ Generator: setuptools (73.0.1)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5
 
docs/requirements.txt CHANGED
@@ -2,6 +2,6 @@ furo==2024.7.18
2
2
  myst-parser==3.0.1
3
3
  sphinx-autoapi==3.2.0
4
4
  sphinx-copybutton==0.5.2
5
- Sphinx==7.4.4
5
+ Sphinx==8.0.0
6
6
  sphinx_design==0.6.0
7
7
  sphinx-favicon==1.0.1
@@ -5,12 +5,14 @@ from ._gdrive_session import list_gdrive_files as list_gdrive_files
5
5
  from ._gdrive_session import move_gdrive_file as move_gdrive_file
6
6
  from ._gdrive_session import upload_file_gdrive as upload_file_gdrive
7
7
  from ._local_session import LocalSession as LocalSession
8
+ from ._s3_session import S3Session as S3Session
8
9
  from ._sftp_session import SFTPSession as SFTPSession
9
10
  from ._sftp_session import list_sftp_files as list_sftp_files
10
11
  from ._sftp_session import move_sftp_files as move_sftp_files
11
12
  from .file_type_utils import FileObjectData as FileObjectData
12
13
  from .file_type_utils import FileSystemFileReference as FileSystemFileReference
13
14
  from .file_type_utils import FileSystemObject as FileSystemObject
15
+ from .file_type_utils import FileSystemS3Config as FileSystemS3Config
14
16
  from .file_type_utils import FileSystemSFTPConfig as FileSystemSFTPConfig
15
17
  from .file_type_utils import FileTransfer as FileTransfer
16
18
  from .file_type_utils import IncompatibleFileReference as IncompatibleFileReference
@@ -4,16 +4,18 @@ import typing
4
4
  from dataclasses import dataclass
5
5
  from datetime import datetime, timedelta
6
6
  from enum import StrEnum
7
- from importlib.metadata import PackageNotFoundError, version
8
7
  from urllib.parse import urljoin
9
8
  from uuid import uuid4
10
9
 
11
10
  import requests
11
+ from opentelemetry.sdk.resources import Attributes
12
12
  from requests.exceptions import JSONDecodeError
13
13
 
14
14
  from pkgs.argument_parser import CachedParser
15
15
  from pkgs.serialization_util import serialize_for_api
16
16
  from pkgs.serialization_util.serialization_helpers import JsonValue
17
+ from uncountable.core.version import get_version
18
+ from uncountable.integration.telemetry import JobLogger
17
19
  from uncountable.types.client_base import APIRequest, ClientMethods
18
20
  from uncountable.types.client_config import ClientConfigOptions
19
21
 
@@ -25,12 +27,6 @@ UNC_REQUEST_ID_HEADER = "X-UNC-REQUEST-ID"
25
27
  UNC_SDK_VERSION_HEADER = "X-UNC-SDK-VERSION"
26
28
 
27
29
 
28
- try:
29
- __version__ = version("UncountablePythonSDK")
30
- except PackageNotFoundError:
31
- __version__ = "unknown"
32
-
33
-
34
30
  class EndpointMethod(StrEnum):
35
31
  POST = "POST"
36
32
  GET = "GET"
@@ -61,6 +57,7 @@ HTTPRequest = HTTPPostRequest | HTTPGetRequest
61
57
  @dataclass(kw_only=True)
62
58
  class ClientConfig(ClientConfigOptions):
63
59
  transform_request: typing.Callable[[requests.Request], requests.Request] | None = None
60
+ job_logger: typing.Optional[JobLogger] = None
64
61
 
65
62
 
66
63
  OAUTH_REFRESH_WINDOW_SECONDS = 60 * 5
@@ -204,7 +201,15 @@ class Client(ClientMethods):
204
201
  case _:
205
202
  typing.assert_never(http_request)
206
203
  request.headers = http_request.headers
207
- response = self._send_request(request)
204
+ if self._cfg.job_logger is not None:
205
+ attributes: Attributes = {
206
+ "method": http_request.method,
207
+ "endpoint": api_request.endpoint,
208
+ }
209
+ with self._cfg.job_logger.push_scope("api_call", attributes=attributes):
210
+ response = self._send_request(request)
211
+ else:
212
+ response = self._send_request(request)
208
213
  response_data = self._get_response_json(response, request_id=request_id)
209
214
  cached_parser = self._get_cached_parser(return_type)
210
215
  try:
@@ -260,7 +265,7 @@ class Client(ClientMethods):
260
265
  ) -> HTTPRequest:
261
266
  headers = self._build_auth_headers()
262
267
  headers[UNC_REQUEST_ID_HEADER] = request_id
263
- headers[UNC_SDK_VERSION_HEADER] = __version__
268
+ headers[UNC_SDK_VERSION_HEADER] = get_version()
264
269
  method = api_request.method.lower()
265
270
  data = {"data": json.dumps(serialize_for_api(api_request.args))}
266
271
  match method:
@@ -0,0 +1,11 @@
1
+ import functools
2
+ from importlib.metadata import PackageNotFoundError, version
3
+
4
+
5
+ @functools.cache
6
+ def get_version() -> str:
7
+ try:
8
+ version_str = version("UncountablePythonSDK")
9
+ except PackageNotFoundError:
10
+ version_str = "unknown"
11
+ return version_str
@@ -3,6 +3,7 @@ import os
3
3
  from uncountable.core import AuthDetailsApiKey, Client
4
4
  from uncountable.core.client import ClientConfig
5
5
  from uncountable.core.types import AuthDetailsAll
6
+ from uncountable.integration.telemetry import JobLogger
6
7
  from uncountable.types.job_definition_t import (
7
8
  AuthRetrievalEnv,
8
9
  ProfileMetadata,
@@ -30,18 +31,23 @@ def _construct_auth_details(profile_meta: ProfileMetadata) -> AuthDetailsAll:
30
31
  return AuthDetailsApiKey(api_id=api_id, api_secret_key=api_secret_key)
31
32
 
32
33
 
33
- def _construct_client_config(profile_meta: ProfileMetadata) -> ClientConfig | None:
34
+ def _construct_client_config(
35
+ profile_meta: ProfileMetadata, job_logger: JobLogger
36
+ ) -> ClientConfig | None:
34
37
  if profile_meta.client_options is None:
35
38
  return None
36
39
  return ClientConfig(
37
40
  allow_insecure_tls=profile_meta.client_options.allow_insecure_tls,
38
41
  extra_headers=profile_meta.client_options.extra_headers,
42
+ job_logger=job_logger,
39
43
  )
40
44
 
41
45
 
42
- def construct_uncountable_client(profile_meta: ProfileMetadata) -> Client:
46
+ def construct_uncountable_client(
47
+ profile_meta: ProfileMetadata, job_logger: JobLogger
48
+ ) -> Client:
43
49
  return Client(
44
50
  base_url=profile_meta.base_url,
45
51
  auth_details=_construct_auth_details(profile_meta),
46
- config=_construct_client_config(profile_meta),
52
+ config=_construct_client_config(profile_meta, job_logger),
47
53
  )
@@ -4,7 +4,8 @@ from pkgs.argument_parser import CachedParser
4
4
  from uncountable.core.async_batch import AsyncBatchProcessor
5
5
  from uncountable.integration.construct_client import construct_uncountable_client
6
6
  from uncountable.integration.executors.executors import resolve_executor
7
- from uncountable.integration.job import CronJobArguments, JobLogger
7
+ from uncountable.integration.job import CronJobArguments
8
+ from uncountable.integration.telemetry import JobLogger
8
9
  from uncountable.types.job_definition_t import JobDefinition, ProfileMetadata
9
10
 
10
11
 
@@ -19,29 +20,36 @@ cron_args_parser = CachedParser(CronJobArgs)
19
20
 
20
21
  def cron_job_executor(**kwargs: dict) -> None:
21
22
  args_passed = cron_args_parser.parse_storage(kwargs)
22
- client = construct_uncountable_client(profile_meta=args_passed.profile_metadata)
23
+ job_logger = JobLogger(
24
+ profile_metadata=args_passed.profile_metadata,
25
+ job_definition=args_passed.definition,
26
+ )
27
+ client = construct_uncountable_client(
28
+ profile_meta=args_passed.profile_metadata, job_logger=job_logger
29
+ )
23
30
  batch_processor = AsyncBatchProcessor(client=client)
24
31
  args = CronJobArguments(
25
32
  job_definition=args_passed.definition,
26
33
  client=client,
27
34
  batch_processor=batch_processor,
28
35
  profile_metadata=args_passed.profile_metadata,
29
- logger=JobLogger(
30
- profile_metadata=args_passed.profile_metadata,
31
- job_definition=args_passed.definition,
32
- ),
36
+ logger=job_logger,
33
37
  )
34
38
 
35
- job = resolve_executor(args_passed.definition.executor, args_passed.profile_metadata)
39
+ with job_logger.push_scope(args_passed.definition.name) as job_logger:
40
+ job = resolve_executor(
41
+ args_passed.definition.executor, args_passed.profile_metadata
42
+ )
36
43
 
37
- print(f"running job {args_passed.definition.name}")
44
+ job_logger.log_info("running job")
38
45
 
39
- job.run(args=args)
46
+ job.run(args=args)
40
47
 
41
- if batch_processor.current_queue_size() != 0:
42
- batch_processor.send()
48
+ if batch_processor.current_queue_size() != 0:
49
+ batch_processor.send()
43
50
 
44
- print(f"completed job {args_passed.definition.name}")
45
- submitted_batch_job_ids = batch_processor.get_submitted_job_ids()
46
- if len(submitted_batch_job_ids) != 0:
47
- print("submitted batch jobs", submitted_batch_job_ids)
51
+ submitted_batch_job_ids = batch_processor.get_submitted_job_ids()
52
+ job_logger.log_info(
53
+ "completed job",
54
+ attributes={"submitted_batch_job_ids": submitted_batch_job_ids},
55
+ )
@@ -9,22 +9,27 @@ from pkgs.filesystem_utils import (
9
9
  FileObjectData,
10
10
  FileSystemFileReference,
11
11
  FileSystemObject,
12
+ FileSystemS3Config,
12
13
  FileSystemSFTPConfig,
13
14
  FileTransfer,
15
+ S3Session,
14
16
  SFTPSession,
15
17
  )
16
18
  from pkgs.filesystem_utils.filesystem_session import FileSystemSession
17
19
  from uncountable.core.file_upload import DataFileUpload, FileUpload
18
- from uncountable.integration.job import Job, JobArguments, JobLogger
20
+ from uncountable.integration.job import Job, JobArguments
19
21
  from uncountable.integration.secret_retrieval import retrieve_secret
22
+ from uncountable.integration.telemetry import JobLogger
20
23
  from uncountable.types.generic_upload_t import (
21
24
  GenericRemoteDirectoryScope,
22
25
  GenericUploadStrategy,
23
26
  )
24
27
  from uncountable.types.job_definition_t import (
25
28
  GenericUploadDataSource,
29
+ GenericUploadDataSourceS3,
26
30
  GenericUploadDataSourceSFTP,
27
31
  JobResult,
32
+ S3CloudProvider,
28
33
  )
29
34
 
30
35
 
@@ -185,6 +190,42 @@ class GenericUploadJob(Job):
185
190
  pem_key=pem_key,
186
191
  )
187
192
  return SFTPSession(sftp_config=sftp_config)
193
+ case GenericUploadDataSourceS3():
194
+ if self.data_source.access_key_secret is not None:
195
+ secret_access_key = retrieve_secret(
196
+ self.data_source.access_key_secret,
197
+ profile_metadata=args.profile_metadata,
198
+ )
199
+ else:
200
+ secret_access_key = None
201
+
202
+ if self.data_source.endpoint_url is None:
203
+ assert (
204
+ self.data_source.cloud_provider is not None
205
+ ), "either cloud_provider or endpoint_url must be specified"
206
+ match self.data_source.cloud_provider:
207
+ case S3CloudProvider.AWS:
208
+ endpoint_url = "https://s3.amazonaws.com"
209
+ case S3CloudProvider.OVH:
210
+ assert (
211
+ self.data_source.region_name is not None
212
+ ), "region_name must be specified for cloud_provider OVH"
213
+ endpoint_url = (
214
+ f"https://s3.{self.data_source.region_name}.cloud.ovh.net"
215
+ )
216
+ else:
217
+ endpoint_url = self.data_source.endpoint_url
218
+
219
+ s3_config = FileSystemS3Config(
220
+ endpoint_url=endpoint_url,
221
+ bucket_name=self.data_source.bucket_name,
222
+ region_name=self.data_source.region_name,
223
+ access_key_id=self.data_source.access_key_id,
224
+ secret_access_key=secret_access_key,
225
+ session_token=None,
226
+ )
227
+
228
+ return S3Session(s3_config=s3_config)
188
229
 
189
230
  def run(self, args: JobArguments) -> JobResult:
190
231
  client = args.client
@@ -3,24 +3,10 @@ from dataclasses import dataclass
3
3
 
4
4
  from uncountable.core.async_batch import AsyncBatchProcessor
5
5
  from uncountable.core.client import Client
6
+ from uncountable.integration.telemetry import JobLogger
6
7
  from uncountable.types.job_definition_t import JobDefinition, JobResult, ProfileMetadata
7
8
 
8
9
 
9
- class JobLogger:
10
- def __init__(
11
- self, *, profile_metadata: ProfileMetadata, job_definition: JobDefinition
12
- ) -> None:
13
- self.profile_metadata = profile_metadata
14
- self.job_definition = job_definition
15
-
16
- def log_info(self, *log_objects: object) -> None:
17
- # IMPROVE: log a json message with context that can be parsed by OT
18
- print(
19
- f"[{self.job_definition.id}] in profile ({self.profile_metadata.name}): ",
20
- *log_objects,
21
- )
22
-
23
-
24
10
  @dataclass
25
11
  class JobArgumentsBase:
26
12
  job_definition: JobDefinition
@@ -11,6 +11,7 @@ from apscheduler.triggers.cron import CronTrigger
11
11
  from sqlalchemy.engine.base import Engine
12
12
 
13
13
  from uncountable.integration.cron import CronJobArgs, cron_job_executor
14
+ from uncountable.types import base_t
14
15
  from uncountable.types.client_config_t import ClientConfigOptions
15
16
  from uncountable.types.job_definition_t import (
16
17
  AuthRetrieval,
@@ -67,7 +68,14 @@ class IntegrationServer:
67
68
  existing_job.reschedule(
68
69
  CronTrigger.from_crontab(job_defn.cron_spec)
69
70
  )
71
+ if not job_defn.enabled:
72
+ existing_job.pause()
73
+ else:
74
+ existing_job.resume()
70
75
  else:
76
+ job_opts: dict[str, base_t.JsonValue] = {}
77
+ if not job_defn.enabled:
78
+ job_opts["next_run_time"] = None
71
79
  self._scheduler.add_job(
72
80
  cron_job_executor,
73
81
  # IMPROVE: reconsider these defaults
@@ -77,6 +85,7 @@ class IntegrationServer:
77
85
  name=job_defn.name,
78
86
  id=job_defn.id,
79
87
  kwargs=job_kwargs,
88
+ **job_opts,
80
89
  )
81
90
  case _:
82
91
  assert_never(job_defn.trigger)
@@ -0,0 +1,130 @@
1
+ import functools
2
+ import os
3
+ import sys
4
+ import time
5
+ from contextlib import contextmanager
6
+ from enum import StrEnum
7
+ from typing import Generator, TextIO, assert_never, cast
8
+
9
+ from opentelemetry import trace
10
+ from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
11
+ from opentelemetry.sdk._logs import LogRecord
12
+ from opentelemetry.sdk.resources import Attributes, Resource
13
+ from opentelemetry.sdk.trace import TracerProvider
14
+ from opentelemetry.sdk.trace.export import (
15
+ SimpleSpanProcessor,
16
+ )
17
+ from opentelemetry.trace import Tracer
18
+
19
+ from uncountable.core.version import get_version
20
+ from uncountable.types import base_t, job_definition_t
21
+
22
+
23
+ def _cast_attributes(attributes: dict[str, base_t.JsonValue]) -> Attributes:
24
+ return cast(Attributes, attributes)
25
+
26
+
27
+ @functools.cache
28
+ def get_tracer() -> Tracer:
29
+ attributes: dict[str, base_t.JsonValue] = {
30
+ "service.name": "integration-server",
31
+ "sdk.version": get_version(),
32
+ }
33
+ unc_version = os.environ.get("UNC_VERSION")
34
+ if unc_version is not None:
35
+ attributes["service.version"] = unc_version
36
+ unc_env = os.environ.get("UNC_INTEGRATION_ENV")
37
+ if unc_env is not None:
38
+ attributes["deployment.environment"] = unc_env
39
+ resource = Resource.create(attributes=_cast_attributes(attributes))
40
+ provider = TracerProvider(resource=resource)
41
+ provider.add_span_processor(SimpleSpanProcessor(OTLPSpanExporter()))
42
+ trace.set_tracer_provider(provider)
43
+ return provider.get_tracer("integration.telemetry")
44
+
45
+
46
+ class LogSeverity(StrEnum):
47
+ INFO = "Info"
48
+ WARN = "Warn"
49
+ ERROR = "Error"
50
+
51
+
52
+ class JobLogger:
53
+ current_span_id: int | None = None
54
+ current_trace_id: int | None = None
55
+
56
+ def __init__(
57
+ self,
58
+ *,
59
+ profile_metadata: job_definition_t.ProfileMetadata,
60
+ job_definition: job_definition_t.JobDefinition,
61
+ ) -> None:
62
+ self.profile_metadata = profile_metadata
63
+ self.job_definition = job_definition
64
+
65
+ def _patch_attributes(self, attributes: Attributes | None) -> Attributes:
66
+ patched_attributes: dict[str, base_t.JsonValue] = {
67
+ **(attributes if attributes is not None else {})
68
+ }
69
+ patched_attributes["profile.name"] = self.profile_metadata.name
70
+ patched_attributes["profile.base_url"] = self.profile_metadata.base_url
71
+ patched_attributes["job.name"] = self.job_definition.name
72
+ patched_attributes["job.id"] = self.job_definition.id
73
+ patched_attributes["job.definition_type"] = self.job_definition.type
74
+ match self.job_definition:
75
+ case job_definition_t.CronJobDefinition():
76
+ patched_attributes["job.definition.cron_spec"] = (
77
+ self.job_definition.cron_spec
78
+ )
79
+ case _:
80
+ assert_never(self.job_definition)
81
+ patched_attributes["job.definition.executor.type"] = (
82
+ self.job_definition.executor.type
83
+ )
84
+ match self.job_definition.executor:
85
+ case job_definition_t.JobExecutorScript():
86
+ patched_attributes["job.definition.executor.import_path"] = (
87
+ self.job_definition.executor.import_path
88
+ )
89
+ case job_definition_t.JobExecutorGenericUpload():
90
+ patched_attributes["job.definition.executor.data_source.type"] = (
91
+ self.job_definition.executor.data_source.type
92
+ )
93
+ case _:
94
+ assert_never(self.job_definition.executor)
95
+ return _cast_attributes(patched_attributes)
96
+
97
+ @contextmanager
98
+ def push_scope(
99
+ self, scope_name: str, *, attributes: Attributes | None = None
100
+ ) -> Generator["JobLogger", None, None]:
101
+ with get_tracer().start_as_current_span(
102
+ scope_name, attributes=self._patch_attributes(attributes)
103
+ ) as span:
104
+ self.current_span_id = span.get_span_context().span_id
105
+ self.current_trace_id = span.get_span_context().trace_id
106
+ yield self
107
+
108
+ def _emit_log(
109
+ self, message: str, *, severity: LogSeverity, attributes: Attributes | None
110
+ ) -> None:
111
+ log_record = LogRecord(
112
+ body=message,
113
+ severity_text=severity,
114
+ timestamp=time.time_ns(),
115
+ attributes=self._patch_attributes(attributes),
116
+ span_id=self.current_span_id,
117
+ trace_id=self.current_trace_id,
118
+ )
119
+ log_file: TextIO = sys.stderr if severity == LogSeverity.ERROR else sys.stdout
120
+ log_file.write(log_record.to_json())
121
+ log_file.flush()
122
+
123
+ def log_info(self, message: str, *, attributes: Attributes | None = None) -> None:
124
+ self._emit_log(message=message, severity=LogSeverity.INFO, attributes=attributes)
125
+
126
+ def log_warning(self, message: str, *, attributes: Attributes | None = None) -> None:
127
+ self._emit_log(message=message, severity=LogSeverity.WARN, attributes=attributes)
128
+
129
+ def log_error(self, message: str, *, attributes: Attributes | None = None) -> None:
130
+ self._emit_log(message=message, severity=LogSeverity.ERROR, attributes=attributes)
@@ -29,6 +29,7 @@ class Arguments:
29
29
  file_id: base_t.ObjectId
30
30
  uploader_key: identifier_t.IdentifierKey
31
31
  material_family_key: identifier_t.IdentifierKey
32
+ recipe_key: typing.Optional[identifier_t.IdentifierKey] = None
32
33
 
33
34
 
34
35
  # DO NOT MODIFY -- This file is generated by type_spec
@@ -163,6 +163,7 @@ class AsyncBatchProcessorBase(ABC):
163
163
  file_id: base_t.ObjectId,
164
164
  uploader_key: identifier_t.IdentifierKey,
165
165
  material_family_key: identifier_t.IdentifierKey,
166
+ recipe_key: typing.Optional[identifier_t.IdentifierKey] = None,
166
167
  depends_on: typing.Optional[list[str]] = None,
167
168
  ) -> async_batch_t.QueuedAsyncBatchRequest:
168
169
  """Runs a file through an uploader.
@@ -173,6 +174,7 @@ class AsyncBatchProcessorBase(ABC):
173
174
  file_id=file_id,
174
175
  uploader_key=uploader_key,
175
176
  material_family_key=material_family_key,
177
+ recipe_key=recipe_key,
176
178
  )
177
179
  json_data = serialize_for_api(args)
178
180
 
@@ -857,6 +857,7 @@ class ClientMethods(ABC):
857
857
  file_id: base_t.ObjectId,
858
858
  uploader_key: identifier_t.IdentifierKey,
859
859
  material_family_key: identifier_t.IdentifierKey,
860
+ recipe_key: typing.Optional[identifier_t.IdentifierKey] = None,
860
861
  ) -> invoke_uploader_t.Data:
861
862
  """Runs a file through an uploader.
862
863
 
@@ -865,6 +866,7 @@ class ClientMethods(ABC):
865
866
  file_id=file_id,
866
867
  uploader_key=uploader_key,
867
868
  material_family_key=material_family_key,
869
+ recipe_key=recipe_key,
868
870
  )
869
871
  api_request = APIRequest(
870
872
  method=invoke_uploader_t.ENDPOINT_METHOD,
@@ -12,6 +12,8 @@ from .job_definition_t import JobExecutorBase as JobExecutorBase
12
12
  from .job_definition_t import JobExecutorScript as JobExecutorScript
13
13
  from .job_definition_t import GenericUploadDataSourceBase as GenericUploadDataSourceBase
14
14
  from .job_definition_t import GenericUploadDataSourceSFTP as GenericUploadDataSourceSFTP
15
+ from .job_definition_t import S3CloudProvider as S3CloudProvider
16
+ from .job_definition_t import GenericUploadDataSourceS3 as GenericUploadDataSourceS3
15
17
  from .job_definition_t import GenericUploadDataSource as GenericUploadDataSource
16
18
  from .job_definition_t import JobExecutorGenericUpload as JobExecutorGenericUpload
17
19
  from .job_definition_t import JobExecutor as JobExecutor
@@ -23,6 +23,7 @@ __all__: list[str] = [
23
23
  "CronJobDefinition",
24
24
  "GenericUploadDataSource",
25
25
  "GenericUploadDataSourceBase",
26
+ "GenericUploadDataSourceS3",
26
27
  "GenericUploadDataSourceSFTP",
27
28
  "GenericUploadDataSourceType",
28
29
  "JobDefinition",
@@ -36,6 +37,7 @@ __all__: list[str] = [
36
37
  "JobResult",
37
38
  "ProfileDefinition",
38
39
  "ProfileMetadata",
40
+ "S3CloudProvider",
39
41
  ]
40
42
 
41
43
 
@@ -58,6 +60,7 @@ class AuthRetrievalType(StrEnum):
58
60
  # DO NOT MODIFY -- This file is generated by type_spec
59
61
  class GenericUploadDataSourceType(StrEnum):
60
62
  SFTP = "sftp"
63
+ S3 = "s3"
61
64
 
62
65
 
63
66
  # DO NOT MODIFY -- This file is generated by type_spec
@@ -94,13 +97,35 @@ class GenericUploadDataSourceSFTP(GenericUploadDataSourceBase):
94
97
  pem_secret: secret_retrieval_t.SecretRetrieval
95
98
 
96
99
 
100
+ # DO NOT MODIFY -- This file is generated by type_spec
101
+ class S3CloudProvider(StrEnum):
102
+ OVH = "ovh"
103
+ AWS = "aws"
104
+
105
+
106
+ # DO NOT MODIFY -- This file is generated by type_spec
107
+ @serial_class(
108
+ parse_require={"type"},
109
+ )
110
+ @dataclasses.dataclass(kw_only=True)
111
+ class GenericUploadDataSourceS3(GenericUploadDataSourceBase):
112
+ type: typing.Literal[GenericUploadDataSourceType.S3] = GenericUploadDataSourceType.S3
113
+ bucket_name: str
114
+ cloud_provider: typing.Optional[S3CloudProvider] = None
115
+ endpoint_url: typing.Optional[str] = None
116
+ region_name: typing.Optional[str] = None
117
+ access_key_id: typing.Optional[str] = None
118
+ access_key_secret: typing.Optional[secret_retrieval_t.SecretRetrieval] = None
119
+
120
+
97
121
  # DO NOT MODIFY -- This file is generated by type_spec
98
122
  GenericUploadDataSource = typing.Annotated[
99
- typing.Union[GenericUploadDataSourceSFTP],
123
+ typing.Union[GenericUploadDataSourceSFTP, GenericUploadDataSourceS3],
100
124
  serial_union_annotation(
101
125
  discriminator="type",
102
126
  discriminator_map={
103
127
  "sftp": GenericUploadDataSourceSFTP,
128
+ "s3": GenericUploadDataSourceS3,
104
129
  },
105
130
  ),
106
131
  ]
@@ -137,6 +162,7 @@ class JobDefinitionBase:
137
162
  id: str
138
163
  name: str
139
164
  executor: JobExecutor
165
+ enabled: bool = False
140
166
 
141
167
 
142
168
  # DO NOT MODIFY -- This file is generated by type_spec