omdev 0.0.0.dev290__py3-none-any.whl → 0.0.0.dev292__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
omdev/.manifests.json CHANGED
@@ -39,7 +39,7 @@
39
39
  "module": ".cli.clicli",
40
40
  "attr": "_CLI_MODULE",
41
41
  "file": "omdev/cli/clicli.py",
42
- "line": 168,
42
+ "line": 173,
43
43
  "value": {
44
44
  "$.cli.types.CliModule": {
45
45
  "cmd_name": "cli",
omdev/ci/compose.py CHANGED
@@ -155,7 +155,7 @@ class DockerComposeRun(AsyncExitStacked):
155
155
 
156
156
  async with contextlib.AsyncExitStack() as es:
157
157
  if not (self._cfg.no_dependencies or self._cfg.no_dependency_cleanup):
158
- await es.enter_async_context(adefer(self._cleanup_dependencies)) # noqa
158
+ await es.enter_async_context(adefer(self._cleanup_dependencies())) # noqa
159
159
 
160
160
  sh_cmd = ' '.join([
161
161
  'docker',
omdev/ci/github/api/clients.py CHANGED
@@ -430,7 +430,7 @@ class BaseGithubCacheClient(GithubCacheClient, abc.ABC):
430
430
  ):
431
431
  await self._upload_file_chunk_(chunk)
432
432
 
433
- async def _upload_file_chunks(
433
+ def _generate_file_upload_chunks(
434
434
  self,
435
435
  *,
436
436
  in_file: str,
@@ -438,7 +438,7 @@ class BaseGithubCacheClient(GithubCacheClient, abc.ABC):
438
438
  key: str,
439
439
 
440
440
  file_size: ta.Optional[int] = None,
441
- ) -> None:
441
+ ) -> ta.List[_UploadChunk]:
442
442
  check.state(os.path.isfile(in_file))
443
443
 
444
444
  if file_size is None:
@@ -446,17 +446,37 @@ class BaseGithubCacheClient(GithubCacheClient, abc.ABC):
446
446
 
447
447
  #
448
448
 
449
- upload_tasks = []
449
+ upload_chunks: ta.List[BaseGithubCacheClient._UploadChunk] = []
450
450
  chunk_size = self._chunk_size
451
451
  for i in range((file_size // chunk_size) + (1 if file_size % chunk_size else 0)):
452
452
  offset = i * chunk_size
453
453
  size = min(chunk_size, file_size - offset)
454
- upload_tasks.append(self._upload_file_chunk(self._UploadChunk(
454
+ upload_chunks.append(self._UploadChunk(
455
455
  url=url,
456
456
  key=key,
457
457
  in_file=in_file,
458
458
  offset=offset,
459
459
  size=size,
460
- )))
460
+ ))
461
+
462
+ return upload_chunks
463
+
464
+ async def _upload_file_chunks(
465
+ self,
466
+ *,
467
+ in_file: str,
468
+ url: str,
469
+ key: str,
470
+
471
+ file_size: ta.Optional[int] = None,
472
+ ) -> None:
473
+ upload_tasks = []
474
+ for chunk in self._generate_file_upload_chunks(
475
+ in_file=in_file,
476
+ url=url,
477
+ key=key,
478
+ file_size=file_size,
479
+ ):
480
+ upload_tasks.append(self._upload_file_chunk(chunk))
461
481
 
462
482
  await asyncio_wait_concurrent(upload_tasks, self._concurrency)
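The refactor above splits chunk generation out of the upload path; the loop count is a plain ceiling division. A quick, self-contained check of that arithmetic (the 32 MiB chunk size here is only an illustrative value, not the client's actual `_chunk_size`):

```python
# Ceiling division written without math.ceil, as in the loop above.
def n_chunks(file_size: int, chunk_size: int) -> int:
    return (file_size // chunk_size) + (1 if file_size % chunk_size else 0)

CHUNK = 32 * 1024 * 1024  # hypothetical chunk size, for illustration only
assert n_chunks(0, CHUNK) == 0
assert n_chunks(1, CHUNK) == 1
assert n_chunks(2 * CHUNK, CHUNK) == 2
assert n_chunks(2 * CHUNK + 1, CHUNK) == 3
```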
omdev/ci/github/api/v2/azure.py ADDED
@@ -0,0 +1,185 @@
1
+ # ruff: noqa: UP006 UP007
2
+ # @omlish-lite
3
+ """
4
+ TODO:
5
+ - ominfra? no, circdep
6
+ """
7
+ import base64
8
+ import dataclasses as dc
9
+ import datetime
10
+ import typing as ta
11
+ import urllib.parse
12
+ import xml.etree.ElementTree as ET
13
+
14
+ from omlish.asyncs.asyncio.utils import asyncio_wait_concurrent
15
+ from omlish.lite.check import check
16
+ from omlish.lite.logs import log
17
+ from omlish.lite.timing import log_timing_context
18
+
19
+
20
+ ##
21
+
22
+
23
+ class AzureBlockBlobUploader:
24
+ """
25
+ https://learn.microsoft.com/en-us/rest/api/storageservices/put-block
26
+ https://learn.microsoft.com/en-us/rest/api/storageservices/put-block-list
27
+ """
28
+
29
+ DEFAULT_CONCURRENCY = 4
30
+
31
+ @dc.dataclass(frozen=True)
32
+ class Request:
33
+ method: str
34
+ url: str
35
+ headers: ta.Optional[ta.Dict[str, str]] = None
36
+ body: ta.Optional[bytes] = None
37
+
38
+ @dc.dataclass(frozen=True)
39
+ class Response:
40
+ status: int
41
+ headers: ta.Optional[ta.Mapping[str, str]] = None
42
+ data: ta.Optional[bytes] = None
43
+
44
+ def get_header(self, name: str) -> ta.Optional[str]:
45
+ for k, v in (self.headers or {}).items():
46
+ if k.lower() == name.lower():
47
+ return v
48
+ return None
49
+
50
+ def __init__(
51
+ self,
52
+ blob_url_with_sas: str,
53
+ make_request: ta.Callable[[Request], ta.Awaitable[Response]],
54
+ *,
55
+ api_version: str = '2020-10-02',
56
+ concurrency: int = DEFAULT_CONCURRENCY,
57
+ ) -> None:
58
+ """
59
+ blob_url_with_sas should be of the form:
60
+ https://<account>.blob.core.windows.net/<container>/<blob>?<SAS-token>
61
+ """
62
+
63
+ super().__init__()
64
+
65
+ self._make_request = make_request
66
+ self._api_version = api_version
67
+ check.arg(concurrency >= 1)
68
+ self._concurrency = concurrency
69
+
70
+ parsed = urllib.parse.urlparse(blob_url_with_sas)
71
+ self._base_url = f'{parsed.scheme}://{parsed.netloc}'
72
+ parts = parsed.path.lstrip('/').split('/', 1)
73
+ self._container = parts[0]
74
+ self._blob_name = parts[1]
75
+ self._sas = parsed.query
76
+
77
+ def _headers(self) -> ta.Dict[str, str]:
78
+ """Standard headers for Azure Blob REST calls."""
79
+
80
+ now = datetime.datetime.now(datetime.UTC).strftime('%a, %d %b %Y %H:%M:%S GMT')
81
+ return {
82
+ 'x-ms-date': now,
83
+ 'x-ms-version': self._api_version,
84
+ }
85
+
86
+ @dc.dataclass(frozen=True)
87
+ class FileChunk:
88
+ in_file: str
89
+ offset: int
90
+ size: int
91
+
92
+ async def _upload_file_chunk_(
93
+ self,
94
+ block_id: str,
95
+ chunk: FileChunk,
96
+ ) -> None:
97
+ with open(chunk.in_file, 'rb') as f: # noqa
98
+ f.seek(chunk.offset)
99
+ data = f.read(chunk.size)
100
+
101
+ check.equal(len(data), chunk.size)
102
+
103
+ params = {
104
+ 'comp': 'block',
105
+ 'blockid': block_id,
106
+ }
107
+ query = self._sas + '&' + urllib.parse.urlencode(params)
108
+ url = f'{self._base_url}/{self._container}/{self._blob_name}?{query}'
109
+
110
+ log.debug(f'Uploading azure blob chunk {chunk} with block id {block_id}') # noqa
111
+
112
+ resp = await self._make_request(self.Request(
113
+ 'PUT',
114
+ url,
115
+ headers=self._headers(),
116
+ body=data,
117
+ ))
118
+ if resp.status not in (201, 202):
119
+ raise RuntimeError(f'Put Block failed: {block_id=} {resp.status=}')
120
+
121
+ async def _upload_file_chunk(
122
+ self,
123
+ block_id: str,
124
+ chunk: FileChunk,
125
+ ) -> None:
126
+ with log_timing_context(f'Uploading azure blob chunk {chunk} with block id {block_id}'):
127
+ await self._upload_file_chunk_(
128
+ block_id,
129
+ chunk,
130
+ )
131
+
132
+ async def upload_file(
133
+ self,
134
+ chunks: ta.List[FileChunk],
135
+ ) -> ta.Dict[str, ta.Any]:
136
+ block_ids = []
137
+
138
+ # 1) Stage each block
139
+ upload_tasks = []
140
+ for idx, chunk in enumerate(chunks):
141
+ # Generate a predictable block ID (must be URL-safe base64)
142
+ raw_id = f'{idx:08d}'.encode()
143
+ block_id = base64.b64encode(raw_id).decode('utf-8')
144
+ block_ids.append(block_id)
145
+
146
+ upload_tasks.append(self._upload_file_chunk(
147
+ block_id,
148
+ chunk,
149
+ ))
150
+
151
+ await asyncio_wait_concurrent(upload_tasks, self._concurrency)
152
+
153
+ # 2) Commit block list
154
+ root = ET.Element('BlockList')
155
+ for bid in block_ids:
156
+ elm = ET.SubElement(root, 'Latest')
157
+ elm.text = bid
158
+ body = ET.tostring(root, encoding='utf-8', method='xml')
159
+
160
+ params = {'comp': 'blocklist'}
161
+ query = self._sas + '&' + urllib.parse.urlencode(params)
162
+ url = f'{self._base_url}/{self._container}/{self._blob_name}?{query}'
163
+
164
+ log.debug(f'Putting azure blob chunk list block ids {block_ids}') # noqa
165
+
166
+ resp = await self._make_request(self.Request(
167
+ 'PUT',
168
+ url,
169
+ headers={
170
+ **self._headers(),
171
+ 'Content-Type': 'application/xml',
172
+ },
173
+ body=body,
174
+ ))
175
+ if resp.status not in (200, 201):
176
+ raise RuntimeError(f'Put Block List failed: {resp.status} {resp.data!r}')
177
+
178
+ ret = {
179
+ 'status_code': resp.status,
180
+ 'etag': resp.get_header('ETag'),
181
+ }
182
+
183
+ log.debug(f'Uploaded azure blob chunk {ret}') # noqa
184
+
185
+ return ret
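A note on the block-ID scheme used by `AzureBlockBlobUploader.upload_file`: Azure's Put Block API requires block IDs to be Base64-encoded and, within a single blob, all of equal length, which the fixed-width `f'{idx:08d}'` indexes guarantee. A minimal sketch of that encoding (hypothetical helper name):

```python
import base64

def block_id(idx: int) -> str:
    # Fixed-width decimal index -> equal-length Base64 block IDs.
    return base64.b64encode(f'{idx:08d}'.encode()).decode('utf-8')

assert block_id(0) == 'MDAwMDAwMDA='
assert len(block_id(0)) == len(block_id(12345))
```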
omdev/ci/github/api/v2/client.py CHANGED
@@ -2,6 +2,7 @@
2
2
  import dataclasses as dc
3
3
  import os
4
4
  import typing as ta
5
+ import urllib.request
5
6
 
6
7
  from omlish.lite.check import check
7
8
  from omlish.lite.logs import log
@@ -13,6 +14,7 @@ from ..clients import GithubCacheClient
13
14
  from .api import GithubCacheServiceV2
14
15
  from .api import GithubCacheServiceV2RequestT
15
16
  from .api import GithubCacheServiceV2ResponseT
17
+ from .azure import AzureBlockBlobUploader
16
18
 
17
19
 
18
20
  ##
@@ -137,19 +139,52 @@ class GithubCacheServiceV2Client(BaseGithubCacheClient):
137
139
 
138
140
  #
139
141
 
140
- await self._upload_file_chunks(
142
+ upload_chunks = self._generate_file_upload_chunks(
141
143
  in_file=in_file,
142
144
  url=reserve_resp.signed_upload_url,
143
145
  key=fixed_key,
144
146
  file_size=file_size,
145
147
  )
146
148
 
149
+ az_chunks = [
150
+ AzureBlockBlobUploader.FileChunk(
151
+ in_file=in_file,
152
+ offset=c.offset,
153
+ size=c.size,
154
+ )
155
+ for c in upload_chunks
156
+ ]
157
+
158
+ async def az_make_request(req: AzureBlockBlobUploader.Request) -> AzureBlockBlobUploader.Response:
159
+ u_req = urllib.request.Request( # noqa
160
+ req.url,
161
+ method=req.method,
162
+ headers=req.headers or {},
163
+ data=req.body,
164
+ )
165
+
166
+ u_resp, u_body = await self._send_urllib_request(u_req)
167
+
168
+ return AzureBlockBlobUploader.Response(
169
+ status=u_resp.status,
170
+ headers=dict(u_resp.headers),
171
+ data=u_body,
172
+ )
173
+
174
+ az_uploader = AzureBlockBlobUploader(
175
+ reserve_resp.signed_upload_url,
176
+ az_make_request,
177
+ concurrency=self._concurrency,
178
+ )
179
+
180
+ await az_uploader.upload_file(az_chunks)
181
+
147
182
  #
148
183
 
149
184
  commit_resp = check.not_none(await self._send_method_request(
150
185
  GithubCacheServiceV2.FINALIZE_CACHE_ENTRY_METHOD, # type: ignore[arg-type]
151
186
  GithubCacheServiceV2.FinalizeCacheEntryUploadRequest(
152
- key=key,
187
+ key=fixed_key,
153
188
  size_bytes=file_size,
154
189
  version=version,
155
190
  ),
omdev/cli/clicli.py CHANGED
@@ -1,5 +1,6 @@
1
1
  import inspect
2
2
  import os
3
+ import shlex
3
4
  import subprocess
4
5
  import sys
5
6
  import typing as ta
@@ -76,6 +77,7 @@ class CliCli(ap.Cli):
76
77
  ap.arg('--url', default=DEFAULT_REINSTALL_URL),
77
78
  ap.arg('--local', action='store_true'),
78
79
  ap.arg('--no-deps', action='store_true'),
80
+ ap.arg('--dry-run', action='store_true'),
79
81
  ap.arg('extra_deps', nargs='*'),
80
82
  )
81
83
  def reinstall(self) -> None:
@@ -147,7 +149,7 @@ class CliCli(ap.Cli):
147
149
 
148
150
  reco_cmd = ' '.join([
149
151
  'curl -LsSf',
150
- url,
152
+ f"'{url}'" if (qu := shlex.quote(url)) == url else qu,
151
153
  '| python3 -',
152
154
  *deps,
153
155
  ])
@@ -156,6 +158,9 @@ class CliCli(ap.Cli):
156
158
  with urllib.request.urlopen(urllib.request.Request(url)) as resp: # noqa
157
159
  install_src = resp.read().decode('utf-8')
158
160
 
161
+ if self.args.dry_run:
162
+ return
163
+
159
164
  os.execl(
160
165
  sys.executable,
161
166
  sys.executable,
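The `reinstall` command above now runs the URL through `shlex.quote`; because `shlex.quote` leaves strings with no shell-special characters untouched, the expression forces single quotes in that case so the suggested command is always quoted. A small illustration with hypothetical URLs:

```python
import shlex

def quote_url(url: str) -> str:
    # Same rule as in reco_cmd above: always end up with a quoted URL.
    return f"'{url}'" if (qu := shlex.quote(url)) == url else qu

print(quote_url('https://example.com/install'))          # 'https://example.com/install'
print(quote_url('https://example.com/install?a=1&b=2'))  # quoted by shlex.quote itself
```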
omdev/pyproject/pkg.py CHANGED
@@ -175,8 +175,11 @@ class BasePyprojectPackageGenerator(abc.ABC):
175
175
  #
176
176
 
177
177
  _STANDARD_FILES: ta.Sequence[str] = [
178
- 'LICENSE',
179
- 'README.rst',
178
+ *[
179
+ ''.join([n, x])
180
+ for n in ('LICENSE', 'README')
181
+ for x in ('', '.txt', '.md', '.rst')
182
+ ],
180
183
  ]
181
184
 
182
185
  def _symlink_standard_files(self) -> None:
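The `_STANDARD_FILES` change above replaces the two hard-coded names with a comprehension; it expands to eight candidates, checked in this order:

```python
names = [
    ''.join([n, x])
    for n in ('LICENSE', 'README')
    for x in ('', '.txt', '.md', '.rst')
]
assert names == [
    'LICENSE', 'LICENSE.txt', 'LICENSE.md', 'LICENSE.rst',
    'README', 'README.txt', 'README.md', 'README.rst',
]
```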
omdev/scripts/ci.py CHANGED
@@ -71,6 +71,7 @@ import urllib.parse
71
71
  import urllib.request
72
72
  import uuid
73
73
  import weakref
74
+ import xml.etree.ElementTree as ET
74
75
 
75
76
 
76
77
  ########################################
@@ -3330,7 +3331,9 @@ class AsyncExitStacked:
3330
3331
 
3331
3332
 
3332
3333
  @contextlib.contextmanager
3333
- def defer(fn: ta.Callable) -> ta.Generator[ta.Callable, None, None]:
3334
+ def defer(fn: ta.Callable, *args: ta.Any, **kwargs: ta.Any) -> ta.Generator[ta.Callable, None, None]:
3335
+ if args or kwargs:
3336
+ fn = functools.partial(fn, *args, **kwargs)
3334
3337
  try:
3335
3338
  yield fn
3336
3339
  finally:
@@ -3338,11 +3341,11 @@ def defer(fn: ta.Callable) -> ta.Generator[ta.Callable, None, None]:
3338
3341
 
3339
3342
 
3340
3343
  @contextlib.asynccontextmanager
3341
- async def adefer(fn: ta.Callable) -> ta.AsyncGenerator[ta.Callable, None]:
3344
+ async def adefer(fn: ta.Awaitable) -> ta.AsyncGenerator[ta.Awaitable, None]:
3342
3345
  try:
3343
3346
  yield fn
3344
3347
  finally:
3345
- await fn()
3348
+ await fn
3346
3349
 
3347
3350
 
3348
3351
  ##
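The `adefer` change above (and the matching call-site change in `DockerComposeRun`) means callers now pass an awaitable such as a coroutine object, which is awaited on exit, rather than a zero-argument async callable. A self-contained sketch of the new usage:

```python
import asyncio
import contextlib
import typing as ta

@contextlib.asynccontextmanager
async def adefer(fn: ta.Awaitable) -> ta.AsyncGenerator[ta.Awaitable, None]:
    # Mirrors the updated definition above: await the given awaitable on exit.
    try:
        yield fn
    finally:
        await fn

async def main() -> None:
    async def cleanup() -> None:
        print('dependencies cleaned up')

    async with adefer(cleanup()):  # note the call: an awaitable is passed in
        print('running')

asyncio.run(main())  # -> running, then dependencies cleaned up
```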
@@ -6478,7 +6481,7 @@ class BaseGithubCacheClient(GithubCacheClient, abc.ABC):
6478
6481
  ):
6479
6482
  await self._upload_file_chunk_(chunk)
6480
6483
 
6481
- async def _upload_file_chunks(
6484
+ def _generate_file_upload_chunks(
6482
6485
  self,
6483
6486
  *,
6484
6487
  in_file: str,
@@ -6486,7 +6489,7 @@ class BaseGithubCacheClient(GithubCacheClient, abc.ABC):
6486
6489
  key: str,
6487
6490
 
6488
6491
  file_size: ta.Optional[int] = None,
6489
- ) -> None:
6492
+ ) -> ta.List[_UploadChunk]:
6490
6493
  check.state(os.path.isfile(in_file))
6491
6494
 
6492
6495
  if file_size is None:
@@ -6494,22 +6497,218 @@ class BaseGithubCacheClient(GithubCacheClient, abc.ABC):
6494
6497
 
6495
6498
  #
6496
6499
 
6497
- upload_tasks = []
6500
+ upload_chunks: ta.List[BaseGithubCacheClient._UploadChunk] = []
6498
6501
  chunk_size = self._chunk_size
6499
6502
  for i in range((file_size // chunk_size) + (1 if file_size % chunk_size else 0)):
6500
6503
  offset = i * chunk_size
6501
6504
  size = min(chunk_size, file_size - offset)
6502
- upload_tasks.append(self._upload_file_chunk(self._UploadChunk(
6505
+ upload_chunks.append(self._UploadChunk(
6503
6506
  url=url,
6504
6507
  key=key,
6505
6508
  in_file=in_file,
6506
6509
  offset=offset,
6507
6510
  size=size,
6508
- )))
6511
+ ))
6512
+
6513
+ return upload_chunks
6514
+
6515
+ async def _upload_file_chunks(
6516
+ self,
6517
+ *,
6518
+ in_file: str,
6519
+ url: str,
6520
+ key: str,
6521
+
6522
+ file_size: ta.Optional[int] = None,
6523
+ ) -> None:
6524
+ upload_tasks = []
6525
+ for chunk in self._generate_file_upload_chunks(
6526
+ in_file=in_file,
6527
+ url=url,
6528
+ key=key,
6529
+ file_size=file_size,
6530
+ ):
6531
+ upload_tasks.append(self._upload_file_chunk(chunk))
6509
6532
 
6510
6533
  await asyncio_wait_concurrent(upload_tasks, self._concurrency)
6511
6534
 
6512
6535
 
6536
+ ########################################
6537
+ # ../github/api/v2/azure.py
6538
+ """
6539
+ TODO:
6540
+ - ominfra? no, circdep
6541
+ """
6542
+
6543
+
6544
+ ##
6545
+
6546
+
6547
+ class AzureBlockBlobUploader:
6548
+ """
6549
+ https://learn.microsoft.com/en-us/rest/api/storageservices/put-block
6550
+ https://learn.microsoft.com/en-us/rest/api/storageservices/put-block-list
6551
+ """
6552
+
6553
+ DEFAULT_CONCURRENCY = 4
6554
+
6555
+ @dc.dataclass(frozen=True)
6556
+ class Request:
6557
+ method: str
6558
+ url: str
6559
+ headers: ta.Optional[ta.Dict[str, str]] = None
6560
+ body: ta.Optional[bytes] = None
6561
+
6562
+ @dc.dataclass(frozen=True)
6563
+ class Response:
6564
+ status: int
6565
+ headers: ta.Optional[ta.Mapping[str, str]] = None
6566
+ data: ta.Optional[bytes] = None
6567
+
6568
+ def get_header(self, name: str) -> ta.Optional[str]:
6569
+ for k, v in (self.headers or {}).items():
6570
+ if k.lower() == name.lower():
6571
+ return v
6572
+ return None
6573
+
6574
+ def __init__(
6575
+ self,
6576
+ blob_url_with_sas: str,
6577
+ make_request: ta.Callable[[Request], ta.Awaitable[Response]],
6578
+ *,
6579
+ api_version: str = '2020-10-02',
6580
+ concurrency: int = DEFAULT_CONCURRENCY,
6581
+ ) -> None:
6582
+ """
6583
+ blob_url_with_sas should be of the form:
6584
+ https://<account>.blob.core.windows.net/<container>/<blob>?<SAS-token>
6585
+ """
6586
+
6587
+ super().__init__()
6588
+
6589
+ self._make_request = make_request
6590
+ self._api_version = api_version
6591
+ check.arg(concurrency >= 1)
6592
+ self._concurrency = concurrency
6593
+
6594
+ parsed = urllib.parse.urlparse(blob_url_with_sas)
6595
+ self._base_url = f'{parsed.scheme}://{parsed.netloc}'
6596
+ parts = parsed.path.lstrip('/').split('/', 1)
6597
+ self._container = parts[0]
6598
+ self._blob_name = parts[1]
6599
+ self._sas = parsed.query
6600
+
6601
+ def _headers(self) -> ta.Dict[str, str]:
6602
+ """Standard headers for Azure Blob REST calls."""
6603
+
6604
+ now = datetime.datetime.now(datetime.UTC).strftime('%a, %d %b %Y %H:%M:%S GMT')
6605
+ return {
6606
+ 'x-ms-date': now,
6607
+ 'x-ms-version': self._api_version,
6608
+ }
6609
+
6610
+ @dc.dataclass(frozen=True)
6611
+ class FileChunk:
6612
+ in_file: str
6613
+ offset: int
6614
+ size: int
6615
+
6616
+ async def _upload_file_chunk_(
6617
+ self,
6618
+ block_id: str,
6619
+ chunk: FileChunk,
6620
+ ) -> None:
6621
+ with open(chunk.in_file, 'rb') as f: # noqa
6622
+ f.seek(chunk.offset)
6623
+ data = f.read(chunk.size)
6624
+
6625
+ check.equal(len(data), chunk.size)
6626
+
6627
+ params = {
6628
+ 'comp': 'block',
6629
+ 'blockid': block_id,
6630
+ }
6631
+ query = self._sas + '&' + urllib.parse.urlencode(params)
6632
+ url = f'{self._base_url}/{self._container}/{self._blob_name}?{query}'
6633
+
6634
+ log.debug(f'Uploading azure blob chunk {chunk} with block id {block_id}') # noqa
6635
+
6636
+ resp = await self._make_request(self.Request(
6637
+ 'PUT',
6638
+ url,
6639
+ headers=self._headers(),
6640
+ body=data,
6641
+ ))
6642
+ if resp.status not in (201, 202):
6643
+ raise RuntimeError(f'Put Block failed: {block_id=} {resp.status=}')
6644
+
6645
+ async def _upload_file_chunk(
6646
+ self,
6647
+ block_id: str,
6648
+ chunk: FileChunk,
6649
+ ) -> None:
6650
+ with log_timing_context(f'Uploading azure blob chunk {chunk} with block id {block_id}'):
6651
+ await self._upload_file_chunk_(
6652
+ block_id,
6653
+ chunk,
6654
+ )
6655
+
6656
+ async def upload_file(
6657
+ self,
6658
+ chunks: ta.List[FileChunk],
6659
+ ) -> ta.Dict[str, ta.Any]:
6660
+ block_ids = []
6661
+
6662
+ # 1) Stage each block
6663
+ upload_tasks = []
6664
+ for idx, chunk in enumerate(chunks):
6665
+ # Generate a predictable block ID (must be URL-safe base64)
6666
+ raw_id = f'{idx:08d}'.encode()
6667
+ block_id = base64.b64encode(raw_id).decode('utf-8')
6668
+ block_ids.append(block_id)
6669
+
6670
+ upload_tasks.append(self._upload_file_chunk(
6671
+ block_id,
6672
+ chunk,
6673
+ ))
6674
+
6675
+ await asyncio_wait_concurrent(upload_tasks, self._concurrency)
6676
+
6677
+ # 2) Commit block list
6678
+ root = ET.Element('BlockList')
6679
+ for bid in block_ids:
6680
+ elm = ET.SubElement(root, 'Latest')
6681
+ elm.text = bid
6682
+ body = ET.tostring(root, encoding='utf-8', method='xml')
6683
+
6684
+ params = {'comp': 'blocklist'}
6685
+ query = self._sas + '&' + urllib.parse.urlencode(params)
6686
+ url = f'{self._base_url}/{self._container}/{self._blob_name}?{query}'
6687
+
6688
+ log.debug(f'Putting azure blob chunk list block ids {block_ids}') # noqa
6689
+
6690
+ resp = await self._make_request(self.Request(
6691
+ 'PUT',
6692
+ url,
6693
+ headers={
6694
+ **self._headers(),
6695
+ 'Content-Type': 'application/xml',
6696
+ },
6697
+ body=body,
6698
+ ))
6699
+ if resp.status not in (200, 201):
6700
+ raise RuntimeError(f'Put Block List failed: {resp.status} {resp.data!r}')
6701
+
6702
+ ret = {
6703
+ 'status_code': resp.status,
6704
+ 'etag': resp.get_header('ETag'),
6705
+ }
6706
+
6707
+ log.debug(f'Uploaded azure blob chunk {ret}') # noqa
6708
+
6709
+ return ret
6710
+
6711
+
6513
6712
  ########################################
6514
6713
  # ../../dataserver/targets.py
6515
6714
 
@@ -8043,19 +8242,52 @@ class GithubCacheServiceV2Client(BaseGithubCacheClient):
8043
8242
 
8044
8243
  #
8045
8244
 
8046
- await self._upload_file_chunks(
8245
+ upload_chunks = self._generate_file_upload_chunks(
8047
8246
  in_file=in_file,
8048
8247
  url=reserve_resp.signed_upload_url,
8049
8248
  key=fixed_key,
8050
8249
  file_size=file_size,
8051
8250
  )
8052
8251
 
8252
+ az_chunks = [
8253
+ AzureBlockBlobUploader.FileChunk(
8254
+ in_file=in_file,
8255
+ offset=c.offset,
8256
+ size=c.size,
8257
+ )
8258
+ for c in upload_chunks
8259
+ ]
8260
+
8261
+ async def az_make_request(req: AzureBlockBlobUploader.Request) -> AzureBlockBlobUploader.Response:
8262
+ u_req = urllib.request.Request( # noqa
8263
+ req.url,
8264
+ method=req.method,
8265
+ headers=req.headers or {},
8266
+ data=req.body,
8267
+ )
8268
+
8269
+ u_resp, u_body = await self._send_urllib_request(u_req)
8270
+
8271
+ return AzureBlockBlobUploader.Response(
8272
+ status=u_resp.status,
8273
+ headers=dict(u_resp.headers),
8274
+ data=u_body,
8275
+ )
8276
+
8277
+ az_uploader = AzureBlockBlobUploader(
8278
+ reserve_resp.signed_upload_url,
8279
+ az_make_request,
8280
+ concurrency=self._concurrency,
8281
+ )
8282
+
8283
+ await az_uploader.upload_file(az_chunks)
8284
+
8053
8285
  #
8054
8286
 
8055
8287
  commit_resp = check.not_none(await self._send_method_request(
8056
8288
  GithubCacheServiceV2.FINALIZE_CACHE_ENTRY_METHOD, # type: ignore[arg-type]
8057
8289
  GithubCacheServiceV2.FinalizeCacheEntryUploadRequest(
8058
- key=key,
8290
+ key=fixed_key,
8059
8291
  size_bytes=file_size,
8060
8292
  version=version,
8061
8293
  ),
@@ -11472,7 +11704,7 @@ class DockerComposeRun(AsyncExitStacked):
11472
11704
 
11473
11705
  async with contextlib.AsyncExitStack() as es:
11474
11706
  if not (self._cfg.no_dependencies or self._cfg.no_dependency_cleanup):
11475
- await es.enter_async_context(adefer(self._cleanup_dependencies)) # noqa
11707
+ await es.enter_async_context(adefer(self._cleanup_dependencies())) # noqa
11476
11708
 
11477
11709
  sh_cmd = ' '.join([
11478
11710
  'docker',
omdev/scripts/pyproject.py CHANGED
@@ -7972,8 +7972,11 @@ class BasePyprojectPackageGenerator(abc.ABC):
7972
7972
  #
7973
7973
 
7974
7974
  _STANDARD_FILES: ta.Sequence[str] = [
7975
- 'LICENSE',
7976
- 'README.rst',
7975
+ *[
7976
+ ''.join([n, x])
7977
+ for n in ('LICENSE', 'README')
7978
+ for x in ('', '.txt', '.md', '.rst')
7979
+ ],
7977
7980
  ]
7978
7981
 
7979
7982
  def _symlink_standard_files(self) -> None:
omdev-0.0.0.dev290.dist-info/METADATA → omdev-0.0.0.dev292.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: omdev
3
- Version: 0.0.0.dev290
3
+ Version: 0.0.0.dev292
4
4
  Summary: omdev
5
5
  Author: wrmsr
6
6
  License: BSD-3-Clause
@@ -12,7 +12,7 @@ Classifier: Operating System :: OS Independent
12
12
  Classifier: Operating System :: POSIX
13
13
  Requires-Python: >=3.12
14
14
  License-File: LICENSE
15
- Requires-Dist: omlish==0.0.0.dev290
15
+ Requires-Dist: omlish==0.0.0.dev292
16
16
  Provides-Extra: all
17
17
  Requires-Dist: black~=25.1; extra == "all"
18
18
  Requires-Dist: pycparser~=2.22; extra == "all"
omdev-0.0.0.dev290.dist-info/RECORD → omdev-0.0.0.dev292.dist-info/RECORD RENAMED
@@ -1,4 +1,4 @@
1
- omdev/.manifests.json,sha256=zmYuVLzazV03KggE8ZqY0MlVvpEwChvCWh_wfCSEn3w,10394
1
+ omdev/.manifests.json,sha256=NMpBMsMXhfagy-iPTPxxWagDgrBIE5CTXHmLhl8_FOU,10394
2
2
  omdev/__about__.py,sha256=OgO_8azOzKriF2qPeDWRLcRe9p1XW5kYdJCq5vewff0,1171
3
3
  omdev/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
4
  omdev/cmake.py,sha256=9rfSvFHPmKDj9ngvfDB2vK8O-xO_ZwUm7hMKLWA-yOw,4578
@@ -68,7 +68,7 @@ omdev/ci/__main__.py,sha256=Jsrv3P7LX2Cg08W7ByZfZ1JQT4lgLDPW1qNAmShFuMk,75
68
68
  omdev/ci/cache.py,sha256=MMPx3BMKVGnF2yASEjikvao8H2lRrUdik4eYU8xcFnQ,8352
69
69
  omdev/ci/ci.py,sha256=w08Bmdm0dO_ew4ohaKH2mUnRSaJGNS57pbecAlI5X-A,7917
70
70
  omdev/ci/cli.py,sha256=jpgdmZS8qLrY9YGC29sizyo6ue3uR5yXf0lSzNFuRiw,7183
71
- omdev/ci/compose.py,sha256=LX2QKB8DP3ADjJA1-8G_cncZqXnhD_7fQSM5aAo93Zw,4510
71
+ omdev/ci/compose.py,sha256=QgN6H66nItcJniivFiw7nL4tgUARHqap8QzxEdcbwDc,4512
72
72
  omdev/ci/consts.py,sha256=HkSYz-_hHilcHPBvRs-SwcUxW7vMNlHXZ8OyIKnVQbQ,21
73
73
  omdev/ci/inject.py,sha256=-rEXOxGNZQLz-CUEen3w8p21xMVkbrcKi8FRNOP_p9k,1924
74
74
  omdev/ci/requirements.py,sha256=UKN6avU5V96YmmJL8XYvMPxzzOagrKpGTYadaxI2z9U,2105
@@ -94,17 +94,18 @@ omdev/ci/github/cli.py,sha256=aOOurzOJ6Srq8g2Z4VG0MhqkH2ulCAHVdDd-eD7kOLE,1121
94
94
  omdev/ci/github/env.py,sha256=FQFjP_m7JWM7es9I51U-6UgJTwAt_UCVHFIYKTd9NKM,394
95
95
  omdev/ci/github/inject.py,sha256=Bfu6L6omd80ei1BbmhkCzmgK1GDWPFG1sCSDzwNNrQw,558
96
96
  omdev/ci/github/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
97
- omdev/ci/github/api/clients.py,sha256=dg5BJjRCTIDd6JYRuO5Z7fG3kuQlWjbHXcdE05qpzBo,12971
97
+ omdev/ci/github/api/clients.py,sha256=lM6wryPL-gDBeGSWjMm7HJyRcqc6XYZlx7SULwGNaE8,13507
98
98
  omdev/ci/github/api/v1/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
99
99
  omdev/ci/github/api/v1/api.py,sha256=R_PN2PJzsDhS2Vi3pQPViUpWTOqP2WeW5tOHMxXfRTo,3193
100
100
  omdev/ci/github/api/v1/client.py,sha256=KD5knp0UKeBvHNcxzWnYJgVW233xUXHwMlVrpDB4SBY,4977
101
101
  omdev/ci/github/api/v2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
102
102
  omdev/ci/github/api/v2/api.py,sha256=A2OGixGgr-pTdKQaasBOfSLQQvwnhuSbN96neMQi5W0,3963
103
- omdev/ci/github/api/v2/client.py,sha256=l_jHiWGYugpW3_cxPA9o0GMAjpNaSaWZ4AGbVxfGMMs,5256
103
+ omdev/ci/github/api/v2/azure.py,sha256=N2Z1525WMCAyYSfDqICneRT6itwkAoX3_qp8OlE-jG8,5462
104
+ omdev/ci/github/api/v2/client.py,sha256=scY_Nmzt6wKjFt_YjTq0CjBHArV8DnLQeLlxlpShOOI,6348
104
105
  omdev/cli/__init__.py,sha256=V_l6VP1SZMlJbO-8CJwSuO9TThOy2S_oaPepNYgIrbE,37
105
106
  omdev/cli/__main__.py,sha256=mOJpgc07o0r5luQ1DlX4tk2PqZkgmbwPbdzJ3KmtjgQ,138
106
107
  omdev/cli/_pathhack.py,sha256=kxqb2kHap68Lkh8b211rDbcgj06hidBiAKA3f9posyc,2119
107
- omdev/cli/clicli.py,sha256=b3ZuzsgTIq3-OR90a2R8eI2vNYHr_S53NdLkcttPkxc,4313
108
+ omdev/cli/clicli.py,sha256=KWDQl1tsJctk-3AOWOf7IP2b4khoUhb0kyfrj3Df5VU,4476
108
109
  omdev/cli/install.py,sha256=C-W171YlIHt4Cfok-nWSMbHwWhqF_PFqq2HixFttYx8,4460
109
110
  omdev/cli/main.py,sha256=dxZFyzKuwRykHHhoKKUA0fUa9QsY0dgdvLHbXNuIPCY,6694
110
111
  omdev/cli/managers.py,sha256=BV98_n30Jj63OJrFgRoVZRfICxMLXEZKoEn4rMj9LV4,1160
@@ -218,16 +219,16 @@ omdev/pyproject/cexts.py,sha256=x13piOOnNrYbA17qZLDVuR0p1sqhgEwpk4FtImX-klM,4281
218
219
  omdev/pyproject/cli.py,sha256=Jp3OTevYndvtid2fr-Q7rWza29GnLexotmf19JJcX-o,8743
219
220
  omdev/pyproject/configs.py,sha256=HEo90bPUAo6CBnBHZFDYohlwiRD-4cxZCYR6oXv-5lQ,2802
220
221
  omdev/pyproject/inject.py,sha256=PgZnfWGoqjHsaHLUEPJaQW_66h1LRuSm8Njl--KDzOw,314
221
- omdev/pyproject/pkg.py,sha256=wqWC584ys_6jJ7qNWn6wHV3CAlsMCia9pme6T5DZ8cc,14556
222
+ omdev/pyproject/pkg.py,sha256=evhx3W7Os4V8y4cugp5-6KhmVe2-mxRel-IgrMZZoHQ,14657
222
223
  omdev/pyproject/reqs.py,sha256=8feZ71YnGzwKbLK4zO28CDQeNcZIIuq6cnkBhs6M-7E,2406
223
224
  omdev/pyproject/venvs.py,sha256=GUurjC7qzGlFL-su4C0YPO_pxbwDAyl1CqyLOB3WLCA,1911
224
225
  omdev/pyproject/resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
225
226
  omdev/pyproject/resources/docker-dev.sh,sha256=DHkz5D18jok_oDolfg2mqrvGRWFoCe9GQo04dR1czcc,838
226
227
  omdev/pyproject/resources/python.sh,sha256=rFaN4SiJ9hdLDXXsDTwugI6zsw6EPkgYMmtacZeTbvw,749
227
228
  omdev/scripts/__init__.py,sha256=MKCvUAEQwsIvwLixwtPlpBqmkMXLCnjjXyAXvVpDwVk,91
228
- omdev/scripts/ci.py,sha256=tTcMmy7iHIFXsou_595G_7SBXohflkJOAP3SnXmrK3g,346354
229
+ omdev/scripts/ci.py,sha256=el2XV4fefwP3qMavQ6vDDAXOfEAyTepzpkIt9R5WyWw,353241
229
230
  omdev/scripts/interp.py,sha256=xIPlK_zF_6qqyZev8hsCrx5YQYMAPkYpa1i61ouOyEc,151774
230
- omdev/scripts/pyproject.py,sha256=aHyxQmxNI2V_xB_Kqqw2mx0lOtEciA8b_-loAmNqR9w,260824
231
+ omdev/scripts/pyproject.py,sha256=S_u1a-97woo7twmaTNN8yHNaVGP2Lye1OPocIc1A57M,260925
231
232
  omdev/scripts/slowcat.py,sha256=lssv4yrgJHiWfOiHkUut2p8E8Tq32zB-ujXESQxFFHY,2728
232
233
  omdev/scripts/tmpexec.py,sha256=WTYcf56Tj2qjYV14AWmV8SfT0u6Y8eIU6cKgQRvEK3c,1442
233
234
  omdev/tokens/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -264,9 +265,9 @@ omdev/tools/json/rendering.py,sha256=tMcjOW5edfozcMSTxxvF7WVTsbYLoe9bCKFh50qyaGw
264
265
  omdev/tools/pawk/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
265
266
  omdev/tools/pawk/__main__.py,sha256=VCqeRVnqT1RPEoIrqHFSu4PXVMg4YEgF4qCQm90-eRI,66
266
267
  omdev/tools/pawk/pawk.py,sha256=zsEkfQX0jF5bn712uqPAyBSdJt2dno1LH2oeSMNfXQI,11424
267
- omdev-0.0.0.dev290.dist-info/licenses/LICENSE,sha256=B_hVtavaA8zCYDW99DYdcpDLKz1n3BBRjZrcbv8uG8c,1451
268
- omdev-0.0.0.dev290.dist-info/METADATA,sha256=49codIQ-beDY5Uup8OdRe1fRJEwiIsb03MvUJtgQSM4,1478
269
- omdev-0.0.0.dev290.dist-info/WHEEL,sha256=pxyMxgL8-pra_rKaQ4drOZAegBVuX-G_4nRHjjgWbmo,91
270
- omdev-0.0.0.dev290.dist-info/entry_points.txt,sha256=dHLXFmq5D9B8qUyhRtFqTGWGxlbx3t5ejedjrnXNYLU,33
271
- omdev-0.0.0.dev290.dist-info/top_level.txt,sha256=1nr7j30fEWgLYHW3lGR9pkdHkb7knv1U1ES1XRNVQ6k,6
272
- omdev-0.0.0.dev290.dist-info/RECORD,,
268
+ omdev-0.0.0.dev292.dist-info/licenses/LICENSE,sha256=B_hVtavaA8zCYDW99DYdcpDLKz1n3BBRjZrcbv8uG8c,1451
269
+ omdev-0.0.0.dev292.dist-info/METADATA,sha256=TuMFUm1uQeAWitZoLBEf1yrCGF8CKcWzdNVp1SN7AIw,1478
270
+ omdev-0.0.0.dev292.dist-info/WHEEL,sha256=pxyMxgL8-pra_rKaQ4drOZAegBVuX-G_4nRHjjgWbmo,91
271
+ omdev-0.0.0.dev292.dist-info/entry_points.txt,sha256=dHLXFmq5D9B8qUyhRtFqTGWGxlbx3t5ejedjrnXNYLU,33
272
+ omdev-0.0.0.dev292.dist-info/top_level.txt,sha256=1nr7j30fEWgLYHW3lGR9pkdHkb7knv1U1ES1XRNVQ6k,6
273
+ omdev-0.0.0.dev292.dist-info/RECORD,,