codeapi-client 0.4.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- codeapi/__init__.py +17 -0
- codeapi/client/__init__.py +37 -0
- codeapi/client/_async/__init__.py +357 -0
- codeapi/client/_async/_app.py +369 -0
- codeapi/client/_async/_cli.py +334 -0
- codeapi/client/_async/_code.py +211 -0
- codeapi/client/_async/_jobs.py +512 -0
- codeapi/client/_async/_mcp.py +445 -0
- codeapi/client/_async/_web.py +343 -0
- codeapi/client/_base.py +118 -0
- codeapi/client/_sync/__init__.py +347 -0
- codeapi/client/_sync/_app.py +367 -0
- codeapi/client/_sync/_cli.py +332 -0
- codeapi/client/_sync/_code.py +203 -0
- codeapi/client/_sync/_jobs.py +497 -0
- codeapi/client/_sync/_mcp.py +442 -0
- codeapi/client/_sync/_web.py +341 -0
- codeapi/client/_utils.py +61 -0
- codeapi/client/py.typed +0 -0
- codeapi/types/__init__.py +77 -0
- codeapi/types/_api.py +30 -0
- codeapi/types/_base.py +21 -0
- codeapi/types/_code.py +31 -0
- codeapi/types/_enums.py +150 -0
- codeapi/types/_env.py +65 -0
- codeapi/types/_exc.py +35 -0
- codeapi/types/_job.py +67 -0
- codeapi/types/_json.py +67 -0
- codeapi/types/_stream.py +36 -0
- codeapi/types/_swarm.py +85 -0
- codeapi/types/_time.py +46 -0
- codeapi/types/_zips.py +466 -0
- codeapi/types/py.typed +0 -0
- codeapi_client-0.4.1.dist-info/METADATA +14 -0
- codeapi_client-0.4.1.dist-info/RECORD +36 -0
- codeapi_client-0.4.1.dist-info/WHEEL +4 -0
|
@@ -0,0 +1,343 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import TYPE_CHECKING, List
|
|
4
|
+
|
|
5
|
+
import httpx
|
|
6
|
+
|
|
7
|
+
from codeapi.types import (
|
|
8
|
+
CodeInfo,
|
|
9
|
+
CodeType,
|
|
10
|
+
CodeZip,
|
|
11
|
+
Job,
|
|
12
|
+
JobStage,
|
|
13
|
+
JobStatus,
|
|
14
|
+
JobType,
|
|
15
|
+
JsonData,
|
|
16
|
+
)
|
|
17
|
+
|
|
18
|
+
if TYPE_CHECKING:
|
|
19
|
+
from . import AsyncClient
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class AsyncStoredWebClient:
    """Async operations on Web code that is already stored on the server."""

    def __init__(self, client: AsyncClient):
        # Parent client supplies base_url, auth header, and the code sub-client.
        self._client = client

    async def run(
        self,
        code_id: str,
    ) -> Job:
        """Runs a stored Web page.

        Args:
            code_id (str): The code ID.

        Returns:
            Job: The created job.

        Raises:
            HTTPException: If the request fails.
        """
        endpoint = f"{self._client.base_url}/jobs/code/{code_id}/run/web"
        async with httpx.AsyncClient() as http:
            try:
                resp = await http.post(
                    endpoint,
                    headers=self._client.api_key_header,
                )
                resp.raise_for_status()
            except httpx.HTTPStatusError as exc:
                # Translate raw httpx errors into the client's exception types.
                raise self._client._get_http_exception(httpx_error=exc)
            return Job(**resp.json())

    async def list_info(self) -> list[CodeInfo]:
        """List all stored Web code.

        Returns:
            list[CodeInfo]: List of Web code info.
        """
        # Delegate to the generic code client, filtered to Web code only.
        return await self._client.code.list_info(code_type=CodeType.WEB)

    async def delete(self, code_id: str) -> str:
        """Delete stored Web code.

        Args:
            code_id (str): The code ID to delete.

        Returns:
            str: Deletion confirmation message.

        Raises:
            ValueError: If the code_id is not Web code.
        """
        # Verify this is actually Web code
        info = await self._client.code.get_info(code_id)
        if info.code_type == CodeType.WEB:
            return await self._client.code.delete(code_id)
        raise ValueError(
            f"Code '{code_id}' is {info.code_type}, not Web code. "
            "Cannot delete non-Web code from Web client."
        )
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
class AsyncWebJobsClient:
    """Async client for listing and filtering Web run jobs."""

    def __init__(self, client: AsyncClient):
        # Annotated for consistency with AsyncStoredWebClient/AsyncWebClient;
        # AsyncClient is only imported under TYPE_CHECKING, so the lazy
        # annotation has no runtime cost.
        self._client = client

    async def list(
        self,
        job_status: JobStatus | None = None,
        job_stage: JobStage | None = None,
    ) -> List[Job]:
        """List Web jobs.

        Args:
            job_status (JobStatus | None): Filter by job status.
            job_stage (JobStage | None): Filter by job stage.

        Returns:
            List[Job]: List of Web jobs.
        """
        # Always pin the job type to RUN_WEB; callers only choose the filters.
        return await self._client.jobs.list(
            job_type=JobType.RUN_WEB,
            job_status=job_status,
            job_stage=job_stage,
        )

    async def get_latest(self) -> Job | None:
        """Get the most recent Web job.

        Returns:
            Job | None: The most recent Web job, or None if no jobs exist.
        """
        jobs = await self.list()
        # The listing endpoint returns newest first, so index 0 is the latest.
        return jobs[0] if jobs else None

    async def list_queued(self) -> List[Job]:
        """Get all queued Web jobs.

        Returns:
            List[Job]: List of queued Web jobs.
        """
        return await self.list(job_status=JobStatus.QUEUED)

    async def list_scheduled(self) -> List[Job]:
        """Get all scheduled Web jobs.

        Returns:
            List[Job]: List of scheduled Web jobs.
        """
        return await self.list(job_status=JobStatus.SCHEDULED)

    async def list_started(self) -> List[Job]:
        """Get all started Web jobs.

        Returns:
            List[Job]: List of started Web jobs.
        """
        return await self.list(job_status=JobStatus.STARTED)

    async def list_deferred(self) -> List[Job]:
        """Get all deferred Web jobs.

        Returns:
            List[Job]: List of deferred Web jobs.
        """
        return await self.list(job_status=JobStatus.DEFERRED)

    async def list_canceled(self) -> List[Job]:
        """Get all canceled Web jobs.

        Returns:
            List[Job]: List of canceled Web jobs.
        """
        return await self.list(job_status=JobStatus.CANCELED)

    async def list_stopped(self) -> List[Job]:
        """Get all stopped Web jobs.

        Returns:
            List[Job]: List of stopped Web jobs.
        """
        return await self.list(job_status=JobStatus.STOPPED)

    async def list_failed(self) -> List[Job]:
        """Get all failed Web jobs.

        Returns:
            List[Job]: List of failed Web jobs.
        """
        return await self.list(job_status=JobStatus.FAILED)

    async def list_finished(self) -> List[Job]:
        """Get all finished Web jobs.

        Returns:
            List[Job]: List of finished Web jobs.
        """
        return await self.list(job_status=JobStatus.FINISHED)

    async def list_timed_out(self) -> List[Job]:
        """Get all timed out Web jobs.

        Returns:
            List[Job]: List of timed out Web jobs.
        """
        return await self.list(job_status=JobStatus.TIMEOUT)

    async def list_pre_running(self) -> List[Job]:
        """Get all pre-running Web jobs.

        Returns:
            List[Job]: List of pre-running Web jobs.
        """
        return await self.list(job_stage=JobStage.PRE_RUNNING)

    async def list_running(self) -> List[Job]:
        """Get all running Web jobs.

        Returns:
            List[Job]: List of running Web jobs.
        """
        return await self.list(job_stage=JobStage.RUNNING)

    async def list_post_running(self) -> List[Job]:
        """Get all post-running Web jobs.

        Returns:
            List[Job]: List of post-running Web jobs.
        """
        return await self.list(job_stage=JobStage.POST_RUNNING)
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
class AsyncWebClient:
    """Async entry point for Web-page workflows: run, upload, ingest, health."""

    def __init__(self, client: AsyncClient):
        self._client = client
        # Sub-clients share the same parent client instance.
        self.stored = AsyncStoredWebClient(client)
        self.jobs = AsyncWebJobsClient(client)

    async def run(
        self,
        code_zip: CodeZip,
    ) -> Job:
        """Runs a Web page from code zip.

        Args:
            code_zip (CodeZip): The code zip.

        Returns:
            Job: The created job.

        Raises:
            HTTPException: If the request fails.
        """
        endpoint = f"{self._client.base_url}/jobs/code/run/web"
        upload_parts = self._client._prepare_files(code_zip=code_zip)
        async with httpx.AsyncClient() as http:
            try:
                resp = await http.post(
                    endpoint,
                    headers=self._client.api_key_header,
                    files=upload_parts,
                )
                resp.raise_for_status()
            except httpx.HTTPStatusError as exc:
                # Translate raw httpx errors into the client's exception types.
                raise self._client._get_http_exception(httpx_error=exc)
            return Job(**resp.json())

    def get_url(self, job_id: str) -> str:
        """Gets the URL for a Web page.

        Args:
            job_id (str): The job ID.

        Returns:
            str: The Web page URL.
        """
        return self._client.get_subdomain_proxy_url(job_id)

    async def upload(
        self,
        code_zip: CodeZip,
        code_name: str,
        metadata: JsonData | dict | None = None,
    ) -> str:
        """Upload Web code.

        Args:
            code_zip (CodeZip): The code zip.
            code_name (str): The name of the code.
            metadata (JsonData | dict | None): The JSON metadata of the code.

        Returns:
            str: The code ID.
        """
        # Thin wrapper over the generic code client with the type pinned to WEB.
        return await self._client.code.upload(
            code_zip=code_zip,
            code_name=code_name,
            code_type=CodeType.WEB,
            metadata=metadata,
        )

    async def is_healthy(self, job_id: str) -> bool:
        """Checks whether launched Web page is healthy.

        Args:
            job_id (str): The ID of the Web page launch job.

        Returns:
            bool: True if Web page is healthy else False.

        Raises:
            HTTPException: If the request fails.
        """
        return await self._client.jobs.is_healthy(job_id=job_id)

    async def await_healthy(self, job_id: str, timeout: float | None = None) -> Job:
        """Waits for a Web page to become healthy.

        Args:
            job_id (str): The ID of the Web page run job.
            timeout (float | None): Maximum time to wait in seconds. If None, waits indefinitely.

        Returns:
            Job: The job object of the Web page run job.

        Raises:
            HTTPException: If the request fails.
            APIException: If the job enters stage POST_RUNNING unexpectedly.
            TimeoutError: If the timeout is exceeded.
        """
        return await self._client.jobs.await_healthy(job_id=job_id, timeout=timeout)

    async def ingest(
        self,
        code_zip: CodeZip,
        code_name: str,
        metadata: JsonData | dict | None = None,
        build_pexenv: bool = False,
        pexenv_python: str | None = None,
    ) -> Job:
        """Ingest Web code.

        Args:
            code_zip (CodeZip): The code zip.
            code_name (str): The name of the code.
            metadata (JsonData | dict | None): The JSON metadata of the code.
            build_pexenv (bool): Whether to build the pex venv.
            pexenv_python: (str | None): Python interpreter for the pex venv.

        Returns:
            Job: The code ingestion job.
        """
        # Thin wrapper over the generic code client with the type pinned to WEB.
        return await self._client.code.ingest(
            code_zip=code_zip,
            code_name=code_name,
            code_type=CodeType.WEB,
            metadata=metadata,
            build_pexenv=build_pexenv,
            pexenv_python=pexenv_python,
        )
|
codeapi/client/_base.py
ADDED
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import socket
|
|
5
|
+
from abc import ABC
|
|
6
|
+
from io import BytesIO
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Any
|
|
9
|
+
from urllib.parse import urlparse
|
|
10
|
+
|
|
11
|
+
import httpx
|
|
12
|
+
from fastapi import HTTPException
|
|
13
|
+
|
|
14
|
+
from codeapi.client._utils import write_bytes, write_bytes_async
|
|
15
|
+
from codeapi.types import API_KEY_HEADER, CodeAPIError, CodeAPIException, JsonData
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class ClientBase(ABC):
    """Shared plumbing for the sync and async CodeAPI clients.

    Normalizes the base URL, stores the API key, and provides helpers for
    auth headers, HTTP error translation, proxy URLs, multipart file
    preparation, and streaming downloads.
    """

    def __init__(self, base_url: str, api_key: str = ""):
        """Initializes the CodeAPI Client.

        Args:
            base_url (str): The base URL of the CodeAPI.
            api_key (str): The CodeAPI key.
        """
        parsed_url = urlparse(base_url)
        self.protocol = parsed_url.scheme
        if parsed_url.hostname == "0.0.0.0":
            # A wildcard bind address is not routable from a client.
            self.host = "localhost"
        else:
            try:
                # Prefer the canonical hostname so subdomain proxy URLs resolve.
                self.host = socket.gethostbyaddr(str(parsed_url.hostname))[0]
            except Exception:
                # Reverse lookup is best-effort; fall back to the raw hostname.
                self.host = str(parsed_url.hostname)
        self.port = parsed_url.port
        # Bug fix: omit the port when the URL did not specify one — the old
        # f-string produced "http://host:None".
        netloc = self.host if self.port is None else f"{self.host}:{self.port}"
        self.base_url = f"{self.protocol}://{netloc}"
        self.api_key = api_key

    @property
    def api_key_header(self) -> dict[str, str]:
        # Header dict to attach to every authenticated request.
        return {API_KEY_HEADER: self.api_key}

    def _get_response_detail(self, response: httpx.Response) -> str | None:
        """Best-effort extraction of the "detail" field from a response body."""
        try:
            return response.json()["detail"]
        except Exception:
            # Non-JSON body or missing key: no structured detail available.
            return None

    def _get_http_exception(
        self, httpx_error: httpx.HTTPStatusError
    ) -> HTTPException | CodeAPIException:
        """Translates an httpx status error into an API exception.

        Uses the tolerant `_get_response_detail` helper so a non-JSON error
        body cannot raise a confusing secondary exception while an HTTP
        error is being handled.

        Args:
            httpx_error (httpx.HTTPStatusError): The error raised by httpx.

        Returns:
            HTTPException | CodeAPIException: CodeAPIException when the server
                sent a structured {"error", "message"} detail, HTTPException
                otherwise.
        """
        status_code = httpx_error.response.status_code
        detail = self._get_response_detail(httpx_error.response)
        if isinstance(detail, dict) and detail.keys() == {"error", "message"}:
            # Structured server-side errors round-trip as CodeAPIException.
            return CodeAPIException(
                error=CodeAPIError(detail["error"]), message=detail["message"]
            )
        return HTTPException(status_code=status_code, detail=detail)

    def get_proxy_url(self, job_id: str) -> str:
        """URL for the path-based proxy to a running job."""
        return f"{self.base_url}/{job_id}"

    def get_subdomain_proxy_url(self, job_id: str) -> str:
        """URL for the subdomain-based proxy to a running job.

        The job ID is lowercased because DNS labels are case-insensitive.
        """
        host = f"{job_id.lower()}.{self.host}"
        # Omit the port when none was configured (matches base_url handling).
        if self.port is None:
            return f"{self.protocol}://{host}"
        return f"{self.protocol}://{host}:{self.port}"

    def _prepare_files(self, **kwargs: Any) -> dict[str, tuple[str, BytesIO, str]]:
        """Prepare files for multipart upload.

        "*_zip" kwargs become "<name>.zip" parts under the "<key>file" field;
        a "metadata" kwarg becomes a "metadata.json" part. None values are
        skipped.
        """
        files: dict[str, tuple[str, BytesIO, str]] = {}
        for key, value in kwargs.items():
            if value is None:
                continue
            if key.endswith("_zip"):
                name = f"{key.replace('_zip', '')}.zip"
                files[f"{key}file"] = (name, value.to_bytesio(), "application/zip")
            elif key == "metadata":
                files["metadata"] = (
                    "metadata.json",
                    JsonData(value).to_bytesio(),
                    "application/json",
                )
        return files

    def _handle_stream_download(
        self, response: httpx.Response, zip_path: Path | str | None = None
    ) -> bytes:
        """Handle streaming download with optional file saving.

        Args:
            response (httpx.Response): A streaming response to drain.
            zip_path (Path | str | None): If given, also write the bytes here.

        Returns:
            bytes: The full downloaded payload.

        Raises:
            HTTPException: If the response carries an error status; the body
                is then expected to be JSON with a "detail" field.
        """
        mem_bytes = BytesIO()
        try:
            for chunk in response.iter_bytes():
                mem_bytes.write(chunk)
            if response.is_error:
                # Error bodies are JSON, not zip payloads — surface the detail.
                error_content = mem_bytes.getvalue().decode()
                detail = json.loads(error_content)["detail"]
                raise HTTPException(status_code=response.status_code, detail=detail)
            zip_bytes = mem_bytes.getvalue()
            if zip_path:
                write_bytes(path=zip_path, data=zip_bytes)
            return zip_bytes
        finally:
            # Always release the in-memory buffer, even on error.
            mem_bytes.close()

    async def _handle_stream_download_async(
        self, response: httpx.Response, zip_path: Path | str | None = None
    ) -> bytes:
        """Handle streaming download with optional file saving (async variant).

        Args:
            response (httpx.Response): A streaming response to drain.
            zip_path (Path | str | None): If given, also write the bytes here.

        Returns:
            bytes: The full downloaded payload.

        Raises:
            HTTPException: If the response carries an error status; the body
                is then expected to be JSON with a "detail" field.
        """
        mem_bytes = BytesIO()
        try:
            async for chunk in response.aiter_bytes():
                mem_bytes.write(chunk)
            if response.is_error:
                # Error bodies are JSON, not zip payloads — surface the detail.
                error_content = mem_bytes.getvalue().decode()
                detail = json.loads(error_content)["detail"]
                raise HTTPException(status_code=response.status_code, detail=detail)
            zip_bytes = mem_bytes.getvalue()
            if zip_path:
                await write_bytes_async(path=zip_path, data=zip_bytes)
            return zip_bytes
        finally:
            # Always release the in-memory buffer, even on error.
            mem_bytes.close()
|