upnext-shared 0.0.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- upnext_shared-0.0.1/.gitignore +111 -0
- upnext_shared-0.0.1/PKG-INFO +6 -0
- upnext_shared-0.0.1/README.md +0 -0
- upnext_shared-0.0.1/pyproject.toml +19 -0
- upnext_shared-0.0.1/src/shared/__init__.py +139 -0
- upnext_shared-0.0.1/src/shared/_version.py +3 -0
- upnext_shared-0.0.1/src/shared/api.py +26 -0
- upnext_shared-0.0.1/src/shared/artifacts.py +20 -0
- upnext_shared-0.0.1/src/shared/events.py +189 -0
- upnext_shared-0.0.1/src/shared/models.py +335 -0
- upnext_shared-0.0.1/src/shared/patterns.py +88 -0
- upnext_shared-0.0.1/src/shared/schemas.py +484 -0
- upnext_shared-0.0.1/src/shared/workers.py +22 -0
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
# =============================================================================
|
|
2
|
+
# Python
|
|
3
|
+
# =============================================================================
|
|
4
|
+
__pycache__/
|
|
5
|
+
*.py[cod]
|
|
6
|
+
*$py.class
|
|
7
|
+
*.so
|
|
8
|
+
.Python
|
|
9
|
+
build/
|
|
10
|
+
develop-eggs/
|
|
11
|
+
dist/
|
|
12
|
+
downloads/
|
|
13
|
+
eggs/
|
|
14
|
+
.eggs/
|
|
15
|
+
*.egg-info/
|
|
16
|
+
.installed.cfg
|
|
17
|
+
*.egg
|
|
18
|
+
MANIFEST
|
|
19
|
+
*.manifest
|
|
20
|
+
pip-log.txt
|
|
21
|
+
pip-delete-this-directory.txt
|
|
22
|
+
htmlcov/
|
|
23
|
+
.tox/
|
|
24
|
+
.nox/
|
|
25
|
+
.coverage
|
|
26
|
+
.coverage.*
|
|
27
|
+
.cache
|
|
28
|
+
nosetests.xml
|
|
29
|
+
coverage.xml
|
|
30
|
+
*.cover
|
|
31
|
+
*.py,cover
|
|
32
|
+
.hypothesis/
|
|
33
|
+
.pytest_cache/
|
|
34
|
+
cover/
|
|
35
|
+
*.mo
|
|
36
|
+
*.pot
|
|
37
|
+
.venv/
|
|
38
|
+
venv/
|
|
39
|
+
ENV/
|
|
40
|
+
env/
|
|
41
|
+
.pdm.toml
|
|
42
|
+
.pdm-python
|
|
43
|
+
.pdm-build/
|
|
44
|
+
__pypackages__/
|
|
45
|
+
.mypy_cache/
|
|
46
|
+
.dmypy.json
|
|
47
|
+
dmypy.json
|
|
48
|
+
.pyre/
|
|
49
|
+
.pytype/
|
|
50
|
+
cython_debug/
|
|
51
|
+
.ruff_cache/
|
|
52
|
+
.pypirc
|
|
53
|
+
|
|
54
|
+
# =============================================================================
|
|
55
|
+
# Node / Next.js
|
|
56
|
+
# =============================================================================
|
|
57
|
+
node_modules/
|
|
58
|
+
.pnp
|
|
59
|
+
.pnp.js
|
|
60
|
+
.next/
|
|
61
|
+
out/
|
|
62
|
+
next-env.d.ts
|
|
63
|
+
.source/
|
|
64
|
+
npm-debug.log*
|
|
65
|
+
yarn-debug.log*
|
|
66
|
+
yarn-error.log*
|
|
67
|
+
.pnpm-debug.log*
|
|
68
|
+
*.tsbuildinfo
|
|
69
|
+
.vercel
|
|
70
|
+
|
|
71
|
+
# =============================================================================
|
|
72
|
+
# Database
|
|
73
|
+
# =============================================================================
|
|
74
|
+
*.sqlite
|
|
75
|
+
*.sqlite3
|
|
76
|
+
*.sqlite3-journal
|
|
77
|
+
db.sqlite
|
|
78
|
+
db.sqlite3
|
|
79
|
+
|
|
80
|
+
# =============================================================================
|
|
81
|
+
# Environment & Secrets
|
|
82
|
+
# =============================================================================
|
|
83
|
+
.env
|
|
84
|
+
.env*.local
|
|
85
|
+
.env.prod
|
|
86
|
+
|
|
87
|
+
# =============================================================================
|
|
88
|
+
# IDE & OS
|
|
89
|
+
# =============================================================================
|
|
90
|
+
.DS_Store
|
|
91
|
+
*.pem
|
|
92
|
+
.idea/
|
|
93
|
+
*.swp
|
|
94
|
+
*.swo
|
|
95
|
+
*~
|
|
96
|
+
|
|
97
|
+
# =============================================================================
|
|
98
|
+
# Testing & Coverage
|
|
99
|
+
# =============================================================================
|
|
100
|
+
coverage/
|
|
101
|
+
coverage_html/
|
|
102
|
+
test-results/
|
|
103
|
+
playwright-report/
|
|
104
|
+
blob-report/
|
|
105
|
+
.vitest/
|
|
106
|
+
|
|
107
|
+
# =============================================================================
|
|
108
|
+
# tanstack start
|
|
109
|
+
# =============================================================================
|
|
110
|
+
.output/
|
|
111
|
+
.tanstack/
|
|
File without changes
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "upnext-shared"
|
|
3
|
+
dynamic = ["version"]
|
|
4
|
+
description = "Shared models for UpNext packages"
|
|
5
|
+
readme = "README.md"
|
|
6
|
+
requires-python = ">=3.12"
|
|
7
|
+
dependencies = [
|
|
8
|
+
"pydantic>=2.10.0",
|
|
9
|
+
]
|
|
10
|
+
|
|
11
|
+
[build-system]
|
|
12
|
+
requires = ["hatchling"]
|
|
13
|
+
build-backend = "hatchling.build"
|
|
14
|
+
|
|
15
|
+
[tool.hatch.build.targets.wheel]
|
|
16
|
+
packages = ["src/shared"]
|
|
17
|
+
|
|
18
|
+
[tool.hatch.version]
|
|
19
|
+
path = "src/shared/_version.py"
|
|
@@ -0,0 +1,139 @@
|
|
|
1
|
+
"""Shared models for UpNext packages."""
|
|
2
|
+
|
|
3
|
+
from shared._version import __version__
|
|
4
|
+
from shared.api import API_PREFIX, HOURLY_BUCKET_TTL, MINUTE_BUCKET_TTL, REGISTRY_TTL
|
|
5
|
+
from shared.artifacts import ArtifactType
|
|
6
|
+
from shared.events import (
|
|
7
|
+
BatchEventItem,
|
|
8
|
+
BatchEventRequest,
|
|
9
|
+
EventRequest,
|
|
10
|
+
EventType,
|
|
11
|
+
HealthResponse,
|
|
12
|
+
JobCheckpointEvent,
|
|
13
|
+
JobCompletedEvent,
|
|
14
|
+
JobFailedEvent,
|
|
15
|
+
JobProgressEvent,
|
|
16
|
+
JobRetryingEvent,
|
|
17
|
+
JobStartedEvent,
|
|
18
|
+
)
|
|
19
|
+
from shared.models import Job, JobStatus, StateTransition
|
|
20
|
+
from shared.patterns import get_matching_patterns, matches_event_pattern
|
|
21
|
+
from shared.schemas import (
|
|
22
|
+
ApiDetailResponse,
|
|
23
|
+
ApiEndpoint,
|
|
24
|
+
ApiInfo,
|
|
25
|
+
ApiInstance,
|
|
26
|
+
ApiOverview,
|
|
27
|
+
ApiPageResponse,
|
|
28
|
+
ApiRequestEvent,
|
|
29
|
+
ApiRequestEventsResponse,
|
|
30
|
+
ApiRequestSnapshotEvent,
|
|
31
|
+
ApisListResponse,
|
|
32
|
+
ApiSnapshotEvent,
|
|
33
|
+
ApisSnapshotEvent,
|
|
34
|
+
ApiStats,
|
|
35
|
+
ApiTrendHour,
|
|
36
|
+
ApiTrendsResponse,
|
|
37
|
+
ApiTrendsSnapshotEvent,
|
|
38
|
+
ArtifactCreateResponse,
|
|
39
|
+
ArtifactListResponse,
|
|
40
|
+
ArtifactQueuedResponse,
|
|
41
|
+
ArtifactResponse,
|
|
42
|
+
ArtifactStreamEvent,
|
|
43
|
+
CreateArtifactRequest,
|
|
44
|
+
DashboardStats,
|
|
45
|
+
EndpointsListResponse,
|
|
46
|
+
ErrorResponse,
|
|
47
|
+
FunctionDetailResponse,
|
|
48
|
+
FunctionInfo,
|
|
49
|
+
FunctionsListResponse,
|
|
50
|
+
FunctionType,
|
|
51
|
+
JobHistoryResponse,
|
|
52
|
+
JobListResponse,
|
|
53
|
+
JobStatsResponse,
|
|
54
|
+
JobTrendHour,
|
|
55
|
+
JobTrendsResponse,
|
|
56
|
+
JobTrendsSnapshotEvent,
|
|
57
|
+
Run,
|
|
58
|
+
RunStats,
|
|
59
|
+
WorkerInfo,
|
|
60
|
+
WorkerInstance,
|
|
61
|
+
WorkersListResponse,
|
|
62
|
+
WorkersSnapshotEvent,
|
|
63
|
+
WorkerStats,
|
|
64
|
+
)
|
|
65
|
+
|
|
66
|
+
# Public API of the ``shared`` package: every name re-exported at package
# level for ``from shared import X``. Grouped by domain; keep in sync with
# the imports above.
__all__ = [
    "__version__",
    # API tracking constants
    "API_PREFIX",
    "MINUTE_BUCKET_TTL",
    "HOURLY_BUCKET_TTL",
    "REGISTRY_TTL",
    # Core models
    "Job",
    "JobStatus",
    "StateTransition",
    # Events
    "EventType",
    "EventRequest",
    "BatchEventItem",
    "BatchEventRequest",
    "JobStartedEvent",
    "JobCompletedEvent",
    "JobFailedEvent",
    "JobRetryingEvent",
    "JobProgressEvent",
    "JobCheckpointEvent",
    # Artifacts
    "ArtifactType",
    "ArtifactCreateResponse",
    "ArtifactQueuedResponse",
    "ArtifactResponse",
    "ArtifactListResponse",
    "ArtifactStreamEvent",
    "CreateArtifactRequest",
    "ErrorResponse",
    # Health
    "HealthResponse",
    # Pattern matching
    "matches_event_pattern",
    "get_matching_patterns",
    # API Schemas
    "ApiInstance",
    "ApiEndpoint",
    "ApiInfo",
    "ApiOverview",
    "ApiPageResponse",
    "ApiRequestEvent",
    "ApiRequestEventsResponse",
    "ApiRequestSnapshotEvent",
    "ApisSnapshotEvent",
    "ApisListResponse",
    "ApiSnapshotEvent",
    "EndpointsListResponse",
    "ApiDetailResponse",
    "ApiTrendHour",
    "ApiTrendsSnapshotEvent",
    "ApiTrendsResponse",
    "FunctionType",
    "Run",
    "WorkerInstance",
    "WorkerInfo",
    "WorkersListResponse",
    "WorkersSnapshotEvent",
    "WorkerStats",
    "FunctionInfo",
    "FunctionsListResponse",
    "FunctionDetailResponse",
    "RunStats",
    "ApiStats",
    "DashboardStats",
    # Job History
    "JobHistoryResponse",
    "JobListResponse",
    "JobStatsResponse",
    "JobTrendHour",
    "JobTrendsSnapshotEvent",
    "JobTrendsResponse",
]
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
"""API tracking constants shared between SDK middleware and server reader.
|
|
2
|
+
|
|
3
|
+
Redis key structure:
|
|
4
|
+
upnext:api:registry -> SET of api names
|
|
5
|
+
upnext:api:{api}:endpoints -> SET of "METHOD:path"
|
|
6
|
+
upnext:api:{api}:{method}:{path}:m:{YYYY-MM-DDTHH:MM} -> HASH (minute bucket)
|
|
7
|
+
upnext:api:{api}:{method}:{path}:h:{YYYY-MM-DDTHH} -> HASH (hourly bucket)
|
|
8
|
+
|
|
9
|
+
Each hash contains:
|
|
10
|
+
requests, errors, total_latency_ms, status_2xx, status_4xx, status_5xx
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
# Key prefix for all API tracking keys
|
|
14
|
+
API_PREFIX = "upnext:apis"
|
|
15
|
+
|
|
16
|
+
# Key prefix for API instance heartbeats
|
|
17
|
+
API_INSTANCE_PREFIX = "upnext:apis:instances"
|
|
18
|
+
|
|
19
|
+
# TTLs (seconds)
|
|
20
|
+
MINUTE_BUCKET_TTL = 600 # 10 minutes
|
|
21
|
+
HOURLY_BUCKET_TTL = 2_592_000 # 30 days
|
|
22
|
+
REGISTRY_TTL = 2_592_000 # 30 days
|
|
23
|
+
|
|
24
|
+
# API instance TTL - must heartbeat within this time
|
|
25
|
+
# With 10s heartbeat interval, this gives 3 missed heartbeats before expiry
|
|
26
|
+
API_INSTANCE_TTL = 30
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
"""Artifact types for job outputs."""
|
|
2
|
+
|
|
3
|
+
from enum import StrEnum
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class ArtifactType(StrEnum):
    """Artifact types for job outputs.

    The enum *values* are wire/storage tags — they are persisted with
    artifacts, so they must never be changed even where they deviate from
    official MIME types (e.g. ``image/svg`` rather than ``image/svg+xml``,
    and the non-standard ``file/*`` family).
    """

    # Inline/plain payloads
    TEXT = "text"
    JSON = "json"
    # Image formats (MIME-like tags)
    PNG = "image/png"
    JPEG = "image/jpeg"
    WEBP = "image/webp"
    GIF = "image/gif"
    SVG = "image/svg"
    # File/document formats (non-standard "file/" prefix, not real MIME types)
    PDF = "file/pdf"
    CSV = "file/csv"
    XML = "file/xml"
    HTML = "file/html"
    # Catch-all for opaque binary blobs
    BINARY = "file/binary"
|
|
@@ -0,0 +1,189 @@
|
|
|
1
|
+
"""Event schemas for job tracking between workers and API."""
|
|
2
|
+
|
|
3
|
+
from datetime import datetime
|
|
4
|
+
from enum import StrEnum
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
from pydantic import BaseModel, ConfigDict, Field
|
|
8
|
+
|
|
9
|
+
# Well-known Redis key names used for event transport between workers and the
# API. Names suggest the first three are Stream keys and the last a Pub/Sub
# channel — TODO confirm against the producer/consumer code.
EVENTS_STREAM = "upnext:events"                      # job lifecycle events
API_REQUESTS_STREAM = "upnext:events:api:requests"   # per-request API tracking events
ARTIFACT_EVENTS_STREAM = "upnext:events:artifacts"   # artifact creation/updates
EVENTS_PUBSUB_CHANNEL = "upnext:events:pubsub"       # fan-out channel (e.g. for SSE)
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class EventType(StrEnum):
    """Event types sent from workers to API.

    Values use the dotted ``domain.action`` form; they are wire identifiers,
    so existing values must not be renamed.
    """

    JOB_STARTED = "job.started"
    JOB_COMPLETED = "job.completed"
    JOB_FAILED = "job.failed"        # terminal or pre-retry failure (see JobFailedEvent.will_retry)
    JOB_RETRYING = "job.retrying"
    JOB_PROGRESS = "job.progress"
    JOB_CHECKPOINT = "job.checkpoint"
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class JobStartedEvent(BaseModel):
    """Event data for job.started."""

    job_id: str
    # ``function`` vs ``function_name``: presumably the registered identifier
    # vs the human-readable name — TODO confirm against the worker SDK.
    function: str
    function_name: str
    # Set when this job was spawned by another job; root_id is the id at the
    # top of that parent chain (equals job_id for a standalone job, presumably).
    parent_id: str | None = None
    root_id: str
    kwargs: dict[str, Any] = Field(default_factory=dict)
    metadata: dict[str, Any] = Field(default_factory=dict)
    # 1-based attempt counter; max_retries defaults to no retries.
    attempt: int = 1
    max_retries: int = 0
    worker_id: str | None = None
    started_at: datetime
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class JobCompletedEvent(BaseModel):
    """Event data for job.completed."""

    job_id: str
    function: str
    function_name: str
    parent_id: str | None = None
    root_id: str
    # Arbitrary JSON-serializable return value of the job, if any.
    result: Any = None
    # Wall-clock duration in milliseconds; None when not measured.
    duration_ms: float | None = None
    # Attempt that ultimately succeeded (1-based).
    attempt: int = 1
    completed_at: datetime
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
class JobFailedEvent(BaseModel):
    """Event data for job.failed."""

    job_id: str
    function: str
    function_name: str
    parent_id: str | None = None
    root_id: str
    # Error message; full traceback is optional (may be withheld).
    error: str
    traceback: str | None = None
    attempt: int = 1
    max_retries: int = 0
    # True when a retry will follow, i.e. this failure is not terminal.
    will_retry: bool = False
    failed_at: datetime
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
class JobRetryingEvent(BaseModel):
    """Event data for job.retrying."""

    job_id: str
    function: str
    function_name: str
    parent_id: str | None = None
    root_id: str
    # Error from the attempt that triggered this retry.
    error: str
    # Backoff before the next attempt, in seconds; retry_at is the absolute time.
    delay_seconds: float
    current_attempt: int
    next_attempt: int
    retry_at: datetime
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
class JobProgressEvent(BaseModel):
    """Event data for job.progress."""

    job_id: str
    parent_id: str | None = None
    root_id: str
    # Progress value; range (0..1 vs 0..100) is not enforced here — TODO
    # confirm the convention used by producers.
    progress: float
    # Optional human-readable status line to show alongside the progress value.
    message: str | None = None
    updated_at: datetime
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
class JobCheckpointEvent(BaseModel):
    """Event data for job.checkpoint."""

    job_id: str
    parent_id: str | None = None
    root_id: str
    # Opaque, JSON-serializable snapshot of job state for later resumption.
    state: dict[str, Any]
    checkpointed_at: datetime
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
class SSEJobEvent(BaseModel):
    """Event payload streamed to browser clients via SSE.

    Only includes fields the dashboard needs — sensitive data like
    kwargs, result, traceback, and checkpoint state are excluded.
    Extra fields from the source event are silently dropped.

    This is a projection over all Job*Event payloads, so most fields are
    optional and populated only for the event ``type`` they belong to.
    """

    # Drop any field of the source event not declared below.
    model_config = ConfigDict(extra="ignore")

    # Event type string (e.g. one of the EventType values).
    type: str
    job_id: str = ""
    worker_id: str = ""
    function: str | None = None
    function_name: str | None = None
    parent_id: str | None = None
    # Required even though it follows defaulted fields — pydantic models are
    # keyword-constructed, so ordering is fine.
    root_id: str
    # job.started / job.completed / job.failed
    attempt: int | None = None
    max_retries: int | None = None
    started_at: datetime | None = None
    # job.completed
    duration_ms: float | None = None
    completed_at: datetime | None = None
    # job.failed
    error: str | None = None
    failed_at: datetime | None = None
    # job.retrying
    current_attempt: int | None = None
    next_attempt: int | None = None
    # job.progress
    progress: float | None = None
    message: str | None = None
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
class EventRequest(BaseModel):
    """Generic event request from workers.

    Envelope for a single event: ``type`` identifies the event kind and
    ``data`` carries the type-specific payload.
    """

    type: str
    data: dict[str, Any]
    worker_id: str | None = None
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
class BatchEventItem(BaseModel):
    """Single event in a batch."""

    type: str
    job_id: str
    worker_id: str
    # Producer-side timestamp; presumably a Unix epoch in seconds — TODO
    # confirm units against the worker SDK.
    timestamp: float
    # Type-specific payload; empty dict when the event carries no extra data.
    data: dict[str, Any] = Field(default_factory=dict)
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
class BatchEventRequest(BaseModel):
    """Batch of events from workers.

    ``worker_id`` here is the sender of the whole batch; each item also
    carries its own ``worker_id``.
    """

    events: list[BatchEventItem]
    worker_id: str | None = None
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
class HealthResponse(BaseModel):
    """Health check response."""

    status: str = "ok"
    # Server/package version string (required).
    version: str
    # Service tier; defaults to the free tier.
    tier: str = "free"
    # Names of enabled optional features, empty when none.
    features: list[str] = Field(default_factory=list)
|
|
175
|
+
|
|
176
|
+
|
|
177
|
+
class EventResponse(BaseModel):
    """Event response.

    Acknowledgement returned for a single submitted event.
    """

    status: str = "ok"
    # Echo of the accepted event's type.
    event_type: str
|
|
182
|
+
|
|
183
|
+
|
|
184
|
+
class BatchEventResponse(BaseModel):
    """Batch event response.

    Acknowledgement for a batch submission: how many events were processed
    and how many failed.
    """

    status: str = "ok"
    processed: int
    errors: int = 0
|