channel-app 0.0.155__py3-none-any.whl → 0.0.157a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- channel_app/app/order/service.py +82 -41
- channel_app/core/settings.py +1 -0
- channel_app/database/__init__.py +0 -0
- channel_app/database/models.py +58 -0
- channel_app/database/services.py +8 -0
- channel_app/logs/__init__.py +0 -0
- channel_app/logs/enums.py +13 -0
- channel_app/logs/services.py +160 -0
- {channel_app-0.0.155.dist-info → channel_app-0.0.157a1.dist-info}/METADATA +2 -2
- {channel_app-0.0.155.dist-info → channel_app-0.0.157a1.dist-info}/RECORD +12 -6
- {channel_app-0.0.155.dist-info → channel_app-0.0.157a1.dist-info}/WHEEL +1 -1
- {channel_app-0.0.155.dist-info → channel_app-0.0.157a1.dist-info}/top_level.txt +0 -0
channel_app/app/order/service.py
CHANGED
@@ -1,3 +1,4 @@
+import uuid
 from dataclasses import asdict
 from typing import List, Generator, Union

@@ -19,6 +20,7 @@ from channel_app.core.data import (BatchRequestResponseDto,
                                    CancelOrderDto,
                                    ChannelUpdateOrderItemDto)
 from channel_app.core.settings import OmnitronIntegration, ChannelIntegration
+from channel_app.logs.services import LogService
 from channel_app.omnitron.batch_request import ClientBatchRequest
 from channel_app.omnitron.constants import (BatchRequestStatus, ContentType,
                                             FailedReasonType)
@@ -33,48 +35,87 @@ class OrderService(object):
     batch_service = ClientBatchRequest

     def fetch_and_create_order(self, is_success_log=True):
- [old lines 36-42: content not captured in this diff view]
-            get_orders: Generator
-            order_batch_objects = []
-            while True:
-                try:
-                    channel_create_order, report_list, _ = next(get_orders)
-                except StopIteration:
-                    break
-
-                # tips
-                channel_create_order: ChannelCreateOrderDto
-                report_list: List[ErrorReportDto]
-                for report in report_list:
-                    if is_success_log or not report.is_ok:
-                        report.error_code = \
-                            f"{omnitron_integration.batch_request.local_batch_id}" \
-                            f"-Channel-GetOrders_{channel_create_order.order.number}"
-                        omnitron_integration.do_action(
-                            key='create_error_report',
-                            objects=report)
+        log_service = LogService()
+        tx_id = uuid.uuid4()
+        log_service.create_flow(
+            name="OrderSync",
+            transaction_id=tx_id,
+        )

- [old lines 63-77: content not captured in this diff view]
+        try:
+            with log_service.step("fetch_orders"):
+                with OmnitronIntegration(
+                    content_type=ContentType.order.value
+                ) as omnitron_integration:
+
+                    with log_service.step("get_orders"):
+                        get_orders = ChannelIntegration().do_action(
+                            key='get_orders',
+                            batch_request=omnitron_integration.batch_request
+                        )
+
+                    get_orders: Generator
+                    order_batch_objects = []
+                    while True:
+                        try:
+                            channel_create_order, report_list, _ = next(get_orders)
+                        except StopIteration:
+                            break
+
+                        # tips
+                        channel_create_order: ChannelCreateOrderDto
+                        metadata = {
+                            "order_number": channel_create_order.order.number
+                        }
+
+                        report_list: List[ErrorReportDto]
+                        for report in report_list:
+                            if is_success_log or not report.is_ok:
+                                report.error_code = \
+                                    f"{omnitron_integration.batch_request.local_batch_id}" \
+                                    f"-Channel-GetOrders_{channel_create_order.order.number}"
+                                try:
+                                    with log_service.step("create_error_report", metadata=metadata):
+                                        omnitron_integration.do_action(
+                                            key='create_error_report',
+                                            objects=report
+                                        )
+                                except Exception as err:
+                                    log_service.add_exception(err)
+                                    raise
+
+                        try:
+                            with log_service.step("create_order", metadata=metadata):
+                                order = self.create_order(
+                                    omnitron_integration=omnitron_integration,
+                                    channel_order=channel_create_order
+                                )
+                        except Exception as err:
+                            log_service.add_exception(err)
+                            raise
+
+                        if order and omnitron_integration.batch_request.objects:
+                            order_batch_objects.extend(omnitron_integration.batch_request.objects)
+
+                    omnitron_integration.batch_request.objects = order_batch_objects
+
+                    with log_service.step("batch_to_done"):
+                        try:
+                            self.batch_service(settings.OMNITRON_CHANNEL_ID).to_done(
+                                batch_request=omnitron_integration.batch_request
+                            )
+                        except requests_exceptions.HTTPError as exc:
+                            log_service.add_exception(exc)
+                            if exc.response.status_code == 406 and "batch_request_status_100_1" in exc.response.text:
+                                pass
+                            else:
+                                raise exc
+        except Exception as fatal:
+            log_service.add_exception(fatal)
+            raise
+        finally:
+            log_service.save()

     def create_order(self, omnitron_integration: OmnitronIntegration,
                      channel_order: ChannelCreateOrderDto
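The rewritten fetch_and_create_order wraps every stage in LogService: create_flow opens a flow keyed by a transaction id, each unit of work runs inside a log_service.step(...) context, failures are recorded with add_exception, and save() in the finally block persists the whole flow. A minimal sketch of that pattern, assuming DATABASE_URI points at a reachable PostgreSQL instance (LogService builds a SQLAlchemy engine when constructed); DemoSync and do_work are illustrative placeholders, not package code:

import uuid

from channel_app.logs.services import LogService


def do_work():
    # Stand-in for a real unit of work (e.g. fetching orders from the channel).
    return "ok"


def run_demo_flow():
    log_service = LogService()
    log_service.create_flow(name="DemoSync", transaction_id=uuid.uuid4())
    try:
        # Each step records its start/end time, duration and status.
        with log_service.step("do_work", metadata={"source": "demo"}):
            return do_work()
    except Exception as exc:
        # The step context already marks the step as failed; this mirrors the
        # flow-level add_exception call used in OrderService.
        log_service.add_exception(exc)
        raise
    finally:
        # Writes the flow, its steps and exceptions to the database
        # (the S3 upload is still a TODO in this version).
        log_service.save()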
channel_app/core/settings.py
CHANGED
@@ -17,6 +17,7 @@ CACHE_PORT = os.getenv("CACHE_PORT")
 BROKER_HOST = os.getenv("BROKER_HOST")
 BROKER_PORT = os.getenv("BROKER_PORT")
 BROKER_DATABASE_INDEX = os.getenv("BROKER_DATABASE_INDEX")
+DATABASE_URI = os.getenv("DATABASE_URI")
 SENTRY_DSN = os.getenv("SENTRY_DSN")
 DEFAULT_CONNECTION_POOL_COUNT = os.getenv("DEFAULT_CONNECTION_POOL_COUNT") or 10
 DEFAULT_CONNECTION_POOL_MAX_SIZE = os.getenv("DEFAULT_CONNECTION_POOL_COUNT") or 10
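The only change here is the new DATABASE_URI setting, read from the environment like the surrounding settings and consumed by the new database layer when it builds its SQLAlchemy engine. A minimal sketch of such an engine factory, assuming a standard SQLAlchemy URL; this is an illustration, not the package's DatabaseService implementation (its body is not shown in this diff):

import os

from sqlalchemy import create_engine
from sqlalchemy.engine import Engine


def build_engine() -> Engine:
    # e.g. postgresql+psycopg2://user:password@host:5432/channel_logs
    database_uri = os.getenv("DATABASE_URI")
    if not database_uri:
        raise RuntimeError("DATABASE_URI is not set")
    # pool_pre_ping guards against stale connections in long-running workers.
    return create_engine(database_uri, pool_pre_ping=True)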
channel_app/database/__init__.py
File without changes

channel_app/database/models.py
ADDED
@@ -0,0 +1,58 @@
+from datetime import datetime, timezone
+import uuid
+from sqlalchemy import JSON, Column, DateTime, ForeignKey, Integer, String, Enum as SqlEnum, Text
+from sqlalchemy.dialects.postgresql import UUID
+from sqlalchemy.orm import DeclarativeBase, relationship
+
+from channel_app.logs.enums import LogFlowAuthor, LogStepStatus
+
+
+class Base(DeclarativeBase):
+    pass
+
+
+class LogFlowModel(Base):
+    __tablename__ = "log_flows"
+
+    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+    transaction_id = Column(UUID(as_uuid=True), unique=True, nullable=False)
+    flow_name = Column(String(255), nullable=False)
+    flow_author = Column(SqlEnum(LogFlowAuthor), default=LogFlowAuthor.system, nullable=False)
+
+    started_at = Column(DateTime(timezone=True), nullable=False, default=lambda: datetime.now(timezone.utc))
+    ended_at = Column(DateTime(timezone=True), nullable=True)
+
+    status = Column(SqlEnum(LogStepStatus), nullable=True)
+    s3_key = Column(Text, nullable=True)
+
+    def __repr__(self):
+        return f"<FlowLog(transaction_id={self.transaction_id}, flow_name={self.flow_name})>"
+
+
+class LogStepModel(Base):
+    __tablename__ = "log_steps"
+
+    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+    flow_id = Column(UUID(as_uuid=True), ForeignKey("log_flows.id", ondelete="CASCADE"), nullable=False)
+    step_name = Column(String(255), nullable=False)
+    status = Column(SqlEnum(LogStepStatus, native_enum=False), nullable=False)
+    start_time = Column(DateTime(timezone=True), nullable=False)
+    end_time = Column(DateTime(timezone=True))
+    duration_ms = Column(Integer)
+    error_message = Column(String)
+    step_metadata = Column(JSON)
+
+    exceptions = relationship("LogStepExceptionModel", back_populates="step", cascade="all, delete-orphan")
+
+
+class LogStepExceptionModel(Base):
+    __tablename__ = "log_step_exceptions"
+
+    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
+    step_id = Column(UUID(as_uuid=True), ForeignKey("log_steps.id", ondelete="CASCADE"), nullable=False)
+    type = Column(String(128), nullable=False)
+    message = Column(String)
+    traceback = Column(String)
+    created_at = Column(DateTime(timezone=True), nullable=False)
+
+    step = relationship("LogStepModel", back_populates="exceptions")
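The models define three tables on a SQLAlchemy 2.x DeclarativeBase: log_flows, log_steps and log_step_exceptions, with UUID primary keys, database-level cascading deletes on the foreign keys, and an ORM cascade from step to exception. The diff shows no migration mechanism, so the sketch below assumes the tables are created ad hoc for local testing with Base.metadata.create_all; a PostgreSQL DATABASE_URI is required because the models use the postgresql UUID column type:

import os

from sqlalchemy import create_engine

from channel_app.database.models import Base


def create_log_tables():
    engine = create_engine(os.environ["DATABASE_URI"])
    # Creates log_flows, log_steps and log_step_exceptions if they do not exist.
    Base.metadata.create_all(engine)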
channel_app/logs/__init__.py
File without changes

channel_app/logs/services.py
ADDED
@@ -0,0 +1,160 @@
+from contextlib import contextmanager
+import traceback
+from typing import Optional
+import uuid
+from datetime import datetime, timezone
+from sqlalchemy.orm import scoped_session, sessionmaker
+
+from channel_app.database.models import LogFlowModel, LogStepExceptionModel, LogStepModel
+from channel_app.database.services import DatabaseService
+from channel_app.logs.enums import LogFlowAuthor, LogStepStatus
+
+
+class LogService:
+    database_service = DatabaseService()
+
+    def __init__(self):
+        self.flow = {}
+        self.steps = []
+        self.exceptions = []
+
+        self.db_engine = self.database_service.create_engine()
+        self.s3_client = None  # TODO: Declare the boto3 client
+        self.s3_bucket = None  # TODO: Get s3 bucket from exported environment variables
+
+    def create_flow(
+        self,
+        name: str,
+        transaction_id: str,
+        flow_author: LogFlowAuthor = LogFlowAuthor.system
+    ):
+        self.flow = {
+            "id": uuid.uuid4(),
+            "transaction_id": transaction_id or str(uuid.uuid4()),
+            "flow_name": name,
+            "flow_author": flow_author.value,
+            "started_at": datetime.now(timezone.utc)
+        }
+
+    @contextmanager
+    def step(self, name: str, metadata: Optional[dict] = None):
+        now = datetime.now(timezone.utc)
+        self._add_step(name, start=True, metadata=metadata)
+        try:
+            yield
+            self._add_step(name, end=True)
+        except Exception as exc:
+            self.add_exception(exc)
+            for step in reversed(self.steps):
+                if step["step_name"] == name and step.get("status") == LogStepStatus.in_progress.value:
+                    step["end_time"] = now
+                    step["status"] = LogStepStatus.failure.value
+                    step["error"] = str(exc)
+                    break
+            raise
+
+
+    def _add_step(self, name, start=False, end=False, metadata=None):
+        now = datetime.now(timezone.utc)
+        if start:
+            self.steps.append(
+                {
+                    "id": uuid.uuid4(),
+                    "step_name": name,
+                    "start_time": now,
+                    "status": LogStepStatus.in_progress.value,
+                    "metadata": metadata or {},
+                }
+            )
+        elif end:
+            for step in reversed(self.steps):
+                if step["step_name"] == name and step["status"] == LogStepStatus.in_progress.value:
+                    step["end_time"] = now
+                    step["status"] = LogStepStatus.success.value
+                    step["duration_ms"] = int((now - step["start_time"]).total_seconds() * 1000)
+
+    def add_exception(self, exc: Exception):
+        tb = traceback.format_exc()
+        exc_obj = {
+            "id": uuid.uuid4(),
+            "type": type(exc).__name__,
+            "message": str(exc),
+            "traceback": tb
+        }
+        self.exceptions.append(exc_obj)
+        # If this flow has related step, update the step to FAILURE
+        if self.steps:
+            self.steps[-1]["status"] = LogStepStatus.failure.value
+            self.steps[-1]["error"] = str(exc)
+            self.steps[-1].setdefault("exceptions", []).append(exc_obj)
+
+    def save(self):
+        self.flow["ended_at"] = datetime.now(timezone.utc)
+        full_log_content = {
+            **self.flow,
+            "steps": self.steps,
+            "exceptions": self.exceptions,
+        }
+        s3_key = f"logs/{self.flow['flow_name']}/{self.flow['transaction_id']}.json"
+
+        self._upload_to_s3(s3_key, full_log_content)
+
+
+        log_flow_object = LogFlowModel(
+            id=self.flow["id"],
+            transaction_id=str(self.flow["transaction_id"]),
+            flow_name=self.flow["flow_name"],
+            flow_author=self.flow["flow_author"],
+            started_at=self.flow["started_at"],
+            ended_at=self.flow["ended_at"],
+            status=self.steps[-1]["status"] if self.steps else LogStepStatus.failure.value,
+            s3_key=s3_key,
+        )
+
+        step_models = []
+        exception_models = []
+        for step in self.steps:
+            step_model = LogStepModel(
+                id=step["id"],
+                flow_id=self.flow["id"],
+                step_name=step["step_name"],
+                status=step["status"],
+                start_time=step["start_time"],
+                end_time=step.get("end_time"),
+                duration_ms=step.get("duration_ms"),
+                error_message=step.get("error"),
+                step_metadata=step.get("metadata"),
+            )
+            step_models.append(step_model)
+
+            for exc in step.get("exceptions", []):
+                exception_models.append(
+                    LogStepExceptionModel(
+                        id=exc["id"],
+                        step_id=step["id"],
+                        type=exc["type"],
+                        message=exc["message"],
+                        traceback=exc["traceback"],
+                        created_at=self.flow["ended_at"],
+                    )
+                )
+
+        self._save_to_db(log_flow_object, step_models, exception_models)
+
+    def _upload_to_s3(self, key: str, content: dict):
+        # TODO: Implement this.
+        pass
+
+    def _save_to_db(self, flow_obj, step_objs, exception_objs):
+        session = scoped_session(sessionmaker(bind=self.db_engine))
+        try:
+            session.add(flow_obj)
+            session.add_all(step_objs)
+            if exception_objs:
+                session.add_all(exception_objs)
+            session.commit()
+        except Exception:
+            session.rollback()
+            raise
+        finally:
+            session.close()
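LogService keeps the flow, its steps and exceptions in memory and only persists them in save(), which builds a logs/<flow_name>/<transaction_id>.json S3 key (the upload itself is still a TODO) and writes LogFlowModel, LogStepModel and LogStepExceptionModel rows in a single session. A short, illustrative sketch of reading those rows back by transaction id, assuming the same DATABASE_URI-backed engine; print_flow_steps is a hypothetical helper, not part of the package:

import os

from sqlalchemy import create_engine, select
from sqlalchemy.orm import Session

from channel_app.database.models import LogFlowModel, LogStepModel


def print_flow_steps(transaction_id):
    engine = create_engine(os.environ["DATABASE_URI"])
    with Session(engine) as session:
        # One flow per transaction id (the column is unique).
        flow = session.scalars(
            select(LogFlowModel).where(LogFlowModel.transaction_id == transaction_id)
        ).one()
        steps = session.scalars(
            select(LogStepModel).where(LogStepModel.flow_id == flow.id)
        ).all()
        for step in steps:
            print(step.step_name, step.status, step.duration_ms, "ms")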
{channel_app-0.0.155.dist-info → channel_app-0.0.157a1.dist-info}/RECORD
CHANGED
@@ -1,7 +1,7 @@
 channel_app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 channel_app/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 channel_app/app/order/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-channel_app/app/order/service.py,sha256=
+channel_app/app/order/service.py,sha256=GZ8StGwvICtI0DWMtYFq1mB9GPdojNpjCh9oVrHlHJE,22366
 channel_app/app/product/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 channel_app/app/product/service.py,sha256=7DZF-Vtoaf5eKT1m_ccEOAqUxWSO7Csop4HEtJmcrvw,10646
 channel_app/app/product_image/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -29,9 +29,15 @@ channel_app/core/commands.py,sha256=wM0ZlH_GHaYLKy2SWab_aKuZCsSahUuHeBa_tqi3W4A,
 channel_app/core/data.py,sha256=SlsXB0MW0epC2nrM0uEnaCBYK3Nz0kXFXZ1n4t8iomg,6931
 channel_app/core/integration.py,sha256=OqpN8B3KBLsjjrbZXZaNVF6NtObejh7P_7kGFj1xU3o,2817
 channel_app/core/products.py,sha256=uInjFw-vze1XP8vWEeq4VWDZVQQIiatoe1YsQ6n_H5E,2092
-channel_app/core/settings.py,sha256=
+channel_app/core/settings.py,sha256=ZkEiumBmRSBXd4W7RdyHrTwj7Un0Ynktab1zr8N_86I,1288
 channel_app/core/tests.py,sha256=ucgnLyb3D8H2JvjjH6icdRZzZQoMFbnlnFLylhoJ0Js,434
 channel_app/core/utilities.py,sha256=3iSU4RHFSsdTWBfUYBK23CRGtAIC-nYIBIJLm0Dlx3o,4168
+channel_app/database/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+channel_app/database/models.py,sha256=cdjLcfJe-OYZzk1fl3JL6ght8jryRYLwMF2uV3srM-o,2314
+channel_app/database/services.py,sha256=0zHLAcJAKRU6hKEaS9DmsX_2gIE29hh__DfHHx3JuSE,216
+channel_app/logs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+channel_app/logs/enums.py,sha256=If6ZjwRTerbJypYI8WjdsleHR7FjlV-TP2nBppFVEc4,214
+channel_app/logs/services.py,sha256=HmFiQNt5VZQErUqjy-uDJHD9ruQw-GtCoWY5UfCpROY,5751
 channel_app/omnitron/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 channel_app/omnitron/batch_request.py,sha256=S8IHtbI1RtVLbnOwtfXSmkrREGp8wUYW2E-eu5omwyY,3550
 channel_app/omnitron/constants.py,sha256=WZR6k_k2zZfN7lfi1ZLv1PphsHIq_LiZgw6Nd6LduvE,2793
@@ -58,7 +64,7 @@ channel_app/omnitron/commands/tests/test_product_images.py,sha256=y6tmiJ00kd2GTq
 channel_app/omnitron/commands/tests/test_product_prices.py,sha256=5HPX9PmjGw6gk3oNrwtWLqdrOkfeNx1mYP-pYwOesZU,3496
 channel_app/omnitron/commands/tests/test_product_stocks.py,sha256=q4RGlrCNUUJyN5CBL1fzrvdd4Q3xt816mbMRQT0XEd0,3496
 channel_app/omnitron/commands/tests/test_products.py,sha256=uj5KLaubY3XNu0hidOH-u-Djfboe81Hj7-lP--01Le0,103494
-channel_app-0.0.
-channel_app-0.0.
-channel_app-0.0.
-channel_app-0.0.
+channel_app-0.0.157a1.dist-info/METADATA,sha256=LEmj3OpOthxDuz4h1mK8_8ZbHIvRsY-KOIbW5FzTI0o,311
+channel_app-0.0.157a1.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
+channel_app-0.0.157a1.dist-info/top_level.txt,sha256=JT-gM6L5Cwxr1xEoN7NHrREDs-d6iGFGfRnK-NrJ3tU,12
+channel_app-0.0.157a1.dist-info/RECORD,,
{channel_app-0.0.155.dist-info → channel_app-0.0.157a1.dist-info}/top_level.txt
File without changes