channel-app 0.0.155__tar.gz → 0.0.157a1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (104)
  1. {channel_app-0.0.155/channel_app.egg-info → channel_app-0.0.157a1}/PKG-INFO +1 -2
  2. channel_app-0.0.157a1/alembic.ini +119 -0
  3. {channel_app-0.0.155 → channel_app-0.0.157a1}/bitbucket-pipelines.yml +2 -2
  4. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/app/order/service.py +82 -41
  5. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/core/settings.py +1 -0
  6. channel_app-0.0.157a1/channel_app/database/migrations/README +1 -0
  7. channel_app-0.0.157a1/channel_app/database/migrations/env.py +86 -0
  8. channel_app-0.0.157a1/channel_app/database/migrations/script.py.mako +28 -0
  9. channel_app-0.0.157a1/channel_app/database/migrations/versions/6049560b1ecb_create_flow_logs.py +43 -0
  10. channel_app-0.0.157a1/channel_app/database/migrations/versions/881a968ee603_add_step_and_exception_logs.py +55 -0
  11. channel_app-0.0.157a1/channel_app/database/models.py +58 -0
  12. channel_app-0.0.157a1/channel_app/database/services.py +8 -0
  13. channel_app-0.0.157a1/channel_app/logs/enums.py +13 -0
  14. channel_app-0.0.157a1/channel_app/logs/services.py +160 -0
  15. channel_app-0.0.157a1/channel_app/omnitron/commands/orders/__init__.py +0 -0
  16. channel_app-0.0.157a1/channel_app/omnitron/commands/tests/__init__.py +0 -0
  17. {channel_app-0.0.155 → channel_app-0.0.157a1/channel_app.egg-info}/PKG-INFO +2 -3
  18. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app.egg-info/SOURCES.txt +12 -0
  19. {channel_app-0.0.155 → channel_app-0.0.157a1}/requirements.txt +3 -0
  20. {channel_app-0.0.155 → channel_app-0.0.157a1}/setup.py +1 -1
  21. {channel_app-0.0.155 → channel_app-0.0.157a1}/tox.ini +1 -1
  22. {channel_app-0.0.155 → channel_app-0.0.157a1}/.gitignore +0 -0
  23. {channel_app-0.0.155 → channel_app-0.0.157a1}/.vscode/settings.json +0 -0
  24. {channel_app-0.0.155 → channel_app-0.0.157a1}/Makefile +0 -0
  25. {channel_app-0.0.155 → channel_app-0.0.157a1}/Procfile-dist +0 -0
  26. {channel_app-0.0.155 → channel_app-0.0.157a1}/README.md +0 -0
  27. {channel_app-0.0.155 → channel_app-0.0.157a1}/akinon.json-dist +0 -0
  28. {channel_app-0.0.155 → channel_app-0.0.157a1}/build.sh-dist +0 -0
  29. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/__init__.py +0 -0
  30. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/app/__init__.py +0 -0
  31. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/app/order/__init__.py +0 -0
  32. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/app/product/__init__.py +0 -0
  33. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/app/product/service.py +0 -0
  34. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/app/product_image/__init__.py +0 -0
  35. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/app/product_image/service.py +0 -0
  36. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/app/product_price/__init__.py +0 -0
  37. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/app/product_price/service.py +0 -0
  38. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/app/product_stock/__init__.py +0 -0
  39. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/app/product_stock/service.py +0 -0
  40. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/app/setup/__init__.py +0 -0
  41. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/app/setup/service.py +0 -0
  42. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/channel/__init__.py +0 -0
  43. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/channel/commands/__init__.py +0 -0
  44. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/channel/commands/orders/__init__.py +0 -0
  45. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/channel/commands/orders/orders.py +0 -0
  46. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/channel/commands/product_categories.py +0 -0
  47. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/channel/commands/product_images.py +0 -0
  48. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/channel/commands/product_prices.py +0 -0
  49. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/channel/commands/product_stocks.py +0 -0
  50. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/channel/commands/products.py +0 -0
  51. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/channel/commands/setup.py +0 -0
  52. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/channel/integration.py +0 -0
  53. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/core/__init__.py +0 -0
  54. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/core/clients.py +0 -0
  55. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/core/commands.py +0 -0
  56. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/core/data.py +0 -0
  57. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/core/integration.py +0 -0
  58. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/core/products.py +0 -0
  59. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/core/tests.py +0 -0
  60. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/core/utilities.py +0 -0
  61. {channel_app-0.0.155/channel_app/omnitron → channel_app-0.0.157a1/channel_app/database}/__init__.py +0 -0
  62. {channel_app-0.0.155/channel_app/omnitron/commands → channel_app-0.0.157a1/channel_app/logs}/__init__.py +0 -0
  63. {channel_app-0.0.155/channel_app/omnitron/commands/orders → channel_app-0.0.157a1/channel_app/omnitron}/__init__.py +0 -0
  64. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/omnitron/batch_request.py +0 -0
  65. {channel_app-0.0.155/channel_app/omnitron/commands/tests → channel_app-0.0.157a1/channel_app/omnitron/commands}/__init__.py +0 -0
  66. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/omnitron/commands/batch_requests.py +0 -0
  67. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/omnitron/commands/error_reports.py +0 -0
  68. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/omnitron/commands/integration_actions.py +0 -0
  69. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/omnitron/commands/orders/addresses.py +0 -0
  70. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/omnitron/commands/orders/cargo_companies.py +0 -0
  71. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/omnitron/commands/orders/customers.py +0 -0
  72. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/omnitron/commands/orders/orders.py +0 -0
  73. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/omnitron/commands/product_categories.py +0 -0
  74. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/omnitron/commands/product_images.py +0 -0
  75. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/omnitron/commands/product_prices.py +0 -0
  76. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/omnitron/commands/product_stocks.py +0 -0
  77. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/omnitron/commands/products.py +0 -0
  78. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/omnitron/commands/setup.py +0 -0
  79. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/omnitron/commands/tests/test_orders.py +0 -0
  80. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/omnitron/commands/tests/test_product_images.py +0 -0
  81. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/omnitron/commands/tests/test_product_prices.py +0 -0
  82. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/omnitron/commands/tests/test_product_stocks.py +0 -0
  83. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/omnitron/commands/tests/test_products.py +0 -0
  84. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/omnitron/constants.py +0 -0
  85. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/omnitron/exceptions.py +0 -0
  86. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app/omnitron/integration.py +0 -0
  87. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app.egg-info/dependency_links.txt +0 -0
  88. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app.egg-info/requires.txt +0 -0
  89. {channel_app-0.0.155 → channel_app-0.0.157a1}/channel_app.egg-info/top_level.txt +0 -0
  90. {channel_app-0.0.155 → channel_app-0.0.157a1}/docs/Makefile +0 -0
  91. {channel_app-0.0.155 → channel_app-0.0.157a1}/docs/make.bat +0 -0
  92. {channel_app-0.0.155 → channel_app-0.0.157a1}/docs/requirements.txt +0 -0
  93. {channel_app-0.0.155 → channel_app-0.0.157a1}/docs/source/architecture.rst +0 -0
  94. {channel_app-0.0.155 → channel_app-0.0.157a1}/docs/source/command_reference.rst +0 -0
  95. {channel_app-0.0.155 → channel_app-0.0.157a1}/docs/source/conf.py +0 -0
  96. {channel_app-0.0.155 → channel_app-0.0.157a1}/docs/source/flows.rst +0 -0
  97. {channel_app-0.0.155 → channel_app-0.0.157a1}/docs/source/images/async.png +0 -0
  98. {channel_app-0.0.155 → channel_app-0.0.157a1}/docs/source/images/batch_request_state_machine.png +0 -0
  99. {channel_app-0.0.155 → channel_app-0.0.157a1}/docs/source/images/sync.png +0 -0
  100. {channel_app-0.0.155 → channel_app-0.0.157a1}/docs/source/index.rst +0 -0
  101. {channel_app-0.0.155 → channel_app-0.0.157a1}/docs/source/installation_and_usage.rst +0 -0
  102. {channel_app-0.0.155 → channel_app-0.0.157a1}/docs/source/terminology.rst +0 -0
  103. {channel_app-0.0.155 → channel_app-0.0.157a1}/requirements-dev.txt +0 -0
  104. {channel_app-0.0.155 → channel_app-0.0.157a1}/setup.cfg +0 -0
@@ -1,10 +1,9 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: channel_app
3
- Version: 0.0.155
3
+ Version: 0.0.157a1
4
4
  Summary: Channel app for Sales Channels
5
5
  Home-page: https://github.com/akinon/channel_app
6
6
  Author: akinonteam
7
7
  Classifier: Development Status :: 5 - Production/Stable
8
8
  Requires-Python: >=3.5
9
9
  Description-Content-Type: text/markdown
10
- Requires-Dist: requests
@@ -0,0 +1,119 @@
1
+ # A generic, single database configuration.
2
+
3
+ [alembic]
4
+ # path to migration scripts
5
+ # Use forward slashes (/) also on windows to provide an os agnostic path
6
+ script_location = channel_app/database/migrations
7
+
8
+ # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
9
+ # Uncomment the line below if you want the files to be prepended with date and time
10
+ # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
11
+ # for all available tokens
12
+ # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
13
+
14
+ # sys.path path, will be prepended to sys.path if present.
15
+ # defaults to the current working directory.
16
+ prepend_sys_path = .
17
+
18
+ # timezone to use when rendering the date within the migration file
19
+ # as well as the filename.
20
+ # If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
21
+ # Any required deps can installed by adding `alembic[tz]` to the pip requirements
22
+ # string value is passed to ZoneInfo()
23
+ # leave blank for localtime
24
+ # timezone =
25
+
26
+ # max length of characters to apply to the "slug" field
27
+ # truncate_slug_length = 40
28
+
29
+ # set to 'true' to run the environment during
30
+ # the 'revision' command, regardless of autogenerate
31
+ # revision_environment = false
32
+
33
+ # set to 'true' to allow .pyc and .pyo files without
34
+ # a source .py file to be detected as revisions in the
35
+ # versions/ directory
36
+ # sourceless = false
37
+
38
+ # version location specification; This defaults
39
+ # to migrations/versions. When using multiple version
40
+ # directories, initial revisions must be specified with --version-path.
41
+ # The path separator used here should be the separator specified by "version_path_separator" below.
42
+ # version_locations = %(here)s/bar:%(here)s/bat:migrations/versions
43
+
44
+ # version path separator; As mentioned above, this is the character used to split
45
+ # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
46
+ # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
47
+ # Valid values for version_path_separator are:
48
+ #
49
+ # version_path_separator = :
50
+ # version_path_separator = ;
51
+ # version_path_separator = space
52
+ # version_path_separator = newline
53
+ #
54
+ # Use os.pathsep. Default configuration used for new projects.
55
+ version_path_separator = os
56
+
57
+ # set to 'true' to search source files recursively
58
+ # in each "version_locations" directory
59
+ # new in Alembic version 1.10
60
+ # recursive_version_locations = false
61
+
62
+ # the output encoding used when revision files
63
+ # are written from script.py.mako
64
+ # output_encoding = utf-8
65
+
66
+ sqlalchemy.url = postgresql+psycopg2://postgres:12345@127.0.0.1/sales_channel_logs
67
+
68
+
69
+ [post_write_hooks]
70
+ # post_write_hooks defines scripts or Python functions that are run
71
+ # on newly generated revision scripts. See the documentation for further
72
+ # detail and examples
73
+
74
+ # format using "black" - use the console_scripts runner, against the "black" entrypoint
75
+ # hooks = black
76
+ # black.type = console_scripts
77
+ # black.entrypoint = black
78
+ # black.options = -l 79 REVISION_SCRIPT_FILENAME
79
+
80
+ # lint with attempts to fix using "ruff" - use the exec runner, execute a binary
81
+ # hooks = ruff
82
+ # ruff.type = exec
83
+ # ruff.executable = %(here)s/.venv/bin/ruff
84
+ # ruff.options = check --fix REVISION_SCRIPT_FILENAME
85
+
86
+ # Logging configuration
87
+ [loggers]
88
+ keys = root,sqlalchemy,alembic
89
+
90
+ [handlers]
91
+ keys = console
92
+
93
+ [formatters]
94
+ keys = generic
95
+
96
+ [logger_root]
97
+ level = WARNING
98
+ handlers = console
99
+ qualname =
100
+
101
+ [logger_sqlalchemy]
102
+ level = WARNING
103
+ handlers =
104
+ qualname = sqlalchemy.engine
105
+
106
+ [logger_alembic]
107
+ level = INFO
108
+ handlers =
109
+ qualname = alembic
110
+
111
+ [handler_console]
112
+ class = StreamHandler
113
+ args = (sys.stderr,)
114
+ level = NOTSET
115
+ formatter = generic
116
+
117
+ [formatter_generic]
118
+ format = %(levelname)-5.5s [%(name)s] %(message)s
119
+ datefmt = %H:%M:%S
@@ -1,4 +1,4 @@
1
- image: python:3.8.10
1
+ image: python:3.11.9
2
2
 
3
3
  pipelines:
4
4
  pull-requests:
@@ -16,7 +16,7 @@ pipelines:
16
16
  '*':
17
17
  - step:
18
18
  name: Publish to PyPI
19
- image: python:3.7-alpine
19
+ image: python:3.11.9-alpine
20
20
  script:
21
21
  - apk add gcc git libffi-dev musl-dev openssl-dev python3-dev
22
22
  - pip install setuptools setuptools_scm twine
@@ -1,3 +1,4 @@
1
+ import uuid
1
2
  from dataclasses import asdict
2
3
  from typing import List, Generator, Union
3
4
 
@@ -19,6 +20,7 @@ from channel_app.core.data import (BatchRequestResponseDto,
19
20
  CancelOrderDto,
20
21
  ChannelUpdateOrderItemDto)
21
22
  from channel_app.core.settings import OmnitronIntegration, ChannelIntegration
23
+ from channel_app.logs.services import LogService
22
24
  from channel_app.omnitron.batch_request import ClientBatchRequest
23
25
  from channel_app.omnitron.constants import (BatchRequestStatus, ContentType,
24
26
  FailedReasonType)
@@ -33,48 +35,87 @@ class OrderService(object):
33
35
  batch_service = ClientBatchRequest
34
36
 
35
37
  def fetch_and_create_order(self, is_success_log=True):
36
- with OmnitronIntegration(
37
- content_type=ContentType.order.value) as omnitron_integration:
38
- get_orders = ChannelIntegration().do_action(
39
- key='get_orders',
40
- batch_request=omnitron_integration.batch_request
41
- )
42
-
43
- get_orders: Generator
44
- order_batch_objects = []
45
- while True:
46
- try:
47
- channel_create_order, report_list, _ = next(get_orders)
48
- except StopIteration:
49
- break
50
-
51
- # tips
52
- channel_create_order: ChannelCreateOrderDto
53
- report_list: List[ErrorReportDto]
54
- for report in report_list:
55
- if is_success_log or not report.is_ok:
56
- report.error_code = \
57
- f"{omnitron_integration.batch_request.local_batch_id}" \
58
- f"-Channel-GetOrders_{channel_create_order.order.number}"
59
- omnitron_integration.do_action(
60
- key='create_error_report',
61
- objects=report)
38
+ log_service = LogService()
39
+ tx_id = uuid.uuid4()
40
+ log_service.create_flow(
41
+ name="OrderSync",
42
+ transaction_id=tx_id,
43
+ )
62
44
 
63
- order = self.create_order(omnitron_integration=omnitron_integration,
64
- channel_order=channel_create_order)
65
- if order and omnitron_integration.batch_request.objects:
66
- order_batch_objects.extend(omnitron_integration.batch_request.objects)
67
-
68
- omnitron_integration.batch_request.objects = order_batch_objects
69
- try:
70
- self.batch_service(settings.OMNITRON_CHANNEL_ID).to_done(
71
- batch_request=omnitron_integration.batch_request
72
- )
73
- except requests_exceptions.HTTPError as exc:
74
- if exc.response.status_code == 406 and "batch_request_status_100_1" in exc.response.text:
75
- pass
76
- else:
77
- raise exc
45
+ try:
46
+ with log_service.step("fetch_orders"):
47
+ with OmnitronIntegration(
48
+ content_type=ContentType.order.value
49
+ ) as omnitron_integration:
50
+
51
+ with log_service.step("get_orders"):
52
+ get_orders = ChannelIntegration().do_action(
53
+ key='get_orders',
54
+ batch_request=omnitron_integration.batch_request
55
+ )
56
+
57
+ get_orders: Generator
58
+ order_batch_objects = []
59
+ while True:
60
+ try:
61
+ channel_create_order, report_list, _ = next(get_orders)
62
+ except StopIteration:
63
+ break
64
+
65
+ # tips
66
+ channel_create_order: ChannelCreateOrderDto
67
+ metadata = {
68
+ "order_number": channel_create_order.order.number
69
+ }
70
+
71
+ report_list: List[ErrorReportDto]
72
+ for report in report_list:
73
+ if is_success_log or not report.is_ok:
74
+ report.error_code = \
75
+ f"{omnitron_integration.batch_request.local_batch_id}" \
76
+ f"-Channel-GetOrders_{channel_create_order.order.number}"
77
+ try:
78
+
79
+ with log_service.step("create_error_report", metadata=metadata):
80
+ omnitron_integration.do_action(
81
+ key='create_error_report',
82
+ objects=report
83
+ )
84
+ except Exception as err:
85
+ log_service.add_exception(err)
86
+ raise
87
+
88
+ try:
89
+ with log_service.step("create_order", metadata=metadata):
90
+ order = self.create_order(
91
+ omnitron_integration=omnitron_integration,
92
+ channel_order=channel_create_order
93
+ )
94
+ except Exception as err:
95
+ log_service.add_exception(err)
96
+ raise
97
+
98
+ if order and omnitron_integration.batch_request.objects:
99
+ order_batch_objects.extend(omnitron_integration.batch_request.objects)
100
+
101
+ omnitron_integration.batch_request.objects = order_batch_objects
102
+
103
+ with log_service.step("batch_to_done"):
104
+ try:
105
+ self.batch_service(settings.OMNITRON_CHANNEL_ID).to_done(
106
+ batch_request=omnitron_integration.batch_request
107
+ )
108
+ except requests_exceptions.HTTPError as exc:
109
+ log_service.add_exception(exc)
110
+ if exc.response.status_code == 406 and "batch_request_status_100_1" in exc.response.text:
111
+ pass
112
+ else:
113
+ raise exc
114
+ except Exception as fatal:
115
+ log_service.add_exception(fatal)
116
+ raise
117
+ finally:
118
+ log_service.save()
78
119
 
79
120
  def create_order(self, omnitron_integration: OmnitronIntegration,
80
121
  channel_order: ChannelCreateOrderDto
@@ -17,6 +17,7 @@ CACHE_PORT = os.getenv("CACHE_PORT")
17
17
  BROKER_HOST = os.getenv("BROKER_HOST")
18
18
  BROKER_PORT = os.getenv("BROKER_PORT")
19
19
  BROKER_DATABASE_INDEX = os.getenv("BROKER_DATABASE_INDEX")
20
+ DATABASE_URI = os.getenv("DATABASE_URI")
20
21
  SENTRY_DSN = os.getenv("SENTRY_DSN")
21
22
  DEFAULT_CONNECTION_POOL_COUNT = os.getenv("DEFAULT_CONNECTION_POOL_COUNT") or 10
22
23
  DEFAULT_CONNECTION_POOL_MAX_SIZE = os.getenv("DEFAULT_CONNECTION_POOL_COUNT") or 10
@@ -0,0 +1 @@
1
+ Generic single-database configuration.
@@ -0,0 +1,86 @@
1
+ import os
2
+ from logging.config import fileConfig
3
+
4
+ from sqlalchemy import engine_from_config
5
+ from sqlalchemy import pool
6
+
7
+ from alembic import context
8
+
9
+ from channel_app.database.models import Base as BaseModel
10
+
11
+ # this is the Alembic Config object, which provides
12
+ # access to the values within the .ini file in use.
13
+ config = context.config
14
+
15
+ # Interpret the config file for Python logging.
16
+ # This line sets up loggers basically.
17
+ if config.config_file_name is not None:
18
+ fileConfig(config.config_file_name)
19
+
20
+ # add your model's MetaData object here
21
+ # for 'autogenerate' support
22
+ # from myapp import mymodel
23
+ # target_metadata = mymodel.Base.metadata
24
+ target_metadata = BaseModel.metadata
25
+
26
+ # other values from the config, defined by the needs of env.py,
27
+ # can be acquired:
28
+ # my_important_option = config.get_main_option("my_important_option")
29
+ # ... etc.
30
+
31
+
32
+ def run_migrations_offline() -> None:
33
+ """Run migrations in 'offline' mode.
34
+
35
+ This configures the context with just a URL
36
+ and not an Engine, though an Engine is acceptable
37
+ here as well. By skipping the Engine creation
38
+ we don't even need a DBAPI to be available.
39
+
40
+ Calls to context.execute() here emit the given string to the
41
+ script output.
42
+
43
+ """
44
+ url = config.get_main_option("sqlalchemy.url")
45
+
46
+ DATABASE_URL = os.getenv("DATABASE_URI")
47
+ if DATABASE_URL:
48
+ url = config.set_main_option("sqlalchemy.url", DATABASE_URL)
49
+
50
+ context.configure(
51
+ url=url,
52
+ target_metadata=target_metadata,
53
+ literal_binds=True,
54
+ dialect_opts={"paramstyle": "named"},
55
+ )
56
+
57
+ with context.begin_transaction():
58
+ context.run_migrations()
59
+
60
+
61
+ def run_migrations_online() -> None:
62
+ """Run migrations in 'online' mode.
63
+
64
+ In this scenario we need to create an Engine
65
+ and associate a connection with the context.
66
+
67
+ """
68
+ connectable = engine_from_config(
69
+ config.get_section(config.config_ini_section, {}),
70
+ prefix="sqlalchemy.",
71
+ poolclass=pool.NullPool,
72
+ )
73
+
74
+ with connectable.connect() as connection:
75
+ context.configure(
76
+ connection=connection, target_metadata=target_metadata
77
+ )
78
+
79
+ with context.begin_transaction():
80
+ context.run_migrations()
81
+
82
+
83
+ if context.is_offline_mode():
84
+ run_migrations_offline()
85
+ else:
86
+ run_migrations_online()
@@ -0,0 +1,28 @@
1
+ """${message}
2
+
3
+ Revision ID: ${up_revision}
4
+ Revises: ${down_revision | comma,n}
5
+ Create Date: ${create_date}
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+ ${imports if imports else ""}
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = ${repr(up_revision)}
16
+ down_revision: Union[str, None] = ${repr(down_revision)}
17
+ branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
18
+ depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
19
+
20
+
21
+ def upgrade() -> None:
22
+ """Upgrade schema."""
23
+ ${upgrades if upgrades else "pass"}
24
+
25
+
26
+ def downgrade() -> None:
27
+ """Downgrade schema."""
28
+ ${downgrades if downgrades else "pass"}
@@ -0,0 +1,43 @@
1
+ """create flow_logs
2
+
3
+ Revision ID: 6049560b1ecb
4
+ Revises:
5
+ Create Date: 2025-04-22 15:53:37.986109
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = '6049560b1ecb'
16
+ down_revision: Union[str, None] = None
17
+ branch_labels: Union[str, Sequence[str], None] = None
18
+ depends_on: Union[str, Sequence[str], None] = None
19
+
20
+
21
+ def upgrade() -> None:
22
+ """Upgrade schema."""
23
+ # ### commands auto generated by Alembic - please adjust! ###
24
+ op.create_table('log_flows',
25
+ sa.Column('id', sa.UUID(), nullable=False),
26
+ sa.Column('transaction_id', sa.UUID(), nullable=False),
27
+ sa.Column('flow_name', sa.String(length=255), nullable=False),
28
+ sa.Column('flow_author', sa.Enum('user', 'system', name='logflowauthor'), nullable=False),
29
+ sa.Column('started_at', sa.DateTime(timezone=True), nullable=False),
30
+ sa.Column('ended_at', sa.DateTime(timezone=True), nullable=True),
31
+ sa.Column('status', sa.Enum('in_progress', 'success', 'failure', name='logstepstatus'), nullable=True),
32
+ sa.Column('s3_key', sa.Text(), nullable=True),
33
+ sa.PrimaryKeyConstraint('id'),
34
+ sa.UniqueConstraint('transaction_id')
35
+ )
36
+ # ### end Alembic commands ###
37
+
38
+
39
+ def downgrade() -> None:
40
+ """Downgrade schema."""
41
+ # ### commands auto generated by Alembic - please adjust! ###
42
+ op.drop_table('log_flows')
43
+ # ### end Alembic commands ###
@@ -0,0 +1,55 @@
1
+ """add step and exception logs
2
+
3
+ Revision ID: 881a968ee603
4
+ Revises: 6049560b1ecb
5
+ Create Date: 2025-04-24 11:58:59.377131
6
+
7
+ """
8
+ from typing import Sequence, Union
9
+
10
+ from alembic import op
11
+ import sqlalchemy as sa
12
+
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = '881a968ee603'
16
+ down_revision: Union[str, None] = '6049560b1ecb'
17
+ branch_labels: Union[str, Sequence[str], None] = None
18
+ depends_on: Union[str, Sequence[str], None] = None
19
+
20
+
21
+ def upgrade() -> None:
22
+ """Upgrade schema."""
23
+ # ### commands auto generated by Alembic - please adjust! ###
24
+ op.create_table('log_steps',
25
+ sa.Column('id', sa.UUID(), nullable=False),
26
+ sa.Column('flow_id', sa.UUID(), nullable=False),
27
+ sa.Column('step_name', sa.String(length=255), nullable=False),
28
+ sa.Column('status', sa.Enum('in_progress', 'success', 'failure', name='logstepstatus', native_enum=False), nullable=False),
29
+ sa.Column('start_time', sa.DateTime(timezone=True), nullable=False),
30
+ sa.Column('end_time', sa.DateTime(timezone=True), nullable=True),
31
+ sa.Column('duration_ms', sa.Integer(), nullable=True),
32
+ sa.Column('error_message', sa.String(), nullable=True),
33
+ sa.Column('step_metadata', sa.JSON(), nullable=True),
34
+ sa.ForeignKeyConstraint(['flow_id'], ['log_flows.id'], ondelete='CASCADE'),
35
+ sa.PrimaryKeyConstraint('id')
36
+ )
37
+ op.create_table('log_step_exceptions',
38
+ sa.Column('id', sa.UUID(), nullable=False),
39
+ sa.Column('step_id', sa.UUID(), nullable=False),
40
+ sa.Column('type', sa.String(length=128), nullable=False),
41
+ sa.Column('message', sa.String(), nullable=True),
42
+ sa.Column('traceback', sa.String(), nullable=True),
43
+ sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
44
+ sa.ForeignKeyConstraint(['step_id'], ['log_steps.id'], ondelete='CASCADE'),
45
+ sa.PrimaryKeyConstraint('id')
46
+ )
47
+ # ### end Alembic commands ###
48
+
49
+
50
+ def downgrade() -> None:
51
+ """Downgrade schema."""
52
+ # ### commands auto generated by Alembic - please adjust! ###
53
+ op.drop_table('log_step_exceptions')
54
+ op.drop_table('log_steps')
55
+ # ### end Alembic commands ###
@@ -0,0 +1,58 @@
1
+ from datetime import datetime, timezone
2
+ import uuid
3
+ from sqlalchemy import JSON, Column, DateTime, ForeignKey, Integer, String, Enum as SqlEnum, Text
4
+ from sqlalchemy.dialects.postgresql import UUID
5
+ from sqlalchemy.orm import DeclarativeBase, relationship
6
+
7
+ from channel_app.logs.enums import LogFlowAuthor, LogStepStatus
8
+
9
+
10
+ class Base(DeclarativeBase):
11
+ pass
12
+
13
+
14
+ class LogFlowModel(Base):
15
+ __tablename__ = "log_flows"
16
+
17
+ id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
18
+ transaction_id = Column(UUID(as_uuid=True), unique=True, nullable=False)
19
+ flow_name = Column(String(255), nullable=False)
20
+ flow_author = Column(SqlEnum(LogFlowAuthor), default=LogFlowAuthor.system, nullable=False)
21
+
22
+ started_at = Column(DateTime(timezone=True), nullable=False, default=lambda: datetime.now(timezone.utc))
23
+ ended_at = Column(DateTime(timezone=True), nullable=True)
24
+
25
+ status = Column(SqlEnum(LogStepStatus), nullable=True)
26
+ s3_key = Column(Text, nullable=True)
27
+
28
+ def __repr__(self):
29
+ return f"<FlowLog(transaction_id={self.transaction_id}, flow_name={self.flow_name})>"
30
+
31
+
32
+ class LogStepModel(Base):
33
+ __tablename__ = "log_steps"
34
+
35
+ id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
36
+ flow_id = Column(UUID(as_uuid=True), ForeignKey("log_flows.id", ondelete="CASCADE"), nullable=False)
37
+ step_name = Column(String(255), nullable=False)
38
+ status = Column(SqlEnum(LogStepStatus, native_enum=False), nullable=False)
39
+ start_time = Column(DateTime(timezone=True), nullable=False)
40
+ end_time = Column(DateTime(timezone=True))
41
+ duration_ms = Column(Integer)
42
+ error_message = Column(String)
43
+ step_metadata = Column(JSON)
44
+
45
+ exceptions = relationship("LogStepExceptionModel", back_populates="step", cascade="all, delete-orphan")
46
+
47
+
48
+ class LogStepExceptionModel(Base):
49
+ __tablename__ = "log_step_exceptions"
50
+
51
+ id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
52
+ step_id = Column(UUID(as_uuid=True), ForeignKey("log_steps.id", ondelete="CASCADE"), nullable=False)
53
+ type = Column(String(128), nullable=False)
54
+ message = Column(String)
55
+ traceback = Column(String)
56
+ created_at = Column(DateTime(timezone=True), nullable=False)
57
+
58
+ step = relationship("LogStepModel", back_populates="exceptions")
@@ -0,0 +1,8 @@
1
+ from sqlalchemy import create_engine
2
+ from channel_app.core import settings
3
+
4
+
5
+ class DatabaseService:
6
+ def create_engine(self):
7
+ engine = create_engine(settings.DATABASE_URI, echo=False)
8
+ return engine
@@ -0,0 +1,13 @@
1
+ from enum import Enum
2
+
3
+
4
+ class LogStepStatus(str, Enum):
5
+ in_progress = "IN_PROGRESS"
6
+ success = "SUCCESS"
7
+ failure = "FAILURE"
8
+
9
+
10
+ class LogFlowAuthor(str, Enum):
11
+ user = "User"
12
+ system = "System"
13
+
@@ -0,0 +1,160 @@
1
+ from contextlib import contextmanager
2
+ import traceback
3
+ from typing import Optional
4
+ import uuid
5
+ from datetime import datetime, timezone
6
+ from sqlalchemy.orm import scoped_session, sessionmaker
7
+
8
+ from channel_app.database.models import LogFlowModel, LogStepExceptionModel, LogStepModel
9
+ from channel_app.database.services import DatabaseService
10
+ from channel_app.logs.enums import LogFlowAuthor, LogStepStatus
11
+
12
+
13
class LogService:
    """In-memory collector for flow/step/exception logs.

    Intended usage: call :meth:`create_flow` once per transaction, wrap each
    unit of work in the :meth:`step` context manager, then call :meth:`save`
    to upload the full JSON payload to S3 and persist summary rows to the
    database.
    """

    # Shared engine factory; each LogService instance creates its own engine.
    database_service = DatabaseService()

    def __init__(self):
        # Flow header fields, populated by create_flow().
        self.flow = {}
        # Ordered list of step dicts; duplicate/nested names are matched
        # last-in-first-out by the helpers below.
        self.steps = []
        # Flow-level record of every exception captured via add_exception().
        self.exceptions = []

        self.db_engine = self.database_service.create_engine()
        self.s3_client = None  # TODO: Declare the boto3 client
        self.s3_bucket = None  # TODO: Get s3 bucket from exported environment variables

    def create_flow(
        self,
        name: str,
        transaction_id: str,
        flow_author: LogFlowAuthor = LogFlowAuthor.system,
    ):
        """Start a new flow record.

        Falls back to a freshly generated UUID when ``transaction_id`` is
        falsy (empty string or None).
        """
        self.flow = {
            "id": uuid.uuid4(),
            "transaction_id": transaction_id or str(uuid.uuid4()),
            "flow_name": name,
            "flow_author": flow_author.value,
            "started_at": datetime.now(timezone.utc),
        }

    @contextmanager
    def step(self, name: str, metadata: Optional[dict] = None):
        """Track a named unit of work.

        On success the step is marked SUCCESS with its duration; on error the
        step is marked FAILURE with the failure-time ``end_time`` and
        ``duration_ms``, the exception is recorded, and then re-raised.
        """
        self._add_step(name, start=True, metadata=metadata)
        try:
            yield
            self._add_step(name, end=True)
        except Exception as exc:
            # Close the matching in-progress step at the moment of failure.
            # Bug fixes vs. previous revision: end_time used to be stamped
            # with the step's *start* time, duration_ms was never set on
            # failure, and add_exception() ran first — flipping the step to
            # FAILURE so this lookup never matched it at all.
            now = datetime.now(timezone.utc)
            for step in reversed(self.steps):
                if step["step_name"] == name and step.get("status") == LogStepStatus.in_progress.value:
                    step["end_time"] = now
                    step["duration_ms"] = int((now - step["start_time"]).total_seconds() * 1000)
                    step["status"] = LogStepStatus.failure.value
                    step["error"] = str(exc)
                    break
            self.add_exception(exc)
            raise

    def _add_step(self, name, start=False, end=False, metadata=None):
        """Append a new in-progress step (``start=True``) or close the most
        recent in-progress step with this name (``end=True``)."""
        now = datetime.now(timezone.utc)
        if start:
            self.steps.append(
                {
                    "id": uuid.uuid4(),
                    "step_name": name,
                    "start_time": now,
                    "status": LogStepStatus.in_progress.value,
                    "metadata": metadata or {},
                }
            )
        elif end:
            for step in reversed(self.steps):
                if step["step_name"] == name and step["status"] == LogStepStatus.in_progress.value:
                    step["end_time"] = now
                    step["status"] = LogStepStatus.success.value
                    step["duration_ms"] = int((now - step["start_time"]).total_seconds() * 1000)
                    # Bug fix: stop at the most recent match so earlier
                    # in-progress steps sharing the name stay untouched.
                    break

    def add_exception(self, exc: Exception):
        """Record *exc* at flow level and attach it to the latest step,
        marking that step FAILURE.

        Must be called from inside an ``except`` block for
        ``traceback.format_exc()`` to yield a useful trace.
        """
        exc_obj = {
            "id": uuid.uuid4(),
            "type": type(exc).__name__,
            "message": str(exc),
            "traceback": traceback.format_exc(),
        }
        self.exceptions.append(exc_obj)
        # If this flow has a related step, update the step to FAILURE.
        if self.steps:
            last_step = self.steps[-1]
            last_step["status"] = LogStepStatus.failure.value
            last_step["error"] = str(exc)
            last_step.setdefault("exceptions", []).append(exc_obj)

    def save(self):
        """Finalize the flow: upload the complete JSON log to S3, then
        persist flow/step/exception rows to the database.

        Raises:
            ValueError: if :meth:`create_flow` was never called (previously
                this surfaced as an opaque ``KeyError``).
        """
        if not self.flow:
            raise ValueError("create_flow() must be called before save()")

        self.flow["ended_at"] = datetime.now(timezone.utc)
        full_log_content = {
            **self.flow,
            "steps": self.steps,
            "exceptions": self.exceptions,
        }
        s3_key = f"logs/{self.flow['flow_name']}/{self.flow['transaction_id']}.json"

        self._upload_to_s3(s3_key, full_log_content)

        # NOTE(review): statuses are stored as enum *values* ("SUCCESS"),
        # while the flow model's SqlEnum column defaults to name-based
        # storage — confirm the column accepts values rather than names.
        log_flow_object = LogFlowModel(
            id=self.flow["id"],
            transaction_id=str(self.flow["transaction_id"]),
            flow_name=self.flow["flow_name"],
            flow_author=self.flow["flow_author"],
            started_at=self.flow["started_at"],
            ended_at=self.flow["ended_at"],
            # Flow status mirrors the last step; a flow with no steps is
            # treated as a failure.
            status=self.steps[-1]["status"] if self.steps else LogStepStatus.failure.value,
            s3_key=s3_key,
        )

        step_models = []
        exception_models = []
        for step in self.steps:
            step_models.append(
                LogStepModel(
                    id=step["id"],
                    flow_id=self.flow["id"],
                    step_name=step["step_name"],
                    status=step["status"],
                    start_time=step["start_time"],
                    end_time=step.get("end_time"),
                    duration_ms=step.get("duration_ms"),
                    error_message=step.get("error"),
                    step_metadata=step.get("metadata"),
                )
            )

            for exc in step.get("exceptions", []):
                exception_models.append(
                    LogStepExceptionModel(
                        id=exc["id"],
                        step_id=step["id"],
                        type=exc["type"],
                        message=exc["message"],
                        traceback=exc["traceback"],
                        created_at=self.flow["ended_at"],
                    )
                )

        self._save_to_db(log_flow_object, step_models, exception_models)

    def _upload_to_s3(self, key: str, content: dict):
        # TODO: Implement this.
        pass

    def _save_to_db(self, flow_obj, step_objs, exception_objs):
        """Persist the prepared ORM objects in a single transaction; roll
        back on any error so a partially saved flow is never committed."""
        session = scoped_session(sessionmaker(bind=self.db_engine))
        try:
            session.add(flow_obj)
            session.add_all(step_objs)
            if exception_objs:
                session.add_all(exception_objs)
            session.commit()
        except Exception:
            session.rollback()
            raise
        finally:
            session.close()
@@ -1,10 +1,9 @@
1
1
  Metadata-Version: 2.1
2
- Name: channel_app
3
- Version: 0.0.155
2
+ Name: channel-app
3
+ Version: 0.0.157a1
4
4
  Summary: Channel app for Sales Channels
5
5
  Home-page: https://github.com/akinon/channel_app
6
6
  Author: akinonteam
7
7
  Classifier: Development Status :: 5 - Production/Stable
8
8
  Requires-Python: >=3.5
9
9
  Description-Content-Type: text/markdown
10
- Requires-Dist: requests
@@ -3,6 +3,7 @@ Makefile
3
3
  Procfile-dist
4
4
  README.md
5
5
  akinon.json-dist
6
+ alembic.ini
6
7
  bitbucket-pipelines.yml
7
8
  build.sh-dist
8
9
  requirements-dev.txt
@@ -49,6 +50,17 @@ channel_app/core/products.py
49
50
  channel_app/core/settings.py
50
51
  channel_app/core/tests.py
51
52
  channel_app/core/utilities.py
53
+ channel_app/database/__init__.py
54
+ channel_app/database/models.py
55
+ channel_app/database/services.py
56
+ channel_app/database/migrations/README
57
+ channel_app/database/migrations/env.py
58
+ channel_app/database/migrations/script.py.mako
59
+ channel_app/database/migrations/versions/6049560b1ecb_create_flow_logs.py
60
+ channel_app/database/migrations/versions/881a968ee603_add_step_and_exception_logs.py
61
+ channel_app/logs/__init__.py
62
+ channel_app/logs/enums.py
63
+ channel_app/logs/services.py
52
64
  channel_app/omnitron/__init__.py
53
65
  channel_app/omnitron/batch_request.py
54
66
  channel_app/omnitron/constants.py
@@ -6,3 +6,6 @@ omnisdk
6
6
  setuptools>=65.5.1 # not directly required, pinned by Snyk to avoid a vulnerability
7
7
  redis==5.0.1
8
8
  python-dotenv
9
+ sqlalchemy==2.0.40
10
+ psycopg2-binary==2.9.10
11
+ alembic==1.15.2
@@ -5,7 +5,7 @@ with open("README.md") as f:
5
5
 
6
6
  setup(
7
7
  name="channel_app",
8
- version="0.0.155",
8
+ version="0.0.157a1", # alpha prerelease
9
9
  packages=find_packages(),
10
10
  url="https://github.com/akinon/channel_app",
11
11
  description="Channel app for Sales Channels",
@@ -1,5 +1,5 @@
1
1
  [tox]
2
- envlist = py35,py36,py37,py38,py39
2
+ envlist = py35,py36,py37,py38,py39,py310,py311
3
3
 
4
4
  [testenv]
5
5
  # install pytest in the virtualenv where commands will be executed
File without changes
File without changes
File without changes