karrio-server-data 2025.5rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43) hide show
  1. karrio/server/data/__init__.py +0 -0
  2. karrio/server/data/admin.py +1 -0
  3. karrio/server/data/apps.py +13 -0
  4. karrio/server/data/filters.py +43 -0
  5. karrio/server/data/migrations/0001_initial.py +62 -0
  6. karrio/server/data/migrations/0002_alter_batchoperation_resource_type_and_more.py +28 -0
  7. karrio/server/data/migrations/0003_datatemplate_metadata_alter_batchoperation_resources.py +36 -0
  8. karrio/server/data/migrations/__init__.py +0 -0
  9. karrio/server/data/models.py +97 -0
  10. karrio/server/data/resources/__init__.py +53 -0
  11. karrio/server/data/resources/orders.py +523 -0
  12. karrio/server/data/resources/shipments.py +473 -0
  13. karrio/server/data/resources/trackers.py +212 -0
  14. karrio/server/data/serializers/__init__.py +26 -0
  15. karrio/server/data/serializers/base.py +107 -0
  16. karrio/server/data/serializers/batch.py +9 -0
  17. karrio/server/data/serializers/batch_orders.py +99 -0
  18. karrio/server/data/serializers/batch_shipments.py +102 -0
  19. karrio/server/data/serializers/batch_trackers.py +131 -0
  20. karrio/server/data/serializers/data.py +109 -0
  21. karrio/server/data/signals.py +52 -0
  22. karrio/server/data/tests.py +3 -0
  23. karrio/server/data/urls.py +13 -0
  24. karrio/server/data/views/__init__.py +0 -0
  25. karrio/server/data/views/batch.py +72 -0
  26. karrio/server/data/views/batch_order.py +40 -0
  27. karrio/server/data/views/batch_shipment.py +40 -0
  28. karrio/server/data/views/batch_tracking.py +40 -0
  29. karrio/server/data/views/data.py +171 -0
  30. karrio/server/events/task_definitions/__init__.py +1 -0
  31. karrio/server/events/task_definitions/data/__init__.py +136 -0
  32. karrio/server/events/task_definitions/data/batch.py +130 -0
  33. karrio/server/events/task_definitions/data/shipments.py +51 -0
  34. karrio/server/graph/schemas/__init__.py +1 -0
  35. karrio/server/graph/schemas/data/__init__.py +51 -0
  36. karrio/server/graph/schemas/data/inputs.py +39 -0
  37. karrio/server/graph/schemas/data/mutations.py +53 -0
  38. karrio/server/graph/schemas/data/types.py +78 -0
  39. karrio/server/settings/data.py +15 -0
  40. karrio_server_data-2025.5rc1.dist-info/METADATA +18 -0
  41. karrio_server_data-2025.5rc1.dist-info/RECORD +43 -0
  42. karrio_server_data-2025.5rc1.dist-info/WHEEL +5 -0
  43. karrio_server_data-2025.5rc1.dist-info/top_level.txt +2 -0
@@ -0,0 +1,72 @@
1
+ from django.urls import path
2
+ from rest_framework.request import Request
3
+ from rest_framework.response import Response
4
+ from rest_framework.pagination import LimitOffsetPagination
5
+ from django_filters.rest_framework import DjangoFilterBackend
6
+
7
+ from karrio.server.data.filters import BatchOperationFilter
8
+ import karrio.server.data.serializers as serializers
9
+ import karrio.server.data.models as models
10
+ import karrio.server.core.views.api as api
11
+ import karrio.server.openapi as openapi
12
+
13
ENDPOINT_ID = "&&&&$"  # This endpoint id is used to make operation ids unique make sure not to duplicate
# Paginated response envelope for listing BatchOperation serializers.
BatchOperations = serializers.PaginatedResult(
    "BatchOperations", serializers.BatchOperation
)
17
+
18
+
19
class BatchList(api.GenericAPIView):
    """Paginated, filterable listing endpoint for batch operations."""

    pagination_class = type(
        "CustomPagination", (LimitOffsetPagination,), {"default_limit": 20}
    )
    filter_backends = (DjangoFilterBackend,)
    filterset_class = BatchOperationFilter
    serializer_class = BatchOperations
    model = models.BatchOperation

    @openapi.extend_schema(
        tags=["Batches"],
        operation_id=f"{ENDPOINT_ID}list",
        extensions={"x-operationId": "listBatchOperations"},
        summary="List all batch operations",
        responses={
            200: BatchOperations(),
            404: serializers.ErrorResponse(),
            500: serializers.ErrorResponse(),
        },
    )
    def get(self, _: Request):
        """Retrieve all batch operations. `Beta`"""

        queryset = self.filter_queryset(self.get_queryset())
        page = self.paginate_queryset(
            serializers.BatchOperation(queryset, many=True).data
        )

        return self.get_paginated_response(page)
48
+
49
+
50
class BatchDetails(api.APIView):
    """Retrieval endpoint for a single batch operation by primary key."""

    @openapi.extend_schema(
        tags=["Batches"],
        operation_id=f"{ENDPOINT_ID}retrieve",
        extensions={"x-operationId": "retrieveBatchOperation"},
        summary="Retrieve a batch operation",
        responses={
            200: serializers.BatchOperation(),
            404: serializers.ErrorResponse(),
            500: serializers.ErrorResponse(),
        },
    )
    def get(self, request: Request, pk: str):
        """Retrieve a batch operation. `Beta`"""
        # access_by scopes the queryset to the requesting user/org
        operation = models.BatchOperation.access_by(request).get(pk=pk)
        payload = serializers.BatchOperation(operation).data

        return Response(payload)
67
+
68
+
69
# Routes exposed by this view module (presumably aggregated by the app's
# urls module — TODO confirm against karrio/server/data/urls.py).
urlpatterns = [
    path("batches/operations", BatchList.as_view(), name="batch-list"),
    path("batches/operations/<str:pk>", BatchDetails.as_view(), name="batch-details"),
]
@@ -0,0 +1,40 @@
1
+ from django.urls import path
2
+ from rest_framework import status
3
+ from rest_framework.request import Request
4
+ from rest_framework.response import Response
5
+
6
+ import karrio.server.data.serializers as serializers
7
+ import karrio.server.core.views.api as api
8
+ import karrio.server.openapi as openapi
9
+
10
+ ENDPOINT_ID = "&&&&$" # This endpoint id is used to make operation ids unique make sure not to duplicate
11
+
12
+
13
class BatchList(api.APIView):
    """Endpoint that creates a batch of orders from a single payload."""

    @openapi.extend_schema(
        tags=["Batches"],
        operation_id=f"{ENDPOINT_ID}create_orders",
        summary="Create order batch",
        responses={
            200: serializers.BatchOperation(),
            404: serializers.ErrorResponse(),
            500: serializers.ErrorResponse(),
        },
        request=serializers.BatchOrderData(),
    )
    def post(self, request: Request):
        """Create order batch. `Beta`"""
        serializer = serializers.BatchOrderData.map(data=request.data, context=request)
        operation = serializer.save().instance
        payload = serializers.BatchOperation(operation).data

        return Response(payload, status=status.HTTP_201_CREATED)
36
+
37
+
38
# Route exposed by this view module.
urlpatterns = [
    path("batches/orders", BatchList.as_view(), name="batch-orders"),
]
@@ -0,0 +1,40 @@
1
+ from django.urls import path
2
+ from rest_framework import status
3
+ from rest_framework.request import Request
4
+ from rest_framework.response import Response
5
+
6
+ import karrio.server.data.serializers as serializers
7
+ import karrio.server.core.views.api as api
8
+ import karrio.server.openapi as openapi
9
+
10
+ ENDPOINT_ID = "&&&&$" # This endpoint id is used to make operation ids unique make sure not to duplicate
11
+
12
+
13
class BatchList(api.APIView):
    """Endpoint that creates a batch of shipments from a single payload."""

    @openapi.extend_schema(
        tags=["Batches"],
        operation_id=f"{ENDPOINT_ID}create_shipments",
        summary="Create shipment batch",
        responses={
            200: serializers.BatchOperation(),
            404: serializers.ErrorResponse(),
            500: serializers.ErrorResponse(),
        },
        request=serializers.BatchShipmentData(),
    )
    def post(self, request: Request):
        """Create shipment batch. `Beta`"""
        serializer = serializers.BatchShipmentData.map(
            data=request.data, context=request
        )
        operation = serializer.save().instance
        payload = serializers.BatchOperation(operation).data

        return Response(payload, status=status.HTTP_201_CREATED)
36
+
37
+
38
# Route exposed by this view module.
urlpatterns = [
    path("batches/shipments", BatchList.as_view(), name="batch-shipments"),
]
@@ -0,0 +1,40 @@
1
+ from django.urls import path
2
+ from rest_framework import status
3
+ from rest_framework.request import Request
4
+ from rest_framework.response import Response
5
+
6
+ import karrio.server.data.serializers as serializers
7
+ import karrio.server.core.views.api as api
8
+ import karrio.server.openapi as openapi
9
+
10
+ ENDPOINT_ID = "&&&&$" # This endpoint id is used to make operation ids unique make sure not to duplicate
11
+
12
+
13
class BatchList(api.APIView):
    """Endpoint that creates a batch of trackers from a single payload."""

    @openapi.extend_schema(
        tags=["Batches"],
        operation_id=f"{ENDPOINT_ID}create_trackers",
        summary="Create tracker batch",
        responses={
            200: serializers.BatchOperation(),
            404: serializers.ErrorResponse(),
            500: serializers.ErrorResponse(),
        },
        request=serializers.BatchTrackerData(),
    )
    def post(self, request: Request):
        """Create tracker batch. `Beta`"""
        serializer = serializers.BatchTrackerData.map(
            data=request.data, context=request
        )
        operation = serializer.save().instance
        payload = serializers.BatchOperation(operation).data

        return Response(payload, status=status.HTTP_201_CREATED)
36
+
37
+
38
# Route exposed by this view module.
urlpatterns = [
    path("batches/trackers", BatchList.as_view(), name="batch-trackers"),
]
@@ -0,0 +1,171 @@
1
+ import io
2
+ from django.http import JsonResponse
3
+ from django.urls import re_path, path
4
+ from django.core.files.base import ContentFile
5
+ from django_downloadview import VirtualDownloadView
6
+ from django.views.decorators.csrf import csrf_exempt
7
+ from rest_framework.parsers import MultiPartParser, FormParser
8
+ from rest_framework.response import Response
9
+ from rest_framework.request import Request
10
+ from rest_framework import status
11
+
12
+ from karrio.server.data.serializers.data import ImportDataSerializer
13
+ import karrio.server.data.serializers as serializers
14
+ import karrio.server.data.resources as resources
15
+ import karrio.server.core.views.api as api
16
+ import karrio.server.openapi as openapi
17
+
18
+ ENDPOINT_ID = "&&&&$" # This endpoint id is used to make operation ids unique make sure not to duplicate
19
# OpenAPI parameter documentation for the data import endpoint (DataImport).
DataImportParameters: list = [
    openapi.OpenApiParameter(
        name="resource_type",
        type=openapi.OpenApiTypes.STR,
        enum=[e.name for e in list(serializers.ResourceType)],
        description="The type of the resource to import",
    ),
    openapi.OpenApiParameter(
        "data_template",
        type=openapi.OpenApiTypes.STR,
        required=False,
        description="""A data template slug to use for the import.<br/>
        **When nothing is specified, the system default headers are expected.**
        """,
    ),
    # the uploaded file itself (multipart form field)
    openapi.OpenApiParameter(
        name="data_file",
        type=openapi.OpenApiTypes.BINARY,
    ),
]
39
+
40
+
41
class DataImport(api.BaseAPIView):
    """File-upload endpoint that queues a background batch import."""

    parser_classes = [MultiPartParser, FormParser]

    @openapi.extend_schema(
        tags=["Batches"],
        operation_id=f"{ENDPOINT_ID}import_file",
        summary="Import data files",
        responses={
            202: serializers.BatchOperation(),
            400: serializers.ErrorResponse(),
            500: serializers.ErrorResponse(),
        },
        request={
            'multipart/form-data': {
                'type': 'object',
                'properties': {
                    'resource_type': {
                        'type': 'string',
                    },
                    'data_template': {
                        'type': 'string',
                    },
                    'data_file': {
                        'type': 'string',
                        'format': 'binary'
                    }
                }
            }
        },
        parameters=DataImportParameters,
    )
    def post(self, request: Request):
        """Import csv, xls and xlsx data files for: `Beta`<br/>
        - trackers data
        - orders data
        - shipments data
        - billing data (soon)<br/><br/>
        **This operation will return a batch operation that you can poll to follow
        the import progression.**
        """
        serializer = ImportDataSerializer.map(data=request.data, context=request)
        operation = serializer.save().instance
        payload = serializers.BatchOperation(operation).data

        # 202: the import itself runs asynchronously; poll the batch operation
        return Response(payload, status=status.HTTP_202_ACCEPTED)
90
+
91
+
92
# OpenAPI parameter documentation for the data export endpoint (DataExport).
DataExportParameters: list = [
    openapi.OpenApiParameter(
        name="resource_type",
        location=openapi.OpenApiParameter.PATH,
        type=openapi.OpenApiTypes.STR,
        enum=[e.name for e in list(serializers.ResourceType)],
    ),
    openapi.OpenApiParameter(
        name="export_format",
        location=openapi.OpenApiParameter.PATH,
        type=openapi.OpenApiTypes.STR,
        # NOTE(review): this enum lists resource types, not export formats
        # (csv/xls/xlsx...) — looks like a copy-paste from the parameter
        # above; confirm the intended format enum and fix.
        enum=[e.name for e in list(serializers.ResourceType)],
    ),
    openapi.OpenApiParameter(
        "data_template",
        location=openapi.OpenApiParameter.QUERY,
        type=openapi.OpenApiTypes.STR,
        required=False,
        description="""A data template slug to use for the import.<br/>
        **When nothing is specified, the system default headers are expected.**
        """,
    ),
]
115
+
116
+
117
class DataExport(api.LoginRequiredView, VirtualDownloadView):
    """Generate and stream an export file for a resource type.

    The resource/format come from the URL; `resources.export` builds the
    dataset and `VirtualDownloadView.get` serves the virtual file produced
    by `get_file`.
    """

    @openapi.extend_schema(
        tags=["Batches"],
        operation_id=f"{ENDPOINT_ID}export_file",
        summary="Export data files",
        responses={
            (200, "application/octet-stream"): openapi.OpenApiTypes.BINARY,
            409: serializers.ErrorResponse(),
            500: serializers.ErrorResponse(),
        },
        parameters=DataExportParameters,
    )
    def get(
        self,
        request: Request,
        resource_type: str = "orders",
        export_format: str = "csv",
        **kwargs,
    ):
        """Generate a file to export."""
        # Fix: the docstring previously sat inside the try block, so the
        # method had no real docstring (it was just a no-op statement).
        try:
            query_params = request.GET
            # the "download" query flag toggles attachment disposition
            self.attachment = "download" in query_params
            self.resource = resource_type
            self.format = export_format

            self.dataset = resources.export(
                resource_type, query_params, context=request
            )

            response = super().get(request, **kwargs)
            # allow the generated file to be embedded (e.g. dashboard preview)
            response["X-Frame-Options"] = "ALLOWALL"
            return response
        except Exception as e:
            # surface export failures as a structured 409 payload
            return JsonResponse(
                dict(errors=[{"message": str(e)}]),
                status=status.HTTP_409_CONFLICT,
            )

    def get_file(self):
        """Build the in-memory file served by VirtualDownloadView.

        The dataset attribute named after the format (e.g. `dataset.csv`)
        holds the rendered content as `str` or `bytes`.
        """
        content = getattr(self.dataset, self.format, "")
        # Fix: use isinstance instead of `type(content) == str` so str
        # subclasses are routed to the text buffer as well.
        buffer = io.StringIO() if isinstance(content, str) else io.BytesIO()
        buffer.write(content)

        return ContentFile(buffer.getvalue(), name=f"{self.resource}.{self.format}")
162
+
163
+
164
# Routes exposed by this view module.
urlpatterns = [
    path("batches/data/import", DataImport.as_view(), name="data-import"),
    re_path(
        # Fix: escape the separator dot — an unescaped "." matched ANY
        # character, so e.g. "exportXcsv" or "orders_csv" would also route.
        r"^batches/data/export/(?P<resource_type>\w+)\.(?P<export_format>\w+)",
        csrf_exempt(DataExport.as_view()),
        name="data-export",
    ),
]
@@ -0,0 +1 @@
1
# Declare a pkgutil-style namespace package so other distributions can
# contribute submodules under this package path.
__path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
@@ -0,0 +1,136 @@
1
+ __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
2
+
3
+ import typing
4
+ import logging
5
+ from huey.contrib.djhuey import db_task
6
+
7
+ import karrio.server.core.utils as utils
8
+ import karrio.server.data.models as models
9
+ import karrio.server.data.serializers as serializers
10
+
11
+ logger = logging.getLogger(__name__)
12
+
13
+
14
@db_task()
@utils.error_wrapper
@utils.tenant_aware
def queue_batch_import(*args, **kwargs):
    """Huey task entry point: run a batch file import in the background.

    Delegates to `batch.trigger_batch_import`; the module is imported
    lazily inside the task body (presumably to avoid import cycles at
    worker startup — TODO confirm).
    """
    from karrio.server.events.task_definitions.data import batch

    batch.trigger_batch_import(*args, **kwargs)
21
+
22
+
23
@db_task()
@utils.error_wrapper
@utils.tenant_aware
def save_batch_resources(*args, **kwargs):
    """Huey task entry point: persist a batch operation's resources.

    Delegates to `batch.trigger_batch_saving`; lazy import mirrors
    `queue_batch_import` above.
    """
    from karrio.server.events.task_definitions.data import batch

    batch.trigger_batch_saving(*args, **kwargs)
30
+
31
+
32
@db_task()
@utils.tenant_aware
def process_batch_resources(batch_id, **kwargs):
    """Process the resources of a batch operation by resource type, update
    each resource's status, then mark the batch completed (or failed).
    """
    logger.info(f"> start batch ({batch_id}) resources processing...")

    # Fix: fetch OUTSIDE the try block. Previously a failing
    # `objects.get(...)` raised before `batch_operation` was bound, and the
    # except handler then raised NameError, masking the real error.
    batch_operation = models.BatchOperation.objects.filter(pk=batch_id).first()

    if batch_operation is None:
        logger.info(f"batch operation ({batch_id}) not found")
        return

    try:
        if batch_operation.resource_type == serializers.ResourceType.trackers.value:
            batch_operation.resources = _process_trackers(batch_operation.resources)

        elif batch_operation.resource_type == serializers.ResourceType.order.value:
            batch_operation.resources = _process_orders(batch_operation.resources)

        elif batch_operation.resource_type == serializers.ResourceType.shipment.value:
            batch_operation.resources = _process_shipments(batch_operation.resources)

        elif batch_operation.resource_type == serializers.ResourceType.billing.value:
            # billing batches are not processed yet
            pass

        batch_operation.status = serializers.BatchOperationStatus.completed.value
        batch_operation.save(update_fields=["resources", "status"])
    except Exception as e:
        logger.exception(e)
        batch_operation.status = serializers.BatchOperationStatus.failed.value
        batch_operation.save()

    logger.info(f"> ending batch ({batch_id}) resources processing...")
59
+
60
+
61
def _process_shipments(resources: typing.List[dict]):
    """Run pending shipment resources through the purchase/refresh task and
    recompute each resource's batch status from the resulting shipment.
    """
    from karrio.server.manager import models
    from karrio.server.events.task_definitions.data import shipments

    resource_ids = [res["id"] for res in resources]
    shipment_ids = [
        res["id"]
        for res in resources
        if res["status"] != serializers.ResourceStatus.processed.value
    ]
    shipments.process_shipments(shipment_ids=shipment_ids)
    # check results and update resource statuses
    results = models.Shipment.objects.filter(id__in=resource_ids)

    def _compute_state(shipment=None):
        # Fix: guard against a missing shipment — previously this crashed
        # with AttributeError on `None.options`.
        if shipment is None:
            return serializers.ResourceStatus.incomplete.value
        # shipment with service not purchased.
        # Fix: the option key was misspelled "perferred_service", so this
        # branch never matched the "preferred_service" option that the
        # processing task (shipments.process_shipment) reads.
        if (
            any(shipment.options.get("preferred_service") or "")
            and shipment.status == "draft"
        ):
            # Fix: return the enum VALUE for consistency with the other
            # branches (previously returned the raw enum member).
            return serializers.ResourceStatus.incomplete.value
        # shipment has errors and no rates
        if len(shipment.rates) == 0 and any(shipment.messages):
            return serializers.ResourceStatus.has_errors.value
        # shipment is at the right state
        return serializers.ResourceStatus.processed.value

    return [
        dict(
            id=res["id"],
            status=_compute_state(results.filter(id=res["id"]).first()),
        )
        for res in resources
    ]
95
+
96
+
97
+ def _process_orders(resources: typing.List[dict]):
98
+ resource_ids = [res["id"] for res in resources]
99
+
100
+ return [dict(id=id, status="processed") for id in resource_ids]
101
+
102
+
103
def _process_trackers(resources: typing.List[dict]):
    """Refresh pending trackers then derive each resource's batch status."""
    from karrio.server.manager import models
    from karrio.server.events.task_definitions.base import tracking

    resource_ids = [res["id"] for res in resources]
    pending_ids = [
        res["id"]
        for res in resources
        if res["status"] != serializers.ResourceStatus.processed.value
    ]
    tracking.update_trackers(tracker_ids=pending_ids)
    # re-read the trackers to derive the refreshed statuses
    trackers = models.Tracking.objects.filter(id__in=resource_ids)

    def _state_of(tracker=None):
        # a missing tracker is reported as incomplete
        if tracker is None:
            return serializers.ResourceStatus.incomplete.value
        return serializers.ResourceStatus.processed.value

    return [
        dict(
            id=res["id"],
            status=_state_of(trackers.filter(id=res["id"]).first()),
        )
        for res in resources
    ]
130
+
131
+
132
# Task list picked up by the events app's task registration
# (see karrio.server.events.task_definitions.__init__).
TASK_DEFINITIONS = [
    queue_batch_import,
    save_batch_resources,
    process_batch_resources,
]
@@ -0,0 +1,130 @@
1
+ import typing
2
+ import tablib
3
+ import logging
4
+ from django.conf import settings
5
+ from django.contrib.auth import get_user_model
6
+ from import_export.resources import ModelResource
7
+
8
+ import karrio.server.core.utils as utils
9
+ import karrio.server.data.serializers as serializers
10
+ import karrio.server.data.resources as resources
11
+ import karrio.server.data.models as models
12
+
13
+ logger = logging.getLogger(__name__)
14
+ User = get_user_model()
15
+
16
+
17
@utils.tenant_aware
def trigger_batch_import(
    batch_id: str,
    data: dict,
    ctx: dict,
    **kwargs,
):
    """Run the file import for a batch operation.

    `data` carries the parsed "dataset" and the raw "import_data" params;
    `ctx` is the serialized request context (see `retrieve_context`).
    Any failure is logged and swallowed — the batch is simply left untouched.
    """
    logger.info(f"> starting batch import operation ({batch_id})")
    try:
        context = retrieve_context(ctx)
        batch_operation = (
            models.BatchOperation.access_by(context).filter(pk=batch_id).first()
        )

        if batch_operation is None:
            logger.info("batch operation not found")
        else:
            dataset = data["dataset"]
            import_data = data["import_data"]
            resource = resources.get_import_resource(
                resource_type=batch_operation.resource_type,
                params=import_data,
                context=context,
                batch_id=batch_id,
            )

            update_batch_operation_resources(
                batch_operation,
                process_resources(resource, dataset),
            )

    except Exception as e:
        logger.exception(e)

    logger.info(f"> ending batch import operation ({batch_id})")
50
+
51
+
52
@utils.tenant_aware
def trigger_batch_saving(
    batch_id: str,
    data: dict,
    ctx: dict,
    **kwargs,
):
    """Persist the resources of a batch operation using the serializer
    registered for the batch's resource type.

    Any failure is logged and swallowed — the batch is left untouched.
    """
    # Fix: log message typo ("beging" -> "begin")
    logger.info(f"> begin batch resources saving ({batch_id})")
    try:
        context = retrieve_context(ctx)
        batch_operation = (
            models.BatchOperation.access_by(context).filter(pk=batch_id).first()
        )

        if batch_operation is not None:
            # Fix: local variable spelling (was `batch_seriazlizer`).
            # NOTE(review): `get_serialiazer` matches the (misspelled)
            # method name declared on ResourceType — rename it there first.
            batch_serializer = serializers.ResourceType.get_serialiazer(
                batch_operation.resource_type
            )
            batch_resources = batch_serializer.save_resources(data, batch_id, context)
            update_batch_operation_resources(batch_operation, batch_resources)
        else:
            logger.info("batch operation not found")

    except Exception as e:
        logger.exception(e)

    logger.info(f"> ending batch resources saving ({batch_id})")
79
+
80
+
81
def process_resources(
    resource: ModelResource,
    dataset: tablib.Dataset,
):
    """Import `dataset` through the django-import-export resource and return
    one ``{id, status}`` entry per imported row: `failed` when the row has
    errors, otherwise `queued` for later processing.
    """
    result = resource.import_data(dataset, dry_run=False)

    def _status_for(row_errors):
        if any(row_errors):
            return serializers.ResourceStatus.failed.value
        return serializers.ResourceStatus.queued.value

    return [
        dict(id=row.object_id, status=_status_for(row.errors))
        for row in result.rows
    ]
99
+
100
+
101
def update_batch_operation_resources(
    batch_operation: models.BatchOperation,
    batch_resources: typing.List[dict],
):
    """Attach the resource list to the batch and flip its status to `running`.

    Best-effort: any failure is logged (warning + traceback) and never
    re-raised, so callers proceed regardless.
    """
    try:
        logger.debug(f"update batch operation {batch_operation.id}")

        batch_operation.resources = batch_resources
        batch_operation.status = serializers.BatchOperationStatus.running.value
        batch_operation.save(update_fields=["resources", "status"])

        logger.debug(f"batch operation {batch_operation.id} updated successfully")
    except Exception as update_error:
        logger.warning(f"failed to update batch operation {batch_operation.id}")
        logger.error(update_error, exc_info=True)
116
+
117
+
118
def retrieve_context(info: dict) -> serializers.Context:
    """Rebuild a request Context from its serialized task payload.

    `info` is expected to carry "user_id" and optionally "org_id" and
    "test_mode" (presumably produced when the task was queued — TODO
    confirm against the enqueuing side).
    """
    organization = None

    if settings.MULTI_ORGANIZATIONS and "org_id" in info:
        import karrio.server.orgs.models as orgs_models

        organization = orgs_models.Organization.objects.filter(
            id=info["org_id"]
        ).first()

    return serializers.Context(
        org=organization,
        user=User.objects.filter(id=info["user_id"]).first(),
        test_mode=(info.get("test_mode") or False),
    )
@@ -0,0 +1,51 @@
1
+ import logging
2
+
3
+ import karrio.server.core.utils as utils
4
+ import karrio.server.manager.models as models
5
+ import karrio.server.serializers as serializers
6
+ from karrio.server.manager.serializers import (
7
+ fetch_shipment_rates,
8
+ can_mutate_shipment,
9
+ buy_shipment_label,
10
+ )
11
+
12
+ logger = logging.getLogger(__name__)
13
+
14
+
15
@utils.error_wrapper
def process_shipments(shipment_ids=None):
    """Purchase/refresh the given shipments one by one.

    Args:
        shipment_ids: ids of the shipments to process; only those still in
            `draft` status are picked up. Defaults to no shipments.
    """
    logger.info("> starting batch shipments processing")

    # Fix: replaced the mutable default argument (`shipment_ids=[]`) —
    # passing None (or nothing) now safely means "no ids".
    shipments = models.Shipment.objects.filter(
        id__in=(shipment_ids or []), status="draft"
    )

    if shipments:
        for shipment in shipments:
            process_shipment(shipment)
    else:
        logger.info("no shipment found")

    logger.info("> ending batch shipments processing")
28
+
29
+
30
@utils.error_wrapper
def process_shipment(shipment):
    """Finalize a single batched shipment.

    Fetches rates when the shipment has none, then purchases the label when
    a `preferred_service` option is set on the shipment.
    NOTE(review): `can_mutate_shipment` presumably validates/raises when the
    shipment cannot be updated or purchased — confirm its contract before
    reordering these calls.
    """
    # a non-empty preferred service means this batch requested a purchase
    preferred_service = shipment.options.get("preferred_service")
    should_purchase = any(preferred_service or "")
    # rates are required before a purchase can happen
    should_update = should_purchase or len(shipment.rates) == 0
    context = serializers.get_object_context(shipment)

    can_mutate_shipment(
        shipment,
        update=should_update,
        purchase=should_purchase,
    )

    if len(shipment.rates) == 0:
        shipment = fetch_shipment_rates(shipment, context=context)

    if should_purchase:
        shipment = buy_shipment_label(
            shipment,
            context=context,
            service=preferred_service,
        )
@@ -0,0 +1 @@
1
# Declare a pkgutil-style namespace package so other distributions can
# contribute submodules under this package path.
__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore