documente_shared 0.1.79-py3-none-any.whl → 0.1.81-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of documente_shared might be problematic.
- documente_shared/application/payloads.py +14 -0
- documente_shared/domain/entities/in_memory_result.py +15 -11
- documente_shared/domain/entities/processing_case.py +14 -24
- documente_shared/domain/entities/processing_case_item.py +42 -5
- documente_shared/domain/repositories/processing_case.py +1 -1
- documente_shared/infrastructure/repositories/dynamo_processing_case.py +4 -4
- documente_shared/infrastructure/repositories/http_processing_case.py +21 -9
- documente_shared/infrastructure/repositories/http_processing_case_item.py +4 -4
- {documente_shared-0.1.79.dist-info → documente_shared-0.1.81.dist-info}/METADATA +1 -1
- {documente_shared-0.1.79.dist-info → documente_shared-0.1.81.dist-info}/RECORD +11 -10
- {documente_shared-0.1.79.dist-info → documente_shared-0.1.81.dist-info}/WHEEL +0 -0
documente_shared/application/payloads.py
@@ -0,0 +1,14 @@
+import re
+
+
+def camel_to_snake_key(name: str) -> str:
+    s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
+    return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
+
+def camel_to_snake(data: dict | list) -> dict | list:
+    if isinstance(data, dict):
+        return {camel_to_snake_key(k): camel_to_snake(v) for k, v in data.items()}
+    elif isinstance(data, list):
+        return [camel_to_snake(item) for item in data]
+    else:
+        return data
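The new helpers are used by the HTTP repositories below to normalize camelCase API payloads. A minimal usage sketch (the payload and its values are invented):

```python
from documente_shared.application.payloads import camel_to_snake

# Keys are converted recursively; values are left untouched.
payload = {'caseId': 'abc-123', 'items': [{'documentType': 'invoice', 'uploadedAt': None}]}
print(camel_to_snake(payload))
# {'case_id': 'abc-123', 'items': [{'document_type': 'invoice', 'uploaded_at': None}]}
```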
documente_shared/domain/entities/in_memory_result.py
@@ -11,6 +11,7 @@ from documente_shared.application.files import (
 class InMemoryDocument(object):
     file_path: Optional[str] = None
     file_bytes: Optional[bytes] = None
+    file_base64: Optional[str] = None
 
     @property
     def is_valid(self) -> bool:
@@ -18,13 +19,7 @@ class InMemoryDocument(object):
 
     @property
     def has_content(self) -> bool:
-        return bool(self.file_bytes)
-
-    @property
-    def file_key(self) -> Optional[str]:
-        if not self.file_path:
-            return None
-        return remove_slash_from_path(self.file_path)
+        return bool(self.file_bytes) or bool(self.file_base64)
 
     @property
     def file_name(self) -> Optional[str]:
@@ -32,20 +27,29 @@ class InMemoryDocument(object):
             return None
         return get_filename_from_path(self.file_path)
 
+    @property
+    def file_key(self) -> Optional[str]:
+        return self.file_name
+
     @property
     def is_procesable(self) -> bool:
         return self.is_valid and self.has_content
 
     @property
     def to_dict(self) -> dict:
-
-
-        '
-
+        data = dict()
+        if self.file_path:
+            data['file_path'] = remove_slash_from_path(self.file_path)
+        if self.file_bytes:
+            data['file_bytes'] = self.file_bytes.decode('utf-8')
+        if self.file_base64:
+            data['file_base64'] = self.file_base64
+        return data
 
     @classmethod
     def from_dict(cls, data: dict):
         return cls(
             file_path=data.get('file_path'),
             file_bytes=data.get('file_bytes'),
+            file_base64=data.get('file_base64'),
         )
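For orientation, a sketch of the new base64 path (assuming InMemoryDocument accepts these fields as keyword arguments, which is what its from_dict suggests):

```python
from documente_shared.domain.entities.in_memory_result import InMemoryDocument

doc = InMemoryDocument(file_path='cases/abc-123/invoice.pdf', file_base64='JVBERi0xLjQK')

assert doc.has_content                # base64 content alone now counts as content
assert doc.file_key == doc.file_name  # file_key is now the file name rather than the full path

payload = doc.to_dict                 # only populated fields are emitted (file_path, file_base64)
restored = InMemoryDocument.from_dict(payload)
assert restored.file_base64 == doc.file_base64
```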
documente_shared/domain/entities/processing_case.py
@@ -96,37 +96,18 @@ class ProcessingCase(object):
 
     @property
     def to_persist_dict(self) -> dict:
-
-
-
-        self,
-        new_instance: 'ProcessingCase',
-        properties: List[str] = None,
-    ):
-        instance_properties = properties or [
-            'label',
-            'status',
-            'category',
-            'enqueued_at',
-            'started_at',
-            'failed_at',
-            'feedback',
-            'completed_at',
-            'metadata',
-            'items',
+        persist_data = self.to_dict
+        persist_data["items"] = [
+            item.to_persist_dict for item in self.items
         ]
-
-            property_value = getattr(new_instance, _property)
-            if not hasattr(self, _property):
-                continue
-            setattr(self, _property, property_value)
-        return self
+        return persist_data
 
     @classmethod
     def from_dict(cls, data: dict) -> 'ProcessingCase':
         return cls(
             uuid=data.get('uuid'),
             name=data.get('label'),
+            tenant_slug=data.get('tenant_slug'),
             status=ProcessingStatus.from_value(data.get('status')),
             case_type=(
                 ProcessingCaseType.from_value(data.get('category'))
@@ -143,3 +124,12 @@ class ProcessingCase(object):
             for item_dict in data.get('items', [])
         ],
     )
+
+    @classmethod
+    def from_persist_dict(cls, data: dict) -> 'ProcessingCase':
+        instance = cls.from_dict(data)
+        instance.items = [
+            ProcessingCaseItem.from_persist_dict(item_dict)
+            for item_dict in data.get('items', [])
+        ]
+        return instance
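Taken together, to_persist_dict and from_persist_dict define the persisted shape of a case: the case's own fields plus each item in its persist form. An illustrative payload (keys follow the diff, all values are invented):

```python
# Shape produced by ProcessingCase.to_persist_dict and consumed by from_persist_dict.
persisted_case = {
    'uuid': '0f3c2a7e',
    'label': 'october-invoices',   # mapped to the case name by from_dict
    'tenant_slug': 'acme',
    'status': 'pending',           # hypothetical status value
    'items': [
        {
            'case_id': '0f3c2a7e',
            'digest': '9d41b020',
            'status': 'pending',
            'document_path': 'cases/0f3c2a7e/invoice-001.pdf',
            'processed_csv_path': None,
            'processed_xlsx_path': None,
            'processed_json_path': None,
        },
    ],
}
```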
documente_shared/domain/entities/processing_case_item.py
@@ -135,12 +135,24 @@ class ProcessingCaseItem(object):
                 str(self.processing_confidence.quantize(Decimal('0.001')))
                 if self.processing_confidence else None
             ),
-            'uploaded_at':
-
-
+            'uploaded_at': (
+                self.uploaded_at.isoformat()
+                if self.uploaded_at else None
+            ),
+            'started_at': (
+                self.started_at.isoformat()
+                if self.started_at else None
+            ),
+            'failed_at': (
+                self.failed_at.isoformat()
+                if self.failed_at else None
+            ),
             'feedback': self.feedback,
             'metadata': self.metadata,
-            'completed_at':
+            'completed_at': (
+                self.completed_at.isoformat()
+                if self.completed_at else None
+            ),
         }
 
     @property
@@ -148,6 +160,15 @@ class ProcessingCaseItem(object):
         simple_dict = self.to_dict.copy()
         return simple_dict
 
+    @property
+    def to_persist_dict(self) -> dict:
+        simple_dict = self.to_dict.copy()
+        simple_dict["document_path"] = self.document.file_path if self.document else None
+        simple_dict["processed_csv_path"] = self.processed_csv.file_path if self.processed_csv else None
+        simple_dict["processed_xlsx_path"] = self.processed_xlsx.file_path if self.processed_xlsx else None
+        simple_dict["processed_json_path"] = self.processed_json.file_path if self.processed_json else None
+        return simple_dict
+
     def overload(
         self,
         new_instance: 'ProcessingCaseItem',
@@ -184,7 +205,10 @@ class ProcessingCaseItem(object):
             case_id=data.get('case_id'),
             digest=data.get('digest'),
             status=ProcessingStatus.from_value(data.get('status')),
-            document=
+            document=(
+                InMemoryDocument.from_dict(data.get('document'))
+                if data.get('document') else None
+            ),
             document_type=(
                 ProcessingDocumentType.from_value(data.get('document_type'))
                 if data.get('document_type') else None
@@ -220,3 +244,16 @@ class ProcessingCaseItem(object):
             metadata=data.get('metadata', {}),
            completed_at=get_datetime_from_data(input_datetime=data.get('completed_at')),
         )
+
+    @classmethod
+    def from_persist_dict(cls, data: dict) -> 'ProcessingCaseItem':
+        instance = cls.from_dict(data)
+        if "document_path" in data:
+            instance.document = InMemoryDocument(file_path=data["document_path"])
+        if "processed_csv_path" in data:
+            instance.processed_csv = InMemoryDocument(file_path=data["processed_csv_path"])
+        if "processed_xlsx_path" in data:
+            instance.processed_xlsx = InMemoryDocument(file_path=data["processed_xlsx_path"])
+        if "processed_json_path" in data:
+            instance.processed_json = InMemoryDocument(file_path=data["processed_json_path"])
+        return instance
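The persist form flattens the item's documents to plain *_path strings, and from_persist_dict rebuilds them as InMemoryDocument instances on the way back. A sketch of that rehydration step in isolation (paths are invented):

```python
from documente_shared.domain.entities.in_memory_result import InMemoryDocument

persisted_item = {
    'document_path': 'cases/0f3c2a7e/invoice-001.pdf',
    'processed_json_path': 'cases/0f3c2a7e/invoice-001.json',
}

# For each *_path key present, from_persist_dict does the equivalent of:
document = InMemoryDocument(file_path=persisted_item['document_path'])
processed_json = InMemoryDocument(file_path=persisted_item['processed_json_path'])
```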
documente_shared/domain/repositories/processing_case.py
@@ -8,7 +8,7 @@ from documente_shared.domain.entities.processing_case_filters import ProcessingC
 class ProcessingCaseRepository(ABC):
 
     @abstractmethod
-    def find(self, uuid: str) -> Optional[ProcessingCase]:
+    def find(self, uuid: str, include_items: bool = False) -> Optional[ProcessingCase]:
         raise NotImplementedError
 
     @abstractmethod
documente_shared/infrastructure/repositories/dynamo_processing_case.py
@@ -3,7 +3,7 @@ from typing import Optional, List
 from boto3.dynamodb.conditions import Key
 
 from documente_shared.domain.entities.processing_case import ProcessingCase
-from documente_shared.domain.
+from documente_shared.domain.entities.processing_case_filters import ProcessingCaseFilters
 from documente_shared.domain.repositories.processing_case import ProcessingCaseRepository
 
 from documente_shared.infrastructure.dynamo_table import DynamoDBTable
@@ -13,7 +13,7 @@ class DynamoProcessingCaseRepository(
     DynamoDBTable,
     ProcessingCaseRepository,
 ):
-    def find(self, uuid: str) -> Optional[ProcessingCase]:
+    def find(self, uuid: str, include_items: bool = False) -> Optional[ProcessingCase]:
         item = self.get(key={'uuid': uuid})
         if item:
             return ProcessingCase.from_dict(item)
@@ -26,10 +26,10 @@ class DynamoProcessingCaseRepository(
     def remove(self, instance: ProcessingCase):
         self.delete(key={'uuid': instance.uuid})
 
-    def filter(self,
+    def filter(self, tenant_slug: str, filters: ProcessingCaseFilters) -> List[ProcessingCase]:
         items = []
 
-        for status in statuses:
+        for status in filters.statuses:
             response = self._table.query(
                 IndexName='status',
                 KeyConditionExpression=Key('status').eq(status.value),
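The rewritten filter queries the 'status' global secondary index once per requested status and accumulates the results. A standalone boto3 sketch of that query pattern (the table name and status values are invented; the index name and key condition come from the diff):

```python
import boto3
from boto3.dynamodb.conditions import Key

table = boto3.resource('dynamodb').Table('processing_cases')  # hypothetical table name

items = []
for status_value in ('pending', 'completed'):  # stand-in for filters.statuses
    response = table.query(
        IndexName='status',
        KeyConditionExpression=Key('status').eq(status_value),
    )
    items.extend(response.get('Items', []))
```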
documente_shared/infrastructure/repositories/http_processing_case.py
@@ -1,6 +1,7 @@
 from dataclasses import dataclass
 from typing import List, Optional
 
+from documente_shared.application.payloads import camel_to_snake
 from documente_shared.domain.entities.processing_case import ProcessingCase
 from documente_shared.domain.entities.processing_case_filters import ProcessingCaseFilters
 from documente_shared.domain.repositories.processing_case import ProcessingCaseRepository
@@ -12,30 +13,41 @@ class HttpProcessingCaseRepository(
     DocumenteClientMixin,
     ProcessingCaseRepository,
 ):
-    def find(self, uuid: str) -> Optional[ProcessingCase]:
-        response = self.session.get(f"{self.api_url}/processing-cases/{uuid}/")
+    def find(self, uuid: str, include_items: bool = False) -> Optional[ProcessingCase]:
+        response = self.session.get(f"{self.api_url}/v1/processing-cases/{uuid}/")
         if response.status_code == 200:
-
+            response_json = response.json()
+            instance_data = response_json.get('data', {})
+            return ProcessingCase.from_persist_dict(camel_to_snake(instance_data))
         return None
 
     def persist(self, instance: ProcessingCase) -> ProcessingCase:
         response = self.session.put(
-            url=f"{self.api_url}/processing-cases/{instance.uuid}/",
+            url=f"{self.api_url}/v1/processing-cases/{instance.uuid}/",
             json=instance.to_dict,
         )
         if response.status_code not in [200, 201]:
             raise Exception(f'Error persisting processing case: {response.text}')
-
+
+        response_json = response.json()
+        instance_data = response_json.get('data', {})
+        return ProcessingCase.from_persist_dict(camel_to_snake(instance_data))
 
     def remove(self, instance: ProcessingCase):
-        self.session.delete(f"{self.api_url}/processing-cases/{instance.uuid}/")
+        self.session.delete(f"{self.api_url}/v1/processing-cases/{instance.uuid}/")
 
     def filter(self, tenant_slug: str, filters: ProcessingCaseFilters) -> List[ProcessingCase]:
-        response = self.session.get(
+        response = self.session.get(
+            url=f"{self.api_url}/v1/processing-cases/",
+            headers={
+                "X-Tenant": tenant_slug,
+            }
+        )
         if response.status_code == 200:
             raw_response = response.json()
+            instaces_data = raw_response.get('data', [])
             return [
-                ProcessingCase.
-                for
+                ProcessingCase.from_persist_dict(item_data)
+                for item_data in camel_to_snake(instaces_data)
             ]
         return []
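At the wire level, the case endpoints are now versioned under /v1/, the tenant travels as a header, and responses are wrapped in a data envelope. An equivalent raw call sketched with requests (the base URL and tenant slug are invented):

```python
import requests

api_url = 'https://documente.example.com/api'  # hypothetical base URL
session = requests.Session()

response = session.get(
    url=f'{api_url}/v1/processing-cases/',
    headers={'X-Tenant': 'acme'},  # tenant_slug is sent as the X-Tenant header
)
cases_data = response.json().get('data', []) if response.status_code == 200 else []
```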
documente_shared/infrastructure/repositories/http_processing_case_item.py
@@ -16,19 +16,19 @@ class HttpProcessingCaseItemRepository(
     def find(self, uuid: str) -> Optional[ProcessingCaseItem]:
         response = self.session.get(f"{self.api_url}/processing-case-items/{uuid}/")
         if response.status_code == 200:
-            return ProcessingCaseItem.
+            return ProcessingCaseItem.from_persist_dict(response.json())
         return None
 
     def find_by_digest(self, digest: str) -> Optional[ProcessingCaseItem]:
         response = self.session.get(f"{self.api_url}/processing-case-items/{digest}/")
         if response.status_code == 200:
-            return ProcessingCaseItem.
+            return ProcessingCaseItem.from_persist_dict(response.json())
         return None
 
     def persist(self, instance: ProcessingCaseItem) -> ProcessingCaseItem:
         response = self.session.put(
             url=f"{self.api_url}/processing-case-items/{instance.uuid}/",
-            json=instance.
+            json=instance.to_persist_dict,
         )
         if response.status_code in [200, 201]:
             return ProcessingCaseItem.from_dict(response.json())
@@ -46,7 +46,7 @@ class HttpProcessingCaseItemRepository(
         if response.status_code == 200:
             raw_response = response.json()
             return [
-                ProcessingCaseItem.
+                ProcessingCaseItem.from_persist_dict(item)
                 for item in raw_response.get('data', [])
             ]
         return []
{documente_shared-0.1.79.dist-info → documente_shared-0.1.81.dist-info}/RECORD
@@ -3,6 +3,7 @@ documente_shared/application/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NM
 documente_shared/application/digest.py,sha256=Um6E8WfFri2_lly4RFWydJyvSfPZGFcOX-opEOzDCWc,172
 documente_shared/application/exceptions.py,sha256=lQM8m7wmI9OTLGva0gd7s7YT7ldaTk_Ln4t32PpzNf8,654
 documente_shared/application/files.py,sha256=ADiWi6Mk3YQGx3boGsDqdb5wk8qmabkGRy7bhNFa1OY,649
+documente_shared/application/payloads.py,sha256=s6SjaNN18_aQ6IL083Zq2J8thRCZ_zC2sn7hkfjK_Go,453
 documente_shared/application/query_params.py,sha256=JscPqFBx28p-x9i2g6waY7Yl4FQM1zn2zSbEoTrkK1k,3938
 documente_shared/application/time_utils.py,sha256=_fxgh8VoGPkdsft47COJ16vFwt8pMbHIJCgDFHLSlrU,435
 documente_shared/application/timezone.py,sha256=NHpzTzOPD_fWQiJ4BrRqt_TIDs5XyB5ZMR7x8vUk8gQ,183
@@ -12,10 +13,10 @@ documente_shared/domain/constants.py,sha256=NG5BGaXBr_FnzudjTRPxpDpyiSDdaB_PLCdl
 documente_shared/domain/entities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 documente_shared/domain/entities/document.py,sha256=AthTUyA-QZE3WAT7lMoKVr_Z8mO_3qERuCnZge0DTLQ,12595
 documente_shared/domain/entities/document_metadata.py,sha256=ygyFIC5qwxlm8DUM5kvVFny9zJfPQS8vNLM2br5XsQ8,2353
-documente_shared/domain/entities/in_memory_result.py,sha256=
-documente_shared/domain/entities/processing_case.py,sha256=
+documente_shared/domain/entities/in_memory_result.py,sha256=0sLNUrovKFQx4M-E9e4DrAiVgch2i4AKA-9BQBRaeI8,1482
+documente_shared/domain/entities/processing_case.py,sha256=59fhosBfKh9ijWRDTkdN9agV3Qa_AIwjHEWU6FBPF4k,4978
 documente_shared/domain/entities/processing_case_filters.py,sha256=FgyxB4mQb0nEGjIbUB9OiazkKL4yHRRC6bvmjD5NT8k,1915
-documente_shared/domain/entities/processing_case_item.py,sha256=
+documente_shared/domain/entities/processing_case_item.py,sha256=5BqKAv56R3tba0_rPdYtiTCpJ9ytP-JEjkCZgr1cjh0,9660
 documente_shared/domain/entities/processing_case_item_filters.py,sha256=-cAQTSWOepMMcGCBg2X3dd0W_8XHuBTlvOB1d-3sVVM,1971
 documente_shared/domain/entities/processing_event.py,sha256=m1O0gcNaE_SszeIhxM3uYPHSpyOUmize6mfRw1_bYZo,1723
 documente_shared/domain/enums/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -24,19 +25,19 @@ documente_shared/domain/enums/document.py,sha256=NltZA1YVgJ7dVfSQdJFIE0ZUGf9Y-nx
 documente_shared/domain/enums/processing_case.py,sha256=LhFhcoWlockxcpplsVdC6M2kpXn7sOdzQySf24wFhx8,1572
 documente_shared/domain/repositories/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 documente_shared/domain/repositories/document.py,sha256=vJzr6c92gqBzyhaEdjrvnoneKRrWmJ0AsvocPnhxiLU,767
-documente_shared/domain/repositories/processing_case.py,sha256=
+documente_shared/domain/repositories/processing_case.py,sha256=9jGpnUnXaDgQE1ZKiet7zDVCc7wVvHUrcPOeacebb3s,796
 documente_shared/domain/repositories/processing_case_item.py,sha256=gPZaQCMYlD6vlnEt6cVIqRmi1K-JWvmDx1f74nd97Cs,974
 documente_shared/infrastructure/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 documente_shared/infrastructure/documente_client.py,sha256=UjVWs9DKa-yhw5DVbcEs8iJxalHOarusVayi_ob6QhE,494
 documente_shared/infrastructure/dynamo_table.py,sha256=TMQbcuty7wjDMbuhI8PbT0IGXelgELsNTtqTEQeZ824,2112
 documente_shared/infrastructure/repositories/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 documente_shared/infrastructure/repositories/dynamo_document.py,sha256=_Yp4gtA-n-hJ2w2wAM5BMCs2Mf46Q2Kq3eHqlxudkL4,1443
-documente_shared/infrastructure/repositories/dynamo_processing_case.py,sha256=
+documente_shared/infrastructure/repositories/dynamo_processing_case.py,sha256=IoIHtlaEe4G5TqIV9IvG45a3HRBVHLfNC9sSgQjabUk,1464
 documente_shared/infrastructure/repositories/dynamo_processing_case_item.py,sha256=4guM8V3YfP7kzYcuVWunGJGmXi0kSSUW8otks39g1vs,1754
-documente_shared/infrastructure/repositories/http_processing_case.py,sha256=
-documente_shared/infrastructure/repositories/http_processing_case_item.py,sha256=
+documente_shared/infrastructure/repositories/http_processing_case.py,sha256=zeGkuvF1SZMyDOUunp93fhn7lS61NQKa1GBcNF8dzZU,2251
+documente_shared/infrastructure/repositories/http_processing_case_item.py,sha256=OSkOd7WdksS4O75uALU530kCsLkpjRTEBCmg3wg0ZFY,2110
 documente_shared/infrastructure/s3_bucket.py,sha256=vT_yN42RFQXubtUn8ln-j13Os_-25UGClVtXg5Bkv6I,1932
 documente_shared/infrastructure/sqs_queue.py,sha256=KZWeHZ9zmXmrxoNpOQX7GEdDhZ1knbPXgwSwFwJblGg,1504
-documente_shared-0.1.
-documente_shared-0.1.
-documente_shared-0.1.
+documente_shared-0.1.81.dist-info/METADATA,sha256=cPO-JJd3KwUjCci_9-5KKWqrDSokT-x2VCNZpoBW_Rk,881
+documente_shared-0.1.81.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+documente_shared-0.1.81.dist-info/RECORD,,

{documente_shared-0.1.79.dist-info → documente_shared-0.1.81.dist-info}/WHEEL
File without changes