jsonpatch-trigger 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,37 @@
1
+ Metadata-Version: 2.4
2
+ Name: jsonpatch-trigger
3
+ Version: 0.1.0
4
+ Summary: Extension for JSONPatch (RFC6902) that introduces operation preconditions, change tracking and automated listeners that can introduce new operations based on the previous change
5
+ Requires-Python: >=3.11
6
+ Description-Content-Type: text/markdown
7
+ Requires-Dist: python-jsonpath==2.0.2
8
+ Requires-Dist: pydantic==2.12.5
9
+ Provides-Extra: test
10
+ Requires-Dist: coverage==7.13.4; extra == "test"
11
+ Requires-Dist: pytest==9.0.2; extra == "test"
12
+
13
+ # JSONPatch-Trigger
14
+
15
+ This package extends the JSON Patch (RFC 6902) functionality with the following features:
16
+ - Preconditions for operations that prevent their execution
17
+ - Usage of JSONPaths over JSONPointers to allow operation targets with wildcards and conditions
18
+ - Change tracking of each operation (a list of additions and deletions as JSONPointers)
19
+ - Listeners that can react to the tracked changes to dynamically perform customizable actions when something in the JSON document has changed
20
+
21
+ The JSONPath and JSONPatch implementations used as a basis are from https://pypi.org/project/python-jsonpath/
22
+
23
+ ## Use Case
24
+ The functionalities in this package have been developed to serve the following use case:
25
+
26
+ A process P produces JSON objects.
27
+ Every time P executes, the result needs to be adjusted with changes the user can configure.
28
+ So the set of operations is persisted and applied for every process run.
29
+ There are different processes and each requires a different set of user operations.
30
+ Additionally, the produced JSON objects can have patterns that can be changed automatically instead of with a manual user action.
31
+ However, the automated steps might be dependent on the order of user operations.
32
+ So instead of appending or prepending the automated operations, a listener approach is used to apply the automated operation as soon as a certain path in the document is modified (triggered).
33
+ For this to work properly the first operation is always an AddOperation that
34
+ adds the entire existing object; therefore, the tracking produces an addition for every JSONPointer in the document.
35
+
36
+ ## CI Debug Counter
37
+ 3
@@ -0,0 +1,25 @@
1
+ # JSONPatch-Trigger
2
+
3
+ This package extends the JSON Patch (RFC 6902) functionality with the following features:
4
+ - Preconditions for operations that prevent their execution
5
+ - Usage of JSONPaths over JSONPointers to allow operation targets with wildcards and conditions
6
+ - Change tracking of each operation (a list of additions and deletions as JSONPointers)
7
+ - Listeners that can react to the tracked changes to dynamically perform customizable actions when something in the JSON document has changed
8
+
9
+ The JSONPath and JSONPatch implementations used as a basis are from https://pypi.org/project/python-jsonpath/
10
+
11
+ ## Use Case
12
+ The functionalities in this package have been developed to serve the following use case:
13
+
14
+ A process P produces JSON objects.
15
+ Every time P executes, the result needs to be adjusted with changes the user can configure.
16
+ So the set of operations is persisted and applied for every process run.
17
+ There are different processes and each requires a different set of user operations.
18
+ Additionally, the produced JSON objects can have patterns that can be changed automatically instead of with a manual user action.
19
+ However, the automated steps might be dependent on the order of user operations.
20
+ So instead of appending or prepending the automated operations, a listener approach is used to apply the automated operation as soon as a certain path in the document is modified (triggered).
21
+ For this to work properly the first operation is always an AddOperation that
22
+ adds the entire existing object; therefore, the tracking produces an addition for every JSONPointer in the document.
23
+
24
+ ## CI Debug Counter
25
+ 3
@@ -0,0 +1,15 @@
1
+ [project]
2
+ name = "jsonpatch-trigger"
3
+ version = "0.1.0"
4
+ description = "Extension for JSONPatch (RFC6902) that introduces operation preconditions, change tracking and automated listeners that can introduce new operations based on the previous change"
5
+ readme = "README.md"
6
+ requires-python = ">=3.11"
7
+ dependencies = [
8
+ "python-jsonpath==2.0.2",
9
+ "pydantic==2.12.5"
10
+ ]
11
+ [project.optional-dependencies]
12
+ test = [
13
+ "coverage==7.13.4",
14
+ "pytest==9.0.2"
15
+ ]
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
@@ -0,0 +1,8 @@
1
from .common import json_type, make_jsonpath, normalize_jsonpath
from .execution import OperationExecutionContext, AutomatedOperationProducer
from .operations import Operation, AddOperation, MoveOperation, CopyOperation, CompoundOperation, RemoveOperation

# Public API of the package.  Fix: the export list must be spelled ``__all__``
# (lowercase); ``__ALL__`` is just an ordinary module attribute and has no
# effect on ``from jsonpatch_trigger import *``.
__all__ = ["json_type", "make_jsonpath", "normalize_jsonpath",
           "OperationExecutionContext", "AutomatedOperationProducer",
           "Operation", "AddOperation", "MoveOperation", "CopyOperation", "CompoundOperation", "RemoveOperation"]
@@ -0,0 +1,50 @@
1
+ from typing import Sequence, Any, Mapping
2
+
3
+ import jsonpath
4
+ from jsonpath import JSONPath
5
+ from pydantic import TypeAdapter
6
+ from pydantic_core import core_schema
7
+
8
+ json_type = str | int | float | bool | None | Sequence[Any] | Mapping[str, Any]
9
+
10
+
11
def make_jsonpath(path_string: str) -> JSONPath:
    """Compile *path_string* into a normalized :class:`JSONPath`.

    The path is compiled and then re-compiled from its canonical string form
    (compile -> str -> compile), presumably so that equivalent spellings of
    the same path end up with one canonical representation — TODO confirm
    this matters for hash/equality of paths used as dict keys.
    """
    # Consistency fix: reuse the shared normalization helper instead of
    # duplicating the compile-str-compile dance inline.
    return normalize_jsonpath(jsonpath.compile(path_string))
13
+
14
+
15
def normalize_jsonpath(path: JSONPath) -> JSONPath:
    """Return *path* re-compiled from its canonical string representation."""
    canonical_form = str(path)
    return jsonpath.compile(canonical_form)
17
+
18
+
19
def serialize_jsonpath(path: JSONPath) -> str:
    """Render *path* as its canonical string form."""
    return f"{path!s}"
21
+
22
+
23
+ #
24
+ #
25
+ # class ThirdPartyPydanticAdapter:
26
+ # @staticmethod
27
+ # def __get_pydantic_core_schema__(source_type, handler: GetCoreSchemaHandler):
28
+ # return core_schema.no_info_wrap_validator_function(
29
+ # make_jsonpath,
30
+ # core_schema.str_schema(),
31
+ # serialization=core_schema.plain_serializer_function_ser_schema(
32
+ # serialize_jsonpath,
33
+ # return_schema=core_schema.str_schema(),
34
+ # )
35
+ # )
36
+ #
37
+ #
38
+ # class JSONPathAdapter(TypeAdapter):
39
+ #
40
+ # @classmethod
41
+ # def __get_pydantic_core_schema__(cls, source_type, handler):
42
+ # return core_schema.chain_schema([
43
+ # core_schema.str_schema(),
44
+ # core_schema.no_info_after_validator_function(
45
+ # make_jsonpath,
46
+ # serialization=core_schema.plain_serializer_function_ser_schema(
47
+ # serialize_jsonpath
48
+ # )
49
+ # )
50
+ # ])
@@ -0,0 +1,177 @@
1
+ from typing import (
2
+ Any,
3
+ Callable,
4
+ )
5
+
6
+ from pydantic_core import core_schema
7
+ from typing_extensions import Annotated
8
+
9
+ from pydantic import (
10
+ BaseModel,
11
+ GetJsonSchemaHandler,
12
+ ValidationError,
13
+ )
14
+ from pydantic.json_schema import JsonSchemaValue
15
+
16
+ from jsonpath import JSONPath, JSONPointer
17
+
18
+ from jsonpatch_trigger import make_jsonpath
19
+
20
+
21
class _JSONPointerPydanticAnnotation:
    """Pydantic annotation helper allowing ``jsonpath.JSONPointer`` to be used
    as a model field type (see ``PydanticJSONPointer`` below)."""

    @classmethod
    def __get_pydantic_core_schema__(
        cls,
        _source_type: Any,
        _handler: Callable[[Any], core_schema.CoreSchema],
    ) -> core_schema.CoreSchema:
        """
        Build a pydantic-core schema that behaves in the following ways:

        * strings are parsed into ``JSONPointer`` instances
        * existing ``JSONPointer`` instances pass through unchanged
        * nothing else passes validation
        * serialization always emits the pointer's string form

        (Fix: the previous docstring was copy-pasted from pydantic's
        ``ThirdPartyType``/int example and described behavior this class
        does not have.)
        """
        from_string_schema = core_schema.chain_schema(
            [
                core_schema.str_schema(),
                core_schema.no_info_plain_validator_function(JSONPointer),
            ]
        )

        return core_schema.json_or_python_schema(
            json_schema=from_string_schema,
            python_schema=core_schema.union_schema(
                [
                    # check if it's an instance first before doing any further work
                    core_schema.is_instance_schema(JSONPointer),
                    from_string_schema,
                ]
            ),
            serialization=core_schema.plain_serializer_function_ser_schema(
                lambda instance: str(instance)
            ),
        )

    @classmethod
    def __get_pydantic_json_schema__(
        cls, _core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
    ) -> JsonSchemaValue:
        # Use the same schema that would be used for `str`
        return handler(core_schema.str_schema())
69
+
70
+
71
# Annotated alias: fields typed as PydanticJSONPointer validate from strings
# and serialize back to strings via _JSONPointerPydanticAnnotation.
PydanticJSONPointer = Annotated[JSONPointer, _JSONPointerPydanticAnnotation]
74
+
75
+
76
class _JSONPathPydanticAnnotation:
    """Pydantic annotation helper allowing ``jsonpath.JSONPath`` to be used
    as a model field type (see ``PydanticJSONPath`` below)."""

    @classmethod
    def __get_pydantic_core_schema__(
        cls,
        _source_type: Any,
        _handler: Callable[[Any], core_schema.CoreSchema],
    ) -> core_schema.CoreSchema:
        """
        Build a pydantic-core schema that behaves in the following ways:

        * strings are compiled into normalized ``JSONPath`` instances via
          ``make_jsonpath``
        * existing ``JSONPath`` instances pass through unchanged
        * nothing else passes validation
        * serialization always emits the path's canonical string form

        (Fix: the previous docstring was copy-pasted from pydantic's
        ``ThirdPartyType``/int example and described behavior this class
        does not have.)
        """
        from_string_schema = core_schema.chain_schema(
            [
                core_schema.str_schema(),
                core_schema.no_info_plain_validator_function(make_jsonpath),
            ]
        )

        return core_schema.json_or_python_schema(
            json_schema=from_string_schema,
            python_schema=core_schema.union_schema(
                [
                    # check if it's an instance first before doing any further work
                    core_schema.is_instance_schema(JSONPath),
                    from_string_schema,
                ]
            ),
            serialization=core_schema.plain_serializer_function_ser_schema(
                lambda instance: str(instance)
            ),
        )

    @classmethod
    def __get_pydantic_json_schema__(
        cls, _core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
    ) -> JsonSchemaValue:
        # Use the same schema that would be used for `str`
        return handler(core_schema.str_schema())
124
+
125
+
126
+ # We now create an `Annotated` wrapper that we'll use as the annotation for fields on `BaseModel`s, etc.
127
PydanticJSONPath = Annotated[JSONPath, _JSONPathPydanticAnnotation]
130
+
131
+
132
+
133
+ #
134
+ # # Create a model class that uses this annotation as a field
135
+ # class Model(BaseModel):
136
+ # third_party_type: PydanticJSONPath
137
+ #
138
+ #
139
+ # # Demonstrate that this field is handled correctly, that ints are parsed into `ThirdPartyType`, and that
140
+ # # these instances are also "dumped" directly into ints as expected.
141
+ # m_int = Model(third_party_type=1)
142
+ # assert isinstance(m_int.third_party_type, ThirdPartyType)
143
+ # assert m_int.third_party_type.x == 1
144
+ # assert m_int.model_dump() == {'third_party_type': 1}
145
+ #
146
+ # # Do the same thing where an instance of ThirdPartyType is passed in
147
+ # instance = ThirdPartyType()
148
+ # assert instance.x == 0
149
+ # instance.x = 10
150
+ #
151
+ # m_instance = Model(third_party_type=instance)
152
+ # assert isinstance(m_instance.third_party_type, ThirdPartyType)
153
+ # assert m_instance.third_party_type.x == 10
154
+ # assert m_instance.model_dump() == {'third_party_type': 10}
155
+ #
156
+ # # Demonstrate that validation errors are raised as expected for invalid inputs
157
+ # try:
158
+ # Model(third_party_type='a')
159
+ # except ValidationError as e:
160
+ # print(e)
161
+ # """
162
+ # 2 validation errors for Model
163
+ # third_party_type.is-instance[ThirdPartyType]
164
+ # Input should be an instance of ThirdPartyType [type=is_instance_of, input_value='a', input_type=str]
165
+ # third_party_type.chain[int,function-plain[validate_from_int()]]
166
+ # Input should be a valid integer, unable to parse string as an integer [type=int_parsing, input_value='a', input_type=str]
167
+ # """
168
+ #
169
+ #
170
+ # assert Model.model_json_schema() == {
171
+ # 'properties': {
172
+ # 'third_party_type': {'title': 'Third Party Type', 'type': 'integer'}
173
+ # },
174
+ # 'required': ['third_party_type'],
175
+ # 'title': 'Model',
176
+ # 'type': 'object',
177
+ # }
@@ -0,0 +1,243 @@
1
+ import abc
2
+ import collections
3
+ import json
4
+ from typing import Any, Self, ClassVar
5
+
6
+ from jsonpath import JSONPath, JSONPointer
7
+ from jsonpath.segments import JSONPathSegment
8
+ from jsonpath.selectors import NameSelector, KeySelector, WildcardSelector, KeysSelector, SliceSelector, \
9
+ SingularQuerySelector, Filter, KeysFilter
10
+ from pydantic import BaseModel, ConfigDict, Field, field_validator, field_serializer, computed_field, model_validator, \
11
+ ValidationError
12
+
13
+ from jsonpatch_trigger import make_jsonpath
14
+ from jsonpatch_trigger.operations import Operation, Operation
15
+ from jsonpatch_trigger.tracking import ChangeTracker
16
+
17
+
18
def can_pointer_match_path(pointer: JSONPointer, path: JSONPath) -> bool:
    """Conservatively decide whether *pointer* could be one of the locations
    selected by *path*.

    Both sequences are walked right-to-left, one pointer part against one
    path segment at a time.  A segment "solves" the current part if at least
    one of its selectors could have produced it; if no selector can, the
    pointer cannot match.  The walk succeeds only when pointer parts and path
    segments run out at the same time (both indices reach -1).

    NOTE(review): KeysSelector, SliceSelector, Filter and KeysFilter are not
    analysed (treated as non-matching), so a segment relying solely on those
    selectors never matches — presumably a deliberate conservative choice;
    confirm.
    """
    pointer_index = len(pointer.parts) - 1
    path_index = len(path.segments) - 1

    while pointer_index >= 0 and path_index >= 0:
        pointer_part: str | int = pointer.parts[pointer_index]
        segment: JSONPathSegment = path.segments[path_index]

        # Can any selector of the current segment account for this part?
        solvable = False
        for selector in segment.selectors:
            if isinstance(selector, NameSelector):
                # Plain member name: must equal the pointer part exactly.
                if selector.name == pointer_part:
                    solvable = True
            elif isinstance(selector, KeySelector):
                if selector.key == pointer_part:
                    solvable = True
            elif isinstance(selector, WildcardSelector):
                # '*' matches any single part.
                solvable = True
            elif isinstance(selector, KeysSelector):
                pass  # not analysed — see NOTE in docstring
            elif isinstance(selector, SliceSelector):
                pass  # not analysed — see NOTE in docstring
            elif isinstance(selector, SingularQuerySelector):
                # An embedded singular query may span several pointer parts:
                # grow a suffix of the pointer leftwards while it keeps
                # matching the sub-query, then consume the matched span.
                query = selector.query
                sub_pointer_starting_index = pointer_index
                inner_solvable = False
                while sub_pointer_starting_index >= 0:
                    sub_pointer = JSONPointer.from_parts(pointer.parts[sub_pointer_starting_index:pointer_index + 1])
                    if can_pointer_match_path(sub_pointer, query):  # recursive check
                        sub_pointer_starting_index -= 1
                        inner_solvable = True
                    else:
                        # Step back to the last suffix that did match.
                        sub_pointer_starting_index += 1
                        break
                if inner_solvable:
                    # Skip the pointer parts consumed by the sub-query; this is
                    # equivalent to `pointer_index = sub_pointer_starting_index`.
                    # NOTE(review): the outer `pointer_index -= 1` below then
                    # consumes one more part — verify this is intended when the
                    # sub-query matched the entire remaining pointer.
                    pointer_index -= pointer_index - sub_pointer_starting_index
                    solvable = True
            elif isinstance(selector, Filter):
                pass  # not analysed — see NOTE in docstring
            elif isinstance(selector, KeysFilter):
                pass  # not analysed — see NOTE in docstring
        if not solvable:
            return False
        pointer_index -= 1
        path_index -= 1

    # Match only if both sequences were exhausted together (both are -1).
    return pointer_index == path_index
65
+
66
+
67
class AutomatedOperationProducer(BaseModel, abc.ABC):
    """Base class for automated listeners.

    A producer declares a list of trigger JSONPaths; the execution context
    calls :meth:`run` whenever a previous operation adds something at a
    location matching one of those triggers, and the returned operations are
    queued for execution.

    Subclasses register themselves by qualified name in ``_registry`` so that
    serialized producers (which carry a ``producer_type`` discriminator) can
    be deserialized back into the right concrete class.
    """

    model_config = ConfigDict(
        arbitrary_types_allowed=True  # JSONPath is not a pydantic-native type
    )

    # Paths whose modification should cause this producer to run.
    triggers: list[JSONPath] = Field(default_factory=list)

    @computed_field
    @property
    def producer_type(self) -> str:
        """Discriminator stored in dumps so deserialization can pick the subclass."""
        return self.__class__.__qualname__

    # Qualified class name -> concrete subclass; populated by __init_subclass__.
    _registry: ClassVar[dict[str, type[Self]]] = {}

    def __init_subclass__(cls, **kwargs):
        # Fix: forward to super() so cooperative __init_subclass__ hooks of
        # other base classes (including pydantic's) still run.
        super().__init_subclass__(**kwargs)
        cls._registry[cls.__qualname__] = cls

    @model_validator(mode='before')
    @classmethod
    def validate(
        cls,
        value: Any,
        *,
        strict: bool | None = None,
        from_attributes: bool | None = None,
        context: Any | None = None,
    ):
        """Dispatch a serialized dict to the subclass named by 'producer_type'.

        Fix: raises ValueError (which pydantic wraps into a ValidationError)
        instead of instantiating ValidationError directly — pydantic v2's
        ValidationError cannot be constructed from a plain message, so the
        original `raise ValidationError(...)` would itself fail.
        """
        if isinstance(value, AutomatedOperationProducer):
            return value
        elif isinstance(value, dict):
            if cls is not AutomatedOperationProducer:
                # Concrete subclass: let pydantic validate the fields as-is.
                return value
            # Fix: work on a copy so the caller's dict is not mutated by pop().
            data = dict(value)
            try:
                producer_type = data.pop('producer_type')
            except KeyError:
                raise ValueError("Cannot deserialize automated producer")
            try:
                producer_cls = cls._registry[producer_type]
            except KeyError:
                raise ValueError(f"Unknown producer_type: {producer_type!r}")
            return producer_cls(**data)
        raise ValueError("Cannot deserialize automated producer")

    @abc.abstractmethod
    def run(
        self,
        document: Any,
        modified_pointers: list[JSONPointer]
    ) -> list[Operation]:
        """Produce follow-up operations for *document*, given the pointers
        that were just added by the previous operation."""
        ...

    @field_validator('triggers', mode='before')
    @classmethod
    def parse_triggers(cls, value_list):
        """Accept triggers either as JSONPath objects or as path strings."""
        return [
            p
            if isinstance(p, JSONPath) else
            make_jsonpath(p)
            for p in value_list
        ]

    @field_serializer('triggers', when_used='always')
    def serialize_triggers(self, path_list: list[JSONPath]):
        """Dump triggers as their canonical string form."""
        return [
            str(p)
            for p in path_list
        ]
127
+
128
+
129
+ # class OperationExecutionDTO(BaseModel):
130
+ # model_config = ConfigDict(
131
+ # revalidate_instances='never'
132
+ # )
133
+ #
134
+ # operations: list[Operation]
135
+ # # listeners: dict[str, list[str]]
136
+ # producers: list[AutomatedOperationProducer]
137
+ #
138
+ # @model_validator(mode='after')
139
+ # @classmethod
140
+ # def validate(cls, value: Any):
141
+ # if isinstance(value, cls):
142
+ # return value
143
+ # return cls(**value)
144
+ #
145
+ # # @field_validator('operations', mode='before')
146
+ # # @classmethod
147
+ # # def _serialize_operations(cls, operation_list: list[Operation]):
148
+ # # return [
149
+ # # op.model_dump()
150
+ # # for op in operation_list
151
+ # # ]
152
+ #
153
+ # @field_serializer('operations')
154
+ # def serialize_operations(self, operations: list[Operation]) -> list[dict]:
155
+ # return [
156
+ # op.model_dump() for op in operations
157
+ # ]
158
+ #
159
+ # @field_serializer('producers')
160
+ # def serialize_producers(self, producers: list[AutomatedOperationProducer]) -> list[dict]:
161
+ # return [
162
+ # prod.model_dump() for prod in producers
163
+ # ]
164
+
165
+
166
class OperationExecutionContext:
    """Orchestrates a queue of patch operations together with trigger-based
    listeners (:class:`AutomatedOperationProducer`) that may enqueue
    follow-up operations whenever a watched path is modified."""

    def __init__(self):
        # Trigger JSONPath -> producers listening on it.  Relies on JSONPath
        # being hashable; paths built via make_jsonpath are normalized,
        # presumably so equal paths collide as keys — TODO confirm.
        self.listeners: dict[JSONPath, list[AutomatedOperationProducer]] = collections.defaultdict(list)
        # FIFO queue of pending operations; producers push to the FRONT.
        self.operations: collections.deque[Operation] = collections.deque()

    def register(self, producer: AutomatedOperationProducer):
        """Subscribe *producer* under each of its trigger paths."""
        for trigger in producer.triggers:
            self.listeners[trigger].append(producer)

    def add_custom_operations(self, operations: list[Operation]):
        """Append user-supplied operations to the back of the queue."""
        self.operations.extend(operations)

    def add_custom_operation(self, operation: Operation):
        """Append a single user-supplied operation to the back of the queue."""
        self.operations.append(operation)

    def run(self, document: Any) -> Any:
        """Apply all queued operations to *document* and return the result.

        After each operation, every registered trigger is checked against the
        pointers the operation added.  Matching producers run immediately and
        their operations are pushed to the front of the queue (order preserved
        via ``extendleft(reversed(...))``), so automated follow-ups execute
        before the remaining user operations.
        """

        while self.operations:
            operation = self.operations.popleft()
            change_tracker = ChangeTracker()
            # apply_rfc records additions/deletions into change_tracker and
            # returns the (possibly replaced) document.
            document = operation.apply_rfc(document, change_tracker)
            for trigger in self.listeners.keys():
                # TODO we might encounter scenarios where we would want to also trigger on removal
                relevant_pointers = [p for p in change_tracker.additions if can_pointer_match_path(p, trigger)]
                if relevant_pointers:
                    for producer in self.listeners[trigger]:
                        added_operations = producer.run(document, relevant_pointers)
                        self.operations.extendleft(reversed(added_operations))
        return document

    def serialize(self) -> dict:
        """Snapshot the pending operations and registered producers as a dict.

        Trigger paths are not stored separately: they round-trip through each
        producer's own ``triggers`` field.

        NOTE(review): producers are de-duplicated by class name, so two
        registered instances of the same producer class collapse into one
        entry here — confirm that one-instance-per-class is intended.
        """
        flat_producer_lookup = {
            producer.__class__.__name__: producer
            for producers in self.listeners.values()
            for producer in producers
        }
        return dict(
            operations=[
                op.model_dump()
                for op in self.operations
            ],
            producers=[
                producer.model_dump()
                for producer in flat_producer_lookup.values()
            ]
        )
        # return OperationExecutionDTO(
        #     operations=list(self.operations),
        #     producers=list(flat_producer_lookup.values())
        #     # listeners={
        #     #     str(path): [
        #     #         producer.__class__.__name__
        #     #         for producer in producers
        #     #     ]
        #     #     for path, producers in self.listeners.items()
        #     # }
        # )

    @classmethod
    def deserialize(cls, data: Any) -> Self:
        """Rebuild a context from :meth:`serialize` output (dict or JSON str).

        Relies on ``Operation.validate`` and the custom
        ``AutomatedOperationProducer.validate`` override to reconstruct the
        concrete subclasses from their dumped dicts.
        """
        if isinstance(data, str):
            data = json.loads(data)

        operations = [
            Operation.validate(op)
            for op in data['operations']
        ]
        producers = [
            AutomatedOperationProducer.validate(producer)
            for producer in data['producers']
        ]

        self = cls()
        self.add_custom_operations(operations)
        for producer in producers:
            self.register(producer)
        return self