jsonpatch-trigger 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- jsonpatch_trigger/__init__.py +8 -0
- jsonpatch_trigger/common.py +50 -0
- jsonpatch_trigger/compat.py +177 -0
- jsonpatch_trigger/execution.py +243 -0
- jsonpatch_trigger/operations.py +466 -0
- jsonpatch_trigger/parents.py +61 -0
- jsonpatch_trigger/preconditions.py +58 -0
- jsonpatch_trigger/tracking.py +131 -0
- jsonpatch_trigger-0.1.0.dist-info/METADATA +37 -0
- jsonpatch_trigger-0.1.0.dist-info/RECORD +12 -0
- jsonpatch_trigger-0.1.0.dist-info/WHEEL +5 -0
- jsonpatch_trigger-0.1.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
"""Public API of the jsonpatch_trigger package."""

from .common import json_type, make_jsonpath, normalize_jsonpath
from .execution import OperationExecutionContext, AutomatedOperationProducer
from .operations import Operation, AddOperation, MoveOperation, CopyOperation, CompoundOperation, RemoveOperation

# Fix: the interpreter/tooling convention for declaring a module's public
# names is `__all__`; `__ALL__` has no special meaning and silently does
# nothing for `from ... import *` or linters.
__all__ = ["json_type", "make_jsonpath", "normalize_jsonpath",
           "OperationExecutionContext", "AutomatedOperationProducer",
           "Operation", "AddOperation", "MoveOperation", "CopyOperation", "CompoundOperation", "RemoveOperation"]
|
|
8
|
+
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
from typing import Sequence, Any, Mapping
|
|
2
|
+
|
|
3
|
+
import jsonpath
|
|
4
|
+
from jsonpath import JSONPath
|
|
5
|
+
from pydantic import TypeAdapter
|
|
6
|
+
from pydantic_core import core_schema
|
|
7
|
+
|
|
8
|
+
# Alias covering every shape a JSON value can deserialize to: scalars plus
# (possibly nested) arrays and objects.
json_type = str | int | float | bool | None | Sequence[Any] | Mapping[str, Any]
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def make_jsonpath(path_string: str) -> JSONPath:
    """Compile *path_string*, then re-compile its canonical string form so
    the returned JSONPath object is normalized."""
    first_pass = jsonpath.compile(path_string)
    return jsonpath.compile(str(first_pass))
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def normalize_jsonpath(path: JSONPath) -> JSONPath:
    """Round-trip *path* through its string representation to obtain a
    canonical JSONPath instance."""
    canonical_text = str(path)
    return jsonpath.compile(canonical_text)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def serialize_jsonpath(path: JSONPath) -> str:
    """Return the canonical string representation of *path*."""
    return f"{path!s}"
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
#
|
|
24
|
+
#
|
|
25
|
+
# class ThirdPartyPydanticAdapter:
|
|
26
|
+
# @staticmethod
|
|
27
|
+
# def __get_pydantic_core_schema__(source_type, handler: GetCoreSchemaHandler):
|
|
28
|
+
# return core_schema.no_info_wrap_validator_function(
|
|
29
|
+
# make_jsonpath,
|
|
30
|
+
# core_schema.str_schema(),
|
|
31
|
+
# serialization=core_schema.plain_serializer_function_ser_schema(
|
|
32
|
+
# serialize_jsonpath,
|
|
33
|
+
# return_schema=core_schema.str_schema(),
|
|
34
|
+
# )
|
|
35
|
+
# )
|
|
36
|
+
#
|
|
37
|
+
#
|
|
38
|
+
# class JSONPathAdapter(TypeAdapter):
|
|
39
|
+
#
|
|
40
|
+
# @classmethod
|
|
41
|
+
# def __get_pydantic_core_schema__(cls, source_type, handler):
|
|
42
|
+
# return core_schema.chain_schema([
|
|
43
|
+
# core_schema.str_schema(),
|
|
44
|
+
# core_schema.no_info_after_validator_function(
|
|
45
|
+
# make_jsonpath,
|
|
46
|
+
# serialization=core_schema.plain_serializer_function_ser_schema(
|
|
47
|
+
# serialize_jsonpath
|
|
48
|
+
# )
|
|
49
|
+
# )
|
|
50
|
+
# ])
|
|
@@ -0,0 +1,177 @@
|
|
|
1
|
+
from typing import (
|
|
2
|
+
Any,
|
|
3
|
+
Callable,
|
|
4
|
+
)
|
|
5
|
+
|
|
6
|
+
from pydantic_core import core_schema
|
|
7
|
+
from typing_extensions import Annotated
|
|
8
|
+
|
|
9
|
+
from pydantic import (
|
|
10
|
+
BaseModel,
|
|
11
|
+
GetJsonSchemaHandler,
|
|
12
|
+
ValidationError,
|
|
13
|
+
)
|
|
14
|
+
from pydantic.json_schema import JsonSchemaValue
|
|
15
|
+
|
|
16
|
+
from jsonpath import JSONPath, JSONPointer
|
|
17
|
+
|
|
18
|
+
from jsonpatch_trigger import make_jsonpath
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class _JSONPointerPydanticAnnotation:
    """Pydantic annotation helper that lets `jsonpath.JSONPointer` be used
    as a model field type: validated from strings, serialized back to
    strings."""

    @classmethod
    def __get_pydantic_core_schema__(
        cls,
        _source_type: Any,
        _handler: Callable[[Any], core_schema.CoreSchema],
    ) -> core_schema.CoreSchema:
        """
        Return a pydantic_core.CoreSchema that behaves in the following ways:

        * strings are parsed into `JSONPointer` instances
        * existing `JSONPointer` instances are accepted unchanged
        * nothing else passes validation
        * serialization always emits the pointer's string form

        (Fix: the previous docstring was copy-pasted from the pydantic
        custom-type example and described int/`ThirdPartyType` behavior
        that this class never had.)
        """
        from_string_schema = core_schema.chain_schema(
            [
                core_schema.str_schema(),
                # JSONPointer's constructor accepts the pointer string.
                core_schema.no_info_plain_validator_function(JSONPointer),
            ]
        )

        return core_schema.json_or_python_schema(
            json_schema=from_string_schema,
            python_schema=core_schema.union_schema(
                [
                    # check if it's an instance first before doing any further work
                    core_schema.is_instance_schema(JSONPointer),
                    from_string_schema,
                ]
            ),
            serialization=core_schema.plain_serializer_function_ser_schema(
                str  # serialize by plain string conversion
            ),
        )

    @classmethod
    def __get_pydantic_json_schema__(
        cls, _core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
    ) -> JsonSchemaValue:
        # Use the same schema that would be used for `str`
        return handler(core_schema.str_schema())
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
# `Annotated` wrapper: use as the field annotation on `BaseModel`s to get
# string <-> JSONPointer conversion (see _JSONPointerPydanticAnnotation).
PydanticJSONPointer = Annotated[
    JSONPointer, _JSONPointerPydanticAnnotation
]
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
class _JSONPathPydanticAnnotation:
    """Pydantic annotation helper that lets `jsonpath.JSONPath` be used as
    a model field type: validated from strings via `make_jsonpath`,
    serialized back to strings."""

    @classmethod
    def __get_pydantic_core_schema__(
        cls,
        _source_type: Any,
        _handler: Callable[[Any], core_schema.CoreSchema],
    ) -> core_schema.CoreSchema:
        """
        Return a pydantic_core.CoreSchema that behaves in the following ways:

        * strings are compiled into normalized `JSONPath` instances
        * existing `JSONPath` instances are accepted unchanged
        * nothing else passes validation
        * serialization always emits the path's string form

        (Fix: the previous docstring was copy-pasted from the pydantic
        custom-type example and described int/`ThirdPartyType` behavior
        that this class never had.)
        """
        from_string_schema = core_schema.chain_schema(
            [
                core_schema.str_schema(),
                # make_jsonpath compiles and normalizes the path string.
                core_schema.no_info_plain_validator_function(make_jsonpath),
            ]
        )

        return core_schema.json_or_python_schema(
            json_schema=from_string_schema,
            python_schema=core_schema.union_schema(
                [
                    # check if it's an instance first before doing any further work
                    core_schema.is_instance_schema(JSONPath),
                    from_string_schema,
                ]
            ),
            serialization=core_schema.plain_serializer_function_ser_schema(
                str  # serialize by plain string conversion
            ),
        )

    @classmethod
    def __get_pydantic_json_schema__(
        cls, _core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
    ) -> JsonSchemaValue:
        # Use the same schema that would be used for `str`
        return handler(core_schema.str_schema())
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
# `Annotated` wrapper: use as the field annotation on `BaseModel`s to get
# string <-> JSONPath conversion (see _JSONPathPydanticAnnotation).
PydanticJSONPath = Annotated[
    JSONPath, _JSONPathPydanticAnnotation
]
|
|
130
|
+
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
#
|
|
134
|
+
# # Create a model class that uses this annotation as a field
|
|
135
|
+
# class Model(BaseModel):
|
|
136
|
+
# third_party_type: PydanticJSONPath
|
|
137
|
+
#
|
|
138
|
+
#
|
|
139
|
+
# # Demonstrate that this field is handled correctly, that ints are parsed into `ThirdPartyType`, and that
|
|
140
|
+
# # these instances are also "dumped" directly into ints as expected.
|
|
141
|
+
# m_int = Model(third_party_type=1)
|
|
142
|
+
# assert isinstance(m_int.third_party_type, ThirdPartyType)
|
|
143
|
+
# assert m_int.third_party_type.x == 1
|
|
144
|
+
# assert m_int.model_dump() == {'third_party_type': 1}
|
|
145
|
+
#
|
|
146
|
+
# # Do the same thing where an instance of ThirdPartyType is passed in
|
|
147
|
+
# instance = ThirdPartyType()
|
|
148
|
+
# assert instance.x == 0
|
|
149
|
+
# instance.x = 10
|
|
150
|
+
#
|
|
151
|
+
# m_instance = Model(third_party_type=instance)
|
|
152
|
+
# assert isinstance(m_instance.third_party_type, ThirdPartyType)
|
|
153
|
+
# assert m_instance.third_party_type.x == 10
|
|
154
|
+
# assert m_instance.model_dump() == {'third_party_type': 10}
|
|
155
|
+
#
|
|
156
|
+
# # Demonstrate that validation errors are raised as expected for invalid inputs
|
|
157
|
+
# try:
|
|
158
|
+
# Model(third_party_type='a')
|
|
159
|
+
# except ValidationError as e:
|
|
160
|
+
# print(e)
|
|
161
|
+
# """
|
|
162
|
+
# 2 validation errors for Model
|
|
163
|
+
# third_party_type.is-instance[ThirdPartyType]
|
|
164
|
+
# Input should be an instance of ThirdPartyType [type=is_instance_of, input_value='a', input_type=str]
|
|
165
|
+
# third_party_type.chain[int,function-plain[validate_from_int()]]
|
|
166
|
+
# Input should be a valid integer, unable to parse string as an integer [type=int_parsing, input_value='a', input_type=str]
|
|
167
|
+
# """
|
|
168
|
+
#
|
|
169
|
+
#
|
|
170
|
+
# assert Model.model_json_schema() == {
|
|
171
|
+
# 'properties': {
|
|
172
|
+
# 'third_party_type': {'title': 'Third Party Type', 'type': 'integer'}
|
|
173
|
+
# },
|
|
174
|
+
# 'required': ['third_party_type'],
|
|
175
|
+
# 'title': 'Model',
|
|
176
|
+
# 'type': 'object',
|
|
177
|
+
# }
|
|
@@ -0,0 +1,243 @@
|
|
|
1
|
+
import abc
|
|
2
|
+
import collections
|
|
3
|
+
import json
|
|
4
|
+
from typing import Any, Self, ClassVar
|
|
5
|
+
|
|
6
|
+
from jsonpath import JSONPath, JSONPointer
|
|
7
|
+
from jsonpath.segments import JSONPathSegment
|
|
8
|
+
from jsonpath.selectors import NameSelector, KeySelector, WildcardSelector, KeysSelector, SliceSelector, \
|
|
9
|
+
SingularQuerySelector, Filter, KeysFilter
|
|
10
|
+
from pydantic import BaseModel, ConfigDict, Field, field_validator, field_serializer, computed_field, model_validator, \
|
|
11
|
+
ValidationError
|
|
12
|
+
|
|
13
|
+
from jsonpatch_trigger import make_jsonpath
|
|
14
|
+
from jsonpatch_trigger.operations import Operation, Operation
|
|
15
|
+
from jsonpatch_trigger.tracking import ChangeTracker
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def can_pointer_match_path(pointer: JSONPointer, path: JSONPath) -> bool:
    """Heuristically decide whether *pointer* could address a location
    selected by *path*, walking both from their right-hand ends.

    Each path segment must be "solvable" by the pointer part aligned with
    it; the pair is accepted only when both sequences are exhausted at the
    same time.
    """
    # Walk pointer parts and path segments right-to-left in lockstep.
    pointer_index = len(pointer.parts) - 1
    path_index = len(path.segments) - 1

    while pointer_index >= 0 and path_index >= 0:
        pointer_part: str | int = pointer.parts[pointer_index]
        segment: JSONPathSegment = path.segments[path_index]

        # A segment is solvable if at least one of its selectors can
        # account for the aligned pointer part.
        solvable = False
        for selector in segment.selectors:
            if isinstance(selector, NameSelector):
                if selector.name == pointer_part:
                    solvable = True
            elif isinstance(selector, KeySelector):
                if selector.key == pointer_part:
                    solvable = True
            elif isinstance(selector, WildcardSelector):
                # Wildcard matches any single pointer part.
                solvable = True
            elif isinstance(selector, KeysSelector):
                # TODO: not handled yet -- never marks the segment solvable.
                pass
            elif isinstance(selector, SliceSelector):
                # TODO: not handled yet -- never marks the segment solvable.
                pass
            elif isinstance(selector, SingularQuerySelector):
                # A nested query may consume several pointer parts: grow a
                # sub-pointer leftwards for as long as it keeps matching
                # the embedded query (recursive call).
                query = selector.query
                sub_pointer_starting_index = pointer_index
                inner_solvable = False
                while sub_pointer_starting_index >= 0:
                    sub_pointer = JSONPointer.from_parts(pointer.parts[sub_pointer_starting_index:pointer_index + 1])
                    if can_pointer_match_path(sub_pointer, query):
                        sub_pointer_starting_index -= 1
                        inner_solvable = True
                    else:
                        # Step back to the last index that still matched.
                        sub_pointer_starting_index += 1
                        break
                if inner_solvable:
                    # Skip over the pointer parts consumed by the nested query.
                    pointer_index -= pointer_index - sub_pointer_starting_index
                    solvable = True
            elif isinstance(selector, Filter):
                # TODO: not handled yet -- never marks the segment solvable.
                pass
            elif isinstance(selector, KeysFilter):
                # TODO: not handled yet -- never marks the segment solvable.
                pass
        if not solvable:
            return False
        pointer_index -= 1
        path_index -= 1

    # Match only if pointer parts and path segments ran out simultaneously
    # (both indexes ended at the same value, normally -1).
    return pointer_index == path_index
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
class AutomatedOperationProducer(BaseModel, abc.ABC):
    """Base class for producers that emit follow-up Operations when the
    document changes under one of their trigger paths.

    Concrete subclasses are recorded in ``_registry`` (keyed by qualified
    name) so serialized producers can be rebuilt polymorphically.
    """

    model_config = ConfigDict(
        arbitrary_types_allowed=True
    )

    # JSONPaths whose modification should cause this producer to run.
    triggers: list[JSONPath] = Field(default_factory=list)

    @computed_field
    @property
    def producer_type(self) -> str:
        """Discriminator persisted with the model; used by `validate` to
        pick the concrete subclass on deserialization."""
        return self.__class__.__qualname__

    # Maps qualified class names to concrete subclasses (filled by
    # __init_subclass__).
    _registry: ClassVar[dict[str, type[Self]]] = {}

    def __init_subclass__(cls, **kwargs):
        # Fix: forward to super() so cooperative subclass hooks (including
        # pydantic's own) keep working.
        super().__init_subclass__(**kwargs)
        cls._registry[cls.__qualname__] = cls

    @model_validator(mode='before')
    @classmethod
    def validate(
        cls,
        value: Any,
        *,
        strict: bool | None = None,
        from_attributes: bool | None = None,
        context: Any | None = None,
    ):
        """Polymorphic validation: dict payloads are dispatched to the
        subclass named by their 'producer_type' entry; existing instances
        pass through unchanged."""
        if isinstance(value, AutomatedOperationProducer):
            return value
        elif isinstance(value, dict):
            if cls is not AutomatedOperationProducer:
                # Subclass validation: let pydantic process the dict normally.
                return value
            # Fix: copy before popping so the caller's dict is not mutated.
            payload = dict(value)
            producer_type = payload.pop('producer_type')
            return cls._registry[producer_type](**payload)
        # Fix: pydantic v2's ValidationError has no public constructor, so
        # `raise ValidationError(msg)` would itself fail with a TypeError.
        # Raise ValueError instead; pydantic wraps it into a ValidationError.
        raise ValueError("Cannot deserialize automated producer")

    @abc.abstractmethod
    def run(
        self,
        document: Any,
        modified_pointers: list[JSONPointer]
    ) -> list[Operation]:
        """Produce follow-up operations for the changed locations in
        *modified_pointers*."""
        ...

    @field_validator('triggers', mode='before')
    @classmethod
    def parse_triggers(cls, value_list):
        """Accept trigger entries either as JSONPath objects or as path
        strings (compiled via make_jsonpath)."""
        return [
            p
            if isinstance(p, JSONPath) else
            make_jsonpath(p)
            for p in value_list
        ]

    @field_serializer('triggers', when_used='always')
    def serialize_triggers(self, path_list: list[JSONPath]):
        """Serialize trigger paths to their canonical string form."""
        return [
            str(p)
            for p in path_list
        ]
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
# class OperationExecutionDTO(BaseModel):
|
|
130
|
+
# model_config = ConfigDict(
|
|
131
|
+
# revalidate_instances='never'
|
|
132
|
+
# )
|
|
133
|
+
#
|
|
134
|
+
# operations: list[Operation]
|
|
135
|
+
# # listeners: dict[str, list[str]]
|
|
136
|
+
# producers: list[AutomatedOperationProducer]
|
|
137
|
+
#
|
|
138
|
+
# @model_validator(mode='after')
|
|
139
|
+
# @classmethod
|
|
140
|
+
# def validate(cls, value: Any):
|
|
141
|
+
# if isinstance(value, cls):
|
|
142
|
+
# return value
|
|
143
|
+
# return cls(**value)
|
|
144
|
+
#
|
|
145
|
+
# # @field_validator('operations', mode='before')
|
|
146
|
+
# # @classmethod
|
|
147
|
+
# # def _serialize_operations(cls, operation_list: list[Operation]):
|
|
148
|
+
# # return [
|
|
149
|
+
# # op.model_dump()
|
|
150
|
+
# # for op in operation_list
|
|
151
|
+
# # ]
|
|
152
|
+
#
|
|
153
|
+
# @field_serializer('operations')
|
|
154
|
+
# def serialize_operations(self, operations: list[Operation]) -> list[dict]:
|
|
155
|
+
# return [
|
|
156
|
+
# op.model_dump() for op in operations
|
|
157
|
+
# ]
|
|
158
|
+
#
|
|
159
|
+
# @field_serializer('producers')
|
|
160
|
+
# def serialize_producers(self, producers: list[AutomatedOperationProducer]) -> list[dict]:
|
|
161
|
+
# return [
|
|
162
|
+
# prod.model_dump() for prod in producers
|
|
163
|
+
# ]
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
class OperationExecutionContext:
    """Executes queued Operations against a document, fanning out follow-up
    operations from registered AutomatedOperationProducers whose trigger
    paths match locations an operation added values under."""

    def __init__(self):
        # trigger JSONPath -> producers listening for changes under it
        self.listeners: dict[JSONPath, list[AutomatedOperationProducer]] = collections.defaultdict(list)
        # FIFO work queue of operations still to apply
        self.operations: collections.deque[Operation] = collections.deque()

    def register(self, producer: AutomatedOperationProducer):
        """Subscribe *producer* under each of its trigger paths."""
        for trigger in producer.triggers:
            self.listeners[trigger].append(producer)

    def add_custom_operations(self, operations: list[Operation]):
        """Append several operations to the end of the work queue."""
        self.operations.extend(operations)

    def add_custom_operation(self, operation: Operation):
        """Append a single operation to the end of the work queue."""
        self.operations.append(operation)

    def run(self, document: Any) -> Any:
        """Drain the queue, applying each operation and scheduling
        producer-generated follow-ups at the FRONT of the queue (depth-first).

        Returns the resulting document (operations may replace it)."""

        while self.operations:
            operation = self.operations.popleft()
            change_tracker = ChangeTracker()
            document = operation.apply_rfc(document, change_tracker)
            for trigger in self.listeners.keys():
                # TODO we might encounter scenarios where we would want to also trigger on removal
                relevant_pointers = [p for p in change_tracker.additions if can_pointer_match_path(p, trigger)]
                if relevant_pointers:
                    for producer in self.listeners[trigger]:
                        added_operations = producer.run(document, relevant_pointers)
                        # reversed + extendleft keeps the producer's own order
                        # while placing its operations ahead of the rest.
                        self.operations.extendleft(reversed(added_operations))
        return document

    def serialize(self) -> dict:
        """Dump the pending operations and the registered producers to
        plain dicts (JSON-serializable).

        NOTE(review): producers are deduplicated by class name here, so at
        most one instance per class survives serialization -- confirm that
        multiple differently-configured instances of one class are not
        expected.
        """
        flat_producer_lookup = {
            producer.__class__.__name__: producer
            for producers in self.listeners.values()
            for producer in producers
        }
        return dict(
            operations=[
                op.model_dump()
                for op in self.operations
            ],
            producers=[
                producer.model_dump()
                for producer in flat_producer_lookup.values()
            ]
        )
        # return OperationExecutionDTO(
        #     operations=list(self.operations),
        #     producers=list(flat_producer_lookup.values())
        #     # listeners={
        #     #     str(path): [
        #     #         producer.__class__.__name__
        #     #         for producer in producers
        #     #     ]
        #     #     for path, producers in self.listeners.items()
        #     # }
        # )

    @classmethod
    def deserialize(cls, data: Any) -> Self:
        """Rebuild a context from `serialize()` output (a dict or a JSON
        string): revalidates operations/producers polymorphically and
        re-registers each producer under its triggers."""
        if isinstance(data, str):
            data = json.loads(data)

        operations = [
            Operation.validate(op)
            for op in data['operations']
        ]
        producers = [
            AutomatedOperationProducer.validate(producer)
            for producer in data['producers']
        ]

        self = cls()
        self.add_custom_operations(operations)
        for producer in producers:
            self.register(producer)
        return self
|
|
@@ -0,0 +1,466 @@
|
|
|
1
|
+
import abc
|
|
2
|
+
from typing import Any, Annotated, Union, ClassVar, Self
|
|
3
|
+
|
|
4
|
+
from jsonpath import JSONPointer, JSONPath, JSONPathMatch, JSONPointerIndexError, JSONPointerKeyError
|
|
5
|
+
from jsonpath.selectors import NameSelector, JSONPathSelector, IndexSelector
|
|
6
|
+
from pydantic import PrivateAttr, Field, ConfigDict, BaseModel, Discriminator, Tag, computed_field, model_validator, \
|
|
7
|
+
ValidationError, model_serializer, field_serializer
|
|
8
|
+
|
|
9
|
+
from jsonpatch_trigger import json_type
|
|
10
|
+
from jsonpatch_trigger.compat import PydanticJSONPath
|
|
11
|
+
from jsonpatch_trigger.parents import make_parent_key_pairs
|
|
12
|
+
from jsonpatch_trigger.preconditions import Precondition, IsArrayOrObjectPreconditionFunction, ExistsPreconditionFunction, \
|
|
13
|
+
DoesNotExistPreconditionFunction
|
|
14
|
+
from jsonpatch_trigger.tracking import ChangeTracker, TrackingJSONPatch, RemovalRegistrationMixin, \
|
|
15
|
+
CopyRegistrationMixin, AddRegistrationMixin, MoveRegistrationMixin
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class OperationBehavior(abc.ABC):
    """Marker base class for operation behaviors; currently has no members."""
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class Operation(BaseModel, abc.ABC):
    """Abstract base for patch operations addressed by a JSONPath `locator`.

    Concrete subclasses are recorded in ``_registry`` (keyed by qualified
    name) so serialized operations can be rebuilt polymorphically via
    ``Operation.validate``.
    """
    model_config = ConfigDict(
        arbitrary_types_allowed=True,
        revalidate_instances='never'
    )
    # Preconditions installed by the operation itself (private, not serialized).
    _preconditions: list[Precondition] = PrivateAttr(default_factory=list)
    # Additional preconditions supplied by the user (serialized).
    user_preconditions: list[Precondition] = Field(default_factory=list)

    # JSONPath selecting the location(s) this operation acts on.
    locator: PydanticJSONPath

    @computed_field
    @property
    def operation_type(self) -> str:
        """Discriminator persisted with the model; used by `validate` to
        pick the concrete subclass on deserialization."""
        return self.__class__.__qualname__

    # Maps qualified class names to concrete subclasses (filled by
    # __init_subclass__).
    _registry: ClassVar[dict[str, type[Self]]] = {}

    def __init_subclass__(cls, **kwargs):
        # NOTE(review): does not forward to super().__init_subclass__ --
        # confirm cooperative subclassing is not required here.
        cls._registry[cls.__qualname__] = cls

    @model_validator(mode='before')
    @classmethod
    def validate(
        cls,
        value: Any,
        *,
        strict: bool | None = None,
        from_attributes: bool | None = None,
        context: Any | None = None,
    ):
        """Polymorphic validation: dict payloads are dispatched to the
        subclass named by their 'operation_type' entry; existing instances
        pass through.

        NOTE(review): pops 'operation_type' out of the caller's dict
        (mutating it), and pydantic v2's ValidationError has no public
        constructor, so the final raise would itself fail -- confirm.
        """
        if isinstance(value, Operation):
            return value
        elif isinstance(value, dict):
            if cls is not Operation:
                return value
            operation_type = value.pop('operation_type')
            return cls._registry[operation_type](**value)
        raise ValidationError("Cannot deserialize operation")

    # @model_serializer(mode='plain')
    # def serialize(self) -> dict:
    #     cls = self.__class__
    #     return cls.model_dump(self)

    @property
    def preconditions(self) -> list[Precondition]:
        """All preconditions: operation-installed plus user-supplied."""
        return self._preconditions + self.user_preconditions

    # @property
    # def parent_locators(self) -> list[JSONPath]:
    #     return make_parent_key_pairs(self.locator)

    # @property
    # def key(self) -> int | str | None:
    #     return self.locator.segments[-1].selectors[0].name

    @staticmethod
    def iterate_matches(
        path: JSONPath,
        document: Any,
        none_allowed: bool = False,
        only_resolvable_pointers: bool = False
    ) -> list[tuple[JSONPath, JSONPathMatch, JSONPathSelector, JSONPointer]]:
        """Resolve *path* against *document* via its parent paths.

        For each (parent path, final selector) pair, finds every match of
        the parent and derives the concrete JSONPointer by joining the
        final selector's name/index. Index -1 becomes the RFC 6901 '-'
        (append) token; other negative indexes are rejected.

        If *only_resolvable_pointers* is set, pointers that do not resolve
        in *document* are skipped.
        """
        results = []

        parent_key_pairs = make_parent_key_pairs(path)
        for parent_path, selector in parent_key_pairs:
            for parent_match in parent_path.finditer(document):
                pointer = JSONPointer.from_match(parent_match)
                if isinstance(selector, NameSelector):
                    pointer = pointer.join(selector.name)
                elif isinstance(selector, IndexSelector):
                    if selector.index == -1:
                        # RFC 6901 append token
                        pointer = pointer.join('-')
                    elif selector.index < 0:
                        raise RuntimeError('Cannot use negative indexes other than -1 for json pointers')
                    else:
                        pointer = pointer.join(str(selector.index))
                elif none_allowed and selector is None:
                    # The parent match itself is the target location.
                    pass
                else:
                    raise NotImplementedError('Selector type not supported')
                if only_resolvable_pointers:
                    try:
                        pointer.resolve(document)
                    except (JSONPointerIndexError, JSONPointerKeyError):
                        continue
                results.append((parent_path, parent_match, selector, pointer))
        return results

    # def apply(self, onto: Any) -> list[JSONPointer]:
    #     if not self.test_preconditions(onto):
    #         return []
    #     return self._apply(onto)

    def apply_rfc(self, document: Any, change_tracker: ChangeTracker) -> Any:
        """Apply this operation via an RFC 6902 patch run, recording the
        changes in *change_tracker*. No-op if preconditions fail."""
        if not self.test_preconditions(document):
            return document
        patch_runner = TrackingJSONPatch(change_tracker, self, document)
        self.register_rfc_operations(document, patch_runner)
        document = patch_runner.run(document)
        return document

    # @abc.abstractmethod
    # def _apply(self, onto: Any) -> list[JSONPointer]:
    #     ...

    @abc.abstractmethod
    def register_rfc_operations(self, document: Any, patch_runner: TrackingJSONPatch):
        """Queue the concrete RFC 6902 operations on *patch_runner*."""
        ...

    def test_preconditions(self, onto: Any) -> bool:
        """Return True iff every precondition holds against *onto*.

        NOTE(review): a DoesNotExist precondition with no match returns
        True immediately WITHOUT checking the remaining preconditions, and
        `match.obj != condition.function` compares a document value to a
        precondition-function object -- presumably these functions define
        __eq__ against values; confirm.
        """
        for condition in self.preconditions:
            match = condition.query.match(onto)
            if match is None:
                if isinstance(condition.function, DoesNotExistPreconditionFunction):
                    return True
                if isinstance(condition.function, ExistsPreconditionFunction):
                    return False
                return False

            if match.obj != condition.function:
                return False
        return True
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
# class AddObjectOperation(Operation):
|
|
149
|
+
#
|
|
150
|
+
# object_key: str
|
|
151
|
+
#
|
|
152
|
+
# def __init__(self, **data: Any):
|
|
153
|
+
# super().__init__(**data)
|
|
154
|
+
# self._preconditions = [
|
|
155
|
+
# Precondition(
|
|
156
|
+
# query=self.locator,
|
|
157
|
+
# function=IsObjectPreconditionFunction()
|
|
158
|
+
# ),
|
|
159
|
+
# Precondition(
|
|
160
|
+
# query=jsonpath.compile(f'{self.locator}.{self.object_key}'),
|
|
161
|
+
# function=IsNonePreconditionFunction()
|
|
162
|
+
# )
|
|
163
|
+
# ]
|
|
164
|
+
#
|
|
165
|
+
# def _apply(self, onto: Any) -> list[JSONPointer]:
|
|
166
|
+
# pointers = []
|
|
167
|
+
# for ref in self.locator.finditer(onto):
|
|
168
|
+
# ref.obj[self.object_key] = {}
|
|
169
|
+
# pointers.append(JSONPointer.from_match(ref).join(self.object_key))
|
|
170
|
+
# return pointers
|
|
171
|
+
#
|
|
172
|
+
#
|
|
173
|
+
# class InsertScalarOperation(Operation):
|
|
174
|
+
#
|
|
175
|
+
# # key: int | str
|
|
176
|
+
# value: int | float | bool | str | None
|
|
177
|
+
#
|
|
178
|
+
# def __init__(self, /, **data: Any):
|
|
179
|
+
# super().__init__(**data)
|
|
180
|
+
# self._preconditions = [
|
|
181
|
+
# Precondition(
|
|
182
|
+
# query=self.locator,
|
|
183
|
+
# function=IsArrayOrObjectPreconditionFunction()
|
|
184
|
+
# ),
|
|
185
|
+
# Precondition(
|
|
186
|
+
# query=jsonpath.compile(f'{self.locator}.{self.key}'),
|
|
187
|
+
# function=IsNonePreconditionFunction()
|
|
188
|
+
# ),
|
|
189
|
+
# ]
|
|
190
|
+
#
|
|
191
|
+
# def _apply(self, onto: Any) -> list[JSONPointer]:
|
|
192
|
+
# match = self.locator.match(onto)
|
|
193
|
+
# if isinstance(match.obj, MutableMapping):
|
|
194
|
+
# match.obj[self.key] = self.value
|
|
195
|
+
# else:
|
|
196
|
+
# match.obj.insert(self.key, self.value)
|
|
197
|
+
# return [JSONPointer.from_match(match).join(self.key)]
|
|
198
|
+
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
# RFC 6902 compliant operations
|
|
202
|
+
|
|
203
|
+
class AddOperation(Operation, AddRegistrationMixin):
    """RFC 6902 'add': inserts `value` at every location the locator selects."""

    # The JSON value to insert at each matched location.
    value: json_type

    def register_rfc_operations(self, document: Any, patch_runner: TrackingJSONPatch):
        """Queue one RFC 6902 add per pointer derived from the locator."""
        resolved = self.iterate_matches(self.locator, document, none_allowed=True)
        for _parent_path, _parent_match, _selector, pointer in resolved:
            patch_runner.add(pointer, self.value)
|
|
242
|
+
|
|
243
|
+
|
|
244
|
+
|
|
245
|
+
class PointerPairConstraintResolver(BaseModel):
    """Default pairing strategy: requires exactly one pointer on each side
    and yields that single (source, target) pair."""

    def resolve(
        self,
        pointers_a: list[JSONPointer],
        pointers_b: list[JSONPointer]
    ) -> list[tuple[JSONPointer, JSONPointer]]:
        """Pair the pointers; raises RuntimeError unless both sides hold
        exactly one pointer."""
        if (len(pointers_a), len(pointers_b)) != (1, 1):
            raise RuntimeError('Only a single pair is allowed')
        single_a, = pointers_a
        single_b, = pointers_b
        return [(single_a, single_b)]
|
|
254
|
+
|
|
255
|
+
|
|
256
|
+
class OneToManyPointerPairConstraintResolver(PointerPairConstraintResolver):
    """Pairing strategy that fans a single source pointer out over one or
    more target pointers."""

    def resolve(
        self,
        pointers_a: list[JSONPointer],
        pointers_b: list[JSONPointer]
    ) -> list[tuple[JSONPointer, JSONPointer]]:
        """Pair the single source with every target; raises RuntimeError
        when the 1:N shape is violated."""
        if len(pointers_a) != 1 or len(pointers_b) == 0:
            raise RuntimeError('Invalid one to many pointer pairs')
        source = pointers_a[0]
        return [(source, target) for target in pointers_b]
|
|
268
|
+
|
|
269
|
+
|
|
270
|
+
class PairwisePointerPairConstraintResolver(PointerPairConstraintResolver):
    """Pairing strategy that zips two equally sized pointer lists after
    sorting each by its string representation."""

    def resolve(
        self,
        pointers_a: list[JSONPointer],
        pointers_b: list[JSONPointer]
    ) -> list[tuple[JSONPointer, JSONPointer]]:
        """Pair pointers element-wise in canonical (string-sorted) order;
        raises RuntimeError when the lists differ in length."""
        if len(pointers_a) != len(pointers_b):
            raise RuntimeError("Can't handle number of pointers for move")
        ordered_a = sorted(pointers_a, key=str)
        ordered_b = sorted(pointers_b, key=str)
        return list(zip(ordered_a, ordered_b))
|
|
285
|
+
|
|
286
|
+
|
|
287
|
+
class MoveOperation(Operation, MoveRegistrationMixin):
    """Moves the value(s) matched by ``locator`` to the location(s) matched
    by ``target_locator`` as RFC 6902 ``move`` operations.

    How multiple source/target matches are paired up is delegated to
    ``constraint_strategy`` (exactly one pair by default).
    """

    # TODO check what happens if the target pointer is below the source
    #  pointer: will it just insert a copy of the original object, or do
    #  they end up referencing each other?

    target_locator: PydanticJSONPath
    constraint_strategy: PointerPairConstraintResolver = Field(default_factory=PointerPairConstraintResolver)

    def register_rfc_operations(self, document: Any, patch_runner: TrackingJSONPatch):
        """Register one ``move`` per resolved (source, target) pointer pair.

        Source pointers that no longer resolve against *document* are
        skipped: an earlier operation may already have moved or removed
        them, which we do not consider an error.
        """
        source_data_tuple = self.iterate_matches(self.locator, document, none_allowed=True)
        target_data_tuple = self.iterate_matches(self.target_locator, document, none_allowed=True)

        # iterate_matches yields (parent_path, parent_match, selector, pointer)
        source_pointers = [t[3] for t in source_data_tuple]
        target_pointers = [t[3] for t in target_data_tuple]

        for source_pointer, target_pointer in self.constraint_strategy.resolve(source_pointers, target_pointers):
            try:
                source_pointer.resolve(document)
            except (JSONPointerIndexError, JSONPointerKeyError):
                continue  # this can happen, and we don't consider it an error
            patch_runner.move(source_pointer, target_pointer)
|
|
334
|
+
|
|
335
|
+
|
|
336
|
+
|
|
337
|
+
class CopyOperation(Operation, CopyRegistrationMixin):
    """Copies the value matched by ``locator`` to the location(s) matched
    by ``target_locator`` as RFC 6902 ``copy`` operations.

    The default ``constraint_strategy`` pairs exactly one source with one
    or more targets.
    """

    target_locator: PydanticJSONPath
    constraint_strategy: PointerPairConstraintResolver = Field(default_factory=OneToManyPointerPairConstraintResolver)

    def __init__(self, **data: Any):
        super().__init__(**data)
        # Require the copy source to be an array or object before the
        # operation may run.
        self._preconditions = [
            Precondition(
                query=self.locator,
                function=IsArrayOrObjectPreconditionFunction()
            )
        ]

    def register_rfc_operations(self, document: Any, patch_runner: TrackingJSONPatch):
        """Register one ``copy`` per resolved (source, target) pointer pair.

        Only resolvable source pointers are considered, so copies are never
        registered for sources that vanished earlier in the patch.
        """
        source_data_tuple = self.iterate_matches(self.locator, document, none_allowed=True,
                                                 only_resolvable_pointers=True)
        target_data_tuple = self.iterate_matches(self.target_locator, document, none_allowed=True)

        # iterate_matches yields (parent_path, parent_match, selector, pointer)
        source_pointers = [t[3] for t in source_data_tuple]
        target_pointers = [t[3] for t in target_data_tuple]

        for source_pointer, target_pointer in self.constraint_strategy.resolve(source_pointers, target_pointers):
            patch_runner.copy(source_pointer, target_pointer)
|
|
392
|
+
|
|
393
|
+
|
|
394
|
+
class RemoveOperation(Operation, RemovalRegistrationMixin):
    """Removes every value matched by ``locator`` as RFC 6902 ``remove``
    operations."""

    def register_rfc_operations(self, document: Any, patch_runner: TrackingJSONPatch):
        """Register one ``remove`` per match of ``locator`` in *document*."""
        for parent_path, parent_match, selector, pointer in (
                self.iterate_matches(self.locator, document, none_allowed=False)
        ):
            patch_runner.remove(pointer)
|
|
429
|
+
|
|
430
|
+
# End RFC 6902 compliant operations
|
|
431
|
+
|
|
432
|
+
class CompoundOperation(Operation):
    """Groups several operations so they register and apply as one unit,
    in order."""

    inner_operations: list[Operation]

    def register_rfc_operations(self, document: Any, patch_runner: TrackingJSONPatch):
        """Delegate registration to every inner operation, in order."""
        for inner in self.inner_operations:
            inner.register_rfc_operations(document, patch_runner)

    def apply_rfc(self, document: Any, change_tracker: ChangeTracker) -> Any:
        """Apply all inner operations in sequence.

        The whole group is skipped (and the document returned unchanged)
        when this compound's own preconditions do not hold.
        """
        if not self.test_preconditions(document):
            return document
        for inner in self.inner_operations:
            document = inner.apply_rfc(document, change_tracker)
        return document

    @field_serializer('inner_operations')
    def _serialize_inner_operations(self, ops: list[Operation]) -> list[dict]:
        """Serialize the polymorphic inner operations via ``model_dump``."""
        return [operation.model_dump() for operation in ops]
|
|
452
|
+
|
|
453
|
+
|
|
454
|
+
def _get_type_designator(obj: "Operation") -> str:
    """Return the qualified class name of *obj*, used as a discriminator tag."""
    return type(obj).__qualname__
|
|
456
|
+
|
|
457
|
+
# Operation = Annotated[
|
|
458
|
+
# Union[
|
|
459
|
+
# Annotated[AddOperation, Tag('AddOperation')],
|
|
460
|
+
# Annotated[RemoveOperation, Tag('RemoveOperation')],
|
|
461
|
+
# Annotated[CompoundOperation, Tag('CompoundOperation')],
|
|
462
|
+
# Annotated[MoveOperation, Tag('MoveOperation')],
|
|
463
|
+
# Annotated[CopyOperation, Tag('CopyOperation')],
|
|
464
|
+
# ],
|
|
465
|
+
# Discriminator(_get_type_designator)
|
|
466
|
+
# ]
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
from jsonpath import JSONPath
|
|
2
|
+
from jsonpath.segments import JSONPathRecursiveDescentSegment
|
|
3
|
+
from jsonpath.selectors import JSONPathSelector
|
|
4
|
+
|
|
5
|
+
from jsonpatch_trigger.common import make_jsonpath, normalize_jsonpath
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def _make_raw_parent(path: JSONPath) -> JSONPath:
    """
    Strip the final segment from *path* and normalize the result.

    This could potentially return an invalid JSONPath that returns on ".."
    and it loops back on the root if needed.

    :param JSONPath path: the JSONPath to create a raw parent from
    :return: new JSONPath that was stripped by its last segment
    """
    stripped = JSONPath(
        env=path.env,
        segments=path.segments[:-1],
        pseudo_root=path.pseudo_root
    )
    return normalize_jsonpath(stripped)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def make_parent_key_pairs(path: JSONPath) -> list[tuple[JSONPath, JSONPathSelector | None]]:
    """Split *path* into (parent path, last selector) pairs.

    A root path yields a single ``(path, None)`` pair. A recursive descent
    tail (``..``) can match both directly below the base parent and anywhere
    deeper, so its selectors are paired with both the base parent and a
    ``parent..*`` variant.

    :param JSONPath path: the path to decompose
    :return: one pair per (parent, selector) combination
    """
    if len(path.segments) == 0:  # this is a root path
        return [(path, None)]

    last_segment = path.segments[-1]
    selectors = last_segment.selectors

    if isinstance(last_segment, JSONPathRecursiveDescentSegment):
        base_parent = _make_raw_parent(path)
        nested_parent = make_jsonpath(f'{base_parent}..*')
        return [
            (parent, selector)
            for parent in [base_parent, nested_parent]
            for selector in selectors
        ]

    return [
        (_make_raw_parent(path), selector)
        for selector in selectors
    ]
    # NOTE(review): an unreachable block of leftover selector-dispatch code
    # (referencing undefined names like ``segment``, ``pointer_part`` and
    # selector classes that were never imported) previously followed the
    # final return; it has been removed.
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
from typing import MutableSequence, MutableMapping, Any
|
|
2
|
+
|
|
3
|
+
from jsonpath import JSONPath
|
|
4
|
+
from pydantic import BaseModel
|
|
5
|
+
|
|
6
|
+
from jsonpatch_trigger.compat import PydanticJSONPath
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class PreconditionFunction(BaseModel):
    """Base class for precondition checks.

    Subclasses express their check by overriding ``__eq__`` so that a
    precondition can be evaluated as ``function == value``.
    """

    def __ne__(self, other) -> bool:
        # BUG FIX: this was originally named ``__neq__``, which Python never
        # calls; ``!=`` dispatches to ``__ne__``.
        return not self.__eq__(other)

    # Backward-compatible alias for any caller that invoked the old name
    # explicitly.
    __neq__ = __ne__
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class IsObjectPreconditionFunction(PreconditionFunction):
    # Satisfied when the checked value is a JSON object (a mutable mapping).
    # The check is expressed through __eq__ so a precondition can be
    # evaluated as ``function == value``.
    def __eq__(self, other):
        return isinstance(other, MutableMapping)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class IsArrayPreconditionFunction(PreconditionFunction):
    # Satisfied when the checked value is a JSON array (a mutable sequence).
    def __eq__(self, other):
        return isinstance(other, MutableSequence)
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class IsArrayOrObjectPreconditionFunction(PreconditionFunction):
    # Satisfied when the checked value is a container (JSON array or object).
    def __eq__(self, other):
        return isinstance(other, (MutableMapping, MutableSequence))
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class ValuePreconditionFunction(PreconditionFunction):
    # Satisfied when the checked value equals the configured ``value``.
    value: Any

    def __eq__(self, other):
        return other == self.value
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class IsNonePreconditionFunction(PreconditionFunction):
    # Satisfied when the checked value is JSON null (None).
    def __eq__(self, other):
        return other is None
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
class IsNotNonePreconditionFunction(PreconditionFunction):
    # Satisfied when the checked value is anything but JSON null (None).
    def __eq__(self, other):
        return other is not None
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class ExistsPreconditionFunction(PreconditionFunction):
    # Marker class: no __eq__ override here — presumably the precondition
    # evaluator treats it as "queried location exists"; TODO confirm against
    # the evaluation logic.
    ...
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
class DoesNotExistPreconditionFunction(PreconditionFunction):
    # Marker class: no __eq__ override here — presumably the precondition
    # evaluator treats it as "queried location does not exist"; TODO confirm
    # against the evaluation logic.
    ...
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class Precondition(BaseModel):
    """A check evaluated before an operation may run.

    ``function`` is compared (via its ``__eq__`` predicate) against the
    value(s) selected by ``query``.
    """

    query: PydanticJSONPath
    function: PreconditionFunction
|
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
import collections
|
|
2
|
+
import copy
|
|
3
|
+
import functools
|
|
4
|
+
from typing import Any, MutableSequence, MutableMapping
|
|
5
|
+
|
|
6
|
+
import jsonpath
|
|
7
|
+
from jsonpath import JSONPointer, JSONPatch, JSONPointerIndexError, JSONPointerKeyError
|
|
8
|
+
from pydantic import BaseModel
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def get_all_subtree_pointers(
        document: Any,
        base_pointer: "JSONPointer"
) -> "set[JSONPointer]":
    """Collect *base_pointer* plus a pointer for every descendant of the
    value it resolves to in *document*.

    :param document: the document to resolve against
    :param base_pointer: root of the subtree to enumerate
    :return: all pointers in the subtree; empty set when *base_pointer* no
        longer resolves (the value may already have been moved or removed)
    """
    try:
        obj = base_pointer.resolve(document)
    except (JSONPointerIndexError, JSONPointerKeyError):
        return set()

    # Mapping children are addressed by key, sequence children by their
    # stringified index; scalars have no children. (The two container
    # branches previously duplicated the whole recursion — merged here.)
    if isinstance(obj, MutableMapping):
        child_keys = list(obj.keys())
    elif isinstance(obj, MutableSequence):
        child_keys = [str(idx) for idx in range(len(obj))]
    else:
        return {base_pointer}

    sub_pointers = {
        copy.deepcopy(base_pointer).join(key)
        for key in child_keys
    }
    return {base_pointer} | sub_pointers | {
        child
        for pointer in sub_pointers
        for child in get_all_subtree_pointers(document, pointer)
    }
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class ChangeTracker:
    """Accumulates which JSON pointers were added and which were removed
    while a patch is executed."""

    def __init__(self):
        self.additions: "set[JSONPointer]" = set()
        self.removals: "set[JSONPointer]" = set()

    def add_pointers(self, pointers: "set[JSONPointer]", removal=False):
        """Record *pointers* as removals or additions.

        A removal also retracts any pending additions of the same pointers,
        so values added and then removed again before other actions can
        trigger are not reported as added.
        """
        if removal:
            self.removals |= pointers
            self.additions -= pointers  # Removes all additions if they are removed again before other actions can trigger
        else:
            self.additions |= pointers
            # TODO for now, we do not do the inverse here: so if something
            #  becomes added after removal we still keep it in the removal
            #  list. This should not really matter because we don't trigger
            #  anything on removals yet.

    def add_pointer_pairs(self, pointer_pairs: "list[tuple[JSONPointer, bool]]"):
        """Record a batch of ``(pointer, is_removal)`` pairs."""
        pointer_sorter = collections.defaultdict(set)
        for pointer, removal in pointer_pairs:
            pointer_sorter[removal].add(pointer)
        # BUG FIX: the removal flag was previously dropped here, so pointers
        # flagged as removals were recorded as additions instead.
        self.add_pointers(pointer_sorter[True], removal=True)
        self.add_pointers(pointer_sorter[False], removal=False)
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
class ChangeRegistrationMixin(BaseModel):
    """Hooks that let an operation record tracked changes around each
    RFC 6902 operation it registered; the defaults do nothing."""

    @staticmethod
    def _get_pointer(rfc_operation: jsonpath.patch.Op, slot_names: list[str]) -> JSONPointer:
        """Return the first non-None attribute of *rfc_operation* named in
        *slot_names*; raise when none is set."""
        candidates = (getattr(rfc_operation, name, None) for name in slot_names)
        pointer = next((c for c in candidates if c is not None), None)
        if pointer is None:
            raise RuntimeError('No suitable pointer slot found')
        return pointer

    def pre_execution_registration(self, rfc_operation: jsonpath.patch.Op, change_tracker: ChangeTracker, document: Any):
        """Called before *rfc_operation* runs; no-op by default."""

    def post_execution_registration(self, rfc_operation: jsonpath.patch.Op, change_tracker: ChangeTracker, document: Any):
        """Called after *rfc_operation* ran; no-op by default."""
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
class RemovalRegistrationMixin(ChangeRegistrationMixin):
    # Tracks a removal: the whole subtree under the operation's 'path'
    # pointer is recorded as removed BEFORE execution, while it is still
    # resolvable and can be enumerated.
    def pre_execution_registration(self, rfc_operation: jsonpath.patch.Op, change_tracker: ChangeTracker, document: Any):
        pointer = self._get_pointer(rfc_operation, ['path'])
        change_tracker.add_pointers(get_all_subtree_pointers(document, pointer), removal=True)
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
class CopyRegistrationMixin(ChangeRegistrationMixin):
    # Tracks a copy: the subtree under the destination pointer ('dest', or
    # 'path' as fallback) is recorded as added AFTER execution, once the
    # copied subtree actually exists in the document.
    def post_execution_registration(self, rfc_operation: jsonpath.patch.Op, change_tracker: ChangeTracker, document: Any):
        pointer = self._get_pointer(rfc_operation, ['dest', 'path'])
        change_tracker.add_pointers(get_all_subtree_pointers(document, pointer), removal=False)
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
class MoveRegistrationMixin(CopyRegistrationMixin):
    # Tracks a move: the source subtree ('source', or 'path' as fallback) is
    # recorded as removed BEFORE execution; the destination subtree is
    # recorded as added afterwards via the inherited CopyRegistrationMixin
    # post-execution hook.
    def pre_execution_registration(self, rfc_operation: jsonpath.patch.Op, change_tracker: ChangeTracker,
                                   document: Any):
        pointer = self._get_pointer(rfc_operation, ['source', 'path'])
        change_tracker.add_pointers(get_all_subtree_pointers(document, pointer), removal=True)
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
# TODO we likely need some further checking if an operation actually overwrites something else or not
# Replace and add reuse the move tracking: _get_pointer falls back to the
# 'path' slot when 'source'/'dest' are absent, so for these operations the
# overwritten subtree counts as removed and the written subtree as added.
ReplaceRegistrationMixin = MoveRegistrationMixin
AddRegistrationMixin = MoveRegistrationMixin
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
class TrackingJSONPatch:
    """Wrapper around ``JSONPatch`` that invokes the owning operation's
    registration hooks so a ``ChangeTracker`` records what changed.

    Builder methods (``add``, ``move``, ``copy``, ``remove``, ...) are
    proxied to the wrapped ``JSONPatch``; ``run`` applies the collected
    operations one by one.
    """

    def __init__(self, change_tracker: ChangeTracker, base_operation: ChangeRegistrationMixin, document: Any):
        # change_tracker: receives the added/removed pointer sets
        # base_operation: supplies the pre-/post-execution registration hooks
        # document: document state used to enumerate subtree pointers at
        #           registration time
        self._patch = JSONPatch()
        self._tracker = change_tracker
        self._base = base_operation
        self._document = document

    def __getattr__(self, attr):
        """Proxy attribute access to the wrapped ``JSONPatch``.

        Public builder methods (everything except ``apply`` and private
        names) are wrapped so the base operation's pre-execution hook runs
        for the operation that was just appended.
        """
        patch_attr = getattr(self._patch, attr)

        if not attr.startswith('_') and attr != 'apply':
            # this is a respective method to add an operation

            @functools.wraps(patch_attr)
            def wrapper(*args, **kwargs):
                # The builder returns the patch; the newly appended
                # operation is its last entry.
                result = patch_attr(*args, **kwargs)
                # NOTE(review): the "pre-execution" hook fires here at
                # registration time, against the document as it is now —
                # not immediately before ``run`` executes the operation.
                self._base.pre_execution_registration(result.ops[-1], self._tracker, self._document)
                return result

            return wrapper
        return patch_attr

    def run(self, document: Any) -> Any:
        """Apply all registered operations in order, invoking the
        post-execution hook after each, and return the final document."""
        for operation in self._patch.ops:
            document = operation.apply(document)
            self._base.post_execution_registration(operation, self._tracker, document)
        return document
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: jsonpatch-trigger
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Extension for JSONPatch (RFC6902) that introduces operation preconditions, change tracking and automated listeners that can introduce new operations based on the previous change
|
|
5
|
+
Requires-Python: >=3.11
|
|
6
|
+
Description-Content-Type: text/markdown
|
|
7
|
+
Requires-Dist: python-jsonpath==2.0.2
|
|
8
|
+
Requires-Dist: pydantic==2.12.5
|
|
9
|
+
Provides-Extra: test
|
|
10
|
+
Requires-Dist: coverage==7.13.4; extra == "test"
|
|
11
|
+
Requires-Dist: pytest==9.0.2; extra == "test"
|
|
12
|
+
|
|
13
|
+
# JSONPatch-Trigger
|
|
14
|
+
|
|
15
|
+
This package extends the JSON Patch (RFC 6902) functionality with the following features:
|
|
16
|
+
- Preconditions for operations that prevent their execution
|
|
17
|
+
- Usage of JSONPaths over JSONPointers to allow operation targets with wildcards and conditions
|
|
18
|
+
- Change tracking of each operation (a list of additions and deletions as JSONPointers)
|
|
19
|
+
- Listeners that can react to the tracked changes to dynamically perform customizable actions when something in the JSON document has changed
|
|
20
|
+
|
|
21
|
+
The JSONPath and JSONPatch implementations used as a basis are from https://pypi.org/project/python-jsonpath/
|
|
22
|
+
|
|
23
|
+
## Use Case
|
|
24
|
+
The functionalities in this package have been developed to serve the following use case:
|
|
25
|
+
|
|
26
|
+
A process P produces JSON objects.
|
|
27
|
+
Every time P executes, the result needs to be adjusted with changes the user can configure.
|
|
28
|
+
So the set of operations is persisted and applied for every process run.
|
|
29
|
+
There are different processes and each requires a different set of user operations.
|
|
30
|
+
Additionally, the produced JSON objects can have patterns that can be changed automatically instead of with a manual user action.
|
|
31
|
+
However, the automated steps might be dependent on the order of user operations.
|
|
32
|
+
So instead of appending or prepending the automated operations, a listener approach is used to apply the automated operation as soon as a certain path in the document is modified (triggered).
|
|
33
|
+
For this to work properly, the first operation is always an AddOperation that
adds the entire existing object; the tracking therefore produces an addition for every JSONPointer in the document.
|
|
35
|
+
|
|
36
|
+
## CI Debug Counter
|
|
37
|
+
3
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
jsonpatch_trigger/__init__.py,sha256=is1e5cn3odL5TN6RmW8tRmqa87MIQ9FcAx5RifhUwT0,505
|
|
2
|
+
jsonpatch_trigger/common.py,sha256=X2k8B24l7I8K-Up7whlkB3F_c4NODxgwbqr4W-bJRhg,1476
|
|
3
|
+
jsonpatch_trigger/compat.py,sha256=rkGYJVVWKYBiLfd172tQedLLbI8FfOwXy82vuwovd5M,5945
|
|
4
|
+
jsonpatch_trigger/execution.py,sha256=JY4esJspBUEb-nKX5ttPol6f1x-Ga3Ul69YxdD3FnfY,8323
|
|
5
|
+
jsonpatch_trigger/operations.py,sha256=ssC7xnQgMoUnL-YVJLxsP9YPcOiKDqPlimHsVnXwSKM,17807
|
|
6
|
+
jsonpatch_trigger/parents.py,sha256=lTOqTtDqbR7NdyepShRqfTBsJq2SC9Qe95OCFSxhzVE,2012
|
|
7
|
+
jsonpatch_trigger/preconditions.py,sha256=JOMnPr1AnnkkqSXCLFoidwRjf5ulWmsdvWUrKlbpC_o,1478
|
|
8
|
+
jsonpatch_trigger/tracking.py,sha256=HRSfr7pAhfNnUA0kGhHmQMiGGNl2Xa8cy9zaowne5zk,5188
|
|
9
|
+
jsonpatch_trigger-0.1.0.dist-info/METADATA,sha256=IJ7ldq-I0FMNgYtZo6FdaKhqBynQeDWZwSPx2izX3BU,2114
|
|
10
|
+
jsonpatch_trigger-0.1.0.dist-info/WHEEL,sha256=aeYiig01lYGDzBgS8HxWXOg3uV61G9ijOsup-k9o1sk,91
|
|
11
|
+
jsonpatch_trigger-0.1.0.dist-info/top_level.txt,sha256=gx0expLRzffeexZ_GE_aL-N5KmIQ6csIzqB9PRc1JfY,18
|
|
12
|
+
jsonpatch_trigger-0.1.0.dist-info/RECORD,,
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
jsonpatch_trigger
|