fractal_task_tools-0.2.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,5 @@
+ import logging
+
+ logging.basicConfig(level=logging.INFO)
+
+ __VERSION__ = "0.2.0"
@@ -0,0 +1,246 @@
+ import logging
+ import os
+ from collections import Counter
+ from pathlib import Path
+ from typing import Any
+ from typing import Callable
+ from typing import Optional
+
+ import pydantic
+ from docstring_parser import parse as docparse
+
+ from ._descriptions import _get_class_attrs_descriptions
+ from ._descriptions import _get_function_args_descriptions
+ from ._descriptions import _insert_class_attrs_descriptions
+ from ._descriptions import _insert_function_args_descriptions
+ from ._pydantic_generatejsonschema import CustomGenerateJsonSchema
+ from ._signature_constraints import _extract_function
+ from ._signature_constraints import _validate_function_signature
+ from ._titles import _include_titles
+
+
+ _Schema = dict[str, Any]
+
+
+ def _remove_attributes_from_descriptions(old_schema: _Schema) -> _Schema:
+     """
+     Keeps only the description part of the docstrings: e.g. from
+     ```
+     'Custom class for Omero-channel window, based on OME-NGFF v0.4.\\n'
+     '\\n'
+     'Attributes:\\n'
+     'min: Do not change. It will be set to `0` by default.\\n'
+     'max: Do not change. It will be set according to bitdepth of the images\\n'
+     '    by default (e.g. 65535 for 16 bit images).\\n'
+     'start: Lower-bound rescaling value for visualization.\\n'
+     'end: Upper-bound rescaling value for visualization.'
+     ```
+     to `'Custom class for Omero-channel window, based on OME-NGFF v0.4.\\n'`.
+
+     Args:
+         old_schema: The JSON Schema to be processed.
+     """
+     new_schema = old_schema.copy()
+     if "$defs" in new_schema:
+         for name, definition in new_schema["$defs"].items():
+             if "description" in definition.keys():
+                 parsed_docstring = docparse(definition["description"])
+                 new_schema["$defs"][name][
+                     "description"
+                 ] = parsed_docstring.short_description
+             elif "title" in definition.keys():
+                 title = definition["title"]
+                 new_schema["$defs"][name][
+                     "description"
+                 ] = f"Missing description for {title}."
+             else:
+                 new_schema["$defs"][name][
+                     "description"
+                 ] = "Missing description"
+     logging.info("[_remove_attributes_from_descriptions] END")
+     return new_schema
+
+
+ def _create_schema_for_function(function: Callable) -> _Schema:
+     from packaging.version import parse
+
+     if parse(pydantic.__version__) >= parse("2.11.0"):
+         from pydantic.experimental.arguments_schema import (
+             generate_arguments_schema,
+         )
+         from pydantic import ConfigDict
+         from pydantic.fields import FieldInfo, ComputedFieldInfo
+
+         # NOTE: v2.12.0 modified the generated field titles. The function
+         # `make_title` restores the `<2.12.0` behavior.
+         def make_title(name: str, info: FieldInfo | ComputedFieldInfo):
+             return name.title().replace("_", " ").strip()
+
+         core_schema = generate_arguments_schema(
+             function,
+             schema_type="arguments",
+             config=ConfigDict(field_title_generator=make_title),
+         )
+
+     elif parse(pydantic.__version__) >= parse("2.9.0"):
+         from pydantic._internal._config import ConfigWrapper  # noqa
+         from pydantic._internal import _generate_schema  # noqa
+
+         gen_core_schema = _generate_schema.GenerateSchema(
+             ConfigWrapper(None),
+             None,
+         )
+         core_schema = gen_core_schema.generate_schema(function)
+         core_schema = gen_core_schema.clean_schema(core_schema)
+     else:
+         from pydantic._internal._typing_extra import add_module_globals  # noqa
+         from pydantic._internal import _generate_schema  # noqa
+         from pydantic._internal._config import ConfigWrapper  # noqa
+
+         namespace = add_module_globals(function, None)
+         gen_core_schema = _generate_schema.GenerateSchema(
+             ConfigWrapper(None), namespace
+         )
+         core_schema = gen_core_schema.generate_schema(function)
+         core_schema = gen_core_schema.clean_schema(core_schema)
+
+     gen_json_schema = CustomGenerateJsonSchema()
+     json_schema = gen_json_schema.generate(core_schema, mode="validation")
+     return json_schema
+
+
+ def create_schema_for_single_task(
+     executable: str,
+     package: Optional[str] = None,
+     pydantic_models: Optional[list[tuple[str, str, str]]] = None,
+     task_function: Optional[Callable] = None,
+     verbose: bool = False,
+ ) -> _Schema:
+     """
+     Main function to create a JSON Schema of task arguments.
+
+     This function can be used in two ways:
+
+     1. `task_function` argument is `None`, `package` is set, and `executable`
+        is a path relative to that package.
+     2. `task_function` argument is provided, `executable` is an absolute path
+        to the function module, and `package` is `None`. This is useful for
+        testing.
+     """
+
+     DEFINITIONS_KEY = "$defs"
+
+     logging.info("[create_schema_for_single_task] START")
+     if task_function is None:
+         usage = "1"
+         # Usage 1 (standard)
+         if package is None:
+             raise ValueError(
+                 "Cannot call `create_schema_for_single_task` with "
+                 f"{task_function=} and {package=}. Exit."
+             )
+         if os.path.isabs(executable):
+             raise ValueError(
+                 "Cannot call `create_schema_for_single_task` with "
+                 f"{task_function=} and absolute {executable=}. Exit."
+             )
+     else:
+         usage = "2"
+         # Usage 2 (testing)
+         if package is not None:
+             raise ValueError(
+                 "Cannot call `create_schema_for_single_task` with "
+                 f"{task_function=} and non-None {package=}. Exit."
+             )
+         if not os.path.isabs(executable):
+             raise ValueError(
+                 "Cannot call `create_schema_for_single_task` with "
+                 f"{task_function=} and non-absolute {executable=}. Exit."
+             )
+
+     # Extract function from module
+     if usage == "1":
+         # Extract the function name (for the moment we assume the function
+         # has the same name as the module)
+         function_name = Path(executable).with_suffix("").name
+         # Extract the function object
+         task_function = _extract_function(
+             package_name=package,
+             module_relative_path=executable,
+             function_name=function_name,
+             verbose=verbose,
+         )
+     else:
+         # The function object is already available, extract its name
+         function_name = task_function.__name__
+
+     if verbose:
+         logging.info(f"[create_schema_for_single_task] {function_name=}")
+         logging.info(f"[create_schema_for_single_task] {task_function=}")
+
+     # Validate function signature against some custom constraints
+     _validate_function_signature(task_function)
+
+     # Create and clean up schema
+     schema = _create_schema_for_function(task_function)
+     schema = _remove_attributes_from_descriptions(schema)
+
+     # Include titles for custom-model-typed arguments
+     schema = _include_titles(
+         schema, definitions_key=DEFINITIONS_KEY, verbose=verbose
+     )
+
+     # Include main title
+     if schema.get("title") is None:
+
+         def to_camel_case(snake_str):
+             return "".join(
+                 x.capitalize() for x in snake_str.lower().split("_")
+             )
+
+         schema["title"] = to_camel_case(task_function.__name__)
+
+     # Include descriptions of function arguments. Note: this works both
+     # for usages 1 and 2 (see docstring).
+     function_args_descriptions = _get_function_args_descriptions(
+         package_name=package,
+         module_path=executable,
+         function_name=function_name,
+         verbose=verbose,
+     )
+
+     schema = _insert_function_args_descriptions(
+         schema=schema, descriptions=function_args_descriptions
+     )
+
+     if pydantic_models is not None:
+         # Check that model names are unique
+         pydantic_models_names = [item[2] for item in pydantic_models]
+         duplicate_class_names = [
+             name
+             for name, count in Counter(pydantic_models_names).items()
+             if count > 1
+         ]
+         if duplicate_class_names:
+             pydantic_models_str = " " + "\n ".join(map(str, pydantic_models))
+             raise ValueError(
+                 "Cannot parse docstrings for models with non-unique names "
+                 f"{duplicate_class_names}, in\n{pydantic_models_str}"
+             )
+
+         # Extract model-attribute descriptions and insert them into schema
+         for package_name, module_relative_path, class_name in pydantic_models:
+             attrs_descriptions = _get_class_attrs_descriptions(
+                 package_name=package_name,
+                 module_relative_path=module_relative_path,
+                 class_name=class_name,
+             )
+             schema = _insert_class_attrs_descriptions(
+                 schema=schema,
+                 class_name=class_name,
+                 descriptions=attrs_descriptions,
+                 definition_key=DEFINITIONS_KEY,
+             )
+
+     logging.info("[create_schema_for_single_task] END")
+     return schema
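
For orientation, "usage 2" above can be exercised without an installed package. A minimal sketch, with a hypothetical task function and module path (in a real call, `executable` must point at the module that actually defines the function, since argument descriptions are parsed from its source):

```python
from fractal_task_tools._args_schemas import create_schema_for_single_task


def threshold_image(zarr_url: str, threshold: int = 128) -> None:
    """
    Hypothetical task function.

    Args:
        zarr_url: Path to the OME-Zarr image.
        threshold: Intensity cutoff applied to the image.
    """
    ...


schema = create_schema_for_single_task(
    executable="/abs/path/to/my_tasks/threshold_image.py",  # assumed path
    package=None,
    task_function=threshold_image,
)
# Expected: schema["title"] == "ThresholdImage" (via the to_camel_case
# fallback), with per-argument descriptions taken from the docstring.
```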
@@ -0,0 +1,97 @@
+ import argparse as ap
+ import sys
+
+ from fractal_task_tools._cli_tools import check_manifest
+ from fractal_task_tools._cli_tools import write_manifest_to_file
+ from fractal_task_tools._create_manifest import create_manifest
+
+
+ main_parser = ap.ArgumentParser(
+     description="`fractal-manifest` command-line interface",
+     allow_abbrev=False,
+ )
+
+ subparsers = main_parser.add_subparsers(
+     title="Available commands",
+     dest="cmd",
+ )
+
+
+ create_manifest_parser = subparsers.add_parser(
+     "create",
+     description="Create new manifest file",
+     allow_abbrev=False,
+ )
+
+ check_manifest_parser = subparsers.add_parser(
+     "check",
+     description="Check existing manifest file",
+     allow_abbrev=False,
+ )
+
+
+ for subparser in (create_manifest_parser, check_manifest_parser):
+     subparser.add_argument(
+         "--package",
+         type=str,
+         help="Example: 'fractal_tasks_core'",
+         required=True,
+     )
+     subparser.add_argument(
+         "--task-list-path",
+         type=str,
+         help=(
+             "Dot-separated path to the `task_list.py` module, "
+             "relative to the package root (default value: 'dev.task_list')."
+         ),
+         default="dev.task_list",
+         required=False,
+     )
+
+ check_manifest_parser.add_argument(
+     "--ignore-keys-order",
+     action="store_true",
+     help=(
+         "Ignore the order of dictionary keys when comparing manifests "
+         "(default value: False)."
+     ),
+     default=False,
+     required=False,
+ )
+
+
+ def _parse_arguments(sys_argv: list[str] | None = None) -> ap.Namespace:
+     """
+     Parse `sys.argv` or custom CLI arguments.
+
+     Arguments:
+         sys_argv: If set, overrides `sys.argv` (useful for testing).
+     """
+     if sys_argv is None:
+         sys_argv = sys.argv[:]
+     args = main_parser.parse_args(sys_argv[1:])
+     return args
+
+
+ def main():
+     args = _parse_arguments()
+     if args.cmd == "create":
+         manifest = create_manifest(
+             raw_package_name=args.package,
+             task_list_path=args.task_list_path,
+         )
+         write_manifest_to_file(
+             raw_package_name=args.package,
+             manifest=manifest,
+         )
+
+     elif args.cmd == "check":
+         manifest = create_manifest(
+             raw_package_name=args.package,
+             task_list_path=args.task_list_path,
+         )
+         check_manifest(
+             raw_package_name=args.package,
+             manifest=manifest,
+             ignore_keys_order=args.ignore_keys_order,
+         )
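
Since `_parse_arguments` drops the first element of its argument list (mirroring `sys.argv[0]`), the parser can be checked in isolation. A small sketch with a hypothetical package name (the CLI module path in the import is an assumption):

```python
from fractal_task_tools.__main__ import _parse_arguments  # module path assumed

# Equivalent to: fractal-manifest create --package my-tasks
args = _parse_arguments(["fractal-manifest", "create", "--package", "my-tasks"])
assert args.cmd == "create"
assert args.package == "my-tasks"
assert args.task_list_path == "dev.task_list"  # default value
```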
@@ -0,0 +1,86 @@
+ import json
+ import logging
+ import os
+ import sys
+ from importlib import import_module
+ from pathlib import Path
+
+ from fractal_task_tools._create_manifest import MANIFEST_FILENAME
+ from fractal_task_tools._deepdiff import deepdiff
+ from fractal_task_tools._package_name_tools import normalize_package_name
+
+
+ def write_manifest_to_file(
+     *,
+     raw_package_name: str,
+     manifest: dict,
+ ) -> None:
+     """
+     Write manifest to file.
+
+     Arguments:
+         raw_package_name: The (possibly non-normalized) package name.
+         manifest: The manifest object.
+     """
+     logging.info("[write_manifest_to_file] START")
+
+     package_name = normalize_package_name(raw_package_name)
+     logging.info(f"[write_manifest_to_file] {package_name=}")
+
+     imported_package = import_module(package_name)
+     package_root_dir = Path(imported_package.__file__).parent
+     manifest_path = (package_root_dir / MANIFEST_FILENAME).as_posix()
+     logging.info(f"[write_manifest_to_file] {os.getcwd()=}")
+     logging.info(f"[write_manifest_to_file] {package_root_dir=}")
+     logging.info(f"[write_manifest_to_file] {manifest_path=}")
+
+     with open(manifest_path, "w") as f:
+         json.dump(manifest, f, indent=2)
+         f.write("\n")
+
+     logging.info("[write_manifest_to_file] END")
+
+
+ def check_manifest(
+     *,
+     raw_package_name: str,
+     manifest: dict,
+     ignore_keys_order: bool,
+ ) -> None:
+     """
+     Check that the on-disk manifest matches the newly created one.
+
+     Arguments:
+         raw_package_name: The (possibly non-normalized) package name.
+         manifest: The manifest object.
+         ignore_keys_order: Whether to ignore the order of dictionary keys.
+     """
+
+     package_name = normalize_package_name(raw_package_name)
+     logging.info(f"[check_manifest] {package_name=}")
+
+     imported_package = import_module(package_name)
+     package_root_dir = Path(imported_package.__file__).parent
+     manifest_path = (package_root_dir / MANIFEST_FILENAME).as_posix()
+     logging.info(f"[check_manifest] {os.getcwd()=}")
+     logging.info(f"[check_manifest] {package_root_dir=}")
+     logging.info(f"[check_manifest] {manifest_path=}")
+
+     with open(manifest_path, "r") as f:
+         old_manifest = json.load(f)
+     if manifest == old_manifest:
+         logging.info("[check_manifest] On-disk manifest is up to date.")
+     else:
+         logging.error("[check_manifest] On-disk manifest is not up to date.")
+         try:
+             deepdiff(
+                 old_object=old_manifest,
+                 new_object=manifest,
+                 path="manifest",
+                 ignore_keys_order=ignore_keys_order,
+             )
+         except ValueError as e:
+             logging.error(str(e))
+             sys.exit("New/old manifests differ")
+
+     logging.info("[check_manifest] END")
1
+ """
2
+ Generate JSON schemas for task arguments and combine them into a manifest.
3
+ """
4
+ import logging
5
+ from importlib import import_module
6
+ from typing import Any
7
+
8
+ from ._args_schemas import create_schema_for_single_task
9
+ from ._package_name_tools import normalize_package_name
10
+ from ._task_arguments import validate_arguments
11
+ from ._task_docs import create_docs_info
12
+ from ._task_docs import read_docs_info_from_file
13
+ from .task_models import _BaseTask
14
+
15
+ ARGS_SCHEMA_VERSION = "pydantic_v2"
16
+ MANIFEST_FILENAME = "__FRACTAL_MANIFEST__.json"
17
+ MANIFEST_VERSION = "2"
18
+
19
+
20
+ def create_manifest(
21
+ *,
22
+ raw_package_name: str,
23
+ task_list_path: str,
24
+ ) -> dict[str, Any]:
25
+ """
26
+ Create the package manifest based on a `task_list.py` module
27
+
28
+ Arguments:
29
+ raw_package_name:
30
+ The name of the package. Note that this name must be importable
31
+ (after normalization).
32
+ task_list_path:
33
+ Relative path to the `task_list.py` module, with respect to the
34
+ package root (example `dev.task_list`).
35
+
36
+ Returns:
37
+ Task-package manifest.
38
+ """
39
+
40
+ # Preliminary validation
41
+ if "/" in task_list_path or task_list_path.endswith(".py"):
42
+ raise ValueError(
43
+ f"Invalid {task_list_path=} (valid example: `dev.task_list`)."
44
+ )
45
+
46
+ # Normalize package name
47
+ package_name = normalize_package_name(raw_package_name)
48
+
49
+ logging.info(f"Start generating a new manifest for {package_name}")
50
+
51
+ # Prepare an empty manifest
52
+ manifest = dict(
53
+ manifest_version=MANIFEST_VERSION,
54
+ task_list=[],
55
+ has_args_schemas=True,
56
+ args_schema_version=ARGS_SCHEMA_VERSION,
57
+ authors=None,
58
+ )
59
+
60
+ # Import the task-list module
61
+ task_list_module = import_module(f"{package_name}.{task_list_path}")
62
+
63
+ # Load TASK_LIST
64
+ TASK_LIST: list[_BaseTask] = getattr(task_list_module, "TASK_LIST")
65
+
66
+ # Load INPUT_MODELS
67
+ try:
68
+ INPUT_MODELS = getattr(task_list_module, "INPUT_MODELS")
69
+ except AttributeError:
70
+ INPUT_MODELS = []
71
+ logging.warning(
72
+ "No `INPUT_MODELS` found in task_list module. Setting it to `[]`."
73
+ )
74
+
75
+ # Load AUTHORS
76
+ try:
77
+ manifest["authors"] = getattr(task_list_module, "AUTHORS")
78
+ except AttributeError:
79
+ logging.warning("No `AUTHORS` found in task_list module.")
80
+
81
+ # Load DOCS_LINK
82
+ try:
83
+ DOCS_LINK = getattr(task_list_module, "DOCS_LINK")
84
+ # Transform empty string into None
85
+ if DOCS_LINK == "":
86
+ DOCS_LINK = None
87
+ logging.warning(
88
+ "`DOCS_LINK=" "` transformed into `DOCS_LINK=None`."
89
+ )
90
+ except AttributeError:
91
+ DOCS_LINK = None
92
+ logging.warning("No `DOCS_LINK` found in task_list module.")
93
+
94
+ # Loop over TASK_LIST, and append the proper task dictionaries
95
+ # to manifest["task_list"]
96
+ for task_obj in TASK_LIST:
97
+ # Convert Pydantic object to dictionary
98
+ task_dict = task_obj.model_dump(
99
+ exclude={
100
+ "meta_init",
101
+ "executable_init",
102
+ "meta",
103
+ "executable",
104
+ },
105
+ exclude_unset=True,
106
+ )
107
+ task_dict["type"] = task_obj.type
108
+
109
+ # Copy some properties from `task_obj` to `task_dict`
110
+ if task_obj.executable_non_parallel is not None:
111
+ task_dict[
112
+ "executable_non_parallel"
113
+ ] = task_obj.executable_non_parallel
114
+ if task_obj.executable_parallel is not None:
115
+ task_dict["executable_parallel"] = task_obj.executable_parallel
116
+ if task_obj.meta_non_parallel is not None:
117
+ task_dict["meta_non_parallel"] = task_obj.meta_non_parallel
118
+ if task_obj.meta_parallel is not None:
119
+ task_dict["meta_parallel"] = task_obj.meta_parallel
120
+
121
+ # Autogenerate JSON Schemas for non-parallel/parallel task arguments
122
+ for kind in ["non_parallel", "parallel"]:
123
+ executable = task_dict.get(f"executable_{kind}")
124
+ if executable is not None:
125
+ logging.info(f"[{executable}] START")
126
+ schema = create_schema_for_single_task(
127
+ executable,
128
+ package=package_name,
129
+ pydantic_models=INPUT_MODELS,
130
+ )
131
+
132
+ validate_arguments(
133
+ task_type=task_obj.type,
134
+ schema=schema,
135
+ executable_kind=kind,
136
+ )
137
+
138
+ logging.info(f"[{executable}] END (new schema)")
139
+ task_dict[f"args_schema_{kind}"] = schema
140
+
141
+ # Compute and set `docs_info`
142
+ docs_info = task_dict.get("docs_info")
143
+ if docs_info is None:
144
+ docs_info = create_docs_info(
145
+ executable_non_parallel=task_obj.executable_non_parallel,
146
+ executable_parallel=task_obj.executable_parallel,
147
+ package=package_name,
148
+ )
149
+ elif docs_info.startswith("file:"):
150
+ docs_info = read_docs_info_from_file(
151
+ docs_info=docs_info,
152
+ task_list_path=task_list_module.__file__,
153
+ )
154
+ if docs_info is not None:
155
+ task_dict["docs_info"] = docs_info
156
+
157
+ # Set `docs_link`
158
+ if DOCS_LINK is not None:
159
+ task_dict["docs_link"] = DOCS_LINK
160
+
161
+ # Append task
162
+ manifest["task_list"].append(task_dict)
163
+ return manifest
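
For reference, the module-level names that `create_manifest` reads via `getattr` could be provided as in this sketch of a hypothetical `my_tasks/dev/task_list.py`. Only `TASK_LIST` is required; the `ParallelTask` class name is an assumption about the public models in `fractal_task_tools.task_models` (the code above only shows their `_BaseTask` base):

```python
from fractal_task_tools.task_models import ParallelTask  # class name assumed

TASK_LIST = [
    ParallelTask(
        name="Threshold Image",
        executable="threshold_image.py",
    ),
]

# Optional: (package_name, module_relative_path, class_name) tuples for
# custom Pydantic models used as task arguments.
INPUT_MODELS = []

# Optional: missing values only trigger warnings.
AUTHORS = "Jane Doe"
DOCS_LINK = "https://example.org/my-tasks/docs"  # hypothetical
```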
@@ -0,0 +1,68 @@
+ from typing import Union
+
+
+ ValidType = Union[list, dict, str, int, float, bool, None]
+
+ MAX_RECURSION_LEVEL = 20
+
+
+ def deepdiff(
+     *,
+     old_object: ValidType,
+     new_object: ValidType,
+     path: str,
+     ignore_keys_order: bool,
+     recursion_level: int = 1,
+ ):
+     if type(old_object) is not type(new_object):
+         raise ValueError(
+             f"[{path}] Type difference:\n"
+             f"\tOld: {type(old_object)}\n\tNew: {type(new_object)}"
+         )
+
+     if type(old_object) not in [list, dict, str, int, float, bool, type(None)]:
+         raise ValueError(f"[{path}] Invalid type {type(old_object)}, exit.")
+
+     if recursion_level > MAX_RECURSION_LEVEL:
+         raise ValueError(f"Reached {MAX_RECURSION_LEVEL=}. Exit.")
+
+     if type(old_object) is dict:
+         old_keys = list(old_object.keys())
+         new_keys = list(new_object.keys())
+         if ignore_keys_order:
+             old_keys = sorted(old_keys)
+             new_keys = sorted(new_keys)
+         if old_keys != new_keys:
+             raise ValueError(
+                 f"[{path}] Dictionaries have different keys:\n"
+                 f"\tOld: {old_keys}\n\tNew: {new_keys}"
+             )
+
+         for key, value_a in old_object.items():
+             deepdiff(
+                 old_object=value_a,
+                 new_object=new_object[key],
+                 path=f"{path}['{key}']",
+                 ignore_keys_order=ignore_keys_order,
+                 recursion_level=recursion_level + 1,
+             )
+     elif type(old_object) is list:
+         if len(old_object) != len(new_object):
+             raise ValueError(
+                 f"[{path}] Lists have different lengths:\n"
+                 f"\tOld: {len(old_object)}\n\tNew: {len(new_object)}"
+             )
+         for ind, item_a in enumerate(old_object):
+             deepdiff(
+                 old_object=item_a,
+                 new_object=new_object[ind],
+                 path=f"{path}[{ind}]",
+                 ignore_keys_order=ignore_keys_order,
+                 recursion_level=recursion_level + 1,
+             )
+     else:
+         if old_object != new_object:
+             raise ValueError(
+                 f"[{path}] Values are different:\n"
+                 f"\tOld: '{old_object}'\n\tNew: '{new_object}'"
+             )
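
A short, self-contained example of the failure mode: `deepdiff` raises `ValueError` at the first difference it finds, with a bracketed path that pinpoints the location:

```python
from fractal_task_tools._deepdiff import deepdiff

old = {"task_list": [{"name": "A", "meta": {"cpus": 1}}]}
new = {"task_list": [{"name": "A", "meta": {"cpus": 2}}]}

deepdiff(
    old_object=old,
    new_object=new,
    path="manifest",
    ignore_keys_order=False,
)
# ValueError: [manifest['task_list'][0]['meta']['cpus']] Values are different:
#     Old: '1'
#     New: '2'
```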