fractal-task-tools 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,252 @@
1
+ import ast
2
+ import logging
3
+ import os
4
+ from importlib import import_module
5
+ from pathlib import Path
6
+ from typing import Optional
7
+
8
+ from docstring_parser import parse as docparse
9
+
10
+
11
+ def _sanitize_description(string: str) -> str:
12
+ """
13
+ Sanitize a description string.
14
+
15
+ This is a provisional helper function that replaces newlines with spaces
16
+ and reduces multiple contiguous whitespace characters to a single one.
17
+ Future iterations of the docstrings format/parsing may render this function
18
+ not-needed or obsolete.
19
+
20
+ Args:
21
+ string: TBD
22
+ """
23
+ # Replace newline with space
24
+ new_string = string.replace("\n", " ")
25
+ # Replace N-whitespace characters with a single one
26
+ while " " in new_string:
27
+ new_string = new_string.replace(" ", " ")
28
+ return new_string
29
+
30
+
31
+ def _get_function_docstring(
32
+ *,
33
+ package_name: Optional[str],
34
+ module_path: str,
35
+ function_name: str,
36
+ verbose: bool = False,
37
+ ) -> str:
38
+ """
39
+ Extract docstring from a function.
40
+
41
+
42
+ Args:
43
+ package_name: Example `fractal_tasks_core`.
44
+ module_path:
45
+ This must be an absolute path like `/some/module.py` (if
46
+ `package_name` is `None`) or a relative path like `something.py`
47
+ (if `package_name` is not `None`).
48
+ function_name: Example `create_ome_zarr`.
49
+ """
50
+
51
+ if not module_path.endswith(".py"):
52
+ raise ValueError(f"Module {module_path} must end with '.py'")
53
+
54
+ # Get the function ast.FunctionDef object
55
+ if package_name is not None:
56
+ if os.path.isabs(module_path):
57
+ raise ValueError(
58
+ "Error in _get_function_docstring: `package_name` is not "
59
+ "None but `module_path` is absolute."
60
+ )
61
+ package_path = Path(import_module(package_name).__file__).parent
62
+ module_path = package_path / module_path
63
+ else:
64
+ if not os.path.isabs(module_path):
65
+ raise ValueError(
66
+ "Error in _get_function_docstring: `package_name` is None "
67
+ "but `module_path` is not absolute."
68
+ )
69
+ module_path = Path(module_path)
70
+
71
+ if verbose:
72
+ logging.info(f"[_get_function_docstring] {function_name=}")
73
+ logging.info(f"[_get_function_docstring] {module_path=}")
74
+
75
+ tree = ast.parse(module_path.read_text())
76
+ _function = next(
77
+ f
78
+ for f in ast.walk(tree)
79
+ if (isinstance(f, ast.FunctionDef) and f.name == function_name)
80
+ )
81
+
82
+ # Extract docstring from ast.FunctionDef
83
+ return ast.get_docstring(_function)
84
+
85
+
86
def _get_function_args_descriptions(
    *,
    package_name: Optional[str],
    module_path: str,
    function_name: str,
    verbose: bool = False,
) -> dict[str, str]:
    """
    Extract argument descriptions from a function.

    Args:
        package_name: Example `fractal_tasks_core`.
        module_path:
            This must be an absolute path like `/some/module.py` (if
            `package_name` is `None`) or a relative path like `something.py`
            (if `package_name` is not `None`).
        function_name: Example `create_ome_zarr`.
        verbose: If `True`, log the extracted docstring.
    """

    # Extract the raw docstring of the target function
    docstring = _get_function_docstring(
        package_name=package_name,
        module_path=module_path,
        function_name=function_name,
        verbose=verbose,
    )
    if verbose:
        logging.info(f"[_get_function_args_descriptions] {docstring}")

    # Parse the docstring and map each parameter name onto its sanitized
    # description
    parsed = docparse(docstring)
    descriptions = {}
    for docstring_param in parsed.params:
        descriptions[docstring_param.arg_name] = _sanitize_description(
            docstring_param.description
        )
    logging.info(f"[_get_function_args_descriptions] END ({function_name=})")
    return descriptions
123
+
124
+
125
def _get_class_attrs_descriptions_from_file(
    *,
    module_path: Path,
    class_name: str,
) -> dict[str, str]:
    """
    Extract class-attribute descriptions from a Python script.

    Args:
        module_path: Example `/something/my_class.py`.
        class_name: Example `OmeroChannel`.

    Returns:
        A mapping from attribute names to their sanitized descriptions, with
        `"Missing description"` as the fallback for undocumented attributes.

    Raises:
        RuntimeError: If `class_name` is not found in `module_path`.
    """
    tree = ast.parse(module_path.read_text())
    try:
        _class = next(
            c
            for c in ast.walk(tree)
            if (isinstance(c, ast.ClassDef) and c.name == class_name)
        )
    except StopIteration:
        # Suppress the unhelpful StopIteration context in the traceback
        raise RuntimeError(
            f"Cannot find {class_name=} in {module_path}."
        ) from None
    # `ast.get_docstring` returns None for an undocumented class; fall back
    # to an empty string so that `docparse` does not fail.
    docstring = ast.get_docstring(_class) or ""
    parsed_docstring = docparse(docstring)
    descriptions = {
        x.arg_name: _sanitize_description(x.description)
        if x.description
        else "Missing description"
        for x in parsed_docstring.params
    }
    return descriptions
155
+
156
+
157
def _get_class_attrs_descriptions(
    package_name: str, module_relative_path: str, class_name: str
) -> dict[str, str]:
    """
    Extract class-attribute descriptions from an imported module.

    Args:
        package_name: Example `fractal_tasks_core`.
        module_relative_path: Example `lib_channels.py`.
        class_name: Example `OmeroChannel`.
    """

    if not module_relative_path.endswith(".py"):
        raise ValueError(f"Module {module_relative_path} must end with '.py'")

    # Resolve the module file relative to the installed package, then
    # delegate the parsing to the file-based helper
    package_root = Path(import_module(package_name).__file__).parent
    descriptions = _get_class_attrs_descriptions_from_file(
        module_path=package_root / module_relative_path,
        class_name=class_name,
    )
    logging.info(f"[_get_class_attrs_descriptions] END ({class_name=})")
    return descriptions
181
+
182
+
183
+ def _insert_function_args_descriptions(
184
+ *, schema: dict, descriptions: dict, verbose: bool = False
185
+ ):
186
+ """
187
+ Merge the descriptions obtained via `_get_args_descriptions` into the
188
+ properties of an existing JSON Schema.
189
+
190
+ Args:
191
+ schema: TBD
192
+ descriptions: TBD
193
+ """
194
+ new_schema = schema.copy()
195
+ new_properties = schema["properties"].copy()
196
+ for key, value in schema["properties"].items():
197
+ if "description" in value:
198
+ raise ValueError("Property already has description")
199
+ else:
200
+ if key in descriptions:
201
+ value["description"] = descriptions[key]
202
+ else:
203
+ value["description"] = "Missing description"
204
+ new_properties[key] = value
205
+ if verbose:
206
+ logging.info(
207
+ "[_insert_function_args_descriptions] "
208
+ f"Add {key=}, {value=}"
209
+ )
210
+ new_schema["properties"] = new_properties
211
+ logging.info("[_insert_function_args_descriptions] END")
212
+ return new_schema
213
+
214
+
215
+ def _insert_class_attrs_descriptions(
216
+ *,
217
+ schema: dict,
218
+ class_name: str,
219
+ descriptions: dict,
220
+ definition_key: str,
221
+ ):
222
+ """
223
+ Merge the descriptions obtained via `_get_attributes_models_descriptions`
224
+ into the `class_name` definition, within an existing JSON Schema
225
+
226
+ Args:
227
+ schema: TBD
228
+ class_name: TBD
229
+ descriptions: TBD
230
+ definition_key: Either `"definitions"` (for Pydantic V1) or
231
+ `"$defs"` (for Pydantic V2)
232
+ """
233
+ new_schema = schema.copy()
234
+ if definition_key not in schema:
235
+ return new_schema
236
+ else:
237
+ new_definitions = schema[definition_key].copy()
238
+ # Loop over existing definitions
239
+ for name, definition in schema[definition_key].items():
240
+ if name == class_name:
241
+ for prop in definition["properties"]:
242
+ if "description" in new_definitions[name]["properties"][prop]:
243
+ raise ValueError(
244
+ f"Property {name}.{prop} already has description"
245
+ )
246
+ else:
247
+ new_definitions[name]["properties"][prop][
248
+ "description"
249
+ ] = descriptions[prop]
250
+ new_schema[definition_key] = new_definitions
251
+ logging.info("[_insert_class_attrs_descriptions] END")
252
+ return new_schema
@@ -0,0 +1,27 @@
1
+ import re
2
+
3
+
4
def normalize_package_name(pkg_name: str) -> str:
    """
    Implement both PyPa and custom package-name normalization.

    1. PyPa normalization: The name should be lowercased with all runs of the
       characters `.`, `-`, or `_` replaced with a single `-` character
       (https://packaging.python.org/en/latest/specifications/name-normalization).
    2. Custom normalization: Replace `-` with `_`, to obtain the
       imported-module name.

    Args:
        pkg_name: The non-normalized package name.

    Returns:
        The normalized package name.
    """
    # PyPa normalization (collapse separator runs, lowercase), then turn
    # dashes into underscores to match the importable module name
    pypa_name = re.sub(r"[-_.]+", "-", pkg_name).lower()
    return pypa_name.replace("-", "_")
@@ -0,0 +1,81 @@
1
+ """
2
+ Custom Pydantic v2 JSON Schema generation tools.
3
+
4
+ As of Pydantic V2, the JSON Schema representation of model attributes marked
5
+ as `Optional` changed, and the new behavior consists in marking the
6
+ corresponding properties as an `anyOf` of either a `null` or the actual type.
7
+ This is not always the required behavior, see e.g.
8
+ * https://github.com/pydantic/pydantic/issues/7161
9
+ * https://github.com/pydantic/pydantic/issues/8394
10
+
11
+ Here we list some alternative ways of reverting this change.
12
+ """
13
+ import logging
14
+
15
+ from pydantic.json_schema import GenerateJsonSchema
16
+ from pydantic.json_schema import JsonSchemaValue
17
+ from pydantic_core.core_schema import WithDefaultSchema
18
+
19
+ logger = logging.getLogger("CustomGenerateJsonSchema")
20
+
21
+
22
class CustomGenerateJsonSchema(GenerateJsonSchema):
    """
    JSON Schema generator that reverts the Pydantic-v2 representation of
    `Optional` attributes (no `anyOf` with a null branch, no `default: null`).
    """

    def get_flattened_anyof(
        self, schemas: list[JsonSchemaValue]
    ) -> JsonSchemaValue:
        # Drop a bare `{"type": "null"}` member (if any) before flattening
        null_schema = {"type": "null"}
        if null_schema in schemas:
            logger.warning(
                "Drop `null_schema` before calling `get_flattened_anyof`"
            )
            schemas.remove(null_schema)
        return super().get_flattened_anyof(schemas)

    def default_schema(self, schema: WithDefaultSchema) -> JsonSchemaValue:
        # Remove `default: null` entries from the generated schema
        json_schema = super().default_schema(schema)
        if "default" in json_schema and json_schema["default"] is None:
            logger.warning(f"Pop `None` default value from {json_schema=}")
            json_schema.pop("default")
        return json_schema
40
+
41
+
42
+ # class GenerateJsonSchemaA(GenerateJsonSchema):
43
+ # def nullable_schema(self, schema):
44
+ # null_schema = {"type": "null"}
45
+ # inner_json_schema = self.generate_inner(schema["schema"])
46
+ # if inner_json_schema == null_schema:
47
+ # return null_schema
48
+ # else:
49
+ # logging.info("A: Skip calling `get_flattened_anyof` method")
50
+ # return inner_json_schema
51
+
52
+
53
+ # class GenerateJsonSchemaB(GenerateJsonSchemaA):
54
+ # def default_schema(self, schema: WithDefaultSchema) -> JsonSchemaValue:
55
+ # original_json_schema = super().default_schema(schema)
56
+ # new_json_schema = deepcopy(original_json_schema)
57
+ # default = new_json_schema.get("default", None)
58
+ # if default is None:
59
+ # logging.info("B: Pop None default")
60
+ # new_json_schema.pop("default")
61
+ # return new_json_schema
62
+
63
+
64
+ # class GenerateJsonSchemaC(GenerateJsonSchema):
65
+ # def get_flattened_anyof(
66
+ # self, schemas: list[JsonSchemaValue]
67
+ # ) -> JsonSchemaValue:
68
+
69
+ # original_json_schema_value = super().get_flattened_anyof(schemas)
70
+ # members = original_json_schema_value.get("anyOf")
71
+ # logging.info("C", original_json_schema_value)
72
+ # if (
73
+ # members is not None
74
+ # and len(members) == 2
75
+ # and {"type": "null"} in members
76
+ # ):
77
+ # new_json_schema_value = {"type": [t["type"] for t in members]}
78
+ # logging.info("C", new_json_schema_value)
79
+ # return new_json_schema_value
80
+ # else:
81
+ # return original_json_schema_value
@@ -0,0 +1,143 @@
1
+ import inspect
2
+ import logging
3
+ from importlib import import_module
4
+ from inspect import Parameter
5
+ from inspect import Signature
6
+ from inspect import signature
7
+ from pathlib import Path
8
+ from typing import Any
9
+
10
+ from ._union_types import is_annotated_union
11
+ from ._union_types import is_tagged
12
+ from ._union_types import is_union
13
+
14
+
15
# The following variables are copied from `pydantic.v1.decorator`
# (for pydantic v2.11.10)
ALT_V_ARGS = "v__args"
ALT_V_KWARGS = "v__kwargs"
V_DUPLICATE_KWARGS = "v__duplicate_kwargs"
V_POSITIONAL_ONLY_NAME = "v__positional_only"

# Parameter names that a task function must never use: plain `args`/`kwargs`
# plus the internal names reserved by `pydantic.v1.decorator` (above), which
# would clash with pydantic's validated-call machinery.
FORBIDDEN_PARAM_NAMES = (
    "args",
    "kwargs",
    V_POSITIONAL_ONLY_NAME,
    V_DUPLICATE_KWARGS,
    ALT_V_ARGS,
    ALT_V_KWARGS,
)
30
+
31
+
32
+ def _extract_function(
33
+ module_relative_path: str,
34
+ function_name: str,
35
+ package_name: str,
36
+ verbose: bool = False,
37
+ ) -> callable:
38
+ """
39
+ Extract function from a module with the same name.
40
+
41
+ Args:
42
+ package_name: Example `fractal_tasks_core`.
43
+ module_relative_path: Example `tasks/create_ome_zarr.py`.
44
+ function_name: Example `create_ome_zarr`.
45
+ verbose:
46
+ """
47
+ if not module_relative_path.endswith(".py"):
48
+ raise ValueError(f"{module_relative_path=} must end with '.py'")
49
+ module_relative_path_no_py = str(
50
+ Path(module_relative_path).with_suffix("")
51
+ )
52
+ module_relative_path_dots = module_relative_path_no_py.replace("/", ".")
53
+ if verbose:
54
+ logging.info(
55
+ f"Now calling `import_module` for "
56
+ f"{package_name}.{module_relative_path_dots}"
57
+ )
58
+ imported_module = import_module(
59
+ f"{package_name}.{module_relative_path_dots}"
60
+ )
61
+ if verbose:
62
+ logging.info(
63
+ f"Now getting attribute {function_name} from "
64
+ f"imported module {imported_module}."
65
+ )
66
+ task_function = getattr(imported_module, function_name)
67
+ return task_function
68
+
69
+
70
+ def _validate_plain_union(
71
+ *,
72
+ param: Parameter,
73
+ _type: Any,
74
+ ) -> None:
75
+ """
76
+ Fail for known cases of invalid plain-union types.
77
+
78
+ A plain union annotation is (by construction) one for which
79
+ `is_union(_type) = True`. The only supported forms of plain unions
80
+ are `X | None` or `X | None = None` (or equivalent forms).
81
+
82
+ Note that `Optional[X]` is equivalent to `X | None` and thus it also gets
83
+ validated through this function.
84
+
85
+ Args:
86
+ param: The full `inspect.Parameter` object.
87
+ _type:
88
+ The type annotation to review. Note that this may be equal to
89
+ `param.annotation` or to `param.annotation.__origin__` (when the
90
+ original `param.annotation` is an `Annotated` union).
91
+ """
92
+ args = _type.__args__
93
+ if len(args) != 2:
94
+ raise ValueError(
95
+ "Only unions of two elements are supported, but parameter "
96
+ f"'{param.name}' has type hint '{_type}'."
97
+ )
98
+ elif not any(arg is type(None) for arg in args):
99
+ raise ValueError(
100
+ "One union element must be None, but parameter "
101
+ f"'{param.name}' has type hint '{_type}'."
102
+ )
103
+ elif (param.default is not None) and (param.default != inspect._empty):
104
+ raise ValueError(
105
+ "Non-None default not supported, but parameter "
106
+ f"'{param.name}' has type hint '{_type}' "
107
+ f"and default {param.default}."
108
+ )
109
+
110
+
111
def _validate_function_signature(function: callable) -> Signature:
    """
    Validate the function signature of a task.

    Implement a set of checks for type hints that do not play well with the
    creation of JSON Schema, see issue 399 in `fractal-tasks-core` and issue
    65 in `fractal-task-tools`.

    Args:
        function: A callable function.

    Returns:
        The validated `inspect.Signature` of `function`.

    Raises:
        ValueError: If a parameter has a forbidden name or an unsupported
            union type hint.
    """
    sig = signature(function)
    for param in sig.parameters.values():
        # Check that name is not forbidden
        if param.name in FORBIDDEN_PARAM_NAMES:
            # FIX: the second string was not an f-string, so the message
            # used to contain the literal text '{{param.name}}'
            raise ValueError(
                f"Function {function} has argument with forbidden "
                f"name '{param.name}'"
            )
        # Validate plain unions or non-tagged annotated unions
        if is_union(param.annotation):
            _validate_plain_union(
                _type=param.annotation,
                param=param,
            )
        elif is_annotated_union(param.annotation):
            if not is_tagged(param.annotation):
                _validate_plain_union(
                    _type=param.annotation.__origin__,
                    param=param,
                )
    logging.info("[_validate_function_signature] END")
    return sig
@@ -0,0 +1,75 @@
1
+ import logging
2
+ from typing import Any
3
+ from typing import Literal
4
+
5
+
6
# Arguments that each (task_type, executable_kind) combination must expose
# in its JSON Schema properties (keys match the pairs checked in
# `validate_arguments`)
REQUIRED_ARGUMENTS: dict[tuple[str, str], set[str]] = {
    ("non_parallel", "non_parallel"): {"zarr_urls", "zarr_dir"},
    ("compound", "non_parallel"): {"zarr_urls", "zarr_dir"},
    ("parallel", "parallel"): {"zarr_url"},
    ("compound", "parallel"): {"zarr_url", "init_args"},
    ("converter_non_parallel", "non_parallel"): {"zarr_dir"},
    ("converter_compound", "non_parallel"): {"zarr_dir"},
    ("converter_compound", "parallel"): {"zarr_url", "init_args"},
}
# Arguments that each (task_type, executable_kind) combination must NOT
# expose in its JSON Schema properties
FORBIDDEN_ARGUMENTS: dict[tuple[str, str], set[str]] = {
    ("non_parallel", "non_parallel"): {"zarr_url"},
    ("compound", "non_parallel"): {"zarr_url"},
    ("parallel", "parallel"): {"zarr_urls", "zarr_dir"},
    ("compound", "parallel"): {"zarr_urls", "zarr_dir"},
    ("converter_non_parallel", "non_parallel"): {"zarr_url", "zarr_urls"},
    ("converter_compound", "non_parallel"): {"zarr_url", "zarr_urls"},
    ("converter_compound", "parallel"): {"zarr_urls", "zarr_dir"},
}
24
+
25
+
26
def validate_arguments(
    *,
    task_type: Literal["parallel", "non_parallel", "compound"],
    executable_kind: Literal["parallel", "non_parallel"],
    schema: dict[str, Any],
) -> None:
    """
    Validate schema arguments against required/forbidden ones.

    Arguments:
        task_type: The Fractal task type.
        executable_kind: The `parallel`/`non_parallel` part of the task.
        schema: The JSON Schema of the task arguments.
    """

    key = (task_type, executable_kind)
    if key not in REQUIRED_ARGUMENTS or key not in FORBIDDEN_ARGUMENTS:
        logging.error(f"Invalid {task_type=}, {executable_kind=}.")
        raise ValueError(f"Invalid {task_type=}, {executable_kind=}.")

    required_args = REQUIRED_ARGUMENTS[key]
    forbidden_args = FORBIDDEN_ARGUMENTS[key]

    schema_properties = set(schema["properties"].keys())

    logging.info(
        f"[validate_arguments] Task has arguments: {schema_properties}"
    )
    logging.info(f"[validate_arguments] Required arguments: {required_args}")
    logging.info(f"[validate_arguments] Forbidden arguments: {forbidden_args}")

    # All required arguments must be present
    missing_required_arguments = {
        arg for arg in required_args if arg not in schema_properties
    }
    if missing_required_arguments:
        error_msg = (
            "[validate_arguments] Required arguments "
            f"{missing_required_arguments} are missing."
        )
        logging.error(error_msg)
        raise ValueError(error_msg)

    # All forbidden arguments must be absent
    present_forbidden_args = forbidden_args.intersection(schema_properties)
    if present_forbidden_args:
        error_msg = (
            "[validate_arguments] Forbidden arguments "
            f"{present_forbidden_args} are present."
        )
        logging.error(error_msg)
        raise ValueError(error_msg)
@@ -0,0 +1,109 @@
1
+ import logging
2
+ from pathlib import Path
3
+ from typing import Optional
4
+
5
+ from docstring_parser import parse as docparse
6
+
7
+ from ._descriptions import _get_function_docstring
8
+
9
+
10
def _get_function_description(
    package_name: str, module_path: str, function_name: str
) -> str:
    """
    Extract function description from its docstring.

    Args:
        package_name: Example `fractal_tasks_core`.
        module_path: Example `tasks/create_ome_zarr.py`.
        function_name: Example `create_ome_zarr`.
    """
    # Extract and parse the docstring of the target function
    docstring = _get_function_docstring(
        package_name=package_name,
        module_path=module_path,
        function_name=function_name,
    )
    parsed_docstring = docparse(docstring)
    # Collect short/long descriptions (when present)
    items = [
        part
        for part in (
            parsed_docstring.short_description,
            parsed_docstring.long_description,
        )
        if part
    ]
    if not items:
        return ""
    # Keep the blank line between short and long descriptions, if any
    separator = (
        "\n\n" if parsed_docstring.blank_after_short_description else "\n"
    )
    return separator.join(items)
44
+
45
+
46
def create_docs_info(
    *,
    executable_non_parallel: Optional[str] = None,
    executable_parallel: Optional[str] = None,
    package: str,
) -> str:
    """
    Return task description based on function docstring.
    """
    logging.info("[create_docs_info] START")
    sections: list[str] = []
    for executable in (executable_non_parallel, executable_parallel):
        if executable is None:
            continue
        # Extract the function name.
        # Note: this could be made more general, but for the moment we assume
        # that the function has the same name as the module)
        function_name = Path(executable).with_suffix("").name
        logging.info(f"[create_docs_info] {function_name=}")
        # Get function description
        description = _get_function_description(
            package_name=package,
            module_path=executable,
            function_name=function_name,
        )
        sections.append(f"## {function_name}\n{description}\n")
    docs_info = "".join(sections)
    logging.info("[create_docs_info] END")
    return docs_info
75
+
76
+
77
def read_docs_info_from_file(
    *,
    docs_info: str,
    task_list_path: str,
) -> str:
    """
    Return task description based on the content of a file.

    An example of valid argument is
    ```
    docs_info = "file:relative/path/info.md"
    ```
    where the path is relative to the folder where `task_list.py` is.

    Args:
        docs_info: A `file:`-prefixed relative path to a docs file.
        task_list_path: Path to the `task_list.py` module; the docs path is
            resolved relative to its parent folder.

    Returns:
        The content of the referenced docs file.

    Raises:
        ValueError: If `docs_info` has no `file:` prefix or points to an
            absolute path.
    """
    logging.info("[read_docs_info_from_file] START")

    # Preliminary checks
    prefix = "file:"
    if not docs_info.startswith(prefix):
        raise ValueError(f"Invalid docs_info='{docs_info}'.")
    relative_path = Path(docs_info.removeprefix(prefix))
    if relative_path.is_absolute():
        raise ValueError(
            f"Invalid docs_info='{docs_info}' (path must be relative)."
        )

    base_path = Path(task_list_path).parent
    docs_path = (base_path / relative_path).as_posix()
    logging.info(f"[read_docs_info_from_file] Reading docs from {docs_path}")
    # Use an explicit encoding, so that behavior does not depend on the
    # platform's locale default
    with open(docs_path, "r", encoding="utf-8") as f:
        docs_info = f.read()
    logging.info("[read_docs_info_from_file] END")

    return docs_info