gammasimtools 0.5.1__py3-none-any.whl → 0.6.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in the supported public registries, and is provided for informational purposes only.
Files changed (78)
  1. {gammasimtools-0.5.1.dist-info → gammasimtools-0.6.1.dist-info}/METADATA +80 -28
  2. gammasimtools-0.6.1.dist-info/RECORD +91 -0
  3. {gammasimtools-0.5.1.dist-info → gammasimtools-0.6.1.dist-info}/WHEEL +1 -1
  4. {gammasimtools-0.5.1.dist-info → gammasimtools-0.6.1.dist-info}/entry_points.txt +4 -2
  5. simtools/_version.py +14 -2
  6. simtools/applications/add_file_to_db.py +2 -1
  7. simtools/applications/compare_cumulative_psf.py +10 -15
  8. simtools/applications/db_development_tools/add_new_parameter_to_db.py +12 -6
  9. simtools/applications/derive_mirror_rnda.py +95 -71
  10. simtools/applications/generate_corsika_histograms.py +216 -131
  11. simtools/applications/generate_default_metadata.py +110 -0
  12. simtools/applications/generate_simtel_array_histograms.py +192 -0
  13. simtools/applications/get_file_from_db.py +1 -1
  14. simtools/applications/get_parameter.py +3 -3
  15. simtools/applications/make_regular_arrays.py +89 -93
  16. simtools/applications/{plot_layout_array.py → plot_array_layout.py} +15 -14
  17. simtools/applications/print_array_elements.py +81 -34
  18. simtools/applications/produce_array_config.py +2 -2
  19. simtools/applications/production.py +39 -5
  20. simtools/applications/sim_showers_for_trigger_rates.py +26 -30
  21. simtools/applications/simulate_prod.py +49 -107
  22. simtools/applications/submit_data_from_external.py +8 -10
  23. simtools/applications/tune_psf.py +16 -18
  24. simtools/applications/validate_camera_efficiency.py +63 -9
  25. simtools/applications/validate_camera_fov.py +9 -13
  26. simtools/applications/validate_file_using_schema.py +127 -0
  27. simtools/applications/validate_optics.py +13 -15
  28. simtools/camera_efficiency.py +73 -80
  29. simtools/configuration/commandline_parser.py +52 -22
  30. simtools/configuration/configurator.py +98 -33
  31. simtools/constants.py +9 -0
  32. simtools/corsika/corsika_config.py +28 -22
  33. simtools/corsika/corsika_default_config.py +282 -0
  34. simtools/corsika/corsika_histograms.py +328 -282
  35. simtools/corsika/corsika_histograms_visualize.py +162 -163
  36. simtools/corsika/corsika_runner.py +8 -4
  37. simtools/corsika_simtel/corsika_simtel_runner.py +18 -23
  38. simtools/data_model/data_reader.py +129 -0
  39. simtools/data_model/metadata_collector.py +346 -118
  40. simtools/data_model/metadata_model.py +123 -218
  41. simtools/data_model/model_data_writer.py +79 -22
  42. simtools/data_model/validate_data.py +96 -46
  43. simtools/db_handler.py +67 -42
  44. simtools/io_operations/__init__.py +0 -0
  45. simtools/io_operations/hdf5_handler.py +112 -0
  46. simtools/{io_handler.py → io_operations/io_handler.py} +51 -22
  47. simtools/job_execution/job_manager.py +1 -1
  48. simtools/layout/{layout_array.py → array_layout.py} +168 -199
  49. simtools/layout/geo_coordinates.py +196 -0
  50. simtools/layout/telescope_position.py +12 -12
  51. simtools/model/array_model.py +16 -14
  52. simtools/model/camera.py +5 -8
  53. simtools/model/mirrors.py +136 -73
  54. simtools/model/model_utils.py +1 -69
  55. simtools/model/telescope_model.py +32 -25
  56. simtools/psf_analysis.py +26 -19
  57. simtools/ray_tracing.py +54 -26
  58. simtools/schemas/data.metaschema.yml +400 -0
  59. simtools/schemas/metadata.metaschema.yml +566 -0
  60. simtools/simtel/simtel_config_writer.py +14 -5
  61. simtools/simtel/simtel_histograms.py +266 -83
  62. simtools/simtel/simtel_runner.py +8 -7
  63. simtools/simtel/simtel_runner_array.py +7 -8
  64. simtools/simtel/simtel_runner_camera_efficiency.py +48 -2
  65. simtools/simtel/simtel_runner_ray_tracing.py +61 -25
  66. simtools/simulator.py +43 -50
  67. simtools/utils/general.py +232 -286
  68. simtools/utils/geometry.py +163 -0
  69. simtools/utils/names.py +294 -142
  70. simtools/visualization/legend_handlers.py +115 -9
  71. simtools/visualization/visualize.py +13 -13
  72. gammasimtools-0.5.1.dist-info/RECORD +0 -83
  73. simtools/applications/plot_simtel_histograms.py +0 -120
  74. simtools/applications/validate_schema_files.py +0 -135
  75. simtools/corsika/corsika_output_visualize.py +0 -345
  76. simtools/data_model/validate_schema.py +0 -285
  77. {gammasimtools-0.5.1.dist-info → gammasimtools-0.6.1.dist-info}/LICENSE +0 -0
  78. {gammasimtools-0.5.1.dist-info → gammasimtools-0.6.1.dist-info}/top_level.txt +0 -0
simtools/data_model/metadata_model.py
@@ -1,281 +1,186 @@
 """
 Definition of metadata model for input to and output of simtools.
-Follows CTA top-level data model definition.
-Metadata is broken into hierarchical categories in this module.
-
-Metadata definitions require the three following fields to be defined:
-
-* type: data type
-* required: boolean if this field is required as input(!)
-* default: default value
-
-These definitions are used for:
+Follows CTAO top-level data model definition.
 
 * data products submitted to SimPipe ('input')
 * data products generated by SimPipe ('output')
 
 """
 
+import logging
+from importlib.resources import files
 
-class InvalidSchemaList(Exception):
-    """
-    Exception raised for requests of unknown schema lists
-    """
-
-
-def _category_reference():
-    """
-    Metadata field REFERENCE.
-
-    Refers to the CTA Top-Level Data Model (document CTA-SPE-OSO-000000-0001.)
-
-    """
-    return {"VERSION": {"type": "str", "required": True, "default": "1.0.0"}}
-
-
-def _category_contact():
-    """
-    Metadata field CONTACT.
+import jsonschema
 
-    Organisation and person submitting or processing these data.
+import simtools.constants
+import simtools.utils.general as gen
 
-    """
-    return {
-        "ORGANIZATION": {"type": "str", "required": True, "default": None},
-        "NAME": {"type": "str", "required": True, "default": None},
-        "EMAIL": {"type": "email", "required": True, "default": None},
-    }
+_logger = logging.getLogger(__name__)
 
 
-def _category_product():
+def validate_schema(data, schema_file):
     """
-    Metadata field PRODUCT.
-
-    Describes the data product, especially its validity and associations.
-
-    """
-    return {
-        "DESCRIPTION": {"type": "str", "required": True, "default": None},
-        "CREATION_TIME": {"type": "datetime", "required": True, "default": None},
-        "ID": {"type": "str", "required": False, "default": None},
-        "DATA": {
-            "CATEGORY": {"type": "str", "required": False, "default": None},
-            "LEVEL": {"type": "str", "required": False, "default": None},
-            "TYPE": {"type": "str", "required": False, "default": None},
-            "MODEL": {
-                "NAME": {"type": "str", "required": False, "default": None},
-                "VERSION": {"type": "str", "required": False, "default": None},
-                "URL": {"type": "str", "required": False, "default": None},
-                "TYPE": {"type": "str", "required": False, "default": None},
-                "SUBTYPE": {"type": "str", "required": False, "default": None},
-            },
-        },
-        "FORMAT": {"type": "str", "required": False, "default": "ecsv"},
-        "VALID": {
-            "START": {"type": "datetime", "required": False, "default": None},
-            "END": {"type": "datetime", "required": False, "default": None},
-        },
-        "ASSOCIATION": {"type": "instrumentlist", "required": True, "default": []},
-    }
-
-
-def _category_process():
-    """
-    Metadata field PROCESS.
-
-    Description of the process which generated these data.
+    Validate dictionary against schema.
 
-    """
-    return {
-        "TYPE": {"type": "str", "required": True, "default": None},
-        "SUBTYPE": {"type": "str", "required": False, "default": None},
-        "ID": {"type": "str", "required": True, "default": None},
-    }
+    Parameters
+    ----------
+    data
+        dictionary to be validated
+    schema_file (dict)
+        schema used for validation
 
+    Raises
+    ------
+    jsonschema.exceptions.ValidationError
+        if validation fails
 
-def _category_context():
     """
-    Metadata field CONTEXT.
 
-    Describes list of context documents.
-
-    """
-    return {"DOCUMENT": {"type": "documentlist", "required": False, "default": None}}
+    schema, schema_file = _load_schema(schema_file)
 
+    try:
+        jsonschema.validate(data, schema=schema)
+    except jsonschema.exceptions.ValidationError:
+        _logger.error(f"Failed using {schema}")
+        raise
+    _logger.debug(f"Successful validation of data using schema from {schema_file}")
 
-def _category_document():
-    """
-    Metadata field DOCUMENT.
 
+def get_default_metadata_dict(schema_file=None, observatory="CTA"):
     """
+    Returns metadata schema with default values.
+    Follows the CTA Top-Level Data Model.
 
-    return {
-        "TYPE": {"type": "str", "required": False, "default": None},
-        "ID": {"type": "str", "required": False, "default": None},
-        "LINK": {"type": "str", "required": False, "default": None},
-        "TITLE": {"type": "str", "required": False, "default": None},
-    }
-
+    Parameters
+    ----------
+    schema_file: str
+        Schema file (jsonschema format) used for validation
+    observatory: str
+        Observatory name
 
-def _category_instrument():
-    """
-    Metadata field INSTRUMENT.
+    Returns
+    -------
+    dict
+        Reference schema dictionary.
 
-    Describes the instrument used to obtain these data or the instrument these data is applied for.
 
     """
-    return {
-        "SITE": {"type": "str", "required": True, "default": None},
-        "CLASS": {"type": "str", "required": True, "default": None},
-        "TYPE": {"type": "str", "required": True, "default": None},
-        "SUBTYPE": {"type": "str", "required": False, "default": None},
-        "ID": {"type": "str", "required": True, "default": None},
-    }
 
+    schema, _ = _load_schema(schema_file)
+    return _fill_defaults(schema["definitions"], observatory)
 
-def _category_activity():
-    """
-    Metadata field ACTIVITY.
-
-    Describes the software used to process these data.
 
+def _load_schema(schema_file=None):
     """
-    return {
-        "NAME": {"type": "str", "required": True, "default": None},
-        "TYPE": {"type": "str", "required": True, "default": "software"},
-        "ID": {"type": "str", "required": True, "default": None},
-        "START": {"type": "datetime", "required": False, "default": None},
-        "SOFTWARE": {
-            "NAME": {"type": "str", "required": True, "default": None},
-            "VERSION": {"type": "str", "required": True, "default": None},
-        },
-    }
-
-
-def top_level_reference_schema():
-    """
-    Reference schema following the CTA Top-Level Data Model.
-
-    This metadata schema is used for simtools data products.
+    Load parameter schema from file from simpipe metadata schema.
 
     Returns
     -------
-    dict with reference schema
+    schema_file dict
+        Schema used for validation.
+    schema_file str
+        File name schema is loaded from. If schema_file is not given,
+        the default schema file name is returned.
 
+    Raises
+    ------
+    FileNotFoundError
+        if schema file is not found
 
     """
 
-    _ref_schema = {
-        "CTA": {
-            "REFERENCE": _category_reference(),
-            "CONTACT": _category_contact(),
-            "PRODUCT": _category_product(),
-            "INSTRUMENT": _category_instrument(),
-            "PROCESS": _category_process(),
-            "ACTIVITY": _category_activity(),
-            "CONTEXT": {
-                "SIM": {
-                    "ASSOCIATION": [
-                        _category_instrument(),
-                    ],
-                    "DOCUMENT": [
-                        _category_document(),
-                    ],
-                },
-            },
-        }
-    }
-    _ref_schema = _metadata_dict_with_defaults(_ref_schema)
-    return _remove_empty_lists(_ref_schema)
-
-
-def metadata_input_reference_schema():
-    """
-    Reference data model scheme for input metadata.
-    Describes metadata provided for input to simtools applications.
+    if schema_file is None:
+        schema_file = files("simtools").joinpath(simtools.constants.METADATA_JSON_SCHEMA)
 
-    Returns
-    -------
-    dict with input reference schema
-
-    """
+    schema = gen.collect_data_from_file_or_dict(file_name=schema_file, in_dict=None)
+    _logger.debug(f"Loading schema from {schema_file}")
 
-    return {
-        "REFERENCE": _category_reference(),
-        "CONTACT": _category_contact(),
-        "PRODUCT": _category_product(),
-        "INSTRUMENT": _category_instrument(),
-        "PROCESS": _category_process(),
-        "CONTEXT": _category_context(),
-    }
+    return schema, schema_file
 
 
-def metadata_input_reference_document_list(schema_list):
+def _resolve_references(yaml_data, observatory="CTA"):
     """
-    Reference model data for input metata data of type documentlist or instrumentlist
+    Resolve references in yaml data and expand the received dictionary accordingly.
 
     Parameters
     ----------
-    schema_list: str
-        List type to be returned (e.g., instrumentlist or documentlist)
+    yaml_data: dict
+        Dictionary with yaml data.
+    observatory: str
+        Observatory name
 
     Returns
     -------
-    dict with input reference schema for documentlist or instrumentlist
+    dict
+        Dictionary with resolved references.
 
     """
-    if schema_list.lower() == "instrumentlist":
-        return _category_instrument()
-    if schema_list.lower() == "documentlist":
-        return _category_document()
 
-    msg = f"Invalid schema list: {schema_list}"
-    raise InvalidSchemaList(msg)
+    def expand_ref(ref):
+        ref_path = ref.lstrip("#/")
+        parts = ref_path.split("/")
+        ref_data = yaml_data
+        for part in parts:
+            if part in ("definitions", observatory):
+                continue
+            ref_data = ref_data.get(part, {})
+        return ref_data
+
+    def _resolve_references_recursive(data):
+        if isinstance(data, dict):
+            if "$ref" in data:
+                ref = data["$ref"]
+                resolved_data = expand_ref(ref)
+                if isinstance(resolved_data, dict) and len(resolved_data) > 1:
+                    return _resolve_references_recursive(resolved_data)
+                return resolved_data
+            return {k: _resolve_references_recursive(v) for k, v in data.items()}
+        if isinstance(data, list):
+            return [_resolve_references_recursive(item) for item in data]
+        return data
+
+    return _resolve_references_recursive(yaml_data)
 
 
-def _metadata_dict_with_defaults(meta_dict):
+def _fill_defaults(schema, observatory="CTA"):
     """
-    Prepare dictionary with default values filled and removal of all type/required/default dicts.
+    Fill default values from json schema.
 
     Parameters
     ----------
-    meta_dict
-        Metadata dictionary
+    schema: dict
+        Schema describing the input data.
+    observatory: str
+        Observatory name
 
     Returns
     -------
     dict
-        Metadata dictionary with default values filled
-
-    """
-
-    for key, value in meta_dict.items():
-        if isinstance(value, dict):
-            if value.keys() >= {"type", "required", "default"}:
-                meta_dict[key] = value["default"]
-            else:
-                _metadata_dict_with_defaults(value)
-        elif isinstance(value, list):
-            for list_entry in value:
-                _metadata_dict_with_defaults(list_entry)
-        else:
-            msg = f"Invalid schema list with missing type, required, or default fields: {key}"
-            raise InvalidSchemaList(msg)
-    return meta_dict
-
-
-def _remove_empty_lists(meta_dict):
-    """
-    Remove entries of type list with length zero, as those are not used for output.
-
-    """
-    _entries_to_pop = []
-    for key, value in meta_dict.items():
-        if isinstance(value, dict):
-            _remove_empty_lists(value)
-        elif isinstance(value, list) and len(value) == 0:
-            _entries_to_pop.append(key)
-    for key in _entries_to_pop:
-        meta_dict.pop(key)
-
-    return meta_dict
+        Dictionary with default values.
+
+    """
+
+    defaults = {observatory: {}}
+
+    schema = _resolve_references(schema[observatory])
+
+    def _fill_defaults_recursive(subschema, current_dict):
+        try:
+            for prop, prop_schema in subschema["properties"].items():
+                if "default" in prop_schema:
+                    current_dict[prop] = prop_schema["default"]
+                elif "type" in prop_schema:
+                    if prop_schema["type"] == "object":
+                        current_dict[prop] = {}
+                        _fill_defaults_recursive(prop_schema, current_dict[prop])
+                    elif prop_schema["type"] == "array":
+                        current_dict[prop] = [{}]
+                        if "items" in prop_schema and isinstance(prop_schema["items"], dict):
+                            _fill_defaults_recursive(prop_schema["items"], current_dict[prop][0])
+        except KeyError:
+            msg = "Missing 'properties' key in schema."
+            _logger.error(msg)
+            raise
+
+    _fill_defaults_recursive(schema, defaults[observatory])
+    return defaults
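
The rewritten metadata_model module replaces the hard-coded category dictionaries with jsonschema-based validation and default filling. A minimal usage sketch based on the signatures in the hunk above; the metadata dictionary and the schema path are illustrative placeholders, not values taken from the package:

    from simtools.data_model import metadata_model

    # Placeholder metadata dictionary; real content follows the CTAO top-level data model.
    metadata = {"CTA": {"CONTACT": {"ORGANIZATION": "CTAO", "NAME": "A. Person", "EMAIL": "a.person@example.org"}}}

    # Validate against a schema file (placeholder path); raises
    # jsonschema.exceptions.ValidationError if the data do not match the schema.
    metadata_model.validate_schema(data=metadata, schema_file="simtools/schemas/metadata.metaschema.yml")

    # Metadata dictionary pre-filled with schema defaults; with schema_file=None the
    # packaged default schema is loaded (see _load_schema above).
    defaults = metadata_model.get_default_metadata_dict()
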
simtools/data_model/model_data_writer.py
@@ -5,7 +5,8 @@ import astropy
 import yaml
 
 import simtools.utils.general as gen
-from simtools import io_handler
+from simtools.data_model import validate_data
+from simtools.io_operations import io_handler
 
 __all__ = ["ModelDataWriter"]
 
@@ -16,66 +17,124 @@ class ModelDataWriter:
 
     Parameters
     ----------
+    product_data_file: str
+        Name of output file.
+    product_data_format: str
+        Format of output file.
     args_dict: Dictionary
         Dictionary with configuration parameters.
     """
 
-    def __init__(self, product_data_file=None, product_data_format=None):
+    def __init__(self, product_data_file=None, product_data_format=None, args_dict=None):
         """
         Initialize model data writer.
         """
 
         self._logger = logging.getLogger(__name__)
         self.io_handler = io_handler.IOHandler()
+        if args_dict is not None:
+            self.io_handler.set_paths(
+                output_path=args_dict.get("output_path", None),
+                use_plain_output_path=args_dict.get("use_plain_output_path", False),
+            )
         try:
-            self.product_data_file = self.io_handler.get_output_file(file_name=product_data_file)
+            self.product_data_file = self.io_handler.get_output_file(
+                file_name=product_data_file, dir_type="simtools-result"
+            )
         except TypeError:
             self.product_data_file = None
         self.product_data_format = self._astropy_data_format(product_data_format)
 
-    def write(self, metadata=None, product_data=None):
+    @staticmethod
+    def dump(
+        args_dict, output_file=None, metadata=None, product_data=None, validate_schema_file=None
+    ):
         """
-        Write model data and metadata
+        Write model data and metadata (as static method).
 
-        Parameters:
-        -----------
+        Parameters
+        ----------
+        args_dict: dict
+            Dictionary with configuration parameters (including output file name and path).
+        output_file: string or Path
+            Name of output file (args["output_file"] is used if this parameter is not set).
         metadata: dict
            Metadata to be written.
         product_data: astropy Table
            Model data to be written
+        validate_schema_file: str
+            Schema file used in validation of output data.
 
         """
 
-        self.write_metadata(metadata=metadata)
-        self.write_data(product_data=product_data)
+        writer = ModelDataWriter(
+            product_data_file=(
+                args_dict.get("output_file", None) if output_file is None else output_file
+            ),
+            product_data_format=args_dict.get("output_file_format", "ascii.ecsv"),
+            args_dict=args_dict,
+        )
+        if validate_schema_file is not None and not args_dict.get("skip_output_validation", True):
+            product_data = writer.validate_and_transform(
+                product_data=product_data,
+                validate_schema_file=validate_schema_file,
+            )
+        writer.write(metadata=metadata, product_data=product_data)
+
+    def validate_and_transform(self, product_data=None, validate_schema_file=None):
+        """
+        Validate product data using jsonschema given in metadata.
+        If necessary, transform product data to match schema.
+
+        Parameters
+        ----------
+        product_data: astropy Table
+            Model data to be validated
+        validate_schema_file: str
+            Schema file used in validation of output data.
 
-    def write_data(self, product_data):
         """
-        Write model data.
+
+        _validator = validate_data.DataValidator(
+            schema_file=validate_schema_file,
+            data_table=product_data,
+        )
+        return _validator.validate_and_transform()
+
+    def write(self, product_data=None, metadata=None):
+        """
+        Write model data and metadata
 
         Parameters
         ----------
         product_data: astropy Table
-            Model data to be written.
+            Model data to be written
+        metadata: dict
+            Metadata to be written.
 
         Raises
         ------
         FileNotFoundError
-            if data writing was not sucessfull.
+            if data writing was not successful.
 
         """
 
+        if product_data is None:
+            return
+
+        if metadata is not None:
+            product_data.meta.update(gen.change_dict_keys_case(metadata, False))
+
         try:
-            if product_data is not None:
-                self._logger.info(f"Writing data to {self.product_data_file}")
-                product_data.write(
-                    self.product_data_file, format=self.product_data_format, overwrite=True
-                )
+            self._logger.info(f"Writing data to {self.product_data_file}")
+            product_data.write(
+                self.product_data_file, format=self.product_data_format, overwrite=True
+            )
         except astropy.io.registry.base.IORegistryError:
             self._logger.error(f"Error writing model data to {self.product_data_file}.")
             raise
 
-    def write_metadata(self, metadata, yml_file=None, keys_lower_case=False):
+    def write_metadata_to_yml(self, metadata, yml_file=None, keys_lower_case=False):
         """
         Write model metadata file (yaml file format).
 
@@ -97,21 +156,19 @@ class ModelDataWriter:
         ------
         FileNotFoundError
             If yml_file not found.
-        AttributeError
-            If no metadata defined for writing.
         TypeError
             If yml_file is not defined.
         """
 
         try:
             yml_file = Path(yml_file or self.product_data_file).with_suffix(".metadata.yml")
-            self._logger.info(f"Writing metadata to {yml_file}")
             with open(yml_file, "w", encoding="UTF-8") as file:
                 yaml.safe_dump(
                     gen.change_dict_keys_case(metadata, keys_lower_case),
                     file,
                     sort_keys=False,
                 )
+            self._logger.info(f"Writing metadata to {yml_file}")
             return yml_file
         except FileNotFoundError:
             self._logger.error(f"Error writing model data to {yml_file}")