metadata_crawler-2510.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of metadata-crawler might be problematic.

Files changed (35)
  1. metadata_crawler/__init__.py +263 -0
  2. metadata_crawler/__main__.py +8 -0
  3. metadata_crawler/_version.py +1 -0
  4. metadata_crawler/api/__init__.py +1 -0
  5. metadata_crawler/api/cli.py +57 -0
  6. metadata_crawler/api/config.py +831 -0
  7. metadata_crawler/api/drs_config.toml +440 -0
  8. metadata_crawler/api/index.py +151 -0
  9. metadata_crawler/api/metadata_stores.py +755 -0
  10. metadata_crawler/api/mixin/__init__.py +7 -0
  11. metadata_crawler/api/mixin/lookup_mixin.py +112 -0
  12. metadata_crawler/api/mixin/lookup_tables.py +10010 -0
  13. metadata_crawler/api/mixin/path_mixin.py +46 -0
  14. metadata_crawler/api/mixin/template_mixin.py +145 -0
  15. metadata_crawler/api/storage_backend.py +277 -0
  16. metadata_crawler/backends/__init__.py +1 -0
  17. metadata_crawler/backends/intake.py +211 -0
  18. metadata_crawler/backends/posix.py +121 -0
  19. metadata_crawler/backends/s3.py +140 -0
  20. metadata_crawler/backends/swift.py +305 -0
  21. metadata_crawler/cli.py +547 -0
  22. metadata_crawler/data_collector.py +278 -0
  23. metadata_crawler/ingester/__init__.py +1 -0
  24. metadata_crawler/ingester/mongo.py +206 -0
  25. metadata_crawler/ingester/solr.py +282 -0
  26. metadata_crawler/logger.py +153 -0
  27. metadata_crawler/py.typed +0 -0
  28. metadata_crawler/run.py +419 -0
  29. metadata_crawler/utils/__init__.py +482 -0
  30. metadata_crawler/utils/cftime_utils.py +207 -0
  31. metadata_crawler-2510.1.0.dist-info/METADATA +401 -0
  32. metadata_crawler-2510.1.0.dist-info/RECORD +35 -0
  33. metadata_crawler-2510.1.0.dist-info/WHEEL +4 -0
  34. metadata_crawler-2510.1.0.dist-info/entry_points.txt +14 -0
  35. metadata_crawler-2510.1.0.dist-info/licenses/LICENSE +28 -0
@@ -0,0 +1,263 @@
+ """Metadata Crawler API high-level functions."""
+
+ import asyncio
+ from pathlib import Path
+ from typing import Any, Dict, List, Optional, Union
+
+ import tomlkit
+ import uvloop
+
+ from ._version import __version__
+ from .api.config import ConfigMerger, DRSConfig
+ from .api.metadata_stores import CatalogueBackendType, IndexName
+ from .data_collector import DataCollector
+ from .logger import logger
+ from .run import async_add, async_delete, async_index
+
+ asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
+
+ __all__ = [
+     "logger",
+     "__version__",
+     "DataCollector",
+     "index",
+     "add",
+     "delete",
+     "get_config",
+     "async_index",
+     "async_delete",
+     "async_add",
+ ]
+
+
+ def get_config(config: Optional[Union[Path, str]] = None) -> ConfigMerger:
+     """Get a drs-config file merged with the default config.
+
+     This function is useful for inspecting all possible configurations
+     and their default values.
+
+     Parameters
+     ^^^^^^^^^^
+
+     config:
+         Path to a user-defined config file that is going to be merged with
+         the default config.
+     """
+     _ = DRSConfig.load(config)
+     return ConfigMerger(config)
+
+
+ def index(
+     index_system: str,
+     *catalogue_files: Union[Path, str, List[str], List[Path]],
+     batch_size: int = 2500,
+     verbosity: int = 0,
+     log_suffix: Optional[str] = None,
+     **kwargs: Any,
+ ) -> None:
+     """Index metadata in the indexing system.
+
+     Parameters
+     ^^^^^^^^^^
+
+     index_system:
+         The index server where the metadata is indexed.
+     catalogue_files:
+         Path to the file(s) where the metadata was stored.
+     batch_size:
+         If the index system supports batch sizes, the size of the batches.
+     verbosity:
+         Set the verbosity level.
+     log_suffix:
+         Add a suffix to the log file output.
+
+     Other Parameters
+     ^^^^^^^^^^^^^^^^
+
+     **kwargs:
+         Keyword arguments passed on to the indexing system.
+
+     Examples
+     ^^^^^^^^
+
+     .. code-block:: python
+
+         index(
+             "solr",
+             "/tmp/catalog-1.yml",
+             "/tmp/catalog-2.yml",
+             batch_size=50,
+             server="localhost:8983",
+         )
+     """
+     uvloop.run(
+         async_index(
+             index_system,
+             *catalogue_files,
+             batch_size=batch_size,
+             verbosity=verbosity,
+             log_suffix=log_suffix,
+             **kwargs,
+         )
+     )
+
+
+ def delete(
+     index_system: str,
+     batch_size: int = 2500,
+     verbosity: int = 0,
+     log_suffix: Optional[str] = None,
+     **kwargs: Any,
+ ) -> None:
+     """Delete metadata from the indexing system.
+
+     Parameters
+     ^^^^^^^^^^
+
+     index_system:
+         The index server where the metadata is indexed.
+     batch_size:
+         If the index system supports batch sizes, the size of the batches.
+     verbosity:
+         Set the verbosity of the system.
+     log_suffix:
+         Add a suffix to the log file output.
+
+     Other Parameters
+     ^^^^^^^^^^^^^^^^
+
+     **kwargs:
+         Keyword arguments used to delete data from the index.
+
+     Examples
+     ^^^^^^^^
+
+     .. code-block:: python
+
+         delete(
+             "solr",
+             server="localhost:8983",
+             facets=[("project", "CMIP6"), ("institute", "MPI-M")],
+         )
+     """
+     uvloop.run(
+         async_delete(
+             index_system,
+             batch_size=batch_size,
+             verbosity=verbosity,
+             log_suffix=log_suffix,
+             **kwargs,
+         )
+     )
+
+
+ def add(
+     store: Optional[Union[str, Path]] = None,
+     config_file: Optional[
+         Union[Path, str, Dict[str, Any], tomlkit.TOMLDocument]
+     ] = None,
+     data_object: Optional[Union[str, List[str]]] = None,
+     data_set: Optional[Union[str, List[str]]] = None,
+     data_store_prefix: str = "metadata",
+     catalogue_backend: CatalogueBackendType = "jsonlines",
+     batch_size: int = 25_000,
+     comp_level: int = 4,
+     storage_options: Optional[Dict[str, Any]] = None,
+     shadow: Optional[Union[str, List[str]]] = None,
+     latest_version: str = IndexName().latest,
+     all_versions: str = IndexName().all,
+     n_procs: Optional[int] = None,
+     verbosity: int = 0,
+     log_suffix: Optional[str] = None,
+     password: bool = False,
+     fail_under: int = -1,
+     **kwargs: Any,
+ ) -> None:
+     """Harvest metadata from storage systems and add it to an intake catalogue.
+
+     Parameters
+     ^^^^^^^^^^
+
+     store:
+         Path to the intake catalogue.
+     config_file:
+         Path to the drs-config file / loaded configuration.
+     data_object:
+         Instead of defining datasets that are to be crawled you can crawl
+         data based on their directories. The directories must be root dirs
+         given in the drs-config file. By default all root dirs are crawled.
+     data_set:
+         Datasets that should be crawled. The datasets need to be defined
+         in the drs-config file. By default all datasets are crawled.
+         Names can contain wildcards such as ``xces-*``.
+     data_store_prefix:
+         Absolute or relative path to the intake catalogue source.
+     batch_size:
+         Batch size that is used to collect the metadata. This can affect
+         performance.
+     comp_level:
+         Compression level used to write the metadata to csv.gz.
+     storage_options:
+         Set additional storage options for adding metadata to the metadata
+         store.
+     shadow:
+         'Shadow' these storage options. This is useful for hiding secrets
+         in public data catalogues.
+     catalogue_backend:
+         Intake catalogue backend.
+     latest_version:
+         Name of the core holding 'latest' metadata.
+     all_versions:
+         Name of the core holding 'all' metadata versions.
+     password:
+         Display a password prompt and set the password before beginning.
+     n_procs:
+         Set the number of parallel processes for collecting.
+     verbosity:
+         Set the verbosity of the system.
+     log_suffix:
+         Add a suffix to the log file output.
+     fail_under:
+         Fail if fewer than this many of the discovered files could be
+         indexed.
+
+     Other Parameters
+     ^^^^^^^^^^^^^^^^
+
+     **kwargs:
+         Additional keyword arguments.
+
+     Examples
+     ^^^^^^^^
+
+     .. code-block:: python
+
+         add(
+             "my-data.yaml",
+             "~/data/drs-config.toml",
+             data_set=["cmip6", "cordex"],
+         )
+     """
+     uvloop.run(
+         async_add(
+             store=store,
+             config_file=config_file,
+             data_object=data_object,
+             data_set=data_set,
+             batch_size=batch_size,
+             comp_level=comp_level,
+             password=password,
+             catalogue_backend=catalogue_backend,
+             data_store_prefix=data_store_prefix,
+             shadow=shadow,
+             latest_version=latest_version,
+             all_versions=all_versions,
+             n_procs=n_procs,
+             storage_options=storage_options,
+             verbosity=verbosity,
+             log_suffix=log_suffix,
+             fail_under=fail_under,
+             **kwargs,
+         )
+     )
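
Taken together, the helpers above form a small harvest/index/delete pipeline. The following is a minimal usage sketch assembled from the docstring examples in this file; the config path, catalogue file, dataset names, and the ``server``/``facets`` keyword arguments are illustrative values taken from those examples, not verified defaults:

    from metadata_crawler import add, delete, get_config, index

    # Inspect all configuration values: the default config merged with a
    # (hypothetical) user config file.
    merged = get_config("~/data/drs-config.toml")

    # Harvest metadata into an intake catalogue ...
    add(
        "my-data.yaml",
        "~/data/drs-config.toml",
        data_set=["cmip6", "cordex"],
    )

    # ... index the harvested catalogue ...
    index("solr", "my-data.yaml", batch_size=50, server="localhost:8983")

    # ... and later delete entries matching the given facets.
    delete(
        "solr",
        server="localhost:8983",
        facets=[("project", "CMIP6"), ("institute", "MPI-M")],
    )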
@@ -0,0 +1,8 @@
+ """Call the command line interface of the metadata-crawler."""
+
+ import sys
+
+ from metadata_crawler.cli import cli
+
+ if __name__ == "__main__":
+     cli(sys.argv[1:])
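
Since ``__main__.py`` simply forwards ``sys.argv[1:]`` to ``cli``, the crawler can presumably also be invoked as ``python -m metadata_crawler`` (for example, ``python -m metadata_crawler --help``) in addition to any console scripts declared in ``entry_points.txt``.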
@@ -0,0 +1 @@
+ __version__ = "2510.1.0"
@@ -0,0 +1 @@
+ """Metadata-crawler API."""
@@ -0,0 +1,57 @@
+ """API for adding commands to the cli."""
+
+ from functools import wraps
+ from typing import Any, Callable, Dict, Tuple, Union
+
+ from pydantic import BaseModel, ConfigDict
+
+
+ class Parameter(BaseModel):
+     """CLI parameter model."""
+
+     model_config = ConfigDict(extra="allow")
+
+     args: Union[str, Tuple[str, ...]]
+     """Names for the ``argparse.Namespace``."""
+     help: str
+     """Help string that is going to be displayed."""
+
+
+ def cli_parameter(*args: str, **kwargs: Any) -> Dict[str, Any]:
+     """Construct the arguments for ``argparse.ArgumentParser().add_argument``.
+
+     Parameters
+     ^^^^^^^^^^
+     *args:
+         Any arguments passed to ``argparse.ArgumentParser().add_argument``
+     **kwargs:
+         Any keyword arguments passed to
+         ``argparse.ArgumentParser().add_argument``
+     """
+     return Parameter(args=args, **kwargs).model_dump()
+
+
+ def cli_function(
+     help: str = "",
+ ) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
+     """Wrap command line arguments around a method.
+
+     Those arguments represent the arguments you would normally use to create
+     an `argparse subcommand <https://docs.python.org/3/library/argparse.html>`_.
+
+     Parameters
+     ^^^^^^^^^^
+     help:
+         Help string for this sub command.
+     """
+
+     def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
+         setattr(func, "_cli_help", help or func.__doc__)
+
+         @wraps(func)
+         def wrapper(*args: Any, **kwargs: Any) -> Any:
+             return func(*args, **kwargs)
+
+         return wrapper
+
+     return decorator
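
A short sketch of how these two helpers appear meant to be combined; the subcommand below is hypothetical, and how ``metadata_crawler.cli`` actually consumes the ``_cli_help`` attribute and the parameter dicts is not part of this file:

    from metadata_crawler.api.cli import cli_function, cli_parameter

    @cli_function(help="Check a drs-config file.")
    def check(config: str) -> None:
        """Validate a configuration file."""
        print(f"checking {config}")

    # cli_parameter bundles add_argument-style arguments into a plain dict
    # that a CLI builder could unpack via parser.add_argument(*args, **rest).
    param = cli_parameter("-c", "--config", help="Path to the config file.")
    assert param["args"] == ("-c", "--config")

    # cli_function stores the help text on the decorated function.
    assert check._cli_help == "Check a drs-config file."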