ghga-transpiler 1.2.0__py3-none-any.whl → 2.0.0__py3-none-any.whl

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two package versions as they appear in their respective public registries.
@@ -1,158 +0,0 @@
- #!/usr/bin/env python3
-
- # Copyright 2021 - 2023 Universität Tübingen, DKFZ, EMBL, and Universität zu Köln
- # for the German Human Genome-Phenome Archive (GHGA)
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- """Generates a JSON schema from the service's Config class as well as a corresponding
- example config yaml (or check whether these files are up to date).
- """
-
- import importlib
- import json
- import subprocess
- import sys
- from difflib import unified_diff
- from pathlib import Path
- from typing import Any, Type
-
- import yaml
- from pydantic import BaseSettings
- from script_utils.cli import echo_failure, echo_success, run
-
- HERE = Path(__file__).parent.resolve()
- REPO_ROOT_DIR = HERE.parent
- DEV_CONFIG_YAML = REPO_ROOT_DIR / ".devcontainer" / ".dev_config.yaml"
- GET_PACKAGE_NAME_SCRIPT = HERE / "get_package_name.py"
- EXAMPLE_CONFIG_YAML = REPO_ROOT_DIR / "example_config.yaml"
- CONFIG_SCHEMA_JSON = REPO_ROOT_DIR / "config_schema.json"
-
-
- class ValidationError(RuntimeError):
-     """Raised when validation of config documentation fails."""
-
-
- def get_config_class() -> Type[BaseSettings]:
-     """
-     Dynamically imports and returns the Config class from the current service.
-     This makes the script service repo agnostic.
-     """
-     # get the name of the microservice package
-     with subprocess.Popen(
-         args=[GET_PACKAGE_NAME_SCRIPT],
-         stdout=subprocess.PIPE,
-         stderr=subprocess.DEVNULL,
-     ) as process:
-         assert (
-             process.wait() == 0 and process.stdout is not None
-         ), "Failed to get package name."
-         package_name = process.stdout.read().decode("utf-8").strip("\n")
-
-     # import the Config class from the microservice package:
-     config_module: Any = importlib.import_module(f"{package_name}.config")
-     config_class = config_module.Config
-
-     return config_class
-
-
- def get_dev_config():
-     """Get dev config object."""
-     config_class = get_config_class()
-     return config_class(config_yaml=DEV_CONFIG_YAML)
-
-
- def get_schema() -> str:
-     """Returns a JSON schema generated from a Config class."""
-
-     config = get_dev_config()
-     return config.schema_json(indent=2)
-
-
- def get_example() -> str:
-     """Returns an example config YAML."""
-
-     config = get_dev_config()
-     normalized_config_dict = json.loads(config.json())
-     return yaml.dump(normalized_config_dict) # pyright: ignore
-
-
- def update_docs():
-     """Update the example config and config schema files documenting the config
-     options."""
-
-     example = get_example()
-     with open(EXAMPLE_CONFIG_YAML, "w", encoding="utf-8") as example_file:
-         example_file.write(example)
-
-     schema = get_schema()
-     with open(CONFIG_SCHEMA_JSON, "w", encoding="utf-8") as schema_file:
-         schema_file.write(schema)
-
-
- def print_diff(expected: str, observed: str):
-     """Print differences between expected and observed files."""
-     echo_failure("Differences in Config YAML:")
-     for line in unified_diff(
-         expected.splitlines(keepends=True),
-         observed.splitlines(keepends=True),
-         fromfile="expected",
-         tofile="observed",
-     ):
-         print(" ", line.rstrip())
-
-
- def check_docs():
-     """Check whether the example config and config schema files documenting the config
-     options are up to date.
-
-     Raises:
-         ValidationError: if not up to date.
-     """
-
-     example_expected = get_example()
-     with open(EXAMPLE_CONFIG_YAML, "r", encoding="utf-8") as example_file:
-         example_observed = example_file.read()
-     if example_expected != example_observed:
-         print_diff(example_expected, example_observed)
-         raise ValidationError(
-             f"Example config YAML at '{EXAMPLE_CONFIG_YAML}' is not up to date."
-         )
-
-     schema_expected = get_schema()
-     with open(CONFIG_SCHEMA_JSON, "r", encoding="utf-8") as schema_file:
-         schema_observed = schema_file.read()
-     if schema_expected != schema_observed:
-         raise ValidationError(
-             f"Config schema JSON at '{CONFIG_SCHEMA_JSON}' is not up to date."
-         )
-
-
- def main(check: bool = False):
-     """Update or check the config documentation files."""
-
-     if check:
-         try:
-             check_docs()
-         except ValidationError as error:
-             echo_failure(f"Validation failed: {error}")
-             sys.exit(1)
-         echo_success("Config docs are up to date.")
-         return
-
-     update_docs()
-     echo_success("Successfully updated the config docs.")
-
-
- if __name__ == "__main__":
-     run(main)
scripts/update_readme.py DELETED
@@ -1,221 +0,0 @@
- #!/usr/bin/env python3
-
- # Copyright 2021 - 2023 Universität Tübingen, DKFZ, EMBL, and Universität zu Köln
- # for the German Human Genome-Phenome Archive (GHGA)
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- """Generate documentation for this package using different sources."""
-
- import json
- import subprocess # nosec
- import sys
- from pathlib import Path
- from string import Template
-
- import jsonschema2md
- from pydantic import BaseModel, Field
- from script_utils.cli import echo_failure, echo_success, run
- from setuptools.config.setupcfg import read_configuration
- from stringcase import spinalcase, titlecase
-
- ROOT_DIR = Path(__file__).parent.parent.resolve()
- SETUP_CFG_PATH = ROOT_DIR / "setup.cfg"
- DESCRIPTION_PATH = ROOT_DIR / ".description.md"
- DESIGN_PATH = ROOT_DIR / ".design.md"
- README_TEMPLATE_PATH = ROOT_DIR / ".readme_template.md"
- CONFIG_SCHEMA_PATH = ROOT_DIR / "config_schema.json"
- OPENAPI_YAML_REL_PATH = "./openapi.yaml"
- README_PATH = ROOT_DIR / "README.md"
-
-
- class PackageHeader(BaseModel):
-     """A basic summary of a package."""
-
-     shortname: str = Field(
-         ...,
-         description=(
-             "The abbreviation of the package name. Is identical to the package name."
-         ),
-     )
-     version: str = Field(..., description="The version of the package.")
-     summary: str = Field(
-         ..., description="A short 1 or 2 sentence summary of the package."
-     )
-
-
- class PackageName(BaseModel):
-     """The name of a package and its different representations."""
-
-     name: str = Field(..., description="The full name of the package in spinal case.")
-     title: str = Field(..., description="The name of the package formatted as title.")
-
-
- class PackageDetails(PackageHeader, PackageName):
-     """A container for details on a package used to build documentation."""
-
-     description: str = Field(
-         ..., description="A markdown-formatted description of the package."
-     )
-     design_description: str = Field(
-         ...,
-         description=(
-             "A markdown-formatted description of overall architecture and design of"
-             + " the package."
-         ),
-     )
-     config_description: str = Field(
-         ...,
-         description=(
-             "A markdown-formatted list of all configuration parameters of this package."
-         ),
-     )
-     openapi_doc: str = Field(
-         ...,
-         description=(
-             "A markdown-formatted description rendering or linking to an OpenAPI"
-             " specification of the package."
-         ),
-     )
-
-
- def read_package_header() -> PackageHeader:
-     """Read basic information about the package from the setup.cfg."""
-
-     setup_config = read_configuration(SETUP_CFG_PATH)
-     setup_metadata = setup_config["metadata"]
-     return PackageHeader(
-         shortname=setup_metadata["name"],
-         version=setup_metadata["version"],
-         summary=setup_metadata["description"],
-     )
-
-
- def read_package_name() -> PackageName:
-     """Infer the package name from the name of the git origin."""
-
-     with subprocess.Popen(
-         args="basename -s .git `git config --get remote.origin.url`",
-         cwd=ROOT_DIR,
-         stdout=subprocess.PIPE,
-         shell=True,
-     ) as process:
-         stdout, _ = process.communicate()
-
-     if not stdout:
-         raise RuntimeError("The name of the git origin could not be resolved.")
-     git_origin_name = stdout.decode("utf-8").strip()
-
-     return PackageName(
-         name=spinalcase(git_origin_name), title=titlecase(git_origin_name)
-     )
-
-
- def read_package_description() -> str:
-     """Read the package description."""
-
-     return DESCRIPTION_PATH.read_text()
-
-
- def read_design_description() -> str:
-     """Read the design description."""
-
-     return DESIGN_PATH.read_text()
-
-
- def generate_config_docs() -> str:
-     """Generate markdown-formatted documentation for the configuration parameters
-     listed in the config schema."""
-
-     parser = jsonschema2md.Parser(
-         examples_as_yaml=False,
-         show_examples="all",
-     )
-
-     if not CONFIG_SCHEMA_PATH.exists():
-         return ""
-
-     with open(CONFIG_SCHEMA_PATH, "r", encoding="utf-8") as json_file:
-         config_schema = json.load(json_file)
-
-     md_lines = parser.parse_schema(config_schema)
-
-     # ignore everything before the properties header:
-     properties_index = md_lines.index("## Properties\n\n")
-     md_lines = md_lines[properties_index + 1 :]
-
-     return "\n".join(md_lines)
-
-
- def generate_openapi_docs() -> str:
-     """Generate markdown-formatted documentation linking to or rendering an OpenAPI
-     specification of the package. If no OpenAPI specification is present, return an
-     empty string."""
-
-     open_api_yaml_path = ROOT_DIR / OPENAPI_YAML_REL_PATH
-
-     if not open_api_yaml_path.exists():
-         return ""
-
-     return (
-         "## HTTP API\n"
-         + "An OpenAPI specification for this service can be found"
-         + f" [here]({OPENAPI_YAML_REL_PATH})."
-     )
-
-
- def get_package_details() -> PackageDetails:
-     """Get details required to build documentation for the package."""
-
-     header = read_package_header()
-     name = read_package_name()
-     description = read_package_description()
-     config_description = generate_config_docs()
-     return PackageDetails(
-         **header.dict(),
-         **name.dict(),
-         description=description,
-         config_description=config_description,
-         design_description=read_design_description(),
-         openapi_doc=generate_openapi_docs(),
-     )
-
-
- def generate_single_readme(*, details: PackageDetails) -> str:
-     """Generate a single markdown-formatted readme file for the package based on the
-     provided details."""
-
-     template_content = README_TEMPLATE_PATH.read_text()
-     template = Template(template_content)
-     return template.substitute(details.dict())
-
-
- def main(check: bool = False) -> None:
-     """Update the readme markdown."""
-
-     details = get_package_details()
-     readme_content = generate_single_readme(details=details)
-
-     if check:
-         if README_PATH.read_text() != readme_content:
-             echo_failure("README.md is not up to date.")
-             sys.exit(1)
-         echo_success("README.md is up to date.")
-         return
-
-     README_PATH.write_text(readme_content)
-     echo_success("Successfully updated README.md.")
-
-
- if __name__ == "__main__":
-     run(main)
@@ -1,250 +0,0 @@
- #!/usr/bin/env python3
-
- # Copyright 2021 - 2023 Universität Tübingen, DKFZ, EMBL, and Universität zu Köln
- # for the German Human Genome-Phenome Archive (GHGA)
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- """This script evaluates the entries in .static_files, .mandatory_files and
- .deprecated_files and compares them with the microservice template repository,
- or verifies their existence or non-existence depending on the list they are in.
- """
-
- import difflib
- import os
- import shutil
- import stat
- import sys
- import urllib.error
- import urllib.parse
- import urllib.request
- from pathlib import Path
-
- try:
-     from script_utils.cli import echo_failure, echo_success, run
- except ImportError:
-     echo_failure = echo_success = print
-
-     def run(main_fn):
-         """Run main function without cli tools (typer)."""
-         main_fn(check="--check" in sys.argv[1:])
-
-
- REPO_ROOT_DIR = Path(__file__).parent.parent.resolve()
-
- DEPRECATED_FILES = ".deprecated_files"
- MANDATORY_FILES = ".mandatory_files"
- STATIC_FILES = ".static_files"
- IGNORE_SUFFIX = "_ignore"
- RAW_TEMPLATE_URL = (
-     "https://raw.githubusercontent.com/ghga-de/microservice-repository-template/main/"
- )
-
-
- class ValidationError(RuntimeError):
-     """Raised when files need to be updated."""
-
-
- def get_file_list(list_name: str) -> list[str]:
-     """Return a list of all file names specified in a given list file."""
-     list_path = REPO_ROOT_DIR / list_name
-     with open(list_path, "r", encoding="utf8") as list_file:
-         file_list = [
-             clean_line
-             for clean_line in (
-                 line.rstrip() for line in list_file if not line.startswith("#")
-             )
-             if clean_line
-         ]
-     if not list_name.endswith(IGNORE_SUFFIX):
-         ignore_list_name = list_name + IGNORE_SUFFIX
-         try:
-             file_set_ignore = set(get_file_list(ignore_list_name))
-         except FileNotFoundError:
-             print(f" - {ignore_list_name} is missing, no exceptions from the template")
-         else:
-             file_list = [line for line in file_list if line not in file_set_ignore]
-     return file_list
-
-
- def get_template_file_content(relative_file_path: str):
-     """Get the content of the template file corresponding to the given path."""
-     remote_file_url = urllib.parse.urljoin(RAW_TEMPLATE_URL, relative_file_path)
-     remote_file_request = urllib.request.Request(remote_file_url)
-     try:
-         with urllib.request.urlopen(remote_file_request) as remote_file_response:
-             return remote_file_response.read().decode(
-                 remote_file_response.headers.get_content_charset("utf-8")
-             )
-     except urllib.error.HTTPError as remote_file_error:
-         print(
-             f" - WARNING: request to remote file {remote_file_url} returned"
-             f" status code {remote_file_error.code}"
-         )
-         return None
-
-
- def diff_content(local_file_path, local_file_content, template_file_content) -> bool:
-     """Show diff between given local and remote template file content."""
-     if local_file_content != template_file_content:
-         print(f" - {local_file_path}: differs from template")
-         for line in difflib.unified_diff(
-             template_file_content.splitlines(keepends=True),
-             local_file_content.splitlines(keepends=True),
-             fromfile="template",
-             tofile="local",
-         ):
-             print(" ", line.rstrip())
-         return True
-     return False
-
-
- def check_file(relative_file_path: str, diff: bool = False) -> bool:
-     """Compare file at the given path with the given content.
-
-     Returns True if there are differences.
-     """
-     local_file_path = REPO_ROOT_DIR / Path(relative_file_path)
-
-     if not local_file_path.exists():
-         print(f" - {local_file_path} does not exist")
-         return True
-
-     if diff:
-         template_file_content = get_template_file_content(relative_file_path)
-
-         if template_file_content is None:
-             print(f" - {local_file_path}: cannot check, remote is missing")
-             return True
-
-         with open(local_file_path, "r", encoding="utf8") as file:
-             return diff_content(local_file_path, file.read(), template_file_content)
-
-     return False
-
-
- def update_file(relative_file_path: str, diff: bool = False) -> bool:
-     """Update file at the given relative path.
-
-     Returns True if there are updates.
-     """
-
-     local_file_path = REPO_ROOT_DIR / Path(relative_file_path)
-     local_parent_dir = local_file_path.parent
-
-     if not local_parent_dir.exists():
-         local_parent_dir.mkdir(parents=True)
-
-     if diff or not local_file_path.exists():
-         template_file_content = get_template_file_content(relative_file_path)
-
-         if template_file_content is None:
-             print(f" - {local_file_path}: cannot update, remote is missing")
-             return True
-
-         if diff and local_file_path.exists():
-             with open(local_file_path, "r", encoding="utf8") as file:
-                 if file.read() == template_file_content:
-                     return False
-
-         executable = template_file_content.startswith("#!")
-         executable_flags = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
-         with open(local_file_path, "w", encoding="utf8") as file:
-             file.write(template_file_content)
-             mode = os.fstat(file.fileno()).st_mode
-             if executable:
-                 mode |= executable_flags
-             else:
-                 mode &= ~executable_flags
-             os.fchmod(file.fileno(), stat.S_IMODE(mode))
-
-         print(f" - {local_file_path}: updated")
-         return True
-
-     return False
-
-
- def update_files(files: list[str], check: bool = False, diff: bool = False) -> bool:
-     """Update or check all the files in the given list.
-
-     Returns True if there are updates.
-     """
-     updates = False
-     update_or_check_file = check_file if check else update_file
-     for relative_file_path in files:
-         if update_or_check_file(relative_file_path, diff=diff):
-             updates = True
-     return updates
-
-
- def remove_files(files: list[str], check: bool = False) -> bool:
-     """Remove or check all the files in the given list.
-
-     Returns True if there are updates.
-     """
-     updates = False
-     for relative_file_path in files:
-         local_file_path = REPO_ROOT_DIR / Path(relative_file_path)
-
-         if local_file_path.exists():
-             if check:
-                 print(f" - {local_file_path}: deprecated, but exists")
-             else:
-                 if local_file_path.is_dir():
-                     shutil.rmtree(local_file_path)
-                 else:
-                     local_file_path.unlink()
-                 print(f" - {local_file_path}: removed, since it is deprecated")
-             updates = True
-     return updates
-
-
- def main(check: bool = False):
-     """Update the static files in the service template."""
-     updated = False
-     if not check:
-         update_files([STATIC_FILES], diff=True, check=False)
-
-     print("Static files...")
-     files_to_update = get_file_list(STATIC_FILES)
-     if check:
-         files_to_update.append(STATIC_FILES)
-     files_to_update.extend((MANDATORY_FILES, DEPRECATED_FILES))
-     if update_files(files_to_update, diff=True, check=check):
-         updated = True
-
-     print("Mandatory files...")
-     files_to_guarantee = get_file_list(MANDATORY_FILES)
-     if update_files(files_to_guarantee, check=check):
-         updated = True
-
-     print("Deprecated files...")
-     files_to_remove = get_file_list(DEPRECATED_FILES)
-     if remove_files(files_to_remove, check=check):
-         updated = True
-
-     if check:
-         if updated:
-             echo_failure("Validating files from template failed.")
-             sys.exit(1)
-         echo_success("Successfully validated files from template.")
-     else:
-         echo_success(
-             "Successfully updated files from template."
-             if updated
-             else "No updates from the template were necessary."
-         )
-
-
- if __name__ == "__main__":
-     run(main)
@@ -1,16 +0,0 @@
- # Copyright 2021 - 2023 Universität Tübingen, DKFZ, EMBL, and Universität zu Köln
- # for the German Human Genome-Phenome Archive (GHGA)
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- """Fixtures that are used in both integration and unit tests"""
@@ -1,16 +0,0 @@
- ghga_metadata_version: 0.0.0
- default_settings:
-   header_row: 1
-   start_row: 2
-   start_column: 1
-   end_column: 3
- worksheets:
-   - sheet_name: books
-     settings:
-       name: books
-   - sheet_name: publisher
-     settings:
-       name: publisher
-       transformations:
-         publisher_names: !!python/object/apply:ghga_transpiler.transformations.to_list []
-         attributes: !!python/object/apply:ghga_transpiler.transformations.to_attributes []
@@ -1,15 +0,0 @@
- # Copyright 2021 - 2023 Universität Tübingen, DKFZ, EMBL, and Universität zu Köln
- # for the German Human Genome-Phenome Archive (GHGA)
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #
@@ -1,16 +0,0 @@
- # Copyright 2021 - 2023 Universität Tübingen, DKFZ, EMBL, and Universität zu Köln
- # for the German Human Genome-Phenome Archive (GHGA)
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- """Data that are used in unit tests"""