eido 0.2.2__tar.gz → 0.2.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. {eido-0.2.2/eido.egg-info → eido-0.2.4}/PKG-INFO +4 -3
  2. {eido-0.2.2 → eido-0.2.4}/README.md +1 -1
  3. eido-0.2.4/eido/_version.py +1 -0
  4. {eido-0.2.2 → eido-0.2.4}/eido/const.py +8 -3
  5. {eido-0.2.2 → eido-0.2.4}/eido/conversion.py +9 -6
  6. {eido-0.2.2 → eido-0.2.4}/eido/conversion_plugins.py +11 -9
  7. {eido-0.2.2 → eido-0.2.4}/eido/inspection.py +8 -7
  8. {eido-0.2.2 → eido-0.2.4}/eido/output_formatters.py +1 -1
  9. {eido-0.2.2 → eido-0.2.4}/eido/schema.py +8 -16
  10. {eido-0.2.2 → eido-0.2.4}/eido/validation.py +42 -23
  11. {eido-0.2.2 → eido-0.2.4/eido.egg-info}/PKG-INFO +4 -3
  12. eido-0.2.4/eido.egg-info/requires.txt +8 -0
  13. eido-0.2.4/requirements/requirements-all.txt +6 -0
  14. {eido-0.2.2 → eido-0.2.4}/tests/test_conversions.py +31 -1
  15. {eido-0.2.2 → eido-0.2.4}/tests/test_validations.py +9 -4
  16. eido-0.2.2/eido/_version.py +0 -1
  17. eido-0.2.2/eido.egg-info/requires.txt +0 -5
  18. eido-0.2.2/requirements/requirements-all.txt +0 -5
  19. {eido-0.2.2 → eido-0.2.4}/LICENSE.txt +0 -0
  20. {eido-0.2.2 → eido-0.2.4}/MANIFEST.in +0 -0
  21. {eido-0.2.2 → eido-0.2.4}/eido/__init__.py +0 -0
  22. {eido-0.2.2 → eido-0.2.4}/eido/__main__.py +0 -0
  23. {eido-0.2.2 → eido-0.2.4}/eido/argparser.py +0 -0
  24. {eido-0.2.2 → eido-0.2.4}/eido/cli.py +0 -0
  25. {eido-0.2.2 → eido-0.2.4}/eido/exceptions.py +0 -0
  26. {eido-0.2.2 → eido-0.2.4}/eido.egg-info/SOURCES.txt +0 -0
  27. {eido-0.2.2 → eido-0.2.4}/eido.egg-info/dependency_links.txt +0 -0
  28. {eido-0.2.2 → eido-0.2.4}/eido.egg-info/entry_points.txt +0 -0
  29. {eido-0.2.2 → eido-0.2.4}/eido.egg-info/top_level.txt +0 -0
  30. {eido-0.2.2 → eido-0.2.4}/requirements/requirements-doc.txt +0 -0
  31. {eido-0.2.2 → eido-0.2.4}/requirements/requirements-test.txt +0 -0
  32. {eido-0.2.2 → eido-0.2.4}/setup.cfg +0 -0
  33. {eido-0.2.2 → eido-0.2.4}/setup.py +0 -0
  34. {eido-0.2.2 → eido-0.2.4}/tests/test_schema_operations.py +0 -0
{eido-0.2.2/eido.egg-info → eido-0.2.4}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: eido
- Version: 0.2.2
+ Version: 0.2.4
  Summary: A project metadata validator
  Home-page: https://github.com/pepkit/eido/
  Author: Michal Stolarczyk, Nathan Sheffield
@@ -15,10 +15,11 @@ Classifier: Programming Language :: Python :: 3.11
  Classifier: Topic :: Scientific/Engineering :: Bio-Informatics
  Description-Content-Type: text/markdown
  License-File: LICENSE.txt
+ Requires-Dist: importlib-metadata; python_version < "3.10"
  Requires-Dist: jsonschema>=3.0.1
  Requires-Dist: logmuse>=0.2.5
  Requires-Dist: pandas
- Requires-Dist: peppy>=0.35.7
+ Requires-Dist: peppy>=0.40.7
  Requires-Dist: ubiquerg>=0.5.2

  # <img src="docs/img/eido.svg" alt="eido logo" height="70">
@@ -28,4 +29,4 @@ Requires-Dist: ubiquerg>=0.5.2
  [![PEP compatible](http://pepkit.github.io/img/PEP-compatible-green.svg)](http://pepkit.github.io)
  [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)

- [PEP](http://pepkit.github.io) validation tool based on [jsonschema](https://github.com/Julian/jsonschema). See [documentation](http://eido.databio.org) for usage.
+ [PEP](https://pep.databio.org) validation tool based on [jsonschema](https://github.com/Julian/jsonschema). See [documentation](http://pep.databio.org/eido) for usage.
{eido-0.2.2 → eido-0.2.4}/README.md
@@ -5,4 +5,4 @@
  [![PEP compatible](http://pepkit.github.io/img/PEP-compatible-green.svg)](http://pepkit.github.io)
  [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)

- [PEP](http://pepkit.github.io) validation tool based on [jsonschema](https://github.com/Julian/jsonschema). See [documentation](http://eido.databio.org) for usage.
+ [PEP](https://pep.databio.org) validation tool based on [jsonschema](https://github.com/Julian/jsonschema). See [documentation](http://pep.databio.org/eido) for usage.
eido-0.2.4/eido/_version.py
@@ -0,0 +1 @@
+ __version__ = "0.2.4"
{eido-0.2.2 → eido-0.2.4}/eido/const.py
@@ -14,8 +14,11 @@ SUBPARSER_MSGS = {
  CONVERT_CMD: "Convert PEP format using filters",
  }
  PROP_KEY = "properties"
- REQUIRED_FILES_KEY = "required_files"
- FILES_KEY = "files"
+
+ SAMPLES_KEY = "samples"
+
+ TANGIBLE_KEY = "tangible"
+ SIZING_KEY = "sizing"

  # sample schema input validation key names, these values are required by looper
  # to refer to the dict values
@@ -34,7 +37,9 @@ GENERAL = [
  "FILTERS_CMD",
  "SUBPARSER_MSGS",
  ]
- SCHEMA_SECTIONS = ["PROP_KEY", "REQUIRED_FILES_KEY", "FILES_KEY"]
+
+ SCHEMA_SECTIONS = ["PROP_KEY", "TANGIBLE_KEY", "SIZING_KEY"]
+
  SCHEMA_VALIDAION_KEYS = [
  "MISSING_KEY",
  "REQUIRED_INPUTS_KEY",
{eido-0.2.2 → eido-0.2.4}/eido/conversion.py
@@ -1,12 +1,15 @@
- import inspect
  import sys
- import os
- from logging import getLogger

- from pkg_resources import iter_entry_points
+ if sys.version_info < (3, 10):
+ from importlib_metadata import entry_points
+ else:
+ from importlib.metadata import entry_points
+ import inspect
+ from logging import getLogger
+ import os
+ from typing import NoReturn

  from .exceptions import *
- from typing import NoReturn

  _LOGGER = getLogger(__name__)

@@ -21,7 +24,7 @@ def pep_conversion_plugins():
  :raise EidoFilterError: if any of the filters has an invalid signature.
  """
  plugins = {}
- for ep in iter_entry_points("pep.filters"):
+ for ep in entry_points(group="pep.filters"):
  plugin_fun = ep.load()
  if len(list(inspect.signature(plugin_fun).parameters)) != 2:
  raise EidoFilterError(
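Note on the change above: the `pkg_resources.iter_entry_points` call was replaced with `importlib.metadata.entry_points(group=...)`, but the plugin contract is unchanged — filters are discovered from the `pep.filters` entry-point group and must take exactly two parameters. Below is a hedged sketch of how an external package could register such a filter; the package, module, and filter names are hypothetical, only the group name and the two-parameter signature come from the code above.

```python
# my_pep_filters/filters.py -- hypothetical plugin module
import json
from typing import Dict


def json_pep_filter(p, **kwargs) -> Dict[str, str]:
    """Render a PEP config as JSON; must accept exactly two parameters (p, **kwargs)."""
    return {"project": json.dumps(dict(p.config), default=str)}


# setup.py of the same hypothetical package: registering the filter under the
# "pep.filters" group makes entry_points(group="pep.filters") discover it.
from setuptools import setup

setup(
    name="my-pep-filters",
    version="0.1.0",
    packages=["my_pep_filters"],
    entry_points={
        "pep.filters": ["json = my_pep_filters.filters:json_pep_filter"],
    },
)
```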
{eido-0.2.2 → eido-0.2.4}/eido/conversion_plugins.py
@@ -1,4 +1,5 @@
  """ built-in PEP filters """
+
  from typing import Dict
  from .output_formatters import MultilineOutputFormatter

@@ -41,8 +42,7 @@ def yaml_pep_filter(p, **kwargs) -> Dict[str, str]:
  """
  from yaml import dump

- data = p.config.to_dict()
- return {"project": dump(data, default_flow_style=False)}
+ return {"project": dump(p.config, default_flow_style=False)}


  def csv_pep_filter(p, **kwargs) -> Dict[str, str]:
@@ -69,14 +69,16 @@ def processed_pep_filter(p, **kwargs) -> Dict[str, str]:
  samples_as_objects = kwargs.get("samples_as_objects")
  subsamples_as_objects = kwargs.get("subsamples_as_objects")

- prj_repr = p.config.to_dict()
+ prj_repr = p.config

  return {
  "project": str(prj_repr),
- "samples": str(p.samples)
- if samples_as_objects
- else str(p.sample_table.to_csv()),
- "subsamples": str(p.subsamples)
- if subsamples_as_objects
- else str(p.subsample_table.to_csv()),
+ "samples": (
+ str(p.samples) if samples_as_objects else str(p.sample_table.to_csv())
+ ),
+ "subsamples": (
+ str(p.subsamples)
+ if subsamples_as_objects
+ else str(p.subsample_table.to_csv())
+ ),
  }
{eido-0.2.2 → eido-0.2.4}/eido/inspection.py
@@ -7,12 +7,13 @@ from ubiquerg import size

  from .const import (
  ALL_INPUTS_KEY,
- FILES_KEY,
  INPUT_FILE_SIZE_KEY,
  MISSING_KEY,
  PROP_KEY,
- REQUIRED_FILES_KEY,
  REQUIRED_INPUTS_KEY,
+ SIZING_KEY,
+ TANGIBLE_KEY,
+ SAMPLES_KEY,
  )
  from .schema import read_schema
  from .validation import _validate_sample_object, _get_attr_values
@@ -67,12 +68,12 @@ def get_input_files_size(sample, schema):
  all_inputs = set()
  required_inputs = set()
  schema = schema[-1] # use only first schema, in case there are imports
- sample_schema_dict = schema["properties"]["_samples"]["items"]
- if FILES_KEY in sample_schema_dict:
- all_inputs.update(_get_attr_values(sample, sample_schema_dict[FILES_KEY]))
- if REQUIRED_FILES_KEY in sample_schema_dict:
+ sample_schema_dict = schema[PROP_KEY][SAMPLES_KEY]["items"]
+ if SIZING_KEY in sample_schema_dict:
+ all_inputs.update(_get_attr_values(sample, sample_schema_dict[SIZING_KEY]))
+ if TANGIBLE_KEY in sample_schema_dict:
  required_inputs = set(
- _get_attr_values(sample, sample_schema_dict[REQUIRED_FILES_KEY])
+ _get_attr_values(sample, sample_schema_dict[TANGIBLE_KEY])
  )
  all_inputs.update(required_inputs)
  with catch_warnings(record=True) as w:
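The key rename above (`required_files` → `tangible`, `files` → `sizing`) changes what a sample schema declares for input-file checks: `tangible` lists attributes whose files must exist, `sizing` lists attributes whose file sizes are totaled. A minimal, self-contained sketch of the expected sample-level fragment and the attribute lookup it drives — the attribute names (`read1`, `read2`, `genome_annotation`) are illustrative, and the helper mirrors eido's `_get_attr_values` without the pandas flatten step:

```python
# Hedged sketch of a per-sample schema fragment using the renamed keys.
sample_schema_items = {
    "properties": {
        "read1": {"type": "string"},
        "read2": {"type": "string"},
        "genome_annotation": {"type": "string"},
    },
    # attributes whose paths must exist on disk (was "required_files")
    "tangible": ["read1", "read2"],
    # attributes whose file sizes are summed (was "files")
    "sizing": ["read1", "read2", "genome_annotation"],
}


class FakeSample:
    """Stand-in for a peppy.Sample with two file attributes set."""

    read1 = "data/s1_R1.fastq.gz"
    read2 = "data/s1_R2.fastq.gz"


def get_attr_values(obj, attrlist):
    # mirrors eido's _get_attr_values: missing attributes become ""
    return [getattr(obj, attr, "") for attr in attrlist]


required = set(get_attr_values(FakeSample(), sample_schema_items["tangible"]))
to_size = set(get_attr_values(FakeSample(), sample_schema_items["sizing"]))
print(required)            # {'data/s1_R1.fastq.gz', 'data/s1_R2.fastq.gz'}
print(to_size - required)  # {''} -- genome_annotation is unset on this sample
```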
{eido-0.2.2 → eido-0.2.4}/eido/output_formatters.py
@@ -110,7 +110,7 @@ class MultilineOutputFormatter(BaseOutputFormatter):
  ):
  value = sample[attribute][sample_index]
  else:
- value = sample[attribute]
+ value = sample.get(attribute)

  sample_row.append(value or "")

{eido-0.2.2 → eido-0.2.4}/eido/schema.py
@@ -2,7 +2,7 @@ from logging import getLogger

  from peppy.utils import load_yaml

- from .const import *
+ from .const import SAMPLES_KEY, PROP_KEY

  _LOGGER = getLogger(__name__)

@@ -21,23 +21,15 @@ def preprocess_schema(schema_dict):
  :return dict: preprocessed schema
  """
  _LOGGER.debug(f"schema ori: {schema_dict}")
- if "config" in schema_dict[PROP_KEY]:
- schema_dict[PROP_KEY]["_config"] = schema_dict[PROP_KEY]["config"]
- del schema_dict[PROP_KEY]["config"]
- else:
- _LOGGER.debug("No config section found in schema")
- if "samples" in schema_dict[PROP_KEY]:
- schema_dict[PROP_KEY]["_samples"] = schema_dict[PROP_KEY]["samples"]
- del schema_dict[PROP_KEY]["samples"]
- if "required" in schema_dict:
- schema_dict["required"][
- schema_dict["required"].index("samples")
- ] = "_samples"
+ if "project" not in schema_dict[PROP_KEY]:
+ _LOGGER.debug("No project section found in schema")
+
+ if SAMPLES_KEY in schema_dict[PROP_KEY]:
  if (
- "items" in schema_dict[PROP_KEY]["_samples"]
- and PROP_KEY in schema_dict[PROP_KEY]["_samples"]["items"]
+ "items" in schema_dict[PROP_KEY][SAMPLES_KEY]
+ and PROP_KEY in schema_dict[PROP_KEY][SAMPLES_KEY]["items"]
  ):
- s_props = schema_dict[PROP_KEY]["_samples"]["items"][PROP_KEY]
+ s_props = schema_dict[PROP_KEY][SAMPLES_KEY]["items"][PROP_KEY]
  for prop, val in s_props.items():
  if "type" in val and val["type"] in ["string", "number", "boolean"]:
  s_props[prop] = {}
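The rework above drops the old `samples` → `_samples` / `config` → `_config` renaming and keeps the schema keys as-is; what remains is the type-relaxation loop at the end of the hunk. A small before/after sketch of that step with illustrative property names (the motivation — presumably letting values read from tabular sample sheets validate regardless of declared scalar type — is an assumption):

```python
# Scalar-typed sample properties ("string", "number", "boolean") are replaced
# with an empty subschema, and {} matches anything in JSON Schema.
schema = {
    "properties": {
        "samples": {
            "type": "array",
            "items": {
                "properties": {
                    "sample_name": {"type": "string"},   # becomes {}
                    "read_length": {"type": "number"},   # becomes {}
                    "protocols": {"type": "array"},      # left untouched
                }
            },
        }
    }
}

s_props = schema["properties"]["samples"]["items"]["properties"]
for prop, val in s_props.items():
    if "type" in val and val["type"] in ["string", "number", "boolean"]:
        s_props[prop] = {}

print(s_props)
# {'sample_name': {}, 'read_length': {}, 'protocols': {'type': 'array'}}
```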
{eido-0.2.2 → eido-0.2.4}/eido/validation.py
@@ -1,4 +1,5 @@
  import os
+ from typing import NoReturn, Mapping, Union
  from copy import deepcopy as dpcpy
  from logging import getLogger

@@ -6,28 +7,25 @@ from warnings import warn

  from .exceptions import EidoValidationError

-
  from pandas.core.common import flatten
  from jsonschema import Draft7Validator
+ import peppy

- from .const import (
- FILES_KEY,
- PROP_KEY,
- REQUIRED_FILES_KEY,
- )
+ from .const import PROP_KEY, SIZING_KEY, TANGIBLE_KEY, SAMPLES_KEY
  from .exceptions import PathAttrNotFoundError
  from .schema import preprocess_schema, read_schema

  _LOGGER = getLogger(__name__)


- def _validate_object(obj, schema, sample_name_colname=False):
+ def _validate_object(obj: Mapping, schema: Union[str, dict], sample_name_colname=False):
  """
  Generic function to validate object against a schema

  :param Mapping obj: an object to validate
  :param str | dict schema: schema dict to validate against or a path to one
  from the error. Useful when used ith large projects
+
  :raises EidoValidationError: if validation is unsuccessful
  """
  validator = Draft7Validator(schema)
@@ -45,6 +43,10 @@ def _validate_object(obj, schema, sample_name_colname=False):
  instance_name = error.instance[sample_name_colname]
  except KeyError:
  instance_name = "project"
+ except TypeError:
+ instance_name = obj["samples"][error.absolute_path[1]][
+ sample_name_colname
+ ]
  errors_by_type[error.message].append(
  {
  "type": error.message,
@@ -58,13 +60,16 @@ def _validate_object(obj, schema, sample_name_colname=False):
  _LOGGER.debug("Validation was successful...")


- def validate_project(project, schema):
+ def validate_project(project: peppy.Project, schema: Union[str, dict]) -> NoReturn:
  """
  Validate a project object against a schema

  :param peppy.Project project: a project object to validate
  :param str | dict schema: schema dict to validate against or a path to one
  from the error. Useful when used ith large projects
+
+ :return: NoReturn
+ :raises EidoValidationError: if validation is unsuccessful
  """
  sample_name_colname = project.sample_name_colname
  schema_dicts = read_schema(schema=schema)
@@ -76,7 +81,7 @@ def validate_project(project, schema):
  _LOGGER.debug("Project validation successful")


- def _validate_sample_object(sample, schemas):
+ def _validate_sample_object(sample: peppy.Sample, schemas):
  """
  Internal function that allows to validate a peppy.Sample object without
  requiring a reference to peppy.Project.
@@ -86,20 +91,24 @@ def _validate_sample_object(sample, schemas):
  """
  for schema_dict in schemas:
  schema_dict = preprocess_schema(schema_dict)
- sample_schema_dict = schema_dict[PROP_KEY]["_samples"]["items"]
+ sample_schema_dict = schema_dict[PROP_KEY][SAMPLES_KEY]["items"]
  _validate_object(sample.to_dict(), sample_schema_dict)
  _LOGGER.debug(
  f"{getattr(sample, 'sample_name', '')} sample validation successful"
  )


- def validate_sample(project, sample_name, schema):
+ def validate_sample(
+ project: peppy.Project, sample_name: Union[str, int], schema: Union[str, dict]
+ ) -> NoReturn:
  """
  Validate the selected sample object against a schema

  :param peppy.Project project: a project object to validate
  :param str | int sample_name: name or index of the sample to validate
  :param str | dict schema: schema dict to validate against or a path to one
+
+ :raises EidoValidationError: if validation is unsuccessful
  """
  sample = (
  project.samples[sample_name]
@@ -112,7 +121,9 @@ def validate_sample(project, sample_name, schema):
  )


- def validate_config(project, schema):
+ def validate_config(
+ project: Union[peppy.Project, dict], schema: Union[str, dict]
+ ) -> NoReturn:
  """
  Validate the config part of the Project object against a schema

@@ -123,17 +134,21 @@
  for schema_dict in schema_dicts:
  schema_cpy = preprocess_schema(dpcpy(schema_dict))
  try:
- del schema_cpy[PROP_KEY]["_samples"]
+ del schema_cpy[PROP_KEY][SAMPLES_KEY]
  except KeyError:
  pass
  if "required" in schema_cpy:
  try:
- schema_cpy["required"].remove("_samples")
+ schema_cpy["required"].remove(SAMPLES_KEY)
  except ValueError:
  pass
- project_dict = project.to_dict()
- _validate_object(project_dict, schema_cpy)
- _LOGGER.debug("Config validation successful")
+ if isinstance(project, dict):
+ _validate_object({"project": project}, schema_cpy)
+
+ else:
+ project_dict = project.to_dict()
+ _validate_object(project_dict, schema_cpy)
+ _LOGGER.debug("Config validation successful")


  def _get_attr_values(obj, attrlist):
@@ -157,7 +172,11 @@ def _get_attr_values(obj, attrlist):
  return list(flatten([getattr(obj, attr, "") for attr in attrlist]))


- def validate_input_files(project, schemas, sample_name=None):
+ def validate_input_files(
+ project: peppy.Project,
+ schemas: Union[str, dict],
+ sample_name: Union[str, int] = None,
+ ):
  """
  Determine which of the required and optional files are missing.

@@ -197,12 +216,12 @@
  all_inputs = set()
  required_inputs = set()
  schema = schemas[-1] # use only first schema, in case there are imports
- sample_schema_dict = schema["properties"]["_samples"]["items"]
- if FILES_KEY in sample_schema_dict:
- all_inputs.update(_get_attr_values(sample, sample_schema_dict[FILES_KEY]))
- if REQUIRED_FILES_KEY in sample_schema_dict:
+ sample_schema_dict = schema[PROP_KEY][SAMPLES_KEY]["items"]
+ if SIZING_KEY in sample_schema_dict:
+ all_inputs.update(_get_attr_values(sample, sample_schema_dict[SIZING_KEY]))
+ if TANGIBLE_KEY in sample_schema_dict:
  required_inputs = set(
- _get_attr_values(sample, sample_schema_dict[REQUIRED_FILES_KEY])
+ _get_attr_values(sample, sample_schema_dict[TANGIBLE_KEY])
  )
  all_inputs.update(required_inputs)
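One behavioral change worth calling out from the hunks above: `validate_config` now accepts either a `peppy.Project` or a plain config mapping, wrapping a bare dict as `{"project": ...}` before validation. A hedged usage sketch follows; the file paths and config contents are placeholders.

```python
import peppy
from eido.validation import validate_config, validate_project

SCHEMA = "pep_schema.yaml"  # placeholder path to a PEP schema

# A peppy.Project works as before; its dict representation is validated
# against the schema with the "samples" section stripped out.
prj = peppy.Project("project_config.yaml")  # placeholder PEP config
validate_config(prj, SCHEMA)

# New relative to 0.2.2: a plain mapping is accepted too and is validated
# as {"project": <mapping>} against the same reduced schema.
raw_cfg = {"pep_version": "2.1.0", "sample_table": "samples.csv"}
validate_config(raw_cfg, SCHEMA)

# Full-project validation still raises EidoValidationError on failure,
# as exercised by the new test_validation_values test further down.
validate_project(project=prj, schema=SCHEMA)
```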
 
{eido-0.2.2 → eido-0.2.4/eido.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: eido
- Version: 0.2.2
+ Version: 0.2.4
  Summary: A project metadata validator
  Home-page: https://github.com/pepkit/eido/
  Author: Michal Stolarczyk, Nathan Sheffield
@@ -15,10 +15,11 @@ Classifier: Programming Language :: Python :: 3.11
  Classifier: Topic :: Scientific/Engineering :: Bio-Informatics
  Description-Content-Type: text/markdown
  License-File: LICENSE.txt
+ Requires-Dist: importlib-metadata; python_version < "3.10"
  Requires-Dist: jsonschema>=3.0.1
  Requires-Dist: logmuse>=0.2.5
  Requires-Dist: pandas
- Requires-Dist: peppy>=0.35.7
+ Requires-Dist: peppy>=0.40.7
  Requires-Dist: ubiquerg>=0.5.2

  # <img src="docs/img/eido.svg" alt="eido logo" height="70">
@@ -28,4 +29,4 @@ Requires-Dist: ubiquerg>=0.5.2
  [![PEP compatible](http://pepkit.github.io/img/PEP-compatible-green.svg)](http://pepkit.github.io)
  [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)

- [PEP](http://pepkit.github.io) validation tool based on [jsonschema](https://github.com/Julian/jsonschema). See [documentation](http://eido.databio.org) for usage.
+ [PEP](https://pep.databio.org) validation tool based on [jsonschema](https://github.com/Julian/jsonschema). See [documentation](http://pep.databio.org/eido) for usage.
eido-0.2.4/eido.egg-info/requires.txt
@@ -0,0 +1,8 @@
+ jsonschema>=3.0.1
+ logmuse>=0.2.5
+ pandas
+ peppy>=0.40.7
+ ubiquerg>=0.5.2
+
+ [:python_version < "3.10"]
+ importlib-metadata
eido-0.2.4/requirements/requirements-all.txt
@@ -0,0 +1,6 @@
+ importlib-metadata; python_version < '3.10'
+ jsonschema>=3.0.1
+ logmuse>=0.2.5
+ pandas
+ peppy>=0.40.7
+ ubiquerg>=0.5.2
{eido-0.2.2 → eido-0.2.4}/tests/test_conversions.py
@@ -1,4 +1,10 @@
- from eido.conversion import *
+ from eido.conversion import (
+ run_filter,
+ get_available_pep_filters,
+ pep_conversion_plugins,
+ convert_project,
+ )
+ import peppy


  class TestConversionInfrastructure:
@@ -74,3 +80,27 @@ class TestConversionInfrastructure:

  assert save_result_mock.called
  assert conv_result == {"samples": output_pep_nextflow_taxprofiler}
+
+ def test_multiple_subsamples(self, test_multiple_subs):
+ project = peppy.Project(test_multiple_subs, sample_table_index="sample_id")
+
+ conversion = convert_project(
+ project,
+ "csv",
+ )
+ assert isinstance(conversion["samples"], str)
+ conversion = convert_project(
+ project,
+ "basic",
+ )
+ assert isinstance(conversion["project"], str)
+ conversion = convert_project(
+ project,
+ "yaml",
+ )
+ assert isinstance(conversion["project"], str)
+ conversion = convert_project(
+ project,
+ "yaml-samples",
+ )
+ assert isinstance(conversion["samples"], str)
{eido-0.2.2 → eido-0.2.4}/tests/test_validations.py
@@ -1,7 +1,6 @@
  import urllib

  import pytest
- from jsonschema.exceptions import ValidationError
  from peppy import Project
  from peppy.utils import load_yaml

@@ -140,9 +139,15 @@ class TestProjectWithoutConfigValidation:
  )

  def test_validate_file_existance(
- self, test_file_existance_pep, test_file_existence_schema
+ self, test_file_existing_pep, test_file_existing_schema
  ):
- schema_path = test_file_existence_schema
- prj = Project(test_file_existance_pep)
+ schema_path = test_file_existing_schema
+ prj = Project(test_file_existing_pep)
  with pytest.raises(PathAttrNotFoundError):
  validate_input_files(prj, schema_path)
+
+ def test_validation_values(self, test_schema_value_check, test_file_value_check):
+ schema_path = test_schema_value_check
+ prj = Project(test_file_value_check)
+ with pytest.raises(EidoValidationError):
+ validate_project(project=prj, schema=schema_path)
eido-0.2.2/eido/_version.py
@@ -1 +0,0 @@
- __version__ = "0.2.2"
eido-0.2.2/eido.egg-info/requires.txt
@@ -1,5 +0,0 @@
- jsonschema>=3.0.1
- logmuse>=0.2.5
- pandas
- peppy>=0.35.7
- ubiquerg>=0.5.2
eido-0.2.2/requirements/requirements-all.txt
@@ -1,5 +0,0 @@
- jsonschema>=3.0.1
- logmuse>=0.2.5
- pandas
- peppy>=0.35.7
- ubiquerg>=0.5.2