geometallurgy 0.4.12__py3-none-any.whl → 0.4.13__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only. A short sketch of how such a file-level comparison can be reproduced locally follows the file list.
Files changed (48)
  1. elphick/geomet/__init__.py +11 -11
  2. elphick/geomet/base.py +1133 -1133
  3. elphick/geomet/block_model.py +319 -358
  4. elphick/geomet/config/__init__.py +1 -1
  5. elphick/geomet/config/config_read.py +39 -39
  6. elphick/geomet/config/flowsheet_example_partition.yaml +31 -31
  7. elphick/geomet/config/flowsheet_example_simple.yaml +25 -25
  8. elphick/geomet/config/mc_config.yml +35 -35
  9. elphick/geomet/data/downloader.py +39 -39
  10. elphick/geomet/data/register.csv +12 -12
  11. elphick/geomet/datasets/__init__.py +2 -2
  12. elphick/geomet/datasets/datasets.py +47 -47
  13. elphick/geomet/datasets/downloader.py +40 -40
  14. elphick/geomet/datasets/register.csv +12 -12
  15. elphick/geomet/datasets/sample_data.py +196 -196
  16. elphick/geomet/extras.py +35 -35
  17. elphick/geomet/flowsheet/__init__.py +1 -1
  18. elphick/geomet/flowsheet/flowsheet.py +1216 -1216
  19. elphick/geomet/flowsheet/loader.py +99 -99
  20. elphick/geomet/flowsheet/operation.py +256 -256
  21. elphick/geomet/flowsheet/stream.py +39 -39
  22. elphick/geomet/interval_sample.py +641 -641
  23. elphick/geomet/io.py +379 -379
  24. elphick/geomet/plot.py +147 -147
  25. elphick/geomet/sample.py +28 -28
  26. elphick/geomet/utils/amenability.py +49 -49
  27. elphick/geomet/utils/block_model_converter.py +93 -93
  28. elphick/geomet/utils/components.py +136 -136
  29. elphick/geomet/utils/data.py +49 -49
  30. elphick/geomet/utils/estimates.py +108 -108
  31. elphick/geomet/utils/interp.py +193 -193
  32. elphick/geomet/utils/interp2.py +134 -134
  33. elphick/geomet/utils/layout.py +72 -72
  34. elphick/geomet/utils/moisture.py +61 -61
  35. elphick/geomet/utils/output.html +617 -0
  36. elphick/geomet/utils/pandas.py +378 -378
  37. elphick/geomet/utils/parallel.py +29 -29
  38. elphick/geomet/utils/partition.py +63 -63
  39. elphick/geomet/utils/size.py +51 -51
  40. elphick/geomet/utils/timer.py +80 -80
  41. elphick/geomet/utils/viz.py +56 -56
  42. elphick/geomet/validate.py.hide +176 -176
  43. {geometallurgy-0.4.12.dist-info → geometallurgy-0.4.13.dist-info}/LICENSE +21 -21
  44. {geometallurgy-0.4.12.dist-info → geometallurgy-0.4.13.dist-info}/METADATA +7 -5
  45. geometallurgy-0.4.13.dist-info/RECORD +49 -0
  46. {geometallurgy-0.4.12.dist-info → geometallurgy-0.4.13.dist-info}/WHEEL +1 -1
  47. geometallurgy-0.4.12.dist-info/RECORD +0 -48
  48. {geometallurgy-0.4.12.dist-info → geometallurgy-0.4.13.dist-info}/entry_points.txt +0 -0
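The summary above can be reproduced locally. The sketch below is a minimal example, assuming both wheels have already been downloaded to the working directory (the file names shown are illustrative); it hashes each archive member and reports which files were added, removed, or changed between the two releases.

    import hashlib
    import zipfile

    # Illustrative local file names for the two published wheels (assumed, not part of this page).
    OLD = "geometallurgy-0.4.12-py3-none-any.whl"
    NEW = "geometallurgy-0.4.13-py3-none-any.whl"


    def digests(wheel_path: str) -> dict[str, str]:
        """Map each archive member to the sha256 hex digest of its content."""
        with zipfile.ZipFile(wheel_path) as zf:
            return {name: hashlib.sha256(zf.read(name)).hexdigest()
                    for name in zf.namelist() if not name.endswith("/")}


    old, new = digests(OLD), digests(NEW)
    for name in sorted(old.keys() | new.keys()):
        if name not in old:
            print(f"added:   {name}")
        elif name not in new:
            print(f"removed: {name}")
        elif old[name] != new[name]:
            print(f"changed: {name}")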
elphick/geomet/validate.py.hide
@@ -1,176 +1,176 @@
- """
- Classes to support validation of block model files.
- """
-
- import logging
- import tempfile
- from abc import ABC, abstractmethod
- from concurrent.futures import ThreadPoolExecutor
- from concurrent.futures import as_completed
- from pathlib import Path
- from typing import Optional
-
- import pandas as pd
-
- from elphick.geomet.readers import ParquetFileReader, OMFFileReader
- from elphick.geomet.utils.components import is_compositional
-
-
- #
- # class FileValidator(ABC):
- #     def __init__(self, file_path: Path, schema_path: Optional[Path] = None,
- #                  lazy_validation: bool = True,
- #                  negative_to_nan_threshold: float = 0):
- #         if not file_path.exists():
- #             raise ValueError(f"File does not exist: {file_path}")
- #         self._logger = logging.getLogger(self.__class__.__name__)
- #         self.file_path = file_path
- #         self.schema_path = schema_path
- #         self.schema: DataFrameSchema = DataFrameSchema({}) if schema_path is None else pandera.io.from_yaml(schema_path)
- #         self.lazy_validation = lazy_validation
- #         self.negative_to_nan_threshold = negative_to_nan_threshold
- #
- #         self.report: Optional[dict] = None
- #
- #     @abstractmethod
- #     def validate(self):
- #         pass
- #
- #     def create_schema_file(self, schema_output_path: Path):
- #         """
- #         Create an inferred schema file from the file being validated
- #         Args:
- #             schema_output_path: The output path for the schema file
- #
- #         Returns:
- #
- #         """
- #
- #         df = self.read_column()
- #
- #         with open(schema_output_path, 'w') as f:
- #             yaml.dump(self.schema.to_yaml(), f)
-
-
- class BaseProcessor(ABC):
-     """
-     To support columnar processing of large datasets, the BaseProcessor class provides a framework for processing
-     data by column. The process method will process the data by column if a file_path is provided, or the entire
-     dataset if data is provided.
-     """
-
-     def __init__(self, file_path: Optional[Path] = None, data: Optional[pd.DataFrame] = None, **kwargs):
-         self.logger = logging.getLogger(self.__class__.__name__)
-         if file_path is None and data is None:
-             raise ValueError("Either file_path or data must be provided.")
-         self.file_path = file_path
-         self.data = data
-         self.temp_files = []
-
-         if self.file_path.suffix == '.parquet':
-             self.reader: ParquetFileReader = ParquetFileReader(self.file_path)
-         elif self.file_path.suffix == '.omf':
-             self.reader: OMFFileReader = OMFFileReader(self.file_path, **kwargs)
-         else:
-             raise ValueError(f"Unsupported file format: {self.file_path.suffix}")
-
-     @property
-     def composition_variables(self) -> list[str]:
-         """
-         Detect columns that contain composition data
-
-         Returns:
-             A list of column names that contain composition data
-         """
-         res = None
-         if self.reader.variables_in_file:
-             res = list(is_compositional(self.reader.variables_in_file, strict=False).keys())
-         return res
-
-     def process(self, num_workers: Optional[int] = 1, **kwargs):
-         if self.data is None:
-             with ThreadPoolExecutor(max_workers=num_workers, thread_name_prefix='geomet-processor') as executor:
-                 futures = {executor.submit(self._process_variable, variable, **kwargs): variable for variable in
-                            self.reader.variables_in_file}
-                 results = {}
-                 for future in as_completed(futures):
-                     variable = futures[future]
-                     try:
-                         results[variable] = future.result()
-                     except Exception as exc:
-                         print(f'{variable} generated an exception: {exc}')
-         else:
-             results = self._process_data()
-         return results
-
-     @abstractmethod
-     def _process_variable(self, column, **kwargs):
-         pass
-
-     @abstractmethod
-     def _process_data(self):
-         pass
-
-
- class PreProcessor(BaseProcessor):
-     def __init__(self, file_path: Optional[Path] = None, data: Optional[pd.DataFrame] = None, **kwargs):
-         """
-         Preprocess data before validation.
-         For large datasets where memory may be constrained, file_path will provide processing by columns.
-         If data is provided, the entire dataset already in memory will be processed.
-         Args:
-             file_path: The optional path to the file to be preprocessed.
-             data: The optional DataFrame to be preprocessed.
-         """
-
-         super().__init__(file_path, data, **kwargs)
-
-     def process(self, negative_to_nan_threshold: Optional[float] = -1,
-                 not_detected_assays_threshold: Optional[float] = 0.5,
-                 max_workers=1):
-         super().process(max_workers=max_workers, negative_to_nan_threshold=negative_to_nan_threshold,
-                         not_detected_assays_threshold=not_detected_assays_threshold)
-
-     def _process_variable(self, column, **kwargs):
-         data = pd.read_parquet(self.file_path, columns=[column])
-         processed_data = self._process_data(data)
-         temp_file = tempfile.NamedTemporaryFile(delete=False)
-         processed_data.to_parquet(temp_file.name)
-         self.temp_files.append(temp_file)
-
-     def _process_data(self) -> pd.DataFrame:
-         # Preprocessing logic here
-         return data
-
-
- class Validator(BaseProcessor):
-     def __init__(self, file_path: Optional[Path] = None, data: Optional[pd.DataFrame] = None, **kwargs):
-         """
-         Validate the data using a pandera schema.
-         For large datasets where memory may be constrained file_path will provide processing by columns.
-         If data is provided, the entire dataset already in memory will be processed.
-         Args:
-             file_path: The optional path to the file to be preprocessed.
-             data: The optional DataFrame to be preprocessed.
-         """
-         super().__init__(file_path, data, **kwargs)
-
-     def process(self):
-         if self.data is None:
-             columns = get_parquet_columns(self.file_path)
-             with ThreadPoolExecutor() as executor:
-                 for column in columns:
-                     executor.submit(self._process_variable, column)
-         else:
-             self._process_data()
-
-     def _process_variable(self, column):
-         data = pd.read_parquet(self.file_path, columns=[column])
-         processed_data = self._process_data(data)
-         temp_file = tempfile.NamedTemporaryFile(delete=False)
-         processed_data.to_parquet(temp_file.name)
-         self.temp_files.append(temp_file)
-
-     def _process_data(self, data):
-         # Validation logic here
-         return data
+ """
+ Classes to support validation of block model files.
+ """
+
+ import logging
+ import tempfile
+ from abc import ABC, abstractmethod
+ from concurrent.futures import ThreadPoolExecutor
+ from concurrent.futures import as_completed
+ from pathlib import Path
+ from typing import Optional
+
+ import pandas as pd
+
+ from elphick.geomet.readers import ParquetFileReader, OMFFileReader
+ from elphick.geomet.utils.components import is_compositional
+
+
+ #
+ # class FileValidator(ABC):
+ #     def __init__(self, file_path: Path, schema_path: Optional[Path] = None,
+ #                  lazy_validation: bool = True,
+ #                  negative_to_nan_threshold: float = 0):
+ #         if not file_path.exists():
+ #             raise ValueError(f"File does not exist: {file_path}")
+ #         self._logger = logging.getLogger(self.__class__.__name__)
+ #         self.file_path = file_path
+ #         self.schema_path = schema_path
+ #         self.schema: DataFrameSchema = DataFrameSchema({}) if schema_path is None else pandera.io.from_yaml(schema_path)
+ #         self.lazy_validation = lazy_validation
+ #         self.negative_to_nan_threshold = negative_to_nan_threshold
+ #
+ #         self.report: Optional[dict] = None
+ #
+ #     @abstractmethod
+ #     def validate(self):
+ #         pass
+ #
+ #     def create_schema_file(self, schema_output_path: Path):
+ #         """
+ #         Create an inferred schema file from the file being validated
+ #         Args:
+ #             schema_output_path: The output path for the schema file
+ #
+ #         Returns:
+ #
+ #         """
+ #
+ #         df = self.read_column()
+ #
+ #         with open(schema_output_path, 'w') as f:
+ #             yaml.dump(self.schema.to_yaml(), f)
+
+
+ class BaseProcessor(ABC):
+     """
+     To support columnar processing of large datasets, the BaseProcessor class provides a framework for processing
+     data by column. The process method will process the data by column if a file_path is provided, or the entire
+     dataset if data is provided.
+     """
+
+     def __init__(self, file_path: Optional[Path] = None, data: Optional[pd.DataFrame] = None, **kwargs):
+         self.logger = logging.getLogger(self.__class__.__name__)
+         if file_path is None and data is None:
+             raise ValueError("Either file_path or data must be provided.")
+         self.file_path = file_path
+         self.data = data
+         self.temp_files = []
+
+         if self.file_path.suffix == '.parquet':
+             self.reader: ParquetFileReader = ParquetFileReader(self.file_path)
+         elif self.file_path.suffix == '.omf':
+             self.reader: OMFFileReader = OMFFileReader(self.file_path, **kwargs)
+         else:
+             raise ValueError(f"Unsupported file format: {self.file_path.suffix}")
+
+     @property
+     def composition_variables(self) -> list[str]:
+         """
+         Detect columns that contain composition data
+
+         Returns:
+             A list of column names that contain composition data
+         """
+         res = None
+         if self.reader.variables_in_file:
+             res = list(is_compositional(self.reader.variables_in_file, strict=False).keys())
+         return res
+
+     def process(self, num_workers: Optional[int] = 1, **kwargs):
+         if self.data is None:
+             with ThreadPoolExecutor(max_workers=num_workers, thread_name_prefix='geomet-processor') as executor:
+                 futures = {executor.submit(self._process_variable, variable, **kwargs): variable for variable in
+                            self.reader.variables_in_file}
+                 results = {}
+                 for future in as_completed(futures):
+                     variable = futures[future]
+                     try:
+                         results[variable] = future.result()
+                     except Exception as exc:
+                         print(f'{variable} generated an exception: {exc}')
+         else:
+             results = self._process_data()
+         return results
+
+     @abstractmethod
+     def _process_variable(self, column, **kwargs):
+         pass
+
+     @abstractmethod
+     def _process_data(self):
+         pass
+
+
+ class PreProcessor(BaseProcessor):
+     def __init__(self, file_path: Optional[Path] = None, data: Optional[pd.DataFrame] = None, **kwargs):
+         """
+         Preprocess data before validation.
+         For large datasets where memory may be constrained, file_path will provide processing by columns.
+         If data is provided, the entire dataset already in memory will be processed.
+         Args:
+             file_path: The optional path to the file to be preprocessed.
+             data: The optional DataFrame to be preprocessed.
+         """
+
+         super().__init__(file_path, data, **kwargs)
+
+     def process(self, negative_to_nan_threshold: Optional[float] = -1,
+                 not_detected_assays_threshold: Optional[float] = 0.5,
+                 max_workers=1):
+         super().process(max_workers=max_workers, negative_to_nan_threshold=negative_to_nan_threshold,
+                         not_detected_assays_threshold=not_detected_assays_threshold)
+
+     def _process_variable(self, column, **kwargs):
+         data = pd.read_parquet(self.file_path, columns=[column])
+         processed_data = self._process_data(data)
+         temp_file = tempfile.NamedTemporaryFile(delete=False)
+         processed_data.to_parquet(temp_file.name)
+         self.temp_files.append(temp_file)
+
+     def _process_data(self) -> pd.DataFrame:
+         # Preprocessing logic here
+         return data
+
+
+ class Validator(BaseProcessor):
+     def __init__(self, file_path: Optional[Path] = None, data: Optional[pd.DataFrame] = None, **kwargs):
+         """
+         Validate the data using a pandera schema.
+         For large datasets where memory may be constrained file_path will provide processing by columns.
+         If data is provided, the entire dataset already in memory will be processed.
+         Args:
+             file_path: The optional path to the file to be preprocessed.
+             data: The optional DataFrame to be preprocessed.
+         """
+         super().__init__(file_path, data, **kwargs)
+
+     def process(self):
+         if self.data is None:
+             columns = get_parquet_columns(self.file_path)
+             with ThreadPoolExecutor() as executor:
+                 for column in columns:
+                     executor.submit(self._process_variable, column)
+         else:
+             self._process_data()
+
+     def _process_variable(self, column):
+         data = pd.read_parquet(self.file_path, columns=[column])
+         processed_data = self._process_data(data)
+         temp_file = tempfile.NamedTemporaryFile(delete=False)
+         processed_data.to_parquet(temp_file.name)
+         self.temp_files.append(temp_file)
+
+     def _process_data(self, data):
+         # Validation logic here
+         return data
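The hidden module above (validate.py.hide, shipped disabled) sketches a column-at-a-time workflow for large Parquet block models: each variable is read, processed and written independently so the full table never has to sit in memory. Below is a minimal, self-contained sketch of that pattern, assuming only pandas and pyarrow (both declared dependencies); the cleaning rule, function names and parameters are illustrative and not part of the geometallurgy API.

    from pathlib import Path
    from concurrent.futures import ThreadPoolExecutor, as_completed

    import pandas as pd
    import pyarrow.parquet as pq


    def clean_column(file_path: Path, column: str, negative_to_nan_threshold: float = -1.0) -> pd.Series:
        """Read one column and null out small negative values (illustrative rule only)."""
        s = pd.read_parquet(file_path, columns=[column])[column]
        return s.mask((s < 0) & (s >= negative_to_nan_threshold))


    def process_by_column(file_path: Path, num_workers: int = 4) -> dict[str, pd.Series]:
        """Process each Parquet column in its own task, never loading the full table."""
        columns = pq.ParquetFile(file_path).schema_arrow.names
        results: dict[str, pd.Series] = {}
        with ThreadPoolExecutor(max_workers=num_workers) as executor:
            futures = {executor.submit(clean_column, file_path, col): col for col in columns}
            for future in as_completed(futures):
                results[futures[future]] = future.result()
        return results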
{geometallurgy-0.4.12.dist-info → geometallurgy-0.4.13.dist-info}/LICENSE
@@ -1,21 +1,21 @@
- MIT License
-
- Copyright (c) 2024 Greg Elphick
-
- Permission is hereby granted, free of charge, to any person obtaining a copy
- of this software and associated documentation files (the "Software"), to deal
- in the Software without restriction, including without limitation the rights
- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- copies of the Software, and to permit persons to whom the Software is
- furnished to do so, subject to the following conditions:
-
- The above copyright notice and this permission notice shall be included in all
- copies or substantial portions of the Software.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- SOFTWARE.
+ MIT License
+
+ Copyright (c) 2024 Greg Elphick
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
{geometallurgy-0.4.12.dist-info → geometallurgy-0.4.13.dist-info}/METADATA
@@ -1,26 +1,28 @@
  Metadata-Version: 2.1
  Name: geometallurgy
- Version: 0.4.12
+ Version: 0.4.13
  Summary: Tools for the geometallurgist
  Home-page: https://github.com/elphick/geometallurgy
  Author: Greg
  Author-email: 11791585+elphick@users.noreply.github.com
- Requires-Python: >=3.9,<3.13
+ Requires-Python: >=3.10,<3.13
  Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
  Provides-Extra: all
  Provides-Extra: blockmodel
- Provides-Extra: map
+ Provides-Extra: spatial
  Provides-Extra: validation
- Requires-Dist: folium (>=0.16.0,<0.17.0) ; extra == "map"
+ Requires-Dist: folium (>=0.16.0,<0.17.0) ; extra == "spatial"
+ Requires-Dist: omfpandas (>=0.8.1,<0.9.0) ; extra == "blockmodel"
  Requires-Dist: omfvista (>=0.3.0) ; extra == "blockmodel"
  Requires-Dist: pandas (>=1.0)
  Requires-Dist: pandera[io] (>=0.19.3,<0.21.0) ; extra == "validation"
  Requires-Dist: periodictable (>=1.7.0,<2.0.0)
  Requires-Dist: plotly (>=5.22.0,<6.0.0)
  Requires-Dist: pyarrow (>=16.1.0,<18.0.0)
+ Requires-Dist: rioxarray (>=0.18.2,<0.19.0) ; extra == "spatial"
  Project-URL: Documentation, https://elphick.github.io/geometallurgy
  Project-URL: Repository, https://github.com/elphick/geometallurgy
  Description-Content-Type: text/markdown
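The 0.4.13 metadata drops Python 3.9, adds the 3.12 classifier, renames the map extra to spatial (folium, plus the new rioxarray pin) and adds omfpandas to the blockmodel extra. A quick way to check which optional dependencies are present in an environment, assuming a standard pip install, is sketched below; the install commands in the comments are assumptions based on the extras named above.

    # Assumed install commands (not shipped with the package):
    #   pip install "geometallurgy[spatial]"     -> folium, rioxarray
    #   pip install "geometallurgy[blockmodel]"  -> omfpandas, omfvista
    import importlib.util

    for module, extra in [("folium", "spatial"), ("rioxarray", "spatial"),
                          ("omfpandas", "blockmodel"), ("omfvista", "blockmodel")]:
        found = importlib.util.find_spec(module) is not None
        print(f"{module:<10} ({extra} extra): {'installed' if found else 'missing'}")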
geometallurgy-0.4.13.dist-info/RECORD
@@ -0,0 +1,49 @@
+ elphick/geomet/__init__.py,sha256=xBsTbmmhZbMi6G5AvdFgu5n73L735qV0OOc9anav5Io,285
+ elphick/geomet/base.py,sha256=eJ_armyAt1qOm39yG075SUW1HHdb5_anKpkmsxgrKww,51736
+ elphick/geomet/block_model.py,sha256=luKfYM3Z81BlyVscm8mvT5w3exo46P-axZMylJn-Rqw,13256
+ elphick/geomet/config/__init__.py,sha256=6ZhRvJV0zX0W65Jsl1QkCdXLXTL8aJjlcuGlcQM66so,36
+ elphick/geomet/config/config_read.py,sha256=ciSVB0DbLo_aTOWs26UBfsbggC31EEEoDf2CKStsdDI,1435
+ elphick/geomet/config/flowsheet_example_partition.yaml,sha256=mefwSRLm90UKKohk2WJOa0TYWFuwwU-wlP-FsI_Tf0I,747
+ elphick/geomet/config/flowsheet_example_simple.yaml,sha256=7-KKBpTxepJvsi7sYMwJWKrruC58PX0Qr4yIfDL0ovQ,453
+ elphick/geomet/config/mc_config.yml,sha256=0l8Riyzcq7q3soO1JWYl6BIa6sz6EDJp2Fb2etE43c8,948
+ elphick/geomet/data/downloader.py,sha256=t4luU6ojYb7IjSTHo4yrLfFyViJin5nbOU4LmD_nVf4,1660
+ elphick/geomet/data/register.csv,sha256=tBiBluQrWgeEen696pVAfiX-wZ8dOlN0memCu-ir4Qk,3058
+ elphick/geomet/datasets/__init__.py,sha256=vJz0740rRyG3Ppr6o7_ldgtO_7IU6gAVWbR255wFF0E,61
+ elphick/geomet/datasets/datasets.py,sha256=8Bou_EyqlVyb6S4YLCnxUCuvKhkLU_JEBkR9Tq5fiuA,1929
+ elphick/geomet/datasets/downloader.py,sha256=ai_9fxoUNwiiz3ZumBBe5WfWi_R-5bAOzoiLdoasiTU,1662
+ elphick/geomet/datasets/register.csv,sha256=tBiBluQrWgeEen696pVAfiX-wZ8dOlN0memCu-ir4Qk,3058
+ elphick/geomet/datasets/sample_data.py,sha256=Z45LT91lUfyax7zZZ1Gzo8V_wEjZViqT9vkQnwa5GEU,7880
+ elphick/geomet/extras.py,sha256=2UvV4rojLLa3v81CTKkBFiMLsBnkCFqU-Hbg8oiRvdc,1199
+ elphick/geomet/flowsheet/__init__.py,sha256=BfW1xVNbCKXXIsj-4qIA30QCzcpOz6uuU5tOMM2Xj0g,34
+ elphick/geomet/flowsheet/flowsheet.py,sha256=Axbgv-z5Uz5kk6itutMp5RC0QLVeChfmaDIBaNpcQA4,53248
+ elphick/geomet/flowsheet/loader.py,sha256=sScNaZHpjM2V6DWfnQ5XmvUkWBH0pmkSlzOB10mojzc,4855
+ elphick/geomet/flowsheet/operation.py,sha256=Pz7dk9KcDZ6TPp54MQJnTMZLPPS9aHLyQTDET4o7Ij8,9976
+ elphick/geomet/flowsheet/stream.py,sha256=vVkj95G59qDqg6ghZm28poFXQb5Lilh4U48DKV8XjNw,1409
+ elphick/geomet/interval_sample.py,sha256=TG2X7S2brWHsZ4emhWpX6j0dmelAGnaI9E-793A5ipk,31961
+ elphick/geomet/io.py,sha256=xGyYh9RH_LNEHg1Fr_x9LKWM7iYWaCENlfXXQKK1Wy8,16088
+ elphick/geomet/plot.py,sha256=Y8sx3BkK0WSSxogoRzhL02x5DtvWBh7QSHZiB_ZCr24,5879
+ elphick/geomet/profile.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ elphick/geomet/sample.py,sha256=hQgrU3xJm5HapHfrBfrUMHCfvH3K2pGsWkCnCkvZOBQ,1251
+ elphick/geomet/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ elphick/geomet/utils/amenability.py,sha256=CbiZQXjMSLkAAN7_IiSpuroAkRnVzlapTVAxHKUPOB0,1923
+ elphick/geomet/utils/block_model_converter.py,sha256=tiyrbmj8CbVk2sRuGy2TvfHgjbe8kGU9CgdDXg-V27o,3457
+ elphick/geomet/utils/components.py,sha256=h13cd5keaPotlOeNADhndmV-HtgFAHsglf_6pWTtBWQ,3682
+ elphick/geomet/utils/data.py,sha256=gUFXNbRXfFLPvWPmCtt1IcnX2JaYZRbLPFSSuKBtaIg,2032
+ elphick/geomet/utils/estimates.py,sha256=NTBL4IPKJQwf7M6m3eFXZJHlhkgGXe4cnA07ca10LW4,5000
+ elphick/geomet/utils/interp.py,sha256=WCdylv4hBHcgZaOSR649lbColBTCwcUGfkiPeosYgog,10872
+ elphick/geomet/utils/interp2.py,sha256=1GPp4IM_CTRJS52yE20mNXCypgR-cThitSpfmsqMkCs,6767
+ elphick/geomet/utils/layout.py,sha256=QnoX32-b8ttWBkEP1Ubxm_TyGoPu11qCAALnNgSZchA,2152
+ elphick/geomet/utils/moisture.py,sha256=qqMoepVGNCcYiRU7EuAazjp2gfEE_y8IKuS9pU8kwzM,2253
+ elphick/geomet/utils/output.html,sha256=MB_P8NEErxhBqTeg38bZt4xFX3VPtfDHSWjxiUJQk0Y,25543
+ elphick/geomet/utils/pandas.py,sha256=vVBQQ-yzTB0WXor0jdB9lmeL021TTSr6rC7_0RNcYwg,17775
+ elphick/geomet/utils/parallel.py,sha256=S5vX9ktjy3xOX9vGf3xoe0J-ENiP6fs9VEPHbSZATPs,871
+ elphick/geomet/utils/partition.py,sha256=7wTNW__uV_UpURJtVgQsBdHrgbRyh8Pr3xmI7a6-aPs,1597
+ elphick/geomet/utils/size.py,sha256=n5oGuGOGeKbv3hS4P4SskdfNMnU7xor09Y4g9SLFeNM,2323
+ elphick/geomet/utils/timer.py,sha256=YaroUXieJsrDWFNSA4En-sNV66bFRATo_VlmfJYeqhw,3192
+ elphick/geomet/utils/viz.py,sha256=CmwqENEl8oQuOOwVUDPfFQ5fxe7TdpYoC7c4cJV-nJI,1774
+ elphick/geomet/validate.py.hide,sha256=0-JMpIKJ3y1T3Rr6vht9omCUGzUgr9_d-UYcwbu6UU0,6914
+ geometallurgy-0.4.13.dist-info/entry_points.txt,sha256=aQI-8kmaba_c9ZGOFkJgWl0MWBke5BQLNyPSVcbS7EU,58
+ geometallurgy-0.4.13.dist-info/LICENSE,sha256=JysuEoMoZt5QE0QDVBEmG3vDZ2Eqqu-lADSPWBjGbUk,1090
+ geometallurgy-0.4.13.dist-info/METADATA,sha256=WG5ph8NRdF1vjB_im8NJSErfJmJl82PSvZgPvjGgq-4,4527
+ geometallurgy-0.4.13.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+ geometallurgy-0.4.13.dist-info/RECORD,,
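Each RECORD row follows the wheel convention path,sha256=<digest>,<size-in-bytes>, where the digest is the urlsafe base64 encoding of the file's SHA-256 hash with the trailing = padding stripped. The sketch below recomputes such an entry for a file from an unpacked wheel; the example path is illustrative.

    import base64
    import hashlib
    from pathlib import Path


    def record_entry(path: Path) -> str:
        """Build a wheel RECORD line: path, urlsafe-base64 sha256 (padding stripped), size in bytes."""
        data = path.read_bytes()
        digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
        return f"{path.as_posix()},sha256={digest},{len(data)}"


    # e.g. record_entry(Path("elphick/geomet/sample.py")) run from the root of an unpacked wheel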
{geometallurgy-0.4.12.dist-info → geometallurgy-0.4.13.dist-info}/WHEEL
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: poetry-core 1.6.0
+ Generator: poetry-core 1.9.1
  Root-Is-Purelib: true
  Tag: py3-none-any
geometallurgy-0.4.12.dist-info/RECORD
@@ -1,48 +0,0 @@
- elphick/geomet/__init__.py,sha256=gcaArz-agLsm_Tf9KNvmGznw4Jml2QTjj_CxKUC1Ejg,274
- elphick/geomet/base.py,sha256=vp0C7DEAAUDGhTdQ8_Hz9WmqZRjNGn7m5CqEGy3L_98,50603
- elphick/geomet/block_model.py,sha256=Q-LEdILajUoteO0834IXeS75k8pcPqSaMrinMcWD8R4,14639
- elphick/geomet/config/__init__.py,sha256=F94hbxN3KzSaljbElIGVhdEwX0FKmHxST4jJ7rNohxY,35
- elphick/geomet/config/config_read.py,sha256=frRwfRwUXpgxwMNCiBVFUw1-yPbBHs3h2KjmzXImvxY,1396
- elphick/geomet/config/flowsheet_example_partition.yaml,sha256=85vrhOotQHhaKkYN-0QQA7ed03EMRkyaKZc-GtXMKro,716
- elphick/geomet/config/flowsheet_example_simple.yaml,sha256=u4sM2gkyyHXnOlqW3TvwBjRdl4x34zGeZS7YGNmdfC4,428
- elphick/geomet/config/mc_config.yml,sha256=6eUGCJlU5Aw9M5Rn6Xuezf8_bShxwxZ1g5XB8P5iLWc,913
- elphick/geomet/data/downloader.py,sha256=x_107mTNCaG9YwEO54mBx7l3KaqOavCi93uZjPdC5GU,1621
- elphick/geomet/data/register.csv,sha256=-N3F6L0097C-I79axINi_ewFAxiqbT_SOSW3-XtPkI4,3046
- elphick/geomet/datasets/__init__.py,sha256=7CX648YFMv39FchI1_oEF28zF52v4gX0lnqomPwkcvU,59
- elphick/geomet/datasets/datasets.py,sha256=RUqQWXZTWEA3R4S5RRdjwlosQZFy2PaMX8x329eP9mo,1882
- elphick/geomet/datasets/downloader.py,sha256=JXHQfwQYbe1X-tIfajx4kGbqkcWh0U2k5R03ur2J6E4,1622
- elphick/geomet/datasets/register.csv,sha256=-N3F6L0097C-I79axINi_ewFAxiqbT_SOSW3-XtPkI4,3046
- elphick/geomet/datasets/sample_data.py,sha256=jt5DWxdMmPbZGDuon2s8Q2wlX3cEegB0dSmRKF4pz4I,7684
- elphick/geomet/extras.py,sha256=0yDwbPMylP21EOo27juu4gUiewygSXLSjggYDrPvDcQ,1128
- elphick/geomet/flowsheet/__init__.py,sha256=-lxSLPZNQfiLXKZ2qqS5XbbhrZA2ABi3ppx0LaHnNEI,33
- elphick/geomet/flowsheet/flowsheet.py,sha256=__kgowBIyWfvXcdPWCFihoEUdOqTj7KszSbKGF1AkBo,52032
- elphick/geomet/flowsheet/loader.py,sha256=8nd9Vqbg1de35iuoc4mdRFxrUsIBZed0ivXIAu80jBk,4756
- elphick/geomet/flowsheet/operation.py,sha256=f8k0-Gr_Uy2SlEp8bwAaG4yeBa3DU0HoPn9wyWhYipE,9720
- elphick/geomet/flowsheet/stream.py,sha256=NOXcYeZLSmOSoSRFc7M36Jc8c1ARgjiCvtRuixYfuqA,1370
- elphick/geomet/interval_sample.py,sha256=fhcWBTA01TqvCBsJv7dzWZHRBpw_4W2Ahawks5SPj28,31320
- elphick/geomet/io.py,sha256=tZsX_getGxL07dPlF3Ozyzvt2tFHE5OdgPM5pc5xL68,15709
- elphick/geomet/plot.py,sha256=e9uz8L3QZ23CW4OYm78NhdZl01i0DxHfC4r1kigz7Ss,5732
- elphick/geomet/profile.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- elphick/geomet/sample.py,sha256=cbkqkSbCu7IU09IOlEp_Wfx0-tYT6hfaOqUy30gMctM,1223
- elphick/geomet/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- elphick/geomet/utils/amenability.py,sha256=wZ4eIAmcJRfWUp0ZwzfVPUaHEe0g9N2LBaoNv1fZ61E,1874
- elphick/geomet/utils/block_model_converter.py,sha256=REYbS4cu-8z0IzCpuU9ISlPnAvxs9nyX4Bm4yo9mBC8,3364
- elphick/geomet/utils/components.py,sha256=oDR8w7aFKRP38u98yfHp-8MtOaG-c0YcGtwNjgrhGWA,3546
- elphick/geomet/utils/data.py,sha256=AnQ3JXEt2M-T5doGljM_fvdX1CvGbMr6wwjxqcw0fjs,1983
- elphick/geomet/utils/estimates.py,sha256=-x6KDQb-04IrxN8yO38Fx93F6_SGG67koagCtYBtW3c,4892
- elphick/geomet/utils/interp.py,sha256=9lb2sEFfAWYzFECybLPD4nF0S85Xo37nNkxU1DG__A4,10679
- elphick/geomet/utils/interp2.py,sha256=ybuQBNTQOdVzmVYOhQDx2LkGKpl8yxgbQPz_hwS8ClQ,6633
- elphick/geomet/utils/layout.py,sha256=-c1EF-G0qGRQbLrrTS-LsbII-lnvw71y97iUBLd02do,2080
- elphick/geomet/utils/moisture.py,sha256=t9WMwADyz-QAMW-cdah1tIlzTDrhooSoKOPdIlVQHvU,2192
- elphick/geomet/utils/pandas.py,sha256=6sKl3WUjXLR7qFmqBzuCjnfCoUsLRapwZk2nO5BfzYI,17397
- elphick/geomet/utils/parallel.py,sha256=l38JBTkCmdqKHQkS8njoA-sBN9XQGkhF59XtAhWShgs,842
- elphick/geomet/utils/partition.py,sha256=U0jFpvdvZJVdutfB6RzUzKfO9NWCGtBkeySx-QbP-l4,1534
- elphick/geomet/utils/size.py,sha256=EmV_sv2bOImQN3s7TWCniU_y83HNJEPtZH7fMMkYTcc,2272
- elphick/geomet/utils/timer.py,sha256=8WNKLFcINRsZ3IsKtOIZ77YbKtqczyOOTEWY9h9Uxxw,3112
- elphick/geomet/utils/viz.py,sha256=M0CnfDXBHtYb8aak1Sfz6XLvRSmkzX3ybIDllEmDR8A,1718
- elphick/geomet/validate.py.hide,sha256=qAWJlgq0jp19UakVV0dEU_AsqV_JctUn1QTHn8cCRw0,6738
- geometallurgy-0.4.12.dist-info/LICENSE,sha256=GrSVdcGtNbGvAYC_tIjLHBrIVPyg-Ksfe7ZGr087yCI,1069
- geometallurgy-0.4.12.dist-info/METADATA,sha256=iatuwf4Pey2ZHhaEu-p-NJ-tEVCdZAijNXZf91jhgpM,4386
- geometallurgy-0.4.12.dist-info/WHEEL,sha256=WGfLGfLX43Ei_YORXSnT54hxFygu34kMpcQdmgmEwCQ,88
- geometallurgy-0.4.12.dist-info/entry_points.txt,sha256=aQI-8kmaba_c9ZGOFkJgWl0MWBke5BQLNyPSVcbS7EU,58
- geometallurgy-0.4.12.dist-info/RECORD,,