forgeo-gmlib 0.6.2__cp312-cp312-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,9 @@
1
# Version metadata is written by setuptools-scm at build time; fall back to
# sentinel values when running from a source tree without generated _version.
try:
    from ._version import __version__, __version_tuple__, version, version_tuple
except ImportError:
    __version__ = version = None
    __version_tuple__ = version_tuple = ()

from .GeologicalModel3D import GeologicalModel

# Only the model class is part of the public API; version names remain
# importable but are not re-exported by star-imports.
__all__ = ["GeologicalModel"]
@@ -0,0 +1,34 @@
1
# file generated by setuptools-scm
# don't change, don't track in version control

__all__ = [
    "__version__",
    "__version_tuple__",
    "version",
    "version_tuple",
    "__commit_id__",
    "commit_id",
]

# Constant-False TYPE_CHECKING avoids importing `typing` at runtime while
# still letting static checkers see the precise alias definitions.
TYPE_CHECKING = False
if TYPE_CHECKING:
    from typing import Tuple
    from typing import Union

    VERSION_TUPLE = Tuple[Union[int, str], ...]
    COMMIT_ID = Union[str, None]
else:
    VERSION_TUPLE = object
    COMMIT_ID = object

version: str
__version__: str
__version_tuple__: VERSION_TUPLE
version_tuple: VERSION_TUPLE
commit_id: COMMIT_ID
__commit_id__: COMMIT_ID

__version__ = version = '0.6.2'
__version_tuple__ = version_tuple = (0, 6, 2)

__commit_id__ = commit_id = 'g5d87bbd74'
@@ -0,0 +1 @@
1
+ from .core import *
@@ -0,0 +1,130 @@
1
+ from dataclasses import dataclass
2
+
3
+ from .pyarchitecture import *
4
+
5
+
6
+ @dataclass
7
+ class NodeInfo:
8
+ name: str
9
+ color: tuple = None
10
+
11
+
12
def export_model_without_leaves(model):
    """Convert a geological architecture into a plain Python dictionary,
    dropping leaf nodes.

    This is suboptimal because underlying functors are wrapped several times.
    """

    def to_dict(node):
        # Missing nodes and leaves (nodes without an interface) export as None.
        if node is None:
            return None
        iface = model.interface(node)
        if iface is None:
            return None
        return {
            "interface": iface,
            "below": to_dict(model.below(node)),
            "above": to_dict(model.above(node)),
        }

    return to_dict(model.root)
30
+
31
+
32
def _from_GeoModeller(
    model,
    add_interface_node,
    add_formation_node,
    add_topography_node=False,
    topography_color=None,
    keep_info=False,  # FIXME: C++ does not store node information
):
    """Converts a GeoModeller model to architecture.
    This is suboptimal because underlying functors are wrapped several times.

    ``add_interface_node(field, value, below, above, info)`` and
    ``add_formation_node(formation_index)`` are node factories; they let the
    same traversal build either a full architecture or a simple tree.
    ``keep_info`` attaches :class:`NodeInfo` objects to interface nodes.
    """
    fields = model.fields
    values = model.values
    relations = model.relations
    formations = model.pile_formations
    formation_color = model.formation_colors
    pile_reference = model.pile.reference
    # One interface (field/value/relation) separates each pair of consecutive
    # formations, hence the +1.
    assert len(fields) == len(values)
    assert len(fields) == len(relations)
    assert len(fields) + 1 == len(formations)
    assert set(relations).issubset({"onlap", "erode"})

    def build_onlap_range(first, last):
        # Chain the onlap interfaces of formations [first, last], built
        # top-down so each new interface has the previous chain above it.
        assert 0 <= first <= last
        # We add 1 to formations to leave 0 for the default formation (above topography)
        node = add_formation_node(last + 1)
        for k in range(first, last)[::-1]:
            below = add_formation_node(k + 1)
            name = formations[k]
            info = (
                NodeInfo(f"{pile_reference}-{name}", formation_color[name])
                if keep_info
                else None
            )
            node = add_interface_node(fields[k], values[k], below, node, info)
        return node

    # Erosional interfaces split the pile into consecutive onlap ranges; a
    # sentinel at len(relations) closes the last range.
    erosions = [k for k, relation in enumerate(relations) if relation == "erode"]
    erosions.append(len(relations))
    node = build_onlap_range(0, erosions[0])
    for e, next_e in zip(erosions[:-1], erosions[1:]):
        name = formations[e]
        info = (
            NodeInfo(f"erosion-{pile_reference}-{name}", formation_color[name])
            if keep_info
            else None
        )
        # Everything built so far lies below the erosion surface.
        node = add_interface_node(
            fields[e], values[e], node, build_onlap_range(e + 1, next_e), info
        )
    if add_topography_node:
        # Wrap the whole tree under the topography interface; above it is the
        # default formation (None / index 0).
        node = add_interface_node(
            model.topography,
            0,
            node,
            None,
            NodeInfo("topography", topography_color) if keep_info else None,
        )
    return node
91
+
92
+
93
def from_GeoModeller(model, **kwargs):
    """Convert a GeoModeller model to a pyarchitecture ``Model``.

    This is suboptimal because underlying functors are wrapped several times.
    """
    result = Model()

    def make_interface(field, value, below, above, info=None):
        # Wrap the raw field into a pyarchitecture Field before insertion.
        return result.add_interface_node(Field(field), value, below, above, info)

    def make_formation(formation):
        return result.add_formation_node(formation)

    root = _from_GeoModeller(model, make_interface, make_formation, **kwargs)
    result.set_root_node(root)
    return result
109
+
110
+
111
@dataclass
class ArchitectureNode:
    """Lightweight tree node produced by ``simple_tree_from_GeoModeller``.

    Attributes:
        interface: ``(field, value)`` pair identifying the interface.
        below: subtree on the "below" side, or None for a leaf.
        above: subtree on the "above" side, or None for a leaf.
        info: optional display metadata.
    """

    interface: tuple
    # Original annotations said `"typing.Callable"` although `typing` was
    # never imported and the fields hold child nodes / NodeInfo, not callables.
    below: "ArchitectureNode | None" = None
    above: "ArchitectureNode | None" = None
    info: "NodeInfo | None" = None
117
+
118
+
119
def simple_tree_from_GeoModeller(model, **kwargs):
    """Convert a GeoModeller model to a tree of ``ArchitectureNode``.

    This is suboptimal because underlying functors are wrapped several times.
    """

    def make_interface(field, value, below, above, info=None):
        return ArchitectureNode((field, value), below, above, info)

    def make_formation(formation):
        # Formations become anonymous leaves in the simple tree.
        return None

    return _from_GeoModeller(model, make_interface, make_formation, **kwargs)
Binary file
@@ -0,0 +1,171 @@
1
+ #
2
+ # This file is part of gmlib. It is free software.
3
+ # You can redistribute it and/or modify it under the terms of the GNU Affero General Public License version 3.
4
+ #
5
+
6
+ from collections import namedtuple
7
+ from itertools import product
8
+
9
+ import numpy as np
10
+
11
# Tree node of the fault network: `fault` is the fault functor itself;
# `negative`/`positive` hold the sorted indices of the child faults lying on
# the corresponding side of `fault`.
Fault_node = namedtuple("Fault_node", ["fault", "negative", "positive"])
12
+
13
+
14
def _build_family_tree(limits):
    """Compute, for each fault, its direct descendants and direct ancestors.

    ``limits[k]`` lists the indices of the faults that fault ``k`` stops on
    (its ancestors).  Returns ``(fault_progeny, fault_ancestors)``: two lists
    of sets holding, for each fault, only the *direct* family links.
    """
    nbf = len(limits)
    assert all(all(fk < nbf for fk in limit) for limit in limits)
    fault_ancestors = [set() for _ in range(nbf)]
    fault_progeny = [set() for _ in range(nbf)]
    # Seed the sets with the links declared in `limits`.
    for fk, data in enumerate(zip(limits, fault_ancestors)):
        stops_on, ancestors = data
        for limit in stops_on:
            ancestors.add(limit)
            fault_progeny[limit].add(fk)
    progenyless = [k for k, progeny in enumerate(fault_progeny) if not progeny]
    ancestorless = [k for k, ancestors in enumerate(fault_ancestors) if not ancestors]

    # FIXME: we do not check there are no cycles
    # OPTIMIZE: this may not be useful...
    # we complete the set of all ancestors to eventually keep only direct ancestors
    # the problem is that we don't have the guarantee the set of all ancestors
    # and complete progeny are correct from the beginning
    # here are the big recursive loops :-(
    def add_progeny(progeny, ancestors):
        # Propagate `progeny` upwards to every (transitive) ancestor.
        for ancestor in ancestors:
            family = fault_progeny[ancestor]
            family |= progeny
            add_progeny(family, fault_ancestors[ancestor])

    def add_ancestors(ancestors, progeny):
        # Propagate `ancestors` downwards to every (transitive) descendant.
        for descendant in progeny:
            family = fault_ancestors[descendant]
            family |= ancestors
            add_ancestors(family, fault_progeny[descendant])

    for alone in progenyless:
        assert not fault_progeny[alone]
        add_progeny({alone}, fault_ancestors[alone])
    for orphan in ancestorless:
        assert not fault_ancestors[orphan]
        add_ancestors({orphan}, fault_progeny[orphan])

    # we now keep only direct links
    def test_set_intersection(S1, S2):
        return any(x in S1 for x in S2)

    for descendant in range(nbf):
        ancestors = fault_ancestors[descendant]
        all_ancestors = list(ancestors)
        for ancestor in all_ancestors:
            # Drop `ancestor` when another of `descendant`'s ancestors lies in
            # its progeny: the link then goes through that intermediate fault.
            if test_set_intersection(ancestors, fault_progeny[ancestor]):
                fault_progeny[ancestor].remove(descendant)
                ancestors.remove(ancestor)
    return fault_progeny, fault_ancestors
64
+
65
+
66
def _build_fault_node(faults, fk, progeny):
    """Build the ``Fault_node`` for fault `fk`, splitting its progeny between
    the negative and positive sides of the fault surface."""
    this_fault = faults[fk]
    assert fk not in progeny
    sides = {False: [], True: []}
    for child in progeny:
        # FIXME: this is a legacy test which is bugprone
        centroid = faults[child].data_centroid()
        sides[this_fault(centroid) > 0].append(child)
    return Fault_node(
        this_fault, tuple(sorted(sides[False])), tuple(sorted(sides[True]))
    )
79
+
80
+
81
def _build_fault_nodes(faults, limits):
    """Build one ``Fault_node`` per fault and return ``(nodes, roots)`` where
    `roots` holds the sorted indices of the faults without any ancestor."""
    nbf = len(faults)
    assert all(all(limiting < nbf for limiting in limit) for limit in limits)
    progeny_sets, ancestor_sets = _build_family_tree(limits)
    nodes = []
    for k, progeny in enumerate(progeny_sets):
        nodes.append(_build_fault_node(faults, k, progeny))
    roots = tuple(
        sorted(k for k, ancestors in enumerate(ancestor_sets) if not ancestors)
    )
    return nodes, roots
90
+
91
+
92
def build(faults, limits):
    """Build the fault network and enumerate all side-assignment combinations.

    Parameters
    ----------
    faults:
        sequence of fault functors (callable on a point, exposing
        ``data_centroid``).
    limits:
        for each fault, the collection of fault indices it stops on.

    Returns
    -------
    ``(nodes, roots, evaluations)`` — one ``Fault_node`` per fault, the
    indices of the ancestor-less faults, and the list of evaluation vectors
    (int8 entries; 0 marks a fault unreachable for that combination).
    Returns None when there is no fault.
    """
    assert len(faults) == len(limits)
    if not faults:
        return None
    nodes, roots = _build_fault_nodes(faults, limits)
    assert roots
    # TODO: all of the following could be grouped into a iterator class
    # yielding evaluation values
    nbf = len(faults)
    evaluations = []
    evaluation = np.zeros(nbf, dtype=np.int8)
    # check[k] is True while fault k has an assigned side (or is nullified)
    check = np.zeros(nbf, dtype=bool)
    # number of faults still without an assignment; 0 == complete combination
    level = nbf
    iterators = []
    exploration = [roots]

    # NB: we use numpy.put method because given an array a
    # a[tuple()] = 1 will set all the array to one
    # whereas a.put(tuple(), 1) will do nothing which
    # is the expected behavior here
    def explore_progeny(progeny, sides, level):
        # Descend one level: schedule the children on the chosen side of each
        # fault and zero-out the children on the opposite side (unreachable
        # for this combination).
        for fk, side in zip(progeny, sides):
            nk = nodes[fk]
            if side < 0:
                deeper = nk.negative
                nullify = nk.positive
            else:
                assert side == 1
                deeper = nk.positive
                nullify = nk.negative
            assert not (nullify and np.any(check[nullify]))
            if nullify:
                check.put(nullify, True)
                evaluation.put(nullify, 0)
                level -= len(nullify)
                # empty iterator: nothing to enumerate on the dead side
                iterators.append((nullify, iter([])))
            if deeper:
                exploration.append(deeper)
        return level

    while iterators or exploration:
        if level == 0:
            # Every fault is assigned: record the combination, then drain the
            # innermost iterator before backtracking.
            assert not exploration
            assert np.all(check)
            faults, it = iterators.pop()
            while True:
                try:
                    evaluations.append(np.copy(evaluation))
                    evaluation.put(faults, next(it))
                except StopIteration:
                    break
            check.put(faults, False)
            level += len(faults)
        while iterators:
            faults, it = iterators[-1]
            try:
                assert faults
                assert np.all(check[faults])
                sides = next(it)
                evaluation.put(faults, sides)
                level = explore_progeny(faults, sides, level)
                break
            except StopIteration:
                # this sibling group is exhausted: release it and backtrack
                check.put(faults, False)
                level += len(faults)
                iterators.pop()
        while exploration:
            # Start enumerating all (-1, 1) side combinations for each newly
            # reached group of faults.
            explore = exploration.pop()
            it = iter(product((-1, 1), repeat=len(explore)))
            sides = next(it)
            assert explore
            assert not np.any(check[explore])
            check.put(explore, True)
            evaluation.put(explore, sides)
            iterators.append((explore, it))
            level -= len(explore)
            level = explore_progeny(explore, sides, level)
    assert level in (0, nbf)
    assert np.all(check) or np.all(np.logical_not(check))
    return nodes, roots, evaluations
@@ -0,0 +1,101 @@
1
+ """
2
+ Created on Mon Apr 16 09:58:12 2018
3
+
4
+ @author: lopez
5
+ """
6
+
7
+ from collections import namedtuple
8
+
9
+ import numpy as np
10
+
11
+
12
class NotImplemented(Exception):
    # NOTE(review): this name shadows the builtin `NotImplemented` constant
    # and duplicates the builtin `NotImplementedError`; renaming it would
    # break existing callers, so the issue is only flagged here.
    pass
14
+
15
+
16
# Gradient constraints: `locations` are the data points, `values` the values
# measured there (presumably gradient vectors — TODO confirm against readers).
GradientData = namedtuple("GradientData", ["locations", "values"])
17
+
18
+
19
class PotentialData:
    """Empty container for potential-field data; attributes are presumably
    attached dynamically by the model reader — TODO confirm."""

    pass
21
+
22
+
23
class FaultData:
    """Mutable record describing a single fault; all fields except `name`
    start with neutral defaults and are filled in later."""

    def __init__(self, name):
        self.name = name
        # Faults are unbounded unless finite-extent parameters are set later.
        self.infinite = True
        for attribute in (
            "center_type",
            "influence_radius",
            "lateral_extent",
            "vertical_extent",
        ):
            setattr(self, attribute, None)
        # Faults this fault stops on (fresh list per instance).
        self.stops_on = []
        self.potential_data = None
        self.color = None
34
+
35
+
36
class SeriesData:
    """Mutable record describing a stratigraphic series; everything except
    `name` is filled in later by the reader."""

    def __init__(self, name):
        self.name = name
        for attribute in (
            "formations",
            "relation",
            "influenced_by_fault",
            "potential_data",
        ):
            setattr(self, attribute, None)
43
+
44
+
45
class Pile:
    """Ordered stack ("pile") of series.

    Parameters
    ----------
    reference:
        either ``"top"`` or ``"base"``, the reference side of the pile.

    Raises
    ------
    ValueError
        when `reference` is neither ``"top"`` nor ``"base"``.
    """

    def __init__(self, reference):
        # Was an `assert`, which is stripped under `python -O`; validate
        # explicitly instead.
        if reference not in {"top", "base"}:
            raise ValueError("pile reference must be 'top' or 'base'")
        self.reference = reference
        # Filled in later by the reader with SeriesData objects.
        self.all_series = None

    def __repr__(self):
        lines = ["pile (reference " + self.reference + ")"]
        # Series and their formations are displayed in reverse storage order.
        for serie in self.all_series[::-1]:
            lines.append(serie.name + " (" + serie.relation + ") ")
            for formation in serie.formations[::-1]:
                lines.append(" " * 2 + formation)
        return "\n".join(lines)
59
+
60
+
61
class CovarianceModel:
    def __init__(self, xml_node, box):
        """
        Here's the magic.

        In GeoModeller interpolation are performed in a rescaled box.
        This affects the gradient nugget effect.
        The variance used to compute the model is not exactly the one given by
        the user...
        42 is the Answer to the Ultimate Question of Life, the Universe,
        and Everything but only for cubic covariances!
        """
        # Rescaling length: largest box extent along X, Y or Z.
        L = max(box[axis + "max"] - box[axis + "min"] for axis in ("X", "Y", "Z"))
        assert L > 0
        attributes = xml_node.attrib  # XML attributes dictionary
        self.anisotropy_angles = np.array(
            [
                np.double(attributes[key])
                for key in ("anisotropyA1", "anisotropyA2", "anisotropyA3")
            ]
        )
        self.anisotropy_values = np.array(
            [
                np.double(attributes[key])
                for key in ("anisotropyV1", "anisotropyV2", "anisotropyV3")
            ]
        )
        # Covariance type (cf. Potentiel.cdl)
        covariance_names = {
            0: "gauss",
            1: "cubique",
            2: "h4log",
            3: "h3",
            4: "penta",
        }
        self.covariance_model = covariance_names[int(attributes["covarianceModel"])]
        assert self.covariance_model == "cubique", (
            "Only cubic covariance models are handled!"
        )
        self.drift_order = int(attributes["degreDrift"])
        self.isotropic = {"true": True, "false": False}[attributes["isotropic"]]
        # Nugget and variance are expressed in the rescaled box (divide by L).
        self.gradient_nugget = float(attributes["nuggetGradient"]) * (1 / L) ** 2
        self.potential_nugget = float(attributes["nuggetPotentiel"])
        self.range = R = float(attributes["range"])
        gvar = float(attributes["gradient"])
        assert gvar > 0
        self.gradient_variance = ((R / L) ** 2 * gvar) / 42.0
        self.tangent_variance = float(attributes["tangentVariance"])