pyAgrum-nightly 2.3.1.9.dev202512261765915415__cp310-abi3-macosx_10_15_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107) hide show
  1. pyagrum/__init__.py +165 -0
  2. pyagrum/_pyagrum.so +0 -0
  3. pyagrum/bnmixture/BNMInference.py +268 -0
  4. pyagrum/bnmixture/BNMLearning.py +376 -0
  5. pyagrum/bnmixture/BNMixture.py +464 -0
  6. pyagrum/bnmixture/__init__.py +60 -0
  7. pyagrum/bnmixture/notebook.py +1058 -0
  8. pyagrum/causal/_CausalFormula.py +280 -0
  9. pyagrum/causal/_CausalModel.py +436 -0
  10. pyagrum/causal/__init__.py +81 -0
  11. pyagrum/causal/_causalImpact.py +356 -0
  12. pyagrum/causal/_dSeparation.py +598 -0
  13. pyagrum/causal/_doAST.py +761 -0
  14. pyagrum/causal/_doCalculus.py +361 -0
  15. pyagrum/causal/_doorCriteria.py +374 -0
  16. pyagrum/causal/_exceptions.py +95 -0
  17. pyagrum/causal/_types.py +61 -0
  18. pyagrum/causal/causalEffectEstimation/_CausalEffectEstimation.py +1175 -0
  19. pyagrum/causal/causalEffectEstimation/_IVEstimators.py +718 -0
  20. pyagrum/causal/causalEffectEstimation/_RCTEstimators.py +132 -0
  21. pyagrum/causal/causalEffectEstimation/__init__.py +46 -0
  22. pyagrum/causal/causalEffectEstimation/_backdoorEstimators.py +774 -0
  23. pyagrum/causal/causalEffectEstimation/_causalBNEstimator.py +324 -0
  24. pyagrum/causal/causalEffectEstimation/_frontdoorEstimators.py +396 -0
  25. pyagrum/causal/causalEffectEstimation/_learners.py +118 -0
  26. pyagrum/causal/causalEffectEstimation/_utils.py +466 -0
  27. pyagrum/causal/notebook.py +172 -0
  28. pyagrum/clg/CLG.py +658 -0
  29. pyagrum/clg/GaussianVariable.py +111 -0
  30. pyagrum/clg/SEM.py +312 -0
  31. pyagrum/clg/__init__.py +63 -0
  32. pyagrum/clg/canonicalForm.py +408 -0
  33. pyagrum/clg/constants.py +54 -0
  34. pyagrum/clg/forwardSampling.py +202 -0
  35. pyagrum/clg/learning.py +776 -0
  36. pyagrum/clg/notebook.py +480 -0
  37. pyagrum/clg/variableElimination.py +271 -0
  38. pyagrum/common.py +60 -0
  39. pyagrum/config.py +319 -0
  40. pyagrum/ctbn/CIM.py +513 -0
  41. pyagrum/ctbn/CTBN.py +573 -0
  42. pyagrum/ctbn/CTBNGenerator.py +216 -0
  43. pyagrum/ctbn/CTBNInference.py +459 -0
  44. pyagrum/ctbn/CTBNLearner.py +161 -0
  45. pyagrum/ctbn/SamplesStats.py +671 -0
  46. pyagrum/ctbn/StatsIndepTest.py +355 -0
  47. pyagrum/ctbn/__init__.py +79 -0
  48. pyagrum/ctbn/constants.py +54 -0
  49. pyagrum/ctbn/notebook.py +264 -0
  50. pyagrum/defaults.ini +199 -0
  51. pyagrum/deprecated.py +95 -0
  52. pyagrum/explain/_ComputationCausal.py +75 -0
  53. pyagrum/explain/_ComputationConditional.py +48 -0
  54. pyagrum/explain/_ComputationMarginal.py +48 -0
  55. pyagrum/explain/_CustomShapleyCache.py +110 -0
  56. pyagrum/explain/_Explainer.py +176 -0
  57. pyagrum/explain/_Explanation.py +70 -0
  58. pyagrum/explain/_FIFOCache.py +54 -0
  59. pyagrum/explain/_ShallCausalValues.py +204 -0
  60. pyagrum/explain/_ShallConditionalValues.py +155 -0
  61. pyagrum/explain/_ShallMarginalValues.py +155 -0
  62. pyagrum/explain/_ShallValues.py +296 -0
  63. pyagrum/explain/_ShapCausalValues.py +208 -0
  64. pyagrum/explain/_ShapConditionalValues.py +126 -0
  65. pyagrum/explain/_ShapMarginalValues.py +191 -0
  66. pyagrum/explain/_ShapleyValues.py +298 -0
  67. pyagrum/explain/__init__.py +81 -0
  68. pyagrum/explain/_explGeneralizedMarkovBlanket.py +152 -0
  69. pyagrum/explain/_explIndependenceListForPairs.py +146 -0
  70. pyagrum/explain/_explInformationGraph.py +264 -0
  71. pyagrum/explain/notebook/__init__.py +54 -0
  72. pyagrum/explain/notebook/_bar.py +142 -0
  73. pyagrum/explain/notebook/_beeswarm.py +174 -0
  74. pyagrum/explain/notebook/_showShapValues.py +97 -0
  75. pyagrum/explain/notebook/_waterfall.py +220 -0
  76. pyagrum/explain/shapley.py +225 -0
  77. pyagrum/lib/__init__.py +46 -0
  78. pyagrum/lib/_colors.py +390 -0
  79. pyagrum/lib/bn2graph.py +299 -0
  80. pyagrum/lib/bn2roc.py +1026 -0
  81. pyagrum/lib/bn2scores.py +217 -0
  82. pyagrum/lib/bn_vs_bn.py +605 -0
  83. pyagrum/lib/cn2graph.py +305 -0
  84. pyagrum/lib/discreteTypeProcessor.py +1102 -0
  85. pyagrum/lib/discretizer.py +58 -0
  86. pyagrum/lib/dynamicBN.py +390 -0
  87. pyagrum/lib/explain.py +57 -0
  88. pyagrum/lib/export.py +84 -0
  89. pyagrum/lib/id2graph.py +258 -0
  90. pyagrum/lib/image.py +387 -0
  91. pyagrum/lib/ipython.py +307 -0
  92. pyagrum/lib/mrf2graph.py +471 -0
  93. pyagrum/lib/notebook.py +1821 -0
  94. pyagrum/lib/proba_histogram.py +552 -0
  95. pyagrum/lib/utils.py +138 -0
  96. pyagrum/pyagrum.py +31495 -0
  97. pyagrum/skbn/_MBCalcul.py +242 -0
  98. pyagrum/skbn/__init__.py +49 -0
  99. pyagrum/skbn/_learningMethods.py +282 -0
  100. pyagrum/skbn/_utils.py +297 -0
  101. pyagrum/skbn/bnclassifier.py +1014 -0
  102. pyagrum_nightly-2.3.1.9.dev202512261765915415.dist-info/LICENSE.md +12 -0
  103. pyagrum_nightly-2.3.1.9.dev202512261765915415.dist-info/LICENSES/LGPL-3.0-or-later.txt +304 -0
  104. pyagrum_nightly-2.3.1.9.dev202512261765915415.dist-info/LICENSES/MIT.txt +18 -0
  105. pyagrum_nightly-2.3.1.9.dev202512261765915415.dist-info/METADATA +145 -0
  106. pyagrum_nightly-2.3.1.9.dev202512261765915415.dist-info/RECORD +107 -0
  107. pyagrum_nightly-2.3.1.9.dev202512261765915415.dist-info/WHEEL +4 -0
@@ -0,0 +1,376 @@
1
+ ############################################################################
2
+ # This file is part of the aGrUM/pyAgrum library. #
3
+ # #
4
+ # Copyright (c) 2005-2025 by #
5
+ # - Pierre-Henri WUILLEMIN(_at_LIP6) #
6
+ # - Christophe GONZALES(_at_AMU) #
7
+ # #
8
+ # The aGrUM/pyAgrum library is free software; you can redistribute it #
9
+ # and/or modify it under the terms of either : #
10
+ # #
11
+ # - the GNU Lesser General Public License as published by #
12
+ # the Free Software Foundation, either version 3 of the License, #
13
+ # or (at your option) any later version, #
14
+ # - the MIT license (MIT), #
15
+ # - or both in dual license, as here. #
16
+ # #
17
+ # (see https://agrum.gitlab.io/articles/dual-licenses-lgplv3mit.html) #
18
+ # #
19
+ # This aGrUM/pyAgrum library is distributed in the hope that it will be #
20
+ # useful, but WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, #
21
+ # INCLUDING BUT NOT LIMITED TO THE WARRANTIES MERCHANTABILITY or FITNESS #
22
+ # FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #
23
+ # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #
24
+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, #
25
+ # ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR #
26
+ # OTHER DEALINGS IN THE SOFTWARE. #
27
+ # #
28
+ # See LICENCES for more details. #
29
+ # #
30
+ # SPDX-FileCopyrightText: Copyright 2005-2025 #
31
+ # - Pierre-Henri WUILLEMIN(_at_LIP6) #
32
+ # - Christophe GONZALES(_at_AMU) #
33
+ # SPDX-License-Identifier: LGPL-3.0-or-later OR MIT #
34
+ # #
35
+ # Contact : info_at_agrum_dot_org #
36
+ # homepage : http://agrum.gitlab.io #
37
+ # gitlab : https://gitlab.com/agrumery/agrum #
38
+ # #
39
+ ############################################################################
40
+
41
+ import pandas
42
+ from numpy.random import dirichlet
43
+
44
+ import pyagrum
45
+ import pyagrum.bnmixture as BNM
46
+
47
+
48
class IMixtureLearner:
  """
  Interface for mixture learners.

  Not meant to be instantiated: it only provides ``updateState``, shared by the
  concrete learners of this module, to configure a ``pyagrum.BNLearner``.
  """

  def __init__(self):
    # Declared so that subclasses' ``updateState`` can fall back on it, but the
    # interface itself refuses instantiation.
    self._ref_learner = None
    raise NotImplementedError("Learner interface should not be initialized !")

  def updateState(self, learner: "pyagrum.BNLearner", **kargs):
    """
    Updates a learner using methods in parameters. If there are no parameters given, ``learner`` will copy state of the reference learner, if it exists.

    Parameters
    ----------
    learner : pyagrum.BNLearner
      Learner to update.
    algorithm : str
      Algorithm to use ("Greedy Hill Climbing", "MIIC", "K2" or "Local Search with Tabu List").
    order : List[str or int]
      Order for K2 algorithm.
    tabu_size : int
      Size for local search with tabu list.
    nb_decrease : int
      Decrease for local search with tabu list.
    score : str
      Type of score to use.
    correction : str
      Correction to use.
    prior : str
      Prior to use.
    source : str | pyagrum.BayesNet
      Source for Dirichlet prior.
    prior_weight : float
      Weight used for prior.

    Raises
    ------
    pyagrum.ArgumentError
      If an unknown algorithm, score, correction or prior is requested, or if
      K2 is requested without an order, or Dirichlet without a source.
    """
    if len(kargs) == 0 and self._ref_learner is not None:
      learner.copyState(self._ref_learner)
      return

    # --- structure-learning algorithm ---
    algorithm = kargs.get("algorithm", "Greedy Hill Climbing")
    if algorithm == "Greedy Hill Climbing":
      learner.useGreedyHillClimbing()
    elif algorithm == "MIIC":
      learner.useMIIC()
    elif algorithm == "K2":
      if kargs.get("order", None) is None:
        raise pyagrum.ArgumentError("Tried to use K2 without an order")
      learner.useK2(kargs["order"])
    elif algorithm == "Local Search with Tabu List":
      learner.useLocalSearchWithTabuList(kargs.get("tabu_size", 100), kargs.get("nb_decrease", 2))
    else:
      raise pyagrum.ArgumentError(f"could not find the algorithm {algorithm}")

    # --- score ---
    score = kargs.get("score", "BDeu")
    score_setters = {
      "K2": learner.useScoreK2,
      "AIC": learner.useScoreAIC,
      "BIC": learner.useScoreBIC,
      "BD": learner.useScoreBD,
      "BDeu": learner.useScoreBDeu,
      "Log2Likelihood": learner.useScoreLog2Likelihood,
    }
    if score not in score_setters:
      raise pyagrum.ArgumentError(f"could not find a suitable score : {score}")
    score_setters[score]()

    # --- correction ---
    correction = kargs.get("correction", "MDL")
    correction_setters = {
      "MDL": learner.useMDLCorrection,
      "NML": learner.useNMLCorrection,
      "No correction": learner.useNoCorrection,
    }
    if correction not in correction_setters:
      raise pyagrum.ArgumentError(f"could not find a suitable correction : {correction}")
    correction_setters[correction]()

    # --- prior ---
    prior = kargs.get("prior", "-")
    if prior == "-":
      learner.useNoPrior()
    elif prior == "Dirichlet":
      if kargs.get("source", None) is None:
        raise pyagrum.ArgumentError("could not find source for dirichlet prior")
      if kargs.get("prior_weight", None) is not None:
        learner.useDirichletPrior(kargs["source"], kargs["prior_weight"])
      else:
        learner.useDirichletPrior(kargs["source"])
    elif prior == "BDEU":
      if kargs.get("prior_weight", None) is not None:
        learner.useBDeuPrior(kargs["prior_weight"])
      else:
        learner.useBDeuPrior()
    elif prior == "Smoothing":
      if kargs.get("prior_weight", None) is not None:
        learner.useSmoothingPrior(kargs["prior_weight"])
      else:
        learner.useSmoothingPrior()
    else:
      # previously an unknown prior was silently ignored, unlike the other
      # options above — keep the error handling consistent
      raise pyagrum.ArgumentError(f"could not find a suitable prior : {prior}")
143
+
144
+
145
class BNMLearner(IMixtureLearner):
  """
  Allows to learn multiple BNs from a database. Learned BNs are given a weight and are stored in a BNMixture.

  Notes
  -----

  - How is memory handled? First, to reduce memory consumption, only one BNLearner is instantiated at most at a given time. There are more improvements if ``source`` contains files.
  In that case, dataframes are loaded one at a time to reduce memory consumption. Otherwise all DataFrames are stored together.

  - We create a reference learner using the source with maximum weight.


  Parameters
  ----------
  weights : List[float]
    Weights of each sample of the database.
  source : List[str] | List[pandas.DataFrame]
    Samples to learn from (csv format for now).
  states : List["state"]
    List of learners state.
  template : pyagrum.BayesNet | Optional
    BN to use to find modalities.

  Raises
  ------
  pyagrum.ArgumentError
    If arguments don't have the same dimensions.
  """

  def __init__(self, weights, source, template=None, states=None):
    if len(source) != len(weights):
      raise pyagrum.ArgumentError(
        f"weights and source don't have the same dimensions : weights({len(weights)}) and source({len(source)})"
      )
    self._data = source.copy()
    self._weights = weights.copy()
    self._template = template
    # one independent dict per source — ``[{}] * n`` would alias a single
    # shared dict across every learner state
    self._states = states.copy() if states else [{} for _ in weights]
    self._ref_learner = None

  def add(self, source, weight, **kargs):
    """
    Adds a new BNLearner(its parameters) to the learner.
    """
    self._data.append(source)
    self._weights.append(weight)
    self._states.append(kargs)

  @staticmethod
  def _loadData(source):
    """Returns the DataFrame for ``source`` (csv path or DataFrame)."""
    if isinstance(source, str):
      return pandas.read_csv(source)
    if isinstance(source, pandas.DataFrame):
      return source
    # previously an unsupported type left the data unbound, causing a
    # NameError later — fail with an explicit error instead
    raise pyagrum.ArgumentError(f"unsupported source type : {type(source)}")

  def _makeLearner(self, data):
    """Creates a BNLearner on ``data``, using the template for modalities if any."""
    if self._template is None:
      return pyagrum.BNLearner(data)
    return pyagrum.BNLearner(data, self._template)

  def learnBNM(self):
    """
    Learns the BNs from the database and return them stored in a BNMixture with corresponding weights.

    Returns
    -------
    BNM.BNMixture
      The learned BNMixture.
    """
    # finding heaviest source
    index, max_w = max(enumerate(self._weights), key=lambda x: x[1])

    # setting reference learner
    self._ref_learner = self._makeLearner(self._loadData(self._data[index]))
    self.updateState(self._ref_learner, **self._states[index])

    # create Mixture and learn reference BN
    bnm = BNM.BNMixture()
    refbn = self._ref_learner.learnBN()
    refname = "bn0"
    refbn.setProperty("name", refname)
    bnm.add(refname, refbn, w=max_w)

    # adding others BNs (one learner alive at a time to limit memory use)
    for i, src in enumerate(self._data):
      if i == index:
        continue

      learner = self._makeLearner(self._loadData(src))
      self.updateState(learner, **self._states[i])
      bni = learner.learnBN()
      namei = f"bn{bnm.size()}"
      bni.setProperty("name", namei)
      bnm.add(namei, bni, w=self._weights[i])

    return bnm
248
+
249
+
250
class BNMBootstrapLearner(IMixtureLearner):
  """
  Allows to learn a BN and bootstrap-generated BNs. Learning a BN is not the only goal of this class. The purpose of bootstrapping is to have an accuracy indicator about the
  BN learned from a given database.

  Notes
  -----

  - How is memory handled? First, to reduce memory consumption, only one BNLearner is instantiated at most at a given time.

  - To keep one BNLearner at a time, we create a reference learner. To apply a method for the learning algorithm of all bootstrapped BNs,
  "use" methods modify the reference learner. Then the other learners make use of ``BNLearner.copyState`` to update themselves according to the reference learner.


  Parameters
  ----------
  source : str | pandas.DataFrame
    Database to learn from (csv format for now).
  template : pyagrum.BayesNet | Optional
    BN to use to find modalities.
  N : int
    Number of bootstrap iterations (default 100).
  """

  def __init__(self, source, template=None, N=100):
    # loading source
    if isinstance(source, str):
      self._data = pandas.read_csv(source)
    elif isinstance(source, pandas.DataFrame):
      self._data = source
    else:
      # previously an unsupported type left ``_data`` unbound, causing a
      # NameError below — fail with an explicit error instead
      raise pyagrum.ArgumentError(f"unsupported source type : {type(source)}")

    # creating reference learner
    if template is None:
      self._ref_learner = pyagrum.BNLearner(self._data)
    else:
      self._ref_learner = pyagrum.BNLearner(self._data, template)
    self._template = template
    self._iter = N

  def updateState(self, learner, **kargs):
    # configuration goes through the reference learner only
    raise NotImplementedError("Use the use... methods instead")

  def learnBNM(self):
    """
    Learns a reference BN from the database. Then add bootstrap-generated BNs to a BootstrapMixture object.

    Returns
    -------
    BNM.BootstrapMixture
      The reference BN together with its bootstrapped variants.
    """
    # Learn reference BN
    learner = self._ref_learner
    refBN = learner.learnBN()
    refBN.setProperty("name", "bn0")
    bnm = BNM.BootstrapMixture("bn0", refBN)

    n = self._data.shape[0]

    # Draw every bootstrap weight vector at once: one (iter x n) matrix.
    # The original regenerated the whole matrix inside the loop and used a
    # single row per iteration, an accidental O(iter^2 * n) cost.
    weights_samples = dirichlet([1 / n] * n, self._iter) * n

    for i in range(self._iter):
      # re-weight the records of the single shared learner (Bayesian bootstrap)
      for j in range(n):
        learner.setRecordWeight(j, weights_samples[i][j])

      bni = learner.learnBN()
      # NOTE(review): BNMLearner names BNs f"bn{bnm.size()}"; the "+ 1" here
      # looks inconsistent (possibly skipping "bn1") — confirm against
      # BootstrapMixture.size() semantics before changing.
      namei = f"bn{bnm.size() + 1}"
      bni.setProperty("name", namei)

      bnm.add(namei, bni)

    return bnm

  def useIter(self, N):
    """
    Set the number of bootstrap iterations used for learning.

    Parameters
    ----------
    N : int
      Number of iterations.
    """
    self._iter = N

  # --- thin delegations: configure the shared reference learner ---

  def useBDeuPrior(self, weight=1.0):
    self._ref_learner.useBDeuPrior(weight)

  def useDirichletPrior(self, source, weight=1.0):
    self._ref_learner.useDirichletPrior(source, weight)

  def useGreedyHillClimbing(self):
    self._ref_learner.useGreedyHillClimbing()

  def useK2(self, order):
    self._ref_learner.useK2(order)

  def useLocalSearchWithTabuList(self, tabu_size=100, nb_decrease=2):
    self._ref_learner.useLocalSearchWithTabuList(tabu_size, nb_decrease)

  def useMDLCorrection(self):
    self._ref_learner.useMDLCorrection()

  def useMIIC(self):
    self._ref_learner.useMIIC()

  def useNMLCorrection(self):
    self._ref_learner.useNMLCorrection()

  def useNoCorrection(self):
    self._ref_learner.useNoCorrection()

  def useNoPrior(self):
    self._ref_learner.useNoPrior()

  def useScoreAIC(self):
    self._ref_learner.useScoreAIC()

  def useScoreBD(self):
    self._ref_learner.useScoreBD()

  def useScoreBDeu(self):
    self._ref_learner.useScoreBDeu()

  def useScoreBIC(self):
    self._ref_learner.useScoreBIC()

  def useScoreK2(self):
    self._ref_learner.useScoreK2()

  def useScoreLog2Likelihood(self):
    self._ref_learner.useScoreLog2Likelihood()

  def useSmoothingPrior(self, weight=1):
    self._ref_learner.useSmoothingPrior(weight)