pyAgrum-nightly 2.1.1.9.dev202506061747485979__cp310-abi3-manylinux2014_aarch64.whl → 2.3.1.9.dev202601031765915415__cp310-abi3-manylinux2014_aarch64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (110)
  1. pyagrum/__init__.py +6 -2
  2. pyagrum/_pyagrum.so +0 -0
  3. pyagrum/bnmixture/BNMInference.py +6 -2
  4. pyagrum/bnmixture/BNMLearning.py +12 -2
  5. pyagrum/bnmixture/BNMixture.py +6 -2
  6. pyagrum/bnmixture/__init__.py +6 -2
  7. pyagrum/bnmixture/notebook.py +6 -2
  8. pyagrum/causal/_CausalFormula.py +6 -2
  9. pyagrum/causal/_CausalModel.py +6 -2
  10. pyagrum/causal/__init__.py +6 -2
  11. pyagrum/causal/_causalImpact.py +6 -2
  12. pyagrum/causal/_dSeparation.py +6 -2
  13. pyagrum/causal/_doAST.py +6 -2
  14. pyagrum/causal/_doCalculus.py +6 -2
  15. pyagrum/causal/_doorCriteria.py +6 -2
  16. pyagrum/causal/_exceptions.py +6 -2
  17. pyagrum/causal/_types.py +6 -2
  18. pyagrum/causal/causalEffectEstimation/_CausalEffectEstimation.py +6 -2
  19. pyagrum/causal/causalEffectEstimation/_IVEstimators.py +6 -2
  20. pyagrum/causal/causalEffectEstimation/_RCTEstimators.py +6 -2
  21. pyagrum/causal/causalEffectEstimation/__init__.py +6 -2
  22. pyagrum/causal/causalEffectEstimation/_backdoorEstimators.py +6 -2
  23. pyagrum/causal/causalEffectEstimation/_causalBNEstimator.py +6 -2
  24. pyagrum/causal/causalEffectEstimation/_frontdoorEstimators.py +6 -2
  25. pyagrum/causal/causalEffectEstimation/_learners.py +6 -2
  26. pyagrum/causal/causalEffectEstimation/_utils.py +6 -2
  27. pyagrum/causal/notebook.py +8 -3
  28. pyagrum/clg/CLG.py +6 -2
  29. pyagrum/clg/GaussianVariable.py +6 -2
  30. pyagrum/clg/SEM.py +6 -2
  31. pyagrum/clg/__init__.py +6 -2
  32. pyagrum/clg/canonicalForm.py +6 -2
  33. pyagrum/clg/constants.py +6 -2
  34. pyagrum/clg/forwardSampling.py +6 -2
  35. pyagrum/clg/learning.py +6 -2
  36. pyagrum/clg/notebook.py +6 -2
  37. pyagrum/clg/variableElimination.py +6 -2
  38. pyagrum/common.py +7 -3
  39. pyagrum/config.py +7 -2
  40. pyagrum/ctbn/CIM.py +6 -2
  41. pyagrum/ctbn/CTBN.py +6 -2
  42. pyagrum/ctbn/CTBNGenerator.py +6 -2
  43. pyagrum/ctbn/CTBNInference.py +6 -2
  44. pyagrum/ctbn/CTBNLearner.py +6 -2
  45. pyagrum/ctbn/SamplesStats.py +6 -2
  46. pyagrum/ctbn/StatsIndepTest.py +6 -2
  47. pyagrum/ctbn/__init__.py +6 -2
  48. pyagrum/ctbn/constants.py +6 -2
  49. pyagrum/ctbn/notebook.py +6 -2
  50. pyagrum/deprecated.py +6 -2
  51. pyagrum/explain/_ComputationCausal.py +75 -0
  52. pyagrum/explain/_ComputationConditional.py +48 -0
  53. pyagrum/explain/_ComputationMarginal.py +48 -0
  54. pyagrum/explain/_CustomShapleyCache.py +110 -0
  55. pyagrum/explain/_Explainer.py +176 -0
  56. pyagrum/explain/_Explanation.py +70 -0
  57. pyagrum/explain/_FIFOCache.py +54 -0
  58. pyagrum/explain/_ShallCausalValues.py +204 -0
  59. pyagrum/explain/_ShallConditionalValues.py +155 -0
  60. pyagrum/explain/_ShallMarginalValues.py +155 -0
  61. pyagrum/explain/_ShallValues.py +296 -0
  62. pyagrum/explain/_ShapCausalValues.py +208 -0
  63. pyagrum/explain/_ShapConditionalValues.py +126 -0
  64. pyagrum/explain/_ShapMarginalValues.py +191 -0
  65. pyagrum/explain/_ShapleyValues.py +298 -0
  66. pyagrum/explain/__init__.py +81 -0
  67. pyagrum/explain/_explGeneralizedMarkovBlanket.py +152 -0
  68. pyagrum/explain/_explIndependenceListForPairs.py +146 -0
  69. pyagrum/explain/_explInformationGraph.py +264 -0
  70. pyagrum/explain/notebook/__init__.py +54 -0
  71. pyagrum/explain/notebook/_bar.py +142 -0
  72. pyagrum/explain/notebook/_beeswarm.py +174 -0
  73. pyagrum/explain/notebook/_showShapValues.py +97 -0
  74. pyagrum/explain/notebook/_waterfall.py +220 -0
  75. pyagrum/explain/shapley.py +225 -0
  76. pyagrum/lib/__init__.py +6 -2
  77. pyagrum/lib/_colors.py +6 -2
  78. pyagrum/lib/bn2graph.py +6 -2
  79. pyagrum/lib/bn2roc.py +6 -2
  80. pyagrum/lib/bn2scores.py +6 -2
  81. pyagrum/lib/bn_vs_bn.py +6 -2
  82. pyagrum/lib/cn2graph.py +6 -2
  83. pyagrum/lib/discreteTypeProcessor.py +99 -81
  84. pyagrum/lib/discretizer.py +6 -2
  85. pyagrum/lib/dynamicBN.py +6 -2
  86. pyagrum/lib/explain.py +17 -492
  87. pyagrum/lib/export.py +6 -2
  88. pyagrum/lib/id2graph.py +6 -2
  89. pyagrum/lib/image.py +6 -2
  90. pyagrum/lib/ipython.py +6 -2
  91. pyagrum/lib/mrf2graph.py +6 -2
  92. pyagrum/lib/notebook.py +6 -2
  93. pyagrum/lib/proba_histogram.py +6 -2
  94. pyagrum/lib/utils.py +6 -2
  95. pyagrum/pyagrum.py +976 -126
  96. pyagrum/skbn/_MBCalcul.py +6 -2
  97. pyagrum/skbn/__init__.py +6 -2
  98. pyagrum/skbn/_learningMethods.py +6 -2
  99. pyagrum/skbn/_utils.py +6 -2
  100. pyagrum/skbn/bnclassifier.py +6 -2
  101. pyagrum_nightly-2.1.1.9.dev202506061747485979.dist-info/LICENSE → pyagrum_nightly-2.3.1.9.dev202601031765915415.dist-info/LICENSE.md +3 -1
  102. pyagrum_nightly-2.3.1.9.dev202601031765915415.dist-info/LICENSES/LGPL-3.0-or-later.txt +304 -0
  103. pyagrum_nightly-2.3.1.9.dev202601031765915415.dist-info/LICENSES/MIT.txt +18 -0
  104. {pyagrum_nightly-2.1.1.9.dev202506061747485979.dist-info → pyagrum_nightly-2.3.1.9.dev202601031765915415.dist-info}/METADATA +3 -1
  105. pyagrum_nightly-2.3.1.9.dev202601031765915415.dist-info/RECORD +107 -0
  106. {pyagrum_nightly-2.1.1.9.dev202506061747485979.dist-info → pyagrum_nightly-2.3.1.9.dev202601031765915415.dist-info}/WHEEL +1 -1
  107. pyagrum/lib/shapley.py +0 -657
  108. pyagrum_nightly-2.1.1.9.dev202506061747485979.dist-info/LICENSE.LGPL +0 -165
  109. pyagrum_nightly-2.1.1.9.dev202506061747485979.dist-info/LICENSE.MIT +0 -17
  110. pyagrum_nightly-2.1.1.9.dev202506061747485979.dist-info/RECORD +0 -83
pyagrum/pyagrum.py CHANGED
@@ -1,5 +1,5 @@
  # This file was automatically generated by SWIG (https://www.swig.org).
- # Version 4.2.1
+ # Version 4.3.1
  #
  # Do not make changes to this file unless you know what you are doing - modify
  # the SWIG interface file instead.
@@ -1505,9 +1505,47 @@ class DiscreteVariable(Variable):
  return _pyagrum.DiscreteVariable_isEmpirical(self)

  def closestIndex(self, val: float) -> int:
+ r"""
+
+ For numerical discrete variables (all except :class:`pyagrum.LabelizedVariable`), this method returns the index of the closest value to a given float value in the variable's domain.
+
+ Parameters
+ ----------
+ value : float
+ the value for which we want to find the closest index
+ Returns
+ -------
+ int
+ the index of the closest value to `value` in the variable's domain
+
+ Raises
+ ------
+ pyagrum.NotImplementedYet
+ if the variable is not numerical discrete (i.e., if it is a :class:`pyagrum.LabelizedVariable`).
+
+ """
  return _pyagrum.DiscreteVariable_closestIndex(self, val)

  def closestLabel(self, val: float) -> str:
+ r"""
+
+ For numerical discrete variables (all except :class:`pyagrum.LabelizedVariable`), this method returns the label of the closest value to a given float value in the variable's domain.
+
+ Parameters
+ ----------
+ value : float
+ the value for which we want to find the closest index
+ Returns
+ -------
+ str
+ the label of the closest value to `value` in the variable's domain
+
+ Raises
+ ------
+ pyagrum.InvalidArgument
+ if the variable is not numerical discrete (i.e., if it is a :class:`pyagrum.LabelizedVariable`).
+
+ """
  return _pyagrum.DiscreteVariable_closestLabel(self, val)

  def varType(self) -> int:
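
The closest-value lookup documented above applies to all numerical discrete variables. A minimal usage sketch (the values and the fast-syntax variable are illustrative):

    import pyagrum as gum

    bn = gum.fastBN("a{0|2|5|9}")      # IntegerVariable with sorted values 0, 2, 5, 9
    v = bn.variable("a")
    print(v.closestIndex(4.2))         # 2: the closest value is 5, at index 2
    print(v.closestLabel(4.2))         # "5": the label of that closest value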
@@ -1983,6 +2021,25 @@ class LabelizedVariable(DiscreteVariable):
  return _pyagrum.LabelizedVariable_numerical(self, index)

  def closestIndex(self, val: float) -> int:
+ r"""
+
+ For numerical discrete variables (all except :class:`pyagrum.LabelizedVariable`), this method returns the index of the closest value to a given float value in the variable's domain.
+
+ Parameters
+ ----------
+ value : float
+ the value for which we want to find the closest index
+ Returns
+ -------
+ int
+ the index of the closest value to `value` in the variable's domain
+
+ Raises
+ ------
+ pyagrum.NotImplementedYet
+ if the variable is not numerical discrete (i.e., if it is a :class:`pyagrum.LabelizedVariable`).
+
+ """
  return _pyagrum.LabelizedVariable_closestIndex(self, val)

  def domainSize(self) -> int:
@@ -2257,6 +2314,25 @@ class RangeVariable(DiscreteVariable):
  return _pyagrum.RangeVariable_index(self, arg2)

  def closestIndex(self, val: float) -> int:
+ r"""
+
+ For numerical discrete variables (all except :class:`pyagrum.LabelizedVariable`), this method returns the index of the closest value to a given float value in the variable's domain.
+
+ Parameters
+ ----------
+ value : float
+ the value for which we want to find the closest index
+ Returns
+ -------
+ int
+ the index of the closest value to `value` in the variable's domain
+
+ Raises
+ ------
+ pyagrum.NotImplementedYet
+ if the variable is not numerical discrete (i.e., if it is a :class:`pyagrum.LabelizedVariable`).
+
+ """
  return _pyagrum.RangeVariable_closestIndex(self, val)

  def domain(self) -> str:
@@ -2387,6 +2463,25 @@ class IntegerVariable(DiscreteVariable):
  return _pyagrum.IntegerVariable_index(self, label)

  def closestIndex(self, val: float) -> int:
+ r"""
+
+ For numerical discrete variables (all except :class:`pyagrum.LabelizedVariable`), this method returns the index of the closest value to a given float value in the variable's domain.
+
+ Parameters
+ ----------
+ value : float
+ the value for which we want to find the closest index
+ Returns
+ -------
+ int
+ the index of the closest value to `value` in the variable's domain
+
+ Raises
+ ------
+ pyagrum.NotImplementedYet
+ if the variable is not numerical discrete (i.e., if it is a :class:`pyagrum.LabelizedVariable`).
+
+ """
  return _pyagrum.IntegerVariable_closestIndex(self, val)

  def label(self, index: int) -> str:
@@ -2533,6 +2628,25 @@ class IntegerVariable(DiscreteVariable):
  return _pyagrum.IntegerVariable_eraseValues(self)

  def closestLabel(self, val: float) -> str:
+ r"""
+
+ For numerical discrete variables (all except :class:`pyagrum.LabelizedVariable`), this method returns the label of the closest value to a given float value in the variable's domain.
+
+ Parameters
+ ----------
+ value : float
+ the value for which we want to find the closest index
+ Returns
+ -------
+ str
+ the label of the closest value to `value` in the variable's domain
+
+ Raises
+ ------
+ pyagrum.InvalidArgument
+ if the variable is not numerical discrete (i.e., if it is a :class:`pyagrum.LabelizedVariable`).
+
+ """
  return _pyagrum.IntegerVariable_closestLabel(self, val)

  def __repr__(self) -> str:
@@ -2652,6 +2766,25 @@ class NumericalDiscreteVariable(DiscreteVariable):
  return _pyagrum.NumericalDiscreteVariable_index(self, label)

  def closestIndex(self, val: float) -> int:
+ r"""
+
+ For numerical discrete variables (all except :class:`pyagrum.LabelizedVariable`), this method returns the index of the closest value to a given float value in the variable's domain.
+
+ Parameters
+ ----------
+ value : float
+ the value for which we want to find the closest index
+ Returns
+ -------
+ int
+ the index of the closest value to `value` in the variable's domain
+
+ Raises
+ ------
+ pyagrum.NotImplementedYet
+ if the variable is not numerical discrete (i.e., if it is a :class:`pyagrum.LabelizedVariable`).
+
+ """
  return _pyagrum.NumericalDiscreteVariable_closestIndex(self, val)

  def label(self, index: int) -> str:
@@ -2798,6 +2931,25 @@ class NumericalDiscreteVariable(DiscreteVariable):
  return _pyagrum.NumericalDiscreteVariable_eraseValues(self)

  def closestLabel(self, val: float) -> str:
+ r"""
+
+ For numerical discrete variables (all except :class:`pyagrum.LabelizedVariable`), this method returns the label of the closest value to a given float value in the variable's domain.
+
+ Parameters
+ ----------
+ value : float
+ the value for which we want to find the closest index
+ Returns
+ -------
+ str
+ the label of the closest value to `value` in the variable's domain
+
+ Raises
+ ------
+ pyagrum.InvalidArgument
+ if the variable is not numerical discrete (i.e., if it is a :class:`pyagrum.LabelizedVariable`).
+
+ """
  return _pyagrum.NumericalDiscreteVariable_closestLabel(self, val)

  def __repr__(self) -> str:
@@ -3916,6 +4068,9 @@ class DAG(DiGraph):
  def moralGraph(self) -> "pyagrum.UndiGraph":
  return _pyagrum.DAG_moralGraph(self)

+ def minimalCondSet(self, *args) -> List[int]:
+ return _pyagrum.DAG_minimalCondSet(self, *args)
+
  def __repr__(self) -> str:
  return _pyagrum.DAG___repr__(self)

@@ -3923,9 +4078,41 @@
  return _pyagrum.DAG___str__(self)

  def dSeparation(self, *args) -> bool:
+ r"""
+
+ Check if the sets of nodes X and Y are d-separated (by the set of nodes Z if given) in the DAG.
+
+ Parameters
+ ----------
+ X : int sequence of int
+ a sequence of node ids (int) or a single node id (int)
+ Y : int sequence of int
+ a sequence of node ids (int) or a single node id (int)
+ Z : int sequence of int (optional)
+ a sequence of node ids (int) or a single node id (int)
+ Returns
+ -------
+ bool
+ True if X and Y are d-separated (by Z if given), False otherwise.
+
+ """
  return _pyagrum.DAG_dSeparation(self, *args)

  def moralizedAncestralGraph(self, *args) -> "pyagrum.UndiGraph":
+ r"""
+
+ Compute the moralized ancestral graph of the nodes from the DAG.
+
+ Parameters
+ ----------
+ nodes : int sequence of int
+ a sequence of node ids (int) or a single node id (int)
+ Returns
+ -------
+ "pyagrum.UndiGraph"
+ the moralized ancestral graph of the nodes from the DAG.
+
+ """
  return _pyagrum.DAG_moralizedAncestralGraph(self, *args)

  def addNodes(self, n: int) -> object:
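
The new d-separation helper can be checked on a hand-built collider; a minimal sketch (the node layout is illustrative):

    import pyagrum as gum

    dag = gum.DAG()
    a, b, c = [dag.addNode() for _ in range(3)]
    dag.addArc(a, b)
    dag.addArc(c, b)                             # collider: a -> b <- c

    print(dag.dSeparation(a, c))                 # True: marginally independent
    print(dag.dSeparation(a, c, b))              # False: conditioning on the collider b opens the path
    ug = dag.moralizedAncestralGraph([a, b, c])  # UndiGraph containing the moral edge a - c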
@@ -4631,9 +4818,41 @@ class PDAG(MixedGraph):
  return _pyagrum.PDAG___str__(self)

  def cSeparation(self, *args) -> bool:
+ r"""
+
+ Check if the sets of nodes X and Y are c-separated (by the set of nodes Z if given) in the PDAG.
+
+ Parameters
+ ----------
+ X : int sequence of int
+ a sequence of node ids (int) or a single node id (int)
+ Y : int sequence of int
+ a sequence of node ids (int) or a single node id (int)
+ Z : int sequence of int (optional)
+ a sequence of node ids (int) or a single node id (int)
+ Returns
+ -------
+ bool
+ True if X and Y are c-separated (by Z if given), False otherwise.
+
+ """
  return _pyagrum.PDAG_cSeparation(self, *args)

  def moralizedAncestralGraph(self, *args) -> "pyagrum.UndiGraph":
+ r"""
+
+ Compute the moralized ancestral graph of the nodes from the DAG.
+
+ Parameters
+ ----------
+ nodes : int sequence of int
+ a sequence of node ids (int) or a single node id (int)
+ Returns
+ -------
+ "pyagrum.UndiGraph"
+ the moralized ancestral graph of the nodes from the DAG.
+
+ """
  return _pyagrum.PDAG_moralizedAncestralGraph(self, *args)

  def addNodes(self, n: int) -> object:
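
c-separation plays the same role on PDAGs, which mix arcs and undirected edges. A sketch under the usual chain-graph semantics (the expected outputs are the editor's reading of the docstring above):

    import pyagrum as gum

    pdag = gum.PDAG()
    x, y, z = [pdag.addNode() for _ in range(3)]
    pdag.addArc(x, z)                  # directed part:   x -> z
    pdag.addEdge(z, y)                 # undirected part: z -- y

    print(pdag.cSeparation(x, y))      # False: x and y are connected through z
    print(pdag.cSeparation(x, y, z))   # True: z blocks the only mixed path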
@@ -6432,6 +6651,9 @@ class DAGmodel(GraphicalModel):
  """
  return _pyagrum.DAGmodel_hasSameStructure(self, other)

+ def minimalCondSet(self, *args) -> List[int]:
+ return _pyagrum.DAGmodel_minimalCondSet(self, *args)
+
  # Register DAGmodel in _pyagrum:
  _pyagrum.DAGmodel_swigregister(DAGmodel)
  class UGmodel(GraphicalModel):
@@ -7215,6 +7437,25 @@ class DiscretizedVariable(IDiscretizedVariable):
  return _pyagrum.DiscretizedVariable_ticks(self)

  def closestIndex(self, val: float) -> int:
+ r"""
+
+ For numerical discrete variables (all except :class:`pyagrum.LabelizedVariable`), this method returns the index of the closest value to a given float value in the variable's domain.
+
+ Parameters
+ ----------
+ value : float
+ the value for which we want to find the closest index
+ Returns
+ -------
+ int
+ the index of the closest value to `value` in the variable's domain
+
+ Raises
+ ------
+ pyagrum.NotImplementedYet
+ if the variable is not numerical discrete (i.e., if it is a :class:`pyagrum.LabelizedVariable`).
+
+ """
  return _pyagrum.DiscretizedVariable_closestIndex(self, val)

  def toFast(self) -> str:
@@ -7308,6 +7549,23 @@ class Tensor(object):

  @staticmethod
  def deterministicTensor(*args) -> "pyagrum.Tensor":
+ r"""
+
+ This static method generates a Tensor representing a deterministic function of a :class:`pyagrum.DiscreteVariable`) such as a hard evidence.
+
+ Parameters
+ ----------
+ var : pyagrum.DiscreteVariable
+ the variable to use
+ value: int str
+ the indice or the label of the value for the variable
+
+ Returns
+ -------
+ pyagrum.Tensor
+ The representation of the deterministic function as a :class:`pyagrum.Tensor`.
+
+ """
  return _pyagrum.Tensor_deterministicTensor(*args)

  @staticmethod
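
`deterministicTensor` builds the one-hot distribution used for hard evidence; a minimal sketch (the variable is illustrative):

    import pyagrum as gum

    light = gum.LabelizedVariable("light", "a traffic light", ["red", "green"])
    t = gum.Tensor.deterministicTensor(light, "green")  # by label: P(light=green)=1
    t0 = gum.Tensor.deterministicTensor(light, 0)       # by index: P(light=red)=1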
@@ -7433,6 +7691,21 @@
  return _pyagrum.Tensor_minNonZero(self)

  def findAll(self, v: float) -> List[Dict[str,int]]:
+ r"""
+
+ Find all the position of a value in the Tensor.
+
+ Parameters
+ ----------
+ v : float
+ the value to find
+
+ Returns
+ -------
+ list[dict[str,int]]
+ a list of all the instantiations (as python dictionary) where the value is found
+
+ """
  return _pyagrum.Tensor_findAll(self, v)

  def entropy(self) -> float:
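
`findAll` returns every instantiation holding a given value; combined with `Tensor.max()`, it locates the modes of a CPT. A sketch (fastBN generates random CPTs, so the printed instantiations will vary):

    import pyagrum as gum

    bn = gum.fastBN("a->b")
    cpt = bn.cpt("b")
    print(cpt.findAll(cpt.max()))           # e.g. [{'b': 0, 'a': 1}]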
@@ -7861,7 +8134,7 @@

  Returns
  -------
- pyAgrum.Te nsor
+ pyAgrum.Tensor
  The representation of the evidence as a :class:`~pyagrum.Tensor`.

  """
@@ -8806,6 +9079,16 @@


  def domainSize(self) -> int:
+ r"""
+
+ Compute the size of the domain of the Tensor, i.e., the product of the domain sizes of the variables in the Tensor.
+
+ Returns
+ -------
+ int
+ the size of the domain of the Tensor (the number of values it can take)
+
+ """
  return _pyagrum.Tensor_domainSize(self)

  def nbrDim(self, *args) -> int:
@@ -9428,6 +9711,9 @@ class InformationTheory(object):

  # Register InformationTheory in _pyagrum:
  _pyagrum.InformationTheory_swigregister(InformationTheory)
+
+ import warnings
+
  class PRMexplorer(object):
  r"""

@@ -11512,19 +11798,20 @@ class BayesNet(IBayesNet):
  - the structure 'a->b->c;b->d<-e;'.
  - the type of the variables with different syntax:

- - by default, a variable is a pyagrum.RangeVariable using the default domain size ([2])
- - with 'a[10]', the variable is a pyagrum.RangeVariable using 10 as domain size (from 0 to 9)
- - with 'a[3,7]', the variable is a pyagrum.RangeVariable using a domainSize from 3 to 7
- - with 'a[1,3.14,5,6.2]', the variable is a pyagrum.DiscretizedVariable using the given ticks (at least 3 values)
- - with 'a{top|middle|bottom}', the variable is a pyagrum.LabelizedVariable using the given labels.
- - with 'a{-1|5|0|3}', the variable is a pyagrum.IntegerVariable using the sorted given values.
- - with 'a{-0.5|5.01|0|3.1415}', the variable is a pyagrum.NumericalDiscreteVariable using the sorted given values.
+ - by default, a variable is a binary `pyagrum.RangeVariable` using the default domain size ([2])
+ - with 'a[10]', the variable is a `pyagrum.RangeVariable` using 10 as domain size (from 0 to 9)
+ - with 'a[3,7]', the variable is a `pyagrum.RangeVariable` using a domainSize from 3 to 7
+ - with 'a[1,3.14,5,6.2]', the variable is a `pyagrum.DiscretizedVariable` using the given ticks (at least 3 values)
+ - with 'a[0.0:3.14:10]', the variable is a `pyagrum.DiscretizedVariable` of 10 intervals of same width from 0 to 3.14 (including both)
+ - with 'a{top|middle|bottom}', the variable is a `pyagrum.LabelizedVariable` using the given labels.
+ - with 'a{-1|5|0|3}', the variable is a `pyagrum.IntegerVariable` using the sorted given values.
+ - with 'a{-0.5|5.01|0|3.1415}', the variable is a `pyagrum.NumericalDiscreteVariable` using the sorted given values.

  Note
  ----
  - If the dot-like string contains such a specification more than once for a variable, the first specification will be used.
  - the CPTs are randomly generated.
- - see also pyagrum.fastBN.
+ - see also the function pyagrum.fastBN.

  Examples
  --------
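
An illustrative sketch of the syntaxes listed above, including the new 'low:high:n' interval form (the network itself is made up):

    import pyagrum as gum

    bn = gum.fastBN("angle[0.0:3.14:10]->state{low|mid|high};count{-1|0|3}->state")
    print(bn.variable("angle"))   # DiscretizedVariable: 10 equal-width intervals over [0, 3.14]
    print(bn.variable("count"))   # IntegerVariable with values -1, 0, 3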
@@ -24223,12 +24510,12 @@ class BNLearner(object):
  def learnBN(self) -> "pyagrum.BayesNet":
  r"""

- learn a BayesNet from a file (must have read the db before)
+ Learns a BayesNet (both parameters and structure) from the BNLearner's database

  Returns
  -------
  pyagrum.BayesNet
- the learned BayesNet
+ the learnt BayesNet

  """
  return _pyagrum.BNLearner_learnBN(self)
@@ -24236,42 +24523,83 @@
  def learnParameters(self, *args) -> "pyagrum.BayesNet":
  r"""

- Create a new BN copying its structure from the argument (dag or BN) and learning its parameters from the database w.r.t the BNLearner's state (priors, etc.).
+ Creates a Bayes net whose structure corresponds to that passed in argument or to
+ the last structure learnt by Method `learnDAG()`, and whose parameters are learnt
+ from the BNLearner's database.

- Warnings
- --------
- When using a `pyagrum.DAG` as input parameter, NodeIds in the dag and index of rows in the database must fit in order to coherently fix the structure of the BN.
- Generally, it is safer to use a `pyagrum.BayesianNet` as input or even to use `pyagrum.BNLearner.fitParameters`.
+ usage:
+ 1. `learnParameters(dag, take_into_account_score=True)`
+ 2. `learnParameters(bn, take_into_account_score=True)`
+ 3. `learnParameters(take_into_account_score=True)`
+
+ When the first argument of Method `learnParameters()` is a DAG or a Bayes net
+ (usages 1. and 2.), this one specifies the graphical structure of the returned
+ Bayes net. Otherwise (usage 3.), Method `learnParameters()` is called implicitly
+ with the last DAG learnt by the BNLearner.
+
+ The difference between calling this method with a DAG (usages 1. and 3.) or a
+ Bayes net (usage 2.) arises when the database contains missing values and EM is
+ used to learn the parameters. EM needs to initialize the conditional probability
+ distributions (CPT) before iterating the expectation/maximimzation steps. When a
+ DAG is passed in argument, these initializations are performed using a specific
+ estimator that does not take into account the missing values in the database. The
+ resulting CPTs are then perturbed randomly (see the noise in method `useEM()`).
+ When a Bayes net is passed in argument, its CPT for a node A can be either filled
+ exclusively with zeroes or not. In the first case, the initialization is performed
+ as described above. In the second case, the value of A's CPT is used as is, and
+ a subsequent perturbation controlled by the noise level is applied.

  Parameters
  ----------
  dag : pyagrum.DAG
+ specifies the graphical structure of the returned Bayes net.
  bn : pyagrum.BayesNet
- take_into_account_score : bool
- The dag passed in argument may have been learnt from a structure learning. In this case, if the score used to learn the structure has an implicit prior (like K2 which has a 1-smoothing prior), it is important to also take into account this implicit prior for parameter learning. By default (`take_into_account_score=True`), we will learn parameters by taking into account the prior specified by methods usePriorXXX () + the implicit prior of the score (if any). If `take_into_account_score=False`, we just take into account the prior specified by `usePriorXXX()`.
+ specifies the graphical structure of the returned Bayes net and, when
+ the database contains missing values and EM is used for learning, force
+ EM to initialize the CPTs of the resulting Bayes net to the values of
+ those passed in argument (when they are not fully filled with zeroes)
+ before iterating over the expectation/maximization steps.
+ take_into_account_score : bool, default=True
+ The graphical structure passed in argument may have been learnt from a
+ structure learning. In this case, if the score used to learn the structure
+ has an implicit prior (like K2 which has a 1-smoothing prior), it is important
+ to also take into account this implicit prior for parameter learning. By
+ default (`take_into_account_score=True`), we will learn parameters by taking
+ into account the prior specified by methods usePriorXXX() + the implicit prior
+ of the score (if any). If `take_into_account_score=False`, we just take into
+ account the prior specified by `usePriorXXX()`.

  Returns
  -------
  pyagrum.BayesNet
- the learned BayesNet
+ the learnt BayesNet

  Raises
  ------
  pyagrum.MissingVariableInDatabase
- If a variable of the BN is not found in the database
+ If a variable of the Bayes net is not found in the database
+ pyagrum.MissingValueInDatabase
+ If the database contains some missing values and EM is not used for the learning
+ pyagrum.OperationNotAllowed
+ If EM is used but no stopping criterion has been selected
  pyagrum.UnknownLabelInDatabase
  If a label is found in the database that do not correspond to the variable

- """
+ Warnings
+ --------
+ When using a `pyagrum.DAG` as input parameter, the NodeIds in the dag and index of
+ rows in the database must fit in order to coherently fix the structure of the BN.
+ Generally, it is safer to use a `pyagrum.BayesNet` as input or even to use
+ `pyagrum.BNLearner.fitParameters`.

- if type(args[0])==pyagrum.BayesNet:
- res=pyagrum.BayesNet(args[0])
- self.fitParameters(res)
- return res
+ """
+ val = _pyagrum.BNLearner_learnParameters(self, *args)

+ if self._EM_warning():
+ warnings.warn("\nthe learnParameters's EM algorithm has completed prematurely due to a likelihood divergence\n", UserWarning)


- return _pyagrum.BNLearner_learnParameters(self, *args)
+ return val


  def copyState(self, learner: "BNLearner") -> None:
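
A sketch of the three usages (the database is synthetic; `gum.generateSample` is assumed here only as a convenient way to produce a CSV, and the file name is illustrative):

    import pyagrum as gum

    bn = gum.fastBN("a->b->c")
    gum.generateSample(bn, 5000, "sample.csv")

    learner = gum.BNLearner("sample.csv")
    bn1 = learner.learnParameters(bn.dag())      # usage 1: a DAG fixes the structure
    bn2 = learner.learnParameters(bn)            # usage 2: a BN also seeds EM's starting CPTs
    learner.learnDAG()
    bn3 = learner.learnParameters()              # usage 3: reuse the last learnt DAG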
@@ -24296,6 +24624,8 @@ class BNLearner(object):
  def setInitialDAG(self, dag: "pyagrum.DAG") -> "pyagrum.BNLearner":
  r"""

+ Sets the initial structure (DAG) used by the structure learning algorithm.
+
  Parameters
  ----------
  dag : pyagrum.DAG
@@ -24304,56 +24634,407 @@ class BNLearner(object):
  """
  return _pyagrum.BNLearner_setInitialDAG(self, dag)

- def useEM(self, epsilon: float) -> "pyagrum.BNLearner":
+ def useEM(self, *args) -> "pyagrum.BNLearner":
  r"""

- Indicates if we use EM for parameter learning.
+ Sets whether we use EM for parameter learning or not, depending on the value of epsilon.
+
+ usage:
+ * `useEM(epsilon, noise=0.1)`
+
+ When epsilon is equal to 0.0, EM is forbidden, else EM is used for parameter learning
+ whenever the database contains missing values. In this case, its stopping criterion
+ is a threshold on the log-likelihood evolution rate, i.e., if llc and llo refer to
+ the log-likelihoods at the current and previous EM steps respectively, EM will stop
+ when (llc - llo) / llc drops below epsilon. If you wish to be more specific on which
+ stopping criterion to use, you may prefer exploiting methods `useEMWithRateCriterion()`
+ or `useEMWithDiffCriterion()`.

  Parameters
  ----------
  epsilon : float
- if epsilon=0.0 then EM is not used.
- if epsilon>0 then EM is used and stops when the sum of the cumulative squared error on parameters is less than epsilon.
-
- """
- return _pyagrum.BNLearner_useEM(self, epsilon)
+ if epsilon>0 then EM is used and stops whenever the relative difference between two
+ consecutive log-likelihoods (log-likelihood evolution rate) drops below epsilon.

- def useScoreAIC(self) -> "pyagrum.BNLearner":
- r"""
-
- Indicate that we wish to use an AIC score.
+ if epsilon=0.0 then EM is not used. But if you wish to forbid the use of EM, prefer
+ executing Method `forbidEM()` rather than useEM(0.0) as it is more unequivocal.

- """
- return _pyagrum.BNLearner_useScoreAIC(self)
+ noise: float, default=0.1
+ During EM's initialization, the CPTs are randomly perturbed using the following formula:
+ new_CPT = (1-noise) * CPT + noise * random_CPT. Parameter noise must belong
+ to interval [0,1]. By default, noise is equal to 0.1.

- def useScoreBD(self) -> "pyagrum.BNLearner":
- r"""
+ Returns
+ -------
+ pyagrum.BNLearner
+ the BNLearner itself, so that we can chain useXXX() methods.

- Indicate that we wish to use a BD score.
+ Raises
+ ------
+ pyagrum.OutOfBounds
+ if `epsilon` is strictly negative or if `noise` does not belong to interval [0,1].

  """
- return _pyagrum.BNLearner_useScoreBD(self)
+ return _pyagrum.BNLearner_useEM(self, *args)

- def useScoreBDeu(self) -> "pyagrum.BNLearner":
+ def useEMWithRateCriterion(self, *args) -> "pyagrum.BNLearner":
  r"""

- Indicate that we wish to use a BDeu score.
+ Enforces that EM with the log-likelihood min evolution rate stopping criterion will be
+ used for parameter learning when the dataset contains missing values.

- """
- return _pyagrum.BNLearner_useScoreBDeu(self)
+ Parameters
+ ----------
+ epsilon : float
+ epsilon sets the approximation stopping criterion: EM stops whenever the absolute
+ value of the relative difference between two consecutive log-likelihoods drops below
+ epsilon. Note that epsilon should be strictly positive.

- def useScoreBIC(self) -> "pyagrum.BNLearner":
- r"""
+ noise: float, default=0.1
+ During EM's initialization, the CPTs are randomly perturbed using the following formula:
+ new_CPT = (1-noise) * CPT + noise * random_CPT. Parameter noise must belong
+ to interval [0,1]. By default, noise is equal to 0.1.

- Indicate that we wish to use a BIC score.
+ Returns
+ -------
+ pyagrum.BNLearner
+ the BNLearner itself, so that we can chain useXXX() methods.
+
+ Raises
+ ------
+ pyagrum.OutOfBounds
+ if `epsilon` is not strictly positive or if `noise` does not belong to interval [0,1].

  """
- return _pyagrum.BNLearner_useScoreBIC(self)
+ return _pyagrum.BNLearner_useEMWithRateCriterion(self, *args)

- def useScoreK2(self) -> "pyagrum.BNLearner":
+ def useEMWithDiffCriterion(self, *args) -> "pyagrum.BNLearner":
  r"""

- Indicate that we wish to use a K2 score.
+ Enforces that EM with the log-likelihood min difference criterion will be used for
+ parameter learning whenever the dataset contains missing values.
+
+ Parameters
+ ----------
+ epsilon : float
+ epsilon sets the approximation stopping criterion: EM stops whenever the
+ difference between two consecutive log-likelihoods drops below
+ epsilon. Note that epsilon should be strictly positive.
+
+ noise: float (optional, default = 0.1)
+ During EM's initialization, the CPTs are randomly perturbed using the following formula:
+ new_CPT = (1-noise) * CPT + noise * random_CPT. Parameter noise must belong
+ to interval [0,1]. By default, noise is equal to 0.1.
+
+ Returns
+ -------
+ pyagrum.BNLearner
+ the BNLearner itself, so that we can chain useXXX() methods.
+
+ Raises
+ ------
+ pyagrum.OutOfBounds
+ if epsilon is not strictly positive or if noise does not belong to interval [0,1].
+
+ """
+ return _pyagrum.BNLearner_useEMWithDiffCriterion(self, *args)
+
+ def forbidEM(self) -> "pyagrum.BNLearner":
+ r"""
+
+ Forbids the use of EM for parameter learning.
+
+ Returns
+ -------
+ pyagrum.BNLearner
+ the BNLearner itself, so that we can chain useXXX() methods.
+
+ """
+ return _pyagrum.BNLearner_forbidEM(self)
+
+ def EMsetEpsilon(self, eps: float) -> "pyagrum.BNLearner":
+ r"""
+
+ Enforces that the minimal difference between two consecutive log-likelihoods
+ is chosen as a stopping criterion of the EM parameter learning algorithm and
+ specifies the threshold on this criterion.
+
+ Parameters
+ ----------
+ eps: float
+ the log-likelihood difference below which EM stops its iterations
+
+ Returns
+ -------
+ pyagrum.BNLearner
+ the BNLearner itself, so that we can chain useXXX() methods.
+
+ Raises
+ ------
+ pyagrum.OutOfBounds
+ If eps <= 0.
+
+ """
+ return _pyagrum.BNLearner_EMsetEpsilon(self, eps)
+
+ def EMdisableEpsilon(self) -> "pyagrum.BNLearner":
+ r"""
+
+ Disables the minimal difference between two consecutive log-likelihoods as a
+ stopping criterion for the EM parameter learning algorithm.
+
+ Returns
+ -------
+ pyagrum.BNLearner
+ the BNLearner itself, so that we can chain useXXX() methods.
+
+ """
+ return _pyagrum.BNLearner_EMdisableEpsilon(self)
+
+ def EMenableEpsilon(self) -> "pyagrum.BNLearner":
+ r"""
+
+ Enforces that the minimal difference between two consecutive log-likelihoods is
+ a stopping criterion for the EM parameter learning algorithm.
+
+ Returns
+ -------
+ pyagrum.BNLearner
+ the BNLearner itself, so that we can chain useXXX() methods.
+
+ Warnings:
+ ---------
+ Setting the min difference between two consecutive log-likelihoods as a stopping
+ criterion disables the min log-likelihood evolution rate as a stopping criterion.
+
+ """
+ return _pyagrum.BNLearner_EMenableEpsilon(self)
+
+ def EMsetMinEpsilonRate(self, rate: float) -> "pyagrum.BNLearner":
+ r"""
+
+ Enforces that the minimal log-likelihood's evolution rate is considered by the EM
+ parameter learning algorithm as a stopping criterion.
+
+ Parameters
+ ----------
+ rate: float
+ the log-likelihood evolution rate below which EM stops its iterations
+
+ Returns
+ -------
+ pyagrum.BNLearner
+ the BNLearner itself, so that we can chain useXXX() methods.
+
+ Raises
+ ------
+ pyagrum.OutOfBounds
+ If rate <= 0.
+
+ Warnings
+ --------
+ Setting this stopping criterion disables the min log-likelihod difference criterion
+ (if this one was enabled).
+
+ """
+ return _pyagrum.BNLearner_EMsetMinEpsilonRate(self, rate)
+
+ def EMdisableMinEpsilonRate(self) -> "pyagrum.BNLearner":
+ r"""
+
+ Disables the minimal log-likelihood's evolution rate as an EM parameter learning
+ stopping criterion.
+
+ Returns
+ -------
+ pyagrum.BNLearner
+ the BNLearner itself, so that we can chain useXXX() methods.
+
+ """
+ return _pyagrum.BNLearner_EMdisableMinEpsilonRate(self)
+
+ def EMenableMinEpsilonRate(self) -> "pyagrum.BNLearner":
+ r"""
+
+ Enables the minimal log-likelihood's evolution rate as an EM parameter learning
+ stopping criterion.
+
+ Returns
+ -------
+ pyagrum.BNLearner
+ the BNLearner itself, so that we can chain useXXX() methods.
+
+ Warnings
+ --------
+ Setting this stopping criterion disables the min log-likelihod difference criterion.
+
+ """
+ return _pyagrum.BNLearner_EMenableMinEpsilonRate(self)
+
+ def EMsetMaxIter(self, max: int) -> "pyagrum.BNLearner":
+ r"""
+
+ Enforces a limit on the number of expectation/maximization steps performed by EM.
+
+ Parameters
+ ----------
+ max : int
+ the maximal number of iterations that EM is allowed to perform
+
+ Returns
+ -------
+ pyagrum.BNLearner
+ the BNLearner itself, so that we can chain useXXX() methods.
+
+ Raises
+ ------
+ pyagrum.OutOfBounds
+ If max <= 1.
+
+ """
+ return _pyagrum.BNLearner_EMsetMaxIter(self, max)
+
+ def EMdisableMaxIter(self) -> "pyagrum.BNLearner":
+ r"""
+
+ Do not limit EM to perform a maximal number of iterations.
+
+ Returns
+ -------
+ pyagrum.BNLearner
+ the BNLearner itself, so that we can chain useXXX() methods.
+
+ """
+ return _pyagrum.BNLearner_EMdisableMaxIter(self)
+
+ def EMenableMaxIter(self) -> "pyagrum.BNLearner":
+ r"""
+
+ Enables a limit on the number of iterations performed by EM. This number is
+ equal to the last number specified with Method `EMsetMaxIter()`.
+ See Method `EMMaxIter()` to get its current value.
+
+ Returns
+ -------
+ pyagrum.BNLearner
+ the BNLearner itself, so that we can chain useXXX() methods.
+
+ """
+ return _pyagrum.BNLearner_EMenableMaxIter(self)
+
+ def EMsetMaxTime(self, timeout: float) -> "pyagrum.BNLearner":
+ r"""
+
+ Adds a constraint on the time that EM is allowed to run for learning
+ parameters.
+
+ Parameters
+ ----------
+ timeout : float
+ the timeout in milliseconds
+
+ Returns
+ -------
+ pyagrum.BNLearner
+ the BNLearner itself, so that we can chain useXXX() methods.
+
+ Raises
+ ------
+ pyagrum.OutOfBounds
+ If timeout<=0.0
+
+ """
+ return _pyagrum.BNLearner_EMsetMaxTime(self, timeout)
+
+ def EMdisableMaxTime(self) -> "pyagrum.BNLearner":
+ r"""
+
+ Allow EM to learn parameters for an infinite amount of time.
+
+ Returns
+ -------
+ pyagrum.BNLearner
+ the BNLearner itself, so that we can chain useXXX() methods.
+
+ """
+ return _pyagrum.BNLearner_EMdisableMaxTime(self)
+
+ def EMenableMaxTime(self) -> "pyagrum.BNLearner":
+ r"""
+
+ Forbid EM to run more than a given amount of time.
+
+ Returns
+ -------
+ pyagrum.BNLearner
+ the BNLearner itself, so that we can chain useXXX() methods.
+
+ """
+ return _pyagrum.BNLearner_EMenableMaxTime(self)
+
+ def EMsetPeriodSize(self, p: int) -> "pyagrum.BNLearner":
+ return _pyagrum.BNLearner_EMsetPeriodSize(self, p)
+
+ def EMPeriodSize(self) -> int:
+ return _pyagrum.BNLearner_EMPeriodSize(self)
+
+ def EMsetVerbosity(self, v: bool) -> "pyagrum.BNLearner":
+ r"""
+
+ Sets or unsets the verbosity of the EM parameter learning algorithm.
+
+ Verbosity is necessary for keeping track of the history of the learning.
+ See Method `EMHistory()`.
+
+ Parameters
+ ----------
+ v : bool
+ sets EM's verbose mode if and only if v = True.
+
+ Returns
+ -------
+ pyagrum.BNLearner
+ the BNLearner itself, so that we can chain useXXX() methods.
+
+ """
+ return _pyagrum.BNLearner_EMsetVerbosity(self, v)
+
+ def useScoreAIC(self) -> "pyagrum.BNLearner":
+ r"""
+
+ Indicate that we wish to use an AIC score.
+
+ """
+ return _pyagrum.BNLearner_useScoreAIC(self)
+
+ def useScoreBD(self) -> "pyagrum.BNLearner":
+ r"""
+
+ Indicate that we wish to use a BD score.
+
+ """
+ return _pyagrum.BNLearner_useScoreBD(self)
+
+ def useScoreBDeu(self) -> "pyagrum.BNLearner":
+ r"""
+
+ Indicate that we wish to use a BDeu score.
+
+ """
+ return _pyagrum.BNLearner_useScoreBDeu(self)
+
+ def useScoreBIC(self) -> "pyagrum.BNLearner":
+ r"""
+
+ Indicate that we wish to use a BIC score.
+
+ """
+ return _pyagrum.BNLearner_useScoreBIC(self)
+
+ def useScoreK2(self) -> "pyagrum.BNLearner":
+ r"""
+
+ Indicate that we wish to use a K2 score.

  """
  return _pyagrum.BNLearner_useScoreK2(self)
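
The EM controls above all return the learner, so they chain. A sketch on an illustrative database with missing cells (the file name is made up, and the second constructor argument is assumed to list the symbols read as missing):

    import pyagrum as gum

    learner = gum.BNLearner("incomplete.csv", ["?"])
    (learner.useEMWithDiffCriterion(1e-3, 0.05)   # stop on the log-likelihood difference
            .EMsetMaxIter(200)                    # hard cap on the number of EM steps
            .EMsetVerbosity(True))                # record the log-likelihood history

    bn = learner.learnParameters(gum.fastBN("a->b->c"))    # template BN, see learnParameters()
    print(learner.EMnbrIterations(), learner.EMHistory())  # introspection after the run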
@@ -24487,7 +25168,12 @@ class BNLearner(object):
  def addForbiddenArc(self, *args) -> "pyagrum.BNLearner":
  r"""

- The arc in parameters won't be added.
+ Forbid the arc passed in argument to be added during structure learning
+ (methods `learnDAG()` or `learnBN()`).
+
+ Usage:
+ 1. addForbiddenArc(tail, head)
+ 2. addForbiddenArc(arc)

  Parameters
  ----------
@@ -24521,7 +25207,12 @@
  def addMandatoryArc(self, *args) -> "pyagrum.BNLearner":
  r"""

- Allow to add prior structural knowledge.
+ Allow an arc to be added if necessary during structure learning
+ (methods `learnDAG()` or `learnBN()`).
+
+ Usage:
+ 1. addMandatoryArc(tail, head)
+ 2. addMandatoryArc(arc)

  Parameters
  ----------
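
Both constraints accept either two endpoints or an Arc object; a minimal sketch (the database and column names are illustrative):

    import pyagrum as gum

    learner = gum.BNLearner("sample.csv")
    learner.addMandatoryArc("a", "b")    # usage 1: tail, head (ids or names)
    learner.addForbiddenArc("c", "a")    # this arc can never appear
    bn = learner.learnBN()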
@@ -24562,8 +25253,8 @@ class BNLearner(object):

  Warnings
  --------
- By default, all edge is possible. However, once at least one possible edge is defined, all other edges not declared possible
- are considered as impossible.
+ By default, all edge is possible. However, once at least one possible edge is defined,
+ all other edges not declared possible are considered as impossible.

  Parameters
  ----------
@@ -24615,7 +25306,7 @@
  Parameters
  ----------
  node : int str
- a variable's id or name
+ a variable's id or name

  """
  return _pyagrum.BNLearner_addNoParentNode(self, *args)
@@ -24628,7 +25319,7 @@
  Parameters
  ----------
  node : int str
- a variable's id or name
+ a variable's id or name

  """
  return _pyagrum.BNLearner_eraseNoParentNode(self, *args)
@@ -24636,12 +25327,12 @@
  def addNoChildrenNode(self, *args) -> "pyagrum.BNLearner":
  r"""

- Add the constraint that this node cannot have any children.
+ Add to structure learning algorithms the constraint that this node cannot have any children.

  Parameters
  ----------
  node : int str
- a variable's id or name
+ a variable's id or name

  """
  return _pyagrum.BNLearner_addNoChildrenNode(self, *args)
@@ -24649,12 +25340,12 @@
  def eraseNoChildrenNode(self, *args) -> "pyagrum.BNLearner":
  r"""

- Remove the constraint that this node cannot have any children.
+ Remove in structure learning algorithms the constraint that this node cannot have any children.

  Parameters
  ----------
  node : int str
- a variable's id or name
+ a variable's id or name

  """
  return _pyagrum.BNLearner_eraseNoChildrenNode(self, *args)
@@ -24916,17 +25607,18 @@
  def learnPDAG(self) -> "pyagrum.PDAG":
  r"""

- learn a PDAG from a file
-
- Warnings
- --------
- The learning method must be constraint-based (MIIC, etc.) and not score-based (K2, GreedyHillClimbing, etc.)
+ learn a partially directed acyclic graph (PDAG) from the BNLearner's database

  Returns
  -------
  pyagrum.PDAG
  the learned PDAG

+ Warnings
+ --------
+ The learning method must be constraint-based (MIIC, etc.) and not score-based
+ (K2, GreedyHillClimbing, etc.)
+
  """
  return _pyagrum.BNLearner_learnPDAG(self)
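
A sketch of the constraint-based setup this requires (MIIC here; the database is illustrative):

    import pyagrum as gum

    learner = gum.BNLearner("sample.csv")
    learner.useMIIC()              # constraint-based, as the warning above requires
    pdag = learner.learnPDAG()     # arcs where orientation is decided, edges elsewhere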
 
@@ -24936,7 +25628,7 @@
  Returns
  -------
  Tuple[str]
- the names of the variables in the database
+ the names of the variables in the database

  """
  return _pyagrum.BNLearner_names(self)
@@ -24968,12 +25660,12 @@
  Parameters
  ----------
  id
- a node id
+ a node id

  Returns
  -------
  str
- the variable's name
+ the variable's name

  """
  return _pyagrum.BNLearner_nameFromId(self, id)
@@ -24991,7 +25683,7 @@
  Parameters
  ----------
  weight : float
- the database weight
+ the database weight

  """
  return _pyagrum.BNLearner_setDatabaseWeight(self, new_weight)
@@ -25074,15 +25766,15 @@
  Parameters
  ----------
  vars: List[str]
- the name of the columns of interest
+ the name of the columns of interest

  knowing : List[str]
- the (optional) list of names of conditioning columns
+ the (optional) list of names of conditioning columns

  Returns
  -------
  float
- the log-likelihood (base 2)
+ the log-likelihood (base 2)

  """
  return _pyagrum.BNLearner_logLikelihood(self, *args)
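
A sketch of the joint and conditional forms (arguments are passed positionally, since the `*args` SWIG wrapper above does not take keywords; column names are illustrative):

    import pyagrum as gum

    learner = gum.BNLearner("sample.csv")
    ll_ab = learner.logLikelihood(["a", "b"])      # log2-likelihood of columns a and b
    ll_a_kb = learner.logLikelihood(["a"], ["b"])  # the same, conditioned on column b
    print(learner.nbRows(), learner.nbCols())      # size of the underlying database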
@@ -25121,13 +25813,13 @@ class BNLearner(object):
25121
25813
  Parameters
25122
25814
  ----------
25123
25815
  name1: str
25124
- the name of the first column
25816
+ the name of the first column
25125
25817
 
25126
25818
  name2 : str
25127
- the name of the second column
25819
+ the name of the second column
25128
25820
 
25129
25821
  knowing : List[str]
25130
- the list of names of conditioning columns
25822
+ the list of names of conditioning columns
25131
25823
 
25132
25824
  Returns
25133
25825
  -------
@@ -25151,18 +25843,18 @@ class BNLearner(object):
25151
25843
  Parameters
25152
25844
  ----------
25153
25845
  name1: str
25154
- the name of the first column
25846
+ the name of the first column
25155
25847
 
25156
25848
  name2 : str
25157
- the name of the second column
25849
+ the name of the second column
25158
25850
 
25159
25851
  knowing : List[str]
25160
- the list of names of conditioning columns
25852
+ the list of names of conditioning columns
25161
25853
 
25162
25854
  Returns
25163
25855
  -------
25164
25856
  Tuple[float,float]
25165
- the G2 statistic and the associated p-value as a Tuple
25857
+ the G2 statistic and the associated p-value as a Tuple
25166
25858
 
25167
25859
  """
25168
25860
  return _pyagrum.BNLearner_correctedMutualInformation(self, *args)
@@ -25176,12 +25868,12 @@ class BNLearner(object):
25176
25868
  Parameters
25177
25869
  ----------
25178
25870
  vars: List[intstr]
25179
- the list of variables
25871
+ the list of variables
25180
25872
 
25181
25873
  Returns
25182
25874
  -------
25183
25875
  List[float]
25184
- the pseudo-count as a list of float
25876
+ the pseudo-count as a list of float
25185
25877
 
25186
25878
  """
25187
25879
  return _pyagrum.BNLearner_rawPseudoCount(self, *args)
@@ -25195,7 +25887,7 @@ class BNLearner(object):
25195
25887
  Returns
25196
25888
  -------
25197
25889
  int
25198
- the number of rows in the database
25890
+ the number of rows in the database
25199
25891
 
25200
25892
  """
25201
25893
  return _pyagrum.BNLearner_nbRows(self)
@@ -25209,7 +25901,7 @@ class BNLearner(object):
25209
25901
  Returns
25210
25902
  -------
25211
25903
  int
25212
- the number of columns in the database
25904
+ the number of columns in the database
25213
25905
 
25214
25906
  """
25215
25907
  return _pyagrum.BNLearner_nbCols(self)
@@ -25227,6 +25919,157 @@ class BNLearner(object):
25227
25919
  """
25228
25920
  return _pyagrum.BNLearner_domainSize(self, *args)
25229
25921
 
25922
+ def isUsingEM(self) -> bool:
25923
+ r"""
25924
+
25925
+ returns a Boolean indicating whether EM is used for parameter learning when
25926
+ the database contains missing values.
25927
+
25928
+ """
25929
+ return _pyagrum.BNLearner_isUsingEM(self)
25930
+
25931
+ def EMEpsilon(self) -> float:
25932
+ r"""
25933
+
25934
+ Returns a float corresponding to the minimal difference between two consecutive
25935
+ log-likelihoods under which the EM parameter learning algorithm stops.
25936
+
25937
+ Returns
25938
+ -------
25939
+ float
25940
+ the minimal difference between two consecutive log-likelihoods under which EM stops.
25941
+
25942
+ """
25943
+ return _pyagrum.BNLearner_EMEpsilon(self)
25944
+
25945
+ def EMisEnabledEpsilon(self) -> bool:
25946
+ r"""
25947
+
25948
+ Returns a Boolean indicating whether the minimal difference between two consecutive
25949
+ log-likelihoods is a stopping criterion for the EM parameter learning algorithm.
25950
+
25951
+ """
25952
+ return _pyagrum.BNLearner_EMisEnabledEpsilon(self)
25953
+
25954
+ def EMMinEpsilonRate(self) -> float:
25955
+ r"""
25956
+
25957
+ Returns a float corresponding to the minimal log-likelihood's evolution rate under
25958
+ which the EM parameter learning algorithm stops its iterations.
25959
+
25960
+ Returns
25961
+ -------
25962
+ float
25963
+ the limit under which EM stops its expectation/maximization iterations
25964
+
25965
+ """
25966
+ return _pyagrum.BNLearner_EMMinEpsilonRate(self)
25967
+
25968
+ def EMisEnabledMinEpsilonRate(self) -> bool:
25969
+ r"""
25970
+
25971
+ Returns a Boolean indicating whether the minimal log-likelihood's evolution rate is
25972
+ considered as a stopping criterion by the EM parameter learning algorithm.
25973
+
25974
+ """
25975
+ return _pyagrum.BNLearner_EMisEnabledMinEpsilonRate(self)
25976
+
25977
+ def EMMaxIter(self) -> int:
25978
+ r"""
25979
+
25980
+ Returns an int containing the max number of iterations the EM parameter learning
25981
+ algorithm is allowed to perform when the max iterations stopping criterion is enabled.
25982
+
25983
+ Returns
25984
+ -------
25985
+ float
25986
+ the max number of expectation/maximization iterations EM is allowed to perform
25987
+
25988
+ """
25989
+ return _pyagrum.BNLearner_EMMaxIter(self)
25990
+
25991
+ def EMisEnabledMaxIter(self) -> bool:
25992
+ r"""
25993
+
25994
+ Returns a Boolean indicating whether the max number of iterations is used
25995
+ by EM as a stopping criterion.
25996
+
25997
+ """
25998
+ return _pyagrum.BNLearner_EMisEnabledMaxIter(self)
25999
+
26000
+ def EMMaxTime(self) -> float:
26001
+ r"""
26002
+
26003
+ Returns a float indicating EM's time limit when the max time stopping
26004
+ criterion is used by the EM parameter learning algorithm.
26005
+
26006
+ Returns
26007
+ -------
26008
+ float
26009
+ the max time EM is allowed to execute its expectation/maximization iterations
26010
+
26011
+ """
26012
+ return _pyagrum.BNLearner_EMMaxTime(self)
26013
+
26014
+ def EMisEnabledMaxTime(self) -> bool:
26015
+ r"""
26016
+
26017
+ Returns a Boolean indicating whether the max time criterion is used as
26018
+ an EM stopping criterion.
26019
+
26020
+ """
26021
+ return _pyagrum.BNLearner_EMisEnabledMaxTime(self)
26022
+
26023
+ def EMVerbosity(self) -> bool:
26024
+ r"""
26025
+
26026
+ Returns a Boolean indicating whether the EM parameter learning algorithm
26027
+ is in a verbose mode.
26028
+
26029
+ Note that EM verbosity is necessary for recording the history of the
26030
+ log-likelihoods computed at each expectation/maximization step.
26031
+
26032
+ Returns
26033
+ -------
26034
+ bool
26035
+ indicates whether EM's verbose mode is active or not
26036
+
26037
+ """
26038
+ return _pyagrum.BNLearner_EMVerbosity(self)
26039
+
26040
+ def EMnbrIterations(self) -> int:
26041
+ r"""
26042
+
26043
+ Returns the number of iterations performed by the EM parameter learning algorithm.
26044
+
26045
+ """
26046
+ return _pyagrum.BNLearner_EMnbrIterations(self)
26047
+
+ def EMHistory(self) -> List[float]:
+ r"""
+
+ Returns a list containing the log-likelihoods recorded after each
+ expectation/maximization iteration of the EM parameter learning algorithm.
+
+ Returns
+ -------
+ List[float]
+ A list of all the log-likelihoods recorded during EM's execution
+
+ Warnings
+ --------
+ Recording log-likelihoods is enabled only when EM is executed in verbose
+ mode. See method `EMsetVerbosity()`.
+
+ """
+ return _pyagrum.BNLearner_EMHistory(self)
+
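A sketch of a monitored EM run tying these getters together. EMsetVerbosity is the setter cited in the Warnings block above; useEM(epsilon) and the model/data file names are assumptions about the surrounding BNLearner API, not shown in this diff:

    import pyagrum as gum

    learner = gum.BNLearner("data.csv")   # hypothetical database file
    learner.useEM(1e-4)                   # assumed setter: activate EM parameter learning
    learner.EMsetVerbosity(True)          # cited above: required for EMHistory()
    bn = learner.learnParameters(gum.loadBN("model.bif"))  # hypothetical structure file
    print(learner.EMnbrIterations(), "E/M iterations performed")
    print(learner.EMHistory())            # one log-likelihood per iteration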
+ def EMStateApproximationScheme(self) -> int:
+ # undocumented in the generated wrapper: returns the current state of EM's
+ # approximation scheme as an int code
+ return _pyagrum.BNLearner_EMStateApproximationScheme(self)
+
+ def EMStateMessage(self) -> str:
+ # undocumented in the generated wrapper: returns a message describing the
+ # current state of EM's approximation scheme
+ return _pyagrum.BNLearner_EMStateMessage(self)
+
  def setNumberOfThreads(self, nb: int) -> None:
  r"""
 
@@ -25270,24 +26113,31 @@ class BNLearner(object):
  def chi2(self, *args) -> object:
  r"""
 
- chi2 computes the chi2 statistic and p-value for two columns, given a list of other columns.
+ chi2 computes the chi2 statistic and p-value of two variables, given a
+ list of conditioning variables.
 
+ The variables correspond to columns of the database and are specified by the
+ names of these columns. The list of conditioning variables may be empty, in
+ which case it can simply be omitted.
+
+ Usage:
+ * `chi2(name1, name2, knowing=[])`
 
  Parameters
  ----------
  name1: str
- the name of the first column
+ the name of a variable/column in the database
 
  name2 : str
- the name of the second column
+ the name of another variable/column in the database
 
  knowing : List[str]
- the list of names of conditioning columns
+ the list of the column names of the conditioning variables
 
  Returns
  -------
  Tuple[float,float]
  the chi2 statistic and the associated p-value as a Tuple
 
  """
  return _pyagrum.BNLearner_chi2(self, *args)
@@ -25295,28 +26145,38 @@ class BNLearner(object):
  def G2(self, *args) -> object:
  r"""
 
- G2 computes the G2 statistic and p-value for two columns, given a list of other columns.
+ G2 computes the G2 statistic and p-value of two variables, given a
+ list of conditioning variables.
+
+ The variables correspond to columns of the database and are specified by the
+ names of these columns. The list of conditioning variables may be empty, in
+ which case it can simply be omitted.
 
+ Usage:
+ * `G2(name1, name2, knowing=[])`
 
  Parameters
  ----------
  name1: str
- the name of the first column
+ the name of a variable/column in the database
 
  name2 : str
- the name of the second column
+ the name of another variable/column in the database
 
  knowing : List[str]
- the list of names of conditioning columns
+ the list of the column names of the conditioning variables
 
  Returns
  -------
  Tuple[float,float]
- the G2 statistic and the associated p-value as a Tuple
+ the G2 statistic and the corresponding p-value as a Tuple
 
  """
  return _pyagrum.BNLearner_G2(self, *args)
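Both independence tests share the calling convention given in the Usage blocks above; a short sketch on a hypothetical database (the column names "smoking", "cancer" and "age" are illustrative only):

    import pyagrum as gum

    learner = gum.BNLearner("data.csv")  # hypothetical CSV database
    stat, pvalue = learner.chi2("smoking", "cancer")         # empty conditioning set: omitted
    stat, pvalue = learner.G2("smoking", "cancer", ["age"])  # conditioned on one variable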
 
+ def _EM_warning(self) -> bool:
+ return _pyagrum.BNLearner__EM_warning(self)
+
  def setSliceOrder(self, *args) -> "pyagrum.BNLearner":
  r"""
 
@@ -25374,7 +26234,7 @@ class BNLearner(object):
25374
26234
  def setPossibleEdges(self, *args) -> None:
25375
26235
  r"""
25376
26236
 
25377
- Add a constraint by fixing the set of possible edges.
26237
+ Adds a constraint to the structure learning algorithm by fixing the set of possible edges.
25378
26238
 
25379
26239
  Parameters
25380
26240
  ----------
@@ -25409,30 +26269,12 @@ class BNLearner(object):
  return p
 
  def fitParameters(self,bn,take_into_account_score=True):
- """
- fitParameters directly populates the CPTs of the argument using the database and the structure of the BN.
+ if not set(self.names()).issuperset(bn.names()):
+ raise Exception(f"Some variables are in the BN but not in the data: {bn.names()-set(self.names())}")
 
- Parameters
- ----------
- bn : pyagrum.BayesNet
- a BN which will directly have its parameters learned inplace.
-
- take_into_account_score : bool
- The dag passed in argument may have been learnt from a structure learning. In this case, if the score used to learn the structure has an implicit prior (like K2 which has a 1-smoothing prior), it is important to also take into account this implicit prior for parameter learning. By default (`take_into_account_score=True`), we will learn parameters by taking into account the prior specified by methods usePriorXXX () + the implicit prior of the score (if any). If `take_into_account_score=False`, we just take into account the prior specified by `usePriorXXX()`.
-
- """
- if set(self.names())!=bn.names():
- raise Exception("Not the same variable names in the database and in the BN")
-
- from pyagrum import DAG
- d=DAG()
- for n in bn.names():
- d.addNodeWithId(self.idFromName(n))
- for i1,i2 in bn.arcs():
- d.addArc(self.idFromName(bn.variable(i1).name()),self.idFromName(bn.variable(i2).name()))
- tmp=self.learnParameters(d,take_into_account_score)
+ tmp=self.learnParameters(bn,take_into_account_score)
  for n in tmp.names():
- bn.cpt(n).fillWith(tmp.cpt(n))
+ bn.cpt(bn.idFromName(n)).fillWith(tmp.cpt(n))
  return self
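The rewritten fitParameters passes the BN directly to learnParameters and only requires the database to contain at least the BN's variables. A usage sketch (loadBN exists in pyAgrum, but the file names are hypothetical):

    import pyagrum as gum

    bn = gum.loadBN("model.bif")         # hypothetical pre-built structure
    learner = gum.BNLearner("data.csv")  # columns must be a superset of bn's variables
    learner.fitParameters(bn)            # bn's CPTs are (re)estimated in place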
 
  def learnEssentialGraph(self):
@@ -28707,7 +29549,7 @@ class ShaferShenoyLIMIDInference(object):
 
  # Register ShaferShenoyLIMIDInference in _pyagrum:
  _pyagrum.ShaferShenoyLIMIDInference_swigregister(ShaferShenoyLIMIDInference)
- __version__ = '2.1.1.9'
+ __version__ = '2.3.1.9'
  __license__ = __doc__
  __project_url__ = 'https://agrum.org'
  __project_name__ = 'pyAgrum'
@@ -28754,8 +29596,12 @@ def about():
  # ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR #
  # OTHER DEALINGS IN THE SOFTWARE. #
  # #
- # See the GNU Lesser General Public License (LICENSE.LGPL) and the MIT #
- # licence (LICENSE.MIT) for more details. #
+ # See LICENCES for more details. #
+ # #
+ # SPDX-FileCopyrightText: Copyright 2005-2025 #
+ # - Pierre-Henri WUILLEMIN(_at_LIP6) #
+ # - Christophe GONZALES(_at_AMU) #
+ # SPDX-License-Identifier: LGPL-3.0-or-later OR MIT #
  # #
  # Contact : info_at_agrum_dot_org #
  # homepage : http://agrum.gitlab.io #
@@ -30585,7 +31431,7 @@ def getPosterior(model, *, target, evs=None):
  # creating a new Tensor from posterior (will disappear with ie)
  return pyagrum.Tensor(inf.posterior(target))
 
- __version__ = '2.1.1.9'
+ __version__ = '2.3.1.9'
  __license__ = __doc__
  __project_url__ = 'https://agrum.org'
  __project_name__ = 'pyAgrum'
@@ -30632,8 +31478,12 @@ def about():
  # ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR #
  # OTHER DEALINGS IN THE SOFTWARE. #
  # #
- # See the GNU Lesser General Public License (LICENSE.LGPL) and the MIT #
- # licence (LICENSE.MIT) for more details. #
+ # See LICENCES for more details. #
+ # #
+ # SPDX-FileCopyrightText: Copyright 2005-2025 #
+ # - Pierre-Henri WUILLEMIN(_at_LIP6) #
+ # - Christophe GONZALES(_at_AMU) #
+ # SPDX-License-Identifier: LGPL-3.0-or-later OR MIT #
  # #
  # Contact : info_at_agrum_dot_org #
  # homepage : http://agrum.gitlab.io #