compiled-knowledge 4.1.0a2__cp313-cp313-macosx_11_0_arm64.whl → 4.2.0a1__cp313-cp313-macosx_11_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of compiled-knowledge might be problematic.

Files changed (36)
  1. ck/circuit/_circuit_cy.c +1 -1
  2. ck/circuit/_circuit_cy.cpython-313-darwin.so +0 -0
  3. ck/circuit_compiler/cython_vm_compiler/_compiler.c +152 -152
  4. ck/circuit_compiler/cython_vm_compiler/_compiler.cpython-313-darwin.so +0 -0
  5. ck/circuit_compiler/llvm_compiler.py +4 -4
  6. ck/circuit_compiler/support/circuit_analyser/_circuit_analyser_cy.c +1 -1
  7. ck/circuit_compiler/support/circuit_analyser/_circuit_analyser_cy.cpython-313-darwin.so +0 -0
  8. ck/circuit_compiler/support/input_vars.py +4 -4
  9. ck/dataset/cross_table.py +143 -79
  10. ck/dataset/dataset.py +95 -7
  11. ck/dataset/dataset_builder.py +11 -4
  12. ck/dataset/dataset_from_crosstable.py +21 -2
  13. ck/learning/coalesce_cross_tables.py +403 -0
  14. ck/learning/model_from_cross_tables.py +296 -0
  15. ck/learning/parameters.py +117 -0
  16. ck/learning/train_generative_bn.py +198 -0
  17. ck/pgm.py +10 -8
  18. ck/pgm_circuit/marginals_program.py +5 -0
  19. ck/pgm_circuit/wmc_program.py +5 -0
  20. ck/pgm_compiler/support/circuit_table/_circuit_table_cy.c +1 -1
  21. ck/pgm_compiler/support/circuit_table/_circuit_table_cy.cpython-313-darwin.so +0 -0
  22. ck/probability/divergence.py +226 -0
  23. ck/probability/probability_space.py +43 -19
  24. ck/utils/map_dict.py +89 -0
  25. ck_demos/dataset/demo_dataset_from_sampler.py +18 -0
  26. ck_demos/learning/__init__.py +0 -0
  27. ck_demos/learning/demo_bayesian_network_from_cross_tables.py +70 -0
  28. ck_demos/learning/demo_simple_learning.py +55 -0
  29. ck_demos/sampling/demo_wmc_direct_sampler.py +2 -2
  30. {compiled_knowledge-4.1.0a2.dist-info → compiled_knowledge-4.2.0a1.dist-info}/METADATA +2 -1
  31. {compiled_knowledge-4.1.0a2.dist-info → compiled_knowledge-4.2.0a1.dist-info}/RECORD +35 -26
  32. ck/learning/train_generative.py +0 -149
  33. ck/{dataset/cross_table_probabilities.py → probability/cross_table_probability_space.py} +0 -0
  34. {compiled_knowledge-4.1.0a2.dist-info → compiled_knowledge-4.2.0a1.dist-info}/WHEEL +0 -0
  35. {compiled_knowledge-4.1.0a2.dist-info → compiled_knowledge-4.2.0a1.dist-info}/licenses/LICENSE.txt +0 -0
  36. {compiled_knowledge-4.1.0a2.dist-info → compiled_knowledge-4.2.0a1.dist-info}/top_level.txt +0 -0
ck/probability/probability_space.py CHANGED
@@ -1,3 +1,5 @@
+ from __future__ import annotations
+
  import math
  from abc import ABC, abstractmethod
  from itertools import chain
@@ -203,16 +205,19 @@ class ProbabilitySpace(ABC):
  loop_rvs.append([rv[i] for i in sorted(states)])
  reduced_space = True

+ best_probability = float('-inf')
+ best_states = None
+
  # If the random variables we are looping over does not have any conditions
  # then it is expected to be faster by using computed marginal probabilities.
  if not reduced_space:
  prs = self.marginal_distribution(*rvs, condition=condition)
- best_probability = float('-inf')
- best_states = None
  for probability, inst in zip(prs, rv_instances(*rvs)):
  if probability > best_probability:
  best_probability = probability
  best_states = inst
+ if best_states is None:
+ return _NAN, ()
  return best_probability, best_states

  else:
@@ -220,8 +225,6 @@ class ProbabilitySpace(ABC):
  new_conditions = tuple(ind for ind in condition if ind.rv_idx not in rv_indexes)

  # Loop over the state space of the 'loop' rvs
- best_probability = float('-inf')
- best_states = None
  indicators: Tuple[Indicator, ...]
  for indicators in _combos(loop_rvs):
  probability = self.wmc(*(indicators + new_conditions))
@@ -229,6 +232,8 @@ class ProbabilitySpace(ABC):
  best_probability = probability
  best_states = tuple(ind.state_idx for ind in indicators)
  condition_probability = self.wmc(*condition)
+ if best_states is None:
+ return _NAN, ()
  return best_probability / condition_probability, best_states

  def correlation(self, indicator1: Indicator, indicator2: Indicator, condition: Condition = ()) -> float:
@@ -245,6 +250,20 @@ class ProbabilitySpace(ABC):
  """
  condition = check_condition(condition)

+ if indicator1.rv_idx == indicator2.rv_idx:
+ # Special case - same random variable
+ condition_groups: MapSet[int, Indicator] = _group_indicators(condition)
+ rv_idx: int = indicator1.rv_idx
+ if indicator1 not in condition_groups.get(rv_idx, (indicator1,)):
+ return _NAN
+ if indicator1 == indicator2:
+ return 1
+ else:
+ if indicator2 not in condition_groups.get(rv_idx, (indicator2,)):
+ return _NAN
+ else:
+ return 0
+
  p1 = self.probability(indicator1, condition=condition)
  p2 = self.probability(indicator2, condition=condition)
  p12 = self._joint_probability(indicator1, indicator2, condition=condition)
@@ -267,12 +286,7 @@ class ProbabilitySpace(ABC):
  entropy of the given random variable.
  """
  condition = check_condition(condition)
- e = 0.0
- for ind in rv:
- p = self.probability(ind, condition=condition)
- if p > 0.0:
- e -= p * math.log2(p)
- return e
+ return -sum(plogp(self.probability(ind, condition=condition)) for ind in rv)

  def conditional_entropy(self, rv1: RandomVariable, rv2: RandomVariable, condition: Condition = ()) -> float:
  """
@@ -309,13 +323,11 @@ class ProbabilitySpace(ABC):
  joint entropy of the given random variables.
  """
  condition = check_condition(condition)
- e = 0.0
- for ind1 in rv1:
- for ind2 in rv2:
- p = self._joint_probability(ind1, ind2, condition=condition)
- if p > 0.0:
- e -= p * math.log2(p)
- return e
+ return -sum(
+ plogp(self._joint_probability(ind1, ind2, condition=condition))
+ for ind1 in rv1
+ for ind2 in rv2
+ )

  def mutual_information(self, rv1: RandomVariable, rv2: RandomVariable, condition: Condition = ()) -> float:
  """
@@ -419,8 +431,12 @@ class ProbabilitySpace(ABC):
  denominator = self.joint_entropy(rv1, rv2, condition=condition)
  return self._normalised_mutual_information(rv1, rv2, denominator, condition=condition)

- def covariant_normalised_mutual_information(self, rv1: RandomVariable, rv2: RandomVariable,
- condition: Condition = ()) -> float:
+ def covariant_normalised_mutual_information(
+ self,
+ rv1: RandomVariable,
+ rv2: RandomVariable,
+ condition: Condition = (),
+ ) -> float:
  """
  Calculate the covariant normalised mutual information
  = I(rv1; rv2) / sqrt(H(rv1) * H(rv2)).
@@ -549,6 +565,14 @@ class ProbabilitySpace(ABC):
  return wmc


+ def plogp(p: float) -> float:
+ """
+ Returns:
+ p * log2(p)
+ """
+ return p * math.log2(p) if p > 0 else 0
+
+
  def check_condition(condition: Condition) -> Tuple[Indicator, ...]:
  """
  Make the best effort to interpret the given condition.
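Note on the entropy refactor above: the removed accumulation loops and the new plogp helper compute the same quantity, H = -sum over p of p * log2(p), with the convention that p <= 0 contributes 0. A minimal standalone sketch of that identity (the probabilities list below is illustrative, not taken from the package):

import math

def plogp(p: float) -> float:
    # Same behaviour as the helper added above: p * log2(p), treating p <= 0 as contributing 0.
    return p * math.log2(p) if p > 0 else 0

probabilities = [0.5, 0.25, 0.25]  # hypothetical marginal distribution
entropy = -sum(plogp(p) for p in probabilities)
print(entropy)  # 1.5 bits, matching what the removed explicit loop would accumulate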
ck/utils/map_dict.py ADDED
@@ -0,0 +1,89 @@
+ """
+ This module defines a class "MapDict" for mapping keys to dicts.
+ """
+
+ from typing import TypeVar, Generic, Dict, MutableMapping, Iterator, KeysView, ValuesView, ItemsView
+
+ _K = TypeVar('_K')
+ _KV = TypeVar('_KV')
+ _V = TypeVar('_V')
+
+
+ class MapDict(Generic[_K, _KV, _V], MutableMapping[_K, Dict[_KV, _V]]):
+ """
+ A MapDict keeps a dict for each key.
+ """
+ __slots__ = ('_map',)
+
+ def __init__(self):
+ self._map: Dict[_K, Dict[_KV, _V]] = {}
+
+ def __str__(self) -> str:
+ return str(self._map)
+
+ def __repr__(self) -> str:
+ args = ', '.join(f'{key!r}:{key!r}' for key, val in self.items())
+ class_name = self.__class__.__name__
+ return f'{class_name}({args})'
+
+ def __len__(self) -> int:
+ return len(self._map)
+
+ def __bool__(self) -> bool:
+ return len(self) > 0
+
+ def __getitem__(self, key: _K) -> Dict[_KV, _V]:
+ return self._map[key]
+
+ def __setitem__(self, key: _K, val: Dict[_KV, _V]):
+ if not isinstance(val, dict):
+ class_name = self.__class__.__name__
+ raise RuntimeError(f'every {class_name} value must be a dict')
+ self._map[key] = val
+
+ def __delitem__(self, key: _K):
+ del self._map[key]
+
+ def __iter__(self) -> Iterator[_K]:
+ return iter(self._map)
+
+ def __contains__(self, key: _K) -> bool:
+ return key in self._map
+
+ def keys(self) -> KeysView[_K]:
+ return self._map.keys()
+
+ def values(self) -> ValuesView[Dict[_KV, _V]]:
+ return self._map.values()
+
+ def items(self) -> ItemsView[_K, Dict[_KV, _V]]:
+ return self._map.items()
+
+ def get(self, key: _K, default=None):
+ """
+ Get the list corresponding to the given key.
+ If the key is not yet in the MapList then the
+ supplied default will be returned.
+ """
+ return self._map.get(key, default)
+
+ def get_dict(self, key: _K) -> Dict[_KV, _V]:
+ """
+ Get the dict corresponding to the given key.
+
+ This method will always return a dict in the MapDict, even if
+ it requires a new dict being created.
+
+ Modifying the returned dict affects this MapDict object.
+ """
+ the_dict = self._map.get(key)
+ if the_dict is None:
+ the_dict = {}
+ self._map[key] = the_dict
+ return the_dict
+
+ def clear(self):
+ """
+ Remove all items.
+ """
+ return self._map.clear()
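Usage note for the new MapDict class above: it wraps a dict of dicts, and get_dict creates the missing inner dict on first access while get does not. A minimal sketch using only the methods shown in the diff (the document/word tally below is hypothetical):

from ck.utils.map_dict import MapDict

# Hypothetical tally of (document, word) -> count.
counts: MapDict[str, str, int] = MapDict()
counts.get_dict('doc1')['hello'] = 1
counts.get_dict('doc1')['world'] = 2
counts.get_dict('doc2')['hello'] = 5

print(counts['doc1'])      # {'hello': 1, 'world': 2}
print(counts.get('doc3'))  # None - get() does not create a missing inner dict
print(len(counts))         # 2 keys, one dict per key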
ck_demos/dataset/demo_dataset_from_sampler.py ADDED
@@ -0,0 +1,18 @@
+ from ck import example
+ from ck.dataset.sampled_dataset import dataset_from_sampler
+ from ck.pgm import PGM
+ from ck.pgm_circuit.wmc_program import WMCProgram
+ from ck.pgm_compiler import DEFAULT_PGM_COMPILER
+ from ck.sampling.sampler import Sampler
+
+
+ def main() -> None:
+ pgm: PGM = example.Student()
+ sampler: Sampler = WMCProgram(DEFAULT_PGM_COMPILER(pgm)).sample_direct()
+ dataset = dataset_from_sampler(sampler, 10)
+
+ dataset.dump()
+
+
+ if __name__ == '__main__':
+ main()
ck_demos/learning/__init__.py ADDED (empty file, no content to show)
ck_demos/learning/demo_bayesian_network_from_cross_tables.py ADDED
@@ -0,0 +1,70 @@
+ from typing import List, Set
+
+ from ck import example
+ from ck.dataset import HardDataset
+ from ck.dataset.cross_table import CrossTable, cross_table_from_hard_dataset
+ from ck.dataset.sampled_dataset import dataset_from_sampler
+ from ck.learning.model_from_cross_tables import model_from_cross_tables
+ from ck.pgm import PGM, RandomVariable
+ from ck.pgm_circuit.wmc_program import WMCProgram
+ from ck.pgm_compiler import DEFAULT_PGM_COMPILER
+ from ck.probability import divergence
+
+ EXCLUDE_UNNECESSARY_CROSS_TABLES = True
+
+
+ def main() -> None:
+ # Create a dataset based on model which is an example PGM
+ number_of_samples: int = 10000  # How many instances to make for the model dataset
+ model: PGM = example.Student()
+ model_dataset: HardDataset = dataset_from_sampler(
+ WMCProgram(DEFAULT_PGM_COMPILER(model)).sample_direct(),
+ number_of_samples,
+ )
+
+ # Clone the model, without factors, and transport the dataset to the new PGM
+ pgm = PGM()
+ dataset = HardDataset(weights=model_dataset.weights)
+ for model_rv in model.rvs:
+ rv = pgm.new_rv(model_rv.name, model_rv.states)
+ dataset.add_rv_from_state_idxs(rv, model_dataset.state_idxs(model_rv))
+
+ # What model rvs have a child
+ model_rvs_with_children: Set[RandomVariable] = set()
+ for model_factor in model.factors:
+ for parent_rv in model_factor.rvs[1:]:
+ model_rvs_with_children.add(parent_rv)
+
+ # Construct cross-tables from the dataset
+ cross_tables: List[CrossTable] = []
+ for model_factor in model.factors:
+ if (
+ EXCLUDE_UNNECESSARY_CROSS_TABLES
+ and len(model_factor.rvs) == 1
+ and model_factor.rvs[0] in model_rvs_with_children
+ ):
+ # The factor relates to a single random variable (has
+ # no parents) but it does have children.
+ # No need to include a cross-table as it is inferable from
+ # cross-tables of its children.
+ continue
+
+ rvs = tuple(pgm.rvs[model_rv.idx] for model_rv in model_factor.rvs)
+ cross_tables.append(cross_table_from_hard_dataset(dataset, rvs))
+ print('cross-table:', *rvs)
+
+ # Train the PGM
+ model_from_cross_tables(pgm, cross_tables)
+
+ # Show results
+ print()
+ pgm.dump(show_function_values=True)
+ print()
+ model_space = WMCProgram(DEFAULT_PGM_COMPILER(model))
+ pgm_space = WMCProgram(DEFAULT_PGM_COMPILER(pgm))
+ print('HI', divergence.hi(model_space, pgm_space))
+ print('KL', divergence.kl(model_space, pgm_space))
+
+
+ if __name__ == '__main__':
+ main()
ck_demos/learning/demo_simple_learning.py ADDED
@@ -0,0 +1,55 @@
+ from ck.dataset.dataset_from_csv import hard_dataset_from_csv
+ from ck.learning.train_generative_bn import train_generative_bn
+ from ck.pgm import PGM
+
+
+ def main() -> None:
+ pgm = PGM('Student')
+
+ difficult = pgm.new_rv('difficult', ['y', 'n'])
+ intelligent = pgm.new_rv('intelligent', ['y', 'n'])
+ grade = pgm.new_rv('grade', ['low', 'medium', 'high'])
+ award = pgm.new_rv('award', ['y', 'n'])
+ letter = pgm.new_rv('letter', ['y', 'n'])
+
+ pgm.new_factor(difficult)
+ pgm.new_factor(intelligent)
+ pgm.new_factor(grade, intelligent, difficult)
+ pgm.new_factor(award, intelligent)
+ pgm.new_factor(letter, grade)
+
+ rvs = (difficult, intelligent, grade, award, letter)
+ csv = """
+ 0,1,2,0,1
+ 1,1,2,0,1
+ 1,1,2,0,1
+ 0,0,2,0,0
+ 0,1,1,1,0
+ 1,1,1,1,1
+ 1,1,0,0,0
+ 1,1,0,0,1
+ 1,0,0,0,0
+ """
+
+ dataset = hard_dataset_from_csv(rvs, csv.splitlines())
+
+ # Learn parameters values for `pgm` using the training data `dataset`.
+ # This updates the PGMs potential functions.
+ train_generative_bn(pgm, dataset)
+
+ show_pgm_factors(pgm)
+
+ print('Done.')
+
+
+ def show_pgm_factors(pgm: PGM) -> None:
+ for factor in pgm.factors:
+ potential_function = factor.function
+ print(f'Factor: {factor} {type(potential_function)}')
+ for instance, _, param_value in potential_function.keys_with_param:
+ print(f'Factor{instance} = {param_value}')
+ print()
+
+
+ if __name__ == '__main__':
+ main()
ck_demos/sampling/demo_wmc_direct_sampler.py CHANGED
@@ -2,7 +2,7 @@ import random

  from ck import example
  from ck.pgm import PGM
- from ck.pgm_compiler import factor_elimination
+ from ck.pgm_compiler import DEFAULT_PGM_COMPILER
  from ck.pgm_circuit import PGMCircuit
  from ck.pgm_circuit.wmc_program import WMCProgram
  from ck.probability.empirical_probability_space import EmpiricalProbabilitySpace
@@ -18,7 +18,7 @@ def main():

  pgm: PGM = example.Rain()

- pgm_cct: PGMCircuit = factor_elimination.compile_pgm(pgm)
+ pgm_cct: PGMCircuit = DEFAULT_PGM_COMPILER(pgm)
  wmc = WMCProgram(pgm_cct)
  sampler = wmc.sample_direct()

{compiled_knowledge-4.1.0a2.dist-info → compiled_knowledge-4.2.0a1.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: compiled-knowledge
- Version: 4.1.0a2
+ Version: 4.2.0a1
  Summary: A Python package for compiling and querying discrete probabilistic graphical models.
  Author-email: Barry Drake <barry@compiledknowledge.org>
  License-Expression: MIT
@@ -13,6 +13,7 @@ Description-Content-Type: text/markdown
  License-File: LICENSE.txt
  Requires-Dist: llvmlite
  Requires-Dist: numpy
+ Requires-Dist: scipy
  Dynamic: license-file

  Compiled Knowledge
{compiled_knowledge-4.1.0a2.dist-info → compiled_knowledge-4.2.0a1.dist-info}/RECORD CHANGED
@@ -1,13 +1,17 @@
  ck_demos/all_demos.py,sha256=tqnMFbW6t1F4ksErf6QYTz9XtvbfayWl35lD3Bjm47E,2468
  ck_demos/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ck_demos/dataset/demo_dataset_from_sampler.py,sha256=N2UDctHWePuUfJNWDnsd-UOSqeRfio6YQI21ZvyYhts,485
  ck_demos/dataset/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ck_demos/dataset/demo_dataset_builder.py,sha256=a9o-rw8PzpLq_5wtwjH0L15-eacbELlc7tfLrREJBqM,987
+ ck_demos/learning/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ck_demos/learning/demo_simple_learning.py,sha256=CZPzcsnTD8TK7nzGg3XUsx4exqggQXOT6UVwrV0ScF8,1483
+ ck_demos/learning/demo_bayesian_network_from_cross_tables.py,sha256=FmW5ylFVu7ONkKQVDCUGXXcOuNKdz3f1qbktOt2cX6Q,2591
  ck_demos/circuit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ck_demos/circuit/demo_derivatives.py,sha256=6VwnW_Dbm2MWQFfJ46UQQFecV56QdfGpL7srthw5Py0,1143
  ck_demos/circuit/demo_circuit_dump.py,sha256=85x7UJV6cg6XVYU-PPsuKQVTBw5WZBfkhi6Avo9XbOs,436
  ck_demos/sampling/demo_uniform_sampler.py,sha256=zY5Kz97r43b1YvFz_4xNAeXvSpd7Kc2l0geZhWrz2no,924
  ck_demos/sampling/check_sampler.py,sha256=9Xy7oS3KnlNzcbdIU3bLnWlQ1SNo6S9hEp3TWoSM6C8,2035
- ck_demos/sampling/demo_wmc_direct_sampler.py,sha256=USz7vynHOEYUQgu5dJY-dG_Z_zNEDAfoYJ3VtX6uFmk,1073
+ ck_demos/sampling/demo_wmc_direct_sampler.py,sha256=zLwygZ-LNZ_L47XM5czdhCDkj8m8dcq7eZyie-dtmiM,1065
  ck_demos/sampling/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ck_demos/sampling/demo_marginal_direct_sampler.py,sha256=nv4smqYl1VhpB6pkF4L_aqnpVgVMcv3FrSvUkJ0EJz0,1109
  ck_demos/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -54,26 +58,28 @@ ck_demos/pgm_inference/demo_inferencing_mpe_cancer.py,sha256=hS9U2kyqjFgJ8jnVBtT
  ck_demos/pgm_inference/demo_inferencing_wmc_and_mpe_sprinkler.py,sha256=-q4Z1Fzf7-BuwVFTFXdGRY-zUNrY-SAU7ooaov2o_lM,5128
  ck_demos/getting_started/simple_demo.py,sha256=hiYscNnfkEwHCQ3ymXAswAYO5jAKR7cseb36pjzuus8,650
  ck_demos/getting_started/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- compiled_knowledge-4.1.0a2.dist-info/RECORD,,
- compiled_knowledge-4.1.0a2.dist-info/WHEEL,sha256=oqGJCpG61FZJmvyZ3C_0aCv-2mdfcY9e3fXvyUNmWfM,136
- compiled_knowledge-4.1.0a2.dist-info/top_level.txt,sha256=Cf8DAfd2vcnLiA7HlxoduOzV0Q-8surE3kzX8P9qdks,12
- compiled_knowledge-4.1.0a2.dist-info/METADATA,sha256=j7AzqZmwYrOMFnvPL1kLGolIJn5YUVP-lehG4IPBZGA,1787
- compiled_knowledge-4.1.0a2.dist-info/licenses/LICENSE.txt,sha256=-LmkmqXKYojmS3zDxXAeTbsA82fnHA0KaRvpfIoEdjA,1068
- ck/pgm.py,sha256=PsB2DboRtuiOrnbYGbYNOB-R2k94iET2o02UalKFy3I,117611
+ compiled_knowledge-4.2.0a1.dist-info/RECORD,,
+ compiled_knowledge-4.2.0a1.dist-info/WHEEL,sha256=oqGJCpG61FZJmvyZ3C_0aCv-2mdfcY9e3fXvyUNmWfM,136
+ compiled_knowledge-4.2.0a1.dist-info/top_level.txt,sha256=Cf8DAfd2vcnLiA7HlxoduOzV0Q-8surE3kzX8P9qdks,12
+ compiled_knowledge-4.2.0a1.dist-info/METADATA,sha256=3YM6rMFGtHgoZ7Uj1QPmIUiSN0oznniDl6p7JEl0_Rk,1808
+ compiled_knowledge-4.2.0a1.dist-info/licenses/LICENSE.txt,sha256=-LmkmqXKYojmS3zDxXAeTbsA82fnHA0KaRvpfIoEdjA,1068
+ ck/pgm.py,sha256=EwKTWuYV9-0OfgJQfBw59MfGDLtxFe3wlgbYlkTqj1Y,117703
  ck/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ck/pgm_circuit/target_marginals_program.py,sha256=qWz9FkAFzt8YHLZJzPkpRnvDH76BXm-dcEWhoqCkrOw,3665
  ck/pgm_circuit/slot_map.py,sha256=pqN0t5ElmUjR7SzvzldQwnO-jjRIz1rNZHH1PzE-V88,822
  ck/pgm_circuit/mpe_program.py,sha256=uDOykbBIbvvDQtxXOgBj6gzoehq1AfaQzZIWW3rMZnY,9990
  ck/pgm_circuit/program_with_slotmap.py,sha256=31Rgk4WoY7KW09L3TGySf1teYnf-ItvICTYEC17zB1w,7808
  ck/pgm_circuit/__init__.py,sha256=FctIFEYdL1pwxFMMEEu5Rwgq3kjPar-vJTqAmgIqb-I,36
- ck/pgm_circuit/marginals_program.py,sha256=E-L-4Rc2YLs3ndXIfXpTxUYGEFJG1_BkaZVDBs9gcgQ,14434
- ck/pgm_circuit/wmc_program.py,sha256=Btq7jUot-PodWXrgDFaE6zhUtr6GPUNF217CVLTaB70,12376
+ ck/pgm_circuit/marginals_program.py,sha256=SOc31sxk_hNL0QgNQAbdYjVYRf0aOwsiHTh6CSyVsiM,14782
+ ck/pgm_circuit/wmc_program.py,sha256=v7DLS2oq34uW5v99fvtadk8CbRSu7gipLA--DxtGSYo,12724
  ck/pgm_circuit/pgm_circuit.py,sha256=XBXANPREwp5Cl8P0x5XuG9besOJV5DjVxtNkqyt2DK8,3287
  ck/pgm_circuit/support/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ck/pgm_circuit/support/compile_circuit.py,sha256=XJFzi-BdFNTsdozRv0EHBM8cJ0SUZpbQwuTWONUzGck,3125
- ck/probability/probability_space.py,sha256=TTNSe6z40hs94kLBR_YHNjjRvBGVI86tza-CU2FKd9M,25482
+ ck/probability/probability_space.py,sha256=fn_z3KWcRyBMF9XqoIE89Kij8-jpcmIjytGdnoNg2os,26125
+ ck/probability/cross_table_probability_space.py,sha256=exaAVxzpQkqTmGIQx6ui64p6QTcy66IRYi5eWz6DFiE,1944
  ck/probability/pgm_probability_space.py,sha256=9al9sZk2LGvnTITvxS8x_ntabHKhaliUW-6JUeAEEl4,1231
  ck/probability/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ck/probability/divergence.py,sha256=l9mhHmCJQWNtY6Xf67ZCBeW1nry0B7-Jec6Tb99DP08,8258
  ck/probability/empirical_probability_space.py,sha256=Lp7_N_uNYq-W_S5caUC5ub9sTqaL-Vn4hudF0WYXPdU,2088
  ck/example/survey.py,sha256=ubjM8EP7aQMQbx7XFMaXvSYBOPuUDHeyG6wZIlRDqD8,1565
  ck/example/pathfinder.py,sha256=rQckvasnbzBYYESxngE_xbhyXxoJlELeiYc6Ghh7iFk,2257125
@@ -107,20 +113,22 @@ ck/example/diamond_square.py,sha256=ic8adEomQHMFlGQ3gMYGEja0LxEla8KEQKhET0XpULs,
  ck/example/rain.py,sha256=kLTU_9f_-_yy0ymPnS9-cbFVT6fYyCanDgszk3vQOgc,1187
  ck/example/cancer.py,sha256=-FnLbfb9yxriLl97N5BDZ0VrDZ5UnOWlT-Ep_tzO6QI,1698
  ck/dataset/dataset_compute.py,sha256=Bdxjl4c_0OttHgVWx-C3WdOI-imgupUQnnQVzNesPCw,5705
- ck/dataset/cross_table.py,sha256=RvrSMv8WX5eadpk7ImEZB1QouRVSe-1uy_5o1W0j6R4,9475
+ ck/dataset/cross_table.py,sha256=-uBlzapzZ5SKB3Y2OdUs51syZZp4x9805NM3yfLJfk8,13014
  ck/dataset/__init__.py,sha256=QXCZWPHusMfXtl9bLPrIJP89ZnqWMz9KfdxScVrB3UQ,55
- ck/dataset/dataset_builder.py,sha256=2P4fktgGyZWRSCbITgJeDdW5BZ2tUEMSfxKdiX8sivo,18400
- ck/dataset/dataset.py,sha256=tkCIZpeHUNYBCY5xQugMVstfma_hYEol-rGO7TQ0StI,22152
- ck/dataset/dataset_from_crosstable.py,sha256=f-H9Q9G5HF6RRT1ltReuqg69HhnDcrKn8vJrAviyMkA,1278
+ ck/dataset/dataset_builder.py,sha256=ewsz6znW_GtBvwsw6k9uXHT8yh_u6zQI5PFBZ_ykXlM,18873
+ ck/dataset/dataset.py,sha256=iQGOqVrNll6QMPcRcV2phUbe0fCfpVmUVbcBIaqYx0s,25531
+ ck/dataset/dataset_from_crosstable.py,sha256=rOdDFfb_2rnUJT3iZrLbZkeQcRJa5EzFVBs0hSdE57U,2281
  ck/dataset/sampled_dataset.py,sha256=Vcz2WN6IKdmK8DsFeXLten7Id3Kc2epC6qs2ZW7mvWU,3261
  ck/dataset/dataset_from_csv.py,sha256=q4qjOsJFYAmw22xDaHcS6XC3nwqmkT-RoOaRNr_zJ8I,5802
- ck/dataset/cross_table_probabilities.py,sha256=exaAVxzpQkqTmGIQx6ui64p6QTcy66IRYi5eWz6DFiE,1944
- ck/learning/train_generative.py,sha256=_mXWcslgW1Tqfv1o0HhHDnU4CI7_KOUMdpxypQD3tQs,5551
+ ck/learning/model_from_cross_tables.py,sha256=227hBGF0hAmcObdI3wG1RUIPE-Y92wYp2gOcSAeUp44,10750
  ck/learning/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ck/circuit/_circuit_cy.c,sha256=fZ-5hiE1tToCL6vMtEttKoyq4We98Z8KTkea8mXsZPk,1704292
+ ck/learning/parameters.py,sha256=x5yP-zxkpm0HfBusOxK5vImUnYanUJeZUjxgOwKNVAc,4388
+ ck/learning/coalesce_cross_tables.py,sha256=pPBH4GcNHtmLfEpstyq3zFYyarYxyAduEEXpqaimLAM,13297
+ ck/learning/train_generative_bn.py,sha256=hwmhbg4RKh3JvDlG7xOJm1apScXJ1Mmfgu4nasM-cwQ,8019
+ ck/circuit/_circuit_cy.c,sha256=gYwPt5JgPL9uq2kjpqyN_IelnfxdG4dEmm2SqNQCbig,1704292
  ck/circuit/_circuit_cy.pyx,sha256=mER1HK5yyf4UAj9ibn7fUQNyXwoxwxp7PClULPhY9B4,26995
  ck/circuit/__init__.py,sha256=B1jwDE_Xb6hOQE8DecjaTVotOnDxJaT7jsvPfGDXqCU,401
- ck/circuit/_circuit_cy.cpython-313-darwin.so,sha256=toCgEMG85BfSrWlqyApuTsB22Srh5O7Y5sHkeZG73qs,334944
+ ck/circuit/_circuit_cy.cpython-313-darwin.so,sha256=acQxIR20sI46qIdPeeJBHy5t-9u3csHOE6DpWwKGPjk,334944
  ck/circuit/_circuit_cy.pxd,sha256=ZcW8xjw4oGQqD5gwz73GXc1H8NxpdAswFWzc2CUWWcA,1025
  ck/circuit/_circuit_py.py,sha256=hADjCFDC1LJKUdyiKZzNLFt7ZkUNJ0IYwEYRj594K4g,27495
  ck/circuit/tmp_const.py,sha256=q01bkIvTEg1l-qFcfl2B8NrSzKlqcWU7McNm4HKv7bU,2300
@@ -134,6 +142,7 @@ ck/sampling/uniform_sampler.py,sha256=XV7i0urWgsJ0nIQA6ONlO8GevsfRdw1dfZuqzRdbnB
  ck/sampling/sampler.py,sha256=LtMm9_kBlZeuIEdYr_DcO218f7OUSnciddiEaEE22Dc,2244
  ck/sampling/wmc_gibbs_sampler.py,sha256=t5pIxr3Kkz37hG0guxVUQWivUZ1T-4lT47yu8qxrZko,6414
  ck/sampling/forward_sampler.py,sha256=gHWEue69Z7EcrnHlVURJBhZh1t5RApMV0ioxJao3GkU,8137
+ ck/utils/map_dict.py,sha256=TSSh4CL1wZC9JBlwGWRGmFQ3hcpZwmZ0kpjI1gPZ-pQ,2458
  ck/utils/random_extras.py,sha256=l9CfQM6k-b6KGESJXw9zF--Hqp4yadw2IU9uSoklai0,1796
  ck/utils/map_set.py,sha256=T5E3j4Lz08vg8eviRBc-4F10xz1-CKIg6KiHVoGhdts,3681
  ck/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -154,17 +163,17 @@ ck/pgm_compiler/support/clusters.py,sha256=r1Z8b4IvXMfY5xeyg5AHoU3TxUI0yNDvh3Xkv
  ck/pgm_compiler/support/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ck/pgm_compiler/support/named_compiler_maker.py,sha256=Qz8a9gwY46Q3dtRCZEZ2czq5z52QroGVKN5UDcoXI3c,1377
  ck/pgm_compiler/support/circuit_table/__init__.py,sha256=1kWjAZR5Rj6PYNdbCEbuyE2VtIDQU4Qf-3HPFzBlezs,562
- ck/pgm_compiler/support/circuit_table/_circuit_table_cy.cpython-313-darwin.so,sha256=bKNXw2gSzKKo7z8QOq9TWSV7G5_a3HXhqeVchVTnSb4,164776
+ ck/pgm_compiler/support/circuit_table/_circuit_table_cy.cpython-313-darwin.so,sha256=4MAB4_3bIoWfKza5pbIxT9f3p_ARxrDeiJ_F8ggoQQk,164776
  ck/pgm_compiler/support/circuit_table/_circuit_table_cy.pyx,sha256=Fsjw8P9clKQioqlLyr1JirUK5oYkeotpDMy5sMo7Khk,11683
  ck/pgm_compiler/support/circuit_table/_circuit_table_py.py,sha256=OZJC-JGX3ovCSv7nJtNYq7735KZ2eb4TQOlZdZbhPmk,10983
- ck/pgm_compiler/support/circuit_table/_circuit_table_cy.c,sha256=nXMBaV_n1YmV0vtTLC9MhaSa_VPwLvFoIZGcZ35HmCM,714044
+ ck/pgm_compiler/support/circuit_table/_circuit_table_cy.c,sha256=jl_Vj-dyyNjLvg8Bwqjg8qjLVtmT5QIsdQ4GfMnRo5M,714044
  ck/pgm_compiler/ace/ace.py,sha256=An83dHxE_gQFcEs6H5qgm0PlNFnJSGGuvLJNC2H3hGU,10098
  ck/pgm_compiler/ace/__init__.py,sha256=5HWep-yL1Mr6z5VWEaIYpLumCdeso85J-l_-hQaVusM,96
  ck/program/raw_program.py,sha256=U7kLBCSLtP1CfG09RrzmGo7E3sZdNr7wr2V1qkTfVGc,4106
  ck/program/program_buffer.py,sha256=IHwAHTKIaUlhcbNFTuSxPWKyExIsOxxX6ffUn4KfheU,5485
  ck/program/__init__.py,sha256=Rifdxk-l6cCjXLpwc6Q0pVXNDsllAwaFlRqRx3cURho,107
  ck/program/program.py,sha256=ohsnE0CEy8O4q8uGB_YEjoJKAPhY1Mz_a08Z7fy7TLw,4047
- ck/circuit_compiler/llvm_compiler.py,sha256=SFhfrthrDuAYUjH_DYRD7FBU8eg2db5T4QGBGfoewnw,13635
+ ck/circuit_compiler/llvm_compiler.py,sha256=XaAPrMaR5Y0EQT7Zukpa5TFybdBXVLo8_A2cU2lzPtw,13656
  ck/circuit_compiler/circuit_compiler.py,sha256=Sl7FS42GXrDL6eG_WNKILcSQl7Wlccgs5Dd1l0EZMsU,1121
  ck/circuit_compiler/__init__.py,sha256=eRN6chBEt64PK5e6EFGyBNZBn6BXhXb6R3m12zPA1Qg,130
  ck/circuit_compiler/named_circuit_compilers.py,sha256=paKyG876tdG_bdSHJU6KW5HxQrutmV_T80GPpz8A65s,2227
@@ -173,15 +182,15 @@ ck/circuit_compiler/llvm_vm_compiler.py,sha256=rM_6F5st3k9X5K1_MwzKJwDhQo1794voo
  ck/circuit_compiler/cython_vm_compiler/cython_vm_compiler.py,sha256=GdtBkipud8vylXYArOJvZ-10U9L_PL0oJrkyrnFGH2Q,4345
  ck/circuit_compiler/cython_vm_compiler/__init__.py,sha256=ks0sISOJ-XHIHgHnESyFsheNWvcSJQkbsrj1wVlnzTE,48
  ck/circuit_compiler/cython_vm_compiler/_compiler.pyx,sha256=RssdkoAcB3Ahes8xisqFy0PQyOPmC3GLEC2xR-miQaE,12898
- ck/circuit_compiler/cython_vm_compiler/_compiler.c,sha256=k9AgBZE11aK2--Rn2f5s3ZLYygvfsYyLYGLMpdpaYG8,857789
- ck/circuit_compiler/cython_vm_compiler/_compiler.cpython-313-darwin.so,sha256=Ch0RR-iuWFe-dr_F4oxPvxlosgcgeEYO52CRHjviJww,163296
+ ck/circuit_compiler/cython_vm_compiler/_compiler.c,sha256=p-g6Nb_j5zrgZ46hTwAPjsvhgqNlIvKKWOS4q7KAIX4,857789
+ ck/circuit_compiler/cython_vm_compiler/_compiler.cpython-313-darwin.so,sha256=rjPoZhJ8nuxkI3Jiwl0zPppCHL13ep8xWyA0bDjshT4,163296
  ck/circuit_compiler/support/llvm_ir_function.py,sha256=sMLKfwz90YcsrVyxsuY0Ymo1ibFOcul4Qiwdake-VkI,8321
- ck/circuit_compiler/support/input_vars.py,sha256=EZrvyhD9XVtf5GuDBluFNWhAOVixP7-_ETxAHLTpBcs,4664
+ ck/circuit_compiler/support/input_vars.py,sha256=0g1I5GezT6Dt6ptJJgNFPTyHfRrpunTIkJOUqZhkP84,4673
  ck/circuit_compiler/support/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ck/circuit_compiler/support/circuit_analyser/_circuit_analyser_cy.pyx,sha256=a0fKmkwRNscJmy6qoO2AOqJYmHYptrQmkRSrDg3G-wg,3233
- ck/circuit_compiler/support/circuit_analyser/_circuit_analyser_cy.cpython-313-darwin.so,sha256=ei_jpYWXo73SSab_L4UEjASnzyAaoPxmEWIxrpoR2sU,104760
+ ck/circuit_compiler/support/circuit_analyser/_circuit_analyser_cy.cpython-313-darwin.so,sha256=ZuFi002oQbhtMgVk3UG9zw0Q0hXLZvGniH-A3V1F-hw,104760
  ck/circuit_compiler/support/circuit_analyser/__init__.py,sha256=WhNwfg7GHVeI4k_m7owPGWxX0MyZg_wtcp2MA07qbWg,523
- ck/circuit_compiler/support/circuit_analyser/_circuit_analyser_cy.c,sha256=8NyLKC9-sSg-g8WtOVwZHpkjvdhe6ZOnRVQ7CJwjWqw,438223
+ ck/circuit_compiler/support/circuit_analyser/_circuit_analyser_cy.c,sha256=OiC5DlYl76XoiEzX71ssoRLT-a3aEJsyQ82WyDEakVQ,438223
  ck/circuit_compiler/support/circuit_analyser/_circuit_analyser_py.py,sha256=CMdXV6Rot5CCoK1UsurQdGK0UOx_09B6V7mCc_6-gfI,2993
  ck/in_out/render_net.py,sha256=VePvN6aYWuzEkW-Hv-qGT9QneOvsnrBMmS_KYueuj2I,4970
  ck/in_out/render_bugs.py,sha256=c39KbaD4gEiauFsZq2KUhDEEa-3cuY5kuvz97pEWVpw,3272