ripple-down-rules 0.4.88__py3-none-any.whl → 0.5.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -258,12 +258,12 @@ class TemplateFileCreator:
         func_name = f"{prompt_for.value.lower()}_for_"
         case_name = case_query.name.replace(".", "_")
         if case_query.is_function:
-            # convert any CamelCase word into snake_case by adding _ before each capital letter
-            case_name = case_name.replace(f"_{case_query.attribute_name}", "")
-            func_name += case_name
-            attribute_types = TemplateFileCreator.get_core_attribute_types(case_query)
-            attribute_type_names = [t.__name__ for t in attribute_types]
-            func_name += f"_of_type_{'_or_'.join(attribute_type_names)}"
+            func_name += case_name.replace(f"_{case_query.attribute_name}", "")
+        else:
+            func_name += case_name
+            attribute_types = TemplateFileCreator.get_core_attribute_types(case_query)
+            attribute_type_names = [t.__name__ for t in attribute_types]
+            func_name += f"_of_type_{'_or_'.join(attribute_type_names)}"
         return str_to_snake_case(func_name)

     @cached_property
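For context, this hunk changes how the generated template's function name is built: function cases now append only the case name with the "_<attribute_name>" suffix stripped and get no type suffix, while attribute cases keep the case name and append the core attribute type names. The stand-alone sketch below mirrors that branch; `build_func_name`, the simplified `str_to_snake_case`, and the example values are illustrative assumptions, not the package's API.

```python
import re


def str_to_snake_case(name: str) -> str:
    """Simplified stand-in for the package's str_to_snake_case helper."""
    return re.sub(r"(?<=[a-z0-9])(?=[A-Z])", "_", name).lower()


def build_func_name(prompt_value: str, case_name: str, attribute_name: str,
                    is_function: bool, attribute_type_names: list) -> str:
    """Hypothetical mirror of the new naming branch shown in the hunk above."""
    func_name = f"{prompt_value.lower()}_for_"
    case_name = case_name.replace(".", "_")
    if is_function:
        # Function cases: drop the "_<attribute_name>" suffix, no type suffix.
        func_name += case_name.replace(f"_{attribute_name}", "")
    else:
        # Attribute cases: keep the case name and append the attribute type names.
        func_name += case_name
        func_name += f"_of_type_{'_or_'.join(attribute_type_names)}"
    return str_to_snake_case(func_name)


# Illustrative values only.
print(build_func_name("Conditions", "Robot.is_a_robot", "is_a_robot", True, []))
print(build_func_name("Conclusion", "Robot.species", "species", False, ["Species"]))
```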
@@ -178,7 +178,7 @@ def extract_function_source(file_path: str,
     functions_source: Dict[str, Union[str, List[str]]] = {}
     line_numbers = []
     for node in tree.body:
-        if isinstance(node, ast.FunctionDef) and node.name in function_names:
+        if isinstance(node, ast.FunctionDef) and (node.name in function_names or len(function_names) == 0):
            # Get the line numbers of the function
            lines = source.splitlines()
            func_lines = lines[node.lineno - 1:node.end_lineno]
@@ -186,9 +186,9 @@ def extract_function_source(file_path: str,
             func_lines = func_lines[1:]
             line_numbers.append((node.lineno, node.end_lineno))
             functions_source[node.name] = dedent("\n".join(func_lines)) if join_lines else func_lines
-            if len(functions_source) == len(function_names):
+            if len(functions_source) >= len(function_names):
                 break
-    if len(functions_source) != len(function_names):
+    if len(functions_source) < len(function_names):
         raise ValueError(f"Could not find all functions in {file_path}: {function_names} not found,"
                          f"functions not found: {set(function_names) - set(functions_source.keys())}")
     if return_line_numbers:
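These two hunks relax `extract_function_source`: an empty `function_names` list now matches every top-level function, and the completeness checks use `>=`/`<` so finding more functions than requested no longer raises. The self-contained sketch below illustrates only the relaxed matching condition; `collect_function_sources` and the sample source string are hypothetical, not the package's API.

```python
import ast
from textwrap import dedent
from typing import Dict, List


def collect_function_sources(source: str, function_names: List[str]) -> Dict[str, str]:
    """Illustrates the relaxed condition: an empty name list matches every top-level function."""
    tree = ast.parse(source)
    lines = source.splitlines()
    found: Dict[str, str] = {}
    for node in tree.body:
        # Same membership test as the patched code: an empty list means "match everything".
        if isinstance(node, ast.FunctionDef) and (node.name in function_names or len(function_names) == 0):
            found[node.name] = dedent("\n".join(lines[node.lineno - 1:node.end_lineno]))
    if len(found) < len(function_names):
        raise ValueError(f"functions not found: {set(function_names) - set(found)}")
    return found


sample = "def f():\n    return 1\n\ndef g():\n    return 2\n"
print(list(collect_function_sources(sample, [])))      # ['f', 'g'] -- empty list extracts all
print(list(collect_function_sources(sample, ["g"])))   # ['g']
```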
@@ -953,9 +953,6 @@ class SubclassJSONSerializer:
 
         raise ValueError("Unknown type {}".format(data["_type"]))
 
-    save = to_json_file
-    load = from_json_file
-
 
 def _pickle_thread(thread_obj) -> Any:
     """Return a plain object with user-defined attributes but no thread behavior."""
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ripple_down_rules
-Version: 0.4.88
+Version: 0.5.1
 Summary: Implements the various versions of Ripple Down Rules (RDR) for knowledge representation and reasoning.
 Author-email: Abdelrhman Bassiouny <abassiou@uni-bremen.de>
 License: GNU GENERAL PUBLIC LICENSE
@@ -693,6 +693,7 @@ Requires-Dist: colorama
 Requires-Dist: pygments
 Requires-Dist: sqlalchemy
 Requires-Dist: pandas
+Requires-Dist: pyparsing
 Provides-Extra: viz
 Requires-Dist: networkx>=3.1; extra == "viz"
 Requires-Dist: matplotlib>=3.7.5; extra == "viz"
@@ -735,7 +736,7 @@ and render the tree to a file:
 ```python
 from ripple_down_rules.datastructures.dataclasses import CaseQuery
 from ripple_down_rules.rdr import SingleClassRDR
-from ripple_down_rules.datasets import load_zoo_dataset
+from datasets import load_zoo_dataset
 from ripple_down_rules.utils import render_tree
 
 all_cases, targets = load_zoo_dataset()
@@ -0,0 +1,25 @@
+ripple_down_rules/__init__.py,sha256=eaqN_CQPn7hOrKpan5PYhltW0v7z_ySJ5iHY-x4gzZQ,99
+ripple_down_rules/experts.py,sha256=9Vc3vx0uhDPy3YlNjwKuWJLl_A-kubRPUU6bMvQhaAg,13237
+ripple_down_rules/failures.py,sha256=E6ajDUsw3Blom8eVLbA7d_Qnov2conhtZ0UmpQ9ZtSE,302
+ripple_down_rules/helpers.py,sha256=TvTJU0BA3dPcAyzvZFvAu7jZqsp8Lu0HAAwvuizlGjg,2018
+ripple_down_rules/rdr.py,sha256=E1OiiZClQyAfGjL64ID-MWYFO4-h8iUAX-Vm9qrOoeQ,48727
+ripple_down_rules/rdr_decorators.py,sha256=pYCKLgMKgQ6x_252WQtF2t4ZNjWPBxnaWtJ6TpGdcc0,7820
+ripple_down_rules/rules.py,sha256=TPNVMqW9T-_46BS4WemrspLg5uG8kP6tsPvWWBAzJxg,17515
+ripple_down_rules/start-code-server.sh,sha256=otClk7VmDgBOX2TS_cjws6K0UwvgAUJhoA0ugkPCLqQ,949
+ripple_down_rules/utils.py,sha256=uS38KcFceRMzT_470DCL1M0LzETdP5RLwE7cCmfo7eI,51086
+ripple_down_rules/datastructures/__init__.py,sha256=V2aNgf5C96Y5-IGghra3n9uiefpoIm_QdT7cc_C8cxQ,111
+ripple_down_rules/datastructures/callable_expression.py,sha256=3EucsD3jWzekhjyzL2y0dyUsucd-aqC9glmgPL0Ubb4,12425
+ripple_down_rules/datastructures/case.py,sha256=r8kjL9xP_wk84ThXusspgPMrAoed2bGQmKi54fzhmH8,15258
+ripple_down_rules/datastructures/dataclasses.py,sha256=PuD-7zWqWT2p4FnGvnihHvZlZKg9A1ctnFgVYf2cs-8,8554
+ripple_down_rules/datastructures/enums.py,sha256=ce7tqS0otfSTNAOwsnXlhsvIn4iW_Y_N3TNebF3YoZs,5700
+ripple_down_rules/user_interface/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ripple_down_rules/user_interface/gui.py,sha256=SB0gUhgReJ3yx-NEHRPMGVuNRLPRUwW8-qup-Kd4Cfo,27182
+ripple_down_rules/user_interface/ipython_custom_shell.py,sha256=24MIFwqnAhC6ofObEO6x5xRWRnyQmPpPmTvxbCKBrzM,6514
+ripple_down_rules/user_interface/object_diagram.py,sha256=tsB6iuLNEbHxp5lR2WjyejjWbnAX_nHF9xS8jNPOQVk,4548
+ripple_down_rules/user_interface/prompt.py,sha256=AkkltdDIaioN43lkRKDPKSjJcmdSSGZDMYz7AL7X9lE,8082
+ripple_down_rules/user_interface/template_file_creator.py,sha256=ycCbddy_BJP8d0Q2Sj21UzamhGtqGZuK_e73VTJqznY,13766
+ripple_down_rules-0.5.1.dist-info/licenses/LICENSE,sha256=ixuiBLtpoK3iv89l7ylKkg9rs2GzF9ukPH7ynZYzK5s,35148
+ripple_down_rules-0.5.1.dist-info/METADATA,sha256=BWKLHm71B9jCeEi-Ro14s4hBNEcZqKvLQJNe2pij22w,43313
+ripple_down_rules-0.5.1.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
+ripple_down_rules-0.5.1.dist-info/top_level.txt,sha256=VeoLhEhyK46M1OHwoPbCQLI1EifLjChqGzhQ6WEUqeM,18
+ripple_down_rules-0.5.1.dist-info/RECORD,,
@@ -1,222 +0,0 @@
-from __future__ import annotations
-
-import os
-import pickle
-from dataclasses import dataclass, field
-
-import sqlalchemy
-from sqlalchemy import ForeignKey
-from sqlalchemy.orm import MappedAsDataclass, Mapped, mapped_column, relationship, MappedColumn
-from typing_extensions import Tuple, List, Set, Optional, Self
-from ucimlrepo import fetch_ucirepo
-
-from .datastructures.case import Case, create_cases_from_dataframe
-from .datastructures.enums import Category
-from .rdr_decorators import RDRDecorator
-
-
-def load_cached_dataset(cache_file):
-    """Loads the dataset from cache if it exists."""
-    dataset = {}
-    if '.pkl' not in cache_file:
-        cache_file += ".pkl"
-    for key in ["features", "targets", "ids"]:
-        part_file = cache_file.replace(".pkl", f"_{key}.pkl")
-        if not os.path.exists(part_file):
-            return None
-        with open(part_file, "rb") as f:
-            dataset[key] = pickle.load(f)
-    return dataset
-
-
-def save_dataset_to_cache(dataset, cache_file):
-    """Saves only essential parts of the dataset to cache."""
-    dataset_to_cache = {
-        "features": dataset.data.features,
-        "targets": dataset.data.targets,
-        "ids": dataset.data.ids,
-    }
-
-    for key, value in dataset_to_cache.items():
-        with open(cache_file.replace(".pkl", f"_{key}.pkl"), "wb") as f:
-            pickle.dump(dataset_to_cache[key], f)
-    print("Dataset cached successfully.")
-
-
-def get_dataset(dataset_id, cache_file: Optional[str] = None):
-    """Fetches dataset from cache or downloads it if not available."""
-    if cache_file is not None:
-        if not cache_file.endswith(".pkl"):
-            cache_file += ".pkl"
-    dataset = load_cached_dataset(cache_file) if cache_file else None
-    if dataset is None:
-        print("Downloading dataset...")
-        dataset = fetch_ucirepo(id=dataset_id)
-
-        # Check if dataset is valid before caching
-        if dataset is None or not hasattr(dataset, "data"):
-            print("Error: Failed to fetch dataset.")
-            return None
-
-        if cache_file:
-            save_dataset_to_cache(dataset, cache_file)
-
-        dataset = {
-            "features": dataset.data.features,
-            "targets": dataset.data.targets,
-            "ids": dataset.data.ids,
-        }
-
-    return dataset
-
-
-def load_zoo_dataset(cache_file: Optional[str] = None) -> Tuple[List[Case], List[Species]]:
-    """
-    Load the zoo dataset.
-
-    :param cache_file: the cache file to store the dataset or load it from.
-    :return: all cases and targets.
-    """
-    # fetch dataset
-    zoo = get_dataset(111, cache_file)
-
-    # data (as pandas dataframes)
-    X = zoo['features']
-    y = zoo['targets']
-    # get ids as list of strings
-    ids = zoo['ids'].values.flatten()
-    all_cases = create_cases_from_dataframe(X, name="Animal")
-
-    category_names = ["mammal", "bird", "reptile", "fish", "amphibian", "insect", "molusc"]
-    category_id_to_name = {i + 1: name for i, name in enumerate(category_names)}
-    # targets = [getattr(SpeciesCol, category_id_to_name[i]) for i in y.values.flatten()]
-    targets = [Species.from_str(category_id_to_name[i]) for i in y.values.flatten()]
-    return all_cases, targets
-
-
-class Species(Category):
-    mammal = "mammal"
-    bird = "bird"
-    reptile = "reptile"
-    fish = "fish"
-    amphibian = "amphibian"
-    insect = "insect"
-    molusc = "molusc"
-
-
-class Habitat(Category):
-    """
-    A habitat category is a category that represents the habitat of an animal.
-    """
-    land = "land"
-    water = "water"
-    air = "air"
-
-
-class PhysicalObject:
-    """
-    A physical object is an object that can be contained in a container.
-    """
-    _rdr_json_dir: str = os.path.join(os.path.dirname(__file__), "../../test/test_results")
-    """
-    The directory where the RDR serialized JSON files are stored.
-    """
-    _rdr_python_dir: str = os.path.join(os.path.dirname(__file__), "../../test/test_generated_rdrs")
-    """
-    The directory where the RDR generated Python files are stored.
-    """
-    _is_a_robot_rdr: RDRDecorator = RDRDecorator(_rdr_json_dir, (bool,), True,
-                                                 python_dir=_rdr_python_dir)
-    """
-    The RDR decorator that is used to determine if the object is a robot or not.
-    """
-    _select_parts_rdr: RDRDecorator = RDRDecorator(_rdr_json_dir, (Self,), False,
-                                                   python_dir=_rdr_python_dir)
-    """
-    The RDR decorator that is used to determine if the object is a robot or not.
-    """
-
-    def __init__(self, name: str, contained_objects: Optional[List[PhysicalObject]] = None):
-        self.name: str = name
-        self._contained_objects: List[PhysicalObject] = contained_objects or []
-
-    @property
-    def contained_objects(self) -> List[PhysicalObject]:
-        return self._contained_objects
-
-    @contained_objects.setter
-    def contained_objects(self, value: List[PhysicalObject]):
-        self._contained_objects = value
-
-    @_is_a_robot_rdr.decorator
-    def is_a_robot(self) -> bool:
-        pass
-
-    @_select_parts_rdr.decorator
-    def select_objects_that_are_parts_of_robot(self, objects: List[PhysicalObject], robot: Robot) -> List[PhysicalObject]:
-        pass
-
-    def __str__(self):
-        return self.name
-
-    def __repr__(self):
-        return self.name
-
-
-class Part(PhysicalObject):
-    ...
-
-
-class Robot(PhysicalObject):
-
-    def __init__(self, name: str, parts: Optional[List[Part]] = None):
-        super().__init__(name)
-        self.parts: List[Part] = parts if parts else []
-
-
-class Base(sqlalchemy.orm.DeclarativeBase):
-    pass
-
-
-class HabitatTable(MappedAsDataclass, Base):
-    __tablename__ = "Habitat"
-
-    id: Mapped[int] = mapped_column(init=False, primary_key=True, autoincrement=True)
-    habitat: Mapped[Habitat]
-    animal_id: MappedColumn = mapped_column(ForeignKey("Animal.id"), init=False)
-
-    def __hash__(self):
-        return hash(self.habitat)
-
-    def __str__(self):
-        return f"{HabitatTable.__name__}({Habitat.__name__}.{self.habitat.name})"
-
-    def __repr__(self):
-        return self.__str__()
-
-
-class MappedAnimal(MappedAsDataclass, Base):
-    __tablename__ = "Animal"
-
-    id: Mapped[int] = mapped_column(init=False, primary_key=True, autoincrement=True)
-    name: Mapped[str]
-    hair: Mapped[bool]
-    feathers: Mapped[bool]
-    eggs: Mapped[bool]
-    milk: Mapped[bool]
-    airborne: Mapped[bool]
-    aquatic: Mapped[bool]
-    predator: Mapped[bool]
-    toothed: Mapped[bool]
-    backbone: Mapped[bool]
-    breathes: Mapped[bool]
-    venomous: Mapped[bool]
-    fins: Mapped[bool]
-    legs: Mapped[int]
-    tail: Mapped[bool]
-    domestic: Mapped[bool]
-    catsize: Mapped[bool]
-    species: Mapped[Species] = mapped_column(nullable=True)
-
-    habitats: Mapped[Set[HabitatTable]] = relationship(default_factory=set)
-
@@ -1,26 +0,0 @@
-ripple_down_rules/__init__.py,sha256=gvXUS_xmUCsWcUwVy5Sd8tyjdLhlPGbjfDrfDImrt7o,100
-ripple_down_rules/datasets.py,sha256=fJbZ7V-UUYTu5XVVpFinTbuzN3YePCnUB01L3AyZVM8,6837
-ripple_down_rules/experts.py,sha256=RWDR-xxbeFIrUQiMYLEDr_PLQFdpPZ-hOXo4dpeiUpI,6630
-ripple_down_rules/failures.py,sha256=E6ajDUsw3Blom8eVLbA7d_Qnov2conhtZ0UmpQ9ZtSE,302
-ripple_down_rules/helpers.py,sha256=TvTJU0BA3dPcAyzvZFvAu7jZqsp8Lu0HAAwvuizlGjg,2018
-ripple_down_rules/rdr.py,sha256=a7sSxvJewzG5FZvbUW_Ss7VVYQtBnH-H--hni8-pWC4,45528
-ripple_down_rules/rdr_decorators.py,sha256=VdmE0JrE8j89b6Af1R1tLZiKfy3h1VCvhAUefN_FLLQ,6753
-ripple_down_rules/rules.py,sha256=7NB8qWW7XEB45tmJRYsKJqBG8DN3v02fzAFYmOkX8ow,17458
-ripple_down_rules/start-code-server.sh,sha256=otClk7VmDgBOX2TS_cjws6K0UwvgAUJhoA0ugkPCLqQ,949
-ripple_down_rules/utils.py,sha256=t_yutgZvrOOGb6Wa-uAuoTafLicwovSFRiUa746ALOw,51108
-ripple_down_rules/datastructures/__init__.py,sha256=V2aNgf5C96Y5-IGghra3n9uiefpoIm_QdT7cc_C8cxQ,111
-ripple_down_rules/datastructures/callable_expression.py,sha256=jA7424_mWPbOoPICW3eLMX0-ypxnsW6gOqxrJ7JpDbE,11610
-ripple_down_rules/datastructures/case.py,sha256=r8kjL9xP_wk84ThXusspgPMrAoed2bGQmKi54fzhmH8,15258
-ripple_down_rules/datastructures/dataclasses.py,sha256=GWnUF4h4zfNHSsyBIz3L9y8sLkrXRv0FK_OxzzLc8L8,8183
-ripple_down_rules/datastructures/enums.py,sha256=ce7tqS0otfSTNAOwsnXlhsvIn4iW_Y_N3TNebF3YoZs,5700
-ripple_down_rules/user_interface/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ripple_down_rules/user_interface/gui.py,sha256=SB0gUhgReJ3yx-NEHRPMGVuNRLPRUwW8-qup-Kd4Cfo,27182
-ripple_down_rules/user_interface/ipython_custom_shell.py,sha256=24MIFwqnAhC6ofObEO6x5xRWRnyQmPpPmTvxbCKBrzM,6514
-ripple_down_rules/user_interface/object_diagram.py,sha256=tsB6iuLNEbHxp5lR2WjyejjWbnAX_nHF9xS8jNPOQVk,4548
-ripple_down_rules/user_interface/prompt.py,sha256=AkkltdDIaioN43lkRKDPKSjJcmdSSGZDMYz7AL7X9lE,8082
-ripple_down_rules/user_interface/template_file_creator.py,sha256=J_bBOJltc1fsrIYeHdrSUA_jep2DhDbTK5NYRbL6QyY,13831
-ripple_down_rules-0.4.88.dist-info/licenses/LICENSE,sha256=ixuiBLtpoK3iv89l7ylKkg9rs2GzF9ukPH7ynZYzK5s,35148
-ripple_down_rules-0.4.88.dist-info/METADATA,sha256=ytWRoIfcAHeBfJMqT1KtQJPsAEGDqXZZegjDaq6YcuM,43307
-ripple_down_rules-0.4.88.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
-ripple_down_rules-0.4.88.dist-info/top_level.txt,sha256=VeoLhEhyK46M1OHwoPbCQLI1EifLjChqGzhQ6WEUqeM,18
-ripple_down_rules-0.4.88.dist-info/RECORD,,