ripple-down-rules 0.0.0 (ripple_down_rules-0.0.0-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ripple_down_rules/__init__.py +0 -0
- ripple_down_rules/datasets.py +148 -0
- ripple_down_rules/datastructures/__init__.py +4 -0
- ripple_down_rules/datastructures/callable_expression.py +237 -0
- ripple_down_rules/datastructures/dataclasses.py +76 -0
- ripple_down_rules/datastructures/enums.py +173 -0
- ripple_down_rules/datastructures/generated/__init__.py +0 -0
- ripple_down_rules/datastructures/generated/column/__init__.py +0 -0
- ripple_down_rules/datastructures/generated/row/__init__.py +0 -0
- ripple_down_rules/datastructures/table.py +544 -0
- ripple_down_rules/experts.py +281 -0
- ripple_down_rules/failures.py +10 -0
- ripple_down_rules/prompt.py +101 -0
- ripple_down_rules/rdr.py +687 -0
- ripple_down_rules/rules.py +260 -0
- ripple_down_rules/utils.py +463 -0
- ripple_down_rules-0.0.0.dist-info/METADATA +54 -0
- ripple_down_rules-0.0.0.dist-info/RECORD +20 -0
- ripple_down_rules-0.0.0.dist-info/WHEEL +5 -0
- ripple_down_rules-0.0.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,463 @@ ripple_down_rules/utils.py
from __future__ import annotations

import logging
from abc import abstractmethod
from collections import UserDict
from copy import deepcopy
from dataclasses import dataclass

import matplotlib
import networkx as nx
from anytree import Node, RenderTree
from anytree.exporter import DotExporter
from matplotlib import pyplot as plt
from sqlalchemy import MetaData, inspect
from sqlalchemy.orm import Mapped, registry, class_mapper, DeclarativeBase as SQLTable, Session
from tabulate import tabulate
from typing_extensions import Callable, Set, Any, Type, Dict, TYPE_CHECKING, get_type_hints, \
    get_origin, get_args, Tuple, Optional, List, Union, Self

if TYPE_CHECKING:
    from .datastructures import Case

matplotlib.use("Qt5Agg")  # or another backend such as "TkAgg", depending on availability


def get_full_class_name(cls):
    """
    Returns the full name of a class, including the module name.
    Copied from: https://github.com/tomsch420/random-events/blob/master/src/random_events/utils.py#L6C1-L21C101

    :param cls: The class.
    :return: The full name of the class
    """
    return cls.__module__ + "." + cls.__name__


def recursive_subclasses(cls):
    """
    Copied from: https://github.com/tomsch420/random-events/blob/master/src/random_events/utils.py#L6C1-L21C101
    :param cls: The class.
    :return: A list of the class's subclasses.
    """
    return cls.__subclasses__() + [g for s in cls.__subclasses__() for g in recursive_subclasses(s)]


class SubclassJSONSerializer:
    """
    Copied from: https://github.com/tomsch420/random-events/blob/master/src/random_events/utils.py#L6C1-L21C101
    Class for automatic (de)serialization of subclasses.
    Classes that inherit from this class can be serialized and deserialized automatically by calling this class's
    'from_json' method.
    """

    def to_json(self) -> Dict[str, Any]:
        return {"_type": get_full_class_name(self.__class__)}

    @classmethod
    @abstractmethod
    def _from_json(cls, data: Dict[str, Any]) -> Self:
        """
        Create a variable from a json dict.
        This method is called from the from_json method after the correct subclass is determined and should be
        overwritten by the respective subclass.

        :param data: The json dict
        :return: The deserialized object
        """
        raise NotImplementedError()

    @classmethod
    def from_json(cls, data: Dict[str, Any]) -> Self:
        """
        Create the correct instance of the subclass from a json dict.

        :param data: The json dict
        :return: The correct instance of the subclass
        """
        for subclass in recursive_subclasses(SubclassJSONSerializer):
            if get_full_class_name(subclass) == data["_type"]:
                return subclass._from_json(data)

        raise ValueError("Unknown type {}".format(data["_type"]))
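For orientation, a minimal sketch of how the `SubclassJSONSerializer` dispatch above is meant to be used; the `Threshold` class is invented for this illustration and is not part of the package:

```python
from typing_extensions import Any, Dict, Self

from ripple_down_rules.utils import SubclassJSONSerializer


class Threshold(SubclassJSONSerializer):  # hypothetical subclass
    def __init__(self, value: float):
        self.value = value

    def to_json(self) -> Dict[str, Any]:
        # keep the "_type" entry written by the base class and add the subclass's own fields
        return {**super().to_json(), "value": self.value}

    @classmethod
    def _from_json(cls, data: Dict[str, Any]) -> Self:
        return cls(data["value"])


data = Threshold(0.5).to_json()                     # {"_type": "...Threshold", "value": 0.5}
restored = SubclassJSONSerializer.from_json(data)   # dispatches on the "_type" entry
assert isinstance(restored, Threshold) and restored.value == 0.5
```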
def copy_case(case: Union[Case, SQLTable]) -> Union[Case, SQLTable]:
    """
    Copy a case.

    :param case: The case to copy.
    :return: The copied case.
    """
    if isinstance(case, SQLTable):
        return copy_orm_instance_with_relationships(case)
    else:
        return deepcopy(case)


def copy_orm_instance(instance: SQLTable) -> SQLTable:
    """
    Copy an ORM instance by expunging it from the session then deep copying it and adding it back to the session. This
    is useful when you want to copy an instance and make changes to it without affecting the original instance.

    :param instance: The instance to copy.
    :return: The copied instance.
    """
    session: Session = inspect(instance).session
    session.expunge(instance)
    new_instance = deepcopy(instance)
    session.add(instance)
    return new_instance


def copy_orm_instance_with_relationships(instance: SQLTable) -> SQLTable:
    """
    Copy an ORM instance with its relationships (i.e. its foreign keys).

    :param instance: The instance to copy.
    :return: The copied instance.
    """
    instance_cp = copy_orm_instance(instance)
    for rel in class_mapper(instance.__class__).relationships:
        related_obj = getattr(instance, rel.key)
        if related_obj is not None:
            setattr(instance_cp, rel.key, related_obj)
    return instance_cp


def get_value_type_from_type_hint(attr_name: str, obj: Any) -> Type:
    """
    Get the value type from the type hint of an object attribute.

    :param attr_name: The name of the attribute.
    :param obj: The object to get the attributes from.
    """
    hint, origin, args = get_hint_for_attribute(attr_name, obj)
    if not origin and not hint:
        if hasattr(obj, attr_name):
            attr_value = getattr(obj, attr_name)
            if attr_value is not None:
                return type(attr_value)
        raise ValueError(f"Couldn't get type for Attribute {attr_name}, please provide a type hint")
    if origin in [list, set, tuple, type, dict]:
        attr_value_type = args[0]
    elif hint:
        attr_value_type = hint
    else:
        raise ValueError(f"Attribute {attr_name} has unsupported type {hint}.")
    return attr_value_type


def get_hint_for_attribute(attr_name: str, obj: Any) -> Tuple[Optional[Any], Optional[Any], Tuple[Any]]:
    """
    Get the type hint for an attribute of an object.

    :param attr_name: The name of the attribute.
    :param obj: The object to get the attribute from.
    :return: The type hint of the attribute.
    """
    if attr_name is None or not hasattr(obj.__class__, attr_name):
        return None, None, ()
    class_attr = getattr(obj.__class__, attr_name)
    if isinstance(class_attr, property):
        if not class_attr.fget:
            raise ValueError(f"Attribute {attr_name} has no getter.")
        hint = get_type_hints(class_attr.fget)['return']
    else:
        try:
            hint = get_type_hints(obj.__class__)[attr_name]
        except KeyError:
            hint = type(class_attr)
    origin = get_origin(hint)
    args = get_args(hint)
    if origin is Mapped:
        return args[0], get_origin(args[0]), get_args(args[0])
    else:
        return hint, origin, args
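A rough illustration of the hint/origin/args triple these helpers return; the `Annotated` values are approximate reprs, and the `Animal` class is hypothetical, used only for this sketch:

```python
from typing_extensions import List

from ripple_down_rules.utils import get_hint_for_attribute, get_value_type_from_type_hint


class Animal:  # hypothetical class; annotated class attributes exist only for illustration
    legs: int = 0
    names: List[str] = []


a = Animal()
print(get_hint_for_attribute("legs", a))           # (int, None, ())
print(get_hint_for_attribute("names", a))          # roughly (List[str], list, (str,))
print(get_value_type_from_type_hint("legs", a))    # int
print(get_value_type_from_type_hint("names", a))   # str, the element type of the container
```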
def table_rows_as_str(row_dict: Dict[str, Any], columns_per_row: int = 9):
    """
    Format a table row as a string.

    :param row_dict: The row to format.
    :param columns_per_row: The maximum number of columns per row.
    """
    all_items = list(row_dict.items())
    # split the items into chunks of at most columns_per_row columns each
    all_items = [all_items[i:i + columns_per_row] for i in range(0, len(all_items), columns_per_row)]
    keys = [list(map(lambda i: i[0], row)) for row in all_items]
    values = [list(map(lambda i: i[1], row)) for row in all_items]
    all_table_rows = []
    for row_keys, row_values in zip(keys, values):
        table = tabulate([row_values], headers=row_keys, tablefmt='plain')
        all_table_rows.append(table)
    return "\n".join(all_table_rows)


def row_to_dict(obj):
    return {
        col.name: getattr(obj, col.name)
        for col in obj.__table__.columns
        if not col.primary_key and not col.foreign_keys
    }
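A small, hypothetical example of the string `table_rows_as_str` produces; the column names are modelled on the zoo dataset mentioned in the README, but any dict works:

```python
from ripple_down_rules.utils import table_rows_as_str

row = {"name": "aardvark", "hair": 1, "feathers": 0, "eggs": 0, "milk": 1,
       "airborne": 0, "aquatic": 0, "predator": 1, "toothed": 1, "backbone": 1}
# With the default columns_per_row=9 the ten columns are wrapped into two
# tabulate blocks, each a header line followed by its value line.
print(table_rows_as_str(row))
```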
def get_attribute_name(obj: Any, attribute: Optional[Any] = None, attribute_type: Optional[Type] = None,
                       possible_value: Optional[Any] = None) -> Optional[str]:
    """
    Get the name of an attribute of an object. The attribute can be given as a value, a type, or a possible value,
    and this method will try to find the attribute name using the given information.

    :param obj: The object to get the attribute name from.
    :param attribute: The attribute to get the name of.
    :param attribute_type: The type of the attribute to get the name of.
    :param possible_value: A possible value of the attribute to get the name of.
    :return: The name of the attribute.
    """
    attribute_name: Optional[str] = None
    if attribute_name is None and attribute is not None:
        attribute_name = get_attribute_name_from_value(obj, attribute)
    if attribute_name is None and attribute_type is not None:
        attribute_name = get_attribute_by_type(obj, attribute_type)[0]
    if attribute_name is None and possible_value is not None:
        attribute_name = get_attribute_by_type(obj, type(possible_value))[0]
    return attribute_name


def get_attribute_by_type(obj: Any, prop_type: Type) -> Tuple[Optional[str], Optional[Any]]:
    """
    Get a property from an object by type.

    :param obj: The object to get the property from.
    :param prop_type: The type of the property.
    """
    for name in dir(obj):
        if name.startswith("_") or callable(getattr(obj, name)):
            continue
        if isinstance(getattr(obj, name), (MetaData, registry)):
            continue
        prop_value = getattr(obj, name)
        if isinstance(prop_value, prop_type):
            return name, prop_value
        if hasattr(prop_value, "__iter__") and not isinstance(prop_value, str):
            if len(prop_value) > 0 and any(isinstance(v, prop_type) for v in prop_value):
                return name, prop_value
            else:
                # get args of type hint
                hint, origin, args = get_hint_for_attribute(name, obj)
                if origin in [list, set, tuple, dict, List, Set, Tuple, Dict]:
                    if prop_type is args[0]:
                        return name, prop_value
        else:
            # get the type hint of the attribute
            hint, origin, args = get_hint_for_attribute(name, obj)
            if hint is prop_type:
                return name, prop_value
            elif origin in [list, set, tuple, dict, List, Set, Tuple, Dict]:
                if prop_type is args[0]:
                    return name, prop_value
    return None, None


def get_attribute_name_from_value(obj: Any, attribute_value: Any) -> Optional[str]:
    """
    Get the name of an attribute from an object.

    :param obj: The object to get the attribute name from.
    :param attribute_value: The attribute value to get the name of.
    """
    for name in dir(obj):
        if name.startswith("_") or callable(getattr(obj, name)):
            continue
        prop_value = getattr(obj, name)
        if prop_value is attribute_value:
            return name
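A minimal sketch of the lookup helpers above on a made-up dataclass (the `Species` class is not part of the package):

```python
from dataclasses import dataclass

from ripple_down_rules.utils import get_attribute_by_type, get_attribute_name


@dataclass
class Species:  # hypothetical class used only for this example
    name: str
    legs: int


cat = Species(name="cat", legs=4)
print(get_attribute_by_type(cat, int))              # ('legs', 4)
print(get_attribute_name(cat, attribute_type=int))  # 'legs'
print(get_attribute_name(cat, possible_value=2))    # 'legs', matched through the value's type
```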
def get_attribute_values_transitively(obj: Any, attribute: Any) -> Any:
    """
    Get an attribute from a python object; if the object is iterable, get the attribute values from all of its
    elements and collect them into a set.

    :param obj: The object to get the sub attribute from.
    :param attribute: The attribute to get.
    """
    if hasattr(obj, "__iter__") and not isinstance(obj, str):
        if isinstance(obj, (dict, UserDict)):
            all_values = [get_attribute_values_transitively(v, attribute) for v in obj.values()
                          if not isinstance(v, (str, type)) and hasattr(v, attribute)]
        else:
            all_values = [get_attribute_values_transitively(a, attribute) for a in obj
                          if not isinstance(a, (str, type)) and hasattr(a, attribute)]
        if can_be_a_set(all_values):
            return set().union(*all_values)
        else:
            return set(all_values)
    return getattr(obj, attribute)


def can_be_a_set(value: Any) -> bool:
    """
    Check if a value can be a set.

    :param value: The value to check.
    """
    if hasattr(value, "__iter__") and not isinstance(value, str):
        if len(value) > 0 and any(hasattr(v, "__iter__") and not isinstance(v, str) for v in value):
            return False
        else:
            return True
    else:
        return False


def get_all_subclasses(cls: Type) -> Dict[str, Type]:
    """
    Get all subclasses of a class recursively.

    :param cls: The class to get the subclasses of.
    :return: A dictionary of all subclasses.
    """
    all_subclasses: Dict[str, Type] = {}
    for sub_cls in cls.__subclasses__():
        all_subclasses[sub_cls.__name__.lower()] = sub_cls
        all_subclasses.update(get_all_subclasses(sub_cls))
    return all_subclasses


def make_set(value: Any) -> Set:
    """
    Make a set from a value.

    :param value: The value to make a set from.
    """
    if hasattr(value, "__iter__") and not isinstance(value, (str, type)):
        return set(value)
    return {value}


def make_list(value: Any) -> List:
    """
    Make a list from a value.

    :param value: The value to make a list from.
    """
    if hasattr(value, "__iter__") and not isinstance(value, (str, type)):
        return list(value)
    return [value]


def make_value_or_raise_error(value: Any) -> Any:
    """
    Return the value, or raise an error if the value is not a single value.

    :param value: The value to check.
    """
    if hasattr(value, "__iter__") and not isinstance(value, str):
        if hasattr(value, "__len__") and len(value) == 1:
            return list(value)[0]
        else:
            raise ValueError(f"Expected a single value, got {value}")
    return value
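A few quick usages of the small coercion helpers above, as a self-contained sketch:

```python
from ripple_down_rules.utils import make_list, make_set, make_value_or_raise_error

assert make_set([1, 2, 2]) == {1, 2}
assert make_set("cat") == {"cat"}           # strings count as single values, not iterables
assert make_list(3) == [3]
assert make_value_or_raise_error({7}) == 7  # a one-element iterable is unwrapped
try:
    make_value_or_raise_error([1, 2])       # more than one element -> ValueError
except ValueError:
    pass
```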
def tree_to_graph(root_node: Node) -> nx.DiGraph:
    """
    Convert anytree to a networkx graph.

    :param root_node: The root node of the tree.
    :return: A networkx graph.
    """
    graph = nx.DiGraph()
    unique_node_names = get_unique_node_names_func(root_node)

    def add_edges(node):
        if unique_node_names(node) not in graph.nodes:
            graph.add_node(unique_node_names(node))
        for child in node.children:
            if unique_node_names(child) not in graph.nodes:
                graph.add_node(unique_node_names(child))
            graph.add_edge(unique_node_names(node), unique_node_names(child), weight=child.weight)
            add_edges(child)

    add_edges(root_node)
    return graph


def get_unique_node_names_func(root_node) -> Callable[[Node], str]:
    nodes = [root_node]

    def get_all_nodes(node):
        for c in node.children:
            nodes.append(c)
            get_all_nodes(c)

    get_all_nodes(root_node)

    def nodenamefunc(node: Node):
        """
        Set the node name for the dot exporter.
        """
        similar_nodes = [n for n in nodes if n.name == node.name]
        node_idx = similar_nodes.index(node)
        return node.name if node_idx == 0 else f"{node.name}_{node_idx}"

    return nodenamefunc


def edge_attr_setter(parent, child):
    """
    Set the edge attributes for the dot exporter.
    """
    if child and hasattr(child, "weight") and child.weight:
        return f'style="bold", label=" {child.weight}"'
    return ""


def render_tree(root: Node, use_dot_exporter: bool = False,
                filename: str = "scrdr"):
    """
    Render the tree using the console and optionally export it to a dot file.

    :param root: The root node of the tree.
    :param use_dot_exporter: Whether to export the tree to a dot file.
    :param filename: The name of the file to export the tree to.
    """
    if not root:
        logging.warning("No rules to render")
        return
    for pre, _, node in RenderTree(root):
        print(f"{pre}{node.weight if hasattr(node, 'weight') and node.weight else ''} {node.__str__()}")
    if use_dot_exporter:
        unique_node_names = get_unique_node_names_func(root)

        de = DotExporter(root,
                         nodenamefunc=unique_node_names,
                         edgeattrfunc=edge_attr_setter
                         )
        de.to_dotfile(f"{filename}{'.dot'}")
        de.to_picture(f"{filename}{'.png'}")


def draw_tree(root: Node, fig: plt.Figure):
    """
    Draw the tree using matplotlib and networkx.
    """
    if root is None:
        return
    fig.clf()
    graph = tree_to_graph(root)
    fig_sz_x = 13
    fig_sz_y = 10
    fig.set_size_inches(fig_sz_x, fig_sz_y)
    pos = nx.drawing.nx_agraph.graphviz_layout(graph, prog="dot")
    # scale down pos
    max_pos_x = max([v[0] for v in pos.values()])
    max_pos_y = max([v[1] for v in pos.values()])
    pos = {k: (v[0] * fig_sz_x / max_pos_x, v[1] * fig_sz_y / max_pos_y) for k, v in pos.items()}
    nx.draw(graph, pos, with_labels=True, node_color="lightblue", edge_color="gray", node_size=1000,
            ax=fig.gca(), node_shape="o", font_size=8)
    nx.draw_networkx_edge_labels(graph, pos, edge_labels=nx.get_edge_attributes(graph, 'weight'),
                                 ax=fig.gca(), rotate=False, clip_on=False)
    plt.pause(0.1)
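A rough usage sketch for the tree helpers above. The node names and weights are made up; `tree_to_graph` and the DOT export read a `weight` attribute from child nodes, and the PNG export needs Graphviz installed, per the README:

```python
from anytree import Node

from ripple_down_rules.utils import render_tree, tree_to_graph

# anytree stores extra keyword arguments as node attributes, so a `weight`
# is attached to every child node because the helpers above read it there.
root = Node("conditions")
fires = Node("fires", parent=root, weight="if milk == 1")
refine = Node("refine", parent=fires, weight="except if aquatic == 1")

render_tree(root)                              # prints the tree to the console
render_tree(root, use_dot_exporter=True,
            filename="example_rdr")            # also writes example_rdr.dot / example_rdr.png
graph = tree_to_graph(root)                    # networkx DiGraph with the weights as edge labels
```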
@@ -0,0 +1,54 @@ ripple_down_rules-0.0.0.dist-info/METADATA
Metadata-Version: 2.4
Name: ripple_down_rules
Version: 0.0.0
Summary: Implements the various versions of Ripple Down Rules (RDR) for knowledge representation and reasoning.
Author-email: Abdelrhman Bassiouny <abassiou@uni-bremen.de>
Project-URL: Homepage, https://github.com/AbdelrhmanBassiouny/ripple_down_rules
Keywords: robotics,knowledge,reasoning,representation
Classifier: Programming Language :: Python :: 3
Requires-Python: >=3.8
Description-Content-Type: text/markdown
Requires-Dist: neem_pycram_interface==1.0.167

# Ripple Down Rules (RDR)

A Python implementation of the various ripple down rules versions, including Single Classification (SCRDR),
Multi Classification (MCRDR), and Generalised Ripple Down Rules (GRDR).

SCRDR, MCRDR, and GRDR are rule-based classifiers that are built incrementally and can be used to classify
data cases. The rules are refined as new data cases are classified.

The SCRDR, MCRDR, and GRDR implementations were inspired by the book:
["Ripple Down Rules: An Alternative to Machine Learning"](https://doi.org/10.1201/9781003126157) by Paul Compton and Byeong Ho Kang.

## Installation

```bash
sudo apt-get install graphviz graphviz-dev
pip install ripple_down_rules
```

## Example Usage

Fit the SCRDR to the data, then classify one of the data cases to check if it is correct,
and render the tree to a file:

```Python
from ripple_down_rules.rdr import SingleClassRDR
from ripple_down_rules.datasets import load_zoo_dataset
from ripple_down_rules.utils import render_tree

all_cases, targets = load_zoo_dataset()

scrdr = SingleClassRDR()

# Fit the SCRDR to the data
scrdr.fit(all_cases, targets,
          animate_tree=True, n_iter=10)

# Render the tree to a file
render_tree(scrdr.start_rule, use_dot_exporter=True, filename="scrdr")

cat = scrdr.fit_case(all_cases[50], targets[50])
assert cat == targets[50]
```
@@ -0,0 +1,20 @@ ripple_down_rules-0.0.0.dist-info/RECORD
ripple_down_rules/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
ripple_down_rules/datasets.py,sha256=meZ2zpDkA3v_Hnat6r01bzWQi9eQMp6Qv0ESXyB8aTc,4214
ripple_down_rules/experts.py,sha256=wss-rMjmVRNoNpPNo_EygNsh57iebNi-g2j7_SkgWIU,11636
ripple_down_rules/failures.py,sha256=E6ajDUsw3Blom8eVLbA7d_Qnov2conhtZ0UmpQ9ZtSE,302
ripple_down_rules/prompt.py,sha256=lmREZRyleBTHrVtcf2j_48oc0v3VlxXYGhl6w1mk8qI,4208
ripple_down_rules/rdr.py,sha256=AGfUmfuTmxMd3M7wTCXJpK_mK1Tuhzsqg088Q3Kr9Cs,33059
ripple_down_rules/rules.py,sha256=H4Zm_9YN91GxttrgcUcpeLBPrU68oijqMVnON1uOxyY,10252
ripple_down_rules/utils.py,sha256=DgGJ0wzYkMlNbUfuLLpS_uynyJMJId4cc3m1DkZgGas,16345
ripple_down_rules/datastructures/__init__.py,sha256=wY9WqXavuE3wQ1YP65cs_SZyr7CEMB9tol-4oxgK9CM,104
ripple_down_rules/datastructures/callable_expression.py,sha256=QscF3nvVrZhH6dgAFO29_UGjcHprDV2wgjdeDXNYLUw,9066
ripple_down_rules/datastructures/dataclasses.py,sha256=z_9B7Nj_MIf2Iyrs5VeUhXhYxwaqnuKVjgwxhZZTygY,2525
ripple_down_rules/datastructures/enums.py,sha256=6Mh55_8QRuXyYZXtonWr01VBgLP-jYp91K_8hIgh8u8,4244
ripple_down_rules/datastructures/table.py,sha256=PffU8_NUY9q-lzvU9-L4cVI0hvJPFkX1uRpMq3zlq5M,22902
ripple_down_rules/datastructures/generated/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
ripple_down_rules/datastructures/generated/column/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
ripple_down_rules/datastructures/generated/row/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
ripple_down_rules-0.0.0.dist-info/METADATA,sha256=8uBKAK8WCDkSrP_d9wqbEX0A2vr8WgTUwdudGVBYaQA,1858
ripple_down_rules-0.0.0.dist-info/WHEEL,sha256=DK49LOLCYiurdXXOXwGJm6U4DkHkg4lcxjhqwRa0CP4,91
ripple_down_rules-0.0.0.dist-info/top_level.txt,sha256=VeoLhEhyK46M1OHwoPbCQLI1EifLjChqGzhQ6WEUqeM,18
ripple_down_rules-0.0.0.dist-info/RECORD,,
@@ -0,0 +1 @@ ripple_down_rules-0.0.0.dist-info/top_level.txt
ripple_down_rules