kele-0.0.1a1-cp313-cp313-win32.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. kele/__init__.py +38 -0
  2. kele/_version.py +1 -0
  3. kele/config.py +243 -0
  4. kele/control/README_metrics.md +102 -0
  5. kele/control/__init__.py +20 -0
  6. kele/control/callback.py +255 -0
  7. kele/control/grounding_selector/__init__.py +5 -0
  8. kele/control/grounding_selector/_rule_strategies/README.md +13 -0
  9. kele/control/grounding_selector/_rule_strategies/__init__.py +24 -0
  10. kele/control/grounding_selector/_rule_strategies/_sequential_strategy.py +42 -0
  11. kele/control/grounding_selector/_rule_strategies/strategy_protocol.py +51 -0
  12. kele/control/grounding_selector/_selector_utils.py +123 -0
  13. kele/control/grounding_selector/_term_strategies/__init__.py +24 -0
  14. kele/control/grounding_selector/_term_strategies/_exhausted_strategy.py +34 -0
  15. kele/control/grounding_selector/_term_strategies/strategy_protocol.py +50 -0
  16. kele/control/grounding_selector/rule_selector.py +98 -0
  17. kele/control/grounding_selector/term_selector.py +89 -0
  18. kele/control/infer_path.py +306 -0
  19. kele/control/metrics.py +357 -0
  20. kele/control/status.py +286 -0
  21. kele/egg_equiv.pyd +0 -0
  22. kele/egg_equiv.pyi +11 -0
  23. kele/equality/README.md +8 -0
  24. kele/equality/__init__.py +4 -0
  25. kele/equality/_egg_equiv/src/lib.rs +267 -0
  26. kele/equality/_equiv_elem.py +67 -0
  27. kele/equality/_utils.py +36 -0
  28. kele/equality/equivalence.py +141 -0
  29. kele/executer/__init__.py +4 -0
  30. kele/executer/executing.py +139 -0
  31. kele/grounder/README.md +83 -0
  32. kele/grounder/__init__.py +17 -0
  33. kele/grounder/grounded_rule_ds/__init__.py +6 -0
  34. kele/grounder/grounded_rule_ds/_nodes/__init__.py +24 -0
  35. kele/grounder/grounded_rule_ds/_nodes/_assertion.py +353 -0
  36. kele/grounder/grounded_rule_ds/_nodes/_conn.py +116 -0
  37. kele/grounder/grounded_rule_ds/_nodes/_op.py +57 -0
  38. kele/grounder/grounded_rule_ds/_nodes/_root.py +71 -0
  39. kele/grounder/grounded_rule_ds/_nodes/_rule.py +119 -0
  40. kele/grounder/grounded_rule_ds/_nodes/_term.py +390 -0
  41. kele/grounder/grounded_rule_ds/_nodes/_tftable.py +15 -0
  42. kele/grounder/grounded_rule_ds/_nodes/_tupletable.py +444 -0
  43. kele/grounder/grounded_rule_ds/_nodes/_typing_polars.py +26 -0
  44. kele/grounder/grounded_rule_ds/grounded_class.py +461 -0
  45. kele/grounder/grounded_rule_ds/grounded_ds_utils.py +91 -0
  46. kele/grounder/grounded_rule_ds/rule_check.py +373 -0
  47. kele/grounder/grounding.py +118 -0
  48. kele/knowledge_bases/README.md +112 -0
  49. kele/knowledge_bases/__init__.py +6 -0
  50. kele/knowledge_bases/builtin_base/__init__.py +1 -0
  51. kele/knowledge_bases/builtin_base/builtin_concepts.py +13 -0
  52. kele/knowledge_bases/builtin_base/builtin_facts.py +43 -0
  53. kele/knowledge_bases/builtin_base/builtin_operators.py +105 -0
  54. kele/knowledge_bases/builtin_base/builtin_rules.py +14 -0
  55. kele/knowledge_bases/fact_base.py +158 -0
  56. kele/knowledge_bases/ontology_base.py +67 -0
  57. kele/knowledge_bases/rule_base.py +194 -0
  58. kele/main.py +464 -0
  59. kele/py.typed +0 -0
  60. kele/syntax/CONCEPT_README.md +117 -0
  61. kele/syntax/__init__.py +40 -0
  62. kele/syntax/_cnf_converter.py +161 -0
  63. kele/syntax/_sat_solver.py +116 -0
  64. kele/syntax/base_classes.py +1482 -0
  65. kele/syntax/connectives.py +20 -0
  66. kele/syntax/dnf_converter.py +145 -0
  67. kele/syntax/external.py +17 -0
  68. kele/syntax/sub_concept.py +87 -0
  69. kele/syntax/syntacticsugar.py +201 -0
  70. kele-0.0.1a1.dist-info/METADATA +166 -0
  71. kele-0.0.1a1.dist-info/RECORD +74 -0
  72. kele-0.0.1a1.dist-info/WHEEL +4 -0
  73. kele-0.0.1a1.dist-info/licenses/LICENSE +28 -0
  74. kele-0.0.1a1.dist-info/licenses/licensecheck.json +20 -0
@@ -0,0 +1,20 @@
+ from __future__ import annotations
+
+ from enum import StrEnum
+ from typing import Final
+
+
+ class Connective(StrEnum):
+     """Logical connectives supported by the inference engine syntax layer."""
+     AND = "AND"
+     OR = "OR"
+     NOT = "NOT"
+     IMPLIES = "IMPLIES"
+     EQUAL = "EQUAL"
+
+
+ AND: Final[Connective] = Connective.AND
+ OR: Final[Connective] = Connective.OR
+ NOT: Final[Connective] = Connective.NOT
+ IMPLIES: Final[Connective] = Connective.IMPLIES
+ EQUAL: Final[Connective] = Connective.EQUAL
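Because `Connective` is a `StrEnum`, each member doubles as its own string value, which is what lets the DNF converter below compare `cur_formula.connective == AND` directly. A minimal standalone sketch of that behaviour (stdlib semantics only, mirroring the aliases above):

```python
from enum import StrEnum
from typing import Final


class Connective(StrEnum):
    AND = "AND"
    OR = "OR"
    NOT = "NOT"


# module-level alias, as in kele/syntax/connectives.py
AND: Final[Connective] = Connective.AND

assert AND == "AND"               # a StrEnum member compares equal to its raw string
assert Connective("AND") is AND   # and the raw string round-trips back to the member
```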
@@ -0,0 +1,145 @@
+ from functools import reduce
+ import warnings
+ from uuid import uuid1
+ from typing import TYPE_CHECKING, Any, ClassVar, cast
+
+ from sympy import Symbol
+ from sympy.logic.boolalg import (
+     And as SymAnd,
+     Equivalent,
+     Implies,
+     Not as SymNot,
+     Or as SymOr,
+     simplify_logic,
+ )
+ from bidict import bidict
+
+ from .base_classes import (
+     Assertion,
+     Formula,
+     Intro,
+     Rule,
+ )
+
+ from .connectives import (
+     AND,
+     EQUAL,
+     IMPLIES,
+     NOT,
+     OR,
+ )
+
+
+ class _AssertionSymbol:
+     assertion_to_id: ClassVar[bidict[Assertion, Symbol]] = bidict()
+
+     @classmethod
+     def get_symbol(cls, assertion: Assertion) -> Symbol:
+         if assertion not in cls.assertion_to_id:
+             cls.assertion_to_id[assertion] = Symbol(str(uuid1()))
+         return cls.assertion_to_id[assertion]
+
+     @classmethod
+     def get_assertion(cls, assertion_id: Symbol) -> Assertion:
+         return cls.assertion_to_id.inverse[assertion_id]
+
+
+ def convert_to_dnf(formula: Formula | Assertion) -> Formula | Assertion:
+     """
+     Convert a formula to DNF (disjunctive normal form).
+
+     :param formula: the formula to convert
+     :type formula: Formula | Assertion
+     :return: an equivalent formula in DNF
+     :rtype: Formula | Assertion
+     """
+     sympy_expr = _convert_to_sympy_expr(formula)
+     dnf_sympy_expr = simplify_logic(sympy_expr, form='dnf')
+     return _rebuild_formula(dnf_sympy_expr)
+
+
+ def _rebuild_formula(sympy_expr: Any) -> Formula | Assertion:  # noqa: ANN401
+     if isinstance(sympy_expr, Symbol):
+         return _AssertionSymbol.get_assertion(sympy_expr)
+     if isinstance(sympy_expr, SymNot):
+         return Formula(connective=NOT, formula_left=_rebuild_formula(sympy_expr.args[0]))
+     if isinstance(sympy_expr, SymAnd):
+         return reduce(lambda x, y: Formula(connective=AND, formula_left=x, formula_right=y),
+                       map(_rebuild_formula, sympy_expr.args))
+     if isinstance(sympy_expr, SymOr):
+         return reduce(lambda x, y: Formula(connective=OR, formula_left=x, formula_right=y),
+                       map(_rebuild_formula, sympy_expr.args))
+
+     raise ValueError(f"Unknown sympy expression type {type(sympy_expr)}")
+
+
+ def _convert_to_sympy_expr(cur_formula: Formula | Assertion) -> Any:  # noqa: ANN401
+     if isinstance(cur_formula, Assertion):
+         return _AssertionSymbol.get_symbol(cur_formula)
+     if cur_formula.connective == NOT:
+         return SymNot(_convert_to_sympy_expr(cur_formula.formula_left))
+     if TYPE_CHECKING:
+         cur_formula.formula_right = cast("Formula | Assertion", cur_formula.formula_right)
+     if cur_formula.connective == AND:
+         return SymAnd(_convert_to_sympy_expr(cur_formula.formula_left), _convert_to_sympy_expr(cur_formula.formula_right))
+     if cur_formula.connective == OR:
+         return SymOr(_convert_to_sympy_expr(cur_formula.formula_left), _convert_to_sympy_expr(cur_formula.formula_right))
+     if cur_formula.connective == IMPLIES:
+         return Implies(_convert_to_sympy_expr(cur_formula.formula_left), _convert_to_sympy_expr(cur_formula.formula_right))
+     if cur_formula.connective == EQUAL:
+         return Equivalent(_convert_to_sympy_expr(cur_formula.formula_left), _convert_to_sympy_expr(cur_formula.formula_right))
+     raise ValueError(f"Unknown connective {cur_formula.connective}")
+
+
+ class RuleSafetyProcesser:
+     """
+     Split a rule into a series of rules whose bodies are in DNF.
+     """
+     def _split_into_dnf_formulas(self, formula: Formula | Assertion) -> list[Assertion | Formula]:
+         """
+         Split a formula into its DNF disjuncts.
+
+         :param formula: the formula to split
+         :type formula: Formula | Assertion
+         :return: the list of DNF disjuncts
+         :rtype: list[Assertion | Formula]
+         """
+         if isinstance(formula, Assertion) or formula.connective != OR:
+             return [formula]
+         if TYPE_CHECKING:
+             formula.formula_right = cast("Formula | Assertion", formula.formula_right)
+         return self._split_into_dnf_formulas(formula.formula_left) + self._split_into_dnf_formulas(formula.formula_right)
+
+     def split_rule_and_process_safety[T1: Rule](self, rule: T1) -> list[T1]:
+         """
+         Split a rule's body into DNF disjuncts and add its unsafe variables to each new rule as Intro terms.
+
+         :param rule: the rule to split and process
+         :type rule: T1
+         :return: the split, safety-processed rules
+         :rtype: list[T1]
+         """
+         # 1. Convert the rule body to DNF and split it
+         new_rules: list[T1] = []
+         dnf_body = convert_to_dnf(rule.body)
+         body_formulas = self._split_into_dnf_formulas(dnf_body)
+         new_rules.extend(rule.replace(body=single_body_formula) for single_body_formula in body_formulas)
+
+         # 2. Add the unsafe variables to each new rule as Intro terms
+         processed_new_rules: list[T1] = []
+
+         for r in new_rules:
+             unsafe_variables = r.unsafe_variables
+             new_rule_body = r.body
+             if unsafe_variables:
+                 warnings.warn(f"""Rule {r!s} contains unsafe variables {[str(u) for u in unsafe_variables]}; auto-handled.\n
+                 A rule is safe if variables in action terms and negative literals are all included in non-action, positive assertions.\n
+                 For details, see the engine tutorial: #TODO
+                 """, stacklevel=4)  # TODO: add URL
+             for single_variable in unsafe_variables:
+                 new_rule_body = Formula(new_rule_body, AND, Intro(single_variable))
+
+             new_rule = r.replace(body=new_rule_body)
+             processed_new_rules.append(new_rule)
+
+         return processed_new_rules
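The conversion above delegates the heavy lifting to sympy: each `Assertion` is swapped for a fresh `Symbol` through the `_AssertionSymbol` bidict, `simplify_logic(..., form='dnf')` normalizes the expression, and `_rebuild_formula` maps the result back onto `Formula` objects. A minimal standalone sketch of the sympy step, using plain symbols in place of kele assertions:

```python
from sympy import symbols
from sympy.logic.boolalg import And, Implies, Not, simplify_logic

a, b, c = symbols("a b c")

# (a -> b) AND (NOT c), normalized into disjunctive normal form
expr = And(Implies(a, b), Not(c))
dnf = simplify_logic(expr, form="dnf")
print(dnf)  # e.g. (b & ~c) | (~a & ~c)
```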
@@ -0,0 +1,17 @@
+ from collections.abc import Callable
+ from pydantic import BaseModel, ConfigDict
+
+ from .base_classes import FACT_TYPE, Question, Assertion
+
+
+ class SankuManagementSystem(BaseModel):
+     """Abstraction of the Sanku ("three-base") management system, used mainly for type checking and to document the data structures."""
+
+     model_config = ConfigDict(arbitrary_types_allowed=True)
+
+     knowledge: list[FACT_TYPE] = []
+     initial_by_question: Callable[[Question, int | None], list[FACT_TYPE]] = lambda x, y: []
+     # TODO(lbq): to be discussed with Yongrong: given a question and an upper bound on how many items to fetch,
+     # the Sanku system decides which useful information to return to assist initialization.
+     # Also HACK: in practice this should be passed in via the memcached language, and it is an HTTP link rather than a function.
+     query_assertion: Callable[[Assertion | list[Assertion]], list[Assertion]] = lambda x: []
+     update_facts: Callable[[FACT_TYPE | list[FACT_TYPE]], None] = lambda x: None
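`SankuManagementSystem` follows a simple pattern: a pydantic model whose fields are callables with no-op defaults, to be replaced by real knowledge-base hooks. A minimal sketch of the same pattern with hypothetical field names (not the kele types):

```python
from collections.abc import Callable
from pydantic import BaseModel, ConfigDict


class ExternalHooks(BaseModel):
    """Hypothetical stand-in for the callable-field pattern used above."""

    model_config = ConfigDict(arbitrary_types_allowed=True)

    query: Callable[[str], list[str]] = lambda q: []          # default: return nothing
    update: Callable[[list[str]], None] = lambda items: None  # default: do nothing


hooks = ExternalHooks(query=lambda q: [f"fact about {q}"])
print(hooks.query("alice"))   # ['fact about alice']
hooks.update(["new fact"])    # still the no-op default
```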
@@ -0,0 +1,87 @@
+ # ruff: noqa: PLC0415
+
+ from __future__ import annotations
+ import warnings
+ from contextlib import contextmanager
+ import re
+ from collections.abc import Generator
+ from collections.abc import Sequence, Mapping
+ from typing import TYPE_CHECKING
+ if TYPE_CHECKING:
+     from kele.syntax import Concept
+
+
+ # Registration of concept subsumption relations
+ def register_concept_relations(relations: str | Mapping[Concept | str, Sequence[Concept | str]] | Sequence[tuple[Concept | str, Concept | str]]) \
+         -> None:
+     """
+     Register a batch of subsumption relations [(child, parent), ...], each entry stating one concept subsumption child ⊆ parent.
+     Three input forms are supported: a string DSL, a mapping (child -> [parents...]), or a list of (child, parent) pairs.
+     """
+     if isinstance(relations, str):
+         register_concept_from_string(relations)
+     elif isinstance(relations, Mapping):
+         register_concept_from_mapping(relations)
+     else:
+         register_concept_subsumptions(relations)
+
+
+ def register_concept_subsumptions(pairs: Sequence[tuple[Concept | str, Concept | str]]) -> None:
+     """Register a list of subsumption pairs [(child, parent), ...] in bulk."""
+     from kele.syntax import Concept
+     for ch, pa in pairs:
+         Concept.add_subsumption(ch, pa)
+
+
+ def register_concept_from_mapping(mapping: Mapping[Concept | str, Sequence[Concept | str]]) -> None:
+     """Register from a mapping (child -> [parents...])."""
+     from kele.syntax import Concept
+     for ch, parents in mapping.items():
+         for pa in parents:
+             Concept.add_subsumption(ch, pa)
+
+
+ def register_concept_from_string(spec: str) -> None:
+     """
+     Register multiple subsumption relations from a string. Supported separators: comma / semicolon / newline; relation symbols: '⊆', '<='.
+     :raise ValueError: if a statement does not use '⊆' or '<=' as its relation symbol
+     """  # noqa: DOC501
+     from kele.syntax import Concept
+     items = re.split(r"[;,\n]+", spec)
+     for item in items:
+         s = item.strip()
+         if not s:
+             continue
+         m = re.match(r"^(.+?)(?:⊆|<=)(.+)$", s)
+         if not m:
+             raise ValueError(f"Unable to parse inclusion statement: {s!r}. Expected 'A ⊆ B' or 'A <= B'.")
+         left, right = m.group(1).strip(), m.group(2).strip()
+         Concept.add_subsumption(left, right)
+
+
+ def with_concept_relations(relations: str | Mapping[Concept | str, Sequence[Concept | str]] | Sequence[tuple[Concept | str, Concept | str]]) \
+         -> object:
+     """Decorator: register the given relations at the moment the decorated object is defined.
+
+     Usage:
+         @with_concept_relations("int ⊆ real; positive_int <= int")
+         def build_ops(): ...
+     """
+     def _decorator(obj: object) -> object:
+         register_concept_relations(relations)
+         return obj
+     return _decorator
+
+
+ @contextmanager
+ def concept_relation_scope(relations: str | Mapping[Concept | str, Sequence[Concept | str]] | Sequence[tuple[Concept | str, Concept | str]]) \
+         -> Generator:  # type: ignore[type-arg]
+     """Context manager: registers the given relations on entry; no rollback happens on exit, because subsumption relations are currently assumed immutable."""
+     register_concept_relations(relations)
+     try:
+         yield
+     finally:
+         warnings.warn(
+             "Registers the given relation on entry; no rollback on exit because subset relations are assumed immutable.",
+             stacklevel=2,
+         )
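A usage sketch for the helpers above; the concept names are illustrative, and the calls assume `Concept.add_subsumption` accepts plain strings, which is exactly how the helpers invoke it:

```python
from kele.syntax.sub_concept import (
    concept_relation_scope,
    register_concept_relations,
    with_concept_relations,
)

# 1) string DSL: ',' or ';' separates statements, '⊆' or '<=' is the relation symbol
register_concept_relations("int ⊆ real; positive_int <= int")

# 2) mapping: child -> [parents...]
register_concept_relations({"rational": ["real", "complex"]})

# 3) explicit (child, parent) pairs
register_concept_relations([("even_int", "int"), ("odd_int", "int")])


# decorator form: relations are registered when the decorated object is defined
@with_concept_relations("natural <= int")
def build_ops() -> None: ...


# context-manager form: registered on entry, never rolled back
with concept_relation_scope("bool <= int"):
    pass
```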
@@ -0,0 +1,201 @@
+ # ruff: noqa: PLR6301
+
+ """
+ This module provides syntactic sugar for arithmetic expressions, using the Lark parser.
+ Note that every CompoundTerm must be wrapped in parentheses, including (but not limited to) nested and atomic CompoundTerms.
+ Supported symbols:
+ - numbers (integers or floats)
+ - variable names ([A-Za-z_]\\w*)
+ - parentheses (nesting supported)
+ - binary operators: +, -, *, /
+ - unary operators: +, -
+ - equations: e_term "=" e_term
+ After parsing, the syntax tree is converted into the inference engine's syntax classes: CompoundTerm, Constant, Variable, etc.
+ The supported ops and concepts live in the builtin_base folder, e.g. arithmetic_plus_op, arithmetic_minus_op.
+ """
+
+ from __future__ import annotations
+
+ from typing import Final
+
+ from lark import Lark, Transformer, Token
+
+ from kele.knowledge_bases.builtin_base.builtin_concepts import (
+     COMPLEX_NUMBER_CONCEPT,
+ )
+ from kele.knowledge_bases.builtin_base.builtin_operators import (
+     arithmetic_divide_op,
+     get_arithmetic_equation_op,
+     arithmetic_minus_op,
+     arithmetic_negate_op,
+     arithmetic_plus_op,
+     arithmetic_times_op,
+ )
+ from kele.syntax import CompoundTerm, Constant, Variable, TERM_TYPE, Assertion, Formula
+
+
+ GRAMMAR: Final[str] = r"""
+     // Top level: equation | parenthesized expression | bare number
+     start: equation
+          | pexpr -> top_paren_expr
+          | number
+
+     // Equation: each side may be a parenthesized expression or a bare number
+     equation: e_term "=" e_term -> equation
+     e_term: pexpr | number
+
+     // Parenthesized expression: currently the four arithmetic operations plus unary +/-, nesting allowed
+     pexpr: "(" expr ")" -> paren
+          | "(" pexpr ")" -> nested_paren
+
+     // The four arithmetic operations
+     ?expr: expr "+" term -> add
+          | expr "-" term -> sub
+          | expr "*" term -> mul
+          | expr "/" term -> div
+          | "-" expr -> neg
+          | "+" expr -> pos
+          | term
+
+     ?term: number
+          | symbol
+          | pexpr  // nested parentheses supported
+
+     number: NUMBER
+     symbol: NAME
+
+     NAME: /[A-Za-z_]\w*/
+     %import common.NUMBER
+     %ignore /[ \t]+/
+     """
+
+
+ class ToSyntax(Transformer):  # type: ignore[type-arg]
+     """Transform Lark parse trees into AL inference engine terms."""
+
+     # --- Entrypoints & helpers -------------------------------------------------
+
+     def start(self, items: list[TERM_TYPE | Assertion | Formula]) -> TERM_TYPE | Assertion | Formula:
+         """Return the single top-level term."""
+         return items[0]
+
+     def e_term(self, items: list[TERM_TYPE]) -> TERM_TYPE:
+         """Return an equation-side term unchanged."""
+         return items[0]
+
+     # --- Parentheses handling --------------------------------------------------
+
+     def top_paren_expr(self, items: list[TERM_TYPE]) -> TERM_TYPE:
+         """Alias for a top-level parenthesized expression."""
+         return items[0]
+
+     def paren(self, items: list[TERM_TYPE]) -> TERM_TYPE:
+         """Elide a single layer of parentheses."""
+         return items[0]
+
+     def nested_paren(self, items: list[TERM_TYPE]) -> TERM_TYPE:
+         """Elide nested parentheses."""
+         return items[0]
+
+     # --- Atoms -----------------------------------------------------------------
+
+     def number(self, items: list[Token]) -> Constant:
+         """Convert a numeric token into a :class:`Constant`."""
+         s = str(items[0])
+         v = float(s) if ("." in s or "e" in s or "E" in s) else int(s)
+         return Constant(v, COMPLEX_NUMBER_CONCEPT)
+
+     def symbol(self, items: list[Token]) -> Variable:
+         """Convert an identifier token into a :class:`Variable` (handler for the ``symbol`` grammar rule)."""
+         return Variable(str(items[0]))
+
+     # --- Unary ops -------------------------------------------------------------
+
+     def neg(self, items: list[CompoundTerm]) -> CompoundTerm:
+         """Build a unary negation term."""
+         return CompoundTerm.from_parts(arithmetic_negate_op, [items[0]])
+
+     def pos(self, items: list[CompoundTerm]) -> TERM_TYPE:
+         """Return the inner term for unary plus (no-op)."""
+         return items[0]
+
+     # --- Binary ops ------------------------------------------------------------
+
+     def add(self, items: list[CompoundTerm]) -> CompoundTerm:
+         """Build an addition term."""
+         left, right = items
+         return CompoundTerm.from_parts(arithmetic_plus_op, [left, right])
+
+     def sub(self, items: list[CompoundTerm]) -> CompoundTerm:
+         """Build a subtraction term."""
+         left, right = items
+         return CompoundTerm.from_parts(arithmetic_minus_op, [left, right])
+
+     def mul(self, items: list[CompoundTerm]) -> CompoundTerm:
+         """Build a multiplication term."""
+         left, right = items
+         return CompoundTerm.from_parts(arithmetic_times_op, [left, right])
+
+     def div(self, items: list[CompoundTerm]) -> CompoundTerm:
+         """Build a division term."""
+         left, right = items
+         return CompoundTerm.from_parts(arithmetic_divide_op, [left, right])
+
+     # --- Equation --------------------------------------------------------------
+
+     def equation(self, items: list[CompoundTerm]) -> CompoundTerm:
+         """Build an equality term for the left/right sides."""
+         left, right = items
+         return CompoundTerm.from_parts(get_arithmetic_equation_op, [left, right])
+
+
+ _parser: Lark = Lark(GRAMMAR, parser="lalr", maybe_placeholders=False)
+ _to_syntax: ToSyntax = ToSyntax()
+
+
+ class SyntacticSugar:
+     """Callable facade that delegates to :func:`parse_term`."""
+
+     def __call__(self, input_str: str) -> TERM_TYPE | Assertion | Formula:
+         """Parse ``input_str`` and return a transformed term.
+
+         Parameters
+         ----------
+         input_str
+             The input string to parse.
+
+         Returns
+         -------
+         Term
+             The transformed term produced by :func:`parse_term`.
+
+         Raises
+         ------
+         SyntaxError
+             If parsing fails for any reason.
+         """
+         try:
+             return _to_syntax.transform(_parser.parse(input_str))  # type: ignore[no-any-return]
+         except Exception as e:
+             raise SyntaxError(f"Parsing failed: {e}\n") from e
+
+
+ syntactic_sugar: Final[SyntacticSugar] = SyntacticSugar()
+
+
+ if __name__ == "__main__":
+     cases: list[str] = [
+         "(1+2)=3",
+         "(x*2+3)",
+         "1",
+         "((1)+(2))=((3))",  # redundant parentheses are still accepted
+         "((1)+(2))=3",
+         "((1+2))=3",
+         "(1+(2*3))=7",
+         "(-(1+2))",  # top-level expressions require parentheses
+         "(1+x)",  # every term requires parentheses
+         "(1+(-x))",
+         "(((1+x)))",
+     ]
+     for s in cases:
+         print(s, "=>", syntactic_sugar(s))  # noqa: T201
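The transformer above relies on Lark's convention that `Transformer` methods are looked up by grammar rule name (which is why the handler for the `symbol` rule must itself be called `symbol`). A minimal standalone sketch of that convention, independent of kele:

```python
from lark import Lark, Token, Transformer

GRAMMAR = r"""
start: NAME "=" NUMBER
NAME: /[A-Za-z_]\w*/
%import common.NUMBER
%ignore /[ \t]+/
"""


class ToPair(Transformer):
    # the method name must match the rule name ("start") for Lark to call it
    def start(self, items: list[Token]) -> tuple[str, float]:
        name, number = items  # the "=" literal is filtered out of the tree
        return (str(name), float(number))


parser = Lark(GRAMMAR, parser="lalr")
print(ToPair().transform(parser.parse("x = 42")))  # ('x', 42.0)
```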
@@ -0,0 +1,166 @@
+ Metadata-Version: 2.4
+ Name: kele
+ Version: 0.0.1a1
+ Summary: Inference engine
+ Author: Bingqian Li, BoYang Zhang, Weiming Hong, Hao Zhang, Yi Zhou
+ Maintainer-email: Bingqian Li <libq2022@alumni.shanghaitech.edu.cn>, Yi Zhou <yi_zhou@ustc.edu.cn>
+ License: BSD-3-Clause
+ License-File: LICENSE
+ License-File: licensecheck.json
+ Requires-Python: <4.0,>=3.13
+ Requires-Dist: bidict<1.0,>=0.23
+ Requires-Dist: dacite<2.0,>=1.9
+ Requires-Dist: graphviz==0.20.3
+ Requires-Dist: lark<2.0,>=1.0
+ Requires-Dist: networkx<4.0,>=3.5
+ Requires-Dist: numpy>=2.3.3
+ Requires-Dist: polars>=1.32
+ Requires-Dist: prometheus-client>=0.22
+ Requires-Dist: psutil>=7.0
+ Requires-Dist: pydantic<3.0,>=2.0.0
+ Requires-Dist: python-sat<2.0,>=1.7.dev0
+ Requires-Dist: pyvis<0.4,>=0.3
+ Requires-Dist: pyyaml<7.0,>=6.0
+ Requires-Dist: sympy<2.0,>=1.13
+ Requires-Dist: tyro<2.0,>=0.9
+ Description-Content-Type: text/markdown
+
+ # KELE Inference Engine
+
+ [English](README.md) | [中文](README.zh.md)
+
+ <!-- Badges: If services are not configured, badges may show unknown/404; enable as needed. -->
+ [![License](https://img.shields.io/github/license/USTC-KnowledgeComputingLab/KELE.svg)](LICENSE)
+ [![Build](https://github.com/USTC-KnowledgeComputingLab/KELE/actions/workflows/release.yml/badge.svg?branch=main)](https://github.com/USTC-KnowledgeComputingLab/KELE/actions/workflows/release.yml)
+ ![Python 3.13+](https://img.shields.io/badge/python-3.13%2B-blue)
+ [![Docs](https://img.shields.io/badge/docs-GitHub%20Pages-blue)](https://msg-bq.github.io/)
+ [![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg)](CONTRIBUTING.md)
+ [![Commit Message](https://img.shields.io/badge/commit%20message-style%20guide-yellow)](CONTRIBUTING.md)
+
+ ---
+
+ KELE is a forward-chaining inference engine based on [Assertion Logic](https://link.springer.com/chapter/10.1007/978-3-319-63703-7_9), implementing a subset of the logic.
+
+ It supports **term-level facts**, **nested terms**, **equivalence axioms**, and **operators with functions**, and integrates well with modern Python (3.13+). You can embed your tools into KELE through operator implementations (and embed KELE into your tools); the burden of wrapping or binding other languages is left to developers rather than to users.
+
+ > ⚠️ **Project status** \
+ > We released the first alpha version on **12/31** and will move it to a beta release as soon as possible after the holiday. The engine will maintain backward compatibility for commonly used public classes and modules, while internal components are still evolving and under active development.
+
+ ### ✨ Features
+
+ - **Term-level facts and reasoning**: term-centric organization and inference, well suited to equality knowledge
+ - **Equivalence axioms**: convenient equivalence expressions with internal maintenance
+ - **Nested compound terms**: operators can nest to build complex structures
+ - **Implement functions for operators**: attach executable implementations to operators (e.g., arithmetic, equation solving)
+
+ > Implement functions for operators ≈ Prolog meta-predicates / ASP HEX external predicates (not identical semantics, similar usage).
+
+ ### 🔍 Matching semantics
+
+ - **Loose matching**: treat subsumption as "intersection" matching, without an input/constraint distinction
+ - **Concept overlap check**: returns whether the two concept sets share a non-empty intersection
+ - **Mismatch handling**: non-overlapping concepts are treated as incompatible
+
+ ### 🔧 Installation
+
+ #### Option A: PyPI (after release)
+
+ You can grab the latest built wheel from GitHub Actions or install a published release directly.
+
+ ```bash
+ pip install kele
+ ```
+
+ #### Option B: Build from source
+
+ > **Requirements**: Python 3.13+; Rust toolchain (`rustup`); on Windows, MSVC (Visual Studio Build Tools).
+
+ ```bash
+ git clone https://github.com/USTC-KnowledgeComputingLab/KELE
+ cd KELE
+ uv sync
+ uv run maturin develop --skip-install  # install rust and MSVC (Windows) beforehand
+ ```
+
+ ### 🚀 Quick start
+
+ > Full example: `examples/relationship_quick_start.py`
+
+ ```bash
+ uv run python examples/relationship_quick_start.py
+ # Output: grandparent relation inference result (forward-chaining demo)
+ ```
+
+ ### 🧩 Core syntax at a glance
+
+ | Type | Meaning | Example/Hint |
+ | ----------------- | --------------------------------------------- | ---------------------------------------------------- |
+ | `Concept` | Group of objects sharing something in common | `Person = Concept("Person")` |
+ | `Constant` | Object (belongs to concepts) | `alice = Constant("Alice", Person)` |
+ | `Variable` | Placeholder in rules/queries | `X = Variable("X")` |
+ | `Operator` | Map a tuple of objects into a single one | `parent(Person, Person) -> Bool` |
+ | `CompoundTerm` | Operator + arguments | `CompoundTerm(parent, [alice, bob])` |
+ | `Assertion` | “term = term” assertion | `Assertion(..., ...)` |
+ | `Formula` | Combine assertions with AND/OR/… | `Formula(A, "AND", B)` |
+ | `Rule` | body → head | `Rule(head=..., body=...)` |
+ | `QueryStructure` | Query input (premises + question) | `QueryStructure(premises=[...], question=[...])` |
+ | `InferenceEngine` | Engine core | `InferenceEngine(facts=[...], rules=[...])` |
+
+ `examples/relationship_quick_start.py` shows a family-relation inference example, illustrating how the pieces fit together:
+
+ 1. Define concepts (`Concept`) and operators (`Operator`), such as `Person`, `parent`, `grandparent`.
+ 2. Add initial facts (`Assertion`), e.g. “Bob is Alice’s parent”.
+ 3. Write rules (`Rule` + `Formula`), e.g. “if parent(X, Y) and parent(Y, Z), then grandparent(X, Z)”.
+ 4. Build a query (`QueryStructure`) and run `InferenceEngine`.
+
+ Example snippet (imports/details omitted; see `examples/relationship_quick_start.py` for a runnable version):
+
+ ```python
+ # 1. Define concepts and operators
+ Person = Concept("Person")
+ ...
+
+ # 2. Add facts
+ alice = Constant("Alice", Person)
+ ...
+
+ facts = [
+     # parent(Alice, Bob) = True
+     Assertion(CompoundTerm(parent, [alice, bob]), true_const),
+     ...
+ ]
+
+ # 3. Define rules + query
+ rules = [Rule(
+     head=...,
+     body=...,
+ )]
+
+ engine = InferenceEngine(facts=facts, rules=rules)
+ query = QueryStructure(premises=facts, question=[...])  # e.g., ask for grandparent(Alice, X)
+
+ print(engine.infer_query(query))
+ ```
+
+ ### 🧭 Documentation
+
+ * **Sphinx docs**:
+
+   * Read the Docs: WIP
+   * Build locally: `uv run sphinx-build -b html docs\source docs\build\html`
+
+ * **Tutorial**: https://msg-bq.github.io/
+
+ ### 🗺️ Roadmap
+
+ WIP
+
+ ### 🤝 Contributing
+
+ Issues/PRs welcome! Please read [CONTRIBUTING.md](CONTRIBUTING.md), and consider enabling `ruff` and `mypy`.
+
+ If you have any questions about using the engine (usage, syntax/semantics, or theoretical foundations), please open an issue or contact us.
+
+ ### 🪪 License
+
+ This project uses the BSD 3-Clause license. See [LICENSE](LICENSE).