unisi 0.3.14__py3-none-any.whl → 0.3.15__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to their public registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in that registry.
unisi/llmrag.py CHANGED
@@ -2,62 +2,125 @@
  from .common import Unishare
  from langchain_groq import ChatGroq
  from langchain_openai import ChatOpenAI
+ from langchain_mistralai import ChatMistralAI
  from langchain_google_genai import (
      ChatGoogleGenerativeAI,
      HarmBlockThreshold,
      HarmCategory,
  )
- from functools import lru_cache
- from pydantic import RootModel, create_model, BaseModel
- import collections, inspect
+ from datetime import datetime
+ import collections, inspect, re, json
+ from typing import get_origin, get_args

- def is_standard_type(obj):
-     return isinstance(obj, (collections.abc.Sequence, collections.abc.Mapping,
-         int, float, complex, bool, str, bytes, bytearray, range))
-
- def Model(name, type_value = None, **parameters):
-     model = {}
-     if type_value is None:
-         for k, v in parameters.items():
-             vtype = is_standard_type(v)
-             if vtype:
-                 model[k] = (v, ...)
+ def jstype(type_value):
+     if isinstance(type_value, type):
+         if type_value == int:
+             return 'integer'
+         elif type_value == float:
+             return 'number'
+         elif type_value == bool:
+             return 'boolean'
+         elif type_value == str:
+             return 'string'
+         elif type_value == dict:
+             return 'object'
+         elif type_value == list:
+             return 'array'
+         else:
+             origin = get_origin(type_value)
+             args = get_args(type_value)
+             if origin == list:
+                 return f'array of {jstype(args[0])} '
+             elif origin == dict:
+                 return f'object of {jstype(args[0])} to {jstype(args[1])} structure.'
              else:
-                 model[k] = (vtype, v)
-         return create_model(name, **model) if model else RootModel[str]
-     return RootModel[type_value]
+                 return 'string'
+     else:
+         match type_value:
+             case str():
+                 jtype = 'string'
+             case int():
+                 jtype = 'integer'
+             case float():
+                 jtype = 'number'
+             case bool():
+                 jtype = 'boolean'
+             case dict():
+                 if type_value:
+                     ptypes = ','.join(f'"{k}": "[Type: {jstype(v)}]"' for k, v in type_value.items())
+                     jtype = f'object with {{{ptypes}}} structure'
+                 else:
+                     jtype = 'object'
+             case list():
+                 jtype = 'array'
+             case _:
+                 jtype = 'string'
+         return jtype

- class Question:
-     index = 0
-     """contains question, format of answer"""
-     def __init__(self, question, type_value = None, **format_model):
-         self.question = question
-         self.format = Model(f'Question {Question.index}', type_value, **format_model)
-         Question.index += 1
+ def is_type(variable, expected_type):
+     """
+     Check if the variable matches the expected type hint.
+     """
+     origin = get_origin(expected_type)
+     if origin is None:
+         return isinstance(variable, expected_type)
+     args = get_args(expected_type)
+
+     # Check if the type matches the generic type
+     if not isinstance(variable, origin):
+         return False

-     def __str__(self):
-         return f'Qustion: {self.question} \n Format: {self.format}'
-
-     @lru_cache(maxsize=None)
-     def get(question, type_value = None, **format_model):
-         return Question(question, type_value, **format_model)
+     if not args:
+         return True
+
+     if origin is list:
+         return all(isinstance(item, args[0]) for item in variable)
+     elif origin is dict:
+         return all(isinstance(k, args[0]) and isinstance(v, args[1]) for k, v in variable.items())

- async def Q(question, type_value = None, **format_model):
-     """returns LLM answer for a question"""
-     q = Question.get(question, type_value, **format_model)
-     llm = Unishare.llm_model
-     str_prompt = q.question
-     if '{' in str_prompt:
-         caller_frame = inspect.currentframe().f_back
-         str_prompt = str_prompt.format(**caller_frame.f_locals)
-     io = await llm.ainvoke(str_prompt)
-     js = io.content.strip('`')
-     js = js.replace('json', '').replace('\n', '')
-     return q.format.parse_raw(js).root
+     return False

+ def Q(str_prompt, type_value = str, blank = True, **format_model):
+     """returns LLM async call for a question"""
+     llm = Unishare.llm_model
+     if '{' in str_prompt:
+         caller_frame = inspect.currentframe().f_back
+         format_model = caller_frame.f_locals | format_model if format_model else caller_frame.f_locals
+         str_prompt = str_prompt.format(**format_model)
+     if not re.search(r'json', str_prompt, re.IGNORECASE):
+         jtype = jstype(type_value)
+         format = " dd/mm/yyyy string" if type_value == 'date' else f'a JSON {jtype}' if jtype != 'string' else jtype
+         str_prompt = f"System: You are an intelligent and extremely smart assistant. Output STRONGLY {format}." + str_prompt
+     async def f():
+         io = await llm.ainvoke(str_prompt)
+         js = io.content.strip().strip('`').replace('json', '')
+         if type_value == str or type_value == 'date':
+             return js
+         parsed = json.loads(js)
+         if isinstance(type_value, dict):
+             for k, v in type_value.items():
+                 if k not in parsed:
+                     for k2, v2 in parsed.items():
+                         if re.fullmatch(k, k2, re.IGNORECASE) is not None:
+                             parsed[k] = parsed.pop(k2)
+                             break
+                     else:
+                         if blank:
+                             parsed[k] = None
+                             continue
+                         else:
+                             raise KeyError(f'Key {k} not found in {parsed}')
+
+                 if not is_type(parsed[k], v):
+                     raise TypeError(f'Invalid type for {k}: {type(parsed[k])} != {v}')
+         else:
+             if not is_type(parsed, type_value):
+                 raise TypeError(f'Invalid type: {type(parsed)} != {type_value}')
+         return parsed
+     return f()

  def setup_llmrag():
-     import config #the module is loaded before config.py
+     import config #the module is loaded before config analysis
      temperature = getattr(config, 'temperature', 0.0)
      if config.llm:
          match config.llm:
@@ -79,7 +142,7 @@ def setup_llmrag():
                      openai_api_base = address
                  )
              case 'openai':
-                 Unishare.llm_model = ChatOpenAI(temperature=0.0)
+                 Unishare.llm_model = ChatOpenAI(temperature = temperature)

              case 'groq':
                  Unishare.llm_model = ChatGroq(
@@ -100,43 +163,23 @@ def setup_llmrag():
                          HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_NONE
                      }
                  )
+             case 'mistral':
+                 Unishare.llm_model = ChatMistralAI(
+                     model = model,
+                     temperature=0,
+                     max_retries=2,
+                     # other params...
+                 )

- numeric_types = ['number', 'int', 'float', 'double']
-
- async def get_property(name, context = '', type = 'string', options = None, attempts = 1, messages = None):
-     if messages is None:
-         limits = f'type is {type}'
-         if type == 'date':
-             limits = f'{limits}, use format "dd/mm/yyyy"'
-         if options:
-             limits = f'{limits}, and its possible options are {",".join(opt for opt in options)}'
-         messages = [
-             (
-                 "system",
-                 f"""You are an intelligent and extremely smart assistant."""
-             ),
-             ("human", f"""{context} . Reason and infer {name}, which {limits}.
-             Do not include any additional text or commentary in your answer, just exact the property value.""")
-         ]
-     ai_msg = await Unishare.llm_model.ainvoke(messages)
-     value = ai_msg.content
-     log_error = ''
-     if type in numeric_types:
-         try:
-             value = float(value)
-         except:
-             log_error = f'Invalid value {value} from llm-rag for {messages[1][1]}'
-         return value
-     else:
-         value = value.strip('""')
-
-     if not log_error and options and value not in options:
-         attempts -= 1
-         if attempts > 0:
-             value = get_property(name, context, type, options, attempts, messages)
-         else:
-             log_error = f'Invalid value {value} from llm-rag for {messages[1][1]}'
-
-     if log_error:
-         Unishare.message_logger(log_error)
-     return value
+ async def get_property(name, context = '', type = str, options = None):
+     if type == str and re.search(r'date', name, re.IGNORECASE):
+         type = 'date'
+     limits = f', which possible options are {",".join(opt for opt in options)},' if options else ''
+     prompt = """Context: {context} . Output ONLY "{name}" explicit value{limits} based on the context. """
+     try:
+         value = await Q(prompt, type)
+     except Exception as e:
+         Unishare.message_logger(e)
+         return None
+     return value
+
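The new Q helper formats the prompt from the caller's locals plus keyword arguments, prepends a JSON type instruction derived from jstype, and returns an awaitable coroutine whose result is checked with is_type; get_property wraps it for single-value extraction. A minimal usage sketch, assuming setup_llmrag() has already bound Unishare.llm_model; extract_person, the sample prompt, and the text variable are illustrative and not part of the package:

    # Hypothetical caller code, not shipped with unisi
    from unisi.llmrag import Q, get_property

    async def extract_person(text):
        # dict type_value: reply keys are matched case-insensitively and type-checked via is_type
        person = await Q('Extract the person mentioned in: {text}', {'name': str, 'age': int}, text = text)
        # a property name containing "date" switches get_property to the dd/mm/yyyy string format
        birth = await get_property('birth date', context = text)
        return person, birth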
unisi/server.py CHANGED
@@ -18,9 +18,13 @@ def context_screen():
      user = context_user()
      return user.screen if user else None

- def message_logger(str, type = 'error'):
+ def message_logger(message, type = 'error'):
      user = context_user()
-     user.log(str, type)
+     if user:
+         user.log(message, type)
+     else:
+         with logging_lock:
+             logging.error(message)

  Unishare.context_user = context_user
  Unishare.message_logger = message_logger
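The reworked message_logger above complements the llmrag changes: get_property now hands exceptions to Unishare.message_logger, which can happen when no user context is active. A small hypothetical call site, assuming the server module has been imported so Unishare.message_logger is bound:

    # Hypothetical: no active user context, e.g. a background task
    from unisi.common import Unishare

    Unishare.message_logger('llm call failed: timeout')  # no user -> falls back to logging.error under logging_lock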
unisi/users.py CHANGED
@@ -312,18 +312,18 @@ class User:
      def sync_send(self, obj):
          asyncio.run(self.send(obj))

-     def log(self, str, type = 'error'):
+     def log(self, message, type = 'error'):
          scr = self.screen.name if self.screens else 'void'
-         str = f"session: {self.session}, screen: {scr}, message: {self.last_message}\n {str}"
+         message = f"session: {self.session}, screen: {scr}, message: {self.last_message}\n {message}"
          with logging_lock:
              if type == 'error':
-                 logging.error(str)
+                 logging.error(message)
              elif type == 'warning':
-                 logging.warning(str)
+                 logging.warning(message)
              else:
                  func = logging.getLogger().setLevel
                  func(level = logging.INFO)
-                 logging.info(str)
+                 logging.info(message)
                  func(level = logging.WARNING)

  def init_user():
unisi-0.3.14.dist-info/METADATA → unisi-0.3.15.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: unisi
- Version: 0.3.14
+ Version: 0.3.15
  Summary: Unified System Interface, GUI and Remote API
  Author-Email: UNISI Tech <g.dernovoy@gmail.com>
  License: Apache-2.0
@@ -20,6 +20,7 @@ Requires-Dist: langchain-groq
  Requires-Dist: langchain-community
  Requires-Dist: langchain-openai
  Requires-Dist: langchain-google-genai
+ Requires-Dist: langchain_mistralai
  Requires-Dist: word2number
  Description-Content-Type: text/markdown

unisi-0.3.14.dist-info/RECORD → unisi-0.3.15.dist-info/RECORD RENAMED
@@ -1,7 +1,7 @@
- unisi-0.3.14.dist-info/METADATA,sha256=dJlmgx4DW_-CzGkW4kUxEB1Te_Q34JFcKJLxbKZ347A,27231
- unisi-0.3.14.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
- unisi-0.3.14.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
- unisi-0.3.14.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ unisi-0.3.15.dist-info/METADATA,sha256=r-Ax_zHww1_W5413mkStd6mb-9dBiG-_Pnzf9fcxkNI,27266
+ unisi-0.3.15.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
+ unisi-0.3.15.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
+ unisi-0.3.15.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
  unisi/__init__.py,sha256=prG4FwJzpNJRX1trto0x_4Bne3kkpEX1dUxcRnIxWVw,301
  unisi/autotest.py,sha256=qYKwSPEPUEio6koUSu1tc71pDkX-doCQJlyRppaXCtY,8709
  unisi/common.py,sha256=bMPZo7V9nlJW5HC0yJLRDbrh0DZ4oqmEtBuOvGyN6fw,5759
@@ -14,14 +14,14 @@ unisi/jsoncomparison/config.py,sha256=LbdLJE1KIebFq_tX7zcERhPvopKhnzcTqMCnS3jN12
  unisi/jsoncomparison/errors.py,sha256=wqphE1Xn7K6n16uvUhDC45m2BxbsMUhIF2olPbhqf4o,1192
  unisi/jsoncomparison/ignore.py,sha256=xfF0a_BBEyGdZBoq-ovpCpawgcX8SRwwp7IrGnu1c2w,2634
  unisi/kdb.py,sha256=K-Lqc3e9hLTwO0i1ilTC6qrwZp90tXjLm7HFb_lM1Os,13621
- unisi/llmrag.py,sha256=gspPfYcdqCkRLJh8L3J2SkRl-ywwcAsNV_BySjGli1c,5293
+ unisi/llmrag.py,sha256=M5BvP8MbVjJhnhrnoyP-PfN0OnFYXBzgSGtd52W56tM,7057
  unisi/multimon.py,sha256=YKwCuvMsMfdgOGkJoqiqh_9wywXMeo9bUhHmbAIUeSE,4060
  unisi/proxy.py,sha256=QMHSSFJtmVZIexIMAsuFNlF5JpnYNG90rkTM3PYJhY4,7750
  unisi/reloader.py,sha256=qml-ufoUME7mrWrPMwMo3T8Jsh4e26CBj564cHCB6I0,6749
- unisi/server.py,sha256=V0I3OAWcebttN1KXHd_-5Vx9tOZ_RzPfSg-3ZJVxWY0,6084
+ unisi/server.py,sha256=xoUSn4lNv0o3Jn68wE4hL4UcfEBo_jVBvKxUfu1bIGU,6185
  unisi/tables.py,sha256=tszF62VToSchILzPhJgA4U02MFjv44LopXgD5mYg7fg,13822
  unisi/units.py,sha256=SCUZAOV0nu9khg6JE0lWwsKjiCVz29hiUCRXyZJffeA,11111
- unisi/users.py,sha256=h4kjPAo8LkUG9mKSDthLoDC-XVFLlPxjUvXcJdXT47g,16145
+ unisi/users.py,sha256=JeIori4XsW1blkasLwqZeK8XloX7UjDV_0aHE7WNWjo,16169
  unisi/utils.py,sha256=yNhDKCTjHL1H2Suk9DRQkXAZKYy6nqub-dNSdwPwl9I,2625
  unisi/voicecom.py,sha256=QzS1gIrBeGLO5dEwiu7KIEdJIIVbPBZFGb5nY632Ws8,16707
  unisi/web/css/885.703d8f36.css,sha256=9O3mFR661UJ_WySZjYt69TbPXhKwz9yEPE7seHR_3aY,3264
@@ -46,4 +46,4 @@ unisi/web/js/885.d3e9dd2b.js,sha256=7A39S4SDApVc4iHHABjOd5julybSa4UwaH4kj8vSn0E,
  unisi/web/js/935.cc0c012c.js,sha256=FzVIRBr4vyQgW38ROCoh929gtzuXqM73Cf77vejfDWk,6561
  unisi/web/js/app.3d5227f7.js,sha256=lJkD2OPQOYlxivZmNY8FYKI1JMQ_bh1Pm4zC7y8Ayt0,6150
  unisi/web/js/vendor.1bb14e9d.js,sha256=7q80jaZcms7UhWSqHAk2pXSx67cYQJGlsp-6DBXBZuU,1253597
- unisi-0.3.14.dist-info/RECORD,,
+ unisi-0.3.15.dist-info/RECORD,,