unisi 0.1.16__py3-none-any.whl → 0.1.18__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
unisi/__init__.py CHANGED
@@ -1,9 +1,9 @@
  from .utils import *
- from .guielements import *
+ from .units import *
  from .users import User, handle, context_user, context_screen
  from .server import start
  from .tables import *
  from .containers import *
  from .proxy import *
- from .dbelements import *
+ from .dbunits import *
  from .kdb import Database, Dbtable
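The only change here is the module rename: `guielements` becomes `units` and `dbelements` becomes `dbunits`. A minimal sketch of what that means for downstream imports, assuming the package root keeps re-exporting the same public names (the module paths come from the hunk above; the rest is illustrative):

```python
# Hypothetical downstream code, shown only to illustrate the rename.
# Pre-0.1.18 module names (removed lines above):
#   from unisi.guielements import *
#   from unisi.dbelements import *
# 0.1.18 module names (added lines above):
from unisi.units import *            # renamed from guielements
from unisi.dbunits import dbupdates  # renamed from dbelements

# `from unisi import *` keeps working either way, since __init__.py re-exports both modules.
```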
unisi/autotest.py CHANGED
@@ -1,6 +1,6 @@
- import config, os, logging, json, asyncio
+ import config, os, json, asyncio
  from .utils import *
- from .guielements import *
+ from .units import *
  from .containers import Block, Dialog
  from .users import User
  from .common import *
@@ -135,8 +135,8 @@ def check_block(block, hash_elements):
  errors.append(f'The block {block.name} contains already used "{child.name}" in block "{hash_elements[hash_element]}"!')
  else:
  hash_elements[hash_element] = block.name
- if not isinstance(child, Gui) or not child:
- errors.append(f'The block {block.name} contains invalid element {child} instead of Gui+ object!')
+ if not isinstance(child, Unit) or not child:
+ errors.append(f'The block {block.name} contains invalid element {child} instead of Unit+ object!')
  elif isinstance(child, Block):
  errors.append(f'The block {block.name} contains block {child.name}. Blocks cannot contain blocks!')
  elif child.name in child_names and child.type != 'line':
unisi/common.py CHANGED
@@ -39,7 +39,7 @@ class ArgObject:
 
  class ReceivedMessage(ArgObject):
  def __init__(self, kwargs):
- super().__init__(**kwargs)
+ self.__dict__.update(kwargs)
  def __str__(self):
  return f'{self.block}/{self.element}->{self.event}({self.value})'
 
@@ -89,7 +89,7 @@ def get_default_args(func):
  defaults[name] = param.default
  return defaults
 
- references = ArgObject(context_user = None)
+ Unishare = ArgObject(context_user = None, sessions = {})
 
  class Message:
  def __init__(self, *gui_objects, user = None, type = 'update'):
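Two related changes in common.py: `ReceivedMessage` now fills `__dict__` directly rather than delegating to `ArgObject.__init__`, and the module-level `references` object becomes `Unishare`, which also carries a `sessions` registry. A stand-in sketch of the resulting behavior (`ArgObject` is re-declared here only so the snippet runs on its own):

```python
# Stand-in mirroring the pattern in unisi/common.py; not the library itself.
class ArgObject:
    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)

class ReceivedMessage(ArgObject):
    def __init__(self, kwargs):           # receives an already-built dict
        self.__dict__.update(kwargs)      # new version: no ** unpacking, no super() call

    def __str__(self):
        return f'{self.block}/{self.element}->{self.event}({self.value})'

msg = ReceivedMessage(dict(block='form', element='name', event='changed', value='Alice'))
print(msg)                                # form/name->changed(Alice)

# Shared state holder, renamed from `references` to `Unishare`
# and extended with a session registry.
Unishare = ArgObject(context_user=None, sessions={})
```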
unisi/containers.py CHANGED
@@ -1,5 +1,5 @@
- from .guielements import *
- from .common import pretty4, flatten, ArgObject
+ from .units import *
+ from .common import pretty4, flatten
  from numbers import Number
 
  class ContentScaler(Range):
@@ -21,7 +21,7 @@ class ContentScaler(Range):
  element.height /= prev
  return elements
 
- class Block(Gui):
+ class Block(Unit):
  def __init__(self, name, *elems, **options):
  self.name = name
  self.type = 'block'
@@ -45,14 +45,13 @@ class Block(Gui):
  elif isinstance(elem.llm, list | tuple):
  dependencies = elem.llm
  exactly = True
- elif isinstance(elem.llm, Gui):
+ elif isinstance(elem.llm, Unit):
  dependencies = [elem.llm]
  exactly = True
  elif isinstance(elem.llm, dict):
  if elem.type != 'table':
- raise AttributeError(f'{elem.name} llm parameter is a dictionary only for tables, not for {elem.type}!')
-
- elem.__llm_dependencies__ = {fld: (deps if isinstance(deps, list) else [deps]) for fld, deps in elem.llm.items()}
+ raise AttributeError(f'{elem.name} llm parameter is a dictionary only for tables, not for {elem.type}!')
+ elem.__llm_dependencies__ = {fld: (deps if isinstance(deps, list | bool) else [deps]) for fld, deps in elem.llm.items()}
  elem.llm = True
  continue
  else:
@@ -67,8 +66,8 @@ class Block(Gui):
  print(f'Empty dependency list for llm calculation for {elem.name} {elem.type}!')
 
  @property
- def compact_view(self):
- return [obj for obj in flatten(self.value) if obj.value is not None]
+ def compact_view(self) -> str:
+ return ','.join(obj.compact_view for obj in flatten(self.value) if obj.value)
 
  @property
  def scroll_list(self):
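`Block.compact_view` now returns a single comma-separated string built from its children's `compact_view` values instead of a list of the child objects. A toy sketch of that aggregation (stand-in classes with a flat child list; the `flatten` over nested element lists is omitted):

```python
# Toy stand-ins that mirror the new Block.compact_view aggregation.
class ToyUnit:
    def __init__(self, name, value):
        self.name, self.value = name, value

    @property
    def compact_view(self) -> str:
        return f'{self.name}: {self.value}'   # assumed shape of a unit's summary

class ToyBlock:
    def __init__(self, *units):
        self.value = list(units)

    @property
    def compact_view(self) -> str:
        # join only children with a truthy value, as the new property does
        return ','.join(u.compact_view for u in self.value if u.value)

block = ToyBlock(ToyUnit('name', 'Alice'), ToyUnit('note', ''), ToyUnit('age', 33))
print(block.compact_view)   # name: Alice,age: 33 -- empty values are skipped
```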
unisi/dbelements.py → unisi/dbunits.py RENAMED
@@ -1,3 +1,16 @@
+ from .common import Unishare
+ from collections import defaultdict
+
+ #storage id -> screen name -> [elem name, block name]
+ dbshare = defaultdict(lambda: defaultdict(lambda: []))
+ # (db id, exclude user from updating) -> update
+ dbupdates = defaultdict(lambda: [])
+
+ def iterate(iter, times):
+ for i, val in enumerate(iter):
+ if i == times:
+ return val
+
  class Dblist:
  def __init__(self, dbtable, init_list = None, cache = None):
  self.cache = cache
@@ -8,22 +21,17 @@ class Dblist:
  raise AttributeError('init_list or cache has to be assigned!')
 
  self.delta_list = {0 : init_list}
- self.dbtable = dbtable
- self.update = dict(type ='init', length = len(self),
- limit = self.limit, data = init_list)
+ self.dbtable = dbtable
 
  def get_delta_0(self):
- return self.delta_list[0]
+ return self.delta_list[0] if self.cache is None else self.cache[:self.limit]
 
- def __getattribute__(self, name):
- if name == '__dict__':
- return object.__getattribute__(self, 'update')
- return object.__getattribute__(self, name)
+ def __getstate__(self):
+ return dict(length = len(self), limit = self.limit, data = self.get_delta_0())
 
  def __getattr__(self, name):
  return self.dbtable.limit if name == 'limit' else None
 
- """ The methods causes invalid serialization in Python and not used!
  def __iter__(self):
  "Override the default iterator to provide custom behavior."
  self._index = 0
@@ -36,12 +44,12 @@ class Dblist:
  return value
  else:
  raise StopIteration
- """
-
+
  def __str__(self):
- return f'\ndeltas: {self.delta_list}\nupdate: {self.update}'
+ return str(self.__getstate__())
 
- def get_delta_chunk(self, index):
+ def get_delta_chunk(self, index) -> tuple[int, list]:
+ """return delta list and chunk of data"""
  if index >= len(self):
  return -1, None
  delta_list = index // self.limit * self.limit
@@ -56,22 +64,26 @@ class Dblist:
  self.delta_list[delta_list] = lst
  return delta_list, lst
 
- def __getitem__(self, index):
+ def __getitem__(self, index) -> list:
+ """return row from delta list or cache"""
  if self.cache is not None:
  return self.cache[index]
  delta_list, chunk = self.get_delta_chunk(index)
  if chunk:
  return chunk[index - delta_list]
 
- def __setitem__(self, index, value):
+ def __setitem__(self, index, value: list):
+ """update row in delta list or cache"""
  if self.cache is not None:
  self.cache[index] = value
  else:
  delta_list, chunk = self.get_delta_chunk(index)
  if chunk:
- chunk[index - delta_list] = value
- self.update = dict(type = 'update', index = index, data = value)
+ chunk[index - delta_list] = value
  self.dbtable.assign_row(value)
+ update = dict(update = 'update', index = index, data = value)
+ dbupdates[self.dbtable.id].append(update)
+ return update
 
  def clean_cache_from(self, delta_list):
  """clear dirty delta_list cache"""
@@ -81,40 +93,64 @@ class Dblist:
  delta_list, chunk = self.get_delta_chunk(index)
  if chunk:
  self.dbtable.delete_row(index)
- self.update = dict(type ='delete', index = index)
+ update = dict(update ='delete', index = index, exclude = True)
+ dbupdates[self.dbtable.id].append(update)
  del chunk[index - delta_list]
  limit = self.dbtable.limit
  next_delta_list = delta_list + limit
  if len(chunk) == limit - 1: #chunk was fully filled
  next_list = self.delta_list.get(next_delta_list)
  if next_list:
- chunk.append(next_list[0])
- else:
- delta_list, chunk = self.get_delta_chunk(delta_list)
- self.update = dict(type = 'update', index = delta_list, data = chunk)
- self.clean_cache_from(next_delta_list)
+ chunk.append(next_list[0])
+ self.clean_cache_from(next_delta_list)
+ return update
 
  def __len__(self):
  return len(self.cache) if self.cache is not None else self.dbtable.length
+
+ def index2node_relation(self, cell_index):
+ """calculate delta to property of node or link for persistent"""
+ table_fields = self.dbtable.table_fields
+ delta = cell_index - len(table_fields)
+ if delta < 0:
+ return True, iterate(table_fields, cell_index)
+ delta -= 1 #ID field
+ return False, iterate(self.link, delta)
+
+ def update_cell(self, delta, cell, value, id = None) -> dict:
+ in_node, field = self.index2node_relation(cell)
+ if in_node:
+ table_id = self.dbtable.id
+ row_id = self[delta][len(self.dbtable.table_fields)]
+ else:
+ table_id = self.link[2]
+ row_id = id
+ self.dbtable.db.update_row(table_id, row_id, {field: value}, in_node)
+ self[delta][cell] = value
+ update = dict(update = 'update', index = delta, data = self[delta])
+ dbupdates[self.dbtable.id].append(update)
+ return update
 
  def append(self, value):
  if self.cache is not None:
  self.cache.append(value)
  return value[-1]
  index = len(self)
- id = self.dbtable.append_row(value)
- delta_list = index // self.limit * self.limit
- list = self.delta_list.get(delta_list)
+ row = self.dbtable.append_row(value)
+ list = self.get_delta_chunk(index)
  if list:
- list.append(value)
- self.update = dict(type = 'add', index = index, data = value)
- return id
+ list.append(row)
+ update = dict(update = 'add', index = index, data = row)
+ dbupdates[self.dbtable.id].append(update)
+ return update
 
- def extend(self, rows):
- start = self.dbtable.length
+ def extend(self, rows) -> dict:
+ delta_start = self.dbtable.length
+ start = delta_start
  rows = self.dbtable.append_rows(rows)
  len_rows = len(rows)
  i_rows = 0
+ length = len_rows + start
  while len_rows > 0:
  delta_list = start // self.limit * self.limit
  list = self.delta_list.get(delta_list)
@@ -129,9 +165,12 @@ class Dblist:
 
  i_rows += can_fill
  start += can_fill
- len_rows -= can_fill
- self.update = self.dbtable.get_init_list().update
-
+ len_rows -= can_fill
+ delta, data = self.get_delta_chunk(delta_start)
+ update = dict(update = 'updates', index = delta, data = data, length = length)
+ dbupdates[self.dbtable.id].append(update)
+ return update
+
  def insert(self, index, value):
  self.append(value)
 
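Two patterns recur throughout the new `Dblist`: chunks are keyed by `index // limit * limit`, and every mutation appends an update record to the module-level `dbupdates` queue (per table id) instead of storing it on the list, so the server can flush it to other sessions later. A self-contained sketch of that arithmetic and queueing flow (the table id and row are made up; this is not the library class):

```python
from collections import defaultdict

LIMIT = 100                          # rows per chunk, mirrors Dbtable.limit
dbupdates = defaultdict(list)        # table id -> pending update dicts, as in dbunits.py

def chunk_start(index: int, limit: int = LIMIT) -> int:
    """Start row of the chunk that holds `index` (the delta_list key)."""
    return index // limit * limit

assert chunk_start(0) == 0
assert chunk_start(99) == 0          # still inside the first chunk
assert chunk_start(100) == 100       # second chunk begins here
assert chunk_start(257) == 200       # row 257 lives in the chunk starting at 200

def set_row(table_id: str, index: int, row: list) -> dict:
    """__setitem__-style mutation: record what changed for later sync."""
    update = dict(update='update', index=index, data=row)
    dbupdates[table_id].append(update)
    return update

set_row('Person', 257, ['Alice', 33, 257])
print(dbupdates['Person'])           # [{'update': 'update', 'index': 257, 'data': [...]}]
```

The server side (see the `websocket_handler` change below) checks `dbupdates` after handling each message and calls `user.sync_dbupdates()` to push the queued records out.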
unisi/kdb.py CHANGED
@@ -1,9 +1,12 @@
- import kuzu, shutil, os, re, time
+ import kuzu, shutil, os, re
  from datetime import date, datetime
  from cymple import QueryBuilder as qb
  from cymple.typedefs import Properties
- from .common import get_default_args, equal_dicts
- from .dbelements import Dblist
+ from .common import get_default_args
+ from .dbunits import Dblist
+
+ def equal_fields_dicts(dict1, dict2):
+ return dict1.keys() == dict2.keys() and all(dict1[key].lower() == dict2[key].lower() for key in dict1)
 
  def is_modifying_query(cypher_query):
  query = cypher_query.lower()
@@ -46,6 +49,7 @@ def dict_to_cypher_set(properties, alias = 'a'):
  return "SET " + ", ".join(set_items)
 
  class Database:
+ tables = {} #id -> Dbtable
  def __init__(self, dbpath, message_logger = print) -> None:
  self.db = kuzu.Database(dbpath)
  self.conn = kuzu.Connection(self.db)
@@ -109,11 +113,11 @@ class Database:
  fields = {headers[i]: type for i, type in enumerate(types)}
 
  if (table_fields := self.get_table_fields(id)) is not None:
- if not equal_dicts(table_fields, fields):
+ if not equal_fields_dicts(table_fields, fields):
  if self.delete_table(id):
  self.message_logger(f'Node table {id} was deleted because of fields contradiction!', 'warning')
  else:
- return Dbtable(id, self, limit, table_fields)
+ return self.tables.get(id) or Dbtable(id, self, limit, table_fields)
 
  return self.create_table(id, fields, limit, rows)
 
@@ -123,9 +127,7 @@ class Database:
  def set_db_list(self, gui_table):
  table = self.get_table(**self.get_table_params(gui_table.__dict__))
  tlst = table.list
- gui_table.rows = tlst
- if tlst.update['type'] != 'init':
- tlst.update = dict(type ='init', length = table.length, limit = table.limit, data = tlst.get_delta_0())
+ gui_table.rows = tlst
 
  def create_table(self, id, fields : dict, limit = 100, rows = None):
  specs = ','.join(f'{prop} {type}' for prop, type in fields.items())
@@ -159,6 +161,7 @@ class Database:
  class Dbtable:
  def __init__(self, id, db, limit = 100, table_fields = None) -> None:
  self.db = db
+ db.tables[id] = self
  self.id = id
  self.table_fields = table_fields
  self.limit = limit
@@ -191,7 +194,7 @@ class Dbtable:
  rel_table_fields = self.db.get_table_fields(relname)
  if isinstance(rel_table_fields, dict):
  if isinstance(fields, dict):
- if equal_dicts(rel_table_fields, fields):
+ if equal_fields_dicts(rel_table_fields, fields):
  return relname, rel_table_fields
  else:
  self.db.delete_table(relname)
@@ -247,9 +250,8 @@ class Dbtable:
  condition = f'r.ID in {link_ids}'
  else:
  if not isinstance(source_ids, list):
- source_ids = list(source_ids)
- condition = f'a.ID in {source_ids}'
- condition = f'({condition}) AND b.ID = {link_node_id}'
+ source_ids = list(source_ids)
+ condition = f'(a.ID in {source_ids}) AND b.ID = {link_node_id}'
  query = f"""
  MATCH (a:{self.id})-[r:{index_name}]->(b:{link_table_id})
  WHERE {condition}
@@ -261,19 +263,12 @@ class Dbtable:
  list = self.read_rows(limit = self.limit)
  length = len(list)
  #possibly the table has more rows
- if length == self.limit:
- #qresult = self.db.execute()
+ if length == self.limit:
  ql = self.db.qlist(f"MATCH (n:{self.id}) RETURN count(n)")
  self.length = ql[0][0]
  else:
  self.length = length
  self.list = Dblist(self, list)
-
- def get_init_list(self, search_string = None):
- lst = self.list
- lst.update = dict(type ='init', length = self.length,
- limit = self.limit, data = self.list.get_delta_0())
- return lst
 
  def read_rows(self, skip = 0, limit = 0):
  query = qb().match().node(self.id, 'a').return_literal('a.*').order_by('a.ID')
@@ -283,7 +278,8 @@ class Dbtable:
  return self.db.qlist(query)
 
  def assign_row(self, row_array):
- return self.db.update_row(self.id, row_array[-1], {name : value for name, value in zip(self.node_columns, row_array)})
+ return self.db.update_row(self.id, row_array[-1],
+ {name : value for name, value in zip(self.node_columns, row_array)})
 
  def delete_row(self, id):
  query = query_offset(self.id, id)
@@ -304,12 +300,10 @@ class Dbtable:
  if isinstance(row, list):
  props = {name: value for name, value in zip(self.node_columns, row) if value is not None}
 
- answer = self.db.execute(qb().create().node(self.id, 'a', props).return_literal('a.ID'))
-
+ answer = self.db.execute(qb().create().node(self.id, 'a', props).return_literal('a.*'))
  if answer and answer.has_next():
  self.length += 1
- return answer.get_next()[-1]
- return None
+ return answer.get_next()[-1]
 
  def append_rows(self, rows):
  """row can be list or dict"""
unisi/llmrag.py CHANGED
@@ -1,12 +1,18 @@
- from .common import references
+ from .common import Unishare
  from langchain_groq import ChatGroq
  from langchain_openai import ChatOpenAI
+ from langchain_google_genai import (
+ ChatGoogleGenerativeAI,
+ HarmBlockThreshold,
+ HarmCategory,
+ )
 
  def setup_llmrag():
  import config #the module is loaded before config.py
- if hasattr(config, 'llm'):
+ temperature = getattr(config, 'temperature', 0.0)
+ if config.llm:
  match config.llm:
- case ['local', address]:
+ case ['host', address]:
  model = None
  type = 'openai' #provider type is openai for local llms
  case [type, model, address]: ...
@@ -16,25 +22,39 @@ def setup_llmrag():
  return
 
  type = type.lower()
- if type == 'openai':
- references.llm_model = ChatOpenAI(
- api_key = 'llm-studio',
- temperature = 0.0,
- openai_api_base = address
- ) if address else ChatOpenAI(temperature=0.0)
+ match type:
+ case 'host':
+ Unishare.llm_model = ChatOpenAI(
+ api_key = 'llm-studio',
+ temperature = temperature,
+ openai_api_base = address
+ )
+ case 'openai':
+ Unishare.llm_model = ChatOpenAI(temperature=0.0)
 
- elif type == 'groq':
- references.llm_model = ChatGroq(
- model = model,
- temperature = 0.0,
- max_tokens = None,
- timeout = None,
- max_retries = 2,
- )
+ case 'groq':
+ Unishare.llm_model = ChatGroq(
+ model = model,
+ temperature = temperature,
+ max_tokens = None,
+ timeout = None,
+ max_retries = 2,
+ )
+ case 'google' | 'gemini':
+ Unishare.llm_model = ChatGoogleGenerativeAI(
+ model = model,
+ temperature = temperature,
+ max_tokens = None,
+ timeout = None,
+ max_retries = 2,
+ safety_settings = {
+ HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_NONE
+ }
+ )
 
  numeric_types = ['number', 'int', 'float', 'double']
 
- async def get_property(name, json_context = '', type = 'string', options = None, attempts = 1, messages = None):
+ async def get_property(name, context = '', type = 'string', options = None, attempts = 1, messages = None):
  if messages is None:
  limits = f'type is {type}'
  if type == 'date':
@@ -46,10 +66,10 @@ async def get_property(name, json_context = '', type = 'string', options = None,
  "system",
  f"""You are an intelligent and extremely concise assistant."""
  ),
- ("human", f"""{json_context} Reason and infer the "{name}" value, which {limits}.
- Do not include any additional text or commentary in your answer, just exact property value.""")
+ ("human", f"""{context} . Reason and infer the "{name}" value, which {limits}.
+ Do not include any additional text or commentary in your answer, just exact the property value.""")
  ]
- ai_msg = await references.llm_model.ainvoke(messages)
+ ai_msg = await Unishare.llm_model.ainvoke(messages)
  value = ai_msg.content
  log_error = ''
  if type in numeric_types:
@@ -64,12 +84,10 @@ async def get_property(name, json_context = '', type = 'string', options = None,
  if not log_error and options and value not in options:
  attempts -= 1
  if attempts > 0:
- value = get_property(name, json_context, type, options, attempts, messages)
+ value = get_property(name, context, type, options, attempts, messages)
  else:
  log_error = f'Invalid value {value} from llm-rag for {messages[1][1]}'
 
  if log_error:
- references.message_logger(log_error)
- return value
-
-
+ Unishare.message_logger(log_error)
+ return value
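`setup_llmrag` now reads `config.llm` unconditionally, accepts either a two-element `['host', address]` form or a three-element `[type, model, address]` form, and picks up an optional `config.temperature`. A hypothetical `config.py` fragment consistent with those match patterns (model names and the URL are placeholders, not values shipped with the package):

```python
# config.py -- illustrative sketch only.

# Local OpenAI-compatible server (e.g. LM Studio): matches case ['host', address].
llm = ['host', 'http://localhost:1234/v1']

# Hosted providers: match case [type, model, address] (address unused for these).
# llm = ['groq', 'llama3-70b-8192', None]
# llm = ['google', 'gemini-1.5-flash', None]

# Optional; setup_llmrag falls back to 0.0 when this is absent.
temperature = 0.0
```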
unisi/server.py CHANGED
@@ -5,6 +5,7 @@ from .reloader import empty_app
  from .autotest import recorder, run_tests
  from .common import *
  from.llmrag import setup_llmrag
+ from .dbunits import dbupdates
  from config import port, upload_dir
  import traceback, json
 
@@ -79,7 +80,9 @@ async def websocket_handler(request):
  if message:
  if recorder.record_file:
  recorder.accept(message, user.prepare_result (result))
- await user.reflect(message, result)
+ await user.reflect(message, result)
+ if dbupdates:
+ await user.sync_dbupdates()
  elif msg.type == WSMsgType.ERROR:
  user.log('ws connection closed with exception %s' % ws.exception())
  except BaseException as e: