unisi 0.1.17__py3-none-any.whl → 0.1.19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- unisi/autotest.py +1 -1
- unisi/containers.py +3 -4
- unisi/dbunits.py +69 -56
- unisi/kdb.py +21 -21
- unisi/llmrag.py +39 -19
- unisi/server.py +2 -2
- unisi/tables.py +32 -56
- unisi/units.py +5 -5
- unisi/users.py +20 -2
- unisi/web/css/{508.880242b5.css → 126.04ffe490.css} +1 -1
- unisi/web/index.html +1 -1
- unisi/web/js/126.9d066231.js +1 -0
- unisi/web/js/{app.44d431b1.js → app.db04f9eb.js} +1 -1
- {unisi-0.1.17.dist-info → unisi-0.1.19.dist-info}/METADATA +26 -35
- {unisi-0.1.17.dist-info → unisi-0.1.19.dist-info}/RECORD +17 -17
- unisi/web/js/508.4af55eb8.js +0 -2
- {unisi-0.1.17.dist-info → unisi-0.1.19.dist-info}/WHEEL +0 -0
- {unisi-0.1.17.dist-info → unisi-0.1.19.dist-info}/licenses/LICENSE +0 -0
unisi/autotest.py
CHANGED
unisi/containers.py
CHANGED
@@ -50,8 +50,7 @@ class Block(Unit):
                 exactly = True
             elif isinstance(elem.llm, dict):
                 if elem.type != 'table':
-                    raise AttributeError(f'{elem.name} llm parameter is a dictionary only for tables, not for {elem.type}!')
-
+                    raise AttributeError(f'{elem.name} llm parameter is a dictionary only for tables, not for {elem.type}!')
                 elem.__llm_dependencies__ = {fld: (deps if isinstance(deps, list | bool) else [deps]) for fld, deps in elem.llm.items()}
                 elem.llm = True
                 continue
@@ -67,8 +66,8 @@ class Block(Unit):
                 print(f'Empty dependency list for llm calculation for {elem.name} {elem.type}!')
 
     @property
-    def compact_view(self):
-        return
+    def compact_view(self) -> str:
+        return ','.join(obj.compact_view for obj in flatten(self.value) if obj.value)
 
     @property
     def scroll_list(self):
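Note on the Block.compact_view change above: instead of returning nothing, a block now joins the compact views of its non-empty child units. A minimal sketch of that behavior with stand-in classes (FakeUnit and the flatten helper here are illustrative, not the real unisi types):

    # illustrative sketch of the new Block.compact_view behavior
    def flatten(items):
        for item in items:
            if isinstance(item, (list, tuple)):
                yield from flatten(item)
            else:
                yield item

    class FakeUnit:                      # stand-in for a unisi unit
        def __init__(self, name, value):
            self.name, self.value = name, value
        @property
        def compact_view(self):
            return f'{self.name}: {self.value}'

    units = [[FakeUnit('City', 'Paris'), FakeUnit('Notes', '')], FakeUnit('Year', 2024)]
    print(','.join(u.compact_view for u in flatten(units) if u.value))
    # -> City: Paris,Year: 2024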
unisi/dbunits.py
CHANGED
@@ -1,25 +1,15 @@
 from .common import Unishare
-import asyncio
 from collections import defaultdict
 
 #storage id -> screen name -> [elem name, block name]
 dbshare = defaultdict(lambda: defaultdict(lambda: []))
-#db id -> update
+# db id -> [update]
 dbupdates = defaultdict(lambda: [])
 
-
-
-
-
-    screen2el_bl = dbshare[id]
-    for user in Unishare.sessions.values():
-        scr_name = user.screen.name
-        if scr_name in screen2el_bl:
-            for elem_block in screen2el_bl[scr_name]: #optim--
-                update4user = {**update, **elem_block.__dict__}
-                sync_calls.append(user.send(update4user))
-    dbupdates.clear()
-    await asyncio.gather(*sync_calls)
+def at_iter(iter, times):
+    for i, val in enumerate(iter):
+        if i == times:
+            return val
 
 class Dblist:
     def __init__(self, dbtable, init_list = None, cache = None):
@@ -31,22 +21,17 @@ class Dblist:
             raise AttributeError('init_list or cache has to be assigned!')
 
         self.delta_list = {0 : init_list}
-        self.dbtable = dbtable
-        self.update = dict(type ='init', length = len(self),
-            limit = self.limit, data = init_list)
+        self.dbtable = dbtable
 
     def get_delta_0(self):
-        return self.delta_list[0]
+        return self.delta_list[0] if self.cache is None else self.cache[:self.limit]
 
-    def
-
-        return object.__getattribute__(self, 'update')
-        return object.__getattribute__(self, name)
+    def __getstate__(self):
+        return dict(length = len(self), limit = self.limit, data = self.get_delta_0())
 
     def __getattr__(self, name):
         return self.dbtable.limit if name == 'limit' else None
 
-    """ The methods causes invalid serialization in Python and not used!
     def __iter__(self):
         "Override the default iterator to provide custom behavior."
         self._index = 0
@@ -59,12 +44,12 @@ class Dblist:
             return value
         else:
             raise StopIteration
-
-
+
     def __str__(self):
-        return
+        return str(self.__getstate__())
 
-    def get_delta_chunk(self, index):
+    def get_delta_chunk(self, index) -> tuple[int, list]:
+        """return delta list and chunk of data"""
         if index >= len(self):
             return -1, None
         delta_list = index // self.limit * self.limit
@@ -79,22 +64,25 @@ class Dblist:
         self.delta_list[delta_list] = lst
         return delta_list, lst
 
-    def __getitem__(self, index):
+    def __getitem__(self, index) -> list:
+        """return row from delta list or cache"""
         if self.cache is not None:
             return self.cache[index]
         delta_list, chunk = self.get_delta_chunk(index)
         if chunk:
             return chunk[index - delta_list]
 
-    def __setitem__(self, index, value):
+    def __setitem__(self, index, value: list):
+        """update row in delta list or cache"""
         if self.cache is not None:
             self.cache[index] = value
         else:
             delta_list, chunk = self.get_delta_chunk(index)
             if chunk:
-                chunk[index - delta_list] = value
-            self.update = dict(type = 'update', index = index, data = value)
+                chunk[index - delta_list] = value
             self.dbtable.assign_row(value)
+            update = dict(update = 'update', index = index, data = value)
+            dbupdates[self.dbtable.id].append(update)
 
     def clean_cache_from(self, delta_list):
         """clear dirty delta_list cache"""
@@ -104,38 +92,61 @@ class Dblist:
         delta_list, chunk = self.get_delta_chunk(index)
         if chunk:
             self.dbtable.delete_row(index)
-
+            update = dict(update ='delete', index = index, exclude = True)
+            dbupdates[self.dbtable.id].append(update)
             del chunk[index - delta_list]
             limit = self.dbtable.limit
             next_delta_list = delta_list + limit
             if len(chunk) == limit - 1: #chunk was fully filled
                 next_list = self.delta_list.get(next_delta_list)
                 if next_list:
-                    chunk.append(next_list[0])
-
-                delta_list, chunk = self.get_delta_chunk(delta_list)
-                self.update = dict(type = 'updates', index = delta_list, data = chunk)
-                self.clean_cache_from(next_delta_list)
+                    chunk.append(next_list[0])
+                self.clean_cache_from(next_delta_list)
 
     def __len__(self):
         return len(self.cache) if self.cache is not None else self.dbtable.length
+
+    def index2node_relation(self, cell_index):
+        """calculate delta to property of node or link for persistent"""
+        table_fields = self.dbtable.table_fields
+        delta = cell_index - len(table_fields)
+        if delta < 0:
+            return True, at_iter(table_fields, cell_index)
+        delta -= 1 #ID field
+        return False, at_iter(self.dbtable.list.link[1], delta)
+
+    def update_cell(self, delta, cell, value, id = None) -> dict:
+        in_node, field = self.index2node_relation(cell)
+        if in_node:
+            table_id = self.dbtable.id
+            row_id = self[delta][len(self.dbtable.table_fields)]
+        else:
+            table_id = self.dbtable.list.link[2]
+            row_id = id
+        self.dbtable.db.update_row(table_id, row_id, {field: value}, in_node)
+        self[delta][cell] = value
+        if self.cache is None:
+            update = dict(update = 'update', index = delta, data = self[delta])
+            dbupdates[self.dbtable.id].append(update)
+            return update
 
-    def append(self,
+    def append(self, arr):
+        """append row to list"""
         if self.cache is not None:
-            self.cache.append(
-            return
+            self.cache.append(arr)
+            return arr
         index = len(self)
-
-
-        list = self.delta_list.get(delta_list)
+        row = self.dbtable.append_row(arr)
+        delta_chunk,list = self.get_delta_chunk(index)
         if list:
-            list.append(
-
-
+            list.append(row)
+            update = dict(update = 'add', index = index, data = row)
+            dbupdates[self.dbtable.id].append(update)
+            return row
 
-    def extend(self, rows):
-
-
+    def extend(self, rows) -> dict:
+        delta_start = self.dbtable.length
+        start = delta_start
         rows = self.dbtable.append_rows(rows)
         len_rows = len(rows)
         i_rows = 0
@@ -155,10 +166,10 @@ class Dblist:
             i_rows += can_fill
             start += can_fill
             len_rows -= can_fill
-        delta, data = self.get_delta_chunk(delta_start)
-
-
-
+        delta, data = self.get_delta_chunk(delta_start)
+        update = dict(update = 'updates', index = delta, data = data, length = length)
+        dbupdates[self.dbtable.id].append(update)
+
 
     def insert(self, index, value):
         self.append(value)
 
@@ -171,5 +182,7 @@ class Dblist:
         del self[index]
         return value
 
-    def clear(self):
-        self.dbtable.clear()
+    def clear(self, detach = False):
+        self.dbtable.clear(detach)
+        self.delta_list = {0: None}
+        dbupdates[self.dbtable.id].append(dict(update = 'updates', length = 0))
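The recurring pattern in the dbunits.py changes is that Dblist no longer keeps a per-list `update` attribute; every mutating method appends an update dict to the module-level `dbupdates` registry keyed by the owning table id, and the registry is drained after the message is handled. A rough sketch of that accumulation, with a stand-in table object (the real one is kdb.Dbtable):

    from collections import defaultdict

    dbupdates = defaultdict(list)        # db id -> [update], as in the new module

    class FakeDbtable:                   # stand-in; real tables come from unisi.kdb
        id = 'Persons'

    table = FakeDbtable()
    # a cell edit and a row deletion each queue an update under the table id
    dbupdates[table.id].append(dict(update = 'update', index = 3, data = ['Ann', 30, 3]))
    dbupdates[table.id].append(dict(update = 'delete', index = 5, exclude = True))

    for table_id, updates in dbupdates.items():
        print(table_id, updates)         # later flushed to the sessions showing this table
    dbupdates.clear()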
unisi/kdb.py
CHANGED
@@ -49,6 +49,7 @@ def dict_to_cypher_set(properties, alias = 'a'):
     return "SET " + ", ".join(set_items)
 
 class Database:
+    tables = {} #id -> Dbtable
     def __init__(self, dbpath, message_logger = print) -> None:
         self.db = kuzu.Database(dbpath)
         self.conn = kuzu.Connection(self.db)
@@ -116,7 +117,7 @@ class Database:
             if self.delete_table(id):
                 self.message_logger(f'Node table {id} was deleted because of fields contradiction!', 'warning')
             else:
-                return Dbtable(id, self, limit, table_fields)
+                return self.tables.get(id) or Dbtable(id, self, limit, table_fields)
 
         return self.create_table(id, fields, limit, rows)
 
@@ -126,9 +127,7 @@ class Database:
     def set_db_list(self, gui_table):
         table = self.get_table(**self.get_table_params(gui_table.__dict__))
         tlst = table.list
-        gui_table.rows = tlst
-        if tlst.update['type'] != 'init':
-            tlst.update = dict(type ='init', length = table.length, limit = table.limit, data = tlst.get_delta_0())
+        gui_table.rows = tlst
 
     def create_table(self, id, fields : dict, limit = 100, rows = None):
         specs = ','.join(f'{prop} {type}' for prop, type in fields.items())
@@ -162,6 +161,7 @@ class Database:
 class Dbtable:
     def __init__(self, id, db, limit = 100, table_fields = None) -> None:
         self.db = db
+        db.tables[id] = self
         self.id = id
         self.table_fields = table_fields
         self.limit = limit
@@ -250,9 +250,8 @@ class Dbtable:
             condition = f'r.ID in {link_ids}'
         else:
             if not isinstance(source_ids, list):
-                source_ids = list(source_ids)
-            condition = f'a.ID in {source_ids}'
-            condition = f'({condition}) AND b.ID = {link_node_id}'
+                source_ids = list(source_ids)
+            condition = f'(a.ID in {source_ids}) AND b.ID = {link_node_id}'
         query = f"""
         MATCH (a:{self.id})-[r:{index_name}]->(b:{link_table_id})
         WHERE {condition}
@@ -264,19 +263,12 @@ class Dbtable:
         list = self.read_rows(limit = self.limit)
         length = len(list)
         #possibly the table has more rows
-        if length == self.limit:
-            #qresult = self.db.execute()
+        if length == self.limit:
             ql = self.db.qlist(f"MATCH (n:{self.id}) RETURN count(n)")
             self.length = ql[0][0]
         else:
             self.length = length
         self.list = Dblist(self, list)
-
-    def get_init_list(self, search_string = None):
-        lst = self.list
-        lst.update = dict(type ='init', length = self.length,
-            limit = self.limit, data = self.list.get_delta_0())
-        return lst
 
     def read_rows(self, skip = 0, limit = 0):
         query = qb().match().node(self.id, 'a').return_literal('a.*').order_by('a.ID')
@@ -286,7 +278,8 @@ class Dbtable:
         return self.db.qlist(query)
 
     def assign_row(self, row_array):
-        return self.db.update_row(self.id, row_array[-1],
+        return self.db.update_row(self.id, row_array[-1],
+            {name : value for name, value in zip(self.node_columns, row_array)})
 
     def delete_row(self, id):
         query = query_offset(self.id, id)
@@ -302,17 +295,24 @@ class Dbtable:
         """
         return self.db.execute(query)
 
+    def clear(self, detach = False):
+        query = f'MATCH (a:{self.id})'
+        if detach:
+            query += ' DETACH DELETE a'
+        else:
+            query += ' DELETE a'
+        self.length = 0
+        return self.db.execute(query)
+
     def append_row(self, row):
-        """row can be list or dict, returns
+        """row can be list or dict, returns new row"""
         if isinstance(row, list):
             props = {name: value for name, value in zip(self.node_columns, row) if value is not None}
 
-        answer = self.db.execute(qb().create().node(self.id, 'a', props).return_literal('a
-
+        answer = self.db.execute(qb().create().node(self.id, 'a', props).return_literal('a.*'))
         if answer and answer.has_next():
             self.length += 1
-            return answer.get_next()
-        return None
+        return answer.get_next()
 
     def append_rows(self, rows):
         """row can be list or dict"""
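The new Dbtable.clear issues a Cypher DELETE over all nodes of the table, with an optional DETACH DELETE when attached relationships should be removed as well. A small illustration of the query it builds ('Persons' is just an example table id):

    def build_clear_query(table_id: str, detach: bool = False) -> str:
        # mirrors the query construction in Dbtable.clear
        query = f'MATCH (a:{table_id})'
        query += ' DETACH DELETE a' if detach else ' DELETE a'
        return query

    print(build_clear_query('Persons'))        # MATCH (a:Persons) DELETE a
    print(build_clear_query('Persons', True))  # MATCH (a:Persons) DETACH DELETE a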
unisi/llmrag.py
CHANGED
@@ -1,14 +1,20 @@
 from .common import Unishare
 from langchain_groq import ChatGroq
 from langchain_openai import ChatOpenAI
+from langchain_google_genai import (
+    ChatGoogleGenerativeAI,
+    HarmBlockThreshold,
+    HarmCategory,
+)
 
 def setup_llmrag():
     import config #the module is loaded before config.py
+    temperature = getattr(config, 'temperature', 0.0)
     if config.llm:
         match config.llm:
             case ['host', address]:
                 model = None
-                type = '
+                type = 'host' #provider type is openai for local llms
             case [type, model, address]: ...
             case [type, model]: address = None
             case _:
@@ -16,25 +22,39 @@ def setup_llmrag():
                 return
 
         type = type.lower()
-
-
-
-
-
-
+        match type:
+            case 'host':
+                Unishare.llm_model = ChatOpenAI(
+                    api_key = 'llm-studio',
+                    temperature = temperature,
+                    openai_api_base = address
+                )
+            case 'openai':
+                Unishare.llm_model = ChatOpenAI(temperature=0.0)
 
-
-
-
-
-
-
-
-
+            case 'groq':
+                Unishare.llm_model = ChatGroq(
+                    model = model,
+                    temperature = temperature,
+                    max_tokens = None,
+                    timeout = None,
+                    max_retries = 2,
+                )
+            case 'google' | 'gemini':
+                Unishare.llm_model = ChatGoogleGenerativeAI(
+                    model = model,
+                    temperature = temperature,
+                    max_tokens = None,
+                    timeout = None,
+                    max_retries = 2,
+                    safety_settings = {
+                        HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_NONE
+                    }
+                )
 
 numeric_types = ['number', 'int', 'float', 'double']
 
-async def get_property(name,
+async def get_property(name, context = '', type = 'string', options = None, attempts = 1, messages = None):
     if messages is None:
         limits = f'type is {type}'
         if type == 'date':
@@ -44,9 +64,9 @@ async def get_property(name, json_context = '', type = 'string', options = None,
         messages = [
             (
                 "system",
-                f"""You are an intelligent and extremely
+                f"""You are an intelligent and extremely smart assistant."""
             ),
-            ("
+            ("human", f"""{context} . Reason and infer {name}, which {limits}.
             Do not include any additional text or commentary in your answer, just exact the property value.""")
         ]
     ai_msg = await Unishare.llm_model.ainvoke(messages)
@@ -64,7 +84,7 @@ async def get_property(name, json_context = '', type = 'string', options = None,
     if not log_error and options and value not in options:
         attempts -= 1
         if attempts > 0:
-            value = get_property(name,
+            value = get_property(name, context, type, options, attempts, messages)
         else:
             log_error = f'Invalid value {value} from llm-rag for {messages[1][1]}'
 
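setup_llmrag now reads an optional temperature from config and dispatches on the provider given in config.llm, which may be ['host', address] for a local OpenAI-compatible server, [provider, model], or [provider, model, address]. A hedged example of config.py values that would match those patterns (the address and model names are illustrative, not taken from the package):

    # config.py — illustrative values only
    temperature = 0.3                              # optional; setup_llmrag defaults to 0.0

    # any one of these shapes matches setup_llmrag:
    llm = ['host', 'http://localhost:1234/v1']     # local OpenAI-compatible server
    # llm = ['groq', 'llama3-70b-8192']            # provider + model
    # llm = ['openai', 'gpt-4o-mini']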
unisi/server.py
CHANGED
@@ -5,7 +5,7 @@ from .reloader import empty_app
 from .autotest import recorder, run_tests
 from .common import *
 from.llmrag import setup_llmrag
-from .dbunits import dbupdates
+from .dbunits import dbupdates
 from config import port, upload_dir
 import traceback, json
 
@@ -82,7 +82,7 @@ async def websocket_handler(request):
                     recorder.accept(message, user.prepare_result (result))
                     await user.reflect(message, result)
                     if dbupdates:
-                        await sync_dbupdates()
+                        await user.sync_dbupdates()
             elif msg.type == WSMsgType.ERROR:
                 user.log('ws connection closed with exception %s' % ws.exception())
         except BaseException as e:
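The websocket handler now flushes pending updates per user via user.sync_dbupdates() instead of a module-level coroutine. A hypothetical sketch of such a per-user flush, loosely based on the module-level sync code removed from dbunits.py in this release (the real User.sync_dbupdates in unisi/users.py may differ):

    import asyncio
    from unisi.dbunits import dbshare, dbupdates

    async def sync_dbupdates(user):
        """Hypothetical: send queued db updates to the elements on this user's screen."""
        calls = []
        for db_id, updates in dbupdates.items():
            for elem_block in dbshare[db_id].get(user.screen.name, []):
                for update in updates:
                    calls.append(user.send({**update, **elem_block.__dict__}))
        dbupdates.clear()
        await asyncio.gather(*calls)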
unisi/tables.py
CHANGED
@@ -3,6 +3,7 @@ from .common import *
 from .dbunits import Dblist, dbupdates
 from .llmrag import get_property
 import asyncio
+from collections import OrderedDict
 
 relation_mark = 'Ⓡ'
 exclude_mark = '✘'
@@ -10,14 +11,9 @@ max_len_rows4llm = 30
 
 def get_chunk(obj, start_index):
     delta, data = obj.rows.get_delta_chunk(start_index)
-    return {'
+    return {'update': 'updates', 'index': delta, 'data': data}
 
-def
-    for i, val in enumerate(iter):
-        if i == times:
-            return val
-
-def accept_cell_value(table, dval):
+def accept_cell_value(table, dval):
     value = dval['value']
     if not isinstance(value, bool):
         try:
@@ -25,24 +21,20 @@ def accept_cell_value(table, dval):
         except:
             pass
     if hasattr(table,'id'):
-
-
-
-
-
-    else:
-        table_id = table.__link__[2]
-        row_id = dval['id']
-    dbt.db.update_row(table_id, row_id, {field: value}, in_node)
-    table.rows[dval['delta']][dval['cell']] = value
+        dval['value'] = value
+        if update := table.rows.update_cell(**dval):
+            update['exclude'] = True
+    else:
+        table.rows[dval['delta']][dval['cell']] = value
 
 def delete_table_row(table, value):
     if table.selected_list:
         if hasattr(table, 'link') and table.filter:
-            link_table, rel_props, rel_name = table.
+            link_table, rel_props, rel_name = table.rows.dbtable.list.link
         if not isinstance(value, list):
             value = [value]
-            table.rows
+        link_ids = [table.rows[index][-1] for index in value]
+        table.rows.dbtable.delete_links(link_table.id, link_ids = link_ids, index_name = rel_name)
         table.__link_table_selection_changed__(link_table, link_table.value)
         return table
     elif isinstance(value, list):
@@ -54,16 +46,16 @@ def delete_table_row(table, value):
         del table.rows[value]
     table.value = None
 
-def append_table_row(table, search_str):
+def append_table_row(table, search_str = ''):
     ''' append has to return new row, value is the search string value in the table'''
     new_row = [None] * len(table.rows.dbtable.table_fields)
     if getattr(table,'id', None):
-
-        new_row.append(id)
+        new_row = table.rows.dbtable.list.append(new_row)
     if hasattr(table, 'link') and table.filter:
-        link_table, _, rel_name = table.
+        link_table, _, rel_name = table.rows.dbtable.list.link
         for linked_id in link_table.selected_list:
-            relation = table.rows.dbtable.add_link(
+            relation = table.rows.dbtable.add_link(new_row[-1], link_table.id,
+                linked_id, link_index_name = rel_name)
             new_row.extend(relation)
             break
     table.rows.append(new_row)
@@ -95,7 +87,7 @@ class Table(Unit):
             rel_name, rel_fields = self.rows.dbtable.get_rel_fields2(link_table.id, prop_types, rel_name)
             if not hasattr(link_table, 'id'):
                 raise AttributeError('Linked table has to be persistent!')
-            self.
+            self.rows.link = link_table, list(prop_types.keys()), rel_name
             self.link = rel_fields
 
             @Unishare.handle(link_table,'changed')
@@ -111,10 +103,9 @@ class Table(Unit):
                     self.rows = link_rows
                 else:
                     selected_ids = [link_rows[i][-1] for i in range(len(link_rows))]
-                    self.value = selected_ids
-                    #restore table rows if they are not rows
+                    self.value = selected_ids
                     if self.rows.cache is not None:
-
+                        self.rows = self.rows.dbtable.list
                 if not init:
                     master_table.accept(val)
                 return self
@@ -145,7 +136,7 @@ class Table(Unit):
                 else:
                     return Warning('The linked table is not in edit mode', self)
                 return self.accept(new_value)
-
+            """
             @Unishare.handle(self,'search')
             def search_changed(table, value):
                 self.search = value
@@ -154,7 +145,7 @@ class Table(Unit):
                 else:
                     self.rows = self.rows.dbtable.get_init_list(self.search)
                 return self
-
+            """
             self.calc_headers()
 
         elif hasattr(self,'ids'):
@@ -164,15 +155,13 @@ class Table(Unit):
             set_defaults(self,{'delete': delete_table_row, 'append': append_table_row, 'modify': accept_cell_value})
 
     @property
-    def compact_view(self):
+    def compact_view(self) -> str:
         """only selected are sended to llm"""
-        selected = self.selected_list
-        result = []
+        selected = self.selected_list
         if not selected and len(self.rows) < max_len_rows4llm:
-            selected = range(len(self.rows))
-        for index in selected
-
-        return {'name': self.name, 'value': result}
+            selected = range(len(self.rows))
+        str_rows = ';'.join(','.join(f'{field}: {value}' for field, value in zip(self.headers, self.rows[index])) for index in selected)
+        return f'{self.name} : {str_rows}'
 
     @property
     def selected_list(self):
@@ -180,12 +169,7 @@ class Table(Unit):
 
     def clean_selection(self):
         self.value = [] if isinstance(self.value,tuple | list) else None
-        return self
-
-    def extend(self, new_rows):
-        update = self.rows.extend(new_rows)
-        if hasattr(self,'id'):
-            dbupdates[self.id].append(update)
+        return self
 
     def calc_headers(self):
         """only for persistent"""
@@ -201,15 +185,6 @@ class Table(Unit):
             self.headers.extend([relation_mark + pretty4(link_field) for link_field in self.link])
         if self.ids:
             self.headers.append(relation_mark + 'ID')
-
-    def index2node_relation(self, cell_index):
-        """calculate delta to property of node or link for persistent"""
-        table_fields = self.rows.dbtable.table_fields
-        delta = cell_index - len(table_fields)
-        if delta < 0:
-            return True, iterate(table_fields, cell_index)
-        delta -= 1 #ID field
-        return False, iterate(self.link, delta)
 
     async def emit(self, *_):
         """calcute llm field values for selected rows if they are None"""
@@ -222,7 +197,7 @@ class Table(Unit):
                 if deps is True:
                     context = values
                 else:
-                    context =
+                    context = OrderedDict()
                     for dep in deps:
                         value = values.get(dep, None)
                         if value is None:
@@ -236,9 +211,10 @@ class Table(Unit):
                         else:
                             raise AttributeError(f'Invalid llm parameter {dep} in {self.name} element!')
                 if context:
-                    async def assign(index, fld,
-                        self.rows[index][self.headers.index(fld)] = await get_property(fld,
-
+                    async def assign(index, fld, context):
+                        self.rows[index][self.headers.index(fld)] = await get_property(fld, context)
+                    context = ','.join(f'{fld}:{val}' for fld, val in context.items())
+                    tasks.append(asyncio.create_task(assign(index, fld, context)))
                 if tasks:
                     await asyncio.gather(*tasks)
                 return self
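For context on the compact_view rewrite above: a table now serializes its selected rows (or all rows, when nothing is selected and the table is small) into 'field: value' pairs for the llm context instead of returning a dict. A sketch with made-up data showing the string shape it produces:

    # hypothetical table data, just to show the output format of Table.compact_view
    name = 'Team'
    headers = ['Name', 'Role']
    rows = [['Ann', 'dev'], ['Bob', 'qa']]
    selected = range(len(rows))          # all rows when nothing is selected

    str_rows = ';'.join(','.join(f'{field}: {value}'
        for field, value in zip(headers, rows[index])) for index in selected)
    print(f'{name} : {str_rows}')        # Team : Name: Ann,Role: dev;Name: Bob,Role: qa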