unisi 0.1.15-py3-none-any.whl → 0.1.17-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- unisi/__init__.py +2 -2
- unisi/autotest.py +4 -4
- unisi/common.py +19 -17
- unisi/containers.py +38 -5
- unisi/{dbelements.py → dbunits.py} +34 -12
- unisi/kdb.py +15 -17
- unisi/llmrag.py +73 -0
- unisi/server.py +7 -2
- unisi/tables.py +76 -21
- unisi/{guielements.py → units.py} +39 -15
- unisi/users.py +41 -26
- unisi/web/css/{346.824522cf.css → 508.880242b5.css} +1 -1
- unisi/web/index.html +1 -1
- unisi/web/js/508.4af55eb8.js +2 -0
- unisi/web/js/{app.4b51aa78.js → app.44d431b1.js} +1 -1
- {unisi-0.1.15.dist-info → unisi-0.1.17.dist-info}/METADATA +29 -21
- {unisi-0.1.15.dist-info → unisi-0.1.17.dist-info}/RECORD +19 -18
- unisi/web/js/346.c574f9c3.js +0 -2
- {unisi-0.1.15.dist-info → unisi-0.1.17.dist-info}/WHEEL +0 -0
- {unisi-0.1.15.dist-info → unisi-0.1.17.dist-info}/licenses/LICENSE +0 -0
unisi/__init__.py
CHANGED
@@ -1,9 +1,9 @@
 from .utils import *
-from .guielements import *
+from .units import *
 from .users import User, handle, context_user, context_screen
 from .server import start
 from .tables import *
 from .containers import *
 from .proxy import *
-from .dbelements import *
+from .dbunits import *
 from .kdb import Database, Dbtable
unisi/autotest.py
CHANGED
@@ -1,6 +1,6 @@
 import config, os, logging, json, asyncio
 from .utils import *
-from .guielements import *
+from .units import *
 from .containers import Block, Dialog
 from .users import User
 from .common import *
@@ -135,8 +135,8 @@ def check_block(block, hash_elements):
                 errors.append(f'The block {block.name} contains already used "{child.name}" in block "{hash_elements[hash_element]}"!')
             else:
                 hash_elements[hash_element] = block.name
-            if not isinstance(child, …
-                errors.append(f'The block {block.name} contains invalid element {child} instead of …
+            if not isinstance(child, Unit) or not child:
+                errors.append(f'The block {block.name} contains invalid element {child} instead of Unit+ object!')
             elif isinstance(child, Block):
                 errors.append(f'The block {block.name} contains block {child.name}. Blocks cannot contain blocks!')
             elif child.name in child_names and child.type != 'line':
@@ -156,7 +156,7 @@ def check_module(module):
        errors.append(f"Screen file {module.__file__} does not contain name!")
        screen.name = 'Unknown'
    elif not isinstance(screen.name, str):
-        errors.append(f"…
+        errors.append(f"name' variable in screen file {module.__file__} {screen.name} is not a string!")
    if not isinstance(screen.blocks, list):
        errors.append(f"Screen file {module.__file__} does not contain 'blocks' list!")
    else:
unisi/common.py
CHANGED
@@ -1,4 +1,4 @@
-import jsonpickle, inspect
+import jsonpickle, inspect, asyncio

 UpdateScreen = True
 Redesign = 2
@@ -10,20 +10,22 @@ def flatten(*arr):
         else:
             yield a

-def …
+def compose_handlers(*handlers):
+    async def compose(obj, value):
+        objs = set()
+        for handler in handlers:
+            result = (await handler(obj, value)) if asyncio.iscoroutinefunction(handler)\
+                else handler(obj, value)
+            if result == UpdateScreen or result == Redesign:
+                return result
+            if isinstance(result, list | tuple):
+                for obj in flatten(result):
+                    objs.add(obj)
+            elif result:
+                objs.add(result)
+        if objs:
+            return list(objs)
+    return compose

 def equal_dicts(dict1, dict2):
     return dict1.keys() == dict2.keys() and all(dict1[key] == dict2[key] for key in dict1)
@@ -37,7 +39,7 @@ class ArgObject:

 class ReceivedMessage(ArgObject):
     def __init__(self, kwargs):
-        …
+        self.__dict__.update(kwargs)
     def __str__(self):
         return f'{self.block}/{self.element}->{self.event}({self.value})'

@@ -87,7 +89,7 @@ def get_default_args(func):
         defaults[name] = param.default
     return defaults

-…
+Unishare = ArgObject(context_user = None, sessions = {})

 class Message:
     def __init__(self, *gui_objects, user = None, type = 'update'):
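For orientation, a minimal usage sketch of the new compose_handlers helper. The handler functions and the SimpleNamespace stand-in for a unit are illustrative only, and the import assumes a unisi project on the path (the package reads config.py at import time):

    import asyncio, types
    from unisi.common import compose_handlers

    def set_value(elem, value):          # sync handler: returns the element it changed
        elem.value = value
        return elem

    async def log_change(elem, value):   # async handler: nothing extra to update
        await asyncio.sleep(0)

    combined = compose_handlers(set_value, log_change)
    elem = types.SimpleNamespace(value = None)
    print(asyncio.run(combined(elem, 42)))   # -> [namespace(value=42)]

compose_handlers awaits coroutine handlers, returns immediately when any handler asks for UpdateScreen or Redesign, and otherwise collects the elements returned by all handlers into one list.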
unisi/containers.py
CHANGED
@@ -1,8 +1,7 @@
-from .guielements import *
-from .common import pretty4
+from .units import *
+from .common import pretty4, flatten
 from numbers import Number

-
 class ContentScaler(Range):
     def __init__(self, *args, **kwargs):
         name = args[0] if args else 'Scale content'
@@ -22,7 +21,7 @@ class ContentScaler(Range):
             element.height /= prev
         return elements

-class Block(Gui):
+class Block(Unit):
     def __init__(self, name, *elems, **options):
         self.name = name
         self.type = 'block'
@@ -36,7 +35,41 @@ class Block(Gui):
         elif isinstance(self.value[0], list):
             self.value[0].append(scaler)
         else:
-            self.value[0] = [self.value, scaler]
+            self.value[0] = [self.value, scaler]
+
+        for elem in flatten(self.value):
+            if hasattr(elem, 'llm'):
+                if elem.llm is True:
+                    dependencies = [obj for obj in flatten(self.value) if elem is not obj and obj.type != 'command']
+                    exactly = False
+                elif isinstance(elem.llm, list | tuple):
+                    dependencies = elem.llm
+                    exactly = True
+                elif isinstance(elem.llm, Unit):
+                    dependencies = [elem.llm]
+                    exactly = True
+                elif isinstance(elem.llm, dict):
+                    if elem.type != 'table':
+                        raise AttributeError(f'{elem.name} llm parameter is a dictionary only for tables, not for {elem.type}!')
+
+                    elem.__llm_dependencies__ = {fld: (deps if isinstance(deps, list | bool) else [deps]) for fld, deps in elem.llm.items()}
+                    elem.llm = True
+                    continue
+                else:
+                    raise AttributeError(f'Invalid llm parameter value for {elem.name} {elem.type}!')
+                if dependencies:
+                    elem.llm = exactly
+                    for dependency in dependencies:
+                        dependency.add_changed_handler(elem.emit)
+                    elem.__llm_dependencies__ = dependencies
+                else:
+                    elem.llm = None
+                    print(f'Empty dependency list for llm calculation for {elem.name} {elem.type}!')
+
+    @property
+    def compact_view(self):
+        return [obj for obj in flatten(self.value) if obj.value is not None]
+
     @property
     def scroll_list(self):
         return self.value[1] if len(self.value) > 1 and isinstance(self.value[1], (list, tuple)) else []
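To illustrate the llm wiring above, a hedged sketch of how dependencies might be declared when a block is built. Edit is assumed from unisi's standard unit set and its exact constructor signature is not shown in this diff; run inside a project with a config.py:

    from unisi import Block, Edit

    name = Edit('Name', '')
    genre = Edit('Genre', None, llm = True)     # infer from every other non-command unit in the block
    year = Edit('Year', None, llm = [name])     # infer only from the listed dependencies
    block = Block('Music', [name, genre, year])

Block.__init__ normalizes each llm option into a flag plus __llm_dependencies__ and registers a changed handler on every dependency that calls the dependent unit's emit().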
unisi/{dbelements.py → dbunits.py}
RENAMED
@@ -1,3 +1,26 @@
+from .common import Unishare
+import asyncio
+from collections import defaultdict
+
+#storage id -> screen name -> [elem name, block name]
+dbshare = defaultdict(lambda: defaultdict(lambda: []))
+#db id -> update
+dbupdates = defaultdict(lambda: [])
+
+async def sync_dbupdates():
+    sync_calls = []
+    for id, updates in dbupdates.items():
+        for update in updates:
+            screen2el_bl = dbshare[id]
+            for user in Unishare.sessions.values():
+                scr_name = user.screen.name
+                if scr_name in screen2el_bl:
+                    for elem_block in screen2el_bl[scr_name]: #optim--
+                        update4user = {**update, **elem_block.__dict__}
+                        sync_calls.append(user.send(update4user))
+    dbupdates.clear()
+    await asyncio.gather(*sync_calls)
+
 class Dblist:
     def __init__(self, dbtable, init_list = None, cache = None):
         self.cache = cache
@@ -85,19 +108,14 @@ class Dblist:
             del chunk[index - delta_list]
         limit = self.dbtable.limit
         next_delta_list = delta_list + limit
-…
+        if len(chunk) == limit - 1: #chunk was fully filled
             next_list = self.delta_list.get(next_delta_list)
             if next_list:
-                chunk.append(next_list[0])
-                chunk = next_list
-                next_delta_list += limit
-                del next_list[0]
+                chunk.append(next_list[0])
             else:
-…
-…
-…
-                self.clean_cache_from(next_delta_list)
-                break
+                delta_list, chunk = self.get_delta_chunk(delta_list)
+            self.update = dict(type = 'updates', index = delta_list, data = chunk)
+            self.clean_cache_from(next_delta_list)

     def __len__(self):
         return len(self.cache) if self.cache is not None else self.dbtable.length
@@ -117,9 +135,11 @@ class Dblist:

     def extend(self, rows):
         start = self.dbtable.length
+        delta_start = start // self.limit * self.limit
         rows = self.dbtable.append_rows(rows)
         len_rows = len(rows)
         i_rows = 0
+        length = len_rows + start
         while len_rows > 0:
             delta_list = start // self.limit * self.limit
             list = self.delta_list.get(delta_list)
@@ -134,8 +154,10 @@ class Dblist:

             i_rows += can_fill
             start += can_fill
-            len_rows -= can_fill
-
+            len_rows -= can_fill
+        delta, data = self.get_delta_chunk(delta_start)
+        self.update = dict(type = 'updates', index = delta, data = data, length = length)
+        return self.update

     def insert(self, index, value):
         self.append(value)
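A rough, hypothetical illustration of the new fan-out: Table.extend() (see tables.py below) queues one update per storage id in dbupdates, and the server awaits sync_dbupdates() after every handled message, sending each queued update to the users whose current screen is registered for that storage id in dbshare. The storage id below is made up, and with no active sessions the call is a no-op:

    import asyncio
    from unisi.dbunits import dbupdates, sync_dbupdates

    dbupdates['Persons'].append({'type': 'updates', 'index': 0,
        'data': [['Marie Curie', 'physicist']], 'length': 1})
    asyncio.run(sync_dbupdates())   # nothing to send here, but the queue is cleared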
unisi/kdb.py
CHANGED
@@ -1,9 +1,12 @@
-import kuzu, shutil, os, re
+import kuzu, shutil, os, re
 from datetime import date, datetime
 from cymple import QueryBuilder as qb
 from cymple.typedefs import Properties
-from .common import get_default_args
-from .dbelements import Dblist
+from .common import get_default_args
+from .dbunits import Dblist
+
+def equal_fields_dicts(dict1, dict2):
+    return dict1.keys() == dict2.keys() and all(dict1[key].lower() == dict2[key].lower() for key in dict1)

 def is_modifying_query(cypher_query):
     query = cypher_query.lower()
@@ -109,7 +112,7 @@ class Database:
         fields = {headers[i]: type for i, type in enumerate(types)}

         if (table_fields := self.get_table_fields(id)) is not None:
-            if not …
+            if not equal_fields_dicts(table_fields, fields):
                 if self.delete_table(id):
                     self.message_logger(f'Node table {id} was deleted because of fields contradiction!', 'warning')
                 else:
@@ -173,7 +176,6 @@ class Dbtable:
         return f'{self.id}2{link_table}'

     def calc_linked_rows(self, index_name, link_ids, include_rels = False, search = ''):
-        #condition = ' OR '.join(f'b.ID = {id}' for id in link_ids) #bug in IN op!
         condition = f'b.ID in {link_ids}'
         rel_info = ', r.*' if include_rels else ''
         query = f"""
@@ -192,7 +194,7 @@ class Dbtable:
         rel_table_fields = self.db.get_table_fields(relname)
         if isinstance(rel_table_fields, dict):
             if isinstance(fields, dict):
-                if …
+                if equal_fields_dicts(rel_table_fields, fields):
                     return relname, rel_table_fields
                 else:
                     self.db.delete_table(relname)
@@ -245,12 +247,10 @@ class Dbtable:
         index_name = self.default_index_name2(link_table_id)

         if link_ids:
-            condition = f'r.ID in {link_ids}'
-            #condition = ' OR '.join(f'r.ID = {id}' for id in link_ids) #bug in IN op!
+            condition = f'r.ID in {link_ids}'
         else:
             if not isinstance(source_ids, list):
-                source_ids = list(source_ids)
-                #condition = ' OR '.join(f'a.ID = {id}' for id in source_ids) #bug in IN op!
+                source_ids = list(source_ids)
             condition = f'a.ID in {source_ids}'
             condition = f'({condition}) AND b.ID = {link_node_id}'
             query = f"""
@@ -294,7 +294,6 @@
         return self.db.execute(query.detach_delete('a'))

     def delete_rows(self, ids):
-        #condition = ' OR '.join(f'a.ID = {id}' for id in ids) #bug in IN op!
         condition = f'a.ID in {ids}'
         query = f"""
         MATCH (a:{self.id})
@@ -307,13 +306,12 @@
         """row can be list or dict, returns ID"""
         if isinstance(row, list):
             props = {name: value for name, value in zip(self.node_columns, row) if value is not None}
-…
-…
-…
-…
-            if answer.has_next():
+
+            answer = self.db.execute(qb().create().node(self.id, 'a', props).return_literal('a.ID'))
+
+            if answer and answer.has_next():
                 self.length += 1
-                return answer.get_next()[…
+                return answer.get_next()[-1]
             return None

     def append_rows(self, rows):
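The new equal_fields_dicts helper compares two {field: type} mappings with case-insensitive type values, so schema checks do not fail on type-name casing alone. For example (assuming kuzu and cymple are installed so unisi.kdb imports):

    from unisi.kdb import equal_fields_dicts

    equal_fields_dicts({'name': 'STRING', 'age': 'INT64'},
                       {'name': 'string', 'age': 'int64'})        # True
    equal_fields_dicts({'name': 'STRING'},
                       {'name': 'STRING', 'age': 'INT64'})        # False: key sets differ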
unisi/llmrag.py
ADDED
@@ -0,0 +1,73 @@
+from .common import Unishare
+from langchain_groq import ChatGroq
+from langchain_openai import ChatOpenAI
+
+def setup_llmrag():
+    import config #the module is loaded before config.py
+    if config.llm:
+        match config.llm:
+            case ['host', address]:
+                model = None
+                type = 'openai' #provider type is openai for local llms
+            case [type, model, address]: ...
+            case [type, model]: address = None
+            case _:
+                print(f'Error: Invalid llm configutation: {config.llm}')
+                return
+
+        type = type.lower()
+        if type == 'openai':
+            Unishare.llm_model = ChatOpenAI(
+                api_key = 'llm-studio',
+                temperature = 0.0,
+                openai_api_base = address
+            ) if address else ChatOpenAI(temperature=0.0)
+
+        elif type == 'groq':
+            Unishare.llm_model = ChatGroq(
+                model = model,
+                temperature = 0.0,
+                max_tokens = None,
+                timeout = None,
+                max_retries = 2,
+            )
+
+numeric_types = ['number', 'int', 'float', 'double']
+
+async def get_property(name, json_context = '', type = 'string', options = None, attempts = 1, messages = None):
+    if messages is None:
+        limits = f'type is {type}'
+        if type == 'date':
+            limits = f'{limits}, use format "dd/mm/yyyy"'
+        if options:
+            limits = f'{limits}, and its possible options are {",".join(opt for opt in options)}'
+        messages = [
+            (
+                "system",
+                f"""You are an intelligent and extremely concise assistant."""
+            ),
+            ("user", f"""{json_context} Reason and infer the "{name}" value, which {limits}.
+            Do not include any additional text or commentary in your answer, just exact the property value.""")
+        ]
+    ai_msg = await Unishare.llm_model.ainvoke(messages)
+    value = ai_msg.content
+    log_error = ''
+    if type in numeric_types:
+        try:
+            value = float(value)
+        except:
+            log_error = f'Invalid value {value} from llm-rag for {messages[1][1]}'
+            return value
+    else:
+        value = value.strip('""')
+
+    if not log_error and options and value not in options:
+        attempts -= 1
+        if attempts > 0:
+            value = get_property(name, json_context, type, options, attempts, messages)
+        else:
+            log_error = f'Invalid value {value} from llm-rag for {messages[1][1]}'
+
+    if log_error:
+        Unishare.message_logger(log_error)
+    return value
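Per the match statement above, config.llm accepts a ['host', address] pair for a local OpenAI-compatible server, or a [provider, model] pair, optionally extended with an explicit address. Illustrative config.py values (the model names and URL are placeholders, not taken from the package):

    # config.py
    llm = ['host', 'http://localhost:1234/v1']   # local OpenAI-compatible server, e.g. LM Studio
    # llm = ['openai', 'gpt-4o-mini']            # hosted provider with the default endpoint
    # llm = ['groq', 'llama3-70b-8192']          # Groq via langchain_groq

Only the 'openai' and 'groq' provider types are recognized; anything else leaves Unishare.llm_model unset.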
unisi/server.py
CHANGED
@@ -4,6 +4,8 @@ from pathlib import Path
 from .reloader import empty_app
 from .autotest import recorder, run_tests
 from .common import *
+from.llmrag import setup_llmrag
+from .dbunits import dbupdates, sync_dbupdates
 from config import port, upload_dir
 import traceback, json

@@ -78,7 +80,9 @@ async def websocket_handler(request):
                 if message:
                     if recorder.record_file:
                         recorder.accept(message, user.prepare_result (result))
-                    await user.reflect(message, result)
+                    await user.reflect(message, result)
+                    if dbupdates:
+                        await sync_dbupdates()
             elif msg.type == WSMsgType.ERROR:
                 user.log('ws connection closed with exception %s' % ws.exception())
         except BaseException as e:
@@ -96,6 +100,7 @@ def ensure_directory_exists(directory_path):
 def start(appname = None, user_type = User, http_handlers = []):
     ensure_directory_exists(screens_dir)
     ensure_directory_exists(blocks_dir)
+    setup_llmrag()

     if appname:
         config.appname = appname
@@ -112,5 +117,5 @@ def start(appname = None, user_type = User, http_handlers = []):
     #print(f'Start {appname} web server..')
     app = web.Application()
     app.add_routes(http_handlers)
-    web.run_app(app, …
+    web.run_app(app, port = port)

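In application code nothing changes: the usual entry point now also initializes the LLM client from config.llm and flushes queued database updates after each handled message. A minimal, illustrative main.py (the app name is arbitrary):

    from unisi import start
    start('Hello app')   # start() now calls setup_llmrag() before serving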
unisi/tables.py
CHANGED
@@ -1,9 +1,16 @@
-from .guielements import Gui
-from .common import …
-from .…
+from .units import Unit
+from .common import *
+from .dbunits import Dblist, dbupdates
+from .llmrag import get_property
+import asyncio

 relation_mark = 'Ⓡ'
 exclude_mark = '✘'
+max_len_rows4llm = 30
+
+def get_chunk(obj, start_index):
+    delta, data = obj.rows.get_delta_chunk(start_index)
+    return {'type': 'updates', 'index': delta, 'data': data}

 def iterate(iter, times):
     for i, val in enumerate(iter):
@@ -26,8 +33,7 @@ def accept_cell_value(table, dval):
     else:
         table_id = table.__link__[2]
         row_id = dval['id']
-        dbt.db.update_row(table_id, row_id, {field: value}, in_node)
-
+        dbt.db.update_row(table_id, row_id, {field: value}, in_node)
     table.rows[dval['delta']][dval['cell']] = value

 def delete_table_row(table, value):
@@ -63,11 +69,7 @@ def append_table_row(table, search_str):
     table.rows.append(new_row)
     return new_row

-def get_chunk(obj, start_index):
-    delta, data = obj.rows.get_delta_chunk(start_index)
-    return {'type': 'updates', 'index': delta, 'data': data}
-
-class Table(Gui):
+class Table(Unit):
     def __init__(self, *args, panda = None, **kwargs):
         if panda is not None:
             self.mutate(PandaTable(*args, panda=panda, **kwargs))
@@ -75,10 +77,9 @@ class Table(Gui):
         super().__init__(*args, **kwargs)
         set_defaults(self, dict(headers = [], type = 'table', value = None, rows = [], editing = False, dense = True))
         self.__headers__ = self.headers[:]
-        if …
-…
-…
-            db.set_db_list(self)
+        if hasattr(self,'id'):
+            if Unishare.db:
+                Unishare.db.set_db_list(self)
         else:
             raise AssertionError('Config db_dir is not defined!')
         self.get = get_chunk
@@ -97,7 +98,7 @@ class Table(Gui):
         self.__link__ = link_table, list(prop_types.keys()), rel_name
         self.link = rel_fields

-        @…
+        @Unishare.handle(link_table,'changed')
         def link_table_selection_changed(master_table, val, init = False):
             lstvalue = val if isinstance(val, list) else [val] if val != None else []
             if lstvalue:
@@ -120,14 +121,14 @@ class Table(Gui):
         link_table_selection_changed(link_table, link_table.value, True)
         self.__link_table_selection_changed__ = link_table_selection_changed

-        @…
+        @Unishare.handle(self,'filter')
         def filter_status_changed(table, value):
             self.filter = value
             link_table_selection_changed(link_table, link_table.value, True)
             self.calc_headers()
             return self

-        @…
+        @Unishare.handle(self,'changed')
         def changed_selection_causes__changing_links(self, new_value):
             if link_table.value is not None and link_table.value != []:
                 #if link table is in multi mode, links are not editable
@@ -145,7 +146,7 @@ class Table(Gui):
                 return Warning('The linked table is not in edit mode', self)
             return self.accept(new_value)

-        @…
+        @Unishare.handle(self,'search')
         def search_changed(table, value):
             self.search = value
             if has_link:
@@ -160,16 +161,32 @@ class Table(Gui):
             raise ValueError("Only persistent tables can have 'ids' option!")

         if getattr(self,'edit', True):
-            set_defaults(self,{'delete': delete_table_row, 'append': append_table_row, 'modify': accept_cell_value})
-
+            set_defaults(self,{'delete': delete_table_row, 'append': append_table_row, 'modify': accept_cell_value})
+
+    @property
+    def compact_view(self):
+        """only selected are sended to llm"""
+        selected = self.selected_list
+        result = []
+        if not selected and len(self.rows) < max_len_rows4llm:
+            selected = range(len(self.rows))
+        for index in selected:
+            result.append({field: value for field, value in zip(self.headers, self.rows[index])})
+        return {'name': self.name, 'value': result}
+
     @property
     def selected_list(self):
-        return [ …
+        return [] if self.value is None else self.value if isinstance(self.value, list) else [self.value]

     def clean_selection(self):
         self.value = [] if isinstance(self.value,tuple | list) else None
         return self

+    def extend(self, new_rows):
+        update = self.rows.extend(new_rows)
+        if hasattr(self,'id'):
+            dbupdates[self.id].append(update)
+
     def calc_headers(self):
         """only for persistent"""
         table_fields = self.rows.dbtable.table_fields
@@ -193,6 +210,44 @@
             return True, iterate(table_fields, cell_index)
         delta -= 1 #ID field
         return False, iterate(self.link, delta)
+
+    async def emit(self, *_):
+        """calcute llm field values for selected rows if they are None"""
+        if Unishare.llm_model and getattr(self, 'llm', None) is not None:
+            tasks = []
+            for index in self.selected_list:
+                values = {field: value for field, value in zip(self.headers, self.rows[index]) if value}
+                for fld, deps in self.__llm_dependencies__.items():
+                    if fld not in values:
+                        if deps is True:
+                            context = values
+                        else:
+                            context = {}
+                            for dep in deps:
+                                value = values.get(dep, None)
+                                if value is None:
+                                    if self.llm: #exact
+                                        continue #not all fields
+                                else:
+                                    if isinstance(dep, str):
+                                        context[dep] = value
+                                    elif isinstance(dep, Unit):
+                                        context[dep.name] = dep.value
+                                    else:
+                                        raise AttributeError(f'Invalid llm parameter {dep} in {self.name} element!')
+                        if context:
+                            async def assign(index, fld, jcontext):
+                                self.rows[index][self.headers.index(fld)] = await get_property(fld, jcontext)
+                            tasks.append(asyncio.create_task(assign(index, fld, toJson(context))))
+            if tasks:
+                await asyncio.gather(*tasks)
+        return self
+    @property
+    def is_base_table_list(self):
+        """is table in basic view mode"""
+        if hasattr(self, 'id'):
+            dbtable = self.rows.dbtable
+            return dbtable.list is self.rows

 def delete_panda_row(table, row_num):
     df = table.__panda__
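Tying emit() to the dict form of the llm option accepted in containers.py, a hedged sketch; the Table constructor's name/value/kwargs pattern is assumed, not shown in this diff:

    from unisi import Block, Table

    persons = Table('Persons', None,
        headers = ['Name', 'Occupation', 'Country'],
        rows = [['Marie Curie', None, None], ['Alan Turing', None, None]],
        llm = {'Occupation': True, 'Country': ['Name']})
    block = Block('People', [persons])
    # Block.__init__ turns the dict into persons.__llm_dependencies__; when rows are
    # selected, emit() calls get_property() for each llm-driven cell that is still None,
    # passing the row's known fields (True) or only 'Name' (['Name']) as the JSON context.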