unisi-0.1.13-py3-none-any.whl → unisi-0.1.15-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- unisi/__init__.py +2 -0
- unisi/common.py +123 -7
- unisi/containers.py +3 -3
- unisi/dbelements.py +153 -0
- unisi/guielements.py +8 -23
- unisi/kdb.py +336 -0
- unisi/multimon.py +7 -5
- unisi/reloader.py +2 -2
- unisi/server.py +9 -1
- unisi/tables.py +176 -56
- unisi/users.py +44 -35
- unisi/utils.py +28 -81
- unisi/web/index.html +1 -1
- unisi/web/js/346.c574f9c3.js +2 -0
- unisi/web/js/{app.cf197a5a.js → app.4b51aa78.js} +1 -1
- {unisi-0.1.13.dist-info → unisi-0.1.15.dist-info}/METADATA +31 -5
- {unisi-0.1.13.dist-info → unisi-0.1.15.dist-info}/RECORD +20 -18
- {unisi-0.1.13.dist-info → unisi-0.1.15.dist-info}/WHEEL +1 -1
- unisi/web/js/493.97ca799d.js +0 -1
- /unisi/web/css/{493.824522cf.css → 346.824522cf.css} +0 -0
- {unisi-0.1.13.dist-info → unisi-0.1.15.dist-info}/licenses/LICENSE +0 -0
unisi/kdb.py
ADDED
@@ -0,0 +1,336 @@
+import kuzu, shutil, os, re, time
+from datetime import date, datetime
+from cymple import QueryBuilder as qb
+from cymple.typedefs import Properties
+from .common import get_default_args, equal_dicts
+from .dbelements import Dblist
+
+def is_modifying_query(cypher_query):
+    query = cypher_query.lower()
+    modifying_pattern = r'\b(create|delete|detach\s+delete|set|merge|remove|call\s+\w+\s+yield|foreach)\b'
+    return re.search(modifying_pattern, query)
+
+def query_offset(id, offset):
+    return qb().match().node(id, 'a').where('a.ID','=',offset)
+
+def kuzu_data_type(value):
+    match value:
+        case bool():
+            return "BOOLEAN"
+        case int():
+            return "INT64"
+        case float():
+            return "DOUBLE"
+        case str():
+            return "STRING"
+        case datetime():
+            return "TIMESTAMP"
+        case date():
+            return "DATE"
+        case bytes():
+            return "BLOB"
+        case list() | tuple():
+            return "LIST"
+        case _:
+            return ""
+
+number_types = ["DOUBLE", "INT64"]
+
+def dict_to_cypher_set(properties, alias = 'a'):
+    set_items = []
+    for key, value in properties.items():
+        if isinstance(value, str):
+            set_items.append(f"{alias}.{key} = '{value}'")
+        else:
+            set_items.append(f"{alias}.{key} = {value}")
+    return "SET " + ", ".join(set_items)
+
+class Database:
+    def __init__(self, dbpath, message_logger = print) -> None:
+        self.db = kuzu.Database(dbpath)
+        self.conn = kuzu.Connection(self.db)
+        self.message_logger = message_logger
+        self.table_params = get_default_args(self.get_table)
+
+    def execute(self, query_str, ignore_exception = False):
+        query_str = str(query_str)
+        """ if not query_str.endswith(';'):
+            query_str += ';'
+        print(query_str) """
+        try:
+            result = self.conn.execute(query_str)
+        except Exception as e:
+            if not ignore_exception:
+                self.message_logger(e)
+            return None
+        return True if result is None else result
+
+    def delete(dir_path):
+        if os.path.exists(dir_path):
+            # Remove the directory and all its contents
+            shutil.rmtree(dir_path)
+
+    @property
+    def table_names(self):
+        return self.conn._get_node_table_names()
+
+    def get_table_fields(self, table_name, remove_id = True) -> None | dict:
+        result = self.qlist(f"CALL table_info('{table_name}') RETURN *;", ignore_exception = True)
+        if result is not None:
+            return {info[1]: info[2] for info in result if not remove_id or info[1] != 'ID'}
+
+    def delete_table(self, table_name):
+        return self.execute( f'DROP TABLE {table_name};')
+
+    def get_table(self, id = None, limit = 100, headers = None, rows = None, fields = None):
+        if id:
+            if rows and fields is None:
+                if not headers:
+                    self.message_logger(f'headers are not defined!')
+                    return None
+                types = [None] * len(headers)
+                for row in rows:
+                    for j, cell in enumerate(row):
+                        if cell is not None:
+                            ktype = kuzu_data_type(cell)
+                            if ktype:
+                                if types[j] is None:
+                                    types[j] = ktype
+                                elif types[j] != ktype:
+                                    if types[j] in number_types and ktype in number_types:
+                                        types[j] = "DOUBLE"
+                                    else:
+                                        self.message_logger(f'Conflict types for {id} table in {j} column: {types[j], ktype}!')
+                                        return None
+                if None in types:
+                    index = types.index(None)
+                    self.message_logger(f'Rows data doesnt contain allowed values for {headers[index]} column!')
+                    return None
+                fields = {headers[i]: type for i, type in enumerate(types)}
+
+            if (table_fields := self.get_table_fields(id)) is not None:
+                if not equal_dicts(table_fields, fields):
+                    if self.delete_table(id):
+                        self.message_logger(f'Node table {id} was deleted because of fields contradiction!', 'warning')
+                else:
+                    return Dbtable(id, self, limit, table_fields)
+
+            return self.create_table(id, fields, limit, rows)
+
+    def get_table_params(self, params):
+        return {k: v for k, v in params.items() if k in self.table_params}
+
+    def set_db_list(self, gui_table):
+        table = self.get_table(**self.get_table_params(gui_table.__dict__))
+        tlst = table.list
+        gui_table.rows = tlst
+        if tlst.update['type'] != 'init':
+            tlst.update = dict(type ='init', length = table.length, limit = table.limit, data = tlst.get_delta_0())
+
+    def create_table(self, id, fields : dict, limit = 100, rows = None):
+        specs = ','.join(f'{prop} {type}' for prop, type in fields.items())
+        query = f"CREATE NODE TABLE {id}({specs},ID SERIAL, PRIMARY KEY(ID))"
+        self.execute(query)
+        table = Dbtable(id, self, limit, fields)
+        if rows:
+            table.list.extend(rows)
+        return table
+
+    def update_row(self, table_id, row_id, props, in_node = True):
+        set_props = dict_to_cypher_set(props)
+        query = f'MATCH (a: {table_id}) WHERE a.ID = {row_id} {set_props}' if in_node else\
+            f'MATCH ()-[a: {table_id}]->() WHERE a.ID = {row_id} {set_props}'
+        return self.execute(query)
+
+    def qlist(self, query, func = None, ignore_exception = False):
+        if answer := self.execute(query, ignore_exception):
+            result = []
+            while answer.has_next():
+                value = answer.get_next()
+                result.append(func(value) if func else value)
+            return result
+
+    def qiter(self, query, func = None, ignore_exception = False):
+        answer = self.execute(query, ignore_exception)
+        while answer.has_next():
+            value = answer.get_next()
+            yield func(value) if func else value
+
+class Dbtable:
+    def __init__(self, id, db, limit = 100, table_fields = None) -> None:
+        self.db = db
+        self.id = id
+        self.table_fields = table_fields
+        self.limit = limit
+        self.node_columns = list(db.conn._get_node_property_names(id).keys())[:-1]
+        self.init_list()
+
+    @property
+    def rel_table_names(self):
+        return self.db.conn._get_rel_table_names()
+
+    def default_index_name2(self, link_table):
+        return f'{self.id}2{link_table}'
+
+    def calc_linked_rows(self, index_name, link_ids, include_rels = False, search = ''):
+        #condition = ' OR '.join(f'b.ID = {id}' for id in link_ids) #bug in IN op!
+        condition = f'b.ID in {link_ids}'
+        rel_info = ', r.*' if include_rels else ''
+        query = f"""
+            MATCH (a:{self.id})-[r:{index_name}]->(b:User)
+            WHERE {condition}
+            RETURN a.*{rel_info}
+            ORDER BY a.ID ASC
+        """
+        lst = self.db.qlist(query)
+        return Dblist(self, cache = lst)
+
+    def get_rel_fields2(self, tname, fields : dict = None, relname = None):
+        """return name of link table and fields and its fields dict"""
+        if not relname:
+            relname = self.default_index_name2(tname)
+        rel_table_fields = self.db.get_table_fields(relname)
+        if isinstance(rel_table_fields, dict):
+            if isinstance(fields, dict):
+                if equal_dicts(rel_table_fields, fields):
+                    return relname, rel_table_fields
+                else:
+                    self.db.delete_table(relname)
+            else:
+                fields = rel_table_fields
+        elif fields is None:
+            fields = {}
+
+        if not any(info['name'] == relname for info in self.rel_table_names):
+            fprops = ''.join(f', {field} {type}' for field, type in fields.items()) if fields else ''
+            fprops += ', ID SERIAL'
+            query = f"CREATE REL TABLE {relname}(FROM {self.id} TO {tname} {fprops})"
+            self.db.execute(query)
+            self.rel_table_names.append({'name' : relname})
+        return relname, fields
+
+    def add_link(self, snode_id, link_table, tnode_id, link_fields = None, link_index_name = None):
+        """return added link"""
+        if link_index_name is None:
+            link_index_name = self.default_index_name2(link_table)
+        if link_fields is None:
+            link_fields = {}
+        query = f"""
+            MATCH (a:{self.id}), (b:{link_table})
+            WHERE a.ID = {snode_id} AND b.ID = {tnode_id}
+            CREATE (a)-[r:{link_index_name} {{{Properties(link_fields)}}}]->(b)
+            RETURN r.*
+        """
+        lst = self.db.qlist(query)
+        return lst[0]
+
+    def add_links(self, link_table, snode_ids : iter, tnode_id, link_index_name = None):
+        result = []
+        for id in snode_ids:
+            result.append(self.add_link(id, link_table, tnode_id, link_index_name = link_index_name))
+        return result
+
+    def delete_link(self, link_table_id, link_id, index_name = None):
+        if not index_name:
+            index_name = self.default_index_name2(link_table_id)
+        query = f"""
+            MATCH (:{self.id})-[r:{index_name}]->(:{link_table_id})
+            WHERE r.ID = {link_id}
+            DELETE r
+        """
+        self.db.execute(query)
+
+    def delete_links(self, link_table_id, link_node_id = None, source_ids = None, link_ids = None, index_name = None):
+        if not index_name:
+            index_name = self.default_index_name2(link_table_id)
+
+        if link_ids:
+            condition = f'r.ID in {link_ids}'
+            #condition = ' OR '.join(f'r.ID = {id}' for id in link_ids) #bug in IN op!
+        else:
+            if not isinstance(source_ids, list):
+                source_ids = list(source_ids)
+            #condition = ' OR '.join(f'a.ID = {id}' for id in source_ids) #bug in IN op!
+            condition = f'a.ID in {source_ids}'
+            condition = f'({condition}) AND b.ID = {link_node_id}'
+        query = f"""
+            MATCH (a:{self.id})-[r:{index_name}]->(b:{link_table_id})
+            WHERE {condition}
+            DELETE r
+        """
+        self.db.execute(query)
+
+    def init_list(self):
+        list = self.read_rows(limit = self.limit)
+        length = len(list)
+        #possibly the table has more rows
+        if length == self.limit:
+            #qresult = self.db.execute()
+            ql = self.db.qlist(f"MATCH (n:{self.id}) RETURN count(n)")
+            self.length = ql[0][0]
+        else:
+            self.length = length
+        self.list = Dblist(self, list)
+
+    def get_init_list(self, search_string = None):
+        lst = self.list
+        lst.update = dict(type ='init', length = self.length,
+            limit = self.limit, data = self.list.get_delta_0())
+        return lst
+
+    def read_rows(self, skip = 0, limit = 0):
+        query = qb().match().node(self.id, 'a').return_literal('a.*').order_by('a.ID')
+        if skip:
+            query = query.skip(skip)
+        query = query.limit(limit if limit else self.limit)
+        return self.db.qlist(query)
+
+    def assign_row(self, row_array):
+        return self.db.update_row(self.id, row_array[-1], {name : value for name, value in zip(self.node_columns, row_array)})
+
+    def delete_row(self, id):
+        query = query_offset(self.id, id)
+        self.length -= 1
+        return self.db.execute(query.detach_delete('a'))
+
+    def delete_rows(self, ids):
+        #condition = ' OR '.join(f'a.ID = {id}' for id in ids) #bug in IN op!
+        condition = f'a.ID in {ids}'
+        query = f"""
+            MATCH (a:{self.id})
+            WHERE {condition}
+            DELETE a
+        """
+        return self.db.execute(query)
+
+    def append_row(self, row):
+        """row can be list or dict, returns ID"""
+        if isinstance(row, list):
+            props = {name: value for name, value in zip(self.node_columns, row) if value is not None}
+        try:
+            answer = self.db.execute(qb().create().node(self.id, 'a', props).return_literal('a.ID'))
+        except Exception as e:
+            return None
+        if answer.has_next():
+            self.length += 1
+            return answer.get_next()[0]
+        return None
+
+    def append_rows(self, rows):
+        """row can be list or dict"""
+        rows_arr = []
+        for row in rows:
+            row = {name: value for name, value in zip(self.node_columns, row)} if not isinstance(row, dict) else row
+            srow = f' {{{Properties(row).to_str()}}}'
+            rows_arr.append(srow)
+        rows_arr = ','.join(rows_arr)
+
+        query = (qb().with_(f'[{rows_arr}] AS rows')
+            .unwind('rows AS row')
+            .create()
+            .node(self.id, 'n', {p: f'row.{p}' for p in self.node_columns}, escape=False)
+            .return_literal('n.*'))
+
+        self.length += len(rows)
+        return self.db.qlist(query)
+
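For orientation, a minimal usage sketch of the Database/Dbtable API introduced by this module. It relies only on calls visible in the diff above; the database path, table name and sample rows are invented for illustration, and it assumes the kuzu and cymple dependencies declared by the package are installed.

    from unisi.kdb import Database

    db = Database('demo_db')                      # opens or creates a Kuzu database at this path
    # column types are inferred from the sample rows; an ID SERIAL primary key is added automatically
    persons = db.get_table(id='Person', limit=100,
                           headers=['name', 'age'],
                           rows=[['Alice', 33], ['Bob', 41]])
    new_id = persons.append_row(['Carol', 27])    # returns the SERIAL ID of the inserted node
    persons.assign_row(['Carol', 28, new_id])     # the last element is treated as the row ID
    persons.delete_rows([new_id])
    print(persons.read_rows(limit=10))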
unisi/multimon.py
CHANGED
@@ -17,12 +17,14 @@ def multiprocessing_pool():
         _multiprocessing_pool = multiprocessing.Pool(pool)
     return _multiprocessing_pool
 
-async def run_external_process(long_running_task, *args,
+async def run_external_process(long_running_task, *args, progress_callback = None, **kwargs):
     if progress_callback:
-        if
+        if args[-1] is None:
             queue = multiprocessing.Manager().Queue()
-
-
+            args = *args[:-1], queue
+        else:
+            queue = args[-1]
+
     result = multiprocessing_pool().apply_async(long_running_task, args, kwargs)
     if progress_callback:
         while not result.ready() or not queue.empty():
@@ -44,7 +46,7 @@ def monitor_process(monitor_shared_arr):
     while True:
         #Wait for data in the shared array
         while monitor_shared_arr[0] == b'\x00':
-            time.sleep(
+            time.sleep(monitor_tick)
         if timer is not None:
             timer -= monitor_tick
             if timer < 0:
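The reworked run_external_process takes progress_callback as a keyword argument and expects the last positional argument to be a queue slot: pass None to let it create a managed queue, or pass a queue of your own. A rough sketch of that calling convention with an invented worker; how the callback is driven while the pool result is pending is only partly visible in this hunk, so the return-value semantics here are an assumption.

    import asyncio
    from unisi.multimon import run_external_process

    def crunch(n, queue):                  # the trailing parameter receives the progress queue
        for i in range(n):
            queue.put((i + 1) / n)         # report fractional progress
        return n * n

    async def main():
        # None in the last position is replaced by a multiprocessing.Manager().Queue()
        result = await run_external_process(crunch, 10, None, progress_callback=print)
        print('done:', result)             # assumes the coroutine resolves to the worker's return value

    asyncio.run(main())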
unisi/reloader.py
CHANGED
@@ -15,8 +15,8 @@ if config.hot_reload:
     import os, sys, traceback
     from watchdog.observers import Observer
     from watchdog.events import PatternMatchingEventHandler
-    from .users import User
-    from .utils import divpath,
+    from .users import User, Redesign
+    from .utils import divpath, app_dir
     from .autotest import check_module
     import re, collections
 
unisi/server.py
CHANGED
@@ -88,7 +88,15 @@ async def websocket_handler(request):
         await user.delete()
     return ws
 
+def ensure_directory_exists(directory_path):
+    if not os.path.exists(directory_path):
+        os.makedirs(directory_path)
+        print(f"Directory '{directory_path}' created.")
+
 def start(appname = None, user_type = User, http_handlers = []):
+    ensure_directory_exists(screens_dir)
+    ensure_directory_exists(blocks_dir)
+
     if appname:
         config.appname = appname
 
@@ -101,7 +109,7 @@ def start(appname = None, user_type = User, http_handlers = []):
     http_handlers += [web.static(f'/{config.upload_dir}', upload_dir),
         web.get('/{tail:.*}', static_serve), web.post('/', post_handler)]
 
-    print(f'Start {appname} web server..')
+    #print(f'Start {appname} web server..')
     app = web.Application()
     app.add_routes(http_handlers)
     web.run_app(app, port=port)
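In practice this means a new project no longer has to create the screens and blocks folders by hand: start() now creates the directories pointed to by screens_dir and blocks_dir before serving. A minimal launch sketch with an invented app name:

    from unisi.server import start

    start('Demo app')   # missing screens/blocks directories are created before the web server runs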
unisi/tables.py
CHANGED
@@ -1,75 +1,199 @@
 from .guielements import Gui
+from .common import references, set_defaults, Warning, pretty4
+from .dbelements import Dblist
 
-
-
+relation_mark = 'Ⓡ'
+exclude_mark = '✘'
+
+def iterate(iter, times):
+    for i, val in enumerate(iter):
+        if i == times:
+            return val
+
+def accept_cell_value(table, dval):
+    value = dval['value']
     if not isinstance(value, bool):
         try:
             value = float(value)
-        except
-
-    table
-
-
-
-
-
-        if isinstance(value, list):
-            if keyed:
-                table.rows = [row for row in table.rows if row[-1] not in value]
-            else:
-                value.sort(reverse=True)
-                for v in value:
-                    del table.rows[v]
-                table.value = []
+        except:
+            pass
+    if hasattr(table,'id'):
+        dbt = table.rows.dbtable
+        in_node, field = table.index2node_relation(dval['cell'])
+        if in_node:
+            table_id = table.id
+            row_id = table.rows[dval['delta']][len(dbt.table_fields)]
         else:
-
-
-
-
+            table_id = table.__link__[2]
+            row_id = dval['id']
+        dbt.db.update_row(table_id, row_id, {field: value}, in_node)
+
+    table.rows[dval['delta']][dval['cell']] = value
+
+def delete_table_row(table, value):
+    if table.selected_list:
+        if hasattr(table, 'link') and table.filter:
+            link_table, rel_props, rel_name = table.__link__
+            if not isinstance(value, list):
+                value = [value]
+            table.rows.dbtable.delete_links(link_table.id, link_ids = value, index_name = rel_name)
+            table.__link_table_selection_changed__(link_table, link_table.value)
+            return table
+        elif isinstance(value, list):
+            value.sort(reverse = True)
+            for v in value:
+                del table.rows[v]
+            table.value = []
+        else:
+            del table.rows[value]
             table.value = None
 
-def append_table_row(table,
-    ''' append has to return new row
-
-
-
-    new_row
+def append_table_row(table, search_str):
+    ''' append has to return new row, value is the search string value in the table'''
+    new_row = [None] * len(table.rows.dbtable.table_fields)
+    if getattr(table,'id', None):
+        id = table.rows.dbtable.list.append(new_row)
+        new_row.append(id)
+        if hasattr(table, 'link') and table.filter:
+            link_table, _, rel_name = table.__link__
+            for linked_id in link_table.selected_list:
+                relation = table.rows.dbtable.add_link(id, link_table.id, linked_id, link_index_name = rel_name)
+                new_row.extend(relation)
+                break
     table.rows.append(new_row)
     return new_row
 
+def get_chunk(obj, start_index):
+    delta, data = obj.rows.get_delta_chunk(start_index)
+    return {'type': 'updates', 'index': delta, 'data': data}
+
 class Table(Gui):
     def __init__(self, *args, panda = None, **kwargs):
         if panda is not None:
             self.mutate(PandaTable(*args, panda=panda, **kwargs))
         else:
-            super().__init__(*args, **kwargs)
-
-
-
-
-            if
-            self
-
-
-
-
+            super().__init__(*args, **kwargs)
+            set_defaults(self, dict(headers = [], type = 'table', value = None, rows = [], editing = False, dense = True))
+            self.__headers__ = self.headers[:]
+            if getattr(self,'id', None):
+                db = references.context_user().db
+                if db:
+                    db.set_db_list(self)
+                else:
+                    raise AssertionError('Config db_dir is not defined!')
+                self.get = get_chunk
+                has_link = hasattr(self, 'link')
+                set_defaults(self, {'filter': has_link, 'ids': False, 'search': ''})
+                if has_link:
+                    prop_types = {}
+                    rel_name = ''
+                    match self.link:
+                        case [link_table, prop_types, rel_name]: ...
+                        case [link_table, prop_types]: ...
+                        case link_table: ...
+                    rel_name, rel_fields = self.rows.dbtable.get_rel_fields2(link_table.id, prop_types, rel_name)
+                    if not hasattr(link_table, 'id'):
+                        raise AttributeError('Linked table has to be persistent!')
+                    self.__link__ = link_table, list(prop_types.keys()), rel_name
+                    self.link = rel_fields
+
+                    @references.handle(link_table,'changed')
+                    def link_table_selection_changed(master_table, val, init = False):
+                        lstvalue = val if isinstance(val, list) else [val] if val != None else []
+                        if lstvalue:
+                            link_ids = [link_table.rows[val][-1] for val in lstvalue]
+                            link_rows = self.rows.dbtable.calc_linked_rows(rel_name, link_ids, self.filter, self.search)
+                        else:
+                            link_rows = Dblist(self.rows.dbtable, cache = [])
+                        if self.filter:
+                            self.clean_selection()
+                            self.rows = link_rows
+                        else:
+                            selected_ids = [link_rows[i][-1] for i in range(len(link_rows))]
+                            self.value = selected_ids
+                            #restore table rows if they are not rows
+                            if self.rows.cache is not None:
+                                self.rows = self.rows.dbtable.get_init_list()
+                        if not init:
+                            master_table.accept(val)
+                        return self
+                    link_table_selection_changed(link_table, link_table.value, True)
+                    self.__link_table_selection_changed__ = link_table_selection_changed
+
+                    @references.handle(self,'filter')
+                    def filter_status_changed(table, value):
+                        self.filter = value
+                        link_table_selection_changed(link_table, link_table.value, True)
+                        self.calc_headers()
+                        return self
+
+                    @references.handle(self,'changed')
+                    def changed_selection_causes__changing_links(self, new_value):
+                        if link_table.value is not None and link_table.value != []:
+                            #if link table is in multi mode, links are not editable
+                            if not self.filter and not isinstance(link_table.value, list | tuple):
+                                if self.editing:
+                                    actual = set(new_value if isinstance(new_value, list) else [] if new_value is None else [new_value])
+                                    old = set(self.value if isinstance(self.value, list) else ([] if self.value is None else [self.value]))
+                                    deselected = old - actual
+                                    if deselected:
+                                        self.rows.dbtable.delete_links(link_table.id, link_table.value, deselected)
+                                    selected = actual - old
+                                    if selected:
+                                        self.rows.dbtable.add_links(link_table.id, selected, link_table.value)
+                                else:
+                                    return Warning('The linked table is not in edit mode', self)
+                        return self.accept(new_value)
+
+                @references.handle(self,'search')
+                def search_changed(table, value):
+                    self.search = value
+                    if has_link:
+                        link_table_selection_changed(link_table, link_table.value, True)
+                    else:
+                        self.rows = self.rows.dbtable.get_init_list(self.search)
+                    return self
+
+                self.calc_headers()
+
+            elif hasattr(self,'ids'):
+                raise ValueError("Only persistent tables can have 'ids' option!")
 
         if getattr(self,'edit', True):
-
-
-
-            self.append = append_table_row
-            if not hasattr(self,'modify'):
-                self.modify = accept_cell_value
-
+            set_defaults(self,{'delete': delete_table_row, 'append': append_table_row, 'modify': accept_cell_value})
+
+    @property
     def selected_list(self):
         return [self.value] if self.value != None else [] if type(self.value) == int else self.value
 
-    def
-        self.
-        self.value = [] if isinstance(self.value,(tuple, list)) else None
+    def clean_selection(self):
+        self.value = [] if isinstance(self.value,tuple | list) else None
         return self
+
+    def calc_headers(self):
+        """only for persistent"""
+        table_fields = self.rows.dbtable.table_fields
+        self.headers = self.__headers__[:] if self.__headers__ else [pretty4(prop) for prop in table_fields]
+        only_node_headers = len(self.headers) == len(table_fields)
+        if self.ids:
+            self.headers.insert(len(table_fields), 'ID')
+        elif self.filter:
+            self.headers.insert(len(table_fields), exclude_mark + 'ID')
+        if self.filter:
+            if only_node_headers:
+                self.headers.extend([relation_mark + pretty4(link_field) for link_field in self.link])
+            if self.ids:
+                self.headers.append(relation_mark + 'ID')
 
+    def index2node_relation(self, cell_index):
+        """calculate delta to property of node or link for persistent"""
+        table_fields = self.rows.dbtable.table_fields
+        delta = cell_index - len(table_fields)
+        if delta < 0:
+            return True, iterate(table_fields, cell_index)
+        delta -= 1 #ID field
+        return False, iterate(self.link, delta)
+
 def delete_panda_row(table, row_num):
     df = table.__panda__
     if row_num < 0 or row_num >= len(df):
@@ -99,17 +223,13 @@ class PandaTable(Table):
             raise Exception('PandaTable has to get panda = pandaTable as an argument.')
         self.headers = panda.columns.tolist()
         if fix_headers:
-            self.headers = [header
+            self.headers = [pretty4(header) for header in self.headers]
         self.rows = panda.values.tolist()
         self.__panda__ = panda
 
         if getattr(self,'edit', True):
-
-
-            if not hasattr(self,'append'):
-                self.append = append_panda_row
-            if not hasattr(self,'modify'):
-                self.modify = accept_panda_cell
+            set_defaults(self,{'delete': delete_panda_row, 'append': append_panda_row,
+                'modify': accept_panda_cell})
     @property
     def panda(self):
         return getattr(self,'__panda__',None)
|