Qwael 3.9.0.tar.gz → 3.9.1.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {qwael-3.9.0 → qwael-3.9.1}/PKG-INFO +1 -1
- qwael-3.9.1/Qwael/MultiDB.py +328 -0
- {qwael-3.9.0 → qwael-3.9.1}/Qwael/__init__.py +2 -1
- {qwael-3.9.0 → qwael-3.9.1}/Qwael.egg-info/PKG-INFO +1 -1
- {qwael-3.9.0 → qwael-3.9.1}/Qwael.egg-info/SOURCES.txt +1 -0
- {qwael-3.9.0 → qwael-3.9.1}/setup.py +1 -1
- {qwael-3.9.0 → qwael-3.9.1}/LICENSE +0 -0
- {qwael-3.9.0 → qwael-3.9.1}/Qwael/DRİVE.py +0 -0
- {qwael-3.9.0 → qwael-3.9.1}/Qwael/DoIP.py +0 -0
- {qwael-3.9.0 → qwael-3.9.1}/Qwael/filesz.py +0 -0
- {qwael-3.9.0 → qwael-3.9.1}/Qwael.egg-info/dependency_links.txt +0 -0
- {qwael-3.9.0 → qwael-3.9.1}/Qwael.egg-info/requires.txt +0 -0
- {qwael-3.9.0 → qwael-3.9.1}/Qwael.egg-info/top_level.txt +0 -0
- {qwael-3.9.0 → qwael-3.9.1}/README.md +0 -0
- {qwael-3.9.0 → qwael-3.9.1}/setup.cfg +0 -0
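The only substantive addition in this release is the new Qwael/MultiDB.py module, a small file-backed table store that keeps all tables in one plain-text .mdb file. Reading the code below, each table is stored as a [TABLE name] header line, a JSON list of column names, a JSON object of per-column rules, and then one JSON-encoded row per line, with the newest rows inserted directly under the header block. A hypothetical sketch of the on-disk layout (table name and values invented for illustration):

[TABLE users]
["id", "name", "age"]
{"id": "ID", "name": "small", "age": "number"}
["2", "bob", "25"]
["1", "alice", "30"]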
qwael-3.9.1/Qwael/MultiDB.py (new file)
@@ -0,0 +1,328 @@
+import os
+import json
+
+class MultiDB:
+    def __init__(self, filename="database.mdb"):
+        self.filename = filename
+        self.lockfile = filename + ".lock"
+
+        if not os.path.exists(self.filename):
+            with open(self.filename, "w", encoding="utf-8") as f:
+                f.write("")
+
+    # -----------------------------
+    # LOCK SYSTEM
+    # -----------------------------
+    def _lock(self):
+        with open(self.lockfile, "w") as f:
+            f.write("locked")
+
+    def _unlock(self):
+        if os.path.exists(self.lockfile):
+            os.remove(self.lockfile)
+
+    # -----------------------------
+    # FILE I/O
+    # -----------------------------
+    def _read(self):
+        with open(self.filename, "r", encoding="utf-8") as f:
+            return f.read().splitlines()
+
+    def _write(self, lines):
+        with open(self.filename, "w", encoding="utf-8") as f:
+            f.write("\n".join(lines))
+
+    # -----------------------------
+    # TABLE FINDER
+    # -----------------------------
+    def _find_table(self, lines, table):
+        for i, line in enumerate(lines):
+            if line.strip() == f"[TABLE {table}]":
+                return i
+        return -1
+
+    # -----------------------------
+    # RULE SPLITTER
+    # -----------------------------
+    def _split_rules(self, rule):
+        if not rule:
+            return []
+        rule = rule.replace(" ", "").replace("+", ",").replace("#-", ",#-")
+        return [r for r in rule.split(",") if r]
+
+    # -----------------------------
+    # CREATE TABLE
+    # -----------------------------
+    def create_table(self, name, columns):
+        self._lock()
+        lines = self._read()
+
+        if self._find_table(lines, name) != -1:
+            self._unlock()
+            raise ValueError("Bu tablo zaten var!")
+
+        col_names = []
+        rules = {}
+
+        for col in columns:
+            if isinstance(col, dict):
+                key = list(col.keys())[0]
+                rule = col[key]
+                col_names.append(key)
+                rules[key] = rule
+            else:
+                col_names.append(col)
+                rules[col] = ""
+
+        lines.append(f"[TABLE {name}]")
+        lines.append(json.dumps(col_names, ensure_ascii=False))
+        lines.append(json.dumps(rules, ensure_ascii=False))
+
+        self._write(lines)
+        self._unlock()
+
+    # -----------------------------
+    # VALIDATION
+    # -----------------------------
+    def _validate(self, value, rule, lines, table_pos, col_index):
+        rules = self._split_rules(rule)
+
+        for r in rules:
+            if r == "ID":
+                continue
+
+            if r == "number":
+                if not value.isdigit():
+                    return False
+
+            if r == "gmail":
+                if not ("@" in value and value.endswith(".com")):
+                    return False
+
+            if r == "big":
+                if not all(ch.isupper() or ch.isdigit() for ch in value):
+                    return False
+
+            if r == "small":
+                if not all(ch.islower() or ch.isdigit() for ch in value):
+                    return False
+
+            if r == "#-":
+                idx = table_pos + 3
+                while idx < len(lines) and not lines[idx].startswith("[TABLE"):
+                    row = json.loads(lines[idx])
+                    if row[col_index] == value:
+                        return False
+                    idx += 1
+
+        return True
+
+    # -----------------------------
+    # GIVE (True/False destekli)
+    # -----------------------------
+    def give(self, table, values, output_list=None):
+        try:
+            self._lock()
+            lines = self._read()
+
+            tpos = self._find_table(lines, table)
+            if tpos == -1:
+                raise ValueError("Tablo bulunamadı!")
+
+            col_names = json.loads(lines[tpos + 1])
+            rules = json.loads(lines[tpos + 2])
+
+            final_values = []
+
+            # AUTO ID
+            if len(values) + 1 == len(col_names) and "ID" in rules.values():
+                auto_id = 1
+                idx = tpos + 3
+                while idx < len(lines) and not lines[idx].startswith("[TABLE"):
+                    row = json.loads(lines[idx])
+                    auto_id = max(auto_id, int(row[0]) + 1)
+                    idx += 1
+
+                final_values.append(str(auto_id))
+                final_values.extend(values)
+
+            elif len(values) != len(col_names):
+                raise ValueError("Gönderilen veri sayısı yanlış!")
+
+            else:
+                final_values = values[:]
+
+            # VALIDATION
+            for i, col in enumerate(col_names):
+                rule = rules[col]
+                if not self._validate(final_values[i], rule, lines, tpos, i):
+                    raise ValueError(f"{col} alanı için veri kurala uymuyor: {rule}")
+
+            # Insert row
+            lines.insert(tpos + 3, json.dumps(final_values, ensure_ascii=False))
+            self._write(lines)
+            self._unlock()
+
+            if output_list is not None:
+                output_list.append(True)
+
+            return True
+
+        except Exception:
+            if output_list is not None:
+                output_list.append(False)
+                return False
+            raise
+
+        finally:
+            self._unlock()
+
+    # -----------------------------
+    # READ FULL TABLE
+    # -----------------------------
+    def table_full(self, table):
+        lines = self._read()
+        tpos = self._find_table(lines, table)
+        if tpos == -1:
+            raise ValueError("Tablo yok!")
+
+        result = []
+        idx = tpos + 3
+        while idx < len(lines) and not lines[idx].startswith("[TABLE"):
+            result.append(json.loads(lines[idx]))
+            idx += 1
+        return result
+
+    # -----------------------------
+    # CLEAR TABLE (DATA ONLY)
+    # -----------------------------
+    def clear_full(self, table):
+        self._lock()
+        lines = self._read()
+
+        tpos = self._find_table(lines, table)
+        if tpos == -1:
+            self._unlock()
+            raise ValueError("Tablo bulunamadı!")
+
+        new_lines = []
+        i = 0
+        while i < len(lines):
+            if i != tpos:
+                new_lines.append(lines[i])
+                i += 1
+                continue
+
+            new_lines.append(lines[i])
+            new_lines.append(lines[i + 1])
+            new_lines.append(lines[i + 2])
+
+            j = i + 3
+            while j < len(lines) and not lines[j].startswith("[TABLE"):
+                j += 1
+
+            i = j
+
+        self._write(new_lines)
+        self._unlock()
+
+    # -----------------------------
+    # UPDATE TABLE
+    # -----------------------------
+    def table_update(self, table, columns):
+        self._lock()
+        lines = self._read()
+
+        tpos = self._find_table(lines, table)
+        if tpos == -1:
+            self._unlock()
+            raise ValueError("Böyle bir tablo yok!")
+
+        col_names = []
+        rules = {}
+        for col in columns:
+            if isinstance(col, dict):
+                key = list(col.keys())[0]
+                rule = col[key]
+                col_names.append(key)
+                rules[key] = rule
+            else:
+                col_names.append(col)
+                rules[col] = ""
+
+        data_rows = []
+        idx = tpos + 3
+        while idx < len(lines) and not lines[idx].startswith("[TABLE"):
+            data_rows.append(json.loads(lines[idx]))
+            idx += 1
+
+        new_data_rows = []
+        old_col_names = json.loads(lines[tpos + 1])
+
+        for row in data_rows:
+            new_row = []
+            for col in col_names:
+                if col in old_col_names:
+                    new_row.append(row[old_col_names.index(col)])
+                else:
+                    new_row.append("")
+            new_data_rows.append(new_row)
+
+        new_lines = []
+        i = 0
+        while i < len(lines):
+            if i == tpos:
+                new_lines.append(f"[TABLE {table}]")
+                new_lines.append(json.dumps(col_names, ensure_ascii=False))
+                new_lines.append(json.dumps(rules, ensure_ascii=False))
+                for row in new_data_rows:
+                    new_lines.append(json.dumps(row, ensure_ascii=False))
+
+                j = tpos + 3
+                while j < len(lines) and not lines[j].startswith("[TABLE"):
+                    j += 1
+                i = j
+
+            else:
+                new_lines.append(lines[i])
+                i += 1
+
+        self._write(new_lines)
+        self._unlock()
+
+    # -----------------------------
+    # CONTROL SYSTEM
+    # -----------------------------
+    def control(self, table, conditions: dict, output_list=None):
+        lines = self._read()
+        tpos = self._find_table(lines, table)
+        if tpos == -1:
+            raise ValueError("Tablo bulunamadı!")
+
+        col_names = json.loads(lines[tpos + 1])
+        col_indexes = {k: col_names.index(k) for k in conditions}
+
+        found = False
+        idx = tpos + 3
+
+        while idx < len(lines) and not lines[idx].startswith("[TABLE"):
+            row = json.loads(lines[idx])
+
+            match = True
+            for key, val in conditions.items():
+                if row[col_indexes[key]] != val:
+                    match = False
+                    break
+
+            if match:
+                found = True
+                break
+
+            idx += 1
+
+        if output_list is None:
+            print("Var" if found else "Yok")
+        else:
+            output_list.append(found)
+
+        return found
All other files listed above are unchanged between 3.9.0 and 3.9.1.
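For orientation, here is a minimal usage sketch of the MultiDB API added above; the import path follows the package layout (Qwael/MultiDB.py), and the file name, table name, columns, and values are hypothetical. Columns can be plain names or single-key {name: rule} dicts: "ID" auto-increments the column, "number" requires digits, "small"/"big" constrain letter case, "gmail" requires an address containing "@" and ending in ".com", "#-" enforces uniqueness, and rules can be combined with "+".

# Hypothetical usage of the new MultiDB class; names and values are examples only.
from Qwael.MultiDB import MultiDB

db = MultiDB("example.mdb")  # creates example.mdb if it does not already exist

# Column rules: auto-incremented id, lowercase-only name, digits-only age.
db.create_table("users", [{"id": "ID"}, {"name": "small"}, {"age": "number"}])

# With an "ID" column, one fewer value is passed and the id is generated.
db.give("users", ["alice", "30"])            # returns True on success

print(db.table_full("users"))                # [["1", "alice", "30"]]

result = []
db.control("users", {"name": "alice"}, output_list=result)  # appends True or False
print(result)                                # [True]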