databae 0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- databae/__init__.py +5 -0
- databae/config.py +17 -0
- databae/edit.py +46 -0
- databae/getters.py +140 -0
- databae/lookup.py +59 -0
- databae/model.py +193 -0
- databae/properties.py +274 -0
- databae/query.py +83 -0
- databae/session.py +159 -0
- databae/setters.py +58 -0
- databae/util.py +90 -0
- databae-0.1.dist-info/LICENSE +21 -0
- databae-0.1.dist-info/METADATA +21 -0
- databae-0.1.dist-info/RECORD +16 -0
- databae-0.1.dist-info/WHEEL +5 -0
- databae-0.1.dist-info/top_level.txt +1 -0
databae/__init__.py
ADDED
databae/config.py
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
from fyg import Config

# Global databae configuration; values may be overridden by the host
# application before any engine/session is created.
config = Config({
    "cache": True,  # sets cache_ok on the custom SQLAlchemy types
    "refcount": False,  # maintain CTRefCount reference counters
    "main": "sqlite:///data.db",  # default database URL
    "test": "sqlite:///data_test.db",  # database URL used by test sessions
    "blob": "blob",  # directory where Blob column files are stored
    "alter": False, # add new columns to tables - sqlite only!
    "echo": False,  # SQLAlchemy engine echo flag
    "pool": {
        "null": True,  # use NullPool (no connection pooling)
        "size": 10,
        "recycle": 30,
        "overflow": 20
    }
})
|
databae/edit.py
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
from datetime import datetime
|
|
2
|
+
from .util import get_model, blobify
|
|
3
|
+
from .properties import KeyWrapper
|
|
4
|
+
from .getters import get
|
|
5
|
+
|
|
6
|
+
def _trans_key(val):
    # Wrap a raw urlsafe key string in a KeyWrapper for assignment.
    return KeyWrapper(val)
|
|
8
|
+
|
|
9
|
+
def _trans_keylist(val):
    # Wrap each urlsafe key string in the sequence as a KeyWrapper.
    return list(map(KeyWrapper, val))
|
|
11
|
+
|
|
12
|
+
def _trans_datetime(val):
|
|
13
|
+
return isinstance(val, datetime) and val or datetime.strptime(val, "%Y-%m-%d %H:%M:%S")
|
|
14
|
+
|
|
15
|
+
# Registry mapping schema property types to input-transformation
# functions applied by edit() before values are assigned.
ETRANS = {
    "key": _trans_key,
    "keylist": _trans_keylist,
    "datetime": _trans_datetime
}

def add_edit_transformation(ptype, func):
    # Register (or replace) the transformation for a property type.
    ETRANS[ptype] = func
|
|
22
|
+
|
|
23
|
+
def edit(data, session=None, blobifier=None):
    """Create or update an entity from a request-style dict.

    data must contain "modelName"; when it also contains "key" the
    existing entity is loaded and updated, otherwise a new one is
    created. Returns the saved entity, or the error string produced
    by beforeedit() when the edit is vetoed.
    """
    haskey = "key" in data
    extant = haskey and get(data["key"], session)
    blobifier and blobify(data, blobifier, extant)
    ent = extant or get_model(data["modelName"])()
    if haskey: # what about nokey uniqueness checks?
        editFailure = ent.beforeedit(data)
        if editFailure:
            return editFailure
    for propname, val in list(data.items()):
        if propname in ent._schema:
            if val:  # NOTE(review): falsy values (0, False, "") are skipped -- confirm intended
                proptype = ent._schema[propname]
                # pipeline: per-property pre hook, then type-level
                # transform (ETRANS), then per-property post hook
                if hasattr(ent, "_pre_trans_%s"%(propname,)):
                    val = getattr(ent, "_pre_trans_%s"%(propname,))(val)
                if proptype in ETRANS:
                    val = ETRANS[proptype](val)
                if hasattr(ent, "_trans_%s"%(propname,)):
                    val = getattr(ent, "_trans_%s"%(propname,))(val)
                setattr(ent, propname, val)
    if not haskey and hasattr(ent, "oncreate"):
        ent.oncreate()
    ent.put()
    return ent
|
databae/getters.py
ADDED
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
import os, json, magic
|
|
2
|
+
from base64 import b64decode
|
|
3
|
+
from datetime import datetime
|
|
4
|
+
from sqlalchemy import func
|
|
5
|
+
from six import string_types
|
|
6
|
+
from .util import *
|
|
7
|
+
from .config import config
|
|
8
|
+
|
|
9
|
+
def _apply_filter(query, key, obj, modelName, joinz):
    # Apply one filter spec {"value": ..., "comparator": ...} to query.
    # A dotted key ("othermodel.prop") filters on a joined table,
    # joining it first if it is not already in joinz.
    from .properties import KeyWrapper
    val = obj["value"]
    comp = obj["comparator"]
    if "." in key:
        altname, key = key.split(".")
        mod = get_model(altname)
        schema = get_schema(altname)
        if altname not in joinz:
            _join(modelName, altname, joinz, query)
    else:
        schema = get_schema(modelName)
        mod = get_model(modelName)
    prop = getattr(mod, key)
    ptype = schema[key]
    # normalize raw input values into the column's expected type
    if ptype == "key" and not isinstance(val, KeyWrapper):
        val = KeyWrapper(val)
    elif ptype == "datetime" and not isinstance(val, datetime):
        val = datetime.strptime(val, "%Y-%m-%d %H:%M:%S")
    if comp == "like":
        # case-insensitive LIKE
        query.filter(func.lower(prop).like(val.lower()))
    elif comp == "contains":
        query.filter(prop.contains(val))
    elif comp == "lacks":
        query.filter(~prop.contains(val))
    elif comp == "mod":
        query.filter(prop % val == 0)
    elif comp.startswith("near"): # lat/lng
        # "near" or "near_<miles>": bounding range of mdist miles
        mdist = 10
        if "_" in comp:
            mdist = int(comp.split("_")[1])
        rad = mdist / 69.11  # ~miles per degree of latitude
        query.filter(prop > val - rad)
        query.filter(prop < val + rad)
    elif comp == "!=" and type(val) == list: # allow multiple exclusions...
        for item in val:
            query.filter(operators[comp](prop, item))
    else:
        # "operators" (from .util star import) maps comparator strings
        # to sqlalchemy operator callables
        query.filter(operators[comp](prop, val))
|
|
48
|
+
|
|
49
|
+
def _join(modelName, altname, joinz, query):
    # Join altname's table into query, using whichever side declares
    # the key relationship in its schema's "_kinds" mapping.
    joinz.add(altname)
    altschema = get_schema(altname)
    if modelName in altschema["_kinds"]:
        mod1 = get_model(modelName)
        mod2 = get_model(altname)
        kinds = altschema["_kinds"][modelName]
    else:
        mod1 = get_model(altname)
        mod2 = get_model(modelName)
        kinds = get_schema(modelName)["_kinds"][altname]
    # first kind-named property present on mod2 is the join column
    # NOTE(review): if no kind matches, mod2attr stays unbound and the
    # join below raises NameError -- confirm kinds always resolve
    for kind in kinds:
        if hasattr(mod2, kind):
            mod2attr = getattr(mod2, kind)
            break
    query.join(get_model(altname), mod1.key == mod2attr)
|
|
65
|
+
|
|
66
|
+
def get_page(modelName, limit, offset, order='index', filters=None, session=None, count=False, exporter="export"):
    """Return one page of exported entities, or a total when count=True.

    filters maps "prop" or "othermodel.prop" to a spec dict of
    {"value": ..., "comparator": ...}; order may be "-"-prefixed for
    descending and may be a dotted reference (handled here when joins
    are involved, otherwise delegated to Query.order()).

    Fix: filters previously defaulted to a shared mutable {} -- any
    mutation would have leaked across calls; it now defaults to None.
    """
    filters = filters or {}
    query = get_model(modelName).query(session=session)
    joinz = set()
    for key, obj in list(filters.items()):
        _apply_filter(query, key, obj, modelName, joinz)
    if "." in order:
        mod, attr = order.split(".")[-2:]
        if joinz or not get_model(attr): # skip refcount shortcut if filtering on joined table
            desc = False
            if mod.startswith("-"):
                desc = True
                mod = mod[1:]
            order = getattr(get_model(mod), attr)
            if desc:
                order = -order
            mod not in joinz and _join(modelName, mod, joinz, query)
    query.order(order)
    if count:
        return query.count()
    return [getattr(d, exporter)() for d in query.fetch(limit, offset)]
|
|
86
|
+
|
|
87
|
+
def getall(entity=None, query=None, keys_only=False, session=None):
    """Return every result for query (preferred) or entity's whole table.

    With keys_only, only each result's key is returned.
    """
    if query:
        results = query.all()
    elif entity:
        results = entity.query(session=session).all()
    # TODO: query for keys. for now, do with query.
    return [r.key for r in results] if keys_only else results
|
|
95
|
+
|
|
96
|
+
def b64d(compkey):
    # Decode a urlsafe/b64 key string to its JSON text, re-padding it
    # first (pad_key comes from .util via star import).
    return b64decode(pad_key(compkey)).decode()
|
|
98
|
+
|
|
99
|
+
def key2data(b64compkey):
    # Decode a key (KeyWrapper or b64 string) to its {"model", "index"}
    # dict.
    if not isinstance(b64compkey, string_types):
        b64compkey = b64compkey.urlsafe()  # KeyWrapper -> b64 string
    return json.loads(b64d(b64compkey))
|
|
103
|
+
|
|
104
|
+
def get(b64compkey, session=None):
    """Load a single entity by its b64 composite key.

    Reports undecodable keys via fyg.util.error() (presumably raises --
    compkey below would otherwise be unbound).
    """
    try:
        compkey = key2data(b64compkey)
    except Exception:  # narrowed from bare except: don't swallow KeyboardInterrupt/SystemExit
        from fyg.util import error
        error("bad key: %s"%(b64compkey,))
    return modelsubs[compkey["model"]].query(session=session).query.get(compkey["index"])
|
|
111
|
+
|
|
112
|
+
def get_multi(b64keys, session=None):
    # b64keys can be Key instances or b64 key strings
    # Batch-load entities with one query per model, returning them in
    # the same order as the input keys.
    if b64keys and not isinstance(b64keys[0], string_types):
        b64keys = [k.urlsafe() for k in b64keys]
    keys = [json.loads(b64d(k)) for k in b64keys]
    ents = {}  # model name -> {"model": class, "indices": [...]}
    res = {}  # urlsafe key -> loaded entity
    for k in keys:
        mod = k["model"]
        if mod not in ents:
            ents[mod] = {
                "model": modelsubs[mod],
                "indices": []
            }
        ents[mod]["indices"].append(k["index"])
    for key, val in list(ents.items()):
        mod = val["model"]
        for r in mod.query(session=session).filter(mod.index.in_(val["indices"])).all():
            res[r.id()] = r
    return [res[k] for k in b64keys]
|
|
132
|
+
|
|
133
|
+
def get_blobs(variety):
    # Return web paths ("/<blobdir>/<n>") of stored blob files whose
    # libmagic description contains variety (e.g. "image").
    bp = config.blob
    bz = []
    for f in next(os.walk(bp))[-1]:
        fp = os.path.join(bp, f)
        if variety in magic.from_file(fp):
            bz.append("/%s"%(fp,))
    return bz
|
databae/lookup.py
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
from sqlalchemy import func
|
|
2
|
+
from .model import seshman, get_model, put_multi, ForeignKey, String, Integer, ModelBase
|
|
3
|
+
|
|
4
|
+
class CTRefCount(ModelBase):
    """Cached count of how many rows reference a target entity."""
    target = ForeignKey() # instance pointed at
    reference = String() # table.property (such as 'building.owner')
    count = Integer(default=0) # number of pointers

    def mydata(self):
        # serializable representation (overrides ModelBase.mydata)
        return {
            "target": self.target.urlsafe(),
            "reference": self.reference,
            "count": self.count
        }

    def inc(self, amount=1):
        # bump the cached count (not persisted until put())
        self.count += amount

    def dec(self, amount=1):
        self.count -= amount

    def refresh(self):
        # recompute the count from scratch by querying the referencing
        # table/property named by self.reference
        fname, fkey = self.reference.split(".")
        fmod = get_model(fname)
        self.count = fmod.query(getattr(fmod, fkey) == self.target).count()
|
|
26
|
+
|
|
27
|
+
def ref_counter(target, reference, session=None):
    # Fetch the counter row for (target, reference), or build a fresh
    # unsaved one when none exists yet.
    return CTRefCount.query(CTRefCount.target == target,
        CTRefCount.reference == reference, session=session
    ).get() or CTRefCount(target=target, reference=reference)
|
|
31
|
+
|
|
32
|
+
def inc_counter(target, reference, amount=1, session=None):
    """Increment (without saving) the counter for (target, reference)."""
    counter = ref_counter(target, reference, session)
    counter.inc(amount)
    return counter
|
|
36
|
+
|
|
37
|
+
def dec_counter(target, reference, amount=1, session=None):
    """Decrement (without saving) the counter for (target, reference)."""
    counter = ref_counter(target, reference, session)
    counter.dec(amount)
    return counter
|
|
41
|
+
|
|
42
|
+
def refresh_counter(target, reference, session=None):
    """Recompute (without saving) the counter for (target, reference)."""
    counter = ref_counter(target, reference, session)
    counter.refresh()
    return counter
|
|
46
|
+
|
|
47
|
+
def refcount_subq(reference, session=None):
    # Build a (target, count) subquery used by Query.order() to sort by
    # cached reference counts. A two-dot reference aggregates one level
    # of indirection, summing counts per linked row of the second table.
    session = session or seshman.get()
    if reference.count(".") == 2:
        rp = reference.split(".")
        t1 = get_model(rp[1])
        t2 = get_model(rp[2])
        tlink = getattr(t1, rp[2])  # t1's fk column pointing at t2
        return session.query(CTRefCount.target, CTRefCount.count,
            tlink).filter(CTRefCount.reference == ".".join(rp[:2])).join(t1,
            CTRefCount.target == t1.key).join(t2,
            t2.key == tlink).group_by(tlink).with_entities(t2.key.label("target"),
            func.sum(CTRefCount.count).label("count")).subquery()
    return session.query(CTRefCount.target, CTRefCount.count).filter(CTRefCount.reference == reference).subquery()
|
databae/model.py
ADDED
|
@@ -0,0 +1,193 @@
|
|
|
1
|
+
from sqlalchemy import orm
|
|
2
|
+
from sqlalchemy.ext.declarative import declarative_base, DeclarativeMeta
|
|
3
|
+
from fyg.util import log, error
|
|
4
|
+
from six import with_metaclass
|
|
5
|
+
from .query import *
|
|
6
|
+
|
|
7
|
+
def choice_validator(choices):
    """Build a validator callable restricting a column to choices.

    The returned function reports out-of-set values via fyg.util.error
    and returns the value unchanged otherwise.
    """
    def _validate(target, key, value):
        if value not in choices:
            error("can't set %s! %s not in %s"%(key, value, choices))
        return value
    return _validate
|
|
13
|
+
|
|
14
|
+
class CTMeta(DeclarativeMeta):
    """Metaclass for all databae models.

    Registers every subclass in modelsubs (keyed by lowercased name),
    builds its _schema dict, wires joined-table polymorphic
    inheritance, and attaches choice validators.
    """
    def query(cls, *args, **kwargs):
        # Model.query(...) entry point -> databae Query wrapper
        return Query(cls, *args, **kwargs)

    def __new__(cls, name, bases, attrs):
        lname = name.lower()
        attrs["__tablename__"] = lname
        if lname != "modelbase":
            # joined-table inheritance: the subclass table's primary
            # key is a foreign key to its base table's index
            attrs["__mapper_args__"] = {
                "polymorphic_identity": lname
            }
            attrs["index"] = sqlForeignKey(bases[0], primary_key=True)
            if "label" not in attrs:
                # pick a human-readable label column if one exists
                for label in ["name", "title", "topic"]:
                    if label in attrs:
                        attrs["label"] = label
                        break
        schema = attrs["_schema"] = merge_schemas(bases, attrs.get("label"))
        for key, val in list(attrs.items()):
            if getattr(val, "_ct_type", None):
                schema[key] = val._ct_type
                if val._ct_type.startswith("key"):
                    schema["_kinds"][key] = val._kinds
                if getattr(val, "choices", None):
                    attrs["%s_validator"%(key,)] = sqlalchemy.orm.validates(key)(choice_validator(val.choices))
        modelsubs[lname] = super(CTMeta, cls).__new__(cls, name, bases, attrs)
        modelsubs[lname].__name__ = lname
        return modelsubs[lname]

# single declarative base sharing the module-wide MetaData
sa_dbase = declarative_base(metadata=metadata)
|
|
44
|
+
|
|
45
|
+
class ModelBase(with_metaclass(CTMeta, sa_dbase)):
    """Root declarative model: composite urlsafe keys, polymorphic
    inheritance, dict serialization, and put/rm persistence helpers."""
    index = Integer(primary_key=True)
    polytype = String()  # concrete subclass name (polymorphic discriminator)
    key = CompositeKey()  # b64-encoded {"model", "index"} reference
    __mapper_args__ = {
        "polymorphic_on": polytype,
        "polymorphic_identity": "modelbase",
        "with_polymorphic": "*"
    }
    label = "key"  # column used for human-readable labels
    _data_omit = []  # keys excluded from data() (still in export())
    _unique_cols = []  # columns enforced unique by beforeedit()

    def __init__(self, *args, **kwargs):
        sa_dbase.__init__(self, *args, **kwargs)
        self._defaults()
        self._init()

    @orm.reconstructor
    def _init(self):
        # runs on both construction and load-from-db
        self._name = "%s(%s)"%(self.polytype, getattr(self, self.label))
        self._orig_fkeys = {}
        # remember original fk values (presumably for refcount diffing
        # on save -- TODO confirm against setters)
        for prop in self._schema["_kinds"]:
            self._orig_fkeys[prop] = getattr(self, prop)

    def _defaults(self):
        # seed unset key columns with empty wrappers/lists, then apply
        # declared column defaults (_default set by properties._col)
        for prop in self._schema["_kinds"]:
            if getattr(self, prop, None) is None:
                if self._schema[prop].endswith("list"):
                    val = []
                else:
                    val = KeyWrapper()
                setattr(self, prop, val)
        self.key = KeyWrapper()
        for key, val in list(self.__class__.__dict__.items()):
            if getattr(self, key, None) is None and getattr(val, "_default", None) is not None:
                setattr(self, key, val._default)

    def __eq__(self, other):
        return self.id() == (other and hasattr(other, "id") and other.id())

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        return self.key.__hash__()

    def put(self, session=None):
        # save; on "no column" errors handle_error() may ALTER the
        # table (sqlite only), after which the put is retried once
        try:
            put_multi([self], session)
        except Exception as e:
            handle_error(e, session, self.polytype, "has no column named")
            log("retrying put operation")
            put_multi([self], session)

    def otherwith(self, prop, val):
        # another entity (not self) holding the same value -- used for
        # uniqueness checks
        k = self._schema[prop]
        c = self.__class__
        col = getattr(c, prop)
        q = c.query(c.key != self.key)
        print("checking", c.__name__, "for", prop, k, "=", val)
        if k.endswith("list"):
            for v in val:
                q.filter(col.contains(v))
        else:
            q.filter(col == val)
        return q.get()

    def beforeedit(self, edits):
        # returns an error string (truthy) to veto the edit, else None
        for prop in edits:
            if prop in self._unique_cols and self.otherwith(prop, edits[prop]):
                print(prop, "conflict!\n\n")
                return "%s must be unique"%(prop,)

    def beforeremove(self, session):
        # subclass hook
        pass

    def afterremove(self, session):
        # subclass hook
        pass

    def rm(self, commit=True, session=None):
        # delete this row (optionally committing), firing the hooks
        session = session or seshman.get()
        self.beforeremove(session)
        session.delete(self)
        commit and session.commit()
        self.afterremove(session)

    def collection(self, entity_model, property_name=None, fetch=True, keys_only=False, data=False):
        # entities of entity_model whose property points at self;
        # property defaults to this model's own name
        if isinstance(entity_model, str):
            entity_model = get_model(entity_model)
        q = entity_model.query(getattr(entity_model, property_name or self.polytype) == self.key)
        if not fetch:
            return q
        if not data:
            return q.fetch(1000, keys_only=keys_only)
        return [d.data() for d in q.fetch(1000)]

    def modeltype(self):
        return self.__tablename__

    def id(self):
        # urlsafe string form of this entity's key
        return self.key.urlsafe()

    def _has_complete_key(self):
        return bool(self.id())

    def mydata(self, isexport=False):
        # serialize schema columns: keys become urlsafe strings,
        # datetimes "YYYY-MM-DD HH:MM:SS", string/text never None
        cols = {}
        for key, prop in list(self._schema.items()):
            if not isexport and key in self._data_omit:
                continue
            if not key.startswith("_"):
                val = getattr(self, key)
                if prop.startswith("key"):
                    if type(val) is list:
                        val = [v.urlsafe() for v in val]
                    elif hasattr(val, "urlsafe"):
                        val = val.urlsafe()
                elif prop == "blob" and hasattr(val, "urlsafe"):
                    val = val.urlsafe()
                elif val and prop == "datetime":
                    val = str(val)[:19]
                elif prop in ["string", "text"]:
                    val = val or ""
                cols[key] = val
        return cols

    def labeler(self):
        # human-readable label, falling back to "<type> <index>"
        if self.label == "key":
            return self.id()
        return getattr(self, self.label) or "%s %s"%(self.polytype, self.index)

    def _basic(self, d):
        # attach identity metadata shared by data() and export()
        d["key"] = self.id()
        d["index"] = self.index
        d["modelName"] = self.polytype
        d["_label"] = self.label
        d["label"] = self.labeler()
        return d

    def data(self):
        return self._basic(self.mydata())

    def export(self):
        # full serialization: bypasses _data_omit and any subclass
        # mydata() override
        return self._basic(ModelBase.mydata(self, True))
|
|
190
|
+
|
|
191
|
+
class TimeStampedBase(ModelBase):
    """ModelBase plus automatic created/modified timestamps."""
    created = DateTime(auto_now_add=True)  # stamped once on first save
    modified = DateTime(auto_now=True)  # stamped on every save
|
databae/properties.py
ADDED
|
@@ -0,0 +1,274 @@
|
|
|
1
|
+
import os, json, sqlalchemy
|
|
2
|
+
from .getters import get
|
|
3
|
+
from .config import config
|
|
4
|
+
|
|
5
|
+
# sqlalchemy column types wrapped verbatim by basicType() below
primis = ["Integer", "Float", "Boolean", "Text", "Date", "Time"]
|
|
6
|
+
|
|
7
|
+
class DynamicType(sqlalchemy.TypeDecorator):
    """Base TypeDecorator accepting an optional choices kwarg."""
    cache_ok = config.cache

    def __init__(self, *args, **kwargs):
        self.choices = kwargs.pop("choices", None)
        sqlalchemy.TypeDecorator.__init__(self, *args, **kwargs)

class StringType(DynamicType):
    def __init__(self, *args, **kwargs):
        # '500' (len) required for MySQL VARCHAR
        DynamicType.__init__(self, 500, *args, **kwargs)
|
|
18
|
+
|
|
19
|
+
def basicType(colClass, baseType=DynamicType):
    # Build a TypeDecorator subclass named after colClass, wrapping it
    # as the impl; cache_ok is enabled for the basic primis types.
    cname = colClass.__name__
    attrs = { "impl": colClass }
    if config.cache and cname in primis:
        attrs["cache_ok"] = True
    return type("%s"%(cname,), (baseType,), attrs)
|
|
25
|
+
|
|
26
|
+
def _col(colClass, *args, **kwargs):
    # Build an sqlalchemy.Column from one of our TypeDecorator classes,
    # tagging it with _ct_type (schema type name) and _default.
    cargs = {}
    if "primary_key" in kwargs:
        cargs["primary_key"] = kwargs.pop("primary_key")
    default = kwargs.pop("default", None)
    if kwargs.pop("repeated", None):
        # repeated -> JSON-encoded list column (ArrayType)
        isKey = kwargs["isKey"] = colClass is Key
        typeInstance = ArrayType(**kwargs)
        col = sqlalchemy.Column(typeInstance, *args, **cargs)
        col._ct_type = isKey and "keylist" or "list"
        if isKey:
            col._kinds = typeInstance.kinds
        return col  # NOTE: repeated columns get no _default
    typeInstance = colClass(**kwargs)
    col = sqlalchemy.Column(typeInstance, *args, **cargs)
    if hasattr(typeInstance, "choices"):
        col.choices = typeInstance.choices
    if colClass is DateTimeAutoStamper:
        col.is_dt_autostamper = True
        col.should_stamp = typeInstance.should_stamp
        col._ct_type = "datetime"
    elif colClass is BasicString:
        col._ct_type = "string"
    elif colClass is Key:
        col._kinds = typeInstance.kinds
    elif colClass is JSONType:
        col._ct_type = "json"
    if not hasattr(col, "_ct_type"):
        # fallback: lowercased class name (e.g. Key -> "key", Blob -> "blob")
        col._ct_type = colClass.__name__.lower()
    col._default = default
    return col
|
|
57
|
+
|
|
58
|
+
def sqlColumn(colClass):
    """Return a column factory that builds _col(colClass, ...) columns."""
    def build(*args, **kwargs):
        return _col(colClass, *args, **kwargs)
    return build
|
|
60
|
+
|
|
61
|
+
# For each basic sqlalchemy type T, expose at module level:
#   sqlT -> the raw sqlalchemy type
#   T    -> a column factory (e.g. Integer(...), Float(...))
for prop in primis:
    sqlprop = getattr(sqlalchemy, prop)
    globals()["sql%s"%(prop,)] = sqlprop
    globals()[prop] = sqlColumn(basicType(sqlprop))
|
|
65
|
+
|
|
66
|
+
# datetime
|
|
67
|
+
BasicDT = basicType(sqlalchemy.DateTime)
class DateTimeAutoStamper(BasicDT):
    """DateTime column type supporting auto_now / auto_now_add."""
    cache_ok = config.cache

    def __init__(self, *args, **kwargs):
        self.auto_now = kwargs.pop("auto_now", False)  # stamp on every save
        self.auto_now_add = kwargs.pop("auto_now_add", False)  # stamp only when new
        BasicDT.__init__(self, *args, **kwargs)

    def should_stamp(self, is_new):
        # precedence: auto_now OR (is_new AND auto_now_add)
        return self.auto_now or is_new and self.auto_now_add

DateTime = sqlColumn(DateTimeAutoStamper)
|
|
80
|
+
|
|
81
|
+
# strings, arrays, keys
|
|
82
|
+
class BasicString(basicType(sqlalchemy.UnicodeText, StringType)):
    """Unicode text column; base for keys, arrays and JSON types."""
    cache_ok = config.cache

    def process_bind_param(self, data, dialect):
        # if data and type(data) is not str:
        # 	data = data.decode('utf-8')
        return data

String = sqlColumn(BasicString)
|
|
91
|
+
|
|
92
|
+
class JSONType(BasicString):
    """Text column storing JSON-serialized values."""
    def process_bind_param(self, value, dialect):
        return json.dumps(value)

    def process_result_value(self, value, dialect):
        # NULL/empty columns deserialize to {}
        return json.loads(value or "{}")

JSON = sqlColumn(JSONType)
|
|
100
|
+
|
|
101
|
+
class BlobWrapper(object):
    """Handle for blob data stored as numbered files under config.blob.

    value is the integer filename; 0 means empty (no blob).
    """
    def __init__(self, data="", value=0):
        self.value = value
        if data:
            self.set(data)
        else:
            self._set_path(value)

    def __nonzero__(self): # py2
        return bool(self.value)

    def __bool__(self): # py3
        return bool(self.value)

    def get(self):
        # read and return the stored bytes, or None when empty
        if self.value:
            from fyg.util import read
            return read(self.path, binary=True)
        else:
            return None

    def _next_value(self): # safely handles gaps
        # smallest positive integer not already used as a filename
        p, d, f = next(os.walk(config.blob))
        fiz = [int(i) for i in f]
        fiz.sort()
        v = 1
        for n in fiz:
            if n != v:
                return v
            v += 1
        return len(fiz) + 1

    def _set_path(self, data=None):
        # assign self.value/self.path; falsy data clears both
        if data:
            if not self.value:
                self.value = self._next_value()
            self.path = os.path.join(config.blob, str(self.value))
        else:
            self.value = 0
            self.path = None

    def set(self, data):
        # write data (str is utf-8 encoded) to this blob's file
        self._set_path(data)
        if data:
            from fyg.util import write
            if type(data) != bytes:
                data = data.encode()
            write(data, self.path, binary=True)

    def delete(self):
        # remove the backing file and reset to empty
        if self.value:
            from fyg.util import rm
            rm(self.path)
        self._set_path()

    def urlsafe(self):
        # web path ("/<blobdir>/<n>") or None when empty
        return self.path and "/" + "/".join(os.path.split(self.path))
|
|
158
|
+
|
|
159
|
+
BasicInt = basicType(sqlInteger)

class Blob(BasicInt):
    """Integer column referencing a file in the config.blob directory."""
    def __init__(self, *args, **kwargs):
        self.unique = kwargs.pop("unique", False)  # reuse identical existing blobs
        BasicInt.__init__(self, *args, **kwargs)

    def process_bind_param(self, data, dialect):
        if type(data) is not BlobWrapper:
            if self.unique:
                from fyg.util import indir
                match = indir(data, config.blob)  # existing file with same content
                if match:
                    return int(match)
            data = BlobWrapper(data)  # writes the file to disk
        return data.value

    def process_result_value(self, value, dialect):
        return BlobWrapper(value=value)

Binary = sqlColumn(Blob)
|
|
180
|
+
|
|
181
|
+
"""
|
|
182
|
+
class Binary(basicType(sqlString)):
|
|
183
|
+
def process_bind_param(self, value, dialect):
|
|
184
|
+
return sqlalchemy.func.HEX(value)
|
|
185
|
+
|
|
186
|
+
def process_result_value(self, value, dialect):
|
|
187
|
+
return sqlalchemy.func.UNHEX(value)
|
|
188
|
+
"""
|
|
189
|
+
|
|
190
|
+
class ArrayType(BasicString):
    """Text column storing a JSON-encoded list; with isKey, items are
    urlsafe key strings that load back as KeyWrapper objects."""
    cache_ok = config.cache

    def __init__(self, *args, **kwargs):
        self.isKey = kwargs.pop("isKey", False)
        if self.isKey:
            # accepted target kinds (model names or classes); "*" = any
            self.kinds = kwargs.pop("kinds", [kwargs.pop("kind", "*")])
            for i in range(len(self.kinds)):
                if not isinstance(self.kinds[i], str):
                    self.kinds[i] = self.kinds[i].__name__.lower()
        BasicString.__init__(self, *args, **kwargs)

    def process_bind_param(self, value, dialect):
        if self.isKey:
            # NOTE(review): mutates the caller's list in place -- confirm intended
            for i in range(len(value)):
                if hasattr(value[i], "urlsafe"):
                    value[i] = value[i].urlsafe()
        return json.dumps(value)

    def process_result_value(self, value, dialect):
        # tolerate NULL/invalid stored JSON by returning []
        try:
            vlist = json.loads(value) or []
        except:
            vlist = []
        if self.isKey:
            for i in range(len(vlist)):
                vlist[i] = KeyWrapper(vlist[i])
        return vlist
|
|
218
|
+
|
|
219
|
+
class KeyWrapper(object):
    """Lightweight handle around a urlsafe key string (may be empty)."""
    def __init__(self, urlsafe=None):
        self.value = urlsafe

    def __nonzero__(self): # py2
        return bool(self.value)

    def __bool__(self): # py3
        return bool(self.value)

    def __eq__(self, other):
        return hasattr(other, "value") and self.value == other.value

    def __ne__(self, other):
        return not hasattr(other, "value") or self.value != other.value

    def __hash__(self):
        # fix: sum(ord(c) for c in self.value) raised TypeError for an
        # empty key (value None) and collided on anagrams; hash(value)
        # stays consistent with __eq__ and handles None.
        return hash(self.value)

    def get(self, session=None):
        # load the referenced entity
        return get(self.value, session)

    def delete(self):
        ent = self.get()
        ent and ent.rm() # should be more efficient way...

    def urlsafe(self):
        return self.value
|
|
247
|
+
|
|
248
|
+
class Key(BasicString):
    """String column holding a urlsafe key; loads as a KeyWrapper."""
    cache_ok = config.cache

    def __init__(self, *args, **kwargs):
        # accepted target kinds (model names or classes); "*" = any
        self.kinds = kwargs.pop("kinds", [kwargs.pop("kind", "*")])
        for i in range(len(self.kinds)):
            if not isinstance(self.kinds[i], str):
                self.kinds[i] = self.kinds[i].__name__.lower()
        BasicString.__init__(self, *args, **kwargs)

    def process_bind_param(self, value, dialect):
        # unwrap (possibly nested) wrappers until a plain value remains;
        # the loop exits when .urlsafe() no longer exists/applies
        while True:#value and hasattr(value, "urlsafe"):
            try: # for sqlite weirdness -- do this cleaner?
                value = value.urlsafe()
            except:
                break
        return value

    def process_result_value(self, value, dialect):
        return KeyWrapper(value)
|
|
268
|
+
|
|
269
|
+
# both are Key-typed string columns; the two names are purely semantic
# (an entity's own key vs a pointer at another entity)
CompositeKey = sqlColumn(Key)
ForeignKey = sqlColumn(Key)
|
|
271
|
+
|
|
272
|
+
def sqlForeignKey(targetClass, **kwargs):
    """Integer column referencing targetClass's table's index column."""
    target = "%s.index"%(targetClass.__tablename__,)
    return sqlalchemy.Column(sqlInteger, sqlalchemy.ForeignKey(target), **kwargs)
|
databae/query.py
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
from sqlalchemy.sql import func, elements
|
|
2
|
+
from fyg.util import log, start_timer, end_timer
|
|
3
|
+
from fyg import config as confyg
|
|
4
|
+
from six import string_types
|
|
5
|
+
from .properties import *
|
|
6
|
+
from .getters import *
|
|
7
|
+
from .setters import *
|
|
8
|
+
from .session import session, seshman, testSession, metadata, Session, handle_error, set_scoper
|
|
9
|
+
|
|
10
|
+
# query methods proxied directly to the underlying sqlalchemy query
_passthru = ["count", "all"]
# query methods that rebuild self.query and return self (chainable)
_qmod = ["filter", "limit", "offset", "join"]
|
|
12
|
+
|
|
13
|
+
class Query(object):
    """Chainable wrapper around an sqlalchemy session query.

    Terminal methods (count/all/get) pass through with logging and one
    retry; builder methods (filter/limit/offset/join) rebuild the
    underlying query and return self.
    """
    def __init__(self, mod, *args, **kwargs):
        self.mod = mod
        self.schema = get_schema(mod)
        self.session = kwargs.pop("session", None) or seshman.get()
        self.query = kwargs.pop("query", self.session.query(mod))
        for fname in _passthru:
            setattr(self, fname, self._qpass(fname))
        for fname in _qmod:
            setattr(self, fname, self._qmlam(fname))
        self.get = self._qpass("first")
        self._order = self._qmlam("order_by")
        # remaining positional/keyword args are initial filter criteria
        self.filter(*args, **kwargs)

    def order(self, prop):
        # normalize a descending UnaryExpression back to "-name" form
        if type(prop) == elements.UnaryExpression and "count" not in prop.element.description:
            prop = "-%s"%(prop.element.description,)
        if isinstance(prop, string_types):
            asc = False
            if prop.startswith("-"):
                prop = prop[1:]
            else:
                asc = True
            if "." in prop: # foreignkey reference from another table
                # order by cached reference counts via a subquery join
                from .lookup import refcount_subq
                sub = refcount_subq(prop, self.session)
                order = sub.c.count
                if not asc:
                    order = -sub.c.count
                return self.join(sub, self.mod.key == sub.c.target).order(order)
            prop = getattr(self.mod, prop)
            if not asc:
                prop = prop.desc()
        return self._order(prop)

    def _qpass(self, fname):
        # wrap a terminal query method with optional timing and a
        # single retry after handle_error() (which may ALTER a missing
        # column on sqlite)
        def qp(*args, **kwargs):
            qkey = "Query.%s: %s %s (%s)"%(fname, args, kwargs, self.query)
            if "query" in confyg.log.allow:
                start_timer(qkey)
            try:
                res = getattr(self.query, fname)(*args, **kwargs)
            except Exception as e:
                handle_error(e, self.session)
                log("retrying query operation")
                res = getattr(self.query, fname)(*args, **kwargs)
            if "query" in confyg.log.allow:
                end_timer(qkey)
            return res
        return qp

    def _qmlam(self, fname):
        return lambda *a, **k : self._qmod(fname, *a, **k)

    def _qmod(self, modname, *args, **kwargs):
        # rebuild the wrapped query in place and stay chainable
        self.query = getattr(self.query, modname)(*args, **kwargs)
        return self

    def copy(self, *args, **kwargs):
        # new Query sharing the current underlying sqlalchemy query
        kwargs["query"] = self.query
        return Query(self.mod, *args, **kwargs)

    def fetch(self, limit=None, offset=0, keys_only=False):
        if limit:
            self.limit(limit)
        if offset:
            self.offset(offset)
        res = self.all()
        if keys_only: # best way?
            return [r.key for r in res]
        return res
databae/session.py
ADDED
|
@@ -0,0 +1,159 @@
|
|
|
1
|
+
import threading
|
|
2
|
+
from sqlalchemy import text
|
|
3
|
+
from sqlalchemy.pool import NullPool
|
|
4
|
+
from sqlalchemy import create_engine, MetaData
|
|
5
|
+
from sqlalchemy.orm import scoped_session, sessionmaker
|
|
6
|
+
from fyg.util import log, error, confirm
|
|
7
|
+
from fyg import config as confyg
|
|
8
|
+
from .config import config as dcfg
|
|
9
|
+
|
|
10
|
+
pcfg = dcfg.pool
|
|
11
|
+
metadata = MetaData()
|
|
12
|
+
|
|
13
|
+
def conn_ex(cmd):
	"""Log and execute a raw SQL statement on the module-level session's engine."""
	log("issuing command: %s"%(cmd,), important=True)
	with session.engine.connect() as connection:
		connection.execute(text(cmd))
|
|
17
|
+
|
|
18
|
+
def add_column(mod, col):
	"""ALTER TABLE to add a column -- sqlite only (no column type is given)."""
	log("adding '%s' to '%s'"%(col, mod))
	conn_ex('ALTER TABLE "%s" ADD COLUMN "%s"'%(mod, col))
|
|
21
|
+
|
|
22
|
+
def handle_error(e, session=None, polytype=None, flag=" no such column: "):
	"""React to a failed database operation.

	When the failure is a missing-column error and config permits
	altering the schema, roll back the session and add the column
	(sqlite only); otherwise re-raise through error().
	"""
	log("Database operation failed: %s"%(e,), important=True)
	session = session or seshman.get()
	stre = str(e)
	must_raise = True
	if flag in stre:
		# column name is the first token after the flag text
		target = stre.split(flag)[1].split(None, 1)[0]
		log("Missing column: %s"%(target,), important=True)
		if not dcfg.alter:
			log("To auto-update columns, add 'DB_ALTER = True' to your ct.cfg (sqlite only!)", important=True)
		else:
			if "." in target:
				tmod, tcol = target.split(".")
			else:
				# unqualified column name -- table comes from polytype
				tmod, tcol = polytype, target
			if dcfg.alter == "auto" or confirm("Add missing column '%s' to table '%s' (sqlite-only!)"%(tcol, tmod), True):
				log("rolling back session")
				session.rollback()
				must_raise = False
				add_column(tmod, tcol)
	if must_raise:
		error(e)
|
|
45
|
+
|
|
46
|
+
def threadname():
	"""Return the current thread's name.

	Uses threading.current_thread().name -- the camelCase
	currentThread()/getName() spellings were deprecated and are removed
	in Python 3.12, so the old form breaks on modern interpreters.
	"""
	return threading.current_thread().name
|
|
48
|
+
|
|
49
|
+
# optional module-level hook: callable mapping a thread name to a scope key
scoper = None

def set_scoper(func):
	"""Install a custom scope function consulted by Session._scope()."""
	global scoper
	scoper = func
|
|
54
|
+
|
|
55
|
+
class Basic(object): # move elsewhere?
	"""Mixin providing a numbered signature and gated db-channel logging."""
	def sig(self):
		"""Return "ClassName(id)" for use in log lines."""
		return "%s(%s)"%(self.__class__.__name__, self.id)

	def log(self, *msg):
		"""Emit a db-channel log line when "db" logging is enabled."""
		if "db" not in confyg.log.allow:
			return
		log("[db] session | %s :: %s"%(self.sig(), " ".join(msg)))
|
|
62
|
+
|
|
63
|
+
class Session(Basic):
	"""Thread-scoped wrapper around a sqlalchemy session.

	Common session operations (add, commit, ...) are exposed as bound
	wrappers that refresh the underlying scoped session and lazily
	initialize the database schema before delegating.
	"""
	def __init__(self, database):
		Session._id += 1
		self.id = Session._id
		self.database = database
		self.engine = database.engine
		self.generator = scoped_session(sessionmaker(bind=self.engine), scopefunc=self._scope)
		for opname in ("add", "add_all", "delete", "flush", "commit", "query", "rollback"):
			setattr(self, opname, self._func(opname))
		self._refresh()
		self.log("initialized")

	def teardown(self):
		"""Drop all references so this session can be garbage collected."""
		self.engine = None
		self.session = None
		self.database = None
		self.generator = None

	def _scope(self):
		"""Scope key for scoped_session: thread name, optionally remapped by scoper."""
		tname = threadname()
		return scoper and scoper(tname) or tname

	def _func(self, fname):
		"""Build a wrapper that delegates fname to the current scoped session."""
		def wrapped(*args):
			self._refresh()
			self.database.init() # ensure tables exist before first use
			return getattr(self.session, fname)(*args)
		return wrapped

	def _refresh(self):
		"""(Re)acquire the scoped session for the current scope."""
		self.session = self.generator()
		self.no_autoflush = self.session.no_autoflush
|
|
95
|
+
|
|
96
|
+
class DataBase(Basic):
	"""Engine plus per-thread Session registry for a single database URL."""
	def __init__(self, db=dcfg.main):
		DataBase._id += 1
		self.id = DataBase._id
		if pcfg.null:
			self.engine = create_engine(db, poolclass=NullPool, echo=dcfg.echo)
		else:
			self.engine = create_engine(db, pool_size=pcfg.size,
				max_overflow=pcfg.overflow, pool_recycle=pcfg.recycle, echo=dcfg.echo)
		self.sessions = {}
		self._ready = False
		self.log("initialized")

	def init(self):
		"""Create all tables once, on first use."""
		if self._ready:
			return
		self._ready = True
		metadata.create_all(self.engine)

	def session(self):
		"""Return (creating if needed) the Session for the current thread."""
		thread = threadname()
		if thread not in self.sessions:
			self.sessions[thread] = Session(self)
			self.log("session(%s) created!"%(thread,))
		return self.sessions[thread]

	def close(self):
		"""Release the current thread's session.

		The main thread's session is only released back to the scoped
		registry; worker-thread sessions are torn down and discarded.
		"""
		thread = threadname()
		if thread not in self.sessions:
			note = "not found!"
		else:
			self.sessions[thread].generator.remove()
			if thread == "MainThread":
				note = "released"
			else:
				self.sessions[thread].teardown()
				del self.sessions[thread]
				note = "deleted"
		self.log("close(%s)"%(thread,), "session", note)
|
|
134
|
+
|
|
135
|
+
class SessionManager(Basic):
	"""Registry of DataBase instances keyed by database URL."""
	def __init__(self):
		SessionManager._id += 1
		self.id = SessionManager._id
		self.dbs = {}
		self.log("initialized")

	def db(self, db=dcfg.main):
		"""Return (creating if needed) the DataBase for a URL."""
		if db not in self.dbs:
			self.dbs[db] = DataBase(db)
		return self.dbs[db]

	def get(self, db=dcfg.main):
		"""Return the current thread's Session for a URL."""
		return self.db(db).session()

	def close(self, db=dcfg.main):
		"""Close the current thread's session for a URL."""
		self.db(db).close()
|
|
152
|
+
|
|
153
|
+
# shared instance counters used by Basic.sig() in log output
Session._id = DataBase._id = SessionManager._id = 0
|
|
154
|
+
|
|
155
|
+
def testSession():
	"""Return a session bound to the configured test database (dcfg.test)."""
	return seshman.get(dcfg.test)
|
|
157
|
+
|
|
158
|
+
# module-level singletons: the manager, and a default session for dcfg.main
seshman = SessionManager()
session = seshman.get()
|
databae/setters.py
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
from fyg.util import log, batch
|
|
2
|
+
from .session import seshman
|
|
3
|
+
from .edit import *
|
|
4
|
+
from .util import ct_key
|
|
5
|
+
from .config import config
|
|
6
|
+
|
|
7
|
+
def _init_entity(instance, session=None, preserve_timestamps=False):
	"""Prepare one instance for persistence.

	Applies datetime autostamps (unless preserve_timestamps) and, when
	refcounting is enabled, emits counter adjustments for foreign-key
	fields that changed since load. Returns the list of refcount rows
	to persist alongside the instance.
	"""
	from .lookup import inc_counter, dec_counter
	puts = []
	now = datetime.now()
	cls = instance.__class__
	tname = instance.__tablename__
	if tname != "ctrefcount": # refcount rows themselves are never counted
		if hasattr(instance, "_pre_put"):
			instance._pre_put()
		for key, val in list(cls.__dict__.items()):
			if not preserve_timestamps and getattr(val, "is_dt_autostamper", False) and val.should_stamp(not instance.index):
				setattr(instance, key, now)
			if config.refcount and key in instance._orig_fkeys:
				oval = instance._orig_fkeys[key]
				val = getattr(instance, key)
				if oval != val:
					reference = "%s.%s"%(tname, key)
					if type(oval) is list or type(val) is list:
						# bugfix: guard both sides with "or []" -- when one
						# side of a list transition is None, "x not in None"
						# raised TypeError
						for o in [o for o in (oval or []) if o not in (val or [])]:
							puts.append(dec_counter(o, reference, session=session))
						for v in [v for v in (val or []) if v not in (oval or [])]:
							puts.append(inc_counter(v, reference, session=session))
					else:
						if oval:
							puts.append(dec_counter(oval, reference, session=session))
						if val:
							puts.append(inc_counter(val, reference, session=session))
	return puts
|
|
35
|
+
|
|
36
|
+
def init_multi(instances, session=None, preserve_timestamps=False):
	"""Prepare a batch of instances and flush them.

	Runs _init_entity() on each instance inside a no-autoflush block,
	adds instances plus refcount rows, flushes to obtain indexes, then
	assigns string keys to any instance still lacking one.
	"""
	session = session or seshman.get()
	if preserve_timestamps:
		log("initializing %s instances -- preserving timestamps!"%(len(instances),))
	lookups = []
	with session.no_autoflush:
		for instance in instances:
			lookups.extend(_init_entity(instance, session, preserve_timestamps))
		session.add_all(instances + lookups)
		session.flush()
	for instance in instances:
		instance.key = instance.key or KeyWrapper(ct_key(instance.polytype, instance.index))
|
|
48
|
+
|
|
49
|
+
def put_multi(instances, session=None, preserve_timestamps=False):
	"""Persist instances in batches via init_multi(), then commit once."""
	session = session or seshman.get()
	batch(instances, init_multi, session, preserve_timestamps)
	session.commit()
|
|
53
|
+
|
|
54
|
+
def delete_multi(instances, session=None):
	"""Call rm(False, session) on each instance, then commit once."""
	session = session or seshman.get()
	for instance in instances:
		instance.rm(False, session)
	session.commit()
|
databae/util.py
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
import operator
import base64
import json
import hashlib
from datetime import datetime

import requests
from six import string_types
from fyg.util import log
|
|
4
|
+
|
|
5
|
+
# registries keyed by lowercased model name: bulk-fetch callables and model classes
modbulkers = {}
modelsubs = {}
# comparison operators available to query filters, keyed by symbol
operators = {
	"==": operator.__eq__,
	">=": operator.__ge__,
	"<=": operator.__le__,
	"!=": operator.__ne__,
	">": operator.__gt__,
	"<": operator.__lt__
}
|
|
15
|
+
|
|
16
|
+
def get_bulker(modelName):
	"""Return the bulk callable registered for a model name, or None."""
	return modbulkers.get(modelName.lower(), None)
|
|
18
|
+
|
|
19
|
+
def reg_bulker(modelName, func):
	"""Register a bulk callable under the lowercased model name."""
	modbulkers[modelName.lower()] = func
|
|
21
|
+
|
|
22
|
+
def get_model(modelName):
	"""Return the model class registered for a model name, or None."""
	return modelsubs.get(modelName.lower(), None)
|
|
24
|
+
|
|
25
|
+
def get_schema(modname=None):
	"""Return the schema for one model, or a mapping of all model schemas.

	modname may be a model class or a name string. Without modname,
	returns {name: schema} for every registered model except the
	base/refcount models.
	"""
	if modname:
		if not isinstance(modname, string_types):
			modname = modname.__name__
		return modelsubs[modname.lower()]._schema
	return {name: mod._schema for name, mod in modelsubs.items()
		if name not in ("modelbase", "ctrefcount")}
|
|
35
|
+
|
|
36
|
+
def dprep(obj, schema=None): # prepares data object for model
	"""Coerce a plain data dict into model-ready values.

	Datetime strings (ISO-ish, "T"/"Z" tolerated) are parsed; the
	"_label" entry is skipped; other schema'd keys pass through.
	"""
	schema = schema or get_schema(obj["modelName"])
	prepped = {}
	for key, prop in list(schema.items()):
		if key not in obj:
			continue
		if prop == "datetime" and obj[key]:
			cleaned = obj[key].replace("T", " ").replace("Z", "")
			prepped[key] = datetime.strptime(cleaned, "%Y-%m-%d %X")
		# elif prop == "string" and isinstance(obj[key], str):
		#	prepped[key] = obj[key].encode("utf-8")
		elif key != "_label":
			prepped[key] = obj[key]
	return prepped
|
|
48
|
+
|
|
49
|
+
def pad_key(compkey):
	"""Restore base64 '=' padding encoded as a trailing "CT<n>" marker."""
	if compkey[-3:-1] == "CT":
		pads = int(compkey[-1])
		compkey = compkey[:-3] + "=" * pads
	return compkey
|
|
53
|
+
|
|
54
|
+
def unpad_key(compkey):
	"""Replace trailing base64 '=' padding with a "CT<n>" marker."""
	stripped = compkey and compkey.strip("=")
	if stripped != compkey:
		stripped += "CT" + str(len(compkey) - len(stripped))
	return stripped
|
|
59
|
+
|
|
60
|
+
def ct_key(modelName, index):
	"""Build the canonical string key for (model, index): unpadded b64 of JSON."""
	payload = json.dumps({
		"index": index,
		"model": modelName
	})
	return unpad_key(base64.b64encode(payload.encode()).decode())
|
|
65
|
+
|
|
66
|
+
def merge_schemas(bases, label=None):
	"""Combine the _schema dicts of base classes into one schema.

	Always includes the index/key entries; merges each base's "_kinds"
	mapping; sets "_label" when provided.
	"""
	merged = { "index": "immutable", "key": "key_immutable" }
	kinds = {}
	for base in bases:
		if hasattr(base, "_schema"):
			merged.update(base._schema)
			kinds.update(base._schema["_kinds"])
	merged["_kinds"] = kinds
	if label:
		merged["_label"] = label
	return merged
|
|
77
|
+
|
|
78
|
+
def hashpass(password, date):
	"""Hash a password salted with the date rendered as YYYYMMDD.

	NOTE(review): md5 is not a suitable password hash (fast, broken),
	but stored hashes depend on this exact scheme, so changing it would
	invalidate existing credentials -- flagging rather than replacing.
	"""
	salt = str(date.date()).replace('-', '')
	return hashlib.md5((password + salt).encode()).hexdigest()
|
|
80
|
+
|
|
81
|
+
def blobify(d, blobifier, extant=None):
	"""Fetch blob-typed fields over HTTP and inline them into the data dict.

	d: data dict including "modelName" and "key" (or "gaekey");
	blobifier: URL template taking (entity key, property name);
	extant: optional existing entity -- fields it already has blobbed
	are dropped from d rather than refetched.
	"""
	# bugfix: was db.get_schema(...), but no "db" name exists in this
	# module -- get_schema() is defined right here
	for key, prop in list(get_schema(d["modelName"]).items()):
		if prop == "blob" and d[key]:
			entkey = d.get("gaekey", d["key"])
			if extant and getattr(extant, key): # skip if some blob is present.........
				log("%s.%s: already blobbed"%(d["modelName"], key))
				del d[key]
			else:
				log("fetching %s.%s (%s.%s)"%(d["modelName"], key, entkey, d[key]))
				d[key] = requests.get(blobifier%(entkey, key)).content
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2025 bubbleboy14
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
Metadata-Version: 2.1
|
|
2
|
+
Name: databae
|
|
3
|
+
Version: 0.1
|
|
4
|
+
Summary: DATabase ABstraction lAyEr
|
|
5
|
+
Author: Mario Balibrera
|
|
6
|
+
Author-email: mario.balibrera@gmail.com
|
|
7
|
+
License: MIT License
|
|
8
|
+
Classifier: Development Status :: 3 - Alpha
|
|
9
|
+
Classifier: Environment :: Console
|
|
10
|
+
Classifier: Intended Audience :: Developers
|
|
11
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
12
|
+
Classifier: Operating System :: OS Independent
|
|
13
|
+
Classifier: Programming Language :: Python
|
|
14
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
15
|
+
License-File: LICENSE
|
|
16
|
+
Requires-Dist: fyg >=0.1.7
|
|
17
|
+
Requires-Dist: requests >=2.3.0
|
|
18
|
+
Requires-Dist: six >=1.12.0
|
|
19
|
+
Requires-Dist: sqlalchemy >=2.0.30
|
|
20
|
+
|
|
21
|
+
sqlalchemy-based orm
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
databae/__init__.py,sha256=4XjLWG2Oo3T-XBS-ZHTqBCYbO68QfH9Lw-zhfnyOwzI,139
|
|
2
|
+
databae/config.py,sha256=Y0h_7LHFh_cJVi-LugpIaB8e5n3O0pao6r78HGwcjUA,316
|
|
3
|
+
databae/edit.py,sha256=83a2_gxX_ZnbrBFP7zvPduX8uPGXDup09HNTmYXph8s,1543
|
|
4
|
+
databae/getters.py,sha256=IcLsuonToE3GpXCYtt8mai7wlwjhXRDpKBhLlA7VaY0,4803
|
|
5
|
+
databae/lookup.py,sha256=G9Roq6VOEyY3agtAIZvk_KNigCN_-gyH9-zXR098gjM,2147
|
|
6
|
+
databae/model.py,sha256=Up7FFIOYL9cENINHM-RuNTFvEfp4WFqDPnPzoCcCLik,6561
|
|
7
|
+
databae/properties.py,sha256=WYIAjLTwwqnCscX86gGRKyDhoYaHQloV9dirRWjSVRY,7147
|
|
8
|
+
databae/query.py,sha256=r1gLsI_ma0ueO_TNOfCMdvHSKgHSFDnbjvooLffgymU,3039
|
|
9
|
+
databae/session.py,sha256=Nnf5Ftf7pevJsuRmVv898QRsO8CXwRapep13FOzqrIU,4399
|
|
10
|
+
databae/setters.py,sha256=J-Jp8fSG8Y7tF4ikjFCCon-lOpsdKJGIypck2Rr03aU,2514
|
|
11
|
+
databae/util.py,sha256=ABS3OiFJaytLjQwJALmhuxC9ab4nThmHrpoQFPXQnTg,2898
|
|
12
|
+
databae-0.1.dist-info/LICENSE,sha256=TCGFq2RZnPZed3cKBhfq8EIxNzHcwJWJsBW8p9y1dDk,1068
|
|
13
|
+
databae-0.1.dist-info/METADATA,sha256=XXE0N8wqbWCPpsQuG_E9qTDE1en5vfI6AwfvMizxvCE,673
|
|
14
|
+
databae-0.1.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
|
|
15
|
+
databae-0.1.dist-info/top_level.txt,sha256=odMXlkbmQm92xaUnNmomEgF2eStj6enLIlVGCS40oyc,8
|
|
16
|
+
databae-0.1.dist-info/RECORD,,
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
databae
|