ct-0.10.8.114-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cantools/__init__.py +24 -0
- cantools/_db.py +142 -0
- cantools/_memcache.py +76 -0
- cantools/_pay.py +46 -0
- cantools/admin.py +31 -0
- cantools/cfg.py +347 -0
- cantools/config.py +131 -0
- cantools/db/__init__.py +18 -0
- cantools/db/admin.py +27 -0
- cantools/db/gae/__init__.py +0 -0
- cantools/db/gae/model.py +127 -0
- cantools/db/gae/properties.py +35 -0
- cantools/db/wp.py +99 -0
- cantools/geo.py +188 -0
- cantools/hooks.py +13 -0
- cantools/scripts/__init__.py +0 -0
- cantools/scripts/bench.py +167 -0
- cantools/scripts/builder.py +272 -0
- cantools/scripts/deploy.py +154 -0
- cantools/scripts/doc.py +239 -0
- cantools/scripts/index.py +226 -0
- cantools/scripts/init.py +345 -0
- cantools/scripts/migrate.py +593 -0
- cantools/scripts/pubsub/__init__.py +28 -0
- cantools/scripts/pubsub/actor.py +13 -0
- cantools/scripts/pubsub/bots.py +143 -0
- cantools/scripts/pubsub/channel.py +85 -0
- cantools/scripts/pubsub/ps.py +145 -0
- cantools/scripts/pubsub/user.py +51 -0
- cantools/scripts/start.py +53 -0
- cantools/scripts/util.py +24 -0
- cantools/util/__init__.py +78 -0
- cantools/util/admin.py +620 -0
- cantools/util/data.py +109 -0
- cantools/util/media.py +303 -0
- cantools/util/package.py +125 -0
- cantools/util/system.py +73 -0
- cantools/web/__init__.py +9 -0
- cantools/web/dez_server/__init__.py +1 -0
- cantools/web/dez_server/controller.py +129 -0
- cantools/web/dez_server/cron.py +115 -0
- cantools/web/dez_server/daemons.py +64 -0
- cantools/web/dez_server/mail.py +24 -0
- cantools/web/dez_server/response.py +63 -0
- cantools/web/dez_server/routes.py +21 -0
- cantools/web/dez_server/server.py +229 -0
- cantools/web/dez_server/sms.py +12 -0
- cantools/web/gae_server.py +68 -0
- cantools/web/util.py +552 -0
- ct-0.10.8.114.dist-info/LICENSE +9 -0
- ct-0.10.8.114.dist-info/METADATA +25 -0
- ct-0.10.8.114.dist-info/RECORD +55 -0
- ct-0.10.8.114.dist-info/WHEEL +5 -0
- ct-0.10.8.114.dist-info/entry_points.txt +10 -0
- ct-0.10.8.114.dist-info/top_level.txt +1 -0
cantools/config.py
ADDED
@@ -0,0 +1,131 @@
import os
from fyg import Config, PCache, config as confyg
from databae import config as dbcfg
from fyg.util import read
from .cfg import cfg

pc = PCache(".ctp")

def _getpass(val, ptype):
    if "{PASSWORD}" in val:
        val = val.replace("{PASSWORD}", pc("enter password (%s): "%(ptype,)))
    return val

config = Config(cfg)

def include_plugins():
    plugs = {}
    def loadp(plugin):
        plugin = plugin.split("/")[-1]
        if plugin not in plugs:
            try:
                mod = plugs[plugin] = __import__(plugin)
            except:
                print("missing plugin: %s (fine if refreshing project for 1st time)"%(plugin,))
                return
            if hasattr(mod.init, "requires"):
                for p in mod.init.requires:
                    loadp(p)
            if hasattr(mod.init, "cfg"):
                config.update(plugin, mod.init.cfg)
    for plugin in config.plugin.modules:
        loadp(plugin)

config.plugin.update("modules", [])
config.plugin.update("repos", [])

items = []
lines = read("ct.cfg", True)
gcpath = os.path.join(os.path.expanduser("~"), "ct.cfg")
if os.path.isfile(gcpath):
    print("loading global configuration at: %s"%(gcpath,))
    lines = read(gcpath, True) + lines
for line in lines:
    if line.startswith("#"):
        continue
    try:
        key, val = [term.strip() for term in line.split(" = ", 1)]
    except Exception as e:
        print("failed to parse config on line:", line)
        raise e
    if key == "PLUGIN_MODULES":
        mods = val.split("|")
        config.plugin.update("modules", [p.split("/")[-1] for p in mods])
        config.plugin.update("repos", ["/" in p and p or "%s/%s"%(config.plugin.base, p) for p in mods])
        include_plugins()
    else:
        items.append([key, val])

for key, val in items:
    if key in ["ENCODE", "DB_ECHO", "DB_PUBLIC", "DB_REFCOUNT", "DB_CACHE", "DB_POOL_NULL", "GEO_TEST", "REL_VERBOSE", "REL_LOUDLISTEN", "MEMCACHE_REQUEST", "MEMCACHE_DB", "PUBSUB_ECHO", "PUBSUB_META", "PUBSUB_B64", "SSL_VERIFY", "ADMIN_MONITOR_LOG", "WEB_XORIGIN", "LOG_TIMESTAMP", "BUILD_PROD_CLOSURE", "BUILD_PROD_B64", "GMAILER", "MAILHTML", "MAILOUD"]:
        val = val == "True"
    elif key in ["PUBSUB_HISTORY", "MEMPAD", "WEB_SHIELD_CHUNK", "WEB_SHIELD_LIMIT", "WEB_SHIELD_INTERVAL", "MEMCACHE_PROX_TIMEOUT", "DB_POOL_SIZE", "DB_POOL_OVERFLOW", "DB_POOL_RECYCLE", "LOG_OPENFILES", "LOG_TRACEMALLOC", "MAILSCANTICK"]:
        val = int(val)
    elif key in ["REL_SLEEP", "REL_TURBO"]:
        val = float(val)
    if key == "DB":
        config.db.update(config.web.server, _getpass(val, "db"))
    elif key == "DB_TEST":
        config.db.update("test", _getpass(val, "test db"))
    elif key == "ADMINPWGAE":
        # this property only exists for local finagling of a gae database (datastore).
        # this is _not_ for production and frankly should _not_ even be committed to
        # a repository. use with care!
        config.admin.update("pw", val)
    else:
        if key == "SCRAMBLER":
            config.update("customscrambler", True)
        target = key.lower()
        c = config
        if target in ["pubsub_botnames", "log_allow", "geo_user_geonames", "geo_user_google", "admin_contacts", "admin_reportees", "web_rollz", "admin_monitor_geo", "build_dependencies", "build_exclude", "build_include", "build_notjs", "web_whitelist", "admin_whitelist", "web_blacklist", "web_eflags"]: # TODO: use default list detection instead
            val = val.split("|")
            if target == "web_rollz":
                rollz = {}
                for v in val:
                    flag, domain = v.split(":")
                    rollz[flag] = domain
                val = rollz
        if "_" in target:
            path, target = target.rsplit("_", 1)
            for part in path.split("_"):
                c = c.sub(part)
        c.update(target, val)

config.update("cache", pc)
config.db.update("main", config.db[config.web.server])
for prop in ["deep", "flush", "timestamp", "allow"]:
    confyg.log.update(prop, config.log[prop])
for prop in ["cache", "refcount", "main", "test", "blob", "alter", "echo"]:
    dbcfg.update(prop, config.db[prop])
for prop in ["null", "size", "recycle", "overflow"]:
    dbcfg.pool.update(prop, config.db.pool[prop])

# set protocol based on certs
if config.ssl.certfile:
    config.web.update("protocol", "https")
    config.admin.update("protocol", "https")

# extract mailer name from MAILER
if config.mailer and " <" in config.mailer:
    mn, m = config.mailer[:-1].split(" <")
    config.update("mailer", m)
    config.update("mailername", mn)

def mod_and_repo(plug):
    repo = "/" in plug and plug or "%s/%s"%(config.plugin.base, plug)
    if repo == plug:
        plug = plug.split("/")[1]
    return plug, repo

def mods_and_repos(plugs):
    mz, rz = [], [] # slicker way to do this?
    for plug in plugs:
        m, r = mod_and_repo(plug)
        mz.append(m)
        rz.append(r)
    return mz, rz

def include_plugin(plug):
    mod, repo = mod_and_repo(plug)
    config.plugin.repos.append(repo)
    config.plugin.modules.append(mod)
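Everything in config.py is driven by flat "KEY = value" lines in ct.cfg (a local file, optionally preceded by a global ~/ct.cfg). As a rough standalone illustration, not part of the package diff, the sample keys and values below are hypothetical, but the coercion and nesting rules mirror the loop above:

# Standalone sketch of the ct.cfg parsing rules; sample lines/values are hypothetical
# and plain dicts stand in for the fyg Config tree.
sample = [
    "WEB_PORT = 8080",                     # generic branch: lands at web -> port (as a string)
    "DB_ECHO = True",                      # in the boolean key list, coerced with (val == "True")
    "PUBSUB_HISTORY = 10",                 # in the integer key list, coerced with int()
    "MAILER = Admin <admin@example.com>",  # no underscore: stored at the top level
]

parsed = {}
for line in sample:
    if line.startswith("#"):
        continue
    key, val = [term.strip() for term in line.split(" = ", 1)]
    if key in ["DB_ECHO"]:              # stand-in for the full boolean-key list
        val = val == "True"
    elif key in ["PUBSUB_HISTORY"]:     # stand-in for the full integer-key list
        val = int(val)
    target = key.lower()
    node = parsed
    if "_" in target:                   # WEB_PORT -> parsed["web"]["port"], like c.sub(part)
        path, target = target.rsplit("_", 1)
        for part in path.split("_"):
            node = node.setdefault(part, {})
    node[target] = val

print(parsed)
# {'web': {'port': '8080'}, 'db': {'echo': True}, 'pubsub': {'history': 10},
#  'mailer': 'Admin <admin@example.com>'}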
cantools/db/__init__.py
ADDED
@@ -0,0 +1,18 @@
from ..config import config

# later, switch these imports on 'db' instead of 'web.server'
if config.web.server == "gae":
    from .gae.model import *
elif config.web.server == "dez":
    from rel import tick
    from databae import *
    from cantools.web import set_pre_close, cgi_dump
    def scoper(threadId):
        if threadId == "MainThread":
            threadId = tick()
        return "%s%s"%(threadId, cgi_dump())
    set_scoper(scoper)
    set_pre_close(seshman.close)
else:
    from fyg.util import error
    error("no data backend specified")
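Under the dez backend, the scoper registered above keys databae's session scoping on both the handling thread and the current request. The sketch below is a standalone illustration only; tick() and cgi_dump() are hypothetical stand-ins for the real rel and cantools.web functions:

# Hypothetical stand-ins -- the real values come from rel and cantools.web.
def tick():
    return 42                   # rel's event-loop tick, substituted for "MainThread"

def cgi_dump():
    return "/api?action=get"    # signature of the request currently being handled

def scoper(threadId):
    # same logic as the scoper registered above
    if threadId == "MainThread":
        threadId = tick()
    return "%s%s"%(threadId, cgi_dump())

print(scoper("MainThread"))     # "42/api?action=get" -- a fresh scope per tick + request
print(scoper("Thread-3"))       # "Thread-3/api?action=get" -- worker threads scope separately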
cantools/db/admin.py
ADDED
@@ -0,0 +1,27 @@
from databae import get_model, get_schema, put_multi
from fyg.util import log

def index(kind, i=0): # be careful with this!
    kinds = kind == "*" and list(get_schema().keys()) or [kind]
    puts = []
    for kind in kinds:
        mod = get_model(kind)
        schema = get_schema(kind)
        q = mod.query()
        for prop in ["created", "modified", "date"]:
            if prop in schema:
                q.order(getattr(mod, prop))
                break
        items = q.fetch() # gae doesn't support all()...
        log("assigning sequential index properties to %s %s records"%(len(items), kind), important=True)
        for n in range(len(items)):
            item = items[n]
            i += 1
            item.index = i
            if n and not n % 100:
                log("processed %s %s entities"%(n, kind), 1)
        log("processed %s %s entities"%(len(items), kind))
        puts += items
    log("saving %s records"%(len(puts),), important=True)
    put_multi(puts)
    return i
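A hedged usage sketch, not part of the package diff: index() walks every record of a kind (or of all kinds when passed "*") and assigns sequential index values, so it is typically run once from a maintenance shell. The kind name below is hypothetical, and this assumes an already configured cantools project with a databae backend:

# Hypothetical maintenance-shell usage; "comment" is a made-up kind name.
from cantools.db.admin import index

index("comment")   # renumber every comment record sequentially from 1
index("*")         # or renumber all registered kinds with one shared counter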
cantools/db/gae/__init__.py
File without changes
cantools/db/gae/model.py
ADDED
@@ -0,0 +1,127 @@
from datetime import datetime
from .properties import *
from six import with_metaclass

class CTMeta(ndb.MetaModel):
    def __new__(cls, name, bases, attrs):
        lname = name.lower()
        if lname != "modelbase":
            if "label" not in attrs:
                for label in ["name", "title"]:
                    if label in attrs:
                        attrs["label"] = label
                        break
            if "label" not in attrs:
                attrs["label"] = "key"
            schema = attrs["_schema"] = merge_schemas(bases, attrs["label"])
            for key, val in list(attrs.items()):
                if getattr(val, "_ct_type", None):
                    schema[key] = val._ct_type
                    if val._ct_type == "key":
                        schema["_kinds"][key] = getattr(val, "_kinds", "*") # always * for now...
        modelsubs[lname] = super(CTMeta, cls).__new__(cls, name, bases, attrs)
        return modelsubs[lname]

class ModelBase(with_metaclass(CTMeta, ndb.Model)):
    index = Integer()

    def __eq__(self, other):
        return self.id() == (other and hasattr(other, "id") and other.id())

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        return 0 # ensures proper set-uniquification

    def put(self):
        if not self.index:
            self.index = self.__class__.query().count() + 1
        super(ModelBase, self).put()

    def rm(self):
        self.key.delete()

    def collection(self, entity_model, property_name, fetch=True, keys_only=False, data=False):
        q = entity_model.query(ndb.GenericProperty(property_name) == self.key)
        if not fetch:
            return q
        if not data:
            return q.fetch(1000, keys_only=keys_only)
        return [d.data() for d in q.fetch(1000)]

    def modeltype(self):
        return self.__class__.__name__.lower()

    def id(self):
        return self.key.urlsafe()

    def export(self):
        cols = {}
        mt = self.modeltype()
        cols["key"] = self.id()
        cols["ctkey"] = ct_key(mt, self.index)
        cols["oldkey"] = str(self.key.to_old_key())
        cols["index"] = self.index
        cols["modelName"] = mt
        cols["_label"] = self.label
        for cname, prop in list(self._schema.items()):
            if not cname.startswith("_"):
                val = getattr(self, cname)
                if prop.startswith("key"):
                    if type(val) is list:
                        val = [v.urlsafe() for v in val]
                    elif hasattr(val, "urlsafe"):
                        val = val.urlsafe()
                elif prop == "blob":
                    val = bool(val)
                elif val and prop == "datetime":
                    val = str(val)[:19]
                cols[cname] = val
        cols["label"] = cols[self.label] or "%s %s"%(mt, self.index)
        return cols

def getall(entity=None, query=None, keys_only=False):
    cursor = None
    ents = []
    while True:
        q = query or entity.query()
        batch, cursor, more = q.fetch_page(1000, start_cursor=cursor, keys_only=keys_only)
        ents += batch
        if len(batch) < 1000:
            break
    return ents

def get(key):
    return Key(urlsafe=key).get()

def get_page(modelName, limit, offset, order='index', filters={}):
    schema = get_schema(modelName)
    mod = get_model(modelName)
    query = mod.query()
    for key, obj in list(filters.items()):
        val = obj["value"]
        comp = obj["comparator"]
        prop = getattr(mod, key)
        if schema[key] == "key" and not isinstance(val, KeyWrapper):
            val = KeyWrapper(val)
        elif schema[key] == "datetime" and not isinstance(val, datetime):
            val = datetime.strptime(val, "%Y-%m-%d %H:%M:%S")
        if comp == "like":
            query.filter(func.lower(prop).like(val.lower()))
        else:
            query.filter(operators[comp](prop, val))
    return [d.export() for d in query.order(order).fetch(limit, offset=offset)]

def edit(data):
    ent = "key" in data and get(data["key"]) or get_model(data["modelName"])()
    for propname, val in list(data.items()):
        if propname in ent._schema:
            if val:
                if propname in ent._schema["_kinds"]: # foreignkey
                    val = KeyWrapper(val)
                elif ent._schema[propname] == "datetime" and not isinstance(val, datetime):
                    val = datetime.strptime(val, "%Y-%m-%d %H:%M:%S")
            setattr(ent, propname, val)
    ent.put()
    return ent
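A hedged sketch of how the metaclass above is meant to be used, not part of the package diff: declaring a model with a name or title property makes that property its label, the _ct_type markers on the properties feed _schema, and export() serializes an entity to a plain dict. The model and field names below are hypothetical, and a configured App Engine ndb context is assumed:

# Hypothetical model definitions; assumes a configured App Engine / ndb environment.
from cantools.db.gae.model import ModelBase
from cantools.db.gae.properties import String, DateTime, ForeignKey

class Author(ModelBase):
    name = String()            # "name" present, so CTMeta infers label = "name"

class Post(ModelBase):
    title = String()           # "title" present, so label = "title"
    published = DateTime()
    author = ForeignKey()      # _ct_type "key", recorded under _schema["_kinds"]

# Post._schema now maps property names to ct types, roughly:
# {"index": "integer", "title": "string", "published": "datetime", "author": "key", ...}
# and post.export() would return a dict with key, ctkey, index, modelName, label and each column.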
cantools/db/gae/properties.py
ADDED
@@ -0,0 +1,35 @@
from google.appengine.ext import ndb
from databae.util import *

# column properties
Integer = ndb.IntegerProperty
Float = ndb.FloatProperty
Boolean = ndb.BooleanProperty
String = ndb.StringProperty
Text = ndb.TextProperty
Binary = ndb.BlobProperty
Date = ndb.DateProperty
Time = ndb.TimeProperty
DateTime = ndb.DateTimeProperty
ForeignKey = ndb.KeyProperty

# ctypes
Integer._ct_type = "integer"
Float._ct_type = "float"
Boolean._ct_type = "boolean"
String._ct_type = "string"
Text._ct_type = "text"
Binary._ct_type = "blob"
Date._ct_type = "date"
Time._ct_type = "time"
DateTime._ct_type = "datetime"
ForeignKey._ct_type = "key"

# entity keys
Key = ndb.Key
KeyWrapper = ndb.Key

# funcs
get_multi = ndb.get_multi
put_multi = ndb.put_multi
delete_multi = ndb.delete_multi
cantools/db/wp.py
ADDED
@@ -0,0 +1,99 @@
import pymysql, time
from fyg.util import log, read, error

class DBWrapper(object):
    def __init__(self):
        self._h, self._u, self._p, self._d = read(".c").strip().split("|")

    def db(self):
        try:
            self._db.ping()
        except:
            self._db = pymysql.connect(host=self._h,
                user=self._u, passwd=self._p, db=self._d)
        return self._db

    def query(self, q, fetch=True, silent=False):
        silent or log("executing query: %s"%(q,))
        db = self.db()
        cur = db.cursor()
        cur.execute(q)
        if fetch:
            rowz = cur.fetchall()
        cur.close()
        if fetch:
            return rowz

    def commit(self):
        self.db().commit()

_db = None

def setdb():
    global _db
    if not _db:
        try:
            _db = DBWrapper()
        except:
            log("NO DATABASE CONNECTION!!!!!", important=True)
    return _db

setdb()

def trydb(attempts=5, wait=0.4):
    for attempt in range(attempts):
        db = setdb()
        if db:
            return db
        log("trydb failed retry #" + attempt, important=True)
        time.sleep(wait)

def getdb(subdb=False):
    db = trydb()
    return subdb and db.db() or db

def dbcommit():
    getdb().commit()

def query(q, fetch=True, silent=False):
    return getdb().query(q, fetch, silent)

def memprop(email, prop):
    res = query("select %s from wp_users where user_email = '%s';"%(prop, email))
    if not res:
        error("no matching member: %s"%(email,))
    return res[0][0]

def memid(email):
    return memprop(email, "ID")

def getmeta(pid, key):
    return query("select meta_value from wp_usermeta where user_id = '%s' and meta_key = '%s';"%(pid, key))

def setmeta(pid, key, val):
    mres = query("select * from wp_usermeta where user_id = '%s' and meta_key = '%s';"%(pid, key))
    if mres:
        query("update wp_usermeta set meta_value = '%s' where umeta_id = '%s';"%(val, mres[0][0]), False)
    else:
        query("insert into wp_usermeta(user_id,meta_key,meta_value) values(%s,'%s','%s');"%(pid, key, val), False)

def transmeta(pid, key, transformer):
    mres = query("select * from wp_usermeta where user_id = '%s' and meta_key = '%s';"%(pid, key))
    mid, uid, k, v = mres[0]
    val = transformer(v)
    query("update wp_usermeta set meta_value = '%s' where umeta_id = '%s';"%(val, mid), False)

def bymeta(key, val, props=["user_email"], like=True):
    cond = (like and "like '%%%s%%'" or "= '%s'")%(val,)
    q = "select %s from wp_users join wp_usermeta on wp_users.ID = wp_usermeta.user_id where wp_usermeta.meta_key = '%s' and wp_usermeta.meta_value %s;"%(", ".join(props), key, cond)
    return query(q)

def capability2emails(capability):
    return [row[0] for row in bymeta("wp_capabilities", capability)]

def id2email(memid):
    e = query("select user_email from wp_users where ID = '%s';"%(memid,))
    return e and e[0][0] or "[account %s not found]"%(memid,)

def ip2mems(ip):
    return [id2email(m[0]) for m in query("select user_id from wp_usermeta where meta_value = '%s';"%(ip,))]
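A hedged usage sketch for the WordPress helpers above, not part of the package diff: they read connection details from a local ".c" file ("host|user|password|database") and operate directly on the wp_users / wp_usermeta tables. The email, meta key, and values below are hypothetical, and a configured cantools project plus a reachable MySQL server are assumed:

# Hypothetical usage; the ".c" credentials file and the member email are assumptions.
from cantools.db import wp

uid = wp.memid("someone@example.com")        # look up a member's wp_users.ID by email
wp.setmeta(uid, "ct_notes", "hello")         # insert or update a wp_usermeta row
print(wp.getmeta(uid, "ct_notes"))           # raw cursor rows, e.g. (('hello',),)
wp.dbcommit()                                # flush the pending insert/update
print(wp.capability2emails("subscriber"))    # emails whose wp_capabilities mention "subscriber"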
cantools/geo.py
ADDED
@@ -0,0 +1,188 @@
import os
try:
    from urllib import parse # py3
except:
    import urllib as parse # py2.7
from cantools.web import fetch
from cantools.util import log, read, write, writejson
from cantools import config

zcpath = os.path.join("logs", "json", "geo")

class Geo(object):
    apis = {
        "latlng": {
            "user": "google",
            "host": "maps.googleapis.com",
            "path": "/maps/api/geocode/json?sensor=false&address={0}",
            "sig": "key",
            "property": "results",
            "index": 0
        },
        "where": {
            "user": "google",
            "host": "maps.googleapis.com",
            "path": "/maps/api/geocode/json?sensor=false&address={0}",
            "sig": "key",
            "property": "results",
            "index": 0
        },
        "zip": {
            "geonames": {
                "user": "geonames",
                "host": "ws.geonames.org",
                "path": "/findNearbyPostalCodesJSON?radius=1&lat={0}&lng={1}",
                "sig": "username",
                "property": "postalCodes",
                "index": 0
            },
            "google": {
                "user": "google",
                "host": "maps.googleapis.com",
                "path": "/maps/api/geocode/json?sensor=false&latlng={0},{1}",
                "sig": "key",
                "property": "results",
                "index": 0
            }
        }[config.geo.zip]
    }

    def __init__(self):
        self.wherecache = {} # TODO : persist!
        self.cache = read("%s.json"%(zcpath,), isjson=True, default={})
        for addr in self.cache:
            self.cache[addr]["count"] = 0

    def _no_cache(self, addr):
        return not (addr in self.cache and self.cache[addr]["lat"] and self.cache[addr]["lng"])

    def _fetch(self, api, *args): # api is latlng/zip
        path = self.apis[api]["path"].format(*[parse.quote(str(a).replace(" ", "+")) for a in args])
        host = self.apis[api]["host"]
        user = self.apis[api]["user"]
        prop = self.apis[api]["property"]
        keys = config.geo.user[user]
        onum = num = self.apis[api]["index"]
        sig = self.apis[api]["sig"]
        kwargs = { "asjson": True }
        result = []
        while True:
            fullpath = path
            if keys[num]:
                fullpath += "&%s=%s"%(sig, keys[num])
            if user == "google":
                kwargs["protocol"] = "https"
            raw = fetch(host, fullpath, **kwargs)
            result = raw.get(prop, [])
            if len(result):
                break
            log("0-length %s result (got: '%s') - changing user"%(api, raw), important=True)
            num = (num + 1) % len(keys)
            if num == onum:
                log("exhausted key list -- returning [] :'(")
                break
        self.apis[api]["index"] = num
        return result

    def where(self, place):
        place = place.title()
        if place not in self.wherecache:
            if config.geo.where.nom:
                res = fetch("https://nominatim.openstreetmap.org/search", asjson=True, qsp={
                    "limit": 1,
                    "format": "json",
                    "q": place.replace(" ", "%%20")
                }, fakeua=config.geo.where.ua)
                if res:
                    places = res[0]["display_name"].split(", ")
                    curplace = places.pop(0)
                    while places:
                        self.wherecache[curplace] = places[0]
                        curplace = places.pop(0)
                    self.wherecache[curplace] = "earth"
                else:
                    self.wherecache[place] = "earth"
            else:
                results = self._fetch("where", place)
                if not results:
                    return "space"
                places = results[0]["address_components"]
                self.wherecache[place] = len(places) > 1 and places[1]["long_name"] or "earth"
        return self.wherecache[place]

    def address2latlng(self, address):
        if self._no_cache(address):
            log("finding lat/lng for %s"%(address,), 3)
            results = self._fetch("latlng", address)
            if not len(results):
                log("no results!!!", 4)
                self.cache[address] = {
                    "lat": None,
                    "lng": None
                }
            else:
                loc = results[0]['geometry']['location']
                self.cache[address] = {
                    "lat": loc["lat"],
                    "lng": loc["lng"]
                }
            self.savecache()
        return [self.cache[address]['lat'], self.cache[address]['lng']]

    def latlng2zip(self, lat, lng):
        result = self._fetch("zip", lat, lng)
        log("finding zip for lat %s and lng %s"%(lat, lng), 3)
        if not len(result):
            log("can't find zipcode!!!", important=True)
            return None
        if config.geo.zip == "google":
            return [c["short_name"] for c in result[0]["address_components"] if c["types"][0] == "postal_code"][0]
        return result[0]["postalCode"]

    def addr2zip(self, addr, allowNone=False):
        log("finding zip for '%s'"%(addr,), 3)
        if config.geo.test:
            log("test mode! returning nonsense :)")
            return '12345'
        if self._no_cache(addr):
            self.address2latlng(addr)
        d = self.cache[addr]
        if not d.get("zip", allowNone):
            d["count"] = 1
            d["zip"] = d["lat"] and self.latlng2zip(d["lat"], d["lng"])
            self.savecache()
        else:
            d["count"] += 1
            log("address referenced %s times"%(d["count"],), 4)
        log("found zip: %s"%(d["zip"],), 4)
        return d["zip"]

    def savecache(self, pretty=False):
        log("saving zipcode / latlng (geo) cache (pretty: %s)"%(pretty,), important=True)
        if pretty:
            writejson(self.cache, zcpath)
        else:
            write(self.cache, "%s.json"%(zcpath,), True)

    def distance(self, lat1, lng1, lat2, lng2):
        log("approximating distance between [%s %s] and [%s %s]"%(lat1, lng1, lat2, lng2))
        latd = lat1 - lat2
        lngd = lng1 - lng2
        latsq = latd ** 2
        lngsq = lngd ** 2
        tsq = latsq + lngsq
        hypot = tsq**(1.0/2)
        miles = hypot * 69.11
        log("lat diff: %s square: %s"%(latd, latsq))
        log("lng diff: %s square: %s"%(lngd, lngsq))
        log("sum of squares: %s hypotenuse: %s"%(tsq, hypot))
        log("about %s miles"%(miles,))
        return miles

geo = Geo()
where = geo.where
address2latlng = geo.address2latlng
latlng2zip = geo.latlng2zip
addr2zip = geo.addr2zip
savecache = geo.savecache
distance = geo.distance
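A hedged usage sketch for the geo helpers above, not part of the package diff: addr2zip geocodes through the configured provider and caches results under logs/json/geo.json, and distance is a flat lat/lng approximation (degree difference times roughly 69.11 miles), not a great-circle computation. The address and coordinates below are hypothetical, and geo API credentials in ct.cfg (or GEO_TEST = True to short-circuit with the dummy '12345' zip) are assumed:

# Hypothetical usage; assumes a configured cantools project with geo credentials.
from cantools.geo import addr2zip, address2latlng, distance, where

print(addr2zip("1600 Amphitheatre Parkway, Mountain View, CA"))  # e.g. '94043', cached after the first call
lat, lng = address2latlng("Portland, OR")
print(distance(lat, lng, 45.5, -122.6))   # rough miles between the two points
print(where("Portland"))                  # enclosing region, or "earth" when nothing better is known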
cantools/hooks.py
ADDED
File without changes