remote-run-everything 2.0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- remote_run_everything/__init__.py +16 -0
- remote_run_everything/db/__init__.py +0 -0
- remote_run_everything/db/backup.py +38 -0
- remote_run_everything/db/crud_sqlalchemy.py +97 -0
- remote_run_everything/db/crude_duck.py +122 -0
- remote_run_everything/db/kv_store.py +109 -0
- remote_run_everything/deploy/__init__.py +0 -0
- remote_run_everything/deploy/by_http.py +82 -0
- remote_run_everything/deploy/by_http_server.py +56 -0
- remote_run_everything/deploy/by_http_tool.py +64 -0
- remote_run_everything/deploy/record_mod.py +26 -0
- remote_run_everything/nosql/__init__.py +0 -0
- remote_run_everything/nosql/no_sql.py +73 -0
- remote_run_everything/nosql/no_sql_mysql.py +67 -0
- remote_run_everything/nosql/no_sql_pg.py +69 -0
- remote_run_everything/nosql/no_sql_tool.py +81 -0
- remote_run_everything/tools/__init__.py +0 -0
- remote_run_everything/tools/common.py +56 -0
- remote_run_everything/tools/common1.py +100 -0
- remote_run_everything/tools/decorators.py +81 -0
- remote_run_everything/tools/sqlacodegen_go_struct.py +109 -0
- remote_run_everything/vsconf/conf_txt.py +106 -0
- remote_run_everything/vsconf/core.py +29 -0
- remote_run_everything-2.0.2.dist-info/METADATA +109 -0
- remote_run_everything-2.0.2.dist-info/RECORD +28 -0
- remote_run_everything-2.0.2.dist-info/WHEEL +5 -0
- remote_run_everything-2.0.2.dist-info/licenses/LICENSE +19 -0
- remote_run_everything-2.0.2.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
import sqlite3, os, json
|
|
2
|
+
|
|
3
|
+
from remote_run_everything.nosql.no_sql_tool import NosqlTool
|
|
4
|
+
|
|
5
|
+
from remote_run_everything.db.crude_duck import CrudeDuck
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class Nosql:
    """Tiny Mongo-like document store backed by a SQLite file.

    Documents are stored as JSON text in a two-column table
    (id INTEGER PRIMARY KEY, data TEXT); matching is done by the ``my_q``
    scalar function (NosqlTool.query) registered on the connection.
    """

    def __init__(self, dbpath=None):
        """Open (or create) the database; ``dbpath=None`` picks a per-OS default path."""
        self.t = NosqlTool()
        self.cd = CrudeDuck()
        self.db_path = self.t.default_db_path(dbpath)
        self.conn = self.cd.install_sql_ext(self.db_path)
        # Expose the Mongo-style matcher to SQL as ``my_q(data, query_json)``.
        self.conn.create_function("my_q", self.t.query)
        self.table = None

    def drop_db(self):
        """Delete the underlying database file."""
        os.remove(self.db_path)

    def drop_table(self):
        """Drop the currently selected table and clear the selection."""
        assert self.table is not None
        self.cd.drop_table(self.conn, self.table)
        self.table = None

    def __getitem__(self, table):
        """Select (and lazily create) `table`; returns self for chaining.

        NOTE(review): `table` is interpolated into SQL unescaped — callers must
        pass trusted identifiers only.
        """
        self.table = table
        # DDL goes through a plain sqlite3 connection so WAL mode is set on the file.
        conn = sqlite3.connect(self.db_path, isolation_level=None)
        try:
            conn.execute('pragma journal_mode=wal')
            sql = f'''CREATE TABLE IF NOT EXISTS {table} (id INTEGER PRIMARY KEY AUTOINCREMENT, data text)'''
            conn.execute(sql)
        finally:
            # BUG FIX: the ad-hoc sqlite3 connection was never closed.
            conn.close()
        return self

    def insert_one(self, dic):
        """Insert one document, stored as JSON text."""
        assert self.table is not None
        # BUG FIX: escape single quotes so JSON values containing ' do not
        # break (or inject into) the SQL string literal.
        value = json.dumps(dic).replace("'", "''")
        sql = f"insert INTO {self.table} (data) VALUES ('{value}')"
        self.conn.execute(sql)
        self.conn.commit()

    def find(self, query=None):
        """Return all documents matching the Mongo-style `query` dict ({} matches all)."""
        # BUG FIX: avoid a shared mutable default argument.
        return self.t.find(self.conn, self.table, query if query is not None else {})

    def delete(self, query=None):
        """Delete all documents matching `query` ({} deletes all)."""
        return self.t.delete(self.conn, self.table, query if query is not None else {})

    # Ensures `query` is unique afterwards: the first match is kept (merged
    # with `dic`) and any other matching rows are implicitly deleted.
    def upsert_one(self, query, dic):
        """Insert `dic` if nothing matches `query`; otherwise merge it into the first match."""
        assert self.table is not None
        l = self.find(query)
        if len(l) == 0:
            return self.insert_one(dic)
        retain = l[0]
        self.t.revise_one(self.conn, self.table, retain, dic)
        rest_ids = [i['id'] for i in l if i['id'] != retain["id"]]
        if len(rest_ids) > 0:
            self.cd.delete_by_ids(self.conn, self.table, rest_ids)
        self.conn.commit()
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
if __name__ == '__main__':
    # Smoke test: insert two documents, query one, then upsert over it.
    store = Nosql()
    store.drop_db()
    table_name = "test"
    col = store['test']
    doc = {"a": 2, "b": 456, 'c': "adf", "d": "2020-02-02"}
    col.insert_one(doc)
    doc = {"a": 56, "b": 456, 'c': "adf", "d": "2020-07-02"}
    col.insert_one(doc)
    q = {"a": 2}
    print(111, col.find(q))
    store.upsert_one(q, {"b": 999})
    q = {"a": 2}
    print(222, col.find(q))
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
import json
|
|
2
|
+
|
|
3
|
+
from remote_run_everything.nosql.no_sql_tool import NosqlTool
|
|
4
|
+
from remote_run_everything.tools.common import Common
|
|
5
|
+
|
|
6
|
+
from remote_run_everything.db.crude_duck import CrudeDuck
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class NosqlMysql:
    """Mongo-like document store on a MySQL table (id BIGINT PK, data TEXT),
    accessed through a connection created by CrudeDuck with the MySQL
    extension installed."""

    def __init__(self, user, pwd, host, port, dbname):
        self.t = NosqlTool()
        self.cd = CrudeDuck()
        self.conn = self.cd.install_mysql_ext(user, pwd, host, port, dbname)
        # Expose the Mongo-style matcher to SQL as ``my_q(data, query_json)``.
        self.conn.create_function("my_q", self.t.query)
        self.table = None

    def drop_table(self):
        """Drop the currently selected table and clear the selection."""
        assert self.table is not None
        self.cd.drop_table(self.conn, self.table)
        self.table = None

    def __getitem__(self, table):
        """Select (and lazily create) `table`; returns self for chaining.

        NOTE(review): `table` is interpolated into SQL unescaped — pass trusted
        identifiers only.
        """
        self.table = table
        sql = f'''CREATE TABLE IF NOT EXISTS {table} (
        id bigint NOT NULL primary key ,
        data text
        )'''
        self.conn.execute(sql)
        self.conn.commit()
        return self

    def insert_one(self, dic):
        """Insert one document as JSON text under a freshly computed id.

        NOTE(review): the id comes from ``CrudeDuck.max_id`` — presumably the
        next free id; concurrent writers could collide. TODO confirm.
        """
        assert self.table is not None
        mid = self.cd.max_id(self.conn, self.table)
        # BUG FIX: escape single quotes so JSON values containing ' do not
        # break (or inject into) the SQL string literal.
        value = json.dumps(dic).replace("'", "''")
        sql = f"insert INTO {self.table} (id,data) VALUES ({mid},'{value}')"
        self.conn.execute(sql)
        self.conn.commit()

    def find(self, query=None):
        """Return all documents matching the Mongo-style `query` dict ({} matches all)."""
        # BUG FIX: avoid a shared mutable default argument.
        return self.t.find(self.conn, self.table, query if query is not None else {})

    def delete(self, query=None):
        """Delete all documents matching `query` ({} deletes all)."""
        return self.t.delete(self.conn, self.table, query if query is not None else {})

    # Ensures `query` is unique afterwards: the first match is kept (merged
    # with `dic`) and any other matching rows are implicitly deleted.
    def upsert_one(self, query, dic):
        """Insert `dic` if nothing matches `query`; otherwise merge it into the first match."""
        assert self.table is not None
        l = self.find(query)
        if len(l) == 0:
            return self.insert_one(dic)
        retain = l[0]
        self.t.revise_one(self.conn, self.table, retain, dic)
        rest_ids = [i['id'] for i in l if i['id'] != retain["id"]]
        if len(rest_ids) > 0:
            self.cd.delete_by_ids(self.conn, self.table, rest_ids)
        self.conn.commit()
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
if __name__ == '__main__':
    # Manual check against the test MySQL instance from the host config.
    cfg = Common().read_conf()['mysql']['test']
    db_name = Common().read_conf()['mysql']['price']['test']
    table = 'nosql'
    client = NosqlMysql(**{**cfg, "dbname": db_name})
    col = client[table]
    col.insert_one({"a": 1})
    print(col.find({}))
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
import json
|
|
2
|
+
|
|
3
|
+
from remote_run_everything.nosql.no_sql_tool import NosqlTool
|
|
4
|
+
from remote_run_everything.tools.common import Common
|
|
5
|
+
|
|
6
|
+
from remote_run_everything.db.crude_duck import CrudeDuck
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class NosqlPg:
    """Mongo-like document store on a PostgreSQL table (id BIGINT PK,
    data TEXT), accessed through a connection created by CrudeDuck with the
    PostgreSQL extension installed."""

    def __init__(self, user, pwd, host, port, dbname):
        self.t = NosqlTool()
        self.cd = CrudeDuck()
        self.conn = self.cd.install_pg_ext(user, pwd, host, port, dbname)
        # Expose the Mongo-style matcher to SQL as ``my_q(data, query_json)``.
        self.conn.create_function("my_q", self.t.query)
        self.table = None

    def drop_table(self):
        """Drop the currently selected table and clear the selection."""
        assert self.table is not None
        self.cd.drop_table(self.conn, self.table)
        self.table = None

    def __getitem__(self, table):
        """Select (and lazily create) `table`; returns self for chaining.

        NOTE(review): `table` is interpolated into SQL unescaped — pass trusted
        identifiers only.
        """
        self.table = table
        sql = f'''CREATE TABLE IF NOT EXISTS {table} (
        id bigint NOT NULL primary key ,
        data text
        )'''
        self.conn.execute(sql)
        self.conn.commit()
        return self

    def insert_one(self, dic):
        """Insert one document as JSON text under a freshly computed id.

        NOTE(review): the id comes from ``CrudeDuck.max_id`` — presumably the
        next free id; concurrent writers could collide. TODO confirm.
        """
        assert self.table is not None
        mid = self.cd.max_id(self.conn, self.table)
        # BUG FIX: escape single quotes so JSON values containing ' do not
        # break (or inject into) the SQL string literal.
        value = json.dumps(dic).replace("'", "''")
        sql = f"insert INTO {self.table} (id,data) VALUES ({mid},'{value}')"
        self.conn.execute(sql)
        self.conn.commit()

    def find(self, query=None):
        """Return all documents matching the Mongo-style `query` dict ({} matches all)."""
        # BUG FIX: avoid a shared mutable default argument.
        return self.t.find(self.conn, self.table, query if query is not None else {})

    def delete(self, query=None):
        """Delete all documents matching `query` ({} deletes all)."""
        return self.t.delete(self.conn, self.table, query if query is not None else {})

    # Ensures `query` is unique afterwards: the first match is kept (merged
    # with `dic`) and any other matching rows are implicitly deleted.
    def upsert_one(self, query, dic):
        """Insert `dic` if nothing matches `query`; otherwise merge it into the first match."""
        assert self.table is not None
        l = self.find(query)
        if len(l) == 0:
            return self.insert_one(dic)
        retain = l[0]
        self.t.revise_one(self.conn, self.table, retain, dic)
        rest_ids = [i['id'] for i in l if i['id'] != retain["id"]]
        if len(rest_ids) > 0:
            self.cd.delete_by_ids(self.conn, self.table, rest_ids)
        self.conn.commit()
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
if __name__ == '__main__':
    # Manual check against the PostgreSQL instance from the host config.
    cfg = Common().read_conf()['pg']
    table = 'nosql'
    client = NosqlPg(**{**cfg, "dbname": "projects"})
    col = client[table]
    # client.drop_table()
    col.insert_one({"a": 1})
    print(col.find({}))
    # col.upsert_one({"a": 1}, {"b": 44})
    # print(col.find({}))
|
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
import sqlite3, os, json, arrow
|
|
2
|
+
from remote_run_everything.db.crude_duck import CrudeDuck
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class NosqlTool:
    """Shared helpers for the Nosql* document stores: Mongo-style query
    matching (registered on connections as the ``my_q`` SQL function),
    row decoding, and delete/update plumbing."""

    def __init__(self):
        self.cd = CrudeDuck()

    def default_db_path(self, db_path):
        """Return `db_path`, defaulting to a per-OS location; ensures the parent dir exists."""
        if db_path is None:
            db_path = "D://wq/temp/emongo.db" if os.name == 'nt' else "/data/temp/emongo.db"
        os.makedirs(os.path.dirname(db_path), exist_ok=True)
        return db_path

    def op_match(self, opdic, dvalue):
        """Evaluate one Mongo-style operator dict (e.g. ``{"$gt": 3}``) against a value.

        Only the first recognized operator is evaluated (the branch returns
        immediately); an operator dict with no recognized key yields False.
        """
        for op, qv in opdic.items():
            if op == "$gt":
                return dvalue > qv
            elif op == "$gte":
                return dvalue >= qv
            elif op == "$lt":
                return dvalue < qv
            elif op == "$lte":
                # BUG FIX: was `dvalue < qv`, which made $lte behave like $lt.
                return dvalue <= qv
            elif op == "$ne":
                return dvalue != qv
            elif op == "$in":
                return dvalue in qv
            elif op == "$between":
                return (dvalue >= qv[0]) and (dvalue <= qv[1])
        return False

    def query(self, data: str, query: str) -> int:
        """SQL scalar predicate: 1 when JSON `data` matches JSON `query`, else 0.

        An empty query matches everything; a query key absent from the
        document never matches. Operator dicts are delegated to op_match.
        """
        d = json.loads(data)
        q = json.loads(query)
        if len(q) == 0: return 1
        for qk, qv in q.items():
            # Query key not present in the document -> no match.
            if qk not in d.keys(): return 0
            if isinstance(qv, dict):
                if not self.op_match(qv, d[qk]):
                    return 0
            elif d[qk] != qv:
                return 0
        return 1

    def add_id(self, l):
        """Decode rows ({'id', 'data'}) into documents, copying the row id into each doc."""
        res = []
        for i in l:
            dic = json.loads(i['data'])
            dic['id'] = i['id']
            res.append(dic)
        return res

    def find(self, conn, table, query=None):
        """Return all documents in `table` matching `query` ({} matches all).

        Relies on the ``my_q`` function registered on `conn` and on the
        connection's ``.sql(...).df()`` DataFrame result path.
        """
        assert table is not None
        # BUG FIX: avoid a shared mutable default argument.
        qs = json.dumps(query if query is not None else {})
        sql = f"select * from {table} where my_q(data,'{qs}') = 1 "
        df = conn.sql(sql).df()
        if len(df) == 0: return []
        return self.add_id(df.to_dict("records"))

    def delete(self, conn, table, query=None):
        """Delete every document in `table` matching `query` ({} deletes all)."""
        assert table is not None
        l = self.find(conn, table, query if query is not None else {})
        if len(l) == 0: return
        ids = [i['id'] for i in l]
        self.cd.delete_by_ids(conn, table, ids)
        conn.commit()

    def revise_one(self, conn, table, ex, dic):
        """Overwrite document `ex` (must carry 'id') with `ex` merged with `dic`."""
        assert table is not None
        doc_id = ex["id"]
        merged = {**ex, **dic}
        # BUG FIX: escape single quotes so JSON containing ' does not break
        # (or inject into) the SQL string literal.
        s = json.dumps(merged).replace("'", "''")
        sql = f'''update {table} set data='{s}' where id={doc_id}
        '''
        conn.execute(sql)
        conn.commit()
|
|
File without changes
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
import jinja2, requests, os
|
|
2
|
+
import pandas as pd
|
|
3
|
+
import socket, os, tomllib
|
|
4
|
+
import base64
|
|
5
|
+
import os, signal
|
|
6
|
+
import subprocess, sys
|
|
7
|
+
from remote_run_everything.tools.common1 import Common1
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class Common(Common1):
    """Host/network/config/process helpers layered on Common1."""

    @property
    def local_ip(self):
        """Return this host's outbound LAN IP (the UDP connect sends no packets)."""
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            # BUG FIX: close the socket even if connect() raises.
            s.connect(("8.8.8.8", 80))
            return s.getsockname()[0]
        finally:
            s.close()

    def read_conf(self, path=None):
        """Load a TOML config; default path is per-host: win.toml on Windows,
        <local-ip>.toml elsewhere."""
        if path is None:
            d = "D://mypy/conf" if os.name == 'nt' else "/data/mypy/conf"
            n = "win" if os.name == "nt" else self.local_ip
            path = f"{d}/{n}.toml"
        with open(path, "rb") as f:
            data = tomllib.load(f)
        return data

    def kill_by_pidfile(self, pidfile):
        """Best-effort SIGTERM to the pid recorded in `pidfile`; a stale or
        dead pid is logged and ignored."""
        if os.path.exists(pidfile):
            with open(pidfile, "rb") as f:
                pid = f.read().decode()
            print("exist pid===", pid)
            try:
                os.kill(int(pid), signal.SIGTERM)
            except Exception as e:
                # Stale pidfile / already-dead process is expected; just log.
                print("kill err==", e)

    def start_with_pidfile(self, workdir, pidfile, app):
        """Launch `app` detached from this process, record its pid, then exit
        the current process."""
        os.chdir(workdir)
        # BUG FIX: CREATE_NEW_CONSOLE exists only on Windows; detach with a
        # new session on POSIX instead of raising AttributeError.
        if os.name == "nt":
            process = subprocess.Popen(app, creationflags=subprocess.CREATE_NEW_CONSOLE)
        else:
            process = subprocess.Popen(app, start_new_session=True)
        print("new pid====", process.pid)
        with open(pidfile, "wb") as f:
            s = str(process.pid).encode()
            f.write(s)
        sys.exit()

    def supervise(self, pidfile, app, workdir=None):
        """Kill any previously supervised instance, then start a fresh one
        (this call exits the current process)."""
        if workdir is None:
            workdir = os.path.dirname(pidfile)
        self.kill_by_pidfile(pidfile)
        self.start_with_pidfile(workdir, pidfile, app)
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
if __name__ == '__main__':
    # Quick manual check of the inherited zero-padding helper.
    helper = Common()
    padded = helper.prefix_zero(5, 111)
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
import jinja2, requests, os
|
|
2
|
+
import pandas as pd
|
|
3
|
+
import base64
|
|
4
|
+
import os, signal, glob, arrow
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class Common1:
    """Small shared helpers: table search/paging, Jinja2 rendering, daily
    inventory pivoting, base64 file I/O and housekeeping."""

    def table_search(self, data, search):
        """Return the rows of `data` whose str() contains `search`; an empty
        search keeps every row."""
        return [i for i in data if search == "" or search in str(i)]

    def split_page(self, numPerPage, cur, search, data):
        """Return page `cur` (1-based, `numPerPage` rows) of `data` filtered by `search`.

        Returns {"total": match_count, "data": rows}. A page straddling the end
        yields the last partial page; a page fully past the end yields [].
        Both `numPerPage` and `cur` may arrive as strings.
        """
        data = self.table_search(data, search)
        if data is None or len(data) == 0:
            return {"total": 0, "data": []}
        cur = int(cur)
        res = {"total": len(data)}
        n = int(numPerPage)
        total = len(data)
        start = (cur - 1) * n
        end = cur * n
        if start > total - 1:
            res['data'] = []
        elif end > total - 1:
            # BUG FIX: use the int `n`, not the raw argument — callers pass
            # strings, and `total % "3"` / `data[-"3":]` raise TypeError.
            remainder = total % n
            if remainder == 0:
                res['data'] = data[-n:]
            else:
                res['data'] = data[-remainder:]
        else:
            res['data'] = data[start:end]
        return res

    def render(self, dir, html, data):
        """Render `templates/<html>` under `dir` with Jinja2, passing `data`
        as the template's `data` variable."""
        env = jinja2.Environment(loader=jinja2.FileSystemLoader(f'{dir}/templates'))
        template = env.get_template(html)
        outputText = template.render(data=data)  # this is where to put args
        return outputText

    def inven_everyday(self, df, begin, end):
        """Pivot trade rows into a per-day cumulative inventory table.

        NOTE(review): assumes `df` has columns 'id', 'date' and a quantity
        column 'q' — confirm against callers.
        """
        df1 = df.groupby(by=['id', 'date']).sum()
        # Split the composite index back into plain columns.
        trades = df1.reset_index()
        # Default the date range to the span of the data.
        if not begin:
            begin = df['date'].min()
        if not end:
            end = df['date'].max()
        index_of_dates = pd.date_range(begin, end).to_frame().reset_index(drop=True).rename(columns={0: 'date'}).astype(
            str)
        # One row per calendar day, left-joined with that day's trades.
        merged = pd.merge(index_of_dates, trades, how='left', on='date')
        # Pivot: one column per id, one row per date, values = traded quantity.
        shares_tr = merged.pivot(index='date', columns='id', values='q')
        shares_tr = shares_tr.dropna(axis=1, how='all').fillna(0)
        cumShares = shares_tr.cumsum()
        cumShares.index = cumShares.index.astype(str)
        return cumShares

    def writeb64(self, path, b64):
        """Decode `b64` and write it to `path`, creating parent dirs; chmod 777."""
        dir = os.path.dirname(path)
        if not os.path.exists(dir):
            os.makedirs(dir, exist_ok=True)
        content = base64.b64decode(b64)
        with open(path, "wb") as f:
            f.write(content)
        os.chmod(path, 0o777)

    def readb64(self, f):
        """Return the file's contents base64-encoded as a str."""
        with open(f, "rb") as file:
            encoded_string = base64.b64encode(file.read())
        return encoded_string.decode()

    def prefix_zero(self, n, d):
        """Zero-pad `d` to width `n`; if str(d) is longer, keep the last n characters."""
        s = str(d)
        if len(s) >= n:
            return s[-n:]
        return "0" * (n - len(s)) + s

    def clear_by_days(self, root, n):
        """Delete files under root/*/ whose mtime is older than `n` days.

        NOTE(review): the pattern matches exactly one directory level despite
        recursive=True — switch to `**` if deep recursion was intended.
        """
        files = glob.glob(f"{root}/*/*.*", recursive=True)
        now = arrow.now()
        for f in files:
            mtime = os.stat(f).st_mtime
            dif = now - arrow.get(mtime)
            if dif.days > n:
                os.remove(f)
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
if __name__ == '__main__':
    # Quick manual check of the zero-padding helper.
    helper = Common1()
    padded = helper.prefix_zero(5, 111)
|
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
from functools import wraps
|
|
2
|
+
from remote_run_everything.db.kv_store import KvStore
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
def cache_by_name(k, ts):
    """Decorator factory: cache the wrapped function's result in KvStore
    under the fixed key `k`, expiring after `ts`.

    On a cache miss the function runs and a non-None result is written back;
    a None result is never cached.
    """
    def _wrapper(f):
        # BUG FIX: `wraps(f)` was called bare and discarded; apply it as a
        # decorator so the wrapper keeps f's name/docstring.
        @wraps(f)
        def _wrapped(*args, **kwargs):
            mykv = KvStore()
            res = mykv.read_with_ex(k, ts)
            if res is None:
                res = f(*args, **kwargs)
                if res is not None:
                    mykv.write_with_ex(k, res)
            return res

        return _wrapped

    return _wrapper
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def cache_by_1starg(sub, ts):
    """Decorator factory: cache in KvStore under key `sub` + first positional
    argument, expiring after `ts`.

    On a cache miss the function runs and a non-None result is written back;
    a None result is never cached.
    """
    def _wrapper(f):
        # BUG FIX: `wraps(f)` was called bare and discarded; apply it as a
        # decorator so the wrapper keeps f's name/docstring.
        @wraps(f)
        def _wrapped(*args, **kwargs):
            mykv = KvStore()
            k = f"{sub}{args[0]}"
            res = mykv.read_with_ex(k, ts)
            if res is None:
                res = f(*args, **kwargs)
                if res is not None:
                    mykv.write_with_ex(k, res)
            return res

        return _wrapped

    return _wrapper
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def cache_by_nth_arg(sub, index, ts):
    """Decorator factory: cache in KvStore under key `sub` + positional
    argument number `index`, expiring after `ts`.

    On a cache miss the function runs and a non-None result is written back;
    a None result is never cached.
    """
    def _wrapper(f):
        # BUG FIX: `wraps(f)` was called bare and discarded; apply it as a
        # decorator so the wrapper keeps f's name/docstring.
        @wraps(f)
        def _wrapped(*args, **kwargs):
            mykv = KvStore()
            k = f"{sub}{args[index]}"
            res = mykv.read_with_ex(k, ts)
            if res is None:
                res = f(*args, **kwargs)
                if res is not None:
                    mykv.write_with_ex(k, res)
            return res

        return _wrapped

    return _wrapper
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def cache_by_rkey(ts):
    """Decorator factory: cache in KvStore under the `rkey` keyword argument
    (required — KeyError if the call omits it), expiring after `ts`.

    On a cache miss the function runs and a non-None result is written back;
    a None result is never cached.
    """
    def _wrapper(f):
        # BUG FIX: `wraps(f)` was called bare and discarded; apply it as a
        # decorator so the wrapper keeps f's name/docstring.
        @wraps(f)
        def _wrapped(*args, **kwargs):
            mykv = KvStore()
            k = str(kwargs['rkey'])
            res = mykv.read_with_ex(k, ts)
            if res is None:
                res = f(*args, **kwargs)
                if res is not None:
                    mykv.write_with_ex(k, res)
            return res

        return _wrapped

    return _wrapper
|
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
import os
|
|
2
|
+
from dataclasses import dataclass
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
@dataclass
class Tab:
    """One parsed SQLAlchemy model: class name, table name, and its columns."""
    stname: str   # Python class name from `class X(Base):`
    tbname: str   # stripped value of __tablename__
    fields: list  # raw "x = Column(...)" lines, later replaced by parsed dicts
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class Sql2go:
    """Translate sqlacodegen-style SQLAlchemy model files into Go xorm structs."""

    def __init__(self, path):
        # Path to the Python file holding the generated SQLAlchemy models.
        self.path = path

    def read_lines(self):
        """Return the model file as a list of raw lines."""
        with open(self.path, 'r') as f:
            content = f.readlines()
        return content

    def replace(self, s, l):
        """Strip every substring in `l` out of `s`."""
        for i in l:
            s = s.replace(i, "")
        return s

    def split_cls(self):
        """Parse the file into Tab records (class name, table name, parsed columns)."""
        res = []
        content = self.read_lines()
        cur_cls = ""
        t = Tab("", "", [])
        for s in content:
            if s.startswith("class "):
                # A new class begins: flush the previous one.
                if cur_cls != "":
                    res.append(t)
                    t = Tab("", "", [])
                stname = self.replace(s, [" ", "\n", "class", "(Base)", ":"])
                cur_cls = stname
                t.stname = cur_cls
            elif "__tablename__" in s:
                tbname = self.replace(s, [" ", "__tablename__", "\n", "="])
                t.tbname = tbname
            elif "= Column" in s:
                t.fields.append(s)
        # BUG FIX: the last class in the file was never appended.
        if cur_cls != "":
            res.append(t)
        # Replace the raw column lines with parsed name/tag/type dicts.
        for t in res:
            t.fields = [{
                "field": self.parse_name(s),
                "tag": self.parse_tag(s),
                "ty": self.parse_ty(s),
            } for s in t.fields]
        return res

    def parse_name(self, s):
        """Extract the column name from an `x = Column(...)` line."""
        name = s.split("=")[0].replace(" ", "")
        return name

    def parse_ty(self, s):
        """Map the SQLAlchemy column type in `s` to a Go type name ('unk' if unknown)."""
        if "DateTime" in s:
            ty = "time.time"
        elif "String" in s:
            ty = "string"
        elif "BigInteger" in s:
            ty = "int64"
        elif "Integer" in s or "TINYINT" in s:
            ty = "int32"
        elif "CHAR" in s:
            ty = "string"
        elif "Float" in s or "MONEY" in s:
            ty = "float32"
        elif "DECIMAL" in s:
            ty = "float64"
        else:
            ty = "unk"
        return ty

    def parse_tag(self, s):
        """Build the Go struct tag (xorm + json) for an `x = Column(...)` line."""
        name = s.split("=")[0].replace(" ", "").lower()
        if "primary_key=True" in s:
            tag = f'''`xorm:"pk not null '{name}'" json:"{name}"`'''
        elif "nullable=False" in s:
            tag = f'''`xorm:"not null '{name}'" json:"{name}"`'''
        else:
            tag = f'''`xorm:"'{name}'" json:"{name}"`'''
        return tag

    def write_go(self):
        """Emit all parsed models as Go structs into ./mod.go and gofmt it."""
        res = self.split_cls()
        lines = []
        for tab in res:
            header = f"type {tab.stname} struct " + "{"
            st = [header]
            for f in tab.fields:
                st.append(f"{f['field']} {f['ty']} {f['tag']}")
            st.append("}" + "\r\n")
            lines.append("\n".join(st))
        ss = "package mod" + "\r\n" + "".join(lines)
        with open("./mod.go", 'w') as f:
            f.write(ss)
        os.system("go fmt mod.go")
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
if __name__ == '__main__':
    # BUG FIX: the original instantiated the undefined name `SqlRead`;
    # the class defined above is `Sql2go`.
    s = Sql2go("./pstarback.py")
    s.write_go()
|