remote-run-everything 1.9.tar.gz → 2.0.2.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- remote_run_everything-2.0.2/LICENSE +19 -0
- remote_run_everything-2.0.2/PKG-INFO +109 -0
- remote_run_everything-2.0.2/README.md +94 -0
- remote_run_everything-2.0.2/pyproject.toml +20 -0
- remote_run_everything-2.0.2/remote_run_everything/__init__.py +16 -0
- remote_run_everything-2.0.2/remote_run_everything/db/backup.py +38 -0
- remote_run_everything-2.0.2/remote_run_everything/db/crud_sqlalchemy.py +97 -0
- remote_run_everything-2.0.2/remote_run_everything/db/crude_duck.py +122 -0
- remote_run_everything-2.0.2/remote_run_everything/db/kv_store.py +109 -0
- remote_run_everything-2.0.2/remote_run_everything/deploy/by_http.py +82 -0
- remote_run_everything-2.0.2/remote_run_everything/deploy/by_http_server.py +56 -0
- remote_run_everything-2.0.2/remote_run_everything/deploy/by_http_tool.py +64 -0
- remote_run_everything-2.0.2/remote_run_everything/deploy/record_mod.py +26 -0
- remote_run_everything-2.0.2/remote_run_everything/nosql/__init__.py +0 -0
- remote_run_everything-2.0.2/remote_run_everything/nosql/no_sql.py +73 -0
- remote_run_everything-2.0.2/remote_run_everything/nosql/no_sql_mysql.py +67 -0
- remote_run_everything-2.0.2/remote_run_everything/nosql/no_sql_pg.py +69 -0
- remote_run_everything-2.0.2/remote_run_everything/nosql/no_sql_tool.py +81 -0
- remote_run_everything-2.0.2/remote_run_everything/tools/__init__.py +0 -0
- remote_run_everything-2.0.2/remote_run_everything/tools/common.py +56 -0
- remote_run_everything-2.0.2/remote_run_everything/tools/common1.py +100 -0
- remote_run_everything-2.0.2/remote_run_everything/tools/decorators.py +81 -0
- {remote_run_everything-1.9/remote_run_everything/crud → remote_run_everything-2.0.2/remote_run_everything/tools}/sqlacodegen_go_struct.py +1 -1
- remote_run_everything-2.0.2/remote_run_everything/vsconf/conf_txt.py +106 -0
- remote_run_everything-2.0.2/remote_run_everything/vsconf/core.py +29 -0
- remote_run_everything-2.0.2/remote_run_everything.egg-info/PKG-INFO +109 -0
- remote_run_everything-2.0.2/remote_run_everything.egg-info/SOURCES.txt +33 -0
- remote_run_everything-2.0.2/test/test.py +55 -0
- remote_run_everything-2.0.2/test/test_server.py +22 -0
- remote_run_everything-1.9/PKG-INFO +0 -108
- remote_run_everything-1.9/README.md +0 -92
- remote_run_everything-1.9/remote_run_everything/__init__.py +0 -6
- remote_run_everything-1.9/remote_run_everything/crud/crud.py +0 -45
- remote_run_everything-1.9/remote_run_everything/deploy/__scripts__/kill_ps.py +0 -26
- remote_run_everything-1.9/remote_run_everything/deploy/__scripts__/kill_ss.py +0 -18
- remote_run_everything-1.9/remote_run_everything/deploy/__scripts__/mongo_dump_restore.py +0 -53
- remote_run_everything-1.9/remote_run_everything/deploy/by_http.py +0 -67
- remote_run_everything-1.9/remote_run_everything/deploy/conf.py +0 -17
- remote_run_everything-1.9/remote_run_everything/deploy/down.py +0 -83
- remote_run_everything-1.9/remote_run_everything/deploy/hist_pickle.py +0 -64
- remote_run_everything-1.9/remote_run_everything/deploy/local.py +0 -73
- remote_run_everything-1.9/remote_run_everything/deploy/remote.py +0 -71
- remote_run_everything-1.9/remote_run_everything.egg-info/PKG-INFO +0 -108
- remote_run_everything-1.9/remote_run_everything.egg-info/SOURCES.txt +0 -24
- remote_run_everything-1.9/remote_run_everything.egg-info/requires.txt +0 -2
- remote_run_everything-1.9/setup.py +0 -32
- remote_run_everything-1.9/test/test.py +0 -58
- remote_run_everything-1.9/test/test_pg.py +0 -22
- {remote_run_everything-1.9 → remote_run_everything-2.0.2}/MANIFEST.in +0 -0
- {remote_run_everything-1.9/remote_run_everything/deploy → remote_run_everything-2.0.2/remote_run_everything/db}/__init__.py +0 -0
- {remote_run_everything-1.9/remote_run_everything/deploy/__scripts__ → remote_run_everything-2.0.2/remote_run_everything/deploy}/__init__.py +0 -0
- {remote_run_everything-1.9 → remote_run_everything-2.0.2}/remote_run_everything.egg-info/dependency_links.txt +0 -0
- {remote_run_everything-1.9 → remote_run_everything-2.0.2}/remote_run_everything.egg-info/top_level.txt +0 -0
- {remote_run_everything-1.9 → remote_run_everything-2.0.2}/setup.cfg +0 -0
remote_run_everything-2.0.2/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2018 The Python Packaging Authority
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.

remote_run_everything-2.0.2/PKG-INFO
@@ -0,0 +1,109 @@
+Metadata-Version: 2.4
+Name: remote_run_everything
+Version: 2.0.2
+Summary: Deploy Tools
+Author-email: Wang Qi <wangmarkqi@gmail.com>
+License-Expression: MIT
+Project-URL: Homepage, https://github.com/wangmarkqi/remote_run_everything
+Project-URL: Issues, https://github.com/wangmarkqi/remote_run_everything/issues
+Classifier: Programming Language :: Python :: 3
+Classifier: Operating System :: OS Independent
+Requires-Python: >=3.9
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Dynamic: license-file
+
+# remote_run_everything: a collection of assorted utility code
+
+## Installation
+```shell
+pip install -U --index-url https://test.pypi.org/simple/ remote_run_everything
+```
+
+## Ops / deployment
+```python
+# server-side code
+from remote_run_everything import cherrypy_in_daemon, ByHttpServer
+cherrypy_in_daemon(ByHttpServer, 8888, "/deploy")
+
+# push code to the server
+from remote_run_everything import ByHttp
+def test_up():
+    host = "http://x.x.x.x:8888/deploy"
+    local = "D://project/demand/shop"
+    remote = "/data/mypy/shop"
+    db = "D://wq/temp/shop.db"
+    bh = ByHttp(host, local, remote, db)
+    bh.up(['node_modules', ".pyc", ".idea"])
+
+# pull code from the server
+def test_down():
+    host = "http://x.x.x.x:8888/deploy"
+    local = "D://project/demand/shop"
+    remote = "/data/mypy/shop"
+    db = "D://wq/temp/shop.db"
+    bh = ByHttp(host, local, remote, db)
+    bh.down(['node_modules', ".pyc", ".idea"])
+```
+
+## Caching
+```python
+from remote_run_everything import cache_by_1starg, cache_by_name, cache_by_rkey
+@cache_by_name("asdf", 1)
+def test1():
+    print("the function body actually ran!")
+    return {"a": "adaf"}
+@cache_by_1starg("asdf", 1)
+def test2(arg1):
+    print("the function body actually ran!")
+    return {"a": "adaf"}
+@cache_by_rkey(1)
+def test3(rkey="xx"):
+    print("the function body actually ran!")
+    return {"a": "adaf"}
+```
+## KV database
+```python
+from remote_run_everything import KvStore
+kv = KvStore('test.db')
+print(len(kv))
+kv['hello1'] = 'you1'
+del kv['hello1']
+print(len(kv))
+print('hello1' in kv)
+kv['hello3'] = 'newvalue'
+print(kv.keys())
+print(kv.values())
+print(kv.items())
+for k in kv:
+    print(k, kv[k])
+```
+## MongoDB-like database
+```python
+from remote_run_everything import Nosql, NosqlPg, NosqlMysql
+db = Nosql()
+t = "test"
+col = db['test']
+dic = {"a": 2, "b": 456, 'c': "adf", "d": "2020-02-02"}
+col.insert_one(dic)
+dic = {"a": 56, "b": 456, 'c': "adf", "d": "2020-07-02"}
+col.insert_one(dic)
+q = {"a": 2, "b": {"$gt": 1}}
+print(col.find(q))
+db.drop_db()
+```
+## Process management
+```python
+class ProcessManage:
+    # nosql is an instance of Nosql, NosqlMysql, or NosqlPg
+    def __init__(self, nosql):
+        self.db = nosql
+        self.col = self.db['pid']
+
+    # Just call this in your program: on startup it stores the pid in the database; with the pid plus psutil you can do the rest.
+    def save_pid(self, name, cmd):
+        dic = {"name": name, "cmd": cmd, "pid": os.getpid()}
+        print("save pid", dic)
+        self.col.insert_one(dic)
+
+```

remote_run_everything-2.0.2/README.md
@@ -0,0 +1,94 @@
+# remote_run_everything: a collection of assorted utility code
+
+## Installation
+```shell
+pip install -U --index-url https://test.pypi.org/simple/ remote_run_everything
+```
+
+## Ops / deployment
+```python
+# server-side code
+from remote_run_everything import cherrypy_in_daemon, ByHttpServer
+cherrypy_in_daemon(ByHttpServer, 8888, "/deploy")
+
+# push code to the server
+from remote_run_everything import ByHttp
+def test_up():
+    host = "http://x.x.x.x:8888/deploy"
+    local = "D://project/demand/shop"
+    remote = "/data/mypy/shop"
+    db = "D://wq/temp/shop.db"
+    bh = ByHttp(host, local, remote, db)
+    bh.up(['node_modules', ".pyc", ".idea"])
+
+# pull code from the server
+def test_down():
+    host = "http://x.x.x.x:8888/deploy"
+    local = "D://project/demand/shop"
+    remote = "/data/mypy/shop"
+    db = "D://wq/temp/shop.db"
+    bh = ByHttp(host, local, remote, db)
+    bh.down(['node_modules', ".pyc", ".idea"])
+```
+
+## Caching
+```python
+from remote_run_everything import cache_by_1starg, cache_by_name, cache_by_rkey
+@cache_by_name("asdf", 1)
+def test1():
+    print("the function body actually ran!")
+    return {"a": "adaf"}
+@cache_by_1starg("asdf", 1)
+def test2(arg1):
+    print("the function body actually ran!")
+    return {"a": "adaf"}
+@cache_by_rkey(1)
+def test3(rkey="xx"):
+    print("the function body actually ran!")
+    return {"a": "adaf"}
+```
+## KV database
+```python
+from remote_run_everything import KvStore
+kv = KvStore('test.db')
+print(len(kv))
+kv['hello1'] = 'you1'
+del kv['hello1']
+print(len(kv))
+print('hello1' in kv)
+kv['hello3'] = 'newvalue'
+print(kv.keys())
+print(kv.values())
+print(kv.items())
+for k in kv:
+    print(k, kv[k])
+```
+## MongoDB-like database
+```python
+from remote_run_everything import Nosql, NosqlPg, NosqlMysql
+db = Nosql()
+t = "test"
+col = db['test']
+dic = {"a": 2, "b": 456, 'c': "adf", "d": "2020-02-02"}
+col.insert_one(dic)
+dic = {"a": 56, "b": 456, 'c': "adf", "d": "2020-07-02"}
+col.insert_one(dic)
+q = {"a": 2, "b": {"$gt": 1}}
+print(col.find(q))
+db.drop_db()
+```
+## Process management
+```python
+class ProcessManage:
+    # nosql is an instance of Nosql, NosqlMysql, or NosqlPg
+    def __init__(self, nosql):
+        self.db = nosql
+        self.col = self.db['pid']
+
+    # Just call this in your program: on startup it stores the pid in the database; with the pid plus psutil you can do the rest.
+    def save_pid(self, name, cmd):
+        dic = {"name": name, "cmd": cmd, "pid": os.getpid()}
+        print("save pid", dic)
+        self.col.insert_one(dic)
+
+```

remote_run_everything-2.0.2/pyproject.toml
@@ -0,0 +1,20 @@
+[project]
+name = "remote_run_everything"
+version = "2.0.2"
+authors = [
+    { name="Wang Qi", email="wangmarkqi@gmail.com" },
+]
+description = "Deploy Tools"
+readme = "README.md"
+requires-python = ">=3.9"
+classifiers = [
+    "Programming Language :: Python :: 3",
+    "Operating System :: OS Independent",
+]
+license = "MIT"
+license-files = ["LICEN[CS]E*"]
+
+[project.urls]
+Homepage = "https://github.com/wangmarkqi/remote_run_everything"
+Issues = "https://github.com/wangmarkqi/remote_run_everything/issues"
+

remote_run_everything-2.0.2/remote_run_everything/__init__.py
@@ -0,0 +1,16 @@
+from remote_run_everything.deploy.by_http import ByHttp
+from remote_run_everything.deploy.by_http_server import ByHttpServer, cherrypy_in_daemon
+
+from remote_run_everything.db.crude_duck import CrudeDuck
+from remote_run_everything.db.crud_sqlalchemy import Crud
+from remote_run_everything.db.kv_store import KvStore
+from remote_run_everything.db.backup import BackUp
+
+from remote_run_everything.tools.common import Common
+from remote_run_everything.tools.sqlacodegen_go_struct import Sql2go
+from remote_run_everything.tools.decorators import cache_by_1starg, cache_by_name, cache_by_rkey, cache_by_nth_arg
+
+from remote_run_everything.nosql.no_sql import Nosql
+from remote_run_everything.nosql.no_sql_pg import NosqlPg
+from remote_run_everything.nosql.no_sql_mysql import NosqlMysql
+from remote_run_everything.vsconf.core import VsConf
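The 2.0.2 `__init__` flattens every submodule entry point into the package root. A quick, hypothetical smoke test of that re-exported surface (not taken from the package's own tests):

```python
# Hypothetical smoke test: everything re-exported by remote_run_everything/__init__.py
# should be importable directly from the package root.
from remote_run_everything import (
    ByHttp, ByHttpServer, cherrypy_in_daemon,
    CrudeDuck, Crud, KvStore, BackUp,
    Common, Sql2go,
    cache_by_1starg, cache_by_name, cache_by_rkey, cache_by_nth_arg,
    Nosql, NosqlPg, NosqlMysql, VsConf,
)

print([obj.__name__ for obj in (ByHttp, Crud, CrudeDuck, KvStore, BackUp, Nosql, VsConf)])
```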

remote_run_everything-2.0.2/remote_run_everything/db/backup.py
@@ -0,0 +1,38 @@
+from bson import BSON, decode_all
+import pymongo
+import os, sys
+
+
+class BackUp:
+    def __init__(self, root):
+        self.root = root
+        os.makedirs(self.root, exist_ok=True)
+
+    def mongo_dump(self, cli, dbs):
+        for d in dbs:
+            db = cli[d]
+            dir = os.path.join(self.root, d)
+            os.makedirs(dir, exist_ok=True)
+            cols = db.list_collection_names()
+            for col in cols:
+                sr = db[col]
+                # Dump.
+                with open(f'{dir}/{col}.bson', 'wb+') as f:
+                    for doc in sr.find():
+                        f.write(BSON.encode(doc))
+
+    def mongo_restore(self, cli, dbs):
+        for d in dbs:
+            db = cli[d]
+            dir = os.path.join(self.root, d)
+            files = os.listdir(dir)
+            cols = [i.split('.')[0] for i in files]
+            print(cols)
+            for name in cols:
+                file = f"{dir}/{name}.bson"
+                col = db[name]
+                with open(file, 'rb') as f:
+                    data = decode_all(f.read())
+                    col.insert_many(data)
+
+
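For orientation, a minimal usage sketch of the new `BackUp` class; the MongoDB URI, backup directory, and database name below are assumptions, not values from the package:

```python
# Sketch (assumed values): dump and restore a MongoDB database with BackUp.
import pymongo
from remote_run_everything import BackUp

cli = pymongo.MongoClient("mongodb://localhost:27017")  # assumed local MongoDB instance
bak = BackUp("/data/backup/mongo")                      # root directory for the .bson dumps

bak.mongo_dump(cli, ["shop"])     # writes /data/backup/mongo/shop/<collection>.bson per collection
bak.mongo_restore(cli, ["shop"])  # reads those files back and insert_many()s the documents
```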

remote_run_everything-2.0.2/remote_run_everything/db/crud_sqlalchemy.py
@@ -0,0 +1,97 @@
+import os, pymysql
+from urllib.parse import quote_plus
+
+from sqlalchemy import create_engine, select, update, and_, insert, delete
+
+
+class Crud:
+    def sqlite_engine(self, dbpath):
+        dir = os.path.dirname(dbpath)
+        os.makedirs(dir, exist_ok=True)
+        url = f"sqlite:///{dbpath}"
+        engine = create_engine(url, future=True, connect_args={'timeout': 30, 'check_same_thread': False})
+        return engine
+
+    def pg_url(self, user, pwd, host, port, db):
+        f = lambda x: quote_plus(x)
+        return f"postgresql://{f(user)}:{f(pwd)}@{f(host)}:{port}/{db}"
+
+    def pg_engine(self, user, pwd, host, port, db):
+        return create_engine(self.pg_url(user, pwd, host, port, db), pool_recycle=3600, pool_size=80, max_overflow=-1,
+                             echo=False,
+                             future=True)
+
+    def mysql_url(self, user, pwd, host, port, db):
+        f = lambda x: quote_plus(x)
+        return f"mysql+pymysql://{f(user)}:{f(pwd)}@{f(host)}:{port}/{db}"
+
+    def mysql_engine(self, user, pwd, host, port, db):
+        # print(f"sqlacodegen {url} >> {db}.py")
+        return create_engine(self.mysql_url(user, pwd, host, port, db), pool_recycle=3600, pool_size=80,
+                             max_overflow=-1, echo=False,
+                             future=True)
+
+    def drop_table(self, engine, mod):
+        mod.__table__.drop(engine)
+
+    def create_table(self, engine, mod):
+        mod.__table__.create(engine, checkfirst=True)
+
+    def exist_id(self, engine, mod, cond):
+        with engine.connect() as conn:
+            stmt = select(mod).where(cond).limit(1)
+            id = conn.scalar(stmt)
+            if id is not None:
+                return id
+        return None
+
+    def table_columns(self, mod):
+        if "__annotations__" in mod.__dict__:
+            cols = mod.__dict__['__annotations__'].keys()
+        else:
+            cols = [i for i in mod.__dict__.keys() if not i.startswith("__")]
+        return cols
+
+    def insert_many(self, engine, mod, l):
+        if len(l) == 0: return
+        cols = self.table_columns(mod)
+        with engine.connect() as conn:
+            for dic in l:
+                dic = {k: v for k, v in dic.items() if k in cols}
+                stmt = insert(mod).values(dic)
+                conn.execute(stmt)
+            conn.commit()
+
+    def insert_one(self, engine, mod, dic):
+        cols = self.table_columns(mod)
+        dic = {k: v for k, v in dic.items() if k in cols}
+        with engine.connect() as conn:
+            stmt = insert(mod).values(dic)
+            conn.execute(stmt)
+            conn.commit()
+
+    def update_by_id(self, engine, mod, id, dic):
+        cols = self.table_columns(mod)
+        dic = {k: v for k, v in dic.items() if k in cols}
+        with engine.connect() as conn:
+            stmt = update(mod).where(mod.id == id).values(dic)
+            conn.execute(stmt)
+            conn.commit()
+
+    def upsert(self, engine, mod, cond, dic):
+        id = self.exist_id(engine, mod, cond)
+        if id is not None:
+            self.update_by_id(engine, mod, id, dic)
+            return
+        self.insert_one(engine, mod, dic)
+
+    def delete_by_id(self, engine, mod, id):
+        with engine.connect() as conn:
+            stmt = delete(mod).where(mod.id == id)
+            conn.execute(stmt)
+            conn.commit()
+
+    def delete(self, engine, mod, cond):
+        id = self.exist_id(engine, mod, cond)
+        if id is not None:
+            self.delete_by_id(engine, mod, id)
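A sketch of how `Crud` can be driven with a SQLAlchemy 2.0-style declarative model; the `Item` model and the database path are illustrative assumptions:

```python
# Sketch (assumed model and path): create table, insert, and upsert through Crud.
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
from remote_run_everything import Crud

class Base(DeclarativeBase):
    pass

class Item(Base):
    __tablename__ = "item"
    id: Mapped[int] = mapped_column(primary_key=True)
    name: Mapped[str] = mapped_column()

crud = Crud()
engine = crud.sqlite_engine("/tmp/demo/shop.db")  # parent directory is created if missing
crud.create_table(engine, Item)

crud.insert_one(engine, Item, {"name": "apple", "extra": 1})        # keys not on the model are filtered out
crud.upsert(engine, Item, Item.name == "apple", {"name": "apple"})  # update if a matching row exists, else insert
```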

remote_run_everything-2.0.2/remote_run_everything/db/crude_duck.py
@@ -0,0 +1,122 @@
+import duckdb, os, arrow
+
+
+class CrudeDuck:
+
+    def install_sql_ext(self, dbpath):
+        dir = os.path.dirname(dbpath)
+        os.makedirs(dir, exist_ok=True)
+        sql = f"ATTACH '{dbpath}' AS db (TYPE sqlite,journal_mode wal);use db;"
+        con = duckdb.connect()
+        con.install_extension("sqlite")
+        con.load_extension("sqlite")
+        con.sql(sql)
+        return con
+
+    def install_pg_ext(self, user, pwd, host, port, dbname):
+        sql = f'''ATTACH 'dbname={dbname} user={user}
+        host={host} port={port} connect_timeout=10 password={pwd}'
+        AS {dbname} (TYPE postgres);use {dbname};'''
+        con = duckdb.connect()
+        con.install_extension("postgres")
+        con.load_extension("postgres")
+        con.sql(sql)
+        return con
+
+    def install_mysql_ext(self, user, pwd, host, port, dbname):
+        con = duckdb.connect()
+        con.install_extension("mysql")
+        con.load_extension("mysql")
+        sql = f"ATTACH 'host={host} user={user} password={pwd} port={port} database={dbname}' AS msqldb (TYPE MYSQL);"
+        con.sql(sql)
+        con.sql(f"USE msqldb;")
+        return con
+
+    def scheme(self, con, db, table, dbtype):
+        # for mysql db == dbname; for pg db == "public"; for sqlite db == "main"
+        if dbtype == "mysql":
+            db = db
+        elif dbtype == "sqlite3":
+            db = "main"
+        elif dbtype == "pg":
+            db = "public"
+        else:
+            db = db
+        sql = f''' SELECT column_name, data_type FROM information_schema.columns
+        WHERE table_schema = '{db}' AND table_name = '{table}';
+        '''
+        scheme = {i[0]: i[1] for i in con.sql(sql).fetchall()}
+        return scheme
+
+    def max_id(self, con, table):
+        sql = f'select max(id) from {table}'
+        a = con.sql(sql).fetchone()
+        if a is None or a[0] is None: return 0
+        return a[0] + 1
+
+    def sql_from_ty(self, ty, v):
+        if v is None: return None
+        ty = ty.upper()
+        if ty in ['BIGINT', "TINYINT", "INTEGER", "BOOLEAN"]:
+            return str(int(v))
+        if ty in ["VARCHAR"]:
+            return f"'{str(v)}'"
+        if "TIMESTAMP" in ty:
+            return f"'{arrow.get(v).format()[:19]}'"
+        return None
+
+    def dic2sql(self, data, scheme):
+        dic = {}
+        for k, v in data.items():
+            if k not in scheme.keys(): continue
+            tyv = self.sql_from_ty(scheme[k], v)
+            if tyv is not None: dic[k] = tyv
+        if len(dic) == 0:
+            return "", ""
+        cols = ", ".join(dic.keys())
+        values = ", ".join(dic.values())
+        return f"({cols})", f"({values})"
+
+    def list2sql(self, l, scheme):
+        cols = ""
+        values = ""
+        for data in l:
+            dic = {}
+            for k, v in data.items():
+                if k not in scheme.keys(): continue
+                tyv = self.sql_from_ty(scheme[k], v)
+                if tyv is not None: dic[k] = tyv
+            if len(dic) == 0:
+                continue
+            cols = ", ".join(dic.keys())
+            s = ", ".join(dic.values())
+            values = values + f"({s}),"
+        return f"({cols})", values.rstrip(",")  # drop the trailing comma so the VALUES list is valid SQL
+
+    def insert_many(self, con, db, table, data, dbtype=""):
+        sche = self.scheme(con, db, table, dbtype)
+        cols, values = self.list2sql(data, sche)
+        if values == "": return
+        sql = f'insert into {table} {cols} values {values}'
+        con.execute(sql)
+
+    def insert_one(self, con, db, table, data, dbtype=""):
+        sche = self.scheme(con, db, table, dbtype)
+        cols, values = self.dic2sql(data, sche)
+        if values == "": return
+        sql = f'insert into {table} {cols} values {values}'
+        con.execute(sql)
+
+    def drop_table(self, con, table):
+        sql = f"drop table if exists {table}"
+        con.execute(sql)
+
+    def delete_by_ids(self, con, table, ids):
+        ids = [str(i) for i in ids]
+        ids = ','.join(ids)
+        sql = f"delete from {table} where id in ({ids})"
+        con.execute(sql)
+
+    def delete_by_id(self, con, table, id):
+        sql = f"delete from {table} where id = {id}"
+        con.execute(sql)
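And a sketch of `CrudeDuck` driving a SQLite file through DuckDB's sqlite extension; the path and table are assumptions, and the sqlite extension must be downloadable at runtime:

```python
# Sketch (assumed path and table): schema-aware inserts into an attached SQLite file via DuckDB.
from remote_run_everything import CrudeDuck

cd = CrudeDuck()
con = cd.install_sql_ext("/tmp/demo/duck/shop.db")   # installs + loads the sqlite extension, ATTACHes the file
con.execute("create table if not exists item (id bigint, name varchar, created timestamp)")

row = {"id": cd.max_id(con, "item"), "name": "apple", "created": "2024-01-01 00:00:00", "extra": "dropped"}
cd.insert_one(con, "main", "item", row, dbtype="sqlite3")   # columns outside the table schema are ignored
print(con.sql("select * from item").fetchall())
```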

remote_run_everything-2.0.2/remote_run_everything/db/kv_store.py
@@ -0,0 +1,109 @@
+import sqlite3, os, json, arrow
+
+'''
+kv = KvStore('test.db')  # uses SQLite
+
+print(len(kv))  # 0 items
+kv['hello1'] = 'you1'
+
+del kv['hello1']
+print(len(kv))  # 0 items again
+print('hello1' in kv)  # False, it has just been deleted!
+
+kv['hello3'] = 'newvalue'  # set another key/value
+
+print(kv.keys())  # ['hello3']
+print(kv.values())  # ['newvalue']
+print(kv.items())  # [('hello3', 'newvalue')]
+
+for k in kv:
+    print(k, kv[k])
+
+kv.close()
+'''
+
+
+class KvStore(dict):
+    def __init__(self, filename=None):
+        self.db_path = self.default_db_path(filename)
+        self.conn = sqlite3.connect(self.db_path, isolation_level=None)
+        self.conn.execute('pragma journal_mode=wal')
+        self.conn.execute("CREATE TABLE IF NOT EXISTS kv (key text unique, value text)")
+
+    def default_db_path(self, db_path):
+        if db_path is None:
+            db_path = "D://wq/temp/decor.db" if os.name == 'nt' else "/data/temp/decor.db"
+        os.makedirs(os.path.dirname(db_path), exist_ok=True)
+        return db_path
+
+    def close(self):
+        self.conn.close()
+
+    def commit(self):
+        self.conn.commit()
+
+    def __len__(self):
+        rows = self.conn.execute('SELECT COUNT(*) FROM kv').fetchone()[0]
+        return rows if rows is not None else 0
+
+    def iterkeys(self):
+        c = self.conn.cursor()
+        for row in c.execute('SELECT key FROM kv'):
+            yield row[0]
+
+    def itervalues(self):
+        c = self.conn.cursor()
+        for row in c.execute('SELECT value FROM kv'):
+            yield row[0]
+
+    def iteritems(self):
+        c = self.conn.cursor()
+        for row in c.execute('SELECT key, value FROM kv'):
+            yield row[0], row[1]
+
+    def keys(self):
+        return list(self.iterkeys())
+
+    def values(self):
+        return list(self.itervalues())
+
+    def items(self):
+        return list(self.iteritems())
+
+    def __contains__(self, key):
+        key = str(key)
+        return self.conn.execute('SELECT 1 FROM kv WHERE key = ?', (key,)).fetchone() is not None
+
+    def __getitem__(self, key):
+        key = str(key)
+        item = self.conn.execute('SELECT value FROM kv WHERE key = ?', (key,)).fetchone()
+        if item is None:
+            return None
+        v = json.loads(item[0])
+        return v
+
+    def __setitem__(self, key, value):
+        key = str(key)
+        value = json.dumps(value)
+        self.conn.execute('REPLACE INTO kv (key, value) VALUES (?,?)', (key, value))
+
+    def __delitem__(self, key):
+        key = str(key)
+        if key in self:
+            self.conn.execute('DELETE FROM kv WHERE key = ?', (key,))
+
+    def __iter__(self):
+        return self.iterkeys()
+
+    def read_with_ex(self, key, ex):
+        res = self.__getitem__(key)
+        if res is None: return None
+        if not (isinstance(res, dict) and "time" in res.keys()): return None
+        dif = arrow.now() - arrow.get(res['time'])
+        if dif.seconds >= ex:
+            self.__delitem__(key)
+            return None
+        return res['v']
+
+    def write_with_ex(self, k, v):
+        self.__setitem__(k, {"v": v, "time": arrow.now().format()})