pyfreeflow 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyfreeflow/__init__.py +34 -0
- pyfreeflow/ext/__init__.py +13 -0
- pyfreeflow/ext/buffer_operator.py +159 -0
- pyfreeflow/ext/crypto_operator.py +44 -0
- pyfreeflow/ext/data_transformer.py +217 -0
- pyfreeflow/ext/env_operator.py +23 -0
- pyfreeflow/ext/file_operator.py +207 -0
- pyfreeflow/ext/jwt_operator.py +140 -0
- pyfreeflow/ext/pgsql_executor.py +167 -0
- pyfreeflow/ext/rest_api_requester.py +159 -0
- pyfreeflow/ext/sleep_operator.py +53 -0
- pyfreeflow/ext/types.py +58 -0
- pyfreeflow/pipeline.py +127 -0
- pyfreeflow/registry.py +34 -0
- pyfreeflow/utils.py +67 -0
- pyfreeflow-0.1.0.data/scripts/pyfreeflow-cli.py +86 -0
- pyfreeflow-0.1.0.dist-info/METADATA +143 -0
- pyfreeflow-0.1.0.dist-info/RECORD +21 -0
- pyfreeflow-0.1.0.dist-info/WHEEL +5 -0
- pyfreeflow-0.1.0.dist-info/licenses/LICENSE +661 -0
- pyfreeflow-0.1.0.dist-info/top_level.txt +1 -0
pyfreeflow/__init__.py
ADDED
@@ -0,0 +1,34 @@
import importlib
import logging
import pyfreeflow.ext
import pyfreeflow.pipeline
from sys import version_info


handler = logging.StreamHandler()
if version_info.major > 3 or (version_info.major == 3 and
                              version_info.minor > 11):
    formatter = logging.Formatter('[%(asctime)s] %(name)s - TaskName[%(taskName)s] - %(levelname)s - %(message)s')
else:
    formatter = logging.Formatter('[%(asctime)s] %(name)s - %(levelname)s - %(message)s')

handler.setFormatter(formatter)

logger = logging.getLogger(__name__)
logger.setLevel(logging.WARNING)
logger.propagate = True
logger.addHandler(handler)


def load_extension(ext_name):
    logger.info("Loading extension: %s", ext_name)
    importlib.import_module(ext_name)
    logger.info("Loaded extension: %s", ext_name)


def set_loglevel(level):
    logger.setLevel(level)


def get_logformat():
    return formatter._fmt
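For reference, a minimal sketch of how these helpers might be called; it is not taken from the package's documentation, and the dotted extension path my_project.my_extension is a hypothetical placeholder.

import logging
import pyfreeflow

# Raise the package logger from its default WARNING so extension load messages show up.
pyfreeflow.set_loglevel(logging.INFO)

# Import an extra extension module by dotted path (hypothetical module name).
pyfreeflow.load_extension("my_project.my_extension")

# The format string picked at import time (task-aware on Python 3.12+).
print(pyfreeflow.get_logformat())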
pyfreeflow/ext/__init__.py
ADDED
@@ -0,0 +1,13 @@
import importlib

__EXTENSIONS__ = [
    "file_operator",
    "buffer_operator",
    "rest_api_requester",
    "data_transformer",
    "pgsql_executor",
]

for m in __EXTENSIONS__:
    ext_name = ".".join([__name__, m])
    importlib.import_module(ext_name)
pyfreeflow/ext/buffer_operator.py
ADDED
@@ -0,0 +1,159 @@
from .types import FreeFlowExt
import json
import yaml
import logging
from sys import version_info

__JSON_TYPENAME__ = "JsonBufferOperator"
__YAML_TYPENAME__ = "YamlBufferOperator"
__TOML_TYPENAME__ = "TomlBufferOperator"


class JsonBufferOperator(FreeFlowExt):
    __typename__ = __JSON_TYPENAME__
    __version__ = "1.0"

    def __init__(self, name, max_tasks=4):
        super().__init__(name, max_tasks=max_tasks)

        self._logger = logging.getLogger(".".join([__name__, self.__typename__,
                                                   self._name]))

        self._action = {
            "read": self._read,
            "write": self._write,
        }

    async def _read(self, raw):
        if not isinstance(raw, str):
            self._logger.error("Invalid input format '{}'".format(type(raw)))
            return None, 101

        try:
            j = json.loads(raw)
            return j, 0
        except Exception as ex:
            self._logger.error("Cannot load json data '{}' {}".format(raw, ex))
            return None, 102

    async def _write(self, raw):
        if not isinstance(raw, (dict, list, tuple)):
            self._logger.error("Invalid input format '{}'".format(type(raw)))
            return None, 101

        try:
            j = json.dumps(raw)
            return j, 0
        except Exception as ex:
            self._logger.error("Cannot write json data '{}' {}".format(
                raw, ex))
            return None, 103

    async def do(self, state, data):
        op = data.get("op", "read")
        raw = data.get("data", {} if op == "read" else "")
        rval = await self._action[op](raw)
        return state, rval


class YamlBufferOperator(FreeFlowExt):
    __typename__ = __YAML_TYPENAME__
    __version__ = "1.0"

    def __init__(self, name, max_tasks=4):
        super().__init__(name, max_tasks=max_tasks)

        self._logger = logging.getLogger(".".join([__name__, self.__typename__,
                                                   self._name]))

        self._action = {
            "read": self._read,
            "write": self._write,
        }

    async def _read(self, raw):
        if not isinstance(raw, str):
            self._logger.error("Invalid input format '{}'".format(type(raw)))
            return None, 101

        try:
            j = yaml.safe_load(raw)
            return j, 0
        except Exception as ex:
            self._logger.error("Cannot write yaml data '{}' {}".format(
                raw, ex))
            return None, 102

    async def _write(self, raw):
        if not isinstance(raw, (dict, list, tuple)):
            self._logger.error("Invalid input format '{}'".format(type(raw)))
            return None, 101

        try:
            j = yaml.safe_dump(raw)
            return j, 0
        except Exception as ex:
            self._logger.error("Cannot write yaml data '{}' {}".format(
                raw, ex))
            return None, 103

    async def do(self, state, data):
        op = data.get("op", "read")
        raw = data.get("data", {} if op == "read" else "")
        rval = await self._action[op](raw)
        return state, rval


if version_info.major > 3 or (version_info.major == 3 and
                              version_info.minor > 10):
    import tomllib
    import tomli_w

    class TomlBufferOperator(FreeFlowExt):
        __typename__ = __TOML_TYPENAME__
        __version__ = "1.0"

        def __init__(self, name, max_tasks=4):
            super().__init__(name, max_tasks=max_tasks)

            self._logger = logging.getLogger(".".join(
                [__name__, self.__typename__, self._name]))

            self._action = {
                "read": self._read,
                "write": self._write,
            }

        async def _read(self, raw):
            if not isinstance(raw, str):
                self._logger.error("Invalid input format '{}'".format(
                    type(raw)))
                return None, 101

            try:
                j = tomllib.loads(raw)
                return j, 0
            except Exception as ex:
                self._logger.error("Invalid input format '{}' {}".format(
                    type(raw), ex))
                return None, 102

        async def _write(self, raw):
            if not isinstance(raw, (dict, list, tuple)):
                self._logger.error("Invalid input format '{}'".format(
                    type(raw)))
                return None, 101

            try:
                j = tomli_w.dumps(raw)
                return j, 0
            except Exception as ex:
                self._logger.error("Invalid input format '{}' {}".format(
                    type(raw), ex))
                return None, 102

        async def do(self, state, data):
            op = data.get("op", "read")
            raw = data.get("data", {} if op == "read" else "")
            rval = await self._action[op](raw)
            return state, rval
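A minimal usage sketch for the buffer operators above (illustrative only, not from the package docs); it assumes an instance can be constructed with just a name and that its do coroutine can be awaited directly, and the operator name "json-buffer" is arbitrary.

import asyncio

from pyfreeflow.ext.buffer_operator import JsonBufferOperator


async def main():
    op = JsonBufferOperator("json-buffer")

    # "read" parses a JSON string; the result is (parsed_object, 0) or (None, 10x) on error.
    state, (parsed, code) = await op.do({}, {"op": "read", "data": '{"answer": 42}'})
    print(parsed, code)

    # "write" serializes a Python object back to a JSON string.
    state, (text, code) = await op.do(state, {"op": "write", "data": {"answer": 42}})
    print(text, code)


asyncio.run(main())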
pyfreeflow/ext/crypto_operator.py
ADDED
@@ -0,0 +1,44 @@
from .types import FreeFlowExt
from cryptography.fernet import Fernet
import aiofiles
import logging

__TYPENAME__ = "{}CryptoOperator"


class FernetCryptoOperator(FreeFlowExt):
    __typename__ = __TYPENAME__.format("Fernet")
    __version__ = "1.0"

    def __init__(self, name, max_tasks=4):
        super().__init__(name, max_tasks=max_tasks)

        self._logger = logging.getLogger(".".join([__name__, self.__typename__,
                                                   self._name]))
        self._action = {
            "decrypt": self._dec,
            "encrypt": self._enc,
        }

    async def _read_key(self, path):
        async with aiofiles.open(path, "rb") as f:
            key = await f.read()

        return Fernet(key)

    async def _enc(self, data, key):
        cipher = await self._read_key(key)
        d = cipher.encrypt(data.encode('utf-8'))
        return d.decode('utf-8'), 0

    async def _dec(self, data, key):
        cipher = await self._read_key(key)
        d = cipher.decrypt(data.encode('utf-8'))
        return d.decode('utf-8'), 0

    async def do(self, state, data):
        op = data.get("op", "decrypt")
        key = data.get("key")
        raw = data.get("data", "")
        rval = await self._action[op](raw, key)
        return state, rval
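Again a sketch rather than documented usage: FernetCryptoOperator re-reads the Fernet key from the path given in each request, so a key file has to exist first. The file name key.fernet and the operator name "fernet" are arbitrary choices.

import asyncio

from cryptography.fernet import Fernet
from pyfreeflow.ext.crypto_operator import FernetCryptoOperator


async def main():
    # Generate a Fernet key and store it where the operator will read it from.
    with open("key.fernet", "wb") as f:
        f.write(Fernet.generate_key())

    op = FernetCryptoOperator("fernet")

    state, (token, code) = await op.do({}, {"op": "encrypt",
                                            "key": "key.fernet",
                                            "data": "secret"})
    state, (plain, code) = await op.do(state, {"op": "decrypt",
                                               "key": "key.fernet",
                                               "data": token})
    print(plain, code)  # secret 0


asyncio.run(main())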
pyfreeflow/ext/data_transformer.py
ADDED
@@ -0,0 +1,217 @@
from .types import FreeFlowExt
import logging
import datetime as dt
from ..utils import deepupdate, DurationParser

try:
    import lupa.luajit21 as lupa
except ImportError:
    try:
        import lupa.lua54 as lupa
    except ImportError:
        try:
            import lupa.lua53 as lupa
        except ImportError:
            import lupa.lua as lupa

__TYPENAME__ = "DataTransformer"


"""
run parameter:
{
    "state": { ... },
    "data": { ... }
}
"""


class DataTransformerV1_0(FreeFlowExt):
    __typename__ = __TYPENAME__
    __version__ = "1.0"

    def __init__(self, name, transformer="", lua_func=[],
                 force=False, max_tasks=4):
        super().__init__(name, max_tasks=max_tasks)
        self._force = force
        self._env = self._create_safe_lua_env()

        self._env.globals().safe_env["now"] = self._dt_now_ts
        self._env.globals().safe_env["timedelta"] = self._dt_delta_ts
        self._env.globals().safe_env["dict"] = dict
        self._env.globals().safe_env["list"] = list

        for _name, _func in lua_func:
            self._env.globals().safe_env[_name] = _func

        self._transformer = self._env.globals().eval_safe(
            "\n".join(["function f(state, data)",
                       transformer,
                       "return state, data",
                       "end"]))

        assert (self._transformer is not None)
        self._logger = logging.getLogger(".".join([__name__, self.__typename__,
                                                   self._name]))

    def __str__(self):
        return "{typ}(name: {n}, version: {v})".format(
            typ=self.__typename__, n=self._name, v=self.__version__)

    def _create_safe_lua_env(self):
        lua = lupa.LuaRuntime(unpack_returned_tuples=True)

        lua.execute("""
        if not table.find then
            function table.find(t, value, start_index)
                local si = start_index or 1
                for i = si, #t do
                    if t[i] == value then
                        return i
                    end
                end
                return nil
            end
        end

        if not table.find_value then
            function table.find_key(t, key)
                for k, v in pairs(t) do
                    if k == key then
                        return v
                    end
                end
                return nil
            end
        end

        if not table.find_key then
            function table.find_key(t, value)
                for k, v in pairs(t) do
                    if v == value then
                        return k
                    end
                end
                return nil
            end
        end

        if not string.split then
            function string.split(s, pattern)
                local t = {}

                -- If no pattern is given, use the default split on words
                if not pattern then
                    for m in string.gmatch(s, "%S+") do
                        table.insert(t, m)
                    end
                    return t
                end

                -- If the pattern is a single character (not a regex),
                -- treat it as a separator
                if #pattern == 1 and pattern:match("^[%w%p%s]$") then
                    for m in string.gmatch(s, "([^" .. pattern:gsub("[%(%)%.%+%-%*%?%[%]%^%$%%]", "%%%1") .. "]+)") do
                        if m ~= "" then -- skip empty strings
                            table.insert(t, m)
                        end
                    end
                else
                    -- For complex regex patterns, use the pattern directly
                    for m in string.gmatch(s, pattern) do
                        table.insert(t, m)
                    end
                end

                return t
            end
        end

        safe_env = {
            assert = assert,
            pairs = pairs,
            ipairs = ipairs,
            next = next,
            type = type,
            tostring = tostring,
            tonumber = tonumber,
            string = string,
            math = math,
            table = table,
            print = print,
            lookup_object_by_key = lookup_object_by_key,
            NIL = setmetatable({},{__tostring=function() return "nil" end}),
        }

        function eval_safe(code)
            local f, e = load(code, "config", "t", safe_env)
            if not f then
                print(e)
                return nil
            end
            f()
            return safe_env.f
        end
        """)

        return lua

    def _dt_now_ts(self):
        return int(dt.datetime.now(dt.UTC).timestamp())

    def _dt_delta_ts(self, duration):
        return DurationParser.parse(duration)

    def _lua_nil_to_none(self, x):
        return str(x) == "nil"

    def _lua_to_py(self, a):
        if lupa.lua_type(a) == "table":
            if self._lua_nil_to_none(a):
                return None

            if len(a) == 0:
                return {k: self._lua_to_py(v) for k, v in a.items()}
            elif all([isinstance(x, int) for x in a.keys()]):
                return [self._lua_to_py(v) for v in a.values()]
            else:
                return {}
        else:
            return a

    def _py_to_lua(self, a):
        if isinstance(a, dict):
            return self._env.table_from({k: self._py_to_lua(v) for k, v in a.items()})
        elif isinstance(a, (list, tuple)):
            return self._env.table_from([self._py_to_lua(v) for v in a])
        else:
            return a

    async def run(self, state, data=({}, 0)):
        if isinstance(data, list):
            _data = [x[0] for x in data if x[1] == 0]
            err = len(_data) == 0
        else:
            _data = data[0]
            err = data[1] != 0

        if err:
            return state, (None, 103)

        try:
            s, d = self._transformer(self._py_to_lua(state),
                                     self._py_to_lua(_data))

            s = self._lua_to_py(s)
            d = self._lua_to_py(d)

            deepupdate(state, s)
            if not self._force:
                return state, (d, 0)
            else:
                return state, d

        except Exception as ex:
            self._logger.error(ex)
            return state, (None, 101)
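A sketch of how the transformer might be driven (assuming lupa and a Lua runtime are installed; the Lua body and the operator name are illustrative, not taken from the package). run takes data as a (payload, error_code) tuple, merges whatever the Lua code writes into state back into the Python state dict, and returns state plus a (payload, error_code) tuple when force is left off.

import asyncio

from pyfreeflow.ext.data_transformer import DataTransformerV1_0

# This body is spliced into "function f(state, data) ... return state, data end".
LUA_BODY = """
data["count"] = data["count"] + 1
state["last_count"] = data["count"]
"""


async def main():
    t = DataTransformerV1_0("bump-counter", transformer=LUA_BODY)

    state, (result, code) = await t.run({}, ({"count": 1}, 0))
    print(state)         # e.g. {'last_count': 2}
    print(result, code)  # e.g. {'count': 2} 0


asyncio.run(main())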
pyfreeflow/ext/env_operator.py
ADDED
@@ -0,0 +1,23 @@
from .types import FreeFlowExt
import os
import logging

__TYPENAME__ = "EnvOperator"


class EnvOperator(FreeFlowExt):
    __typename__ = __TYPENAME__
    __version__ = "1.0"

    def __init__(self, name, vars=[], max_tasks=4):
        super().__init__(name, max_tasks=max_tasks)

        self._logger = logging.getLogger(".".join([__name__, self.__typename__,
                                                   self._name]))
        self._vars = vars

    async def do(self, state, data):
        rval = {}
        for v in self._vars:
            rval[v] = os.getenv(v)
        return state, (rval, 0)
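A sketch only: EnvOperator resolves the configured variable names against the process environment on every call and ignores the data argument; HOME and PATH are just example names.

import asyncio

from pyfreeflow.ext.env_operator import EnvOperator


async def main():
    op = EnvOperator("env", vars=["HOME", "PATH"])
    # Each requested name maps to os.getenv(name), or None if unset.
    state, (values, code) = await op.do({}, {})
    print(values, code)


asyncio.run(main())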
pyfreeflow/ext/file_operator.py
ADDED
@@ -0,0 +1,207 @@
from .types import FreeFlowExt
import aiofiles
import json
import yaml
import logging
from sys import version_info

__ANY_TYPENAME__ = "AnyFileOperator"
__JSON_TYPENAME__ = "JsonFileOperator"
__YAML_TYPENAME__ = "YamlFileOperator"
__TOML_TYPENAME__ = "TomlFileOperator"


class AnyFileOperator(FreeFlowExt):
    __typename__ = __ANY_TYPENAME__
    __version__ = "1.0"

    def __init__(self, name, max_tasks=4, binary=False):
        super().__init__(name, max_tasks=max_tasks)

        self._mode = "b" if binary else ""

        self._logger = logging.getLogger(".".join([__name__, self.__typename__,
                                                   self._name]))

        self._action = {
            "read": self._read,
            "write": self._write,
        }

    async def _read(self, path, raw):
        try:
            async with aiofiles.open(path, "r" + self._mode) as f:
                contents = await f.read()
                return contents, 0
        except Exception as ex:
            self._logger.error("Cannot load file '{}' {}".format(path, ex))
            return None, 102

    async def _write(self, path, raw):
        if self._mode == "b" and isinstance(raw, str):
            raw_ = raw.encode()
        else:
            raw_ = raw
        try:
            async with aiofiles.open(path, "w" + self._mode) as f:
                await f.write(raw_)
                return raw, 0
        except Exception as ex:
            self._logger.error("Cannot write json file '{}' {}".format(
                path, ex))
            return raw, 103

    async def do(self, state, data):
        op = data.get("op", "read")
        raw = data.get("data", {})
        path = data.get("path")
        rval = await self._action[op](path, raw)
        return state, rval


class JsonFileOperator(FreeFlowExt):
    __typename__ = __JSON_TYPENAME__
    __version__ = "1.0"

    def __init__(self, name, max_tasks=4):
        super().__init__(name, max_tasks=max_tasks)

        self._logger = logging.getLogger(".".join([__name__, self.__typename__,
                                                   self._name]))

        self._action = {
            "read": self._read,
            "write": self._write,
        }

    async def _read(self, path, raw):
        try:
            async with aiofiles.open(path, "r") as f:
                contents = await f.read()
                j = json.loads(contents)
                return j, 0
        except Exception as ex:
            self._logger.error("Cannot load json file '{}' {}".format(
                path, ex))
            return None, 102

    async def _write(self, path, raw):
        if not isinstance(raw, (dict, list, tuple)):
            self._logger.error("Invalid input format '{}'".format(type(raw)))
            return raw, 101

        try:
            async with aiofiles.open(path, "w") as f:
                await f.write(json.dumps(raw))
                return raw, 0
        except Exception as ex:
            self._logger.error("Cannot write json file '{}' {}".format(
                path, ex))
            return raw, 103

    async def do(self, state, data):
        op = data.get("op", "read")
        raw = data.get("data", {})
        path = data.get("path")
        rval = await self._action[op](path, raw)
        return state, rval


class YamlFileOperator(FreeFlowExt):
    __typename__ = __YAML_TYPENAME__
    __version__ = "1.0"

    def __init__(self, name, max_tasks=4):
        super().__init__(name, max_tasks=max_tasks)

        self._logger = logging.getLogger(".".join([__name__, self.__typename__,
                                                   self._name]))
        self._action = {
            "read": self._read,
            "write": self._write,
        }

    async def _read(self, path, raw):
        try:
            async with aiofiles.open(path, "r") as f:
                contents = await f.read()
                j = yaml.safe_load(contents)
                return j, 0
        except Exception as ex:
            self._logger.error("Cannot load yaml file '{}' {}".format(
                path, ex))
            return None, 102

    async def _write(self, path, raw):
        if not isinstance(raw, (dict, list, tuple)):
            self._logger.error("Invalid input format '{}'".format(type(raw)))
            return raw, 101

        try:
            async with aiofiles.open(path, "w") as f:
                await f.write(yaml.safe_dump(raw))
                return raw, 0
        except Exception as ex:
            self._logger.error("Cannot write yaml file '{}' {}".format(
                path, ex))
            return raw, 103

    async def do(self, state, data):
        op = data.get("op", "read")
        raw = data.get("data", {})
        path = data.get("path")
        rval = await self._action[op](path, raw)
        return state, rval


if version_info.major > 3 or (version_info.major == 3 and
                              version_info.minor > 10):
    import tomllib
    import tomli_w

    class TomlFileOperator(FreeFlowExt):
        __typename__ = __TOML_TYPENAME__
        __version__ = "1.0"

        def __init__(self, name, max_tasks=4):
            super().__init__(name, max_tasks=max_tasks)

            self._logger = logging.getLogger(
                ".".join([__name__, self.__typename__, self._name]))
            self._action = {
                "read": self._read,
                "write": self._write,
            }

        async def _read(self, path, raw):
            try:
                async with aiofiles.open(path, "r") as f:
                    contents = await f.read()
                    j = tomllib.loads(contents)
                    return j, 0
            except Exception as ex:
                self._logger.error("Cannot read toml file '{}' {}".format(
                    path, ex))
                return None, 102

        async def _write(self, path, raw):
            if not isinstance(raw, (dict, list, tuple)):
                self._logger.error("Invalid input format '{}'".format(
                    type(raw)))
                return raw, 101

            try:
                async with aiofiles.open(path, "w") as f:
                    await f.write(tomli_w.dump(raw))
                    return raw, 0
            except Exception as ex:
                self._logger.error("Cannot write toml file '{}' {}".format(
                    path, ex))
                return raw, 103

        async def do(self, state, data):
            op = data.get("op", "read")
            raw = data.get("data", {})
            path = data.get("path")
            rval = await self._action[op](path, raw)
            return state, rval
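To close with the same kind of sketch (not from the package docs): round-tripping a document with JsonFileOperator, where the path config.json and the operator name are arbitrary.

import asyncio

from pyfreeflow.ext.file_operator import JsonFileOperator


async def main():
    op = JsonFileOperator("json-file")

    # Write a Python object out as JSON, then read it back.
    state, (written, code) = await op.do({}, {"op": "write",
                                              "path": "config.json",
                                              "data": {"retries": 3}})
    state, (loaded, code) = await op.do(state, {"op": "read",
                                                "path": "config.json"})
    print(loaded, code)  # {'retries': 3} 0


asyncio.run(main())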