sqless 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
sqless-0.1.0/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 pro1515151515
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
sqless-0.1.0/PKG-INFO ADDED
@@ -0,0 +1,167 @@
1
+ Metadata-Version: 2.4
2
+ Name: sqless
3
+ Version: 0.1.0
4
+ Summary: An async HTTP server for SQLite, FileStorage and WebPage.
5
+ Home-page: https://github.com/pro1515151515/sqless
6
+ Author: pro1515151515
7
+ Author-email: pro1515151515@qq.com
8
+ Classifier: Development Status :: 3 - Alpha
9
+ Classifier: Intended Audience :: Developers
10
+ Classifier: License :: OSI Approved :: MIT License
11
+ Classifier: Operating System :: OS Independent
12
+ Classifier: Programming Language :: Python :: 3
13
+ Requires-Python: >=3.7
14
+ Description-Content-Type: text/markdown
15
+ License-File: LICENSE
16
+ Requires-Dist: aiosqlite>=0.17.0
17
+ Requires-Dist: aiohttp>=3.8.0
18
+ Requires-Dist: orjson>=3.6.0
19
+ Requires-Dist: aiofiles>=0.8.0
20
+ Dynamic: author
21
+ Dynamic: author-email
22
+ Dynamic: classifier
23
+ Dynamic: description
24
+ Dynamic: description-content-type
25
+ Dynamic: home-page
26
+ Dynamic: license-file
27
+ Dynamic: requires-dist
28
+ Dynamic: requires-python
29
+ Dynamic: summary
30
+
31
+ # sqless
32
+
33
+ An async HTTP server for SQLite, FileStorage and WebPage.
34
+
35
+ ## Description
36
+
37
+ sqless is a Python application that provides web service with local database and local file storage.
38
+
39
+ ## Installation
40
+
41
+ ```bash
42
+ pip install sqless
43
+ ```
44
+
45
+ ## Quick Start
46
+
47
+ ### Running the server
48
+
49
+ ```bash
50
+ sqless --host 127.0.0.1 --port 12239 --secret your-secret-key
51
+ ```
52
+
53
+ This will create `www` directory in the current directory, which is used for WebPage.
54
+ You can access the `www/index.html` at `http://127.0.0.1:12239/index.html`
55
+
56
+ It will also create `db` and `fs` directories in the current directory when saving data via the database API and file storage API.
57
+
58
+ ### Using the database API
59
+
60
+ ```python
61
+ import requests
62
+
63
+ # Set up the base URL and authentication
64
+ BASE_URL = "http://127.0.0.1:12239"
65
+ SECRET = "your-secret-key"
66
+ DB_TABLE = "users"
67
+
68
+ # Insert or update data
69
+ r = requests.post(
70
+ f"{BASE_URL}/db/{DB_TABLE}",
71
+ headers={"Authorization": f"Bearer {SECRET}"},
72
+ json={"key": "U001", "name": "Tom", "age": 14}
73
+ )
74
+
75
+ # Query data
76
+ r = requests.get(
77
+ f"{BASE_URL}/db/{DB_TABLE}/key = U001",
78
+ headers={"Authorization": f"Bearer {SECRET}"}
79
+ )
80
+
81
+ # Fuzzy query
82
+ r = requests.get(
83
+ f"{BASE_URL}/db/{DB_TABLE}/name like %om%?limit=10&page=1",
84
+ headers={"Authorization": f"Bearer {SECRET}"}
85
+ )
86
+
87
+ # Value query
88
+ r = requests.get(
89
+ f"{BASE_URL}/db/{DB_TABLE}/age > 10?limit=10&page=1",
90
+ headers={"Authorization": f"Bearer {SECRET}"}
91
+ )
92
+
93
+ # Delete data
94
+ r = requests.delete(
95
+ f"{BASE_URL}/db/{DB_TABLE}/key = U001",
96
+ headers={"Authorization": f"Bearer {SECRET}"}
97
+ )
98
+ ```
99
+
100
+ `DB_TABLE = "users"` will read/write records in `users` table in `db/default.sqlite`.
101
+ `DB_TABLE = "mall-users"` will read/write records in `users` table in `db/mall.sqlite`.
102
+ `DB_TABLE = "east-mall-users"` will read/write records in `users` table in `db/east/mall.sqlite`.
103
+
104
+ ### Using the FileStorage API
105
+ ```python
106
+ import requests
107
+
108
+ # Upload a file to ./fs/example.txt
109
+ with open("example.txt", "rb") as f:
110
+ r = requests.post(
111
+ f"{BASE_URL}/fs/example.txt",
112
+ headers={"Authorization": f"Bearer {SECRET}"},
113
+ files={"file": f}
114
+ )
115
+
116
+ # Check if a file exists
117
+ r = requests.get(
118
+ f"{BASE_URL}/fs/example.txt?check=1",
119
+ headers={"Authorization": f"Bearer {SECRET}"}
120
+ )
121
+
122
+ # Download a file
123
+ r = requests.get(
124
+ f"{BASE_URL}/fs/example.txt",
125
+ headers={"Authorization": f"Bearer {SECRET}"},
126
+ stream=True
127
+ )
128
+ with open("downloaded_example.txt", "wb") as f:
129
+ for chunk in r.iter_content(chunk_size=8192):
130
+ f.write(chunk)
131
+ ```
132
+
133
+ ### Using the Proxy API
134
+ ```python
135
+ import requests
136
+ import base64
137
+
138
+ payload = {
139
+ "method": "POST",
140
+ "url": "https://httpbin.org/post",
141
+ "headers": {
142
+ "User-Agent": "SQLESS-Client/1.0",
143
+ "Authorization": "Bearer mytoken"
144
+ },
145
+ "type": "form",
146
+ "data": {"foo": "bar"},
147
+ "files": [
148
+ {
149
+ "field": "file1",
150
+ "filename": "example.txt",
151
+ "content_type": "text/plain",
152
+ "base64": base64.b64encode(open("example.txt", "rb").read()).decode()
153
+ }
154
+ ]
155
+ }
156
+
157
+ r = requests.post(
158
+ f"{BASE_URL}/xmlhttpRequest",
159
+ headers={"Authorization": f"Bearer {SECRET}"},
160
+ json=payload
161
+ )
162
+ print(r.json())
163
+ ```
164
+
165
+ ## License
166
+
167
+ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
sqless-0.1.0/README.md ADDED
@@ -0,0 +1,137 @@
1
+ # sqless
2
+
3
+ An async HTTP server for SQLite, FileStorage and WebPage.
4
+
5
+ ## Description
6
+
7
+ sqless is a Python application that provides web service with local database and local file storage.
8
+
9
+ ## Installation
10
+
11
+ ```bash
12
+ pip install sqless
13
+ ```
14
+
15
+ ## Quick Start
16
+
17
+ ### Running the server
18
+
19
+ ```bash
20
+ sqless --host 127.0.0.1 --port 12239 --secret your-secret-key
21
+ ```
22
+
23
+ This will create `www` directory in the current directory, which is used for WebPage.
24
+ You can access the `www/index.html` at `http://127.0.0.1:12239/index.html`
25
+
26
+ It will also create `db` and `fs` directories in the current directory when saving data via the database API and file storage API.
27
+
28
+ ### Using the database API
29
+
30
+ ```python
31
+ import requests
32
+
33
+ # Set up the base URL and authentication
34
+ BASE_URL = "http://127.0.0.1:12239"
35
+ SECRET = "your-secret-key"
36
+ DB_TABLE = "users"
37
+
38
+ # Insert or update data
39
+ r = requests.post(
40
+ f"{BASE_URL}/db/{DB_TABLE}",
41
+ headers={"Authorization": f"Bearer {SECRET}"},
42
+ json={"key": "U001", "name": "Tom", "age": 14}
43
+ )
44
+
45
+ # Query data
46
+ r = requests.get(
47
+ f"{BASE_URL}/db/{DB_TABLE}/key = U001",
48
+ headers={"Authorization": f"Bearer {SECRET}"}
49
+ )
50
+
51
+ # Fuzzy query
52
+ r = requests.get(
53
+ f"{BASE_URL}/db/{DB_TABLE}/name like %om%?limit=10&page=1",
54
+ headers={"Authorization": f"Bearer {SECRET}"}
55
+ )
56
+
57
+ # Value query
58
+ r = requests.get(
59
+ f"{BASE_URL}/db/{DB_TABLE}/age > 10?limit=10&page=1",
60
+ headers={"Authorization": f"Bearer {SECRET}"}
61
+ )
62
+
63
+ # Delete data
64
+ r = requests.delete(
65
+ f"{BASE_URL}/db/{DB_TABLE}/key = U001",
66
+ headers={"Authorization": f"Bearer {SECRET}"}
67
+ )
68
+ ```
69
+
70
+ `DB_TABLE = "users"` will read/write records in `users` table in `db/default.sqlite`.
71
+ `DB_TABLE = "mall-users"` will read/write records in `users` table in `db/mall.sqlite`.
72
+ `DB_TABLE = "east-mall-users"` will read/write records in `users` table in `db/east/mall.sqlite`.
73
+
74
+ ### Using the FileStorage API
75
+ ```python
76
+ import requests
77
+
78
+ # Upload a file to ./fs/example.txt
79
+ with open("example.txt", "rb") as f:
80
+ r = requests.post(
81
+ f"{BASE_URL}/fs/example.txt",
82
+ headers={"Authorization": f"Bearer {SECRET}"},
83
+ files={"file": f}
84
+ )
85
+
86
+ # Check if a file exists
87
+ r = requests.get(
88
+ f"{BASE_URL}/fs/example.txt?check=1",
89
+ headers={"Authorization": f"Bearer {SECRET}"}
90
+ )
91
+
92
+ # Download a file
93
+ r = requests.get(
94
+ f"{BASE_URL}/fs/example.txt",
95
+ headers={"Authorization": f"Bearer {SECRET}"},
96
+ stream=True
97
+ )
98
+ with open("downloaded_example.txt", "wb") as f:
99
+ for chunk in r.iter_content(chunk_size=8192):
100
+ f.write(chunk)
101
+ ```
102
+
103
+ ### Using the Proxy API
104
+ ```python
105
+ import requests
106
+ import base64
107
+
108
+ payload = {
109
+ "method": "POST",
110
+ "url": "https://httpbin.org/post",
111
+ "headers": {
112
+ "User-Agent": "SQLESS-Client/1.0",
113
+ "Authorization": "Bearer mytoken"
114
+ },
115
+ "type": "form",
116
+ "data": {"foo": "bar"},
117
+ "files": [
118
+ {
119
+ "field": "file1",
120
+ "filename": "example.txt",
121
+ "content_type": "text/plain",
122
+ "base64": base64.b64encode(open("example.txt", "rb").read()).decode()
123
+ }
124
+ ]
125
+ }
126
+
127
+ r = requests.post(
128
+ f"{BASE_URL}/xmlhttpRequest",
129
+ headers={"Authorization": f"Bearer {SECRET}"},
130
+ json=payload
131
+ )
132
+ print(r.json())
133
+ ```
134
+
135
+ ## License
136
+
137
+ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
sqless-0.1.0/setup.cfg ADDED
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
sqless-0.1.0/setup.py ADDED
@@ -0,0 +1,35 @@
1
from setuptools import setup, find_packages

# Use the README as the long description rendered on PyPI.
with open("README.md", "r", encoding="utf-8") as readme:
    LONG_DESCRIPTION = readme.read()

setup(
    name="sqless",
    version="0.1.0",
    author="pro1515151515",
    author_email="pro1515151515@qq.com",
    description="An async HTTP server for SQLite, FileStorage and WebPage.",
    long_description=LONG_DESCRIPTION,
    long_description_content_type="text/markdown",
    url="https://github.com/pro1515151515/sqless",
    packages=find_packages(),
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 3",
    ],
    python_requires=">=3.7",
    # Runtime dependencies (async SQLite/HTTP/JSON/file I/O stack).
    install_requires=[
        "aiosqlite>=0.17.0",
        "aiohttp>=3.8.0",
        "orjson>=3.6.0",
        "aiofiles>=0.8.0",
    ],
    # `sqless` console command; NOTE(review): points at sqless.server:main,
    # which must exist in server.py — confirm it is defined there.
    entry_points={
        "console_scripts": [
            "sqless=sqless.server:main",
        ],
    },
)
@@ -0,0 +1,14 @@
1
+ """
2
+ sqless - An async HTTP server for SQLite, FileStorage and WebPage.
3
+ """
4
+
5
+ __version__ = "0.1.0"
6
+ __author__ = "pro1515151515"
7
+ __email__ = "pro1515151515@qq.com"
8
+
9
+ from .database import DB
10
+ from .server import run_server
11
+
12
def hello():
    """Return a fixed greeting; used to sanity-check the install."""
    greeting = "Hello from sqless!"
    return greeting
@@ -0,0 +1,382 @@
1
+ import os
2
+ import re
3
+ import asyncio
4
+ import aiosqlite
5
+ import orjson
6
+ import pickle
7
+ from collections import defaultdict
8
+
9
# Map Python value types to SQLite column affinities; anything unmapped
# is stored as a BLOB via encode().
type_map = {
    str: 'TEXT',
    int: 'INTEGER',
    float: 'REAL',
}
# Per-type value converters applied before binding query parameters.
# None means "bind the value as-is"; unmapped types fall back to encode().
value_map = {
    str: None,
    int: None,
    float: None,
}

# Optional numpy support: register numpy scalar types so they are stored
# as native INTEGER/REAL columns instead of pickled BLOBs.
# (The original `if 'np' in globals()` guard was always False because
# numpy was never imported, leaving this whole branch dead.)
try:
    import numpy as _np
except ImportError:  # numpy is optional at runtime
    _np = None

if _np is not None:
    for _t in (_np.integer, _np.int8, _np.int16, _np.int32, _np.int64,
               _np.uint8, _np.uint16, _np.uint32, _np.uint64):
        type_map[_t] = 'INTEGER'
        value_map[_t] = int
    for _t in (_np.floating, _np.float16, _np.float32, _np.float64):
        type_map[_t] = 'REAL'
        value_map[_t] = float
    del _t
50
+
51
+
52
def encode(obj):
    """Serialize *obj* into tagged bytes for storage in a BLOB column.

    Tag prefixes: b'B' raw bytes, b'J' orjson-encoded JSON, b'P' pickle
    fallback for anything orjson cannot serialize. None is returned
    unchanged so it round-trips as SQL NULL.

    Fix: the original guard was `type(obj) == None`, which compares a
    type object to None and is always False, so None was never returned
    early; use an identity check instead.
    """
    if obj is None:
        return None
    if isinstance(obj, bytes):
        return b'B' + obj
    try:
        return b'J' + orjson.dumps(obj, option=orjson.OPT_SERIALIZE_NUMPY)
    except Exception:
        # Not JSON-serializable: fall back to pickle.
        return b'P' + pickle.dumps(obj)
61
+
62
def decode(binary):
    """Inverse of encode(): restore a value from its tagged byte form."""
    # Non-bytes columns (INTEGER/TEXT/REAL/NULL) pass through untouched.
    if type(binary) is not bytes:
        return binary
    tag = binary[0]
    if tag == ord('J'):
        return orjson.loads(binary[1:])
    if tag == ord('P'):
        return pickle.loads(binary[1:])
    # b'B' (or unknown tag): the payload is the raw bytes after the tag.
    return binary[1:]
70
+
71
# Identifier pattern for table/column names: starts with a letter or
# underscore; may contain digits and hyphens; must not end with a hyphen.
# Fix: the original pattern required at least two characters, rejecting
# perfectly valid single-character names such as "a"; the tail is now
# optional.
identifier_re = re.compile(r"^[A-Za-z_](?:[A-Za-z0-9_\-]*[A-Za-z0-9])?$")


def valid_identifier(name: str) -> bool:
    """Return True if *name* is a safe SQL identifier per identifier_re."""
    return bool(name and identifier_re.fullmatch(name))


def sanitize_table_name(name: str):
    """Return *name* unchanged, raising ValueError if it is not safe."""
    if not valid_identifier(name):
        raise ValueError(f"Invalid identifier: {name}")
    return name
79
+
80
+ def split(s, sep=',', L="{[(\"'", R="}])\"'"):
81
+ stack = []
82
+ temp = ''
83
+ esc = False
84
+ for c in s:
85
+ if c == '\\':
86
+ esc = True
87
+ temp += c
88
+ continue
89
+ if not esc and c in R and stack:
90
+ if c == R[L.index(stack[-1])]:
91
+ stack.pop()
92
+ elif not esc and c in L:
93
+ stack.append(c)
94
+ elif c == sep and not stack:
95
+ if temp:
96
+ yield temp
97
+ temp = ''
98
+ continue
99
+ temp += c
100
+ esc = False
101
+ if temp:
102
+ yield temp
103
+
104
def parse_where(where_str):
    """Parse a limited where expression into (sql, params).

    Accepts `col op value [and|or col op value ...] [order by col [asc|desc], ...]`
    and returns a parameterized ("where ...", params) pair. On failure it
    returns ({'suc': False, 'msg': ...}, []) so callers can forward the
    error dict directly. The parser is intentionally conservative:
    suspicious characters are rejected outright, operators come from a
    fixed allowlist, and identifiers are validated.

    Fixes vs. the original:
    - `order by` was detected case-insensitively but split with a
      case-sensitive rsplit, so `ORDER BY` fell into the generic
      exception path; the split is now done via the lowercased index.
    - The operator alternation put `=` before `==`, so `a == 1` matched
      op `=` with value `= 1`; `==` is now tried first.
    - Removed a dead tokenization block whose result was never used.
    """
    if not where_str:
        return '', []
    s = where_str.strip()
    # Reject common SQL-injection characters / comment markers up front.
    if ';' in s or '--' in s or '/*' in s or '*/' in s:
        return {'suc': False, 'msg': 'Invalid where clause: contains forbidden characters'}, []
    try:
        # Split off a trailing "order by" clause, case-insensitively.
        lower = s.lower()
        if ' order by ' in lower:
            idx = lower.rindex(' order by ')
            where_part, order_part = s[:idx], s[idx + len(' order by '):]
        else:
            where_part, order_part = s, ''
        # Only these comparison operators are permitted.
        allowed_ops = {'=', '==', '!=', '<', '>', '<=', '>=', 'like', 'ilike', 'is'}
        # parts example: [cond1, connector, cond2, ...]
        parts = re.split(r'\s+(and|or)\s+', where_part, flags=re.IGNORECASE)
        sql_parts = []
        params = []
        i = 0
        while i < len(parts):
            cond = parts[i].strip()
            connector = parts[i + 1] if i + 1 < len(parts) else ''
            # Parse one condition into col / op / value ("==" before "=",
            # "<="/">=" before "<"/">", so the longest operator wins).
            m = re.match(
                r'^\s*([A-Za-z_][A-Za-z0-9_]*)\s*(==|=|!=|<=|>=|<|>|like|ilike|is)\s*(.+)$',
                cond, flags=re.I)
            if not m:
                return {'suc': False, 'msg': f"Invalid condition: {cond}"}, []
            col, op, val = m.group(1), m.group(2).lower(), m.group(3).strip()
            if op not in allowed_ops:
                return {'suc': False, 'msg': f"Operator not allowed: {op}"}, []
            if not valid_identifier(col):
                return {'suc': False, 'msg': f"Invalid column name: {col}"}, []
            # `is null` is emitted literally; everything else is bound as
            # a parameter (quotes around the value are stripped first).
            if val.lower() == 'null' and op == 'is':
                sql_parts.append(f"{col} is null")
            else:
                if (val.startswith("'") and val.endswith("'")) or (val.startswith('"') and val.endswith('"')):
                    raw = val[1:-1]
                else:
                    raw = val
                sql_parts.append(f"{col} {op} ?")
                params.append(raw)
            if connector:
                sql_parts.append(connector.lower())
            i += 2
        sql = "where " + " ".join(sql_parts)
        # Minimal "order by" parsing: validated column names with an
        # optional asc/desc direction, comma separated.
        order_clause = ''
        if order_part:
            cols = []
            for part in order_part.split(','):
                y = part.strip().split()
                if not y:
                    continue
                colname = y[0]
                if not valid_identifier(colname):
                    return {'suc': False, 'msg': f"Invalid order column: {colname}"}, []
                if len(y) == 1:
                    cols.append(colname)
                elif len(y) == 2 and y[1].lower() in ('asc', 'desc'):
                    cols.append(f"{colname} {y[1].lower()}")
                else:
                    return {'suc': False, 'msg': f"Invalid order clause: {part}"}, []
            if cols:
                order_clause = " order by " + ", ".join(cols)
        sql += order_clause
        return sql, params
    except Exception as e:
        return {'suc': False, 'msg': f"parse error: {e}"}, []
188
+
189
# ---------- DB class ----------
class DB:
    """Async wrapper around a single aiosqlite database file.

    Tables and columns are created lazily from the data handed to
    upsert(); values whose type is not in type_map are stored as tagged
    BLOBs via encode()/decode(). All methods return dicts of the form
    {'suc': bool, ...} except count(), which returns a bare int on
    success.
    """

    def __init__(self, path_db):
        # path_db: filesystem path of the sqlite file (created on connect).
        self.path_db = path_db
        self.conn = None  # aiosqlite connection, set by connect()
        # use per-table locks to reduce contention
        self._locks = defaultdict(asyncio.Lock)
        self._global_lock = asyncio.Lock()

    async def connect(self):
        """Open the database file, enabling WAL mode and dict-like rows."""
        os.makedirs(os.path.dirname(self.path_db) or '.', exist_ok=True)
        self.conn = await aiosqlite.connect(self.path_db)
        await self.conn.execute("PRAGMA journal_mode=WAL;")
        await self.conn.commit()
        self.conn.row_factory = aiosqlite.Row

    async def close(self):
        """Close the underlying connection if one was opened."""
        if self.conn:
            await self.conn.close()

    async def ensure_table_and_fields(self, table: str, data: dict, pkey='key'):
        """Create *table* and/or add any columns missing for *data*.

        Returns (True, 'ok') on success or (False, message) on failure.
        """
        # Validate identifiers (table, primary key and all column names).
        for name in [table, pkey] + list(data.keys()):
            if not name.isidentifier():
                return False, f"Illegal identifier: {name}"

        # Inspect the current schema (empty result if the table is missing).
        async with self.conn.execute(f"PRAGMA table_info({table});") as cursor:
            rows = await cursor.fetchall()

        is_pkey_exists = any(row[5] for row in rows)  # table_info column 5 = pk flag
        existing_fields = {row[1] for row in rows}    # table_info column 1 = name

        # Columns present in data but absent from the table, with their
        # SQLite affinity (unknown Python types become BLOB).
        add_fields = {
            k: type_map.get(type(v), 'BLOB')
            for k, v in data.items()
            if k not in existing_fields
        }

        if pkey in add_fields:
            add_fields[pkey] += ' PRIMARY KEY'
        elif not is_pkey_exists:
            # No primary key anywhere yet: synthesize an autoincrement one.
            add_fields = {pkey: 'INTEGER PRIMARY KEY AUTOINCREMENT', **add_fields}

        # Build the DDL: one CREATE for a new table, ALTERs otherwise.
        if not rows:
            sql_fields = ','.join(f'{k} {v}' for k, v in add_fields.items())
            sql = f"CREATE TABLE {table} ({sql_fields});"
        else:
            sql = '\n'.join(f"ALTER TABLE {table} ADD COLUMN {k} {v};" for k, v in add_fields.items())

        # Execute the DDL.
        try:
            await self.conn.executescript(sql)
            await self.conn.commit()
            return True,'ok'
        except Exception as e:
            return False,f"Ensuring fields error: {e}({sql})"

    async def upsert(self, table, data, key='key'):
        """Insert *data*, or update the row whose *key* column conflicts.

        Ensures the table/columns exist first (under the per-table lock),
        then performs an INSERT ... ON CONFLICT ... RETURNING and returns
        {'suc': True, 'data': decoded_row} or {'suc': False, 'msg': ...}.
        NOTE(review): RETURNING requires SQLite 3.35+ — confirm the
        deployment target.
        """
        if not isinstance(data, dict):
            return {"suc": False, "msg": "data must be a dict"}
        if key not in data:
            return {"suc": False, "msg": f"Missing key field: '{key}'"}
        try:
            table = sanitize_table_name(table)
        except ValueError as e:
            return {"suc": False, "msg": str(e)}
        lock = self._locks[table]
        async with lock:
            ok,msg = await self.ensure_table_and_fields(table, data, key)
            if not ok:
                return {"suc": False, "msg": msg}
            keys = []
            pins = []      # '?' placeholders, one per column
            values = []
            for k,v in data.items():
                keys.append(k)
                pins.append('?')
                # Converter for this value's type: None = bind as-is,
                # otherwise a callable (int/float/encode) applied first.
                L=value_map.get(type(v),encode)
                values.append(L(v) if L else v)
            updates = ", ".join([f"{k}=excluded.{k}" for k in keys])
            keys = ','.join(keys)
            pins = ','.join(pins)

            sql = f"""
            INSERT INTO {table} ({keys})
            VALUES ({pins})
            ON CONFLICT({key}) DO UPDATE SET {updates}
            RETURNING *;
            """
            try:
                async with self.conn.execute(sql, values) as cursor:
                    row = await cursor.fetchone()
                    await self.conn.commit()
                    if row:
                        return {'suc': True, 'data': {k: decode(row[k]) for k in row.keys()}}
                    return {'suc': True, 'data': {}}
            except aiosqlite.Error as e:
                return {'suc': False, 'msg': str(e), 'debug': sql}

    async def query(self, table, where='', limit=0, offset=0):
        """Select rows matching *where* (see parse_where), decoded.

        limit/offset of 0 mean "no LIMIT/OFFSET clause". Returns
        {'suc': True, 'data': [row, ...]} or an error dict.
        """
        try:
            table = sanitize_table_name(table)
        except ValueError as e:
            return {'suc': False, 'msg': str(e)}
        parsed = parse_where(where)
        # NOTE(review): both branches below are identical; parse_where
        # always returns a 2-tuple, so the isinstance check is redundant.
        if isinstance(parsed, tuple):
            sql, param = parsed
        else:
            sql, param = parsed
        # parse_where signals failure by putting an error dict in slot 0.
        if isinstance(sql, dict) and sql.get('suc') is False:
            return sql
        if limit > 0:
            sql += f" LIMIT {int(limit)}"
        if offset > 0:
            sql += f" OFFSET {int(offset)}"
        try:
            async with self.conn.execute(f"SELECT * from {table} {sql};", param) as cursor:
                rows = await cursor.fetchall()
                return {'suc': True, 'data': [{k: decode(row[k]) for k in row.keys()} for row in rows]}
        except aiosqlite.Error as e:
            return {'suc': False, 'msg': str(e), 'debug': sql}
        except Exception as e:
            return {'suc': False, 'msg': str(e)}

    async def count(self, table, where=''):
        """Count rows matching *where*.

        Returns a bare int on success (unlike the other methods) or an
        error dict if the table name / where clause is invalid.
        """
        try:
            table = sanitize_table_name(table)
        except ValueError as e:
            return {'suc': False, 'msg': str(e)}
        sql, param = parse_where(where)
        if isinstance(sql, dict) and sql.get('suc') is False:
            return sql
        async with self.conn.execute(f"SELECT count(*) from {table} {sql};", param) as cursor:
            return (await cursor.fetchone())[0]

    async def list(self, table, where='', total=None):
        """Async-generate all matching rows, fetching in pages of <=10.

        *total* may be passed when the caller already knows the row
        count; otherwise it is fetched via count(). Yields decoded row
        dicts; stops silently if counting or a page query fails.
        """
        limit = min(total, 10) if total else 10
        if not total:
            total = await self.count(table, where)
        # count() returned an error dict: end the generator quietly.
        if isinstance(total, dict) and total.get('suc') is False:
            return
        max_page, rest = divmod(total, limit)
        if rest != 0:
            max_page += 1
        for i in range(max_page):
            ret = await self.query(table, where, limit=limit, offset=i * limit)
            if not ret['suc']:
                break
            for row in ret['data']:
                yield row

    async def delete(self, table, where):
        """Delete rows matching *where*; returns the deleted row count."""
        try:
            table = sanitize_table_name(table)
        except ValueError as e:
            return {'suc': False, 'msg': str(e)}
        sql, param = parse_where(where)
        if isinstance(sql, dict) and sql.get('suc') is False:
            return sql
        try:
            async with self.conn.execute(f"DELETE FROM {table} {sql};", param) as cursor:
                await self.conn.commit()
                return {'suc': True, 'data': cursor.rowcount}
        except aiosqlite.Error as e:
            return {'suc': False, 'msg': str(e), 'debug': sql}

    async def columns(self, table):
        """Return the column names of *table* via PRAGMA table_info."""
        try:
            table = sanitize_table_name(table)
        except ValueError as e:
            return {'suc': False, 'msg': str(e)}
        try:
            async with self.conn.execute(f"PRAGMA table_info({table});") as cursor:
                rows = await cursor.fetchall()
                return {'suc': True, 'data': [row[1] for row in rows]}
        except aiosqlite.Error as e:
            return {'suc': False, 'msg': str(e)}
369
+
370
+ #if __name__ == '__main__':
371
+ # import asyncio
372
+ # async def main():
373
+ # db = DB(path_db = "your_database.db")
374
+ # await db.connect()
375
+ # r = await db.upsert("users", {"key": 'U0001', "name": "Tom", 'age':12, 'sex':'M', 'hobby':["football", 'basketball'],'meta':{"height": 1.75, "weight": 70}})
376
+ # print(r)
377
+ # r = await db.upsert("users", {"key": 'U0002', "name": "Jerry", 'age':8, 'sex':'M', 'hobby':["football", 'basketball'],'meta':{"height": 1.25, "weight": 30}})
378
+ # print(r)
379
+ # r = await db.count("users", 'meta like %"height":1.25%')
380
+ # print(r)
381
+ # await db.close()
382
+ # asyncio.run(main())
@@ -0,0 +1,286 @@
1
+ import os
2
+ import time
3
+ import base64
4
+ import asyncio
5
+ from aiohttp import web, ClientSession, FormData, ClientTimeout
6
+ import orjson
7
+ import aiofiles
8
+ from .database import DB
9
+ import re
10
path_src = os.path.dirname(os.path.abspath(__file__))
# ---------- Configuration (use env vars in production) ----------
DEFAULT_SECRET = os.environ.get("SQLESS_SECRET", None)


def num2time(t=None, f="%Y%m%d-%H%M%S"):
    """Format unix timestamp *t* (default: now) as a local-time string.

    Replaces a lambda assignment (PEP 8 E731) with a def; behavior is
    unchanged, including that a falsy t (None or 0) means "now".
    """
    return time.strftime(f, time.localtime(int(t if t else time.time())))


def tspToday():
    """Unix timestamp of today's midnight in UTC+8."""
    return int(time.time() // 86400 * 86400 - 8 * 3600)
16
+
17
# Identifier pattern shared with sqless.database: starts with a letter or
# underscore; may contain digits and hyphens; must not end with a hyphen.
# Fix: the original pattern required at least two characters, rejecting
# valid single-character names such as "a"; the tail is now optional.
identifier_re = re.compile(r"^[A-Za-z_](?:[A-Za-z0-9_\-]*[A-Za-z0-9])?$")


def valid_identifier(name: str) -> bool:
    """Return True if *name* is a safe SQL identifier per identifier_re."""
    return bool(name and identifier_re.fullmatch(name))


def sanitize_table_name(name: str):
    """Return *name* unchanged, raising ValueError if it is not safe."""
    if not valid_identifier(name):
        raise ValueError(f"Invalid identifier: {name}")
    return name
26
+
27
def check_path(path_file, path_base):
    """Resolve *path_file* and verify it stays inside *path_base*.

    Guards the fs/db handlers against directory traversal. Returns
    (True, resolved_path) when the resolved file path lies under the
    base directory, else (False, error_message).

    Fix: the base is now realpath-normalized too, so the commonpath
    comparison is correct even when the caller passes an un-normalized
    base; the overly broad `except Exception as e` (with unused e) is
    narrowed to the ValueError that os.path.commonpath actually raises.
    """
    base = os.path.realpath(path_base)
    normalized_path = os.path.realpath(path_file)
    try:
        if os.path.commonpath([base, normalized_path]) == base:
            return True, normalized_path
    except ValueError:
        # commonpath raises on a mix of absolute/relative paths
        # (or different drives on Windows): treat as unsafe.
        pass
    return False, f"unsafe path: {normalized_path}"
35
+
36
+
37
+
38
+ async def run_server(
39
+ host='0.0.0.0',
40
+ port=27018,
41
+ secret=DEFAULT_SECRET,
42
+ path_this = os.getcwd(),
43
+ max_filesize = 200, # MB
44
+ ):
45
+ path_base_db = os.path.realpath(f"{path_this}/db")
46
+ path_base_fs = os.path.realpath(f"{path_this}/fs")
47
+ if not secret:
48
+ print("[ERROR] Please set SQLESS_SECRET environment variable or pass --secret <secret>")
49
+ return
50
+
51
+ dbs = {}
52
+ async def get_db(db_key='default'):
53
+ if db_key not in dbs:
54
+ suc, path_db = check_path(f"{path_this}/db/{db_key}.sqlite", path_base_db)
55
+ if not suc:
56
+ return False, path_db
57
+ db = DB(path_db)
58
+ await db.connect()
59
+ dbs[db_key] = db
60
+ return dbs[db_key]
61
+
62
+ async def auth_middleware(app, handler):
63
+ async def middleware_handler(request):
64
+ try:
65
+ request['client_ip'] = request.headers.get('X-Real-IP',request.transport.get_extra_info('peername')[0])
66
+ except:
67
+ request['client_ip'] = 'unknown'
68
+ route = request.match_info.route
69
+ if route and getattr(route, "handler", None) == handle_static:
70
+ return await handler(request)
71
+ auth = request.headers.get('Authorization', '')
72
+ if not auth.startswith("Bearer "):
73
+ return web.Response(status=401, text='Unauthorized')
74
+ token = auth.split(" ", 1)[1].strip()
75
+ if token != secret:
76
+ return web.Response(status=401, text='Unauthorized')
77
+ return await handler(request)
78
+ return middleware_handler
79
+
80
+ async def handle_post_db(request):
81
+ db_table = request.match_info['db_table']
82
+ if request.content_type == 'application/json':
83
+ data = await request.json()
84
+ else:
85
+ post = await request.post()
86
+ data = dict(post)
87
+ db_key, table = os.path.split(db_table.replace('-', '/'))
88
+ db_key = db_key or 'default'
89
+ try:
90
+ table = sanitize_table_name(table)
91
+ except ValueError:
92
+ return web.Response(body=orjson.dumps({'suc': False, 'data': 'invalid table name'}), content_type='application/json')
93
+ db = await get_db(db_key)
94
+ if isinstance(db, tuple) and db[0] is False:
95
+ return web.Response(body=orjson.dumps({'suc': False, 'data': db[1]}), content_type='application/json')
96
+ print(f"[{num2time()}]{request['client_ip']}|POST {db_key}|{table}|{data}")
97
+ if not isinstance(data, dict):
98
+ return web.Response(body=orjson.dumps({'suc': False, 'data': 'invalid data type'}), content_type='application/json')
99
+ ret = await db.upsert(table, data, 'key')
100
+ return web.Response(body=orjson.dumps(ret), content_type='application/json')
101
+
102
+ async def handle_delete_db(request):
103
+ db_table = request.match_info['db_table']
104
+ db_key, table = os.path.split(db_table.replace('-', '/'))
105
+ db_key = db_key or 'default'
106
+ try:
107
+ table = sanitize_table_name(table)
108
+ except ValueError:
109
+ return web.Response(body=orjson.dumps({'suc': False, 'data': 'invalid table name'}), content_type='application/json')
110
+ db = await get_db(db_key)
111
+ where = request.match_info['where']
112
+ print(f"[{num2time()}]{request['client_ip']}|DELETE {db_key}|{table}|{where}")
113
+ ret = await db.delete(table, where)
114
+ return web.Response(body=orjson.dumps(ret), content_type='application/json')
115
+
116
+ async def handle_get_db(request):
117
+ db_table = request.match_info['db_table']
118
+ db_key, table = os.path.split(db_table.replace('-', '/'))
119
+ db_key = db_key or 'default'
120
+ try:
121
+ table = sanitize_table_name(table)
122
+ except ValueError:
123
+ return web.Response(body=orjson.dumps({'suc': False, 'data': 'invalid table name'}), content_type='application/json')
124
+ db = await get_db(db_key)
125
+ where = request.match_info['where']
126
+ page = max(int(request.query.get('page', 1)), 1)
127
+ limit = min(max(int(request.query.get('per_page', 20)), 0), 100)
128
+ offset = (page - 1) * limit
129
+ print(f"[{num2time()}]{request['client_ip']}|GET {db_key}|{table}|{where}?page={page}&per_page={limit}")
130
+ ret = await db.query(table, where, limit, offset)
131
+ if isinstance(ret, dict) and ret.get('suc') and limit > 1 and not offset:
132
+ cnt = await db.count(table, where)
133
+ ret['count'] = cnt
134
+ ret['max_page'], rest = divmod(ret['count'], limit)
135
+ if rest:
136
+ ret['max_page'] += 1
137
+ return web.Response(body=orjson.dumps(ret), content_type='application/json')
138
+
139
+ async def handle_get_fs(request):
140
+ suc, path_file = check_path(f"fs/{request.match_info['path_file']}", path_base_fs)
141
+ if suc and os.path.isfile(path_file):
142
+ if request.query.get('check') is not None:
143
+ print(f"[{num2time()}]{request['client_ip']}|CHECK {path_file}")
144
+ return web.Response(body=orjson.dumps({'suc': True}), content_type='application/json')
145
+ else:
146
+ print(f"[{num2time()}]{request['client_ip']}|DOWNLOAD {path_file}")
147
+ return web.FileResponse(path_file)
148
+ else:
149
+ if request.query.get('check') is not None:
150
+ return web.Response(body=orjson.dumps({'suc': False}), content_type='application/json')
151
+ else:
152
+ return web.Response(status=404, text='File not found')
153
+
154
async def handle_post_fs(request):
    """Store an uploaded multipart file at the requested ``fs/`` path.

    Every outcome is reported as a small JSON envelope
    ``{'suc': bool, 'data': str}``.
    """
    ok, target = check_path(f"fs/{request.match_info['path_file']}", path_base_fs)
    print(f"[{num2time()}]{request['client_ip']}|UPLOAD attempt {ok} {target}")

    def _reply(suc, data):
        # Uniform JSON response for all upload outcomes.
        return web.Response(body=orjson.dumps({'suc': suc, 'data': data}), content_type='application/json')

    if not ok:
        return _reply(False, 'Unsafe path')
    folder = os.path.dirname(target)
    if not os.path.exists(folder):
        os.makedirs(folder, exist_ok=True)
    reader = await request.multipart()
    field = await reader.next()
    if not field:
        return _reply(False, 'No file uploaded')
    try:
        # Stream the upload to disk chunk by chunk.
        async with aiofiles.open(target, 'wb') as fh:
            chunk = await field.read_chunk()
            while chunk:
                await fh.write(chunk)
                chunk = await field.read_chunk()
        try:
            # Strip execute bits so uploaded content can't be run in place.
            os.chmod(target, 0o644)
        except Exception:
            pass
        return _reply(True, 'File Saved')
    except Exception as e:
        return _reply(False, str(e))
182
+
183
async def handle_static(request):
    """Serve static pages from the ``www`` directory (default: index.html).

    Unlike the sibling fs/db handlers, the original did no path sanitization,
    so a request like ``/../secrets`` could escape the web root, and a missing
    file raised FileNotFoundError (HTTP 500). Both now yield a plain 404.
    """
    file = request.match_info.get('file') or 'index.html'
    base = os.path.realpath(f"{path_this}/www")
    target = os.path.realpath(os.path.join(base, file))
    # Path-traversal guard: the resolved target must stay inside www/.
    if target != base and not target.startswith(base + os.sep):
        return web.Response(status=404, text='File not found')
    if not os.path.isfile(target):
        return web.Response(status=404, text='File not found')
    return web.FileResponse(target)
186
+
187
async def handle_xmlhttpRequest(request):
    """Proxy an outbound HTTP request described by the JSON request body.

    The body may specify method, url, headers and either a plain payload or
    a multipart form (``type: 'form'``) with base64-encoded file parts. Any
    failure is reported as ``{'suc': False, 'text': <error>}``.
    """
    try:
        spec = await request.json()
        method = spec.get("method", "POST").upper()
        url = spec.get("url")
        if not url:
            return web.Response(body=orjson.dumps({"suc": False, "text": "no url"}), content_type='application/json')
        headers = spec.get("headers", {})
        if spec.get('type') == 'form':
            # Build a multipart form from plain fields plus decoded files.
            payload = FormData()
            for key, value in spec.get("data", {}).items():
                payload.add_field(key, value)
            for item in spec.get("files", []):
                payload.add_field(
                    name=item["field"],
                    value=base64.b64decode(item["base64"]),
                    filename=item["filename"],
                    content_type=item["content_type"],
                )
        else:
            payload = spec.get('data')
        # Bound the upstream call so a stalled server can't hang this handler.
        async with ClientSession(timeout=ClientTimeout(total=15)) as session:
            async with session.request(method, url, headers=headers, data=payload, allow_redirects=True) as resp:
                result = {
                    "suc": True,
                    "status": resp.status,
                    "text": await resp.text(),
                    "url": str(resp.url),
                }
                return web.Response(body=orjson.dumps(result), content_type='application/json')
    except Exception as e:
        return web.Response(body=orjson.dumps({"suc": False, "text": str(e)}), content_type='application/json')
223
+
224
+ app = web.Application(middlewares=[auth_middleware], client_max_size=max_filesize * 1024 ** 2)
225
+ app.router.add_post('/db/{db_table}', handle_post_db)
226
+ app.router.add_get('/db/{db_table}/{where:.*}', handle_get_db)
227
+ app.router.add_delete('/db/{db_table}/{where:.*}', handle_delete_db)
228
+ app.router.add_get('/fs/{path_file:.*}', handle_get_fs)
229
+ app.router.add_post('/fs/{path_file:.*}', handle_post_fs)
230
+ app.router.add_post('/xmlhttpRequest', handle_xmlhttpRequest)
231
+ app.router.add_get('/{file:.*}', handle_static)
232
+
233
+ runner = web.AppRunner(app)
234
+ await runner.setup()
235
+ site = web.TCPSite(runner, host, port)
236
+ await site.start()
237
+ print(f"Serving on http://{'127.0.0.1' if host == '0.0.0.0' else host}:{port}")
238
+ print(f"Serving at {path_this.replace('\\','/')}")
239
+ if not os.path.exists(f"{path_this}/www"):
240
+ os.makedirs(f"{path_this}/www")
241
+ if not os.path.exists(f"{path_this}/www/openapi.yaml"):
242
+ with open(f"{path_src}/openapi.yaml",'r',encoding='utf-8') as f:
243
+ txt = f.read()
244
+ with open(f"{path_this}/www/openapi.yaml",'w',encoding='utf-8') as f:
245
+ f.write(txt.replace('127.0.0.1:12239',f"{'127.0.0.1' if host == '0.0.0.0' else host}:{port}"))
246
+ if not os.path.exists(f"{path_this}/www/index.html"):
247
+ with open(f"{path_src}/docs.html",'r',encoding='utf-8') as f:
248
+ txt = f.read()
249
+ with open(f"{path_this}/www/index.html",'w',encoding='utf-8') as f:
250
+ f.write(txt)
251
+ stop_event = asyncio.Event()
252
+ try:
253
+ # simplified loop, exit on Cancelled/Error
254
+ while not stop_event.is_set():
255
+ await asyncio.sleep(86400)
256
+ except asyncio.CancelledError:
257
+ pass
258
+ finally:
259
+ print("Cleaning up...")
260
+ await runner.cleanup()
261
+ for db_key in list(dbs.keys()):
262
+ await dbs[db_key].close()
263
+ del dbs[db_key]
264
+
265
def main():
    """Command-line entry point: parse CLI options and run the server."""
    import argparse
    import asyncio

    parser = argparse.ArgumentParser(description='Run the sqless server')
    parser.add_argument('--host', default='127.0.0.1', help='Host to bind to (default: 127.0.0.1)')
    parser.add_argument('--port', type=int, default=12239, help='Port to bind to (default: 12239)')
    parser.add_argument('--secret', default=DEFAULT_SECRET, help='Secret for authentication')
    parser.add_argument('--path', default=os.getcwd(), help=f'Base path for database and file storage (default: {os.getcwd()})')
    parser.add_argument('--fsize', type=int, default=200, help='Max file size (in MB) allowed in POST /fs')
    opts = parser.parse_args()

    # Hand the parsed options straight to the async server runner.
    server = run_server(
        host=opts.host,
        port=opts.port,
        secret=opts.secret,
        path_this=opts.path,
        max_filesize=opts.fsize,
    )
    asyncio.run(server)
284
+
285
# Standard script entry guard: run the CLI only when executed directly.
if __name__ == "__main__":
    main()
@@ -0,0 +1,167 @@
1
+ Metadata-Version: 2.4
2
+ Name: sqless
3
+ Version: 0.1.0
4
+ Summary: An async HTTP server for SQLite, FileStorage and WebPage.
5
+ Home-page: https://github.com/pro1515151515/sqless
6
+ Author: pro1515151515
7
+ Author-email: pro1515151515@qq.com
8
+ Classifier: Development Status :: 3 - Alpha
9
+ Classifier: Intended Audience :: Developers
10
+ Classifier: License :: OSI Approved :: MIT License
11
+ Classifier: Operating System :: OS Independent
12
+ Classifier: Programming Language :: Python :: 3
13
+ Requires-Python: >=3.7
14
+ Description-Content-Type: text/markdown
15
+ License-File: LICENSE
16
+ Requires-Dist: aiosqlite>=0.17.0
17
+ Requires-Dist: aiohttp>=3.8.0
18
+ Requires-Dist: orjson>=3.6.0
19
+ Requires-Dist: aiofiles>=0.8.0
20
+ Dynamic: author
21
+ Dynamic: author-email
22
+ Dynamic: classifier
23
+ Dynamic: description
24
+ Dynamic: description-content-type
25
+ Dynamic: home-page
26
+ Dynamic: license-file
27
+ Dynamic: requires-dist
28
+ Dynamic: requires-python
29
+ Dynamic: summary
30
+
31
+ # sqless
32
+
33
+ An async HTTP server for SQLite, FileStorage and WebPage.
34
+
35
+ ## Description
36
+
37
+ sqless is a Python application that provides a web service with a local database and local file storage.
38
+
39
+ ## Installation
40
+
41
+ ```bash
42
+ pip install sqless
43
+ ```
44
+
45
+ ## Quick Start
46
+
47
+ ### Running the server
48
+
49
+ ```bash
50
+ sqless --host 127.0.0.1 --port 12239 --secret your-secret-key
51
+ ```
52
+
53
+ This will create a `www` directory in the current directory, which is used for the WebPage.
54
+ You can access the `www/index.html` at `http://127.0.0.1:12239/index.html`
55
+
56
+ It will also create `db` and `fs` directories in the current directory when saving data via the database API and file storage API.
57
+
58
+ ### Using the database API
59
+
60
+ ```python
61
+ import requests
62
+
63
+ # Set up the base URL and authentication
64
+ BASE_URL = "http://127.0.0.1:12239"
65
+ SECRET = "your-secret-key"
66
+ DB_TABLE = "users"
67
+
68
+ # Insert or update data
69
+ r = requests.post(
70
+ f"{BASE_URL}/db/{DB_TABLE}",
71
+ headers={"Authorization": f"Bearer {SECRET}"},
72
+ json={"key": "U001", "name": "Tom", "age": 14}
73
+ )
74
+
75
+ # Query data
76
+ r = requests.get(
77
+ f"{BASE_URL}/db/{DB_TABLE}/key = U001",
78
+ headers={"Authorization": f"Bearer {SECRET}"}
79
+ )
80
+
81
+ # Fuzzy query
82
+ r = requests.get(
83
+ f"{BASE_URL}/db/{DB_TABLE}/name like %om%?limit=10&page=1",
84
+ headers={"Authorization": f"Bearer {SECRET}"}
85
+ )
86
+
87
+ # Value query
88
+ r = requests.get(
89
+ f"{BASE_URL}/db/{DB_TABLE}/age > 10?limit=10&page=1",
90
+ headers={"Authorization": f"Bearer {SECRET}"}
91
+ )
92
+
93
+ # Delete data
94
+ r = requests.delete(
95
+ f"{BASE_URL}/db/{DB_TABLE}/key = U001",
96
+ headers={"Authorization": f"Bearer {SECRET}"}
97
+ )
98
+ ```
99
+
100
+ `DB_TABLE = "users"` will read/write records in `users` table in `db/default.sqlite`.
101
+ `DB_TABLE = "mall-users"` will read/write records in `users` table in `db/mall.sqlite`.
102
+ `DB_TABLE = "east-mall-users"` will read/write records in `users` table in `db/east/mall.sqlite`.
103
+
104
+ ### Using the FileStorage API
105
+ ```python
106
+ import requests
107
+
108
+ # Upload a file to ./fs/example.txt
109
+ with open("example.txt", "rb") as f:
110
+ r = requests.post(
111
+ f"{BASE_URL}/fs/example.txt",
112
+ headers={"Authorization": f"Bearer {SECRET}"},
113
+ files={"file": f}
114
+ )
115
+
116
+ # Check if a file exists
117
+ r = requests.get(
118
+ f"{BASE_URL}/fs/example.txt?check=1",
119
+ headers={"Authorization": f"Bearer {SECRET}"}
120
+ )
121
+
122
+ # Download a file
123
+ r = requests.get(
124
+ f"{BASE_URL}/fs/example.txt",
125
+ headers={"Authorization": f"Bearer {SECRET}"},
126
+ stream=True
127
+ )
128
+ with open("downloaded_example.txt", "wb") as f:
129
+ for chunk in r.iter_content(chunk_size=8192):
130
+ f.write(chunk)
131
+ ```
132
+
133
+ ### Using the Proxy API
134
+ ```python
135
+ import requests
136
+ import base64
137
+
138
+ payload = {
139
+ "method": "POST",
140
+ "url": "https://httpbin.org/post",
141
+ "headers": {
142
+ "User-Agent": "SQLESS-Client/1.0",
143
+ "Authorization": "Bearer mytoken"
144
+ },
145
+ "type": "form",
146
+ "data": {"foo": "bar"},
147
+ "files": [
148
+ {
149
+ "field": "file1",
150
+ "filename": "example.txt",
151
+ "content_type": "text/plain",
152
+ "base64": base64.b64encode(open("example.txt", "rb").read()).decode()
153
+ }
154
+ ]
155
+ }
156
+
157
+ r = requests.post(
158
+ f"{BASE_URL}/xmlhttpRequest",
159
+ headers={"Authorization": f"Bearer {SECRET}"},
160
+ json=payload
161
+ )
162
+ print(r.json())
163
+ ```
164
+
165
+ ## License
166
+
167
+ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
@@ -0,0 +1,12 @@
1
+ LICENSE
2
+ README.md
3
+ setup.py
4
+ sqless/__init__.py
5
+ sqless/database.py
6
+ sqless/server.py
7
+ sqless.egg-info/PKG-INFO
8
+ sqless.egg-info/SOURCES.txt
9
+ sqless.egg-info/dependency_links.txt
10
+ sqless.egg-info/entry_points.txt
11
+ sqless.egg-info/requires.txt
12
+ sqless.egg-info/top_level.txt
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ sqless = sqless.server:main
@@ -0,0 +1,4 @@
1
+ aiosqlite>=0.17.0
2
+ aiohttp>=3.8.0
3
+ orjson>=3.6.0
4
+ aiofiles>=0.8.0
@@ -0,0 +1 @@
1
+ sqless