lfss 0.11.2__tar.gz → 0.11.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {lfss-0.11.2 → lfss-0.11.3}/PKG-INFO +2 -2
- {lfss-0.11.2 → lfss-0.11.3}/docs/Enviroment_variables.md +3 -1
- {lfss-0.11.2 → lfss-0.11.3}/frontend/api.js +66 -4
- {lfss-0.11.2 → lfss-0.11.3}/lfss/api/connector.py +17 -2
- {lfss-0.11.2 → lfss-0.11.3}/lfss/cli/cli.py +7 -7
- {lfss-0.11.2 → lfss-0.11.3}/lfss/eng/config.py +3 -1
- {lfss-0.11.2 → lfss-0.11.3}/lfss/eng/database.py +66 -6
- {lfss-0.11.2 → lfss-0.11.3}/lfss/eng/log.py +21 -20
- {lfss-0.11.2 → lfss-0.11.3}/lfss/svc/app_base.py +4 -1
- {lfss-0.11.2 → lfss-0.11.3}/lfss/svc/app_native.py +51 -4
- {lfss-0.11.2 → lfss-0.11.3}/pyproject.toml +2 -2
- {lfss-0.11.2 → lfss-0.11.3}/Readme.md +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/docs/Known_issues.md +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/docs/Permission.md +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/docs/Webdav.md +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/docs/changelog.md +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/frontend/index.html +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/frontend/info.css +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/frontend/info.js +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/frontend/login.css +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/frontend/login.js +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/frontend/popup.css +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/frontend/popup.js +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/frontend/scripts.js +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/frontend/state.js +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/frontend/styles.css +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/frontend/thumb.css +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/frontend/thumb.js +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/frontend/utils.js +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/lfss/api/__init__.py +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/lfss/cli/__init__.py +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/lfss/cli/balance.py +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/lfss/cli/log.py +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/lfss/cli/panel.py +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/lfss/cli/serve.py +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/lfss/cli/user.py +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/lfss/cli/vacuum.py +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/lfss/eng/__init__.py +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/lfss/eng/bounded_pool.py +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/lfss/eng/connection_pool.py +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/lfss/eng/datatype.py +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/lfss/eng/error.py +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/lfss/eng/thumb.py +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/lfss/eng/utils.py +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/lfss/sql/init.sql +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/lfss/sql/pragma.sql +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/lfss/svc/app.py +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/lfss/svc/app_dav.py +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/lfss/svc/common_impl.py +0 -0
- {lfss-0.11.2 → lfss-0.11.3}/lfss/svc/request_log.py +0 -0
**{lfss-0.11.2 → lfss-0.11.3}/PKG-INFO**

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: lfss
-Version: 0.11.2
+Version: 0.11.3
 Summary: Lightweight file storage service
 Home-page: https://github.com/MenxLi/lfss
 Author: Li, Mengxun
@@ -10,7 +10,7 @@ Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
-Requires-Dist: aiofiles (==…)
+Requires-Dist: aiofiles (==24.*)
 Requires-Dist: aiosqlite (==0.*)
 Requires-Dist: fastapi (==0.*)
 Requires-Dist: mimesniff (==1.*)
```
**{lfss-0.11.2 → lfss-0.11.3}/docs/Enviroment_variables.md**

```diff
@@ -4,8 +4,10 @@
 **Server**
 - `LFSS_DATA`: The directory to store the data. Default is `.storage_data`.
 - `LFSS_WEBDAV`: Enable WebDAV support. Default is `0`, set to `1` to enable.
-- `LFSS_LARGE_FILE`: The size limit of the file to store in the database. Default is `…`
+- `LFSS_LARGE_FILE`: The size limit of the file to store in the database. Default is `1m`.
 - `LFSS_DEBUG`: Enable debug mode for more verbose logging. Default is `0`, set to `1` to enable.
+- `LFSS_DISABLE_LOGGING`: Disable all file logging. Default is `0`; set to `1` to disable file logging.
+- `LFSS_ORIGIN`: The `Origin` header to allow CORS requests. Use `,` to separate multiple origins. Default is `*`.
 
 **Client**
 - `LFSS_ENDPOINT`: The fallback server endpoint. Default is `http://localhost:8000`.
```
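A hedged sketch of configuring these variables when launching the server from Python. The app import string is an assumption (lfss also ships CLI entry points such as `lfss/cli/serve.py`, unchanged in this release). Note that the docs above spell `LFSS_DISABLE_LOGGING` and `LFSS_ORIGIN`, while the code elsewhere in this diff reads `DISABLE_LOGGING` and `LFSS_ORIGINS`, so the sketch sets both spellings:

```python
import os

# Set config before the app module is imported; lfss.eng.config reads the
# environment at import time. All values here are illustrative.
for key, value in {
    "LFSS_DATA": "/var/lib/lfss",
    "LFSS_LARGE_FILE": "1m",
    "DISABLE_LOGGING": "1", "LFSS_DISABLE_LOGGING": "1",
    "LFSS_ORIGINS": "https://app.example.com", "LFSS_ORIGIN": "https://app.example.com",
}.items():
    os.environ.setdefault(key, value)

import uvicorn

uvicorn.run("lfss.svc.app:app", host="0.0.0.0", port=8000)  # assumed app path
```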
**{lfss-0.11.2 → lfss-0.11.3}/frontend/api.js**

```diff
@@ -69,6 +69,10 @@ export default class Connector {
     /**
      * @param {string} path - the path to the file (url)
      * @param {File} file - the file to upload
+     * @param {Object} [options] - Optional upload configuration.
+     * @param {'abort' | 'overwrite' | 'skip'} [options.conflict='abort'] - Conflict resolution strategy:
+     *   `'abort'` to cancel and raise 409, `'overwrite'` to replace.
+     * @param {number} [options.permission=0] - Optional permission setting for the file (refer to backend impl).
      * @returns {Promise<string>} - the promise of the request, the url of the file
      */
     async put(path, file, {
@@ -96,8 +100,12 @@ export default class Connector {
     }
 
     /**
-     * @param {string} path - the path to the file (url)
+     * @param {string} path - the path to the file (url), should end with .json
      * @param {File} file - the file to upload
+     * @param {Object} [options] - Optional upload configuration.
+     * @param {'abort' | 'overwrite' | 'skip'} [options.conflict='abort'] - Conflict resolution strategy:
+     *   `'abort'` to cancel and raise 409, `'overwrite'` to replace, `'skip'` to ignore if already exists.
+     * @param {number} [options.permission=0] - Optional permission setting for the file (refer to backend impl).
      * @returns {Promise<string>} - the promise of the request, the url of the file
      */
     async post(path, file, {
@@ -129,13 +137,23 @@ export default class Connector {
 
     /**
      * @param {string} path - the path to the file (url), should end with .json
-     * @param {…
+     * @param {Object} data - the data to upload
+     * @param {Object} [options] - Optional upload configuration.
+     * @param {'abort' | 'overwrite' | 'skip'} [options.conflict='abort'] - Conflict resolution strategy:
+     *   `'abort'` to cancel and raise 409, `'overwrite'` to replace, `'skip'` to ignore if already exists.
+     * @param {number} [options.permission=0] - Optional permission setting for the file (refer to backend impl).
      * @returns {Promise<string>} - the promise of the request, the url of the file
      */
-    async putJson(path, data…
+    async putJson(path, data, {
+        conflict = "overwrite",
+        permission = 0
+    } = {}){
         if (!path.endsWith('.json')){ throw new Error('Upload object must end with .json'); }
         if (path.startsWith('/')){ path = path.slice(1); }
-        const …
+        const dst = new URL(this.config.endpoint + '/' + path);
+        dst.searchParams.append('conflict', conflict);
+        dst.searchParams.append('permission', permission);
+        const res = await fetch(dst.toString(), {
             method: 'PUT',
             headers: {
                 'Authorization': 'Bearer ' + this.config.token,
@@ -149,6 +167,50 @@ export default class Connector {
         return (await res.json()).url;
     }
 
+    /**
+     * @param {string} path - the path to the file (url), should have content type application/json
+     * @returns {Promise<Object>} - return the json object
+     */
+    async getJson(path){
+        if (path.startsWith('/')){ path = path.slice(1); }
+        const res = await fetch(this.config.endpoint + '/' + path, {
+            method: 'GET',
+            headers: {
+                "Authorization": 'Bearer ' + this.config.token
+            },
+        });
+        if (res.status != 200){
+            throw new Error(`Failed to get object, status code: ${res.status}, message: ${await fmtFailedResponse(res)}`);
+        }
+        return await res.json();
+    }
+
+    /**
+     * @param {string[]} paths - the paths to the files (url), should have content type plain/text, application/json, etc.
+     * @param {Object} [options] - Optional configuration.
+     * @param {boolean} [options.skipContent=false] - If true, skips fetching content and returns a record of <path, ''>.
+     * @returns {Promise<Record<string, string | null>>} - return the mapping of path to text content, non-existing paths will be ignored
+     */
+    async getMultipleText(paths, {
+        skipContent = false
+    } = {}){
+        const url = new URL(this.config.endpoint + '/_api/get-multiple');
+        url.searchParams.append('skip_content', skipContent);
+        for (const path of paths){
+            url.searchParams.append('path', path);
+        }
+        const res = await fetch(url.toString(), {
+            method: 'GET',
+            headers: {
+                "Authorization": 'Bearer ' + this.config.token,
+            }
+        });
+        if (res.status != 200 && res.status != 206){
+            throw new Error(`Failed to get multiple files, status code: ${res.status}, message: ${await fmtFailedResponse(res)}`);
+        }
+        return await res.json();
+    }
+
     async delete(path){
         if (path.startsWith('/')){ path = path.slice(1); }
         const res = await fetch(this.config.endpoint + '/' + path, {
```
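The `conflict` and `permission` options added above travel as plain query parameters, so the same behavior is reachable from any HTTP client, not only the bundled JS connector. A minimal sketch using Python's `requests`, where endpoint, token, and path are placeholder values; the JSON body and the returned `url` field follow the `putJson` code shown above:

```python
import requests

ENDPOINT = "http://localhost:8000"   # placeholder endpoint
TOKEN = "my-token"                   # placeholder token

# PUT a JSON object, mirroring Connector.putJson: conflict/permission go in
# the query string; 'overwrite' is the client-side default for putJson.
resp = requests.put(
    f"{ENDPOINT}/alice/notes/settings.json",   # hypothetical path
    params={"conflict": "overwrite", "permission": 0},
    headers={"Authorization": f"Bearer {TOKEN}"},
    json={"theme": "dark"},
)
resp.raise_for_status()
print(resp.json()["url"])   # the server returns the stored file's url
```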
**{lfss-0.11.2 → lfss-0.11.3}/lfss/api/connector.py**

```diff
@@ -1,7 +1,7 @@
 from __future__ import annotations
 from typing import Optional, Literal
 from collections.abc import Iterator
-import os
+import os, json
 import requests
 import requests.adapters
 import urllib.parse
@@ -76,7 +76,11 @@ class Connector:
         path = path[1:]
         path = ensure_uri_compnents(path)
         def f(**kwargs):
-            …
+            search_params_t = [
+                (k, str(v).lower() if isinstance(v, bool) else v)
+                for k, v in search_params.items()
+            ]   # tuple form
+            url = f"{self.config['endpoint']}/{path}" + "?" + urllib.parse.urlencode(search_params_t, doseq=True)
             headers: dict = kwargs.pop('headers', {})
             headers.update({
                 'Authorization': f"Bearer {self.config['token']}",
@@ -207,6 +211,17 @@ class Connector:
         assert response.headers['Content-Type'] == 'application/json'
         return response.json()
 
+    def get_multiple_text(self, *paths: str, skip_content = False) -> dict[str, Optional[str]]:
+        """
+        Gets text contents of multiple files at once. Non-existing files will return None.
+        - skip_content: if True, the file contents will not be fetched, always be empty string ''.
+        """
+        response = self._fetch_factory(
+            'GET', '_api/get-multiple',
+            {'path': paths, "skip_content": skip_content}
+        )()
+        return response.json()
+
     def delete(self, path: str):
         """Deletes the file at the specified path."""
         self._fetch_factory('DELETE', path)()
```
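A short usage sketch of the new Python client method. How `Connector` is constructed is not shown in this diff, so the example assumes the environment-variable fallback (`LFSS_ENDPOINT` and a token) described in docs/Enviroment_variables.md; the paths are placeholders:

```python
from lfss.api import Connector

# Assumes endpoint/token are picked up from the environment
# (see the client variables in docs/Enviroment_variables.md).
conn = Connector()

# Missing files come back as None, per the docstring above.
texts = conn.get_multiple_text("alice/a.txt", "alice/b.json", "alice/missing.txt")
for path, content in texts.items():
    print(path, "not found" if content is None else f"{len(content)} chars")

# With skip_content=True, existing files map to '' -- a cheap existence check.
found = [p for p, c in conn.get_multiple_text("alice/a.txt", skip_content=True).items()
         if c is not None]
```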
**{lfss-0.11.2 → lfss-0.11.3}/lfss/cli/cli.py**

```diff
@@ -1,5 +1,5 @@
 from pathlib import Path
-import argparse, typing
+import argparse, typing, sys
 from lfss.api import Connector, upload_directory, upload_file, download_file, download_directory
 from lfss.eng.datatype import FileReadPermission, FileSortKey, DirSortKey
 from lfss.eng.utils import decode_uri_compnents
@@ -78,9 +78,9 @@ def main():
             permission=args.permission
         )
         if failed_upload:
-            print("\033[91mFailed to upload:\033[0m")
+            print("\033[91mFailed to upload:\033[0m", file=sys.stderr)
             for path in failed_upload:
-                print(f"  {path}")
+                print(f"  {path}", file=sys.stderr)
     else:
         success, msg = upload_file(
             connector,
@@ -93,7 +93,7 @@ def main():
             permission=args.permission
         )
         if not success:
-            print("\033[91mFailed to upload: \033[0m", msg)
+            print("\033[91mFailed to upload: \033[0m", msg, file=sys.stderr)
 
     elif args.command == "download":
         is_dir = args.src.endswith("/")
@@ -107,9 +107,9 @@ def main():
             overwrite=args.overwrite
         )
         if failed_download:
-            print("\033[91mFailed to download:\033[0m")
+            print("\033[91mFailed to download:\033[0m", file=sys.stderr)
             for path in failed_download:
-                print(f"  {path}")
+                print(f"  {path}", file=sys.stderr)
     else:
         success, msg = download_file(
             connector,
@@ -121,7 +121,7 @@ def main():
             overwrite=args.overwrite
         )
        if not success:
-            print("\033[91mFailed to download: \033[0m", msg)
+            print("\033[91mFailed to download: \033[0m", msg, file=sys.stderr)
 
     elif args.command == "query":
         for path in args.path:
```
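Sending failure messages to `sys.stderr` keeps stdout reserved for machine-readable output, so shell redirection captures results without error noise. A minimal illustration (not lfss code; the CLI invocation in the comment is hypothetical):

```python
import sys

# With `cli download ... > results.txt`, only the result line lands in the
# file; the diagnostic stays on the terminal (or goes to `2> errors.log`).
print("downloaded/file.txt")                           # stdout: result
print("Failed to download: example", file=sys.stderr)  # stderr: diagnostic
```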
**{lfss-0.11.2 → lfss-0.11.3}/lfss/eng/config.py**

```diff
@@ -13,6 +13,8 @@ LARGE_BLOB_DIR = DATA_HOME / 'large_blobs'
 LARGE_BLOB_DIR.mkdir(exist_ok=True)
 LOG_DIR = DATA_HOME / 'logs'
 
+DISABLE_LOGGING = os.environ.get('DISABLE_LOGGING', '0') == '1'
+
 # https://sqlite.org/fasterthanfs.html
 __env_large_file = os.environ.get('LFSS_LARGE_FILE', None)
 if __env_large_file is not None:
@@ -24,4 +26,4 @@ CHUNK_SIZE = 1024 * 1024   # 1MB chunks for streaming (on large files)
 DEBUG_MODE = os.environ.get('LFSS_DEBUG', '0') == '1'
 
 THUMB_DB = DATA_HOME / 'thumbs.v0-11.db'
-THUMB_SIZE = (…
+THUMB_SIZE = (64, 64)
```
**{lfss-0.11.2 → lfss-0.11.3}/lfss/eng/database.py**

```diff
@@ -210,6 +210,10 @@ class FileConn(DBObjectBase):
         return self.parse_record(res)
 
     async def get_file_records(self, urls: list[str]) -> list[FileRecord]:
+        """
+        Get all file records with the given urls, only urls in the database will be returned.
+        If the urls are not in the database, they will be ignored.
+        """
         await self.cur.execute("SELECT * FROM fmeta WHERE url IN ({})".format(','.join(['?'] * len(urls))), urls)
         res = await self.cur.fetchall()
         if res is None:
@@ -412,6 +416,10 @@ class FileConn(DBObjectBase):
         self.logger.info(f"File {url} created")
 
     async def copy_file(self, old_url: str, new_url: str, user_id: Optional[int] = None):
+        """
+        Copy file from old_url to new_url,
+        if user_id is None, will not change the owner_id of the file. Otherwise, will change the owner_id to user_id.
+        """
         old = await self.get_file_record(old_url)
         if old is None:
             raise FileNotFoundError(f"File {old_url} not found")
@@ -428,14 +436,14 @@ class FileConn(DBObjectBase):
         self.logger.info(f"Copied file {old_url} to {new_url}")
 
     async def copy_dir(self, old_url: str, new_url: str, user_id: Optional[int] = None):
+        """
+        Copy all files under old_url to new_url,
+        if user_id is None, will not change the owner_id of the files. Otherwise, will change the owner_id to user_id.
+        """
         assert old_url.endswith('/'), "Old path must end with /"
         assert new_url.endswith('/'), "New path must end with /"
-        …
-        …
-            res = await cursor.fetchall()
-        else:
-            cursor = await self.cur.execute("SELECT * FROM fmeta WHERE url LIKE ? AND owner_id = ?", (old_url + '%', user_id))
-            res = await cursor.fetchall()
+        cursor = await self.cur.execute("SELECT * FROM fmeta WHERE url LIKE ?", (old_url + '%', ))
+        res = await cursor.fetchall()
         for r in res:
             old_record = FileRecord(*r)
             new_r = new_url + old_record.url[len(old_url):]
@@ -826,6 +834,58 @@ class Database:
                 yield blob
             ret = blob_stream()
         return ret
+
+    async def read_files_bulk(
+        self, urls: list[str],
+        skip_content = False,
+        op_user: Optional[UserRecord] = None,
+    ) -> dict[str, Optional[bytes]]:
+        """
+        A frequent use case is to read multiple files at once,
+        this method will read all files in the list and return a dict of url -> blob.
+        if the file is not found, the value will be None.
+        - skip_content: if True, will not read the content of the file, resulting in a dict of url -> b''
+
+        may raise StorageExceededError if the total size of the files exceeds MAX_MEM_FILE_BYTES
+        """
+        for url in urls:
+            validate_url(url)
+
+        async with unique_cursor() as cur:
+            fconn = FileConn(cur)
+            file_records = await fconn.get_file_records(urls)
+
+            if op_user is not None:
+                for r in file_records:
+                    if await check_path_permission(r.url, op_user, cursor=cur) >= AccessLevel.READ:
+                        continue
+                    is_allowed, reason = await check_file_read_permission(op_user, r, cursor=cur)
+                    if not is_allowed:
+                        raise PermissionDeniedError(f"Permission denied: {op_user.username} cannot read file {r.url}: {reason}")
+
+        # first check if the files are too big
+        sum_size = sum([r.file_size for r in file_records])
+        if not skip_content and sum_size > MAX_MEM_FILE_BYTES:
+            raise StorageExceededError(f"Unable to read files at once, total size {sum_size} exceeds {MAX_MEM_FILE_BYTES}")
+
+        self.logger.debug(f"Reading {len(file_records)} files{' (skip content)' if skip_content else ''}, getting {sum_size} bytes, from {urls}")
+        # read the file content
+        async with unique_cursor() as cur:
+            fconn = FileConn(cur)
+            blobs: dict[str, bytes] = {}
+            for r in file_records:
+                if skip_content:
+                    blobs[r.url] = b''
+                    continue
+
+                if r.external:
+                    blob_iter = fconn.get_file_blob_external(r.file_id)
+                    blob = b''.join([chunk async for chunk in blob_iter])
+                else:
+                    blob = await fconn.get_file_blob(r.file_id)
+                blobs[r.url] = blob
+
+        return {url: blobs.get(url, None) for url in urls}
 
     async def delete_file(self, url: str, op_user: Optional[UserRecord] = None) -> Optional[FileRecord]:
         validate_url(url)
```
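A hedged sketch of calling the new bulk read from application code; `db` stands for an initialized lfss `Database` instance and the urls are placeholders. Per the docstring, a batch whose total size exceeds `MAX_MEM_FILE_BYTES` raises `StorageExceededError`:

```python
# Hypothetical caller; `db` is an initialized lfss Database instance.
async def fetch_configs(db) -> dict[str, str]:
    blobs = await db.read_files_bulk(
        ["alice/app/a.json", "alice/app/b.json"],   # placeholder urls
        skip_content=False,
        op_user=None,   # None bypasses the per-user permission checks above
    )
    # Missing urls map to None; found files arrive as raw bytes.
    return {url: blob.decode("utf-8")
            for url, blob in blobs.items() if blob is not None}
```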
**{lfss-0.11.2 → lfss-0.11.3}/lfss/eng/log.py**

```diff
@@ -1,9 +1,9 @@
-from .config import LOG_DIR
+from .config import LOG_DIR, DISABLE_LOGGING
 import time, sqlite3, dataclasses
 from typing import TypeVar, Callable, Literal, Optional
 from concurrent.futures import ThreadPoolExecutor
 from functools import wraps
-import logging, …
+import logging, asyncio
 from logging import handlers
 
 class BCOLORS:
@@ -154,24 +154,25 @@ def get_logger(
     if isinstance(color, str) and color.startswith('\033'):
         format_str_plain = format_str_plain.replace(color, '')
 
-    …
+    if not DISABLE_LOGGING:
+        formatter_plain = logging.Formatter(format_str_plain)
+        log_home.mkdir(exist_ok=True)
+        log_file = log_home / f'{name}.log'
+        if file_handler_type == 'simple':
+            file_handler = logging.FileHandler(log_file)
+        elif file_handler_type == 'daily':
+            file_handler = handlers.TimedRotatingFileHandler(
+                log_file, when='midnight', interval=1, backupCount=30
+            )
+        elif file_handler_type == 'rotate':
+            file_handler = handlers.RotatingFileHandler(
+                log_file, maxBytes=1024*1024, backupCount=5
+            )
+        elif file_handler_type == 'sqlite':
+            file_handler = SQLiteFileHandler(log_file if log_file.suffix == '.db' else log_file.with_suffix('.log.db'))
+
+        file_handler.setFormatter(formatter_plain)
+        logger.addHandler(file_handler)
 
     logger = BaseLogger(name)
     setupLogger(logger)
```
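For context, the handler types selected above are plain stdlib `logging` classes; a self-contained sketch of the same `'daily'` rotation pattern (file name and format string are illustrative, not lfss's):

```python
import logging
from logging import handlers

logger = logging.getLogger("demo")
logger.setLevel(logging.INFO)

# Same pattern as the 'daily' branch above: rotate at midnight, keep 30 files.
handler = handlers.TimedRotatingFileHandler(
    "demo.log", when="midnight", interval=1, backupCount=30
)
handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
logger.addHandler(handler)
logger.info("file logging active unless DISABLE_LOGGING is set")
```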
**{lfss-0.11.2 → lfss-0.11.3}/lfss/svc/app_base.py**

```diff
@@ -60,10 +60,13 @@ def handle_exception(fn):
             raise
     return wrapper
 
+env_origins = os.environ.get("LFSS_ORIGINS", "*")
+logger.debug(f"LFSS_ORIGINS: {env_origins}")
+origins = [x.strip() for x in env_origins.split(",") if x.strip()]
 app = FastAPI(docs_url=None, redoc_url=None, lifespan=lifespan)
 app.add_middleware(
     CORSMiddleware,
-    allow_origins=…
+    allow_origins=origins,
     allow_credentials=True,
     allow_methods=["*"],
     allow_headers=["*"],
```
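The origin list is a plain comma split with whitespace stripping and empty entries dropped, so trailing commas are harmless. For example (illustrative values):

```python
env_origins = "https://app.example.com, https://admin.example.com,"
origins = [x.strip() for x in env_origins.split(",") if x.strip()]
assert origins == ["https://app.example.com", "https://admin.example.com"]
# The default "*" yields ["*"], i.e. every origin is allowed.
```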
**{lfss-0.11.2 → lfss-0.11.3}/lfss/svc/app_native.py**

```diff
@@ -1,17 +1,19 @@
-from typing import Optional, Literal
+from typing import Optional, Literal, Annotated
+from collections import OrderedDict
 
-from fastapi import Depends, Request, Response, UploadFile
-from fastapi.responses import StreamingResponse
+from fastapi import Depends, Request, Response, UploadFile, Query
+from fastapi.responses import StreamingResponse, JSONResponse
 from fastapi.exceptions import HTTPException
 
 from ..eng.utils import ensure_uri_compnents
 from ..eng.config import MAX_MEM_FILE_BYTES
 from ..eng.connection_pool import unique_cursor
-from ..eng.database import check_file_read_permission, check_path_permission, …
+from ..eng.database import check_file_read_permission, check_path_permission, FileConn, delayed_log_access
 from ..eng.datatype import (
     FileReadPermission, UserRecord, AccessLevel,
     FileSortKey, DirSortKey
 )
+from ..eng.error import InvalidPathError
 
 from .app_base import *
 from .common_impl import get_impl, put_file_impl, post_file_impl, delete_impl, copy_impl
@@ -189,6 +191,7 @@ async def validate_path_read_permission(path: str, user: UserRecord):
     if not await check_path_permission(path, user) >= AccessLevel.READ:
         raise HTTPException(status_code=403, detail="Permission denied")
 @router_api.get("/count-files")
+@handle_exception
 async def count_files(path: str, flat: bool = False, user: UserRecord = Depends(registered_user)):
     await validate_path_read_permission(path, user)
     path = ensure_uri_compnents(path)
@@ -196,6 +199,7 @@ async def count_files(path: str, flat: bool = False, user: UserRecord = Depends(
         fconn = FileConn(conn)
         return { "count": await fconn.count_dir_files(url = path, flat = flat) }
 @router_api.get("/list-files")
+@handle_exception
 async def list_files(
     path: str, offset: int = 0, limit: int = 1000,
     order_by: FileSortKey = "", order_desc: bool = False,
@@ -212,6 +216,7 @@ async def list_files(
     )
 
 @router_api.get("/count-dirs")
+@handle_exception
 async def count_dirs(path: str, user: UserRecord = Depends(registered_user)):
     await validate_path_read_permission(path, user)
     path = ensure_uri_compnents(path)
@@ -219,6 +224,7 @@ async def count_dirs(path: str, user: UserRecord = Depends(registered_user)):
         fconn = FileConn(conn)
         return { "count": await fconn.count_path_dirs(url = path) }
 @router_api.get("/list-dirs")
+@handle_exception
 async def list_dirs(
     path: str, offset: int = 0, limit: int = 1000,
     order_by: DirSortKey = "", order_desc: bool = False,
@@ -232,6 +238,47 @@ async def list_dirs(
         url = path, offset = offset, limit = limit,
         order_by=order_by, order_desc=order_desc, skim=skim
     )
+
+# https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#query-parameter-list-multiple-values
+@router_api.get("/get-multiple")
+@handle_exception
+async def get_multiple_files(
+    path: Annotated[list[str], Query()],
+    skip_content: bool = False,
+    user: UserRecord = Depends(registered_user)
+    ):
+    """
+    Get multiple files by path.
+    Please note that the content is supposed to be text and are small enough to fit in memory.
+
+    Not existing files will have content null, and the response will be 206 Partial Content if not all files are found.
+    if skip_content is True, the content of the files will always be ''
+    """
+    for p in path:
+        if p.endswith("/"):
+            raise InvalidPathError(f"Path '{p}' must not end with /")
+
+    # here we unify the path, so need to keep a record of the inputs
+    # make output keys consistent with inputs
+    upath2path = OrderedDict[str, str]()
+    for p in path:
+        p_ = p if not p.startswith("/") else p[1:]
+        upath2path[ensure_uri_compnents(p_)] = p
+    upaths = list(upath2path.keys())
+
+    # get files
+    raw_res = await db.read_files_bulk(upaths, skip_content=skip_content, op_user=user)
+    for k in raw_res.keys():
+        await delayed_log_access(k)
+    partial_content = len(raw_res) != len(upaths)
+
+    return JSONResponse(
+        content = {
+            upath2path[k]: v.decode('utf-8') if v is not None else None for k, v in raw_res.items()
+        },
+        status_code = 206 if partial_content else 200
+    )
+
 
 @router_api.get("/whoami")
 @handle_exception
```
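At the HTTP level the endpoint takes a repeated `path` query parameter (the FastAPI pattern linked in the code above). A sketch with Python's `requests`, where endpoint, token, and paths are placeholders:

```python
import requests

ENDPOINT = "http://localhost:8000"   # placeholder endpoint
TOKEN = "my-token"                   # placeholder token

# A list value makes requests repeat the `path` parameter in the query string.
resp = requests.get(
    f"{ENDPOINT}/_api/get-multiple",
    params={"path": ["alice/a.txt", "alice/missing.txt"], "skip_content": "false"},
    headers={"Authorization": f"Bearer {TOKEN}"},
)
# 200: every path resolved; 206: at least one path missing (its value is null).
assert resp.status_code in (200, 206)
for p, content in resp.json().items():
    print(p, "missing" if content is None else f"{len(content)} chars")
```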
**{lfss-0.11.2 → lfss-0.11.3}/pyproject.toml**

```diff
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "lfss"
-version = "0.11.2"
+version = "0.11.3"
 description = "Lightweight file storage service"
 authors = ["Li, Mengxun <mengxunli@whu.edu.cn>"]
 readme = "Readme.md"
@@ -12,7 +12,7 @@ include = ["Readme.md", "docs/*", "frontend/*", "lfss/sql/*"]
 python = ">=3.10" # PEP-622
 requests = "2.*"
 aiosqlite = "0.*"
-aiofiles = "…"
+aiofiles = "24.*"
 mimesniff = "1.*"
 fastapi = "0.*"
 uvicorn = "0.*"
```
All remaining files listed above with `+0 -0` are unchanged between the two versions.