lfss 0.9.4__py3-none-any.whl → 0.11.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- Readme.md +4 -4
- docs/Enviroment_variables.md +4 -2
- docs/Permission.md +4 -4
- docs/changelog.md +58 -0
- frontend/api.js +66 -4
- frontend/login.js +0 -1
- frontend/popup.js +18 -3
- frontend/scripts.js +46 -39
- frontend/utils.js +98 -1
- lfss/api/__init__.py +7 -4
- lfss/api/connector.py +47 -11
- lfss/cli/cli.py +9 -9
- lfss/cli/log.py +77 -0
- lfss/cli/vacuum.py +69 -19
- lfss/eng/config.py +7 -5
- lfss/eng/connection_pool.py +11 -7
- lfss/eng/database.py +346 -133
- lfss/eng/error.py +2 -0
- lfss/eng/log.py +91 -21
- lfss/eng/thumb.py +16 -23
- lfss/eng/utils.py +4 -5
- lfss/sql/init.sql +9 -4
- lfss/svc/app.py +1 -1
- lfss/svc/app_base.py +6 -2
- lfss/svc/app_dav.py +7 -7
- lfss/svc/app_native.py +90 -52
- lfss/svc/common_impl.py +5 -8
- {lfss-0.9.4.dist-info → lfss-0.11.4.dist-info}/METADATA +10 -8
- lfss-0.11.4.dist-info/RECORD +52 -0
- {lfss-0.9.4.dist-info → lfss-0.11.4.dist-info}/entry_points.txt +1 -0
- docs/Changelog.md +0 -27
- lfss-0.9.4.dist-info/RECORD +0 -51
- {lfss-0.9.4.dist-info → lfss-0.11.4.dist-info}/WHEEL +0 -0
lfss/eng/error.py
CHANGED
```diff
@@ -12,6 +12,8 @@ class InvalidPathError(LFSSExceptionBase, ValueError):...
 
 class DatabaseLockedError(LFSSExceptionBase, sqlite3.DatabaseError):...
 
+class DatabaseTransactionError(LFSSExceptionBase, sqlite3.DatabaseError):...
+
 class PathNotFoundError(LFSSExceptionBase, FileNotFoundError):...
 
 class FileDuplicateError(LFSSExceptionBase, FileExistsError):...
```
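Both sqlite-derived errors are surfaced to HTTP clients as 503 (see the `handle_exception` change in `lfss/svc/app_base.py` below), so a caller can treat 503 as retryable. A minimal client-side sketch, not part of the package; the URL and retry policy are illustrative assumptions:

```python
import time
import requests

def get_with_retry(url: str, retries: int = 3, backoff: float = 0.5) -> requests.Response:
    # Retry on 503, which lfss returns for DatabaseLockedError and the
    # new DatabaseTransactionError (transient database contention).
    resp = requests.get(url)
    for attempt in range(retries):
        if resp.status_code != 503:
            break
        time.sleep(backoff * 2 ** attempt)  # exponential backoff
        resp = requests.get(url)
    return resp
```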
lfss/eng/log.py
CHANGED
```diff
@@ -1,8 +1,9 @@
-from .config import
+from .config import LOG_DIR, DISABLE_LOGGING
+import time, sqlite3, dataclasses
 from typing import TypeVar, Callable, Literal, Optional
 from concurrent.futures import ThreadPoolExecutor
 from functools import wraps
-import logging,
+import logging, asyncio
 from logging import handlers
 
 class BCOLORS:
@@ -57,15 +58,81 @@ class BaseLogger(logging.Logger):
     @thread_wrap
     def error(self, *args, **kwargs): super().error(*args, **kwargs)
 
-
+class SQLiteFileHandler(logging.FileHandler):
+    def __init__(self, filename, *args, **kwargs):
+        super().__init__(filename, *args, **kwargs)
+        self._db_file = filename
+        self._buffer: list[logging.LogRecord] = []
+        self._buffer_size = 100
+        self._flush_interval = 10
+        self._last_flush = time.time()
+        conn = sqlite3.connect(self._db_file, check_same_thread=False)
+        conn.execute('PRAGMA journal_mode=WAL')
+        conn.execute('''
+            CREATE TABLE IF NOT EXISTS log (
+                id INTEGER PRIMARY KEY AUTOINCREMENT,
+                created TIMESTAMP,
+                created_epoch FLOAT,
+                name TEXT,
+                levelname VARCHAR(16),
+                level INTEGER,
+                message TEXT
+            )
+        ''')
+        conn.commit()
+        conn.close()
+
+    def flush(self):
+        def format_time(self, record: logging.LogRecord):
+            """ Create a time stamp """
+            return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(record.created))
+        self.acquire()
+        try:
+            conn = sqlite3.connect(self._db_file, check_same_thread=False)
+            conn.executemany('''
+                INSERT INTO log (created, created_epoch, name, levelname, level, message)
+                VALUES (?, ?, ?, ?, ?, ?)
+            ''', [
+                (format_time(self, record), record.created, record.name, record.levelname, record.levelno, record.getMessage())
+                for record in self._buffer
+            ])
+            conn.commit()
+            conn.close()
+            self._buffer.clear()
+            self._last_flush = time.time()
+        finally:
+            self.release()
+
+    def emit(self, record: logging.LogRecord):
+        self._buffer.append(record)
+        if len(self._buffer) > self._buffer_size or time.time() - self._last_flush > self._flush_interval:
+            self.flush()
+
+    def close(self):
+        self.flush()
+        return super().close()
+
+def eval_logline(row: sqlite3.Row):
+    @dataclasses.dataclass
+    class DBLogRecord:
+        id: int
+        created: str
+        created_epoch: float
+        name: str
+        levelname: str
+        level: int
+        message: str
+    return DBLogRecord(*row)
+
+_fh_T = Literal['rotate', 'simple', 'daily', 'sqlite']
 
 __g_logger_dict: dict[str, BaseLogger] = {}
 def get_logger(
     name = 'default',
-    log_home =
+    log_home = LOG_DIR,
     level = 'DEBUG',
     term_level = 'INFO',
-    file_handler_type: _fh_T = '
+    file_handler_type: _fh_T = 'sqlite',
     global_instance = True
 )->BaseLogger:
     if global_instance and name in __g_logger_dict:
@@ -87,22 +154,25 @@ def get_logger(
         if isinstance(color, str) and color.startswith('\033'):
             format_str_plain = format_str_plain.replace(color, '')
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    if not DISABLE_LOGGING:
+        formatter_plain = logging.Formatter(format_str_plain)
+        log_home.mkdir(exist_ok=True)
+        log_file = log_home / f'{name}.log'
+        if file_handler_type == 'simple':
+            file_handler = logging.FileHandler(log_file)
+        elif file_handler_type == 'daily':
+            file_handler = handlers.TimedRotatingFileHandler(
+                log_file, when='midnight', interval=1, backupCount=30
+            )
+        elif file_handler_type == 'rotate':
+            file_handler = handlers.RotatingFileHandler(
+                log_file, maxBytes=1024*1024, backupCount=5
+            )
+        elif file_handler_type == 'sqlite':
+            file_handler = SQLiteFileHandler(log_file if log_file.suffix == '.db' else log_file.with_suffix('.log.db'))
+
+        file_handler.setFormatter(formatter_plain)
+        logger.addHandler(file_handler)
 
     logger = BaseLogger(name)
     setupLogger(logger)
```
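The `sqlite` handler is now the default: records are buffered and flushed into a `log` table, and `eval_logline` maps a row back to a dataclass. A hedged sketch of querying such a store directly; the database path follows `get_logger`'s default naming (`<name>.log.db` under `LOG_DIR`) and is an assumption here, and the new `lfss/cli/log.py` added in this release is presumably the intended viewer:

```python
import sqlite3
from lfss.eng.log import eval_logline

conn = sqlite3.connect('logs/default.log.db')  # assumed location under LOG_DIR
try:
    rows = conn.execute(
        'SELECT id, created, created_epoch, name, levelname, level, message '
        'FROM log WHERE level >= 30 ORDER BY created_epoch DESC LIMIT 10'
    ).fetchall()
    for row in rows:
        rec = eval_logline(row)  # wraps the row in a DBLogRecord dataclass
        print(rec.created, rec.levelname, rec.message)
finally:
    conn.close()
```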
lfss/eng/thumb.py
CHANGED
```diff
@@ -11,47 +11,42 @@ from contextlib import asynccontextmanager
 async def _maybe_init_thumb(c: aiosqlite.Cursor):
     await c.execute('''
     CREATE TABLE IF NOT EXISTS thumbs (
-
-        ctime TEXT,
+        file_id CHAR(32) PRIMARY KEY,
         thumb BLOB
     )
     ''')
-    await c.execute('CREATE INDEX IF NOT EXISTS thumbs_path_idx ON thumbs (
+    await c.execute('CREATE INDEX IF NOT EXISTS thumbs_path_idx ON thumbs (file_id)')
 
-async def _get_cache_thumb(c: aiosqlite.Cursor,
+async def _get_cache_thumb(c: aiosqlite.Cursor, file_id: str) -> Optional[bytes]:
     res = await c.execute('''
-        SELECT
-    ''', (
+        SELECT thumb FROM thumbs WHERE file_id = ?
+    ''', (file_id, ))
     row = await res.fetchone()
     if row is None:
         return None
-
-    if row[0] != ctime:
-        await _delete_cache_thumb(c, path)
-        return None
-    blob: bytes = row[1]
+    blob: bytes = row[0]
     return blob
 
-async def _save_cache_thumb(c: aiosqlite.Cursor,
+async def _save_cache_thumb(c: aiosqlite.Cursor, file_id: str, raw_bytes: bytes) -> bytes:
     try:
         raw_img = Image.open(BytesIO(raw_bytes))
     except Exception:
-        raise InvalidDataError('Invalid image data for thumbnail: ' +
+        raise InvalidDataError('Invalid image data for thumbnail: ' + file_id)
     raw_img.thumbnail(THUMB_SIZE)
     img = raw_img.convert('RGB')
     bio = BytesIO()
     img.save(bio, 'JPEG')
     blob = bio.getvalue()
     await c.execute('''
-        INSERT OR REPLACE INTO thumbs (
-    ''', (
+        INSERT OR REPLACE INTO thumbs (file_id, thumb) VALUES (?, ?)
+    ''', (file_id, blob))
     await c.execute('COMMIT') # commit immediately
     return blob
 
-async def _delete_cache_thumb(c: aiosqlite.Cursor,
+async def _delete_cache_thumb(c: aiosqlite.Cursor, file_id: str):
     await c.execute('''
-        DELETE FROM thumbs WHERE
-    ''', (
+        DELETE FROM thumbs WHERE file_id = ?
+    ''', (file_id, ))
     await c.execute('COMMIT')
 
 @asynccontextmanager
@@ -75,15 +70,13 @@ async def get_thumb(path: str) -> Optional[tuple[bytes, str]]:
         r = await fconn.get_file_record(path)
 
     if r is None:
-        async with cache_cursor() as cur:
-            await _delete_cache_thumb(cur, path)
         raise FileNotFoundError(f'File not found: {path}')
     if not r.mime_type.startswith('image/'):
         return None
 
+    file_id = r.file_id
     async with cache_cursor() as cur:
-
-        thumb_blob = await _get_cache_thumb(cur, path, c_time)
+        thumb_blob = await _get_cache_thumb(cur, file_id)
         if thumb_blob is not None:
             return thumb_blob, "image/jpeg"
 
@@ -98,5 +91,5 @@ async def get_thumb(path: str) -> Optional[tuple[bytes, str]]:
         data = await fconn.get_file_blob(r.file_id)
         assert data is not None
 
-        thumb_blob = await _save_cache_thumb(cur,
+        thumb_blob = await _save_cache_thumb(cur, file_id, data)
         return thumb_blob, "image/jpeg"
```
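The cache key changes from path plus `ctime` to the content-addressed `file_id`, which removes the staleness check: presumably a rewritten file gets a new `file_id`, so a stale thumbnail can no longer be served. The conversion itself is unchanged; restated below as a standalone sketch (`THUMB_SIZE` is read from lfss's config, the value here is an assumed placeholder):

```python
from io import BytesIO
from PIL import Image

THUMB_SIZE = (48, 48)  # assumption; lfss reads the real value from its config

def make_thumb(raw_bytes: bytes) -> bytes:
    raw_img = Image.open(BytesIO(raw_bytes))
    raw_img.thumbnail(THUMB_SIZE)   # shrink in place, preserving aspect ratio
    img = raw_img.convert('RGB')    # JPEG has no alpha channel
    bio = BytesIO()
    img.save(bio, 'JPEG')
    return bio.getvalue()
```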
lfss/eng/utils.py
CHANGED
```diff
@@ -11,7 +11,6 @@ from concurrent.futures import ThreadPoolExecutor
 from typing import TypeVar, Callable, Awaitable
 from functools import wraps, partial
 from uuid import uuid4
-import os
 
 async def copy_file(source: str|pathlib.Path, destination: str|pathlib.Path):
     async with aiofiles.open(source, mode='rb') as src:
@@ -20,7 +19,7 @@ async def copy_file(source: str|pathlib.Path, destination: str|pathlib.Path):
             await dest.write(chunk)
 
 def hash_credential(username: str, password: str):
-    return hashlib.sha256(
+    return hashlib.sha256(f"{username}:{password}".encode()).hexdigest()
 
 def encode_uri_compnents(path: str):
     path_sp = path.split("/")
@@ -155,12 +154,12 @@ def fmt_storage_size(size: int) -> str:
         return f"{size/1024**4:.2f}T"
 
 _FnReturnT = TypeVar('_FnReturnT')
-_AsyncReturnT = Awaitable
+_AsyncReturnT = TypeVar('_AsyncReturnT', bound=Awaitable)
 _g_executor = None
 def get_global_executor():
     global _g_executor
     if _g_executor is None:
-        _g_executor = ThreadPoolExecutor(
+        _g_executor = ThreadPoolExecutor()
     return _g_executor
 def async_wrap(executor=None):
     if executor is None:
@@ -179,7 +178,7 @@ def concurrent_wrap(executor=None):
         def sync_fn(*args, **kwargs):
             loop = asyncio.new_event_loop()
             return loop.run_until_complete(func(*args, **kwargs))
-        return sync_fn
+        return sync_fn # type: ignore
     return _concurrent_wrap
 
 # https://stackoverflow.com/a/279586/6775765
```
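The completed `hash_credential` digests `username:password` with SHA-256; the standalone equivalent below illustrates the exact output format:

```python
import hashlib

def hash_credential(username: str, password: str) -> str:
    # sha256 over "username:password", hex-encoded
    return hashlib.sha256(f"{username}:{password}".encode()).hexdigest()

digest = hash_credential("alice", "hunter2")
assert len(digest) == 64  # 32 bytes as lowercase hex
```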
lfss/sql/init.sql
CHANGED
```diff
@@ -1,4 +1,4 @@
-CREATE TABLE IF NOT EXISTS user (
+CREATE TABLE IF NOT EXISTS main.user (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
     username VARCHAR(256) UNIQUE NOT NULL,
     credential VARCHAR(256) NOT NULL,
@@ -9,7 +9,7 @@ CREATE TABLE IF NOT EXISTS user (
     permission INTEGER DEFAULT 0
 );
 
-CREATE TABLE IF NOT EXISTS fmeta (
+CREATE TABLE IF NOT EXISTS main.fmeta (
     url VARCHAR(1024) PRIMARY KEY,
     owner_id INTEGER NOT NULL,
     file_id CHAR(32) NOT NULL,
@@ -22,12 +22,17 @@ CREATE TABLE IF NOT EXISTS fmeta (
     FOREIGN KEY(owner_id) REFERENCES user(id)
 );
 
-CREATE TABLE IF NOT EXISTS
+CREATE TABLE IF NOT EXISTS main.dupcount (
+    file_id CHAR(32) PRIMARY KEY,
+    count INTEGER DEFAULT 0
+);
+
+CREATE TABLE IF NOT EXISTS main.usize (
     user_id INTEGER PRIMARY KEY,
     size INTEGER DEFAULT 0
 );
 
-CREATE TABLE IF NOT EXISTS upeer (
+CREATE TABLE IF NOT EXISTS main.upeer (
     src_user_id INTEGER NOT NULL,
     dst_user_id INTEGER NOT NULL,
     access_level INTEGER DEFAULT 0,
```
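The new `dupcount` table appears to track how many `fmeta` rows reference each deduplicated blob. A sketch of inspecting it; direct access to the index database and its file name are assumptions for illustration:

```python
import sqlite3

conn = sqlite3.connect('index.db')  # assumed database file name
try:
    for file_id, count in conn.execute(
            'SELECT file_id, "count" FROM dupcount WHERE "count" > 1'):
        print(f'{file_id}: referenced by {count} entries')
finally:
    conn.close()
```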
lfss/svc/app.py
CHANGED
lfss/svc/app_base.py
CHANGED
```diff
@@ -27,7 +27,7 @@ req_conn = RequestDB()
 async def lifespan(app: FastAPI):
     global db
     try:
-        await global_connection_init(n_read =
+        await global_connection_init(n_read = 8 if not DEBUG_MODE else 1)
         await asyncio.gather(db.init(), req_conn.init())
         yield
         await req_conn.commit()
@@ -54,15 +54,19 @@ def handle_exception(fn):
         if isinstance(e, FileExistsError): raise HTTPException(status_code=409, detail=str(e))
         if isinstance(e, TooManyItemsError): raise HTTPException(status_code=400, detail=str(e))
         if isinstance(e, DatabaseLockedError): raise HTTPException(status_code=503, detail=str(e))
+        if isinstance(e, DatabaseTransactionError): raise HTTPException(status_code=503, detail=str(e))
         if isinstance(e, FileLockedError): raise HTTPException(status_code=423, detail=str(e))
         logger.error(f"Uncaptured error in {fn.__name__}: {e}")
         raise
     return wrapper
 
+env_origins = os.environ.get("LFSS_ORIGINS", "*")
+logger.debug(f"LFSS_ORIGINS: {env_origins}")
+origins = [x.strip() for x in env_origins.split(",") if x.strip()]
 app = FastAPI(docs_url=None, redoc_url=None, lifespan=lifespan)
 app.add_middleware(
     CORSMiddleware,
-    allow_origins=
+    allow_origins=origins,
     allow_credentials=True,
     allow_methods=["*"],
     allow_headers=["*"],
```
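CORS origins are now configurable through the `LFSS_ORIGINS` environment variable, a comma-separated list that defaults to allow-all. A runnable restatement of the parsing added above (the origin values are arbitrary examples):

```python
import os

os.environ["LFSS_ORIGINS"] = "https://example.com, https://app.example.com"

env_origins = os.environ.get("LFSS_ORIGINS", "*")
origins = [x.strip() for x in env_origins.split(",") if x.strip()]
assert origins == ["https://example.com", "https://app.example.com"]
# When the variable is unset, origins == ["*"], the previous allow-all behavior.
```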
lfss/svc/app_dav.py
CHANGED
```diff
@@ -57,9 +57,9 @@ async def eval_path(path: str) -> tuple[ptype, str, Optional[FileRecord | Direct
     if len(dir_path_sp) > 2:
         async with unique_cursor() as c:
             fconn = FileConn(c)
-            if await fconn.
+            if await fconn.count_dir_files(path, flat=True) == 0:
                 return None, lfss_path, None
-            return "dir", lfss_path, await fconn.
+            return "dir", lfss_path, await fconn.get_dir_record(path)
     else:
         # test if its a user's root directory
         assert len(dir_path_sp) == 2
@@ -85,8 +85,8 @@ async def eval_path(path: str) -> tuple[ptype, str, Optional[FileRecord | Direct
     async with unique_cursor() as c:
         lfss_path = path + "/"
         fconn = FileConn(c)
-        if await fconn.
-            return "dir", lfss_path, await fconn.
+        if await fconn.count_dir_files(lfss_path) > 0:
+            return "dir", lfss_path, await fconn.get_dir_record(lfss_path)
 
     return None, path, None
 
@@ -235,7 +235,7 @@ async def dav_propfind(request: Request, path: str, user: UserRecord = Depends(r
         # query root directory content
         async def user_path_record(user_name: str, cur) -> DirectoryRecord:
             try:
-                return await FileConn(cur).
+                return await FileConn(cur).get_dir_record(user_name + "/")
             except PathNotFoundError:
                 return DirectoryRecord(user_name + "/", size=0, n_files=0, create_time="1970-01-01 00:00:00", update_time="1970-01-01 00:00:00", access_time="1970-01-01 00:00:00")
 
@@ -253,7 +253,7 @@ async def dav_propfind(request: Request, path: str, user: UserRecord = Depends(r
     elif path_type == "dir":
         # query directory content
         async with unique_cursor() as c:
-            flist = await FileConn(c).
+            flist = await FileConn(c).list_dir_files(lfss_path, flat = True if depth == "infinity" else False)
         for frecord in flist:
             if frecord.url.endswith(f"/{MKDIR_PLACEHOLDER}"): continue
             file_el = await create_file_xml_element(frecord)
@@ -315,7 +315,7 @@ async def dav_move(request: Request, path: str, user: UserRecord = Depends(regis
         assert ptype == "dir", "Directory path should end with /"
         assert lfss_path.endswith("/"), "Directory path should end with /"
         if not dlfss_path.endswith("/"): dlfss_path += "/" # the header destination may not end with /
-        await db.
+        await db.move_dir(lfss_path, dlfss_path, user)
         return Response(status_code=201)
 
 @router_dav.api_route("/{path:path}", methods=["COPY"])
```
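Directory moves now go through `db.move_dir`. A hedged sketch of exercising the MOVE handler with `requests`; host, port, auth scheme, and paths are assumptions rather than documented values:

```python
import requests

resp = requests.request(
    "MOVE",
    "http://localhost:8000/alice/photos/",
    headers={
        "Destination": "http://localhost:8000/alice/archive/photos/",
        "Authorization": "Bearer <token>",  # placeholder credential
    },
)
print(resp.status_code)  # 201 on a successful directory move
```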
lfss/svc/app_native.py
CHANGED
```diff
@@ -1,16 +1,19 @@
-from typing import Optional, Literal
+from typing import Optional, Literal, Annotated
+from collections import OrderedDict
 
-from fastapi import Depends, Request, Response, UploadFile
+from fastapi import Depends, Request, Response, UploadFile, Query
+from fastapi.responses import StreamingResponse, JSONResponse
 from fastapi.exceptions import HTTPException
 
-from ..eng.config import MAX_BUNDLE_BYTES
 from ..eng.utils import ensure_uri_compnents
+from ..eng.config import MAX_MEM_FILE_BYTES
 from ..eng.connection_pool import unique_cursor
-from ..eng.database import check_file_read_permission, check_path_permission,
+from ..eng.database import check_file_read_permission, check_path_permission, FileConn, delayed_log_access
 from ..eng.datatype import (
-    FileReadPermission,
+    FileReadPermission, UserRecord, AccessLevel,
     FileSortKey, DirSortKey
 )
+from ..eng.error import InvalidPathError
 
 from .app_base import *
 from .common_impl import get_impl, put_file_impl, post_file_impl, delete_impl, copy_impl
@@ -81,48 +84,40 @@ async def delete_file(path: str, user: UserRecord = Depends(registered_user)):
 async def bundle_files(path: str, user: UserRecord = Depends(registered_user)):
     logger.info(f"GET bundle({path}), user: {user.username}")
     path = ensure_uri_compnents(path)
-
-
-    if
+    if not path.endswith("/"):
+        raise HTTPException(status_code=400, detail="Path must end with /")
+    if path[0] == "/": # adapt to both /path and path
         path = path[1:]
+    if path == "":
+        raise HTTPException(status_code=400, detail="Cannot bundle root")
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        raise HTTPException(status_code=400, detail="Too large to zip")
-
-    file_paths = [f.url for f in files]
-    zip_buffer = await db.zip_path(path, file_paths)
-    return Response(
-        content=zip_buffer.getvalue(), media_type="application/zip", headers={
-            "Content-Disposition": f"attachment; filename=bundle.zip",
-            "Content-Length": str(zip_buffer.getbuffer().nbytes)
-        }
-    )
+    async with unique_cursor() as cur:
+        dir_record = await FileConn(cur).get_dir_record(path)
+
+    pathname = f"{path.split('/')[-2]}"
+
+    if dir_record.size < MAX_MEM_FILE_BYTES:
+        logger.debug(f"Bundle {path} in memory")
+        dir_bytes = (await db.zip_dir(path, op_user=user)).getvalue()
+        return Response(
+            content = dir_bytes,
+            media_type = "application/zip",
+            headers = {
+                f"Content-Disposition": f"attachment; filename=bundle-{pathname}.zip",
+                "Content-Length": str(len(dir_bytes)),
+                "X-Content-Bytes": str(dir_record.size),
+            }
+        )
+    else:
+        logger.debug(f"Bundle {path} in stream")
+        return StreamingResponse(
+            content = await db.zip_dir_stream(path, op_user=user),
+            media_type = "application/zip",
+            headers = {
+                f"Content-Disposition": f"attachment; filename=bundle-{pathname}.zip",
+                "X-Content-Bytes": str(dir_record.size),
+            }
+        )
 
 @router_api.get("/meta")
 @handle_exception
@@ -135,15 +130,13 @@ async def get_file_meta(path: str, user: UserRecord = Depends(registered_user)):
     if is_file:
         record = await fconn.get_file_record(path, throw=True)
         if await check_path_permission(path, user, cursor=cur) < AccessLevel.READ:
-
-            owner = await uconn.get_user_by_id(record.owner_id, throw=True)
-            is_allowed, reason = check_file_read_permission(user, owner, record)
+            is_allowed, reason = await check_file_read_permission(user, record, cursor=cur)
             if not is_allowed:
                 raise HTTPException(status_code=403, detail=reason)
     else:
         if await check_path_permission(path, user, cursor=cur) < AccessLevel.READ:
             raise HTTPException(status_code=403, detail="Permission denied")
-        record = await fconn.
+        record = await fconn.get_dir_record(path)
     return record
 
 @router_api.post("/meta")
@@ -180,7 +173,7 @@ async def update_file_meta(
         new_path = ensure_uri_compnents(new_path)
         logger.info(f"Update path of {path} to {new_path}")
         # will raise duplicate path error if same name path exists in the new path
-        await db.
+        await db.move_dir(path, new_path, user)
 
     return Response(status_code=200, content="OK")
 
@@ -198,13 +191,15 @@ async def validate_path_read_permission(path: str, user: UserRecord):
     if not await check_path_permission(path, user) >= AccessLevel.READ:
         raise HTTPException(status_code=403, detail="Permission denied")
 @router_api.get("/count-files")
+@handle_exception
 async def count_files(path: str, flat: bool = False, user: UserRecord = Depends(registered_user)):
     await validate_path_read_permission(path, user)
     path = ensure_uri_compnents(path)
     async with unique_cursor() as conn:
         fconn = FileConn(conn)
-        return { "count": await fconn.
+        return { "count": await fconn.count_dir_files(url = path, flat = flat) }
 @router_api.get("/list-files")
+@handle_exception
 async def list_files(
     path: str, offset: int = 0, limit: int = 1000,
     order_by: FileSortKey = "", order_desc: bool = False,
@@ -214,13 +209,14 @@ async def list_files(
     path = ensure_uri_compnents(path)
     async with unique_cursor() as conn:
         fconn = FileConn(conn)
-        return await fconn.
+        return await fconn.list_dir_files(
             url = path, offset = offset, limit = limit,
             order_by=order_by, order_desc=order_desc,
             flat=flat
         )
 
 @router_api.get("/count-dirs")
+@handle_exception
 async def count_dirs(path: str, user: UserRecord = Depends(registered_user)):
     await validate_path_read_permission(path, user)
     path = ensure_uri_compnents(path)
@@ -228,6 +224,7 @@ async def count_dirs(path: str, user: UserRecord = Depends(registered_user)):
         fconn = FileConn(conn)
         return { "count": await fconn.count_path_dirs(url = path) }
 @router_api.get("/list-dirs")
+@handle_exception
 async def list_dirs(
     path: str, offset: int = 0, limit: int = 1000,
     order_by: DirSortKey = "", order_desc: bool = False,
@@ -241,6 +238,47 @@ async def list_dirs(
         url = path, offset = offset, limit = limit,
         order_by=order_by, order_desc=order_desc, skim=skim
     )
+
+# https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#query-parameter-list-multiple-values
+@router_api.get("/get-multiple")
+@handle_exception
+async def get_multiple_files(
+    path: Annotated[list[str], Query()],
+    skip_content: bool = False,
+    user: UserRecord = Depends(registered_user)
+    ):
+    """
+    Get multiple files by path.
+    Please note that the content is supposed to be text and are small enough to fit in memory.
+
+    Not existing files will have content null, and the response will be 206 Partial Content if not all files are found.
+    if skip_content is True, the content of the files will always be ''
+    """
+    for p in path:
+        if p.endswith("/"):
+            raise InvalidPathError(f"Path '{p}' must not end with /")
+
+    # here we unify the path, so need to keep a record of the inputs
+    # make output keys consistent with inputs
+    upath2path = OrderedDict[str, str]()
+    for p in path:
+        p_ = p if not p.startswith("/") else p[1:]
+        upath2path[ensure_uri_compnents(p_)] = p
+    upaths = list(upath2path.keys())
+
+    # get files
+    raw_res = await db.read_files_bulk(upaths, skip_content=skip_content, op_user=user)
+    for k in raw_res.keys():
+        await delayed_log_access(k)
+    partial_content = len(raw_res) != len(upaths)
+
+    return JSONResponse(
+        content = {
+            upath2path[k]: v.decode('utf-8') if v is not None else None for k, v in raw_res.items()
+        },
+        status_code = 206 if partial_content else 200
+    )
+
 
 @router_api.get("/whoami")
 @handle_exception
```
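A hedged sketch of calling the new `get-multiple` endpoint. The base URL and the `/_api` prefix are assumptions (the prefix of `router_api` is not shown in this diff); repeated `path` query parameters map to `Annotated[list[str], Query()]`:

```python
import requests

resp = requests.get(
    "http://localhost:8000/_api/get-multiple",
    params=[("path", "alice/notes/a.txt"), ("path", "alice/notes/b.txt")],
    headers={"Authorization": "Bearer <token>"},  # placeholder credential
)
print(resp.status_code)  # 200, or 206 Partial Content if some paths were missing
print(resp.json())       # {original input path: text content, or null if missing}
```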
lfss/svc/common_impl.py
CHANGED
```diff
@@ -112,11 +112,8 @@ async def get_impl(
     async with unique_cursor() as cur:
         fconn = FileConn(cur)
         file_record = await fconn.get_file_record(path, throw=True)
-        uconn = UserConn(cur)
-        owner = await uconn.get_user_by_id(file_record.owner_id, throw=True)
-
         if not await check_path_permission(path, user, cursor=cur) >= AccessLevel.READ:
-            allow_access, reason = check_file_read_permission(user,
+            allow_access, reason = await check_file_read_permission(user, file_record, cursor=cur)
             if not allow_access:
                 raise HTTPException(status_code=403 if user.id != 0 else 401, detail=reason)
 
@@ -183,7 +180,7 @@ async def _get_dir_impl(
         else:
             raise HTTPException(status_code=404, detail="User not found")
     else:
-        if await FileConn(cur).
+        if await FileConn(cur).count_dir_files(path, flat=True) > 0:
             return Response(status_code=200)
         else:
             raise HTTPException(status_code=404, detail="Path not found")
@@ -298,7 +295,7 @@ async def delete_impl(path: str, user: UserRecord):
     logger.info(f"DELETE {path}, user: {user.username}")
 
     if path.endswith("/"):
-        res = await db.
+        res = await db.delete_dir(path, user)
     else:
         res = await db.delete_file(path, user)
 
@@ -330,8 +327,8 @@ async def copy_impl(
     else:
         async with unique_cursor() as cur:
             fconn = FileConn(cur)
-            dst_fcount = await fconn.
+            dst_fcount = await fconn.count_dir_files(dst_path, flat=True)
             if dst_fcount > 0:
                 raise HTTPException(status_code=409, detail="Destination exists")
-            await db.
+            await db.copy_dir(src_path, dst_path, op_user)
     return Response(status_code=201, content="OK")
```