lfss 0.5.2.tar.gz → 0.7.0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {lfss-0.5.2 → lfss-0.7.0}/PKG-INFO +1 -1
- {lfss-0.5.2 → lfss-0.7.0}/lfss/cli/balance.py +4 -4
- {lfss-0.5.2 → lfss-0.7.0}/lfss/cli/cli.py +1 -1
- {lfss-0.5.2 → lfss-0.7.0}/lfss/client/api.py +23 -3
- {lfss-0.5.2 → lfss-0.7.0}/lfss/sql/init.sql +17 -10
- lfss-0.7.0/lfss/sql/pragma.sql +8 -0
- {lfss-0.5.2 → lfss-0.7.0}/lfss/src/config.py +1 -0
- {lfss-0.5.2 → lfss-0.7.0}/lfss/src/database.py +49 -116
- lfss-0.7.0/lfss/src/datatype.py +55 -0
- {lfss-0.5.2 → lfss-0.7.0}/lfss/src/server.py +16 -13
- {lfss-0.5.2 → lfss-0.7.0}/pyproject.toml +1 -1
- lfss-0.5.2/lfss/sql/pragma.sql +0 -5
- {lfss-0.5.2 → lfss-0.7.0}/Readme.md +0 -0
- {lfss-0.5.2 → lfss-0.7.0}/docs/Known_issues.md +0 -0
- {lfss-0.5.2 → lfss-0.7.0}/docs/Permission.md +0 -0
- {lfss-0.5.2 → lfss-0.7.0}/frontend/api.js +0 -0
- {lfss-0.5.2 → lfss-0.7.0}/frontend/index.html +0 -0
- {lfss-0.5.2 → lfss-0.7.0}/frontend/popup.css +0 -0
- {lfss-0.5.2 → lfss-0.7.0}/frontend/popup.js +0 -0
- {lfss-0.5.2 → lfss-0.7.0}/frontend/scripts.js +0 -0
- {lfss-0.5.2 → lfss-0.7.0}/frontend/styles.css +0 -0
- {lfss-0.5.2 → lfss-0.7.0}/frontend/utils.js +0 -0
- {lfss-0.5.2 → lfss-0.7.0}/lfss/cli/panel.py +0 -0
- {lfss-0.5.2 → lfss-0.7.0}/lfss/cli/serve.py +0 -0
- {lfss-0.5.2 → lfss-0.7.0}/lfss/cli/user.py +0 -0
- {lfss-0.5.2 → lfss-0.7.0}/lfss/client/__init__.py +0 -0
- {lfss-0.5.2 → lfss-0.7.0}/lfss/src/__init__.py +0 -0
- {lfss-0.5.2 → lfss-0.7.0}/lfss/src/error.py +0 -0
- {lfss-0.5.2 → lfss-0.7.0}/lfss/src/log.py +0 -0
- {lfss-0.5.2 → lfss-0.7.0}/lfss/src/stat.py +0 -0
- {lfss-0.5.2 → lfss-0.7.0}/lfss/src/utils.py +0 -0
{lfss-0.5.2 → lfss-0.7.0}/lfss/cli/balance.py

```diff
@@ -24,10 +24,10 @@ def barriered(func):
 @barriered
 async def move_to_external(f_id: str, flag: str = ''):
     async with aiosqlite.connect(db_file, timeout = 60) as c:
-        async with c.execute( "SELECT data FROM fdata WHERE file_id = ?", (f_id,)) as cursor:
+        async with c.execute( "SELECT data FROM blobs.fdata WHERE file_id = ?", (f_id,)) as cursor:
             blob_row = await cursor.fetchone()
             if blob_row is None:
-                print(f"{flag}File {f_id} not found in fdata")
+                print(f"{flag}File {f_id} not found in blobs.fdata")
                 return
         await c.execute("BEGIN")
         blob: bytes = blob_row[0]
@@ -35,7 +35,7 @@ async def move_to_external(f_id: str, flag: str = ''):
             async with aiofiles.open(LARGE_BLOB_DIR / f_id, 'wb') as f:
                 await f.write(blob)
             await c.execute( "UPDATE fmeta SET external = 1 WHERE file_id = ?", (f_id,))
-            await c.execute( "DELETE FROM fdata WHERE file_id = ?", (f_id,))
+            await c.execute( "DELETE FROM blobs.fdata WHERE file_id = ?", (f_id,))
             await c.commit()
             print(f"{flag}Moved {f_id} to external storage")
         except Exception as e:
@@ -56,7 +56,7 @@ async def move_to_internal(f_id: str, flag: str = ''):

         await c.execute("BEGIN")
         try:
-            await c.execute("INSERT INTO fdata (file_id, data) VALUES (?, ?)", (f_id, blob))
+            await c.execute("INSERT INTO blobs.fdata (file_id, data) VALUES (?, ?)", (f_id, blob))
             await c.execute("UPDATE fmeta SET external = 0 WHERE file_id = ?", (f_id,))
             await c.commit()
             (LARGE_BLOB_DIR / f_id).unlink(missing_ok=True)
```
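Every hunk in this file swaps the bare `fdata` table for the schema-qualified `blobs.fdata`, a name that only resolves once a second database file is attached to the connection under the name `blobs` (`balance.py` opens its own connection here, so the ATTACH presumably happens elsewhere in the file, outside this diff). A minimal sketch of the pattern, with hypothetical file names:

```python
import asyncio
import aiosqlite

async def main():
    async with aiosqlite.connect("index.db", timeout=60) as c:
        # Without this ATTACH, every "blobs.fdata" query above would
        # fail with "no such table".
        await c.execute("ATTACH DATABASE ? AS blobs", ("blobs.db",))
        async with c.execute("SELECT COUNT(*) FROM blobs.fdata") as cur:
            print("internally stored blobs:", (await cur.fetchone())[0])

asyncio.run(main())
```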
{lfss-0.5.2 → lfss-0.7.0}/lfss/cli/cli.py

```diff
@@ -14,7 +14,7 @@ def parse_arguments():
     sp_upload.add_argument("dst", help="Destination path", type=str)
     sp_upload.add_argument("-j", "--jobs", type=int, default=1, help="Number of concurrent uploads")
     sp_upload.add_argument("--interval", type=float, default=0, help="Interval between files, only works with directory upload")
-    sp_upload.add_argument("--conflict", choices=["overwrite", "abort", "skip"], default="abort", help="Conflict resolution")
+    sp_upload.add_argument("--conflict", choices=["overwrite", "abort", "skip", "skip-ahead"], default="abort", help="Conflict resolution")
     sp_upload.add_argument("--permission", type=FileReadPermission, default=FileReadPermission.UNSET, help="File permission")
     sp_upload.add_argument("--retries", type=int, default=0, help="Number of retries, only works with directory upload")

```
{lfss-0.5.2 → lfss-0.7.0}/lfss/client/api.py

```diff
@@ -2,7 +2,7 @@ from typing import Optional, Literal
 import os
 import requests
 import urllib.parse
-from lfss.src.database import (
+from lfss.src.datatype import (
     FileReadPermission, FileRecord, DirectoryRecord, UserRecord, PathContents
 )

@@ -34,8 +34,18 @@ class Connector:
             return response
         return f

-    def put(self, path: str, file_data: bytes, permission: int | FileReadPermission = 0, conflict: Literal['overwrite', 'abort', 'skip'] = 'abort'):
+    def put(self, path: str, file_data: bytes, permission: int | FileReadPermission = 0, conflict: Literal['overwrite', 'abort', 'skip', 'skip-ahead'] = 'abort'):
         """Uploads a file to the specified path."""
+        assert isinstance(file_data, bytes), "file_data must be bytes"
+
+        # Skip ahead by checking if the file already exists
+        if conflict == 'skip-ahead':
+            exists = self.get_metadata(path)
+            if exists is None:
+                conflict = 'skip'
+            else:
+                return {'status': 'skipped', 'path': path}
+
         response = self._fetch('PUT', path, search_params={
             'permission': int(permission),
             'conflict': conflict
@@ -45,9 +55,19 @@ class Connector:
         )
         return response.json()

-    def put_json(self, path: str, data: dict, permission: int | FileReadPermission = 0, conflict: Literal['overwrite', 'abort', 'skip'] = 'abort'):
+    def put_json(self, path: str, data: dict, permission: int | FileReadPermission = 0, conflict: Literal['overwrite', 'abort', 'skip', 'skip-ahead'] = 'abort'):
         """Uploads a JSON file to the specified path."""
         assert path.endswith('.json'), "Path must end with .json"
+        assert isinstance(data, dict), "data must be a dict"
+
+        # Skip ahead by checking if the file already exists
+        if conflict == 'skip-ahead':
+            exists = self.get_metadata(path)
+            if exists is None:
+                conflict = 'skip'
+            else:
+                return {'status': 'skipped', 'path': path}
+
         response = self._fetch('PUT', path, search_params={
             'permission': int(permission),
             'conflict': conflict
```
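The new `skip-ahead` mode resolves conflicts on the client: one metadata lookup decides whether the file is sent at all, which saves bandwidth when re-uploading a mostly-unchanged set of files, at the cost of an extra round-trip per file. A usage sketch (how `Connector` is configured is outside this diff, so the bare constructor call is an assumption):

```python
from lfss.client.api import Connector

conn = Connector()  # assumed: endpoint and token come from env/defaults
res = conn.put("alice/notes/todo.txt", b"hello", conflict="skip-ahead")
if res.get("status") == "skipped":
    print("already on the server; no bytes were uploaded")
```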
{lfss-0.5.2 → lfss-0.7.0}/lfss/sql/init.sql

```diff
@@ -1,7 +1,13 @@
+PRAGMA journal_mode=MEMROY;
+PRAGMA temp_store=MEMORY;
+PRAGMA page_size=4096;
+PRAGMA synchronous=NORMAL;
+PRAGMA case_sensitive_like=ON;
+
 CREATE TABLE IF NOT EXISTS user (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
-    username VARCHAR(
-    credential VARCHAR(
+    username VARCHAR(256) UNIQUE NOT NULL,
+    credential VARCHAR(256) NOT NULL,
     is_admin BOOLEAN DEFAULT FALSE,
     create_time TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
     last_active TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
@@ -10,22 +16,18 @@ CREATE TABLE IF NOT EXISTS user (
 );

 CREATE TABLE IF NOT EXISTS fmeta (
-    url VARCHAR(
+    url VARCHAR(1024) PRIMARY KEY,
     owner_id INTEGER NOT NULL,
-    file_id
+    file_id CHAR(32) NOT NULL,
     file_size INTEGER,
     create_time TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
     access_time TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
     permission INTEGER DEFAULT 0,
-    external BOOLEAN DEFAULT FALSE,
+    external BOOLEAN DEFAULT FALSE,
+    mime_type VARCHAR(256) DEFAULT 'application/octet-stream',
     FOREIGN KEY(owner_id) REFERENCES user(id)
 );

-CREATE TABLE IF NOT EXISTS fdata (
-    file_id VARCHAR(256) PRIMARY KEY,
-    data BLOB
-);
-
 CREATE TABLE IF NOT EXISTS usize (
     user_id INTEGER PRIMARY KEY,
     size INTEGER DEFAULT 0
@@ -36,3 +38,8 @@ CREATE INDEX IF NOT EXISTS idx_fmeta_url ON fmeta(url);
 CREATE INDEX IF NOT EXISTS idx_user_username ON user(username);

 CREATE INDEX IF NOT EXISTS idx_user_credential ON user(credential);
+
+CREATE TABLE IF NOT EXISTS blobs.fdata (
+    file_id CHAR(32) PRIMARY KEY,
+    data BLOB
+);
```
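Two details worth noting. `CREATE TABLE IF NOT EXISTS blobs.fdata` is schema-qualified, so this script can only run on a connection that has already attached a second database file as `blobs` (the `database.py` hunks below do exactly that). Also, `journal_mode=MEMROY` looks like a typo for `MEMORY`: SQLite leaves the journal mode unchanged when given an unrecognized value, so the statement is effectively a no-op. A sketch with the standard-library driver, using hypothetical paths:

```python
import sqlite3

conn = sqlite3.connect("index.db")
# Attach the blob store first so that "blobs.fdata" resolves.
conn.execute("ATTACH DATABASE 'blobs.db' AS blobs")
with open("lfss/sql/init.sql") as f:
    conn.executescript(f.read())

# An unrecognized journal mode is ignored: this prints the default
# "delete", not "memroy".
print(conn.execute("PRAGMA journal_mode").fetchone()[0])
```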
{lfss-0.5.2 → lfss-0.7.0}/lfss/src/config.py

```diff
@@ -7,6 +7,7 @@ DATA_HOME = Path(os.environ.get('LFSS_DATA', __default_dir))
 if not DATA_HOME.exists():
     DATA_HOME.mkdir()
     print(f"[init] Created data home at {DATA_HOME}")
+DATA_HOME = DATA_HOME.resolve().absolute()
 LARGE_BLOB_DIR = DATA_HOME / 'large_blobs'
 LARGE_BLOB_DIR.mkdir(exist_ok=True)

```
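Resolving `DATA_HOME` once means every derived path (`LARGE_BLOB_DIR`, the two database files) stays valid even if the process later changes its working directory. A small illustration, with a hypothetical relative `LFSS_DATA`:

```python
import os
from pathlib import Path

data_home = Path("data").resolve().absolute()  # e.g. /srv/app/data
os.chdir("/tmp")
# Still the original location, not /tmp/data/large_blobs:
print(data_home / "large_blobs")
```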
{lfss-0.5.2 → lfss-0.7.0}/lfss/src/database.py

```diff
@@ -1,19 +1,20 @@

 from typing import Optional, overload, Literal, AsyncIterable
 from abc import ABC, abstractmethod
+import os

 import urllib.parse
 from pathlib import Path
-import
+import hashlib, uuid
 from contextlib import asynccontextmanager
 from functools import wraps
-from enum import IntEnum
 import zipfile, io, asyncio

 import aiosqlite, aiofiles
 import aiofiles.os
 from asyncio import Lock

+from .datatype import UserRecord, FileReadPermission, FileRecord, DirectoryRecord, PathContents
 from .config import DATA_HOME, LARGE_BLOB_DIR
 from .log import get_logger
 from .utils import decode_uri_compnents
@@ -57,33 +58,18 @@ class DBConnBase(ABC):
         """Should return self"""
         global _g_conn
         if _g_conn is None:
-
+            if not os.environ.get('SQLITE_TEMPDIR'):
+                os.environ['SQLITE_TEMPDIR'] = str(DATA_HOME)
+            # large blobs are stored in a separate database, should be more efficient
+            _g_conn = await aiosqlite.connect(DATA_HOME / 'index.db')
+            async with _g_conn.cursor() as c:
+                await c.execute(f"ATTACH DATABASE ? AS blobs", (str(DATA_HOME/'blobs.db'), ))
             await execute_sql(_g_conn, 'pragma.sql')
             await execute_sql(_g_conn, 'init.sql')

     async def commit(self):
         await self.conn.commit()

-class FileReadPermission(IntEnum):
-    UNSET = 0 # not set
-    PUBLIC = 1 # accessible by anyone
-    PROTECTED = 2 # accessible by any user
-    PRIVATE = 3 # accessible by owner only (including admin)
-
-@dataclasses.dataclass
-class UserRecord:
-    id: int
-    username: str
-    credential: str
-    is_admin: bool
-    create_time: str
-    last_active: str
-    max_storage: int
-    permission: 'FileReadPermission'
-
-    def __str__(self):
-        return f"User {self.username} (id={self.id}, admin={self.is_admin}, created at {self.create_time}, last active at {self.last_active}, storage={self.max_storage}, permission={self.permission})"
-
 DECOY_USER = UserRecord(0, 'decoy', 'decoy', False, '2021-01-01 00:00:00', '2021-01-01 00:00:00', 0, FileReadPermission.PRIVATE)
 class UserConn(DBConnBase):

@@ -174,37 +160,6 @@ class UserConn(DBConnBase):
         await self.conn.execute("DELETE FROM user WHERE username = ?", (username, ))
         self.logger.info(f"Delete user {username}")

-@dataclasses.dataclass
-class FileRecord:
-    url: str
-    owner_id: int
-    file_id: str # defines mapping from fmata to fdata
-    file_size: int
-    create_time: str
-    access_time: str
-    permission: FileReadPermission
-    external: bool
-
-    def __str__(self):
-        return f"File {self.url} (owner={self.owner_id}, created at {self.create_time}, accessed at {self.access_time}, " + \
-            f"file_id={self.file_id}, permission={self.permission}, size={self.file_size}, external={self.external})"
-
-@dataclasses.dataclass
-class DirectoryRecord:
-    url: str
-    size: int
-    create_time: str = ""
-    update_time: str = ""
-    access_time: str = ""
-
-    def __str__(self):
-        return f"Directory {self.url} (size={self.size})"
-
-@dataclasses.dataclass
-class PathContents:
-    dirs: list[DirectoryRecord]
-    files: list[FileRecord]
-
 class FileConn(DBConnBase):

     @staticmethod
@@ -213,28 +168,6 @@ class FileConn(DBConnBase):

     async def init(self):
         await super().init()
-        # backward compatibility, since 0.2.1
-        async with self.conn.execute("SELECT * FROM user") as cursor:
-            res = await cursor.fetchall()
-        for r in res:
-            async with self.conn.execute("SELECT user_id FROM usize WHERE user_id = ?", (r[0], )) as cursor:
-                size = await cursor.fetchone()
-            if size is None:
-                async with self.conn.execute("SELECT SUM(file_size) FROM fmeta WHERE owner_id = ?", (r[0], )) as cursor:
-                    size = await cursor.fetchone()
-                if size is not None and size[0] is not None:
-                    await self._user_size_inc(r[0], size[0])
-
-        # backward compatibility, since 0.5.0
-        # 'external' means the file is not stored in the database, but in the external storage
-        async with self.conn.execute("SELECT * FROM fmeta") as cursor:
-            res = await cursor.fetchone()
-        if res and len(res) < 8:
-            self.logger.info("Updating fmeta table")
-            await self.conn.execute('''
-            ALTER TABLE fmeta ADD COLUMN external BOOLEAN DEFAULT FALSE
-            ''')
-
         return self

     async def get_file_record(self, url: str) -> Optional[FileRecord]:
@@ -372,43 +305,42 @@ class FileConn(DBConnBase):
         assert res is not None
         return res[0] or 0

+    @atomic
+    async def update_file_record(
+        self, url, owner_id: Optional[int] = None, permission: Optional[FileReadPermission] = None
+        ):
+        old = await self.get_file_record(url)
+        assert old is not None, f"File {url} not found"
+        if owner_id is None:
+            owner_id = old.owner_id
+        if permission is None:
+            permission = old.permission
+        await self.conn.execute(
+            "UPDATE fmeta SET owner_id = ?, permission = ? WHERE url = ?",
+            (owner_id, int(permission), url)
+            )
+        self.logger.info(f"Updated file {url}")
+
     @atomic
     async def set_file_record(
         self, url: str,
-        owner_id:
-        file_id:
-        file_size:
-        permission:
-        external:
+        owner_id: int,
+        file_id:str,
+        file_size: int,
+        permission: FileReadPermission,
+        external: bool,
+        mime_type: str
         ):
-
-
-
-
-
-
-
-
-
-
-            if permission is None: permission = old.permission
-            await self.conn.execute(
-                """
-                UPDATE fmeta SET owner_id = ?, permission = ?,
-                access_time = CURRENT_TIMESTAMP WHERE url = ?
-                """, (owner_id, int(permission), url))
-            self.logger.info(f"File {url} updated")
-        else:
-            self.logger.debug(f"Creating fmeta {url}: permission={permission}, owner_id={owner_id}, file_id={file_id}, file_size={file_size}, external={external}")
-            if permission is None:
-                permission = FileReadPermission.UNSET
-            assert owner_id is not None and file_id is not None and file_size is not None and external is not None
-            await self.conn.execute(
-                "INSERT INTO fmeta (url, owner_id, file_id, file_size, permission, external) VALUES (?, ?, ?, ?, ?, ?)",
-                (url, owner_id, file_id, file_size, int(permission), external)
-            )
-            await self._user_size_inc(owner_id, file_size)
-            self.logger.info(f"File {url} created")
+        self.logger.debug(f"Creating fmeta {url}: permission={permission}, owner_id={owner_id}, file_id={file_id}, file_size={file_size}, external={external}, mime_type={mime_type}")
+        if permission is None:
+            permission = FileReadPermission.UNSET
+        assert owner_id is not None and file_id is not None and file_size is not None and external is not None
+        await self.conn.execute(
+            "INSERT INTO fmeta (url, owner_id, file_id, file_size, permission, external, mime_type) VALUES (?, ?, ?, ?, ?, ?, ?)",
+            (url, owner_id, file_id, file_size, int(permission), external, mime_type)
+            )
+        await self._user_size_inc(owner_id, file_size)
+        self.logger.info(f"File {url} created")

     @atomic
     async def move_file(self, old_url: str, new_url: str):
```
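With this split, `set_file_record` is insert-only and takes every column, while metadata edits go through the new `update_file_record`, which back-fills unspecified fields from the existing row. A hedged sketch of the calling convention (assumes an initialized `FileConn` named `fc`, inside an async context):

```python
import uuid

# Create: all columns are now required.
await fc.set_file_record(
    "alice/report.pdf", owner_id=1, file_id=uuid.uuid4().hex,
    file_size=1024, permission=FileReadPermission.UNSET,
    external=False, mime_type="application/pdf")

# Update: only the permission changes; owner_id is carried over.
await fc.update_file_record("alice/report.pdf",
                            permission=FileReadPermission.PUBLIC)
```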
```diff
@@ -479,7 +411,7 @@ class FileConn(DBConnBase):

     @atomic
     async def set_file_blob(self, file_id: str, blob: bytes):
-        await self.conn.execute("INSERT OR REPLACE INTO fdata (file_id, data) VALUES (?, ?)", (file_id, blob))
+        await self.conn.execute("INSERT OR REPLACE INTO blobs.fdata (file_id, data) VALUES (?, ?)", (file_id, blob))

     @atomic
     async def set_file_blob_external(self, file_id: str, stream: AsyncIterable[bytes])->int:
@@ -496,7 +428,7 @@ class FileConn(DBConnBase):
         return size_sum

     async def get_file_blob(self, file_id: str) -> Optional[bytes]:
-        async with self.conn.execute("SELECT data FROM fdata WHERE file_id = ?", (file_id, )) as cursor:
+        async with self.conn.execute("SELECT data FROM blobs.fdata WHERE file_id = ?", (file_id, )) as cursor:
             res = await cursor.fetchone()
             if res is None:
                 return None
@@ -515,11 +447,11 @@ class FileConn(DBConnBase):

     @atomic
     async def delete_file_blob(self, file_id: str):
-        await self.conn.execute("DELETE FROM fdata WHERE file_id = ?", (file_id, ))
+        await self.conn.execute("DELETE FROM blobs.fdata WHERE file_id = ?", (file_id, ))

     @atomic
     async def delete_file_blobs(self, file_ids: list[str]):
-        await self.conn.execute("DELETE FROM fdata WHERE file_id IN ({})".format(','.join(['?'] * len(file_ids))), file_ids)
+        await self.conn.execute("DELETE FROM blobs.fdata WHERE file_id IN ({})".format(','.join(['?'] * len(file_ids))), file_ids)

 def validate_url(url: str, is_file = True):
     prohibited_chars = ['..', ';', "'", '"', '\\', '\0', '\n', '\r', '\t', '\x0b', '\x0c']
```
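`delete_file_blobs` builds its `IN (...)` clause by repeating placeholders, keeping the ids parameterized rather than string-interpolated. The pattern in isolation:

```python
file_ids = ["aaa", "bbb", "ccc"]
sql = "DELETE FROM blobs.fdata WHERE file_id IN ({})".format(
    ",".join("?" * len(file_ids)))
# -> "DELETE FROM blobs.fdata WHERE file_id IN (?,?,?)"
# SQLite caps the parameter count (999 in older builds), so very
# large batches would need chunking.
```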
```diff
@@ -587,7 +519,8 @@ class Database:
     async def save_file(
         self, u: int | str, url: str,
         blob: bytes | AsyncIterable[bytes],
-        permission: FileReadPermission = FileReadPermission.UNSET
+        permission: FileReadPermission = FileReadPermission.UNSET,
+        mime_type: str = 'application/octet-stream'
         ):
         """
         if file_size is not provided, the blob must be bytes
@@ -619,7 +552,7 @@ class Database:
             await self.file.set_file_blob(f_id, blob)
             await self.file.set_file_record(
                 url, owner_id=user.id, file_id=f_id, file_size=file_size,
-                permission=permission, external=False)
+                permission=permission, external=False, mime_type=mime_type)
             await self.user.set_active(user.username)
         else:
             assert isinstance(blob, AsyncIterable)
@@ -631,7 +564,7 @@ class Database:
                 raise StorageExceededError(f"Unable to save file, user {user.username} has storage limit of {user.max_storage}, used {user_size_used}, requested {file_size}")
             await self.file.set_file_record(
                 url, owner_id=user.id, file_id=f_id, file_size=file_size,
-                permission=permission, external=True)
+                permission=permission, external=True, mime_type=mime_type)
             await self.user.set_active(user.username)

     async def read_file_stream(self, url: str) -> AsyncIterable[bytes]:
```
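`save_file` now threads `mime_type` into both branches: small payloads are stored as rows in `blobs.fdata`, larger ones stream to `LARGE_BLOB_DIR` and are recorded with `external=True`. A calling sketch (assumes an initialized `Database` named `db`, inside an async context):

```python
# Small file: bytes go into the SQLite blob store.
await db.save_file(1, "alice/small.txt", b"hi", mime_type="text/plain")

# Large file: an async iterator is streamed to external storage.
async def chunks():
    for _ in range(4):
        yield b"\0" * (16 * 1024 * 1024)

await db.save_file(1, "alice/big.bin", chunks(),
                   mime_type="application/octet-stream")
```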
lfss-0.7.0/lfss/src/datatype.py (new file)

```diff
@@ -0,0 +1,55 @@
+from enum import IntEnum
+import dataclasses
+
+class FileReadPermission(IntEnum):
+    UNSET = 0 # not set
+    PUBLIC = 1 # accessible by anyone
+    PROTECTED = 2 # accessible by any user
+    PRIVATE = 3 # accessible by owner only (including admin)
+
+@dataclasses.dataclass
+class UserRecord:
+    id: int
+    username: str
+    credential: str
+    is_admin: bool
+    create_time: str
+    last_active: str
+    max_storage: int
+    permission: 'FileReadPermission'
+
+    def __str__(self):
+        return f"User {self.username} (id={self.id}, admin={self.is_admin}, created at {self.create_time}, last active at {self.last_active}, storage={self.max_storage}, permission={self.permission})"
+
+@dataclasses.dataclass
+class FileRecord:
+    url: str
+    owner_id: int
+    file_id: str # defines mapping from fmata to blobs.fdata
+    file_size: int
+    create_time: str
+    access_time: str
+    permission: FileReadPermission
+    external: bool
+    mime_type: str
+
+    def __str__(self):
+        return f"File {self.url} [{self.mime_type}] (owner={self.owner_id}, created at {self.create_time}, accessed at {self.access_time}, " + \
+            f"file_id={self.file_id}, permission={self.permission}, size={self.file_size}, external={self.external})"
+
+@dataclasses.dataclass
+class DirectoryRecord:
+    url: str
+    size: int
+    create_time: str = ""
+    update_time: str = ""
+    access_time: str = ""
+
+    def __str__(self):
+        return f"Directory {self.url} (size={self.size})"
+
+@dataclasses.dataclass
+class PathContents:
+    dirs: list[DirectoryRecord]
+    files: list[FileRecord]
+
```
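The field order of `FileRecord` mirrors the `fmeta` column order in `init.sql`, so a fetched row maps onto it positionally; only `permission` needs an explicit cast. A sketch with made-up row values:

```python
from lfss.src.datatype import FileRecord, FileReadPermission

# (url, owner_id, file_id, file_size, create_time, access_time,
#  permission, external, mime_type)
row = ("alice/a.txt", 1, "0f" * 16, 5,
       "2024-01-01 00:00:00", "2024-01-01 00:00:00", 1, False, "text/plain")
rec = FileRecord(*row[:6], FileReadPermission(row[6]), *row[7:])
print(rec)  # File alice/a.txt [text/plain] (owner=1, ...)
```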
{lfss-0.5.2 → lfss-0.7.0}/lfss/src/server.py

```diff
@@ -15,7 +15,7 @@ from contextlib import asynccontextmanager
 from .error import *
 from .log import get_logger
 from .stat import RequestDB
-from .config import MAX_BUNDLE_BYTES, MAX_FILE_BYTES,
+from .config import MAX_BUNDLE_BYTES, MAX_FILE_BYTES, LARGE_FILE_BYTES
 from .utils import ensure_uri_compnents, format_last_modified, now_stamp
 from .database import Database, UserRecord, DECOY_USER, FileRecord, check_user_permission, FileReadPermission

@@ -142,12 +142,10 @@ async def get_file(path: str, download = False, user: UserRecord = Depends(get_c

     fname = path.split("/")[-1]
     async def send(media_type: Optional[str] = None, disposition = "attachment"):
+        if media_type is None:
+            media_type = file_record.mime_type
         if not file_record.external:
             fblob = await conn.read_file(path)
-            if media_type is None:
-                media_type, _ = mimetypes.guess_type(fname)
-            if media_type is None:
-                media_type = mimesniff.what(fblob)
             return Response(
                 content=fblob, media_type=media_type, headers={
                     "Content-Disposition": f"{disposition}; filename={fname}",
@@ -155,12 +153,7 @@ async def get_file(path: str, download = False, user: UserRecord = Depends(get_c
                     "Last-Modified": format_last_modified(file_record.create_time)
                 }
             )
-
         else:
-            if media_type is None:
-                media_type, _ = mimetypes.guess_type(fname)
-            if media_type is None:
-                media_type = mimesniff.what(str((LARGE_BLOB_DIR / file_record.file_id).absolute()))
             return StreamingResponse(
                 await conn.read_file_stream(path), media_type=media_type, headers={
                     "Content-Disposition": f"{disposition}; filename={fname}",
@@ -228,14 +221,24 @@ async def put_file(
         blobs = await request.body()
     else:
         blobs = await request.body()
+
+    # check file type
+    assert not path.endswith("/"), "Path must be a file"
+    fname = path.split("/")[-1]
+    mime_t, _ = mimetypes.guess_type(fname)
+    if mime_t is None:
+        mime_t = mimesniff.what(blobs)
+    if mime_t is None:
+        mime_t = "application/octet-stream"
+
     if len(blobs) > LARGE_FILE_BYTES:
         async def blob_reader():
            chunk_size = 16 * 1024 * 1024 # 16MB
            for b in range(0, len(blobs), chunk_size):
                yield blobs[b:b+chunk_size]
-        await conn.save_file(user.id, path, blob_reader(), permission = FileReadPermission(permission))
+        await conn.save_file(user.id, path, blob_reader(), permission = FileReadPermission(permission), mime_type = mime_t)
     else:
-        await conn.save_file(user.id, path, blobs, permission = FileReadPermission(permission))
+        await conn.save_file(user.id, path, blobs, permission = FileReadPermission(permission), mime_type=mime_t)

     # https://developer.mozilla.org/zh-CN/docs/Web/HTTP/Methods/PUT
     if exists_flag:
```
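MIME detection now happens once, at upload time: extension first (`mimetypes.guess_type`), then content sniffing (`mimesniff.what`), then the generic default; the result is persisted in `fmeta.mime_type`, so `get_file` no longer sniffs per request. The chain, pulled out into a standalone helper for illustration (not a function that exists in lfss):

```python
import mimetypes
import mimesniff  # third-party package the server already imports

def guess_mime(fname: str, payload: bytes) -> str:
    mime_t, _ = mimetypes.guess_type(fname)   # 1. by file extension
    if mime_t is None:
        mime_t = mimesniff.what(payload)      # 2. by magic bytes
    if mime_t is None:
        mime_t = "application/octet-stream"   # 3. last resort
    return mime_t

print(guess_mime("report.json", b"{}"))  # application/json
```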
```diff
@@ -353,7 +356,7 @@ async def update_file_meta(

     if perm is not None:
         logger.info(f"Update permission of {path} to {perm}")
-        await conn.file.
+        await conn.file.update_file_record(
             url = file_record.url,
             permission = FileReadPermission(perm)
         )
```
lfss-0.5.2/lfss/sql/pragma.sql (deleted)