lfss 0.9.0__tar.gz → 0.9.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48) hide show
  1. {lfss-0.9.0 → lfss-0.9.1}/PKG-INFO +9 -4
  2. {lfss-0.9.0 → lfss-0.9.1}/Readme.md +6 -1
  3. lfss-0.9.1/docs/Webdav.md +22 -0
  4. {lfss-0.9.0 → lfss-0.9.1}/lfss/api/__init__.py +3 -3
  5. {lfss-0.9.0 → lfss-0.9.1}/lfss/api/connector.py +3 -3
  6. {lfss-0.9.0 → lfss-0.9.1}/lfss/cli/balance.py +3 -3
  7. {lfss-0.9.0 → lfss-0.9.1}/lfss/cli/cli.py +2 -2
  8. {lfss-0.9.0 → lfss-0.9.1}/lfss/cli/panel.py +8 -0
  9. {lfss-0.9.0 → lfss-0.9.1}/lfss/cli/serve.py +4 -2
  10. {lfss-0.9.0 → lfss-0.9.1}/lfss/cli/user.py +4 -4
  11. {lfss-0.9.0 → lfss-0.9.1}/lfss/cli/vacuum.py +5 -5
  12. {lfss-0.9.0/lfss/src → lfss-0.9.1/lfss/eng}/config.py +1 -0
  13. {lfss-0.9.0/lfss/src → lfss-0.9.1/lfss/eng}/database.py +99 -4
  14. {lfss-0.9.0/lfss/src → lfss-0.9.1/lfss/eng}/error.py +4 -0
  15. {lfss-0.9.0/lfss/src → lfss-0.9.1/lfss/eng}/thumb.py +10 -9
  16. {lfss-0.9.0/lfss/src → lfss-0.9.1/lfss/eng}/utils.py +9 -1
  17. lfss-0.9.1/lfss/svc/app.py +9 -0
  18. lfss-0.9.1/lfss/svc/app_base.py +152 -0
  19. lfss-0.9.1/lfss/svc/app_dav.py +374 -0
  20. lfss-0.9.1/lfss/svc/app_native.py +247 -0
  21. lfss-0.9.1/lfss/svc/common_impl.py +270 -0
  22. lfss-0.9.0/lfss/src/stat.py → lfss-0.9.1/lfss/svc/request_log.py +2 -2
  23. {lfss-0.9.0 → lfss-0.9.1}/pyproject.toml +2 -2
  24. lfss-0.9.0/lfss/src/server.py +0 -604
  25. {lfss-0.9.0 → lfss-0.9.1}/docs/Known_issues.md +0 -0
  26. {lfss-0.9.0 → lfss-0.9.1}/docs/Permission.md +0 -0
  27. {lfss-0.9.0 → lfss-0.9.1}/frontend/api.js +0 -0
  28. {lfss-0.9.0 → lfss-0.9.1}/frontend/index.html +0 -0
  29. {lfss-0.9.0 → lfss-0.9.1}/frontend/info.css +0 -0
  30. {lfss-0.9.0 → lfss-0.9.1}/frontend/info.js +0 -0
  31. {lfss-0.9.0 → lfss-0.9.1}/frontend/login.css +0 -0
  32. {lfss-0.9.0 → lfss-0.9.1}/frontend/login.js +0 -0
  33. {lfss-0.9.0 → lfss-0.9.1}/frontend/popup.css +0 -0
  34. {lfss-0.9.0 → lfss-0.9.1}/frontend/popup.js +0 -0
  35. {lfss-0.9.0 → lfss-0.9.1}/frontend/scripts.js +0 -0
  36. {lfss-0.9.0 → lfss-0.9.1}/frontend/state.js +0 -0
  37. {lfss-0.9.0 → lfss-0.9.1}/frontend/styles.css +0 -0
  38. {lfss-0.9.0 → lfss-0.9.1}/frontend/thumb.css +0 -0
  39. {lfss-0.9.0 → lfss-0.9.1}/frontend/thumb.js +0 -0
  40. {lfss-0.9.0 → lfss-0.9.1}/frontend/utils.js +0 -0
  41. {lfss-0.9.0 → lfss-0.9.1}/lfss/cli/__init__.py +0 -0
  42. {lfss-0.9.0/lfss/src → lfss-0.9.1/lfss/eng}/__init__.py +0 -0
  43. {lfss-0.9.0/lfss/src → lfss-0.9.1/lfss/eng}/bounded_pool.py +0 -0
  44. {lfss-0.9.0/lfss/src → lfss-0.9.1/lfss/eng}/connection_pool.py +0 -0
  45. {lfss-0.9.0/lfss/src → lfss-0.9.1/lfss/eng}/datatype.py +0 -0
  46. {lfss-0.9.0/lfss/src → lfss-0.9.1/lfss/eng}/log.py +0 -0
  47. {lfss-0.9.0 → lfss-0.9.1}/lfss/sql/init.sql +0 -0
  48. {lfss-0.9.0 → lfss-0.9.1}/lfss/sql/pragma.sql +0 -0
@@ -1,10 +1,10 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: lfss
3
- Version: 0.9.0
3
+ Version: 0.9.1
4
4
  Summary: Lightweight file storage service
5
5
  Home-page: https://github.com/MenxLi/lfss
6
- Author: li, mengxun
7
- Author-email: limengxun45@outlook.com
6
+ Author: li_mengxun
7
+ Author-email: limengxun45@outlookc.com
8
8
  Requires-Python: >=3.10
9
9
  Classifier: Programming Language :: Python :: 3
10
10
  Classifier: Programming Language :: Python :: 3.10
@@ -32,6 +32,7 @@ My experiment on a lightweight and high-performance file/object storage service.
32
32
  - Pagination and sorted file listing for vast number of files.
33
33
  - High performance: high concurrency, near-native speed on stress tests.
34
34
  - Support range requests, so you can stream large files / resume download.
35
+ - WebDAV compatible ([NOTE](./docs/Webdav.md)).
35
36
 
36
37
  It stores small files and metadata in sqlite, large files in the filesystem.
37
38
  Tested on 2 million files, and it is still fast.
@@ -53,7 +54,11 @@ lfss-panel --open
53
54
  Or, you can start a web server at `/frontend` and open `index.html` in your browser.
54
55
 
55
56
  The API usage is simple, just `GET`, `PUT`, `DELETE` to the `/<username>/file/url` path.
56
- Authentication via `Authorization` header with the value `Bearer <token>`, or through the `token` query parameter.
57
+ The authentication can be achieved through one of the following methods:
58
+ 1. `Authorization` header with the value `Bearer sha256(<username><password>)`.
59
+ 2. `token` query parameter with the value `sha256(<username><password>)`.
60
+ 3. HTTP Basic Authentication with the username and password.
61
+
57
62
  You can refer to `frontend` as an application example, `lfss/api/connector.py` for more APIs.
58
63
 
59
64
  By default, the service exposes all files to the public for `GET` requests,
@@ -9,6 +9,7 @@ My experiment on a lightweight and high-performance file/object storage service.
9
9
  - Pagination and sorted file listing for vast number of files.
10
10
  - High performance: high concurrency, near-native speed on stress tests.
11
11
  - Support range requests, so you can stream large files / resume download.
12
+ - WebDAV compatible ([NOTE](./docs/Webdav.md)).
12
13
 
13
14
  It stores small files and metadata in sqlite, large files in the filesystem.
14
15
  Tested on 2 million files, and it is still fast.
@@ -30,7 +31,11 @@ lfss-panel --open
30
31
  Or, you can start a web server at `/frontend` and open `index.html` in your browser.
31
32
 
32
33
  The API usage is simple, just `GET`, `PUT`, `DELETE` to the `/<username>/file/url` path.
33
- Authentication via `Authorization` header with the value `Bearer <token>`, or through the `token` query parameter.
34
+ The authentication can be achieved through one of the following methods:
35
+ 1. `Authorization` header with the value `Bearer sha256(<username><password>)`.
36
+ 2. `token` query parameter with the value `sha256(<username><password>)`.
37
+ 3. HTTP Basic Authentication with the username and password.
38
+
34
39
  You can refer to `frontend` as an application example, `lfss/api/connector.py` for more APIs.
35
40
 
36
41
  By default, the service exposes all files to the public for `GET` requests,
@@ -0,0 +1,22 @@
1
+ # WebDAV
2
+
3
+ It is convenient to make LFSS WebDAV compatible, because they both use HTTP `GET`, `PUT`, `DELETE` methods to interact with files.
4
+
5
+ However, WebDAV utilizes more HTTP methods,
6
+ which are disabled by default in LFSS, because they may not be supported by many middlewares or clients.
7
+
8
+ The WebDAV support can be enabled by setting the `LFSS_WEBDAV` environment variable to `1`.
9
+ i.e.
10
+ ```sh
11
+ LFSS_WEBDAV=1 lfss-serve
12
+ ```
13
+ Please note:
14
+ 1. **WebDAV support is experimental, and is currently not well-tested.**
15
+ 2. **LFSS does not allow creating files in the root directory**; however, some clients such as [Finder](https://sabre.io/dav/clients/finder/) will try to create files in the root directory. Thus, it is safer to mount the user directory only, e.g. `http://localhost:8000/<username>/`.
16
+ 3. LFSS does not allow explicit directory creation; instead, it creates a directory implicitly when a file is uploaded to a non-existent directory.
17
+ i.e. `PUT http://localhost:8000/<username>/dir/file.txt` will create the `dir` directory if it does not exist.
18
+ However, the WebDAV `MKCOL` method requires the directory to be created explicitly, so the WebDAV `MKCOL` method instead creates a decoy file on the path (`.lfss-keep`), and hides the file from the file listing returned by the `PROPFIND` method.
19
+ This leads to:
20
+ 1) You may see a `.lfss-keep` file in the directory with native file listing (e.g. `/_api/list-files`), but it is hidden in WebDAV clients.
21
+ 2) The directory may be deleted if there is no file in it and the `.lfss-keep` file is not created by WebDAV client.
22
+
@@ -1,9 +1,9 @@
1
1
  import os, time, pathlib
2
2
  from threading import Lock
3
3
  from .connector import Connector
4
- from ..src.datatype import FileRecord
5
- from ..src.utils import decode_uri_compnents
6
- from ..src.bounded_pool import BoundedThreadPoolExecutor
4
+ from ..eng.datatype import FileRecord
5
+ from ..eng.utils import decode_uri_compnents
6
+ from ..eng.bounded_pool import BoundedThreadPoolExecutor
7
7
 
8
8
  def upload_file(
9
9
  connector: Connector,
@@ -5,12 +5,12 @@ import requests
5
5
  import requests.adapters
6
6
  import urllib.parse
7
7
  from tempfile import SpooledTemporaryFile
8
- from lfss.src.error import PathNotFoundError
9
- from lfss.src.datatype import (
8
+ from lfss.eng.error import PathNotFoundError
9
+ from lfss.eng.datatype import (
10
10
  FileReadPermission, FileRecord, DirectoryRecord, UserRecord, PathContents,
11
11
  FileSortKey, DirSortKey
12
12
  )
13
- from lfss.src.utils import ensure_uri_compnents
13
+ from lfss.eng.utils import ensure_uri_compnents
14
14
 
15
15
  _default_endpoint = os.environ.get('LFSS_ENDPOINT', 'http://localhost:8000')
16
16
  _default_token = os.environ.get('LFSS_TOKEN', '')
@@ -2,14 +2,14 @@
2
2
  Balance the storage by ensuring that large file thresholds are met.
3
3
  """
4
4
 
5
- from lfss.src.config import LARGE_BLOB_DIR, LARGE_FILE_BYTES
5
+ from lfss.eng.config import LARGE_BLOB_DIR, LARGE_FILE_BYTES
6
6
  import argparse, time, itertools
7
7
  from functools import wraps
8
8
  from asyncio import Semaphore
9
9
  import aiofiles, asyncio
10
10
  import aiofiles.os
11
- from lfss.src.database import transaction, unique_cursor
12
- from lfss.src.connection_pool import global_entrance
11
+ from lfss.eng.database import transaction, unique_cursor
12
+ from lfss.eng.connection_pool import global_entrance
13
13
 
14
14
  sem: Semaphore
15
15
 
@@ -1,8 +1,8 @@
1
1
  from pathlib import Path
2
2
  import argparse, typing
3
3
  from lfss.api import Connector, upload_directory, upload_file, download_file, download_directory
4
- from lfss.src.datatype import FileReadPermission, FileSortKey, DirSortKey
5
- from lfss.src.utils import decode_uri_compnents
4
+ from lfss.eng.datatype import FileReadPermission, FileSortKey, DirSortKey
5
+ from lfss.eng.utils import decode_uri_compnents
6
6
  from . import catch_request_error, line_sep
7
7
 
8
8
  def parse_permission(s: str) -> FileReadPermission:
@@ -2,6 +2,7 @@
2
2
  import uvicorn
3
3
  from fastapi import FastAPI
4
4
  from fastapi.staticfiles import StaticFiles
5
+ from fastapi.middleware.cors import CORSMiddleware
5
6
 
6
7
  import argparse
7
8
  from contextlib import asynccontextmanager
@@ -27,6 +28,13 @@ assert (__frontend_dir / "index.html").exists(), "Frontend panel not found"
27
28
 
28
29
  app = FastAPI(lifespan=app_lifespan)
29
30
  app.mount("/", StaticFiles(directory=__frontend_dir, html=True), name="static")
31
+ app.add_middleware(
32
+ CORSMiddleware,
33
+ allow_origins=["*"],
34
+ allow_credentials=True,
35
+ allow_methods=["*"],
36
+ allow_headers=["*"],
37
+ )
30
38
 
31
39
  def main():
32
40
  parser = argparse.ArgumentParser(description="Serve frontend panel")
@@ -1,7 +1,9 @@
1
1
  import argparse
2
2
  from uvicorn import Config, Server
3
3
  from uvicorn.config import LOGGING_CONFIG
4
- from ..src.server import *
4
+ from ..eng.config import DEBUG_MODE
5
+ from ..svc.app_base import logger
6
+ from ..svc.app import app
5
7
 
6
8
  def main():
7
9
  parser = argparse.ArgumentParser()
@@ -19,7 +21,7 @@ def main():
19
21
  app=app,
20
22
  host=args.host,
21
23
  port=args.port,
22
- access_log=False,
24
+ access_log=True if DEBUG_MODE else False,
23
25
  workers=args.workers,
24
26
  log_config=default_logging_config
25
27
  )
@@ -1,10 +1,10 @@
1
1
  import argparse, asyncio, os
2
2
  from contextlib import asynccontextmanager
3
3
  from .cli import parse_permission, FileReadPermission
4
- from ..src.utils import parse_storage_size, fmt_storage_size
5
- from ..src.datatype import AccessLevel
6
- from ..src.database import Database, FileReadPermission, transaction, UserConn, unique_cursor, FileConn
7
- from ..src.connection_pool import global_entrance
4
+ from ..eng.utils import parse_storage_size, fmt_storage_size
5
+ from ..eng.datatype import AccessLevel
6
+ from ..eng.database import Database, FileReadPermission, transaction, UserConn, unique_cursor, FileConn
7
+ from ..eng.connection_pool import global_entrance
8
8
 
9
9
  def parse_access_level(s: str) -> AccessLevel:
10
10
  for p in AccessLevel:
@@ -2,17 +2,17 @@
2
2
  Vacuum the database and external storage to ensure that the storage is consistent and minimal.
3
3
  """
4
4
 
5
- from lfss.src.config import LARGE_BLOB_DIR
5
+ from lfss.eng.config import LARGE_BLOB_DIR
6
6
  import argparse, time
7
7
  from functools import wraps
8
8
  from asyncio import Semaphore
9
9
  import aiofiles, asyncio
10
10
  import aiofiles.os
11
11
  from contextlib import contextmanager
12
- from lfss.src.database import transaction, unique_cursor
13
- from lfss.src.stat import RequestDB
14
- from lfss.src.utils import now_stamp
15
- from lfss.src.connection_pool import global_entrance
12
+ from lfss.eng.database import transaction, unique_cursor
13
+ from lfss.svc.request_log import RequestDB
14
+ from lfss.eng.utils import now_stamp
15
+ from lfss.eng.connection_pool import global_entrance
16
16
 
17
17
  sem: Semaphore
18
18
 
@@ -21,6 +21,7 @@ else:
21
21
  MAX_MEM_FILE_BYTES = 128 * 1024 * 1024 # 128MB
22
22
  MAX_BUNDLE_BYTES = 512 * 1024 * 1024 # 512MB
23
23
  CHUNK_SIZE = 1024 * 1024 # 1MB chunks for streaming (on large files)
24
+ DEBUG_MODE = os.environ.get('LFSS_DEBUG', '0') == '1'
24
25
 
25
26
  THUMB_DB = DATA_HOME / 'thumbs.db'
26
27
  THUMB_SIZE = (48, 48)
@@ -19,7 +19,7 @@ from .datatype import (
19
19
  )
20
20
  from .config import LARGE_BLOB_DIR, CHUNK_SIZE, LARGE_FILE_BYTES, MAX_MEM_FILE_BYTES
21
21
  from .log import get_logger
22
- from .utils import decode_uri_compnents, hash_credential, concurrent_wrap, debounce_async
22
+ from .utils import decode_uri_compnents, hash_credential, concurrent_wrap, debounce_async, copy_file
23
23
  from .error import *
24
24
 
25
25
  class DBObjectBase(ABC):
@@ -405,6 +405,57 @@ class FileConn(DBObjectBase):
405
405
  )
406
406
  await self._user_size_inc(owner_id, file_size)
407
407
  self.logger.info(f"File {url} created")
408
+
409
+ # not tested
410
+ async def copy_file(self, old_url: str, new_url: str, user_id: Optional[int] = None):
411
+ old = await self.get_file_record(old_url)
412
+ if old is None:
413
+ raise FileNotFoundError(f"File {old_url} not found")
414
+ new_exists = await self.get_file_record(new_url)
415
+ if new_exists is not None:
416
+ raise FileExistsError(f"File {new_url} already exists")
417
+ new_fid = str(uuid.uuid4())
418
+ user_id = old.owner_id if user_id is None else user_id
419
+ await self.cur.execute(
420
+ "INSERT INTO fmeta (url, owner_id, file_id, file_size, permission, external, mime_type) VALUES (?, ?, ?, ?, ?, ?, ?)",
421
+ (new_url, user_id, new_fid, old.file_size, old.permission, old.external, old.mime_type)
422
+ )
423
+ if not old.external:
424
+ await self.set_file_blob(new_fid, await self.get_file_blob(old.file_id))
425
+ else:
426
+ await copy_file(LARGE_BLOB_DIR / old.file_id, LARGE_BLOB_DIR / new_fid)
427
+ await self._user_size_inc(user_id, old.file_size)
428
+ self.logger.info(f"Copied file {old_url} to {new_url}")
429
+
430
+ # not tested
431
+ async def copy_path(self, old_url: str, new_url: str, conflict_handler: Literal['skip', 'overwrite'] = 'overwrite', user_id: Optional[int] = None):
432
+ assert old_url.endswith('/'), "Old path must end with /"
433
+ assert new_url.endswith('/'), "New path must end with /"
434
+ if user_id is None:
435
+ cursor = await self.cur.execute("SELECT * FROM fmeta WHERE url LIKE ?", (old_url + '%', ))
436
+ res = await cursor.fetchall()
437
+ else:
438
+ cursor = await self.cur.execute("SELECT * FROM fmeta WHERE url LIKE ? AND owner_id = ?", (old_url + '%', user_id))
439
+ res = await cursor.fetchall()
440
+ for r in res:
441
+ old_record = FileRecord(*r)
442
+ new_r = new_url + old_record.url[len(old_url):]
443
+ if conflict_handler == 'overwrite':
444
+ await self.cur.execute("DELETE FROM fmeta WHERE url = ?", (new_r, ))
445
+ elif conflict_handler == 'skip':
446
+ if (await self.cur.execute("SELECT url FROM fmeta WHERE url = ?", (new_r, ))) is not None:
447
+ continue
448
+ new_fid = str(uuid.uuid4())
449
+ user_id = old_record.owner_id if user_id is None else user_id
450
+ await self.cur.execute(
451
+ "INSERT INTO fmeta (url, owner_id, file_id, file_size, permission, external, mime_type) VALUES (?, ?, ?, ?, ?, ?, ?)",
452
+ (new_r, user_id, new_fid, old_record.file_size, old_record.permission, old_record.external, old_record.mime_type)
453
+ )
454
+ if not old_record.external:
455
+ await self.set_file_blob(new_fid, await self.get_file_blob(old_record.file_id))
456
+ else:
457
+ await copy_file(LARGE_BLOB_DIR / old_record.file_id, LARGE_BLOB_DIR / new_fid)
458
+ await self._user_size_inc(user_id, old_record.file_size)
408
459
 
409
460
  async def move_file(self, old_url: str, new_url: str):
410
461
  old = await self.get_file_record(old_url)
@@ -633,6 +684,9 @@ class Database:
633
684
  async with unique_cursor() as cur:
634
685
  user = await get_user(cur, u)
635
686
  assert user is not None, f"User {u} not found"
687
+
688
+ if await check_path_permission(url, user, cursor=cur) < AccessLevel.WRITE:
689
+ raise PermissionDeniedError(f"Permission denied: {user.username} cannot write to {url}")
636
690
 
637
691
  fconn_r = FileConn(cur)
638
692
  user_size_used = await fconn_r.user_size(user.id)
@@ -734,7 +788,7 @@ class Database:
734
788
  if r is None:
735
789
  raise FileNotFoundError(f"File {old_url} not found")
736
790
  if op_user is not None:
737
- if await check_path_permission(old_url, op_user) < AccessLevel.WRITE:
791
+ if await check_path_permission(old_url, op_user, cursor=cur) < AccessLevel.WRITE:
738
792
  raise PermissionDeniedError(f"Permission denied: {op_user.username} cannot move file {old_url}")
739
793
  await fconn.move_file(old_url, new_url)
740
794
 
@@ -742,6 +796,23 @@ class Database:
742
796
  if not new_mime is None:
743
797
  await fconn.update_file_record(new_url, mime_type=new_mime)
744
798
 
799
+ # not tested
800
+ async def copy_file(self, old_url: str, new_url: str, op_user: Optional[UserRecord] = None):
801
+ validate_url(old_url)
802
+ validate_url(new_url)
803
+
804
+ async with transaction() as cur:
805
+ fconn = FileConn(cur)
806
+ r = await fconn.get_file_record(old_url)
807
+ if r is None:
808
+ raise FileNotFoundError(f"File {old_url} not found")
809
+ if op_user is not None:
810
+ if await check_path_permission(old_url, op_user, cursor=cur) < AccessLevel.READ:
811
+ raise PermissionDeniedError(f"Permission denied: {op_user.username} cannot copy file {old_url}")
812
+ if await check_path_permission(new_url, op_user, cursor=cur) < AccessLevel.WRITE:
813
+ raise PermissionDeniedError(f"Permission denied: {op_user.username} cannot copy file to {new_url}")
814
+ await fconn.copy_file(old_url, new_url, user_id=op_user.id if op_user is not None else None)
815
+
745
816
  async def move_path(self, old_url: str, new_url: str, op_user: UserRecord):
746
817
  validate_url(old_url, is_file=False)
747
818
  validate_url(new_url, is_file=False)
@@ -756,14 +827,38 @@ class Database:
756
827
 
757
828
  async with unique_cursor() as cur:
758
829
  if not (
759
- await check_path_permission(old_url, op_user) >= AccessLevel.WRITE and
760
- await check_path_permission(new_url, op_user) >= AccessLevel.WRITE
830
+ await check_path_permission(old_url, op_user, cursor=cur) >= AccessLevel.WRITE and
831
+ await check_path_permission(new_url, op_user, cursor=cur) >= AccessLevel.WRITE
761
832
  ):
762
833
  raise PermissionDeniedError(f"Permission denied: {op_user.username} cannot move path {old_url} to {new_url}")
763
834
 
764
835
  async with transaction() as cur:
765
836
  fconn = FileConn(cur)
766
837
  await fconn.move_path(old_url, new_url, 'overwrite', op_user.id)
838
+
839
+ # not tested
840
+ async def copy_path(self, old_url: str, new_url: str, op_user: UserRecord):
841
+ validate_url(old_url, is_file=False)
842
+ validate_url(new_url, is_file=False)
843
+
844
+ if new_url.startswith('/'):
845
+ new_url = new_url[1:]
846
+ if old_url.startswith('/'):
847
+ old_url = old_url[1:]
848
+ assert old_url != new_url, "Old and new path must be different"
849
+ assert old_url.endswith('/'), "Old path must end with /"
850
+ assert new_url.endswith('/'), "New path must end with /"
851
+
852
+ async with unique_cursor() as cur:
853
+ if not (
854
+ await check_path_permission(old_url, op_user, cursor=cur) >= AccessLevel.READ and
855
+ await check_path_permission(new_url, op_user, cursor=cur) >= AccessLevel.WRITE
856
+ ):
857
+ raise PermissionDeniedError(f"Permission denied: {op_user.username} cannot copy path {old_url} to {new_url}")
858
+
859
+ async with transaction() as cur:
860
+ fconn = FileConn(cur)
861
+ await fconn.copy_path(old_url, new_url, 'overwrite', op_user.id)
767
862
 
768
863
  async def __batch_delete_file_blobs(self, fconn: FileConn, file_records: list[FileRecord], batch_size: int = 512):
769
864
  # https://github.com/langchain-ai/langchain/issues/10321
@@ -2,6 +2,10 @@ import sqlite3
2
2
 
3
3
  class LFSSExceptionBase(Exception):...
4
4
 
5
+ class FileLockedError(LFSSExceptionBase):...
6
+
7
+ class InvalidOptionsError(LFSSExceptionBase, ValueError):...
8
+
5
9
  class DatabaseLockedError(LFSSExceptionBase, sqlite3.DatabaseError):...
6
10
 
7
11
  class PathNotFoundError(LFSSExceptionBase, FileNotFoundError):...
@@ -1,6 +1,6 @@
1
- from lfss.src.config import THUMB_DB, THUMB_SIZE
2
- from lfss.src.database import FileConn
3
- from lfss.src.connection_pool import unique_cursor
1
+ from lfss.eng.config import THUMB_DB, THUMB_SIZE
2
+ from lfss.eng.database import FileConn
3
+ from lfss.eng.connection_pool import unique_cursor
4
4
  from typing import Optional
5
5
  from PIL import Image
6
6
  from io import BytesIO
@@ -69,12 +69,13 @@ async def get_thumb(path: str) -> Optional[tuple[bytes, str]]:
69
69
  async with unique_cursor() as main_c:
70
70
  fconn = FileConn(main_c)
71
71
  r = await fconn.get_file_record(path)
72
- if r is None:
73
- async with cache_cursor() as cur:
74
- await _delete_cache_thumb(cur, path)
75
- raise FileNotFoundError(f'File not found: {path}')
76
- if not r.mime_type.startswith('image/'):
77
- return None
72
+
73
+ if r is None:
74
+ async with cache_cursor() as cur:
75
+ await _delete_cache_thumb(cur, path)
76
+ raise FileNotFoundError(f'File not found: {path}')
77
+ if not r.mime_type.startswith('image/'):
78
+ return None
78
79
 
79
80
  async with cache_cursor() as cur:
80
81
  c_time = r.create_time
@@ -1,8 +1,10 @@
1
1
  import datetime, time
2
2
  import urllib.parse
3
- import asyncio
3
+ import pathlib
4
4
  import functools
5
5
  import hashlib
6
+ import aiofiles
7
+ import asyncio
6
8
  from asyncio import Lock
7
9
  from collections import OrderedDict
8
10
  from concurrent.futures import ThreadPoolExecutor
@@ -11,6 +13,12 @@ from functools import wraps, partial
11
13
  from uuid import uuid4
12
14
  import os
13
15
 
16
+ async def copy_file(source: str|pathlib.Path, destination: str|pathlib.Path):
17
+ async with aiofiles.open(source, mode='rb') as src:
18
+ async with aiofiles.open(destination, mode='wb') as dest:
19
+ while chunk := await src.read(1024):
20
+ await dest.write(chunk)
21
+
14
22
  def hash_credential(username: str, password: str):
15
23
  return hashlib.sha256((username + password).encode()).hexdigest()
16
24
 
@@ -0,0 +1,9 @@
1
+ from .app_native import *
2
+ import os
3
+
4
+ # order matters
5
+ app.include_router(router_api)
6
+ if os.environ.get("LFSS_WEBDAV", "0") == "1":
7
+ from .app_dav import *
8
+ app.include_router(router_dav)
9
+ app.include_router(router_fs)
@@ -0,0 +1,152 @@
1
+ import asyncio, time
2
+ from contextlib import asynccontextmanager
3
+ from typing import Optional
4
+ from functools import wraps
5
+
6
+ from fastapi import FastAPI, HTTPException, Request, Response, APIRouter, Depends
7
+ from fastapi.middleware.cors import CORSMiddleware
8
+ from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials, HTTPBasic, HTTPBasicCredentials
9
+
10
+ from ..eng.log import get_logger
11
+ from ..eng.datatype import UserRecord
12
+ from ..eng.connection_pool import unique_cursor
13
+ from ..eng.database import Database, UserConn, delayed_log_activity, DECOY_USER
14
+ from ..eng.connection_pool import global_connection_init, global_connection_close
15
+ from ..eng.utils import wait_for_debounce_tasks, now_stamp, hash_credential
16
+ from ..eng.error import *
17
+ from ..eng.config import DEBUG_MODE
18
+ from .request_log import RequestDB
19
+
20
+ logger = get_logger("server", term_level="DEBUG")
21
+ logger_failed_request = get_logger("failed_requests", term_level="INFO")
22
+ db = Database()
23
+ req_conn = RequestDB()
24
+
25
+ @asynccontextmanager
26
+ async def lifespan(app: FastAPI):
27
+ global db
28
+ try:
29
+ await global_connection_init(n_read = 2)
30
+ await asyncio.gather(db.init(), req_conn.init())
31
+ yield
32
+ await req_conn.commit()
33
+ finally:
34
+ await wait_for_debounce_tasks()
35
+ await asyncio.gather(req_conn.close(), global_connection_close())
36
+
37
+ def handle_exception(fn):
38
+ @wraps(fn)
39
+ async def wrapper(*args, **kwargs):
40
+ try:
41
+ return await fn(*args, **kwargs)
42
+ except Exception as e:
43
+ if isinstance(e, HTTPException):
44
+ print(f"HTTPException: {e}, detail: {e.detail}")
45
+ if isinstance(e, HTTPException): raise e
46
+ if isinstance(e, StorageExceededError): raise HTTPException(status_code=413, detail=str(e))
47
+ if isinstance(e, PermissionError): raise HTTPException(status_code=403, detail=str(e))
48
+ if isinstance(e, InvalidPathError): raise HTTPException(status_code=400, detail=str(e))
49
+ if isinstance(e, FileNotFoundError): raise HTTPException(status_code=404, detail=str(e))
50
+ if isinstance(e, FileExistsError): raise HTTPException(status_code=409, detail=str(e))
51
+ if isinstance(e, TooManyItemsError): raise HTTPException(status_code=400, detail=str(e))
52
+ if isinstance(e, DatabaseLockedError): raise HTTPException(status_code=503, detail=str(e))
53
+ if isinstance(e, FileLockedError): raise HTTPException(status_code=423, detail=str(e))
54
+ if isinstance(e, InvalidOptionsError): raise HTTPException(status_code=400, detail=str(e))
55
+ logger.error(f"Uncaptured error in {fn.__name__}: {e}")
56
+ raise
57
+ return wrapper
58
+
59
+ app = FastAPI(docs_url=None, redoc_url=None, lifespan=lifespan)
60
+ app.add_middleware(
61
+ CORSMiddleware,
62
+ allow_origins=["*"],
63
+ allow_credentials=True,
64
+ allow_methods=["*"],
65
+ allow_headers=["*"],
66
+ )
67
+
68
+ @app.middleware("http")
69
+ async def log_requests(request: Request, call_next):
70
+
71
+ request_time_stamp = now_stamp()
72
+ start_time = time.perf_counter()
73
+ response: Response = await call_next(request)
74
+ end_time = time.perf_counter()
75
+ response_time = end_time - start_time
76
+ response.headers["X-Response-Time"] = str(response_time)
77
+
78
+ if response.headers.get("X-Skip-Log", None) is not None:
79
+ return response
80
+
81
+ if response.status_code >= 400:
82
+ logger_failed_request.error(f"{request.method} {request.url.path} \033[91m{response.status_code}\033[0m")
83
+ if DEBUG_MODE:
84
+ print(f"{request.method} {request.url.path} {response.status_code} {response_time:.3f}s")
85
+ print(f"Request headers: {dict(request.headers)}")
86
+ await req_conn.log_request(
87
+ request_time_stamp,
88
+ request.method, request.url.path, response.status_code, response_time,
89
+ headers = dict(request.headers),
90
+ query = dict(request.query_params),
91
+ client = request.client,
92
+ request_size = int(request.headers.get("Content-Length", 0)),
93
+ response_size = int(response.headers.get("Content-Length", 0))
94
+ )
95
+ await req_conn.ensure_commit_once()
96
+ return response
97
+
98
+ def skip_request_log(fn):
99
+ @wraps(fn)
100
+ async def wrapper(*args, **kwargs):
101
+ response = await fn(*args, **kwargs)
102
+ assert isinstance(response, Response), "Response expected"
103
+ response.headers["X-Skip-Log"] = "1"
104
+ return response
105
+ return wrapper
106
+
107
+ async def get_credential_from_params(request: Request):
108
+ return request.query_params.get("token")
109
+ async def get_current_user(
110
+ h_token: Optional[HTTPAuthorizationCredentials] = Depends(HTTPBearer(auto_error=False)),
111
+ b_token: Optional[HTTPBasicCredentials] = Depends(HTTPBasic(auto_error=False)),
112
+ q_token: Optional[str] = Depends(get_credential_from_params)
113
+ ):
114
+ """
115
+ First try to get the user from the bearer token,
116
+ if not found, try to get the user from the query parameter
117
+ """
118
+ async with unique_cursor() as conn:
119
+ uconn = UserConn(conn)
120
+ if h_token:
121
+ user = await uconn.get_user_by_credential(h_token.credentials)
122
+ if not user: raise HTTPException(status_code=401, detail="Invalid token", headers={"WWW-Authenticate": "Basic"})
123
+ elif b_token:
124
+ user = await uconn.get_user_by_credential(hash_credential(b_token.username, b_token.password))
125
+ if not user: raise HTTPException(status_code=401, detail="Invalid token", headers={"WWW-Authenticate": "Basic"})
126
+ elif q_token:
127
+ user = await uconn.get_user_by_credential(q_token)
128
+ if not user: raise HTTPException(status_code=401, detail="Invalid token", headers={"WWW-Authenticate": "Basic"})
129
+ else:
130
+ return DECOY_USER
131
+
132
+ if not user.id == 0:
133
+ await delayed_log_activity(user.username)
134
+
135
+ return user
136
+
137
+ async def registered_user(user: UserRecord = Depends(get_current_user)):
138
+ if user.id == 0:
139
+ raise HTTPException(status_code=401, detail="Permission denied", headers={"WWW-Authenticate": "Basic"})
140
+ return user
141
+
142
+
143
+ router_api = APIRouter(prefix="/_api")
144
+ router_dav = APIRouter(prefix="")
145
+ router_fs = APIRouter(prefix="")
146
+
147
+ __all__ = [
148
+ "app", "db", "logger",
149
+ "handle_exception", "skip_request_log",
150
+ "router_api", "router_fs", "router_dav",
151
+ "get_current_user", "registered_user"
152
+ ]