lfss 0.11.2__py3-none-any.whl → 0.11.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
docs/Enviroment_variables.md CHANGED
@@ -4,8 +4,10 @@
  **Server**
  - `LFSS_DATA`: The directory to store the data. Default is `.storage_data`.
  - `LFSS_WEBDAV`: Enable WebDAV support. Default is `0`, set to `1` to enable.
- - `LFSS_LARGE_FILE`: The size limit of the file to store in the database. Default is `8m`.
+ - `LFSS_LARGE_FILE`: The size limit of the file to store in the database. Default is `1m`.
  - `LFSS_DEBUG`: Enable debug mode for more verbose logging. Default is `0`, set to `1` to enable.
+ - `LFSS_DISABLE_LOGGING`: Disable all file logging. Default is `0`; set to `1` to disable file logging.
+ - `LFSS_ORIGIN`: The `Origin` header to allow CORS requests. Use `,` to separate multiple origins. Default is `*`.

  **Client**
  - `LFSS_ENDPOINT`: The fallback server endpoint. Default is `http://localhost:8000`.
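
These are plain environment variables read at import time. A minimal sketch of setting them from Python before launching the server; the `lfss.svc.app:app` import string and the uvicorn runner are assumptions based on the package layout above, not something this diff shows:

```python
# Hypothetical launch script. Values must be in the environment before
# lfss.eng.config is imported, since that module reads them at import time;
# passing an import string to uvicorn defers the import until run().
import os

os.environ["LFSS_DATA"] = "/var/lib/lfss"   # storage directory
os.environ["LFSS_LARGE_FILE"] = "1m"        # files above this size are kept outside the DB
os.environ["LFSS_DEBUG"] = "1"              # verbose logging

import uvicorn
uvicorn.run("lfss.svc.app:app", host="127.0.0.1", port=8000)
```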
frontend/api.js CHANGED
@@ -69,6 +69,10 @@ export default class Connector {
      /**
       * @param {string} path - the path to the file (url)
       * @param {File} file - the file to upload
+      * @param {Object} [options] - Optional upload configuration.
+      * @param {'abort' | 'overwrite' | 'skip'} [options.conflict='abort'] - Conflict resolution strategy:
+      *     `'abort'` to cancel and raise 409, `'overwrite'` to replace, `'skip'` to ignore if already exists.
+      * @param {number} [options.permission=0] - Optional permission setting for the file (refer to backend impl).
       * @returns {Promise<string>} - the promise of the request, the url of the file
       */
      async put(path, file, {
@@ -96,8 +100,12 @@ export default class Connector {
      }

      /**
-      * @param {string} path - the path to the file (url)
+      * @param {string} path - the path to the file (url), should end with .json
       * @param {File} file - the file to upload
+      * @param {Object} [options] - Optional upload configuration.
+      * @param {'abort' | 'overwrite' | 'skip'} [options.conflict='abort'] - Conflict resolution strategy:
+      *     `'abort'` to cancel and raise 409, `'overwrite'` to replace, `'skip'` to ignore if already exists.
+      * @param {number} [options.permission=0] - Optional permission setting for the file (refer to backend impl).
       * @returns {Promise<string>} - the promise of the request, the url of the file
       */
      async post(path, file, {
@@ -129,13 +137,23 @@ export default class Connector {

      /**
       * @param {string} path - the path to the file (url), should end with .json
-      * @param {Objec} data - the data to upload
+      * @param {Object} data - the data to upload
+      * @param {Object} [options] - Optional upload configuration.
+      * @param {'abort' | 'overwrite' | 'skip'} [options.conflict='abort'] - Conflict resolution strategy:
+      *     `'abort'` to cancel and raise 409, `'overwrite'` to replace, `'skip'` to ignore if already exists.
+      * @param {number} [options.permission=0] - Optional permission setting for the file (refer to backend impl).
       * @returns {Promise<string>} - the promise of the request, the url of the file
       */
-     async putJson(path, data){
+     async putJson(path, data, {
+         conflict = "overwrite",
+         permission = 0
+     } = {}){
          if (!path.endsWith('.json')){ throw new Error('Upload object must end with .json'); }
          if (path.startsWith('/')){ path = path.slice(1); }
-         const res = await fetch(this.config.endpoint + '/' + path, {
+         const dst = new URL(this.config.endpoint + '/' + path);
+         dst.searchParams.append('conflict', conflict);
+         dst.searchParams.append('permission', permission);
+         const res = await fetch(dst.toString(), {
              method: 'PUT',
              headers: {
                  'Authorization': 'Bearer ' + this.config.token,
@@ -149,6 +167,50 @@ export default class Connector {
          return (await res.json()).url;
      }

+     /**
+      * @param {string} path - the path to the file (url), should have content type application/json
+      * @returns {Promise<Object>} - return the json object
+      */
+     async getJson(path){
+         if (path.startsWith('/')){ path = path.slice(1); }
+         const res = await fetch(this.config.endpoint + '/' + path, {
+             method: 'GET',
+             headers: {
+                 "Authorization": 'Bearer ' + this.config.token
+             },
+         });
+         if (res.status != 200){
+             throw new Error(`Failed to get object, status code: ${res.status}, message: ${await fmtFailedResponse(res)}`);
+         }
+         return await res.json();
+     }
+
+     /**
+      * @param {string[]} paths - the paths to the files (url), should have content type text/plain, application/json, etc.
+      * @param {Object} [options] - Optional configuration.
+      * @param {boolean} [options.skipContent=false] - If true, skips fetching content and returns a record of <path, ''>.
+      * @returns {Promise<Record<string, string | null>>} - the mapping of path to text content; non-existing paths map to null
+      */
+     async getMultipleText(paths, {
+         skipContent = false
+     } = {}){
+         const url = new URL(this.config.endpoint + '/_api/get-multiple');
+         url.searchParams.append('skip_content', skipContent);
+         for (const path of paths){
+             url.searchParams.append('path', path);
+         }
+         const res = await fetch(url.toString(), {
+             method: 'GET',
+             headers: {
+                 "Authorization": 'Bearer ' + this.config.token,
+             }
+         });
+         if (res.status != 200 && res.status != 206){
+             throw new Error(`Failed to get multiple files, status code: ${res.status}, message: ${await fmtFailedResponse(res)}`);
+         }
+         return await res.json();
+     }
+
      async delete(path){
          if (path.startsWith('/')){ path = path.slice(1); }
          const res = await fetch(this.config.endpoint + '/' + path, {
lfss/api/connector.py CHANGED
@@ -1,7 +1,7 @@
  from __future__ import annotations
  from typing import Optional, Literal
  from collections.abc import Iterator
- import os
+ import os, json
  import requests
  import requests.adapters
  import urllib.parse
@@ -76,7 +76,11 @@ class Connector:
              path = path[1:]
          path = ensure_uri_compnents(path)
          def f(**kwargs):
-             url = f"{self.config['endpoint']}/{path}" + "?" + urllib.parse.urlencode(search_params)
+             search_params_t = [
+                 (k, str(v).lower() if isinstance(v, bool) else v)
+                 for k, v in search_params.items()
+             ] # tuple form
+             url = f"{self.config['endpoint']}/{path}" + "?" + urllib.parse.urlencode(search_params_t, doseq=True)
              headers: dict = kwargs.pop('headers', {})
              headers.update({
                  'Authorization': f"Bearer {self.config['token']}",
@@ -207,6 +211,17 @@ class Connector:
          assert response.headers['Content-Type'] == 'application/json'
          return response.json()

+     def get_multiple_text(self, *paths: str, skip_content = False) -> dict[str, Optional[str]]:
+         """
+         Gets text contents of multiple files at once. Non-existing files will return None.
+         - skip_content: if True, the file contents will not be fetched and every value will be the empty string ''.
+         """
+         response = self._fetch_factory(
+             'GET', '_api/get-multiple',
+             {'path': paths, "skip_content": skip_content}
+         )()
+         return response.json()
+
      def delete(self, path: str):
          """Deletes the file at the specified path."""
          self._fetch_factory('DELETE', path)()
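
A quick usage sketch of the new client method; the paths are placeholders, and the connector is assumed to pick up its endpoint and token from the environment as documented above. Note that the tuple conversion in `_fetch_factory` lower-cases booleans, so `skip_content=False` reaches the query string as `false`, and `doseq=True` lets the `paths` tuple expand into repeated `path=` parameters:

```python
from lfss.api import Connector  # same import the CLI uses below

conn = Connector()  # assumes endpoint/token are configured elsewhere
texts = conn.get_multiple_text("alice/notes.txt", "alice/config.json")  # hypothetical paths
for path, content in texts.items():
    # Missing files come back as None rather than raising.
    print(path, "->", "missing" if content is None else f"{len(content)} chars")
```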
lfss/cli/cli.py CHANGED
@@ -1,5 +1,5 @@
  from pathlib import Path
- import argparse, typing
+ import argparse, typing, sys
  from lfss.api import Connector, upload_directory, upload_file, download_file, download_directory
  from lfss.eng.datatype import FileReadPermission, FileSortKey, DirSortKey
  from lfss.eng.utils import decode_uri_compnents
@@ -78,9 +78,9 @@ def main():
                  permission=args.permission
              )
              if failed_upload:
-                 print("\033[91mFailed to upload:\033[0m")
+                 print("\033[91mFailed to upload:\033[0m", file=sys.stderr)
                  for path in failed_upload:
-                     print(f" {path}")
+                     print(f" {path}", file=sys.stderr)
          else:
              success, msg = upload_file(
                  connector,
@@ -93,7 +93,7 @@ def main():
                  permission=args.permission
              )
              if not success:
-                 print("\033[91mFailed to upload: \033[0m", msg)
+                 print("\033[91mFailed to upload: \033[0m", msg, file=sys.stderr)

      elif args.command == "download":
          is_dir = args.src.endswith("/")
@@ -107,9 +107,9 @@ def main():
                  overwrite=args.overwrite
              )
              if failed_download:
-                 print("\033[91mFailed to download:\033[0m")
+                 print("\033[91mFailed to download:\033[0m", file=sys.stderr)
                  for path in failed_download:
-                     print(f" {path}")
+                     print(f" {path}", file=sys.stderr)
          else:
              success, msg = download_file(
                  connector,
@@ -121,7 +121,7 @@ def main():
                  overwrite=args.overwrite
              )
              if not success:
-                 print("\033[91mFailed to download: \033[0m", msg)
+                 print("\033[91mFailed to download: \033[0m", msg, file=sys.stderr)

      elif args.command == "query":
          for path in args.path:
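
The point of routing failure messages to `sys.stderr` is that scripted callers can keep the two streams apart. A small sketch; the `lfss` command name and the paths are hypothetical:

```python
import subprocess

# With failures on stderr, captured stdout stays clean for scripting.
proc = subprocess.run(
    ["lfss", "download", "user/missing.txt", "./out.txt"],  # hypothetical invocation
    capture_output=True, text=True,
)
print("stdout:", repr(proc.stdout))  # normal output only
print("stderr:", repr(proc.stderr))  # "Failed to download: ..." ends up here
```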
lfss/eng/config.py CHANGED
@@ -13,6 +13,8 @@ LARGE_BLOB_DIR = DATA_HOME / 'large_blobs'
  LARGE_BLOB_DIR.mkdir(exist_ok=True)
  LOG_DIR = DATA_HOME / 'logs'

+ DISABLE_LOGGING = os.environ.get('DISABLE_LOGGING', '0') == '1'
+
  # https://sqlite.org/fasterthanfs.html
  __env_large_file = os.environ.get('LFSS_LARGE_FILE', None)
  if __env_large_file is not None:
@@ -24,4 +26,4 @@ CHUNK_SIZE = 1024 * 1024 # 1MB chunks for streaming (on large files)
  DEBUG_MODE = os.environ.get('LFSS_DEBUG', '0') == '1'

  THUMB_DB = DATA_HOME / 'thumbs.v0-11.db'
- THUMB_SIZE = (48, 48)
+ THUMB_SIZE = (64, 64)
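
`LFSS_LARGE_FILE` takes a size string such as `1m` (see the docs hunk above). The actual parser is not part of this diff; a minimal sketch of the assumed suffix handling, with binary multiples in line with the 1MB `CHUNK_SIZE` convention:

```python
# Assumed semantics: bare numbers are bytes; k/m/g suffixes are binary multiples.
def parse_size(s: str) -> int:
    s = s.strip().lower()
    units = {"k": 1024, "m": 1024**2, "g": 1024**3}
    if s and s[-1] in units:
        return int(float(s[:-1]) * units[s[-1]])
    return int(s)

assert parse_size("1m") == 1024 * 1024
```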
lfss/eng/database.py CHANGED
@@ -210,6 +210,10 @@ class FileConn(DBObjectBase):
          return self.parse_record(res)

      async def get_file_records(self, urls: list[str]) -> list[FileRecord]:
+         """
+         Get all file records for the given urls.
+         Urls not present in the database are ignored, so only existing records are returned.
+         """
          await self.cur.execute("SELECT * FROM fmeta WHERE url IN ({})".format(','.join(['?'] * len(urls))), urls)
          res = await self.cur.fetchall()
          if res is None:
@@ -412,6 +416,10 @@ class FileConn(DBObjectBase):
          self.logger.info(f"File {url} created")

      async def copy_file(self, old_url: str, new_url: str, user_id: Optional[int] = None):
+         """
+         Copy the file at old_url to new_url.
+         If user_id is None, the owner_id is kept; otherwise the copy's owner_id is set to user_id.
+         """
          old = await self.get_file_record(old_url)
          if old is None:
              raise FileNotFoundError(f"File {old_url} not found")
@@ -428,14 +436,14 @@ class FileConn(DBObjectBase):
          self.logger.info(f"Copied file {old_url} to {new_url}")

      async def copy_dir(self, old_url: str, new_url: str, user_id: Optional[int] = None):
+         """
+         Copy all files under old_url to new_url.
+         If user_id is None, the owner_id of the files is kept; otherwise the owner_id is set to user_id.
+         """
          assert old_url.endswith('/'), "Old path must end with /"
          assert new_url.endswith('/'), "New path must end with /"
-         if user_id is None:
-             cursor = await self.cur.execute("SELECT * FROM fmeta WHERE url LIKE ?", (old_url + '%', ))
-             res = await cursor.fetchall()
-         else:
-             cursor = await self.cur.execute("SELECT * FROM fmeta WHERE url LIKE ? AND owner_id = ?", (old_url + '%', user_id))
-             res = await cursor.fetchall()
+         cursor = await self.cur.execute("SELECT * FROM fmeta WHERE url LIKE ?", (old_url + '%', ))
+         res = await cursor.fetchall()
          for r in res:
              old_record = FileRecord(*r)
              new_r = new_url + old_record.url[len(old_url):]
@@ -826,6 +834,58 @@ class Database:
              yield blob
          ret = blob_stream()
          return ret
+
+     async def read_files_bulk(
+         self, urls: list[str],
+         skip_content = False,
+         op_user: Optional[UserRecord] = None,
+     ) -> dict[str, Optional[bytes]]:
+         """
+         A frequent use case is to read multiple files at once;
+         this method reads all files in the list and returns a dict of url -> blob.
+         If a file is not found, its value will be None.
+         - skip_content: if True, the content of the files is not read, resulting in a dict of url -> b''
+
+         May raise StorageExceededError if the total size of the files exceeds MAX_MEM_FILE_BYTES.
+         """
+         for url in urls:
+             validate_url(url)
+
+         async with unique_cursor() as cur:
+             fconn = FileConn(cur)
+             file_records = await fconn.get_file_records(urls)
+
+             if op_user is not None:
+                 for r in file_records:
+                     if await check_path_permission(r.url, op_user, cursor=cur) >= AccessLevel.READ:
+                         continue
+                     is_allowed, reason = await check_file_read_permission(op_user, r, cursor=cur)
+                     if not is_allowed:
+                         raise PermissionDeniedError(f"Permission denied: {op_user.username} cannot read file {r.url}: {reason}")
+
+         # first check if the files are too big
+         sum_size = sum([r.file_size for r in file_records])
+         if not skip_content and sum_size > MAX_MEM_FILE_BYTES:
+             raise StorageExceededError(f"Unable to read files at once, total size {sum_size} exceeds {MAX_MEM_FILE_BYTES}")

+         self.logger.debug(f"Reading {len(file_records)} files{' (skip content)' if skip_content else ''}, getting {sum_size} bytes, from {urls}")
+         # read the file content
+         async with unique_cursor() as cur:
+             fconn = FileConn(cur)
+             blobs: dict[str, bytes] = {}
+             for r in file_records:
+                 if skip_content:
+                     blobs[r.url] = b''
+                     continue
+
+                 if r.external:
+                     blob_iter = fconn.get_file_blob_external(r.file_id)
+                     blob = b''.join([chunk async for chunk in blob_iter])
+                 else:
+                     blob = await fconn.get_file_blob(r.file_id)
+                 blobs[r.url] = blob
+
+         return {url: blobs.get(url, None) for url in urls}

      async def delete_file(self, url: str, op_user: Optional[UserRecord] = None) -> Optional[FileRecord]:
          validate_url(url)
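
A sketch of how a caller might drive `read_files_bulk`; the `db` and `user` objects are assumed to come from the service layer, and the URLs are hypothetical:

```python
async def fetch_texts(db, user) -> dict[str, str]:
    blobs = await db.read_files_bulk(
        ["alice/a.json", "alice/b.txt"],  # hypothetical URLs
        skip_content=False,
        op_user=user,
    )
    # Missing files come back as None; everything else is an in-memory blob.
    return {url: blob.decode("utf-8") for url, blob in blobs.items() if blob is not None}
```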
lfss/eng/log.py CHANGED
@@ -1,9 +1,9 @@
- from .config import LOG_DIR
+ from .config import LOG_DIR, DISABLE_LOGGING
  import time, sqlite3, dataclasses
  from typing import TypeVar, Callable, Literal, Optional
  from concurrent.futures import ThreadPoolExecutor
  from functools import wraps
- import logging, pathlib, asyncio
+ import logging, asyncio
  from logging import handlers

  class BCOLORS:
@@ -154,24 +154,25 @@ def get_logger(
          if isinstance(color, str) and color.startswith('\033'):
              format_str_plain = format_str_plain.replace(color, '')

-         formatter_plain = logging.Formatter(format_str_plain)
-         log_home.mkdir(exist_ok=True)
-         log_file = log_home / f'{name}.log'
-         if file_handler_type == 'simple':
-             file_handler = logging.FileHandler(log_file)
-         elif file_handler_type == 'daily':
-             file_handler = handlers.TimedRotatingFileHandler(
-                 log_file, when='midnight', interval=1, backupCount=30
-             )
-         elif file_handler_type == 'rotate':
-             file_handler = handlers.RotatingFileHandler(
-                 log_file, maxBytes=1024*1024, backupCount=5
-             )
-         elif file_handler_type == 'sqlite':
-             file_handler = SQLiteFileHandler(log_file if log_file.suffix == '.db' else log_file.with_suffix('.log.db'))
-
-         file_handler.setFormatter(formatter_plain)
-         logger.addHandler(file_handler)
+         if not DISABLE_LOGGING:
+             formatter_plain = logging.Formatter(format_str_plain)
+             log_home.mkdir(exist_ok=True)
+             log_file = log_home / f'{name}.log'
+             if file_handler_type == 'simple':
+                 file_handler = logging.FileHandler(log_file)
+             elif file_handler_type == 'daily':
+                 file_handler = handlers.TimedRotatingFileHandler(
+                     log_file, when='midnight', interval=1, backupCount=30
+                 )
+             elif file_handler_type == 'rotate':
+                 file_handler = handlers.RotatingFileHandler(
+                     log_file, maxBytes=1024*1024, backupCount=5
+                 )
+             elif file_handler_type == 'sqlite':
+                 file_handler = SQLiteFileHandler(log_file if log_file.suffix == '.db' else log_file.with_suffix('.log.db'))
+
+             file_handler.setFormatter(formatter_plain)
+             logger.addHandler(file_handler)

      logger = BaseLogger(name)
      setupLogger(logger)
lfss/svc/app_base.py CHANGED
@@ -60,10 +60,13 @@ def handle_exception(fn):
          raise
      return wrapper

+ env_origins = os.environ.get("LFSS_ORIGINS", "*")
+ logger.debug(f"LFSS_ORIGINS: {env_origins}")
+ origins = [x.strip() for x in env_origins.split(",") if x.strip()]
  app = FastAPI(docs_url=None, redoc_url=None, lifespan=lifespan)
  app.add_middleware(
      CORSMiddleware,
-     allow_origins=["*"],
+     allow_origins=origins,
      allow_credentials=True,
      allow_methods=["*"],
      allow_headers=["*"],
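
The parsing rule is small enough to show standalone: split on commas, trim whitespace, drop empty entries. A wildcard `*` simply survives as a one-element list:

```python
def parse_origins(env_origins: str) -> list[str]:
    # Same expression as in the hunk above, wrapped for illustration.
    return [x.strip() for x in env_origins.split(",") if x.strip()]

assert parse_origins("*") == ["*"]
assert parse_origins("https://a.example, https://b.example,") == [
    "https://a.example", "https://b.example",
]
```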
lfss/svc/app_native.py CHANGED
@@ -1,17 +1,19 @@
- from typing import Optional, Literal
+ from typing import Optional, Literal, Annotated
+ from collections import OrderedDict

- from fastapi import Depends, Request, Response, UploadFile
- from fastapi.responses import StreamingResponse
+ from fastapi import Depends, Request, Response, UploadFile, Query
+ from fastapi.responses import StreamingResponse, JSONResponse
  from fastapi.exceptions import HTTPException

  from ..eng.utils import ensure_uri_compnents
  from ..eng.config import MAX_MEM_FILE_BYTES
  from ..eng.connection_pool import unique_cursor
- from ..eng.database import check_file_read_permission, check_path_permission, UserConn, FileConn
+ from ..eng.database import check_file_read_permission, check_path_permission, FileConn, delayed_log_access
  from ..eng.datatype import (
      FileReadPermission, UserRecord, AccessLevel,
      FileSortKey, DirSortKey
  )
+ from ..eng.error import InvalidPathError

  from .app_base import *
  from .common_impl import get_impl, put_file_impl, post_file_impl, delete_impl, copy_impl
@@ -189,6 +191,7 @@ async def validate_path_read_permission(path: str, user: UserRecord):
      if not await check_path_permission(path, user) >= AccessLevel.READ:
          raise HTTPException(status_code=403, detail="Permission denied")
  @router_api.get("/count-files")
+ @handle_exception
  async def count_files(path: str, flat: bool = False, user: UserRecord = Depends(registered_user)):
      await validate_path_read_permission(path, user)
      path = ensure_uri_compnents(path)
@@ -196,6 +199,7 @@ async def count_files(path: str, flat: bool = False, user: UserRecord = Depends(
          fconn = FileConn(conn)
          return { "count": await fconn.count_dir_files(url = path, flat = flat) }
  @router_api.get("/list-files")
+ @handle_exception
  async def list_files(
      path: str, offset: int = 0, limit: int = 1000,
      order_by: FileSortKey = "", order_desc: bool = False,
@@ -212,6 +216,7 @@ async def list_files(
      )

  @router_api.get("/count-dirs")
+ @handle_exception
  async def count_dirs(path: str, user: UserRecord = Depends(registered_user)):
      await validate_path_read_permission(path, user)
      path = ensure_uri_compnents(path)
@@ -219,6 +224,7 @@ async def count_dirs(path: str, user: UserRecord = Depends(registered_user)):
          fconn = FileConn(conn)
          return { "count": await fconn.count_path_dirs(url = path) }
  @router_api.get("/list-dirs")
+ @handle_exception
  async def list_dirs(
      path: str, offset: int = 0, limit: int = 1000,
      order_by: DirSortKey = "", order_desc: bool = False,
@@ -232,6 +238,47 @@ async def list_dirs(
          url = path, offset = offset, limit = limit,
          order_by=order_by, order_desc=order_desc, skim=skim
      )
+
+ # https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#query-parameter-list-multiple-values
+ @router_api.get("/get-multiple")
+ @handle_exception
+ async def get_multiple_files(
+     path: Annotated[list[str], Query()],
+     skip_content: bool = False,
+     user: UserRecord = Depends(registered_user)
+ ):
+     """
+     Get multiple files by path.
+     Please note that the content is supposed to be text and small enough to fit in memory.
+
+     Non-existing files will have content null, and the response will be 206 Partial Content if not all files are found.
+     If skip_content is True, the content of the files will always be ''.
+     """
+     for p in path:
+         if p.endswith("/"):
+             raise InvalidPathError(f"Path '{p}' must not end with /")
+
+     # here we unify the paths, so we need to keep a record of the inputs
+     # to make the output keys consistent with the inputs
+     upath2path = OrderedDict[str, str]()
+     for p in path:
+         p_ = p if not p.startswith("/") else p[1:]
+         upath2path[ensure_uri_compnents(p_)] = p
+     upaths = list(upath2path.keys())
+
+     # get files
+     raw_res = await db.read_files_bulk(upaths, skip_content=skip_content, op_user=user)
+     for k in raw_res.keys():
+         await delayed_log_access(k)
+     partial_content = len(raw_res) != len(upaths)
+
+     return JSONResponse(
+         content = {
+             upath2path[k]: v.decode('utf-8') if v is not None else None for k, v in raw_res.items()
+         },
+         status_code = 206 if partial_content else 200
+     )
+

  @router_api.get("/whoami")
  @handle_exception
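
What a raw call to the new endpoint looks like from the client side; a sketch using `requests`, with the endpoint, token, and paths as placeholders. Repeating the `path` query parameter is how FastAPI collects the list:

```python
import requests

res = requests.get(
    "http://localhost:8000/_api/get-multiple",
    params=[("path", "alice/a.json"), ("path", "alice/missing.txt"),
            ("skip_content", "false")],
    headers={"Authorization": "Bearer <token>"},
)
assert res.status_code in (200, 206)  # 206 signals partial content
print(res.json())  # e.g. {"alice/a.json": "...", "alice/missing.txt": null}
```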
lfss-0.11.2.dist-info/METADATA → lfss-0.11.3.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: lfss
- Version: 0.11.2
+ Version: 0.11.3
  Summary: Lightweight file storage service
  Home-page: https://github.com/MenxLi/lfss
  Author: Li, Mengxun
@@ -10,7 +10,7 @@ Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
- Requires-Dist: aiofiles (==23.*)
+ Requires-Dist: aiofiles (==24.*)
  Requires-Dist: aiosqlite (==0.*)
  Requires-Dist: fastapi (==0.*)
  Requires-Dist: mimesniff (==1.*)
lfss-0.11.2.dist-info/RECORD → lfss-0.11.3.dist-info/RECORD
@@ -1,10 +1,10 @@
  Readme.md,sha256=B-foESzFWoSI5MEd89AWUzKcVRrTwipM28TK8GN0o8c,1920
- docs/Enviroment_variables.md,sha256=xaL8qBwT8B2Qe11FaOU3xWrRCh1mJ1VyTFCeFbkd0rs,570
+ docs/Enviroment_variables.md,sha256=CZ5DrrXSLU5RLBEVQ-gLMaOIuFthd7dEiTzO7ODrPRQ,788
  docs/Known_issues.md,sha256=ZqETcWP8lzTOel9b2mxEgCnADFF8IxOrEtiVO1NoMAk,251
  docs/Permission.md,sha256=thUJx7YRoU63Pb-eqo5l5450DrZN3QYZ36GCn8r66no,3152
  docs/Webdav.md,sha256=-Ja-BTWSY1BEMAyZycvEMNnkNTPZ49gSPzmf3Lbib70,1547
  docs/changelog.md,sha256=fE0rE2IcovbxMhdTeqhnCnknT1vtVr7A860zIh7AEnE,1581
- frontend/api.js,sha256=GlQsNoZFEcy7QUUsLbXv7aP-KxRnIxM37FQHTaakGiQ,19387
+ frontend/api.js,sha256=F35jQjWF2LITkuO-wZJuEKyafLWFx_M4C2tEYJV8zak,22631
  frontend/index.html,sha256=-k0bJ5FRqdl_H-O441D_H9E-iejgRCaL_z5UeYaS2qc,3384
  frontend/info.css,sha256=Ny0N3GywQ3a9q1_Qph_QFEKB4fEnTe_2DJ1Y5OsLLmQ,595
  frontend/info.js,sha256=xGUJPCSrtDhuSu0ELLQZ77PmVWldg-prU1mwQGbdEoA,5797
@@ -19,10 +19,10 @@ frontend/thumb.css,sha256=rNsx766amYS2DajSQNabhpQ92gdTpNoQKmV69OKvtpI,295
  frontend/thumb.js,sha256=46ViD2TlTTWy0fx6wjoAs_5CQ4ajYB90vVzM7UO2IHw,6182
  frontend/utils.js,sha256=XP5hM_mROYaxK5dqn9qZVwv7GdQuiDzByilFskbrnxA,6068
  lfss/api/__init__.py,sha256=zT1JCiUM76wX-GtRrmKhTUzSYYfcmoyI1vYwN0fCcLw,6818
- lfss/api/connector.py,sha256=xl_WrvupplepZSYJs4pN9zN7GDnuZR2A8-pc08ILutI,13231
+ lfss/api/connector.py,sha256=o0_Ws1cmDJdM5YFKy5EhwStU9nW9a05CrjVYDex0Hmo,13931
  lfss/cli/__init__.py,sha256=lPwPmqpa7EXQ4zlU7E7LOe6X2kw_xATGdwoHphUEirA,827
  lfss/cli/balance.py,sha256=fUbKKAUyaDn74f7mmxMfBL4Q4voyBLHu6Lg_g8GfMOQ,4121
- lfss/cli/cli.py,sha256=tPeUgj0BR_M649AGcBYwfsrGioes0qzGc0lghFkrjoo,8086
+ lfss/cli/cli.py,sha256=QLItJBjCJv6mWMUp5T6M0tUBuBzWr8yxoqn6V55Mb7I,8193
  lfss/cli/log.py,sha256=TBlt8mhHMouv8ZBUMHYfGZiV6-0yPdajJQ5mkGHEojI,3016
  lfss/cli/panel.py,sha256=Xq3I_n-ctveym-Gh9LaUpzHiLlvt3a_nuDiwUS-MGrg,1597
  lfss/cli/serve.py,sha256=vTo6_BiD7Dn3VLvHsC5RKRBC3lMu45JVr_0SqpgHdj0,1086
@@ -30,23 +30,23 @@ lfss/cli/user.py,sha256=1mTroQbaKxHjFCPHT67xwd08v-zxH0RZ_OnVc-4MzL0,5364
  lfss/cli/vacuum.py,sha256=arEY89kYJKEpzuzjKtf21V7s0QzM1t3QWa1hNghhT0Q,6611
  lfss/eng/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  lfss/eng/bounded_pool.py,sha256=BI1dU-MBf82TMwJBYbjhEty7w1jIUKc5Bn9SnZ_-hoY,1288
- lfss/eng/config.py,sha256=vP-0h_9TkAfu5626KjowHjCgX-CnVGZajw3sxBs5jtU,902
+ lfss/eng/config.py,sha256=0dncHYn3tYw4pKBwXuP_huz0u7ud23fJ6SUmUPfLmeM,967
  lfss/eng/connection_pool.py,sha256=1aq7nSgd7hB9YNV4PjD1RDRyl_moDw3ubBtSLyfgGBs,6320
- lfss/eng/database.py,sha256=huYSOvTO5jES9wVl6Zity2XzNXyJBSQQwuCQHrEVf-Q,53255
+ lfss/eng/database.py,sha256=c-hMyPmF6G-_fPkaR53KVNKyKCiPYLcOOekXlK8vIBE,56014
  lfss/eng/datatype.py,sha256=27UB7-l9SICy5lAvKjdzpTL_GohZjzstQcr9PtAq7nM,2709
  lfss/eng/error.py,sha256=JGf5NV-f4rL6tNIDSAx5-l9MG8dEj7F2w_MuOjj1d1o,732
- lfss/eng/log.py,sha256=jJKOnC64Lb5EoVJK_oi7vl4iRrH_gtCKM_zjHiIUA-4,7590
+ lfss/eng/log.py,sha256=yciFQ7Utz1AItNekS4YtdP6bM7i1krA6qSAU2wVQv24,7698
  lfss/eng/thumb.py,sha256=AFyWEkkpuCKGWOB9bLlaDwPKzQ9JtCSSmHMhX2Gu3CI,3096
  lfss/eng/utils.py,sha256=jQUJWWmzOPmXdTCId2Y307m1cZfB4hpzHcTjO0mkOrU,6683
  lfss/sql/init.sql,sha256=FBmVzkNjYUnWjEELRFzf7xb50GngmzmeDVffT1Uk8u8,1625
  lfss/sql/pragma.sql,sha256=uENx7xXjARmro-A3XAK8OM8v5AxDMdCCRj47f86UuXg,206
  lfss/svc/app.py,sha256=r1KUO3sPaaJWbkJF0bcVTD7arPKLs2jFlq52Ixicomo,220
- lfss/svc/app_base.py,sha256=bTQbz945xalyB3UZLlqVBvL6JKGNQ8Fm2KpIvvucPZQ,6850
+ lfss/svc/app_base.py,sha256=s5ieQVI4BT0CBYavRx0dyBqwts7PYnjyCovHNYPHul8,7014
  lfss/svc/app_dav.py,sha256=H3aL3MEdYaPK1w3FQvTzrGYGaaow4m8LZ7R35MN351A,18238
- lfss/svc/app_native.py,sha256=_dhcq_R1VoafRCLuuWxXuttuhBAVaFVdlIQ6ep6ZQvs,8883
+ lfss/svc/app_native.py,sha256=zQM9o6HKtMVpq2WDZ77oc8tRnBdUdTkYHeV92NonGoo,10601
  lfss/svc/common_impl.py,sha256=7QflWnxRqghLOSMpDz2UCRqEn49X1GLS3agCb5msia8,13729
  lfss/svc/request_log.py,sha256=v8yXEIzPjaksu76Oh5vgdbUEUrw8Kt4etLAXBWSGie8,3207
- lfss-0.11.2.dist-info/METADATA,sha256=__YXS_WBv6oNQlzcamUPEWayjek6bVsF4zRoGR0iJb8,2732
- lfss-0.11.2.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- lfss-0.11.2.dist-info/entry_points.txt,sha256=R4uOP1y6eD0Qp3j1ySA8kRPVMdt6_W_9o-Zj9Ra4D0A,232
- lfss-0.11.2.dist-info/RECORD,,
+ lfss-0.11.3.dist-info/METADATA,sha256=OtKMj9dIs-FEgxYIPIwFTFGg1fWoTdnEc2LHyWFIgo0,2732
+ lfss-0.11.3.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ lfss-0.11.3.dist-info/entry_points.txt,sha256=R4uOP1y6eD0Qp3j1ySA8kRPVMdt6_W_9o-Zj9Ra4D0A,232
+ lfss-0.11.3.dist-info/RECORD,,