lfss 0.11.2.tar.gz → 0.11.4.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. {lfss-0.11.2 → lfss-0.11.4}/PKG-INFO +2 -2
  2. {lfss-0.11.2 → lfss-0.11.4}/docs/Enviroment_variables.md +3 -1
  3. {lfss-0.11.2 → lfss-0.11.4}/frontend/api.js +66 -4
  4. {lfss-0.11.2 → lfss-0.11.4}/lfss/api/connector.py +17 -2
  5. {lfss-0.11.2 → lfss-0.11.4}/lfss/cli/cli.py +7 -7
  6. {lfss-0.11.2 → lfss-0.11.4}/lfss/eng/config.py +3 -1
  7. {lfss-0.11.2 → lfss-0.11.4}/lfss/eng/database.py +119 -27
  8. {lfss-0.11.2 → lfss-0.11.4}/lfss/eng/log.py +21 -20
  9. {lfss-0.11.2 → lfss-0.11.4}/lfss/svc/app_base.py +4 -1
  10. {lfss-0.11.2 → lfss-0.11.4}/lfss/svc/app_native.py +51 -4
  11. {lfss-0.11.2 → lfss-0.11.4}/pyproject.toml +2 -2
  12. {lfss-0.11.2 → lfss-0.11.4}/Readme.md +0 -0
  13. {lfss-0.11.2 → lfss-0.11.4}/docs/Known_issues.md +0 -0
  14. {lfss-0.11.2 → lfss-0.11.4}/docs/Permission.md +0 -0
  15. {lfss-0.11.2 → lfss-0.11.4}/docs/Webdav.md +0 -0
  16. {lfss-0.11.2 → lfss-0.11.4}/docs/changelog.md +0 -0
  17. {lfss-0.11.2 → lfss-0.11.4}/frontend/index.html +0 -0
  18. {lfss-0.11.2 → lfss-0.11.4}/frontend/info.css +0 -0
  19. {lfss-0.11.2 → lfss-0.11.4}/frontend/info.js +0 -0
  20. {lfss-0.11.2 → lfss-0.11.4}/frontend/login.css +0 -0
  21. {lfss-0.11.2 → lfss-0.11.4}/frontend/login.js +0 -0
  22. {lfss-0.11.2 → lfss-0.11.4}/frontend/popup.css +0 -0
  23. {lfss-0.11.2 → lfss-0.11.4}/frontend/popup.js +0 -0
  24. {lfss-0.11.2 → lfss-0.11.4}/frontend/scripts.js +0 -0
  25. {lfss-0.11.2 → lfss-0.11.4}/frontend/state.js +0 -0
  26. {lfss-0.11.2 → lfss-0.11.4}/frontend/styles.css +0 -0
  27. {lfss-0.11.2 → lfss-0.11.4}/frontend/thumb.css +0 -0
  28. {lfss-0.11.2 → lfss-0.11.4}/frontend/thumb.js +0 -0
  29. {lfss-0.11.2 → lfss-0.11.4}/frontend/utils.js +0 -0
  30. {lfss-0.11.2 → lfss-0.11.4}/lfss/api/__init__.py +0 -0
  31. {lfss-0.11.2 → lfss-0.11.4}/lfss/cli/__init__.py +0 -0
  32. {lfss-0.11.2 → lfss-0.11.4}/lfss/cli/balance.py +0 -0
  33. {lfss-0.11.2 → lfss-0.11.4}/lfss/cli/log.py +0 -0
  34. {lfss-0.11.2 → lfss-0.11.4}/lfss/cli/panel.py +0 -0
  35. {lfss-0.11.2 → lfss-0.11.4}/lfss/cli/serve.py +0 -0
  36. {lfss-0.11.2 → lfss-0.11.4}/lfss/cli/user.py +0 -0
  37. {lfss-0.11.2 → lfss-0.11.4}/lfss/cli/vacuum.py +0 -0
  38. {lfss-0.11.2 → lfss-0.11.4}/lfss/eng/__init__.py +0 -0
  39. {lfss-0.11.2 → lfss-0.11.4}/lfss/eng/bounded_pool.py +0 -0
  40. {lfss-0.11.2 → lfss-0.11.4}/lfss/eng/connection_pool.py +0 -0
  41. {lfss-0.11.2 → lfss-0.11.4}/lfss/eng/datatype.py +0 -0
  42. {lfss-0.11.2 → lfss-0.11.4}/lfss/eng/error.py +0 -0
  43. {lfss-0.11.2 → lfss-0.11.4}/lfss/eng/thumb.py +0 -0
  44. {lfss-0.11.2 → lfss-0.11.4}/lfss/eng/utils.py +0 -0
  45. {lfss-0.11.2 → lfss-0.11.4}/lfss/sql/init.sql +0 -0
  46. {lfss-0.11.2 → lfss-0.11.4}/lfss/sql/pragma.sql +0 -0
  47. {lfss-0.11.2 → lfss-0.11.4}/lfss/svc/app.py +0 -0
  48. {lfss-0.11.2 → lfss-0.11.4}/lfss/svc/app_dav.py +0 -0
  49. {lfss-0.11.2 → lfss-0.11.4}/lfss/svc/common_impl.py +0 -0
  50. {lfss-0.11.2 → lfss-0.11.4}/lfss/svc/request_log.py +0 -0
{lfss-0.11.2 → lfss-0.11.4}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: lfss
-Version: 0.11.2
+Version: 0.11.4
 Summary: Lightweight file storage service
 Home-page: https://github.com/MenxLi/lfss
 Author: Li, Mengxun
@@ -10,7 +10,7 @@ Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
-Requires-Dist: aiofiles (==23.*)
+Requires-Dist: aiofiles (==24.*)
 Requires-Dist: aiosqlite (==0.*)
 Requires-Dist: fastapi (==0.*)
 Requires-Dist: mimesniff (==1.*)
{lfss-0.11.2 → lfss-0.11.4}/docs/Enviroment_variables.md

@@ -4,8 +4,10 @@
 **Server**
 - `LFSS_DATA`: The directory to store the data. Default is `.storage_data`.
 - `LFSS_WEBDAV`: Enable WebDAV support. Default is `0`, set to `1` to enable.
-- `LFSS_LARGE_FILE`: The size limit of the file to store in the database. Default is `8m`.
+- `LFSS_LARGE_FILE`: The size limit of the file to store in the database. Default is `1m`.
 - `LFSS_DEBUG`: Enable debug mode for more verbose logging. Default is `0`, set to `1` to enable.
+- `LFSS_DISABLE_LOGGING`: Disable all file logging. Default is 0; set to `1` to disable file logging.
+- `LFSS_ORIGIN`: The `Origin` header to allow CORS requests. Use `,` to separate multiple origins. Default is `*`.
 
 **Client**
 - `LFSS_ENDPOINT`: The fallback server endpoint. Default is `http://localhost:8000`.
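To make the server-side variables concrete, here is a minimal launch sketch. The variable names come from the list above; the `lfss.svc.app:app` import string is an assumption based on the repository layout, not a documented entry point (the `lfss/cli/serve.py` entry in the file list is likely the intended launcher).

```python
import os

# Assumed setup sketch: lfss/eng/config.py reads these at import time,
# so they must be set before the server module is imported.
os.environ["LFSS_DATA"] = "/srv/lfss/.storage_data"  # data directory
os.environ["LFSS_LARGE_FILE"] = "1m"                 # DB vs. blob-dir threshold (new default)
os.environ["LFSS_DEBUG"] = "0"

import uvicorn
# "lfss.svc.app:app" is a guess from the file list above.
uvicorn.run("lfss.svc.app:app", host="127.0.0.1", port=8000)
```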
{lfss-0.11.2 → lfss-0.11.4}/frontend/api.js

@@ -69,6 +69,10 @@ export default class Connector {
     /**
      * @param {string} path - the path to the file (url)
      * @param {File} file - the file to upload
+     * @param {Object} [options] - Optional upload configuration.
+     * @param {'abort' | 'overwrite' | 'skip'} [options.conflict='abort'] - Conflict resolution strategy:
+     *   `'abort'` to cancel and raise 409, `'overwrite'` to replace.
+     * @param {number} [options.permission=0] - Optional permission setting for the file (refer to backend impl).
      * @returns {Promise<string>} - the promise of the request, the url of the file
      */
     async put(path, file, {
@@ -96,8 +100,12 @@ export default class Connector {
     }
 
     /**
-     * @param {string} path - the path to the file (url)
+     * @param {string} path - the path to the file (url), should end with .json
      * @param {File} file - the file to upload
+     * @param {Object} [options] - Optional upload configuration.
+     * @param {'abort' | 'overwrite' | 'skip'} [options.conflict='abort'] - Conflict resolution strategy:
+     *   `'abort'` to cancel and raise 409, `'overwrite'` to replace, `'skip'` to ignore if already exists.
+     * @param {number} [options.permission=0] - Optional permission setting for the file (refer to backend impl).
      * @returns {Promise<string>} - the promise of the request, the url of the file
      */
     async post(path, file, {
@@ -129,13 +137,23 @@ export default class Connector {
 
     /**
      * @param {string} path - the path to the file (url), should end with .json
-     * @param {Objec} data - the data to upload
+     * @param {Object} data - the data to upload
+     * @param {Object} [options] - Optional upload configuration.
+     * @param {'abort' | 'overwrite' | 'skip'} [options.conflict='abort'] - Conflict resolution strategy:
+     *   `'abort'` to cancel and raise 409, `'overwrite'` to replace, `'skip'` to ignore if already exists.
+     * @param {number} [options.permission=0] - Optional permission setting for the file (refer to backend impl).
      * @returns {Promise<string>} - the promise of the request, the url of the file
      */
-    async putJson(path, data){
+    async putJson(path, data, {
+        conflict = "overwrite",
+        permission = 0
+    } = {}){
         if (!path.endsWith('.json')){ throw new Error('Upload object must end with .json'); }
         if (path.startsWith('/')){ path = path.slice(1); }
-        const res = await fetch(this.config.endpoint + '/' + path, {
+        const dst = new URL(this.config.endpoint + '/' + path);
+        dst.searchParams.append('conflict', conflict);
+        dst.searchParams.append('permission', permission);
+        const res = await fetch(dst.toString(), {
             method: 'PUT',
             headers: {
                 'Authorization': 'Bearer ' + this.config.token,
@@ -149,6 +167,50 @@ export default class Connector {
         return (await res.json()).url;
     }
 
+    /**
+     * @param {string} path - the path to the file (url), should have content type application/json
+     * @returns {Promise<Object>} - return the json object
+     */
+    async getJson(path){
+        if (path.startsWith('/')){ path = path.slice(1); }
+        const res = await fetch(this.config.endpoint + '/' + path, {
+            method: 'GET',
+            headers: {
+                "Authorization": 'Bearer ' + this.config.token
+            },
+        });
+        if (res.status != 200){
+            throw new Error(`Failed to get object, status code: ${res.status}, message: ${await fmtFailedResponse(res)}`);
+        }
+        return await res.json();
+    }
+
+    /**
+     * @param {string[]} paths - the paths to the files (url), should have content type plain/text, application/json, etc.
+     * @param {Object} [options] - Optional configuration.
+     * @param {boolean} [options.skipContent=false] - If true, skips fetching content and returns a record of <path, ''>.
+     * @returns {Promise<Record<string, string | null>>} - return the mapping of path to text content, non-existing paths will be ignored
+     */
+    async getMultipleText(paths, {
+        skipContent = false
+    } = {}){
+        const url = new URL(this.config.endpoint + '/_api/get-multiple');
+        url.searchParams.append('skip_content', skipContent);
+        for (const path of paths){
+            url.searchParams.append('path', path);
+        }
+        const res = await fetch(url.toString(), {
+            method: 'GET',
+            headers: {
+                "Authorization": 'Bearer ' + this.config.token,
+            }
+        });
+        if (res.status != 200 && res.status != 206){
+            throw new Error(`Failed to get multiple files, status code: ${res.status}, message: ${await fmtFailedResponse(res)}`);
+        }
+        return await res.json();
+    }
+
     async delete(path){
         if (path.startsWith('/')){ path = path.slice(1); }
         const res = await fetch(this.config.endpoint + '/' + path, {
{lfss-0.11.2 → lfss-0.11.4}/lfss/api/connector.py

@@ -1,7 +1,7 @@
 from __future__ import annotations
 from typing import Optional, Literal
 from collections.abc import Iterator
-import os
+import os, json
 import requests
 import requests.adapters
 import urllib.parse
@@ -76,7 +76,11 @@ class Connector:
             path = path[1:]
         path = ensure_uri_compnents(path)
         def f(**kwargs):
-            url = f"{self.config['endpoint']}/{path}" + "?" + urllib.parse.urlencode(search_params)
+            search_params_t = [
+                (k, str(v).lower() if isinstance(v, bool) else v)
+                for k, v in search_params.items()
+            ]   # tuple form
+            url = f"{self.config['endpoint']}/{path}" + "?" + urllib.parse.urlencode(search_params_t, doseq=True)
             headers: dict = kwargs.pop('headers', {})
             headers.update({
                 'Authorization': f"Bearer {self.config['token']}",
@@ -207,6 +211,17 @@ class Connector:
         assert response.headers['Content-Type'] == 'application/json'
         return response.json()
 
+    def get_multiple_text(self, *paths: str, skip_content = False) -> dict[str, Optional[str]]:
+        """
+        Gets text contents of multiple files at once. Non-existing files will return None.
+        - skip_content: if True, the file contents will not be fetched, always be empty string ''.
+        """
+        response = self._fetch_factory(
+            'GET', '_api/get-multiple',
+            {'path': paths, "skip_content": skip_content}
+        )()
+        return response.json()
+
     def delete(self, path: str):
         """Deletes the file at the specified path."""
         self._fetch_factory('DELETE', path)()
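A client-side sketch of the new bulk read. It assumes `Connector()` can resolve the endpoint and token from the environment (the docs list `LFSS_ENDPOINT` as a fallback); pass them explicitly if your setup differs. Note that the tuple-form `urlencode(..., doseq=True)` above is what lets the repeated `path` values and the lower-cased boolean `skip_content` reach the server correctly.

```python
from lfss.api import Connector

# Assumed: endpoint/token come from the environment (e.g. LFSS_ENDPOINT).
conn = Connector()

# One request for several files; paths missing on the server map to None.
texts = conn.get_multiple_text("notes/a.txt", "notes/b.json", "notes/missing.txt")
for path, content in texts.items():
    print(path, "->", "<missing>" if content is None else content[:40])

# Existence probe: with skip_content=True, found files come back as ''.
found = {p: c is not None
         for p, c in conn.get_multiple_text("notes/a.txt", skip_content=True).items()}
```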
{lfss-0.11.2 → lfss-0.11.4}/lfss/cli/cli.py

@@ -1,5 +1,5 @@
 from pathlib import Path
-import argparse, typing
+import argparse, typing, sys
 from lfss.api import Connector, upload_directory, upload_file, download_file, download_directory
 from lfss.eng.datatype import FileReadPermission, FileSortKey, DirSortKey
 from lfss.eng.utils import decode_uri_compnents
@@ -78,9 +78,9 @@ def main():
             permission=args.permission
         )
         if failed_upload:
-            print("\033[91mFailed to upload:\033[0m")
+            print("\033[91mFailed to upload:\033[0m", file=sys.stderr)
             for path in failed_upload:
-                print(f" {path}")
+                print(f" {path}", file=sys.stderr)
     else:
         success, msg = upload_file(
             connector,
@@ -93,7 +93,7 @@ def main():
             permission=args.permission
         )
         if not success:
-            print("\033[91mFailed to upload: \033[0m", msg)
+            print("\033[91mFailed to upload: \033[0m", msg, file=sys.stderr)
 
     elif args.command == "download":
         is_dir = args.src.endswith("/")
@@ -107,9 +107,9 @@ def main():
             overwrite=args.overwrite
         )
         if failed_download:
-            print("\033[91mFailed to download:\033[0m")
+            print("\033[91mFailed to download:\033[0m", file=sys.stderr)
             for path in failed_download:
-                print(f" {path}")
+                print(f" {path}", file=sys.stderr)
     else:
         success, msg = download_file(
             connector,
@@ -121,7 +121,7 @@ def main():
             overwrite=args.overwrite
         )
         if not success:
-            print("\033[91mFailed to download: \033[0m", msg)
+            print("\033[91mFailed to download: \033[0m", msg, file=sys.stderr)
 
     elif args.command == "query":
         for path in args.path:
{lfss-0.11.2 → lfss-0.11.4}/lfss/eng/config.py

@@ -13,6 +13,8 @@ LARGE_BLOB_DIR = DATA_HOME / 'large_blobs'
 LARGE_BLOB_DIR.mkdir(exist_ok=True)
 LOG_DIR = DATA_HOME / 'logs'
 
+DISABLE_LOGGING = os.environ.get('DISABLE_LOGGING', '0') == '1'
+
 # https://sqlite.org/fasterthanfs.html
 __env_large_file = os.environ.get('LFSS_LARGE_FILE', None)
 if __env_large_file is not None:
@@ -24,4 +26,4 @@ CHUNK_SIZE = 1024 * 1024 # 1MB chunks for streaming (on large files)
 DEBUG_MODE = os.environ.get('LFSS_DEBUG', '0') == '1'
 
 THUMB_DB = DATA_HOME / 'thumbs.v0-11.db'
-THUMB_SIZE = (48, 48)
+THUMB_SIZE = (64, 64)
{lfss-0.11.2 → lfss-0.11.4}/lfss/eng/database.py

@@ -197,6 +197,11 @@ class FileConn(DBObjectBase):
     def parse_record(record) -> FileRecord:
         return FileRecord(*record)
 
+    @staticmethod
+    def escape_sqlike(url: str) -> str:
+        """ Escape a url for use in SQL LIKE clause (The % and _ characters) """
+        return url.replace('%', r'\%').replace('_', r'\_')
+
     @overload
     async def get_file_record(self, url: str, throw: Literal[True]) -> FileRecord: ...
     @overload
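The new `ESCAPE '\\'` clauses throughout this file exist because `%` and `_` are wildcards in SQL `LIKE`, so an unescaped prefix such as `user/a_b/` would also match `user/axb/...`. A self-contained sketch of the behavior (using a toy table, not the real schema):

```python
import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE fmeta (url TEXT)")
con.executemany("INSERT INTO fmeta VALUES (?)",
                [("user/a_b/f.txt",), ("user/axb/f.txt",)])

def escape_sqlike(url: str) -> str:
    # mirrors FileConn.escape_sqlike above
    return url.replace('%', r'\%').replace('_', r'\_')

prefix = "user/a_b/"
# Unescaped: '_' matches any single character, so both rows are returned.
print(con.execute("SELECT url FROM fmeta WHERE url LIKE ?",
                  (prefix + '%',)).fetchall())
# Escaped + ESCAPE clause: only the literal a_b directory matches.
print(con.execute("SELECT url FROM fmeta WHERE url LIKE ? ESCAPE '\\'",
                  (escape_sqlike(prefix) + '%',)).fetchall())
```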
@@ -210,6 +215,10 @@ class FileConn(DBObjectBase):
         return self.parse_record(res)
 
     async def get_file_records(self, urls: list[str]) -> list[FileRecord]:
+        """
+        Get all file records with the given urls, only urls in the database will be returned.
+        If the urls are not in the database, they will be ignored.
+        """
         await self.cur.execute("SELECT * FROM fmeta WHERE url IN ({})".format(','.join(['?'] * len(urls))), urls)
         res = await self.cur.fetchall()
         if res is None:
@@ -242,9 +251,9 @@ class FileConn(DBObjectBase):
                 url, LENGTH(?) + 1,
                 INSTR(SUBSTR(url, LENGTH(?) + 1), '/')
             ) AS dirname
-            FROM fmeta WHERE url LIKE ? AND dirname != ''
+            FROM fmeta WHERE url LIKE ? ESCAPE '\\' AND dirname != ''
         )
-        """, (url, url, url + '%'))
+        """, (url, url, self.escape_sqlike(url) + '%'))
         res = await cursor.fetchone()
         assert res is not None, "Error: count_path_dirs"
         return res[0]
@@ -267,11 +276,11 @@ class FileConn(DBObjectBase):
                 1 + LENGTH(?),
                 INSTR(SUBSTR(url, 1 + LENGTH(?)), '/')
             ) AS dirname
-            FROM fmeta WHERE url LIKE ? AND dirname != ''
+            FROM fmeta WHERE url LIKE ? ESCAPE '\\' AND dirname != ''
         """ \
             + (f"ORDER BY {order_by} {'DESC' if order_desc else 'ASC'}" if order_by else '') \
             + " LIMIT ? OFFSET ?"
-        cursor = await self.cur.execute(sql_qury, (url, url, url + '%', limit, offset))
+        cursor = await self.cur.execute(sql_qury, (url, url, self.escape_sqlike(url) + '%', limit, offset))
         res = await cursor.fetchall()
         dirs_str = [r[0] for r in res]
         async def get_dir(dir_url):
@@ -286,9 +295,15 @@ class FileConn(DBObjectBase):
         if not url.endswith('/'): url += '/'
         if url == '/': url = ''
         if flat:
-            cursor = await self.cur.execute("SELECT COUNT(*) FROM fmeta WHERE url LIKE ?", (url + '%', ))
+            cursor = await self.cur.execute(
+                "SELECT COUNT(*) FROM fmeta WHERE url LIKE ? ESCAPE '\\'",
+                (self.escape_sqlike(url) + '%', )
+            )
         else:
-            cursor = await self.cur.execute("SELECT COUNT(*) FROM fmeta WHERE url LIKE ? AND url NOT LIKE ?", (url + '%', url + '%/%'))
+            cursor = await self.cur.execute(
+                "SELECT COUNT(*) FROM fmeta WHERE url LIKE ? ESCAPE '\\' AND url NOT LIKE ? ESCAPE '\\'",
+                (self.escape_sqlike(url) + '%', self.escape_sqlike(url) + '%/%')
+            )
         res = await cursor.fetchone()
         assert res is not None, "Error: count_path_files"
         return res[0]
@@ -305,14 +320,14 @@ class FileConn(DBObjectBase):
         if not url.endswith('/'): url += '/'
         if url == '/': url = ''
 
-        sql_query = "SELECT * FROM fmeta WHERE url LIKE ?"
-        if not flat: sql_query += " AND url NOT LIKE ?"
+        sql_query = "SELECT * FROM fmeta WHERE url LIKE ? ESCAPE '\\'"
+        if not flat: sql_query += " AND url NOT LIKE ? ESCAPE '\\'"
         if order_by: sql_query += f" ORDER BY {order_by} {'DESC' if order_desc else 'ASC'}"
         sql_query += " LIMIT ? OFFSET ?"
         if flat:
-            cursor = await self.cur.execute(sql_query, (url + '%', limit, offset))
+            cursor = await self.cur.execute(sql_query, (self.escape_sqlike(url) + '%', limit, offset))
         else:
-            cursor = await self.cur.execute(sql_query, (url + '%', url + '%/%', limit, offset))
+            cursor = await self.cur.execute(sql_query, (self.escape_sqlike(url) + '%', self.escape_sqlike(url) + '%/%', limit, offset))
         res = await cursor.fetchall()
         files = [self.parse_record(r) for r in res]
         return files
@@ -347,8 +362,8 @@ class FileConn(DBObjectBase):
             MAX(access_time) as access_time,
             COUNT(*) as n_files
         FROM fmeta
-        WHERE url LIKE ?
-        """, (url + '%', ))
+        WHERE url LIKE ? ESCAPE '\\'
+        """, (self.escape_sqlike(url) + '%', ))
         result = await cursor.fetchone()
         if result is None or any(val is None for val in result):
             raise PathNotFoundError(f"Path {url} not found")
@@ -372,10 +387,16 @@ class FileConn(DBObjectBase):
         if not url.endswith('/'):
             url += '/'
         if not include_subpath:
-            cursor = await self.cur.execute("SELECT SUM(file_size) FROM fmeta WHERE url LIKE ? AND url NOT LIKE ?", (url + '%', url + '%/%'))
+            cursor = await self.cur.execute(
+                "SELECT SUM(file_size) FROM fmeta WHERE url LIKE ? ESCAPE '\\' AND url NOT LIKE ? ESCAPE '\\'",
+                (self.escape_sqlike(url) + '%', self.escape_sqlike(url) + '%/%')
+            )
             res = await cursor.fetchone()
         else:
-            cursor = await self.cur.execute("SELECT SUM(file_size) FROM fmeta WHERE url LIKE ?", (url + '%', ))
+            cursor = await self.cur.execute(
+                "SELECT SUM(file_size) FROM fmeta WHERE url LIKE ? ESCAPE '\\'",
+                (self.escape_sqlike(url) + '%', )
+            )
             res = await cursor.fetchone()
         assert res is not None
         return res[0] or 0
@@ -412,6 +433,10 @@ class FileConn(DBObjectBase):
         self.logger.info(f"File {url} created")
 
     async def copy_file(self, old_url: str, new_url: str, user_id: Optional[int] = None):
+        """
+        Copy file from old_url to new_url,
+        if user_id is None, will not change the owner_id of the file. Otherwise, will change the owner_id to user_id.
+        """
         old = await self.get_file_record(old_url)
         if old is None:
             raise FileNotFoundError(f"File {old_url} not found")
@@ -428,14 +453,17 @@ class FileConn(DBObjectBase):
         self.logger.info(f"Copied file {old_url} to {new_url}")
 
     async def copy_dir(self, old_url: str, new_url: str, user_id: Optional[int] = None):
+        """
+        Copy all files under old_url to new_url,
+        if user_id is None, will not change the owner_id of the files. Otherwise, will change the owner_id to user_id.
+        """
         assert old_url.endswith('/'), "Old path must end with /"
         assert new_url.endswith('/'), "New path must end with /"
-        if user_id is None:
-            cursor = await self.cur.execute("SELECT * FROM fmeta WHERE url LIKE ?", (old_url + '%', ))
-            res = await cursor.fetchall()
-        else:
-            cursor = await self.cur.execute("SELECT * FROM fmeta WHERE url LIKE ? AND owner_id = ?", (old_url + '%', user_id))
-            res = await cursor.fetchall()
+        cursor = await self.cur.execute(
+            "SELECT * FROM fmeta WHERE url LIKE ? ESCAPE '\\'",
+            (self.escape_sqlike(old_url) + '%', )
+        )
+        res = await cursor.fetchall()
         for r in res:
             old_record = FileRecord(*r)
             new_r = new_url + old_record.url[len(old_url):]
@@ -464,10 +492,16 @@ class FileConn(DBObjectBase):
         assert old_url.endswith('/'), "Old path must end with /"
         assert new_url.endswith('/'), "New path must end with /"
         if user_id is None:
-            cursor = await self.cur.execute("SELECT * FROM fmeta WHERE url LIKE ?", (old_url + '%', ))
+            cursor = await self.cur.execute(
+                "SELECT * FROM fmeta WHERE url LIKE ? ESCAPE '\\'",
+                (self.escape_sqlike(old_url) + '%', )
+            )
             res = await cursor.fetchall()
         else:
-            cursor = await self.cur.execute("SELECT * FROM fmeta WHERE url LIKE ? AND owner_id = ?", (old_url + '%', user_id))
+            cursor = await self.cur.execute(
+                "SELECT * FROM fmeta WHERE url LIKE ? ESCAPE '\\' AND owner_id = ?",
+                (self.escape_sqlike(old_url) + '%', user_id)
+            )
             res = await cursor.fetchall()
         for r in res:
             new_r = new_url + r[0][len(old_url):]
@@ -502,10 +536,16 @@ class FileConn(DBObjectBase):
     async def delete_records_by_prefix(self, path: str, under_owner_id: Optional[int] = None) -> list[FileRecord]:
         """Delete all records with url starting with path"""
         # update user size
-        cursor = await self.cur.execute("SELECT DISTINCT owner_id FROM fmeta WHERE url LIKE ?", (path + '%', ))
+        cursor = await self.cur.execute(
+            "SELECT DISTINCT owner_id FROM fmeta WHERE url LIKE ? ESCAPE '\\'",
+            (self.escape_sqlike(path) + '%', )
+        )
         res = await cursor.fetchall()
         for r in res:
-            cursor = await self.cur.execute("SELECT SUM(file_size) FROM fmeta WHERE owner_id = ? AND url LIKE ?", (r[0], path + '%'))
+            cursor = await self.cur.execute(
+                "SELECT SUM(file_size) FROM fmeta WHERE owner_id = ? AND url LIKE ? ESCAPE '\\'",
+                (r[0], self.escape_sqlike(path) + '%')
+            )
             size = await cursor.fetchone()
             if size is not None:
                 await self._user_size_dec(r[0], size[0])
@@ -514,9 +554,9 @@ class FileConn(DBObjectBase):
         # but it's not a big deal... we should have only one writer
 
         if under_owner_id is None:
-            res = await self.cur.execute("DELETE FROM fmeta WHERE url LIKE ? RETURNING *", (path + '%', ))
+            res = await self.cur.execute("DELETE FROM fmeta WHERE url LIKE ? ESCAPE '\\' RETURNING *", (self.escape_sqlike(path) + '%', ))
         else:
-            res = await self.cur.execute("DELETE FROM fmeta WHERE url LIKE ? AND owner_id = ? RETURNING *", (path + '%', under_owner_id))
+            res = await self.cur.execute("DELETE FROM fmeta WHERE url LIKE ? ESCAPE '\\' AND owner_id = ? RETURNING *", (self.escape_sqlike(path) + '%', under_owner_id))
         all_f_rec = await res.fetchall()
         self.logger.info(f"Deleted {len(all_f_rec)} file(s) for path {path}") # type: ignore
         return [self.parse_record(r) for r in all_f_rec]
@@ -684,7 +724,7 @@ async def delayed_log_access(url: str):
 prohibited_part_regex = re.compile(
     "|".join([
         r"^\s*\.+\s*$", # dot path
-        "[{}]".format("".join(re.escape(c) for c in ('/', "\\", "'", '"', "*", "%"))), # prohibited characters
+        "[{}]".format("".join(re.escape(c) for c in ('/', "\\", "'", '"', "*"))), # prohibited characters
     ])
 ),
 )
@@ -826,6 +866,58 @@ class Database:
             yield blob
         ret = blob_stream()
         return ret
+
+    async def read_files_bulk(
+        self, urls: list[str],
+        skip_content = False,
+        op_user: Optional[UserRecord] = None,
+    ) -> dict[str, Optional[bytes]]:
+        """
+        A frequent use case is to read multiple files at once,
+        this method will read all files in the list and return a dict of url -> blob.
+        if the file is not found, the value will be None.
+        - skip_content: if True, will not read the content of the file, resulting in a dict of url -> b''
+
+        may raise StorageExceededError if the total size of the files exceeds MAX_MEM_FILE_BYTES
+        """
+        for url in urls:
+            validate_url(url)
+
+        async with unique_cursor() as cur:
+            fconn = FileConn(cur)
+            file_records = await fconn.get_file_records(urls)
+
+            if op_user is not None:
+                for r in file_records:
+                    if await check_path_permission(r.url, op_user, cursor=cur) >= AccessLevel.READ:
+                        continue
+                    is_allowed, reason = await check_file_read_permission(op_user, r, cursor=cur)
+                    if not is_allowed:
+                        raise PermissionDeniedError(f"Permission denied: {op_user.username} cannot read file {r.url}: {reason}")
+
+        # first check if the files are too big
+        sum_size = sum([r.file_size for r in file_records])
+        if not skip_content and sum_size > MAX_MEM_FILE_BYTES:
+            raise StorageExceededError(f"Unable to read files at once, total size {sum_size} exceeds {MAX_MEM_FILE_BYTES}")

+        self.logger.debug(f"Reading {len(file_records)} files{' (skip content)' if skip_content else ''}, getting {sum_size} bytes, from {urls}")
+        # read the file content
+        async with unique_cursor() as cur:
+            fconn = FileConn(cur)
+            blobs: dict[str, bytes] = {}
+            for r in file_records:
+                if skip_content:
+                    blobs[r.url] = b''
+                    continue
+
+                if r.external:
+                    blob_iter = fconn.get_file_blob_external(r.file_id)
+                    blob = b''.join([chunk async for chunk in blob_iter])
+                else:
+                    blob = await fconn.get_file_blob(r.file_id)
+                blobs[r.url] = blob
+
+        return {url: blobs.get(url, None) for url in urls}
 
     async def delete_file(self, url: str, op_user: Optional[UserRecord] = None) -> Optional[FileRecord]:
         validate_url(url)
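`read_files_bulk` works in two phases: one cursor pass for metadata and permission checks (with the `MAX_MEM_FILE_BYTES` guard in between), then a second pass to pull the blobs. A minimal caller sketch, assuming an initialized `Database` and an authenticated `UserRecord` from elsewhere in the service:

```python
async def dump_bulk(db, op_user):
    # db: lfss Database instance; op_user: UserRecord — both assumed here.
    blobs = await db.read_files_bulk(
        ["user/config.json", "user/readme.txt"],
        skip_content=False,
        op_user=op_user,
    )
    for url, blob in blobs.items():
        # Missing files come back as None; found files as bytes.
        print(url, "<missing>" if blob is None else f"{len(blob)} bytes")
```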
{lfss-0.11.2 → lfss-0.11.4}/lfss/eng/log.py

@@ -1,9 +1,9 @@
-from .config import LOG_DIR
+from .config import LOG_DIR, DISABLE_LOGGING
 import time, sqlite3, dataclasses
 from typing import TypeVar, Callable, Literal, Optional
 from concurrent.futures import ThreadPoolExecutor
 from functools import wraps
-import logging, pathlib, asyncio
+import logging, asyncio
 from logging import handlers
 
 class BCOLORS:
@@ -154,24 +154,25 @@ def get_logger(
     if isinstance(color, str) and color.startswith('\033'):
         format_str_plain = format_str_plain.replace(color, '')
 
-    formatter_plain = logging.Formatter(format_str_plain)
-    log_home.mkdir(exist_ok=True)
-    log_file = log_home / f'{name}.log'
-    if file_handler_type == 'simple':
-        file_handler = logging.FileHandler(log_file)
-    elif file_handler_type == 'daily':
-        file_handler = handlers.TimedRotatingFileHandler(
-            log_file, when='midnight', interval=1, backupCount=30
-        )
-    elif file_handler_type == 'rotate':
-        file_handler = handlers.RotatingFileHandler(
-            log_file, maxBytes=1024*1024, backupCount=5
-        )
-    elif file_handler_type == 'sqlite':
-        file_handler = SQLiteFileHandler(log_file if log_file.suffix == '.db' else log_file.with_suffix('.log.db'))
-
-    file_handler.setFormatter(formatter_plain)
-    logger.addHandler(file_handler)
+    if not DISABLE_LOGGING:
+        formatter_plain = logging.Formatter(format_str_plain)
+        log_home.mkdir(exist_ok=True)
+        log_file = log_home / f'{name}.log'
+        if file_handler_type == 'simple':
+            file_handler = logging.FileHandler(log_file)
+        elif file_handler_type == 'daily':
+            file_handler = handlers.TimedRotatingFileHandler(
+                log_file, when='midnight', interval=1, backupCount=30
+            )
+        elif file_handler_type == 'rotate':
+            file_handler = handlers.RotatingFileHandler(
+                log_file, maxBytes=1024*1024, backupCount=5
+            )
+        elif file_handler_type == 'sqlite':
+            file_handler = SQLiteFileHandler(log_file if log_file.suffix == '.db' else log_file.with_suffix('.log.db'))
+
+        file_handler.setFormatter(formatter_plain)
+        logger.addHandler(file_handler)
 
     logger = BaseLogger(name)
     setupLogger(logger)
{lfss-0.11.2 → lfss-0.11.4}/lfss/svc/app_base.py

@@ -60,10 +60,13 @@ def handle_exception(fn):
         raise
     return wrapper
 
+env_origins = os.environ.get("LFSS_ORIGINS", "*")
+logger.debug(f"LFSS_ORIGINS: {env_origins}")
+origins = [x.strip() for x in env_origins.split(",") if x.strip()]
 app = FastAPI(docs_url=None, redoc_url=None, lifespan=lifespan)
 app.add_middleware(
     CORSMiddleware,
-    allow_origins=["*"],
+    allow_origins=origins,
     allow_credentials=True,
     allow_methods=["*"],
     allow_headers=["*"],
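The parsing is a plain comma split with whitespace trimming; a quick sketch of what the middleware ends up receiving (same expression as the diff above). Note the docs hunk earlier documents `LFSS_ORIGIN` while this code reads `LFSS_ORIGINS`; setting both is the safe bet.

```python
def parse_origins(env_origins: str) -> list[str]:
    # same list comprehension as app_base.py above
    return [x.strip() for x in env_origins.split(",") if x.strip()]

assert parse_origins("*") == ["*"]  # default: allow all origins
assert parse_origins("https://a.example, https://b.example") == \
    ["https://a.example", "https://b.example"]
assert parse_origins("https://a.example,,") == ["https://a.example"]  # empties dropped
```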
{lfss-0.11.2 → lfss-0.11.4}/lfss/svc/app_native.py

@@ -1,17 +1,19 @@
-from typing import Optional, Literal
+from typing import Optional, Literal, Annotated
+from collections import OrderedDict
 
-from fastapi import Depends, Request, Response, UploadFile
-from fastapi.responses import StreamingResponse
+from fastapi import Depends, Request, Response, UploadFile, Query
+from fastapi.responses import StreamingResponse, JSONResponse
 from fastapi.exceptions import HTTPException
 
 from ..eng.utils import ensure_uri_compnents
 from ..eng.config import MAX_MEM_FILE_BYTES
 from ..eng.connection_pool import unique_cursor
-from ..eng.database import check_file_read_permission, check_path_permission, UserConn, FileConn
+from ..eng.database import check_file_read_permission, check_path_permission, FileConn, delayed_log_access
 from ..eng.datatype import (
     FileReadPermission, UserRecord, AccessLevel,
     FileSortKey, DirSortKey
 )
+from ..eng.error import InvalidPathError
 
 from .app_base import *
 from .common_impl import get_impl, put_file_impl, post_file_impl, delete_impl, copy_impl
@@ -189,6 +191,7 @@ async def validate_path_read_permission(path: str, user: UserRecord):
     if not await check_path_permission(path, user) >= AccessLevel.READ:
         raise HTTPException(status_code=403, detail="Permission denied")
 @router_api.get("/count-files")
+@handle_exception
 async def count_files(path: str, flat: bool = False, user: UserRecord = Depends(registered_user)):
     await validate_path_read_permission(path, user)
     path = ensure_uri_compnents(path)
@@ -196,6 +199,7 @@ async def count_files(path: str, flat: bool = False, user: UserRecord = Depends(
         fconn = FileConn(conn)
         return { "count": await fconn.count_dir_files(url = path, flat = flat) }
 @router_api.get("/list-files")
+@handle_exception
 async def list_files(
     path: str, offset: int = 0, limit: int = 1000,
     order_by: FileSortKey = "", order_desc: bool = False,
@@ -212,6 +216,7 @@ async def list_files(
     )
 
 @router_api.get("/count-dirs")
+@handle_exception
 async def count_dirs(path: str, user: UserRecord = Depends(registered_user)):
     await validate_path_read_permission(path, user)
     path = ensure_uri_compnents(path)
@@ -219,6 +224,7 @@ async def count_dirs(path: str, user: UserRecord = Depends(registered_user)):
         fconn = FileConn(conn)
         return { "count": await fconn.count_path_dirs(url = path) }
 @router_api.get("/list-dirs")
+@handle_exception
 async def list_dirs(
     path: str, offset: int = 0, limit: int = 1000,
     order_by: DirSortKey = "", order_desc: bool = False,
@@ -232,6 +238,47 @@ async def list_dirs(
         url = path, offset = offset, limit = limit,
         order_by=order_by, order_desc=order_desc, skim=skim
     )
+
+# https://fastapi.tiangolo.com/tutorial/query-params-str-validations/#query-parameter-list-multiple-values
+@router_api.get("/get-multiple")
+@handle_exception
+async def get_multiple_files(
+    path: Annotated[list[str], Query()],
+    skip_content: bool = False,
+    user: UserRecord = Depends(registered_user)
+):
+    """
+    Get multiple files by path.
+    Please note that the content is supposed to be text and are small enough to fit in memory.
+
+    Not existing files will have content null, and the response will be 206 Partial Content if not all files are found.
+    if skip_content is True, the content of the files will always be ''
+    """
+    for p in path:
+        if p.endswith("/"):
+            raise InvalidPathError(f"Path '{p}' must not end with /")
+
+    # here we unify the path, so need to keep a record of the inputs
+    # make output keys consistent with inputs
+    upath2path = OrderedDict[str, str]()
+    for p in path:
+        p_ = p if not p.startswith("/") else p[1:]
+        upath2path[ensure_uri_compnents(p_)] = p
+    upaths = list(upath2path.keys())
+
+    # get files
+    raw_res = await db.read_files_bulk(upaths, skip_content=skip_content, op_user=user)
+    for k in raw_res.keys():
+        await delayed_log_access(k)
+    partial_content = len(raw_res) != len(upaths)
+
+    return JSONResponse(
+        content = {
+            upath2path[k]: v.decode('utf-8') if v is not None else None for k, v in raw_res.items()
+        },
+        status_code = 206 if partial_content else 200
+    )
+
 
 @router_api.get("/whoami")
 @handle_exception
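Over plain HTTP the endpoint takes a repeated `path` query parameter, as the FastAPI link in the diff describes. A sketch with `requests` (endpoint and token are placeholders):

```python
import requests

ENDPOINT = "http://localhost:8000"  # placeholder
TOKEN = "<your-token>"              # placeholder

resp = requests.get(
    f"{ENDPOINT}/_api/get-multiple",
    # list-of-tuples form repeats the `path` key, matching Annotated[list[str], Query()]
    params=[("path", "u/a.txt"), ("path", "u/b.json"), ("skip_content", "false")],
    headers={"Authorization": f"Bearer {TOKEN}"},
)
print(resp.status_code)  # 200 if all paths resolved, 206 Partial Content otherwise
print(resp.json())       # keyed by the paths as sent; missing files map to null
```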
{lfss-0.11.2 → lfss-0.11.4}/pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "lfss"
-version = "0.11.2"
+version = "0.11.4"
 description = "Lightweight file storage service"
 authors = ["Li, Mengxun <mengxunli@whu.edu.cn>"]
 readme = "Readme.md"
@@ -12,7 +12,7 @@ include = ["Readme.md", "docs/*", "frontend/*", "lfss/sql/*"]
 python = ">=3.10" # PEP-622
 requests = "2.*"
 aiosqlite = "0.*"
-aiofiles = "23.*"
+aiofiles = "24.*"
 mimesniff = "1.*"
 fastapi = "0.*"
 uvicorn = "0.*"