lfss 0.6.0.tar.gz → 0.7.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31)
  1. {lfss-0.6.0 → lfss-0.7.0}/PKG-INFO +1 -1
  2. {lfss-0.6.0 → lfss-0.7.0}/lfss/cli/balance.py +4 -4
  3. {lfss-0.6.0 → lfss-0.7.0}/lfss/cli/cli.py +1 -1
  4. {lfss-0.6.0 → lfss-0.7.0}/lfss/client/api.py +22 -2
  5. {lfss-0.6.0 → lfss-0.7.0}/lfss/sql/init.sql +11 -5
  6. lfss-0.7.0/lfss/sql/pragma.sql +8 -0
  7. {lfss-0.6.0 → lfss-0.7.0}/lfss/src/config.py +1 -0
  8. {lfss-0.6.0 → lfss-0.7.0}/lfss/src/database.py +11 -59
  9. {lfss-0.6.0 → lfss-0.7.0}/lfss/src/datatype.py +1 -1
  10. {lfss-0.6.0 → lfss-0.7.0}/pyproject.toml +1 -1
  11. lfss-0.6.0/lfss/sql/pragma.sql +0 -5
  12. {lfss-0.6.0 → lfss-0.7.0}/Readme.md +0 -0
  13. {lfss-0.6.0 → lfss-0.7.0}/docs/Known_issues.md +0 -0
  14. {lfss-0.6.0 → lfss-0.7.0}/docs/Permission.md +0 -0
  15. {lfss-0.6.0 → lfss-0.7.0}/frontend/api.js +0 -0
  16. {lfss-0.6.0 → lfss-0.7.0}/frontend/index.html +0 -0
  17. {lfss-0.6.0 → lfss-0.7.0}/frontend/popup.css +0 -0
  18. {lfss-0.6.0 → lfss-0.7.0}/frontend/popup.js +0 -0
  19. {lfss-0.6.0 → lfss-0.7.0}/frontend/scripts.js +0 -0
  20. {lfss-0.6.0 → lfss-0.7.0}/frontend/styles.css +0 -0
  21. {lfss-0.6.0 → lfss-0.7.0}/frontend/utils.js +0 -0
  22. {lfss-0.6.0 → lfss-0.7.0}/lfss/cli/panel.py +0 -0
  23. {lfss-0.6.0 → lfss-0.7.0}/lfss/cli/serve.py +0 -0
  24. {lfss-0.6.0 → lfss-0.7.0}/lfss/cli/user.py +0 -0
  25. {lfss-0.6.0 → lfss-0.7.0}/lfss/client/__init__.py +0 -0
  26. {lfss-0.6.0 → lfss-0.7.0}/lfss/src/__init__.py +0 -0
  27. {lfss-0.6.0 → lfss-0.7.0}/lfss/src/error.py +0 -0
  28. {lfss-0.6.0 → lfss-0.7.0}/lfss/src/log.py +0 -0
  29. {lfss-0.6.0 → lfss-0.7.0}/lfss/src/server.py +0 -0
  30. {lfss-0.6.0 → lfss-0.7.0}/lfss/src/stat.py +0 -0
  31. {lfss-0.6.0 → lfss-0.7.0}/lfss/src/utils.py +0 -0
{lfss-0.6.0 → lfss-0.7.0}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: lfss
-Version: 0.6.0
+Version: 0.7.0
 Summary: Lightweight file storage service
 Home-page: https://github.com/MenxLi/lfss
 Author: li, mengxun

{lfss-0.6.0 → lfss-0.7.0}/lfss/cli/balance.py
@@ -24,10 +24,10 @@ def barriered(func):
 @barriered
 async def move_to_external(f_id: str, flag: str = ''):
     async with aiosqlite.connect(db_file, timeout = 60) as c:
-        async with c.execute( "SELECT data FROM fdata WHERE file_id = ?", (f_id,)) as cursor:
+        async with c.execute( "SELECT data FROM blobs.fdata WHERE file_id = ?", (f_id,)) as cursor:
             blob_row = await cursor.fetchone()
             if blob_row is None:
-                print(f"{flag}File {f_id} not found in fdata")
+                print(f"{flag}File {f_id} not found in blobs.fdata")
                 return
         await c.execute("BEGIN")
         blob: bytes = blob_row[0]
@@ -35,7 +35,7 @@ async def move_to_external(f_id: str, flag: str = ''):
             async with aiofiles.open(LARGE_BLOB_DIR / f_id, 'wb') as f:
                 await f.write(blob)
             await c.execute( "UPDATE fmeta SET external = 1 WHERE file_id = ?", (f_id,))
-            await c.execute( "DELETE FROM fdata WHERE file_id = ?", (f_id,))
+            await c.execute( "DELETE FROM blobs.fdata WHERE file_id = ?", (f_id,))
             await c.commit()
             print(f"{flag}Moved {f_id} to external storage")
         except Exception as e:
@@ -56,7 +56,7 @@ async def move_to_internal(f_id: str, flag: str = ''):

         await c.execute("BEGIN")
         try:
-            await c.execute("INSERT INTO fdata (file_id, data) VALUES (?, ?)", (f_id, blob))
+            await c.execute("INSERT INTO blobs.fdata (file_id, data) VALUES (?, ?)", (f_id, blob))
             await c.execute("UPDATE fmeta SET external = 0 WHERE file_id = ?", (f_id,))
             await c.commit()
             (LARGE_BLOB_DIR / f_id).unlink(missing_ok=True)
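Both helpers now address the blob table through the blobs schema, which only resolves on a connection that has blobs.db attached. balance.py's own connection setup is outside this diff, so the sketch below shows the assumed prerequisite, with file names following the database.py hunk further down:

    import aiosqlite

    async def open_conn(data_home):
        # Assumed setup: open the index database, then attach the blob
        # database under the schema name `blobs` so statements like
        # "SELECT data FROM blobs.fdata ..." resolve.
        c = await aiosqlite.connect(data_home / 'index.db', timeout=60)
        await c.execute("ATTACH DATABASE ? AS blobs", (str(data_home / 'blobs.db'),))
        return c
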
{lfss-0.6.0 → lfss-0.7.0}/lfss/cli/cli.py
@@ -14,7 +14,7 @@ def parse_arguments():
     sp_upload.add_argument("dst", help="Destination path", type=str)
     sp_upload.add_argument("-j", "--jobs", type=int, default=1, help="Number of concurrent uploads")
     sp_upload.add_argument("--interval", type=float, default=0, help="Interval between files, only works with directory upload")
-    sp_upload.add_argument("--conflict", choices=["overwrite", "abort", "skip"], default="abort", help="Conflict resolution")
+    sp_upload.add_argument("--conflict", choices=["overwrite", "abort", "skip", "skip-ahead"], default="abort", help="Conflict resolution")
     sp_upload.add_argument("--permission", type=FileReadPermission, default=FileReadPermission.UNSET, help="File permission")
     sp_upload.add_argument("--retries", type=int, default=0, help="Number of retries, only works with directory upload")

{lfss-0.6.0 → lfss-0.7.0}/lfss/client/api.py
@@ -34,8 +34,18 @@ class Connector:
             return response
         return f

-    def put(self, path: str, file_data: bytes, permission: int | FileReadPermission = 0, conflict: Literal['overwrite', 'abort', 'skip'] = 'abort'):
+    def put(self, path: str, file_data: bytes, permission: int | FileReadPermission = 0, conflict: Literal['overwrite', 'abort', 'skip', 'skip-ahead'] = 'abort'):
         """Uploads a file to the specified path."""
+        assert isinstance(file_data, bytes), "file_data must be bytes"
+
+        # Skip ahead by checking if the file already exists
+        if conflict == 'skip-ahead':
+            exists = self.get_metadata(path)
+            if exists is None:
+                conflict = 'skip'
+            else:
+                return {'status': 'skipped', 'path': path}
+
         response = self._fetch('PUT', path, search_params={
             'permission': int(permission),
             'conflict': conflict
@@ -45,9 +55,19 @@ class Connector:
             )
         return response.json()

-    def put_json(self, path: str, data: dict, permission: int | FileReadPermission = 0, conflict: Literal['overwrite', 'abort', 'skip'] = 'abort'):
+    def put_json(self, path: str, data: dict, permission: int | FileReadPermission = 0, conflict: Literal['overwrite', 'abort', 'skip', 'skip-ahead'] = 'abort'):
         """Uploads a JSON file to the specified path."""
         assert path.endswith('.json'), "Path must end with .json"
+        assert isinstance(data, dict), "data must be a dict"
+
+        # Skip ahead by checking if the file already exists
+        if conflict == 'skip-ahead':
+            exists = self.get_metadata(path)
+            if exists is None:
+                conflict = 'skip'
+            else:
+                return {'status': 'skipped', 'path': path}
+
         response = self._fetch('PUT', path, search_params={
             'permission': int(permission),
             'conflict': conflict
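With 'skip-ahead' the conflict is resolved on the client before any payload is sent: a metadata probe short-circuits uploads of files that already exist. When the probe finds nothing, the request still goes out with conflict='skip', so a concurrent writer landing between probe and PUT is handled server-side rather than overwritten. A usage sketch (Connector construction is not shown in this diff, so treat those details as assumptions):

    from lfss.client.api import Connector

    conn = Connector()  # endpoint/token configuration assumed

    # If the file already exists, this returns
    # {'status': 'skipped', 'path': ...} locally, without sending the body.
    result = conn.put('alice/data/report.bin', b'\x00' * 1024,
                      conflict='skip-ahead')

    # put_json gains the same path, plus a dict type check.
    conn.put_json('alice/data/meta.json', {'version': 1},
                  conflict='skip-ahead')
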
{lfss-0.6.0 → lfss-0.7.0}/lfss/sql/init.sql
@@ -1,3 +1,9 @@
+PRAGMA journal_mode=MEMROY;
+PRAGMA temp_store=MEMORY;
+PRAGMA page_size=4096;
+PRAGMA synchronous=NORMAL;
+PRAGMA case_sensitive_like=ON;
+
 CREATE TABLE IF NOT EXISTS user (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
     username VARCHAR(256) UNIQUE NOT NULL,
@@ -22,11 +28,6 @@ CREATE TABLE IF NOT EXISTS fmeta (
     FOREIGN KEY(owner_id) REFERENCES user(id)
 );

-CREATE TABLE IF NOT EXISTS fdata (
-    file_id CHAR(32) PRIMARY KEY,
-    data BLOB
-);
-
 CREATE TABLE IF NOT EXISTS usize (
     user_id INTEGER PRIMARY KEY,
     size INTEGER DEFAULT 0
@@ -37,3 +38,8 @@ CREATE INDEX IF NOT EXISTS idx_fmeta_url ON fmeta(url);
 CREATE INDEX IF NOT EXISTS idx_user_username ON user(username);

 CREATE INDEX IF NOT EXISTS idx_user_credential ON user(credential);
+
+CREATE TABLE IF NOT EXISTS blobs.fdata (
+    file_id CHAR(32) PRIMARY KEY,
+    data BLOB
+);
lfss-0.7.0/lfss/sql/pragma.sql
@@ -0,0 +1,8 @@
+PRAGMA journal_mode=DELETE;
+PRAGMA synchronous=NORMAL;
+PRAGMA case_sensitive_like=ON;
+PRAGMA page_size=4096;
+
+PRAGMA blobs.journal_mode=DELETE;
+PRAGMA blobs.synchronous=NORMAL;
+PRAGMA blobs.page_size=16384;
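A schema prefix scopes a pragma to one attached database, so the main index keeps 4 KiB pages while the blob store uses 16 KiB pages, a size better suited to rows dominated by large BLOBs. Note that page_size only takes effect on a database that is still empty (or after VACUUM), and that the journal_mode=MEMROY line now at the top of init.sql is presumably a typo for MEMORY; SQLite ignores unrecognized journal-mode names, so that pragma is effectively a no-op. A short sketch for checking the effective per-schema settings (file names follow the database.py hunk below; the data-home path is illustrative):

    import asyncio
    from pathlib import Path
    import aiosqlite

    async def show_settings(data_home: Path):
        async with aiosqlite.connect(data_home / 'index.db') as conn:
            await conn.execute("ATTACH DATABASE ? AS blobs", (str(data_home / 'blobs.db'),))
            for schema in ('main', 'blobs'):
                for pragma in ('page_size', 'journal_mode'):
                    async with conn.execute(f"PRAGMA {schema}.{pragma}") as cur:
                        row = await cur.fetchone()
                        print(f"{schema}.{pragma} = {row[0]}")

    # e.g. asyncio.run(show_settings(Path('/srv/lfss-data')))
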
{lfss-0.6.0 → lfss-0.7.0}/lfss/src/config.py
@@ -7,6 +7,7 @@ DATA_HOME = Path(os.environ.get('LFSS_DATA', __default_dir))
 if not DATA_HOME.exists():
     DATA_HOME.mkdir()
     print(f"[init] Created data home at {DATA_HOME}")
+DATA_HOME = DATA_HOME.resolve().absolute()
 LARGE_BLOB_DIR = DATA_HOME / 'large_blobs'
 LARGE_BLOB_DIR.mkdir(exist_ok=True)

{lfss-0.6.0 → lfss-0.7.0}/lfss/src/database.py
@@ -1,6 +1,7 @@

 from typing import Optional, overload, Literal, AsyncIterable
 from abc import ABC, abstractmethod
+import os

 import urllib.parse
 from pathlib import Path
@@ -57,7 +58,12 @@ class DBConnBase(ABC):
         """Should return self"""
         global _g_conn
         if _g_conn is None:
-            _g_conn = await aiosqlite.connect(DATA_HOME / 'lfss.db')
+            if not os.environ.get('SQLITE_TEMPDIR'):
+                os.environ['SQLITE_TEMPDIR'] = str(DATA_HOME)
+            # large blobs are stored in a separate database, should be more efficient
+            _g_conn = await aiosqlite.connect(DATA_HOME / 'index.db')
+            async with _g_conn.cursor() as c:
+                await c.execute(f"ATTACH DATABASE ? AS blobs", (str(DATA_HOME/'blobs.db'), ))
             await execute_sql(_g_conn, 'pragma.sql')
             await execute_sql(_g_conn, 'init.sql')

@@ -162,60 +168,6 @@ class FileConn(DBConnBase):

     async def init(self):
         await super().init()
-        # backward compatibility, since 0.2.1
-        async with self.conn.execute("SELECT * FROM user") as cursor:
-            res = await cursor.fetchall()
-        for r in res:
-            async with self.conn.execute("SELECT user_id FROM usize WHERE user_id = ?", (r[0], )) as cursor:
-                size = await cursor.fetchone()
-            if size is None:
-                async with self.conn.execute("SELECT SUM(file_size) FROM fmeta WHERE owner_id = ?", (r[0], )) as cursor:
-                    size = await cursor.fetchone()
-                if size is not None and size[0] is not None:
-                    await self._user_size_inc(r[0], size[0])
-
-        # backward compatibility, since 0.5.0
-        # 'external' means the file is not stored in the database, but in the external storage
-        async with self.conn.execute("SELECT * FROM fmeta") as cursor:
-            res = await cursor.fetchone()
-        if res and len(res) < 8:
-            self.logger.info("Updating fmeta table")
-            await self.conn.execute('''
-                ALTER TABLE fmeta ADD COLUMN external BOOLEAN DEFAULT FALSE
-            ''')
-
-        # backward compatibility, since 0.6.0
-        async with self.conn.execute("SELECT * FROM fmeta") as cursor:
-            res = await cursor.fetchone()
-        if res and len(res) < 9:
-            self.logger.info("Updating fmeta table")
-            await self.conn.execute('''
-                ALTER TABLE fmeta ADD COLUMN mime_type TEXT DEFAULT 'application/octet-stream'
-            ''')
-            # check all mime types
-            import mimetypes, mimesniff
-            async with self.conn.execute("SELECT url, file_id, external FROM fmeta") as cursor:
-                res = await cursor.fetchall()
-            async with self.conn.execute("SELECT count(*) FROM fmeta") as cursor:
-                count = await cursor.fetchone()
-                assert count is not None
-            for counter, r in enumerate(res, start=1):
-                print(f"Checking mimetype for {counter}/{count[0]}")
-                url, f_id, external = r
-                fname = url.split('/')[-1]
-                mime_type, _ = mimetypes.guess_type(fname)
-                if mime_type is None:
-                    # try to sniff the file
-                    if not external:
-                        async with self.conn.execute("SELECT data FROM fdata WHERE file_id = ?", (f_id, )) as cursor:
-                            blob = await cursor.fetchone()
-                        assert blob is not None
-                        blob = blob[0]
-                        mime_type = mimesniff.what(blob)
-                    else:
-                        mime_type = mimesniff.what(LARGE_BLOB_DIR / f_id)
-                await self.conn.execute("UPDATE fmeta SET mime_type = ? WHERE url = ?", (mime_type, url))
-
         return self

     async def get_file_record(self, url: str) -> Optional[FileRecord]:
@@ -459,7 +411,7 @@ class FileConn(DBConnBase):

     @atomic
     async def set_file_blob(self, file_id: str, blob: bytes):
-        await self.conn.execute("INSERT OR REPLACE INTO fdata (file_id, data) VALUES (?, ?)", (file_id, blob))
+        await self.conn.execute("INSERT OR REPLACE INTO blobs.fdata (file_id, data) VALUES (?, ?)", (file_id, blob))

     @atomic
     async def set_file_blob_external(self, file_id: str, stream: AsyncIterable[bytes])->int:
@@ -476,7 +428,7 @@ class FileConn(DBConnBase):
         return size_sum

     async def get_file_blob(self, file_id: str) -> Optional[bytes]:
-        async with self.conn.execute("SELECT data FROM fdata WHERE file_id = ?", (file_id, )) as cursor:
+        async with self.conn.execute("SELECT data FROM blobs.fdata WHERE file_id = ?", (file_id, )) as cursor:
             res = await cursor.fetchone()
             if res is None:
                 return None
@@ -495,11 +447,11 @@ class FileConn(DBConnBase):

     @atomic
     async def delete_file_blob(self, file_id: str):
-        await self.conn.execute("DELETE FROM fdata WHERE file_id = ?", (file_id, ))
+        await self.conn.execute("DELETE FROM blobs.fdata WHERE file_id = ?", (file_id, ))

     @atomic
     async def delete_file_blobs(self, file_ids: list[str]):
-        await self.conn.execute("DELETE FROM fdata WHERE file_id IN ({})".format(','.join(['?'] * len(file_ids))), file_ids)
+        await self.conn.execute("DELETE FROM blobs.fdata WHERE file_id IN ({})".format(','.join(['?'] * len(file_ids))), file_ids)

 def validate_url(url: str, is_file = True):
     prohibited_chars = ['..', ';', "'", '"', '\\', '\0', '\n', '\r', '\t', '\x0b', '\x0c']
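delete_file_blobs builds its IN clause by expanding one '?' placeholder per id, which keeps the values parameterized rather than string-interpolated. A quick illustration of the generated SQL:

    file_ids = ['a1b2', 'c3d4', 'e5f6']
    sql = "DELETE FROM blobs.fdata WHERE file_id IN ({})".format(
        ','.join(['?'] * len(file_ids)))
    print(sql)
    # DELETE FROM blobs.fdata WHERE file_id IN (?,?,?)
    # Executed as conn.execute(sql, file_ids), the ids are bound as
    # parameters, never spliced into the SQL text.
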
{lfss-0.6.0 → lfss-0.7.0}/lfss/src/datatype.py
@@ -25,7 +25,7 @@ class UserRecord:
 class FileRecord:
     url: str
     owner_id: int
-    file_id: str    # defines mapping from fmata to fdata
+    file_id: str    # defines mapping from fmata to blobs.fdata
     file_size: int
     create_time: str
     access_time: str
{lfss-0.6.0 → lfss-0.7.0}/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "lfss"
-version = "0.6.0"
+version = "0.7.0"
 description = "Lightweight file storage service"
 authors = ["li, mengxun <limengxun45@outlook.com>"]
 readme = "Readme.md"
lfss-0.6.0/lfss/sql/pragma.sql
@@ -1,5 +0,0 @@
-PRAGMA journal_mode=MEMROY;
-PRAGMA temp_store=MEMORY;
-PRAGMA page_size=8192;
-PRAGMA synchronous=NORMAL;
-PRAGMA case_sensitive_like=ON;