lfss 0.7.4__py3-none-any.whl → 0.7.5__py3-none-any.whl
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- frontend/api.js +7 -2
- lfss/client/__init__.py +1 -1
- lfss/client/api.py +2 -2
- lfss/src/config.py +1 -1
- lfss/src/connection_pool.py +39 -28
- lfss/src/database.py +21 -33
- lfss/src/log.py +1 -4
- lfss/src/server.py +12 -19
- lfss/src/utils.py +3 -16
- {lfss-0.7.4.dist-info → lfss-0.7.5.dist-info}/METADATA +1 -1
- {lfss-0.7.4.dist-info → lfss-0.7.5.dist-info}/RECORD +13 -13
- {lfss-0.7.4.dist-info → lfss-0.7.5.dist-info}/WHEEL +0 -0
- {lfss-0.7.4.dist-info → lfss-0.7.5.dist-info}/entry_points.txt +0 -0
frontend/api.js
CHANGED
@@ -133,12 +133,17 @@ export default class Connector {
 
     /**
      * @param {string} path - the path to the file directory, should ends with '/'
+     * @param {Object} options - the options for the request
      * @returns {Promise<PathListResponse>} - the promise of the request
      */
-    async listPath(path
+    async listPath(path, {
+        flat = false
+    } = {}){
         if (path.startsWith('/')){ path = path.slice(1); }
         if (!path.endsWith('/')){ path += '/'; }
-        const
+        const dst = new URL(this.config.endpoint + '/' + path);
+        dst.searchParams.append('flat', flat);
+        const res = await fetch(dst.toString(), {
             method: 'GET',
             headers: {
                 'Authorization': 'Bearer ' + this.config.token
lfss/client/__init__.py
CHANGED
@@ -150,6 +150,6 @@ def download_directory(
             failed_files.append(src_url)
 
     with ThreadPoolExecutor(n_concurrent) as executor:
-        for file in connector.list_path(src_path).files:
+        for file in connector.list_path(src_path, flat=True).files:
             executor.submit(get_file, file.url)
     return failed_files
lfss/client/api.py
CHANGED
@@ -117,9 +117,9 @@ class Connector:
                 return None
             raise e
 
-    def list_path(self, path: str) -> PathContents:
+    def list_path(self, path: str, flat: bool = False) -> PathContents:
         assert path.endswith('/')
-        response = self._fetch_factory('GET', path)()
+        response = self._fetch_factory('GET', path, {'flat': flat})()
         dirs = [DirectoryRecord(**d) for d in response.json()['dirs']]
         files = [FileRecord(**f) for f in response.json()['files']]
         return PathContents(dirs=dirs, files=files)
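For readers following the client change: `list_path` now forwards a `flat` flag as a query parameter, so one call can either list a single level (dirs plus files) or every file under the path. Below is a minimal usage sketch, not taken from the package; the `conn` object and the `myuser/data/` path are placeholders, and the `Connector` constructor is omitted because it is not part of this diff.

```python
# Minimal sketch: `conn` is assumed to be an already-constructed
# lfss.client.api.Connector (its constructor is not shown in this diff).
def print_listing(conn, path: str = "myuser/data/"):
    # Default behaviour: one level only, sub-directories and direct files.
    shallow = conn.list_path(path)
    print("dirs :", shallow.dirs)
    print("files:", [f.url for f in shallow.files])

    # New in 0.7.5: flat=True asks the server for every file under the
    # path recursively; the dirs list comes back empty in this mode.
    deep = conn.list_path(path, flat=True)
    for record in deep.files:
        print(record.url)
```

This is the same call that `download_directory` in lfss/client/__init__.py now makes with `flat=True` to pick up nested files.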
lfss/src/config.py
CHANGED
lfss/src/connection_pool.py
CHANGED
@@ -18,13 +18,22 @@ async def execute_sql(conn: aiosqlite.Connection | aiosqlite.Cursor, name: str):
     for s in sql:
         await conn.execute(s)
 
-async def get_connection() -> aiosqlite.Connection:
+async def get_connection(read_only: bool = False) -> aiosqlite.Connection:
     if not os.environ.get('SQLITE_TEMPDIR'):
         os.environ['SQLITE_TEMPDIR'] = str(DATA_HOME)
-
-
+
+    def get_db_uri(path: Path, read_only: bool = False):
+        return f"file:{path}?mode={ 'ro' if read_only else 'rwc' }"
+
+    conn = await aiosqlite.connect(
+        get_db_uri(DATA_HOME / 'index.db', read_only=read_only),
+        timeout = 60, uri = True
+    )
     async with conn.cursor() as c:
-        await c.execute(
+        await c.execute(
+            f"ATTACH DATABASE ? AS blobs",
+            (get_db_uri(DATA_HOME/'blobs.db', read_only=read_only), )
+        )
     await execute_sql(conn, 'pragma.sql')
     return conn
 
@@ -35,47 +44,49 @@ class SqlConnection:
     is_available: bool = True
 
 class SqlConnectionPool:
-
+    _r_sem: Semaphore
     _w_sem: Semaphore
     def __init__(self):
-        self.
-        self.
+        self._readers: list[SqlConnection] = []
+        self._writer: None | SqlConnection = None
        self._lock = Lock()
 
     async def init(self, n_read: int):
         await self.close()
-        self.
-
-
-            self._connections.append(SqlConnection(conn))
-        self._w_connection = SqlConnection(await get_connection())
-        self._sem = Semaphore(n_read)
+        self._readers = []
+
+        self._writer = SqlConnection(await get_connection(read_only=False))
         self._w_sem = Semaphore(1)
+
+        for _ in range(n_read):
+            conn = await get_connection(read_only=True)
+            self._readers.append(SqlConnection(conn))
+        self._r_sem = Semaphore(n_read)
 
     @property
     def n_read(self):
-        return len(self.
+        return len(self._readers)
     @property
-    def
-        return self.
+    def r_sem(self):
+        return self._r_sem
     @property
     def w_sem(self):
         return self._w_sem
 
     async def get(self, w: bool = False) -> SqlConnection:
-        if len(self.
+        if len(self._readers) == 0:
            raise Exception("No available connections, please init the pool first")
 
        async with self._lock:
            if w:
-                assert self.
-                if self.
-                    self.
-                    return self.
+                assert self._writer
+                if self._writer.is_available:
+                    self._writer.is_available = False
+                    return self._writer
                raise Exception("Write connection is not available")
 
            else:
-                for c in self.
+                for c in self._readers:
                    if c.is_available:
                        c.is_available = False
                        return c
@@ -83,19 +94,19 @@ class SqlConnectionPool:
 
     async def release(self, conn: SqlConnection):
         async with self._lock:
-            if conn == self.
+            if conn == self._writer:
                conn.is_available = True
                return
 
-            if not conn in self.
+            if not conn in self._readers:
                raise Exception("Connection not in pool")
            conn.is_available = True
 
     async def close(self):
-        for c in self.
+        for c in self._readers:
            await c.conn.close()
-        if self.
-            await self.
+        if self._writer:
+            await self._writer.conn.close()
 
 # these two functions shold be called before and after the event loop
 g_pool = SqlConnectionPool()
@@ -125,7 +136,7 @@ def global_entrance(n_read: int = 1):
 @asynccontextmanager
 async def unique_cursor(is_write: bool = False):
     if not is_write:
-        async with g_pool.
+        async with g_pool.r_sem:
            connection_obj = await g_pool.get()
            try:
                yield await connection_obj.conn.cursor()
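In summary, the pool now keeps one read-write connection behind a writer semaphore and opens `n_read` additional connections with SQLite's URI `mode=ro` behind a reader semaphore. The standalone sketch below reproduces that pattern with plain aiosqlite; the file name, table, and queries are placeholders, and this is not lfss's own code.

```python
# Standalone sketch of the one-writer / N-reader SQLite pattern.
import asyncio
import aiosqlite

def db_uri(path: str, read_only: bool) -> str:
    # mode=ro gives a read-only handle, mode=rwc creates the file if needed
    return f"file:{path}?mode={'ro' if read_only else 'rwc'}"

async def demo(db_path: str = "demo_index.db", n_read: int = 2) -> None:
    # Single writer, opened read-write (creates the file on first use).
    writer = await aiosqlite.connect(db_uri(db_path, False), uri=True, timeout=60)
    await writer.execute("CREATE TABLE IF NOT EXISTS t (x INTEGER)")
    await writer.commit()

    # Pool of read-only connections, each guarded by a shared semaphore.
    readers = [await aiosqlite.connect(db_uri(db_path, True), uri=True, timeout=60)
               for _ in range(n_read)]
    r_sem, w_sem = asyncio.Semaphore(n_read), asyncio.Semaphore(1)

    async with w_sem:                          # writes funnel through one connection
        await writer.execute("INSERT INTO t VALUES (1)")
        await writer.commit()

    async with r_sem:                          # reads fan out over the ro handles
        async with readers[0].execute("SELECT COUNT(*) FROM t") as cur:
            print(await cur.fetchone())

    for conn in (writer, *readers):
        await conn.close()

asyncio.run(demo())
```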
lfss/src/database.py
CHANGED
@@ -1,5 +1,5 @@
 
-from typing import Optional,
+from typing import Optional, Literal, AsyncIterable
 from abc import ABC
 
 import urllib.parse
@@ -29,9 +29,6 @@ class DBObjectBase(ABC):
             raise ValueError("Connection not set")
         return self._cur
 
-    # async def commit(self):
-    #     await self.conn.commit()
-
 DECOY_USER = UserRecord(0, 'decoy', 'decoy', False, '2021-01-01 00:00:00', '2021-01-01 00:00:00', 0, FileReadPermission.PRIVATE)
 class UserConn(DBObjectBase):
 
@@ -43,10 +40,6 @@ class UserConn(DBObjectBase):
     def parse_record(record) -> UserRecord:
         return UserRecord(*record)
 
-    async def init(self, cur: aiosqlite.Cursor):
-        self.set_cursor(cur)
-        return self
-
     async def get_user(self, username: str) -> Optional[UserRecord]:
         await self.cur.execute("SELECT * FROM user WHERE username = ?", (username, ))
         res = await self.cur.fetchone()
@@ -132,10 +125,6 @@ class FileConn(DBObjectBase):
     def parse_record(record) -> FileRecord:
         return FileRecord(*record)
 
-    def init(self, cur: aiosqlite.Cursor):
-        self.set_cursor(cur)
-        return self
-
     async def get_file_record(self, url: str) -> Optional[FileRecord]:
         cursor = await self.cur.execute("SELECT * FROM fmeta WHERE url = ?", (url, ))
         res = await cursor.fetchone()
@@ -150,7 +139,7 @@ class FileConn(DBObjectBase):
             return []
         return [self.parse_record(r) for r in res]
 
-    async def
+    async def list_root_dirs(self, *usernames: str) -> list[DirectoryRecord]:
         """
         Efficiently list users' directories, if usernames is empty, list all users' directories.
         """
@@ -167,17 +156,12 @@ class FileConn(DBObjectBase):
         dirs = [DirectoryRecord(u, await self.path_size(u, include_subpath=True)) for u in dirnames]
         return dirs
 
-
-    async def list_path(self, url: str, flat: Literal[True]) -> list[FileRecord]:...
-    @overload
-    async def list_path(self, url: str, flat: Literal[False]) -> PathContents:...
-
-    async def list_path(self, url: str, flat: bool = False) -> list[FileRecord] | PathContents:
+    async def list_path(self, url: str, flat: bool = False) -> PathContents:
         """
-        List all files and directories under the given path
-        if flat is True,
-        Otherwise, return a tuple of (dirs, files), where dirs is a list of DirectoryRecord,
+        List all files and directories under the given path
+        if flat is True, list all files under the path, with out delimiting directories
         """
+        self.logger.debug(f"Listing path {url}, flat={flat}")
         if not url.endswith('/'):
             url += '/'
         if url == '/':
@@ -186,15 +170,17 @@ class FileConn(DBObjectBase):
             if flat:
                 cursor = await self.cur.execute("SELECT * FROM fmeta")
                 res = await cursor.fetchall()
-
+                files = [self.parse_record(r) for r in res]
+                return PathContents([], files)
 
             else:
-                return PathContents(await self.
+                return PathContents(await self.list_root_dirs(), [])
 
         if flat:
             cursor = await self.cur.execute("SELECT * FROM fmeta WHERE url LIKE ?", (url + '%', ))
             res = await cursor.fetchall()
-
+            files = [self.parse_record(r) for r in res]
+            return PathContents([], files)
 
         cursor = await self.cur.execute("SELECT * FROM fmeta WHERE url LIKE ? AND url NOT LIKE ?", (url + '%', url + '%/%'))
         res = await cursor.fetchall()
@@ -358,7 +344,7 @@ class FileConn(DBObjectBase):
             await self._user_size_dec(r[0], size[0])
 
         # if any new records are created here, the size update may be inconsistent
-        # but it's not a big deal...
+        # but it's not a big deal... we should have only one writer
 
         if under_user_id is None:
             res = await self.cur.execute("DELETE FROM fmeta WHERE url LIKE ? RETURNING *", (path + '%', ))
@@ -432,7 +418,7 @@ async def get_user(cur: aiosqlite.Cursor, user: int | str) -> Optional[UserRecor
     else:
         return None
 
-# mostly transactional
+# higher level database operations, mostly transactional
 class Database:
     logger = get_logger('database', global_instance=True)
 
@@ -615,11 +601,13 @@ class Database:
             else:
                 internal_ids.append(r.file_id)
 
-
-
-
-
-
+        async def del_internal():
+            for i in range(0, len(internal_ids), batch_size):
+                await fconn.delete_file_blobs([r for r in internal_ids[i:i+batch_size]])
+        async def del_external():
+            for i in range(0, len(external_ids)):
+                await fconn.delete_file_blob_external(external_ids[i])
+        await asyncio.gather(del_internal(), del_external())
 
     async def delete_path(self, url: str, under_user: Optional[UserRecord] = None) -> Optional[list[FileRecord]]:
         validate_url(url, is_file=False)
@@ -655,7 +643,7 @@ class Database:
         async with unique_cursor() as cur:
             fconn = FileConn(cur)
             if urls is None:
-                urls = [r.url for r in await fconn.list_path(top_url, flat=True)]
+                urls = [r.url for r in (await fconn.list_path(top_url, flat=True)).files]
 
         for url in urls:
             if not url.startswith(top_url):
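The batch-deletion change above replaces the removed lines with two coroutines driven by asyncio.gather, so internal (batched) and external blob deletions proceed concurrently. A self-contained sketch of that pattern follows; the delete_batch and delete_one coroutines are stand-ins, not the package's delete_file_blobs / delete_file_blob_external methods.

```python
# Generic sketch of the gather-two-workers pattern used above.
import asyncio

async def delete_batch(ids: list[int]) -> None:       # stand-in for delete_file_blobs
    await asyncio.sleep(0.01)

async def delete_one(ext_id: int) -> None:            # stand-in for delete_file_blob_external
    await asyncio.sleep(0.01)

async def delete_all(internal_ids: list[int], external_ids: list[int],
                     batch_size: int = 100) -> None:
    async def del_internal():
        # internal blobs are removed in fixed-size batches
        for i in range(0, len(internal_ids), batch_size):
            await delete_batch(internal_ids[i:i + batch_size])

    async def del_external():
        # external blobs are removed one by one
        for ext_id in external_ids:
            await delete_one(ext_id)

    # both loops make progress concurrently on the event loop
    await asyncio.gather(del_internal(), del_external())

asyncio.run(delete_all(list(range(250)), list(range(5))))
```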
lfss/src/log.py
CHANGED
@@ -151,9 +151,6 @@ def log_access(
         return wrapper # type: ignore
     return _log_access
 
-def get_dummy_logger() -> BaseLogger:
-    return BaseLogger('dummy')
-
 __ALL__ = [
-    'get_logger', '
+    'get_logger', 'log_access'
 ]
lfss/src/server.py
CHANGED
@@ -18,7 +18,7 @@ from .stat import RequestDB
 from .config import MAX_BUNDLE_BYTES, MAX_FILE_BYTES, LARGE_FILE_BYTES
 from .utils import ensure_uri_compnents, format_last_modified, now_stamp
 from .connection_pool import global_connection_init, global_connection_close, unique_cursor
-from .database import Database, UserRecord, DECOY_USER, FileRecord, check_user_permission, FileReadPermission, UserConn, FileConn
+from .database import Database, UserRecord, DECOY_USER, FileRecord, check_user_permission, FileReadPermission, UserConn, FileConn, PathContents
 
 logger = get_logger("server", term_level="DEBUG")
 logger_failed_request = get_logger("failed_requests", term_level="INFO")
@@ -120,7 +120,7 @@ router_fs = APIRouter(prefix="")
 
 @router_fs.get("/{path:path}")
 @handle_exception
-async def get_file(path: str, download = False, user: UserRecord = Depends(get_current_user)):
+async def get_file(path: str, download: bool = False, flat: bool = False, user: UserRecord = Depends(get_current_user)):
     path = ensure_uri_compnents(path)
 
     # handle directory query
@@ -130,18 +130,20 @@ async def get_file(path: str, download = False, user: UserRecord = Depends(get_c
         async with unique_cursor() as conn:
             fconn = FileConn(conn)
             if user.id == 0:
-                raise HTTPException(status_code=
+                raise HTTPException(status_code=401, detail="Permission denied, credential required")
             if path == "/":
-
-                "
-
-
-
+                if flat:
+                    raise HTTPException(status_code=400, detail="Flat query not supported for root path")
+                return PathContents(
+                    dirs = await fconn.list_root_dirs(user.username) \
+                        if not user.is_admin else await fconn.list_root_dirs(),
+                    files = []
+                )
 
             if not path.startswith(f"{user.username}/") and not user.is_admin:
                 raise HTTPException(status_code=403, detail="Permission denied, path must start with username")
 
-            return await fconn.list_path(path, flat =
+            return await fconn.list_path(path, flat = flat)
 
     async with unique_cursor() as conn:
         fconn = FileConn(conn)
@@ -217,7 +219,6 @@ async def put_file(
         return Response(status_code=200, headers={
             "Content-Type": "application/json",
         }, content=json.dumps({"url": path}))
-    # remove the old file
     exists_flag = True
     if not user.is_admin and not file_record.owner_id == user.id:
         raise HTTPException(status_code=403, detail="Permission denied, cannot overwrite other's file")
@@ -317,7 +318,7 @@ async def bundle_files(path: str, user: UserRecord = Depends(registered_user)):
 
     async with unique_cursor() as conn:
         fconn = FileConn(conn)
-        files = await fconn.list_path(path, flat = True)
+        files = (await fconn.list_path(path, flat = True)).files
         files = [f for f in files if await is_access_granted(f)]
         if len(files) == 0:
             raise HTTPException(status_code=404, detail="No files found")
@@ -339,14 +340,6 @@ async def bundle_files(path: str, user: UserRecord = Depends(registered_user)):
 @router_api.get("/meta")
 @handle_exception
 async def get_file_meta(path: str, user: UserRecord = Depends(registered_user)):
-    """
-    Permission:
-        for file:
-            if file is under user's path, return the meta,
-            else, determine by the permission same as get_file
-        for path:
-            if path is under user's path, return the meta, else return 403
-    """
     logger.info(f"GET meta({path}), user: {user.username}")
     path = ensure_uri_compnents(path)
     is_file = not path.endswith("/")
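At the HTTP level, the directory branch of GET /{path} now honours a flat query parameter and returns a JSON body with dirs and files, matching what both clients consume. Below is a wire-level sketch using only the standard library; the endpoint URL, token, and path are placeholders, not values from this diff.

```python
# Wire-level sketch of the directory listing request (host, port, token
# and path are placeholder values).
import json
import urllib.parse
import urllib.request

ENDPOINT = "http://localhost:8000"      # assumed local lfss instance
TOKEN = "your-token-here"               # placeholder credential

def list_dir(path: str, flat: bool = False) -> dict:
    assert path.endswith("/")
    query = urllib.parse.urlencode({"flat": str(flat).lower()})
    req = urllib.request.Request(
        f"{ENDPOINT}/{urllib.parse.quote(path)}?{query}",
        headers={"Authorization": f"Bearer {TOKEN}"},
        method="GET",
    )
    with urllib.request.urlopen(req) as resp:
        return json.load(resp)

listing = list_dir("myuser/data/", flat=True)   # recursive, files only
print(len(listing["files"]), "files")
```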
lfss/src/utils.py
CHANGED
@@ -8,33 +8,21 @@ def hash_credential(username: str, password: str):
     return hashlib.sha256((username + password).encode()).hexdigest()
 
 def encode_uri_compnents(path: str):
-    """
-    Encode the path components to encode the special characters,
-    also to avoid path traversal attack
-    """
     path_sp = path.split("/")
     mapped = map(lambda x: urllib.parse.quote(x), path_sp)
     return "/".join(mapped)
 
 def decode_uri_compnents(path: str):
-    """
-    Decode the path components to decode the special characters
-    """
     path_sp = path.split("/")
     mapped = map(lambda x: urllib.parse.unquote(x), path_sp)
     return "/".join(mapped)
 
 def ensure_uri_compnents(path: str):
-    """
-    Ensure the path components are safe to use
-    """
+    """ Ensure the path components are safe to use """
     return encode_uri_compnents(decode_uri_compnents(path))
 
 def debounce_async(delay: float = 0):
-    """
-    Decorator to debounce the async function (procedure)
-    The function must return None
-    """
+    """ Debounce the async procedure """
     def debounce_wrap(func):
         # https://docs.python.org/3/library/asyncio-task.html#asyncio.Task.cancel
         async def delayed_func(*args, **kwargs):
@@ -55,10 +43,9 @@ def debounce_async(delay: float = 0):
         return wrapper
     return debounce_wrap
 
-# https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Last-Modified
 def format_last_modified(last_modified_gmt: str):
     """
-    Format the last modified time to the HTTP standard format
+    Format the last modified time to the [HTTP standard format](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Last-Modified)
     - last_modified_gmt: The last modified time in SQLite ISO 8601 GMT format: e.g. '2021-09-01 12:00:00'
     """
     assert len(last_modified_gmt) == 19
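For reference, format_last_modified's docstring now links the target format: it converts a SQLite-style GMT timestamp ('YYYY-MM-DD HH:MM:SS') into an HTTP Last-Modified date. The sketch below illustrates the same conversion with the standard library; it is not the package's implementation.

```python
# Illustration only: SQLite-style GMT timestamp -> HTTP Last-Modified date,
# e.g. '2021-09-01 12:00:00' -> 'Wed, 01 Sep 2021 12:00:00 GMT'.
from datetime import datetime, timezone

def to_http_last_modified(last_modified_gmt: str) -> str:
    assert len(last_modified_gmt) == 19          # 'YYYY-MM-DD HH:MM:SS'
    dt = datetime.strptime(last_modified_gmt, "%Y-%m-%d %H:%M:%S")
    dt = dt.replace(tzinfo=timezone.utc)
    return dt.strftime("%a, %d %b %Y %H:%M:%S GMT")

print(to_http_last_modified("2021-09-01 12:00:00"))
```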
{lfss-0.7.4.dist-info → lfss-0.7.5.dist-info}/RECORD
CHANGED
@@ -1,7 +1,7 @@
 Readme.md,sha256=vsPotlwPAaHI5plh4aaszpi3rr7ZGDn7-wLdEYTWQ0k,1275
 docs/Known_issues.md,sha256=rfdG3j1OJF-59S9E06VPyn0nZKbW-ybPxkoZ7MEZWp8,81
 docs/Permission.md,sha256=X0VNfBKU52f93QYqcVyiBFJ3yURiSkhIo9S_5fdSgzM,2265
-frontend/api.js,sha256
+frontend/api.js,sha256=lHqT7zGmsUZItE-FRR0LfTl_WYCcqlNssfa00XYo-EY,7865
 frontend/index.html,sha256=VPJDs2LG8ep9kjlsKzjWzpN9vc1VGgdvOUlNTZWyQoQ,2088
 frontend/popup.css,sha256=VzkjG1ZTLxhHMtTyobnlvqYmVsTmdbJJed2Pu1cc06c,1007
 frontend/popup.js,sha256=3PgaGZmxSdV1E-D_MWgcR7aHWkcsHA1BNKSOkmP66tA,5191
@@ -13,21 +13,21 @@ lfss/cli/cli.py,sha256=Yup3xIVEQPu10uM8dq1bvre1fK5ngweQHxXZsgQq4Hc,4187
 lfss/cli/panel.py,sha256=iGdVmdWYjA_7a78ZzWEB_3ggIOBeUKTzg6F5zLaB25c,1401
 lfss/cli/serve.py,sha256=bO3GT0kuylMGN-7bZWP4e71MlugGZ_lEMkYaYld_Ntg,985
 lfss/cli/user.py,sha256=h-USWF6lB0Ztm9vwQznqsghKJ5INq5mBmaQeX2D5F-w,3490
-lfss/client/__init__.py,sha256=
-lfss/client/api.py,sha256=
+lfss/client/__init__.py,sha256=8uvcKs3PYQamDd_cjfN-fX9QUohEzJqeJlOYkBlzC3M,4556
+lfss/client/api.py,sha256=kSkB4wADTu012-1wl6v90OiZrw6aTQ42GU4jtV4KO0k,5764
 lfss/sql/init.sql,sha256=C-JtQAlaOjESI8uoF1Y_9dKukEVSw5Ll-7yA3gG-XHU,1210
 lfss/sql/pragma.sql,sha256=uENx7xXjARmro-A3XAK8OM8v5AxDMdCCRj47f86UuXg,206
 lfss/src/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-lfss/src/config.py,sha256=
-lfss/src/connection_pool.py,sha256=
-lfss/src/database.py,sha256=
+lfss/src/config.py,sha256=aTfjWORE9Mx7LSEjbfmHnULlrmIWEvEBSZ4fJKWZNjM,530
+lfss/src/connection_pool.py,sha256=teW_4DMiwlCN_bS7AhjkbY9cHZqUFlmHE_J2yPjHVsA,5125
+lfss/src/database.py,sha256=G9U_Iijp7euuGj3fcWdSGJPetMhn56X0vI8iWr6ZUr8,31904
 lfss/src/datatype.py,sha256=BLS7vuuKnFZQg0nrKeP9SymqUhcN6HwPgejU0yBd_Ak,1622
 lfss/src/error.py,sha256=imbhwnbhnI3HLhkbfICROe3F0gleKrOk4XnqHJDOtuI,285
-lfss/src/log.py,sha256=
-lfss/src/server.py,sha256=
+lfss/src/log.py,sha256=xOnkuH-gB_jSVGqNnDVEW05iki6SCJ2xdEhjz5eEsMo,5136
+lfss/src/server.py,sha256=EA5fK4qc98tF8qoS9F6VaxIE65D5X8Ztkjqy8EUYIv8,16276
 lfss/src/stat.py,sha256=hTMtQyM_Ukmhc33Bb9FGCfBMIX02KrGHQg8nL7sC8sU,2082
-lfss/src/utils.py,sha256=
-lfss-0.7.
-lfss-0.7.
-lfss-0.7.
-lfss-0.7.
+lfss/src/utils.py,sha256=miGsv7udupDtSpVSd66IvUt0_QMi3JXYCp_BjdPJY-M,2134
+lfss-0.7.5.dist-info/METADATA,sha256=cGOrCSM3pjCqYJdrFNrLfO2P08zx1cmwgfmAmTT4MJg,1967
+lfss-0.7.5.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+lfss-0.7.5.dist-info/entry_points.txt,sha256=d_Ri3GXxUW-S0E6q953A8od0YMmUAnZGlJSKS46OiW8,172
+lfss-0.7.5.dist-info/RECORD,,
{lfss-0.7.4.dist-info → lfss-0.7.5.dist-info}/WHEEL
File without changes
{lfss-0.7.4.dist-info → lfss-0.7.5.dist-info}/entry_points.txt
File without changes