lfss 0.11.1__py3-none-any.whl → 0.11.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- docs/Enviroment_variables.md +3 -1
- docs/changelog.md +27 -0
- frontend/api.js +66 -4
- frontend/login.js +0 -1
- frontend/popup.js +18 -3
- frontend/scripts.js +8 -5
- frontend/utils.js +4 -5
- lfss/api/connector.py +17 -2
- lfss/cli/cli.py +7 -7
- lfss/cli/log.py +77 -0
- lfss/cli/vacuum.py +10 -3
- lfss/eng/config.py +6 -3
- lfss/eng/database.py +99 -40
- lfss/eng/log.py +91 -21
- lfss/eng/utils.py +1 -2
- lfss/svc/app_base.py +4 -1
- lfss/svc/app_dav.py +7 -7
- lfss/svc/app_native.py +58 -11
- lfss/svc/common_impl.py +4 -4
- {lfss-0.11.1.dist-info → lfss-0.11.3.dist-info}/METADATA +3 -2
- {lfss-0.11.1.dist-info → lfss-0.11.3.dist-info}/RECORD +23 -22
- {lfss-0.11.1.dist-info → lfss-0.11.3.dist-info}/entry_points.txt +1 -0
- {lfss-0.11.1.dist-info → lfss-0.11.3.dist-info}/WHEEL +0 -0
docs/Enviroment_variables.md
CHANGED
```diff
@@ -4,8 +4,10 @@
 **Server**
 - `LFSS_DATA`: The directory to store the data. Default is `.storage_data`.
 - `LFSS_WEBDAV`: Enable WebDAV support. Default is `0`, set to `1` to enable.
-- `LFSS_LARGE_FILE`: The size limit of the file to store in the database. Default is `
+- `LFSS_LARGE_FILE`: The size limit of the file to store in the database. Default is `1m`.
 - `LFSS_DEBUG`: Enable debug mode for more verbose logging. Default is `0`, set to `1` to enable.
+- `LFSS_DISABLE_LOGGING`: Disable all file logging. Default is `0`; set to `1` to disable file logging.
+- `LFSS_ORIGIN`: The `Origin` header to allow CORS requests. Use `,` to separate multiple origins. Default is `*`.
 
 **Client**
 - `LFSS_ENDPOINT`: The fallback server endpoint. Default is `http://localhost:8000`.
```
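For context, the `1m`-style size strings and comma-separated origins can be exercised with a short sketch. `parse_size` below is a stand-in written for this note (lfss has its own `parse_storage_size` in `lfss/eng/config.py`), and the accepted suffixes are an assumption based on the documented `1m` default:

```python
import os

def parse_size(s: str) -> int:
    # Stand-in for lfss's parse_storage_size; the suffix set is assumed.
    units = {'k': 1024, 'm': 1024 ** 2, 'g': 1024 ** 3}
    s = s.strip().lower()
    if s and s[-1] in units:
        return int(float(s[:-1]) * units[s[-1]])
    return int(s)

os.environ.setdefault('LFSS_LARGE_FILE', '1m')
os.environ.setdefault('LFSS_ORIGIN', 'https://a.example,https://b.example')

print(parse_size(os.environ['LFSS_LARGE_FILE']))            # 1048576
print(os.environ['LFSS_ORIGIN'].split(','))                 # ['https://a.example', 'https://b.example']
print(os.environ.get('LFSS_DISABLE_LOGGING', '0') == '1')   # False unless set to '1'
```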
docs/changelog.md
CHANGED
```diff
@@ -1,3 +1,30 @@
+## 0.11
+
+### 0.11.2
+- Improve frontend directory upload feedback.
+- Set default large file threshold to 1M.
+- Increase default concurrent threads.
+- Use sqlite for logging.
+- Add vacuum logs.
+- Refactor: use dir for directory path.
+
+### 0.11.1
+- Rename api `get_meta` function.
+- Frontend support upload directory.
+- Fix admin put to non-existent user path.
+
+### 0.11.0
+- Copy file as hard link.
+- Add vacuum thumb and all.
+- Thumb database use file_id as index.
+- Improve username and url check with regular expression.
+
+## 0.10
+
+### 0.10.0
+- Inherit permission from path owner for `unset` permission files.
+- Add timeout and verify options for client api.
+- Bundle small files in memory.
 
 ## 0.9
 
```
frontend/api.js
CHANGED
```diff
@@ -69,6 +69,10 @@ export default class Connector {
     /**
      * @param {string} path - the path to the file (url)
      * @param {File} file - the file to upload
+     * @param {Object} [options] - Optional upload configuration.
+     * @param {'abort' | 'overwrite' | 'skip'} [options.conflict='abort'] - Conflict resolution strategy:
+     *   `'abort'` to cancel and raise 409, `'overwrite'` to replace.
+     * @param {number} [options.permission=0] - Optional permission setting for the file (refer to backend impl).
      * @returns {Promise<string>} - the promise of the request, the url of the file
      */
     async put(path, file, {
@@ -96,8 +100,12 @@
     }
 
     /**
-     * @param {string} path - the path to the file (url)
+     * @param {string} path - the path to the file (url), should end with .json
      * @param {File} file - the file to upload
+     * @param {Object} [options] - Optional upload configuration.
+     * @param {'abort' | 'overwrite' | 'skip'} [options.conflict='abort'] - Conflict resolution strategy:
+     *   `'abort'` to cancel and raise 409, `'overwrite'` to replace, `'skip'` to ignore if already exists.
+     * @param {number} [options.permission=0] - Optional permission setting for the file (refer to backend impl).
      * @returns {Promise<string>} - the promise of the request, the url of the file
      */
     async post(path, file, {
@@ -129,13 +137,23 @@
 
     /**
      * @param {string} path - the path to the file (url), should end with .json
-     * @param {
+     * @param {Object} data - the data to upload
+     * @param {Object} [options] - Optional upload configuration.
+     * @param {'abort' | 'overwrite' | 'skip'} [options.conflict='abort'] - Conflict resolution strategy:
+     *   `'abort'` to cancel and raise 409, `'overwrite'` to replace, `'skip'` to ignore if already exists.
+     * @param {number} [options.permission=0] - Optional permission setting for the file (refer to backend impl).
      * @returns {Promise<string>} - the promise of the request, the url of the file
      */
-    async putJson(path, data
+    async putJson(path, data, {
+        conflict = "overwrite",
+        permission = 0
+    } = {}){
         if (!path.endsWith('.json')){ throw new Error('Upload object must end with .json'); }
         if (path.startsWith('/')){ path = path.slice(1); }
-        const
+        const dst = new URL(this.config.endpoint + '/' + path);
+        dst.searchParams.append('conflict', conflict);
+        dst.searchParams.append('permission', permission);
+        const res = await fetch(dst.toString(), {
             method: 'PUT',
             headers: {
                 'Authorization': 'Bearer ' + this.config.token,
@@ -149,6 +167,50 @@
         return (await res.json()).url;
     }
 
+    /**
+     * @param {string} path - the path to the file (url), should have content type application/json
+     * @returns {Promise<Object>} - return the json object
+     */
+    async getJson(path){
+        if (path.startsWith('/')){ path = path.slice(1); }
+        const res = await fetch(this.config.endpoint + '/' + path, {
+            method: 'GET',
+            headers: {
+                "Authorization": 'Bearer ' + this.config.token
+            },
+        });
+        if (res.status != 200){
+            throw new Error(`Failed to get object, status code: ${res.status}, message: ${await fmtFailedResponse(res)}`);
+        }
+        return await res.json();
+    }
+
+    /**
+     * @param {string[]} paths - the paths to the files (url), should have content type plain/text, application/json, etc.
+     * @param {Object} [options] - Optional configuration.
+     * @param {boolean} [options.skipContent=false] - If true, skips fetching content and returns a record of <path, ''>.
+     * @returns {Promise<Record<string, string | null>>} - return the mapping of path to text content, non-existing paths will be ignored
+     */
+    async getMultipleText(paths, {
+        skipContent = false
+    } = {}){
+        const url = new URL(this.config.endpoint + '/_api/get-multiple');
+        url.searchParams.append('skip_content', skipContent);
+        for (const path of paths){
+            url.searchParams.append('path', path);
+        }
+        const res = await fetch(url.toString(), {
+            method: 'GET',
+            headers: {
+                "Authorization": 'Bearer ' + this.config.token,
+            }
+        });
+        if (res.status != 200 && res.status != 206){
+            throw new Error(`Failed to get multiple files, status code: ${res.status}, message: ${await fmtFailedResponse(res)}`);
+        }
+        return await res.json();
+    }
+
     async delete(path){
         if (path.startsWith('/')){ path = path.slice(1); }
         const res = await fetch(this.config.endpoint + '/' + path, {
```
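The new `putJson`/`getMultipleText` helpers boil down to plain HTTP calls, so the same requests can be issued from any client. Below is a sketch in Python of the two request shapes; the endpoint, token, and file paths are placeholders, and the response schemas are as suggested by the code above (`{url: ...}` for uploads, a path-to-content map for the batch GET):

```python
import requests

ENDPOINT = "http://localhost:8000"                   # placeholder endpoint
HEADERS = {"Authorization": "Bearer <your-token>"}   # placeholder token

# PUT a JSON object, with the new conflict/permission query parameters.
res = requests.put(
    f"{ENDPOINT}/me/settings.json",  # hypothetical path; must end with .json
    params={"conflict": "overwrite", "permission": 0},
    json={"theme": "dark"},
    headers=HEADERS,
)
res.raise_for_status()
print(res.json()["url"])

# Batch-read several text files via the new _api/get-multiple endpoint.
res = requests.get(
    f"{ENDPOINT}/_api/get-multiple",
    params={"path": ["me/a.txt", "me/b.txt"], "skip_content": "false"},
    headers=HEADERS,
)
res.raise_for_status()
print(res.json())  # maps each path to its text content, None if missing
```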
frontend/login.js
CHANGED
frontend/popup.js
CHANGED
```diff
@@ -109,7 +109,14 @@ export function showPopup(content = '', {
 } = {}){
     const popup = document.createElement("div");
     popup.classList.add("popup-window");
-
+    /**
+     * @param {string} c
+     * @returns {void}
+     */
+    function setPopupContent(c){
+        popup.innerHTML = showTime? `<span>[${new Date().toLocaleTimeString()}]</span> ${c}` : c;
+    }
+    setPopupContent(content);
     popup.style.width = width;
     const popupHeight = '1rem';
     popup.style.height = popupHeight;
@@ -132,11 +139,19 @@ export function showPopup(content = '', {
     if (level === "success") popup.style.backgroundColor = "darkgreen";
     document.body.appendChild(popup);
     shownPopups.push(popup);
-
+
+    function closePopup(){
         if (popup.parentNode) document.body.removeChild(popup);
         shownPopups.splice(shownPopups.indexOf(popup), 1);
         for (let i = 0; i < shownPopups.length; i++) {
             shownPopups[i].style.top = `${i * (parseInt(popupHeight) + 2*parseInt(paddingHeight))*1.2 + 0.5}rem`;
         }
-    }
+    }
+
+    window.setTimeout(closePopup, timeout);
+    return {
+        elem: popup,
+        setContent: setPopupContent,
+        close: closePopup
+    }
 }
```
frontend/scripts.js
CHANGED
```diff
@@ -183,26 +183,29 @@ Are you sure you want to proceed?\
 `)){ return; }
 
     let counter = 0;
+    let totalCount = 0;
+    const uploadPopup = showPopup('Uploading multiple files...', {level: 'info', timeout: 999999});
     async function uploadFileFn(path, file){
-        const this_count = counter;
         try{
             await uploadFile(conn, path, file, {conflict: 'overwrite'});
        }
         catch (err){
             showPopup('Failed to upload file [' + file.name + ']: ' + err, {level: 'error', timeout: 5000});
         }
-        console.log(`[${
+        console.log(`[${counter}/${totalCount}] Uploaded file: ${path}`);
+        uploadPopup.setContent(`Uploading multiple files... [${counter}/${totalCount}]`);
     }
 
-    const promises = await forEachFile(e, async (relPath,
+    const promises = await forEachFile(e, async (relPath, filePromiseFn) => {
         counter += 1;
-        const file = await
+        const file = await filePromiseFn();
         await uploadFileFn(dstPath + relPath, file);
     });
+    totalCount = promises.length;
 
-    showPopup('Uploading multiple files...', {level: 'info', timeout: 3000});
     Promise.all(promises).then(
         () => {
+            window.setTimeout(uploadPopup.close, 3000);
             showPopup('Upload success.', {level: 'success', timeout: 3000});
             refreshFileList();
         },
```
frontend/utils.js
CHANGED
```diff
@@ -101,7 +101,7 @@ export function asHtmlText(text){
  * using the provided callback with a concurrency limit.
  *
  * @param {Event} e The drop event.
- * @param {(relPath: string, file: Promise<File>) => Promise<void>} callback A function
+ * @param {(relPath: string, file: () => Promise<File>) => Promise<void>} callback A function
  *   that receives the relative path and a promise for the File.
  * @param {number} [maxConcurrent=5] Maximum number of concurrent callback executions.
 * @returns {Promise<Promise<void>[]>} A promise resolving to an array of callback promises.
@@ -146,11 +146,10 @@ export async function forEachFile(e, callback, maxConcurrent = 16) {
     async function traverse(entry, path) {
         if (entry.isFile) {
             // Wrap file retrieval in a promise.
-            const
-                entry.file(resolve, reject);
-            });
+            const filePromiseFn = () =>
+                new Promise((resolve, reject) => entry.file(resolve, reject));
             // Use the concurrency barrier for the callback invocation.
-            results.push(runWithLimit(() => callback(path + entry.name,
+            results.push(runWithLimit(() => callback(path + entry.name, filePromiseFn)));
         } else if (entry.isDirectory) {
             const reader = entry.createReader();
 
```
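The `utils.js` change above is subtle: the callback now receives a *function* returning a `Promise<File>` rather than an eagerly created promise, so file handles are only acquired once `runWithLimit` grants a slot. The same deferred-factory pattern in Python, as an illustrative sketch (none of these names are from lfss):

```python
import asyncio

sem = asyncio.Semaphore(5)  # concurrency barrier, analogous to runWithLimit

async def run_with_limit(factory):
    # factory is zero-argument: the work (and any resource it opens)
    # starts only after a semaphore slot is acquired.
    async with sem:
        return await factory()

async def fetch_file(i: int) -> str:
    await asyncio.sleep(0.01)  # stand-in for entry.file() + upload
    return f"file-{i}"

async def main():
    factories = [lambda i=i: fetch_file(i) for i in range(20)]
    results = await asyncio.gather(*(run_with_limit(f) for f in factories))
    print(results[:3], "...")

asyncio.run(main())
```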
lfss/api/connector.py
CHANGED
```diff
@@ -1,7 +1,7 @@
 from __future__ import annotations
 from typing import Optional, Literal
 from collections.abc import Iterator
-import os
+import os, json
 import requests
 import requests.adapters
 import urllib.parse
@@ -76,7 +76,11 @@
             path = path[1:]
         path = ensure_uri_compnents(path)
         def f(**kwargs):
-
+            search_params_t = [
+                (k, str(v).lower() if isinstance(v, bool) else v)
+                for k, v in search_params.items()
+            ] # tuple form
+            url = f"{self.config['endpoint']}/{path}" + "?" + urllib.parse.urlencode(search_params_t, doseq=True)
             headers: dict = kwargs.pop('headers', {})
             headers.update({
                 'Authorization': f"Bearer {self.config['token']}",
@@ -207,6 +211,17 @@
         assert response.headers['Content-Type'] == 'application/json'
         return response.json()
 
+    def get_multiple_text(self, *paths: str, skip_content = False) -> dict[str, Optional[str]]:
+        """
+        Gets text contents of multiple files at once. Non-existing files will return None.
+        - skip_content: if True, the file contents will not be fetched, always be empty string ''.
+        """
+        response = self._fetch_factory(
+            'GET', '_api/get-multiple',
+            {'path': paths, "skip_content": skip_content}
+        )()
+        return response.json()
+
     def delete(self, path: str):
         """Deletes the file at the specified path."""
         self._fetch_factory('DELETE', path)()
```
lfss/cli/cli.py
CHANGED
```diff
@@ -1,5 +1,5 @@
 from pathlib import Path
-import argparse, typing
+import argparse, typing, sys
 from lfss.api import Connector, upload_directory, upload_file, download_file, download_directory
 from lfss.eng.datatype import FileReadPermission, FileSortKey, DirSortKey
 from lfss.eng.utils import decode_uri_compnents
@@ -78,9 +78,9 @@ def main():
             permission=args.permission
         )
         if failed_upload:
-            print("\033[91mFailed to upload:\033[0m")
+            print("\033[91mFailed to upload:\033[0m", file=sys.stderr)
             for path in failed_upload:
-                print(f" {path}")
+                print(f" {path}", file=sys.stderr)
     else:
         success, msg = upload_file(
             connector,
@@ -93,7 +93,7 @@
             permission=args.permission
         )
         if not success:
-            print("\033[91mFailed to upload: \033[0m", msg)
+            print("\033[91mFailed to upload: \033[0m", msg, file=sys.stderr)
 
     elif args.command == "download":
         is_dir = args.src.endswith("/")
@@ -107,9 +107,9 @@
             overwrite=args.overwrite
         )
         if failed_download:
-            print("\033[91mFailed to download:\033[0m")
+            print("\033[91mFailed to download:\033[0m", file=sys.stderr)
             for path in failed_download:
-                print(f" {path}")
+                print(f" {path}", file=sys.stderr)
     else:
         success, msg = download_file(
             connector,
@@ -121,7 +121,7 @@
             overwrite=args.overwrite
         )
         if not success:
-            print("\033[91mFailed to download: \033[0m", msg)
+            print("\033[91mFailed to download: \033[0m", msg, file=sys.stderr)
 
     elif args.command == "query":
         for path in args.path:
```
lfss/cli/log.py
ADDED
```diff
@@ -0,0 +1,77 @@
+from typing import Optional
+import argparse
+import rich.console
+import logging
+import sqlite3
+from lfss.eng.log import eval_logline
+
+console = rich.console.Console()
+def levelstr2int(levelstr: str) -> int:
+    import sys
+    if sys.version_info < (3, 11):
+        return logging.getLevelName(levelstr.upper())
+    else:
+        return logging.getLevelNamesMapping()[levelstr.upper()]
+
+def view(
+    db_file: str,
+    level: Optional[str] = None,
+    offset: int = 0,
+    limit: int = 1000
+    ):
+    conn = sqlite3.connect(db_file)
+    cursor = conn.cursor()
+    if level is None:
+        cursor.execute("SELECT * FROM log ORDER BY created DESC LIMIT ? OFFSET ?", (limit, offset))
+    else:
+        level_int = levelstr2int(level)
+        cursor.execute("SELECT * FROM log WHERE level >= ? ORDER BY created DESC LIMIT ? OFFSET ?", (level_int, limit, offset))
+    levelname_color = {
+        'DEBUG': 'blue',
+        'INFO': 'green',
+        'WARNING': 'yellow',
+        'ERROR': 'red',
+        'CRITICAL': 'bold red',
+        'FATAL': 'bold red'
+    }
+    for row in cursor.fetchall():
+        log = eval_logline(row)
+        console.print(f"{log.created} [{levelname_color[log.levelname]}][{log.levelname}] [default]{log.message}")
+    conn.close()
+
+def trim(db_file: str, keep: int = 1000, level: Optional[str] = None):
+    conn = sqlite3.connect(db_file)
+    cursor = conn.cursor()
+    if level is None:
+        cursor.execute("DELETE FROM log WHERE id NOT IN (SELECT id FROM log ORDER BY created DESC LIMIT ?)", (keep,))
+    else:
+        cursor.execute("DELETE FROM log WHERE levelname = ? and id NOT IN (SELECT id FROM log WHERE levelname = ? ORDER BY created DESC LIMIT ?)", (level.upper(), level.upper(), keep))
+    conn.commit()
+    conn.execute("VACUUM")
+    conn.close()
+
+def main():
+    parser = argparse.ArgumentParser(description="Log operations utility")
+    subparsers = parser.add_subparsers(title='subcommands', description='valid subcommands', help='additional help')
+
+    parser_show = subparsers.add_parser('view', help='Show logs')
+    parser_show.add_argument('db_file', type=str, help='Database file path')
+    parser_show.add_argument('-l', '--level', type=str, required=False, help='Log level')
+    parser_show.add_argument('--offset', type=int, default=0, help='Starting offset')
+    parser_show.add_argument('--limit', type=int, default=1000, help='Maximum number of entries to display')
+    parser_show.set_defaults(func=view)
+
+    parser_trim = subparsers.add_parser('trim', help='Trim logs')
+    parser_trim.add_argument('db_file', type=str, help='Database file path')
+    parser_trim.add_argument('-l', '--level', type=str, required=False, help='Log level')
+    parser_trim.add_argument('--keep', type=int, default=1000, help='Number of entries to keep')
+    parser_trim.set_defaults(func=trim)
+
+    args = parser.parse_args()
+    if hasattr(args, 'func'):
+        kwargs = vars(args)
+        func = kwargs.pop('func')
+        func(**kwargs)
+
+if __name__ == '__main__':
+    main()
```
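Both subcommands can also be called as plain functions, which is what `vacuum.py` does below with `trim`. A small usage sketch; the database path is a placeholder (per `vacuum.py`, the server's log databases live under `LOG_DIR` and match `*.log.db`):

```python
from lfss.cli.log import view, trim

db = ".storage_data/logs/server.log.db"  # placeholder path

# Print the 50 most recent entries at WARNING and above
# (view filters with `level >= ?` on the numeric level column).
view(db, level="warning", limit=50)

# Keep only the newest 10k rows, then VACUUM to reclaim space.
trim(db, keep=10_000)
```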
lfss/cli/vacuum.py
CHANGED
```diff
@@ -2,7 +2,7 @@
 Vacuum the database and external storage to ensure that the storage is consistent and minimal.
 """
 
-from lfss.eng.config import LARGE_BLOB_DIR, THUMB_DB
+from lfss.eng.config import LARGE_BLOB_DIR, THUMB_DB, LOG_DIR
 import argparse, time, itertools
 from functools import wraps
 from asyncio import Semaphore
@@ -14,6 +14,7 @@ from lfss.eng.database import transaction, unique_cursor
 from lfss.svc.request_log import RequestDB
 from lfss.eng.utils import now_stamp
 from lfss.eng.connection_pool import global_entrance
+from lfss.cli.log import trim
 
 sem: Semaphore
 
@@ -33,7 +34,7 @@ def barriered(func):
     return wrapper
 
 @global_entrance()
-async def vacuum_main(index: bool = False, blobs: bool = False, thumbs: bool = False, vacuum_all: bool = False):
+async def vacuum_main(index: bool = False, blobs: bool = False, thumbs: bool = False, logs: bool = False, vacuum_all: bool = False):
 
     # check if any file in the Large Blob directory is not in the database
     # the reverse operation is not necessary, because by design, the database should be the source of truth...
@@ -73,6 +74,11 @@ async def vacuum_main(index: bool = False, blobs: bool = False, thumbs: bool = F
         async with unique_cursor(is_write=True) as c:
             await c.execute("VACUUM blobs")
 
+    if logs or vacuum_all:
+        with indicator("VACUUM-logs"):
+            for log_file in LOG_DIR.glob("*.log.db"):
+                trim(str(log_file), keep=10_000)
+
     if thumbs or vacuum_all:
         try:
             async with transaction() as c:
@@ -123,9 +129,10 @@ def main():
     parser.add_argument("-d", "--data", action="store_true", help="Vacuum blobs")
     parser.add_argument("-t", "--thumb", action="store_true", help="Vacuum thumbnails")
     parser.add_argument("-r", "--requests", action="store_true", help="Vacuum request logs to only keep at most recent 1M rows in 7 days")
+    parser.add_argument("-l", "--logs", action="store_true", help="Trim log to keep at most recent 10k rows for each category")
     args = parser.parse_args()
     sem = Semaphore(args.jobs)
-    asyncio.run(vacuum_main(index=args.metadata, blobs=args.data, thumbs=args.thumb, vacuum_all=args.all))
+    asyncio.run(vacuum_main(index=args.metadata, blobs=args.data, thumbs=args.thumb, logs = args.logs, vacuum_all=args.all))
 
     if args.requests or args.all:
         asyncio.run(vacuum_requests())
```
lfss/eng/config.py
CHANGED
```diff
@@ -11,16 +11,19 @@ if not DATA_HOME.exists():
 DATA_HOME = DATA_HOME.resolve().absolute()
 LARGE_BLOB_DIR = DATA_HOME / 'large_blobs'
 LARGE_BLOB_DIR.mkdir(exist_ok=True)
+LOG_DIR = DATA_HOME / 'logs'
+
+DISABLE_LOGGING = os.environ.get('DISABLE_LOGGING', '0') == '1'
 
 # https://sqlite.org/fasterthanfs.html
 __env_large_file = os.environ.get('LFSS_LARGE_FILE', None)
 if __env_large_file is not None:
     LARGE_FILE_BYTES = parse_storage_size(__env_large_file)
 else:
-    LARGE_FILE_BYTES =
-MAX_MEM_FILE_BYTES = 128 * 1024 * 1024
+    LARGE_FILE_BYTES = 1 * 1024 * 1024 # 1MB
+MAX_MEM_FILE_BYTES = 128 * 1024 * 1024 # 128MB
 CHUNK_SIZE = 1024 * 1024 # 1MB chunks for streaming (on large files)
 DEBUG_MODE = os.environ.get('LFSS_DEBUG', '0') == '1'
 
 THUMB_DB = DATA_HOME / 'thumbs.v0-11.db'
-THUMB_SIZE = (
+THUMB_SIZE = (64, 64)
```
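For what these constants control: files at or below `LARGE_FILE_BYTES` are candidates for in-SQLite storage (the linked sqlite.org note explains why small blobs are faster there), while larger files go to `LARGE_BLOB_DIR`. A toy illustration of that routing; the function and the exact comparison are invented for this sketch, not lfss's real logic:

```python
LARGE_FILE_BYTES = 1 * 1024 * 1024  # 1MB, the new default

def storage_target(size_bytes: int) -> str:
    # Illustrative only; lfss's actual routing lives in its database layer.
    return "sqlite blob" if size_bytes <= LARGE_FILE_BYTES else "large_blobs/ file"

print(storage_target(512 * 1024))       # sqlite blob
print(storage_target(8 * 1024 * 1024))  # large_blobs/ file
```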
|