backupchan-client-lib 1.4.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- backupchan_client_lib-1.4.0/.gitignore +6 -0
- backupchan_client_lib-1.4.0/CHANGELOG.md +71 -0
- backupchan_client_lib-1.4.0/LICENSE +11 -0
- backupchan_client_lib-1.4.0/PKG-INFO +82 -0
- backupchan_client_lib-1.4.0/README.md +56 -0
- backupchan_client_lib-1.4.0/backupchan/__init__.py +5 -0
- backupchan_client_lib-1.4.0/backupchan/api.py +220 -0
- backupchan_client_lib-1.4.0/backupchan/connection.py +72 -0
- backupchan_client_lib-1.4.0/backupchan/models.py +108 -0
- backupchan_client_lib-1.4.0/backupchan_client_lib.egg-info/PKG-INFO +82 -0
- backupchan_client_lib-1.4.0/backupchan_client_lib.egg-info/SOURCES.txt +17 -0
- backupchan_client_lib-1.4.0/backupchan_client_lib.egg-info/dependency_links.txt +1 -0
- backupchan_client_lib-1.4.0/backupchan_client_lib.egg-info/requires.txt +5 -0
- backupchan_client_lib-1.4.0/backupchan_client_lib.egg-info/top_level.txt +1 -0
- backupchan_client_lib-1.4.0/pyproject.toml +40 -0
- backupchan_client_lib-1.4.0/setup.cfg +4 -0
- backupchan_client_lib-1.4.0/tests/conftest.py +8 -0
- backupchan_client_lib-1.4.0/tests/test_connection.py +144 -0
- backupchan_client_lib-1.4.0/tests/test_models.py +65 -0
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
# Backup-chan client library changelog
|
|
2
|
+
|
|
3
|
+
See what's changed between versions!
|
|
4
|
+
|
|
5
|
+
## 1.4.0
|
|
6
|
+
|
|
7
|
+
* Added support for minimum number of backups field.
|
|
8
|
+
|
|
9
|
+
## 1.3.2
|
|
10
|
+
|
|
11
|
+
* Hotfix for 1.3.1
|
|
12
|
+
|
|
13
|
+
## 1.3.1
|
|
14
|
+
|
|
15
|
+
* Added function to get human-readable timestamps for jobs.
|
|
16
|
+
|
|
17
|
+
## 1.3.0
|
|
18
|
+
|
|
19
|
+
* Added support for listing jobs and force running scheduled jobs.
|
|
20
|
+
|
|
21
|
+
## 1.2.0
|
|
22
|
+
|
|
23
|
+
* Added support for deleting recycled backups of target.
|
|
24
|
+
|
|
25
|
+
## 1.1.0
|
|
26
|
+
|
|
27
|
+
* Added support for sequential uploads.
|
|
28
|
+
|
|
29
|
+
## 1.0.1
|
|
30
|
+
|
|
31
|
+
* Fixed `upload_backup_folder` not returning value.
|
|
32
|
+
|
|
33
|
+
## 1.0.0
|
|
34
|
+
|
|
35
|
+
* **Breaking change**: Backup upload now returns ID of the job that is processing the upload instead of the backup ID.
|
|
36
|
+
|
|
37
|
+
## 0.4.2
|
|
38
|
+
|
|
39
|
+
* Fixed backup download function not respecting output directory.
|
|
40
|
+
|
|
41
|
+
## 0.4.1
|
|
42
|
+
|
|
43
|
+
* Fixed backup download function not returning downloaded filename.
|
|
44
|
+
|
|
45
|
+
## 0.4.0
|
|
46
|
+
|
|
47
|
+
* Added backup downloads.
|
|
48
|
+
|
|
49
|
+
## 0.3.2
|
|
50
|
+
|
|
51
|
+
* Stream files when doing file uploads.
|
|
52
|
+
|
|
53
|
+
## 0.3.1
|
|
54
|
+
|
|
55
|
+
* Fixed some functions not validating response.
|
|
56
|
+
|
|
57
|
+
## 0.3.0
|
|
58
|
+
|
|
59
|
+
* Added support for uploading directories directly.
|
|
60
|
+
|
|
61
|
+
## 0.2.0
|
|
62
|
+
|
|
63
|
+
* Added viewing stats.
|
|
64
|
+
|
|
65
|
+
## 0.1.1
|
|
66
|
+
|
|
67
|
+
Updated the `README.md` and `pyproject.toml` file.
|
|
68
|
+
|
|
69
|
+
## 0.1.0
|
|
70
|
+
|
|
71
|
+
The first stable version.
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
Copyright 2025 moltony
|
|
2
|
+
|
|
3
|
+
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
|
|
4
|
+
|
|
5
|
+
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
|
|
6
|
+
|
|
7
|
+
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
|
|
8
|
+
|
|
9
|
+
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
|
|
10
|
+
|
|
11
|
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: backupchan-client-lib
|
|
3
|
+
Version: 1.4.0
|
|
4
|
+
Summary: Library for interfacing with Backup-chan.
|
|
5
|
+
Author-email: Moltony <koronavirusnyj@gmail.com>
|
|
6
|
+
License: BSD-3-Clause
|
|
7
|
+
Project-URL: Homepage, https://github.com/Backupchan/client-lib
|
|
8
|
+
Project-URL: Repository, https://github.com/Backupchan/client-lib.git
|
|
9
|
+
Project-URL: Issues, https://github.com/Backupchan/client-lib/issues
|
|
10
|
+
Project-URL: Changelog, https://github.com/Backupchan/client-lib/blob/main/CHANGELOG.md
|
|
11
|
+
Classifier: Development Status :: 5 - Production/Stable
|
|
12
|
+
Classifier: License :: OSI Approved :: BSD License
|
|
13
|
+
Classifier: Natural Language :: English
|
|
14
|
+
Classifier: Operating System :: OS Independent
|
|
15
|
+
Classifier: Programming Language :: Python :: 3 :: Only
|
|
16
|
+
Classifier: Topic :: System :: Archiving :: Backup
|
|
17
|
+
Classifier: Topic :: Software Development :: Libraries
|
|
18
|
+
Classifier: Typing :: Typed
|
|
19
|
+
Description-Content-Type: text/markdown
|
|
20
|
+
License-File: LICENSE
|
|
21
|
+
Requires-Dist: requests
|
|
22
|
+
Provides-Extra: dev
|
|
23
|
+
Requires-Dist: pytest; extra == "dev"
|
|
24
|
+
Requires-Dist: requests-mock; extra == "dev"
|
|
25
|
+
Dynamic: license-file
|
|
26
|
+
|
|
27
|
+
# Backup-chan client library
|
|
28
|
+
|
|
29
|
+

|
|
30
|
+

|
|
31
|
+

|
|
32
|
+
|
|
33
|
+
This is the Python library for interfacing with a Backup-chan server.
|
|
34
|
+
|
|
35
|
+
## Installing
|
|
36
|
+
|
|
37
|
+
```bash
|
|
38
|
+
# The easy way
|
|
39
|
+
pip install backupchan-client-lib
|
|
40
|
+
|
|
41
|
+
# Install from source
|
|
42
|
+
git clone https://github.com/Backupchan/client-lib.git backupchan-client-lib
|
|
43
|
+
cd backupchan-client-lib
|
|
44
|
+
pip install .
|
|
45
|
+
```
|
|
46
|
+
|
|
47
|
+
For instructions on setting up the server, refer to Backup-chan server's README.
|
|
48
|
+
|
|
49
|
+
## Testing
|
|
50
|
+
|
|
51
|
+
```
|
|
52
|
+
pytest
|
|
53
|
+
```
|
|
54
|
+
|
|
55
|
+
## Example
|
|
56
|
+
|
|
57
|
+
```python
|
|
58
|
+
from backupchan import *
|
|
59
|
+
|
|
60
|
+
# Connect to a server
|
|
61
|
+
api = API("http://192.168.1.43", 5000, "your api key")
|
|
62
|
+
|
|
63
|
+
# Print every target
|
|
64
|
+
targets = api.list_targets()
|
|
65
|
+
for target in targets:
|
|
66
|
+
print(target)
|
|
67
|
+
|
|
68
|
+
# Create a new target
|
|
69
|
+
target_id = api.new_target(
|
|
70
|
+
"the waifu collection", # name
|
|
71
|
+
BackupType.MULTI,
|
|
72
|
+
BackupRecycleCriteria.AGE,
|
|
73
|
+
10, # recycle value
|
|
74
|
+
BackupRecycleAction.RECYCLE,
|
|
75
|
+
"/var/backups/waifu", # location
|
|
76
|
+
"wf-$I_$D", # name template
|
|
77
|
+
False, # deduplicate
|
|
78
|
+
None, # alias
None # minimum number of backups (added in 1.4.0)
|
|
79
|
+
)
|
|
80
|
+
target, backups = api.get_target(target_id)
|
|
81
|
+
print(f"Created new target: {target}")
|
|
82
|
+
```
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
# Backup-chan client library
|
|
2
|
+
|
|
3
|
+

|
|
4
|
+

|
|
5
|
+

|
|
6
|
+
|
|
7
|
+
This is the Python library for interfacing with a Backup-chan server.
|
|
8
|
+
|
|
9
|
+
## Installing
|
|
10
|
+
|
|
11
|
+
```bash
|
|
12
|
+
# The easy way
|
|
13
|
+
pip install backupchan-client-lib
|
|
14
|
+
|
|
15
|
+
# Install from source
|
|
16
|
+
git clone https://github.com/Backupchan/client-lib.git backupchan-client-lib
|
|
17
|
+
cd backupchan-client-lib
|
|
18
|
+
pip install .
|
|
19
|
+
```
|
|
20
|
+
|
|
21
|
+
For instructions on setting up the server, refer to Backup-chan server's README.
|
|
22
|
+
|
|
23
|
+
## Testing
|
|
24
|
+
|
|
25
|
+
```
|
|
26
|
+
pytest
|
|
27
|
+
```
|
|
28
|
+
|
|
29
|
+
## Example
|
|
30
|
+
|
|
31
|
+
```python
|
|
32
|
+
from backupchan import *
|
|
33
|
+
|
|
34
|
+
# Connect to a server
|
|
35
|
+
api = API("http://192.168.1.43", 5000, "your api key")
|
|
36
|
+
|
|
37
|
+
# Print every target
|
|
38
|
+
targets = api.list_targets()
|
|
39
|
+
for target in targets:
|
|
40
|
+
print(target)
|
|
41
|
+
|
|
42
|
+
# Create a new target
|
|
43
|
+
target_id = api.new_target(
|
|
44
|
+
"the waifu collection", # name
|
|
45
|
+
BackupType.MULTI,
|
|
46
|
+
BackupRecycleCriteria.AGE,
|
|
47
|
+
10, # recycle value
|
|
48
|
+
BackupRecycleAction.RECYCLE,
|
|
49
|
+
"/var/backups/waifu", # location
|
|
50
|
+
"wf-$I_$D", # name template
|
|
51
|
+
False, # deduplicate
|
|
52
|
+
None, # alias
None # minimum number of backups (added in 1.4.0)
|
|
53
|
+
)
|
|
54
|
+
target, backups = api.get_target(target_id)
|
|
55
|
+
print(f"Created new target: {target}")
|
|
56
|
+
```
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
from .connection import Connection
|
|
2
|
+
from .models import *
|
|
3
|
+
from .api import API, BackupchanAPIError
|
|
4
|
+
|
|
5
|
+
__all__ = ["Connection", "BackupRecycleCriteria", "BackupRecycleAction", "BackupType", "BackupTarget", "Backup", "API", "BackupchanAPIError", "Stats", "SequentialFile"]
|
|
@@ -0,0 +1,220 @@
|
|
|
1
|
+
import io
|
|
2
|
+
import os
|
|
3
|
+
import tempfile
|
|
4
|
+
import uuid
|
|
5
|
+
import tarfile
|
|
6
|
+
import dataclasses
|
|
7
|
+
from typing import Generator
|
|
8
|
+
from .connection import Connection, Response
|
|
9
|
+
from .models import Backup, BackupTarget, BackupRecycleCriteria, BackupRecycleAction, BackupType, Stats, SequentialFile, DelayedJob, ScheduledJob
|
|
10
|
+
|
|
11
|
+
class BackupchanAPIError(Exception):
|
|
12
|
+
def __init__(self, message: str, status_code: int | None = None):
|
|
13
|
+
super().__init__(message)
|
|
14
|
+
self.status_code = status_code
|
|
15
|
+
|
|
16
|
+
def check_success(response: Response) -> dict | Generator[bytes, None, None]:
    """Validate an API response and return its body.

    Streaming responses (generator bodies) are judged by HTTP status alone,
    since the body cannot be inspected without consuming it; JSON responses
    must additionally carry a truthy "success" flag.

    Raises:
        BackupchanAPIError: when the server signals failure either way.
    """
    body = response.json_body
    if isinstance(body, Generator):
        if response.status_code != 200:
            raise BackupchanAPIError(f"Server returned error (code {response.status_code})", response.status_code)
    else:
        if not body.get("success", False):
            raise BackupchanAPIError(f"Server returned error: {body} (code {response.status_code})", response.status_code)
    return body
|
|
23
|
+
|
|
24
|
+
class API:
    """High-level client for the Backup-chan server HTTP API.

    Exposes one method per server endpoint on top of a Connection. Every
    method validates the server's reply via check_success and raises
    BackupchanAPIError on failure.
    """

    def __init__(self, host: str, port: int, api_key: str):
        self.connection = Connection(host, port, api_key)

    def list_targets(self, page: int = 1) -> list[BackupTarget]:
        """Return one page of backup targets."""
        response = self.connection.get(f"target?page={page}")
        resp_json = check_success(response)
        targets = resp_json["targets"]
        return [BackupTarget.from_dict(target) for target in targets]

    def new_target(self, name: str, backup_type: BackupType, recycle_criteria: BackupRecycleCriteria, recycle_value: int, recycle_action: BackupRecycleAction, location: str, name_template: str, deduplicate: bool, alias: str | None, min_backups: int | None) -> str:
        """Create a new backup target.

        Returns ID of new target.
        """
        data = {
            "name": name,
            "backup_type": backup_type,
            "recycle_criteria": recycle_criteria,
            "recycle_value": recycle_value,
            "recycle_action": recycle_action,
            "location": location,
            "name_template": name_template,
            "deduplicate": deduplicate,
            "alias": alias,
            "min_backups": min_backups
        }
        resp_json = check_success(self.connection.post("target", data))
        return resp_json["id"]

    def upload_backup(self, target_id: str, file: io.IOBase, filename: str, manual: bool) -> str:
        """Upload a single backup file.

        Returns ID of the job that uploads the backup.
        """
        data = {
            "manual": int(manual)
        }
        files = {
            "backup_file": (filename, file)
        }
        response = self.connection.post_form(f"target/{target_id}/upload", data=data, files=files)
        resp_json = check_success(response)
        return resp_json["job_id"]

    def upload_backup_folder(self, target_id: str, folder_path: str, manual: bool) -> str:
        """Archive a directory into a gzipped tarball and upload it.

        Returns the ID of the job that processes the upload.

        Raises:
            BackupchanAPIError: if folder_path is not a directory, or the
                target only accepts single files.
        """
        if not os.path.isdir(folder_path):
            raise BackupchanAPIError("Cannot upload a single file in a directory upload")

        # Cannot upload a directory to a single-file target.
        target_type = self.get_target(target_id)[0].target_type
        if target_type == BackupType.SINGLE:
            raise BackupchanAPIError("Cannot upload directory to a single file target")

        # Make a temporary gzipped tarball containing the directory contents.
        temp_tar_path = os.path.join(tempfile.gettempdir(), f"bakch-{uuid.uuid4().hex}.tar.gz")
        try:
            with tarfile.open(temp_tar_path, "w:gz") as tar:
                tar.add(folder_path, arcname=os.path.basename(folder_path))

            # Upload our new tar.
            with open(temp_tar_path, "rb") as tar:
                return self.upload_backup(target_id, tar, os.path.basename(folder_path) + ".tar.gz", manual)
        finally:
            # Fix: the temporary tarball used to be left behind on disk.
            try:
                os.remove(temp_tar_path)
            except OSError:
                pass

    def download_backup(self, backup_id: str, output_directory: str) -> str:
        """Stream a backup into output_directory; returns the full path written."""
        response = self.connection.get_stream(f"backup/{backup_id}/download")
        check_success(response)
        # Server supplies the original filename via Content-Disposition.
        filename = response.headers["Content-Disposition"].split("filename=")[-1].strip('"')
        full_path = os.path.join(output_directory, filename)
        with open(full_path, "wb") as file:
            for chunk in response.json_body:
                file.write(chunk)
        return full_path

    def get_target(self, id: str) -> tuple[BackupTarget, list[Backup]]:
        """Return a target together with the list of its backups."""
        response = self.connection.get(f"target/{id}")
        resp_json = check_success(response)
        return BackupTarget.from_dict(resp_json["target"]), [Backup.from_dict(backup) for backup in resp_json["backups"]]

    def edit_target(self, id: str, name: str, recycle_criteria: BackupRecycleCriteria, recycle_value: int, recycle_action: BackupRecycleAction, location: str, name_template: str, deduplicate: bool, alias: str | None, min_backups: int | None):
        """Update a target's settings."""
        data = {
            "name": name,
            "recycle_criteria": recycle_criteria,
            "recycle_value": recycle_value,
            "recycle_action": recycle_action,
            "location": location,
            "name_template": name_template,
            "deduplicate": deduplicate,
            "alias": alias,
            # Fix: min_backups was accepted by this method but never sent to
            # the server, so edits silently dropped the 1.4.0 field.
            "min_backups": min_backups
        }
        response = self.connection.patch(f"target/{id}", data=data)
        check_success(response)

    def delete_target(self, id: str, delete_files: bool):
        """Delete a target; optionally remove its files from disk too."""
        data = {
            "delete_files": delete_files
        }
        response = self.connection.delete(f"target/{id}", data=data)
        check_success(response)

    def delete_target_backups(self, id: str, delete_files: bool):
        """Delete all backups belonging to a target."""
        data = {
            "delete_files": delete_files
        }
        response = self.connection.delete(f"target/{id}/all", data=data)
        check_success(response)

    def delete_target_recycled_backups(self, id: str, delete_files: bool):
        """Delete only the recycled backups of a target."""
        data = {
            "delete_files": delete_files
        }
        response = self.connection.delete(f"target/{id}/recycled", data=data)
        check_success(response)

    def delete_backup(self, id: str, delete_files: bool):
        """Delete a single backup."""
        data = {
            "delete_files": delete_files
        }
        response = self.connection.delete(f"backup/{id}", data=data)
        check_success(response)

    def recycle_backup(self, id: str, is_recycled: bool):
        """Move a backup into (or out of) the recycle bin."""
        data = {
            "is_recycled": is_recycled
        }
        response = self.connection.patch(f"backup/{id}", data=data)
        check_success(response)

    def list_recycled_backups(self) -> list[Backup]:
        """Return every backup currently in the recycle bin."""
        response = self.connection.get("recycle_bin")
        resp_json = check_success(response)
        return [Backup.from_dict(backup) for backup in resp_json["backups"]]

    def clear_recycle_bin(self, delete_files: bool):
        """Empty the recycle bin; optionally remove files from disk."""
        data = {
            "delete_files": delete_files
        }
        response = self.connection.delete("recycle_bin", data=data)
        check_success(response)

    def get_log(self, tail: int) -> str:
        """Return the last `tail` lines of the server log."""
        response = self.connection.get(f"log?tail={tail}")
        resp_json = check_success(response)
        return resp_json["log"]

    def view_stats(self) -> Stats:
        """Return server-wide statistics."""
        response = self.connection.get("stats")
        resp_json = check_success(response)
        return Stats.from_dict(resp_json)

    def list_jobs(self) -> tuple[list[DelayedJob], list[ScheduledJob]]:
        """Return the server's delayed and scheduled jobs."""
        response = self.connection.get("jobs")
        resp_json = check_success(response)
        delayed_jobs = [DelayedJob.from_dict(json_job) for json_job in resp_json["delayed"]]
        scheduled_jobs = [ScheduledJob.from_dict(json_job) for json_job in resp_json["scheduled"]]
        return delayed_jobs, scheduled_jobs

    def force_run_job(self, name: str):
        """Force a scheduled job to run immediately."""
        check_success(self.connection.get(f"jobs/force_run/{name}"))

    def seq_begin(self, target_id: str, file_list: list[SequentialFile], manual: bool):
        """Start a sequential upload session announcing the files to come."""
        data = {
            "manual": int(manual),
            "file_list": [dataclasses.asdict(file) for file in file_list]
        }
        response = self.connection.post(f"seq/{target_id}/begin", data=data)
        check_success(response)

    def seq_check(self, target_id: str) -> list[SequentialFile]:
        """Return the file list of the target's active sequential session."""
        response = self.connection.get(f"seq/{target_id}")
        resp_json = check_success(response)
        return [SequentialFile.from_dict(file) for file in resp_json["file_list"]]

    def seq_upload(self, target_id: str, file_io: io.IOBase, file: SequentialFile):
        """Upload one file within a sequential session."""
        data = {
            "name": file.name,
            "path": file.path
        }
        files = {
            "file": file_io
        }
        response = self.connection.post_form(f"seq/{target_id}/upload", data=data, files=files)
        check_success(response)

    def seq_finish(self, target_id: str):
        """Complete the sequential session for a target."""
        response = self.connection.post(f"seq/{target_id}/finish", data={})
        check_success(response)

    def seq_terminate(self, target_id: str):
        """Abort the sequential session for a target."""
        response = self.connection.post(f"seq/{target_id}/terminate", data={})
        check_success(response)
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import requests
|
|
2
|
+
import json
|
|
3
|
+
import re
|
|
4
|
+
from typing import Generator
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
|
|
7
|
+
@dataclass
class Response:
    """Normalized result of a single HTTP call to the Backup-chan server."""
    json_body: dict | Generator[bytes, None, None] # is a generator when using get_stream
    # HTTP status code of the response.
    status_code: int
    # Response headers, e.g. Content-Disposition for downloads.
    headers: dict
|
|
12
|
+
|
|
13
|
+
def valid_api_key(api_key: str) -> bool:
    """Return True if api_key has the expected shape: "bakch-" followed by
    exactly 64 lowercase hex characters.

    Fix: previously returned the re.Match object (or None) despite the bool
    annotation; now returns a genuine bool. fullmatch replaces the ^...$
    anchors and [0-9a-f] replaces the verbose ([a-f]|[0-9]) alternation.
    """
    return re.fullmatch(r"bakch-[0-9a-f]{64}", api_key) is not None
|
|
15
|
+
|
|
16
|
+
class Connection:
    """Low-level HTTP transport for the Backup-chan API.

    Validates the API key and port, normalizes the base URL, and wraps the
    requests library with the Authorization header applied to every call.
    """

    def __init__(self, host: str, port: int, api_key: str):
        # An empty/blank key is allowed (server may run without auth);
        # a non-empty key must match the expected format.
        if len(api_key.strip()) != 0 and not valid_api_key(api_key):
            raise ValueError("Invalid API key")

        if port < 0 or port > 65535:
            raise ValueError("Port out of range")

        self.api_key = api_key

        # Default to http:// when the caller gives a bare hostname.
        if host.startswith("http://") or host.startswith("https://"):
            server_host = host.rstrip("/")
        else:
            server_host = f"http://{host.rstrip('/')}"
        self.base_url = f"{server_host}:{port}"

    def endpoint_url(self, endpoint: str) -> str:
        """Build the absolute URL for an API endpoint path."""
        return f"{self.base_url}/api/{endpoint.rstrip('/')}"

    def headers(self) -> dict:
        """Headers sent with every request (bearer auth)."""
        return {"Authorization": f"Bearer {self.api_key}"}

    def _finish(self, response, raise_on_error: bool, stream: bool = False) -> Response:
        # Shared tail of every request: optional raise_for_status, then
        # wrap either the parsed JSON or a chunked byte stream in Response.
        if raise_on_error:
            response.raise_for_status()
        body = response.iter_content(chunk_size=8192) if stream else response.json()
        return Response(body, response.status_code, response.headers)

    def get(self, endpoint: str, raise_on_error=False) -> Response:
        """GET an endpoint and parse the JSON body."""
        response = requests.get(self.endpoint_url(endpoint), headers=self.headers())
        return self._finish(response, raise_on_error)

    def get_stream(self, endpoint: str, raise_on_error=False) -> Response:
        """GET an endpoint as a byte stream (json_body is a chunk generator)."""
        response = requests.get(self.endpoint_url(endpoint), headers=self.headers(), stream=True)
        return self._finish(response, raise_on_error, stream=True)

    def post(self, endpoint: str, data: dict, raise_on_error=False) -> Response:
        """POST a JSON payload."""
        response = requests.post(self.endpoint_url(endpoint), headers=self.headers(), json=data)
        return self._finish(response, raise_on_error)

    def post_form(self, endpoint: str, data: dict, files: dict, raise_on_error=False) -> Response:
        """POST a multipart form (file uploads), streaming the request body."""
        response = requests.post(self.endpoint_url(endpoint), headers=self.headers(), data=data, files=files, stream=True)
        return self._finish(response, raise_on_error)

    def patch(self, endpoint: str, data: dict, raise_on_error=False) -> Response:
        """PATCH a JSON payload."""
        response = requests.patch(self.endpoint_url(endpoint), headers=self.headers(), json=data)
        return self._finish(response, raise_on_error)

    def delete(self, endpoint: str, data: dict, raise_on_error=False) -> Response:
        """DELETE with a JSON payload."""
        response = requests.delete(self.endpoint_url(endpoint), headers=self.headers(), json=data)
        return self._finish(response, raise_on_error)
|
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
from enum import Enum
|
|
2
|
+
from dataclasses import dataclass
|
|
3
|
+
from typing import Optional
|
|
4
|
+
from datetime import datetime
|
|
5
|
+
|
|
6
|
+
def from_http_date(date):
    """Parse an RFC 1123 / HTTP date string, e.g. "Mon, 02 Jan 2006 15:04:05 GMT".

    Returns a naive datetime (the trailing "GMT" is matched literally, not
    converted to a timezone).
    """
    http_date_format = "%a, %d %b %Y %H:%M:%S GMT"
    return datetime.strptime(date, http_date_format)
|
|
8
|
+
|
|
9
|
+
class BackupRecycleCriteria(str, Enum):
    """Criterion the server uses to pick backups for recycling.

    str-valued so members serialize directly into JSON payloads.
    """
    NONE = "none"
    COUNT = "count"  # presumably keyed off the backup count vs. recycle_value — confirm against server docs
    AGE = "age"      # presumably keyed off backup age vs. recycle_value — confirm against server docs
|
|
13
|
+
|
|
14
|
+
class BackupRecycleAction(str, Enum):
    """What happens to a backup selected for recycling.

    str-valued so members serialize directly into JSON payloads.
    """
    DELETE = "delete"
    RECYCLE = "recycle"
|
|
17
|
+
|
|
18
|
+
class BackupType(str, Enum):
    """Kind of target: a single file, or a collection of files.

    str-valued so members serialize directly into JSON payloads.
    """
    SINGLE = "single"
    MULTI = "multi"
|
|
21
|
+
|
|
22
|
+
@dataclass
class BackupTarget:
    """A backup target as reported by the server."""
    id: str
    name: str
    target_type: BackupType
    recycle_criteria: BackupRecycleCriteria
    recycle_value: Optional[int]
    recycle_action: BackupRecycleAction
    location: str
    name_template: str
    deduplicate: bool
    alias: str | None
    min_backups: int | None

    @staticmethod
    def from_dict(d: dict) -> "BackupTarget":
        """Build a BackupTarget from a server JSON object.

        alias and min_backups are read with .get so payloads from servers
        predating those fields (min_backups was added in 1.4.0) don't raise
        KeyError — consistent with SequentialFile.from_dict.
        """
        return BackupTarget(
            id=d["id"],
            name=d["name"],
            target_type=d["target_type"],
            recycle_criteria=d["recycle_criteria"],
            recycle_value=d["recycle_value"],
            recycle_action=d["recycle_action"],
            location=d["location"],
            name_template=d["name_template"],
            deduplicate=d["deduplicate"],
            alias=d.get("alias"),
            min_backups=d.get("min_backups"),
        )
|
|
39
|
+
|
|
40
|
+
@dataclass
class Backup:
    """A single stored backup belonging to a target."""
    id: str
    target_id: str
    created_at: datetime
    manual: bool
    is_recycled: bool
    filesize: int

    def pretty_created_at(self) -> str:
        """Human-readable creation timestamp, e.g. "January 02, 2006 15:04"."""
        return self.created_at.strftime("%B %d, %Y %H:%M")

    @staticmethod
    def from_dict(d: dict) -> "Backup":
        """Build a Backup from a server JSON object (ISO-8601 created_at)."""
        return Backup(
            id=d["id"],
            target_id=d["target_id"],
            created_at=datetime.fromisoformat(d["created_at"]),
            manual=d["manual"],
            is_recycled=d["is_recycled"],
            filesize=d["filesize"],
        )
|
|
55
|
+
|
|
56
|
+
@dataclass
class Stats:
    """Server-wide statistics returned by the stats endpoint."""
    program_version: str
    total_target_size: int
    total_recycle_bin_size: int
    total_targets: int
    total_backups: int
    total_recycled_backups: int

    @staticmethod
    def from_dict(d: dict) -> "Stats":
        """Build Stats from a server JSON object (extra keys are ignored)."""
        field_names = (
            "program_version",
            "total_target_size",
            "total_recycle_bin_size",
            "total_targets",
            "total_backups",
            "total_recycled_backups",
        )
        return Stats(**{name: d[name] for name in field_names})
|
|
68
|
+
|
|
69
|
+
@dataclass
class SequentialFile:
    """One file tracked inside a sequential upload session."""
    path: str
    name: str
    uploaded: bool

    @staticmethod
    def from_dict(d: dict) -> "SequentialFile":
        """Build a SequentialFile from JSON; "uploaded" defaults to False."""
        return SequentialFile(
            path=d["path"],
            name=d["name"],
            uploaded=d.get("uploaded", False),
        )
|
|
78
|
+
|
|
79
|
+
@dataclass
class DelayedJob:
    """A one-off server job (e.g. an upload being processed)."""
    id: int
    name: str
    status: str
    start_time: datetime
    end_time: datetime

    @staticmethod
    def from_dict(d: dict) -> "DelayedJob":
        """Build a DelayedJob from JSON; timestamps arrive as HTTP dates."""
        return DelayedJob(
            id=d["id"],
            name=d["name"],
            status=d["status"],
            start_time=from_http_date(d["start_time"]),
            end_time=from_http_date(d["end_time"]),
        )

    def pretty_start_time(self) -> str:
        """Human-readable start timestamp."""
        return self.start_time.strftime("%B %d, %Y %H:%M")

    def pretty_end_time(self) -> str:
        """Human-readable end timestamp."""
        return self.end_time.strftime("%B %d, %Y %H:%M")
|
|
96
|
+
|
|
97
|
+
@dataclass
class ScheduledJob:
    """A recurring server job and when it will run next."""
    name: str
    interval: int
    next_run: datetime

    @staticmethod
    def from_dict(d: dict) -> "ScheduledJob":
        """Build a ScheduledJob from JSON; next_run arrives as a Unix timestamp."""
        next_run = datetime.fromtimestamp(d["next_run"])
        return ScheduledJob(name=d["name"], interval=d["interval"], next_run=next_run)

    def pretty_next_run(self) -> str:
        """Human-readable next-run timestamp."""
        return self.next_run.strftime("%B %d, %Y %H:%M")
|
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: backupchan-client-lib
|
|
3
|
+
Version: 1.4.0
|
|
4
|
+
Summary: Library for interfacing with Backup-chan.
|
|
5
|
+
Author-email: Moltony <koronavirusnyj@gmail.com>
|
|
6
|
+
License: BSD-3-Clause
|
|
7
|
+
Project-URL: Homepage, https://github.com/Backupchan/client-lib
|
|
8
|
+
Project-URL: Repository, https://github.com/Backupchan/client-lib.git
|
|
9
|
+
Project-URL: Issues, https://github.com/Backupchan/client-lib/issues
|
|
10
|
+
Project-URL: Changelog, https://github.com/Backupchan/client-lib/blob/main/CHANGELOG.md
|
|
11
|
+
Classifier: Development Status :: 5 - Production/Stable
|
|
12
|
+
Classifier: License :: OSI Approved :: BSD License
|
|
13
|
+
Classifier: Natural Language :: English
|
|
14
|
+
Classifier: Operating System :: OS Independent
|
|
15
|
+
Classifier: Programming Language :: Python :: 3 :: Only
|
|
16
|
+
Classifier: Topic :: System :: Archiving :: Backup
|
|
17
|
+
Classifier: Topic :: Software Development :: Libraries
|
|
18
|
+
Classifier: Typing :: Typed
|
|
19
|
+
Description-Content-Type: text/markdown
|
|
20
|
+
License-File: LICENSE
|
|
21
|
+
Requires-Dist: requests
|
|
22
|
+
Provides-Extra: dev
|
|
23
|
+
Requires-Dist: pytest; extra == "dev"
|
|
24
|
+
Requires-Dist: requests-mock; extra == "dev"
|
|
25
|
+
Dynamic: license-file
|
|
26
|
+
|
|
27
|
+
# Backup-chan client library
|
|
28
|
+
|
|
29
|
+

|
|
30
|
+

|
|
31
|
+

|
|
32
|
+
|
|
33
|
+
This is the Python library for interfacing with a Backup-chan server.
|
|
34
|
+
|
|
35
|
+
## Installing
|
|
36
|
+
|
|
37
|
+
```bash
|
|
38
|
+
# The easy way
|
|
39
|
+
pip install backupchan-client-lib
|
|
40
|
+
|
|
41
|
+
# Install from source
|
|
42
|
+
git clone https://github.com/Backupchan/client-lib.git backupchan-client-lib
|
|
43
|
+
cd backupchan-client-lib
|
|
44
|
+
pip install .
|
|
45
|
+
```
|
|
46
|
+
|
|
47
|
+
For instructions on setting up the server, refer to Backup-chan server's README.
|
|
48
|
+
|
|
49
|
+
## Testing
|
|
50
|
+
|
|
51
|
+
```
|
|
52
|
+
pytest
|
|
53
|
+
```
|
|
54
|
+
|
|
55
|
+
## Example
|
|
56
|
+
|
|
57
|
+
```python
|
|
58
|
+
from backupchan import *
|
|
59
|
+
|
|
60
|
+
# Connect to a server
|
|
61
|
+
api = API("http://192.168.1.43", 5000, "your api key")
|
|
62
|
+
|
|
63
|
+
# Print every target
|
|
64
|
+
targets = api.list_targets()
|
|
65
|
+
for target in targets:
|
|
66
|
+
print(target)
|
|
67
|
+
|
|
68
|
+
# Create a new target
|
|
69
|
+
target_id = api.new_target(
|
|
70
|
+
"the waifu collection", # name
|
|
71
|
+
BackupType.MULTI,
|
|
72
|
+
BackupRecycleCriteria.AGE,
|
|
73
|
+
10, # recycle value
|
|
74
|
+
BackupRecycleAction.RECYCLE,
|
|
75
|
+
"/var/backups/waifu", # location
|
|
76
|
+
"wf-$I_$D", # name template
|
|
77
|
+
False, # deduplicate
|
|
78
|
+
None # alias
|
|
79
|
+
)
|
|
80
|
+
target = api.get_target(target_id)
|
|
81
|
+
print(f"Created new target: {target}")
|
|
82
|
+
```
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
.gitignore
|
|
2
|
+
CHANGELOG.md
|
|
3
|
+
LICENSE
|
|
4
|
+
README.md
|
|
5
|
+
pyproject.toml
|
|
6
|
+
backupchan/__init__.py
|
|
7
|
+
backupchan/api.py
|
|
8
|
+
backupchan/connection.py
|
|
9
|
+
backupchan/models.py
|
|
10
|
+
backupchan_client_lib.egg-info/PKG-INFO
|
|
11
|
+
backupchan_client_lib.egg-info/SOURCES.txt
|
|
12
|
+
backupchan_client_lib.egg-info/dependency_links.txt
|
|
13
|
+
backupchan_client_lib.egg-info/requires.txt
|
|
14
|
+
backupchan_client_lib.egg-info/top_level.txt
|
|
15
|
+
tests/conftest.py
|
|
16
|
+
tests/test_connection.py
|
|
17
|
+
tests/test_models.py
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
backupchan
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "backupchan-client-lib"
|
|
3
|
+
version = "1.4.0"
|
|
4
|
+
description="Library for interfacing with Backup-chan."
|
|
5
|
+
authors = [
|
|
6
|
+
{ name="Moltony", email="koronavirusnyj@gmail.com" } # but I probably won't respond...
|
|
7
|
+
]
|
|
8
|
+
dependencies = [
|
|
9
|
+
"requests"
|
|
10
|
+
]
|
|
11
|
+
|
|
12
|
+
readme = "README.md"
|
|
13
|
+
license = {text = "BSD-3-Clause"}
|
|
14
|
+
|
|
15
|
+
classifiers = [
|
|
16
|
+
"Development Status :: 5 - Production/Stable",
|
|
17
|
+
"License :: OSI Approved :: BSD License",
|
|
18
|
+
"Natural Language :: English",
|
|
19
|
+
"Operating System :: OS Independent",
|
|
20
|
+
"Programming Language :: Python :: 3 :: Only",
|
|
21
|
+
"Topic :: System :: Archiving :: Backup",
|
|
22
|
+
"Topic :: Software Development :: Libraries",
|
|
23
|
+
"Typing :: Typed"
|
|
24
|
+
]
|
|
25
|
+
|
|
26
|
+
[project.urls]
|
|
27
|
+
Homepage = "https://github.com/Backupchan/client-lib"
|
|
28
|
+
Repository = "https://github.com/Backupchan/client-lib.git"
|
|
29
|
+
Issues = "https://github.com/Backupchan/client-lib/issues"
|
|
30
|
+
Changelog = "https://github.com/Backupchan/client-lib/blob/main/CHANGELOG.md"
|
|
31
|
+
|
|
32
|
+
[build-system]
|
|
33
|
+
requires = ["setuptools", "wheel"]
|
|
34
|
+
build-backend = "setuptools.build_meta"
|
|
35
|
+
|
|
36
|
+
[project.optional-dependencies]
|
|
37
|
+
dev = [
|
|
38
|
+
"pytest",
|
|
39
|
+
"requests-mock"
|
|
40
|
+
]
|
|
@@ -0,0 +1,144 @@
|
|
|
1
|
+
import uuid
|
|
2
|
+
import io
|
|
3
|
+
import pytest
|
|
4
|
+
import requests_mock
|
|
5
|
+
from backupchan import Connection
|
|
6
|
+
|
|
7
|
+
# example responses all taken straight from the api docs
|
|
8
|
+
|
|
9
|
+
NULL_UUID = "00000000-0000-0000-0000-000000000000"
|
|
10
|
+
|
|
11
|
+
def check_request(mock: requests_mock.Mocker, conn: Connection, method: str, payload: None | dict = None):
    """Verify the most recent mocked request used the expected HTTP method,
    carried the connection's Authorization header, and (optionally) the payload.

    For JSON requests the body is compared exactly; for anything else (e.g.
    multipart form posts) we only check that a body was sent at all.
    """
    assert mock.called
    request = mock.last_request
    assert request.method == method
    assert request.headers["Authorization"] == conn.headers()["Authorization"]

    if payload is None:
        return

    content_type = request.headers["Content-Type"]
    if "application/json" in content_type:
        assert request.json() == payload
    else:
        assert request.text is not None
|
|
23
|
+
def test_get(conn):
    """A GET on the target endpoint returns the mocked target list intact."""
    target_json = {
        "id": NULL_UUID,
        "name": "My backup",
        "target_type": "multi",
        "recycle_criteria": "count",
        "recycle_value": 10,
        "recycle_action": "recycle",
        "location": "/var/backups/MyBackup",
        "name_template": "backup-$I-$D",
    }
    mock_response = {"success": True, "targets": [target_json]}

    with requests_mock.Mocker() as m:
        m.get("http://localhost:5000/api/target", json=mock_response, status_code=200)

        response = conn.get("target")

        check_request(m, conn, "GET")

        assert response.status_code == 200
        body = response.json_body
        assert body["success"] is True
        assert len(body["targets"]) == 1
        assert body["targets"][0]["name"] == "My backup"
|
|
52
|
+
def test_post(conn):
    """A JSON POST creating a target echoes the payload and returns the new id."""
    mock_response = {"success": True, "id": NULL_UUID}

    with requests_mock.Mocker() as m:
        m.post("http://localhost:5000/api/target", json=mock_response, status_code=201)

        new_target = {
            "name": "Backupy",
            "backup_type": "multi",
            "recycle_criteria": "count",
            "recycle_value": 10,
            "recycle_action": "recycle",
            "location": "/bakupy",
            "name_template": "bkp-$I",
        }

        response = conn.post("target", new_target)

        check_request(m, conn, "POST", new_target)

        assert response.status_code == 201
        assert response.json_body["success"] is True
        assert response.json_body["id"] == NULL_UUID
|
|
79
|
+
def test_post_form(conn):
    """A multipart form POST (backup upload) sends form data plus a file part."""
    mock_response = {"success": True, "id": NULL_UUID}
    target_id = str(uuid.uuid4())

    with requests_mock.Mocker() as m:
        m.post(f"http://localhost:5000/api/target/{target_id}/upload", json=mock_response, status_code=200)

        form_fields = {"manual": False}
        form_files = {"backup_file": io.BytesIO(b"i am file")}

        response = conn.post_form(f"target/{target_id}/upload", data=form_fields, files=form_files)

        check_request(m, conn, "POST", form_fields)
        # The file part forces a multipart encoding rather than JSON.
        assert "multipart/form-data" in m.last_request.headers["Content-Type"]

        assert response.status_code == 200
        assert response.json_body["success"] is True
        assert response.json_body["id"] == NULL_UUID
|
|
108
|
+
def test_delete(conn):
    """A DELETE on a target forwards the delete_files flag and succeeds."""
    mock_response = {"success": True}

    with requests_mock.Mocker() as m:
        m.delete(f"http://localhost:5000/api/target/{NULL_UUID}", json=mock_response, status_code=200)

        delete_options = {"delete_files": True}
        response = conn.delete(f"target/{NULL_UUID}", data=delete_options)

        check_request(m, conn, "DELETE", delete_options)

        assert response.status_code == 200
        assert response.json_body["success"] is True
|
|
127
|
+
def test_patch(conn):
    """A PATCH on a backup forwards the field update and succeeds."""
    mock_response = {"success": True}

    with requests_mock.Mocker() as m:
        m.patch(f"http://localhost:5000/api/backup/{NULL_UUID}", json=mock_response, status_code=200)

        update = {"is_recycled": True}
        response = conn.patch(f"backup/{NULL_UUID}", data=update)

        check_request(m, conn, "PATCH", update)

        assert response.status_code == 200
        assert response.json_body["success"] is True
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
import datetime
|
|
2
|
+
from backupchan.models import Backup, BackupType, BackupRecycleAction, BackupRecycleCriteria, BackupTarget, Stats
|
|
3
|
+
|
|
4
|
+
def test_target_from_dict():
    """BackupTarget.from_dict maps JSON fields onto the model, parsing enums."""
    json_target = {
        "id": "deadbeef-dead-beef-dead-beefdeadbeef",
        "name": "touhoku kiritest",
        "target_type": "multi",
        "recycle_criteria": "count",
        "recycle_value": 13,
        "recycle_action": "recycle",
        "location": "/var/backups/touhoku",
        "name_template": "$I_kiritanpo",
        "deduplicate": False,
        "alias": None,
    }

    target = BackupTarget.from_dict(json_target)

    # Enum-typed fields are converted from their string representations.
    assert target.target_type == BackupType.MULTI
    assert target.recycle_criteria == BackupRecycleCriteria.COUNT
    assert target.recycle_action == BackupRecycleAction.RECYCLE

    # Everything else is copied through verbatim.
    for field in ("id", "name", "recycle_value", "location", "name_template", "deduplicate", "alias"):
        assert getattr(target, field) == json_target[field]
|
|
30
|
+
def test_backup_from_dict():
    """Backup.from_dict restores scalar fields and parses the ISO timestamp."""
    created_at = datetime.datetime.now()
    json_backup = {
        "id": "d0d0caca-d0d0-caca-d0d0-cacad0d0caca",
        "target_id": "deadbeef-dead-beef-dead-beefdeadbeef",
        "created_at": created_at.isoformat(),
        "manual": False,
        "is_recycled": True,
        "filesize": 123456,
    }

    backup = Backup.from_dict(json_backup)

    # The ISO-formatted string round-trips back to the original datetime.
    assert backup.created_at == created_at

    for field in ("id", "target_id", "manual", "is_recycled", "filesize"):
        assert getattr(backup, field) == json_backup[field]
|
|
49
|
+
def test_stats_from_dict():
    """Stats.from_dict copies every statistics field through unchanged."""
    json_stats = {
        "program_version": "1.1.0",
        "total_target_size": 123456,
        "total_recycle_bin_size": 654321,
        "total_targets": 4,
        "total_backups": 43,
        "total_recycled_backups": 11,
    }

    stats = Stats.from_dict(json_stats)

    # Field names on the model mirror the JSON keys one-to-one.
    for key, value in json_stats.items():
        assert getattr(stats, key) == value