azul-client 9.0.24__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- azul_client/__init__.py +4 -0
- azul_client/api/__init__.py +74 -0
- azul_client/api/base_api.py +163 -0
- azul_client/api/binaries_data.py +513 -0
- azul_client/api/binaries_meta.py +510 -0
- azul_client/api/features.py +175 -0
- azul_client/api/plugins.py +49 -0
- azul_client/api/purge.py +71 -0
- azul_client/api/security.py +29 -0
- azul_client/api/sources.py +51 -0
- azul_client/api/statistics.py +23 -0
- azul_client/api/users.py +29 -0
- azul_client/client.py +510 -0
- azul_client/config.py +116 -0
- azul_client/exceptions.py +30 -0
- azul_client/oidc/__init__.py +5 -0
- azul_client/oidc/callback.py +73 -0
- azul_client/oidc/oidc.py +215 -0
- azul_client-9.0.24.dist-info/METADATA +102 -0
- azul_client-9.0.24.dist-info/RECORD +23 -0
- azul_client-9.0.24.dist-info/WHEEL +5 -0
- azul_client-9.0.24.dist-info/entry_points.txt +2 -0
- azul_client-9.0.24.dist-info/top_level.txt +1 -0
azul_client/client.py
ADDED
|
@@ -0,0 +1,510 @@
|
|
|
1
|
+
"""High level library flow."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import os
|
|
5
|
+
import sys
|
|
6
|
+
from tempfile import SpooledTemporaryFile
|
|
7
|
+
|
|
8
|
+
import click
|
|
9
|
+
import pendulum
|
|
10
|
+
from azul_bedrock import models_restapi
|
|
11
|
+
from pydantic import BaseModel
|
|
12
|
+
from rich.console import Console
|
|
13
|
+
|
|
14
|
+
from azul_client import config
|
|
15
|
+
from azul_client.api import Api
|
|
16
|
+
from azul_client.config import _client_config
|
|
17
|
+
from azul_client.exceptions import BadResponse, BadResponse404
|
|
18
|
+
|
|
19
|
+
# Shared Api client used by every command in this module. It is None at import
# time and is initialised by the `cli` group callback before any subcommand runs.
api: Api | None = None

# Reused --security option help text.
SECURITY_STRING_DESCRIPTION = "simple security string (use `azul security` to see available security strings)"
# Reused --timestamp option help text.
TIMESTAMP_DESCRIPTION = (
    "timestamp for which the file being submitted was sourced in ISO8601 format e.g 2025-05-26T02:11:44Z"
)
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
@click.group()
@click.option("-c", default="default", help="switch to a different configured Azul instance.")
def cli(c: str) -> None:
    """Interact with the Azul API via CLI tools."""
    # Select the named ini section before any subcommand runs, then build the
    # module-level Api client that all subcommands share.
    global api
    config.switch_section(c)
    api = Api()
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
@click.group()
def binaries() -> None:
    """Upload, download and get metadata associated with binaries."""
    # Group container only; subcommands attach via @binaries.command().
    pass
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
@click.command(name="security")
@click.option("--full", is_flag=True, show_default=True, default=False, help="show full configuration")
def security(full: bool):
    """List Azul security classification settings."""
    settings = api.security.get_security_settings()
    presets = settings.get("presets")
    # Dump the complete JSON when asked for (or when no presets exist to
    # summarise); otherwise show the short preset listing.
    if full or not presets:
        click.echo(json.dumps(settings, indent=2))
    else:
        click.echo("Security Presets:")
        click.echo("\n".join(presets))
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
@click.group(name="sources")
def sources() -> None:
    """List and get information about specific sources."""
    # Group container only; subcommands attach via @sources.command().
    pass
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
@sources.command(name="list")
def sources_list():
    """List all of the source ids."""
    # Fetch first so an API failure aborts before any output is printed.
    source_ids = api.sources.get_all_sources().keys()
    click.echo("Source IDS:")
    click.echo("\n".join(source_ids))
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
@sources.command(name="full")
def sources_full():
    """Get the full source information for each source."""
    # Dump every source model to a plain dict so json.dumps can serialise it.
    dumped = {source_id: src.model_dump() for source_id, src in api.sources.get_all_sources().items()}
    click.echo("Sources:")
    click.echo(json.dumps(dumped, indent=2))
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
@sources.command(name="info")
@click.argument("source")
def sources_info(source: str):
    """Get summary information about a specific SOURCE by source Id.

    The lookup is case-insensitive. Prints an explanatory message when the
    source id is unknown (previously a miss was silent).
    """
    all_sources = api.sources.get_all_sources()
    for source_id, source_obj in all_sources.items():
        if source_id.lower() == source.lower():
            click.echo(source_id + ":")
            click.echo("Description: " + source_obj.description)
            # f-string instead of `+` concatenation so a non-string expiry
            # value cannot raise TypeError here.
            click.echo(f"Submissions Expire After {source_obj.expire_events_after}")
            click.echo("References:")
            for ref in source_obj.references:
                click.echo(f" name: '{ref.name}'")
                click.echo(f" description: '{ref.description}'")
                click.echo(f" required: '{ref.required}'")
            return
    # Bug fix: a failed lookup used to produce no output at all.
    click.echo(f"Source '{source}' not found (use `azul sources list` to see available ids).")
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
@click.group()
def plugins() -> None:
    """List and get information for plugins in Azul."""
    # Group container only; subcommands attach via @plugins.command().
    pass
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
@plugins.command(name="list")
def plugins_list():
    """List all of the plugins registered in Azul."""
    registered = api.plugins.get_all_plugins()
    click.echo("Plugins (name version):")
    # One line per plugin, taken from its newest registered version.
    for entry in registered:
        newest = entry.newest_version
        click.echo(f"{newest.name} {newest.version}")
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
@plugins.command(name="info")
@click.argument("name")
@click.option("--version", type=str, help="version of the plugin to get info for (defaults to newest)")
def plugin_info(name: str, version: str):
    """Get the details of a plugin with the provided plugin name.

    With --version, fetches that exact version; otherwise looks the plugin up
    by name in the full plugin list and shows its newest version.
    """
    if version:
        try:
            details = api.plugins.get_plugin(name, version)
        except BadResponse404:
            click.echo(f"Plugin {name} {version} does not exist check the version and name.")
            return
        except BadResponse as e:
            click.echo(f"Plugin {name} could not be found due to error {e.message}.")
            # Bug fix: without this return the code below used `details`
            # while it was unbound, raising NameError.
            return
        click.echo(f"Providing detail for plugin {name} {version}")
        click.echo(details.plugin.model_dump_json(indent=2))
        # Bug fix: previously execution fell through and ALSO printed the
        # newest-version detail after the requested version.
        return

    try:
        plugin_list = api.plugins.get_all_plugins()
    except BadResponse as e:
        click.echo(f"Plugin {name} could not be found due to error {e.message}.")
        return

    for p in plugin_list:
        if p.newest_version.name == name:
            click.echo(f"Providing detail for plugin {p.newest_version.name} {p.newest_version.version}")
            click.echo(p.newest_version.model_dump_json(indent=2))
            return
    click.echo(f"Plugin {name} could not be found, check the name is valid.")
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
@binaries.command()
@click.argument("sha256")
def check(sha256: str):
    """Check if binary metadata associated with the provided SHA256 is in Azul or not."""
    # Exit code 1 on a miss so the command is usable from scripts.
    if not api.binaries_meta.check_meta(sha256):
        click.echo("Binary metadata NOT available")
        sys.exit(1)
    click.echo("Binary metadata available")
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
@binaries.command()
@click.argument("sha256")
def check_data(sha256: str):
    """Check if a binary in Azul has the original file stored in Azul for the provided SHA256."""
    # Exit code 1 on a miss so the command is usable from scripts.
    if not api.binaries_data.check_data(sha256):
        click.echo("Binary data NOT available")
        sys.exit(1)
    click.echo("Binary data available")
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
def _walk_files_in_path(path: str) -> list[str]:
|
|
165
|
+
"""Walks a user given path for files."""
|
|
166
|
+
input_files = []
|
|
167
|
+
if os.path.isdir(path):
|
|
168
|
+
for root, dirs, files in os.walk(path, followlinks=False):
|
|
169
|
+
files = [f for f in files if not f[0] == "."]
|
|
170
|
+
dirs[:] = [d for d in dirs if not d[0] == "."]
|
|
171
|
+
for name in files:
|
|
172
|
+
loc = os.path.join(root, name)
|
|
173
|
+
input_files.append(loc)
|
|
174
|
+
elif os.path.isfile(path):
|
|
175
|
+
input_files.append(path)
|
|
176
|
+
else:
|
|
177
|
+
raise Exception("cannot upload something that is not a folder or file")
|
|
178
|
+
|
|
179
|
+
return input_files
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
def _shared_submit(
    confirmed: bool,
    path: str,
    *,
    security: str = "",
    timestamp: str = "",
    extract: bool = False,
    extract_password: str = "",
    parent: str = "",
    parent_rels: dict | None = None,
    source: str = "",
    source_refs: dict | None = None,
) -> None:
    """Common helper for submitting binaries to Azul, shared by `put` and `put-child`.

    Walks `path` for files, prints a summary, asks for confirmation (unless
    `confirmed`), then uploads each file. When `parent` is given the files are
    uploaded as children of that binary; otherwise they go to `source`.
    Calls sys.exit(1) if the user declines the confirmation prompt.
    """
    # Normalise None (from click) to empty string.
    security = security if security else ""
    # Default to "now" in UTC; otherwise re-serialise the user value to ISO8601.
    if not timestamp:
        timestamp = pendulum.now(pendulum.UTC).to_iso8601_string()
    else:
        timestamp = pendulum.parse(timestamp).to_iso8601_string()

    raw_input_files = _walk_files_in_path(path)

    # Generate the filename each file will have inside Azul: the local path with
    # the user-supplied prefix stripped.
    input_files = []
    for filepath in raw_input_files:
        # try to remove provided path, unless that was a reference to a specific file
        # in which case keep the filename only
        adjusted = filepath.removeprefix(path)
        filename = os.path.basename(filepath)
        if filename not in adjusted:
            adjusted = filename
        input_files.append((filepath, adjusted))

    # Print a preview (first 10 Azul-side names) and the submission settings.
    click.echo(f"{len(input_files)} files found including:")
    for _, filepath in input_files[:10]:
        click.echo(filepath)

    click.echo(f"Security: {security}")
    click.echo(f"Timestamp: {timestamp}")
    click.echo(f"Extract: {extract}")
    click.echo(f"Extract Password: {extract_password}")
    if parent:
        click.echo(f"Parent: {parent}")
        click.echo(f"Relationship: {parent_rels}")
    else:
        click.echo(f"Source: {source}")
        click.echo(f"References: {source_refs}")

    if not confirmed and not click.confirm(f"Proceed with upload of {len(input_files)} files?"):
        sys.exit(1)

    # Upload each file, choosing the child or source endpoint depending on
    # whether a parent SHA256 was supplied, and echo the resulting SHA256.
    for fullpath, filepath in input_files:
        with open(fullpath, "rb") as f:
            if parent:
                resp = api.binaries_data.upload_child(
                    f,
                    parent_sha256=parent,
                    relationship=parent_rels,
                    security=security,
                    filename=filepath,
                    timestamp=timestamp,
                    extract=extract,
                    password=extract_password,
                )
            else:
                resp = api.binaries_data.upload(
                    f,
                    security=security,
                    source_id=source,
                    filename=filepath,
                    timestamp=timestamp,
                    references=source_refs,
                    extract=extract,
                    password=extract_password,
                )
        click.echo(f"{filepath} - {resp.sha256}")
|
|
260
|
+
|
|
261
|
+
|
|
262
|
+
def _print_model(model: BaseModel, pretty: bool):
    """Prints a Pydantic model for user consumption, with a pretty filter as required."""
    if not pretty:
        # Dump the raw JSON document for piping into tools such as jq.
        click.echo(model.model_dump_json(indent=4))
        return

    # Default to a colour-capable pager when the user has not chosen one.
    if "MANPAGER" not in os.environ and "PAGER" not in os.environ:
        os.environ["PAGER"] = "less -r"

    # Heuristic: colour output only when the configured pager looks like `less -r`.
    colour_supported = any("less -r" in os.environ.get(var, "") for var in ("MANPAGER", "PAGER"))

    console = Console()
    if console.is_terminal:
        # The entity document is large, so page it on interactive terminals.
        with console.pager(styles=colour_supported):
            console.print(model)
    else:
        console.print(model)
|
|
282
|
+
|
|
283
|
+
|
|
284
|
+
@binaries.command()
@click.argument("path")
@click.option("-y", is_flag=True, show_default=True, default=False, help="no confirmation prompt")
@click.option("--timestamp", type=str, help=TIMESTAMP_DESCRIPTION)
@click.option("--security", required=True, type=str, help=SECURITY_STRING_DESCRIPTION)
@click.option("--parent", required=True, type=str, help="SHA256 of parent file")
@click.option(
    "-r",
    "--relationship",
    required=True,
    multiple=True,
    type=str,
    help="""relationship information between the uploaded child and the parent in form key:value e.g:
    azul put-child --relationship action:extracted --relationship relationship:friend
    """,
)
@click.option(
    "--extract",
    is_flag=True,
    show_default=True,
    default=False,
    help="extract the provided child file (must be trusted archive)",
)
@click.option("--extract-password", type=str, help="password to use when extracting the child archive")
def put_child(
    y: bool,
    path: str,
    timestamp: str,
    security: str,
    parent: str,
    relationship: list[str],
    extract: bool,
    extract_password: str,
):
    """Uploads a binary from PATH as a child of a pre-existing parent binary."""
    # "key:value" options -> dict; split only on the first ':' so values may
    # themselves contain colons.
    pairs = [entry.split(":", 1) for entry in relationship]
    relation_dict = {pair[0]: pair[1] for pair in pairs}
    _shared_submit(
        y,
        path,
        security=security,
        timestamp=timestamp,
        parent=parent,
        parent_rels=relation_dict,
        extract=extract,
        extract_password=extract_password,
    )
|
|
331
|
+
|
|
332
|
+
|
|
333
|
+
@binaries.command()
@click.option("-y", is_flag=True, show_default=True, default=False, help="no confirmation prompt")
@click.argument("path")
@click.argument("source")
@click.option(
    "--ref", type=str, multiple=True, help="references for source. e.g. --ref user:llama --ref location:ocean"
)
@click.option("--timestamp", type=str, help=TIMESTAMP_DESCRIPTION)
@click.option("--security", required=True, type=str, help=SECURITY_STRING_DESCRIPTION)
@click.option("--extract", is_flag=True, show_default=True, default=False, help="submitted files are trusted archives")
@click.option("--extract-password", type=str, help="password for trusted archive to be extracted with")
def put(
    y: bool,
    path: str,
    timestamp: str,
    security: str,
    source: str,
    ref: list[str],
    extract: bool,
    extract_password: str,
):
    """Upload all files in PATH to Azul SOURCE."""
    # "key:value" options -> dict; split only on the first ':' so values may
    # themselves contain colons.
    pairs = [entry.split(":", 1) for entry in ref]
    refs = {pair[0]: pair[1] for pair in pairs}
    _shared_submit(
        y,
        path,
        security=security,
        timestamp=timestamp,
        source=source,
        source_refs=refs,
        extract=extract,
        extract_password=extract_password,
    )
|
|
367
|
+
|
|
368
|
+
|
|
369
|
+
@binaries.command()
@click.argument("filename")
@click.argument("source")
@click.option("-y", is_flag=True, show_default=True, default=False, help="automatic confirmation")
@click.option(
    "--ref", type=str, multiple=True, help="references for source. e.g. --ref user:llama --ref location:ocean"
)
@click.option("--timestamp", type=str, help=TIMESTAMP_DESCRIPTION)
@click.option("--security", required=True, type=str, help=SECURITY_STRING_DESCRIPTION)
def put_stdin(y: bool, filename: str, source: str, ref: list[str], timestamp: str, security: str):
    """Upload a file from stdin into an Azul source.

    FILENAME is the name of the file in Azul, and SOURCE is the ID of the source to upload the file to.
    """
    # "key:value" options -> dict; split only on the first ':'.
    split_refs = [x.split(":", 1) for x in ref]
    refs = {x[0]: x[1] for x in split_refs}
    security = security if security else ""

    # Default to "now" in UTC; otherwise re-serialise the user value to ISO8601.
    if not timestamp:
        timestamp = pendulum.now(pendulum.UTC).to_iso8601_string()
    else:
        timestamp = pendulum.parse(timestamp).to_iso8601_string()

    # Bug fix: this echo (and the final one below) were f-strings without a
    # placeholder and never displayed the actual filename.
    click.echo(f"Filename: {filename}")
    click.echo(f"Source: {source}")
    click.echo(f"References: {refs}")
    click.echo(f"Timestamp: {timestamp}")
    click.echo(f"Security: {security}")

    if not y and not click.confirm("Proceed with upload of file?"):
        sys.exit(1)

    # Read stdin in chunks into a spooled temporary file: it stays in memory up
    # to max_size (1 GiB here) and spills to disk beyond that.
    chunk_size = 1024 * 1024 * 1024
    with SpooledTemporaryFile(max_size=chunk_size) as spooled_file:
        while chunk := sys.stdin.buffer.read(chunk_size):
            spooled_file.write(chunk)
        spooled_file.seek(0)

        resp = api.binaries_data.upload(
            spooled_file,
            security=security,
            source_id=source,
            filename=filename,
            timestamp=timestamp,
            references=refs,
        )
        click.echo(f"{filename} - {resp.sha256}")
|
|
417
|
+
|
|
418
|
+
|
|
419
|
+
@binaries.command(help="""Get a binary's metadata from Azul by SHA256.""")
@click.argument("sha256")
@click.option(
    "-o",
    "--output",
    help="Output to a file - use '-' for stdout.",
    default="-",
    show_default=True,
    type=click.Path(file_okay=True, dir_okay=False, allow_dash=True),
)
@click.option(
    "--pretty/--no-pretty",
    help="Render stdout output coloured (default true if terminal, else false).",
    # Bug fix: os.isatty(sys.stdout.fileno()) raises at import time when stdout
    # has been replaced by an object without a real file descriptor (pipes,
    # test capture); sys.stdout.isatty() answers the same question safely.
    default=sys.stdout.isatty(),
)
def get_meta(sha256: str, output: str, pretty: bool):
    """Get metadata for a binary.

    Writes the entity to stdout (optionally pretty-printed/paged) when output
    is '-', otherwise saves the JSON document to the given file path.
    """
    entity = api.binaries_meta.get_meta(sha256)

    if output == "-":
        _print_model(entity, pretty)
    else:
        # Status message goes to stderr so stdout stays clean for piping.
        click.echo(f"saving output to path {output}", err=True)
        with open(output, "w") as f:
            f.write(entity.model_dump_json(indent=4))
|
|
444
|
+
|
|
445
|
+
|
|
446
|
+
@binaries.command(
    help="""
    Find and download samples from Azul.
    Combining multiple filters may lead to unexpected results.
    You can only query multiple attributes over a single authors document.
    """
)
@click.option("-o", "--output", help="output folder")
@click.option("--term", help="search term (refer to UI Explore for suggested search terms)", default="")
@click.option("--max", help="max number of entities to retrieve", default=100)
@click.option(
    "--sort-by",
    default=None,
    type=click.Choice(
        [
            str(models_restapi.FindBinariesSortEnum.score),
            str(models_restapi.FindBinariesSortEnum.source_timestamp),
            str(models_restapi.FindBinariesSortEnum.timestamp),
        ]
    ),
    help="What property to use when sorting results",
)
@click.option(
    "--sort-asc", default=False, is_flag=True, show_default=True, help="sort by ascending rather than descending."
)
def get(output: str, term: str, max: int, sort_by: models_restapi.FindBinariesSortEnum, sort_asc: bool):
    """Get all samples matching the criteria and optionally download the files to an output folder."""
    if output:
        click.echo(f"saving output to folder {output}")
    else:
        click.echo("no output folder provided, skip download")

    # Drop unset options so the API applies its own defaults.
    params = {"term": term, "max_entities": max, "sort_prop": sort_by, "sort_asc": sort_asc}
    kwargs = {key: value for (key, value) in params.items() if value is not None}
    entity = api.binaries_meta.find(**kwargs)

    # create output folder
    if output:
        click.echo(f"download to folder {output}")
        if not os.path.exists(output):
            click.echo(f"creating directory: {output}")
            # Bug fix: os.mkdir fails with FileNotFoundError for nested paths
            # (e.g. out/run1); makedirs creates missing parents too.
            os.makedirs(output)
        if not os.path.isdir(output):
            raise Exception(f"supplied path is not a directory: {output}")

    # Echo each hit; when an output folder was given also download the (CaRT
    # wrapped) content next to it as <sha256>.cart.
    for hit in entity.items:
        click.echo(hit.sha256)
        if output:
            content = api.binaries_data.download(hit.sha256)
            if content:
                with open(os.path.join(output, f"{hit.sha256}.cart"), "wb") as f:
                    f.write(content)
            else:
                click.echo("content not found")
|
|
501
|
+
|
|
502
|
+
|
|
503
|
+
# Register command groups on the top-level CLI; "b" is a short alias for
# the binaries group.
cli.add_command(_client_config)
cli.add_command(binaries)
cli.add_command(binaries, name="b")
cli.add_command(security)
cli.add_command(sources)
cli.add_command(plugins)
if __name__ == "__main__":
    cli()
|
azul_client/config.py
ADDED
|
@@ -0,0 +1,116 @@
|
|
|
1
|
+
"""Config handling."""
|
|
2
|
+
|
|
3
|
+
import configparser
|
|
4
|
+
import json
|
|
5
|
+
import os
|
|
6
|
+
import sys
|
|
7
|
+
import tempfile
|
|
8
|
+
|
|
9
|
+
import click
|
|
10
|
+
import pydantic
|
|
11
|
+
from filelock import FileLock
|
|
12
|
+
from pydantic_settings import BaseSettings
|
|
13
|
+
|
|
14
|
+
# Name of the ini section currently in use; get_config()/save() read this at
# call time, so switching must happen before any config access.
config_section = "default"


def switch_section(section: str) -> None:
    """Switch to a different azul deployment configured in the ini."""
    global config_section
    config_section = section
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@click.group(name="config")
def _client_config() -> None:
    """Change azul-client configuration."""
    # Group container only; subcommands attach via @_client_config.command().
    pass
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class ConfigLocation(BaseSettings):
    """Path to settings and lock files for azul."""

    # Ini file holding the per-deployment config sections.
    azul_config_location: str = os.path.join(os.path.expanduser("~"), ".azul.ini")
    # Cross-process lock file used when reading/writing config and tokens.
    token_refresh_path_lock: str = os.path.join(tempfile.gettempdir(), "azul-token-refresh.lock")
    # Seconds to wait for the file lock before timing out.
    token_lock_timeout: float = 30
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
# Resolved once at import; all lock/config paths come from this instance.
config_location = ConfigLocation()
# NOTE - double locking with this as a decorator does not harm because it tracks the current processes PID.
# So if it double acquires the lock it knows it already has the lock and continues to work.
_lock_azul_config = FileLock(config_location.token_refresh_path_lock, timeout=config_location.token_lock_timeout)
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
class Config(BaseSettings):
    """Config wrapper for a single azul deployment (one ini section)."""

    # Base URL of the Azul API (trailing slash stripped by the validator below).
    azul_url: str = "http://localhost"
    # OIDC discovery document for the identity provider.
    oidc_url: str = "http://keycloak/.well-known/openid-configuration"
    auth_type: str = "callback"
    auth_scopes: str = ""
    auth_client_id: str = "azul-web"
    auth_client_secret: str = ""  # nosec B105
    azul_verify_ssl: bool = True
    # Cached OIDC token and the time it was obtained (0 = never).
    auth_token: dict | None = {}
    auth_token_time: int = 0
    max_timeout: float = 300.0
    oidc_timeout: float = 10.0

    @pydantic.field_validator("azul_url")
    def no_trailing_slash(cls, v):
        """Remove trailing slash from azul_url."""
        return v.rstrip("/")

    @_lock_azul_config
    def save(self):
        """Save the current configuration to the [config_section] of the ini file."""
        tmp = self.model_dump()
        # save auth token as json string (ini values must be flat strings)
        tmp["auth_token"] = json.dumps(tmp["auth_token"])

        location = ConfigLocation().azul_config_location
        cfg = configparser.ConfigParser()
        # Read the existing file first so other sections are preserved.
        cfg.read(location)
        cfg[config_section] = tmp
        # Fix: reuse the already-computed `location` instead of constructing a
        # second ConfigLocation(), which could resolve differently.
        with open(location, "w") as configfile:
            cfg.write(configfile)
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
@_client_config.command()
@_lock_azul_config
def clear_auth() -> None:
    """Reset current auth information."""
    # Blank the cached token and its timestamp, then persist; the next API
    # call will have to re-authenticate.
    conf = get_config()
    conf.auth_token = {}  # nosec B105
    conf.auth_token_time = 0
    conf.save()
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
@_lock_azul_config
def get_config() -> Config:
    """Get config loaded from file.

    Creates a default config file when none exists. Raises if a non-default
    section was requested but is missing; a missing "default" section just
    falls back to built-in defaults. All status output goes to stderr so
    stdout remains clean for command output.
    """
    location = ConfigLocation().azul_config_location
    if not os.path.exists(location):
        print(f"ERROR - no config found - generating default at {location}", file=sys.stderr)
        print("You will likely need to edit this config.", file=sys.stderr)
        conf = Config()
        conf.save()

    print(f"Loading config [{config_section}] from {location}", file=sys.stderr)
    tmp = configparser.ConfigParser()
    tmp.read(location)
    conf = {}
    try:
        # configparser has an odd data structure, convert to dictionary
        conf = {**tmp[config_section]}
    except KeyError:
        if config_section == "default":
            # Keep conf == {} so Config() below uses its field defaults.
            print(f"Config section [{config_section}] is invalid, generating defaults", file=sys.stderr)
        else:
            raise Exception(f"config section [{config_section}] is invalid")

    # the auth token was saved as a json string
    if conf.get("auth_token"):
        conf["auth_token"] = json.loads(conf["auth_token"])
    config = Config(**conf)
    print(f"using Azul API at {config.azul_url}\n", file=sys.stderr)
    return config
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
"""Handle and register api exceptions designed to allow for more explicit message types to be added overtime."""
|
|
2
|
+
|
|
3
|
+
import httpx
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class BadResponse(Exception):
    """The client received a bad http status code from the server.

    The formatted message is exposed as ``self.message`` because callers
    (e.g. the CLI plugin commands) read ``e.message`` when reporting errors.
    """

    def __init__(self, resp: "httpx.Response", *args):
        """Build the exception message from the failing response's url, status and body."""
        message = f"{resp.url} - {resp.status_code} - {resp.content}"
        # Bug fix: `.message` was never stored, so `except BadResponse as e:
        # ... e.message` (used in client.py) raised AttributeError.
        self.message = message
        super().__init__(message, *args)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class BadResponse404(Exception):
    """The client received a 404 (not found) http status code from the server.

    The formatted message is exposed as ``self.message`` for callers that
    report errors, matching BadResponse.
    """

    def __init__(self, resp: "httpx.Response", *args):
        """Build the exception message from the failing response's url, status and body."""
        message = f"{resp.url} - {resp.status_code} - {resp.content}"
        # Bug fix: `.message` was never stored; callers access it when
        # formatting error output.
        self.message = message
        super().__init__(message, *args)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def bad_response(resp: httpx.Response) -> BadResponse | BadResponse404:
    """Build a formatted exception matching an http response's status code.

    :return BadResponse | BadResponse404
    """
    # If you want a different status code e.g 500 to be explicitly raised this makes it easy to add later.
    return BadResponse404(resp) if resp.status_code == 404 else BadResponse(resp)
|