dreadnode 1.0.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dreadnode-1.0.0/PKG-INFO +125 -0
- dreadnode-1.0.0/README.md +100 -0
- dreadnode-1.0.0/dreadnode/__init__.py +51 -0
- dreadnode-1.0.0/dreadnode/api/__init__.py +0 -0
- dreadnode-1.0.0/dreadnode/api/client.py +244 -0
- dreadnode-1.0.0/dreadnode/api/models.py +210 -0
- dreadnode-1.0.0/dreadnode/artifact/__init__.py +0 -0
- dreadnode-1.0.0/dreadnode/artifact/merger.py +599 -0
- dreadnode-1.0.0/dreadnode/artifact/storage.py +126 -0
- dreadnode-1.0.0/dreadnode/artifact/tree_builder.py +455 -0
- dreadnode-1.0.0/dreadnode/constants.py +16 -0
- dreadnode-1.0.0/dreadnode/integrations/__init__.py +0 -0
- dreadnode-1.0.0/dreadnode/integrations/transformers.py +183 -0
- dreadnode-1.0.0/dreadnode/main.py +1048 -0
- dreadnode-1.0.0/dreadnode/metric.py +228 -0
- dreadnode-1.0.0/dreadnode/object.py +29 -0
- dreadnode-1.0.0/dreadnode/py.typed +0 -0
- dreadnode-1.0.0/dreadnode/serialization.py +731 -0
- dreadnode-1.0.0/dreadnode/task.py +447 -0
- dreadnode-1.0.0/dreadnode/tracing/__init__.py +0 -0
- dreadnode-1.0.0/dreadnode/tracing/constants.py +35 -0
- dreadnode-1.0.0/dreadnode/tracing/exporters.py +157 -0
- dreadnode-1.0.0/dreadnode/tracing/span.py +819 -0
- dreadnode-1.0.0/dreadnode/types.py +25 -0
- dreadnode-1.0.0/dreadnode/util.py +150 -0
- dreadnode-1.0.0/dreadnode/version.py +3 -0
- dreadnode-1.0.0/pyproject.toml +116 -0
dreadnode-1.0.0/PKG-INFO
ADDED
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
Metadata-Version: 2.3
|
|
2
|
+
Name: dreadnode
|
|
3
|
+
Version: 1.0.0
|
|
4
|
+
Summary: Dreadnode SDK
|
|
5
|
+
Author: Nick Landers
|
|
6
|
+
Author-email: monoxgas@gmail.com
|
|
7
|
+
Requires-Python: >=3.10,<3.14
|
|
8
|
+
Classifier: Programming Language :: Python :: 3
|
|
9
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
10
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
11
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
12
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
13
|
+
Provides-Extra: training
|
|
14
|
+
Requires-Dist: coolname (>=2.2.0,<3.0.0)
|
|
15
|
+
Requires-Dist: fast-depends (>=2.4.12,<3.0.0)
|
|
16
|
+
Requires-Dist: fsspec[s3] (>=2023.1.0,<=2025.3.0)
|
|
17
|
+
Requires-Dist: httpx (>=0.28.0,<0.29.0)
|
|
18
|
+
Requires-Dist: logfire (>=3.5.3,<4.0.0)
|
|
19
|
+
Requires-Dist: pandas (>=2.2.3,<3.0.0)
|
|
20
|
+
Requires-Dist: pydantic (>=2.9.2,<3.0.0)
|
|
21
|
+
Requires-Dist: python-ulid (>=3.0.0,<4.0.0)
|
|
22
|
+
Requires-Dist: transformers (>=4.41.0,<5.0.0) ; extra == "training"
|
|
23
|
+
Project-URL: Repository, https://github.com/dreadnode/sdk
|
|
24
|
+
Description-Content-Type: text/markdown
|
|
25
|
+
|
|
26
|
+
<p align="center">
|
|
27
|
+
<img
|
|
28
|
+
src="https://d1lppblt9t2x15.cloudfront.net/logos/5714928f3cdc09503751580cffbe8d02.png"
|
|
29
|
+
alt="Logo"
|
|
30
|
+
align="center"
|
|
31
|
+
width="144px"
|
|
32
|
+
height="144px"
|
|
33
|
+
/>
|
|
34
|
+
</p>
|
|
35
|
+
|
|
36
|
+
<h3 align="center">
|
|
37
|
+
Dreadnode Strikes SDK
|
|
38
|
+
</h3>
|
|
39
|
+
|
|
40
|
+
<h4 align="center">
|
|
41
|
+
<img alt="PyPI - Python Version" src="https://img.shields.io/pypi/pyversions/dreadnode">
|
|
42
|
+
<img alt="PyPI - Version" src="https://img.shields.io/pypi/v/dreadnode">
|
|
43
|
+
<img alt="GitHub License" src="https://img.shields.io/github/license/dreadnode/sdk">
|
|
44
|
+
<img alt="Tests" src="https://img.shields.io/github/actions/workflow/status/dreadnode/sdk/tests.yaml">
|
|
45
|
+
<img alt="Pre-Commit" src="https://img.shields.io/github/actions/workflow/status/dreadnode/sdk/pre-commit.yaml">
|
|
46
|
+
<img alt="Renovate" src="https://img.shields.io/github/actions/workflow/status/dreadnode/sdk/renovate.yaml">
|
|
47
|
+
</h4>
|
|
48
|
+
|
|
49
|
+
<br/>
|
|
50
|
+
|
|
51
|
+
Strikes is a platform for building, experimenting with, and evaluating AI security agent code.
|
|
52
|
+
|
|
53
|
+
- **Experiment + Tasking + Observability** in a single place that's lightweight and scales.
|
|
54
|
+
- **Track your data** with parameters, inputs, and outputs all connected to your tasks.
|
|
55
|
+
- **Log your artifacts** — data, models, files, and folders — to track data of your Dreadnode runs, enabling easy reuse and reproducibility.
|
|
56
|
+
- **Measure everything** with metrics throughout your code and anywhere you need them.
|
|
57
|
+
- **Scale your code** from a single run to thousands.
|
|
58
|
+
|
|
59
|
+
```python
|
|
60
|
+
import dreadnode as dn
|
|
61
|
+
import rigging as rg
|
|
62
|
+
|
|
63
|
+
from .tools import reversing_tools
|
|
64
|
+
|
|
65
|
+
dn.configure()
|
|
66
|
+
|
|
67
|
+
@dataclass
|
|
68
|
+
class Finding:
|
|
69
|
+
name: str
|
|
70
|
+
severity: str
|
|
71
|
+
description: str
|
|
72
|
+
exploit_code: str
|
|
73
|
+
|
|
74
|
+
@dn.scorer(name="Score Finding")
|
|
75
|
+
async def score_finding(finding: Finding) -> float:
|
|
76
|
+
if finding.severity == "critical":
|
|
77
|
+
return 1.0
|
|
78
|
+
elif finding.severity == "high":
|
|
79
|
+
return 0.8
|
|
80
|
+
else:
|
|
81
|
+
return 0.2
|
|
82
|
+
|
|
83
|
+
@dn.task(scorers=[score_finding])
|
|
84
|
+
@rg.prompt(tools=[reversing_tools])
|
|
85
|
+
async def analyze_binary(binary: str) -> list[Finding]:
|
|
86
|
+
"""
|
|
87
|
+
Analyze the binary for vulnerabilities.
|
|
88
|
+
"""
|
|
89
|
+
...
|
|
90
|
+
|
|
91
|
+
with dn.run(tags=["reverse-engineering"]):
|
|
92
|
+
binary = "c2/downloads/service.exe"
|
|
93
|
+
|
|
94
|
+
dn.log_params(
|
|
95
|
+
model="gpt-4",
|
|
96
|
+
temperature=0.5,
|
|
97
|
+
binary=binary
|
|
98
|
+
)
|
|
99
|
+
|
|
100
|
+
findings = await analyze_binary(binary)
|
|
101
|
+
|
|
102
|
+
dn.log_metric("findings", len(findings))
|
|
103
|
+
```
|
|
104
|
+
|
|
105
|
+
## Installation
|
|
106
|
+
|
|
107
|
+
We publish every version to PyPI:
|
|
108
|
+
```bash
|
|
109
|
+
pip install -U dreadnode
|
|
110
|
+
```
|
|
111
|
+
|
|
112
|
+
If you want to build from source:
|
|
113
|
+
```bash
|
|
114
|
+
poetry install
|
|
115
|
+
```
|
|
116
|
+
|
|
117
|
+
See our **[installation guide](https://docs.dreadnode.io/strikes/install)** for more options.
|
|
118
|
+
|
|
119
|
+
## Getting Started
|
|
120
|
+
|
|
121
|
+
Read through our **[introduction guide](https://docs.dreadnode.io/strikes/intro)** in the docs.
|
|
122
|
+
|
|
123
|
+
## Examples
|
|
124
|
+
|
|
125
|
+
Check out **[dreadnode/example-agents](https://github.com/dreadnode/example-agents)** to find your favorite use case.
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
<p align="center">
|
|
2
|
+
<img
|
|
3
|
+
src="https://d1lppblt9t2x15.cloudfront.net/logos/5714928f3cdc09503751580cffbe8d02.png"
|
|
4
|
+
alt="Logo"
|
|
5
|
+
align="center"
|
|
6
|
+
width="144px"
|
|
7
|
+
height="144px"
|
|
8
|
+
/>
|
|
9
|
+
</p>
|
|
10
|
+
|
|
11
|
+
<h3 align="center">
|
|
12
|
+
Dreadnode Strikes SDK
|
|
13
|
+
</h3>
|
|
14
|
+
|
|
15
|
+
<h4 align="center">
|
|
16
|
+
<img alt="PyPI - Python Version" src="https://img.shields.io/pypi/pyversions/dreadnode">
|
|
17
|
+
<img alt="PyPI - Version" src="https://img.shields.io/pypi/v/dreadnode">
|
|
18
|
+
<img alt="GitHub License" src="https://img.shields.io/github/license/dreadnode/sdk">
|
|
19
|
+
<img alt="Tests" src="https://img.shields.io/github/actions/workflow/status/dreadnode/sdk/tests.yaml">
|
|
20
|
+
<img alt="Pre-Commit" src="https://img.shields.io/github/actions/workflow/status/dreadnode/sdk/pre-commit.yaml">
|
|
21
|
+
<img alt="Renovate" src="https://img.shields.io/github/actions/workflow/status/dreadnode/sdk/renovate.yaml">
|
|
22
|
+
</h4>
|
|
23
|
+
|
|
24
|
+
<br/>
|
|
25
|
+
|
|
26
|
+
Strikes is a platform for building, experimenting with, and evaluating AI security agent code.
|
|
27
|
+
|
|
28
|
+
- **Experiment + Tasking + Observability** in a single place that's lightweight and scales.
|
|
29
|
+
- **Track your data** with parameters, inputs, and outputs all connected to your tasks.
|
|
30
|
+
- **Log your artifacts** — data, models, files, and folders — to track data of your Dreadnode runs, enabling easy reuse and reproducibility.
|
|
31
|
+
- **Measure everything** with metrics throughout your code and anywhere you need them.
|
|
32
|
+
- **Scale your code** from a single run to thousands.
|
|
33
|
+
|
|
34
|
+
```python
|
|
35
|
+
import dreadnode as dn
|
|
36
|
+
import rigging as rg
|
|
37
|
+
|
|
38
|
+
from .tools import reversing_tools
|
|
39
|
+
|
|
40
|
+
dn.configure()
|
|
41
|
+
|
|
42
|
+
@dataclass
|
|
43
|
+
class Finding:
|
|
44
|
+
name: str
|
|
45
|
+
severity: str
|
|
46
|
+
description: str
|
|
47
|
+
exploit_code: str
|
|
48
|
+
|
|
49
|
+
@dn.scorer(name="Score Finding")
|
|
50
|
+
async def score_finding(finding: Finding) -> float:
|
|
51
|
+
if finding.severity == "critical":
|
|
52
|
+
return 1.0
|
|
53
|
+
elif finding.severity == "high":
|
|
54
|
+
return 0.8
|
|
55
|
+
else:
|
|
56
|
+
return 0.2
|
|
57
|
+
|
|
58
|
+
@dn.task(scorers=[score_finding])
|
|
59
|
+
@rg.prompt(tools=[reversing_tools])
|
|
60
|
+
async def analyze_binary(binary: str) -> list[Finding]:
|
|
61
|
+
"""
|
|
62
|
+
Analyze the binary for vulnerabilities.
|
|
63
|
+
"""
|
|
64
|
+
...
|
|
65
|
+
|
|
66
|
+
with dn.run(tags=["reverse-engineering"]):
|
|
67
|
+
binary = "c2/downloads/service.exe"
|
|
68
|
+
|
|
69
|
+
dn.log_params(
|
|
70
|
+
model="gpt-4",
|
|
71
|
+
temperature=0.5,
|
|
72
|
+
binary=binary
|
|
73
|
+
)
|
|
74
|
+
|
|
75
|
+
findings = await analyze_binary(binary)
|
|
76
|
+
|
|
77
|
+
dn.log_metric("findings", len(findings))
|
|
78
|
+
```
|
|
79
|
+
|
|
80
|
+
## Installation
|
|
81
|
+
|
|
82
|
+
We publish every version to PyPI:
|
|
83
|
+
```bash
|
|
84
|
+
pip install -U dreadnode
|
|
85
|
+
```
|
|
86
|
+
|
|
87
|
+
If you want to build from source:
|
|
88
|
+
```bash
|
|
89
|
+
poetry install
|
|
90
|
+
```
|
|
91
|
+
|
|
92
|
+
See our **[installation guide](https://docs.dreadnode.io/strikes/install)** for more options.
|
|
93
|
+
|
|
94
|
+
## Getting Started
|
|
95
|
+
|
|
96
|
+
Read through our **[introduction guide](https://docs.dreadnode.io/strikes/intro)** in the docs.
|
|
97
|
+
|
|
98
|
+
## Examples
|
|
99
|
+
|
|
100
|
+
Check out **[dreadnode/example-agents](https://github.com/dreadnode/example-agents)** to find your favorite use case.
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
"""Top-level dreadnode package API.

Re-exports the core SDK types and binds the module-level convenience
functions (``configure``, ``run``, ``task``, ``log_*`` …) to the shared
``DEFAULT_INSTANCE`` of :class:`Dreadnode`.
"""

from dreadnode.main import DEFAULT_INSTANCE, Dreadnode
from dreadnode.metric import Metric, MetricDict, Scorer
from dreadnode.object import Object
from dreadnode.task import Task
from dreadnode.tracing.span import RunSpan, Span, TaskSpan
from dreadnode.version import VERSION

# Lifecycle management for the default instance.
configure = DEFAULT_INSTANCE.configure
shutdown = DEFAULT_INSTANCE.shutdown

# Tracing and tasking entry points.
api = DEFAULT_INSTANCE.api
span = DEFAULT_INSTANCE.span
task = DEFAULT_INSTANCE.task
task_span = DEFAULT_INSTANCE.task_span  # (was assigned twice; duplicate removed)
run = DEFAULT_INSTANCE.run
scorer = DEFAULT_INSTANCE.scorer
push_update = DEFAULT_INSTANCE.push_update

# Data-logging helpers.
log_metric = DEFAULT_INSTANCE.log_metric
log_param = DEFAULT_INSTANCE.log_param
log_params = DEFAULT_INSTANCE.log_params
log_input = DEFAULT_INSTANCE.log_input
log_inputs = DEFAULT_INSTANCE.log_inputs
log_output = DEFAULT_INSTANCE.log_output
link_objects = DEFAULT_INSTANCE.link_objects
log_artifact = DEFAULT_INSTANCE.log_artifact

__version__ = VERSION

# NOTE: "Run" and "Score" were previously listed here but are never defined
# in this module (the imports provide RunSpan and Scorer), which made
# `from dreadnode import *` raise AttributeError. They have been removed.
__all__ = [
    "Dreadnode",
    "Metric",
    "MetricDict",
    "Object",
    "RunSpan",
    "Scorer",
    "Span",
    "Task",
    "TaskSpan",
    "__version__",
    "configure",
    "log_metric",
    "log_param",
    "run",
    "shutdown",
    "span",
    "task",
]
|
|
File without changes
|
|
@@ -0,0 +1,244 @@
|
|
|
1
|
+
import io
|
|
2
|
+
import json
|
|
3
|
+
import typing as t
|
|
4
|
+
|
|
5
|
+
import httpx
|
|
6
|
+
import pandas as pd
|
|
7
|
+
from pydantic import BaseModel
|
|
8
|
+
from ulid import ULID
|
|
9
|
+
|
|
10
|
+
from dreadnode.util import logger
|
|
11
|
+
from dreadnode.version import VERSION
|
|
12
|
+
|
|
13
|
+
from .models import (
|
|
14
|
+
MetricAggregationType,
|
|
15
|
+
Project,
|
|
16
|
+
Run,
|
|
17
|
+
StatusFilter,
|
|
18
|
+
Task,
|
|
19
|
+
TimeAggregationType,
|
|
20
|
+
TimeAxisType,
|
|
21
|
+
TraceSpan,
|
|
22
|
+
UserDataCredentials,
|
|
23
|
+
)
|
|
24
|
+
|
|
25
|
+
# Type variable for generic pydantic-model helpers.
# NOTE(review): unused in this chunk of the file — presumably consumed by
# code further down in client.py; confirm before removing.
ModelT = t.TypeVar("ModelT", bound=BaseModel)
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class ApiClient:
    """Synchronous HTTP client for the Dreadnode API.

    Wraps an ``httpx.Client`` configured with API-key authentication and
    provides typed accessors for Strikes projects, runs, tasks, traces,
    parquet data exports, and user-data credentials.
    """

    def __init__(
        self,
        base_url: str,
        api_key: str,
        *,
        debug: bool = False,
    ) -> None:
        """Create a client.

        Args:
            base_url: Server root URL; ``/api`` is appended if missing.
            api_key: API key sent in the ``X-API-Key`` header.
            debug: When True, log every request/response at debug level.
        """
        self._base_url = base_url.rstrip("/")
        if not self._base_url.endswith("/api"):
            self._base_url += "/api"

        self._client = httpx.Client(
            headers={
                "User-Agent": f"dreadnode-sdk/{VERSION}",
                "Accept": "application/json",
                "X-API-Key": api_key,
            },
            base_url=self._base_url,
            timeout=30,
        )

        if debug:
            self._client.event_hooks["request"].append(self._log_request)
            self._client.event_hooks["response"].append(self._log_response)

    def _log_request(self, request: httpx.Request) -> None:
        """Log every request to the console if debug is enabled."""

        logger.debug("-------------------------------------------")
        logger.debug("%s %s", request.method, request.url)
        logger.debug("Headers: %s", request.headers)
        logger.debug("Content: %s", request.content)
        logger.debug("-------------------------------------------")

    def _log_response(self, response: httpx.Response) -> None:
        """Log every response to the console if debug is enabled."""

        logger.debug("-------------------------------------------")
        logger.debug("Response: %s", response.status_code)
        logger.debug("Headers: %s", response.headers)
        # read() forces the body to be loaded so it can be logged.
        logger.debug("Content: %s", response.read())
        logger.debug("--------------------------------------------")

    def _get_error_message(self, response: httpx.Response) -> str:
        """Get the error message from the response.

        Prefers the JSON ``detail`` field; falls back to the raw body when
        the response is not JSON (e.g. an HTML error page).
        """

        try:
            obj = response.json()
            return f"{response.status_code}: {obj.get('detail', json.dumps(obj))}"
        except Exception:  # noqa: BLE001
            return str(response.content)

    def _request(
        self,
        method: str,
        path: str,
        params: dict[str, t.Any] | None = None,
        json_data: dict[str, t.Any] | None = None,
    ) -> httpx.Response:
        """Make a raw request to the API without status checking."""

        return self._client.request(method, path, json=json_data, params=params)

    def request(
        self,
        method: str,
        path: str,
        params: dict[str, t.Any] | None = None,
        json_data: dict[str, t.Any] | None = None,
    ) -> httpx.Response:
        """Make a request to the API. Raise an exception for non-200 status codes.

        Raises:
            RuntimeError: On authentication failure (401) or any other
                non-success status, with the server's error detail.
        """

        response = self._request(method, path, params, json_data)
        if response.status_code == 401:  # noqa: PLR2004
            raise RuntimeError("Authentication failed, please check your API token.")

        try:
            response.raise_for_status()
        except httpx.HTTPStatusError as e:
            raise RuntimeError(self._get_error_message(response)) from e

        return response

    def list_projects(self) -> list[Project]:
        """Return all Strikes projects visible to this API key."""
        response = self.request("GET", "/strikes/projects")
        return [Project(**project) for project in response.json()]

    def get_project(self, project: str) -> Project:
        """Return a single project by key or id."""
        response = self.request("GET", f"/strikes/projects/{project!s}")
        return Project(**response.json())

    def list_runs(self, project: str) -> list[Run]:
        """Return all runs for a project."""
        response = self.request("GET", f"/strikes/projects/{project!s}/runs")
        return [Run(**run) for run in response.json()]

    def get_run(self, run: str | ULID) -> Run:
        """Return a single run by id."""
        response = self.request("GET", f"/strikes/projects/runs/{run!s}")
        return Run(**response.json())

    def get_run_tasks(self, run: str | ULID) -> list[Task]:
        """Return the tasks recorded for a run."""
        response = self.request("GET", f"/strikes/projects/runs/{run!s}/tasks")
        return [Task(**task) for task in response.json()]

    def get_run_trace(self, run: str | ULID) -> list[Task | TraceSpan]:
        """Return the full trace (tasks + spans) for a run.

        Items carrying a ``parent_task_span_id`` field are deserialized as
        :class:`Task`; everything else as :class:`TraceSpan`.
        """
        response = self.request("GET", f"/strikes/projects/runs/{run!s}/spans")
        spans: list[Task | TraceSpan] = []
        for item in response.json():
            if "parent_task_span_id" in item:
                spans.append(Task(**item))
            else:
                spans.append(TraceSpan(**item))
        return spans

    # Data exports

    def export_runs(
        self,
        project: str,
        *,
        filter: str | None = None,
        # format: ExportFormat = "parquet",
        status: StatusFilter = "completed",
        aggregations: list[MetricAggregationType] | None = None,
    ) -> pd.DataFrame:
        """Export run-level data for a project as a DataFrame."""
        response = self.request(
            "GET",
            f"/strikes/projects/{project!s}/export",
            params={
                "format": "parquet",
                "status": status,
                **({"filter": filter} if filter else {}),
                **({"aggregations": aggregations} if aggregations else {}),
            },
        )
        return pd.read_parquet(io.BytesIO(response.content))

    def export_metrics(
        self,
        project: str,
        *,
        filter: str | None = None,
        # format: ExportFormat = "parquet",
        status: StatusFilter = "completed",
        metrics: list[str] | None = None,
        aggregations: list[MetricAggregationType] | None = None,
    ) -> pd.DataFrame:
        """Export metric data for a project as a DataFrame."""
        response = self.request(
            "GET",
            f"/strikes/projects/{project!s}/export/metrics",
            params={
                "format": "parquet",
                "status": status,
                # Omit `filter` when unset, matching export_runs (previously
                # sent as an empty query value when None).
                **({"filter": filter} if filter else {}),
                **({"metrics": metrics} if metrics else {}),
                **({"aggregations": aggregations} if aggregations else {}),
            },
        )
        return pd.read_parquet(io.BytesIO(response.content))

    def export_parameters(
        self,
        project: str,
        *,
        filter: str | None = None,
        # format: ExportFormat = "parquet",
        status: StatusFilter = "completed",
        parameters: list[str] | None = None,
        metrics: list[str] | None = None,
        aggregations: list[MetricAggregationType] | None = None,
    ) -> pd.DataFrame:
        """Export parameter data for a project as a DataFrame."""
        response = self.request(
            "GET",
            f"/strikes/projects/{project!s}/export/parameters",
            params={
                "format": "parquet",
                "status": status,
                # Omit `filter` when unset, matching export_runs.
                **({"filter": filter} if filter else {}),
                **({"parameters": parameters} if parameters else {}),
                **({"metrics": metrics} if metrics else {}),
                **({"aggregations": aggregations} if aggregations else {}),
            },
        )
        return pd.read_parquet(io.BytesIO(response.content))

    def export_timeseries(
        self,
        project: str,
        *,
        filter: str | None = None,
        # format: ExportFormat = "parquet",
        status: StatusFilter = "completed",
        metrics: list[str] | None = None,
        time_axis: TimeAxisType = "relative",
        aggregations: list[TimeAggregationType] | None = None,
    ) -> pd.DataFrame:
        """Export time-series metric data for a project as a DataFrame."""
        response = self.request(
            "GET",
            f"/strikes/projects/{project!s}/export/timeseries",
            params={
                "format": "parquet",
                "status": status,
                # Omit `filter` when unset, matching export_runs.
                **({"filter": filter} if filter else {}),
                "time_axis": time_axis,
                **({"metrics": metrics} if metrics else {}),
                # NOTE(review): key is singular "aggregation" here while the
                # other export endpoints use "aggregations" — possibly a typo,
                # but it may match this server route; confirm before changing.
                **({"aggregation": aggregations} if aggregations else {}),
            },
        )
        return pd.read_parquet(io.BytesIO(response.content))

    # User data access

    def get_user_data_credentials(self) -> UserDataCredentials:
        """Fetch temporary credentials for user-data storage access."""
        response = self.request("GET", "/user-data/credentials")
        return UserDataCredentials(**response.json())
|