perspective_cli-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,388 @@
+ """Utility functions for working with PostgreSQL metadata."""
+
+ import os
+ import re
+ from typing import Any
+ from urllib.parse import urljoin
+
+ import psycopg
+ from requests.models import Response
+ from typer import Exit, Option, Typer
+
+ from perspective.utils import console, run_command, send_request
+ from perspective.utils.options import DryRun, PerspectiveURL
+
+
+ app = Typer(no_args_is_help=True, pretty_exceptions_show_locals=False)
+
+
+ def create_conn(
+     username: str, password: str, host: str, port: str, database: str
+ ) -> psycopg.Connection:
+     """Create and return a new database connection using the given credentials.
+
+     Args:
+         username (str): Username for the database.
+         password (str): Password for the database.
+         host (str): Host address of the database.
+         port (str): Port number for the database connection.
+         database (str): Name of the database to connect to.
+
+     Returns:
+         psycopg.Connection: A new database connection.
+     """
+     return psycopg.connect(
+         user=username, password=password, host=host, port=port, dbname=database
+     )
+
+
+ def generate_pg_dump_content(
+     username: str,
+     database: str,
+     password: str,
+     host: str = "localhost",
+     port: str = "5432",
+ ) -> str:
+     """Generate a string dump of the PostgreSQL database schema, tables, etc.
+
+     Args:
+         username (str): The username for the database.
+         database (str): The name of the database.
+         password (str): The password for the database.
+         host (str, optional): The host of the database. Defaults to "localhost".
+         port (str, optional): The port of the database. Defaults to "5432".
+
+     Returns:
+         str: The result of the pg_dump command as a string.
+     """
+     os.environ["PGPASSWORD"] = password
+     command = f"pg_dump -h {host} -p {port} -U {username} -d {database} --column-inserts --rows-per-insert=1 --no-password"
+     return run_command(command, capture_output=True)
+
+
+ def get_pg_dump_tables_info(content: str) -> list[dict]:  # noqa: PLR0914
+     """Extract table information from a PostgreSQL database dump content.
+
+     Args:
+         content (str): PostgreSQL database dump content as a string.
+
+     Returns:
+         list[dict]: A list of dictionaries, each containing information about a table
+             in the database.
+     """
+     # Find table creation statements
+     table_creations = re.findall(
+         r"CREATE TABLE (.*?)\.(.*?) \((.*?)\);", content, re.DOTALL
+     )
+
+     # Find row counts and table sizes
+     row_counts = re.findall(
+ r"-- Name: (.*?); Type: TABLE;.*?-- Total rows: (\base_dict+)", content
81
+ )
+     table_sizes = re.findall(
+         r"-- Name: (.*?); Type: TABLE;.*?-- Size: (.*?)\n", content
+     )
+
+     # Find table owners
+     table_owners = re.findall(r"ALTER TABLE (.*?)\.(.*?) OWNER TO (.*?);", content)
+
+     # Find column defaults
+     column_defaults = re.findall(
+         r"ALTER TABLE ONLY (.*?)\.(.*?) ALTER COLUMN (.*?) SET DEFAULT (.*?);", content
+     )
+
+     # Find primary keys
+     primary_keys = re.findall(
+         r"ALTER TABLE ONLY (.*?)\.(.*?)\s+ADD CONSTRAINT (.*?) PRIMARY KEY \((.*?)\);",
+         content,
+     )
+
+     # Find foreign keys
+     foreign_keys = re.findall(
+         r"ALTER TABLE ONLY (.*?)\.(.*?)\s+ADD CONSTRAINT (.*?) FOREIGN KEY \((.*?)\) REFERENCES (.*?)\.(.*?)\(id\);",
+         content,
+     )
+
+     # Initialize the list of table entries
+     tables_list = []
+
+     # Process each table
+     for table_schema, table_name, table_definition in table_creations:
+         # Extract column names and types
+         columns = re.findall(r"(\w+)\s+([\w\s()]+)(?:,|\))", table_definition)
+
+         # Store column names and types in a dictionary
+         columns_dict = {
+             column_name: column_type.strip() for column_name, column_type in columns
+         }
+
+         # Find row count for the table
+         row_count = next((count for name, count in row_counts if name == table_name), 0)
+
+         # Find table size (in MB) for the table
+         table_size = next(
+             (size for name, size in table_sizes if name == table_name), "Unknown"
+         )
+
+         # Find table owner
+         table_owner = next(
+             (owner for schema, name, owner in table_owners if name == table_name),
+             "Unknown",
+         )
+
+         # Find column defaults
+         column_default_dict = {
+             column_name: default_value
+             for schema, table, column_name, default_value in column_defaults
+             if table == table_name
+         }
+
+         # Find primary key
+         primary_key = next(
+             (key for schema, name, _, key in primary_keys if name == table_name), None
+         )
+
+         # Find foreign keys
+         foreign_keys_list = [
+             {"constraint_name": name, "column": column, "ref_table": ref_table}
+             for schema, table, name, column, ref_schema, ref_table in foreign_keys
+             if table == table_name
+         ]
+
+         # Add the table details to the list
+         tables_list.append({
+             "table_name": table_name,
+             "table_schema": table_schema,
+             "columns": columns_dict,
+             "row_count": int(row_count),
+             "table_size": table_size,
+             "owner": table_owner,
+             "column_defaults": column_default_dict,
+             "primary_key": primary_key,
+             "foreign_keys": foreign_keys_list,
+         })
+
+     return tables_list
+
+
+ def get_pg_dump_views_info(content: str) -> list[dict]:
+     """Extract view information from a PostgreSQL database dump content.
+
+     Args:
+         content (str): PostgreSQL database dump content as a string.
+
+     Returns:
+         list[dict]: A list of dictionaries, each containing information about a view in
+             the database.
+     """
+     # Find view creation statements
+     view_creations = re.findall(
+         r"CREATE VIEW (.*?)\.(.*?) AS(.*?);", content, re.DOTALL
+     )
+
+     # Initialize the list for storing view information
+     views_list = []
+
+     # Process each view
+     for view_schema, view_name, view_definition in view_creations:
+         # Remove newline characters and extra spaces
+         view_definition_clean = re.sub(r"\s+", " ", view_definition)
+
+         # Extract column names and aliases
+         columns = re.findall(r"(\w+)\s+AS\s+(\w+)", view_definition_clean)
+
+         # Add the view details to the list
+         views_list.append({
+             "view_name": view_name,
+             "view_schema": view_schema,
+             "columns": dict(columns),
+         })
+
+     return views_list
+
+
+ def get_tables_size_info(
+     username: str,
+     database: str,
+     password: str,
+     host: str = "localhost",
+     port: str = "5432",
+ ) -> dict:
+     """Retrieve size information for all tables in the specified PostgreSQL database.
+
+     Args:
+         username (str): Database username.
+         database (str): Database name.
+         password (str): Database password.
+         host (str, optional): Database host. Defaults to "localhost".
+         port (str, optional): Database port. Defaults to "5432".
+
+     Returns:
+         dict: A dictionary where keys are table names and values are their respective
+             sizes.
+     """
+     # Context managers guarantee the cursor and connection are closed.
+     with create_conn(username, password, host, port, database) as conn:
+         with conn.cursor() as cur:
+             cur.execute(
+                 """
+                 SELECT table_schema,
+                        table_name,
+                        pg_size_pretty(pg_total_relation_size(('"' || table_schema || '"."' || table_name || '"'))) AS table_size
+                 FROM information_schema.tables
+                 ORDER BY pg_total_relation_size(('"' || table_schema || '"."' || table_name || '"')) DESC;
+                 """
+             )
+             rows = cur.fetchall()
+
+     table_size_dict = {}
+
+     for table_schema, table_name, table_size in rows:
+         table_size_dict[(table_schema, table_name)] = table_size
+     return table_size_dict
+
+
+ def get_tables_row_counts(
+     username: str,
+     database: str,
+     password: str,
+     host: str = "localhost",
+     port: str = "5432",
+ ) -> dict:
+     """Retrieve row counts for all tables in a specified PostgreSQL database.
+
+     Args:
+         username (str): Database username.
+         database (str): Database name.
+         password (str): Database password.
+         host (str, optional): Database host. Defaults to "localhost".
+         port (str, optional): Database port. Defaults to "5432".
+
+     Returns:
+         dict: A dictionary where keys are table names and values are the number of rows
+             in each table.
+     """
+     # Context managers guarantee the cursor and connection are closed.
+     # Note: n_live_tup is a statistics-based estimate of the live row count.
+     with create_conn(username, password, host, port, database) as conn:
+         with conn.cursor() as cur:
+             cur.execute(
+                 """
+                 SELECT schemaname, relname, n_live_tup
+                 FROM pg_stat_user_tables
+                 ORDER BY n_live_tup DESC;
+                 """
+             )
+             rows = cur.fetchall()
+
+     row_count_dict = {}
+
+     for table_schema, table_name, row_count in rows:
+         row_count_dict[(table_schema, table_name)] = row_count
+     return row_count_dict
+
+
+ def get_db_metadata(
+     username: str,
+     database: str,
+     password: str,
+     host: str = "localhost",
+     port: str = "5432",
+ ) -> dict[str, list[dict]]:
+     """Gather metadata about the specified PostgreSQL database, tables, and views.
+
+     Args:
+         username (str): Database username.
+         database (str): Database name.
+         password (str): Database password.
+         host (str, optional): Database host. Defaults to "localhost".
+         port (str, optional): Database port. Defaults to "5432".
+
+     Returns:
+         dict[str, list[dict]]: A dictionary containing metadata about tables and views
+             in the database.
+     """
+     dump_content: str = generate_pg_dump_content(
+         username=username, database=database, host=host, port=port, password=password
+     )
+     # Transform SQL dump to dictionary
+     tables_info: list[dict] = get_pg_dump_tables_info(dump_content)
+
+     # Get row count information
+     row_count_dict: dict = get_tables_row_counts(
+         username=username, database=database, host=host, port=port, password=password
+     )
+
+     # Get table size information
+     table_size_dict: dict = get_tables_size_info(
+         username=username, database=database, host=host, port=port, password=password
+     )
+
+     for table in tables_info:
+         key = (table["table_schema"], table["table_name"])
+         table["row_count"] = row_count_dict.get(key, 0)
+         table["table_size"] = table_size_dict.get(key, 0)
+
+     views_info: list[dict] = get_pg_dump_views_info(dump_content)
+
+     return {"tables": tables_info, "views": views_info}
+
+
+ @app.callback(invoke_without_command=True)
+ def ingest(
+     perspective_url: str = PerspectiveURL,
+     username: str = Option(
+         ...,
+         "--username",
+         "-n",
+         envvar="LUMA_POSTGRES_USERNAME",
+         help="The username for the PostgreSQL database.",
+         prompt="PostgreSQL username",
+     ),
+     database: str = Option(
+         ...,
+         "--database",
+         "-d",
+         envvar="LUMA_POSTGRES_DATABASE",
+         help="The name of the PostgreSQL database.",
+         prompt="PostgreSQL database",
+     ),
+     host: str = Option(
+         "localhost",
+         "--host",
+         "-h",
+         envvar="LUMA_POSTGRES_HOST",
+         help="The host address of the PostgreSQL database.",
+     ),
+     port: str = Option(
+         "5432",
+         "--port",
+         "-p",
+         envvar="LUMA_POSTGRES_PORT",
+         help="The port number for the PostgreSQL database.",
+     ),
+     password: str = Option(
+         ...,
+         "--password",
+         "-P",
+         envvar="LUMA_POSTGRES_PASSWORD",
+         help="The password for the PostgreSQL database.",
+         prompt="PostgreSQL password",
+         hide_input=True,
+     ),
+     dry_run: bool = DryRun,
+ ) -> Response:
+     """Ingest metadata from a PostgreSQL database into a Luma ingestion endpoint."""
+     # Retrieve database metadata.
+     db_metadata: dict[str, list[dict[str, Any]]] = get_db_metadata(
+         username=username, database=database, host=host, port=port, password=password
+     )
+
+     # In dry run mode, print the database metadata and exit.
+     if dry_run:
+         console.print(db_metadata)
+         raise Exit(0)
+
+     # Send ingestion request.
+     return send_request(
+         url=urljoin(perspective_url, "postgres/"),
+         method="POST",
+         payload=db_metadata,
+         verify=False,
+     )
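The dump-parsing helpers above are pure functions over pg_dump text, so they can be smoke-tested without a live database. A minimal sketch follows; the import path perspective.postgres is an assumption (the diff does not show file paths), and the DDL snippet is synthetic:

    # Hypothetical smoke test for get_pg_dump_tables_info; the import path and
    # sample DDL are assumptions, not part of the package.
    from perspective.postgres import get_pg_dump_tables_info

    SAMPLE_DUMP = '''
    CREATE TABLE public.users (
        id integer NOT NULL,
        email text
    );

    ALTER TABLE public.users OWNER TO admin;

    ALTER TABLE ONLY public.users
        ADD CONSTRAINT users_pkey PRIMARY KEY (id);
    '''

    tables = get_pg_dump_tables_info(SAMPLE_DUMP)
    assert tables[0]["table_name"] == "users"
    assert tables[0]["owner"] == "admin"
    assert tables[0]["primary_key"] == "id"
    # Prints {'id': 'integer NOT NULL'}: the column regex only captures columns
    # followed by a comma or closing parenthesis, so the last column is skipped.
    print(tables[0]["columns"])

Against a live database, the same path can be checked end to end with the ingest command's --dry-run flag, which prints the collected metadata instead of POSTing it to the {perspective_url}/postgres/ endpoint.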
@@ -0,0 +1,184 @@
+ """Download lineage information from the PowerBI API."""
+
+ from collections.abc import Generator
+ import logging
+ from pathlib import Path
+ from time import sleep
+ from typing import Any
+
+ from azure.identity import ClientSecretCredential
+ import dlt
+ from dlt.extract.resource import DltResource
+ from dlt.sources.helpers.rest_client import RESTClient
+ from dlt.sources.helpers.rest_client.auth import OAuth2AuthBase
+ from dlt.sources.helpers.rest_client.paginators import SinglePagePaginator
+
+ from perspective.exceptions import ExtractionError
+
+
+ Workspace = dict[str, Any]
+ WorkspaceDataflows = dict[str, Any]
+ DataflowDetails = dict[str, Any]
+
+
+ logger = logging.getLogger("dlt")
+
+
+ class PowerBIOauthClientCredentials(OAuth2AuthBase):
+     def __init__(self, tenant_id: str, client_id: str, client_secret: str):
+         """PowerBI OAuth2 client credentials authentication.
+
+         Args:
+             tenant_id (str): The Azure tenant ID.
+             client_id (str): The client ID of the service principal app.
+             client_secret (str): The client secret of the service principal app.
+         """
+         super().__init__()
+         self.access_token = self._get_token(tenant_id, client_id, client_secret)
+
+     @staticmethod
+     def _get_token(tenant_id: str, client_id: str, client_secret: str) -> str:
+         scope = "https://analysis.windows.net/powerbi/api/.default"
+         credential = ClientSecretCredential(
+             tenant_id=tenant_id, client_id=client_id, client_secret=client_secret
+         )
+         return credential.get_token(scope).token
+
+
+ @dlt.source(name="powerbi")
+ def powerbi(  # noqa: C901
+     tenant_id: str = dlt.secrets.value,
+     client_id: str = dlt.secrets.value,
+     client_secret: str = dlt.secrets.value,
+ ) -> list[DltResource]:
+     """The PowerBI metadata source.
+
+     Args:
+         tenant_id (str, optional): The Azure tenant ID. Defaults to dlt.secrets.value.
+         client_id (str, optional): The client ID of the service principal app. Defaults
+             to dlt.secrets.value.
+         client_secret (str, optional): The client secret of the service principal app.
+             Defaults to dlt.secrets.value.
+
+     Returns:
+         list[DltResource]: The dlt resources exposing PowerBI workspace lineage
+             and dataflow details.
+ """
69
+ client = RESTClient(
70
+ base_url="https://api.powerbi.com/v1.0/myorg",
71
+ auth=PowerBIOauthClientCredentials(
72
+ tenant_id=tenant_id,
73
+ client_id=client_id,
74
+ client_secret=client_secret,
75
+ ),
76
+ paginator=SinglePagePaginator(),
77
+ )
78
+
79
+ @dlt.resource(primary_key="id", write_disposition="replace")
80
+ def workspaces() -> Generator[list[dict[str, Any]], None, None]:
81
+ endpoint = "groups"
82
+ yield client.get(endpoint).json()["value"]
83
+
84
+ # We need to add the type hint for our custom column here as this is required for
85
+ # dbt-osmosis to correctly generate the bronze properties file, and consequently,
86
+ # for dbt-ibis to work.
87
+ @dlt.transformer(
88
+ data_from=workspaces, columns={"description": {"data_type": "text"}}
89
+ )
90
+ def workspaces_lineage(
91
+ workspaces: list[Workspace],
+     ) -> Generator[dict[str, Any], None, None]:
+         workspace_ids = [workspace["id"] for workspace in workspaces]
+         request_lineage_endpoint = "admin/workspaces/getInfo"
+         params = {
+             "lineage": True,
+             "datasourceDetails": True,
+             "datasetSchema": True,
+             "datasetExpressions": True,
+             "getArtifactUsers": True,
+         }
+         body = {"workspaces": workspace_ids}
+
+         # Request a workspace lineage scan and await scan completion.
+         response = client.post(
+             request_lineage_endpoint, params=params, json=body
+         ).json()
+         if response.get("error"):
+             msg = f"Error requesting workspace lineage scan: {response['error']}"
+             raise ExtractionError(msg)
+
+         scan_id = response["id"]
+         scan_status = None
+         logger.info("Waiting for scan to complete...")
+         while scan_status != "Succeeded":
+             scan_status_endpoint = f"admin/workspaces/scanStatus/{scan_id}"
+             scan_status = client.get(scan_status_endpoint).json()["status"]
+             sleep(0.2)
+
+         # Get the scan result.
+         scan_result_endpoint = f"admin/workspaces/scanResult/{scan_id}"
+         response = client.get(scan_result_endpoint)
+         lineage = response.json()
+
+         # Add "description" column if it doesn't exist.
+         # This is required as the schema returned by the PowerBI REST API is
+         # dynamic, which can break everything downstream. For example, if a
+         # workspace description is not set, instead of returning
+         # "description": "", the PBI REST API simply omits the "description" key.
+         for i, workspace in enumerate(lineage["workspaces"]):
+             if "description" not in workspace:
+                 lineage["workspaces"][i]["description"] = ""
+
+         yield lineage
+
+     @dlt.transformer(data_from=workspaces)
+     def workspace_dataflows(
+         workspaces: list[Workspace],
+     ) -> Generator[WorkspaceDataflows, None, None]:
+         for workspace in workspaces:
+             workspace_id = workspace["id"]
+
+             logger.info(f"Extracting dataflows for workspace '{workspace_id}'...")
+
+             endpoint = f"groups/{workspace_id}/dataflows"
+             dataflows = client.get(endpoint).json()["value"]
+
+             logger.info(f"Extracted {len(dataflows)} dataflows.")
+
+             yield {
+                 "workspace_id": workspace_id,
+                 "dataflows": dataflows,
+             }
+
+     @dlt.transformer(data_from=workspace_dataflows)
+     def dataflows_details(
+         workspace_dataflows: WorkspaceDataflows,
+     ) -> Generator[list[DataflowDetails], None, None]:
+         dataflows_details = []
+         workspace_id = workspace_dataflows["workspace_id"]
+
+         for dataflow in workspace_dataflows["dataflows"]:
+             dataflow_id = dataflow["objectId"]
+             endpoint = f"groups/{workspace_id}/dataflows/{dataflow_id}"
+             dataflow_details = client.get(endpoint).json()
+             dataflows_details.append({"id": dataflow_id, **dataflow_details})
+
+         yield dataflows_details
+
+     return [workspaces_lineage, dataflows_details]
+
+
+ if __name__ == "__main__":
+     import json
+
+     source = powerbi()
+     lineage = source.workspaces_lineage
+     dataflows = source.dataflows_details
+
+     with Path("powerbi_workspace_info.json").open("w", encoding="utf-8") as f:
+         json.dump(list(lineage), f, indent=4)
+
+     with Path("powerbi_dataflows_info.json").open("w", encoding="utf-8") as f:
+         json.dump(list(dataflows), f, indent=4)
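Because powerbi() is a regular dlt source, its resources can also be loaded through a dlt pipeline rather than the ad-hoc JSON dump in the __main__ block. A minimal sketch, assuming credentials are configured under [sources.powerbi] in .dlt/secrets.toml; the import path and the duckdb destination are illustrative assumptions:

    # Hypothetical pipeline run; the import path and destination are assumptions.
    import dlt

    from perspective.sources.powerbi import powerbi

    pipeline = dlt.pipeline(
        pipeline_name="powerbi_metadata",
        destination="duckdb",
        dataset_name="powerbi",
    )
    load_info = pipeline.run(powerbi())
    print(load_info)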
@@ -0,0 +1,137 @@
+ """Pydantic models for Power BI metadata ingestion."""
+
+ from enum import Enum
+ from typing import Any
+
+ from pydantic import BaseModel, EmailStr, Field
+
+
+ class ColumnType(str, Enum):
+     CalculatedTableColumn = "CalculatedTableColumn"
+     Calculated = "Calculated"
+     Data = "Data"
+
+
+ class TableColumn(BaseModel):
+     name: str
+     dataType: str
+     isHidden: bool
+     columnType: ColumnType
+
+
+ class TableMeasure(BaseModel):
+     name: str
+     expression: str
+     description: str | None = None
+     isHidden: bool
+
+
+ class TableSource(BaseModel):
+     expression: str
+
+
+ class SourceExpression(BaseModel):
+     name: str
+     expression: str
+
+
+ class Table(BaseModel):
+     name: str
+     isHidden: bool
+     columns: list[TableColumn] = []
+     measures: list[TableMeasure] = []
+     source: list[TableSource] | None = None
+
+
+ class Dataset(BaseModel):
+     id: str
+     name: str
+     tables: list[Table] = []
+     expressions: list[SourceExpression] | None = None
+     description: str | None = None
+     configuredBy: EmailStr | None = None
+     configuredById: str | None = None
+     directQueryRefreshSchedule: dict[str, Any] | None = None
+     createdDate: str
+     users: list[dict] | None = None
+
+
+ class Dataflow(BaseModel):
+     objectId: str
+     name: str
+     description: str | None = None
+     configuredBy: str
+     users: list[str] = []
+
+
+ class WorkspaceDataflows(BaseModel):
+     workspace_id: str
+     dataflows: list[Dataflow]
+
+
+ class PBIMashup(BaseModel):
+     queriesMetadata: dict[str, dict[str, Any]]
+     document: str
+     connectionOverrides: list[dict[str, Any]] | None = None
+
+
+ class DataflowDetails(BaseModel):
+     id: str
+     name: str
+     description: str
+     version: str
+     modifiedTime: str
+     pbi_mashup: PBIMashup = Field(alias="pbi:mashup")
+     entities: list[dict[str, Any]] = Field(default_factory=list)
+
+
+ class DashboardTile(BaseModel):
+     id: str
+     title: str
+     reportId: str
+     datasetId: str
+
+
+ class Dashboard(BaseModel):
+     id: str
+     displayName: str
+     tiles: list[DashboardTile]
+     users: list[dict]
+     tags: list[str]
+
+
+ class ReportUser(BaseModel):
+     reportUserAccessRight: str
+     emailAddress: EmailStr | None
+     displayName: str
+     identifier: EmailStr | str
+     graphId: str
+     principalType: str
+     userType: str | None
+
+
+ class Report(BaseModel):
+     id: str
+     appId: str | None = None
+     name: str
+     datasetId: str
+     description: str | None = None
+     createdDateTime: str | None = None
+     modifiedDateTime: str | None = None
+     users: list[ReportUser] | None = None
+
+
+ class Workspace(BaseModel):
+     id: str
+     name: str
+     type: str = "Workspace"
+     state: str
+     reports: list[Report] = []
+     datasets: list[Dataset] = []
+     dashboards: list[Dashboard] = []
+     users: list[dict]
+
+
+ class WorkspaceInfo(BaseModel):
+     workspaces: list[Workspace]
+     datasourceInstances: list[dict] | None = None
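These models mirror the shape of the admin scan payload produced by the workspaces_lineage resource, so they can validate a scan result before it is ingested. A minimal sketch, assuming Pydantic v2; the import path and the truncated payload are assumptions:

    # Hypothetical validation of a truncated getInfo scan result; the import
    # path and payload are assumptions.
    from perspective.models.powerbi import WorkspaceInfo

    payload = {
        "workspaces": [
            {
                "id": "11111111-2222-3333-4444-555555555555",
                "name": "Sales",
                "state": "Active",
                "users": [],
            }
        ],
        "datasourceInstances": [],
    }

    info = WorkspaceInfo.model_validate(payload)
    print(info.workspaces[0].name)  # Sales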