peak-sdk 1.10.0__py3-none-any.whl → 1.12.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33) hide show
  1. peak/__init__.py +3 -2
  2. peak/_metadata.py +182 -1
  3. peak/_version.py +1 -1
  4. peak/callbacks.py +22 -2
  5. peak/cli/cli.py +2 -0
  6. peak/cli/helpers.py +2 -0
  7. peak/cli/metrics/__init__.py +21 -0
  8. peak/cli/metrics/metrics.py +707 -0
  9. peak/cli/press/blocks/specs.py +2 -0
  10. peak/cli/resources/services.py +20 -5
  11. peak/handler.py +16 -7
  12. peak/metrics/__init__.py +26 -0
  13. peak/metrics/metrics.py +553 -0
  14. peak/output.py +9 -1
  15. peak/press/blocks.py +2 -0
  16. peak/resources/__init__.py +10 -1
  17. peak/resources/services.py +5 -2
  18. peak/sample_yaml/metrics/create_collection.yaml +8 -0
  19. peak/sample_yaml/metrics/publish.yaml +6 -0
  20. peak/sample_yaml/metrics/query.yaml +25 -0
  21. peak/sample_yaml/press/blocks/specs/service/webapp/create_block_spec.yaml +1 -0
  22. peak/sample_yaml/press/blocks/specs/service/webapp/create_block_spec_release.yaml +1 -0
  23. peak/sample_yaml/resources/services/create_or_update_service.yaml +1 -0
  24. peak/sample_yaml/resources/services/create_service.yaml +1 -0
  25. peak/sample_yaml/resources/services/update_service.yaml +1 -0
  26. peak/sample_yaml/resources/webapps/create_or_update_webapp.yaml +1 -0
  27. peak/sample_yaml/resources/webapps/create_webapp.yaml +1 -0
  28. peak/sample_yaml/resources/webapps/update_webapp.yaml +1 -0
  29. {peak_sdk-1.10.0.dist-info → peak_sdk-1.12.0.dist-info}/METADATA +45 -7
  30. {peak_sdk-1.10.0.dist-info → peak_sdk-1.12.0.dist-info}/RECORD +33 -26
  31. {peak_sdk-1.10.0.dist-info → peak_sdk-1.12.0.dist-info}/LICENSE +0 -0
  32. {peak_sdk-1.10.0.dist-info → peak_sdk-1.12.0.dist-info}/WHEEL +0 -0
  33. {peak_sdk-1.10.0.dist-info → peak_sdk-1.12.0.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,707 @@
1
+ #
2
+ # # Copyright © 2024 Peak AI Limited. or its affiliates. All Rights Reserved.
3
+ # #
4
+ # # Licensed under the Apache License, Version 2.0 (the "License"). You
5
+ # # may not use this file except in compliance with the License. A copy of
6
+ # # the License is located at:
7
+ # #
8
+ # # https://github.com/PeakBI/peak-sdk/blob/main/LICENSE
9
+ # #
10
+ # # or in the "license" file accompanying this file. This file is
11
+ # # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
12
+ # # ANY KIND, either express or implied. See the License for the specific
13
+ # # language governing permissions and limitations under the License.
14
+ # #
15
+ # # This file is part of the peak-sdk.
16
+ # # see (https://github.com/PeakBI/peak-sdk)
17
+ # #
18
+ # # You should have received a copy of the APACHE LICENSE, VERSION 2.0
19
+ # # along with this program. If not, see <https://apache.org/licenses/LICENSE-2.0>
20
+ #
21
+
22
+ """Peak Metric commands."""
23
+
24
+ import json
25
+ from typing import Any, Dict, List, Optional
26
+
27
+ import typer
28
+ from peak import config
29
+ from peak.cli import args, helpers
30
+ from peak.cli.args import DRY_RUN, GENERATE_YAML, OUTPUT_TYPES, PAGING
31
+ from peak.constants import OutputTypes, OutputTypesNoTable
32
+ from peak.helpers import combine_dictionaries, parse_list_of_strings, variables_to_dict
33
+ from peak.metrics.metrics import Metric
34
+ from peak.output import Writer
35
+ from typing_extensions import Annotated
36
+
37
# Typer sub-application that hosts every `peak metrics ...` command.
app = typer.Typer(
    help="Metrics commands.",
    short_help="Manage Metrics.",
)

# Reusable typer option declarations shared by the commands below.
_ARTIFACT_PATH = typer.Option(None, help="Path to the artifact.")

# NOTE: previously this was a chained assignment
# `_PUBLISH_NAMESPACE = _NAMESPACE = typer.Option(...)`; the `_NAMESPACE`
# target was dead because `_NAMESPACE` is rebound immediately below, so the
# chained target has been removed.
_PUBLISH_NAMESPACE = typer.Option(
    None,
    help="The namespace where you intend to publish the metrics. If not provided, the default namespace is used.",
)

_NAMESPACE = typer.Option(
    None,
    help="The namespace associated with the metrics. If not provided, the default namespace is used.",
)
_GENERATE_SQL = typer.Option(
    None,
    help="Indicates whether to return the SQL query instead of data. If `true`, the response will include the SQL query used to retrieve the metrics. Default is `false`.",
)
_MEASURES = typer.Option(
    None,
    help="An array of measures to include in the query. Measures represent quantitative metrics such as sums or counts. Provide them in stringified JSON format.",
)
_DIMENSIONS = typer.Option(
    None,
    help="An array of dimensions to include in the query. Dimensions represent qualitative categories such as time, location, or product names. Provide them in stringified JSON format.",
)
_FILTERS = typer.Option(
    None,
    help="An array of filter objects to apply to the query. Filters limit the data returned based on specific conditions. Provide them in stringified JSON format.",
)
_TIME_DIMENSIONS = typer.Option(
    None,
    help="An array of time dimensions to include in the query. Time dimensions allow querying over specific time ranges with optional granularity (e.g., day, month, year). Provide them in stringified JSON format.",
)
_SEGMENTS = typer.Option(
    None,
    help="An array of segments to include in the query. Segments represent pre-defined filters that can be applied to metrics. Provide them in stringified JSON format.",
)
_ORDER = typer.Option(
    None,
    help="Defines the sort order of the results. This is an object where keys are the dimensions/measures and values are either 'asc' or 'desc' to specify ascending or descending order. Provide them in stringified JSON format.",
)
_LIMIT = typer.Option(
    None,
    help="Limits the number of rows returned by the query. If not provided, the default limit is applied.",
)
_OFFSET = typer.Option(
    None,
    help="Specifies the number of rows to skip before starting to return data. Useful for pagination.",
)

_METRIC_TYPES = typer.Option(
    None,
    help="The type of metric to create. If not provided, all metrics are retrieved. Available types are `cube`, `view`, `dimension`, `measure`, `segment` and `all`.",
)

_COLLECTION_NAME = typer.Option(
    None,
    help="Name of the metric collection.",
)
_PUBLICATION_ID = typer.Option(
    None,
    help="The publication ID associated with the metrics.",
)

_MEASURES_DELETE = typer.Option(
    None,
    help="An array of measures to delete.",
)

_COLLECTION_ID = typer.Option(
    None,
    help="The ID of the collection to publish the metrics.",
)

_COLLECTION_SCOPE = typer.Option(
    None,
    help="Scope of the metrics collection. Must be one of the following: PUBLIC, PRIVATE.",
)

_COLLECTION_DESCRIPTION = typer.Option(
    None,
    help="Description of the metric collection.",
)

_COLLECTION_IDS = typer.Option(
    None,
    help="An array of collection IDs to include in the query. If not provided, all collections are retrieved.",
)

_COLLECTION_SCOPES = typer.Option(
    None,
    help="An array of scopes to filter the collections by. Available scopes are `PUBLIC` and `PRIVATE`. If not provided, all collections of the tenant along with all public collections are retrieved.",
)
133
+
134
+
135
@app.command(short_help="Publish metrics.")
def publish(
    ctx: typer.Context,
    file: Annotated[
        Optional[str],
        typer.Argument(
            ...,
            help="Path to the file that defines the body for this operation, supports both `yaml` file or a `jinja` template.",
        ),
    ] = None,
    params_file: str = args.TEMPLATE_PARAMS_FILE,
    params: List[str] = args.TEMPLATE_PARAMS,
    namespace: Optional[str] = _PUBLISH_NAMESPACE,
    artifact_path: Optional[str] = _ARTIFACT_PATH,
    collection_id: Optional[str] = _COLLECTION_ID,
    dry_run: Optional[bool] = DRY_RUN,  # noqa: ARG001
    output_type: Optional[OutputTypesNoTable] = OUTPUT_TYPES,  # noqa: ARG001
    generate: Optional[bool] = GENERATE_YAML,  # noqa: ARG001
) -> None:
    """***Publish*** metrics.

    The metrics can be published either by passing artifact and namespace,
    or by passing collection_id and namespace. If both artifact and collection_id
    are provided, artifact takes priority. If the namespace is not provided, the 'default' namespace is used.

    \b
    🧩 ***Input file schema(yaml):***<br/>

    **_Publish metrics using artifact and namespace_**
    ```yaml
    body (map):
        namespace (str): The namespace associated with the metrics.
        artifact (map):
            path (str): Path to the artifact.
    ```
    **_Publish metrics using collection id and namespace_**
    ```yaml
    body (map):
        namespace (str): The namespace associated with the metrics.
        collectionId (str): The ID of the collection to publish the metrics.
    ```

    \b
    📝 ***Example usage:***
    ```bash
    peak metrics publish '/path/to/metrics.yaml' --params-file '/path/to/values.yaml'
    ```

    We can also provide the required parameters from the command line or combine the YAML template and command line arguments in which case the command line arguments will take precedence.

    \b
    📝 ***Example usage without yaml:***
    ```bash
    # Publish metrics using artifact and namespace
    peak metrics publish --artifact-path <path> --namespace <namespace>
    # Publish metrics using collection id and namespace
    peak metrics publish --collection-id <collection-id> --namespace <namespace>
    ```

    \b
    🆗 ***Response:***
    ```json
    {
        "artifactId": "7dc0feaa-be90-467b-9c3a-009a234e4b2b",
        "collectionId": "bc8b6ef5-d2f6-4b7f-9365-0261b43997c9",
        "publicationId": "79af8462-2820-483c-a8b6-d697555a8fc2",
    }
    ```

    🔗 [**API Documentation**](https://service.peak.ai/semantic-layer/api-docs/index.htm#/Metrics/post_api_v1_metrics)
    """
    client: Metric = ctx.obj["client"]
    output: Writer = ctx.obj["writer"]

    # CLI flags become overrides for values coming from the YAML template.
    cli_overrides: Dict[str, Any] = variables_to_dict(
        namespace,
    )

    template_body: Dict[str, Any] = {}
    if file:
        template_body = helpers.template_handler(file=file, params_file=params_file, params=params)
        # Drop any template keys that `client.publish` does not accept.
        template_body = helpers.remove_unknown_args(template_body, client.publish)

    merged_body = combine_dictionaries(template_body.get("body") or {}, cli_overrides)
    resolved_artifact = helpers.get_updated_artifacts(template_body, artifact_path, None)

    with output.pager():
        result: Dict[str, Any] = client.publish(
            body=merged_body,
            artifact=resolved_artifact,  # type: ignore # noqa: PGH003
            collection_id=collection_id,
        )
        output.write(result)
228
+
229
+
230
@app.command(short_help="Query metrics.")
def query(
    ctx: typer.Context,
    file: Annotated[
        Optional[str],
        typer.Argument(
            ...,
            help="Path to the file that defines the body for this operation, supports both `yaml` file or a `jinja` template.",
        ),
    ] = None,
    params_file: Optional[str] = args.TEMPLATE_PARAMS_FILE,
    params: Optional[List[str]] = args.TEMPLATE_PARAMS,
    namespace: Optional[str] = _NAMESPACE,
    generate_sql: Optional[bool] = _GENERATE_SQL,
    measures: Optional[List[str]] = _MEASURES,
    dimensions: Optional[List[str]] = _DIMENSIONS,
    filters: Optional[List[str]] = _FILTERS,
    time_dimensions: Optional[List[str]] = _TIME_DIMENSIONS,
    segments: Optional[List[str]] = _SEGMENTS,
    order: Optional[str] = _ORDER,
    limit: Optional[int] = _LIMIT,
    offset: Optional[int] = _OFFSET,
    paging: Optional[bool] = PAGING,  # noqa: ARG001
    output_type: Optional[OutputTypes] = OUTPUT_TYPES,  # noqa: ARG001
    generate: Optional[bool] = GENERATE_YAML,  # noqa: ARG001
) -> None:
    """***Query*** a published metric in the semantic layer using the provided parameters.

    \b
    🧩 ***Input file schema(yaml):***<br/>
    ```yaml
    namespace (str | required: false): The namespace associated with the metrics. If not provided, the default namespace is used.
    generateSql (bool | required: false): Indicates whether to return the SQL query instead of data. If `true`, the response will include the SQL query used to retrieve the metrics. Default is `false`.
    measures (list(str) | required: false): An array of measures to include in the query. Measures represent quantitative metrics such as sums or counts.
    dimensions (list(str) | required: false): An array of dimensions to include in the query. Dimensions represent qualitative categories such as time, location, or product names.
    filters (list(map) | required: false):
        dimension (str): The dimension to filter on. Supported values are `equals`, `notEquals`, `contains`, `notContains`, `startsWith`, `notStartsWith`, `endsWith`, `notEndsWith`, `gt`, `gte`, `lt`, `lte`, `inDateRange`, `notInDateRange`, `beforeDate`, `beforeOrOnDate`, `afterDate`, `afterOrOnDate` etc.
        operator (str): The operator to use for the filter.
        values (list(str)): An array of values to filter on.
    timeDimensions (list(map) | required: false):
        dimension (str): The time dimension to include in the query.
        granularity (str | required: false): The granularity of the time dimension. Supported values are `second`, `minute`, `hour`, `day`, `week`, `month`, `quarter`, and `year`.
        dateRange (list(str) | str | required: false): An array of two dates that define the time range for the query. Alternatively, you can provide a single string out of the following predefined date ranges `today`, `yesterday`, `this week`, `last week`, `this month`, `last month`, `this quarter`, `last quarter`, `this year`, `last year`, `last 7 days` and `last 30 days`.
    segments (list(str) | required: false): An array of segments to include in the query. Segments represent pre-defined filters that can be applied to metrics.
    order (map | required: false): Defines the sort order of the results. This is a stringified object where keys are the dimensions/measures and values are either 'asc' or 'desc' to specify ascending or descending order.
    limit (int | required: false): Limits the number of rows returned by the query. If not provided, the default limit is applied.
    offset (int | required: false): Specifies the number of rows to skip before starting to return data. Useful for pagination.
    ```

    \b
    📝 ***Example usage:***<br/>
    ```bash
    peak metrics query '/path/to/query.yaml' --params-file '/path/to/values.yaml'
    ```

    We can also provide the required parameters from the command line or combine the YAML template and command line arguments in which case the command line arguments will take precedence.

    \b
    📝 ***Example usage without yaml:***<br/>
    ```bash
    peak metrics query --measures "<cube_name>.<resource_name_1>" --measures "<cube_name>.<resource_name_2>" --dimensions "<cube_name>.<resource_name>" --time-dimensions "{\"dimension\":\"<cube_name>.<resource_name>\",\"dateRange\":[\"2024-01-26T00:00:00Z\",\"2024-06-06T00:00:00Z\"],\"granularity\":\"day\"}"
    ```

    \b
    🆗 ***Response:***
    ```
    {
        "data": [
            {
                "region": "North America",
                "total_sales": 1000000
            },
            {
                "region": "Europe",
                "total_sales": 500000
            }
        ],
        "sql": "SELECT region, SUM(total_sales) AS total_sales FROM orders GROUP BY region"
    }
    ```

    🔗 [**API Documentation**](https://service.peak.ai/semantic-layer/api-docs/index.htm#/Query/get_api_v1_metrics_query)
    """
    client: Metric = ctx.obj["client"]
    output: Writer = ctx.obj["writer"]

    template_params: Dict[str, Any] = {}
    if file:
        template_params = helpers.template_handler(file, params_file, params)
        template_params = helpers.remove_unknown_args(template_params, client.query)

    # Normalise every CLI flag into the shape `client.query` expects;
    # stringified-JSON flags are decoded here.
    cli_params: Dict[str, Any] = {
        "namespace": namespace,
        "generate_sql": generate_sql,
        "measures": parse_list_of_strings(measures) if measures else [],
        "dimensions": parse_list_of_strings(dimensions) if dimensions else [],
        "filters": [json.loads(raw_filter) for raw_filter in filters] if filters else [],
        "time_dimensions": (
            [json.loads(raw_dimension) for raw_dimension in time_dimensions] if time_dimensions else []
        ),
        "segments": parse_list_of_strings(segments) if segments else [],
        "order": json.loads(order) if order else {},
        "limit": limit,
        "offset": offset,
    }

    # A CLI value wins over the template value, except when the CLI value is
    # empty (None/[]/{}) and the template already supplies that key.
    merged_params = dict(template_params)
    for key, value in cli_params.items():
        if key not in merged_params or value not in (None, [], {}):
            merged_params[key] = value

    with output.pager():
        response = client.query(**merged_params)
        cli_output_type = config.OUTPUT_TYPE

        if "data" in response:
            response["totalCount"] = len(response["data"])

        # Table output cannot show both rows and SQL; keep only the SQL.
        if merged_params.get("generate_sql") is True and cli_output_type == "table":
            response["data"] = []

        output.write(response)
354
+
355
+
356
@app.command("list", short_help="List metrics.")
def list_metrics(
    ctx: typer.Context,
    page_size: Optional[int] = args.PAGE_SIZE,
    page_number: Optional[int] = args.PAGE_NUMBER,
    namespace: Optional[str] = _NAMESPACE,
    type: Optional[str] = _METRIC_TYPES,  # noqa: A002
    paging: Optional[bool] = PAGING,  # noqa: ARG001
    output_type: Optional[OutputTypes] = OUTPUT_TYPES,  # noqa: ARG001
) -> None:
    """***List*** metrics in the semantic layer.

    \b
    📝 ***Example usage:***<br/>
    ```bash
    peak metrics list --page-size 25 --page-number 1 --namespace <namespace> --type <type>
    ```

    \b
    🆗 ***Response:***
    ```
    {
        "data": [
            {
                "name": "product",
                "type": "cube",
                "public": true,
                "measures": [
                    {
                        "name": "product.max_price",
                        "type": "number",
                        "aggType": "max",
                        "public": true
                    },
                    {
                        "name": "product.max_discount",
                        "type": "number",
                        "aggType": "max",
                        "public": true
                    },
                ],
                "dimensions": [
                    {
                        "name": "product.sale",
                        "type": "string",
                        "public": true,
                        "primaryKey": true
                    },
                    {
                        "name": "order.created_at",
                        "type": "time",
                        "public": true,
                        "primaryKey": false
                    },
                ],
                "segments": [],
                "collectionId": "b8d8308e-4f45-42e5-9c08-d4c4ba6db7f6",
                "publicationId": "299e7d07-db2f-4050-88c3-40afd7603807"
            }
        ],
        "pageCount": 1,
        "pageNumber": 1,
        "pageSize": 25,
        "totalCount": 2
    }
    ```

    🔗 [**API Documentation**](https://service.peak.ai/semantic-layer/api-docs/index.htm#/Metrics/get_api_v1_metrics)
    """
    client: Metric = ctx.obj["client"]
    output: Writer = ctx.obj["writer"]

    # Collect the pagination/filter arguments once and fan them out.
    list_kwargs: Dict[str, Any] = {
        "page_size": page_size,
        "page_number": page_number,
        "namespace": namespace,
        "type": type,
        "return_iterator": False,
    }

    with output.pager():
        output.write(client.list(**list_kwargs))
438
+
439
+
440
@app.command(short_help="Create metrics collection.")
def create_collection(
    ctx: typer.Context,
    file: Annotated[
        Optional[str],
        typer.Argument(
            ...,
            help="Path to the file that defines the body for this operation, supports both `yaml` file or a `jinja` template.",
        ),
    ] = None,
    params_file: str = args.TEMPLATE_PARAMS_FILE,
    params: List[str] = args.TEMPLATE_PARAMS,
    # Fixed annotations: these options default to typer.Option(None), so their
    # static type is Optional[str] (matches how `publish` annotates
    # `artifact_path`); the CLI interface is unchanged.
    name: Optional[str] = _COLLECTION_NAME,
    scope: Optional[str] = _COLLECTION_SCOPE,
    description: Optional[str] = _COLLECTION_DESCRIPTION,
    artifact_path: Optional[str] = _ARTIFACT_PATH,
    dry_run: Optional[bool] = DRY_RUN,  # noqa: ARG001
    output_type: Optional[OutputTypesNoTable] = OUTPUT_TYPES,  # noqa: ARG001
    generate: Optional[bool] = GENERATE_YAML,  # noqa: ARG001
) -> None:
    """***Create*** metrics collection.

    \b
    🧩 ***Input file schema(yaml):***<br/>
    ```yaml
    body (map):
        name (str): Name of the metric collection.
        scope (str): Scope of the metrics artifact.
        description (str | required: false): Description of the metric collection.
    artifact (map):
        path (str): Path to the artifact.
    ```

    \b
    📝 ***Example usage:***
    ```bash
    peak metrics create-collection '/path/to/metrics.yaml' --params-file '/path/to/values.yaml'
    ```

    We can also provide the required parameters from the command line or combine the YAML template and command line arguments in which case the command line arguments will take precedence.

    \b
    📝 ***Example usage without yaml:***
    ```bash
    peak metrics create-collection --artifact-path <path> --name <name> --scope <scope> --description <description>
    ```

    \b
    🆗 ***Response:***
    ```json
    {
        "artifactId": "7dc0feaa-be90-467b-9c3a-009a234e4b2b",
        "collectionId": "bc8b6ef5-d2f6-4b7f-9365-0261b43997c9",
    }
    ```

    🔗 [**API Documentation**](https://service.peak.ai/semantic-layer/api-docs/index.htm#/Collections/post_api_v1_metrics_collections)
    """
    metrics_client: Metric = ctx.obj["client"]
    writer: Writer = ctx.obj["writer"]

    # CLI flags that override values supplied by the YAML template body.
    user_options: Dict[str, Any] = variables_to_dict(
        name,
        scope,
        description,
    )

    body: Dict[str, Any] = {}
    if file:
        body = helpers.template_handler(file=file, params_file=params_file, params=params)
        # Drop any template keys that `create_collection` does not accept.
        body = helpers.remove_unknown_args(body, metrics_client.create_collection)

    updated_body = combine_dictionaries(body.get("body") or {}, user_options)
    artifact = helpers.get_updated_artifacts(body, artifact_path, None)

    with writer.pager():
        response: Dict[str, Any] = metrics_client.create_collection(body=updated_body, artifact=artifact)  # type: ignore # noqa: PGH003
        writer.write(response)
518
+
519
+
520
@app.command(short_help="Delete the metrics.")
def delete(
    ctx: typer.Context,
    namespace: Optional[str] = _NAMESPACE,
    measures: Optional[List[str]] = _MEASURES_DELETE,
    publication_id: Optional[str] = _PUBLICATION_ID,
    *,
    dry_run: bool = DRY_RUN,  # noqa: ARG001
    output_type: OutputTypesNoTable = OUTPUT_TYPES,  # noqa: ARG001
) -> None:
    """***Delete*** one or more measures.

    Measures can either be deleted by passing a namespace and a list of measures or
    by giving a publication id which would delete all the measures associated with that publication. If both are passed,
    publication id takes priority.

    \b
    📝 ***Example usage:***
    ```bash
    # Delete using namespace and measure names
    peak metrics delete --namespace <namespace> --measures <measure> --measures <measure>
    # Delete using publication id
    peak metrics delete --publication-id
    ```

    \b
    🆗 ***Response:***
    ```json
    {}
    ```

    🔗 [**API Documentation**](https://service.peak.ai/semantic-layer/api-docs/index.htm#/Metrics/delete_api_v1_metrics_publications__publicationId_)
    """
    client: Metric = ctx.obj["client"]
    output: Writer = ctx.obj["writer"]

    # Repeated --measures flags arrive as a list; normalise them first.
    measure_names: List[str] = parse_list_of_strings(measures) if measures else []

    with output.pager():
        result: Dict[str, Any] = client.delete(
            namespace=namespace,
            measures=measure_names,
            publication_id=publication_id,
        )
        output.write(result)
565
+
566
+
567
@app.command(short_help="Delete metrics collection.")
def delete_collection(
    ctx: typer.Context,
    collection_id: str = typer.Argument(..., help="ID of the metric collection."),
    dry_run: Optional[bool] = DRY_RUN,  # noqa: ARG001
    output_type: Optional[OutputTypesNoTable] = OUTPUT_TYPES,  # noqa: ARG001
) -> None:
    """***Delete*** metrics collection.

    \b
    📝 ***Example usage:***
    ```bash
    peak metrics delete-collection <collection_id>
    ```

    \b
    🆗 ***Response:***
    ```json
    {}
    ```

    🔗 [**API Documentation**](https://service.peak.ai/semantic-layer/api-docs/index.htm#/Collections/delete_api_v1_metrics_collections__collectionId_)
    """
    client: Metric = ctx.obj["client"]
    output: Writer = ctx.obj["writer"]

    with output.pager():
        deletion_result: Dict[str, Any] = client.delete_collection(collection_id=collection_id)
        output.write(deletion_result)
596
+
597
+
598
@app.command(short_help="List metrics collections.")
def list_collections(
    ctx: typer.Context,
    page_size: Optional[int] = args.PAGE_SIZE,
    page_number: Optional[int] = args.PAGE_NUMBER,
    id: Optional[List[str]] = _COLLECTION_IDS,  # noqa: A002
    scope: Optional[List[str]] = _COLLECTION_SCOPES,
    paging: Optional[bool] = PAGING,  # noqa: ARG001
    output_type: Optional[OutputTypes] = OUTPUT_TYPES,  # noqa: ARG001
) -> None:
    """***List*** metrics collections.

    \b
    📝 ***Example usage:***<br/>
    ```bash
    peak metrics list-collections --page-size 25 --page-number 1 --id <collection_id_1> --id <collection_id_2> --scope <scope>
    ```

    \b
    🆗 ***Response:***<br/>
    ```json
    {
        "collections": [
            {
                "id": "b8d8308e-4f45-42e5-9c08-d4c4ba6db7f6",
                "name": "product",
                "tenant": "tenant-name",
                "scope": "PUBLIC",
                "description": "Product metrics",
                "artifactId": "7dc0feaa-be90-467b-9c3a-009a234e4b2b",
                "createdAt": "2024-01-26T00:00:00Z",
                "createdBy": "someone@peak.ai",
                "updatedAt": "2024-01-26T00:00:00Z",
                "updatedBy": "someone@peak.ai"
            }
        ],
        "pageCount": 1,
        "pageNumber": 1,
        "pageSize": 25,
        "totalCount": 1
    }
    ```

    🔗 [**API Documentation**](https://service.peak.ai/semantic-layer/api-docs/index.htm#/Collections/get_api_v1_metrics_collections)
    """
    client: Metric = ctx.obj["client"]
    output: Writer = ctx.obj["writer"]

    # Normalise the repeatable --id / --scope flags before the call.
    collection_ids: List[str] = parse_list_of_strings(id) if id else []
    collection_scopes: List[str] = parse_list_of_strings(scope) if scope else []

    with output.pager():
        output.write(
            client.list_collections(
                page_size=page_size,
                page_number=page_number,
                id=collection_ids,
                scope=collection_scopes,
                return_iterator=False,
            ),
        )
655
+
656
+
657
@app.command(short_help="List namespaces.")
def list_namespaces(
    ctx: typer.Context,
    page_size: Optional[int] = args.PAGE_SIZE,
    page_number: Optional[int] = args.PAGE_NUMBER,
    paging: Optional[bool] = PAGING,  # noqa: ARG001
    output_type: Optional[OutputTypes] = OUTPUT_TYPES,  # noqa: ARG001
) -> None:
    """***List*** namespaces.

    \b
    📝 ***Example usage:***<br/>
    ```bash
    peak metrics list-namespaces --page-size 25 --page-number 1
    ```

    \b
    🆗 ***Response:***<br/>
    ```json
    {
        "namespaces": [
            {
                "name": "default",
                "models": [
                    {
                        "name": "stocks",
                        "publicationId": "5503a4df-fa33-4932-9f60-9e3930f37f65",
                        "type": "cube"
                    }
                ]
            }
        ],
        "pageCount": 1,
        "pageNumber": 1,
        "pageSize": 25,
        "totalCount": 1
    }
    ```

    🔗 [**API Documentation**](https://service.peak.ai/semantic-layer/api-docs/index.htm#/Namespaces/get_api_v1_namespaces)
    """
    client: Metric = ctx.obj["client"]
    output: Writer = ctx.obj["writer"]

    with output.pager():
        output.write(
            client.list_namespaces(
                page_size=page_size,
                page_number=page_number,
                return_iterator=False,
            ),
        )