snowflake-cli 3.2.2__py3-none-any.whl → 3.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. snowflake/cli/__about__.py +1 -1
  2. snowflake/cli/_app/constants.py +4 -0
  3. snowflake/cli/_app/snow_connector.py +12 -0
  4. snowflake/cli/_app/telemetry.py +10 -3
  5. snowflake/cli/_plugins/connection/util.py +12 -19
  6. snowflake/cli/_plugins/helpers/commands.py +207 -1
  7. snowflake/cli/_plugins/nativeapp/artifacts.py +10 -4
  8. snowflake/cli/_plugins/nativeapp/codegen/compiler.py +41 -17
  9. snowflake/cli/_plugins/nativeapp/codegen/setup/native_app_setup_processor.py +7 -0
  10. snowflake/cli/_plugins/nativeapp/codegen/snowpark/python_processor.py +4 -1
  11. snowflake/cli/_plugins/nativeapp/codegen/templates/templates_processor.py +42 -32
  12. snowflake/cli/_plugins/nativeapp/commands.py +92 -2
  13. snowflake/cli/_plugins/nativeapp/constants.py +5 -0
  14. snowflake/cli/_plugins/nativeapp/entities/application.py +221 -288
  15. snowflake/cli/_plugins/nativeapp/entities/application_package.py +772 -89
  16. snowflake/cli/_plugins/nativeapp/entities/application_package_child_interface.py +43 -0
  17. snowflake/cli/_plugins/nativeapp/feature_flags.py +5 -1
  18. snowflake/cli/_plugins/nativeapp/release_channel/__init__.py +13 -0
  19. snowflake/cli/_plugins/nativeapp/release_channel/commands.py +212 -0
  20. snowflake/cli/_plugins/nativeapp/release_directive/__init__.py +13 -0
  21. snowflake/cli/_plugins/nativeapp/release_directive/commands.py +165 -0
  22. snowflake/cli/_plugins/nativeapp/same_account_install_method.py +9 -17
  23. snowflake/cli/_plugins/nativeapp/sf_facade_exceptions.py +80 -0
  24. snowflake/cli/_plugins/nativeapp/sf_sql_facade.py +999 -75
  25. snowflake/cli/_plugins/nativeapp/utils.py +11 -0
  26. snowflake/cli/_plugins/nativeapp/v2_conversions/compat.py +5 -1
  27. snowflake/cli/_plugins/nativeapp/version/commands.py +31 -4
  28. snowflake/cli/_plugins/notebook/manager.py +4 -2
  29. snowflake/cli/_plugins/snowpark/snowpark_entity.py +234 -4
  30. snowflake/cli/_plugins/spcs/common.py +129 -0
  31. snowflake/cli/_plugins/spcs/services/commands.py +134 -14
  32. snowflake/cli/_plugins/spcs/services/manager.py +169 -1
  33. snowflake/cli/_plugins/stage/manager.py +12 -4
  34. snowflake/cli/_plugins/streamlit/manager.py +8 -1
  35. snowflake/cli/_plugins/streamlit/streamlit_entity.py +153 -2
  36. snowflake/cli/_plugins/workspace/commands.py +3 -2
  37. snowflake/cli/_plugins/workspace/manager.py +8 -4
  38. snowflake/cli/api/cli_global_context.py +22 -1
  39. snowflake/cli/api/config.py +6 -2
  40. snowflake/cli/api/connections.py +12 -1
  41. snowflake/cli/api/constants.py +9 -1
  42. snowflake/cli/api/entities/common.py +85 -0
  43. snowflake/cli/api/entities/utils.py +9 -8
  44. snowflake/cli/api/errno.py +60 -3
  45. snowflake/cli/api/feature_flags.py +20 -4
  46. snowflake/cli/api/metrics.py +21 -27
  47. snowflake/cli/api/project/definition_conversion.py +1 -2
  48. snowflake/cli/api/project/schemas/project_definition.py +27 -6
  49. snowflake/cli/api/project/schemas/v1/streamlit/streamlit.py +1 -1
  50. snowflake/cli/api/project/util.py +45 -0
  51. {snowflake_cli-3.2.2.dist-info → snowflake_cli-3.3.0.dist-info}/METADATA +12 -12
  52. {snowflake_cli-3.2.2.dist-info → snowflake_cli-3.3.0.dist-info}/RECORD +55 -50
  53. {snowflake_cli-3.2.2.dist-info → snowflake_cli-3.3.0.dist-info}/WHEEL +1 -1
  54. {snowflake_cli-3.2.2.dist-info → snowflake_cli-3.3.0.dist-info}/entry_points.txt +0 -0
  55. {snowflake_cli-3.2.2.dist-info → snowflake_cli-3.3.0.dist-info}/licenses/LICENSE +0 -0
snowflake/cli/_plugins/nativeapp/utils.py
@@ -96,3 +96,14 @@ def verify_no_directories(paths_to_sync: Iterable[Path]):
 def verify_exists(path: Path):
     if not path.exists():
         raise ClickException(f"The following path does not exist: {path}")
+
+
+def sanitize_dir_name(dir_name: str) -> str:
+    """
+    Returns a string that is safe to use as a directory name.
+    For simplicity, this function is over restricitive: it strips non alphanumeric characters,
+    unless listed in the allow list. Additional characters can be allowed in the future, but
+    we need to be careful to consider both Unix/Windows directory naming rules.
+    """
+    allowed_chars = [" ", "_"]
+    return "".join(char for char in dir_name if char in allowed_chars or char.isalnum())
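For illustration, a minimal sketch of how the new helper behaves, assuming it is importable from snowflake/cli/_plugins/nativeapp/utils.py as the file list indicates; the input strings are made up:

# Illustrative only: space and underscore are on the allow list, every other
# non-alphanumeric character (dots, slashes, parentheses, ...) is dropped.
from snowflake.cli._plugins.nativeapp.utils import sanitize_dir_name

assert sanitize_dir_name("My App (v2.0)") == "My App v20"
assert sanitize_dir_name("pkg/../etc") == "pkgetc"
assert sanitize_dir_name("release_1 candidate") == "release_1 candidate"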
snowflake/cli/_plugins/nativeapp/v2_conversions/compat.py
@@ -217,7 +217,11 @@ def force_project_definition_v2(
         entities_to_keep.add(app_definition.entity_id)
         kwargs["app_entity_id"] = app_definition.entity_id
     for entity_id in list(original_pdf.entities):
-        if entity_id not in entities_to_keep:
+        entity_type = original_pdf.entities[entity_id].type.lower()
+        if (
+            entity_type in ["application", "application package"]
+            and entity_id not in entities_to_keep
+        ):
             # This happens after templates are rendered,
             # so we can safely remove the entity
             del original_pdf.entities[entity_id]
snowflake/cli/_plugins/nativeapp/version/commands.py
@@ -18,6 +18,7 @@ import logging
 from typing import Optional
 
 import typer
+from snowflake.cli._plugins.nativeapp.artifacts import VersionInfo
 from snowflake.cli._plugins.nativeapp.common_flags import ForceOption, InteractiveOption
 from snowflake.cli._plugins.nativeapp.v2_conversions.compat import (
     force_project_definition_v2,
@@ -29,7 +30,14 @@ from snowflake.cli.api.commands.decorators import (
 )
 from snowflake.cli.api.commands.snow_typer import SnowTyperFactory
 from snowflake.cli.api.entities.common import EntityActions
-from snowflake.cli.api.output.types import CommandResult, MessageResult, QueryResult
+from snowflake.cli.api.output.formats import OutputFormat
+from snowflake.cli.api.output.types import (
+    CollectionResult,
+    CommandResult,
+    MessageResult,
+    ObjectResult,
+)
+from snowflake.cli.api.project.util import to_identifier
 
 app = SnowTyperFactory(
     name="version",
@@ -64,6 +72,12 @@ def create(
         help="When enabled, the Snowflake CLI skips checking if your project has any untracked or stages files in git. Default: unset.",
         is_flag=True,
     ),
+    from_stage: bool = typer.Option(
+        False,
+        "--from-stage",
+        help="When enabled, the Snowflake CLI creates a version from the current application package stage without syncing to the stage first.",
+        is_flag=True,
+    ),
     interactive: bool = InteractiveOption,
     force: Optional[bool] = ForceOption,
     **options,
@@ -78,7 +92,7 @@ def create(
         project_root=cli_context.project_root,
     )
     package_id = options["package_entity_id"]
-    ws.perform_action(
+    result: VersionInfo = ws.perform_action(
        package_id,
        EntityActions.VERSION_CREATE,
        version=version,
@@ -87,8 +101,21 @@ def create(
         force=force,
         interactive=interactive,
         skip_git_check=skip_git_check,
+        from_stage=from_stage,
     )
-    return MessageResult(f"Version create is now complete.")
+
+    message = "Version create is now complete."
+    if cli_context.output_format == OutputFormat.JSON:
+        return ObjectResult(
+            {
+                "message": message,
+                "version": to_identifier(result.version_name),
+                "patch": result.patch_number,
+                "label": result.label,
+            }
+        )
+    else:
+        return MessageResult(message)
 
 
 @app.command("list", requires_connection=True)
@@ -110,7 +137,7 @@ def version_list(
         package_id,
         EntityActions.VERSION_LIST,
     )
-    return QueryResult(cursor)
+    return CollectionResult(cursor)
 
 
 @app.command(requires_connection=True)
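For reference, a minimal sketch of the structured result the updated create command returns when the output format is JSON. The field names come from the ObjectResult built above; the values are illustrative and would actually come from the VersionInfo returned by the VERSION_CREATE action:

# Illustrative shape of the JSON result; all values below are made up.
example_json_result = {
    "message": "Version create is now complete.",
    "version": "V1",  # to_identifier(result.version_name)
    "patch": 0,       # result.patch_number
    "label": None,    # result.label
}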
snowflake/cli/_plugins/notebook/manager.py
@@ -21,6 +21,7 @@ from snowflake.cli._plugins.notebook.types import NotebookStagePath
 from snowflake.cli.api.cli_global_context import get_cli_context
 from snowflake.cli.api.identifiers import FQN
 from snowflake.cli.api.sql_execution import SqlExecutionMixin
+from snowflake.cli.api.stage_path import StagePath
 
 
 class NotebookManager(SqlExecutionMixin):
@@ -40,8 +41,9 @@ class NotebookManager(SqlExecutionMixin):
         """Parses notebook file path to pathlib.Path."""
         if not notebook_file.endswith(".ipynb"):
             raise NotebookStagePathError(notebook_file)
-        stage_path = Path(notebook_file)
-        if len(stage_path.parts) < 2:
+        # we don't perform any operations on the path, so we don't need to differentiate git repository paths
+        stage_path = StagePath.from_stage_str(notebook_file)
+        if len(stage_path.parts) < 1:
             raise NotebookStagePathError(notebook_file)
 
         return stage_path
snowflake/cli/_plugins/snowpark/snowpark_entity.py
@@ -1,16 +1,228 @@
-from typing import Generic, TypeVar
+from enum import Enum
+from pathlib import Path
+from typing import Generic, List, Optional, TypeVar
 
+from click import ClickException
+from snowflake.cli._plugins.nativeapp.feature_flags import FeatureFlag
+from snowflake.cli._plugins.snowpark import package_utils
+from snowflake.cli._plugins.snowpark.common import DEFAULT_RUNTIME
+from snowflake.cli._plugins.snowpark.package.anaconda_packages import (
+    AnacondaPackages,
+    AnacondaPackagesManager,
+)
+from snowflake.cli._plugins.snowpark.package_utils import (
+    DownloadUnavailablePackagesResult,
+)
 from snowflake.cli._plugins.snowpark.snowpark_entity_model import (
     FunctionEntityModel,
     ProcedureEntityModel,
 )
+from snowflake.cli._plugins.snowpark.zipper import zip_dir
+from snowflake.cli._plugins.workspace.context import ActionContext
 from snowflake.cli.api.entities.common import EntityBase
+from snowflake.cli.api.secure_path import SecurePath
+from snowflake.connector import ProgrammingError
 
 T = TypeVar("T")
 
 
+class CreateMode(
+    str, Enum
+):  # This should probably be moved to some common place, think where
+    create = "CREATE"
+    create_or_replace = "CREATE OR REPLACE"
+    create_if_not_exists = "CREATE IF NOT EXISTS"
+
+
 class SnowparkEntity(EntityBase[Generic[T]]):
-    pass
+    def __init__(self, *args, **kwargs):
+
+        if not FeatureFlag.ENABLE_NATIVE_APP_CHILDREN.is_enabled():
+            raise NotImplementedError("Snowpark entity is not implemented yet")
+        super().__init__(*args, **kwargs)
+
+    def action_bundle(
+        self,
+        action_ctx: ActionContext,
+        output_dir: Path | None,
+        ignore_anaconda: bool,
+        skip_version_check: bool,
+        index_url: str | None = None,
+        allow_shared_libraries: bool = False,
+        *args,
+        **kwargs,
+    ) -> List[Path]:
+        return self.bundle(
+            output_dir,
+            ignore_anaconda,
+            skip_version_check,
+            index_url,
+            allow_shared_libraries,
+        )
+
+    def action_deploy(
+        self, action_ctx: ActionContext, mode: CreateMode, *args, **kwargs
+    ):
+        # TODO: After introducing bundle map, we should introduce file copying part here
+        return self._execute_query(self.get_deploy_sql(mode))
+
+    def action_drop(self, action_ctx: ActionContext, *args, **kwargs):
+        return self._execute_query(self.get_drop_sql())
+
+    def action_describe(self, action_ctx: ActionContext, *args, **kwargs):
+        return self._execute_query(self.get_describe_sql())
+
+    def action_execute(
+        self,
+        action_ctx: ActionContext,
+        execution_arguments: List[str] | None = None,
+        *args,
+        **kwargs,
+    ):
+        return self._execute_query(self.get_execute_sql(execution_arguments))
+
+    def bundle(
+        self,
+        output_dir: Path | None,
+        ignore_anaconda: bool,
+        skip_version_check: bool,
+        index_url: str | None = None,
+        allow_shared_libraries: bool = False,
+    ) -> List[Path]:
+        """
+        Bundles the entity artifacts and dependencies into a directory.
+        Parameters:
+            output_dir: The directory to output the bundled artifacts to. Defaults to output dir in project root
+            ignore_anaconda: If True, ignores anaconda chceck and tries to download all packages using pip
+            skip_version_check: If True, skips version check when downloading packages
+            index_url: The index URL to use when downloading packages, if none set - default pip index is used (in most cases- Pypi)
+            allow_shared_libraries: If not set to True, using dependency with .so/.dll files will raise an exception
+        Returns:
+        """
+        # 0 Create a directory for the entity
+        if not output_dir:
+            output_dir = self.root / "output" / "bundle" / "snowpark"
+        output_dir.mkdir(parents=True, exist_ok=True)  # type: ignore
+
+        output_files = []
+
+        # 1 Check if requirements exits
+        if (self.root / "requirements.txt").exists():
+            download_results = self._process_requirements(
+                bundle_dir=output_dir,  # type: ignore
+                archive_name="dependencies.zip",
+                requirements_file=SecurePath(self.root / "requirements.txt"),
+                ignore_anaconda=ignore_anaconda,
+                skip_version_check=skip_version_check,
+                index_url=index_url,
+                allow_shared_libraries=allow_shared_libraries,
+            )
+
+        # 3 get the artifacts list
+        artifacts = self.model.artifacts
+
+        for artifact in artifacts:
+            output_file = output_dir / artifact.dest / artifact.src.name
+
+            if artifact.src.is_file():
+                output_file.mkdir(parents=True, exist_ok=True)
+                SecurePath(artifact.src).copy(output_file)
+            elif artifact.is_dir():
+                output_file.mkdir(parents=True, exist_ok=True)
+
+            output_files.append(output_file)
+
+        return output_files
+
+    def check_if_exists(
+        self, action_ctx: ActionContext
+    ) -> bool:  # TODO it should return current state, so we know if update is necessary
+        try:
+            current_state = self.action_describe(action_ctx)
+            return True
+        except ProgrammingError:
+            return False
+
+    def get_deploy_sql(self, mode: CreateMode):
+        query = [
+            f"{mode.value} {self.model.type.upper()} {self.identifier}",
+            "COPY GRANTS",
+            f"RETURNS {self.model.returns}",
+            f"LANGUAGE PYTHON",
+            f"RUNTIME_VERSION '{self.model.runtime or DEFAULT_RUNTIME}'",
+            f"IMPORTS={','.join(self.model.imports)}",  # TODO: Add source files here after introducing bundlemap
+            f"HANDLER='{self.model.handler}'",
+        ]
+
+        if self.model.external_access_integrations:
+            query.append(self.model.get_external_access_integrations_sql())
+
+        if self.model.secrets:
+            query.append(self.model.get_secrets_sql())
+
+        if self.model.type == "procedure" and self.model.execute_as_caller:
+            query.append("EXECUTE AS CALLER")
+
+        return "\n".join(query)
+
+    def get_execute_sql(self, execution_arguments: List[str] | None = None):
+        raise NotImplementedError
+
+    def _process_requirements(  # TODO: maybe leave all the logic with requirements here - so download, write requirements file etc.
+        self,
+        bundle_dir: Path,
+        archive_name: str,  # TODO: not the best name, think of something else
+        requirements_file: Optional[SecurePath],
+        ignore_anaconda: bool,
+        skip_version_check: bool = False,
+        index_url: Optional[str] = None,
+        allow_shared_libraries: bool = False,
+    ) -> DownloadUnavailablePackagesResult:
+        """
+        Processes the requirements file and downloads the dependencies
+        Parameters:
+
+        """
+        anaconda_packages_manager = AnacondaPackagesManager()
+        with SecurePath.temporary_directory() as tmp_dir:
+            requirements = package_utils.parse_requirements(requirements_file)
+            anaconda_packages = (
+                AnacondaPackages.empty()
+                if ignore_anaconda
+                else anaconda_packages_manager.find_packages_available_in_snowflake_anaconda()
+            )
+            download_result = package_utils.download_unavailable_packages(
+                requirements=requirements,
+                target_dir=tmp_dir,
+                anaconda_packages=anaconda_packages,
+                skip_version_check=skip_version_check,
+                pip_index_url=index_url,
+            )
+
+            if download_result.anaconda_packages:
+                anaconda_packages.write_requirements_file_in_snowflake_format(
+                    file_path=SecurePath(bundle_dir / "requirements.txt"),
+                    requirements=download_result.anaconda_packages,
+                )
+
+            if download_result.downloaded_packages_details:
+                if (
+                    package_utils.detect_and_log_shared_libraries(
+                        download_result.downloaded_packages_details
+                    )
+                    and not allow_shared_libraries
+                ):
+                    raise ClickException(
+                        "Some packages contain shared (.so/.dll) libraries. "
+                        "Try again with allow_shared_libraries_flag."
+                    )
+
+            zip_dir(
+                source=tmp_dir,
+                dest_zip=bundle_dir / archive_name,
+            )
+
+        return download_result
 
 
 class FunctionEntity(SnowparkEntity[FunctionEntityModel]):
@@ -18,7 +230,17 @@ class FunctionEntity(SnowparkEntity[FunctionEntityModel]):
     A single UDF
     """
 
-    pass
+    # TO THINK OF
+    # Where will we get imports? Should we rely on bundle map? Or should it be self-sufficient in this matter?
+
+    def get_execute_sql(
+        self, execution_arguments: List[str] | None = None, *args, **kwargs
+    ):
+        if not execution_arguments:
+            execution_arguments = []
+        return (
+            f"SELECT {self.fqn}({', '.join([str(arg) for arg in execution_arguments])})"
+        )
 
 
 class ProcedureEntity(SnowparkEntity[ProcedureEntityModel]):
@@ -26,4 +248,12 @@ class ProcedureEntity(SnowparkEntity[ProcedureEntityModel]):
     A stored procedure
     """
 
-    pass
+    def get_execute_sql(
+        self,
+        execution_arguments: List[str] | None = None,
+    ):
+        if not execution_arguments:
+            execution_arguments = []
+        return (
+            f"CALL {self.fqn}({', '.join([str(arg) for arg in execution_arguments])})"
+        )
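To make the new execute-SQL generation concrete, a standalone sketch of the strings the two get_execute_sql implementations above produce; the fully qualified name and arguments are hypothetical and the snippet deliberately avoids instantiating the entity classes:

# Hypothetical FQN and arguments; the f-strings mirror the implementations above.
fqn = "my_db.my_schema.my_func"
execution_arguments = ["1", "'a'"]
call_args = ", ".join(str(arg) for arg in execution_arguments)

assert f"SELECT {fqn}({call_args})" == "SELECT my_db.my_schema.my_func(1, 'a')"  # FunctionEntity
assert f"CALL {fqn}({call_args})" == "CALL my_db.my_schema.my_func(1, 'a')"      # ProcedureEntity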
snowflake/cli/_plugins/spcs/common.py
@@ -14,7 +14,9 @@
 
 from __future__ import annotations
 
+import json
 import sys
+from datetime import datetime
 from typing import TextIO
 
 from click import ClickException
@@ -23,6 +25,22 @@ from snowflake.cli.api.exceptions import ObjectAlreadyExistsError
 from snowflake.cli.api.project.util import unquote_identifier
 from snowflake.connector.errors import ProgrammingError
 
+EVENT_COLUMN_NAMES = [
+    "TIMESTAMP",
+    "START_TIMESTAMP",
+    "OBSERVED_TIMESTAMP",
+    "TRACE",
+    "RESOURCE",
+    "RESOURCE_ATTRIBUTES",
+    "SCOPE",
+    "SCOPE_ATTRIBUTES",
+    "RECORD_TYPE",
+    "RECORD",
+    "RECORD_ATTRIBUTES",
+    "VALUE",
+    "EXEMPLARS",
+]
+
 if not sys.stdout.closed and sys.stdout.isatty():
     GREEN = "\033[32m"
     BLUE = "\033[34m"
@@ -124,5 +142,116 @@ def new_logs_only(prev_log_records: list[str], new_log_records: list[str]) -> li
     return new_log_records_sorted
 
 
+def build_resource_clause(
+    service_name: str, instance_id: str, container_name: str
+) -> str:
+    resource_filters = []
+    if service_name:
+        resource_filters.append(
+            f"resource_attributes:\"snow.service.name\" = '{service_name}'"
+        )
+    if instance_id:
+        resource_filters.append(
+            f"(resource_attributes:\"snow.service.instance\" = '{instance_id}' "
+            f"OR resource_attributes:\"snow.service.container.instance\" = '{instance_id}')"
+        )
+    if container_name:
+        resource_filters.append(
+            f"resource_attributes:\"snow.service.container.name\" = '{container_name}'"
+        )
+    return " and ".join(resource_filters) if resource_filters else "1=1"
+
+
+def build_time_clauses(
+    since: str | datetime | None, until: str | datetime | None
+) -> tuple[str, str]:
+    since_clause = ""
+    until_clause = ""
+
+    if isinstance(since, datetime):
+        since_clause = f"and timestamp >= '{since}'"
+    elif isinstance(since, str) and since:
+        since_clause = f"and timestamp >= sysdate() - interval '{since}'"
+
+    if isinstance(until, datetime):
+        until_clause = f"and timestamp <= '{until}'"
+    elif isinstance(until, str) and until:
+        until_clause = f"and timestamp <= sysdate() - interval '{until}'"
+
+    return since_clause, until_clause
+
+
+def format_event_row(event_dict: dict) -> dict:
+    try:
+        resource_attributes = json.loads(event_dict.get("RESOURCE_ATTRIBUTES", "{}"))
+        record_attributes = json.loads(event_dict.get("RECORD_ATTRIBUTES", "{}"))
+        record = json.loads(event_dict.get("RECORD", "{}"))
+
+        database_name = resource_attributes.get("snow.database.name", "N/A")
+        schema_name = resource_attributes.get("snow.schema.name", "N/A")
+        service_name = resource_attributes.get("snow.service.name", "N/A")
+        instance_name = resource_attributes.get("snow.service.instance", "N/A")
+        container_name = resource_attributes.get("snow.service.container.name", "N/A")
+        event_name = record_attributes.get("event.name", "Unknown Event")
+        event_value = event_dict.get("VALUE", "Unknown Value")
+        severity = record.get("severity_text", "Unknown Severity")
+
+        return {
+            "TIMESTAMP": event_dict.get("TIMESTAMP", "N/A"),
+            "DATABASE NAME": database_name,
+            "SCHEMA NAME": schema_name,
+            "SERVICE NAME": service_name,
+            "INSTANCE ID": instance_name,
+            "CONTAINER NAME": container_name,
+            "SEVERITY": severity,
+            "EVENT NAME": event_name,
+            "EVENT VALUE": event_value,
+        }
+    except (json.JSONDecodeError, KeyError) as e:
+        raise RecordProcessingError(f"Error processing event row.")
+
+
+def format_metric_row(metric_dict: dict) -> dict:
+    try:
+        resource_attributes = json.loads(metric_dict["RESOURCE_ATTRIBUTES"])
+        record = json.loads(metric_dict["RECORD"])
+
+        database_name = resource_attributes.get("snow.database.name", "N/A")
+        schema_name = resource_attributes.get("snow.schema.name", "N/A")
+        service_name = resource_attributes.get("snow.service.name", "N/A")
+        instance_name = resource_attributes.get(
+            "snow.service.container.instance", "N/A"
+        )
+        container_name = resource_attributes.get("snow.service.container.name", "N/A")
+
+        metric_name = record["metric"].get("name", "Unknown Metric")
+        metric_value = metric_dict.get("VALUE", "Unknown Value")
+
+        return {
+            "TIMESTAMP": metric_dict.get("TIMESTAMP", "N/A"),
+            "DATABASE NAME": database_name,
+            "SCHEMA NAME": schema_name,
+            "SERVICE NAME": service_name,
+            "INSTANCE ID": instance_name,
+            "CONTAINER NAME": container_name,
+            "METRIC NAME": metric_name,
+            "METRIC VALUE": metric_value,
+        }
+    except (json.JSONDecodeError, KeyError) as e:
+        raise RecordProcessingError(f"Error processing metric row.")
+
+
+class RecordProcessingError(ClickException):
+    """Raised when processing an event or metric record fails due to invalid data."""
+
+    pass
+
+
+class SPCSEventTableError(ClickException):
+    """Raised when there is an issue related to the SPCS event table."""
+
+    pass
+
+
 class NoPropertiesProvidedError(ClickException):
     pass
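To make the new SPCS event-table helpers concrete, a minimal sketch of the filter clauses they build, assuming they are importable from snowflake/cli/_plugins/spcs/common.py as the file list indicates; the service, instance, and container names are made up:

# Hypothetical identifiers; the output mirrors the string templates above.
from snowflake.cli._plugins.spcs.common import build_resource_clause, build_time_clauses

clause = build_resource_clause("echo_service", "0", "main")
# Produces (single line, wrapped here for readability):
#   resource_attributes:"snow.service.name" = 'echo_service' and
#   (resource_attributes:"snow.service.instance" = '0' OR resource_attributes:"snow.service.container.instance" = '0') and
#   resource_attributes:"snow.service.container.name" = 'main'

since_clause, until_clause = build_time_clauses("1 hour", None)
# since_clause == "and timestamp >= sysdate() - interval '1 hour'"
# until_clause == ""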