snowflake-cli 3.11.0__py3-none-any.whl → 3.13.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. snowflake/cli/__about__.py +1 -1
  2. snowflake/cli/_app/cli_app.py +43 -1
  3. snowflake/cli/_app/commands_registration/builtin_plugins.py +1 -1
  4. snowflake/cli/_app/commands_registration/command_plugins_loader.py +14 -1
  5. snowflake/cli/_app/printing.py +153 -19
  6. snowflake/cli/_app/telemetry.py +25 -10
  7. snowflake/cli/_plugins/auth/__init__.py +0 -2
  8. snowflake/cli/_plugins/connection/commands.py +1 -78
  9. snowflake/cli/_plugins/dbt/commands.py +44 -19
  10. snowflake/cli/_plugins/dbt/constants.py +1 -1
  11. snowflake/cli/_plugins/dbt/manager.py +252 -47
  12. snowflake/cli/_plugins/dcm/commands.py +65 -90
  13. snowflake/cli/_plugins/dcm/manager.py +137 -50
  14. snowflake/cli/_plugins/logs/commands.py +7 -0
  15. snowflake/cli/_plugins/logs/manager.py +21 -1
  16. snowflake/cli/_plugins/nativeapp/entities/application_package.py +4 -1
  17. snowflake/cli/_plugins/nativeapp/sf_sql_facade.py +3 -1
  18. snowflake/cli/_plugins/object/manager.py +1 -0
  19. snowflake/cli/_plugins/snowpark/common.py +1 -0
  20. snowflake/cli/_plugins/snowpark/package/anaconda_packages.py +29 -5
  21. snowflake/cli/_plugins/snowpark/package_utils.py +44 -3
  22. snowflake/cli/_plugins/spcs/services/commands.py +19 -1
  23. snowflake/cli/_plugins/spcs/services/manager.py +17 -4
  24. snowflake/cli/_plugins/spcs/services/service_entity_model.py +5 -0
  25. snowflake/cli/_plugins/sql/lexer/types.py +1 -0
  26. snowflake/cli/_plugins/sql/repl.py +100 -26
  27. snowflake/cli/_plugins/sql/repl_commands.py +607 -0
  28. snowflake/cli/_plugins/sql/statement_reader.py +44 -20
  29. snowflake/cli/_plugins/streamlit/streamlit_entity.py +28 -2
  30. snowflake/cli/_plugins/streamlit/streamlit_entity_model.py +24 -4
  31. snowflake/cli/api/artifacts/bundle_map.py +32 -2
  32. snowflake/cli/api/artifacts/regex_resolver.py +54 -0
  33. snowflake/cli/api/artifacts/upload.py +5 -1
  34. snowflake/cli/api/artifacts/utils.py +12 -1
  35. snowflake/cli/api/cli_global_context.py +7 -0
  36. snowflake/cli/api/commands/decorators.py +7 -0
  37. snowflake/cli/api/commands/flags.py +24 -1
  38. snowflake/cli/api/console/abc.py +13 -2
  39. snowflake/cli/api/console/console.py +20 -0
  40. snowflake/cli/api/constants.py +9 -0
  41. snowflake/cli/api/entities/utils.py +10 -6
  42. snowflake/cli/api/feature_flags.py +3 -2
  43. snowflake/cli/api/identifiers.py +18 -1
  44. snowflake/cli/api/project/schemas/entities/entities.py +0 -6
  45. snowflake/cli/api/rendering/sql_templates.py +2 -0
  46. {snowflake_cli-3.11.0.dist-info → snowflake_cli-3.13.0.dist-info}/METADATA +7 -7
  47. {snowflake_cli-3.11.0.dist-info → snowflake_cli-3.13.0.dist-info}/RECORD +51 -54
  48. snowflake/cli/_plugins/auth/keypair/__init__.py +0 -0
  49. snowflake/cli/_plugins/auth/keypair/commands.py +0 -153
  50. snowflake/cli/_plugins/auth/keypair/manager.py +0 -331
  51. snowflake/cli/_plugins/dcm/dcm_project_entity_model.py +0 -59
  52. snowflake/cli/_plugins/sql/snowsql_commands.py +0 -331
  53. /snowflake/cli/_plugins/auth/{keypair/plugin_spec.py → plugin_spec.py} +0 -0
  54. {snowflake_cli-3.11.0.dist-info → snowflake_cli-3.13.0.dist-info}/WHEEL +0 -0
  55. {snowflake_cli-3.11.0.dist-info → snowflake_cli-3.13.0.dist-info}/entry_points.txt +0 -0
  56. {snowflake_cli-3.11.0.dist-info → snowflake_cli-3.13.0.dist-info}/licenses/LICENSE +0 -0
snowflake/cli/_plugins/dcm/manager.py

@@ -11,21 +11,75 @@
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  # See the License for the specific language governing permissions and
  # limitations under the License.
+ from contextlib import contextmanager, nullcontext
+ from pathlib import Path
+ from typing import Generator, List

- from typing import List
-
- from snowflake.cli._plugins.dcm.dcm_project_entity_model import DCMProjectEntityModel
+ import yaml
  from snowflake.cli._plugins.stage.manager import StageManager
+ from snowflake.cli.api.artifacts.upload import sync_artifacts_with_stage
  from snowflake.cli.api.commands.utils import parse_key_value_variables
+ from snowflake.cli.api.console.console import cli_console
+ from snowflake.cli.api.constants import (
+     DEFAULT_SIZE_LIMIT_MB,
+     ObjectType,
+     PatternMatchingType,
+ )
+ from snowflake.cli.api.exceptions import CliError
  from snowflake.cli.api.identifiers import FQN
+ from snowflake.cli.api.project.project_paths import ProjectPaths
+ from snowflake.cli.api.project.schemas.entities.common import PathMapping
+ from snowflake.cli.api.secure_path import SecurePath
  from snowflake.cli.api.sql_execution import SqlExecutionMixin
  from snowflake.cli.api.stage_path import StagePath
+ from snowflake.cli.api.utils.path_utils import is_stage_path
+
+ MANIFEST_FILE_NAME = "manifest.yml"
+ DCM_PROJECT_TYPE = "dcm_project"


  class DCMProjectManager(SqlExecutionMixin):
+     @contextmanager
+     def _collect_output(
+         self, project_identifier: FQN, output_path: str
+     ) -> Generator[str, None, None]:
+         """
+         Context manager for handling output path - creates temporary stage for local paths,
+         downloads files after execution, and ensures proper cleanup.
+
+         Args:
+             project_identifier: The DCM project identifier
+             output_path: Either a stage path (@stage/path) or local directory path
+
+         Yields:
+             str: The effective output path to use in the DCM command
+         """
+         temp_stage_for_local_output = None
+         stage_manager = StageManager()
+
+         if should_download_files := not is_stage_path(output_path):
+             temp_stage_fqn = FQN.from_resource(
+                 ObjectType.DCM_PROJECT, project_identifier, "OUTPUT_TMP_STAGE"
+             )
+             stage_manager.create(temp_stage_fqn, temporary=True)
+             effective_output_path = StagePath.from_stage_str(temp_stage_fqn.identifier)
+             temp_stage_for_local_output = (temp_stage_fqn.identifier, Path(output_path))
+         else:
+             effective_output_path = StagePath.from_stage_str(output_path)
+
+         yield effective_output_path.absolute_path()
+
+         if should_download_files:
+             assert temp_stage_for_local_output is not None
+             stage_path, local_path = temp_stage_for_local_output
+             stage_manager.get_recursive(stage_path=stage_path, dest_path=local_path)
+             cli_console.step(f"Plan output saved to: {local_path.resolve()}")
+         else:
+             cli_console.step(f"Plan output saved to: {output_path}")
+
      def execute(
          self,
-         project_name: FQN,
+         project_identifier: FQN,
          from_stage: str,
          configuration: str | None = None,
          variables: List[str] | None = None,
@@ -33,64 +87,97 @@ class DCMProjectManager(SqlExecutionMixin):
          alias: str | None = None,
          output_path: str | None = None,
      ):
+         with self._collect_output(project_identifier, output_path) if (
+             output_path and dry_run
+         ) else nullcontext() as output_stage:
+             query = f"EXECUTE DCM PROJECT {project_identifier.sql_identifier}"
+             if dry_run:
+                 query += " PLAN"
+             else:
+                 query += " DEPLOY"
+             if alias:
+                 query += f' AS "{alias}"'
+             if configuration or variables:
+                 query += f" USING"
+                 if configuration:
+                     query += f" CONFIGURATION {configuration}"
+                 if variables:
+                     query += StageManager.parse_execute_variables(
+                         parse_key_value_variables(variables)
+                     ).removeprefix(" using")
+             stage_path = StagePath.from_stage_str(from_stage)
+             query += f" FROM {stage_path.absolute_path()}"
+             if output_stage is not None:
+                 query += f" OUTPUT_PATH {output_stage}"
+             result = self.execute_query(query=query)

-         query = f"EXECUTE DCM PROJECT {project_name.sql_identifier}"
-         if dry_run:
-             query += " PLAN"
-         else:
-             query += " DEPLOY"
-         if alias:
-             query += f" AS {alias}"
-         if configuration or variables:
-             query += f" USING"
-             if configuration:
-                 query += f" CONFIGURATION {configuration}"
-             if variables:
-                 query += StageManager.parse_execute_variables(
-                     parse_key_value_variables(variables)
-                 ).removeprefix(" using")
-         stage_path = StagePath.from_stage_str(from_stage)
-         query += f" FROM {stage_path.absolute_path()}"
-         if output_path:
-             output_stage_path = StagePath.from_stage_str(output_path)
-             query += f" OUTPUT_PATH {output_stage_path.absolute_path()}"
-         return self.execute_query(query=query)
+         return result

-     def create(self, project: DCMProjectEntityModel) -> None:
-         query = f"CREATE DCM PROJECT {project.fqn.sql_identifier}"
+     def create(self, project_identifier: FQN) -> None:
+         query = f"CREATE DCM PROJECT {project_identifier.sql_identifier}"
          self.execute_query(query)

-     def _create_version(
-         self,
-         project_name: FQN,
-         from_stage: str,
-         alias: str | None = None,
-         comment: str | None = None,
-     ):
-         stage_path = StagePath.from_stage_str(from_stage)
-         query = f"ALTER DCM PROJECT {project_name.identifier} ADD VERSION"
-         if alias:
-             query += f" IF NOT EXISTS {alias}"
-         query += f" FROM {stage_path.absolute_path(at_prefix=True)}"
-         if comment:
-             query += f" COMMENT = '{comment}'"
-         return self.execute_query(query=query)
-
-     def list_versions(self, project_name: FQN):
-         query = f"SHOW VERSIONS IN DCM PROJECT {project_name.identifier}"
+     def list_deployments(self, project_identifier: FQN):
+         query = f"SHOW DEPLOYMENTS IN DCM PROJECT {project_identifier.identifier}"
          return self.execute_query(query=query)

      def drop_deployment(
          self,
-         project_name: FQN,
-         version_name: str,
+         project_identifier: FQN,
+         deployment_name: str,
          if_exists: bool = False,
      ):
          """
-         Drops a version from the DCM Project.
+         Drops a deployment from the DCM Project.
          """
-         query = f"ALTER DCM PROJECT {project_name.identifier} DROP VERSION"
+         query = f"ALTER DCM PROJECT {project_identifier.identifier} DROP DEPLOYMENT"
          if if_exists:
              query += " IF EXISTS"
-         query += f" {version_name}"
+         query += f' "{deployment_name}"'
          return self.execute_query(query=query)
+
+     @staticmethod
+     def sync_local_files(
+         project_identifier: FQN, source_directory: str | None = None
+     ) -> str:
+         source_path = (
+             SecurePath(source_directory).resolve()
+             if source_directory
+             else SecurePath.cwd()
+         )
+
+         dcm_manifest_file = source_path / MANIFEST_FILE_NAME
+         if not dcm_manifest_file.exists():
+             raise CliError(
+                 f"{MANIFEST_FILE_NAME} was not found in directory {source_path.path}"
+             )
+
+         with dcm_manifest_file.open(read_file_limit_mb=DEFAULT_SIZE_LIMIT_MB) as fd:
+             dcm_manifest = yaml.safe_load(fd)
+         object_type = dcm_manifest.get("type") if dcm_manifest else None
+         if object_type is None:
+             raise CliError(
+                 f"Manifest file type is undefined. Expected {DCM_PROJECT_TYPE}"
+             )
+         if object_type.lower() != DCM_PROJECT_TYPE:
+             raise CliError(
+                 f"Manifest file is defined for type {object_type}. Expected {DCM_PROJECT_TYPE}"
+             )
+
+         definitions = list(dcm_manifest.get("include_definitions", list()))
+         if MANIFEST_FILE_NAME not in definitions:
+             definitions.append(MANIFEST_FILE_NAME)
+
+         with cli_console.phase(f"Uploading definition files"):
+             stage_fqn = FQN.from_resource(
+                 ObjectType.DCM_PROJECT, project_identifier, "TMP_STAGE"
+             )
+             sync_artifacts_with_stage(
+                 project_paths=ProjectPaths(project_root=source_path.path),
+                 stage_root=stage_fqn.identifier,
+                 use_temporary_stage=True,
+                 artifacts=[PathMapping(src=definition) for definition in definitions],
+                 pattern_type=PatternMatchingType.REGEX,
+             )
+
+         return stage_fqn.identifier
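
With this change a dry run (EXECUTE DCM PROJECT ... PLAN) can write its output to a local directory: the manager stages the results in a temporary stage and downloads them afterwards. A minimal sketch of the SQL the reworked execute() assembles for that case is shown below; it is illustrative only, and the project, stage, and alias names are made up.

    # Sketch only - mirrors the query-building logic added above, not the CLI's API.
    def build_plan_query(project: str, from_stage: str, alias: str | None, output_stage: str | None) -> str:
        query = f"EXECUTE DCM PROJECT {project} PLAN"
        if alias:
            query += f' AS "{alias}"'
        query += f" FROM {from_stage}"
        if output_stage is not None:
            query += f" OUTPUT_PATH {output_stage}"
        return query

    print(build_plan_query(
        "MY_DB.PUBLIC.MY_PROJECT",         # made-up project identifier
        "@my_db.public.my_stage",          # made-up source stage
        "nightly",                         # made-up alias
        "@MY_DB.PUBLIC.OUTPUT_TMP_STAGE",  # stand-in for the temporary output stage
    ))
    # EXECUTE DCM PROJECT MY_DB.PUBLIC.MY_PROJECT PLAN AS "nightly" FROM @my_db.public.my_stage OUTPUT_PATH @MY_DB.PUBLIC.OUTPUT_TMP_STAGE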

snowflake/cli/_plugins/logs/commands.py

@@ -48,6 +48,11 @@ def get_logs(
          "--log-level",
          help="The log level to filter by. If not provided, INFO will be used",
      ),
+     partial_match: bool = typer.Option(
+         False,
+         "--partial",
+         help="Enable partial, case-insensitive matching for object names",
+     ),
      **options,
  ):
      """
@@ -75,6 +80,7 @@ def get_logs(
              refresh_time=refresh_time,
              event_table=event_table,
              log_level=log_level,
+             partial_match=partial_match,
          )
          logs = itertools.chain(
              (MessageResult(log.log_message) for logs in logs_stream for log in logs)
@@ -87,6 +93,7 @@ def get_logs(
              to_time=to_time,
              event_table=event_table,
              log_level=log_level,
+             partial_match=partial_match,
          )
          logs = (MessageResult(log.log_message) for log in logs_iterable) # type: ignore


snowflake/cli/_plugins/logs/manager.py

@@ -11,6 +11,7 @@ from snowflake.cli._plugins.logs.utils import (
  )
  from snowflake.cli._plugins.object.commands import NameArgument, ObjectArgument
  from snowflake.cli.api.identifiers import FQN
+ from snowflake.cli.api.project.util import escape_like_pattern
  from snowflake.cli.api.sql_execution import SqlExecutionMixin
  from snowflake.connector.cursor import SnowflakeCursor

@@ -24,6 +25,7 @@ class LogsManager(SqlExecutionMixin):
          from_time: Optional[datetime] = None,
          event_table: Optional[str] = None,
          log_level: Optional[str] = "INFO",
+         partial_match: bool = False,
      ) -> Iterable[List[LogsQueryRow]]:
          try:
              previous_end = from_time
@@ -36,6 +38,7 @@ class LogsManager(SqlExecutionMixin):
                      to_time=None,
                      event_table=event_table,
                      log_level=log_level,
+                     partial_match=partial_match,
                  ).fetchall()

                  if raw_logs:
@@ -56,6 +59,7 @@ class LogsManager(SqlExecutionMixin):
          to_time: Optional[datetime] = None,
          event_table: Optional[str] = None,
          log_level: Optional[str] = "INFO",
+         partial_match: bool = False,
      ) -> Iterable[LogsQueryRow]:
          """
          Basic function to get a single batch of logs from the server
@@ -68,6 +72,7 @@ class LogsManager(SqlExecutionMixin):
              to_time=to_time,
              event_table=event_table,
              log_level=log_level,
+             partial_match=partial_match,
          )

          return sanitize_logs(logs)
@@ -80,10 +85,25 @@ class LogsManager(SqlExecutionMixin):
          to_time: Optional[datetime] = None,
          event_table: Optional[str] = None,
          log_level: Optional[str] = "INFO",
+         partial_match: bool = False,
      ) -> SnowflakeCursor:

          table = event_table if event_table else "SNOWFLAKE.TELEMETRY.EVENTS"

+         # Escape single quotes in object_name to prevent SQL injection
+         escaped_object_name = str(object_name).replace("'", "''")
+
+         # Build the object name condition based on partial_match flag
+         if partial_match:
+             # Use ILIKE for case-insensitive partial matching with wildcards
+             escaped_pattern = escape_like_pattern(
+                 escaped_object_name, escape_sequence="\\"
+             )
+             object_condition = f"object_name ILIKE '%{escaped_pattern}%'"
+         else:
+             # Use exact match (original behavior)
+             object_condition = f"object_name = '{escaped_object_name}'"
+

          query = dedent(
              f"""
              SELECT
@@ -96,7 +116,7 @@ class LogsManager(SqlExecutionMixin):
              FROM {table}
              WHERE record_type = 'LOG'
              AND (record:severity_text IN ({parse_log_levels_for_query((log_level))}) or record:severity_text is NULL )
-             AND object_name = '{object_name}'
+             AND {object_condition}
              {get_timestamp_query(from_time, to_time)}
              ORDER BY timestamp;
              """

snowflake/cli/_plugins/nativeapp/entities/application_package.py

@@ -1066,7 +1066,10 @@ class ApplicationPackageEntity(EntityBase[ApplicationPackageEntityModel]):

          for version in free_versions:
              last_updated = last_updated_map[version]
-             if not oldest_version or last_updated < oldest_version_last_updated_on:
+             if not oldest_version or (
+                 oldest_version_last_updated_on is not None
+                 and last_updated < oldest_version_last_updated_on
+             ):
                  oldest_version = version
                  oldest_version_last_updated_on = last_updated
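
The extra check matters because the tracked timestamp can be None (for example, when a version has no recorded "last updated" value); Python refuses to order a datetime against None, so the old condition could raise at runtime. A minimal repro of that failure mode:

    # Sketch only - demonstrates why the None guard is needed.
    from datetime import datetime

    oldest_version_last_updated_on = None
    last_updated = datetime(2024, 1, 1)
    try:
        last_updated < oldest_version_last_updated_on
    except TypeError as exc:
        print(exc)  # '<' not supported between instances of 'datetime.datetime' and 'NoneType'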

snowflake/cli/_plugins/nativeapp/sf_sql_facade.py

@@ -632,6 +632,7 @@ class SnowflakeSQLFacade:
          role: str | None = None,
          database: str | None = None,
          schema: str | None = None,
+         temporary: bool = False,
      ):
          """
          Creates a stage.
@@ -641,13 +642,14 @@ class SnowflakeSQLFacade:
          @param [Optional] role: Role to switch to while running this script. Current role will be used if no role is passed in.
          @param [Optional] database: Database to use while running this script, unless the stage name is database-qualified.
          @param [Optional] schema: Schema to use while running this script, unless the stage name is schema-qualified.
+         @param [Optional] temporary: determines if stage should be temporary. Default is false.
          """
          fqn = FQN.from_string(name)
          identifier = to_identifier(fqn.name)
          database = fqn.database or database
          schema = fqn.schema or schema

-         query = f"create stage if not exists {identifier}"
+         query = f"create{' temporary' if temporary else ''} stage if not exists {identifier}"
          if encryption_type:
              query += f" encryption = (type = '{encryption_type}')"
          if enable_directory:
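
The new flag only changes the generated statement text; everything else about stage creation stays the same. For illustration (the identifier below is made up):

    # Sketch only - the query text produced for each value of the new `temporary` flag.
    def create_stage_query(identifier: str, temporary: bool = False) -> str:
        return f"create{' temporary' if temporary else ''} stage if not exists {identifier}"

    print(create_stage_query("app_pkg_stage"))                  # create stage if not exists app_pkg_stage
    print(create_stage_query("app_pkg_stage", temporary=True))  # create temporary stage if not exists app_pkg_stage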

snowflake/cli/_plugins/object/manager.py

@@ -137,3 +137,4 @@ def _handle_create_error_codes(err: Exception) -> None:
              raise ClickException(f"{err_code} internal server error.")
          case _:
              raise err
+     raise err

snowflake/cli/_plugins/snowpark/common.py

@@ -349,6 +349,7 @@ def user_to_sql_type_mapper(user_provided_type: str) -> str:
              "FLOAT4",
              "FLOAT8",
          ),
+         ("DECFLOAT", ""): ("DECFLOAT",),
          ("TIMESTAMP_NTZ", ""): ("TIMESTAMP_NTZ", "TIMESTAMPNTZ", "DATETIME"),
          ("TIMESTAMP_LTZ", ""): ("TIMESTAMP_LTZ", "TIMESTAMPLTZ"),
          ("TIMESTAMP_TZ", ""): ("TIMESTAMP_TZ", "TIMESTAMPTZ"),

snowflake/cli/_plugins/snowpark/package/anaconda_packages.py

@@ -153,13 +153,37 @@ class AnacondaPackages:
      ):
          """Saves requirements to a file in format accepted by Snowflake SQL commands."""
          log.info("Writing requirements into file %s", file_path.path)
-         formatted_requirements = []
+
+         # Deduplicate requirements by package name, keeping the first occurrence
+         seen_packages = set()
+         deduplicated_requirements = []
+         duplicate_packages = set()
+
          for requirement in requirements:
              if requirement.name and requirement.name in self._packages:
-                 snowflake_name = self._packages[requirement.name].snowflake_name
-                 formatted_requirements.append(
-                     snowflake_name + requirement.formatted_specs
-                 )
+                 if requirement.name in seen_packages:
+                     duplicate_packages.add(requirement.name)
+                     log.warning(
+                         "Duplicate package '%s' found in Anaconda requirements. "
+                         "Ignoring: %s",
+                         requirement.name,
+                         requirement.name_and_version,
+                     )
+                 else:
+                     seen_packages.add(requirement.name)
+                     deduplicated_requirements.append(requirement)
+
+         if duplicate_packages:
+             log.warning(
+                 "Found duplicate Anaconda packages: %s. "
+                 "Consider consolidating package versions in requirements.txt.",
+                 ", ".join(sorted(duplicate_packages)),
+             )
+
+         formatted_requirements = []
+         for requirement in deduplicated_requirements:
+             snowflake_name = self._packages[requirement.name].snowflake_name
+             formatted_requirements.append(snowflake_name + requirement.formatted_specs)

          if formatted_requirements:
              file_path.write_text("\n".join(formatted_requirements))
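
The deduplication is first-occurrence-wins: the first requirement seen for a package name is kept, later ones are logged and dropped. A simplified, self-contained version of that pattern (plain strings instead of Requirement objects):

    # Sketch only - keep the first occurrence of each name, report the rest.
    def dedupe_keep_first(names: list[str]) -> tuple[list[str], set[str]]:
        seen: set[str] = set()
        kept: list[str] = []
        duplicates: set[str] = set()
        for name in names:
            if name in seen:
                duplicates.add(name)
            else:
                seen.add(name)
                kept.append(name)
        return kept, duplicates

    print(dedupe_keep_first(["pandas", "numpy", "pandas"]))
    # (['pandas', 'numpy'], {'pandas'})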

snowflake/cli/_plugins/snowpark/package_utils.py

@@ -255,14 +255,55 @@ def split_downloaded_dependencies(
      anaconda_packages: AnacondaPackages,
      skip_version_check: bool,
  ) -> SplitDownloadedDependenciesResult:
-     packages_metadata: Dict[str, WheelMetadata] = {
-         meta.name: meta
+     # Build metadata for all downloaded wheels
+     all_wheels_metadata = [
+         meta
          for meta in (
              WheelMetadata.from_wheel(wheel_path)
              for wheel_path in downloads_dir.glob("*.whl")
          )
          if meta is not None
-     }
+     ]
+
+     # Detect and handle duplicate packages
+     packages_metadata: Dict[str, WheelMetadata] = {}
+     duplicate_packages = set()
+
+     for meta in all_wheels_metadata:
+         if meta.name in packages_metadata:
+             duplicate_packages.add(meta.name)
+             log.warning(
+                 "Multiple versions of package '%s' found in dependencies. "
+                 "Using: %s, Ignoring: %s",
+                 meta.name,
+                 packages_metadata[meta.name].wheel_path.name,
+                 meta.wheel_path.name,
+             )
+         else:
+             packages_metadata[meta.name] = meta
+
+     if duplicate_packages:
+         log.warning(
+             "Found duplicate packages: %s. This may cause deployment issues. "
+             "Consider pinning package versions in requirements.txt to avoid conflicts.",
+             ", ".join(sorted(duplicate_packages)),
+         )
+
+     # Remove duplicate wheel files to prevent them from being extracted
+     for meta in all_wheels_metadata:
+         if (
+             meta.name in duplicate_packages
+             and meta not in packages_metadata.values()
+         ):
+             try:
+                 meta.wheel_path.unlink()
+                 log.debug("Removed duplicate wheel file: %s", meta.wheel_path.name)
+             except Exception as e:
+                 log.warning(
+                     "Failed to remove duplicate wheel file %s: %s",
+                     meta.wheel_path.name,
+                     e,
+                 )
      available_in_snowflake_dependencies: Dict = {}
      unavailable_dependencies: Dict = {}


snowflake/cli/_plugins/spcs/services/commands.py

@@ -151,6 +151,14 @@ AutoResumeOption = OverrideableOption(
      help=_AUTO_RESUME_HELP,
  )

+ _AUTO_SUSPEND_SECS_HELP = "Number of seconds of inactivity after which the service will be automatically suspended."
+ AutoSuspendSecsOption = OverrideableOption(
+     None,
+     "--auto-suspend-secs",
+     help=_AUTO_SUSPEND_SECS_HELP,
+     min=0,
+ )
+
  _COMMENT_HELP = "Comment for the service."

  add_object_command_aliases(
@@ -217,7 +225,7 @@ def deploy(
      upgrade: bool = typer.Option(
          False,
          "--upgrade",
-         help="Updates the existing service. Can update min_instances, max_instances, query_warehouse, auto_resume, external_access_integrations and comment.",
+         help="Updates the existing service. Can update min_instances, max_instances, query_warehouse, auto_resume, auto_suspend_secs, external_access_integrations and comment.",
      ),
      **options,
  ) -> CommandResult:
@@ -241,6 +249,7 @@ def deploy(
          min_instances=service.min_instances,
          max_instances=max_instances,
          auto_resume=service.auto_resume,
+         auto_suspend_secs=service.auto_suspend_secs,
          external_access_integrations=service.external_access_integrations,
          query_warehouse=service.query_warehouse,
          tags=service.tags,
@@ -529,6 +538,7 @@ def set_property(
      max_instances: Optional[int] = MaxInstancesOption(show_default=False),
      query_warehouse: Optional[str] = QueryWarehouseOption(show_default=False),
      auto_resume: Optional[bool] = AutoResumeOption(default=None, show_default=False),
+     auto_suspend_secs: Optional[int] = AutoSuspendSecsOption(show_default=False),
      external_access_integrations: Optional[List[str]] = typer.Option(
          None,
          "--eai-name",
@@ -546,6 +556,7 @@ def set_property(
          max_instances=max_instances,
          query_warehouse=query_warehouse,
          auto_resume=auto_resume,
+         auto_suspend_secs=auto_suspend_secs,
          external_access_integrations=external_access_integrations,
          comment=comment,
      )
@@ -576,6 +587,12 @@ def unset_property(
          help=f"Reset the AUTO_RESUME property - {_AUTO_RESUME_HELP}",
          show_default=False,
      ),
+     auto_suspend_secs: bool = AutoSuspendSecsOption(
+         default=False,
+         param_decls=["--auto-suspend-secs"],
+         help=f"Reset the AUTO_SUSPEND_SECS property - {_AUTO_SUSPEND_SECS_HELP}",
+         show_default=False,
+     ),
      comment: bool = CommentOption(
          default=False,
          help=f"Reset the COMMENT property - {_COMMENT_HELP}",
@@ -593,6 +610,7 @@ def unset_property(
          max_instances=max_instances,
          query_warehouse=query_warehouse,
          auto_resume=auto_resume,
+         auto_suspend_secs=auto_suspend_secs,
          comment=comment,
      )
      return SingleQueryResult(cursor)

snowflake/cli/_plugins/spcs/services/manager.py

@@ -114,6 +114,7 @@ class ServiceManager(SqlExecutionMixin):
          min_instances: int,
          max_instances: int,
          auto_resume: bool,
+         auto_suspend_secs: Optional[int],
          external_access_integrations: Optional[List[str]],
          query_warehouse: Optional[str],
          tags: Optional[List[Tag]],
@@ -139,6 +140,7 @@ class ServiceManager(SqlExecutionMixin):
              max_instances=max_instances,
              query_warehouse=query_warehouse,
              auto_resume=auto_resume,
+             auto_suspend_secs=auto_suspend_secs,
              external_access_integrations=external_access_integrations,
              comment=comment,
          )
@@ -163,6 +165,9 @@ class ServiceManager(SqlExecutionMixin):
          if max_instances:
              query.append(f"MAX_INSTANCES = {max_instances}")

+         if auto_suspend_secs is not None:
+             query.append(f"AUTO_SUSPEND_SECS = {auto_suspend_secs}")
+
          if query_warehouse:
              query.append(f"QUERY_WAREHOUSE = {query_warehouse}")

@@ -313,11 +318,12 @@ class ServiceManager(SqlExecutionMixin):

              if new_log_records:
                  dedup_log_records = new_logs_only(prev_log_records, new_log_records)
-                 for log in dedup_log_records:
-                     yield filter_log_timestamp(log, include_timestamps)
+                 if dedup_log_records:
+                     for log in dedup_log_records:
+                         yield filter_log_timestamp(log, include_timestamps)

-                 prev_timestamp = dedup_log_records[-1].split(" ", 1)[0]
-                 prev_log_records = dedup_log_records
+                     prev_timestamp = dedup_log_records[-1].split(" ", 1)[0]
+                     prev_log_records = dedup_log_records

              time.sleep(interval_seconds)

@@ -531,6 +537,7 @@ class ServiceManager(SqlExecutionMixin):
          max_instances: Optional[int],
          query_warehouse: Optional[str],
          auto_resume: Optional[bool],
+         auto_suspend_secs: Optional[int],
          external_access_integrations: Optional[List[str]],
          comment: Optional[str],
      ):
@@ -539,6 +546,7 @@ class ServiceManager(SqlExecutionMixin):
              ("max_instances", max_instances),
              ("query_warehouse", query_warehouse),
              ("auto_resume", auto_resume),
+             ("auto_suspend_secs", auto_suspend_secs),
              ("external_access_integrations", external_access_integrations),
              ("comment", comment),
          ]
@@ -562,6 +570,9 @@ class ServiceManager(SqlExecutionMixin):
          if auto_resume is not None:
              query.append(f" auto_resume = {auto_resume}")

+         if auto_suspend_secs is not None:
+             query.append(f" auto_suspend_secs = {auto_suspend_secs}")
+
          if external_access_integrations is not None:
              external_access_integration_list = ",".join(
                  f"{e}" for e in external_access_integrations
@@ -582,6 +593,7 @@ class ServiceManager(SqlExecutionMixin):
          max_instances: bool,
          query_warehouse: bool,
          auto_resume: bool,
+         auto_suspend_secs: bool,
          comment: bool,
      ):
          property_pairs = [
@@ -589,6 +601,7 @@ class ServiceManager(SqlExecutionMixin):
              ("max_instances", max_instances),
              ("query_warehouse", query_warehouse),
              ("auto_resume", auto_resume),
+             ("auto_suspend_secs", auto_suspend_secs),
              ("comment", comment),
          ]
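
The new property flows through the same builders as the existing ones: when `auto_suspend_secs` is provided, an `auto_suspend_secs = <n>` fragment is appended to the ALTER SERVICE SET statement and `AUTO_SUSPEND_SECS = <n>` to the CREATE SERVICE options. A small sketch of the SET path, with made-up values:

    # Sketch only - mirrors the fragment-appending logic above.
    def set_property_fragments(auto_resume: bool | None, auto_suspend_secs: int | None) -> list[str]:
        query: list[str] = []
        if auto_resume is not None:
            query.append(f" auto_resume = {auto_resume}")
        if auto_suspend_secs is not None:
            query.append(f" auto_suspend_secs = {auto_suspend_secs}")
        return query

    print(set_property_fragments(auto_resume=None, auto_suspend_secs=300))
    # [' auto_suspend_secs = 300']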

snowflake/cli/_plugins/spcs/services/service_entity_model.py

@@ -30,6 +30,11 @@ class ServiceEntityModel(EntityModelBaseWithArtifacts, ExternalAccessBaseModel):
          title="The service will automatically resume when a service function or ingress is called.",
          default=True,
      )
+     auto_suspend_secs: Optional[int] = Field(
+         title="Number of seconds of inactivity after which the service is automatically suspended.",
+         default=None,
+         ge=0,
+     )
      query_warehouse: Optional[str] = Field(
          title="Warehouse to use if a service container connects to Snowflake to execute a query without explicitly specifying a warehouse to use",
          default=None,
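
In the project definition model the new field is optional and must be a non-negative integer (`ge=0`). A standalone sketch of the same constraint, using a throwaway pydantic model rather than the CLI's ServiceEntityModel (pydantic assumed available, as the CLI already depends on it):

    # Sketch only - not the CLI's model; shows the ge=0 validation behaviour.
    from typing import Optional
    from pydantic import BaseModel, Field, ValidationError

    class ServiceSettings(BaseModel):
        auto_suspend_secs: Optional[int] = Field(default=None, ge=0)

    print(ServiceSettings(auto_suspend_secs=600).auto_suspend_secs)  # 600
    try:
        ServiceSettings(auto_suspend_secs=-1)
    except ValidationError:
        print("negative values are rejected")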

snowflake/cli/_plugins/sql/lexer/types.py

@@ -11,6 +11,7 @@ TYPES = (
      "DATETIME",
      "DEC",
      "DECIMAL",
+     "DECFLOAT",
      "DOUBLE",
      "FLOAT",
      "INT",