snowflake-cli-labs 2.7.0rc2__py3-none-any.whl → 2.7.0rc4__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
@@ -14,4 +14,4 @@
 
 from __future__ import annotations
 
-VERSION = "2.7.0rc2"
+VERSION = "2.7.0rc4"
@@ -52,5 +52,4 @@ class FeatureFlag(FeatureFlagMixin):
     ENABLE_STREAMLIT_VERSIONED_STAGE = BooleanFlag(
         "ENABLE_STREAMLIT_VERSIONED_STAGE", False
     )
-    # TODO: remove in 3.0
-    ENABLE_PROJECT_DEFINITION_V2 = BooleanFlag("ENABLE_PROJECT_DEFINITION_V2", True)
+    ENABLE_PROJECT_DEFINITION_V2 = BooleanFlag("ENABLE_PROJECT_DEFINITION_V2", False)
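The only behavioral change here is the ENABLE_PROJECT_DEFINITION_V2 default flipping from True to False, so project definition v2 becomes opt-in again in 2.7.0rc4. A minimal sketch of what that means for callers, assuming the is_enabled() accessor from FeatureFlagMixin and a [cli.features] config override, neither of which is shown in this diff:

from snowflake.cli.api.feature_flags import FeatureFlag

# With 2.7.0rc4 this evaluates to False unless the user opts in explicitly,
# e.g. via [cli.features] enable_project_definition_v2 = true in config.toml
# (the exact override mechanism is assumed here, not shown in this diff).
if FeatureFlag.ENABLE_PROJECT_DEFINITION_V2.is_enabled():
    ...  # project definition v2 code path
else:
    ...  # fall back to v1 behavior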
@@ -63,6 +63,11 @@ from snowflake.cli.plugins.nativeapp.v2_conversions.v2_to_v1_decorator import (
     nativeapp_definition_v2_to_v1,
 )
 from snowflake.cli.plugins.nativeapp.version.commands import app as versions_app
+from snowflake.cli.plugins.stage.diff import (
+    DiffResult,
+    compute_stage_diff,
+    print_diff_to_console,
+)
 
 app = SnowTyperFactory(
     name="app",
@@ -169,6 +174,32 @@ def app_bundle(
     return MessageResult(f"Bundle generated at {manager.deploy_root}")
 
 
+@app.command("diff", requires_connection=True, hidden=True)
+@with_project_definition()
+@nativeapp_definition_v2_to_v1
+def app_diff(
+    **options,
+) -> CommandResult:
+    """
+    Performs a diff between the app's source stage and the local deploy root.
+    """
+    assert_project_type("native_app")
+
+    manager = NativeAppManager(
+        project_definition=cli_context.project_definition.native_app,
+        project_root=cli_context.project_root,
+    )
+    bundle_map = manager.build_bundle()
+    diff: DiffResult = compute_stage_diff(
+        local_root=Path(manager.deploy_root), stage_fqn=manager.stage_fqn
+    )
+    if cli_context.output_format == OutputFormat.JSON:
+        return ObjectResult(diff.to_dict())
+    else:
+        print_diff_to_console(diff, bundle_map)
+        return None  # don't print any output
+
+
 @app.command("run", requires_connection=True)
 @with_project_definition()
 @nativeapp_definition_v2_to_v1
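The new hidden `snow app diff` command above is a thin wrapper around compute_stage_diff and the DiffResult it returns. A minimal sketch of how a caller could consume that result directly, assuming an active connection/CLI context and using only the names visible in these hunks; the helper function itself is hypothetical:

from pathlib import Path

from snowflake.cli.plugins.stage.diff import DiffResult, compute_stage_diff


def summarize_stage_drift(deploy_root: Path, stage_fqn: str) -> bool:
    """Hypothetical helper: report and detect drift between a deploy root and its stage."""
    diff: DiffResult = compute_stage_diff(local_root=deploy_root, stage_fqn=stage_fqn)
    for path in diff.only_local:
        print(f"only local:    {path}")
    for path in diff.only_on_stage:
        print(f"only on stage: {path}")
    for path in diff.different:
        print(f"changed:       {path}")
    # any non-identical bucket means the deploy root and the stage have drifted
    return bool(diff.only_local or diff.only_on_stage or diff.different)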
@@ -14,9 +14,7 @@
 
 from __future__ import annotations
 
-import hashlib
 import logging
-import re
 from dataclasses import dataclass, field
 from pathlib import Path, PurePosixPath
 from typing import Collection, Dict, List, Optional, Tuple
@@ -25,14 +23,11 @@ from snowflake.cli.api.console import cli_console as cc
 from snowflake.cli.api.exceptions import (
     SnowflakeSQLExecutionError,
 )
-from snowflake.cli.api.secure_path import UNLIMITED, SecurePath
 from snowflake.cli.plugins.nativeapp.artifacts import BundleMap
 from snowflake.connector.cursor import DictCursor
 
 from .manager import StageManager
-
-MD5SUM_REGEX = r"^[A-Fa-f0-9]{32}$"
-CHUNK_SIZE_BYTES = 8192
+from .md5 import UnknownMD5FormatError, file_matches_md5sum
 
 log = logging.getLogger(__name__)
 
@@ -72,45 +67,6 @@ class DiffResult:
         }
 
 
-def is_valid_md5sum(checksum: Optional[str]) -> bool:
-    """
-    Could the provided hexadecimal checksum represent a valid md5sum?
-    """
-    if checksum is None:
-        return False
-    return re.match(MD5SUM_REGEX, checksum) is not None
-
-
-def compute_md5sum(file: Path) -> str:
-    """
-    Returns a hexadecimal checksum for the file located at the given path.
-    """
-    if not file.is_file():
-        raise ValueError(
-            "The provided file does not exist or not a (symlink to a) regular file"
-        )
-
-    # FIXME: there are two cases in which this will fail to provide a matching
-    # md5sum, even when the underlying file is the same:
-    # 1. when the stage uses SNOWFLAKE_FULL encryption
-    # 2. when the file was uploaded in multiple parts
-
-    # We can re-create the second if we know what chunk size was used by the
-    # upload process to the backing object store (e.g. S3, azure blob, etc.)
-    # but we cannot re-create the first as the encrpytion key is hidden.
-
-    # We are assuming that we will not get accidental collisions here due to the
-    # large space of the md5sum (32 * 4 = 128 bits means 1-in-9-trillion chance)
-    # combined with the fact that the file name + path must also match elsewhere.
-
-    with SecurePath(file).open("rb", read_file_limit_mb=UNLIMITED) as f:
-        file_hash = hashlib.md5()
-        while chunk := f.read(CHUNK_SIZE_BYTES):
-            file_hash.update(chunk)
-
-    return file_hash.hexdigest()
-
-
 def enumerate_files(path: Path) -> List[Path]:
     """
     Get a list of all files in a directory (recursively).
@@ -175,30 +131,35 @@ def compute_stage_diff(
 
     for local_file in local_files:
         relpath = local_file.relative_to(local_root)
-        stage_filename = to_stage_path(relpath)
-        if stage_filename not in remote_md5:
+        stage_path = to_stage_path(relpath)
+        if stage_path not in remote_md5:
             # doesn't exist on the stage
-            result.only_local.append(stage_filename)
+            result.only_local.append(stage_path)
         else:
-            # N.B. we could compare local size vs remote size to skip the relatively-
-            # expensive md5sum operation, but after seeing a comment that says the value
-            # may not always be correctly populated, we'll ignore that column.
-            stage_md5sum = remote_md5[stage_filename]
-            if is_valid_md5sum(stage_md5sum) and stage_md5sum == compute_md5sum(
-                local_file
-            ):
-                # the file definitely hasn't changed
-                result.identical.append(stage_filename)
-            else:
-                # either the file has changed, or we can't tell if it has
-                result.different.append(stage_filename)
+            # N.B. file size on stage is not always accurate, so cannot fail fast
+            try:
+                if file_matches_md5sum(local_file, remote_md5[stage_path]):
+                    # We are assuming that we will not get accidental collisions here due to the
+                    # large space of the md5sum (32 * 4 = 128 bits means 1-in-9-trillion chance)
+                    # combined with the fact that the file name + path must also match elsewhere.
+                    result.identical.append(stage_path)
+                else:
+                    # either the file has changed, or we can't tell if it has
+                    result.different.append(stage_path)
+            except UnknownMD5FormatError:
+                log.warning(
+                    "Could not compare md5 for %s, assuming file has changed",
+                    local_file,
+                    exc_info=True,
+                )
+                result.different.append(stage_path)
 
         # mark this file as seen
-        del remote_md5[stage_filename]
+        del remote_md5[stage_path]
 
     # every entry here is a file we never saw locally
-    for stage_filename in remote_md5.keys():
-        result.only_on_stage.append(stage_filename)
+    for stage_path in remote_md5.keys():
+        result.only_on_stage.append(stage_path)
 
     return result
 
@@ -232,8 +193,8 @@ def delete_only_on_stage_files(
     """
     Deletes all files from a Snowflake stage according to the input list of filenames, using a custom role.
    """
-    for _stage_filename in only_on_stage:
-        stage_manager.remove(stage_name=stage_fqn, path=str(_stage_filename), role=role)
+    for _stage_path in only_on_stage:
+        stage_manager.remove(stage_name=stage_fqn, path=str(_stage_path), role=role)
 
 
 def put_files_on_stage(
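The rewritten compute_stage_diff above delegates checksum comparison to the new stage/md5.py module added in the next hunk. As a quick illustration of the two checksum shapes it distinguishes (the regexes come from that module; the literal values below are made up):

from snowflake.cli.plugins.stage.md5 import is_md5sum, parse_multipart_md5sum

is_md5sum("d41d8cd98f00b204e9800998ecf8427e")                 # True: plain md5, hashed in one pass
parse_multipart_md5sum("9b2cf535f27731c974343645a3985328-4")  # ("9b2cf535...", 4): multi-part upload, 4 chunks
parse_multipart_md5sum("not-a-checksum")                      # None: neither format, so file_matches_md5sum
                                                              # raises UnknownMD5FormatError and the file is
                                                              # counted as "different" by compute_stage_diff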
@@ -0,0 +1,160 @@
+# Copyright (c) 2024 Snowflake Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+import hashlib
+import logging
+import math
+import os.path
+import re
+from pathlib import Path
+from typing import List, Tuple
+
+from click.exceptions import ClickException
+from snowflake.cli.api.secure_path import UNLIMITED, SecurePath
+from snowflake.connector.constants import S3_CHUNK_SIZE, S3_MAX_PARTS, S3_MIN_PART_SIZE
+
+ONE_MEGABYTE = 1024**2
+READ_BUFFER_BYTES = 64 * 1024
+MD5SUM_REGEX = r"^[A-Fa-f0-9]{32}$"
+MULTIPART_MD5SUM_REGEX = r"^([A-Fa-f0-9]{32})-(\d+)$"
+
+log = logging.getLogger(__name__)
+
+
+class UnknownMD5FormatError(ClickException):
+    def __init__(self, md5: str):
+        super().__init__(f"Unknown md5 format: {md5}")
+
+
+def is_md5sum(checksum: str) -> bool:
+    """
+    Could the provided hexadecimal checksum represent a valid md5sum?
+    """
+    return re.match(MD5SUM_REGEX, checksum) is not None
+
+
+def parse_multipart_md5sum(checksum: str) -> Tuple[str, int] | None:
+    """
+    Does this represent a multi-part md5sum (i.e. "<md5>-<n>")?
+    If so, returns the tuple (md5, n), otherwise None.
+    """
+    multipart_md5 = re.match(MULTIPART_MD5SUM_REGEX, checksum)
+    if multipart_md5:
+        return (multipart_md5.group(1), int(multipart_md5.group(2)))
+    return None
+
+
+def compute_md5sum(file: Path, chunk_size: int | None = None) -> str:
+    """
+    Returns a hexadecimal checksum for the file located at the given path.
+    If chunk_size is given, computes a multi-part md5sum.
+    """
+    if not file.is_file():
+        raise ValueError(
+            "The provided file does not exist or not a (symlink to a) regular file"
+        )
+
+    # If the stage uses SNOWFLAKE_FULL encryption, this will fail to provide
+    # a matching md5sum, even when the underlying file is the same, as we do
+    # not have access to the encrypted file under checksum.
+
+    file_size = os.path.getsize(file)
+    if file_size == 0:
+        # simple md5 with no content
+        return hashlib.md5().hexdigest()
+
+    with SecurePath(file).open("rb", read_file_limit_mb=UNLIMITED) as f:
+        md5s: List[hashlib._Hash] = []  # noqa: SLF001
+        hasher = hashlib.md5()
+
+        remains = file_size
+        remains_in_chunk: int = min(chunk_size, remains) if chunk_size else remains
+        while remains > 0:
+            sz = min(READ_BUFFER_BYTES, remains_in_chunk)
+            buf = f.read(sz)
+            hasher.update(buf)
+            remains_in_chunk -= sz
+            remains -= sz
+            if remains_in_chunk == 0:
+                if not chunk_size:
+                    # simple md5; only one chunk processed
+                    return hasher.hexdigest()
+                else:
+                    # push the hash of this chunk + reset
+                    md5s.append(hasher)
+                    hasher = hashlib.md5()
+                    remains_in_chunk = min(chunk_size, remains)
+
+    # multi-part hash (e.g. aws)
+    digests = b"".join(m.digest() for m in md5s)
+    digests_md5 = hashlib.md5(digests)
+    return f"{digests_md5.hexdigest()}-{len(md5s)}"
+
+
+def file_matches_md5sum(local_file: Path, remote_md5: str | None) -> bool:
+    """
+    Try a few different md5sums to determine if a local file is identical
+    to a file that has a given remote md5sum.
+
+    Handles the multi-part md5sums generated by e.g. AWS S3, using values
+    from the python connector to make educated guesses on chunk size.
+
+    Assumes that upload time would dominate local hashing time.
+    """
+    if not remote_md5:
+        # no hash available
+        return False
+
+    if is_md5sum(remote_md5):
+        # regular hash
+        return compute_md5sum(local_file) == remote_md5
+
+    if md5_and_chunks := parse_multipart_md5sum(remote_md5):
+        # multi-part hash (e.g. aws)
+        (_, num_chunks) = md5_and_chunks
+        file_size = os.path.getsize(local_file)
+
+        # If this file uses the maximum number of parts supported by the cloud backend,
+        # the chunk size is likely not a clean multiple of a megabyte. Try reverse engineering
+        # from the file size first, then fall back to the usual detection method.
+        # At time of writing this logic would trigger for files >= 80GiB (python connector)
+        if num_chunks == S3_MAX_PARTS:
+            chunk_size = max(math.ceil(file_size / S3_MAX_PARTS), S3_MIN_PART_SIZE)
+            if compute_md5sum(local_file, chunk_size) == remote_md5:
+                return True
+
+        # Estimates the chunk size the multi-part file must have been uploaded with
+        # by trying chunk sizes that give the most evenly-sized chunks.
+        #
+        # First we'll try the chunk size that's a multiple of S3_CHUNK_SIZE (8mb) from
+        # the python connector that results in num_chunks, then we'll do the same with
+        # a smaller granularity (1mb) that is used by default in some AWS multi-part
+        # upload implementations.
+        #
+        # We're working backwards from num_chunks here because it's the only value we know.
+        for chunk_size_alignment in [S3_CHUNK_SIZE, ONE_MEGABYTE]:
+            # +1 because we need at least one chunk when file_size < num_chunks * chunk_size_alignment
+            # -1 because we don't want to add an extra chunk when file_size is an exact multiple of num_chunks * chunk_size_alignment
+            multiplier = 1 + ((file_size - 1) // (num_chunks * chunk_size_alignment))
+            chunk_size = multiplier * chunk_size_alignment
+            if compute_md5sum(local_file, chunk_size) == remote_md5:
+                return True
+
+        # we were unable to figure out the chunk size, or the files are different
+        log.debug("multi-part md5: %s != %s", remote_md5, local_file)
+        return False
+
+    raise UnknownMD5FormatError(remote_md5)
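A worked example of the chunk-size back-calculation in file_matches_md5sum above, using the ceiling-division formula from the hunk. The concrete sizes (a 100 MiB file uploaded in 8 MiB parts, hence an "<md5>-13" suffix) are assumptions for illustration; 8 MiB is the S3_CHUNK_SIZE named in the code's comments:

ONE_MEGABYTE = 1024**2
S3_CHUNK_SIZE = 8 * ONE_MEGABYTE   # 8 MiB, per the comment in the hunk above

file_size = 100 * ONE_MEGABYTE     # hypothetical local file of 100 MiB
num_chunks = 13                    # "<md5>-13" suffix on the stage: ceil(100 MiB / 8 MiB) parts

for alignment in (S3_CHUNK_SIZE, ONE_MEGABYTE):
    # smallest chunk size that is a multiple of `alignment` and still fits the file into num_chunks parts
    multiplier = 1 + ((file_size - 1) // (num_chunks * alignment))
    chunk_size = multiplier * alignment
    print(f"{alignment // ONE_MEGABYTE} MiB alignment -> {chunk_size // ONE_MEGABYTE} MiB chunks")
    # 8 MiB alignment -> 8 MiB chunks  (recovers the original upload chunk size)
    # 1 MiB alignment -> 8 MiB chunks  (the finer alignment happens to land on the same size here)

Recomputing the local multi-part md5 with an 8 MiB chunk size then reproduces the stage's "<md5>-13" value whenever the file contents actually match.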
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: snowflake-cli-labs
-Version: 2.7.0rc2
+Version: 2.7.0rc4
 Summary: Snowflake CLI
 Project-URL: Source code, https://github.com/snowflakedb/snowflake-cli
 Project-URL: Bug Tracker, https://github.com/snowflakedb/snowflake-cli/issues
@@ -1,4 +1,4 @@
-snowflake/cli/__about__.py,sha256=Uv-6PLfi16rouu4FmVaYcQxzWlnqxuynvd1zCdCUdao,636
+snowflake/cli/__about__.py,sha256=57l-U9OA6E64f2S2YFFAENOMxzkx4jUqFHQTGLxKp1U,636
 snowflake/cli/__init__.py,sha256=uGA_QRGW3iGwaegpFsLgOhup0zBliBSXh9ou8J439uU,578
 snowflake/cli/api/__init__.py,sha256=kD6lYv5et7QJvW7vzvLN9p2ibfD7pjh9KRWsp2QoYqo,1330
 snowflake/cli/api/cli_global_context.py,sha256=gAs7snaqRi5ESaxU8HcC_QBR2q9y1PMdhi7kQCGkICs,11714
@@ -6,7 +6,7 @@ snowflake/cli/api/config.py,sha256=-CmOMU14fEgI3oba00WU27RiwG46yc1UkgcI6Rdoxew,1
 snowflake/cli/api/constants.py,sha256=nVcX-NNZBFUIDX3Gbgm_YKjzv8tgcd1JdYvicV-nL_A,2964
 snowflake/cli/api/errno.py,sha256=IvotDJv_m_lz4tf5es0q7qRSdzCxv3zd2X2bQP6KsNU,1015
 snowflake/cli/api/exceptions.py,sha256=syNz7HdRVs3hAVC2NUaQINlSo-Ge-WEceuFvLoau2eQ,5118
-snowflake/cli/api/feature_flags.py,sha256=RXQERhm4BIwmCrt7peHCNQafw1-IVnTynQsDUB6AXjk,1725
+snowflake/cli/api/feature_flags.py,sha256=BJ_QywyZ9yfDDMf1NzG7Ju8OmuMSoMbkMVQd_Fj3Gaw,1700
 snowflake/cli/api/identifiers.py,sha256=dBIKuCW5d8xoBbNPE_YnBab58B3b0pYNIfjuzpQPTug,5433
 snowflake/cli/api/rest_api.py,sha256=X2hYq-J2mZJmVIEeCUvdk8ccTiV86ltVlj9ac5ZmIak,6070
 snowflake/cli/api/sanitizers.py,sha256=7EKqVQ3KOob0IFFoc_GmXPYpRhgnmIqhnJSvHPgxM5I,1211
@@ -125,7 +125,7 @@ snowflake/cli/plugins/init/commands.py,sha256=wLIuMlHFMBmS3oJWSLxYlMO0Ac1b9dBcZY
 snowflake/cli/plugins/init/plugin_spec.py,sha256=uxglpV4GxY_hysq5fit_XR8JWLGkA8sClEN0bAU5OwU,993
 snowflake/cli/plugins/nativeapp/__init__.py,sha256=uGA_QRGW3iGwaegpFsLgOhup0zBliBSXh9ou8J439uU,578
 snowflake/cli/plugins/nativeapp/artifacts.py,sha256=otpxa6YFWJs1W1KDzo1agAyvq5YqAR3YoLjJJhm1CLs,30178
-snowflake/cli/plugins/nativeapp/commands.py,sha256=86vCJmiVXHN49--Bu83FHZJamNoPLH5Oz_4HRiPuctQ,14612
+snowflake/cli/plugins/nativeapp/commands.py,sha256=e0JIWSeCRD5H33UBydlmXmIxNHnA2UakxnSbwNRS9sc,15551
 snowflake/cli/plugins/nativeapp/common_flags.py,sha256=7OUXprC2n3B0a9gGTj4AezHaJyt8-C7vgRI5q9l-UXE,1610
 snowflake/cli/plugins/nativeapp/constants.py,sha256=j25fS9dS54GPPp41njUOZTDykhYq12PY67B084FLCZk,956
 snowflake/cli/plugins/nativeapp/exceptions.py,sha256=Wh-qJlAG9UMdWB0lqAVafbBdA_hj_m7UnI4efLjOgUA,4360
@@ -207,8 +207,9 @@ snowflake/cli/plugins/sql/plugin_spec.py,sha256=U6ex88D6rhVkKR1d_HmVXo8EK0k9oZB-
 snowflake/cli/plugins/sql/snowsql_templating.py,sha256=VTPvFzZihZnu6R5L1JMaVdzxfviSRoMLlNyxK87sE5E,881
 snowflake/cli/plugins/stage/__init__.py,sha256=uGA_QRGW3iGwaegpFsLgOhup0zBliBSXh9ou8J439uU,578
 snowflake/cli/plugins/stage/commands.py,sha256=iLtu5qIQ7e1Ld-PaQ8DpuS5SP7C3q337dmlJIvriYUw,8201
-snowflake/cli/plugins/stage/diff.py,sha256=Wja12l-ORhBYsoKfr64ToowJLpsH7zaBowR7QuZGF6Y,12071
+snowflake/cli/plugins/stage/diff.py,sha256=xmZoZL3ZQZS36M5H8FEb29kRNpcE6KM7tJVw8ZRcb_A,10756
 snowflake/cli/plugins/stage/manager.py,sha256=r93oM5g-2rpXM7F7qtW00KlTmwCxKi7374ERvDgVHz8,20412
+snowflake/cli/plugins/stage/md5.py,sha256=hX_Ao7ys7hUPNuWLm5KEmIpcIULEIJ1UMgc4qUyvoLE,6303
 snowflake/cli/plugins/stage/plugin_spec.py,sha256=r8fvJxonf1TYBUm2361ka9fpsnA35NQRbGYqhKVfaC4,993
 snowflake/cli/plugins/streamlit/__init__.py,sha256=uGA_QRGW3iGwaegpFsLgOhup0zBliBSXh9ou8J439uU,578
 snowflake/cli/plugins/streamlit/commands.py,sha256=e6wkD2HyWH39S05q6YlHzvzLAhyph6gmltHxuWOc8ao,5975
@@ -230,8 +231,8 @@ snowflake/cli/templates/default_streamlit/snowflake.yml,sha256=yWFU-vqJ7Z17K3loU
 snowflake/cli/templates/default_streamlit/streamlit_app.py,sha256=hfYtJl4Rtm0n3J2gNeDwMT-leeGL5R-qJKwyJ0kUxAI,109
 snowflake/cli/templates/default_streamlit/common/hello.py,sha256=3Zt2LthAYDs6UGqOvRNCzYH-HISLHxdx_uAVhcCOtJM,37
 snowflake/cli/templates/default_streamlit/pages/my_page.py,sha256=f__P9j5XCo8J1c6du25wSvknIKvhTFWrXF_298YiQbw,49
-snowflake_cli_labs-2.7.0rc2.dist-info/METADATA,sha256=098gUXYelbDm6UjtsbMOhcNOyl5YdPSfL1r2dO7f9Pg,17804
-snowflake_cli_labs-2.7.0rc2.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
-snowflake_cli_labs-2.7.0rc2.dist-info/entry_points.txt,sha256=_qdnT44fYFbH78kb6Em5jr2_26amIg3UIAvSdmqT6TY,57
-snowflake_cli_labs-2.7.0rc2.dist-info/licenses/LICENSE,sha256=mJMA3Uz2AbjU_kVggo1CAx01XhBsI7BSi2H7ggUg_-c,11344
-snowflake_cli_labs-2.7.0rc2.dist-info/RECORD,,
+snowflake_cli_labs-2.7.0rc4.dist-info/METADATA,sha256=GNEknVbYBh4FbJJZV6jig2A5yYfGKZ37oH_m8TbGNKA,17804
+snowflake_cli_labs-2.7.0rc4.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
+snowflake_cli_labs-2.7.0rc4.dist-info/entry_points.txt,sha256=_qdnT44fYFbH78kb6Em5jr2_26amIg3UIAvSdmqT6TY,57
+snowflake_cli_labs-2.7.0rc4.dist-info/licenses/LICENSE,sha256=mJMA3Uz2AbjU_kVggo1CAx01XhBsI7BSi2H7ggUg_-c,11344
+snowflake_cli_labs-2.7.0rc4.dist-info/RECORD,,