alibuild 1.17.19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alibuild-1.17.19.data/scripts/aliBuild +137 -0
- alibuild-1.17.19.data/scripts/aliDeps +7 -0
- alibuild-1.17.19.data/scripts/aliDoctor +7 -0
- alibuild-1.17.19.data/scripts/alienv +344 -0
- alibuild-1.17.19.data/scripts/pb +7 -0
- alibuild-1.17.19.dist-info/METADATA +78 -0
- alibuild-1.17.19.dist-info/RECORD +74 -0
- alibuild-1.17.19.dist-info/WHEEL +5 -0
- alibuild-1.17.19.dist-info/licenses/LICENSE.md +674 -0
- alibuild-1.17.19.dist-info/top_level.txt +5 -0
- alibuild_helpers/__init__.py +21 -0
- alibuild_helpers/_version.py +21 -0
- alibuild_helpers/analytics.py +120 -0
- alibuild_helpers/args.py +493 -0
- alibuild_helpers/build.py +1209 -0
- alibuild_helpers/build_template.sh +314 -0
- alibuild_helpers/clean.py +83 -0
- alibuild_helpers/cmd.py +154 -0
- alibuild_helpers/deps.py +116 -0
- alibuild_helpers/doctor.py +195 -0
- alibuild_helpers/git.py +104 -0
- alibuild_helpers/init.py +103 -0
- alibuild_helpers/log.py +132 -0
- alibuild_helpers/scm.py +31 -0
- alibuild_helpers/sl.py +62 -0
- alibuild_helpers/sync.py +693 -0
- alibuild_helpers/templating_plugin.py +18 -0
- alibuild_helpers/utilities.py +662 -0
- alibuild_helpers/workarea.py +179 -0
- debian/changelog +11 -0
- debian/compat +1 -0
- debian/control +14 -0
- debian/copyright +10 -0
- debian/files +1 -0
- debian/rules +7 -0
- docs/README.md +1 -0
- docs/SUPPORT +3 -0
- docs/docs/alice_logo.png +0 -0
- docs/docs/deps.png +0 -0
- docs/docs/index.md +75 -0
- docs/docs/quick.md +89 -0
- docs/docs/reference.md +430 -0
- docs/docs/stylesheets/extra.css +9 -0
- docs/docs/troubleshooting.md +346 -0
- docs/docs/user.md +413 -0
- docs/mkdocs.yml +37 -0
- templates/alibuild_to_please.jnj +63 -0
- tests/test_analytics.py +42 -0
- tests/test_args.py +119 -0
- tests/test_build.py +426 -0
- tests/test_clean.py +154 -0
- tests/test_cmd.py +73 -0
- tests/test_deps.py +79 -0
- tests/test_doctor.py +128 -0
- tests/test_git.py +48 -0
- tests/test_hashing.py +67 -0
- tests/test_init.py +103 -0
- tests/test_log.py +50 -0
- tests/test_packagelist.py +235 -0
- tests/test_parseRecipe.py +132 -0
- tests/test_sync.py +332 -0
- tests/test_utilities.py +383 -0
- tests/test_workarea.py +101 -0
- tests/testdist/broken1.sh +1 -0
- tests/testdist/broken2.sh +1 -0
- tests/testdist/broken3.sh +3 -0
- tests/testdist/broken4.sh +2 -0
- tests/testdist/broken5.sh +2 -0
- tests/testdist/broken6.sh +2 -0
- tests/testdist/broken7.sh +5 -0
- tests/testdist/clobber-initdotsh.sh +4 -0
- tests/testdist/defaults-o2.sh +10 -0
- tests/testdist/delete-etc.sh +4 -0
- tests/testdist/tracking-env.sh +6 -0
tests/test_sync.py
ADDED
|
@@ -0,0 +1,332 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import os.path
|
|
3
|
+
import sys
|
|
4
|
+
import unittest
|
|
5
|
+
from io import BytesIO
|
|
6
|
+
|
|
7
|
+
from unittest.mock import patch, MagicMock
|
|
8
|
+
|
|
9
|
+
from alibuild_helpers import sync
|
|
10
|
+
from alibuild_helpers.utilities import resolve_links_path, resolve_store_path
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
ARCHITECTURE = "slc7_x86-64"
PACKAGE = "zlib"
GOOD_HASH = "deadbeefdeadbeefdeadbeefdeadbeefdeadbeef"
BAD_HASH = "baadf00dbaadf00dbaadf00dbaadf00dbaadf00d"
NONEXISTENT_HASH = "TRIGGERS_A_404"


def _make_spec(revision, spec_hash):
    """Build a fake package spec dict for the given revision and hash."""
    return {
        "package": PACKAGE,
        "version": "v1.2.3",
        "revision": revision,
        "hash": spec_hash,
        "remote_revision_hash": spec_hash,
        "remote_hashes": [spec_hash],
    }


# Three specs covering the remote-store states the tests exercise:
GOOD_SPEC = _make_spec("1", GOOD_HASH)            # fully present on the remote store
BAD_SPEC = _make_spec("2", BAD_HASH)              # partially present on the remote store
MISSING_SPEC = _make_spec("3", NONEXISTENT_HASH)  # completely absent from the remote store


def tarball_name(spec):
    """Return the tarball file name expected for *spec* on ARCHITECTURE."""
    return "%s-%s-%s.%s.tar.gz" % (
        spec["package"], spec["version"], spec["revision"], ARCHITECTURE)


TAR_NAMES = tuple(tarball_name(spec) for spec in (GOOD_SPEC, BAD_SPEC, MISSING_SPEC))
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
class MockRequest:
    """Minimal stand-in for a ``requests`` response object.

    ``j`` is the payload returned by :meth:`json`; a falsy payload makes the
    response look like a 404.  With ``simulate_err`` set, :meth:`iter_content`
    yields nothing even though ``content-length`` advertises a full body,
    imitating a connection that drops mid-download.
    """

    def __init__(self, j, simulate_err=False) -> None:
        self._payload = j
        self._truncated = simulate_err
        # Falsy payload -> pretend the object does not exist on the remote.
        self.status_code = 200 if j else 404
        self._remaining = 123456
        self.headers = {"content-length": str(self._remaining)}

    def raise_for_status(self):
        """Never raise; mirror the real method's no-op success path."""
        return True

    def json(self):
        """Return the decoded payload handed to the constructor."""
        return self._payload

    def iter_content(self, chunk_size=10):
        """Yield dummy body bytes in chunks, or nothing when truncated."""
        if self._truncated:
            return
        while self._remaining > 0:
            step = self._remaining if self._remaining < chunk_size else chunk_size
            yield b"x" * step
            self._remaining -= step
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
@patch("alibuild_helpers.sync.ProgressPrint", new=MagicMock())
class SyncTestCase(unittest.TestCase):
    """Exercise the HTTP, rsync, s3 and no-op remote-store sync backends."""

    def mock_get(self, url, *args, **kw):
        """Simulate requests.Session.get against a fake remote store.

        Returns a MockRequest whose payload depends on which hash/path the
        URL refers to: GOOD_HASH yields a complete tarball listing, BAD_HASH
        a listing whose download will be truncated, and NONEXISTENT_HASH a
        404-style empty response.
        """
        if NONEXISTENT_HASH in url:
            return MockRequest(None)
        if "/store/" in url:
            if GOOD_HASH in url:
                return MockRequest([{"name": tarball_name(GOOD_SPEC)}])
            elif BAD_HASH in url:
                # simulate_err=True makes iter_content yield no data, so the
                # download appears truncated versus its content-length.
                return MockRequest([{"name": tarball_name(BAD_SPEC)}],
                                   simulate_err=True)
        elif url.endswith(".manifest"):
            return MockRequest("")
        elif ("/%s/" % PACKAGE) in url:
            return MockRequest([{"name": tarball_name(GOOD_SPEC)},
                                {"name": tarball_name(BAD_SPEC)}])
        # Any URL the fake store does not know about is a test bug.
        raise NotImplementedError(url)

    # NOTE: mock arguments arrive in reverse decorator order, so the
    # innermost @patch ("requests.Session.get") binds to mock_get, then
    # error, then debug.
    @patch("alibuild_helpers.sync.open", new=lambda fn, mode: BytesIO())
    @patch("os.path.isfile", new=MagicMock(return_value=False))
    @patch("os.rename", new=MagicMock(return_value=None))
    @patch("os.makedirs", new=MagicMock(return_value=None))
    @patch("os.listdir", new=MagicMock(return_value=[]))
    @patch("alibuild_helpers.sync.symlink", new=MagicMock(return_value=None))
    @patch("alibuild_helpers.sync.execute", new=MagicMock(return_value=None))
    @patch("alibuild_helpers.sync.debug")
    @patch("alibuild_helpers.sync.error")
    @patch("requests.Session.get")
    def test_http_remote(self, mock_get, mock_error, mock_debug):
        """Test HTTPS remote store."""
        mock_get.side_effect = self.mock_get
        syncer = sync.HttpRemoteSync(remoteStore="https://localhost/test",
                                     architecture=ARCHITECTURE,
                                     workdir="/sw", insecure=False)
        syncer.httpBackoff = 0  # speed up tests

        # Try good spec: a complete fetch must not log any error.
        mock_error.reset_mock()

        syncer.fetch_symlinks(GOOD_SPEC)
        syncer.fetch_tarball(GOOD_SPEC)
        mock_error.assert_not_called()
        syncer.upload_symlinks_and_tarball(GOOD_SPEC)

        # Try bad spec: the truncated download should log exactly one error.
        mock_error.reset_mock()

        syncer.fetch_symlinks(BAD_SPEC)
        syncer.fetch_tarball(BAD_SPEC)

        # We can't use mock_error.assert_called_once_with because two
        # PartialDownloadError instances don't compare equal.
        self.assertEqual(len(mock_error.call_args_list), 1)
        self.assertEqual(mock_error.call_args_list[0][0][0],
                         "GET %s failed: %s")
        self.assertEqual(mock_error.call_args_list[0][0][1],
                         "https://localhost/test/TARS/%s/store/%s/%s/%s" %
                         (ARCHITECTURE, BAD_SPEC["remote_revision_hash"][:2],
                          BAD_SPEC["remote_revision_hash"],
                          tarball_name(BAD_SPEC)))
        self.assertIsInstance(mock_error.call_args_list[0][0][2],
                              sync.PartialDownloadError)

        syncer.upload_symlinks_and_tarball(BAD_SPEC)

        # Try missing spec: nothing on the remote, so only a debug message.
        mock_debug.reset_mock()
        syncer.fetch_symlinks(MISSING_SPEC)
        syncer.fetch_tarball(MISSING_SPEC)
        mock_debug.assert_called_with("Nothing fetched for %s (%s)",
                                      MISSING_SPEC["package"], NONEXISTENT_HASH)

    @patch("alibuild_helpers.sync.execute", new=lambda cmd, printer=None: 0)
    @patch("alibuild_helpers.sync.os")
    def test_sync(self, mock_os):
        """Check NoRemoteSync, rsync:// and s3:// remote stores."""
        # file does not exist locally: force download
        mock_os.path.exists.side_effect = lambda path: False
        mock_os.path.islink.side_effect = lambda path: False
        mock_os.path.isfile.side_effect = lambda path: False

        syncers = [
            sync.NoRemoteSync(),
            sync.RsyncRemoteSync(remoteStore="ssh://localhost/test",
                                 writeStore="ssh://localhost/test",
                                 architecture=ARCHITECTURE,
                                 workdir="/sw"),
            sync.S3RemoteSync(remoteStore="s3://localhost",
                              writeStore="s3://localhost",
                              architecture=ARCHITECTURE,
                              workdir="/sw"),
        ]

        # Smoke-test the full fetch/upload cycle for specs present remotely;
        # external commands are stubbed out, so this only checks that no
        # backend raises.
        for spec in (GOOD_SPEC, BAD_SPEC):
            for syncer in syncers:
                syncer.fetch_symlinks(spec)
                syncer.fetch_tarball(spec)
                syncer.upload_symlinks_and_tarball(spec)

        for syncer in syncers:
            syncer.fetch_symlinks(MISSING_SPEC)
            syncer.fetch_tarball(MISSING_SPEC)
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
@unittest.skipIf(sys.version_info < (3, 6), "python >= 3.6 is required for boto3")
@patch("os.makedirs", new=MagicMock(return_value=None))
@patch("alibuild_helpers.sync.symlink", new=MagicMock(return_value=None))
@patch("alibuild_helpers.sync.ProgressPrint", new=MagicMock())
@patch("alibuild_helpers.log.error", new=MagicMock())
@patch("alibuild_helpers.sync.Boto3RemoteSync._s3_init", new=MagicMock())
class Boto3TestCase(unittest.TestCase):
    """Check the b3:// remote is working properly."""

    def mock_s3(self):
        """Create a mock object imitating an S3 client.

        Which spec we are listing contents for controls the simulated contents
        of the store under dist*/:

        - MISSING_SPEC: Simulate a case where the store is empty; we can safely
          upload objects to the remote.
        - GOOD_SPEC: Simulate a case where we can fetch tarballs from the store;
          we mustn't upload as that would overwrite existing packages.
        - BAD_SPEC: Simulate a case where we must abort our upload.

        This currently only affects the simulated contents of dist*
        directories.
        """
        from botocore.exceptions import ClientError

        def paginate_listdir(Bucket, Delimiter, Prefix):
            # Simulate one page of a list_objects_v2 paginator for the given
            # prefix; which spec the prefix belongs to selects the scenario.
            dir = Prefix.rstrip(Delimiter)
            if dir in (resolve_store_path(ARCHITECTURE, NONEXISTENT_HASH),
                       resolve_store_path(ARCHITECTURE, BAD_HASH)):
                # Empty store path: page with no "Contents" key at all.
                return [{}]
            elif dir in (resolve_store_path(ARCHITECTURE, GOOD_HASH),
                         resolve_links_path(ARCHITECTURE, PACKAGE)):
                return [{"Contents": [
                    {"Key": dir + Delimiter + tarball_name(GOOD_SPEC)},
                ]}]
            elif "/dist" not in Prefix:
                raise NotImplementedError("unknown prefix " + Prefix)
            elif dir.endswith("-" + GOOD_SPEC["revision"]):
                # The expected dist symlinks already exist on S3. As our
                # test package has no dependencies, the prefix should only
                # contain a link to the package itself.
                return [{"Contents": [
                    {"Key": dir + Delimiter + "%s.%s.tar.gz" %
                     (os.path.basename(dir), ARCHITECTURE)},
                ]}]
            elif dir.endswith("-" + BAD_SPEC["revision"]):
                # Simulate partially complete upload of symlinks, e.g. by
                # another aliBuild running in parallel.
                return [{"Contents": [
                    {"Key": dir + Delimiter + "somepackage-v1-1.%s.tar.gz" % ARCHITECTURE},
                ]}]
            elif dir.endswith("-" + MISSING_SPEC["revision"]):
                # No pre-existing symlinks under dist*.
                return [{"Contents": []}]
            else:
                raise NotImplementedError("unknown dist prefix " + Prefix)

        def head_object(Bucket, Key):
            # 404 for anything related to the bad/missing specs, mimicking
            # botocore's error shape for an absent object.
            if NONEXISTENT_HASH in Key or BAD_HASH in Key or \
               os.path.basename(Key) == tarball_name(MISSING_SPEC):
                raise ClientError({"Error": {"Code": "404"}}, "head_object")
            return {}

        def download_file(Bucket, Key, Filename, Callback=None):
            # Downloads themselves are no-ops; only assert we never try to
            # fetch objects that should not be reachable.
            self.assertNotIn(NONEXISTENT_HASH, Key, "tried to fetch missing tarball")
            self.assertNotIn(BAD_HASH, Key, "tried to follow bad symlink")

        def get_object(Bucket, Key):
            if Key.endswith(".manifest"):
                return {"Body": MagicMock(iter_lines=lambda: [
                    tarball_name(GOOD_SPEC).encode("utf-8") + b"\t...from manifest\n",
                ])}
            return {"Body": MagicMock(read=lambda: b"...fetched individually")}

        def get_paginator(method):
            # Only list_objects_v2 pagination is simulated.
            if method == "list_objects_v2":
                return MagicMock(paginate=paginate_listdir)
            raise NotImplementedError(method)

        return MagicMock(
            get_paginator=get_paginator,
            head_object=head_object,
            download_file=MagicMock(side_effect=download_file),
            get_object=get_object,
            put_object=MagicMock(return_value=None),
            upload_file=MagicMock(return_value=None),
        )

    @patch("glob.glob", new=MagicMock(return_value=[]))
    @patch("os.listdir", new=MagicMock(return_value=[]))
    @patch("os.makedirs", new=MagicMock())
    # Pretend file does not exist locally to force download.
    @patch("os.path.exists", new=MagicMock(return_value=False))
    @patch("os.path.isfile", new=MagicMock(return_value=False))
    @patch("os.path.islink", new=MagicMock(return_value=False))
    @patch("alibuild_helpers.sync.execute", new=MagicMock(return_value=0))
    def test_tarball_download(self) -> None:
        """Test boto3 behaviour when downloading tarballs from the remote."""
        b3sync = sync.Boto3RemoteSync(
            remoteStore="b3://localhost", writeStore="b3://localhost",
            architecture=ARCHITECTURE, workdir="/sw")
        b3sync.s3 = self.mock_s3()

        # Present on the remote: a download must actually happen.
        b3sync.s3.download_file.reset_mock()
        b3sync.fetch_symlinks(GOOD_SPEC)
        b3sync.fetch_tarball(GOOD_SPEC)
        b3sync.s3.download_file.assert_called()

        # Bad symlinks on the remote: nothing must be downloaded.
        b3sync.s3.download_file.reset_mock()
        b3sync.fetch_symlinks(BAD_SPEC)
        b3sync.fetch_tarball(BAD_SPEC)
        b3sync.s3.download_file.assert_not_called()

        # Absent from the remote: nothing must be downloaded.
        b3sync.s3.download_file.reset_mock()
        b3sync.fetch_symlinks(MISSING_SPEC)
        b3sync.fetch_tarball(MISSING_SPEC)
        b3sync.s3.download_file.assert_not_called()

    # Each revision directory "sees" only its own locally-built tarball.
    @patch("os.listdir", new=lambda path: (
        [tarball_name(GOOD_SPEC)] if path.endswith("-" + GOOD_SPEC["revision"]) else
        [tarball_name(BAD_SPEC)] if path.endswith("-" + BAD_SPEC["revision"]) else
        [] if path.endswith("-" + MISSING_SPEC["revision"]) else
        NotImplemented
    ))
    @patch("os.readlink", new=MagicMock(return_value="dummy path"))
    @patch("os.path.islink", new=MagicMock(return_value=True))
    def test_tarball_upload(self) -> None:
        """Test boto3 behaviour when building packages for upload locally."""
        b3sync = sync.Boto3RemoteSync(
            remoteStore="b3://localhost", writeStore="b3://localhost",
            architecture=ARCHITECTURE, workdir="/sw")
        b3sync.s3 = self.mock_s3()

        # Make sure upload of a fresh, new tarball works fine.
        b3sync.s3.put_object.reset_mock()
        b3sync.s3.upload_file.reset_mock()
        b3sync.upload_symlinks_and_tarball(MISSING_SPEC)
        # We simulated local builds, so we should upload the tarballs to
        # the remote.
        b3sync.s3.put_object.assert_called()
        b3sync.s3.upload_file.assert_called()

        b3sync.s3.put_object.reset_mock()
        b3sync.s3.upload_file.reset_mock()
        b3sync.upload_symlinks_and_tarball(GOOD_SPEC)
        # We simulated downloading tarballs from the remote, so we mustn't
        # upload them again and overwrite the remote.
        b3sync.s3.put_object.assert_not_called()
        b3sync.s3.upload_file.assert_not_called()

        # Make sure conflict detection is working for tarball sync.
        b3sync.s3.put_object.reset_mock()
        b3sync.s3.upload_file.reset_mock()
        self.assertRaises(SystemExit, b3sync.upload_symlinks_and_tarball, BAD_SPEC)
        b3sync.s3.put_object.assert_not_called()
        b3sync.s3.upload_file.assert_not_called()
|
|
329
|
+
|
|
330
|
+
|
|
331
|
+
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
|