libv8 5.0.71.48.3 → 5.1.281.59.0beta3
- checksums.yaml +4 -4
- data/CHANGELOG.md +4 -0
- data/ext/libv8/location.rb +12 -9
- data/ext/libv8/patcher.rb +1 -1
- data/ext/libv8/paths.rb +1 -1
- data/lib/libv8/version.rb +1 -1
- data/patches/0001-Build-standalone-static-library.patch +26 -0
- data/patches/{disable-building-tests.patch → 0002-Disable-building-tests.patch} +17 -5
- data/patches/0003-Use-the-fPIC-flag-for-the-static-library.patch +25 -0
- data/spec/location_spec.rb +1 -1
- data/vendor/depot_tools/.gitignore +1 -0
- data/vendor/depot_tools/PRESUBMIT.py +3 -1
- data/vendor/depot_tools/README +1 -6
- data/vendor/depot_tools/apply_issue.py +6 -0
- data/vendor/depot_tools/bootstrap/win/README.md +2 -2
- data/vendor/depot_tools/bootstrap/win/git.template.bat +1 -1
- data/vendor/depot_tools/bootstrap/win/win_tools.bat +12 -11
- data/vendor/depot_tools/codereview.settings +1 -1
- data/vendor/depot_tools/cpplint.py +353 -592
- data/vendor/depot_tools/fetch.py +10 -3
- data/vendor/depot_tools/fetch_configs/infra.py +4 -2
- data/vendor/depot_tools/fetch_configs/ios_internal.py +49 -0
- data/vendor/depot_tools/gclient.py +33 -7
- data/vendor/depot_tools/gclient_scm.py +14 -11
- data/vendor/depot_tools/gclient_utils.py +14 -3
- data/vendor/depot_tools/git-gs +3 -3
- data/vendor/depot_tools/git_cache.py +8 -4
- data/vendor/depot_tools/git_cl.py +221 -98
- data/vendor/depot_tools/git_footers.py +76 -39
- data/vendor/depot_tools/git_map_branches.py +12 -10
- data/vendor/depot_tools/infra/config/cq.cfg +0 -11
- data/vendor/depot_tools/infra/config/recipes.cfg +1 -1
- data/vendor/depot_tools/presubmit_canned_checks.py +31 -19
- data/vendor/depot_tools/presubmit_support.py +0 -13
- data/vendor/depot_tools/recipe_modules/bot_update/resources/bot_update.py +19 -2
- data/vendor/depot_tools/recipe_modules/depot_tools/api.py +4 -0
- data/vendor/depot_tools/recipe_modules/depot_tools/example.expected/basic.json +49 -0
- data/vendor/depot_tools/recipe_modules/depot_tools/example.expected/win.json +49 -0
- data/vendor/depot_tools/recipe_modules/depot_tools/example.py +38 -0
- data/vendor/depot_tools/recipe_modules/gclient/api.py +1 -0
- data/vendor/depot_tools/recipe_modules/gclient/config.py +19 -0
- data/vendor/depot_tools/recipe_modules/gclient/example.expected/basic.json +1 -0
- data/vendor/depot_tools/recipe_modules/gclient/example.expected/revision.json +1 -0
- data/vendor/depot_tools/recipe_modules/gclient/example.expected/tryserver.json +1 -0
- data/vendor/depot_tools/recipe_modules/gclient/example.py +3 -0
- data/vendor/depot_tools/recipe_modules/git_cl/api.py +22 -6
- data/vendor/depot_tools/recipe_modules/git_cl/example.expected/basic.json +27 -9
- data/vendor/depot_tools/recipe_modules/git_cl/example.py +9 -7
- data/vendor/depot_tools/recipe_modules/presubmit/api.py +5 -2
- data/vendor/depot_tools/recipe_modules/tryserver/__init__.py +1 -0
- data/vendor/depot_tools/recipe_modules/tryserver/api.py +31 -0
- data/vendor/depot_tools/recipe_modules/tryserver/example.expected/basic_tags.json +59 -0
- data/vendor/depot_tools/recipe_modules/tryserver/example.expected/with_rietveld_patch.json +26 -0
- data/vendor/depot_tools/recipe_modules/tryserver/example.expected/with_rietveld_patch_new.json +26 -0
- data/vendor/depot_tools/recipe_modules/tryserver/example.py +32 -3
- data/vendor/depot_tools/roll_dep.py +6 -2
- data/vendor/depot_tools/third_party/upload.py +17 -9
- data/vendor/depot_tools/update_depot_tools +11 -0
- data/vendor/depot_tools/update_depot_tools.bat +11 -0
- data/vendor/depot_tools/win_toolchain/get_toolchain_if_necessary.py +52 -9
- data/vendor/depot_tools/win_toolchain/package_from_installed.py +64 -57
- metadata +12 -10
- data/patches/build-standalone-static-library.patch +0 -14
- data/patches/fPIC-for-static.patch +0 -13
- data/vendor/depot_tools/git-lkgr +0 -208
- data/vendor/depot_tools/hammer +0 -28
- data/vendor/depot_tools/hammer.bat +0 -23
data/vendor/depot_tools/fetch.py
CHANGED
@@ -231,6 +231,7 @@ def usage(msg=None):
   Valid options:
   -h, --help, help Print this message.
   --nohooks Don't run hooks after checkout.
+  --force (dangerous) Don't look for existing .gclient file.
   -n, --dry-run Don't run commands, only print them.
   --no-history Perform shallow clones, don't fetch the full git history.

@@ -255,6 +256,7 @@ def handle_args(argv):
   dry_run = False
   nohooks = False
   no_history = False
+  force = False
   while len(argv) >= 2:
     arg = argv[1]
     if not arg.startswith('-'):
@@ -266,6 +268,8 @@ def handle_args(argv):
       nohooks = True
     elif arg == '--no-history':
       no_history = True
+    elif arg == '--force':
+      force = True
     else:
       usage('Invalid option %s.' % arg)

@@ -279,8 +283,11 @@ def handle_args(argv):
   config = argv[1]
   props = argv[2:]
   return (
-      optparse.Values(
-
+      optparse.Values({
+          'dry_run': dry_run,
+          'nohooks': nohooks,
+          'no_history': no_history,
+          'force': force}),
       config,
       props)

@@ -324,7 +331,7 @@ def run(options, spec, root):
     checkout = CheckoutFactory(checkout_type, options, checkout_spec, root)
   except KeyError:
     return 1
-  if checkout.exists():
+  if not options.force and checkout.exists():
     print 'Your current directory appears to already contain, or be part of, '
     print 'a checkout. "fetch" is used only to get new checkouts. Use '
     print '"gclient sync" to update existing checkouts.'
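Note: the new --force flag lets fetch proceed even when the current directory already looks like a checkout (it skips the existing-.gclient check in run()). A hypothetical invocation, assuming the usual `fetch <config>` command line, would be `fetch --force chromium`.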
data/vendor/depot_tools/fetch_configs/infra.py
CHANGED
@@ -13,7 +13,9 @@ class Infra(config_util.Config):
   """Basic Config class for the Infrastructure repositories."""

   @staticmethod
-  def fetch_spec(
+  def fetch_spec(props):
+    # This is used by [depot_tools]/testing_support/local_rietveld.py
+    managed = props.get('managed', 'false').lower() == 'true'
     return {
       'type': 'gclient_git',
       'gclient_git_spec': {
@@ -22,7 +24,7 @@ class Infra(config_util.Config):
            'name' : 'infra',
            'url' : 'https://chromium.googlesource.com/infra/infra.git',
            'deps_file': '.DEPS.git',
-            'managed' :
+            'managed' : managed,
          }
        ],
      },
data/vendor/depot_tools/fetch_configs/ios_internal.py
ADDED
@@ -0,0 +1,49 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+import config_util  # pylint: disable=F0401
+
+
+# This class doesn't need an __init__ method, so we disable the warning
+# pylint: disable=W0232
+class IOSInternal(config_util.Config):
+  """Basic Config class for Chrome on iOS."""
+
+  @staticmethod
+  def fetch_spec(props):
+    url = 'https://chrome-internal.googlesource.com/chrome/ios_internal.git'
+    solution = { 'name' :'src/ios_internal',
+                 'url' : url,
+                 'deps_file': 'DEPS',
+                 'managed' : False,
+                 'custom_deps': {},
+                 'safesync_url': '',
+    }
+    spec = {
+      'solutions': [solution],
+    }
+    if props.get('target_os'):
+      spec['target_os'] = props['target_os'].split(',')
+    else:
+      spec['target_os'] = ['ios']
+    if props.get('target_os_only'):
+      spec['target_os_only'] = props['target_os_only']
+    return {
+      'type': 'gclient_git',
+      'gclient_git_spec': spec,
+    }
+
+  @staticmethod
+  def expected_root(_props):
+    return 'src'
+
+
+def main(argv=None):
+  return IOSInternal().handle_args(argv)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
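Note: the new ios_internal config builds a gclient spec rooted at src/ios_internal and reads optional target_os / target_os_only properties. Assuming the standard `fetch <config> --prop=value` property syntax, a plausible invocation would be `fetch ios_internal --target_os=ios,mac`; when the property is omitted, target_os defaults to ['ios'].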
data/vendor/depot_tools/gclient.py
CHANGED
@@ -371,7 +371,15 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
     # recursedeps is a mutable value that selectively overrides the default
     # 'no recursion' setting on a dep-by-dep basis. It will replace
     # recursion_override.
+    #
+    # It will be a dictionary of {deps_name: {"deps_file": depfile_name}} or
+    # None.
     self.recursedeps = None
+    # This is inherited from WorkItem. We want the URL to be a resource.
+    if url and isinstance(url, basestring):
+      # The url is usually given to gclient either as https://blah@123
+      # or just https://blah. The @123 portion is irrelevent.
+      self.resources.append(url.split('@')[0])

     if not self.name and self.parent:
       raise gclient_utils.Error('Dependency without name')
@@ -650,9 +658,14 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
       self.recursion_override = local_scope.get('recursion')
       logging.warning(
           'Setting %s recursion to %d.', self.name, self.recursion_limit)
-    self.recursedeps =
+    self.recursedeps = None
     if 'recursedeps' in local_scope:
-      self.recursedeps =
+      self.recursedeps = {}
+      for ent in local_scope['recursedeps']:
+        if isinstance(ent, basestring):
+          self.recursedeps[ent] = {"deps_file": self.deps_file}
+        else:  # (depname, depsfilename)
+          self.recursedeps[ent[0]] = {"deps_file": ent[1]}
       logging.warning('Found recursedeps %r.', repr(self.recursedeps))
     # If present, save 'target_os' in the local_target_os property.
     if 'target_os' in local_scope:
@@ -687,9 +700,9 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
     # Update recursedeps if it's set.
     if self.recursedeps is not None:
       logging.warning('Updating recursedeps by prepending %s.', self.name)
-      rel_deps =
-      for
-      rel_deps
+      rel_deps = {}
+      for depname, options in self.recursedeps.iteritems():
+        rel_deps[os.path.normpath(os.path.join(self.name, depname))] = options
       self.recursedeps = rel_deps

     if 'allowed_hosts' in local_scope:
@@ -708,9 +721,14 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
     deps_to_add = []
     for name, url in deps.iteritems():
       should_process = self.recursion_limit and self.should_process
+      deps_file = self.deps_file
+      if self.recursedeps is not None:
+        ent = self.recursedeps.get(name)
+        if ent is not None:
+          deps_file = ent['deps_file']
       deps_to_add.append(Dependency(
           self, name, url, None, None, None, self.custom_vars, None,
-
+          deps_file, should_process))
     deps_to_add.sort(key=lambda x: x.name)

     # override named sets of hooks by the custom hooks
@@ -1220,12 +1238,19 @@ solutions = [
             dep.url, self.root_dir, dep.name, self.outbuf)
         actual_url = scm.GetActualRemoteURL(self._options)
         if actual_url and not scm.DoesRemoteURLMatch(self._options):
+          mirror = scm.GetCacheMirror()
+          if mirror:
+            mirror_string = '%s (exists=%s)' % (mirror.mirror_path,
+                                                mirror.exists())
+          else:
+            mirror_string = 'not used'
           raise gclient_utils.Error('''
 Your .gclient file seems to be broken. The requested URL is different from what
 is actually checked out in %(checkout_path)s.

 The .gclient file contains:
-%(expected_url)s (%(expected_scm)s)
+URL: %(expected_url)s (%(expected_scm)s)
+Cache mirror: %(mirror_string)s

 The local checkout in %(checkout_path)s reports:
 %(actual_url)s (%(actual_scm)s)
@@ -1237,6 +1262,7 @@ want to set 'managed': False in .gclient.
 ''' % {'checkout_path': os.path.join(self.root_dir, dep.name),
        'expected_url': dep.url,
        'expected_scm': gclient_scm.GetScmName(dep.url),
+       'mirror_string' : mirror_string,
        'actual_url': actual_url,
        'actual_scm': gclient_scm.GetScmName(actual_url)})

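Note: with the gclient.py change above, recursedeps entries may be plain dependency names (which reuse the parent's deps_file) or (depname, depsfilename) pairs. A hypothetical DEPS snippet showing both forms (the paths and file names are made up, not taken from this diff):

    recursedeps = [
      'src/third_party/foo',         # recurse using the parent's deps_file
      ('src/bar', 'DEPS.chromium'),  # recurse using src/bar/DEPS.chromium
    ]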
data/vendor/depot_tools/gclient_scm.py
CHANGED
@@ -173,19 +173,24 @@ class SCMWrapper(object):
     # Get the second token of the first line of the log.
     return log.splitlines()[0].split(' ', 1)[1]

+  def GetCacheMirror(self):
+    if (getattr(self, 'cache_dir', None)):
+      url, _ = gclient_utils.SplitUrlRevision(self.url)
+      return git_cache.Mirror(url)
+    return None
+
   def GetActualRemoteURL(self, options):
     """Attempt to determine the remote URL for this SCMWrapper."""
     # Git
     if os.path.exists(os.path.join(self.checkout_path, '.git')):
       actual_remote_url = self._get_first_remote_url(self.checkout_path)

-
-
-
-
-
-
-        actual_remote_url = self._get_first_remote_url(mirror.mirror_path)
+      mirror = self.GetCacheMirror()
+      # If the cache is used, obtain the actual remote URL from there.
+      if (mirror and mirror.exists() and
+          mirror.mirror_path.replace('\\', '/') ==
+          actual_remote_url.replace('\\', '/')):
+        actual_remote_url = self._get_first_remote_url(mirror.mirror_path)
       return actual_remote_url

     # Svn
@@ -553,9 +558,7 @@ class GitWrapper(SCMWrapper):

     if current_type == 'detached':
       # case 0
-
-      # Don't do this check if nuclear option is on.
-      self._CheckClean(rev_str)
+      self._CheckClean(rev_str)
       self._CheckDetachedHead(rev_str, options)
       if self._Capture(['rev-list', '-n', '1', 'HEAD']) == revision:
         self.Print('Up-to-date; skipping checkout.')
@@ -565,7 +568,7 @@ class GitWrapper(SCMWrapper):
         self._Checkout(
             options,
             revision,
-            force=(options.force
+            force=(options.force and options.delete_unversioned_trees),
             quiet=True,
         )
         if not printed_path:
data/vendor/depot_tools/gclient_utils.py
CHANGED
@@ -784,6 +784,7 @@ class WorkItem(object):
     self._name = name
     self.outbuf = cStringIO.StringIO()
     self.start = self.finish = None
+    self.resources = []  # List of resources this work item requires.

   def run(self, work_queue):
     """work_queue is passed as keyword argument so it should be
@@ -869,6 +870,15 @@ class ExecutionQueue(object):
 ----------------------------------------""" % (
           task.name, comment, elapsed, task.outbuf.getvalue().strip())

+  def _is_conflict(self, job):
+    """Checks to see if a job will conflict with another running job."""
+    for running_job in self.running:
+      for used_resource in running_job.item.resources:
+        logging.debug('Checking resource %s' % used_resource)
+        if used_resource in job.resources:
+          return True
+    return False
+
   def flush(self, *args, **kwargs):
     """Runs all enqueued items until all are executed."""
     kwargs['work_queue'] = self
@@ -892,9 +902,10 @@ class ExecutionQueue(object):
         # Verify its requirements.
         if (self.ignore_requirements or
             not (set(self.queued[i].requirements) - set(self.ran))):
-
-
-
+          if not self._is_conflict(self.queued[i]):
+            # Start one work item: all its requirements are satisfied.
+            self._run_one_task(self.queued.pop(i), args, kwargs)
+            break
       else:
         # Couldn't find an item that could run. Break out the outher loop.
         break
data/vendor/depot_tools/git-gs
CHANGED
@@ -4,6 +4,6 @@
 # found in the LICENSE file.
 git grep -n -e "$@" -- "*.h" "*.hpp" "*.cpp" "*.c" "*.cc" "*.cpp" "*.inl"\
  "*.grd" "*.grdp" "*.idl" "*.m" "*.mm" "*.py" "*.sh" "*.cfg" "*.tac" "*.go"\
- "
- "*.
- "*.
+ "*.vcproj" "*.vsprops" "*.make" "*.gyp" "*.gypi" "*.isolate" "*.java"\
+ "*.js" "*.html" "*.css" "*.ebuild" "*.pl" "*.pm" "*.yaml" "*.gn" "*.gni"\
+ "*.json" "DEPS" "*/DEPS"
data/vendor/depot_tools/git_cache.py
CHANGED
@@ -38,7 +38,7 @@ except NameError:
 class LockError(Exception):
   pass

-class
+class ClobberNeeded(Exception):
   pass

 class Lockfile(object):
@@ -246,7 +246,11 @@ class Mirror(object):
     cwd = self.mirror_path

     # Don't run git-gc in a daemon. Bad things can happen if it gets killed.
-
+    try:
+      self.RunGit(['config', 'gc.autodetach', '0'], cwd=cwd)
+    except subprocess.CalledProcessError:
+      # Hard error, need to clobber.
+      raise ClobberNeeded()

     # Don't combine pack files into one big pack file. It's really slow for
     # repositories, and there's no way to track progress and make sure it's
@@ -407,7 +411,7 @@ class Mirror(object):
       self.RunGit(fetch_cmd + [spec], cwd=rundir, retry=True)
     except subprocess.CalledProcessError:
       if spec == '+refs/heads/*:refs/heads/*':
-        raise
+        raise ClobberNeeded()  # Corrupted cache.
       logging.warn('Fetch of %s failed' % spec)

   def populate(self, depth=None, shallow=False, bootstrap=False,
@@ -426,7 +430,7 @@ class Mirror(object):
       tempdir = self._ensure_bootstrapped(depth, bootstrap)
       rundir = tempdir or self.mirror_path
       self._fetch(rundir, verbose, depth)
-    except
+    except ClobberNeeded:
       # This is a major failure, we need to clean and force a bootstrap.
       gclient_utils.rmtree(rundir)
       self.print(GIT_CACHE_CORRUPT_MESSAGE)
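Note: the git_cache.py hunks above replace bare failures with a dedicated ClobberNeeded exception, so a corrupted or misbehaving cache triggers a wipe-and-rebootstrap in populate(). A simplified standalone sketch of that pattern (illustrative only, not the actual git_cache.py flow):

    import shutil
    import subprocess

    class ClobberNeeded(Exception):
      pass

    def fetch_mirror(cache_dir):
      try:
        subprocess.check_call(['git', '-C', cache_dir, 'fetch', '--all'])
      except subprocess.CalledProcessError:
        # Treat a failed fetch of the mirror as a corrupted cache.
        raise ClobberNeeded()

    def populate(cache_dir):
      try:
        fetch_mirror(cache_dir)
      except ClobberNeeded:
        # Major failure: clean the cache and bootstrap it again.
        shutil.rmtree(cache_dir, ignore_errors=True)
        # ...re-bootstrap and retry the fetch here...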
data/vendor/depot_tools/git_cl.py
CHANGED
@@ -18,11 +18,9 @@ import logging
 import multiprocessing
 import optparse
 import os
-import Queue
 import re
 import stat
 import sys
-import tempfile
 import textwrap
 import time
 import traceback
@@ -132,7 +130,7 @@ def RunGitWithCode(args, suppress_stderr=False):


 def RunGitSilent(args):
-  """Returns stdout, suppresses stderr and
+  """Returns stdout, suppresses stderr and ignores the return code."""
   return RunGitWithCode(args, suppress_stderr=True)[1]


@@ -929,6 +927,7 @@ class Changelist(object):

     self.branchref = branchref
     if self.branchref:
+      assert branchref.startswith('refs/heads/')
       self.branch = ShortBranchName(self.branchref)
     else:
       self.branch = None
@@ -1256,11 +1255,15 @@ class Changelist(object):
     if codereview_server:
       RunGit(['config', codereview_setting, codereview_server])
     else:
-
-
-
+      # Reset it regardless. It doesn't hurt.
+      config_settings = [issue_setting, self._codereview_impl.PatchsetSetting()]
+      for prop in (['last-upload-hash'] +
+                   self._codereview_impl._PostUnsetIssueProperties()):
+        config_settings.append('branch.%s.%s' % (self.GetBranch(), prop))
+      for setting in config_settings:
+        RunGit(['config', '--unset', setting], error_ok=True)
     self.issue = None
-    self.
+    self.patchset = None

   def GetChange(self, upstream_branch, author):
     if not self.GitSanityChecks(upstream_branch):
@@ -1449,7 +1452,7 @@ class Changelist(object):
   def __getattr__(self, attr):
     # This is because lots of untested code accesses Rietveld-specific stuff
     # directly, and it's hard to fix for sure. So, just let it work, and fix
-    # on a
+    # on a case by case basis.
     return getattr(self._codereview_impl, attr)


@@ -1498,6 +1501,10 @@ class _ChangelistCodereviewBase(object):
     """Returns name of git config setting which stores issue number."""
     raise NotImplementedError()

+  def _PostUnsetIssueProperties(self):
+    """Which branch-specific properties to erase when unsetting issue."""
+    raise NotImplementedError()
+
   def GetRieveldObjForPresubmit(self):
     # This is an unfortunate Rietveld-embeddedness in presubmit.
     # For non-Rietveld codereviews, this probably should return a dummy object.
@@ -1739,6 +1746,10 @@ class _RietveldChangelistImpl(_ChangelistCodereviewBase):
       return 'branch.%s.rietveldserver' % branch
     return None

+  def _PostUnsetIssueProperties(self):
+    """Which branch-specific properties to erase when unsetting issue."""
+    return ['rietveldserver']
+
   def GetRieveldObjForPresubmit(self):
     return self.RpcServer()

@@ -1935,7 +1946,7 @@ class _RietveldChangelistImpl(_ChangelistCodereviewBase):
       upstream_branch = ShortBranchName(upstream_branch)
       if remote is '.':
         # A local branch is being tracked.
-        local_branch =
+        local_branch = upstream_branch
         if settings.GetIsSkipDependencyUpload(local_branch):
           print
           print ('Skipping dependency patchset upload because git config '
@@ -1943,7 +1954,7 @@ class _RietveldChangelistImpl(_ChangelistCodereviewBase):
           print
         else:
           auth_config = auth.extract_auth_config_from_options(options)
-          branch_cl = Changelist(branchref=local_branch,
+          branch_cl = Changelist(branchref='refs/heads/'+local_branch,
                                  auth_config=auth_config)
           branch_cl_issue_url = branch_cl.GetIssueURL()
           branch_cl_issue = branch_cl.GetIssue()
@@ -2095,6 +2106,13 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
       return 'branch.%s.gerritserver' % branch
     return None

+  def _PostUnsetIssueProperties(self):
+    """Which branch-specific properties to erase when unsetting issue."""
+    return [
+        'gerritserver',
+        'gerritsquashhash',
+    ]
+
   def GetRieveldObjForPresubmit(self):
     class ThisIsNotRietveldIssue(object):
       def __nonzero__(self):
@@ -2138,7 +2156,7 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
     except httplib.HTTPException:
       return 'error'

-    if data['status']
+    if data['status'] in ('ABANDONED', 'MERGED'):
      return 'closed'

     cq_label = data['labels'].get('Commit-Queue', {})
@@ -2296,6 +2314,27 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
                                     hostname=parsed_url.netloc)
     return None

+  def _GerritCommitMsgHookCheck(self, offer_removal):
+    hook = os.path.join(settings.GetRoot(), '.git', 'hooks', 'commit-msg')
+    if not os.path.exists(hook):
+      return
+    # Crude attempt to distinguish Gerrit Codereview hook from potentially
+    # custom developer made one.
+    data = gclient_utils.FileRead(hook)
+    if not('From Gerrit Code Review' in data and 'add_ChangeId()' in data):
+      return
+    print('Warning: you have Gerrit commit-msg hook installed.\n'
+          'It is not neccessary for uploading with git cl in squash mode, '
+          'and may interfere with it in subtle ways.\n'
+          'We recommend you remove the commit-msg hook.')
+    if offer_removal:
+      reply = ask_for_data('Do you want to remove it now? [Yes/No]')
+      if reply.lower().startswith('y'):
+        gclient_utils.rm_file_or_tree(hook)
+        print('Gerrit commit-msg hook removed.')
+      else:
+        print('OK, will keep Gerrit commit-msg hook in place.')
+
   def CMDUploadChange(self, options, args, change):
     """Upload the current branch to Gerrit."""
     if options.squash and options.no_squash:
@@ -2311,6 +2350,7 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
                                pending_prefix='')

     if options.squash:
+      self._GerritCommitMsgHookCheck(offer_removal=not options.force)
       if not self.GetIssue():
         # TODO(tandrii): deperecate this after 2016Q2. Backwards compatibility
         # with shadow branch, which used to contain change-id for a given
@@ -2468,21 +2508,24 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
         print('WARNING: underscores in title will be converted to spaces.')
       refspec_opts.append('m=' + options.title.replace(' ', '_'))

+    if options.send_mail:
+      if not change_desc.get_reviewers():
+        DieWithError('Must specify reviewers to send email.')
+      refspec_opts.append('notify=ALL')
+    else:
+      refspec_opts.append('notify=NONE')
+
     cc = self.GetCCList().split(',')
     if options.cc:
       cc.extend(options.cc)
     cc = filter(None, cc)
     if cc:
-
-      # TODO(tandrii): enable this back. http://crbug.com/604377
-      print('WARNING: Gerrit doesn\'t yet support cc-ing arbitrary emails.\n'
-            ' Ignoring cc-ed emails. See http://crbug.com/604377.')
+      refspec_opts.extend('cc=' + email.strip() for email in cc)

     if change_desc.get_reviewers():
       refspec_opts.extend('r=' + email.strip()
                           for email in change_desc.get_reviewers())

-
     refspec_suffix = ''
     if refspec_opts:
       refspec_suffix = '%' + ','.join(refspec_opts)
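Note: after the Gerrit upload changes above, reviewers, CC addresses and the notification mode all travel as push options in the refspec suffix built from refspec_opts. Assuming an upload targeting master, the pushed ref would look roughly like refs/for/master%notify=ALL,r=reviewer@example.com,cc=watcher@example.com (addresses are illustrative; the exact options depend on the flags passed to `git cl upload`).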
@@ -2523,7 +2566,7 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
       print 'git-cl: Added Change-Id to commit message.'
       return new_log_desc
     else:
-
+      DieWithError('ERROR: Gerrit commit-msg hook not installed.')

   def SetCQState(self, new_state):
     """Sets the Commit-Queue label assuming canonical CQ config for Gerrit."""
@@ -2668,13 +2711,41 @@ class ChangeDescription(object):
     self.set_description(clean_lines)

   def append_footer(self, line):
-
-
-
-
-
-
-
+    """Adds a footer line to the description.
+
+    Differentiates legacy "KEY=xxx" footers (used to be called tags) and
+    Gerrit's footers in the form of "Footer-Key: footer any value" and ensures
+    that Gerrit footers are always at the end.
+    """
+    parsed_footer_line = git_footers.parse_footer(line)
+    if parsed_footer_line:
+      # Line is a gerrit footer in the form: Footer-Key: any value.
+      # Thus, must be appended observing Gerrit footer rules.
+      self.set_description(
+          git_footers.add_footer(self.description,
+                                 key=parsed_footer_line[0],
+                                 value=parsed_footer_line[1]))
+      return
+
+    if not self._description_lines:
+      self._description_lines.append(line)
+      return
+
+    top_lines, gerrit_footers, _ = git_footers.split_footers(self.description)
+    if gerrit_footers:
+      # git_footers.split_footers ensures that there is an empty line before
+      # actual (gerrit) footers, if any. We have to keep it that way.
+      assert top_lines and top_lines[-1] == ''
+      top_lines, separator = top_lines[:-1], top_lines[-1:]
+    else:
+      separator = []  # No need for separator if there are no gerrit_footers.
+
+    prev_line = top_lines[-1] if top_lines else ''
+    if (not presubmit_support.Change.TAG_LINE_RE.match(prev_line) or
+        not presubmit_support.Change.TAG_LINE_RE.match(line)):
+      top_lines.append('')
+    top_lines.append(line)
+    self._description_lines = top_lines + separator + gerrit_footers

   def get_reviewers(self):
     """Retrieves the list of reviewers."""
@@ -2919,21 +2990,9 @@ def color_for_status(status):
       'error': Fore.WHITE,
   }.get(status, Fore.WHITE)

-def fetch_cl_status(branch, auth_config=None):
-  """Fetches information for an issue and returns (branch, issue, status)."""
-  cl = Changelist(branchref=branch, auth_config=auth_config)
-  url = cl.GetIssueURL()
-  status = cl.GetStatus()

-
-
-      url += ' (broken)'
-
-  return (branch, url, status)
-
-def get_cl_statuses(
-    branches, fine_grained, max_processes=None, auth_config=None):
-  """Returns a blocking iterable of (branch, issue, status) for given branches.
+def get_cl_statuses(changes, fine_grained, max_processes=None):
+  """Returns a blocking iterable of (cl, status) for given branches.

   If fine_grained is true, this will fetch CL statuses from the server.
   Otherwise, simply indicate if there's a matching url for the given branches.
@@ -2944,50 +3003,45 @@ def get_cl_statuses(

   See GetStatus() for a list of possible statuses.
   """
-  def fetch(branch):
-    if not branch:
-      return None
-
-    return fetch_cl_status(branch, auth_config=auth_config)
-
   # Silence upload.py otherwise it becomes unwieldly.
   upload.verbosity = 0

   if fine_grained:
     # Process one branch synchronously to work through authentication, then
     # spawn processes to process all the other branches in parallel.
-    if
+    if changes:
+      fetch = lambda cl: (cl, cl.GetStatus())
+      yield fetch(changes[0])

-
+    if not changes:
+      # Exit early if there was only one branch to fetch.
+      return

-
+    changes_to_fetch = changes[1:]
     pool = ThreadPool(
-        min(max_processes, len(
+        min(max_processes, len(changes_to_fetch))
         if max_processes is not None
-        else len(
+        else len(changes_to_fetch))

-
-    it = pool.imap_unordered(fetch,
+    fetched_cls = set()
+    it = pool.imap_unordered(fetch, changes_to_fetch).__iter__()
     while True:
       try:
         row = it.next(timeout=5)
       except multiprocessing.TimeoutError:
         break

-
+      fetched_cls.add(row[0])
       yield row

     # Add any branches that failed to fetch.
-    for
-
-      yield (b, cl.GetIssueURL() if b else None, 'error')
+    for cl in set(changes_to_fetch) - fetched_cls:
+      yield (cl, 'error')

   else:
     # Do not use GetApprovingReviewers(), since it requires an HTTP request.
-    for
-    cl
-      url = cl.GetIssueURL() if b else None
-      yield (b, url, 'waiting' if url else 'error')
+    for cl in changes:
+      yield (cl, 'waiting' if cl.GetIssueURL() else 'error')


 def upload_branch_deps(cl, args):
@@ -3090,6 +3144,69 @@ def upload_branch_deps(cl, args):
   return 0


+def CMDarchive(parser, args):
+  """Archives and deletes branches associated with closed changelists."""
+  parser.add_option(
+      '-j', '--maxjobs', action='store', type=int,
+      help='The maximum number of jobs to use when retrieving review status')
+  parser.add_option(
+      '-f', '--force', action='store_true',
+      help='Bypasses the confirmation prompt.')
+
+  auth.add_auth_options(parser)
+  options, args = parser.parse_args(args)
+  if args:
+    parser.error('Unsupported args: %s' % ' '.join(args))
+  auth_config = auth.extract_auth_config_from_options(options)
+
+  branches = RunGit(['for-each-ref', '--format=%(refname)', 'refs/heads'])
+  if not branches:
+    return 0
+
+  print 'Finding all branches associated with closed issues...'
+  changes = [Changelist(branchref=b, auth_config=auth_config)
+             for b in branches.splitlines()]
+  alignment = max(5, max(len(c.GetBranch()) for c in changes))
+  statuses = get_cl_statuses(changes,
+                             fine_grained=True,
+                             max_processes=options.maxjobs)
+  proposal = [(cl.GetBranch(),
+               'git-cl-archived-%s-%s' % (cl.GetIssue(), cl.GetBranch()))
+              for cl, status in statuses
+              if status == 'closed']
+  proposal.sort()
+
+  if not proposal:
+    print 'No branches with closed codereview issues found.'
+    return 0
+
+  current_branch = GetCurrentBranch()
+
+  print '\nBranches with closed issues that will be archived:\n'
+  print '%*s | %s' % (alignment, 'Branch name', 'Archival tag name')
+  for next_item in proposal:
+    print '%*s %s' % (alignment, next_item[0], next_item[1])
+
+  if any(branch == current_branch for branch, _ in proposal):
+    print('You are currently on a branch \'%s\' which is associated with a '
+          'closed codereview issue, so archive cannot proceed. Please '
+          'checkout another branch and run this command again.' %
+          current_branch)
+    return 1
+
+  if not options.force:
+    if ask_for_data('\nProceed with deletion (Y/N)? ').lower() != 'y':
+      print 'Aborted.'
+      return 1
+
+  for branch, tagname in proposal:
+    RunGit(['tag', tagname, branch])
+    RunGit(['branch', '-D', branch])
+  print '\nJob\'s done!'
+
+  return 0
+
+
 def CMDstatus(parser, args):
   """Show status of changelists.

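Note: the new `git cl archive` subcommand tags every branch whose review is closed as git-cl-archived-<issue>-<branch> and then deletes the branch; -j/--maxjobs bounds the parallel status lookups and -f/--force skips the confirmation prompt. A typical invocation would presumably be `git cl archive -f`.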
@@ -3140,25 +3257,27 @@ def CMDstatus(parser, args):
     print('No local branch found.')
     return 0

-  changes =
+  changes = [
       Changelist(branchref=b, auth_config=auth_config)
-      for b in branches.splitlines()
-  # TODO(tandrii): refactor to use CLs list instead of branches list.
-  branches = [c.GetBranch() for c in changes]
-  alignment = max(5, max(len(b) for b in branches))
+      for b in branches.splitlines()]
   print 'Branches associated with reviews:'
-  output = get_cl_statuses(
+  output = get_cl_statuses(changes,
                            fine_grained=not options.fast,
-                           max_processes=options.maxjobs
-                           auth_config=auth_config)
+                           max_processes=options.maxjobs)

   branch_statuses = {}
-  alignment = max(5, max(len(ShortBranchName(
-  for
+  alignment = max(5, max(len(ShortBranchName(c.GetBranch())) for c in changes))
+  for cl in sorted(changes, key=lambda c: c.GetBranch()):
+    branch = cl.GetBranch()
     while branch not in branch_statuses:
-
-      branch_statuses[
-
+      c, status = output.next()
+      branch_statuses[c.GetBranch()] = status
+    status = branch_statuses.pop(branch)
+    url = cl.GetIssueURL()
+    if url and (not status or status == 'error'):
+      # The issue probably doesn't exist anymore.
+      url += ' (broken)'
+
     color = color_for_status(status)
     reset = Fore.RESET
     if not setup_color.IS_TTY:
@@ -3166,8 +3285,8 @@ def CMDstatus(parser, args):
       reset = ''
     status_str = '(%s)' % status if status else ''
     print ' %*s : %s%s %s%s' % (
-        alignment, ShortBranchName(branch), color,
-        reset)
+        alignment, ShortBranchName(branch), color, url,
+        status_str, reset)

   cl = Changelist(auth_config=auth_config)
   print
@@ -4106,14 +4225,18 @@ def CMDpatch(parser, args):
   _process_codereview_select_options(parser, options)
   auth_config = auth.extract_auth_config_from_options(options)

-  cl = Changelist(auth_config=auth_config, codereview=options.forced_codereview)

-  issue_arg = None
   if options.reapply :
+    if options.newbranch:
+      parser.error('--reapply works on the current branch only')
     if len(args) > 0:
-      parser.error('--reapply implies no additional arguments
+      parser.error('--reapply implies no additional arguments')
+
+    cl = Changelist(auth_config=auth_config,
+                    codereview=options.forced_codereview)
+    if not cl.GetIssue():
+      parser.error('current branch must have an associated issue')

-    issue_arg = cl.GetIssue()
     upstream = cl.GetUpstreamBranch()
     if upstream == None:
       parser.error('No upstream branch specified. Cannot reset branch')
@@ -4121,37 +4244,34 @@ def CMDpatch(parser, args):
     RunGit(['reset', '--hard', upstream])
     if options.pull:
       RunGit(['pull'])
-    else:
-      if len(args) != 1:
-        parser.error('Must specify issue number or url')
-      issue_arg = args[0]

-
-
-      return 1
+    return cl.CMDPatchIssue(cl.GetIssue(), options.reject, options.nocommit,
+                            options.directory)

-  if
-
-    parser.error('--reject is not supported with Gerrit codereview.')
-  if options.nocommit:
-    parser.error('--nocommit is not supported with Gerrit codereview.')
-  if options.directory:
-    parser.error('--directory is not supported with Gerrit codereview.')
+  if len(args) != 1 or not args[0]:
+    parser.error('Must specify issue number or url')

   # We don't want uncommitted changes mixed up with the patch.
   if git_common.is_dirty_git_tree('patch'):
     return 1

   if options.newbranch:
-    if options.reapply:
-      parser.error("--reapply excludes any option other than --pull")
     if options.force:
       RunGit(['branch', '-D', options.newbranch],
-
-      RunGit(['
-
+             stderr=subprocess2.PIPE, error_ok=True)
+    RunGit(['new-branch', options.newbranch])
+
+  cl = Changelist(auth_config=auth_config, codereview=options.forced_codereview)

-
+  if cl.IsGerrit():
+    if options.reject:
+      parser.error('--reject is not supported with Gerrit codereview.')
+    if options.nocommit:
+      parser.error('--nocommit is not supported with Gerrit codereview.')
+    if options.directory:
+      parser.error('--directory is not supported with Gerrit codereview.')
+
+  return cl.CMDPatchIssue(args[0], options.reject, options.nocommit,
                           options.directory)


@@ -4364,7 +4484,10 @@ def CMDtry(parser, args):
     # Skip presubmit builders, because these will fail without LGTM.
     masters.setdefault(master, {})[builder] = ['defaulttests']
   if masters:
+    print('Loaded default bots from CQ config (%s)' % cq_cfg)
     return masters
+  else:
+    print('CQ config exists (%s) but has no try bots listed' % cq_cfg)

   if not options.bot:
     parser.error('No default try builder to try, use --bot')