libv8 5.0.71.48.3 → 5.1.281.59.0beta3
- checksums.yaml +4 -4
- data/CHANGELOG.md +4 -0
- data/ext/libv8/location.rb +12 -9
- data/ext/libv8/patcher.rb +1 -1
- data/ext/libv8/paths.rb +1 -1
- data/lib/libv8/version.rb +1 -1
- data/patches/0001-Build-standalone-static-library.patch +26 -0
- data/patches/{disable-building-tests.patch → 0002-Disable-building-tests.patch} +17 -5
- data/patches/0003-Use-the-fPIC-flag-for-the-static-library.patch +25 -0
- data/spec/location_spec.rb +1 -1
- data/vendor/depot_tools/.gitignore +1 -0
- data/vendor/depot_tools/PRESUBMIT.py +3 -1
- data/vendor/depot_tools/README +1 -6
- data/vendor/depot_tools/apply_issue.py +6 -0
- data/vendor/depot_tools/bootstrap/win/README.md +2 -2
- data/vendor/depot_tools/bootstrap/win/git.template.bat +1 -1
- data/vendor/depot_tools/bootstrap/win/win_tools.bat +12 -11
- data/vendor/depot_tools/codereview.settings +1 -1
- data/vendor/depot_tools/cpplint.py +353 -592
- data/vendor/depot_tools/fetch.py +10 -3
- data/vendor/depot_tools/fetch_configs/infra.py +4 -2
- data/vendor/depot_tools/fetch_configs/ios_internal.py +49 -0
- data/vendor/depot_tools/gclient.py +33 -7
- data/vendor/depot_tools/gclient_scm.py +14 -11
- data/vendor/depot_tools/gclient_utils.py +14 -3
- data/vendor/depot_tools/git-gs +3 -3
- data/vendor/depot_tools/git_cache.py +8 -4
- data/vendor/depot_tools/git_cl.py +221 -98
- data/vendor/depot_tools/git_footers.py +76 -39
- data/vendor/depot_tools/git_map_branches.py +12 -10
- data/vendor/depot_tools/infra/config/cq.cfg +0 -11
- data/vendor/depot_tools/infra/config/recipes.cfg +1 -1
- data/vendor/depot_tools/presubmit_canned_checks.py +31 -19
- data/vendor/depot_tools/presubmit_support.py +0 -13
- data/vendor/depot_tools/recipe_modules/bot_update/resources/bot_update.py +19 -2
- data/vendor/depot_tools/recipe_modules/depot_tools/api.py +4 -0
- data/vendor/depot_tools/recipe_modules/depot_tools/example.expected/basic.json +49 -0
- data/vendor/depot_tools/recipe_modules/depot_tools/example.expected/win.json +49 -0
- data/vendor/depot_tools/recipe_modules/depot_tools/example.py +38 -0
- data/vendor/depot_tools/recipe_modules/gclient/api.py +1 -0
- data/vendor/depot_tools/recipe_modules/gclient/config.py +19 -0
- data/vendor/depot_tools/recipe_modules/gclient/example.expected/basic.json +1 -0
- data/vendor/depot_tools/recipe_modules/gclient/example.expected/revision.json +1 -0
- data/vendor/depot_tools/recipe_modules/gclient/example.expected/tryserver.json +1 -0
- data/vendor/depot_tools/recipe_modules/gclient/example.py +3 -0
- data/vendor/depot_tools/recipe_modules/git_cl/api.py +22 -6
- data/vendor/depot_tools/recipe_modules/git_cl/example.expected/basic.json +27 -9
- data/vendor/depot_tools/recipe_modules/git_cl/example.py +9 -7
- data/vendor/depot_tools/recipe_modules/presubmit/api.py +5 -2
- data/vendor/depot_tools/recipe_modules/tryserver/__init__.py +1 -0
- data/vendor/depot_tools/recipe_modules/tryserver/api.py +31 -0
- data/vendor/depot_tools/recipe_modules/tryserver/example.expected/basic_tags.json +59 -0
- data/vendor/depot_tools/recipe_modules/tryserver/example.expected/with_rietveld_patch.json +26 -0
- data/vendor/depot_tools/recipe_modules/tryserver/example.expected/with_rietveld_patch_new.json +26 -0
- data/vendor/depot_tools/recipe_modules/tryserver/example.py +32 -3
- data/vendor/depot_tools/roll_dep.py +6 -2
- data/vendor/depot_tools/third_party/upload.py +17 -9
- data/vendor/depot_tools/update_depot_tools +11 -0
- data/vendor/depot_tools/update_depot_tools.bat +11 -0
- data/vendor/depot_tools/win_toolchain/get_toolchain_if_necessary.py +52 -9
- data/vendor/depot_tools/win_toolchain/package_from_installed.py +64 -57
- metadata +12 -10
- data/patches/build-standalone-static-library.patch +0 -14
- data/patches/fPIC-for-static.patch +0 -13
- data/vendor/depot_tools/git-lkgr +0 -208
- data/vendor/depot_tools/hammer +0 -28
- data/vendor/depot_tools/hammer.bat +0 -23
data/vendor/depot_tools/git_footers.py

@@ -4,6 +4,7 @@
 # found in the LICENSE file.

 import argparse
+import json
 import re
 import sys

@@ -22,6 +23,7 @@ def normalize_name(header):


 def parse_footer(line):
+  """Returns footer's (key, value) if footer is valid, else None."""
   match = FOOTER_PATTERN.match(line)
   if match:
     return (match.group(1), match.group(2))
@@ -31,21 +33,39 @@ def parse_footer(line):

 def parse_footers(message):
   """Parses a git commit message into a multimap of footers."""
+  _, _, parsed_footers = split_footers(message)
+  footer_map = defaultdict(list)
+  if parsed_footers:
+    # Read footers from bottom to top, because latter takes precedense,
+    # and we want it to be first in the multimap value.
+    for (k, v) in reversed(parsed_footers):
+      footer_map[normalize_name(k)].append(v.strip())
+  return footer_map
+
+
+def split_footers(message):
+  """Returns (non_footer_lines, footer_lines, parsed footers).
+
+  Guarantees that:
+    (non_footer_lines + footer_lines) == message.splitlines().
+    parsed_footers is parse_footer applied on each line of footer_lines.
+  """
+  message_lines = list(message.splitlines())
   footer_lines = []
-  for line in reversed(
+  for line in reversed(message_lines):
     if line == '' or line.isspace():
       break
     footer_lines.append(line)
+  else:
+    # The whole description was consisting of footers,
+    # which means those aren't footers.
+    footer_lines = []

+  footer_lines.reverse()
   footers = map(parse_footer, footer_lines)
-  if not all(footers):
-    return
-
-  footer_map = defaultdict(list)
-  for (k, v) in footers:
-    footer_map[normalize_name(k)].append(v.strip())
-
-  return footer_map
+  if not footer_lines or not all(footers):
+    return message_lines, [], []
+  return message_lines[:-len(footer_lines)], footer_lines, footers


 def get_footer_svn_id(branch=None):
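For reference, a quick sketch of how the reworked parse_footers/split_footers behave, per the docstrings above. This is illustrative only; it assumes depot_tools is on sys.path so git_footers imports, and the sample commit message is made up.

# Illustrative only; not part of the gem's diff.
import git_footers

message = 'Fix widget\n\nLonger description.\n\nBug: 123\nChange-Id: Iabc123'

# split_footers guarantees non_footer_lines + footer_lines == message.splitlines().
non_footer_lines, footer_lines, parsed = git_footers.split_footers(message)
assert non_footer_lines + footer_lines == message.splitlines()

# parse_footers returns a multimap keyed by normalized footer names.
footer_map = git_footers.parse_footers(message)
print footer_map['Bug']        # ['123']
print footer_map['Change-Id']  # ['Iabc123']
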
@@ -70,38 +90,46 @@ def get_footer_change_id(message):
 def add_footer_change_id(message, change_id):
   """Returns message with Change-ID footer in it.

-  Assumes that Change-Id is not yet in footers, which is then
-
+  Assumes that Change-Id is not yet in footers, which is then inserted at
+  earliest footer line which is after all of these footers:
+    Bug|Issue|Test|Feature.
   """
-  assert
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  assert 'Change-Id' not in parse_footers(message)
+  return add_footer(message, 'Change-Id', change_id,
+                    after_keys=['Bug', 'Issue', 'Test', 'Feature'])
+
+def add_footer(message, key, value, after_keys=None):
+  """Returns a message with given footer appended.
+
+  If after_keys is None (default), appends footer last.
+  Otherwise, after_keys must be iterable of footer keys, then the new footer
+  would be inserted at the topmost position such there would be no footer lines
+  after it with key matching one of after_keys.
+  For example, given
+      message='Header.\n\nAdded: 2016\nBug: 123\nVerified-By: CQ'
+      after_keys=['Bug', 'Issue']
+  the new footer will be inserted between Bug and Verified-By existing footers.
+  """
+  assert key == normalize_name(key), 'Use normalized key'
+  new_footer = '%s: %s' % (key, value)
+
+  top_lines, footer_lines, parsed_footers = split_footers(message)
+  if not footer_lines:
+    if not top_lines or top_lines[-1] != '':
+      top_lines.append('')
+    footer_lines = [new_footer]
+  elif not after_keys:
+    footer_lines.append(new_footer)
   else:
-
-
-
-
+    after_keys = set(map(normalize_name, after_keys))
+    # Iterate from last to first footer till we find the footer keys above.
+    for i, (key, _) in reversed(list(enumerate(parsed_footers))):
+      if normalize_name(key) in after_keys:
+        footer_lines.insert(i + 1, new_footer)
         break
     else:
-
-
-      return '\n'.join(lines)
+      footer_lines.insert(0, new_footer)
+  return '\n'.join(top_lines + footer_lines)


 def get_unique(footers, key):
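A minimal sketch of the new add_footer behaviour, mirroring the docstring example above (illustrative only; again assumes git_footers is importable).

# Illustrative only; not part of the gem's diff.
import git_footers

message = 'Header.\n\nAdded: 2016\nBug: 123\nVerified-By: CQ'
updated = git_footers.add_footer(message, 'Change-Id', 'Iabc',
                                 after_keys=['Bug', 'Issue'])
# The new footer lands after the last footer whose key matches after_keys,
# i.e. between the existing Bug and Verified-By footers:
#   Header.
#
#   Added: 2016
#   Bug: 123
#   Change-Id: Iabc
#   Verified-By: CQ
print updated
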
@@ -163,7 +191,8 @@ def main(args):
   parser = argparse.ArgumentParser(
     formatter_class=argparse.ArgumentDefaultsHelpFormatter
   )
-  parser.add_argument('ref'
+  parser.add_argument('ref', nargs='?', help="Git ref to retrieve footers from."
+                                             " Omit to parse stdin.")

   g = parser.add_mutually_exclusive_group()
   g.add_argument('--key', metavar='KEY',
@@ -172,11 +201,16 @@ def main(args):
   g.add_argument('--position', action='store_true')
   g.add_argument('--position-ref', action='store_true')
   g.add_argument('--position-num', action='store_true')
+  g.add_argument('--json', help="filename to dump JSON serialized headers to.")


   opts = parser.parse_args(args)

-
+  if opts.ref:
+    message = git.run('log', '-1', '--format=%B', opts.ref)
+  else:
+    message = '\n'.join(l for l in sys.stdin)
+
   footers = parse_footers(message)

   if opts.key:
@@ -191,6 +225,9 @@ def main(args):
     pos = get_position(footers)
     assert pos[1], 'No valid position for commit'
     print pos[1]
+  elif opts.json:
+    with open(opts.json, 'w') as f:
+      json.dump(footers, f)
   else:
     for k in footers.keys():
       for v in footers[k]:
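One way the new --json/stdin options could be exercised (illustrative only; the invocation and file names are assumptions, not part of the diff).

# Illustrative only: consuming the new --json output. Assumes git_footers.py
# from depot_tools is in the current directory and this runs in a git checkout.
import json
import subprocess

subprocess.check_call(
    ['python', 'git_footers.py', '--json', 'footers.json', 'HEAD'])
with open('footers.json') as f:
  footers = json.load(f)  # e.g. {'Bug': ['123'], 'Change-Id': ['I0123...']}
print footers.get('Bug', [])
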
data/vendor/depot_tools/git_map_branches.py

@@ -128,17 +128,19 @@ class BranchMapper(object):
         include_tracking_status=self.verbosity >= 1)
     if (self.verbosity >= 2):
       # Avoid heavy import unless necessary.
-      from git_cl import get_cl_statuses, color_for_status
+      from git_cl import get_cl_statuses, color_for_status, Changelist

-
+      change_cls = [Changelist(branchref='refs/heads/'+b)
+                    for b in self.__branches_info.keys() if b]
+      status_info = get_cl_statuses(change_cls,
                                     fine_grained=self.verbosity > 2,
                                     max_processes=self.maxjobs)

-      for
-
-
-      (
-
+      # This is a blocking get which waits for the remote CL status to be
+      # retrieved.
+      for cl, status in status_info:
+        self.__status_info[cl.GetBranch()] = (cl.GetIssueURL(),
+                                              color_for_status(status))

     roots = set()

@@ -258,9 +260,9 @@ class BranchMapper(object):

     # The Rietveld issue associated with the branch.
     if self.verbosity >= 2:
-
-
-      line.append(url or
+      (url, color) = ('', '') if self.__is_invalid_parent(branch) \
+          else self.__status_info[branch]
+      line.append(url or '', color=color)

     # The subject of the most recent commit on the branch.
     if self.show_subject:
data/vendor/depot_tools/infra/config/cq.cfg

@@ -4,20 +4,9 @@
 version: 1
 cq_name: "depot_tools"
 cq_status_url: "https://chromium-cq-status.appspot.com"
-svn_repo_url: "svn://svn.chromium.org/chrome/trunk/tools/depot_tools"

 rietveld {
   url: "https://codereview.chromium.org"
-  project_bases: "^svn\\:\\/\\/svn\\.chromium\\.org\\/chrome/trunk/tools/depot_tools(|/.*)$"
-  project_bases: "^svn\\:\\/\\/chrome\\-svn\\/chrome/trunk/tools/depot_tools(|/.*)$"
-  project_bases: "^svn\\:\\/\\/chrome\\-svn\\.corp\\/chrome/trunk/tools/depot_tools(|/.*)$"
-  project_bases: "^svn\\:\\/\\/chrome\\-svn\\.corp\\.google\\.com\\/chrome/trunk/tools/depot_tools(|/.*)$"
-  project_bases: "^http\\:\\/\\/src\\.chromium\\.org\\/svn/trunk/tools/depot_tools(|/.*)$"
-  project_bases: "^https\\:\\/\\/src\\.chromium\\.org\\/svn/trunk/tools/depot_tools(|/.*)$"
-  project_bases: "^http\\:\\/\\/src\\.chromium\\.org\\/chrome/trunk/tools/depot_tools(|/.*)$"
-  project_bases: "^https\\:\\/\\/src\\.chromium\\.org\\/chrome/trunk/tools/depot_tools(|/.*)$"
-  project_bases: "^https?\\:\\/\\/git\\.chromium\\.org\\/git\\/chromium\\/tools\\/depot_tools(?:\\.git)?\\@[a-zA-Z0-9\\-_\\.]+$"
-  project_bases: "^https?\\:\\/\\/git\\.chromium\\.org\\/chromium\\/tools\\/depot_tools(?:\\.git)?\\@[a-zA-Z0-9\\-_\\.]+$"
   project_bases: "^https?\\:\\/\\/chromium\\.googlesource\\.com\\/chromium\\/tools\\/depot_tools(?:\\.git)?\\@[a-zA-Z0-9\\-_\\.]+$"
   project_bases: "^https?\\:\\/\\/chromium\\.googlesource\\.com\\/a\\/chromium\\/tools\\/depot_tools(?:\\.git)?\\@[a-zA-Z0-9\\-_\\.]+$"
 }
data/vendor/depot_tools/presubmit_canned_checks.py

@@ -211,6 +211,26 @@ def CheckChangeHasNoCrAndHasOnlyOneEol(input_api, output_api,
                                 items=eof_files))
   return outputs

+def CheckGenderNeutral(input_api, output_api, source_file_filter=None):
+  """Checks that there are no gendered pronouns in any of the text files to be
+  submitted.
+  """
+  gendered_re = input_api.re.compile(
+      '(^|\s|\(|\[)([Hh]e|[Hh]is|[Hh]ers?|[Hh]im|[Ss]he|[Gg]uys?)\\b')
+
+  errors = []
+  for f in input_api.AffectedFiles(include_deletes=False,
+                                   file_filter=source_file_filter):
+    for line_num, line in f.ChangedContents():
+      if gendered_re.search(line):
+        errors.append('%s (%d): %s' % (f.LocalPath(), line_num, line))
+
+  if len(errors):
+    return [output_api.PresubmitPromptWarning('Found a gendered pronoun in:',
+                                              long_text='\n'.join(errors))]
+  return []
+
+

 def _ReportErrorFileAndLine(filename, line_num, dummy_line):
   """Default error formatter for _FindNewViolationsOfRule."""
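A minimal sketch of how the new canned check would typically be wired into a project's PRESUBMIT.py (hypothetical file; only the check name comes from the diff above, and input_api/output_api are supplied by presubmit_support when the check runs).

# PRESUBMIT.py (illustrative sketch, not part of the gem's diff).
def CheckChangeOnUpload(input_api, output_api):
  results = []
  results.extend(
      input_api.canned_checks.CheckGenderNeutral(input_api, output_api))
  return results
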
@@ -875,7 +895,7 @@ def CheckOwners(input_api, output_api, source_file_filter=None):
         input_api.change.AffectedFiles(file_filter=source_file_filter)])

   owners_db = input_api.owners_db
-  owner_email, reviewers =
+  owner_email, reviewers = GetCodereviewOwnerAndReviewers(
       input_api,
       owners_db.email_regexp,
       approval_needed=input_api.is_committing)
@@ -904,25 +924,17 @@ def CheckOwners(input_api, output_api, source_file_filter=None):
     return [output('Missing LGTM from someone other than %s' % owner_email)]
   return []

-def
+def GetCodereviewOwnerAndReviewers(input_api, email_regexp, approval_needed):
   """Return the owner and reviewers of a change, if any.

   If approval_needed is True, only reviewers who have approved the change
   will be returned.
   """
-
-
-
-
-
-  res = _RietveldOwnerAndReviewers(input_api, email_regexp, approval_needed)
-  if res:
-    return res
-
-  reviewers = set()
-  if not approval_needed:
-    reviewers = _ReviewersFromChange(input_api.change)
-  return None, reviewers
+  # Rietveld is default.
+  func = _RietveldOwnerAndReviewers
+  if input_api.gerrit:
+    func = _GerritOwnerAndReviewers
+  return func(input_api, email_regexp, approval_needed)


 def _GetRietveldIssueProps(input_api, messages):
@@ -953,11 +965,11 @@ def _RietveldOwnerAndReviewers(input_api, email_regexp, approval_needed=False):

   If approval_needed is True, only reviewers who have approved the change
   will be returned.
-  Returns None if can't fetch issue properties from codereview.
   """
   issue_props = _GetRietveldIssueProps(input_api, True)
   if not issue_props:
-    return None
+    return None, (set() if approval_needed else
+                  _ReviewersFromChange(input_api.change))

   if not approval_needed:
     return issue_props['owner_email'], set(issue_props['reviewers'])
@@ -977,11 +989,11 @@ def _GerritOwnerAndReviewers(input_api, email_regexp, approval_needed=False):

   If approval_needed is True, only reviewers who have approved the change
   will be returned.
-  Returns None if can't fetch issue properties from codereview.
   """
   issue = input_api.change.issue
   if not issue:
-    return None
+    return None, (set() if approval_needed else
+                  _ReviewersFromChange(input_api.change))

   owner_email = input_api.gerrit.GetChangeOwner(issue)
   reviewers = set(
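The helpers above now always return an (owner, reviewers) tuple instead of None when issue properties cannot be fetched; a tiny self-contained illustration of that contract (hypothetical helper, not from the diff).

# Illustrative only: mirrors the fallback pattern used in the diff above.
def _fallback(approval_needed, reviewers_from_change):
  return None, (set() if approval_needed else reviewers_from_change)

owner, reviewers = _fallback(False, set(['dev@example.com']))
assert owner is None and reviewers == set(['dev@example.com'])
owner, reviewers = _fallback(True, set(['dev@example.com']))
assert owner is None and reviewers == set()
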
data/vendor/depot_tools/presubmit_support.py

@@ -156,18 +156,6 @@ class _PresubmitResult(object):
     output.fail()


-# Top level object so multiprocessing can pickle
-# Public access through OutputApi object.
-class _PresubmitAddReviewers(_PresubmitResult):
-  """Add some suggested reviewers to the change."""
-  def __init__(self, reviewers):
-    super(_PresubmitAddReviewers, self).__init__('')
-    self.reviewers = reviewers
-
-  def handle(self, output):
-    output.reviewers.extend(self.reviewers)
-
-
 # Top level object so multiprocessing can pickle
 # Public access through OutputApi object.
 class _PresubmitError(_PresubmitResult):
@@ -270,7 +258,6 @@ class OutputApi(object):
   can output various types of results.
   """
   PresubmitResult = _PresubmitResult
-  PresubmitAddReviewers = _PresubmitAddReviewers
   PresubmitError = _PresubmitError
   PresubmitPromptWarning = _PresubmitPromptWarning
   PresubmitNotifyResult = _PresubmitNotifyResult
data/vendor/depot_tools/recipe_modules/bot_update/resources/bot_update.py

@@ -1126,6 +1126,10 @@ def apply_rietveld_issue(issue, patchset, root, server, _rev_map, _revision,
   for item in blacklist:
     cmd.extend(['--blacklist', item])

+  # TODO(kjellander): Remove this hack when http://crbug.com/611808 is fixed.
+  if root == path.join('src', 'third_party', 'webrtc'):
+    cmd.extend(['--extra_patchlevel=1'])
+
   # Only try once, since subsequent failures hide the real failure.
   try:
     call(*cmd, tries=1)
@@ -1135,6 +1139,8 @@
 def apply_gerrit_ref(gerrit_repo, gerrit_ref, root, gerrit_reset):
   gerrit_repo = gerrit_repo or 'origin'
   assert gerrit_ref
+  print '===Applying gerrit ref==='
+  print 'Repo is %r, ref is %r, root is %r' % (gerrit_repo, gerrit_ref, root)
   try:
     base_rev = git('rev-parse', 'HEAD', cwd=root).strip()
     git('retry', 'fetch', gerrit_repo, gerrit_ref, cwd=root, tries=1)
@@ -1311,13 +1317,17 @@ def ensure_checkout(solutions, revisions, first_sln, target_os, target_os_only,
   if patch_url:
     patches = get_svn_patch(patch_url)

+  print '===Processing patch solutions==='
   already_patched = []
   patch_root = patch_root or ''
+  print 'Patch root is %r' % patch_root
   for solution in solutions:
+    print 'Processing solution %r' % solution['name']
     if (patch_root == solution['name'] or
         solution['name'].startswith(patch_root + '/')):
       relative_root = solution['name'][len(patch_root) + 1:]
       target = '/'.join([relative_root, 'DEPS']).lstrip('/')
+      print ' relative root is %r, target is %r' % (relative_root, target)
       if patches:
         apply_svn_patch(patch_root, patches, whitelist=[target])
       already_patched.append(target)
@@ -1447,7 +1457,9 @@ def parse_args():
   parse.add_option('--gerrit_no_reset', action='store_true',
                    help='Bypass calling reset after applying a gerrit ref.')
   parse.add_option('--specs', help='Gcilent spec.')
-  parse.add_option('--master',
+  parse.add_option('--master',
+                   help='Master name. If specified and it is not in '
+                        'bot_update\'s whitelist, bot_update will be noop.')
   parse.add_option('-f', '--force', action='store_true',
                    help='Bypass check to see if we want to be run. '
                         'Should ONLY be used locally or by smart recipes.')
@@ -1715,8 +1727,13 @@ def main():
   slave = options.slave_name
   master = options.master

+  if not master:
+    # bot_update activation whitelist is checked only on buildbot masters.
+    # If there is no master, bot_update is always active.
+    options.force = True
+
   # Check if this script should activate or not.
-  active = check_valid_host(master, builder, slave)
+  active = options.force or check_valid_host(master, builder, slave)

   # Print a helpful message to tell developers whats going on with this step.
   print_help_text(
data/vendor/depot_tools/recipe_modules/depot_tools/api.py

@@ -9,6 +9,10 @@ class DepotToolsApi(recipe_api.RecipeApi):
   def download_from_google_storage_path(self):
     return self.package_repo_resource('download_from_google_storage.py')

+  @property
+  def upload_to_google_storage_path(self):
+    return self.package_repo_resource('upload_to_google_storage.py')
+
   @property
   def cros_path(self):
     return self.package_repo_resource('cros')
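For context, a sketch of a recipe exercising the new property (illustrative only; the DEPS wiring and step call follow the usual recipe-engine conventions and are assumptions, not part of the diff). The expected JSON below records the corresponding "upload_to_google_storage" step.

# Illustrative recipe example, not part of the gem's diff.
DEPS = [
    'depot_tools',
    'recipe_engine/step',
]

def RunSteps(api):
  # Resolves to upload_to_google_storage.py inside the depot_tools checkout.
  api.step('upload_to_google_storage',
           ['ls', api.depot_tools.upload_to_google_storage_path])
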
data/vendor/depot_tools/recipe_modules/depot_tools/example.expected/basic.json

@@ -0,0 +1,49 @@
+[
+  {
+    "cmd": [
+      "ls",
+      "RECIPE_PACKAGE_REPO[depot_tools]/download_from_google_storage.py"
+    ],
+    "name": "download_from_google_storage"
+  },
+  {
+    "cmd": [
+      "ls",
+      "RECIPE_PACKAGE_REPO[depot_tools]/upload_to_google_storage.py"
+    ],
+    "name": "upload_to_google_storage"
+  },
+  {
+    "cmd": [
+      "ls",
+      "RECIPE_PACKAGE_REPO[depot_tools]/cros"
+    ],
+    "name": "cros"
+  },
+  {
+    "cmd": [
+      "ls",
+      "RECIPE_PACKAGE_REPO[depot_tools]/gn.py"
+    ],
+    "name": "gn_py_path"
+  },
+  {
+    "cmd": [
+      "ls",
+      "RECIPE_PACKAGE_REPO[depot_tools]/gsutil.py"
+    ],
+    "name": "gsutil_py_path"
+  },
+  {
+    "cmd": [
+      "ls",
+      "RECIPE_PACKAGE_REPO[depot_tools]/ninja"
+    ],
+    "name": "ninja_path"
+  },
+  {
+    "name": "$result",
+    "recipe_result": null,
+    "status_code": 0
+  }
+]