libv8 7.3.492.27.1 → 7.3.492.27.3beta1

Sign up to get free protection for your applications and to get access to all the features.
Files changed (56) hide show
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +4 -0
  3. data/ext/libv8/builder.rb +1 -2
  4. data/lib/libv8/version.rb +1 -1
  5. data/vendor/depot_tools/.gitignore +4 -1
  6. data/vendor/depot_tools/autoninja.bat +8 -0
  7. data/vendor/depot_tools/autoninja.py +1 -1
  8. data/vendor/depot_tools/cipd_manifest.txt +2 -2
  9. data/vendor/depot_tools/cipd_manifest.versions +52 -52
  10. data/vendor/depot_tools/gclient.py +4 -1
  11. data/vendor/depot_tools/gclient_scm.py +17 -6
  12. data/vendor/depot_tools/git_cache.py +109 -128
  13. data/vendor/depot_tools/git_cl.py +98 -26
  14. data/vendor/depot_tools/infra/config/recipes.cfg +1 -1
  15. data/vendor/depot_tools/man/src/filter_demo_output.py +1 -1
  16. data/vendor/depot_tools/metrics.README.md +3 -3
  17. data/vendor/depot_tools/metrics.py +3 -2
  18. data/vendor/depot_tools/metrics_utils.py +1 -11
  19. data/vendor/depot_tools/owners.py +109 -32
  20. data/vendor/depot_tools/patch.py +1 -1
  21. data/vendor/depot_tools/recipes/README.recipes.md +23 -20
  22. data/vendor/depot_tools/recipes/recipe_modules/bot_update/__init__.py +1 -0
  23. data/vendor/depot_tools/recipes/recipe_modules/bot_update/api.py +67 -51
  24. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/basic.json +7 -0
  25. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/basic_luci.json +7 -0
  26. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/basic_with_branch_heads.json +7 -0
  27. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/clobber.json +7 -0
  28. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/deprecated_got_revision_mapping.json +1 -1
  29. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/gerrit_no_rebase_patch_ref.json +7 -0
  30. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/gerrit_no_reset.json +7 -0
  31. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/input_commit_with_id_without_repo.json +7 -0
  32. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/multiple_patch_refs.json +7 -0
  33. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/no_apply_patch_on_gclient.json +8 -1
  34. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/no_cp_checkout_HEAD.json +57 -0
  35. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/no_cp_checkout_a_branch_head.json +59 -0
  36. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/no_cp_checkout_a_specific_commit.json +57 -0
  37. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/no_cp_checkout_master.json +59 -0
  38. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/refs.json +7 -0
  39. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/reset_root_solution_revision.json +7 -0
  40. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/tryjob_fail.json +1 -1
  41. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/tryjob_fail_patch.json +9 -2
  42. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/tryjob_fail_patch_download.json +8 -1
  43. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/tryjob_gerrit_angle.json +8 -1
  44. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/tryjob_gerrit_branch_heads.json +8 -1
  45. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/tryjob_gerrit_feature_branch.json +11 -2
  46. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/tryjob_gerrit_v8_feature_branch.json +9 -2
  47. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/tryjob_gerrit_webrtc.json +8 -1
  48. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/tryjob_v8.json +8 -1
  49. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/tryjob_v8_head_by_default.json +8 -1
  50. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/with_manifest_name.json +0 -7
  51. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.expected/with_tags.json +7 -0
  52. data/vendor/depot_tools/recipes/recipe_modules/bot_update/examples/full.py +92 -3
  53. data/vendor/depot_tools/recipes/recipe_modules/gclient/config.py +2 -2
  54. data/vendor/depot_tools/scm.py +1 -1
  55. data/vendor/depot_tools/upload_to_google_storage.py +1 -1
  56. metadata +8 -4
@@ -68,6 +68,24 @@ import watchlists
68
68
 
69
69
  __version__ = '2.0'
70
70
 
71
+ # Traces for git push will be stored in a traces directory inside the
72
+ # depot_tools checkout.
73
+ DEPOT_TOOLS = os.path.dirname(os.path.abspath(__file__))
74
+ TRACES_DIR = os.path.join(DEPOT_TOOLS, 'traces')
75
+
76
+ # When collecting traces, Git hashes will be reduced to 6 characters to reduce
77
+ # the size after compression.
78
+ GIT_HASH_RE = re.compile(r'\b([a-f0-9]{6})[a-f0-9]{34}\b', flags=re.I)
79
+ # Used to redact the cookies from the gitcookies file.
80
+ GITCOOKIES_REDACT_RE = re.compile(r'1/.*')
81
+
82
+ TRACES_MESSAGE = (
83
+ 'When filing a bug, be sure to include the traces found at:\n'
84
+ ' %s.zip\n'
85
+ 'Consider including the git config and gitcookies,\n'
86
+ 'which we have packed for you at:\n'
87
+ ' %s.zip\n')
88
+
71
89
  COMMIT_BOT_EMAIL = 'commit-bot@chromium.org'
72
90
  POSTUPSTREAM_HOOK = '.git/hooks/post-cl-land'
73
91
  DESCRIPTION_BACKUP_FILE = '~/.git_cl_description_backup'
@@ -1244,7 +1262,7 @@ class Changelist(object):
1244
1262
  def GetUpstreamBranch(self):
1245
1263
  if self.upstream_branch is None:
1246
1264
  remote, upstream_branch = self.FetchUpstreamTuple(self.GetBranch())
1247
- if remote is not '.':
1265
+ if remote != '.':
1248
1266
  upstream_branch = upstream_branch.replace('refs/heads/',
1249
1267
  'refs/remotes/%s/' % remote)
1250
1268
  upstream_branch = upstream_branch.replace('refs/branch-heads/',
@@ -2449,7 +2467,7 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
2449
2467
  part = parsed_url.fragment
2450
2468
  else:
2451
2469
  part = parsed_url.path
2452
- match = re.match('(/c(/.*/\+)?)?/(\d+)(/(\d+)?/?)?$', part)
2470
+ match = re.match(r'(/c(/.*/\+)?)?/(\d+)(/(\d+)?/?)?$', part)
2453
2471
  if match:
2454
2472
  return _ParsedIssueNumberArgument(
2455
2473
  issue=int(match.group(3)),
@@ -2478,6 +2496,83 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
2478
2496
  else:
2479
2497
  print('OK, will keep Gerrit commit-msg hook in place.')
2480
2498
 
2499
+ def _RunGitPushWithTraces(self, change_desc, refspec, refspec_opts):
2500
+ gclient_utils.safe_makedirs(TRACES_DIR)
2501
+
2502
+ # Create a temporary directory to store traces in. Traces will be compressed
2503
+ # and stored in a 'traces' dir inside depot_tools.
2504
+ traces_dir = tempfile.mkdtemp()
2505
+ trace_name = os.path.basename(traces_dir)
2506
+ traces_zip = os.path.join(TRACES_DIR, trace_name + '-traces')
2507
+ # Create a temporary dir to store git config and gitcookies in. It will be
2508
+ # compressed and stored next to the traces.
2509
+ git_info_dir = tempfile.mkdtemp()
2510
+ git_info_zip = os.path.join(TRACES_DIR, trace_name + '-git-info')
2511
+
2512
+ env = os.environ.copy()
2513
+ env['GIT_REDACT_COOKIES'] = 'o,SSO,GSSO_Uberproxy'
2514
+ env['GIT_TR2_EVENT'] = os.path.join(traces_dir, 'tr2-event')
2515
+ env['GIT_TRACE_CURL'] = os.path.join(traces_dir, 'trace-curl')
2516
+ env['GIT_TRACE_CURL_NO_DATA'] = '1'
2517
+ env['GIT_TRACE_PACKET'] = os.path.join(traces_dir, 'trace-packet')
2518
+
2519
+ try:
2520
+ push_returncode = 0
2521
+ before_push = time_time()
2522
+ push_stdout = gclient_utils.CheckCallAndFilter(
2523
+ ['git', 'push', self.GetRemoteUrl(), refspec],
2524
+ env=env,
2525
+ print_stdout=True,
2526
+ # Flush after every line: useful for seeing progress when running as
2527
+ # recipe.
2528
+ filter_fn=lambda _: sys.stdout.flush())
2529
+ except subprocess2.CalledProcessError as e:
2530
+ push_returncode = e.returncode
2531
+ DieWithError('Failed to create a change. Please examine output above '
2532
+ 'for the reason of the failure.\n'
2533
+ 'Hint: run command below to diagnose common Git/Gerrit '
2534
+ 'credential problems:\n'
2535
+ ' git cl creds-check\n' +
2536
+ TRACES_MESSAGE % (traces_zip, git_info_zip),
2537
+ change_desc)
2538
+ finally:
2539
+ execution_time = time_time() - before_push
2540
+ metrics.collector.add_repeated('sub_commands', {
2541
+ 'command': 'git push',
2542
+ 'execution_time': execution_time,
2543
+ 'exit_code': push_returncode,
2544
+ 'arguments': metrics_utils.extract_known_subcommand_args(refspec_opts),
2545
+ })
2546
+
2547
+ if push_returncode != 0:
2548
+ # Keep only the first 6 characters of the git hashes on the packet
2549
+ # trace. This greatly decreases size after compression.
2550
+ packet_traces = os.path.join(traces_dir, 'trace-packet')
2551
+ contents = gclient_utils.FileRead(packet_traces)
2552
+ gclient_utils.FileWrite(
2553
+ packet_traces, GIT_HASH_RE.sub(r'\1', contents))
2554
+ shutil.make_archive(traces_zip, 'zip', traces_dir)
2555
+
2556
+ # Collect and compress the git config and gitcookies.
2557
+ git_config = RunGit(['config', '-l'])
2558
+ gclient_utils.FileWrite(
2559
+ os.path.join(git_info_dir, 'git-config'),
2560
+ git_config)
2561
+
2562
+ cookie_auth = gerrit_util.Authenticator.get()
2563
+ if isinstance(cookie_auth, gerrit_util.CookiesAuthenticator):
2564
+ gitcookies_path = cookie_auth.get_gitcookies_path()
2565
+ gitcookies = gclient_utils.FileRead(gitcookies_path)
2566
+ gclient_utils.FileWrite(
2567
+ os.path.join(git_info_dir, 'gitcookies'),
2568
+ GITCOOKIES_REDACT_RE.sub('REDACTED', gitcookies))
2569
+ shutil.make_archive(git_info_zip, 'zip', git_info_dir)
2570
+
2571
+ gclient_utils.rmtree(git_info_dir)
2572
+ gclient_utils.rmtree(traces_dir)
2573
+
2574
+ return push_stdout
2575
+
2481
2576
  def CMDUploadChange(self, options, git_diff_args, custom_cl_base, change):
2482
2577
  """Upload the current branch to Gerrit."""
2483
2578
  if options.squash and options.no_squash:
@@ -2727,30 +2822,7 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
2727
2822
  'spaces not allowed in refspec: "%s"' % refspec_suffix)
2728
2823
  refspec = '%s:refs/for/%s%s' % (ref_to_push, branch, refspec_suffix)
2729
2824
 
2730
- try:
2731
- push_returncode = 0
2732
- before_push = time_time()
2733
- push_stdout = gclient_utils.CheckCallAndFilter(
2734
- ['git', 'push', self.GetRemoteUrl(), refspec],
2735
- print_stdout=True,
2736
- # Flush after every line: useful for seeing progress when running as
2737
- # recipe.
2738
- filter_fn=lambda _: sys.stdout.flush())
2739
- except subprocess2.CalledProcessError as e:
2740
- push_returncode = e.returncode
2741
- DieWithError('Failed to create a change. Please examine output above '
2742
- 'for the reason of the failure.\n'
2743
- 'Hint: run command below to diagnose common Git/Gerrit '
2744
- 'credential problems:\n'
2745
- ' git cl creds-check\n',
2746
- change_desc)
2747
- finally:
2748
- metrics.collector.add_repeated('sub_commands', {
2749
- 'command': 'git push',
2750
- 'execution_time': time_time() - before_push,
2751
- 'exit_code': push_returncode,
2752
- 'arguments': metrics_utils.extract_known_subcommand_args(refspec_opts),
2753
- })
2825
+ push_stdout = self._RunGitPushWithTraces(change_desc, refspec, refspec_opts)
2754
2826
 
2755
2827
  if options.squash:
2756
2828
  regex = re.compile(r'remote:\s+https?://[\w\-\.\+\/#]*/(\d+)\s.*')
@@ -15,7 +15,7 @@
15
15
  "deps": {
16
16
  "recipe_engine": {
17
17
  "branch": "master",
18
- "revision": "f6dd8332ee794be50574a5887a6efa6ff4516971",
18
+ "revision": "0589a429cf3c164004dae4ced4c75784a50afd81",
19
19
  "url": "https://chromium.googlesource.com/infra/luci/recipes-py.git"
20
20
  }
21
21
  },
@@ -66,7 +66,7 @@ def main():
66
66
  backend = sys.argv[1]
67
67
  output = sys.stdin.read().rstrip()
68
68
 
69
- callout_re = re.compile('\x1b\[(\d+)c\n')
69
+ callout_re = re.compile(r'\x1b\[(\d+)c\n')
70
70
  callouts = collections.defaultdict(int)
71
71
  for i, line in enumerate(output.splitlines(True)):
72
72
  m = callout_re.match(line)
@@ -34,8 +34,8 @@ First, some words about what data we are **NOT** collecting:
34
34
 
35
35
  The metrics we're collecting are:
36
36
 
37
- - A timestamp, with a week resolution.
38
- - The age of your depot\_tools checkout, with a week resolution.
37
+ - The time when the command was run.
38
+ - The age of your depot\_tools checkout.
39
39
  - Your version of Python (in the format major.minor.micro).
40
40
  - Your version of Git (in the format major.minor.micro).
41
41
  - The OS of your machine (i.e. win, linux or mac).
@@ -47,7 +47,7 @@ The metrics we're collecting are:
47
47
  - The exit code.
48
48
  - The project you're working on. We only record data about projects you can
49
49
  fetch using depot\_tools' fetch command (e.g. Chromium, WebRTC, V8, etc)
50
- - The age of your project checkout, with a week resolution.
50
+ - The age of your project checkout.
51
51
  - What features are you using in your DEPS and .gclient files. For example:
52
52
  - Are you setting `use_relative_paths=True`?
53
53
  - Are you using `recursedeps`?
@@ -198,6 +198,7 @@ class MetricsCollector(object):
198
198
  return func(*args, **kwargs)
199
199
 
200
200
  self._collecting_metrics = True
201
+ self.add('metrics_version', metrics_utils.CURRENT_VERSION)
201
202
  self.add('command', command_name)
202
203
  try:
203
204
  start = time.time()
@@ -213,14 +214,14 @@ class MetricsCollector(object):
213
214
  self.add('exit_code', exit_code)
214
215
 
215
216
  # Add metrics regarding environment information.
216
- self.add('timestamp', metrics_utils.seconds_to_weeks(time.time()))
217
+ self.add('timestamp', int(time.time()))
217
218
  self.add('python_version', metrics_utils.get_python_version())
218
219
  self.add('host_os', gclient_utils.GetMacWinOrLinux())
219
220
  self.add('host_arch', detect_host_arch.HostArch())
220
221
 
221
222
  depot_tools_age = metrics_utils.get_repo_timestamp(DEPOT_TOOLS)
222
223
  if depot_tools_age is not None:
223
- self.add('depot_tools_age', depot_tools_age)
224
+ self.add('depot_tools_age', int(depot_tools_age))
224
225
 
225
226
  git_version = metrics_utils.get_git_version()
226
227
  if git_version:
@@ -183,15 +183,6 @@ def return_code_from_exception(exception):
183
183
  return 1
184
184
 
185
185
 
186
- def seconds_to_weeks(duration):
187
- """Transform a |duration| from seconds to weeks approximately.
188
-
189
- Drops the lowest 19 bits of the integer representation, which amounts to
190
- about 6 days.
191
- """
192
- return int(duration) >> 19
193
-
194
-
195
186
  def extract_known_subcommand_args(args):
196
187
  """Extract the known arguments from the passed list of args."""
197
188
  known_args = []
@@ -276,8 +267,7 @@ def get_repo_timestamp(path_to_repo):
276
267
  if p.returncode != 0:
277
268
  return None
278
269
 
279
- # Get the age of the checkout in weeks.
280
- return seconds_to_weeks(stdout.strip())
270
+ return stdout.strip()
281
271
 
282
272
  def print_boxed_text(out, min_width, lines):
283
273
  [EW, NS, SE, SW, NE, NW] = list('=|++++')
@@ -125,7 +125,9 @@ class Database(object):
125
125
  # Mapping of owners to the paths or globs they own.
126
126
  self._owners_to_paths = {EVERYONE: set()}
127
127
 
128
- # Mapping of paths to authorized owners.
128
+ # Mappings of paths to authorized owners, via the longest path with no
129
+ # glob in it.
130
+ # For instance "chrome/browser" -> "chrome/browser/*.h" -> ("john", "maria")
129
131
  self._paths_to_owners = {}
130
132
 
131
133
  # Mapping reviewers to the preceding comment per file in the OWNERS files.
@@ -134,9 +136,11 @@ class Database(object):
134
136
  # Cache of compiled regexes for _fnmatch()
135
137
  self._fnmatch_cache = {}
136
138
 
137
- # Set of paths that stop us from looking above them for owners.
138
- # (This is implicitly true for the root directory).
139
- self._stop_looking = set([''])
139
+ # Sets of paths that stop us from looking above them for owners.
140
+ # (This is implicitly true for the root directory). They are organized
141
+ # by glob free path so that a 'ui/events/devices/mojo/*_struct_traits*.*'
142
+ # rule would be found in 'ui/events/devices/mojo'.
143
+ self._stop_looking = {'': set([''])}
140
144
 
141
145
  # Set of files which have already been read.
142
146
  self.read_files = set()
@@ -219,23 +223,59 @@ class Database(object):
219
223
 
220
224
  def load_data_needed_for(self, files):
221
225
  self._read_global_comments()
226
+ visited_dirs = set()
222
227
  for f in files:
223
228
  dirpath = self.os_path.dirname(f)
224
- while not self._owners_for(dirpath):
229
+ while dirpath not in visited_dirs:
230
+ visited_dirs.add(dirpath)
231
+
232
+ obj_owners = self._owners_for(dirpath)
233
+ if obj_owners:
234
+ break
225
235
  self._read_owners(self.os_path.join(dirpath, 'OWNERS'))
226
236
  if self._should_stop_looking(dirpath):
227
237
  break
238
+
228
239
  dirpath = self.os_path.dirname(dirpath)
229
240
 
230
241
  def _should_stop_looking(self, objname):
231
- return any(self._fnmatch(objname, stop_looking)
232
- for stop_looking in self._stop_looking)
242
+ dirname = objname
243
+ while True:
244
+ if dirname in self._stop_looking:
245
+ if any(self._fnmatch(objname, stop_looking)
246
+ for stop_looking in self._stop_looking[dirname]):
247
+ return True
248
+ up_dirname = self.os_path.dirname(dirname)
249
+ if up_dirname == dirname:
250
+ break
251
+ dirname = up_dirname
252
+ return False
253
+
254
+ def _get_root_affected_dir(self, obj_name):
255
+ """Returns the deepest directory/path that is affected by a file pattern
256
+ |obj_name|."""
257
+ root_affected_dir = obj_name
258
+ while '*' in root_affected_dir:
259
+ root_affected_dir = self.os_path.dirname(root_affected_dir)
260
+ return root_affected_dir
233
261
 
234
262
  def _owners_for(self, objname):
235
263
  obj_owners = set()
236
- for owned_path, path_owners in self._paths_to_owners.iteritems():
237
- if self._fnmatch(objname, owned_path):
238
- obj_owners |= path_owners
264
+
265
+ # Possibly relevant rules can be found stored at every directory
266
+ # level so iterate upwards, looking for them.
267
+ dirname = objname
268
+ while True:
269
+ dir_owner_rules = self._paths_to_owners.get(dirname)
270
+ if dir_owner_rules:
271
+ for owned_path, path_owners in dir_owner_rules.iteritems():
272
+ if self._fnmatch(objname, owned_path):
273
+ obj_owners |= path_owners
274
+ up_dirname = self.os_path.dirname(dirname)
275
+ if up_dirname == dirname:
276
+ break
277
+ dirname = up_dirname
278
+
239
279
  return obj_owners
240
280
 
241
281
  def _read_owners(self, path):
@@ -268,7 +308,7 @@ class Database(object):
268
308
  line = line.strip()
269
309
  if line.startswith('#'):
270
310
  if is_toplevel:
271
- m = re.match('#\s*OWNERS_STATUS\s+=\s+(.+)$', line)
311
+ m = re.match(r'#\s*OWNERS_STATUS\s+=\s+(.+)$', line)
272
312
  if m:
273
313
  self._status_file = m.group(1).strip()
274
314
  continue
@@ -293,7 +333,8 @@ class Database(object):
293
333
 
294
334
  previous_line_was_blank = False
295
335
  if line == 'set noparent':
296
- self._stop_looking.add(dirpath)
336
+ self._stop_looking.setdefault(
337
+ self._get_root_affected_dir(dirpath), set()).add(dirpath)
297
338
  continue
298
339
 
299
340
  m = re.match('per-file (.+)=(.+)', line)
@@ -364,7 +405,8 @@ class Database(object):
364
405
 
365
406
  def _add_entry(self, owned_paths, directive, owners_path, lineno, comment):
366
407
  if directive == 'set noparent':
367
- self._stop_looking.add(owned_paths)
408
+ self._stop_looking.setdefault(
409
+ self._get_root_affected_dir(owned_paths), set()).add(owned_paths)
368
410
  elif directive.startswith('file:'):
369
411
  include_file = self._resolve_include(directive[5:], owners_path, lineno)
370
412
  if not include_file:
@@ -374,13 +416,17 @@ class Database(object):
374
416
  included_owners = self._read_just_the_owners(include_file)
375
417
  for owner in included_owners:
376
418
  self._owners_to_paths.setdefault(owner, set()).add(owned_paths)
377
- self._paths_to_owners.setdefault(owned_paths, set()).add(owner)
419
+ self._paths_to_owners.setdefault(
420
+ self._get_root_affected_dir(owned_paths), {}).setdefault(
421
+ owned_paths, set()).add(owner)
378
422
  elif self.email_regexp.match(directive) or directive == EVERYONE:
379
423
  if comment:
380
424
  self.comments.setdefault(directive, {})
381
425
  self.comments[directive][owned_paths] = comment
382
426
  self._owners_to_paths.setdefault(directive, set()).add(owned_paths)
383
- self._paths_to_owners.setdefault(owned_paths, set()).add(directive)
427
+ self._paths_to_owners.setdefault(
428
+ self._get_root_affected_dir(owned_paths), {}).setdefault(
429
+ owned_paths, set()).add(directive)
384
430
  else:
385
431
  raise SyntaxErrorInOwnersFile(owners_path, lineno,
386
432
  ('"%s" is not a "set noparent", file include, "*", '
@@ -463,29 +509,60 @@ class Database(object):
463
509
  all_possible_owners[o] = new_dirs
464
510
  return suggested_owners
465
511
 
466
- def all_possible_owners(self, dirs, author):
512
+ def _all_possible_owners_for_dir_or_file(self, dir_or_file, author,
513
+ cache):
514
+ """Returns a dict of {potential owner: (dir_or_file, distance)} mappings.
515
+ """
516
+ assert not dir_or_file.startswith("/")
517
+ res = cache.get(dir_or_file)
518
+ if res is None:
519
+ res = {}
520
+ dirname = dir_or_file
521
+ for owner in self._owners_for(dirname):
522
+ if author and owner == author:
523
+ continue
524
+ res.setdefault(owner, [])
525
+ res[owner] = (dir_or_file, 1)
526
+ if not self._should_stop_looking(dirname):
527
+ dirname = self.os_path.dirname(dirname)
528
+
529
+ parent_res = self._all_possible_owners_for_dir_or_file(dirname,
530
+ author, cache)
531
+
532
+ # Merge the parent information with our information, adjusting
533
+ # distances as necessary, and replacing the parent directory
534
+ # names with our names.
535
+ for owner, par_dir_and_distances in parent_res.iteritems():
536
+ if owner in res:
537
+ # If the same person is in multiple OWNERS files above a given
538
+ # directory, only count the closest one.
539
+ continue
540
+ parent_distance = par_dir_and_distances[1]
541
+ res[owner] = (dir_or_file, parent_distance + 1)
542
+
543
+ cache[dir_or_file] = res
544
+
545
+ return res
546
+
547
+ def all_possible_owners(self, dirs_and_files, author):
467
548
  """Returns a dict of {potential owner: (dir, distance)} mappings.
468
549
 
469
550
  A distance of 1 is the lowest/closest possible distance (which makes the
470
551
  subsequent math easier).
471
552
  """
553
+
554
+ all_possible_owners_for_dir_or_file_cache = {}
472
555
  all_possible_owners = {}
473
- for current_dir in dirs:
474
- dirname = current_dir
475
- distance = 1
476
- while True:
477
- for owner in self._owners_for(dirname):
478
- if author and owner == author:
479
- continue
480
- all_possible_owners.setdefault(owner, [])
481
- # If the same person is in multiple OWNERS files above a given
482
- # directory, only count the closest one.
483
- if not any(current_dir == el[0] for el in all_possible_owners[owner]):
484
- all_possible_owners[owner].append((current_dir, distance))
485
- if self._should_stop_looking(dirname):
486
- break
487
- dirname = self.os_path.dirname(dirname)
488
- distance += 1
556
+ for current_dir in dirs_and_files:
557
+ dir_owners = self._all_possible_owners_for_dir_or_file(
558
+ current_dir, author,
559
+ all_possible_owners_for_dir_or_file_cache)
560
+ for owner, dir_and_distance in dir_owners.iteritems():
561
+ if owner in all_possible_owners:
562
+ all_possible_owners[owner].append(dir_and_distance)
563
+ else:
564
+ all_possible_owners[owner] = [dir_and_distance]
565
+
489
566
  return all_possible_owners
490
567
 
491
568
  def _fnmatch(self, filename, pattern):