libv8 5.3.332.38.5 → 5.6.326.50.0beta1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.travis.yml +0 -1
- data/CHANGELOG.md +2 -0
- data/ext/libv8/builder.rb +2 -2
- data/lib/libv8/version.rb +1 -1
- data/patches/0001-Build-a-standalone-static-library.patch +4 -4
- data/patches/0002-Don-t-compile-unnecessary-stuff.patch +16 -11
- data/patches/0003-Use-the-fPIC-flag-for-the-static-library.patch +4 -4
- data/patches/{0005-Do-not-embed-debug-symbols-in-macOS-libraries.patch → 0004-Do-not-embed-debug-symbols-in-macOS-libraries.patch} +4 -4
- data/vendor/depot_tools/.gitignore +4 -0
- data/vendor/depot_tools/OWNERS +0 -2
- data/vendor/depot_tools/PRESUBMIT.py +20 -23
- data/vendor/depot_tools/README.gclient.md +3 -3
- data/vendor/depot_tools/README.git-cl.md +13 -12
- data/vendor/depot_tools/README.md +2 -3
- data/vendor/depot_tools/WATCHLISTS +0 -1
- data/vendor/depot_tools/appengine_mapper.py +23 -0
- data/vendor/depot_tools/apply_issue.py +2 -8
- data/vendor/depot_tools/bootstrap/win/README.md +1 -8
- data/vendor/depot_tools/bootstrap/win/git_bootstrap.py +6 -16
- data/vendor/depot_tools/bootstrap/win/git_version.txt +1 -1
- data/vendor/depot_tools/bootstrap/win/git_version_bleeding_edge.txt +1 -1
- data/vendor/depot_tools/checkout.py +20 -433
- data/vendor/depot_tools/cipd +73 -0
- data/vendor/depot_tools/cipd.bat +12 -0
- data/vendor/depot_tools/cipd.ps1 +57 -0
- data/vendor/depot_tools/cipd_client_version +1 -0
- data/vendor/depot_tools/clang_format.py +9 -6
- data/vendor/depot_tools/clang_format_merge_driver +8 -0
- data/vendor/depot_tools/clang_format_merge_driver.bat +11 -0
- data/vendor/depot_tools/clang_format_merge_driver.py +67 -0
- data/vendor/depot_tools/codereview.settings +3 -2
- data/vendor/depot_tools/commit_queue.py +1 -1
- data/vendor/depot_tools/cpplint.py +2 -0
- data/vendor/depot_tools/fetch.py +1 -54
- data/vendor/depot_tools/fetch_configs/android.py +2 -2
- data/vendor/depot_tools/fetch_configs/breakpad.py +2 -3
- data/vendor/depot_tools/fetch_configs/chromium.py +2 -3
- data/vendor/depot_tools/fetch_configs/crashpad.py +2 -2
- data/vendor/depot_tools/fetch_configs/dart.py +2 -3
- data/vendor/depot_tools/fetch_configs/dartino.py +2 -3
- data/vendor/depot_tools/fetch_configs/dartium.py +2 -3
- data/vendor/depot_tools/fetch_configs/depot_tools.py +3 -6
- data/vendor/depot_tools/fetch_configs/gyp.py +2 -3
- data/vendor/depot_tools/fetch_configs/infra.py +2 -2
- data/vendor/depot_tools/fetch_configs/infra_internal.py +2 -2
- data/vendor/depot_tools/fetch_configs/ios.py +2 -2
- data/vendor/depot_tools/fetch_configs/ios_internal.py +2 -3
- data/vendor/depot_tools/fetch_configs/mojo.py +2 -3
- data/vendor/depot_tools/fetch_configs/nacl.py +2 -3
- data/vendor/depot_tools/fetch_configs/naclports.py +2 -3
- data/vendor/depot_tools/fetch_configs/pdfium.py +2 -2
- data/vendor/depot_tools/fetch_configs/skia.py +2 -2
- data/vendor/depot_tools/fetch_configs/skia_buildbot.py +2 -2
- data/vendor/depot_tools/fetch_configs/syzygy.py +2 -2
- data/vendor/depot_tools/fetch_configs/v8.py +2 -3
- data/vendor/depot_tools/fetch_configs/webrtc.py +5 -3
- data/vendor/depot_tools/fetch_configs/webrtc_android.py +2 -2
- data/vendor/depot_tools/fetch_configs/webrtc_ios.py +2 -2
- data/vendor/depot_tools/fix_encoding.py +6 -6
- data/vendor/depot_tools/gclient.py +136 -368
- data/vendor/depot_tools/gclient_scm.py +108 -647
- data/vendor/depot_tools/gclient_utils.py +22 -86
- data/vendor/depot_tools/gerrit_client.py +105 -0
- data/vendor/depot_tools/gerrit_util.py +174 -67
- data/vendor/depot_tools/git-crrev-parse +6 -7
- data/vendor/depot_tools/git-gs +1 -1
- data/vendor/depot_tools/git_cache.py +68 -18
- data/vendor/depot_tools/git_cherry_pick_upload.py +4 -4
- data/vendor/depot_tools/git_cl.py +1028 -961
- data/vendor/depot_tools/git_common.py +2 -3
- data/vendor/depot_tools/git_drover.py +0 -1
- data/vendor/depot_tools/git_footers.py +3 -43
- data/vendor/depot_tools/git_rebase_update.py +9 -1
- data/vendor/depot_tools/git_squash_branch.py +1 -1
- data/vendor/depot_tools/infra/config/cq.cfg +8 -1
- data/vendor/depot_tools/infra/config/recipes.cfg +1 -1
- data/vendor/depot_tools/man/html/depot_tools.html +3 -11
- data/vendor/depot_tools/man/html/depot_tools_tutorial.html +9 -9
- data/vendor/depot_tools/man/html/git-cherry-pick-upload.html +2 -2
- data/vendor/depot_tools/man/html/git-drover.html +17 -17
- data/vendor/depot_tools/man/html/git-footers.html +2 -2
- data/vendor/depot_tools/man/html/git-freeze.html +4 -4
- data/vendor/depot_tools/man/html/git-hyper-blame.html +2 -2
- data/vendor/depot_tools/man/html/git-map-branches.html +2 -2
- data/vendor/depot_tools/man/html/git-map.html +2 -2
- data/vendor/depot_tools/man/html/git-mark-merge-base.html +2 -2
- data/vendor/depot_tools/man/html/git-nav-downstream.html +2 -2
- data/vendor/depot_tools/man/html/git-nav-upstream.html +2 -2
- data/vendor/depot_tools/man/html/git-new-branch.html +2 -2
- data/vendor/depot_tools/man/html/git-rebase-update.html +2 -2
- data/vendor/depot_tools/man/html/git-rename-branch.html +2 -2
- data/vendor/depot_tools/man/html/git-reparent-branch.html +2 -2
- data/vendor/depot_tools/man/html/git-retry.html +3 -3
- data/vendor/depot_tools/man/html/git-squash-branch.html +3 -3
- data/vendor/depot_tools/man/html/git-thaw.html +2 -2
- data/vendor/depot_tools/man/html/git-upstream-diff.html +3 -3
- data/vendor/depot_tools/man/man1/git-cherry-pick-upload.1 +4 -4
- data/vendor/depot_tools/man/man1/git-drover.1 +19 -19
- data/vendor/depot_tools/man/man1/git-footers.1 +4 -4
- data/vendor/depot_tools/man/man1/git-freeze.1 +6 -6
- data/vendor/depot_tools/man/man1/git-hyper-blame.1 +4 -4
- data/vendor/depot_tools/man/man1/git-map-branches.1 +4 -4
- data/vendor/depot_tools/man/man1/git-map.1 +4 -4
- data/vendor/depot_tools/man/man1/git-mark-merge-base.1 +4 -4
- data/vendor/depot_tools/man/man1/git-nav-downstream.1 +4 -4
- data/vendor/depot_tools/man/man1/git-nav-upstream.1 +4 -4
- data/vendor/depot_tools/man/man1/git-new-branch.1 +4 -4
- data/vendor/depot_tools/man/man1/git-rebase-update.1 +4 -4
- data/vendor/depot_tools/man/man1/git-rename-branch.1 +4 -4
- data/vendor/depot_tools/man/man1/git-reparent-branch.1 +4 -4
- data/vendor/depot_tools/man/man1/git-retry.1 +5 -5
- data/vendor/depot_tools/man/man1/git-squash-branch.1 +5 -5
- data/vendor/depot_tools/man/man1/git-thaw.1 +4 -4
- data/vendor/depot_tools/man/man1/git-upstream-diff.1 +5 -5
- data/vendor/depot_tools/man/man7/depot_tools.7 +5 -10
- data/vendor/depot_tools/man/man7/depot_tools_tutorial.7 +4 -4
- data/vendor/depot_tools/man/src/depot_tools.txt +1 -1
- data/vendor/depot_tools/man/src/depot_tools_tutorial.txt +7 -7
- data/vendor/depot_tools/man/src/filter_demo_output.py +2 -2
- data/vendor/depot_tools/man/src/git-footers.demo.1.sh +1 -1
- data/vendor/depot_tools/man/src/git-retry.txt +1 -1
- data/vendor/depot_tools/man/src/git-squash-branch.txt +2 -2
- data/vendor/depot_tools/man/src/git-upstream-diff.txt +1 -1
- data/vendor/depot_tools/my_activity.py +6 -3
- data/vendor/depot_tools/my_reviews.py +1 -1
- data/vendor/depot_tools/ninja +2 -2
- data/vendor/depot_tools/ninja-linux32 +0 -0
- data/vendor/depot_tools/ninja-linux64 +0 -0
- data/vendor/depot_tools/ninja-mac +0 -0
- data/vendor/depot_tools/ninja.exe +0 -0
- data/vendor/depot_tools/owners.py +14 -3
- data/vendor/depot_tools/presubmit_canned_checks.py +46 -67
- data/vendor/depot_tools/presubmit_support.py +109 -371
- data/vendor/depot_tools/pylintrc +83 -56
- data/vendor/depot_tools/recipe_modules/OWNERS +1 -0
- data/vendor/depot_tools/recipe_modules/bot_update/__init__.py +18 -9
- data/vendor/depot_tools/recipe_modules/bot_update/api.py +56 -55
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/basic.json +3 -7
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/basic_output_manifest.json +3 -7
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/basic_with_branch_heads.json +3 -7
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/buildbot.json +52 -0
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/clobber.json +19 -10
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/gerrit_no_rebase_patch_ref.json +19 -10
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/gerrit_no_reset.json +19 -10
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/no_shallow.json +19 -10
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/reset_root_solution_revision.json +19 -10
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/trychange.json +3 -7
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/trychange_oauth2.json +2 -54
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/trychange_oauth2_buildbot.json +56 -0
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/{forced.json → trychange_oauth2_json.json} +6 -9
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/trychange_oauth2_json_win.json +54 -0
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/tryjob.json +9 -9
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/tryjob_fail.json +9 -9
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/tryjob_fail_patch.json +9 -9
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/tryjob_fail_patch_download.json +9 -9
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/tryjob_gerrit_angle.json +20 -10
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/tryjob_gerrit_angle_deprecated.json +59 -0
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/tryjob_v8.json +9 -9
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/tryjob_v8_head_by_default.json +20 -10
- data/vendor/depot_tools/recipe_modules/bot_update/example.py +45 -63
- data/vendor/depot_tools/recipe_modules/bot_update/resources/bot_update.py +210 -807
- data/vendor/depot_tools/recipe_modules/bot_update/test_api.py +34 -45
- data/vendor/depot_tools/recipe_modules/cipd/api.py +59 -84
- data/vendor/depot_tools/recipe_modules/cipd/example.expected/basic.json +71 -117
- data/vendor/depot_tools/recipe_modules/cipd/example.expected/describe-failed.json +14 -60
- data/vendor/depot_tools/recipe_modules/cipd/example.expected/describe-many-instances.json +71 -117
- data/vendor/depot_tools/recipe_modules/cipd/example.expected/mac64.json +71 -117
- data/vendor/depot_tools/recipe_modules/cipd/example.expected/win64.json +71 -117
- data/vendor/depot_tools/recipe_modules/cipd/example.py +2 -12
- data/vendor/depot_tools/recipe_modules/cipd/test_api.py +0 -9
- data/vendor/depot_tools/recipe_modules/depot_tools/api.py +6 -0
- data/vendor/depot_tools/recipe_modules/depot_tools/example.expected/basic.json +7 -0
- data/vendor/depot_tools/recipe_modules/depot_tools/example.expected/win.json +7 -0
- data/vendor/depot_tools/recipe_modules/depot_tools/example.py +3 -0
- data/vendor/depot_tools/recipe_modules/gclient/__init__.py +4 -0
- data/vendor/depot_tools/recipe_modules/gclient/api.py +9 -22
- data/vendor/depot_tools/recipe_modules/gclient/config.py +18 -5
- data/vendor/depot_tools/recipe_modules/gclient/example.expected/basic.json +14 -14
- data/vendor/depot_tools/recipe_modules/gclient/example.expected/buildbot.json +211 -0
- data/vendor/depot_tools/recipe_modules/gclient/example.expected/revision.json +16 -14
- data/vendor/depot_tools/recipe_modules/gclient/example.expected/tryserver.json +16 -14
- data/vendor/depot_tools/recipe_modules/gclient/example.py +13 -11
- data/vendor/depot_tools/recipe_modules/gerrit/__init__.py +6 -0
- data/vendor/depot_tools/recipe_modules/gerrit/api.py +63 -0
- data/vendor/depot_tools/recipe_modules/gerrit/example.expected/basic.json +64 -0
- data/vendor/depot_tools/recipe_modules/gerrit/example.py +35 -0
- data/vendor/depot_tools/recipe_modules/gerrit/test_api.py +24 -0
- data/vendor/depot_tools/recipe_modules/git/__init__.py +4 -0
- data/vendor/depot_tools/recipe_modules/git/api.py +155 -142
- data/vendor/depot_tools/recipe_modules/git/example.expected/basic.json +43 -17
- data/vendor/depot_tools/recipe_modules/git/example.expected/basic_branch.json +43 -17
- data/vendor/depot_tools/recipe_modules/git/example.expected/basic_file_name.json +43 -17
- data/vendor/depot_tools/recipe_modules/git/example.expected/basic_hash.json +43 -17
- data/vendor/depot_tools/recipe_modules/git/example.expected/basic_ref.json +43 -17
- data/vendor/depot_tools/recipe_modules/git/example.expected/basic_submodule_update_force.json +43 -17
- data/vendor/depot_tools/recipe_modules/git/example.expected/can_fail_build.json +13 -13
- data/vendor/depot_tools/recipe_modules/git/example.expected/cannot_fail_build.json +43 -17
- data/vendor/depot_tools/recipe_modules/git/example.expected/cat-file_test.json +45 -19
- data/vendor/depot_tools/recipe_modules/git/example.expected/count-objects_delta.json +45 -19
- data/vendor/depot_tools/recipe_modules/git/example.expected/count-objects_failed.json +43 -17
- data/vendor/depot_tools/recipe_modules/git/example.expected/count-objects_with_bad_output.json +43 -17
- data/vendor/depot_tools/recipe_modules/git/example.expected/count-objects_with_bad_output_fails_build.json +8 -8
- data/vendor/depot_tools/recipe_modules/git/example.expected/curl_trace_file.json +44 -18
- data/vendor/depot_tools/recipe_modules/git/example.expected/git-cache-checkout.json +48 -22
- data/vendor/depot_tools/recipe_modules/git/example.expected/platform_win.json +43 -17
- data/vendor/depot_tools/recipe_modules/git/example.expected/rebase_failed.json +42 -16
- data/vendor/depot_tools/recipe_modules/git/example.expected/remote_not_origin.json +43 -17
- data/vendor/depot_tools/recipe_modules/git/example.expected/set_got_revision.json +43 -17
- data/vendor/depot_tools/recipe_modules/git/example.py +9 -3
- data/vendor/depot_tools/recipe_modules/git_cl/__init__.py +4 -0
- data/vendor/depot_tools/recipe_modules/git_cl/api.py +8 -8
- data/vendor/depot_tools/recipe_modules/git_cl/example.py +1 -1
- data/vendor/depot_tools/recipe_modules/gsutil/__init__.py +4 -0
- data/vendor/depot_tools/recipe_modules/gsutil/api.py +196 -0
- data/vendor/depot_tools/recipe_modules/gsutil/example.expected/basic.json +186 -0
- data/vendor/depot_tools/recipe_modules/gsutil/example.py +77 -0
- data/vendor/depot_tools/recipe_modules/gsutil/resources/gsutil_smart_retry.py +69 -0
- data/vendor/depot_tools/recipe_modules/infra_paths/__init__.py +3 -0
- data/vendor/depot_tools/recipe_modules/infra_paths/api.py +20 -3
- data/vendor/depot_tools/recipe_modules/infra_paths/example.expected/basic.json +3 -1
- data/vendor/depot_tools/recipe_modules/infra_paths/example.expected/paths_buildbot_linux.json +3 -1
- data/vendor/depot_tools/recipe_modules/infra_paths/example.expected/paths_buildbot_mac.json +3 -1
- data/vendor/depot_tools/recipe_modules/infra_paths/example.expected/paths_buildbot_win.json +3 -1
- data/vendor/depot_tools/recipe_modules/infra_paths/example.expected/paths_kitchen_linux.json +3 -1
- data/vendor/depot_tools/recipe_modules/infra_paths/example.expected/paths_kitchen_mac.json +3 -1
- data/vendor/depot_tools/recipe_modules/infra_paths/example.expected/paths_kitchen_win.json +3 -1
- data/vendor/depot_tools/recipe_modules/infra_paths/example.expected/paths_swarmbucket_linux.json +3 -1
- data/vendor/depot_tools/recipe_modules/infra_paths/example.expected/paths_swarmbucket_mac.json +3 -1
- data/vendor/depot_tools/recipe_modules/infra_paths/example.expected/paths_swarmbucket_win.json +3 -1
- data/vendor/depot_tools/recipe_modules/infra_paths/example.py +6 -1
- data/vendor/depot_tools/recipe_modules/infra_paths/path_config.py +4 -6
- data/vendor/depot_tools/recipe_modules/rietveld/__init__.py +5 -0
- data/vendor/depot_tools/recipe_modules/rietveld/api.py +12 -9
- data/vendor/depot_tools/recipe_modules/rietveld/example.expected/basic.json +2 -24
- data/vendor/depot_tools/recipe_modules/rietveld/example.expected/buildbot.json +30 -0
- data/vendor/depot_tools/recipe_modules/rietveld/example.py +12 -6
- data/vendor/depot_tools/recipe_modules/tryserver/__init__.py +4 -0
- data/vendor/depot_tools/recipe_modules/tryserver/api.py +46 -70
- data/vendor/depot_tools/recipe_modules/tryserver/example.expected/set_failure_hash_with_no_steps.json +8 -0
- data/vendor/depot_tools/recipe_modules/tryserver/example.expected/{with_svn_patch.json → with_gerrit_patch.json} +1 -31
- data/vendor/depot_tools/recipe_modules/tryserver/example.expected/with_gerrit_patch_deprecated.json +39 -0
- data/vendor/depot_tools/recipe_modules/tryserver/example.expected/with_git_patch.json +2 -2
- data/vendor/depot_tools/recipe_modules/tryserver/example.expected/with_git_patch_luci.json +8 -0
- data/vendor/depot_tools/recipe_modules/tryserver/example.expected/with_rietveld_patch.json +3 -3
- data/vendor/depot_tools/recipe_modules/tryserver/example.expected/with_rietveld_patch_new.json +3 -3
- data/vendor/depot_tools/recipe_modules/tryserver/example.expected/with_wrong_patch.json +1 -1
- data/vendor/depot_tools/recipe_modules/tryserver/example.expected/with_wrong_patch_new.json +1 -1
- data/vendor/depot_tools/recipe_modules/tryserver/example.py +35 -5
- data/vendor/depot_tools/recipes.py +52 -28
- data/vendor/depot_tools/repo +216 -69
- data/vendor/depot_tools/rietveld.py +20 -15
- data/vendor/depot_tools/roll_dep.py +1 -1
- data/vendor/depot_tools/scm.py +11 -826
- data/vendor/depot_tools/subprocess2.py +5 -5
- data/vendor/depot_tools/third_party/cq_client/README.depot_tools.md +2 -0
- data/vendor/depot_tools/third_party/cq_client/README.md +5 -1
- data/vendor/depot_tools/third_party/cq_client/cq.pb.go +183 -104
- data/vendor/depot_tools/third_party/cq_client/cq.proto +43 -27
- data/vendor/depot_tools/third_party/cq_client/cq_pb2.py +95 -29
- data/vendor/depot_tools/third_party/cq_client/testdata/cq_both.cfg +67 -0
- data/vendor/depot_tools/third_party/cq_client/testdata/cq_gerrit.cfg +1 -2
- data/vendor/depot_tools/third_party/cq_client/testdata/cq_rietveld.cfg +0 -3
- data/vendor/depot_tools/third_party/upload.py +44 -24
- data/vendor/depot_tools/win_toolchain/get_toolchain_if_necessary.py +0 -5
- metadata +38 -93
- data/patches/0004-Reinterpret-thread-hash-for-FreeBSD-too.patch +0 -25
- data/vendor/depot_tools/git-auto-svn +0 -6
- data/vendor/depot_tools/git_auto_svn.py +0 -122
- data/vendor/depot_tools/man/html/git-auto-svn.html +0 -837
- data/vendor/depot_tools/man/man1/git-auto-svn.1 +0 -113
- data/vendor/depot_tools/man/src/_git-auto-svn_desc.helper.txt +0 -1
- data/vendor/depot_tools/man/src/git-auto-svn.txt +0 -69
- data/vendor/depot_tools/recipe_modules/bot_update/example.expected/off.json +0 -43
- data/vendor/depot_tools/recipe_modules/cipd/example.expected/install-failed.json +0 -31
- data/vendor/depot_tools/recipe_modules/cipd/resources/bootstrap.py +0 -218
- data/vendor/depot_tools/recipe_modules/tryserver/test_api.py +0 -7
- data/vendor/depot_tools/third_party/gsutil/CHECKSUM +0 -1
- data/vendor/depot_tools/third_party/gsutil/COPYING +0 -202
- data/vendor/depot_tools/third_party/gsutil/LICENSE.third_party +0 -295
- data/vendor/depot_tools/third_party/gsutil/MANIFEST.in +0 -5
- data/vendor/depot_tools/third_party/gsutil/README +0 -38
- data/vendor/depot_tools/third_party/gsutil/README.chromium +0 -25
- data/vendor/depot_tools/third_party/gsutil/README.pkg +0 -49
- data/vendor/depot_tools/third_party/gsutil/ReleaseNotes.txt +0 -825
- data/vendor/depot_tools/third_party/gsutil/VERSION +0 -1
- data/vendor/depot_tools/third_party/gsutil/gslib/README +0 -5
- data/vendor/depot_tools/third_party/gsutil/gslib/__init__.py +0 -22
- data/vendor/depot_tools/third_party/gsutil/gslib/addlhelp/__init__.py +0 -15
- data/vendor/depot_tools/third_party/gsutil/gslib/addlhelp/acls.py +0 -234
- data/vendor/depot_tools/third_party/gsutil/gslib/addlhelp/anon.py +0 -57
- data/vendor/depot_tools/third_party/gsutil/gslib/addlhelp/command_opts.py +0 -116
- data/vendor/depot_tools/third_party/gsutil/gslib/addlhelp/dev.py +0 -139
- data/vendor/depot_tools/third_party/gsutil/gslib/addlhelp/metadata.py +0 -186
- data/vendor/depot_tools/third_party/gsutil/gslib/addlhelp/naming.py +0 -173
- data/vendor/depot_tools/third_party/gsutil/gslib/addlhelp/prod.py +0 -160
- data/vendor/depot_tools/third_party/gsutil/gslib/addlhelp/projects.py +0 -130
- data/vendor/depot_tools/third_party/gsutil/gslib/addlhelp/subdirs.py +0 -110
- data/vendor/depot_tools/third_party/gsutil/gslib/addlhelp/support.py +0 -86
- data/vendor/depot_tools/third_party/gsutil/gslib/addlhelp/versioning.py +0 -242
- data/vendor/depot_tools/third_party/gsutil/gslib/addlhelp/wildcards.py +0 -170
- data/vendor/depot_tools/third_party/gsutil/gslib/bucket_listing_ref.py +0 -175
- data/vendor/depot_tools/third_party/gsutil/gslib/command.py +0 -725
- data/vendor/depot_tools/third_party/gsutil/gslib/command_runner.py +0 -102
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/__init__.py +0 -15
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/cat.py +0 -131
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/chacl.py +0 -523
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/config.py +0 -662
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/cp.py +0 -1819
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/disablelogging.py +0 -101
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/enablelogging.py +0 -149
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/getacl.py +0 -82
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/getcors.py +0 -121
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/getdefacl.py +0 -86
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/getlogging.py +0 -137
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/getversioning.py +0 -116
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/getwebcfg.py +0 -122
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/help.py +0 -218
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/ls.py +0 -578
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/mb.py +0 -172
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/mv.py +0 -159
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/perfdiag.py +0 -903
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/rb.py +0 -113
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/rm.py +0 -237
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/setacl.py +0 -138
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/setcors.py +0 -145
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/setdefacl.py +0 -105
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/setmeta.py +0 -420
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/setversioning.py +0 -114
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/setwebcfg.py +0 -190
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/update.py +0 -305
- data/vendor/depot_tools/third_party/gsutil/gslib/commands/version.py +0 -150
- data/vendor/depot_tools/third_party/gsutil/gslib/exception.py +0 -76
- data/vendor/depot_tools/third_party/gsutil/gslib/help_provider.py +0 -81
- data/vendor/depot_tools/third_party/gsutil/gslib/name_expansion.py +0 -550
- data/vendor/depot_tools/third_party/gsutil/gslib/no_op_auth_plugin.py +0 -30
- data/vendor/depot_tools/third_party/gsutil/gslib/plurality_checkable_iterator.py +0 -56
- data/vendor/depot_tools/third_party/gsutil/gslib/project_id.py +0 -67
- data/vendor/depot_tools/third_party/gsutil/gslib/storage_uri_builder.py +0 -56
- data/vendor/depot_tools/third_party/gsutil/gslib/thread_pool.py +0 -79
- data/vendor/depot_tools/third_party/gsutil/gslib/util.py +0 -167
- data/vendor/depot_tools/third_party/gsutil/gslib/wildcard_iterator.py +0 -498
- data/vendor/depot_tools/third_party/gsutil/gsutil +0 -384
- data/vendor/depot_tools/third_party/gsutil/gsutil.spec.in +0 -75
- data/vendor/depot_tools/third_party/gsutil/oauth2_plugin/__init__.py +0 -22
- data/vendor/depot_tools/third_party/gsutil/oauth2_plugin/oauth2_client.py +0 -630
- data/vendor/depot_tools/third_party/gsutil/oauth2_plugin/oauth2_client_test.py +0 -374
- data/vendor/depot_tools/third_party/gsutil/oauth2_plugin/oauth2_helper.py +0 -103
- data/vendor/depot_tools/third_party/gsutil/oauth2_plugin/oauth2_plugin.py +0 -24
- data/vendor/depot_tools/third_party/gsutil/pkg_util.py +0 -60
- data/vendor/depot_tools/third_party/gsutil/plugins/__init__.py +0 -0
- data/vendor/depot_tools/third_party/gsutil/plugins/sso_auth.py +0 -105
data/vendor/depot_tools/git-crrev-parse
CHANGED
@@ -6,19 +6,19 @@
 
 # This git extension converts a chromium commit number to its git commit hash.
 # It accepts the following input formats:
-#
+#
 # $ git crrev-parse Cr-Commit-Position: refs/heads/master@{#311769}
 # $ git crrev-parse ' Cr-Commit-Position: refs/heads/master@{#311769}'
 # $ git crrev-parse 'Cr-Commit-Position: refs/heads/master@{#311769}'
 # $ git crrev-parse refs/heads/master@{#311769}
-#
+#
 # It also works for branches (assuming you have branches in your local
 # checkout):
-#
+#
 # $ git crrev-parse refs/branch-heads/2278@{#2}
-#
+#
 # If you don't specify a branch, refs/heads/master is assumed:
-#
+#
 # $ git crrev-parse @{#311769}
 # $ git crrev-parse 311769
 
@@ -41,11 +41,10 @@ while [ -n "$1" ]; do
   remote_ref="${remote_ref/refs\/branch-heads/refs\/remotes\/branch-heads}"
   num="${commit_pos#*@\{\#}"
   num="${num%\}}"
-
   if [ -z "$ref" -o -z "$num" ]; then
     git rev-parse "$1"
   else
-    grep_str="Cr-Commit-Position: $ref@{#$num}"
+    grep_str="^Cr-Commit-Position: $ref@{#$num}"
    git rev-list -n 1 --grep="$grep_str" "$remote_ref"
  fi
 
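For reference, here is a minimal Python sketch (not part of depot_tools) of the lookup that the patched `git-crrev-parse` performs. The anchored `^Cr-Commit-Position` pattern is the fix above; the remote ref and the example position number are illustrative assumptions.

```python
# Illustrative sketch only: resolve a Chromium commit position to a git hash
# the same way the patched git-crrev-parse shell script does.
import subprocess

def commit_for_position(num, ref='refs/heads/master',
                        remote_ref='refs/remotes/origin/master'):
    # Anchor the footer at the start of a line, mirroring the grep fix above.
    grep_str = '^Cr-Commit-Position: %s@{#%d}' % (ref, num)
    out = subprocess.check_output(
        ['git', 'rev-list', '-n', '1', '--grep=%s' % grep_str, remote_ref])
    return out.strip().decode() or None

if __name__ == '__main__':
    # 311769 is the example number used in the script's own comments.
    print(commit_for_position(311769))
```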
data/vendor/depot_tools/git-gs
CHANGED
@@ -6,4 +6,4 @@ git grep -n -e "$@" -- "*.h" "*.hpp" "*.cpp" "*.c" "*.cc" "*.cpp" "*.inl"\
   "*.grd" "*.grdp" "*.idl" "*.m" "*.mm" "*.py" "*.sh" "*.cfg" "*.tac" "*.go"\
   "*.vcproj" "*.vsprops" "*.make" "*.gyp" "*.gypi" "*.isolate" "*.java"\
   "*.js" "*.html" "*.css" "*.ebuild" "*.pl" "*.pm" "*.yaml" "*.gn" "*.gni"\
-  "*.json" "DEPS" "*/DEPS"
+  "*.json" "DEPS" "*/DEPS" "*.mojom"
data/vendor/depot_tools/git_cache.py
CHANGED
@@ -29,7 +29,7 @@ GC_AUTOPACKLIMIT = 50
 GIT_CACHE_CORRUPT_MESSAGE = 'WARNING: The Git cache is corrupt.'
 
 try:
-  # pylint: disable=
+  # pylint: disable=undefined-variable
   WinErr = WindowsError
 except NameError:
   class WinErr(Exception):
@@ -41,6 +41,42 @@ class LockError(Exception):
 class ClobberNeeded(Exception):
   pass
 
+
+def exponential_backoff_retry(fn, excs=(Exception,), name=None, count=10,
+                              sleep_time=0.25, printerr=None):
+  """Executes |fn| up to |count| times, backing off exponentially.
+
+  Args:
+    fn (callable): The function to execute. If this raises a handled
+        exception, the function will retry with exponential backoff.
+    excs (tuple): A tuple of Exception types to handle. If one of these is
+        raised by |fn|, a retry will be attempted. If |fn| raises an Exception
+        that is not in this list, it will immediately pass through. If |excs|
+        is empty, the Exception base class will be used.
+    name (str): Optional operation name to print in the retry string.
+    count (int): The number of times to try before allowing the exception to
+        pass through.
+    sleep_time (float): The initial number of seconds to sleep in between
+        retries. This will be doubled each retry.
+    printerr (callable): Function that will be called with the error string upon
+        failures. If None, |logging.warning| will be used.
+
+  Returns: The return value of the successful fn.
+  """
+  printerr = printerr or logging.warning
+  for i in xrange(count):
+    try:
+      return fn()
+    except excs as e:
+      if (i+1) >= count:
+        raise
+
+      printerr('Retrying %s in %.2f second(s) (%d / %d attempts): %s' % (
+          (name or 'operation'), sleep_time, (i+1), count, e))
+      time.sleep(sleep_time)
+      sleep_time *= 2
+
+
 class Lockfile(object):
   """Class to represent a cross-platform process-specific lockfile."""
 
@@ -79,13 +115,16 @@ class Lockfile(object):
     """
     if sys.platform == 'win32':
       lockfile = os.path.normcase(self.lockfile)
-
+
+      def delete():
        exitcode = subprocess.call(['cmd.exe', '/c',
                                    'del', '/f', '/q', lockfile])
-        if exitcode
-
-
-
+        if exitcode != 0:
+          raise LockError('Failed to remove lock: %s' % (lockfile,))
+      exponential_backoff_retry(
+          delete,
+          excs=(LockError,),
+          name='del [%s]' % (lockfile,))
     else:
       os.remove(self.lockfile)
 
@@ -181,7 +220,7 @@ class Mirror(object):
     else:
       self.print = print
 
-  def print_without_file(self, message, **
+  def print_without_file(self, message, **_kwargs):
     self.print_func(message)
 
   @property
@@ -230,6 +269,16 @@ class Mirror(object):
     setattr(cls, 'cachepath', cachepath)
     return getattr(cls, 'cachepath')
 
+  def Rename(self, src, dst):
+    # This is somehow racy on Windows.
+    # Catching OSError because WindowsError isn't portable and
+    # pylint complains.
+    exponential_backoff_retry(
+        lambda: os.rename(src, dst),
+        excs=(OSError,),
+        name='rename [%s] => [%s]' % (src, dst),
+        printerr=self.print)
+
   def RunGit(self, cmd, **kwargs):
     """Run git in a subprocess."""
     cwd = kwargs.setdefault('cwd', self.mirror_path)
@@ -324,7 +373,15 @@ class Mirror(object):
       retcode = 0
     finally:
       # Clean up the downloaded zipfile.
-
+      #
+      # This is somehow racy on Windows.
+      # Catching OSError because WindowsError isn't portable and
+      # pylint complains.
+      exponential_backoff_retry(
+          lambda: gclient_utils.rm_file_or_tree(tempdir),
+          excs=(OSError,),
+          name='rmtree [%s]' % (tempdir,),
+          printerr=self.print)
 
     if retcode:
       self.print(
@@ -439,16 +496,9 @@ class Mirror(object):
       self._fetch(tempdir or self.mirror_path, verbose, depth)
     finally:
       if tempdir:
-
-
-
-        os.rename(tempdir, self.mirror_path)
-        except OSError as e:
-          # This is somehow racy on Windows.
-          # Catching OSError because WindowsError isn't portable and
-          # pylint complains.
-          self.print('Error moving %s to %s: %s' % (tempdir, self.mirror_path,
-                                                    str(e)))
+        if os.path.exists(self.mirror_path):
+          gclient_utils.rmtree(self.mirror_path)
+        self.Rename(tempdir, self.mirror_path)
     if not ignore_lock:
       lockfile.unlock()
 
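The `exponential_backoff_retry` helper added above is what the rest of the git_cache.py changes lean on: it now wraps `os.rename`, lock-file deletion, and temp-dir cleanup so that transient Windows filesystem races are retried instead of failing. Below is a minimal self-contained sketch of the same retry pattern, a simplified re-implementation for illustration rather than the depot_tools function itself.

```python
import logging
import time

def retry_with_backoff(fn, excs=(Exception,), count=10, sleep_time=0.25):
    """Call fn() up to `count` times, doubling the sleep after each failure."""
    for attempt in range(count):
        try:
            return fn()
        except excs as e:
            if attempt + 1 >= count:
                raise
            logging.warning('attempt %d/%d failed (%s); retrying in %.2fs',
                            attempt + 1, count, e, sleep_time)
            time.sleep(sleep_time)
            sleep_time *= 2

# git_cache.py applies this shape to flaky filesystem calls, e.g.:
#   retry_with_backoff(lambda: os.rename(src, dst), excs=(OSError,))
```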
data/vendor/depot_tools/git_cherry_pick_upload.py
CHANGED
@@ -52,7 +52,7 @@ def cherry_pick(target_branch, commit, auth_config):
   ])
 
   rietveld = Rietveld(config('rietveld.server'), auth_config, author)
-  # pylint: disable=
+  # pylint: disable=protected-access
   output = rietveld._send(
     '/upload',
     payload=payload,
@@ -92,7 +92,7 @@ def cherry_pick(target_branch, commit, auth_config):
     ('data', filename, content),
   ])
 
-  # pylint: disable=
+  # pylint: disable=protected-access
   print ' Uploading base file for %s:' % filename, rietveld._send(
     '/%s/upload_content/%s/%s' % (issue, patchset, file_id),
     payload=payload,
@@ -115,14 +115,14 @@ def cherry_pick(target_branch, commit, auth_config):
     ('data', filename, content),
   ])
 
-  # pylint: disable=
+  # pylint: disable=protected-access
   print ' Uploading %s:' % filename, rietveld._send(
     '/%s/upload_content/%s/%s' % (issue, patchset, file_id),
     payload=payload,
     content_type=content_type,
   )
 
-  # pylint: disable=
+  # pylint: disable=protected-access
   print 'Finalizing upload:', rietveld._send('/%s/upload_complete/1' % issue)
 
 
data/vendor/depot_tools/git_cl.py
CHANGED
@@ -13,6 +13,8 @@ from distutils.version import LooseVersion
 from multiprocessing.pool import ThreadPool
 import base64
 import collections
+import contextlib
+import fnmatch
 import httplib
 import json
 import logging
@@ -23,8 +25,6 @@ import re
 import stat
 import sys
 import textwrap
-import time
-import traceback
 import urllib
 import urllib2
 import urlparse
@@ -33,7 +33,7 @@ import webbrowser
 import zlib
 
 try:
-  import readline  # pylint: disable=
+  import readline  # pylint: disable=import-error,W0611
 except ImportError:
   pass
 
@@ -41,8 +41,8 @@ from third_party import colorama
 from third_party import httplib2
 from third_party import upload
 import auth
+import checkout
 import clang_format
-import commit_queue
 import dart_format
 import setup_color
 import fix_encoding
@@ -64,9 +64,8 @@ __version__ = '2.0'
 
 COMMIT_BOT_EMAIL = 'commit-bot@chromium.org'
 DEFAULT_SERVER = 'https://codereview.chromium.org'
-
+POSTUPSTREAM_HOOK = '.git/hooks/post-cl-land'
 DESCRIPTION_BACKUP_FILE = '~/.git_cl_description_backup'
-GIT_INSTRUCTIONS_URL = 'http://code.google.com/p/chromium/wiki/UsingGit'
 REFS_THAT_ALIAS_TO_OTHER_REFS = {
   'refs/remotes/origin/lkgr': 'refs/remotes/origin/master',
   'refs/remotes/origin/lkcr': 'refs/remotes/origin/master',
@@ -76,18 +75,39 @@ REFS_THAT_ALIAS_TO_OTHER_REFS = {
 DEFAULT_LINT_REGEX = r"(.*\.cpp|.*\.cc|.*\.h)"
 DEFAULT_LINT_IGNORE_REGEX = r"$^"
 
+# Buildbucket master name prefix.
+MASTER_PREFIX = 'master.'
+
 # Shortcut since it quickly becomes redundant.
 Fore = colorama.Fore
 
 # Initialized in main()
 settings = None
 
+# Used by tests/git_cl_test.py to add extra logging.
+# Inside the weirdly failing test, add this:
+# >>> self.mock(git_cl, '_IS_BEING_TESTED', True)
+# And scroll up to see the strack trace printed.
+_IS_BEING_TESTED = False
+
+
+def DieWithError(message, change_desc=None):
+  if change_desc:
+    SaveDescriptionBackup(change_desc)
 
-def DieWithError(message):
   print(message, file=sys.stderr)
   sys.exit(1)
 
 
+def SaveDescriptionBackup(change_desc):
+  backup_path = os.path.expanduser(DESCRIPTION_BACKUP_FILE)
+  print('\nError after CL description prompt -- saving description to %s\n' %
+        backup_path)
+  backup_file = open(backup_path, 'w')
+  backup_file.write(change_desc.description)
+  backup_file.close()
+
+
 def GetNoGitPagerEnv():
   env = os.environ.copy()
   # 'cat' is a magical git string that disables pagers on all platforms.
@@ -148,6 +168,13 @@ def BranchExists(branch):
   return not code
 
 
+def time_sleep(seconds):
+  # Use this so that it can be mocked in tests without interfering with python
+  # system machinery.
+  import time  # Local import to discourage others from importing time globally.
+  return time.sleep(seconds)
+
+
 def ask_for_data(prompt):
   try:
     return raw_input(prompt)
@@ -218,6 +245,26 @@ def _git_set_branch_config_value(key, value, branch=None, **kwargs):
   RunGit(args, **kwargs)
 
 
+def _get_committer_timestamp(commit):
+  """Returns unix timestamp as integer of a committer in a commit.
+
+  Commit can be whatever git show would recognize, such as HEAD, sha1 or ref.
+  """
+  # Git also stores timezone offset, but it only affects visual display,
+  # actual point in time is defined by this timestamp only.
+  return int(RunGit(['show', '-s', '--format=%ct', commit]).strip())
+
+
+def _git_amend_head(message, committer_timestamp):
+  """Amends commit with new message and desired committer_timestamp.
+
+  Sets committer timezone to UTC.
+  """
+  env = os.environ.copy()
+  env['GIT_COMMITTER_DATE'] = '%d+0000' % committer_timestamp
+  return RunGit(['commit', '--amend', '-m', message], env=env)
+
+
 def add_git_similarity(parser):
   parser.add_option(
       '--similarity', metavar='SIM', type=int, action='store',
@@ -275,10 +322,22 @@ def _prefix_master(master):
   (tryserver.chromium.linux) by stripping off the prefix 'master.'. This
   function does the conversion for buildbucket migration.
   """
-
-  if master.startswith(prefix):
+  if master.startswith(MASTER_PREFIX):
     return master
-  return '%s%s' % (
+  return '%s%s' % (MASTER_PREFIX, master)
+
+
+def _unprefix_master(bucket):
+  """Convert bucket name to shortened master name.
+
+  Buildbucket uses full master name(master.tryserver.chromium.linux) as bucket
+  name, while the developers always use shortened master name
+  (tryserver.chromium.linux) by stripping off the prefix 'master.'. This
+  function does the conversion for buildbucket migration.
+  """
+  if bucket.startswith(MASTER_PREFIX):
+    return bucket[len(MASTER_PREFIX):]
+  return bucket
 
 
 def _buildbucket_retry(operation_name, http, *args, **kwargs):
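The two helpers above are symmetric string-prefix conversions between short master names and Buildbucket bucket names. A self-contained restatement, with example values that are plausible but not taken from the diff:

```python
MASTER_PREFIX = 'master.'

def prefix_master(master):
    return master if master.startswith(MASTER_PREFIX) else MASTER_PREFIX + master

def unprefix_master(bucket):
    return bucket[len(MASTER_PREFIX):] if bucket.startswith(MASTER_PREFIX) else bucket

assert prefix_master('tryserver.chromium.linux') == 'master.tryserver.chromium.linux'
assert unprefix_master('master.tryserver.chromium.linux') == 'tryserver.chromium.linux'
# Buckets without the prefix pass through unchanged.
assert unprefix_master('some.other.bucket') == 'some.other.bucket'
```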
@@ -313,72 +372,152 @@ def _buildbucket_retry(operation_name, http, *args, **kwargs):
 
     # status >= 500 means transient failures.
     logging.debug('Transient errors when %s. Will retry.', operation_name)
-
+    time_sleep(0.5 + 1.5*try_count)
     try_count += 1
   assert False, 'unreachable'
 
 
-def
-
-
-
+def _get_bucket_map(changelist, options, option_parser):
+  """Returns a dict mapping bucket names to builders and tests,
+  for triggering try jobs.
+  """
+  # If no bots are listed, we try to get a set of builders and tests based
+  # on GetPreferredTryMasters functions in PRESUBMIT.py files.
+  if not options.bot:
+    change = changelist.GetChange(
+        changelist.GetCommonAncestorWithUpstream(), None)
+    # Get try masters from PRESUBMIT.py files.
+    masters = presubmit_support.DoGetTryMasters(
+        change=change,
+        changed_files=change.LocalPaths(),
+        repository_root=settings.GetRoot(),
+        default_presubmit=None,
+        project=None,
+        verbose=options.verbose,
+        output_stream=sys.stdout)
+    if masters is None:
+      return None
+    return {_prefix_master(m): b for m, b in masters.iteritems()}
+
+  if options.bucket:
+    return {options.bucket: {b: [] for b in options.bot}}
+  if options.master:
+    return {_prefix_master(options.master): {b: [] for b in options.bot}}
+
+  # If bots are listed but no master or bucket, then we need to find out
+  # the corresponding master for each bot.
+  bucket_map, error_message = _get_bucket_map_for_builders(options.bot)
+  if error_message:
+    option_parser.error(
+        'Tryserver master cannot be found because: %s\n'
+        'Please manually specify the tryserver master, e.g. '
+        '"-m tryserver.chromium.linux".' % error_message)
+  return bucket_map
+
+
+def _get_bucket_map_for_builders(builders):
+  """Returns a map of buckets to builders for the given builders."""
+  map_url = 'https://builders-map.appspot.com/'
+  try:
+    builders_map = json.load(urllib2.urlopen(map_url))
+  except urllib2.URLError as e:
+    return None, ('Failed to fetch builder-to-master map from %s. Error: %s.' %
+                  (map_url, e))
+  except ValueError as e:
+    return None, ('Invalid json string from %s. Error: %s.' % (map_url, e))
+  if not builders_map:
+    return None, 'Failed to build master map.'
+
+  bucket_map = {}
+  for builder in builders:
+    masters = builders_map.get(builder, [])
+    if not masters:
+      return None, ('No matching master for builder %s.' % builder)
+    if len(masters) > 1:
+      return None, ('The builder name %s exists in multiple masters %s.' %
+                    (builder, masters))
+    bucket = _prefix_master(masters[0])
+    bucket_map.setdefault(bucket, {})[builder] = []
+
+  return bucket_map, None
+
+
+def _trigger_try_jobs(auth_config, changelist, buckets, options,
+                      category='git_cl_try', patchset=None):
+  """Sends a request to Buildbucket to trigger try jobs for a changelist.
+
+  Args:
+    auth_config: AuthConfig for Rietveld.
+    changelist: Changelist that the try jobs are associated with.
+    buckets: A nested dict mapping bucket names to builders to tests.
+    options: Command-line options.
+  """
+  assert changelist.GetIssue(), 'CL must be uploaded first'
+  codereview_url = changelist.GetCodereviewServer()
+  assert codereview_url, 'CL must be uploaded first'
+  patchset = patchset or changelist.GetMostRecentPatchset()
+  assert patchset, 'CL must be uploaded first'
+
+  codereview_host = urlparse.urlparse(codereview_url).hostname
+  authenticator = auth.get_authenticator_for_host(codereview_host, auth_config)
   http = authenticator.authorize(httplib2.Http())
   http.force_exception_to_status_code = True
-  issue_props = changelist.GetIssueProperties()
-  issue = changelist.GetIssue()
-  patchset = changelist.GetMostRecentPatchset()
-  properties = _get_properties_from_options(options)
 
   buildbucket_put_url = (
       'https://{hostname}/_ah/api/buildbucket/v1/builds/batch'.format(
          hostname=options.buildbucket_host))
-  buildset = 'patch/
-
-
+  buildset = 'patch/{codereview}/{hostname}/{issue}/{patch}'.format(
+      codereview='gerrit' if changelist.IsGerrit() else 'rietveld',
+      hostname=codereview_host,
+      issue=changelist.GetIssue(),
       patch=patchset)
 
+  shared_parameters_properties = changelist.GetTryjobProperties(patchset)
+  shared_parameters_properties['category'] = category
+  if options.clobber:
+    shared_parameters_properties['clobber'] = True
+  extra_properties = _get_properties_from_options(options)
+  if extra_properties:
+    shared_parameters_properties.update(extra_properties)
+
   batch_req_body = {'builds': []}
   print_text = []
   print_text.append('Tried jobs on:')
-  for
-  print_text.append('
-
+  for bucket, builders_and_tests in sorted(buckets.iteritems()):
+    print_text.append('Bucket: %s' % bucket)
+    master = None
+    if bucket.startswith(MASTER_PREFIX):
+      master = _unprefix_master(bucket)
     for builder, tests in sorted(builders_and_tests.iteritems()):
       print_text.append(' %s: %s' % (builder, tests))
       parameters = {
          'builder_name': builder,
          'changes': [{
-            'author': {'email':
+            'author': {'email': changelist.GetIssueOwner()},
            'revision': options.revision,
          }],
-          'properties':
-          'category': category,
-          'issue': issue,
-          'master': master,
-          'patch_project': issue_props['project'],
-          'patch_storage': 'rietveld',
-          'patchset': patchset,
-          'reason': options.name,
-          'rietveld': rietveld_url,
-          },
+          'properties': shared_parameters_properties.copy(),
      }
      if 'presubmit' in builder.lower():
        parameters['properties']['dry_run'] = 'true'
      if tests:
        parameters['properties']['testfilter'] = tests
-
-
-
-
+
+      tags = [
+          'builder:%s' % builder,
+          'buildset:%s' % buildset,
+          'user_agent:git_cl_try',
+      ]
+      if master:
+        parameters['properties']['master'] = master
+        tags.append('master:%s' % master)
+
      batch_req_body['builds'].append(
          {
            'bucket': bucket,
            'parameters_json': json.dumps(parameters),
            'client_operation_id': str(uuid.uuid4()),
-            'tags':
-            'buildset:%s' % buildset,
-            'master:%s' % master,
-            'user_agent:git_cl_try']
+            'tags': tags,
          }
      )
 
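For orientation, here is a small sketch of the Buildbucket `buildset` tag format that the rewritten `_trigger_try_jobs` above (and `fetch_try_jobs` below) now builds for both Rietveld and Gerrit changes. The hostnames are real code-review hosts, but the issue and patchset numbers are made-up examples.

```python
def buildset_tag(codereview, hostname, issue, patch):
    # Same format string as in the hunk above.
    return 'patch/{codereview}/{hostname}/{issue}/{patch}'.format(
        codereview=codereview, hostname=hostname, issue=issue, patch=patch)

print(buildset_tag('rietveld', 'codereview.chromium.org', 2750013003, 1))
# -> patch/rietveld/codereview.chromium.org/2750013003/1
print(buildset_tag('gerrit', 'chromium-review.googlesource.com', 456789, 7))
# -> patch/gerrit/chromium-review.googlesource.com/456789/7
```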
@@ -395,34 +534,42 @@ def trigger_try_jobs(auth_config, changelist, options, masters, category):
   print('\n'.join(print_text))
 
 
-def fetch_try_jobs(auth_config, changelist,
+def fetch_try_jobs(auth_config, changelist, buildbucket_host,
+                   patchset=None):
   """Fetches try jobs from buildbucket.
 
   Returns a map from build id to build info as a dictionary.
   """
-
-
-
+  assert buildbucket_host
+  assert changelist.GetIssue(), 'CL must be uploaded first'
+  assert changelist.GetCodereviewServer(), 'CL must be uploaded first'
+  patchset = patchset or changelist.GetMostRecentPatchset()
+  assert patchset, 'CL must be uploaded first'
+
+  codereview_url = changelist.GetCodereviewServer()
+  codereview_host = urlparse.urlparse(codereview_url).hostname
+  authenticator = auth.get_authenticator_for_host(codereview_host, auth_config)
   if authenticator.has_cached_credentials():
     http = authenticator.authorize(httplib2.Http())
   else:
     print('Warning: Some results might be missing because %s' %
           # Get the message on how to login.
-          (auth.LoginRequiredError(
+          (auth.LoginRequiredError(codereview_host).message,))
     http = httplib2.Http()
 
   http.force_exception_to_status_code = True
 
-  buildset = 'patch/
-
+  buildset = 'patch/{codereview}/{hostname}/{issue}/{patch}'.format(
+      codereview='gerrit' if changelist.IsGerrit() else 'rietveld',
+      hostname=codereview_host,
       issue=changelist.GetIssue(),
-      patch=
+      patch=patchset)
   params = {'tag': 'buildset:%s' % buildset}
 
   builds = {}
   while True:
     url = 'https://{hostname}/_ah/api/buildbucket/v1/search?{params}'.format(
-        hostname=
+        hostname=buildbucket_host,
         params=urllib.urlencode(params))
     content = _buildbucket_retry('fetching try jobs', http, url, 'GET')
     for build in content.get('builds', []):
@@ -558,48 +705,6 @@ def write_try_results_json(output_file, builds):
   write_json(output_file, converted)
 
 
-def MatchSvnGlob(url, base_url, glob_spec, allow_wildcards):
-  """Return the corresponding git ref if |base_url| together with |glob_spec|
-  matches the full |url|.
-
-  If |allow_wildcards| is true, |glob_spec| can contain wildcards (see below).
-  """
-  fetch_suburl, as_ref = glob_spec.split(':')
-  if allow_wildcards:
-    glob_match = re.match('(.+/)?(\*|{[^/]*})(/.+)?', fetch_suburl)
-    if glob_match:
-      # Parse specs like "branches/*/src:refs/remotes/svn/*" or
-      # "branches/{472,597,648}/src:refs/remotes/svn/*".
-      branch_re = re.escape(base_url)
-      if glob_match.group(1):
-        branch_re += '/' + re.escape(glob_match.group(1))
-      wildcard = glob_match.group(2)
-      if wildcard == '*':
-        branch_re += '([^/]*)'
-      else:
-        # Escape and replace surrounding braces with parentheses and commas
-        # with pipe symbols.
-        wildcard = re.escape(wildcard)
-        wildcard = re.sub('^\\\\{', '(', wildcard)
-        wildcard = re.sub('\\\\,', '|', wildcard)
-        wildcard = re.sub('\\\\}$', ')', wildcard)
-        branch_re += wildcard
-      if glob_match.group(3):
-        branch_re += re.escape(glob_match.group(3))
-      match = re.match(branch_re, url)
-      if match:
-        return re.sub('\*$', match.group(1), as_ref)
-
-  # Parse specs like "trunk/src:refs/remotes/origin/trunk".
-  if fetch_suburl:
-    full_url = base_url + '/' + fetch_suburl
-  else:
-    full_url = base_url
-  if full_url == url:
-    return as_ref
-  return None
-
-
 def print_stats(similarity, find_copies, args):
   """Prints statistics about the change to the user."""
   # --no-ext-diff is broken in some versions of Git, so try to work around
@@ -610,8 +715,7 @@ def print_stats(similarity, find_copies, args):
     del env['GIT_EXTERNAL_DIFF']
 
   if find_copies:
-    similarity_options = ['
-                          '-C%s' % similarity]
+    similarity_options = ['-l100000', '-C%s' % similarity]
   else:
     similarity_options = ['-M%s' % similarity]
 
@@ -634,8 +738,6 @@ class Settings(object):
     self.default_server = None
     self.cc = None
     self.root = None
-    self.is_git_svn = None
-    self.svn_branch = None
     self.tree_status_url = None
     self.viewvc_url = None
     self.updated = False
@@ -645,7 +747,6 @@ class Settings(object):
     self.git_editor = None
     self.project = None
     self.force_https_commit_url = None
-    self.pending_ref_prefix = None
 
   def LazyUpdateIfNeeded(self):
     """Updates the settings from a codereview.settings file, if available."""
@@ -691,93 +792,12 @@ class Settings(object):
       return None
     git_cache.Mirror.SetCachePath(os.path.dirname(local_url))
     remote_url = git_cache.Mirror.CacheDirToUrl(local_url)
-    # Use the /dev/null print_func to avoid terminal spew
-    mirror = git_cache.Mirror(remote_url, print_func
+    # Use the /dev/null print_func to avoid terminal spew.
+    mirror = git_cache.Mirror(remote_url, print_func=lambda *args: None)
     if mirror.exists():
       return mirror
     return None
 
-  def GetIsGitSvn(self):
-    """Return true if this repo looks like it's using git-svn."""
-    if self.is_git_svn is None:
-      if self.GetPendingRefPrefix():
-        # If PENDING_REF_PREFIX is set then it's a pure git repo no matter what.
-        self.is_git_svn = False
-      else:
-        # If you have any "svn-remote.*" config keys, we think you're using svn.
-        self.is_git_svn = RunGitWithCode(
-            ['config', '--local', '--get-regexp', r'^svn-remote\.'])[0] == 0
-    return self.is_git_svn
-
-  def GetSVNBranch(self):
-    if self.svn_branch is None:
-      if not self.GetIsGitSvn():
-        DieWithError('Repo doesn\'t appear to be a git-svn repo.')
-
-      # Try to figure out which remote branch we're based on.
-      # Strategy:
-      # 1) iterate through our branch history and find the svn URL.
-      # 2) find the svn-remote that fetches from the URL.
-
-      # regexp matching the git-svn line that contains the URL.
-      git_svn_re = re.compile(r'^\s*git-svn-id: (\S+)@', re.MULTILINE)
-
-      # We don't want to go through all of history, so read a line from the
-      # pipe at a time.
-      # The -100 is an arbitrary limit so we don't search forever.
-      cmd = ['git', 'log', '-100', '--pretty=medium']
-      proc = subprocess2.Popen(cmd, stdout=subprocess2.PIPE,
-                               env=GetNoGitPagerEnv())
-      url = None
-      for line in proc.stdout:
-        match = git_svn_re.match(line)
-        if match:
-          url = match.group(1)
-          proc.stdout.close()  # Cut pipe.
-          break
-
-      if url:
-        svn_remote_re = re.compile(r'^svn-remote\.([^.]+)\.url (.*)$')
-        remotes = RunGit(['config', '--get-regexp',
-                          r'^svn-remote\..*\.url']).splitlines()
-        for remote in remotes:
-          match = svn_remote_re.match(remote)
-          if match:
-            remote = match.group(1)
-            base_url = match.group(2)
-            rewrite_root = RunGit(
-                ['config', 'svn-remote.%s.rewriteRoot' % remote],
-                error_ok=True).strip()
-            if rewrite_root:
-              base_url = rewrite_root
-            fetch_spec = RunGit(
-                ['config', 'svn-remote.%s.fetch' % remote],
-                error_ok=True).strip()
-            if fetch_spec:
-              self.svn_branch = MatchSvnGlob(url, base_url, fetch_spec, False)
-            if self.svn_branch:
-              break
-            branch_spec = RunGit(
-                ['config', 'svn-remote.%s.branches' % remote],
-                error_ok=True).strip()
-            if branch_spec:
-              self.svn_branch = MatchSvnGlob(url, base_url, branch_spec, True)
-            if self.svn_branch:
-              break
-            tag_spec = RunGit(
-                ['config', 'svn-remote.%s.tags' % remote],
-                error_ok=True).strip()
-            if tag_spec:
-              self.svn_branch = MatchSvnGlob(url, base_url, tag_spec, True)
-            if self.svn_branch:
-              break
-
-      if not self.svn_branch:
-        DieWithError('Can\'t guess svn branch -- try specifying it on the '
-                     'command line')
-
-    return self.svn_branch
-
   def GetTreeStatusUrl(self, error_ok=False):
     if not self.tree_status_url:
       error_message = ('You must configure your tree status URL by running '
@@ -869,18 +889,6 @@ class Settings(object):
       self.project = self._GetRietveldConfig('project', error_ok=True)
     return self.project
 
-  def GetForceHttpsCommitUrl(self):
-    if not self.force_https_commit_url:
-      self.force_https_commit_url = self._GetRietveldConfig(
-          'force-https-commit-url', error_ok=True)
-    return self.force_https_commit_url
-
-  def GetPendingRefPrefix(self):
-    if not self.pending_ref_prefix:
-      self.pending_ref_prefix = self._GetRietveldConfig(
-          'pending-ref-prefix', error_ok=True)
-    return self.pending_ref_prefix
-
   def _GetRietveldConfig(self, param, **kwargs):
     return self._GetConfig('rietveld.' + param, **kwargs)
 
@@ -892,6 +900,84 @@ class Settings(object):
     return RunGit(['config', param], **kwargs).strip()
 
 
+@contextlib.contextmanager
+def _get_gerrit_project_config_file(remote_url):
+  """Context manager to fetch and store Gerrit's project.config from
+  refs/meta/config branch and store it in temp file.
+
+  Provides a temporary filename or None if there was error.
+  """
+  error, _ = RunGitWithCode([
+      'fetch', remote_url,
+      '+refs/meta/config:refs/git_cl/meta/config'])
+  if error:
+    # Ref doesn't exist or isn't accessible to current user.
+    print('WARNING: failed to fetch project config for %s: %s' %
+          (remote_url, error))
+    yield None
+    return
+
+  error, project_config_data = RunGitWithCode(
+      ['show', 'refs/git_cl/meta/config:project.config'])
+  if error:
+    print('WARNING: project.config file not found')
+    yield None
+    return
+
+  with gclient_utils.temporary_directory() as tempdir:
+    project_config_file = os.path.join(tempdir, 'project.config')
+    gclient_utils.FileWrite(project_config_file, project_config_data)
+    yield project_config_file
+
+
+def _is_git_numberer_enabled(remote_url, remote_ref):
+  """Returns True if Git Numberer is enabled on this ref."""
+  # TODO(tandrii): this should be deleted once repos below are 100% on Gerrit.
+  KNOWN_PROJECTS_WHITELIST = [
+      'chromium/src',
+      'external/webrtc',
+      'v8/v8',
+  ]
+
+  assert remote_ref and remote_ref.startswith('refs/'), remote_ref
+  url_parts = urlparse.urlparse(remote_url)
+  project_name = url_parts.path.lstrip('/').rstrip('git./')
+  for known in KNOWN_PROJECTS_WHITELIST:
+    if project_name.endswith(known):
+      break
+  else:
+    # Early exit to avoid extra fetches for repos that aren't using Git
+    # Numberer.
+    return False
+
+  with _get_gerrit_project_config_file(remote_url) as project_config_file:
+    if project_config_file is None:
+      # Failed to fetch project.config, which shouldn't happen on open source
+      # repos KNOWN_PROJECTS_WHITELIST.
+      return False
+    def get_opts(x):
+      code, out = RunGitWithCode(
+          ['config', '-f', project_config_file, '--get-all',
+           'plugin.git-numberer.validate-%s-refglob' % x])
+      if code == 0:
+        return out.strip().splitlines()
+      return []
+    enabled, disabled = map(get_opts, ['enabled', 'disabled'])
+
+  logging.info('validator config enabled %s disabled %s refglobs for '
+               '(this ref: %s)', enabled, disabled, remote_ref)
+
+  def match_refglobs(refglobs):
+    for refglob in refglobs:
+      if remote_ref == refglob or fnmatch.fnmatch(remote_ref, refglob):
+        return True
+    return False
+
+  if match_refglobs(disabled):
+    return False
+  return match_refglobs(enabled)
+
+
 def ShortBranchName(branch):
   """Convert a name like 'refs/heads/foo' to just 'foo'."""
   return branch.replace('refs/heads/', '', 1)
@@ -934,12 +1020,6 @@ class _ParsedIssueNumberArgument(object):
|
|
934
1020
|
return self.issue is not None
|
935
1021
|
|
936
1022
|
|
937
|
-
class _RietveldParsedIssueNumberArgument(_ParsedIssueNumberArgument):
|
938
|
-
def __init__(self, *args, **kwargs):
|
939
|
-
self.patch_url = kwargs.pop('patch_url', None)
|
940
|
-
super(_RietveldParsedIssueNumberArgument, self).__init__(*args, **kwargs)
|
941
|
-
|
942
|
-
|
943
1023
|
def ParseIssueNumberArgument(arg):
|
944
1024
|
"""Parses the issue argument and returns _ParsedIssueNumberArgument."""
|
945
1025
|
fail_result = _ParsedIssueNumberArgument()
|
@@ -960,6 +1040,17 @@ def ParseIssueNumberArgument(arg):
|
|
960
1040
|
return fail_result
|
961
1041
|
|
962
1042
|
|
1043
|
+
class GerritChangeNotExists(Exception):
|
1044
|
+
def __init__(self, issue, url):
|
1045
|
+
self.issue = issue
|
1046
|
+
self.url = url
|
1047
|
+
super(GerritChangeNotExists, self).__init__()
|
1048
|
+
|
1049
|
+
def __str__(self):
|
1050
|
+
return 'change %s at %s does not exist or you have no access to it' % (
|
1051
|
+
self.issue, self.url)
|
1052
|
+
|
1053
|
+
|
963
1054
|
class Changelist(object):
|
964
1055
|
"""Changelist works with one changelist in local branch.
|
965
1056
|
|
@@ -1120,28 +1211,19 @@ class Changelist(object):
|
|
1120
1211
|
if upstream_branch:
|
1121
1212
|
remote = RunGit(['config', 'rietveld.upstream-remote']).strip()
|
1122
1213
|
else:
|
1123
|
-
#
|
1124
|
-
|
1125
|
-
|
1214
|
+
# Else, try to guess the origin remote.
|
1215
|
+
remote_branches = RunGit(['branch', '-r']).split()
|
1216
|
+
if 'origin/master' in remote_branches:
|
1217
|
+
# Fall back on origin/master if it exits.
|
1218
|
+
remote = 'origin'
|
1219
|
+
upstream_branch = 'refs/heads/master'
|
1126
1220
|
else:
|
1127
|
-
|
1128
|
-
|
1129
|
-
|
1130
|
-
|
1131
|
-
|
1132
|
-
|
1133
|
-
elif 'origin/trunk' in remote_branches:
|
1134
|
-
# Fall back on origin/trunk if it exists. Generally a shared
|
1135
|
-
# git-svn clone
|
1136
|
-
remote = 'origin'
|
1137
|
-
upstream_branch = 'refs/heads/trunk'
|
1138
|
-
else:
|
1139
|
-
DieWithError(
|
1140
|
-
'Unable to determine default branch to diff against.\n'
|
1141
|
-
'Either pass complete "git diff"-style arguments, like\n'
|
1142
|
-
' git cl upload origin/master\n'
|
1143
|
-
'or verify this branch is set up to track another \n'
|
1144
|
-
'(via the --track argument to "git checkout -b ...").')
|
1221
|
+
DieWithError(
|
1222
|
+
'Unable to determine default branch to diff against.\n'
|
1223
|
+
'Either pass complete "git diff"-style arguments, like\n'
|
1224
|
+
' git cl upload origin/master\n'
|
1225
|
+
'or verify this branch is set up to track another \n'
|
1226
|
+
'(via the --track argument to "git checkout -b ...").')
|
1145
1227
|
|
1146
1228
|
return remote, upstream_branch
|
1147
1229
|
|
@@ -1180,17 +1262,11 @@ class Changelist(object):
|
|
1180
1262
|
remote, = remotes
|
1181
1263
|
elif 'origin' in remotes:
|
1182
1264
|
remote = 'origin'
|
1183
|
-
logging.
|
1184
|
-
|
1185
|
-
'not be what you want. You may prevent this message '
|
1186
|
-
'by running "git svn info" as documented here: %s',
|
1187
|
-
self._remote,
|
1188
|
-
GIT_INSTRUCTIONS_URL)
|
1265
|
+
logging.warn('Could not determine which remote this change is '
|
1266
|
+
'associated with, so defaulting to "%s".' % self._remote)
|
1189
1267
|
else:
|
1190
1268
|
logging.warn('Could not determine which remote this change is '
|
1191
|
-
'associated with.
|
1192
|
-
'running "git svn info" as documented here: %s',
|
1193
|
-
GIT_INSTRUCTIONS_URL)
|
1269
|
+
'associated with.')
|
1194
1270
|
branch = 'HEAD'
|
1195
1271
|
if branch.startswith('refs/remotes'):
|
1196
1272
|
self._remote = (remote, branch)
|
@@ -1249,19 +1325,6 @@ class Changelist(object):
|
|
1249
1325
|
"""
|
1250
1326
|
return self._GitGetBranchConfigValue('base-url')
|
1251
1327
|
|
1252
|
-
def GetGitSvnRemoteUrl(self):
|
1253
|
-
"""Return the configured git-svn remote URL parsed from git svn info.
|
1254
|
-
|
1255
|
-
Returns None if it is not set.
|
1256
|
-
"""
|
1257
|
-
# URL is dependent on the current directory.
|
1258
|
-
data = RunGit(['svn', 'info'], cwd=settings.GetRoot())
|
1259
|
-
if data:
|
1260
|
-
keys = dict(line.split(': ', 1) for line in data.splitlines()
|
1261
|
-
if ': ' in line)
|
1262
|
-
return keys.get('URL', None)
|
1263
|
-
return None
|
1264
|
-
|
1265
1328
|
def GetRemoteUrl(self):
|
1266
1329
|
"""Return the configured remote URL, e.g. 'git://example.org/foo.git/'.
|
1267
1330
|
|
@@ -1292,15 +1355,17 @@ class Changelist(object):
|
|
1292
1355
|
return None
|
1293
1356
|
return '%s/%s' % (self._codereview_impl.GetCodereviewServer(), issue)
|
1294
1357
|
|
1295
|
-
def GetDescription(self, pretty=False):
|
1296
|
-
if not self.has_description:
|
1358
|
+
def GetDescription(self, pretty=False, force=False):
|
1359
|
+
if not self.has_description or force:
|
1297
1360
|
if self.GetIssue():
|
1298
|
-
self.description = self._codereview_impl.FetchDescription()
|
1361
|
+
self.description = self._codereview_impl.FetchDescription(force=force)
|
1299
1362
|
self.has_description = True
|
1300
1363
|
if pretty:
|
1301
|
-
|
1364
|
+
# Set width to 72 columns + 2 space indent.
|
1365
|
+
wrapper = textwrap.TextWrapper(width=74, replace_whitespace=True)
|
1302
1366
|
wrapper.initial_indent = wrapper.subsequent_indent = ' '
|
1303
|
-
|
1367
|
+
lines = self.description.splitlines()
|
1368
|
+
return '\n'.join([wrapper.fill(line) for line in lines])
|
1304
1369
|
return self.description
|
1305
1370
|
|
1306
1371
|
def GetPatchset(self):
|
@@ -1366,8 +1431,8 @@ class Changelist(object):
|
|
1366
1431
|
('\nFailed to diff against upstream branch %s\n\n'
|
1367
1432
|
'This branch probably doesn\'t exist anymore. To reset the\n'
|
1368
1433
|
'tracking branch, please run\n'
|
1369
|
-
' git branch --set-upstream %s
|
1370
|
-
'
|
1434
|
+
' git branch --set-upstream-to origin/master %s\n'
|
1435
|
+
'or replace origin/master with the relevant branch') %
|
1371
1436
|
(upstream_branch, self.GetBranch()))
|
1372
1437
|
|
1373
1438
|
issue = self.GetIssue()
|
@@ -1393,9 +1458,10 @@ class Changelist(object):
|
|
1393
1458
|
author,
|
1394
1459
|
upstream=upstream_branch)
|
1395
1460
|
|
1396
|
-
def UpdateDescription(self, description):
|
1461
|
+
def UpdateDescription(self, description, force=False):
|
1462
|
+
self._codereview_impl.UpdateDescriptionRemote(description, force=force)
|
1397
1463
|
self.description = description
|
1398
|
-
|
1464
|
+
self.has_description = True
|
1399
1465
|
|
1400
1466
|
def RunHook(self, committing, may_prompt, verbose, change):
|
1401
1467
|
"""Calls sys.exit() if the hook fails; returns a HookResults otherwise."""
|
@@ -1437,10 +1503,11 @@ class Changelist(object):
|
|
1437
1503
|
base_branch = self.GetCommonAncestorWithUpstream()
|
1438
1504
|
git_diff_args = [base_branch, 'HEAD']
|
1439
1505
|
|
1440
|
-
#
|
1441
|
-
# hooks
|
1442
|
-
#
|
1506
|
+
# Fast best-effort checks to abort before running potentially
|
1507
|
+
# expensive hooks if uploading is likely to fail anyway. Passing these
|
1508
|
+
# checks does not guarantee that uploading will not fail.
|
1443
1509
|
self._codereview_impl.EnsureAuthenticated(force=options.force)
|
1510
|
+
self._codereview_impl.EnsureCanUploadPatchset()
|
1444
1511
|
|
1445
1512
|
# Apply watchlists on upload.
|
1446
1513
|
change = self.GetChange(base_branch, None)
|
@@ -1466,7 +1533,9 @@ class Changelist(object):
|
|
1466
1533
|
if not options.reviewers and hook_results.reviewers:
|
1467
1534
|
options.reviewers = hook_results.reviewers.split(',')
|
1468
1535
|
|
1469
|
-
|
1536
|
+
# TODO(tandrii): Checking local patchset against remote patchset is only
|
1537
|
+
# supported for Rietveld. Extend it to Gerrit or remove it completely.
|
1538
|
+
if self.GetIssue() and not self.IsGerrit():
|
1470
1539
|
latest_patchset = self.GetMostRecentPatchset()
|
1471
1540
|
local_patchset = self.GetPatchset()
|
1472
1541
|
if (latest_patchset and local_patchset and
|
@@ -1519,6 +1588,31 @@ class Changelist(object):
|
|
1519
1588
|
assert self.GetIssue()
|
1520
1589
|
return self._codereview_impl.SetCQState(new_state)
|
1521
1590
|
|
1591
|
+
def TriggerDryRun(self):
|
1592
|
+
"""Triggers a dry run and prints a warning on failure."""
|
1593
|
+
# TODO(qyearsley): Either re-use this method in CMDset_commit
|
1594
|
+
# and CMDupload, or change CMDtry to trigger dry runs with
|
1595
|
+
# just SetCQState, and catch keyboard interrupt and other
|
1596
|
+
# errors in that method.
|
1597
|
+
try:
|
1598
|
+
self.SetCQState(_CQState.DRY_RUN)
|
1599
|
+
print('scheduled CQ Dry Run on %s' % self.GetIssueURL())
|
1600
|
+
return 0
|
1601
|
+
except KeyboardInterrupt:
|
1602
|
+
raise
|
1603
|
+
except:
|
1604
|
+
print('WARNING: failed to trigger CQ Dry Run.\n'
|
1605
|
+
'Either:\n'
|
1606
|
+
' * your project has no CQ\n'
|
1607
|
+
' * you don\'t have permission to trigger Dry Run\n'
|
1608
|
+
' * bug in this code (see stack trace below).\n'
|
1609
|
+
'Consider specifying which bots to trigger manually '
|
1610
|
+
'or asking your project owners for permissions '
|
1611
|
+
'or contacting Chrome Infrastructure team at '
|
1612
|
+
'https://www.chromium.org/infra\n\n')
|
1613
|
+
# Still raise exception so that stack trace is printed.
|
1614
|
+
raise
|
1615
|
+
|
1522
1616
|
# Forward methods to codereview specific implementation.
|
1523
1617
|
|
1524
1618
|
def CloseIssue(self):
|
@@ -1530,12 +1624,24 @@ class Changelist(object):
|
|
1530
1624
|
def GetCodereviewServer(self):
|
1531
1625
|
return self._codereview_impl.GetCodereviewServer()
|
1532
1626
|
|
1627
|
+
def GetIssueOwner(self):
|
1628
|
+
"""Get owner from codereview, which may differ from this checkout."""
|
1629
|
+
return self._codereview_impl.GetIssueOwner()
|
1630
|
+
|
1533
1631
|
def GetApprovingReviewers(self):
|
1534
1632
|
return self._codereview_impl.GetApprovingReviewers()
|
1535
1633
|
|
1536
1634
|
def GetMostRecentPatchset(self):
|
1537
1635
|
return self._codereview_impl.GetMostRecentPatchset()
|
1538
1636
|
|
1637
|
+
def CannotTriggerTryJobReason(self):
|
1638
|
+
"""Returns reason (str) if unable trigger tryjobs on this CL or None."""
|
1639
|
+
return self._codereview_impl.CannotTriggerTryJobReason()
|
1640
|
+
|
1641
|
+
def GetTryjobProperties(self, patchset=None):
|
1642
|
+
"""Returns dictionary of properties to launch tryjob."""
|
1643
|
+
return self._codereview_impl.GetTryjobProperties(patchset=patchset)
|
1644
|
+
|
1539
1645
|
def __getattr__(self, attr):
|
1540
1646
|
# This is because lots of untested code accesses Rietveld-specific stuff
|
1541
1647
|
# directly, and it's hard to fix for sure. So, just let it work, and fix
|
@@ -1571,7 +1677,7 @@ class _ChangelistCodereviewBase(object):
|
|
1571
1677
|
"""Returns server URL without end slash, like "https://codereview.com"."""
|
1572
1678
|
raise NotImplementedError()
|
1573
1679
|
|
1574
|
-
def FetchDescription(self):
|
1680
|
+
def FetchDescription(self, force=False):
|
1575
1681
|
"""Fetches and returns description from the codereview server."""
|
1576
1682
|
raise NotImplementedError()
|
1577
1683
|
|
@@ -1603,7 +1709,7 @@ class _ChangelistCodereviewBase(object):
|
|
1603
1709
|
# None is valid return value, otherwise presubmit_support.GerritAccessor.
|
1604
1710
|
return None
|
1605
1711
|
|
1606
|
-
def UpdateDescriptionRemote(self, description):
|
1712
|
+
def UpdateDescriptionRemote(self, description, force=False):
|
1607
1713
|
"""Update the description on codereview site."""
|
1608
1714
|
raise NotImplementedError()
|
1609
1715
|
|
@@ -1642,14 +1748,25 @@ class _ChangelistCodereviewBase(object):
|
|
1642
1748
|
failed."""
|
1643
1749
|
raise NotImplementedError()
|
1644
1750
|
|
1645
|
-
def EnsureAuthenticated(self, force):
|
1751
|
+
def EnsureAuthenticated(self, force, refresh=False):
|
1646
1752
|
"""Best effort check that user is authenticated with codereview server.
|
1647
1753
|
|
1648
1754
|
Arguments:
|
1649
1755
|
force: whether to skip confirmation questions.
|
1756
|
+
refresh: whether to attempt to refresh credentials. Ignored if not
|
1757
|
+
applicable.
|
1650
1758
|
"""
|
1651
1759
|
raise NotImplementedError()
|
1652
1760
|
|
1761
|
+
def EnsureCanUploadPatchset(self):
|
1762
|
+
"""Best effort check that uploading isn't supposed to fail for predictable
|
1763
|
+
reasons.
|
1764
|
+
|
1765
|
+
This method should raise informative exception if uploading shouldn't
|
1766
|
+
proceed.
|
1767
|
+
"""
|
1768
|
+
pass
|
1769
|
+
|
1653
1770
|
def CMDUploadChange(self, options, args, change):
|
1654
1771
|
"""Uploads a change to codereview."""
|
1655
1772
|
raise NotImplementedError()
|
@@ -1661,16 +1778,26 @@ class _ChangelistCodereviewBase(object):
|
|
1661
1778
|
"""
|
1662
1779
|
raise NotImplementedError()
|
1663
1780
|
|
1781
|
+
def CannotTriggerTryJobReason(self):
|
1782
|
+
"""Returns reason (str) if unable trigger tryjobs on this CL or None."""
|
1783
|
+
raise NotImplementedError()
|
1784
|
+
|
1785
|
+
def GetIssueOwner(self):
|
1786
|
+
raise NotImplementedError()
|
1787
|
+
|
1788
|
+
def GetTryjobProperties(self, patchset=None):
|
1789
|
+
raise NotImplementedError()
|
1790
|
+
|
1664
1791
|
|
1665
1792
|
class _RietveldChangelistImpl(_ChangelistCodereviewBase):
|
1666
|
-
def __init__(self, changelist, auth_config=None,
|
1793
|
+
def __init__(self, changelist, auth_config=None, codereview_host=None):
|
1667
1794
|
super(_RietveldChangelistImpl, self).__init__(changelist)
|
1668
1795
|
assert settings, 'must be initialized in _ChangelistCodereviewBase'
|
1669
|
-
if not
|
1796
|
+
if not codereview_host:
|
1670
1797
|
settings.GetDefaultServerUrl()
|
1671
1798
|
|
1672
|
-
self._rietveld_server =
|
1673
|
-
self._auth_config = auth_config
|
1799
|
+
self._rietveld_server = codereview_host
|
1800
|
+
self._auth_config = auth_config or auth.make_auth_config()
|
1674
1801
|
self._props = None
|
1675
1802
|
self._rpc_server = None
|
1676
1803
|
|
@@ -1685,19 +1812,21 @@ class _RietveldChangelistImpl(_ChangelistCodereviewBase):
|
|
1685
1812
|
self._rietveld_server = settings.GetDefaultServerUrl()
|
1686
1813
|
return self._rietveld_server
|
1687
1814
|
|
1688
|
-
def EnsureAuthenticated(self, force):
|
1815
|
+
def EnsureAuthenticated(self, force, refresh=False):
|
1689
1816
|
"""Best effort check that user is authenticated with Rietveld server."""
|
1690
1817
|
if self._auth_config.use_oauth2:
|
1691
1818
|
authenticator = auth.get_authenticator_for_host(
|
1692
1819
|
self.GetCodereviewServer(), self._auth_config)
|
1693
1820
|
if not authenticator.has_cached_credentials():
|
1694
1821
|
raise auth.LoginRequiredError(self.GetCodereviewServer())
|
1822
|
+
if refresh:
|
1823
|
+
authenticator.get_access_token()
|
1695
1824
|
|
1696
|
-
def FetchDescription(self):
|
1825
|
+
def FetchDescription(self, force=False):
|
1697
1826
|
issue = self.GetIssue()
|
1698
1827
|
assert issue
|
1699
1828
|
try:
|
1700
|
-
return self.RpcServer().get_description(issue).strip()
|
1829
|
+
return self.RpcServer().get_description(issue, force=force).strip()
|
1701
1830
|
except urllib2.HTTPError as e:
|
1702
1831
|
if e.code == 404:
|
1703
1832
|
DieWithError(
|
@@ -1720,10 +1849,6 @@ class _RietveldChangelistImpl(_ChangelistCodereviewBase):
|
|
1720
1849
|
def GetMostRecentPatchset(self):
|
1721
1850
|
return self.GetIssueProperties()['patchsets'][-1]
|
1722
1851
|
|
1723
|
-
def GetPatchSetDiff(self, issue, patchset):
|
1724
|
-
return self.RpcServer().get(
|
1725
|
-
'/download/issue%s_%s.diff' % (issue, patchset))
|
1726
|
-
|
1727
1852
|
def GetIssueProperties(self):
|
1728
1853
|
if self._props is None:
|
1729
1854
|
issue = self.GetIssue()
|
@@ -1733,9 +1858,33 @@ class _RietveldChangelistImpl(_ChangelistCodereviewBase):
|
|
1733
1858
|
self._props = self.RpcServer().get_issue_properties(issue, True)
|
1734
1859
|
return self._props
|
1735
1860
|
|
1861
|
+
def CannotTriggerTryJobReason(self):
|
1862
|
+
props = self.GetIssueProperties()
|
1863
|
+
if not props:
|
1864
|
+
return 'Rietveld doesn\'t know about your issue %s' % self.GetIssue()
|
1865
|
+
if props.get('closed'):
|
1866
|
+
return 'CL %s is closed' % self.GetIssue()
|
1867
|
+
if props.get('private'):
|
1868
|
+
return 'CL %s is private' % self.GetIssue()
|
1869
|
+
return None
|
1870
|
+
|
1871
|
+
def GetTryjobProperties(self, patchset=None):
|
1872
|
+
"""Returns dictionary of properties to launch tryjob."""
|
1873
|
+
project = (self.GetIssueProperties() or {}).get('project')
|
1874
|
+
return {
|
1875
|
+
'issue': self.GetIssue(),
|
1876
|
+
'patch_project': project,
|
1877
|
+
'patch_storage': 'rietveld',
|
1878
|
+
'patchset': patchset or self.GetPatchset(),
|
1879
|
+
'rietveld': self.GetCodereviewServer(),
|
1880
|
+
}
|
1881
|
+
|
1736
1882
|
def GetApprovingReviewers(self):
|
1737
1883
|
return get_approving_reviewers(self.GetIssueProperties())
|
1738
1884
|
|
1885
|
+
def GetIssueOwner(self):
|
1886
|
+
return (self.GetIssueProperties() or {}).get('owner_email')
|
1887
|
+
|
1739
1888
|
def AddComment(self, message):
|
1740
1889
|
return self.RpcServer().add_comment(self.GetIssue(), message)
|
1741
1890
|
|
@@ -1798,9 +1947,8 @@ class _RietveldChangelistImpl(_ChangelistCodereviewBase):
|
|
1798
1947
|
return 'reply'
|
1799
1948
|
return 'waiting'
|
1800
1949
|
|
1801
|
-
def UpdateDescriptionRemote(self, description):
|
1802
|
-
|
1803
|
-
self.GetIssue(), self.description)
|
1950
|
+
def UpdateDescriptionRemote(self, description, force=False):
|
1951
|
+
self.RpcServer().update_description(self.GetIssue(), description)
|
1804
1952
|
|
1805
1953
|
def CloseIssue(self):
|
1806
1954
|
return self.RpcServer().close_issue(self.GetIssue())
|
@@ -1830,7 +1978,7 @@ class _RietveldChangelistImpl(_ChangelistCodereviewBase):
|
|
1830
1978
|
if not self._rpc_server:
|
1831
1979
|
self._rpc_server = rietveld.CachingRietveld(
|
1832
1980
|
self.GetCodereviewServer(),
|
1833
|
-
self._auth_config
|
1981
|
+
self._auth_config)
|
1834
1982
|
return self._rpc_server
|
1835
1983
|
|
1836
1984
|
@classmethod
|
@@ -1861,11 +2009,8 @@ class _RietveldChangelistImpl(_ChangelistCodereviewBase):
|
|
1861
2009
|
assert new_state == _CQState.DRY_RUN
|
1862
2010
|
self.SetFlags({'commit': '1', 'cq_dry_run': '1'})
|
1863
2011
|
|
1864
|
-
|
1865
2012
|
def CMDPatchWithParsedIssue(self, parsed_issue_arg, reject, nocommit,
|
1866
2013
|
directory):
|
1867
|
-
# TODO(maruel): Use apply_issue.py
|
1868
|
-
|
1869
2014
|
# PatchIssue should never be called with a dirty tree. It is up to the
|
1870
2015
|
# caller to check this, but just in case we assert here since the
|
1871
2016
|
# consequences of the caller not checking this could be dire.
|
@@ -1875,47 +2020,13 @@ class _RietveldChangelistImpl(_ChangelistCodereviewBase):
|
|
1875
2020
|
if parsed_issue_arg.hostname:
|
1876
2021
|
self._rietveld_server = 'https://%s' % parsed_issue_arg.hostname
|
1877
2022
|
|
1878
|
-
|
1879
|
-
|
1880
|
-
|
1881
|
-
patchset = parsed_issue_arg.patchset
|
1882
|
-
patch_data = urllib2.urlopen(parsed_issue_arg.patch_url).read()
|
1883
|
-
else:
|
1884
|
-
patchset = parsed_issue_arg.patchset or self.GetMostRecentPatchset()
|
1885
|
-
patch_data = self.GetPatchSetDiff(self.GetIssue(), patchset)
|
1886
|
-
|
1887
|
-
# Switch up to the top-level directory, if necessary, in preparation for
|
1888
|
-
# applying the patch.
|
1889
|
-
top = settings.GetRelativeRoot()
|
1890
|
-
if top:
|
1891
|
-
os.chdir(top)
|
1892
|
-
|
1893
|
-
# Git patches have a/ at the beginning of source paths. We strip that out
|
1894
|
-
# with a sed script rather than the -p flag to patch so we can feed either
|
1895
|
-
# Git or svn-style patches into the same apply command.
|
1896
|
-
# re.sub() should be used but flags=re.MULTILINE is only in python 2.7.
|
2023
|
+
patchset = parsed_issue_arg.patchset or self.GetMostRecentPatchset()
|
2024
|
+
patchset_object = self.RpcServer().get_patch(self.GetIssue(), patchset)
|
2025
|
+
scm_obj = checkout.GitCheckout(settings.GetRoot(), None, None, None, None)
|
1897
2026
|
try:
|
1898
|
-
|
1899
|
-
|
1900
|
-
|
1901
|
-
DieWithError('Git patch mungling failed.')
|
1902
|
-
logging.info(patch_data)
|
1903
|
-
|
1904
|
-
# We use "git apply" to apply the patch instead of "patch" so that we can
|
1905
|
-
# pick up file adds.
|
1906
|
-
# The --index flag means: also insert into the index (so we catch adds).
|
1907
|
-
cmd = ['git', 'apply', '--index', '-p0']
|
1908
|
-
if directory:
|
1909
|
-
cmd.extend(('--directory', directory))
|
1910
|
-
if reject:
|
1911
|
-
cmd.append('--reject')
|
1912
|
-
elif IsGitVersionAtLeast('1.7.12'):
|
1913
|
-
cmd.append('--3way')
|
1914
|
-
try:
|
1915
|
-
subprocess2.check_call(cmd, env=GetNoGitPagerEnv(),
|
1916
|
-
stdin=patch_data, stdout=subprocess2.VOID)
|
1917
|
-
except subprocess2.CalledProcessError:
|
1918
|
-
print('Failed to apply the patch')
|
2027
|
+
scm_obj.apply_patch(patchset_object)
|
2028
|
+
except Exception as e:
|
2029
|
+
print(str(e))
|
1919
2030
|
return 1
|
1920
2031
|
|
1921
2032
|
# If we had an issue, commit the current state and register the issue.
|
@@ -1939,24 +2050,23 @@ class _RietveldChangelistImpl(_ChangelistCodereviewBase):
|
|
1939
2050
|
match = re.match(r'/(\d+)/$', parsed_url.path)
|
1940
2051
|
match2 = re.match(r'ps(\d+)$', parsed_url.fragment)
|
1941
2052
|
if match and match2:
|
1942
|
-
return
|
2053
|
+
return _ParsedIssueNumberArgument(
|
1943
2054
|
issue=int(match.group(1)),
|
1944
2055
|
patchset=int(match2.group(1)),
|
1945
2056
|
hostname=parsed_url.netloc)
|
1946
2057
|
# Typical url: https://domain/<issue_number>[/[other]]
|
1947
2058
|
match = re.match('/(\d+)(/.*)?$', parsed_url.path)
|
1948
2059
|
if match:
|
1949
|
-
return
|
2060
|
+
return _ParsedIssueNumberArgument(
|
1950
2061
|
issue=int(match.group(1)),
|
1951
2062
|
hostname=parsed_url.netloc)
|
1952
2063
|
# Rietveld patch: https://domain/download/issue<number>_<patchset>.diff
|
1953
2064
|
match = re.match(r'/download/issue(\d+)_(\d+).diff$', parsed_url.path)
|
1954
2065
|
if match:
|
1955
|
-
return
|
2066
|
+
return _ParsedIssueNumberArgument(
|
1956
2067
|
issue=int(match.group(1)),
|
1957
2068
|
patchset=int(match.group(2)),
|
1958
|
-
hostname=parsed_url.netloc
|
1959
|
-
patch_url=gclient_utils.UpgradeToHttps(parsed_url.geturl()))
|
2069
|
+
hostname=parsed_url.netloc)
|
1960
2070
|
return None
|
1961
2071
|
|
1962
2072
|
def CMDUploadChange(self, options, args, change):
|
@@ -1983,8 +2093,12 @@ class _RietveldChangelistImpl(_ChangelistCodereviewBase):
|
|
1983
2093
|
else:
|
1984
2094
|
if options.title is not None:
|
1985
2095
|
upload_args.extend(['--title', options.title])
|
1986
|
-
|
1987
|
-
|
2096
|
+
if options.message:
|
2097
|
+
message = options.message
|
2098
|
+
else:
|
2099
|
+
message = CreateDescriptionFromLog(args)
|
2100
|
+
if options.title:
|
2101
|
+
message = options.title + '\n\n' + message
|
1988
2102
|
change_desc = ChangeDescription(message)
|
1989
2103
|
if options.reviewers or options.tbr_owners:
|
1990
2104
|
change_desc.update_reviewers(options.reviewers,
|
@@ -2003,7 +2117,7 @@ class _RietveldChangelistImpl(_ChangelistCodereviewBase):
|
|
2003
2117
|
change_desc.get_reviewers()))
|
2004
2118
|
if options.send_mail:
|
2005
2119
|
if not change_desc.get_reviewers():
|
2006
|
-
DieWithError("Must specify reviewers to send email.")
|
2120
|
+
DieWithError("Must specify reviewers to send email.", change_desc)
|
2007
2121
|
upload_args.append('--send_mail')
|
2008
2122
|
|
2009
2123
|
# We check this before applying rietveld.private assuming that in
|
@@ -2017,6 +2131,8 @@ class _RietveldChangelistImpl(_ChangelistCodereviewBase):
|
|
2017
2131
|
else:
|
2018
2132
|
cc = self.GetCCList()
|
2019
2133
|
cc = ','.join(filter(None, (cc, ','.join(options.cc))))
|
2134
|
+
if change_desc.get_cced():
|
2135
|
+
cc = ','.join(filter(None, (cc, ','.join(change_desc.get_cced()))))
|
2020
2136
|
if cc:
|
2021
2137
|
upload_args.extend(['--cc', cc])
|
2022
2138
|
|
@@ -2031,16 +2147,12 @@ class _RietveldChangelistImpl(_ChangelistCodereviewBase):
|
|
2031
2147
|
# projects that have their source spread across multiple repos.
|
2032
2148
|
remote_url = self.GetGitBaseUrlFromConfig()
|
2033
2149
|
if not remote_url:
|
2034
|
-
if
|
2035
|
-
remote_url = self.
|
2036
|
-
|
2037
|
-
if self.GetRemoteUrl() and '/' in self.GetUpstreamBranch():
|
2038
|
-
remote_url = '%s@%s' % (self.GetRemoteUrl(),
|
2039
|
-
self.GetUpstreamBranch().split('/')[-1])
|
2150
|
+
if self.GetRemoteUrl() and '/' in self.GetUpstreamBranch():
|
2151
|
+
remote_url = '%s@%s' % (self.GetRemoteUrl(),
|
2152
|
+
self.GetUpstreamBranch().split('/')[-1])
|
2040
2153
|
if remote_url:
|
2041
2154
|
remote, remote_branch = self.GetRemoteBranch()
|
2042
|
-
target_ref = GetTargetRef(remote, remote_branch, options.target_branch
|
2043
|
-
settings.GetPendingRefPrefix())
|
2155
|
+
target_ref = GetTargetRef(remote, remote_branch, options.target_branch)
|
2044
2156
|
if target_ref:
|
2045
2157
|
upload_args.extend(['--target_ref', target_ref])
|
2046
2158
|
|
@@ -2090,12 +2202,7 @@ class _RietveldChangelistImpl(_ChangelistCodereviewBase):
|
|
2090
2202
|
# If we got an exception after the user typed a description for their
|
2091
2203
|
# change, back up the description before re-raising.
|
2092
2204
|
if change_desc:
|
2093
|
-
|
2094
|
-
print('\nGot exception while uploading -- saving description to %s\n' %
|
2095
|
-
backup_path)
|
2096
|
-
backup_file = open(backup_path, 'w')
|
2097
|
-
backup_file.write(change_desc.description)
|
2098
|
-
backup_file.close()
|
2205
|
+
SaveDescriptionBackup(change_desc)
|
2099
2206
|
raise
|
2100
2207
|
|
2101
2208
|
if not self.GetIssue():
|
@@ -2105,13 +2212,20 @@ class _RietveldChangelistImpl(_ChangelistCodereviewBase):
|
|
2105
2212
|
|
2106
2213
|
|
2107
2214
|
class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
2108
|
-
def __init__(self, changelist, auth_config=None):
|
2215
|
+
def __init__(self, changelist, auth_config=None, codereview_host=None):
|
2109
2216
|
# auth_config is Rietveld thing, kept here to preserve interface only.
|
2110
2217
|
super(_GerritChangelistImpl, self).__init__(changelist)
|
2111
2218
|
self._change_id = None
|
2112
2219
|
# Lazily cached values.
|
2113
|
-
self._gerrit_server = None # e.g. https://chromium-review.googlesource.com
|
2114
2220
|
self._gerrit_host = None # e.g. chromium-review.googlesource.com
|
2221
|
+
self._gerrit_server = None # e.g. https://chromium-review.googlesource.com
|
2222
|
+
# Map from change number (issue) to its detail cache.
|
2223
|
+
self._detail_cache = {}
|
2224
|
+
|
2225
|
+
if codereview_host is not None:
|
2226
|
+
assert not codereview_host.startswith('https://'), codereview_host
|
2227
|
+
self._gerrit_host = codereview_host
|
2228
|
+
self._gerrit_server = 'https://%s' % codereview_host
|
2115
2229
|
|
2116
2230
|
def _GetGerritHost(self):
|
2117
2231
|
# Lazy load of configs.
|
@@ -2161,7 +2275,7 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
|
2161
2275
|
def CodereviewServerConfigKey(cls):
|
2162
2276
|
return 'gerritserver'
|
2163
2277
|
|
2164
|
-
def EnsureAuthenticated(self, force):
|
2278
|
+
def EnsureAuthenticated(self, force, refresh=None):
|
2165
2279
|
"""Best effort check that user is authenticated with Gerrit server."""
|
2166
2280
|
if settings.GetGerritSkipEnsureAuthenticated():
|
2167
2281
|
# For projects with unusual authentication schemes.
|
@@ -2206,6 +2320,26 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
|
2206
2320
|
cookie_auth.get_netrc_path(),
|
2207
2321
|
cookie_auth.get_new_password_message(git_host)))
|
2208
2322
|
|
2323
|
+
def EnsureCanUploadPatchset(self):
|
2324
|
+
"""Best effort check that uploading isn't supposed to fail for predictable
|
2325
|
+
reasons.
|
2326
|
+
|
2327
|
+
This method should raise informative exception if uploading shouldn't
|
2328
|
+
proceed.
|
2329
|
+
"""
|
2330
|
+
if not self.GetIssue():
|
2331
|
+
return
|
2332
|
+
|
2333
|
+
# Warm change details cache now to avoid RPCs later, reducing latency for
|
2334
|
+
# developers.
|
2335
|
+
self.FetchDescription()
|
2336
|
+
|
2337
|
+
status = self._GetChangeDetail()['status']
|
2338
|
+
if status in ('MERGED', 'ABANDONED'):
|
2339
|
+
DieWithError('Change %s has been %s, new uploads are not allowed' %
|
2340
|
+
(self.GetIssueURL(),
|
2341
|
+
'submitted' if status == 'MERGED' else 'abandoned'))
|
2342
|
+
|
2209
2343
|
def _PostUnsetIssueProperties(self):
|
2210
2344
|
"""Which branch-specific properties to erase when unsetting issue."""
|
2211
2345
|
return ['gerritsquashhash']
|
@@ -2240,8 +2374,8 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
|
2240
2374
|
* 'unsent' - no reviewers added
|
2241
2375
|
* 'waiting' - waiting for review
|
2242
2376
|
* 'reply' - waiting for owner to reply to review
|
2243
|
-
* 'not lgtm' - Code-Review
|
2244
|
-
* 'lgtm' - Code-Review
|
2377
|
+
* 'not lgtm' - Code-Review disapproval from at least one valid reviewer
|
2378
|
+
* 'lgtm' - Code-Review approval from at least one valid reviewer
|
2245
2379
|
* 'commit' - in the commit queue
|
2246
2380
|
* 'closed' - abandoned
|
2247
2381
|
"""
|
@@ -2250,7 +2384,7 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
|
2250
2384
|
|
2251
2385
|
try:
|
2252
2386
|
data = self._GetChangeDetail(['DETAILED_LABELS', 'CURRENT_REVISION'])
|
2253
|
-
except httplib.HTTPException:
|
2387
|
+
except (httplib.HTTPException, GerritChangeNotExists):
|
2254
2388
|
return 'error'
|
2255
2389
|
|
2256
2390
|
if data['status'] in ('ABANDONED', 'MERGED'):
|
@@ -2258,8 +2392,15 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
|
2258
2392
|
|
2259
2393
|
cq_label = data['labels'].get('Commit-Queue', {})
|
2260
2394
|
if cq_label:
|
2261
|
-
|
2262
|
-
|
2395
|
+
votes = cq_label.get('all', [])
|
2396
|
+
highest_vote = 0
|
2397
|
+
for v in votes:
|
2398
|
+
highest_vote = max(highest_vote, v.get('value', 0))
|
2399
|
+
vote_value = str(highest_vote)
|
2400
|
+
if vote_value != '0':
|
2401
|
+
# Add a '+' if the value is not 0 to match the values in the label.
|
2402
|
+
# The cq_label does not have negatives.
|
2403
|
+
vote_value = '+' + vote_value
|
2263
2404
|
vote_text = cq_label.get('values', {}).get(vote_value, '')
|
2264
2405
|
if vote_text.lower() == 'commit':
|
2265
2406
|
return 'commit'
|
@@ -2275,28 +2416,40 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
|
2275
2416
|
return 'unsent'
|
2276
2417
|
|
2277
2418
|
messages = data.get('messages', [])
|
2278
|
-
|
2279
|
-
|
2280
|
-
last_message_author = messages
|
2281
|
-
if
|
2419
|
+
owner = data['owner'].get('_account_id')
|
2420
|
+
while messages:
|
2421
|
+
last_message_author = messages.pop().get('author', {})
|
2422
|
+
if last_message_author.get('email') == COMMIT_BOT_EMAIL:
|
2423
|
+
# Ignore replies from CQ.
|
2424
|
+
continue
|
2425
|
+
if last_message_author.get('_account_id') != owner:
|
2282
2426
|
# Some reply from non-owner.
|
2283
2427
|
return 'reply'
|
2284
|
-
|
2285
2428
|
return 'waiting'
|
2286
2429
|
|
2287
2430
|
def GetMostRecentPatchset(self):
|
2288
2431
|
data = self._GetChangeDetail(['CURRENT_REVISION'])
|
2289
2432
|
return data['revisions'][data['current_revision']]['_number']
|
2290
2433
|
|
2291
|
-
def FetchDescription(self):
|
2292
|
-
data = self._GetChangeDetail(['CURRENT_REVISION']
|
2434
|
+
def FetchDescription(self, force=False):
|
2435
|
+
data = self._GetChangeDetail(['CURRENT_REVISION', 'CURRENT_COMMIT'],
|
2436
|
+
no_cache=force)
|
2293
2437
|
current_rev = data['current_revision']
|
2294
|
-
|
2295
|
-
|
2438
|
+
return data['revisions'][current_rev]['commit']['message']
|
2439
|
+
|
2440
|
+
def UpdateDescriptionRemote(self, description, force=False):
|
2441
|
+
if gerrit_util.HasPendingChangeEdit(self._GetGerritHost(), self.GetIssue()):
|
2442
|
+
if not force:
|
2443
|
+
ask_for_data(
|
2444
|
+
'The description cannot be modified while the issue has a pending '
|
2445
|
+
'unpublished edit. Either publish the edit in the Gerrit web UI '
|
2446
|
+
'or delete it.\n\n'
|
2447
|
+
'Press Enter to delete the unpublished edit, Ctrl+C to abort.')
|
2296
2448
|
|
2297
|
-
|
2449
|
+
gerrit_util.DeletePendingChangeEdit(self._GetGerritHost(),
|
2450
|
+
self.GetIssue())
|
2298
2451
|
gerrit_util.SetCommitMessage(self._GetGerritHost(), self.GetIssue(),
|
2299
|
-
description)
|
2452
|
+
description, notify='NONE')
|
2300
2453
|
|
2301
2454
|
def CloseIssue(self):
|
2302
2455
|
gerrit_util.AbandonChange(self._GetGerritHost(), self.GetIssue(), msg='')
|
@@ -2312,12 +2465,59 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
|
2312
2465
|
gerrit_util.SubmitChange(self._GetGerritHost(), self.GetIssue(),
|
2313
2466
|
wait_for_merge=wait_for_merge)
|
2314
2467
|
|
2315
|
-
def _GetChangeDetail(self, options=None, issue=None
|
2468
|
+
def _GetChangeDetail(self, options=None, issue=None,
|
2469
|
+
no_cache=False):
|
2470
|
+
"""Returns details of the issue by querying Gerrit and caching results.
|
2471
|
+
|
2472
|
+
If fresh data is needed, set no_cache=True which will clear cache and
|
2473
|
+
thus new data will be fetched from Gerrit.
|
2474
|
+
"""
|
2316
2475
|
options = options or []
|
2317
2476
|
issue = issue or self.GetIssue()
|
2318
|
-
assert issue, 'issue required to query Gerrit'
|
2319
|
-
|
2320
|
-
|
2477
|
+
assert issue, 'issue is required to query Gerrit'
|
2478
|
+
|
2479
|
+
# Optimization to avoid multiple RPCs:
|
2480
|
+
if (('CURRENT_REVISION' in options or 'ALL_REVISIONS' in options) and
|
2481
|
+
'CURRENT_COMMIT' not in options):
|
2482
|
+
options.append('CURRENT_COMMIT')
|
2483
|
+
|
2484
|
+
# Normalize issue and options for consistent keys in cache.
|
2485
|
+
issue = str(issue)
|
2486
|
+
options = [o.upper() for o in options]
|
2487
|
+
|
2488
|
+
# Check in cache first unless no_cache is True.
|
2489
|
+
if no_cache:
|
2490
|
+
self._detail_cache.pop(issue, None)
|
2491
|
+
else:
|
2492
|
+
options_set = frozenset(options)
|
2493
|
+
for cached_options_set, data in self._detail_cache.get(issue, []):
|
2494
|
+
# Assumption: data fetched before with extra options is suitable
|
2495
|
+
# for return for a smaller set of options.
|
2496
|
+
# For example, if we cached data for
|
2497
|
+
# options=[CURRENT_REVISION, DETAILED_FOOTERS]
|
2498
|
+
# and request is for options=[CURRENT_REVISION],
|
2499
|
+
# THEN we can return prior cached data.
|
2500
|
+
if options_set.issubset(cached_options_set):
|
2501
|
+
return data
|
2502
|
+
|
2503
|
+
try:
|
2504
|
+
data = gerrit_util.GetChangeDetail(self._GetGerritHost(), str(issue),
|
2505
|
+
options, ignore_404=False)
|
2506
|
+
except gerrit_util.GerritError as e:
|
2507
|
+
if e.http_status == 404:
|
2508
|
+
raise GerritChangeNotExists(issue, self.GetCodereviewServer())
|
2509
|
+
raise
|
2510
|
+
|
2511
|
+
self._detail_cache.setdefault(issue, []).append((frozenset(options), data))
|
2512
|
+
return data
|
2513
|
+
|
2514
|
+
def _GetChangeCommit(self, issue=None):
|
2515
|
+
issue = issue or self.GetIssue()
|
2516
|
+
assert issue, 'issue is required to query Gerrit'
|
2517
|
+
data = gerrit_util.GetChangeCommit(self._GetGerritHost(), str(issue))
|
2518
|
+
if not data:
|
2519
|
+
raise GerritChangeNotExists(issue, self.GetCodereviewServer())
|
2520
|
+
return data
|
2321
2521
|
|
2322
2522
|
def CMDLand(self, force, bypass_hooks, verbose):
|
2323
2523
|
if git_common.is_dirty_git_tree('land'):
|
@@ -2344,7 +2544,8 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
|
2344
2544
|
if differs:
|
2345
2545
|
if not force:
|
2346
2546
|
ask_for_data(
|
2347
|
-
'Do you want to submit latest Gerrit patchset and bypass hooks
|
2547
|
+
'Do you want to submit latest Gerrit patchset and bypass hooks?\n'
|
2548
|
+
'Press Enter to continue, Ctrl+C to abort.')
|
2348
2549
|
print('WARNING: bypassing hooks and submitting latest uploaded patchset')
|
2349
2550
|
elif not bypass_hooks:
|
2350
2551
|
hook_results = self.RunHook(
|
@@ -2357,6 +2558,11 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
|
2357
2558
|
|
2358
2559
|
self.SubmitIssue(wait_for_merge=True)
|
2359
2560
|
print('Issue %s has been submitted.' % self.GetIssueURL())
|
2561
|
+
links = self._GetChangeCommit().get('web_links', [])
|
2562
|
+
for link in links:
|
2563
|
+
if link.get('name') == 'gitiles' and link.get('url'):
|
2564
|
+
print('Landed as %s' % link.get('url'))
|
2565
|
+
break
|
2360
2566
|
return 0
|
2361
2567
|
|
2362
2568
|
def CMDPatchWithParsedIssue(self, parsed_issue_arg, reject, nocommit,
|
@@ -2372,7 +2578,10 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
|
2372
2578
|
self._gerrit_host = parsed_issue_arg.hostname
|
2373
2579
|
self._gerrit_server = 'https://%s' % self._gerrit_host
|
2374
2580
|
|
2375
|
-
|
2581
|
+
try:
|
2582
|
+
detail = self._GetChangeDetail(['ALL_REVISIONS'])
|
2583
|
+
except GerritChangeNotExists as e:
|
2584
|
+
DieWithError(str(e))
|
2376
2585
|
|
2377
2586
|
if not parsed_issue_arg.patchset:
|
2378
2587
|
# Use current revision by default.
|
@@ -2384,7 +2593,7 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
|
2384
2593
|
if int(revision_info['_number']) == parsed_issue_arg.patchset:
|
2385
2594
|
break
|
2386
2595
|
else:
|
2387
|
-
DieWithError('Couldn\'t find patchset %i in
|
2596
|
+
DieWithError('Couldn\'t find patchset %i in change %i' %
|
2388
2597
|
(parsed_issue_arg.patchset, self.GetIssue()))
|
2389
2598
|
|
2390
2599
|
fetch_info = revision_info['fetch']['http']
|
@@ -2392,7 +2601,7 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
|
2392
2601
|
RunGit(['cherry-pick', 'FETCH_HEAD'])
|
2393
2602
|
self.SetIssue(self.GetIssue())
|
2394
2603
|
self.SetPatchset(patchset)
|
2395
|
-
print('Committed patch for
|
2604
|
+
print('Committed patch for change %i patchset %i locally' %
|
2396
2605
|
(self.GetIssue(), self.GetPatchset()))
|
2397
2606
|
return 0
|
2398
2607
|
|
@@ -2453,8 +2662,11 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
|
2453
2662
|
gerrit_remote = 'origin'
|
2454
2663
|
|
2455
2664
|
remote, remote_branch = self.GetRemoteBranch()
|
2456
|
-
branch = GetTargetRef(remote, remote_branch, options.target_branch
|
2457
|
-
|
2665
|
+
branch = GetTargetRef(remote, remote_branch, options.target_branch)
|
2666
|
+
|
2667
|
+
# This may be None; default fallback value is determined in logic below.
|
2668
|
+
title = options.title
|
2669
|
+
automatic_title = False
|
2458
2670
|
|
2459
2671
|
if options.squash:
|
2460
2672
|
self._GerritCommitMsgHookCheck(offer_removal=not options.force)
|
@@ -2463,8 +2675,14 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
|
2463
2675
|
message = self.GetDescription()
|
2464
2676
|
if not message:
|
2465
2677
|
DieWithError(
|
2466
|
-
'failed to fetch description from current Gerrit
|
2678
|
+
'failed to fetch description from current Gerrit change %d\n'
|
2467
2679
|
'%s' % (self.GetIssue(), self.GetIssueURL()))
|
2680
|
+
if not title:
|
2681
|
+
default_title = RunGit(['show', '-s', '--format=%s', 'HEAD']).strip()
|
2682
|
+
title = ask_for_data(
|
2683
|
+
'Title for patchset [%s]: ' % default_title) or default_title
|
2684
|
+
if title == default_title:
|
2685
|
+
automatic_title = True
|
2468
2686
|
change_id = self._GetChangeDetail()['change_id']
|
2469
2687
|
while True:
|
2470
2688
|
footer_change_ids = git_footers.get_footer_change_id(message)
|
@@ -2472,7 +2690,7 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
|
2472
2690
|
break
|
2473
2691
|
if not footer_change_ids:
|
2474
2692
|
message = git_footers.add_footer_change_id(message, change_id)
|
2475
|
-
print('WARNING: appended missing Change-Id to
|
2693
|
+
print('WARNING: appended missing Change-Id to change description')
|
2476
2694
|
continue
|
2477
2695
|
# There is already a valid footer but with different or several ids.
|
2478
2696
|
# Doing this automatically is non-trivial as we don't want to lose
|
@@ -2481,9 +2699,9 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
|
2481
2699
|
# new description.
|
2482
2700
|
message = '%s\n\nChange-Id: %s' % (message, change_id)
|
2483
2701
|
print(
|
2484
|
-
'WARNING:
|
2702
|
+
'WARNING: change %s has Change-Id footer(s):\n'
|
2485
2703
|
' %s\n'
|
2486
|
-
'but
|
2704
|
+
'but change has Change-Id %s, according to Gerrit.\n'
|
2487
2705
|
'Please, check the proposed correction to the description, '
|
2488
2706
|
'and edit it if necessary but keep the "Change-Id: %s" footer\n'
|
2489
2707
|
% (self.GetIssue(), '\n '.join(footer_change_ids), change_id,
|
@@ -2500,11 +2718,20 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
|
2500
2718
|
# footer.
|
2501
2719
|
assert [change_id] == git_footers.get_footer_change_id(message)
|
2502
2720
|
change_desc = ChangeDescription(message)
|
2503
|
-
else:
|
2504
|
-
|
2505
|
-
|
2721
|
+
else: # if not self.GetIssue()
|
2722
|
+
if options.message:
|
2723
|
+
message = options.message
|
2724
|
+
else:
|
2725
|
+
message = CreateDescriptionFromLog(args)
|
2726
|
+
if options.title:
|
2727
|
+
message = options.title + '\n\n' + message
|
2728
|
+
change_desc = ChangeDescription(message)
|
2506
2729
|
if not options.force:
|
2507
2730
|
change_desc.prompt(bug=options.bug)
|
2731
|
+
# On first upload, patchset title is always this string, while
|
2732
|
+
# --title flag gets converted to first line of message.
|
2733
|
+
title = 'Initial upload'
|
2734
|
+
automatic_title = True
|
2508
2735
|
if not change_desc.description:
|
2509
2736
|
DieWithError("Description is empty. Aborting...")
|
2510
2737
|
message = change_desc.description
|
@@ -2534,10 +2761,12 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
|
2534
2761
|
if not parent or (RunGitSilent(['rev-parse', upstream_branch + ':']) !=
|
2535
2762
|
RunGitSilent(['rev-parse', parent + ':'])):
|
2536
2763
|
DieWithError(
|
2537
|
-
'
|
2538
|
-
'
|
2539
|
-
'
|
2540
|
-
'
|
2764
|
+
'\nUpload upstream branch %s first.\n'
|
2765
|
+
'It is likely that this branch has been rebased since its last '
|
2766
|
+
'upload, so you just need to upload it again.\n'
|
2767
|
+
'(If you uploaded it with --no-squash, then branch dependencies '
|
2768
|
+
'are not supported, and you should reupload with --squash.)'
|
2769
|
+
% upstream_branch_name, change_desc)
|
2541
2770
|
else:
|
2542
2771
|
parent = self.GetCommonAncestorWithUpstream()
|
2543
2772
|
|
@@ -2580,34 +2809,41 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
|
2580
2809
|
print('Adding self-LGTM (Code-Review +1) because of TBRs')
|
2581
2810
|
refspec_opts.append('l=Code-Review+1')
|
2582
2811
|
|
2583
|
-
if
|
2584
|
-
if not re.match(r'^[\w ]+$',
|
2585
|
-
|
2586
|
-
|
2587
|
-
|
2588
|
-
|
2589
|
-
|
2812
|
+
if title:
|
2813
|
+
if not re.match(r'^[\w ]+$', title):
|
2814
|
+
title = re.sub(r'[^\w ]', '', title)
|
2815
|
+
if not automatic_title:
|
2816
|
+
print('WARNING: Patchset title may only contain alphanumeric chars '
|
2817
|
+
'and spaces. You can edit it in the UI. '
|
2818
|
+
'See https://crbug.com/663787.\n'
|
2819
|
+
'Cleaned up title: %s' % title)
|
2590
2820
|
# Per doc, spaces must be converted to underscores, and Gerrit will do the
|
2591
2821
|
# reverse on its side.
|
2592
|
-
refspec_opts.append('m=' +
|
2822
|
+
refspec_opts.append('m=' + title.replace(' ', '_'))
|
2593
2823
|
|
2594
2824
|
if options.send_mail:
|
2595
2825
|
if not change_desc.get_reviewers():
|
2596
|
-
DieWithError('Must specify reviewers to send email.')
|
2826
|
+
DieWithError('Must specify reviewers to send email.', change_desc)
|
2597
2827
|
refspec_opts.append('notify=ALL')
|
2598
2828
|
else:
|
2599
2829
|
refspec_opts.append('notify=NONE')
|
2600
2830
|
|
2601
|
-
cc = self.GetCCList().split(',')
|
2602
|
-
if options.cc:
|
2603
|
-
cc.extend(options.cc)
|
2604
|
-
cc = filter(None, cc)
|
2605
|
-
if cc:
|
2606
|
-
refspec_opts.extend('cc=' + email.strip() for email in cc)
|
2607
|
-
|
2608
2831
|
reviewers = change_desc.get_reviewers()
|
2609
2832
|
if reviewers:
|
2610
|
-
|
2833
|
+
# TODO(tandrii): remove this horrible hack once (Poly)Gerrit fixes their
|
2834
|
+
# side for real (b/34702620).
|
2835
|
+
def clean_invisible_chars(email):
|
2836
|
+
return email.decode('unicode_escape').encode('ascii', 'ignore')
|
2837
|
+
refspec_opts.extend('r=' + clean_invisible_chars(email).strip()
|
2838
|
+
for email in reviewers)
|
2839
|
+
|
2840
|
+
if options.private:
|
2841
|
+
refspec_opts.append('draft')
|
2842
|
+
|
2843
|
+
if options.topic:
|
2844
|
+
# Documentation on Gerrit topics is here:
|
2845
|
+
# https://gerrit-review.googlesource.com/Documentation/user-upload.html#topic
|
2846
|
+
refspec_opts.append('topic=%s' % options.topic)
|
2611
2847
|
|
2612
2848
|
refspec_suffix = ''
|
2613
2849
|
if refspec_opts:
|
@@ -2616,12 +2852,16 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
|
2616
2852
|
'spaces not allowed in refspec: "%s"' % refspec_suffix)
|
2617
2853
|
refspec = '%s:refs/for/%s%s' % (ref_to_push, branch, refspec_suffix)
|
2618
2854
|
|
2619
|
-
|
2620
|
-
|
2621
|
-
|
2622
|
-
|
2623
|
-
|
2624
|
-
|
2855
|
+
try:
|
2856
|
+
push_stdout = gclient_utils.CheckCallAndFilter(
|
2857
|
+
['git', 'push', self.GetRemoteUrl(), refspec],
|
2858
|
+
print_stdout=True,
|
2859
|
+
# Flush after every line: useful for seeing progress when running as
|
2860
|
+
# recipe.
|
2861
|
+
filter_fn=lambda _: sys.stdout.flush())
|
2862
|
+
except subprocess2.CalledProcessError:
|
2863
|
+
DieWithError('Failed to create a change. Please examine output above '
|
2864
|
+
'for the reason of the failure. ', change_desc)
|
2625
2865
|
|
2626
2866
|
if options.squash:
|
2627
2867
|
regex = re.compile(r'remote:\s+https?://[\w\-\.\/]*/(\d+)\s.*')
|
@@ -2631,9 +2871,21 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
|
2631
2871
|
if len(change_numbers) != 1:
|
2632
2872
|
DieWithError(
|
2633
2873
|
('Created|Updated %d issues on Gerrit, but only 1 expected.\n'
|
2634
|
-
'Change-Id: %s') % (len(change_numbers), change_id))
|
2874
|
+
'Change-Id: %s') % (len(change_numbers), change_id), change_desc)
|
2635
2875
|
self.SetIssue(change_numbers[0])
|
2636
2876
|
self._GitSetBranchConfigValue('gerritsquashhash', ref_to_push)
|
2877
|
+
|
2878
|
+
# Add cc's from the CC_LIST and --cc flag (if any).
|
2879
|
+
cc = self.GetCCList().split(',')
|
2880
|
+
if options.cc:
|
2881
|
+
cc.extend(options.cc)
|
2882
|
+
cc = filter(None, [email.strip() for email in cc])
|
2883
|
+
if change_desc.get_cced():
|
2884
|
+
cc.extend(change_desc.get_cced())
|
2885
|
+
if cc:
|
2886
|
+
gerrit_util.AddReviewers(
|
2887
|
+
self._GetGerritHost(), self.GetIssue(), cc,
|
2888
|
+
is_reviewer=False, notify=bool(options.send_mail))
|
2637
2889
|
return 0
|
2638
2890
|
|
2639
2891
|
def _AddChangeIdToCommitMessage(self, options, args):
|
@@ -2655,10 +2907,47 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
|
|
2655
2907
|
vote_map = {
|
2656
2908
|
_CQState.NONE: 0,
|
2657
2909
|
_CQState.DRY_RUN: 1,
|
2658
|
-
_CQState.COMMIT
|
2910
|
+
_CQState.COMMIT: 2,
|
2659
2911
|
}
|
2660
|
-
|
2661
|
-
|
2912
|
+
kwargs = {'labels': {'Commit-Queue': vote_map[new_state]}}
|
2913
|
+
if new_state == _CQState.DRY_RUN:
|
2914
|
+
# Don't spam everybody reviewer/owner.
|
2915
|
+
kwargs['notify'] = 'NONE'
|
2916
|
+
gerrit_util.SetReview(self._GetGerritHost(), self.GetIssue(), **kwargs)
|
2917
|
+
|
2918
|
+
def CannotTriggerTryJobReason(self):
|
2919
|
+
try:
|
2920
|
+
data = self._GetChangeDetail()
|
2921
|
+
except GerritChangeNotExists:
|
2922
|
+
return 'Gerrit doesn\'t know about your change %s' % self.GetIssue()
|
2923
|
+
|
2924
|
+
if data['status'] in ('ABANDONED', 'MERGED'):
|
2925
|
+
return 'CL %s is closed' % self.GetIssue()
|
2926
|
+
|
2927
|
+
def GetTryjobProperties(self, patchset=None):
|
2928
|
+
"""Returns dictionary of properties to launch tryjob."""
|
2929
|
+
data = self._GetChangeDetail(['ALL_REVISIONS'])
|
2930
|
+
patchset = int(patchset or self.GetPatchset())
|
2931
|
+
assert patchset
|
2932
|
+
revision_data = None # Pylint wants it to be defined.
|
2933
|
+
for revision_data in data['revisions'].itervalues():
|
2934
|
+
if int(revision_data['_number']) == patchset:
|
2935
|
+
break
|
2936
|
+
else:
|
2937
|
+
raise Exception('Patchset %d is not known in Gerrit change %d' %
|
2938
|
+
(patchset, self.GetIssue()))
|
2939
|
+
return {
|
2940
|
+
'patch_issue': self.GetIssue(),
|
2941
|
+
'patch_set': patchset or self.GetPatchset(),
|
2942
|
+
'patch_project': data['project'],
|
2943
|
+
'patch_storage': 'gerrit',
|
2944
|
+
'patch_ref': revision_data['fetch']['http']['ref'],
|
2945
|
+
'patch_repository_url': revision_data['fetch']['http']['url'],
|
2946
|
+
'patch_gerrit_url': self.GetCodereviewServer(),
|
2947
|
+
}
|
2948
|
+
|
2949
|
+
def GetIssueOwner(self):
|
2950
|
+
return self._GetChangeDetail(['DETAILED_ACCOUNTS'])['owner']['email']
|
2662
2951
|
|
2663
2952
|
|
2664
2953
|
_CODEREVIEW_IMPLEMENTATIONS = {
|
@@ -2744,13 +3033,15 @@ def _get_bug_line_values(default_project, bugs):
|
|
2744
3033
|
class ChangeDescription(object):
|
2745
3034
|
"""Contains a parsed form of the change description."""
|
2746
3035
|
R_LINE = r'^[ \t]*(TBR|R)[ \t]*=[ \t]*(.*?)[ \t]*$'
|
3036
|
+
CC_LINE = r'^[ \t]*(CC)[ \t]*=[ \t]*(.*?)[ \t]*$'
|
2747
3037
|
BUG_LINE = r'^[ \t]*(BUG)[ \t]*=[ \t]*(.*?)[ \t]*$'
|
3038
|
+
CHERRY_PICK_LINE = r'^\(cherry picked from commit [a-fA-F0-9]{40}\)$'
|
2748
3039
|
|
2749
3040
|
def __init__(self, description):
|
2750
3041
|
self._description_lines = (description or '').strip().splitlines()
|
2751
3042
|
|
2752
3043
|
@property # www.logilab.org/ticket/89786
|
2753
|
-
def description(self): # pylint: disable=
|
3044
|
+
def description(self): # pylint: disable=method-hidden
|
2754
3045
|
return '\n'.join(self._description_lines)
|
2755
3046
|
|
2756
3047
|
def set_description(self, desc):
|
@@ -2891,6 +3182,63 @@ class ChangeDescription(object):
|
|
2891
3182
|
if match and (not tbr_only or match.group(1).upper() == 'TBR')]
|
2892
3183
|
return cleanup_list(reviewers)
|
2893
3184
|
|
3185
|
+
def get_cced(self):
|
3186
|
+
"""Retrieves the list of reviewers."""
|
3187
|
+
matches = [re.match(self.CC_LINE, line) for line in self._description_lines]
|
3188
|
+
cced = [match.group(2).strip() for match in matches if match]
|
3189
|
+
return cleanup_list(cced)
|
3190
|
+
|
3191
|
+
def update_with_git_number_footers(self, parent_hash, parent_msg, dest_ref):
|
3192
|
+
"""Updates this commit description given the parent.
|
3193
|
+
|
3194
|
+
This is essentially what Gnumbd used to do.
|
3195
|
+
Consult https://goo.gl/WMmpDe for more details.
|
3196
|
+
"""
|
3197
|
+
assert parent_msg # No, orphan branch creation isn't supported.
|
3198
|
+
assert parent_hash
|
3199
|
+
assert dest_ref
|
3200
|
+
parent_footer_map = git_footers.parse_footers(parent_msg)
|
3201
|
+
# This will also happily parse svn-position, which GnumbD is no longer
|
3202
|
+
# supporting. While we'd generate correct footers, the verifier plugin
|
3203
|
+
# installed in Gerrit will block such commit (ie git push below will fail).
|
3204
|
+
parent_position = git_footers.get_position(parent_footer_map)
|
3205
|
+
|
3206
|
+
# Cherry-picks may have last line obscuring their prior footers,
|
3207
|
+
# from git_footers perspective. This is also what Gnumbd did.
|
3208
|
+
cp_line = None
|
3209
|
+
if (self._description_lines and
|
3210
|
+
re.match(self.CHERRY_PICK_LINE, self._description_lines[-1])):
|
3211
|
+
cp_line = self._description_lines.pop()
|
3212
|
+
|
3213
|
+
top_lines, _, parsed_footers = git_footers.split_footers(self.description)
|
3214
|
+
|
3215
|
+
# Original-ify all Cr- footers, to avoid re-lands, cherry-picks, or just
|
3216
|
+
# user interference with actual footers we'd insert below.
|
3217
|
+
for i, (k, v) in enumerate(parsed_footers):
|
3218
|
+
if k.startswith('Cr-'):
|
3219
|
+
parsed_footers[i] = (k.replace('Cr-', 'Cr-Original-'), v)
|
3220
|
+
|
3221
|
+
# Add Position and Lineage footers based on the parent.
|
3222
|
+
lineage = list(reversed(parent_footer_map.get('Cr-Branched-From', [])))
|
3223
|
+
if parent_position[0] == dest_ref:
|
3224
|
+
# Same branch as parent.
|
3225
|
+
number = int(parent_position[1]) + 1
|
3226
|
+
else:
|
3227
|
+
number = 1 # New branch, and extra lineage.
|
3228
|
+
lineage.insert(0, '%s-%s@{#%d}' % (parent_hash, parent_position[0],
|
3229
|
+
int(parent_position[1])))
|
3230
|
+
|
3231
|
+
parsed_footers.append(('Cr-Commit-Position',
|
3232
|
+
'%s@{#%d}' % (dest_ref, number)))
|
3233
|
+
parsed_footers.extend(('Cr-Branched-From', v) for v in lineage)
|
3234
|
+
|
3235
|
+
self._description_lines = top_lines
|
3236
|
+
if cp_line:
|
3237
|
+
self._description_lines.append(cp_line)
|
3238
|
+
if self._description_lines[-1] != '':
|
3239
|
+
self._description_lines.append('') # Ensure footer separator.
|
3240
|
+
self._description_lines.extend('%s: %s' % kv for kv in parsed_footers)
|
3241
|
+
|
2894
3242
|
|
2895
3243
|
def get_approving_reviewers(props):
|
2896
3244
|
"""Retrieves the reviewers that approved a CL from the issue properties with
|
@@ -2939,7 +3287,8 @@ def LoadCodereviewSettingsFromFile(fileobj):
     else:
       RunGit(['config', '--unset-all', fullname], error_ok=unset_error_ok)
 
-  SetProperty('server', 'CODE_REVIEW_SERVER')
+  if not keyvals.get('GERRIT_HOST', False):
+    SetProperty('server', 'CODE_REVIEW_SERVER')
   # Only server setting is required. Other settings can be absent.
   # In that case, we ignore errors raised during option deletion attempt.
   SetProperty('cc', 'CC_LIST', unset_error_ok=True)
@@ -2948,11 +3297,8 @@ def LoadCodereviewSettingsFromFile(fileobj):
   SetProperty('viewvc-url', 'VIEW_VC', unset_error_ok=True)
   SetProperty('bug-prefix', 'BUG_PREFIX', unset_error_ok=True)
   SetProperty('cpplint-regex', 'LINT_REGEX', unset_error_ok=True)
-  SetProperty('force-https-commit-url', 'FORCE_HTTPS_COMMIT_URL',
-              unset_error_ok=True)
   SetProperty('cpplint-ignore-regex', 'LINT_IGNORE_REGEX', unset_error_ok=True)
   SetProperty('project', 'PROJECT', unset_error_ok=True)
-  SetProperty('pending-ref-prefix', 'PENDING_REF_PREFIX', unset_error_ok=True)
   SetProperty('run-post-upload-hook', 'RUN_POST_UPLOAD_HOOK',
               unset_error_ok=True)
 
@@ -2968,9 +3314,9 @@ def LoadCodereviewSettingsFromFile(fileobj):
             keyvals['GERRIT_SKIP_ENSURE_AUTHENTICATED']])
 
   if 'PUSH_URL_CONFIG' in keyvals and 'ORIGIN_URL_CONFIG' in keyvals:
-    #should be of the form
-    #PUSH_URL_CONFIG: url.ssh://gitrw.chromium.org.pushinsteadof
-    #ORIGIN_URL_CONFIG: http://src.chromium.org/git
+    # should be of the form
+    # PUSH_URL_CONFIG: url.ssh://gitrw.chromium.org.pushinsteadof
+    # ORIGIN_URL_CONFIG: http://src.chromium.org/git
     RunGit(['config', keyvals['PUSH_URL_CONFIG'],
             keyvals['ORIGIN_URL_CONFIG']])
 
@@ -3024,7 +3370,6 @@ def DownloadGerritHook(force):
                  'chmod +x .git/hooks/commit-msg' % src)
 
 
-
 def GetRietveldCodereviewSettingsInteractively():
   """Prompt the user for settings."""
   server = settings.GetDefaultServerUrl(error_ok=True)
@@ -3061,13 +3406,14 @@ def GetRietveldCodereviewSettingsInteractively():
   SetProperty(settings.GetRunPostUploadHook(), 'Run Post Upload Hook',
               'run-post-upload-hook', False)
 
+
 @subcommand.usage('[repo root containing codereview.settings]')
 def CMDconfig(parser, args):
   """Edits configuration for this tree."""
 
-  print('WARNING: git cl config works for Rietveld only
-
-  #
+  print('WARNING: git cl config works for Rietveld only')
+  # TODO(tandrii): remove this once we switch to Gerrit.
+  # See bugs http://crbug.com/637561 and http://crbug.com/600469.
   parser.add_option('--activate-update', action='store_true',
                     help='activate auto-updating [rietveld] section in '
                          '.git/config')
@@ -3140,48 +3486,51 @@ def get_cl_statuses(changes, fine_grained, max_processes=None):
   # Silence upload.py otherwise it becomes unwieldy.
   upload.verbosity = 0
 
-  if fine_grained:
-
-    # spawn processes to process all the other branches in parallel.
-    if changes:
-      def fetch(cl):
-        try:
-          return (cl, cl.GetStatus())
-        except:
-          # See http://crbug.com/629863.
-          logging.exception('failed to fetch status for %s:', cl)
-          raise
-      yield fetch(changes[0])
-
-      changes_to_fetch = changes[1:]
-      if not changes_to_fetch:
-        # Exit early if there was only one branch to fetch.
-        return
+  if not changes:
+    raise StopIteration()
 
-
-
-          if max_processes is not None
-          else max(len(changes_to_fetch), 1))
-
-      fetched_cls = set()
-      it = pool.imap_unordered(fetch, changes_to_fetch).__iter__()
-      while True:
-        try:
-          row = it.next(timeout=5)
-        except multiprocessing.TimeoutError:
-          break
-
-        fetched_cls.add(row[0])
-        yield row
-
-      # Add any branches that failed to fetch.
-      for cl in set(changes_to_fetch) - fetched_cls:
-        yield (cl, 'error')
-
-  else:
+  if not fine_grained:
+    # Fast path which doesn't involve querying codereview servers.
     # Do not use GetApprovingReviewers(), since it requires an HTTP request.
     for cl in changes:
       yield (cl, 'waiting' if cl.GetIssueURL() else 'error')
+    return
+
+  # First, sort out authentication issues.
+  logging.debug('ensuring credentials exist')
+  for cl in changes:
+    cl.EnsureAuthenticated(force=False, refresh=True)
+
+  def fetch(cl):
+    try:
+      return (cl, cl.GetStatus())
+    except:
+      # See http://crbug.com/629863.
+      logging.exception('failed to fetch status for %s:', cl)
+      raise
+
+  threads_count = len(changes)
+  if max_processes:
+    threads_count = max(1, min(threads_count, max_processes))
+  logging.debug('querying %d CLs using %d threads', len(changes), threads_count)
+
+  pool = ThreadPool(threads_count)
+  fetched_cls = set()
+  try:
+    it = pool.imap_unordered(fetch, changes).__iter__()
+    while True:
+      try:
+        cl, status = it.next(timeout=5)
+      except multiprocessing.TimeoutError:
+        break
+      fetched_cls.add(cl)
+      yield cl, status
+  finally:
+    pool.close()
+
+  # Add any branches that failed to fetch.
+  for cl in set(changes) - fetched_cls:
+    yield (cl, 'error')
 
 
 def upload_branch_deps(cl, args):
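
The reworked get_cl_statuses above replaces the old two-phase fetch with a single ThreadPool that yields results as they arrive and gives up on stragglers after a short timeout. A self-contained sketch of that pattern, assuming Python 2-era multiprocessing.pool as used by git_cl.py; 'query' and 'work_items' are placeholders, not git_cl.py names:

import multiprocessing
from multiprocessing.pool import ThreadPool

def query(item):
  # Stand-in for the per-CL work (cl.GetStatus() in git_cl.py).
  return (item, 'status-for-%s' % item)

def statuses(work_items, max_processes=None):
  threads_count = len(work_items)
  if max_processes:
    threads_count = max(1, min(threads_count, max_processes))
  pool = ThreadPool(threads_count)
  finished = set()
  try:
    it = pool.imap_unordered(query, work_items).__iter__()
    while True:
      try:
        item, status = it.next(timeout=5)  # stop waiting after 5s of silence
      except multiprocessing.TimeoutError:
        break
      except StopIteration:
        break
      finished.add(item)
      yield item, status
  finally:
    pool.close()
  # Anything that never reported back is surfaced as an error.
  for item in set(work_items) - finished:
    yield item, 'error'

if __name__ == '__main__':
  for row in statuses(['a', 'b', 'c'], max_processes=2):
    print(row)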
@@ -3208,7 +3557,7 @@ def upload_branch_deps(cl, args):
   if root_branch is None:
     DieWithError('Can\'t find dependent branches from detached HEAD state. '
                  'Get on a branch!')
-  if not cl.GetIssue() or not cl.GetPatchset():
+  if not cl.GetIssue() or (not cl.IsGerrit() and not cl.GetPatchset()):
     DieWithError('Current branch does not have an uploaded CL. We cannot set '
                  'patchset dependencies without an uploaded CL.')
 
@@ -3267,7 +3616,7 @@ def upload_branch_deps(cl, args):
       if CMDupload(OptionParser(), args) != 0:
         print('Upload failed for %s!' % dependent_branch)
         failures[dependent_branch] = 1
-    except:  # pylint: disable=
+    except:  # pylint: disable=bare-except
       failures[dependent_branch] = 1
       print()
   finally:
@@ -3455,10 +3804,13 @@ def CMDstatus(parser, args):
         alignment, ShortBranchName(branch), color, url,
         status_str, reset))
 
-
+
+  branch = GetCurrentBranch()
   print()
-  print('Current branch:'
-
+  print('Current branch: %s' % branch)
+  for cl in changes:
+    if cl.GetBranch() == branch:
+      break
   if not cl.GetIssue():
     print('No issue assigned.')
     return 0
@@ -3607,16 +3959,19 @@ def CMDdescription(parser, args):
   parser.add_option('-n', '--new-description',
                     help='New description to set for this issue (- for stdin, '
                          '+ to load from local commit HEAD)')
+  parser.add_option('-f', '--force', action='store_true',
+                    help='Delete any unpublished Gerrit edits for this issue '
+                         'without prompting')
 
   _add_codereview_select_options(parser)
   auth.add_auth_options(parser)
   options, args = parser.parse_args(args)
   _process_codereview_select_options(parser, options)
 
-
+  target_issue_arg = None
   if len(args) > 0:
-
-    if not
+    target_issue_arg = ParseIssueNumberArgument(args[0])
+    if not target_issue_arg.valid:
       parser.print_help()
       return 1
 
@@ -3626,10 +3981,9 @@ def CMDdescription(parser, args):
       'auth_config': auth_config,
       'codereview': options.forced_codereview,
   }
-  if
-    kwargs['issue'] =
-
-      kwargs['rietveld_server'] = target_issue.hostname
+  if target_issue_arg:
+    kwargs['issue'] = target_issue_arg.issue
+    kwargs['codereview_host'] = target_issue_arg.hostname
 
   cl = Changelist(**kwargs)
 
@@ -3655,7 +4009,7 @@ def CMDdescription(parser, args):
     description.prompt()
 
   if cl.GetDescription() != description.description:
-    cl.UpdateDescription(description.description)
+    cl.UpdateDescription(description.description, force=options.force)
   return 0
 
 
@@ -3682,7 +4036,7 @@ def CMDlint(parser, args):
   auth_config = auth.extract_auth_config_from_options(options)
 
   # Access to a protected member _XX of a client class
-  # pylint: disable=
+  # pylint: disable=protected-access
   try:
     import cpplint
     import cpplint_chromium
@@ -3731,7 +4085,7 @@ def CMDlint(parser, args):
 def CMDpresubmit(parser, args):
   """Runs presubmit tests on the current changelist."""
   parser.add_option('-u', '--upload', action='store_true',
-                    help='Run upload hook instead of the push
+                    help='Run upload hook instead of the push hook')
   parser.add_option('-f', '--force', action='store_true',
                     help='Run checks even if tree is dirty')
   auth.add_auth_options(parser)
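
CMDdescription now routes an optional issue argument through ParseIssueNumberArgument and copies its issue/hostname fields into the Changelist keyword arguments. A rough, hypothetical illustration of that flow — the ParsedIssue stand-in and the regex below only mirror the fields the diff relies on (valid, issue, hostname) and are not git_cl.py's real parser:

import collections
import re

ParsedIssue = collections.namedtuple('ParsedIssue', 'valid issue hostname')

def parse_issue_arg(arg):
  # Bare number, e.g. "123456".
  if arg.isdigit():
    return ParsedIssue(valid=True, issue=int(arg), hostname=None)
  # Issue URL, e.g. "https://chromium-review.googlesource.com/c/123456".
  match = re.match(r'https?://([^/]+)/.*?(\d+)/?$', arg)
  if match:
    return ParsedIssue(valid=True, issue=int(match.group(2)),
                       hostname=match.group(1))
  return ParsedIssue(valid=False, issue=None, hostname=None)

def build_changelist_kwargs(arg):
  parsed = parse_issue_arg(arg)
  if not parsed.valid:
    raise ValueError('not an issue number or URL: %r' % arg)
  # Mirrors what the new CMDdescription passes into Changelist(**kwargs).
  return {'issue': parsed.issue, 'codereview_host': parsed.hostname}

if __name__ == '__main__':
  print(build_changelist_kwargs('123456'))
  print(build_changelist_kwargs('https://chromium-review.googlesource.com/c/123456'))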
@@ -3787,14 +4141,13 @@ def GenerateGerritChangeId(message):
   return 'I%s' % change_hash.strip()
 
 
-def GetTargetRef(remote, remote_branch, target_branch
+def GetTargetRef(remote, remote_branch, target_branch):
   """Computes the remote branch ref to use for the CL.
 
   Args:
     remote (str): The git remote for the CL.
     remote_branch (str): The git remote branch for the CL.
     target_branch (str): The target branch specified by the user.
-    pending_prefix (str): The pending prefix from the settings.
   """
   if not (remote and remote_branch):
     return None
@@ -3836,9 +4189,7 @@ def GetTargetRef(remote, remote_branch, target_branch, pending_prefix):
                                        'refs/heads/')
   elif remote_branch.startswith('refs/remotes/branch-heads'):
     remote_branch = remote_branch.replace('refs/remotes/', 'refs/')
-
-  if pending_prefix:
-    remote_branch = remote_branch.replace('refs/', pending_prefix)
+
   return remote_branch
 
 
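
With the pending-prefix handling removed, GetTargetRef reduces to normalizing a remote branch (or an explicit target) into a refs/heads/* or refs/branch-heads/* ref. A condensed, simplified sketch under that assumption; the real function also consults the remote and handles more spellings:

def target_ref(remote_branch, target_branch=None):
  if target_branch:
    # An explicit target such as "refs/branch-heads/3029" or "lkgr" wins.
    if not target_branch.startswith('refs/'):
      target_branch = 'refs/heads/' + target_branch
    return target_branch
  if remote_branch.startswith('refs/remotes/branch-heads'):
    return remote_branch.replace('refs/remotes/', 'refs/')
  if remote_branch.startswith('refs/remotes/origin/'):
    return remote_branch.replace('refs/remotes/origin/', 'refs/heads/')
  return remote_branch

if __name__ == '__main__':
  print(target_ref('refs/remotes/origin/master'))          # refs/heads/master
  print(target_ref('refs/remotes/branch-heads/3029'))      # refs/branch-heads/3029
  print(target_ref('refs/remotes/origin/master', 'lkgr'))  # refs/heads/lkgr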
@@ -3870,14 +4221,15 @@ def CMDupload(parser, args):
                     help='bypass watchlists auto CC-ing reviewers')
   parser.add_option('-f', action='store_true', dest='force',
                     help="force yes to questions (don't prompt)")
-  parser.add_option('-m', dest='message',
+  parser.add_option('--message', '-m', dest='message',
+                    help='message for patchset')
   parser.add_option('-b', '--bug',
                     help='pre-populate the bug number(s) for this issue. '
                          'If several, separate with commas')
   parser.add_option('--message-file', dest='message_file',
                     help='file which contains message for patchset')
-  parser.add_option('-t', dest='title',
-                    help='title for patchset
+  parser.add_option('--title', '-t', dest='title',
+                    help='title for patchset')
   parser.add_option('-r', '--reviewers',
                     action='append', default=[],
                     help='reviewer email addresses')
@@ -3885,7 +4237,7 @@ def CMDupload(parser, args):
                     action='append', default=[],
                     help='cc email addresses')
   parser.add_option('-s', '--send-mail', action='store_true',
-                    help='send email to reviewer immediately')
+                    help='send email to reviewer(s) and cc(s) immediately')
   parser.add_option('--emulate_svn_auto_props',
                     '--emulate-svn-auto-props',
                     action="store_true",
@@ -3905,6 +4257,8 @@ def CMDupload(parser, args):
   parser.add_option('--no-squash', action='store_true',
                     help='Don\'t squash multiple commits into one ' +
                          '(Gerrit only)')
+  parser.add_option('--topic', default=None,
+                    help='Topic to specify when uploading (Gerrit only)')
   parser.add_option('--email', default=None,
                     help='email address to use to connect to Rietveld')
   parser.add_option('--tbr-owners', dest='tbr_owners', action='store_true',
@@ -3947,24 +4301,26 @@ def CMDupload(parser, args):
   return cl.CMDUpload(options, args, orig_args)
 
 
-
-
-
-
-
-
+@subcommand.usage('DEPRECATED')
+def CMDdcommit(parser, args):
+  """DEPRECATED: Used to commit the current changelist via git-svn."""
+  message = ('git-cl no longer supports committing to SVN repositories via '
+             'git-svn. You probably want to use `git cl land` instead.')
+  print(message)
+  return 1
 
 
-
-
+@subcommand.usage('[upstream branch to apply against]')
+def CMDland(parser, args):
+  """Commits the current changelist via git.
 
   In case of Gerrit, uses Gerrit REST api to "submit" the issue, which pushes
   upstream and closes the issue automatically and atomically.
 
   Otherwise (in case of Rietveld):
   Squashes branch into a single commit.
-  Updates
-  Pushes
+  Updates commit message with metadata (e.g. pointer to review).
+  Pushes the code upstream.
   Updates review and closes.
   """
   parser.add_option('--bypass-hooks', action='store_true', dest='bypass_hooks',
@@ -3999,12 +4355,16 @@ def SendUpstream(parser, args, cmd):
           'the contributor\'s "name <email>". If you can\'t upload such a '
           'commit for review, contact your repository admin and request'
           '"Forge-Author" permission.')
+    if not cl.GetIssue():
+      DieWithError('You must upload the change first to Gerrit.\n'
+                   '  If you would rather have `git cl land` upload '
+                   'automatically for you, see http://crbug.com/642759')
     return cl._codereview_impl.CMDLand(options.force, options.bypass_hooks,
                                        options.verbose)
 
   current = cl.GetBranch()
   remote, upstream_branch = cl.FetchUpstreamTuple(cl.GetBranch())
-  if
+  if remote == '.':
     print()
     print('Attempting to push branch %r into another local branch!' % current)
     print()
@@ -4017,7 +4377,7 @@ def SendUpstream(parser, args, cmd):
     print('  Current parent: %r' % upstream_branch)
     return 1
 
-  if not args
+  if not args:
     # Default to merging against our best guess of the upstream branch.
     args = [cl.GetUpstreamBranch()]
 
@@ -4027,9 +4387,8 @@ def SendUpstream(parser, args, cmd):
     return 1
 
   base_branch = args[0]
-  base_has_submodules = IsSubmoduleMergeCommit(base_branch)
 
-  if git_common.is_dirty_git_tree(
+  if git_common.is_dirty_git_tree('land'):
     return 1
 
   # This rev-list syntax means "show all commits not in my branch that
@@ -4039,30 +4398,9 @@ def SendUpstream(parser, args, cmd):
   if upstream_commits:
     print('Base branch "%s" has %d commits '
           'not in this branch.' % (base_branch, len(upstream_commits)))
-    print('Run "git merge %s" before attempting to
+    print('Run "git merge %s" before attempting to land.' % base_branch)
     return 1
 
-  # This is the revision `svn dcommit` will commit on top of.
-  svn_head = None
-  if cmd == 'dcommit' or base_has_submodules:
-    svn_head = RunGit(['log', '--grep=^git-svn-id:', '-1',
-                       '--pretty=format:%H'])
-
-  if cmd == 'dcommit':
-    # If the base_head is a submodule merge commit, the first parent of the
-    # base_head should be a git-svn commit, which is what we're interested in.
-    base_svn_head = base_branch
-    if base_has_submodules:
-      base_svn_head += '^1'
-
-    extra_commits = RunGit(['rev-list', '^' + svn_head, base_svn_head])
-    if extra_commits:
-      print('This branch has %d additional commits not upstreamed yet.'
-            % len(extra_commits.splitlines()))
-      print('Upstream "%s" or rebase this branch on top of the upstream trunk '
-            'before attempting to %s.' % (base_branch, cmd))
-      return 1
-
   merge_base = RunGit(['merge-base', base_branch, 'HEAD']).strip()
   if not options.bypass_hooks:
     author = None
@@ -4080,11 +4418,11 @@ def SendUpstream(parser, args, cmd):
     status = GetTreeStatus()
     if 'closed' == status:
       print('The tree is closed. Please wait for it to reopen. Use '
-            '"git cl
+            '"git cl land --bypass-hooks" to commit on a closed tree.')
       return 1
     elif 'unknown' == status:
       print('Unable to determine tree status. Please verify manually and '
-            'use "git cl
+            'use "git cl land --bypass-hooks" to commit on a closed tree.')
       return 1
 
   change_desc = ChangeDescription(options.message)
@@ -4102,7 +4440,6 @@ def SendUpstream(parser, args, cmd):
   # Keep a separate copy for the commit message, because the commit message
   # contains the link to the Rietveld issue, while the Rietveld message contains
   # the commit viewvc url.
-  # Keep a separate copy for the commit message.
   if cl.GetIssue():
     change_desc.update_reviewers(cl.GetApprovingReviewers())
 
@@ -4112,7 +4449,7 @@ def SendUpstream(parser, args, cmd):
     # after it. Add a period on a new line to circumvent this. Also add a space
     # before the period to make sure that Gitiles continues to correctly resolve
     # the URL.
-    commit_desc.append_footer('Review
+    commit_desc.append_footer('Review-Url: %s .' % cl.GetIssueURL())
   if options.contributor:
     commit_desc.append_footer('Patch from %s.' % options.contributor)
 
@@ -4125,9 +4462,7 @@ def SendUpstream(parser, args, cmd):
 
   # We want to squash all this branch's commits into one commit with the proper
   # description. We do this by doing a "reset --soft" to the base branch (which
-  # keeps the working copy the same), then
-  # has a submodule merge commit, we'll also need to cherry-pick the squashed
-  # commit onto a branch based on the git-svn head.
+  # keeps the working copy the same), then landing that.
   MERGE_BRANCH = 'git-cl-commit'
   CHERRY_PICK_BRANCH = 'git-cl-cherry-pick'
   # Delete the branches if they exist.
@@ -4148,8 +4483,6 @@ def SendUpstream(parser, args, cmd):
   # We wrap in a try...finally block so if anything goes wrong,
   # we clean up the branches.
   retcode = -1
-  pushed_to_pending = False
-  pending_ref = None
   revision = None
   try:
     RunGit(['checkout', '-q', '-b', MERGE_BRANCH])
@@ -4162,237 +4495,75 @@ def SendUpstream(parser, args, cmd):
       ])
     else:
       RunGit(['commit', '-m', commit_desc.description])
-
-
-
-
-
-
-    remote, branch = cl.FetchUpstreamTuple(cl.GetBranch())
-    mirror = settings.GetGitMirror(remote)
-    pushurl = mirror.url if mirror else remote
-    pending_prefix = settings.GetPendingRefPrefix()
-    if not pending_prefix or branch.startswith(pending_prefix):
-      # If not using refs/pending/heads/* at all, or target ref is already set
-      # to pending, then push to the target ref directly.
-      retcode, output = RunGitWithCode(
-          ['push', '--porcelain', pushurl, 'HEAD:%s' % branch])
-      pushed_to_pending = pending_prefix and branch.startswith(pending_prefix)
-    else:
-      # Cherry-pick the change on top of pending ref and then push it.
-      assert branch.startswith('refs/'), branch
-      assert pending_prefix[-1] == '/', pending_prefix
-      pending_ref = pending_prefix + branch[len('refs/'):]
-      retcode, output = PushToGitPending(pushurl, pending_ref, branch)
-      pushed_to_pending = (retcode == 0)
-    if retcode == 0:
-      revision = RunGit(['rev-parse', 'HEAD']).strip()
+
+    remote, branch = cl.FetchUpstreamTuple(cl.GetBranch())
+    mirror = settings.GetGitMirror(remote)
+    if mirror:
+      pushurl = mirror.url
+      git_numberer_enabled = _is_git_numberer_enabled(pushurl, branch)
     else:
-      #
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+      pushurl = remote  # Usually, this is 'origin'.
+      git_numberer_enabled = _is_git_numberer_enabled(
+          RunGit(['config', 'remote.%s.url' % remote]).strip(), branch)
+
+    if git_numberer_enabled:
+      # TODO(tandrii): maybe do autorebase + retry on failure
+      # http://crbug.com/682934, but better just use Gerrit :)
+      logging.debug('Adding git number footers')
+      parent_msg = RunGit(['show', '-s', '--format=%B', merge_base]).strip()
+      commit_desc.update_with_git_number_footers(merge_base, parent_msg,
+                                                 branch)
+      # Ensure timestamps are monotonically increasing.
+      timestamp = max(1 + _get_committer_timestamp(merge_base),
+                      _get_committer_timestamp('HEAD'))
+      _git_amend_head(commit_desc.description, timestamp)
+      change_desc = ChangeDescription(commit_desc.description)
+
+    retcode, output = RunGitWithCode(
+        ['push', '--porcelain', pushurl, 'HEAD:%s' % branch])
+    if retcode == 0:
+      revision = RunGit(['rev-parse', 'HEAD']).strip()
     logging.debug(output)
+  except:  # pylint: disable=bare-except
+    if _IS_BEING_TESTED:
+      logging.exception('this is likely your ACTUAL cause of test failure.\n'
+                        + '-' * 30 + '8<' + '-' * 30)
+      logging.error('\n' + '-' * 30 + '8<' + '-' * 30 + '\n\n\n')
+    raise
   finally:
     # And then swap back to the original branch and clean up.
     RunGit(['checkout', '-q', cl.GetBranch()])
     RunGit(['branch', '-D', MERGE_BRANCH])
-    if base_has_submodules:
-      RunGit(['branch', '-D', CHERRY_PICK_BRANCH])
 
   if not revision:
     print('Failed to push. If this persists, please file a bug.')
     return 1
 
-  killed = False
-  if pushed_to_pending:
-    try:
-      revision = WaitForRealCommit(remote, revision, base_branch, branch)
-      # We set pushed_to_pending to False, since it made it all the way to the
-      # real ref.
-      pushed_to_pending = False
-    except KeyboardInterrupt:
-      killed = True
-
   if cl.GetIssue():
-    to_pending = ' to pending queue' if pushed_to_pending else ''
     viewvc_url = settings.GetViewVCUrl()
-    if
-
-
-
-
-      change_desc.append_footer('Committed: %s' % (revision,))
+    if viewvc_url and revision:
+      change_desc.append_footer(
+          'Committed: %s%s' % (viewvc_url, revision))
+    elif revision:
+      change_desc.append_footer('Committed: %s' % (revision,))
     print('Closing issue '
           '(you may be prompted for your codereview password)...')
     cl.UpdateDescription(change_desc.description)
     cl.CloseIssue()
     props = cl.GetIssueProperties()
     patch_num = len(props['patchsets'])
-    comment = "Committed patchset #%d (id:%d)
-        patch_num, props['patchsets'][-1],
+    comment = "Committed patchset #%d (id:%d) manually as %s" % (
+        patch_num, props['patchsets'][-1], revision)
     if options.bypass_hooks:
       comment += ' (tree was closed).' if GetTreeStatus() == 'closed' else '.'
     else:
       comment += ' (presubmit successful).'
     cl.RpcServer().add_comment(cl.GetIssue(), comment)
 
-  if
-
-    print('The commit is in the pending queue (%s).' % pending_ref)
-    print('It will show up on %s in ~1 min, once it gets a Cr-Commit-Position '
-          'footer.' % branch)
-
-  hook = POSTUPSTREAM_HOOK_PATTERN % cmd
-  if os.path.isfile(hook):
-    RunCommand([hook, merge_base], error_ok=True)
-
-  return 1 if killed else 0
-
-
-def WaitForRealCommit(remote, pushed_commit, local_base_ref, real_ref):
-  print()
-  print('Waiting for commit to be landed on %s...' % real_ref)
-  print('(If you are impatient, you may Ctrl-C once without harm)')
-  target_tree = RunGit(['rev-parse', '%s:' % pushed_commit]).strip()
-  current_rev = RunGit(['rev-parse', local_base_ref]).strip()
-  mirror = settings.GetGitMirror(remote)
-
-  loop = 0
-  while True:
-    sys.stdout.write('fetching (%d)... \r' % loop)
-    sys.stdout.flush()
-    loop += 1
-
-    if mirror:
-      mirror.populate()
-    RunGit(['retry', 'fetch', remote, real_ref], stderr=subprocess2.VOID)
-    to_rev = RunGit(['rev-parse', 'FETCH_HEAD']).strip()
-    commits = RunGit(['rev-list', '%s..%s' % (current_rev, to_rev)])
-    for commit in commits.splitlines():
-      if RunGit(['rev-parse', '%s:' % commit]).strip() == target_tree:
-        print('Found commit on %s' % real_ref)
-        return commit
-
-    current_rev = to_rev
+  if os.path.isfile(POSTUPSTREAM_HOOK):
+    RunCommand([POSTUPSTREAM_HOOK, merge_base], error_ok=True)
 
-
-def PushToGitPending(remote, pending_ref, upstream_ref):
-  """Fetches pending_ref, cherry-picks current HEAD on top of it, pushes.
-
-  Returns:
-    (retcode of last operation, output log of last operation).
-  """
-  assert pending_ref.startswith('refs/'), pending_ref
-  local_pending_ref = 'refs/git-cl/' + pending_ref[len('refs/'):]
-  cherry = RunGit(['rev-parse', 'HEAD']).strip()
-  code = 0
-  out = ''
-  max_attempts = 3
-  attempts_left = max_attempts
-  while attempts_left:
-    if attempts_left != max_attempts:
-      print('Retrying, %d attempts left...' % (attempts_left - 1,))
-    attempts_left -= 1
-
-    # Fetch. Retry fetch errors.
-    print('Fetching pending ref %s...' % pending_ref)
-    code, out = RunGitWithCode(
-        ['retry', 'fetch', remote, '+%s:%s' % (pending_ref, local_pending_ref)])
-    if code:
-      print('Fetch failed with exit code %d.' % code)
-      if out.strip():
-        print(out.strip())
-      continue
-
-    # Try to cherry pick. Abort on merge conflicts.
-    print('Cherry-picking commit on top of pending ref...')
-    RunGitWithCode(['checkout', local_pending_ref], suppress_stderr=True)
-    code, out = RunGitWithCode(['cherry-pick', cherry])
-    if code:
-      print('Your patch doesn\'t apply cleanly to ref \'%s\', '
-            'the following files have merge conflicts:' % pending_ref)
-      print(RunGit(['diff', '--name-status', '--diff-filter=U']).strip())
-      print('Please rebase your patch and try again.')
-      RunGitWithCode(['cherry-pick', '--abort'])
-      return code, out
-
-    # Applied cleanly, try to push now. Retry on error (flake or non-ff push).
-    print('Pushing commit to %s... It can take a while.' % pending_ref)
-    code, out = RunGitWithCode(
-        ['retry', 'push', '--porcelain', remote, 'HEAD:%s' % pending_ref])
-    if code == 0:
-      # Success.
-      print('Commit pushed to pending ref successfully!')
-      return code, out
-
-    print('Push failed with exit code %d.' % code)
-    if out.strip():
-      print(out.strip())
-    if IsFatalPushFailure(out):
-      print('Fatal push error. Make sure your .netrc credentials and git '
-            'user.email are correct and you have push access to the repo.')
-      return code, out
-
-  print('All attempts to push to pending ref failed.')
-  return code, out
-
-
-def IsFatalPushFailure(push_stdout):
-  """True if retrying push won't help."""
-  return '(prohibited by Gerrit)' in push_stdout
-
-
-@subcommand.usage('[upstream branch to apply against]')
-def CMDdcommit(parser, args):
-  """Commits the current changelist via git-svn."""
-  if not settings.GetIsGitSvn():
-    if git_footers.get_footer_svn_id():
-      # If it looks like previous commits were mirrored with git-svn.
-      message = """This repository appears to be a git-svn mirror, but no
-upstream SVN master is set. You probably need to run 'git auto-svn' once."""
-    else:
-      message = """This doesn't appear to be an SVN repository.
-If your project has a true, writeable git repository, you probably want to run
-'git cl land' instead.
-If your project has a git mirror of an upstream SVN master, you probably need
-to run 'git svn init'.
-
-Using the wrong command might cause your commit to appear to succeed, and the
-review to be closed, without actually landing upstream. If you choose to
-proceed, please verify that the commit lands upstream as expected."""
-    print(message)
-    ask_for_data('[Press enter to dcommit or ctrl-C to quit]')
-  # TODO(tandrii): kill this post SVN migration with
-  # https://codereview.chromium.org/2076683002
-  print('WARNING: chrome infrastructure is migrating SVN repos to Git.\n'
-        'Please let us know of this project you are committing to:'
-        ' http://crbug.com/600451')
-  return SendUpstream(parser, args, 'dcommit')
-
-
-@subcommand.usage('[upstream branch to apply against]')
-def CMDland(parser, args):
-  """Commits the current changelist via git."""
-  if settings.GetIsGitSvn() or git_footers.get_footer_svn_id():
-    print('This appears to be an SVN repository.')
-    print('Are you sure you didn\'t mean \'git cl dcommit\'?')
-    print('(Ignore if this is the first commit after migrating from svn->git)')
-    ask_for_data('[Press enter to push or ctrl-C to quit]')
-  return SendUpstream(parser, args, 'land')
+  return 0
 
 
 @subcommand.usage('<patch url or issue id or issue url>')
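
When the git numberer is enabled, the new push path above also forces committer timestamps to be monotonically increasing before amending HEAD. A minimal sketch of that rule, using plain integers in place of the _get_committer_timestamp helper:

def monotonic_timestamp(parent_committer_ts, head_committer_ts):
  """Timestamp to amend HEAD with: never earlier than parent + 1 second."""
  return max(1 + parent_committer_ts, head_committer_ts)

if __name__ == '__main__':
  # HEAD was committed "before" its parent (clock skew): bump it forward.
  print(monotonic_timestamp(1500000100, 1500000000))  # -> 1500000101
  # HEAD is already newer: keep its own timestamp.
  print(monotonic_timestamp(1500000100, 1500000200))  # -> 1500000200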
@@ -4434,7 +4605,7 @@ def CMDpatch(parser, args):
   auth_config = auth.extract_auth_config_from_options(options)
 
 
-  if options.reapply
+  if options.reapply:
     if options.newbranch:
       parser.error('--reapply works on the current branch only')
     if len(args) > 0:
@@ -4446,7 +4617,7 @@ def CMDpatch(parser, args):
       parser.error('current branch must have an associated issue')
 
     upstream = cl.GetUpstreamBranch()
-    if upstream
+    if upstream is None:
      parser.error('No upstream branch specified. Cannot reset branch')
 
     RunGit(['reset', '--hard', upstream])
@@ -4485,16 +4656,6 @@ def CMDpatch(parser, args):
                      options.directory)
 
 
-def CMDrebase(parser, args):
-  """Rebases current branch on top of svn repo."""
-  # Provide a wrapper for git svn rebase to help avoid accidental
-  # git svn dcommit.
-  # It's the only command that doesn't use parser at all since we just defer
-  # execution to git-svn.
-
-  return RunGitWithCode(['svn', 'rebase'] + args)[1]
-
-
 def GetTreeStatus(url=None):
   """Fetches the tree status and returns either 'open', 'closed',
   'unknown' or 'unset'."""
@@ -4520,37 +4681,6 @@ def GetTreeStatusReason():
   return status['message']
 
 
-def GetBuilderMaster(bot_list):
-  """For a given builder, fetch the master from AE if available."""
-  map_url = 'https://builders-map.appspot.com/'
-  try:
-    master_map = json.load(urllib2.urlopen(map_url))
-  except urllib2.URLError as e:
-    return None, ('Failed to fetch builder-to-master map from %s. Error: %s.' %
-                  (map_url, e))
-  except ValueError as e:
-    return None, ('Invalid json string from %s. Error: %s.' % (map_url, e))
-  if not master_map:
-    return None, 'Failed to build master map.'
-
-  result_master = ''
-  for bot in bot_list:
-    builder = bot.split(':', 1)[0]
-    master_list = master_map.get(builder, [])
-    if not master_list:
-      return None, ('No matching master for builder %s.' % builder)
-    elif len(master_list) > 1:
-      return None, ('The builder name %s exists in multiple masters %s.' %
-                    (builder, master_list))
-    else:
-      cur_master = master_list[0]
-      if not result_master:
-        result_master = cur_master
-      elif result_master != cur_master:
-        return None, 'The builders do not belong to the same master.'
-  return result_master, None
-
-
 def CMDtree(parser, args):
   """Shows the status of the tree."""
   _, args = parser.parse_args(args)
@@ -4568,52 +4698,50 @@ def CMDtree(parser, args):
 
 
 def CMDtry(parser, args):
-  """Triggers try jobs
-  group = optparse.OptionGroup(parser,
+  """Triggers try jobs using either BuildBucket or CQ dry run."""
+  group = optparse.OptionGroup(parser, 'Try job options')
   group.add_option(
-
-      help=(
-
-          "
-
-
+      '-b', '--bot', action='append',
+      help=('IMPORTANT: specify ONE builder per --bot flag. Use it multiple '
+            'times to specify multiple builders. ex: '
+            '"-b win_rel -b win_layout". See '
+            'the try server waterfall for the builders name and the tests '
+            'available.'))
   group.add_option(
-
-      help=(
+      '-B', '--bucket', default='',
+      help=('Buildbucket bucket to send the try requests.'))
   group.add_option(
-
-      help=
-           "revision will be determined by the try server; see "
-           "its waterfall for more info")
+      '-m', '--master', default='',
+      help=('Specify a try master where to run the tries.'))
   group.add_option(
-
-      help=
-
+      '-r', '--revision',
+      help='Revision to use for the try job; default: the revision will '
+           'be determined by the try recipe that builder runs, which usually '
+           'defaults to HEAD of origin/master')
   group.add_option(
-
-      help=
-
+      '-c', '--clobber', action='store_true', default=False,
+      help='Force a clobber before building; that is don\'t do an '
+           'incremental build')
  group.add_option(
-
-      help=
-
-
+      '--project',
+      help='Override which project to use. Projects are defined '
+           'in recipe to determine to which repository or directory to '
+           'apply the patch')
   group.add_option(
-
+      '-p', '--property', dest='properties', action='append', default=[],
+      help='Specify generic properties in the form -p key1=value1 -p '
+           'key2=value2 etc. The value will be treated as '
+           'json if decodable, or as string otherwise. '
+           'NOTE: using this may make your try job not usable for CQ, '
+           'which will then schedule another try job with default properties')
   group.add_option(
-
-      help=
-  group.add_option(
-      "--buildbucket-host", default='cr-buildbucket.appspot.com',
-      help="Host of buildbucket. The default host is %default.")
+      '--buildbucket-host', default='cr-buildbucket.appspot.com',
+      help='Host of buildbucket. The default host is %default.')
   parser.add_option_group(group)
   auth.add_auth_options(parser)
   options, args = parser.parse_args(args)
   auth_config = auth.extract_auth_config_from_options(options)
 
-  if options.use_rietveld and options.properties:
-    parser.error('Properties can only be specified with buildbucket')
-
   # Make sure that all properties are prop=value pairs.
   bad_params = [x for x in options.properties if '=' not in x]
   if bad_params:
@@ -4627,164 +4755,66 @@ def CMDtry(parser, args):
     parser.error('Need to upload first')
 
   if cl.IsGerrit():
-
-
-
-
-
-
-
-  props = cl.GetIssueProperties()
-  if props.get('closed'):
-    parser.error('Cannot send try jobs for a closed CL')
-
-  if props.get('private'):
-    parser.error('Cannot use try bots with private issue')
-
-  if not options.name:
-    options.name = cl.GetBranch()
-
-  if options.bot and not options.master:
-    options.master, err_msg = GetBuilderMaster(options.bot)
-    if err_msg:
-      parser.error('Tryserver master cannot be found because: %s\n'
-                   'Please manually specify the tryserver master'
-                   ', e.g. "-m tryserver.chromium.linux".' % err_msg)
-
-  def GetMasterMap():
-    # Process --bot.
-    if not options.bot:
-      change = cl.GetChange(cl.GetCommonAncestorWithUpstream(), None)
-
-      # Get try masters from PRESUBMIT.py files.
-      masters = presubmit_support.DoGetTryMasters(
-          change,
-          change.LocalPaths(),
-          settings.GetRoot(),
-          None,
-          None,
-          options.verbose,
-          sys.stdout)
-      if masters:
-        return masters
-
-      # Fall back to deprecated method: get try slaves from PRESUBMIT.py files.
-      options.bot = presubmit_support.DoGetTrySlaves(
-          change,
-          change.LocalPaths(),
-          settings.GetRoot(),
-          None,
-          None,
-          options.verbose,
-          sys.stdout)
-
-    if not options.bot:
-      return {}
-
-    builders_and_tests = {}
-    # TODO(machenbach): The old style command-line options don't support
-    # multiple try masters yet.
-    old_style = filter(lambda x: isinstance(x, basestring), options.bot)
-    new_style = filter(lambda x: isinstance(x, tuple), options.bot)
-
-    for bot in old_style:
-      if ':' in bot:
-        parser.error('Specifying testfilter is no longer supported')
-      elif ',' in bot:
-        parser.error('Specify one bot per --bot flag')
-      else:
-        builders_and_tests.setdefault(bot, [])
+    # HACK: warm up Gerrit change detail cache to save on RPCs.
+    cl._codereview_impl._GetChangeDetail(['DETAILED_ACCOUNTS', 'ALL_REVISIONS'])
+
+  error_message = cl.CannotTriggerTryJobReason()
+  if error_message:
+    parser.error('Can\'t trigger try jobs: %s' % error_message)
 
-
-
+  if options.bucket and options.master:
+    parser.error('Only one of --bucket and --master may be used.')
 
-
-    # master name defaults to an empty string, which will cause the master
-    # not to be set on rietveld (deprecated).
-    return {options.master: builders_and_tests}
+  buckets = _get_bucket_map(cl, options, parser)
 
-
-
-
+  # If no bots are listed and we couldn't get a list based on PRESUBMIT files,
+  # then we default to triggering a CQ dry run (see http://crbug.com/625697).
+  if not buckets:
     if options.verbose:
       print('git cl try with no bots now defaults to CQ Dry Run.')
-
-      cl.SetCQState(_CQState.DRY_RUN)
-      print('scheduled CQ Dry Run on %s' % cl.GetIssueURL())
-      return 0
-    except KeyboardInterrupt:
-      raise
-    except:
-      print('WARNING: failed to trigger CQ Dry Run.\n'
-            'Either:\n'
-            ' * your project has no CQ\n'
-            ' * you don\'t have permission to trigger Dry Run\n'
-            ' * bug in this code (see stack trace below).\n'
-            'Consider specifying which bots to trigger manually '
-            'or asking your project owners for permissions '
-            'or contacting Chrome Infrastructure team at '
-            'https://www.chromium.org/infra\n\n')
-      # Still raise exception so that stack trace is printed.
-      raise
+    return cl.TriggerDryRun()
 
-  for builders in
+  for builders in buckets.itervalues():
     if any('triggered' in b for b in builders):
       print('ERROR You are trying to send a job to a triggered bot. This type '
-            'of bot requires an
-            '
+            'of bot requires an initial job from a parent (usually a builder). '
+            'Instead send your job to the parent.\n'
            'Bot list: %s' % builders, file=sys.stderr)
       return 1
 
   patchset = cl.GetMostRecentPatchset()
-
-
-
-
-
-
-
-
-
-      print('ERROR: %s' % ex)
-      return 1
-    except Exception as e:
-      stacktrace = (''.join(traceback.format_stack()) + traceback.format_exc())
-      print('ERROR: Exception when trying to trigger try jobs: %s\n%s' %
-            (e, stacktrace))
-      return 1
-  else:
-    try:
-      cl.RpcServer().trigger_distributed_try_jobs(
-          cl.GetIssue(), patchset, options.name, options.clobber,
-          options.revision, masters)
-    except urllib2.HTTPError as e:
-      if e.code == 404:
-        print('404 from rietveld; '
-              'did you mean to use "git try" instead of "git cl try"?')
-        return 1
-  print('Tried jobs on:')
+  # TODO(tandrii): Checking local patchset against remote patchset is only
+  # supported for Rietveld. Extend it to Gerrit or remove it completely.
+  if not cl.IsGerrit() and patchset != cl.GetPatchset():
+    print('Warning: Codereview server has newer patchsets (%s) than most '
+          'recent upload from local checkout (%s). Did a previous upload '
+          'fail?\n'
+          'By default, git cl try uses the latest patchset from '
+          'codereview, continuing to use patchset %s.\n' %
+          (patchset, cl.GetPatchset(), patchset))
 
-
-
-
-
-
+  try:
+    _trigger_try_jobs(auth_config, cl, buckets, options, 'git_cl_try',
+                      patchset)
+  except BuildbucketResponseException as ex:
+    print('ERROR: %s' % ex)
+    return 1
   return 0
 
 
 def CMDtry_results(parser, args):
-
+  """Prints info about try jobs associated with current CL."""
+  group = optparse.OptionGroup(parser, 'Try job results options')
   group.add_option(
-
+      '-p', '--patchset', type=int, help='patchset number if not current.')
   group.add_option(
-
+      '--print-master', action='store_true', help='print master name as well.')
   group.add_option(
-
-      help=
+      '--color', action='store_true', default=setup_color.IS_TTY,
+      help='force color output, useful when piping output.')
   group.add_option(
-
-      help=
+      '--buildbucket-host', default='cr-buildbucket.appspot.com',
+      help='Host of buildbucket. The default host is %default.')
   group.add_option(
       '--json', help='Path of JSON output file to write try job results to.')
   parser.add_option_group(group)
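
The rewritten CMDtry body boils down to: build a bucket-to-builders map, and fall back to a CQ dry run when nothing was requested or inferred. A hedged sketch of that control flow; the callables stand in for cl.TriggerDryRun() and _trigger_try_jobs() in git_cl.py, and the bucket name below is illustrative:

def run_try(requested_bots, presubmit_bots, trigger_jobs, trigger_dry_run):
  # Mirror _get_bucket_map's role: explicit --bot flags win, otherwise use
  # whatever the PRESUBMIT files suggested.
  buckets = {}
  bots = requested_bots or presubmit_bots
  if bots:
    buckets['master.tryserver'] = list(bots)  # bucket name is illustrative
  if not buckets:
    # No bots anywhere: default to a CQ dry run (crbug.com/625697).
    return trigger_dry_run()
  return trigger_jobs(buckets)

if __name__ == '__main__':
  trigger = lambda buckets: 'triggered %r' % buckets
  dry_run = lambda: 'CQ dry run scheduled'
  print(run_try([], [], trigger, dry_run))
  print(run_try(['win_rel'], [], trigger, dry_run))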
@@ -4798,23 +4828,29 @@ def CMDtry_results(parser, args):
   if not cl.GetIssue():
     parser.error('Need to upload first')
 
-
-
-
-
-
-
-
+  patchset = options.patchset
+  if not patchset:
+    patchset = cl.GetMostRecentPatchset()
+    if not patchset:
+      parser.error('Codereview doesn\'t know about issue %s. '
+                   'No access to issue or wrong issue number?\n'
+                   'Either upload first, or pass --patchset explicitely' %
+                   cl.GetIssue())
+
+  # TODO(tandrii): Checking local patchset against remote patchset is only
+  # supported for Rietveld. Extend it to Gerrit or remove it completely.
+  if not cl.IsGerrit() and patchset != cl.GetPatchset():
+    print('Warning: Codereview server has newer patchsets (%s) than most '
+          'recent upload from local checkout (%s). Did a previous upload '
+          'fail?\n'
+          'By default, git cl try-results uses the latest patchset from '
+          'codereview, continuing to use patchset %s.\n' %
+          (patchset, cl.GetPatchset(), patchset))
   try:
-    jobs = fetch_try_jobs(auth_config, cl, options)
+    jobs = fetch_try_jobs(auth_config, cl, options.buildbucket_host, patchset)
   except BuildbucketResponseException as ex:
     print('Buildbucket error: %s' % ex)
     return 1
-  except Exception as e:
-    stacktrace = (''.join(traceback.format_stack()) + traceback.format_exc())
-    print('ERROR: Exception when trying to fetch try jobs: %s\n%s' %
-          (e, stacktrace))
-    return 1
   if options.json:
     write_try_results_json(options.json, jobs)
   else:
@@ -4833,7 +4869,7 @@ def CMDupstream(parser, args):
   if args:
     # One arg means set upstream branch.
     branch = cl.GetBranch()
-    RunGit(['branch', '--set-upstream',
+    RunGit(['branch', '--set-upstream-to', args[0], branch])
     cl = Changelist()
     print('Upstream branch set to %s' % (cl.GetUpstreamBranch(),))
 
@@ -4880,6 +4916,7 @@ def CMDset_commit(parser, args):
   if options.clear:
     state = _CQState.NONE
   elif options.dry_run:
+    # TODO(qyearsley): Use cl.TriggerDryRun.
     state = _CQState.DRY_RUN
   else:
     state = _CQState.COMMIT
@@ -4995,7 +5032,7 @@ def BuildGitDiffCmd(diff_type, upstream_commit, args):
   """Generates a diff command."""
   # Generate diff for the current branch's changes.
   diff_cmd = ['diff', '--no-ext-diff', '--no-prefix', diff_type,
-              upstream_commit, '--'
+              upstream_commit, '--']
 
   if args:
     for arg in args:
@@ -5006,10 +5043,12 @@ def BuildGitDiffCmd(diff_type, upstream_commit, args):
 
   return diff_cmd
 
+
 def MatchingFileType(file_name, extensions):
   """Returns true if the file name ends with one of the given extensions."""
   return bool([ext for ext in extensions if file_name.lower().endswith(ext)])
 
+
 @subcommand.usage('[files or directories to diff]')
 def CMDformat(parser, args):
   """Runs auto-formatting tools (clang-format etc.) on the diff."""
@@ -5021,10 +5060,16 @@ def CMDformat(parser, args):
                     help='Don\'t modify any file on disk.')
   parser.add_option('--python', action='store_true',
                     help='Format python code with yapf (experimental).')
+  parser.add_option('--js', action='store_true',
+                    help='Format javascript code with clang-format.')
   parser.add_option('--diff', action='store_true',
                     help='Print diff to stdout rather than modifying files.')
   opts, args = parser.parse_args(args)
 
+  # Normalize any remaining args against the current path, so paths relative to
+  # the current directory are still resolved as expected.
+  args = [os.path.join(os.getcwd(), arg) for arg in args]
+
   # git diff generates paths against the root of the repository. Change
   # to that directory so clang-format can find files even within subdirs.
   rel_base_path = settings.GetRelativeRoot()
@@ -5052,6 +5097,9 @@ def CMDformat(parser, args):
   # Filter out files deleted by this CL
   diff_files = [x for x in diff_files if os.path.isfile(x)]
 
+  if opts.js:
+    CLANG_EXTS.append('.js')
+
   clang_diff_files = [x for x in diff_files if MatchingFileType(x, CLANG_EXTS)]
   python_diff_files = [x for x in diff_files if MatchingFileType(x, ['.py'])]
   dart_diff_files = [x for x in diff_files if MatchingFileType(x, ['.dart'])]
@@ -5139,7 +5187,7 @@ def CMDformat(parser, args):
 
   # Format GN build files. Always run on full build files for canonical form.
   if gn_diff_files:
-    cmd = ['gn', 'format'
+    cmd = ['gn', 'format']
     if opts.dry_run or opts.diff:
       cmd.append('--dry-run')
     for gn_diff_file in gn_diff_files:
@@ -5158,6 +5206,22 @@ def CMDformat(parser, args):
       DieWithError("gn format failed on " + gn_diff_file +
                    "\nTry running 'gn format' on this file manually.")
 
+  metrics_xml_files = [
+      'tools/metrics/actions/actions.xml',
+      'tools/metrics/histograms/histograms.xml',
+      'tools/metrics/rappor/rappor.xml']
+  for xml_file in metrics_xml_files:
+    if xml_file in diff_files:
+      tool_dir = top_dir + '/' + os.path.dirname(xml_file)
+      cmd = [tool_dir + '/pretty_print.py', '--non-interactive']
+      if opts.dry_run or opts.diff:
+        cmd.append('--diff')
+      stdout = RunCommand(cmd, cwd=top_dir)
+      if opts.diff:
+        sys.stdout.write(stdout)
+      if opts.dry_run and stdout:
+        return_value = 2  # Not formatted.
+
   return return_value
 
 
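
The new metrics step in CMDformat shells out to each tool directory's pretty_print.py, passing --diff when only checking. A small usage sketch of that invocation pattern; only the paths and flags come from the diff, the wrapper function itself is illustrative:

import os
import subprocess

METRICS_XML_FILES = [
    'tools/metrics/actions/actions.xml',
    'tools/metrics/histograms/histograms.xml',
    'tools/metrics/rappor/rappor.xml',
]

def pretty_print_metrics(top_dir, changed_files, check_only=False):
  dirty = False
  for xml_file in METRICS_XML_FILES:
    if xml_file not in changed_files:
      continue
    tool_dir = os.path.join(top_dir, os.path.dirname(xml_file))
    # pretty_print.py lives next to each metrics XML file in a Chromium
    # checkout; it must be executable for this direct invocation to work.
    cmd = [os.path.join(tool_dir, 'pretty_print.py'), '--non-interactive']
    if check_only:
      cmd.append('--diff')
    out = subprocess.check_output(cmd, cwd=top_dir)
    if check_only and out:
      dirty = True  # pretty_print.py would reformat this file
  return dirty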
@@ -5228,7 +5292,10 @@ class OptionParser(optparse.OptionParser):
   def parse_args(self, args=None, values=None):
     options, args = optparse.OptionParser.parse_args(self, args, values)
     levels = [logging.WARNING, logging.INFO, logging.DEBUG]
-    logging.basicConfig(
+    logging.basicConfig(
+        level=levels[min(options.verbose, len(levels) - 1)],
+        format='[%(levelname).1s%(asctime)s %(process)d %(thread)d '
+               '%(filename)s] %(message)s')
     return options, args
 
 