amp 0.5.2 → 0.5.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/.gitignore +12 -0
- data/.hgignore +3 -0
- data/AUTHORS +1 -1
- data/Manifest.txt +99 -38
- data/README.md +3 -3
- data/Rakefile +53 -18
- data/SCHEDULE.markdown +5 -1
- data/TODO.markdown +120 -149
- data/ampfile.rb +3 -1
- data/bin/amp +4 -1
- data/ext/amp/bz2/extconf.rb +1 -1
- data/ext/amp/mercurial_patch/extconf.rb +1 -1
- data/ext/amp/mercurial_patch/mpatch.c +4 -3
- data/ext/amp/priority_queue/extconf.rb +1 -1
- data/ext/amp/support/extconf.rb +1 -1
- data/ext/amp/support/support.c +1 -1
- data/lib/amp.rb +125 -67
- data/lib/amp/commands/command.rb +12 -10
- data/lib/amp/commands/command_support.rb +8 -1
- data/lib/amp/commands/commands/help.rb +2 -20
- data/lib/amp/commands/commands/init.rb +14 -2
- data/lib/amp/commands/commands/templates.rb +6 -4
- data/lib/amp/commands/commands/version.rb +15 -1
- data/lib/amp/commands/commands/workflow.rb +3 -3
- data/lib/amp/commands/commands/workflows/git/add.rb +3 -3
- data/lib/amp/commands/commands/workflows/git/copy.rb +1 -1
- data/lib/amp/commands/commands/workflows/git/rm.rb +4 -2
- data/lib/amp/commands/commands/workflows/hg/add.rb +1 -1
- data/lib/amp/commands/commands/workflows/hg/addremove.rb +2 -2
- data/lib/amp/commands/commands/workflows/hg/annotate.rb +8 -2
- data/lib/amp/commands/commands/workflows/hg/bisect.rb +253 -0
- data/lib/amp/commands/commands/workflows/hg/branch.rb +1 -1
- data/lib/amp/commands/commands/workflows/hg/branches.rb +3 -3
- data/lib/amp/commands/commands/workflows/hg/bundle.rb +3 -3
- data/lib/amp/commands/commands/workflows/hg/clone.rb +4 -5
- data/lib/amp/commands/commands/workflows/hg/commit.rb +37 -1
- data/lib/amp/commands/commands/workflows/hg/copy.rb +2 -1
- data/lib/amp/commands/commands/workflows/hg/debug/index.rb +1 -1
- data/lib/amp/commands/commands/workflows/hg/diff.rb +3 -8
- data/lib/amp/commands/commands/workflows/hg/forget.rb +5 -4
- data/lib/amp/commands/commands/workflows/hg/identify.rb +6 -6
- data/lib/amp/commands/commands/workflows/hg/import.rb +1 -1
- data/lib/amp/commands/commands/workflows/hg/incoming.rb +2 -2
- data/lib/amp/commands/commands/workflows/hg/log.rb +5 -4
- data/lib/amp/commands/commands/workflows/hg/merge.rb +1 -1
- data/lib/amp/commands/commands/workflows/hg/move.rb +5 -3
- data/lib/amp/commands/commands/workflows/hg/outgoing.rb +1 -1
- data/lib/amp/commands/commands/workflows/hg/push.rb +6 -7
- data/lib/amp/commands/commands/workflows/hg/remove.rb +2 -2
- data/lib/amp/commands/commands/workflows/hg/resolve.rb +6 -23
- data/lib/amp/commands/commands/workflows/hg/root.rb +1 -2
- data/lib/amp/commands/commands/workflows/hg/status.rb +21 -12
- data/lib/amp/commands/commands/workflows/hg/tag.rb +2 -2
- data/lib/amp/commands/commands/workflows/hg/untrack.rb +12 -0
- data/lib/amp/commands/commands/workflows/hg/verify.rb +13 -3
- data/lib/amp/commands/commands/workflows/hg/what_changed.rb +18 -0
- data/lib/amp/commands/dispatch.rb +12 -13
- data/lib/amp/dependencies/amp_support.rb +1 -1
- data/lib/amp/dependencies/amp_support/ruby_amp_support.rb +1 -0
- data/lib/amp/dependencies/maruku.rb +136 -0
- data/lib/amp/dependencies/maruku/attributes.rb +227 -0
- data/lib/amp/dependencies/maruku/defaults.rb +71 -0
- data/lib/amp/dependencies/maruku/errors_management.rb +92 -0
- data/lib/amp/dependencies/maruku/helpers.rb +260 -0
- data/lib/amp/dependencies/maruku/input/charsource.rb +326 -0
- data/lib/amp/dependencies/maruku/input/extensions.rb +69 -0
- data/lib/amp/dependencies/maruku/input/html_helper.rb +189 -0
- data/lib/amp/dependencies/maruku/input/linesource.rb +111 -0
- data/lib/amp/dependencies/maruku/input/parse_block.rb +615 -0
- data/lib/amp/dependencies/maruku/input/parse_doc.rb +234 -0
- data/lib/amp/dependencies/maruku/input/parse_span_better.rb +746 -0
- data/lib/amp/dependencies/maruku/input/rubypants.rb +225 -0
- data/lib/amp/dependencies/maruku/input/type_detection.rb +147 -0
- data/lib/amp/dependencies/maruku/input_textile2/t2_parser.rb +163 -0
- data/lib/amp/dependencies/maruku/maruku.rb +33 -0
- data/lib/amp/dependencies/maruku/output/to_ansi.rb +223 -0
- data/lib/amp/dependencies/maruku/output/to_html.rb +991 -0
- data/lib/amp/dependencies/maruku/output/to_markdown.rb +164 -0
- data/lib/amp/dependencies/maruku/output/to_s.rb +56 -0
- data/lib/amp/dependencies/maruku/string_utils.rb +191 -0
- data/lib/amp/dependencies/maruku/structures.rb +167 -0
- data/lib/amp/dependencies/maruku/structures_inspect.rb +87 -0
- data/lib/amp/dependencies/maruku/structures_iterators.rb +61 -0
- data/lib/amp/dependencies/maruku/textile2.rb +1 -0
- data/lib/amp/dependencies/maruku/toc.rb +199 -0
- data/lib/amp/dependencies/maruku/usage/example1.rb +33 -0
- data/lib/amp/dependencies/maruku/version.rb +40 -0
- data/lib/amp/dependencies/priority_queue.rb +2 -1
- data/lib/amp/dependencies/python_config.rb +2 -1
- data/lib/amp/graphs/ancestor.rb +2 -1
- data/lib/amp/graphs/copies.rb +236 -233
- data/lib/amp/help/entries/__default__.erb +31 -0
- data/lib/amp/help/entries/commands.erb +6 -0
- data/lib/amp/help/entries/mdtest.md +35 -0
- data/lib/amp/help/entries/silly +3 -0
- data/lib/amp/help/help.rb +288 -0
- data/lib/amp/profiling_hacks.rb +5 -3
- data/lib/amp/repository/abstract/abstract_changeset.rb +97 -0
- data/lib/amp/repository/abstract/abstract_local_repo.rb +181 -0
- data/lib/amp/repository/abstract/abstract_staging_area.rb +180 -0
- data/lib/amp/repository/abstract/abstract_versioned_file.rb +100 -0
- data/lib/amp/repository/abstract/common_methods/changeset.rb +75 -0
- data/lib/amp/repository/abstract/common_methods/local_repo.rb +277 -0
- data/lib/amp/repository/abstract/common_methods/staging_area.rb +233 -0
- data/lib/amp/repository/abstract/common_methods/versioned_file.rb +71 -0
- data/lib/amp/repository/generic_repo_picker.rb +78 -0
- data/lib/amp/repository/git/repo_format/changeset.rb +336 -0
- data/lib/amp/repository/git/repo_format/staging_area.rb +192 -0
- data/lib/amp/repository/git/repo_format/versioned_file.rb +119 -0
- data/lib/amp/repository/git/repositories/local_repository.rb +164 -0
- data/lib/amp/repository/git/repository.rb +41 -0
- data/lib/amp/repository/mercurial/encoding/mercurial_diff.rb +382 -0
- data/lib/amp/repository/mercurial/encoding/mercurial_patch.rb +1 -0
- data/lib/amp/repository/mercurial/encoding/patch.rb +294 -0
- data/lib/amp/repository/mercurial/encoding/pure_ruby/ruby_mercurial_patch.rb +124 -0
- data/lib/amp/repository/mercurial/merging/merge_ui.rb +327 -0
- data/lib/amp/repository/mercurial/merging/simple_merge.rb +452 -0
- data/lib/amp/repository/mercurial/repo_format/branch_manager.rb +266 -0
- data/lib/amp/repository/mercurial/repo_format/changeset.rb +768 -0
- data/lib/amp/repository/mercurial/repo_format/dir_state.rb +716 -0
- data/lib/amp/repository/mercurial/repo_format/journal.rb +218 -0
- data/lib/amp/repository/mercurial/repo_format/lock.rb +210 -0
- data/lib/amp/repository/mercurial/repo_format/merge_state.rb +228 -0
- data/lib/amp/repository/mercurial/repo_format/staging_area.rb +367 -0
- data/lib/amp/repository/mercurial/repo_format/store.rb +487 -0
- data/lib/amp/repository/mercurial/repo_format/tag_manager.rb +322 -0
- data/lib/amp/repository/mercurial/repo_format/updatable.rb +543 -0
- data/lib/amp/repository/mercurial/repo_format/updater.rb +848 -0
- data/lib/amp/repository/mercurial/repo_format/verification.rb +433 -0
- data/lib/amp/repository/mercurial/repositories/bundle_repository.rb +216 -0
- data/lib/amp/repository/mercurial/repositories/http_repository.rb +386 -0
- data/lib/amp/repository/mercurial/repositories/local_repository.rb +2034 -0
- data/lib/amp/repository/mercurial/repository.rb +119 -0
- data/lib/amp/repository/mercurial/revlogs/bundle_revlogs.rb +249 -0
- data/lib/amp/repository/mercurial/revlogs/changegroup.rb +217 -0
- data/lib/amp/repository/mercurial/revlogs/changelog.rb +339 -0
- data/lib/amp/repository/mercurial/revlogs/file_log.rb +152 -0
- data/lib/amp/repository/mercurial/revlogs/index.rb +500 -0
- data/lib/amp/repository/mercurial/revlogs/manifest.rb +201 -0
- data/lib/amp/repository/mercurial/revlogs/node.rb +20 -0
- data/lib/amp/repository/mercurial/revlogs/revlog.rb +1026 -0
- data/lib/amp/repository/mercurial/revlogs/revlog_support.rb +129 -0
- data/lib/amp/repository/mercurial/revlogs/versioned_file.rb +597 -0
- data/lib/amp/repository/repository.rb +11 -88
- data/lib/amp/server/extension/amp_extension.rb +3 -3
- data/lib/amp/server/fancy_http_server.rb +1 -1
- data/lib/amp/server/fancy_views/_browser.haml +1 -1
- data/lib/amp/server/fancy_views/_diff_file.haml +1 -8
- data/lib/amp/server/fancy_views/changeset.haml +2 -2
- data/lib/amp/server/fancy_views/file.haml +1 -1
- data/lib/amp/server/fancy_views/file_diff.haml +1 -1
- data/lib/amp/support/amp_ui.rb +13 -29
- data/lib/amp/support/generator.rb +1 -1
- data/lib/amp/support/loaders.rb +1 -2
- data/lib/amp/support/logger.rb +10 -16
- data/lib/amp/support/match.rb +18 -4
- data/lib/amp/support/mercurial/ignore.rb +151 -0
- data/lib/amp/support/openers.rb +8 -3
- data/lib/amp/support/support.rb +91 -46
- data/lib/amp/templates/{blank.commit.erb → mercurial/blank.commit.erb} +0 -0
- data/lib/amp/templates/{blank.log.erb → mercurial/blank.log.erb} +0 -0
- data/lib/amp/templates/{default.commit.erb → mercurial/default.commit.erb} +0 -0
- data/lib/amp/templates/{default.log.erb → mercurial/default.log.erb} +0 -0
- data/lib/amp/templates/template.rb +18 -18
- data/man/amp.1 +51 -0
- data/site/src/about/commands.haml +1 -1
- data/site/src/css/amp.css +1 -1
- data/site/src/index.haml +3 -3
- data/tasks/man.rake +39 -0
- data/tasks/stats.rake +1 -10
- data/tasks/yard.rake +1 -50
- data/test/dirstate_tests/test_dir_state.rb +10 -8
- data/test/functional_tests/annotate.out +31 -0
- data/test/functional_tests/test_functional.rb +155 -63
- data/test/localrepo_tests/ampfile.rb +12 -0
- data/test/localrepo_tests/test_local_repo.rb +56 -57
- data/test/manifest_tests/test_manifest.rb +3 -5
- data/test/merge_tests/test_merge.rb +3 -3
- data/test/revlog_tests/test_revlog.rb +14 -6
- data/test/store_tests/test_fncache_store.rb +19 -19
- data/test/test_19_compatibility.rb +46 -0
- data/test/test_base85.rb +2 -2
- data/test/test_bdiff.rb +2 -2
- data/test/test_changegroup.rb +59 -0
- data/test/test_commands.rb +2 -2
- data/test/test_difflib.rb +2 -2
- data/test/test_generator.rb +34 -0
- data/test/test_ignore.rb +203 -0
- data/test/test_journal.rb +18 -13
- data/test/test_match.rb +2 -2
- data/test/test_mdiff.rb +3 -3
- data/test/test_mpatch.rb +3 -3
- data/test/test_multi_io.rb +40 -0
- data/test/test_support.rb +18 -2
- data/test/test_templates.rb +38 -0
- data/test/test_ui.rb +79 -0
- data/test/testutilities.rb +56 -0
- metadata +168 -49
- data/ext/amp/bz2/mkmf.log +0 -38
- data/lib/amp/encoding/mercurial_diff.rb +0 -378
- data/lib/amp/encoding/mercurial_patch.rb +0 -1
- data/lib/amp/encoding/patch.rb +0 -292
- data/lib/amp/encoding/pure_ruby/ruby_mercurial_patch.rb +0 -123
- data/lib/amp/merges/merge_state.rb +0 -164
- data/lib/amp/merges/merge_ui.rb +0 -322
- data/lib/amp/merges/simple_merge.rb +0 -450
- data/lib/amp/repository/branch_manager.rb +0 -234
- data/lib/amp/repository/dir_state.rb +0 -950
- data/lib/amp/repository/journal.rb +0 -203
- data/lib/amp/repository/lock.rb +0 -207
- data/lib/amp/repository/repositories/bundle_repository.rb +0 -214
- data/lib/amp/repository/repositories/http_repository.rb +0 -377
- data/lib/amp/repository/repositories/local_repository.rb +0 -2661
- data/lib/amp/repository/store.rb +0 -485
- data/lib/amp/repository/tag_manager.rb +0 -319
- data/lib/amp/repository/updatable.rb +0 -532
- data/lib/amp/repository/verification.rb +0 -431
- data/lib/amp/repository/versioned_file.rb +0 -475
- data/lib/amp/revlogs/bundle_revlogs.rb +0 -246
- data/lib/amp/revlogs/changegroup.rb +0 -217
- data/lib/amp/revlogs/changelog.rb +0 -338
- data/lib/amp/revlogs/changeset.rb +0 -521
- data/lib/amp/revlogs/file_log.rb +0 -165
- data/lib/amp/revlogs/index.rb +0 -493
- data/lib/amp/revlogs/manifest.rb +0 -195
- data/lib/amp/revlogs/node.rb +0 -18
- data/lib/amp/revlogs/revlog.rb +0 -1045
- data/lib/amp/revlogs/revlog_support.rb +0 -126
- data/lib/amp/support/ignore.rb +0 -144
- data/site/Rakefile +0 -38
- data/test/test_amp.rb +0 -9
- data/test/test_helper.rb +0 -15
@@ -0,0 +1,386 @@
|
|
1
|
+
require 'uri'
|
2
|
+
require 'zlib'
|
3
|
+
|
4
|
+
# to shut up those fucking warnings!
|
5
|
+
# taken from http://www.5dollarwhitebox.org/drupal/node/64
|
6
|
+
class Net::HTTP
|
7
|
+
alias_method :old_initialize, :initialize
|
8
|
+
def initialize(*args)
|
9
|
+
old_initialize(*args)
|
10
|
+
require 'openssl' unless defined? OpenSSL
|
11
|
+
@ssl_context = OpenSSL::SSL::SSLContext.new
|
12
|
+
@ssl_context.verify_mode = OpenSSL::SSL::VERIFY_NONE
|
13
|
+
end
|
14
|
+
end
|
15
|
+
|
16
|
+
module Amp
|
17
|
+
module Repositories
|
18
|
+
module Mercurial
|
19
|
+
|
20
|
+
##
|
21
|
+
# = This is the class for connecting to an HTTP[S]-based repository.
|
22
|
+
# The protocol's pretty simple - just ?cmd="command", and any other
|
23
|
+
# args you need. Should be pretty easy.
|
24
|
+
class HTTPRepository < Repository
|
25
|
+
include Amp::Mercurial::RevlogSupport::Node
|
26
|
+
|
27
|
+
DEFAULT_HEADERS = {"User-agent" => "Amp-#{Amp::VERSION}",
|
28
|
+
"Accept" => "Application/Mercurial-0.1"}
|
29
|
+
|
30
|
+
##
|
31
|
+
# The URL we connect to for this repository
|
32
|
+
attr_reader :url
|
33
|
+
|
34
|
+
##
|
35
|
+
# Should the repository connect via SSL?
|
36
|
+
attr_accessor :secure
|
37
|
+
|
38
|
+
##
|
39
|
+
# Returns whether the repository is local or not. Which it isn't. Because
|
40
|
+
# we're connecting over HTTP.
|
41
|
+
#
|
42
|
+
# @return [Boolean] +false+. Because the repo isn't local.
|
43
|
+
def local?; false; end
|
44
|
+
|
45
|
+
##
|
46
|
+
# Standard initializer for a repository. However, "create" is a no-op.
|
47
|
+
#
|
48
|
+
# @param [String] path the URL for the repository.
|
49
|
+
# @param [Boolean] create this is useless since we can't create remote repos
|
50
|
+
# @param [Amp::AmpConfig] config the configuration for Amp right now.
|
51
|
+
def initialize(path="", create=false, config=nil)
|
52
|
+
@url, @config = URI.parse(path), config
|
53
|
+
@username ||= @url.user
|
54
|
+
@password ||= @url.password
|
55
|
+
@auth_mode = :none
|
56
|
+
raise InvalidArgumentError.new("Invalid URL for an HTTP repo!") if @url.nil?
|
57
|
+
end
|
58
|
+
|
59
|
+
##
|
60
|
+
# Loads the capabilities from the server when necessary. (Lazy loading)
|
61
|
+
#
|
62
|
+
# @return [Hash] the capabilities of the server, in the form:
|
63
|
+
# { capability => true }
|
64
|
+
# or
|
65
|
+
# { capability => "capability;settings;"}
|
66
|
+
def get_capabilities
|
67
|
+
return @capabilities if @capabilities
|
68
|
+
begin
|
69
|
+
@capabilities = {}
|
70
|
+
do_read("capabilities")[:body].split.each do |k|
|
71
|
+
if k.include? "="
|
72
|
+
key, value = k.split("=", 2)
|
73
|
+
@capabilities[key] = value
|
74
|
+
else
|
75
|
+
@capabilities[k] = true
|
76
|
+
end
|
77
|
+
end
|
78
|
+
rescue
|
79
|
+
@capabilities = []
|
80
|
+
end
|
81
|
+
@capabilities
|
82
|
+
end
|
83
|
+
|
84
|
+
##
|
85
|
+
# Unsupported - raises an error.
|
86
|
+
def lock; raise RepoError.new("You can't lock an HTTP repo."); end
|
87
|
+
|
88
|
+
##
|
89
|
+
# Looks up a node with the given key. The key could be a node ID (full or
|
90
|
+
# partial), an index number (though this is slightly risky as it might
|
91
|
+
# match a node ID partially), "tip", and so on. See {LocalRepository#[]}.
|
92
|
+
#
|
93
|
+
# @param [String] key the key to look up - could be node ID, revision index,
|
94
|
+
# and so on.
|
95
|
+
# @return [String] the full node ID of the requested node on the remote server
|
96
|
+
def lookup(key)
|
97
|
+
require_capability("lookup", "Look up Remote Revision")
|
98
|
+
data = do_read("lookup", :key => key)[:body]
|
99
|
+
code, data = data.chomp.split(" ", 2)
|
100
|
+
|
101
|
+
return data.unhexlify if code.to_i > 0
|
102
|
+
raise RepoError.new("Unknown Revision #{data}")
|
103
|
+
end
|
104
|
+
|
105
|
+
##
|
106
|
+
# Gets all the heads of the repository. Returned in binary form.
|
107
|
+
#
|
108
|
+
# @return [Array<String>] the full, binary node_ids of all the heads on
|
109
|
+
# the remote server.
|
110
|
+
def heads
|
111
|
+
data = do_read("heads")[:body]
|
112
|
+
data.chomp.split(" ").map {|h| h.unhexlify }
|
113
|
+
end
|
114
|
+
|
115
|
+
##
|
116
|
+
# Gets the node IDs of all the branch roots in the repository. Uses
|
117
|
+
# the supplied nodes to use to search for branches.
|
118
|
+
#
|
119
|
+
# @param [Array<String>] nodes the nodes to use as heads to search for
|
120
|
+
# branches. The search starts at each supplied node (or the tip, if
|
121
|
+
# left empty), and goes to that tree's root, and returns the relevant
|
122
|
+
# information for the branch.
|
123
|
+
# @return [Array<Array<String>>] An array of arrays of strings. Each array
|
124
|
+
# has 4 components: [head, root, parent1, parent2].
|
125
|
+
def branches(nodes)
|
126
|
+
n = nodes.map {|n| n.hexlify }.join(" ")
|
127
|
+
data = do_read("branches", :nodes => n)[:body]
|
128
|
+
data.split("\n").map do |b|
|
129
|
+
b.split(" ").map {|b| b.unhexlify}
|
130
|
+
end
|
131
|
+
end
|
132
|
+
|
133
|
+
##
|
134
|
+
# Asks the server to bundle up the given nodes into a changegroup, and returns it
|
135
|
+
# uncompressed. This is for pulls.
|
136
|
+
#
|
137
|
+
# @todo figure out what the +kind+ parameter is for
|
138
|
+
# @param [Array<String>] nodes the nodes to package into the changegroup
|
139
|
+
# @param [NilClass] kind (UNUSED)
|
140
|
+
# @return [StringIO] the uncompressed changegroup as a stream
|
141
|
+
def changegroup(nodes, kind)
|
142
|
+
n = nodes.map{|i| i.hexlify }.join ' '
|
143
|
+
f = do_read('changegroup', n.empty? ? {} : {:roots => n})[:body]
|
144
|
+
|
145
|
+
s = StringIO.new "",(ruby_19? ? "w+:ASCII-8BIT" : "w+")
|
146
|
+
s.write Zlib::Inflate.inflate(f)
|
147
|
+
s.pos = 0
|
148
|
+
s
|
149
|
+
end
|
150
|
+
|
151
|
+
##
|
152
|
+
# Asks the server to bundle up all the necessary nodes between the lists
|
153
|
+
# bases and heads. It is returned as a stream that reads it in a decompressed
|
154
|
+
# fashion. This is for pulls.
|
155
|
+
#
|
156
|
+
# @param [Array<String>] bases the base nodes of the subset we're requesting.
|
157
|
+
# Should be an array (or any Enumerable) of node ids.
|
158
|
+
# @param [Array<String>] heads the heads of the subset we're requesting.
|
159
|
+
# These nodes will be retrieved as well. Should be an array of node IDs.
|
160
|
+
# @param [NilClass] source i have no idea (UNUSED)
|
161
|
+
# @return [StringIO] the uncompressed changegroup subset as a stream.
|
162
|
+
def changegroup_subset(bases, heads, source)
|
163
|
+
#require_capability 'changegroupsubset', 'look up remote changes'
|
164
|
+
base_list = bases.map {|n| n.hexlify }.join ' '
|
165
|
+
head_list = heads.map {|n| n.hexlify }.join ' '
|
166
|
+
response = do_read("changegroupsubset", :bases => base_list, :heads => head_list)
|
167
|
+
|
168
|
+
s = StringIO.new "",(ruby_19? ? "w+:ASCII-8BIT" : "w+")
|
169
|
+
s.write Zlib::Inflate.inflate(response[:body])
|
170
|
+
s.rewind
|
171
|
+
s
|
172
|
+
end
|
173
|
+
|
174
|
+
##
|
175
|
+
# Sends a bundled up changegroup to the server, who will add it to its repository.
|
176
|
+
# Uses the bundle format.
|
177
|
+
#
|
178
|
+
# @param [StringIO] cg the changegroup to push as a stream.
|
179
|
+
# @param [Array<String>] heads the heads of the changegroup being sent
|
180
|
+
# @param [NilClass] source no idea UNUSED
|
181
|
+
# @return [Fixnum] the response code from the server (1 indicates success)
|
182
|
+
def unbundle(cg, heads, source)
|
183
|
+
# have to stream bundle to a temp file because we do not have
|
184
|
+
# http 1.1 chunked transfer
|
185
|
+
|
186
|
+
type = ''
|
187
|
+
types = capable? 'unbundle'
|
188
|
+
|
189
|
+
# servers older than d1b16a746db6 will send 'unbundle' as a boolean
|
190
|
+
# capability
|
191
|
+
# this will be a list of allowed bundle compression types
|
192
|
+
types = types.split ',' rescue ['']
|
193
|
+
|
194
|
+
# pick a compression format
|
195
|
+
types.each do |x|
|
196
|
+
(type = x and break) if Amp::Mercurial::RevlogSupport::ChangeGroup::BUNDLE_HEADERS.include? x
|
197
|
+
end
|
198
|
+
|
199
|
+
# compress and create the bundle
|
200
|
+
data = Amp::Mercurial::RevlogSupport::ChangeGroup.write_bundle cg, type
|
201
|
+
|
202
|
+
# send the data
|
203
|
+
resp = do_read 'unbundle', :data => data.string,
|
204
|
+
:headers => {'Content-Type' => 'application/octet-stream'},
|
205
|
+
:heads => heads.map{|h| h.hexlify }.join(' ')
|
206
|
+
# parse output
|
207
|
+
resp_code, output = resp[:body].split "\n"
|
208
|
+
|
209
|
+
# make sure the reponse was in an expected format (i.e. with a response code)
|
210
|
+
unless resp_code.to_i.to_s == resp_code
|
211
|
+
raise abort("push failed (unexpected response): #{resp}")
|
212
|
+
end
|
213
|
+
|
214
|
+
# output any text from the server
|
215
|
+
UI::status output
|
216
|
+
# return 1 for success, 0 for failure
|
217
|
+
resp_code.to_i
|
218
|
+
end
|
219
|
+
|
220
|
+
def stream_out
|
221
|
+
do_cmd 'stream_out'
|
222
|
+
end
|
223
|
+
|
224
|
+
##
|
225
|
+
# For each provided pair of nodes, return the nodes between the pair.
|
226
|
+
#
|
227
|
+
# @param [Array<Array<String>>] an array of node pairs, so an array of an array
|
228
|
+
# of strings. The first node is the head, the second node is the root of the pair.
|
229
|
+
# @return [Array<Array<String>>] for each pair, we return 1 array, which contains
|
230
|
+
# the node IDs of every node between the pair.
|
231
|
+
# add lstrip to split_newlines to fix but not cure bug
|
232
|
+
def between(pairs)
|
233
|
+
batch = 8
|
234
|
+
ret = []
|
235
|
+
|
236
|
+
(0..(pairs.size)).step(batch) do |i|
|
237
|
+
n = pairs[i..(i+batch-1)].map {|p| p.map {|k| k.hexlify }.join("-") }.join(" ")
|
238
|
+
resp = do_read("between", :pairs => n)
|
239
|
+
|
240
|
+
raise RepoError.new("unexpected code: #{code}") unless resp[:code] == 200
|
241
|
+
|
242
|
+
ret += resp[:body].lstrip.split_newlines.map {|l| (l && l.split(" ").map{|i| i.unhexlify }) || []}
|
243
|
+
end
|
244
|
+
|
245
|
+
Amp::UI.debug "between returns: #{ret.inspect}"
|
246
|
+
ret
|
247
|
+
end
|
248
|
+
|
249
|
+
private
|
250
|
+
|
251
|
+
##
|
252
|
+
# Runs the given command by the server, gets the response. Takes the name of the command,
|
253
|
+
# the data, headers, etc. The command is assumed to be a GET request, unless args[:data] is
|
254
|
+
# set, in which case it is sent via POST.
|
255
|
+
#
|
256
|
+
# @param [String] command the command to send to the server, such as "heads"
|
257
|
+
# @param [Hash] args the arguments you need to provide - for lookup, it
|
258
|
+
# might be the revision indicies.
|
259
|
+
# @return [String] the response data from the server.
|
260
|
+
def do_cmd(command, args={})
|
261
|
+
require 'net/http'
|
262
|
+
|
263
|
+
# Be safe for recursive calls
|
264
|
+
work_args = args.dup
|
265
|
+
# grab data, but don't leave it in, or it'll be added to the query string
|
266
|
+
data = work_args.delete(:data) || nil
|
267
|
+
# and headers, but don't leave it in, or it'll be added to the query string
|
268
|
+
headers = work_args.delete(:headers) || {}
|
269
|
+
|
270
|
+
# Our query string is "cmd => command" plus any other parts of the args hash
|
271
|
+
query = { "cmd" => command }
|
272
|
+
query.merge! work_args
|
273
|
+
|
274
|
+
# break it up, make a query
|
275
|
+
host = @url.host
|
276
|
+
path = @url.path
|
277
|
+
# Was having trouble with this... should be safe now
|
278
|
+
path += "?" + URI.escape(query.map {|k,v| "#{k}=#{v}"}.join("&"), /[^-_!~*'()a-zA-Z\d;\/?:@&=+$,\[\]]/n)
|
279
|
+
|
280
|
+
# silly scoping
|
281
|
+
response = nil
|
282
|
+
# Create an HTTP object so we can send our request. static methods aren't flexible
|
283
|
+
# enough for us
|
284
|
+
sess = Net::HTTP.new host, @url.port
|
285
|
+
# Use SSL if necessary
|
286
|
+
sess.use_ssl = true if secure
|
287
|
+
# Let's send our request!
|
288
|
+
sess.start do |http|
|
289
|
+
# if we have data, it's a POST
|
290
|
+
if data
|
291
|
+
req = Net::HTTP::Post.new(path)
|
292
|
+
req.body = data
|
293
|
+
else
|
294
|
+
# otherwise, it's a GET
|
295
|
+
req = Net::HTTP::Get.new(path)
|
296
|
+
end
|
297
|
+
if @auth_mode == :digest
|
298
|
+
# Set digest headers
|
299
|
+
req.digest_auth @username, @password, @auth_digest
|
300
|
+
elsif @auth_mode == :basic
|
301
|
+
# Set basic auth headers
|
302
|
+
req.basic_auth @username, @password
|
303
|
+
end
|
304
|
+
# Copy over the default headers
|
305
|
+
DEFAULT_HEADERS.each {|k, v| req[k] = v}
|
306
|
+
# Then overwrite them (and add new ones) from our arguments
|
307
|
+
headers.each {|k, v| req[k] = v}
|
308
|
+
# And send the request!
|
309
|
+
response = http.request(req)
|
310
|
+
end
|
311
|
+
# Case on response - we'll be using the kind_of? style of switch statement
|
312
|
+
# here
|
313
|
+
case response
|
314
|
+
when Net::HTTPRedirection
|
315
|
+
# Redirect to a new URL - grab the new URL...
|
316
|
+
newurl = response["Location"]
|
317
|
+
@url = URI.parse newurl
|
318
|
+
@url.user = @username # Keep the old username/password combination.
|
319
|
+
@url.password = @password # Keep the old username/password combination.
|
320
|
+
|
321
|
+
# and try that again.
|
322
|
+
do_cmd(command, args)
|
323
|
+
when Net::HTTPUnauthorized
|
324
|
+
if @auth_mode == :digest
|
325
|
+
# no other handlers!
|
326
|
+
raise AuthorizationError.new("Failed to authenticate to local repository!")
|
327
|
+
elsif @auth_mode == :basic
|
328
|
+
# failed to authenticate via basic, so escalate to digest mode
|
329
|
+
@auth_mode = :digest
|
330
|
+
@auth_digest = response
|
331
|
+
do_cmd command, args
|
332
|
+
else
|
333
|
+
# They want a username and password. A few routes:
|
334
|
+
# First, check the URL for the username:password@host format
|
335
|
+
@username ||= @url.user
|
336
|
+
@password ||= @url.password
|
337
|
+
# and start off with basic authentication
|
338
|
+
@auth_mode = :basic
|
339
|
+
# If the URL didn't contain the username AND password, ask the user for them.
|
340
|
+
unless @username && @password
|
341
|
+
UI::say "==> HTTP Authentication Required"
|
342
|
+
|
343
|
+
@username = UI::ask 'username: '
|
344
|
+
@password = UI::ask 'password: ', :password
|
345
|
+
end
|
346
|
+
|
347
|
+
# Recursively call the command
|
348
|
+
do_cmd command, args
|
349
|
+
end
|
350
|
+
else
|
351
|
+
response
|
352
|
+
end
|
353
|
+
end
|
354
|
+
|
355
|
+
##
|
356
|
+
# This is a helper for do_cmd - it splits up the response object into
|
357
|
+
# two relevant parts: the response body, and the response code.
|
358
|
+
#
|
359
|
+
# @param [String] command the remote command to execute, such as "heads"
|
360
|
+
# @param [Hash] args the arguments to pass to the request. Takes some special values. All
|
361
|
+
# other values are sent in the query string.
|
362
|
+
# @option args [String] :data (nil) the POST data to send
|
363
|
+
# @option args [Hash] :headers ({}) the headers to send with the request, not including
|
364
|
+
# any authentication or user-agent headers.
|
365
|
+
# @return [Hash<Symbol => String, Integer>] the response data, in the form
|
366
|
+
# {:body => body, :code => response_code}
|
367
|
+
def do_read(command, args={})
|
368
|
+
response = do_cmd(command, args)
|
369
|
+
{:body => response.body, :code => response.code.to_i}
|
370
|
+
end
|
371
|
+
end
|
372
|
+
|
373
|
+
##
|
374
|
+
# A special form of the HTTPRepository, except that it is secured over SSL (HTTPS).
|
375
|
+
# Other than that, nothing fancy about it.
|
376
|
+
class HTTPSRepository < HTTPRepository
|
377
|
+
def initialize(*args)
|
378
|
+
require 'net/https'
|
379
|
+
|
380
|
+
super(*args)
|
381
|
+
self.secure = true
|
382
|
+
end
|
383
|
+
end
|
384
|
+
end
|
385
|
+
end
|
386
|
+
end
|
@@ -0,0 +1,2034 @@
|
|
1
|
+
require 'fileutils'
|
2
|
+
module Amp
|
3
|
+
module Repositories
|
4
|
+
module Mercurial
|
5
|
+
|
6
|
+
##
|
7
|
+
# A Local Repository is a repository that works on local repo's, such
|
8
|
+
# as your working directory. This makes it pretty damn important, and also
|
9
|
+
# pretty damn complicated. Have fun!
|
10
|
+
class LocalRepository < Repository
|
11
|
+
include Amp::Mercurial::RevlogSupport::Node
|
12
|
+
include Repositories::Mercurial::BranchManager
|
13
|
+
include Repositories::Mercurial::TagManager
|
14
|
+
include Repositories::Mercurial::Updatable
|
15
|
+
include Repositories::Mercurial::Verification
|
16
|
+
|
17
|
+
# The config is an {AmpConfig} for this repo (and uses .hg/hgrc)
|
18
|
+
attr_accessor :config
|
19
|
+
|
20
|
+
attr_reader :root
|
21
|
+
attr_reader :root_pathname # save some computation here
|
22
|
+
attr_reader :hg
|
23
|
+
attr_reader :hg_opener
|
24
|
+
attr_reader :branch_manager
|
25
|
+
attr_reader :store
|
26
|
+
attr_reader :staging_area
|
27
|
+
|
28
|
+
##
|
29
|
+
# Initializes a new directory to the given path, and with the current
|
30
|
+
# configuration.
|
31
|
+
#
|
32
|
+
# @param [String] path a path to the Repository.
|
33
|
+
# @param [Boolean] create Should we create a new one? Usually for
|
34
|
+
# the "amp init" command.
|
35
|
+
# @param [Amp::AmpConfig] config the configuration loaded from the user's
|
36
|
+
# system. Will have some settings overwritten by the repo's hgrc.
|
37
|
+
def initialize(path="", create=false, config=nil)
|
38
|
+
super(path, create, config)
|
39
|
+
@hg = working_join ".hg"
|
40
|
+
@file_opener = Amp::Opener.new @root
|
41
|
+
@file_opener.default = :open_file # these two are the same, pretty much
|
42
|
+
@hg_opener = Amp::Opener.new @root
|
43
|
+
@hg_opener.default = :open_hg # just with different defaults
|
44
|
+
@filters = {}
|
45
|
+
@changelog = nil
|
46
|
+
@manifest = nil
|
47
|
+
@dirstate = nil
|
48
|
+
@staging_area = StagingArea.new(self)
|
49
|
+
@working_lock_ref = @lock_ref = nil
|
50
|
+
requirements = []
|
51
|
+
|
52
|
+
# make a repo if necessary
|
53
|
+
unless File.directory? @hg
|
54
|
+
if create
|
55
|
+
then requirements = init config
|
56
|
+
else raise RepoError.new("Repository #{path} not found")
|
57
|
+
end
|
58
|
+
end
|
59
|
+
|
60
|
+
# no point in reading what we _just_ wrote...
|
61
|
+
unless create
|
62
|
+
# read requires
|
63
|
+
# save it if something's up
|
64
|
+
@hg_opener.open("requires", 'r') {|f| f.each {|r| requirements << r.strip } } rescue nil
|
65
|
+
end
|
66
|
+
|
67
|
+
@store = Stores.pick requirements, @hg, Amp::Opener
|
68
|
+
@config = Amp::AmpConfig.new :parent_config => config
|
69
|
+
@config.read_file join("hgrc")
|
70
|
+
end
|
71
|
+
|
72
|
+
def local?; true; end
|
73
|
+
|
74
|
+
def inspect; "#<LocalRepository @root=#{@root.inspect}>"; end
|
75
|
+
|
76
|
+
##
# Creates this repository's folders and structure.
#
# @param [AmpConfig] config the configuration for this user so
# we know what neato features to use (like filename cache)
# @return [Array<String>] the requirements that we found are returned,
# so further configuration can go down.
def init(config=@config)
  # make the directory if it's not there
  super
  FileUtils.makedirs @hg

  # Every repo we create speaks revlog version 1.
  requirements = ["revlogv1"]

  # add some requirements
  # NOTE(review): the trailing "|| true" makes this condition always true,
  # so the "usestore" config option is effectively ignored -- confirm
  # whether this was meant as a default rather than a hard override.
  if config["format"]["usestore", Boolean] || true
    FileUtils.mkdir "#{@hg}/store"
    requirements << "store"
    requirements << "fncache" if config["format"]["usefncache", Boolean, true]

    # add the changelog
    make_changelog
  end

  # write the requires file
  write_requires requirements
end
|
103
|
+
|
104
|
+
##
# Has the repository been changed since the last commit?
# Returns true if there are NO outstanding changes or uncommitted merges.
#
# @return [Boolean] is the repo pristine
def pristine?
  # A second dirstate parent means a merge is in progress -- not pristine.
  return false unless dirstate.parents.last == RevlogSupport::Node::NULL_ID
  # Pristine only when every interesting status bucket is empty.
  status(:only => [:modified, :added, :removed, :deleted]).all? {|_, v| v.empty? }
end
|
113
|
+
|
114
|
+
# Defines #changed? as the boolean negation of #pristine?.
opposite_method :changed?, :pristine?
|
115
|
+
|
116
|
+
##
# Gets the changeset at the given revision.
#
# @param [String, Integer] rev the revision index (Integer) or
#   node_id (String) that we want to access. nil returns the working
#   directory changeset; numeric strings (e.g. "10") are treated as
#   revision indices.
# @return [Changeset] the changeset at the given revision index or node
#   id. Could be working directory.
def [](rev)
  return Amp::Mercurial::WorkingDirectoryChangeset.new(self) if rev.nil?

  # strings that round-trip through Integer ("10") are revision indices
  rev = rev.to_i if rev.to_i.to_s == rev
  Amp::Mercurial::Changeset.new(self, rev)
end
|
133
|
+
|
134
|
+
##
# Creates a lock at the given path. At first it tries to just make it straight away.
# If this fails, we then sleep for up to a given amount of time (defaults to 10 minutes!)
# and continually try to acquire the lock.
#
# @raise [LockHeld] if the lock cannot be acquired, this exception is raised
# @param [String] lockname the name of the lock to create
# @param [Boolean] wait should we wait for the lock to be released?
# @param [Proc, #call] release_proc a proc to run when the lock is released
# @param [Proc, #call] acquire_proc a proc to run when we get the lock
# @param [String] desc the description of the lock to show if someone stomps on it
# @return [Lock] a lock at the given location.
def make_lock(lockname, wait, release_proc, acquire_proc, desc)
  begin
    # First attempt: zero timeout, i.e. fail immediately if already held.
    # NOTE(review): this call passes :release_fxn while the retry below
    # passes :release_proc -- one of the two keys is presumably wrong;
    # confirm against Lock's constructor.
    lock = Lock.new(lockname, :timeout => 0, :release_fxn => release_proc, :desc => desc)
  rescue LockHeld => err
    raise unless wait
    UI.warn("waiting for lock on #{desc} held by #{err.locker}")
    # Retry with the configured timeout (ui.timeout, default 600 seconds).
    lock = Lock.new(lockname, :timeout => @config["ui","timeout","600"].to_i,
                    :release_proc => release_proc, :desc => desc)
  end
  acquire_proc.call if acquire_proc
  return lock
end
|
158
|
+
|
159
|
+
##
# Locks the repository's .hg/store directory. Returns the lock, or if a block is given,
# runs the block with the lock, and clears the lock afterward.
#
# @yield When a block is given, that block is executed under locked
#   conditions. That code can be guaranteed it is the only code running on the
#   store in a destructive manner.
# @param [Boolean] wait (true) wait for the lock to expire?
# @return [Lock] the lock on the .hg/store directory
def lock_store(wait = true)
  # Reuse a live lock if we already hold one.
  return @lock_ref if @lock_ref && @lock_ref.weakref_alive?

  lock = make_lock(store_join("lock"), wait, nil, nil, "repository #{root}")
  @lock_ref = WeakRef.new(lock)

  return lock unless block_given?

  begin
    yield
  ensure
    # Always drop the reference and release, even if the block raised.
    @lock_ref = nil
    lock.release
  end
end
|
184
|
+
|
185
|
+
##
# Locks the repository's working directory. Returns the lock, or if a block is given,
# runs the block with the lock, and clears the lock afterward.
#
# @yield When a block is given, that block is executed under locked
#   conditions. That code can be guaranteed it is the only code running on the
#   working directory in a destructive manner.
# @param [Boolean] wait (true) wait for the lock to expire?
# @return [Lock] the lock on the working directory
def lock_working(wait = true)
  # Reuse a live lock if we already hold one.
  return @working_lock_ref if @working_lock_ref && @working_lock_ref.weakref_alive?

  lock = make_lock(join("wlock"), wait, nil, nil, "working directory of #{root}")
  @working_lock_ref = WeakRef.new(lock)

  return lock unless block_given?

  begin
    yield
  ensure
    # Always drop the reference and release, even if the block raised.
    @working_lock_ref = nil
    lock.release
  end
end
|
210
|
+
|
211
|
+
##
# Takes a block, and runs that block with both the store and the working directory locked.
# The store lock is taken first, then the working-directory lock.
#
# @param [Boolean] wait (true) should we wait for locks, or just give up early?
def lock_working_and_store(wait=true)
  lock_store(wait) { lock_working(wait) { yield } }
end
|
222
|
+
|
223
|
+
##
# Returns an opener object, which knows how to open objects in the repository's
# store. Delegates to the store selected in #initialize.
def store_opener
  @store.opener
end
|
229
|
+
|
230
|
+
##
# Gets the file-log for the given path, so we can look at an individual
# file's history, for example.
#
# @param [String] f the path to the file (a single leading "/" is stripped)
# @return [FileLog] a filelog (a type of revision log) for the given file
def file_log(f)
  f = f[1..-1] if f.start_with?("/")
  Amp::Mercurial::FileLog.new(@store.opener, f)
end
alias_method :file, :file_log
|
241
|
+
|
242
|
+
##
# Returns the parent changesets of the specified changeset. Defaults to the
# working directory, if +change_id+ is unspecified.
# Delegates the lookup to #[].
#
# @param [Integer, String] change_id the ID (or index) of the requested changeset
# @return [Array<Changeset>] the parent changesets of the requested changeset
def parents(change_id=nil)
  self[change_id].parents
end
|
251
|
+
|
252
|
+
##
# Gets a versioned file for the given path, so we can look at the individual
# file's history with the file object itself.
#
# @param [String] path the path to the file
# @param [Hash] opts the options for creating the versioned file
# @option [String] opts change_id (nil) The ID of the changeset in question
# @option [String, Integer] opts file_id (nil) the revision # or node ID
#   into the file_log
# @return [Amp::Mercurial::VersionedFile] the versioned file wrapper
def versioned_file(path, opts={})
  Amp::Mercurial::VersionedFile.new(self, path, opts)
end
|
264
|
+
|
265
|
+
##
# Gets a versioned file, but using the working directory, so we are looking
# past the last commit. Important because it uses a different class
# (VersionedWorkingFile) than #versioned_file does.
#
# @param [String] path the path to the file
# @param [Hash] opts the options for creating the versioned file
# @option [String] opts change_id (nil) The ID of the changeset in question
# @option [String, Integer] opts file_id (nil) the revision # or node ID
#   into the file_log
# @return [VersionedWorkingFile] the working-directory file wrapper
def working_file(path, opts={})
  VersionedWorkingFile.new(self, path, opts)
end
|
277
|
+
|
278
|
+
##
# Reads from a file, but in the working directory.
# Uses encoding if we are set up to do so.
#
# @param [String] filename the file to read from the working directory
# @return [String] the data read from the file, passed through the
#   "encode" filter when one is registered
def working_read(filename)
  data = @file_opener.open(filename, "r") {|f| f.read }
  encoder = @filters["encode"]
  data = encoder.call(filename, data) if encoder
  data
end
|
290
|
+
|
291
|
+
##
# Writes to a file, but in the working directory. Uses encoding if we are
# set up to do so. Also handles symlinks and executables. Ugh.
#
# @param [String] path the path to the file to write to
# @param [String] data the data to write
# @param [String] flags the flags to set ("x" marks the file executable)
def working_write(path, data, flags = "")
  @file_opener.open(path, "w") {|file| file.write(data) }
  wants_exec = flags && flags.include?('x')
  File.amp_set_executable(working_join(path), true) if wants_exec
end
|
306
|
+
|
307
|
+
##
# Returns the changelog for this repository. This changelog basically
# is the history of all commits. Lazily instantiated on first use.
#
# Honors the HG_PENDING environment variable: when it points inside this
# repository's root, pending (in-transaction) changelog data is read too.
#
# @return [ChangeLog] the commit history object for the entire repo.
def changelog
  return @changelog if @changelog

  @changelog = Amp::Mercurial::ChangeLog.new @store.opener

  # HG_PENDING marks a repo with an in-progress transaction; only honor
  # it when it refers to *this* repository.
  # Fixed: the previous check (/^#{root}/) interpolated the root path
  # unescaped into a regexp, so metacharacters in the path could mis-match
  # or raise; a plain prefix test is what was intended.
  pending = ENV['HG_PENDING']
  if pending && pending.start_with?(root)
    @changelog.read_pending('00changelog.i.a')
  end

  @changelog
end
|
325
|
+
|
326
|
+
##
# Has the file been modified from node1 to node2?
#
# @param [String] file the file to check
# @param [Hash] opts needs to have :node1 and :node2 (the two changesets
#   to compare; each is indexed with [file] to get a versioned file)
# @return [Boolean] has the +file+ been modified?
def file_modified?(file, opts={})
  # Versioned-file objects for the file in each of the two changesets.
  vf_old, vf_new = opts[:node1][file], opts[:node2][file]

  # Modified when the flags differ, or when the file nodes differ and
  # either the new changeset touches the file or the two compare under #===.
  # NOTE(review): the `vf_old === vf_new` arm reads oddly for a
  # "modified?" test -- presumably VersionedFile#=== compares contents;
  # confirm against its definition.
  tests = [vf_old.flags != vf_new.flags,
           vf_old.file_node != vf_new.file_node &&
           (vf_new.changeset.include?(file) || vf_old === vf_new)]
  tests.any?
end
|
340
|
+
|
341
|
+
|
342
|
+
##
# Marks a file as resolved according to the merge state. Basic form of
# merge conflict resolution that all repositories must support.
# Delegates to the repository's MergeState (see #merge_state).
#
# @api
# @param [String] filename the file to mark resolved
def mark_resolved(filename)
  merge_state.mark_resolved filename
end
|
351
|
+
|
352
|
+
##
# Marks a file as conflicted according to the merge state. Basic form of
# merge conflict resolution that all repositories must support.
# Delegates to the repository's MergeState (see #merge_state).
#
# @api
# @param [String] filename the file to mark as conflicted
def mark_conflicted(filename)
  merge_state.mark_conflicted filename
end
|
361
|
+
|
362
|
+
##
# Returns all files that have not been merged. In other words, if we're
# waiting for the user to fix up their merge, then return the list of files
# we need to be correct before merging.
# Delegates to the repository's MergeState (see #merge_state).
#
# @todo think up a better name
#
# @return [Array<Array<String, Symbol>>] an array of String-Symbol pairs - the
#   filename is the first entry, the status of the merge is the second.
def uncommitted_merge_files
  merge_state.uncommitted_merge_files
end
|
374
|
+
|
375
|
+
##
# Attempts to resolve the given file, according to how mercurial manages
# merges. Needed for api compliance.
#
# Fixed: the method was documented (and used) as taking a filename but was
# defined with no parameter, so the body's references to the file raised at
# runtime; the parameter is now declared and used throughout.
#
# @api
# @param [String] filename the file to attempt to resolve
def try_resolve_conflict(filename)
  # retry the merge
  working_changeset = self[nil]
  merge_changeset = working_changeset.parents.last

  # backup the current file to a .resolve file (but retain the extension
  # so editors that rely on extensions won't bug out)
  path = working_join filename
  File.copy(path, path + ".resolve" + File.extname(path))

  # try to merge the files!
  merge_state.resolve(filename, working_changeset, merge_changeset)

  # restore the backup to .orig (overwriting the old one)
  File.move(path + ".resolve" + File.extname(path), path + ".orig" + File.extname(path))
end
|
397
|
+
|
398
|
+
##
# Returns the merge state for this repository. The merge state keeps track
# of what files need to be merged for an update to be successfully completed.
# Lazily instantiated and memoized on first access.
#
# @return [MergeState] the repository's merge state.
def merge_state
  return @merge_state if @merge_state
  @merge_state = Amp::Merges::Mercurial::MergeState.new(self)
end
|
406
|
+
|
407
|
+
##
# Returns the manifest for this repository. The manifest keeps track
# of what files exist at what times, and if they have certain flags
# (such as executable, or is it a symlink).
#
# @return [Manifest] the manifest for the repository
def manifest
  unless @manifest
    changelog # make sure the changelog is loaded first
    @manifest = Amp::Mercurial::Manifest.new(@store.opener)
  end
  @manifest
end
|
419
|
+
|
420
|
+
##
# Returns the dirstate for this repository. The dirstate keeps track
# of files status, such as removed, added, merged, and so on. It also
# keeps track of the working directory.
# Delegates to the staging area created in #initialize.
#
# @return [DirState] the dirstate for this local repository.
def dirstate
  staging_area.dirstate
end
|
429
|
+
|
430
|
+
##
# Returns the URL of this repository. Uses the "file:" scheme as such.
#
# @return [String] the URL pointing to this repo
def url
  "file:#{@root}"
end
|
435
|
+
|
436
|
+
##
# Opens a file using our opener. Can only access files in .hg/
# Forwards all arguments (and any block) to the hg opener's #open.
def open(*args, &block)
  @hg_opener.open(*args, &block)
end
|
441
|
+
|
442
|
+
##
# Joins the path from this repo's path (.hg), to the file provided.
#
# @param file the file we need the path for
# @return [String] the repo's .hg directory, joined with the file's path
def join(file)
  File.join @hg, file
end
|
450
|
+
|
451
|
+
##
# Joins the path, with a bunch of other args, to the store's directory.
# Used for opening {FileLog}s and whatnot. Delegates to the store.
#
# @param file the path to the file
# @return [String] the path to the file from the store.
def store_join(file)
  @store.join file
end
|
460
|
+
|
461
|
+
##
# Looks up an identifier for a revision in the commit history. This
# key could be an integer (specifying a revision number), "." for
# the latest revision, "null" for the null revision, "tip" for
# the tip of the repository, a node_id (in hex or binary form) for
# a revision in the changelog. Yeah. It's a flexible method.
#
# @raise [RepoError] if no revision matches +key+
# @param key the key to lookup in the history of the repo
# @return [String] a node_id into the changelog for the requested revision
def lookup(key)
  key = key.to_i if key.to_i.to_s == key.to_s # casting for things like "10"
  case key
  when Integer
    # Fixed: previously "when Fixnum, Bignum, Integer" -- Fixnum/Bignum
    # are Integer subclasses (and were removed entirely in Ruby 3.2),
    # so matching Integer alone is equivalent and future-proof.
    changelog.node_id_for_index(key)
  when "."
    dirstate.parents().first
  when "null", nil
    NULL_ID
  when "tip"
    changelog.tip
  else
    # Exact node-id match first...
    n = changelog.id_match(key)
    return n if n

    # ...then tags and branch names...
    return tags[key] if tags[key]
    return branch_tags[key] if branch_tags[key]

    # ...then an unambiguous node-id prefix.
    n = changelog.partial_id_match(key)
    return n if n

    # bail
    raise RepoError.new("unknown revision #{key}")
  end
end
|
496
|
+
|
497
|
+
##
# Finds the nodes between two nodes - this algorithm is ported from the
# python for mercurial (localrepo.py:1247, for 1.2.1 source). Since this
# is used by servers, it implements their algorithm... which seems to
# intentionally not return every node between +top+ and +bottom+.
# Each one is twice as far from +top+ as the previous.
#
# @param [Array<String, String>] pairs An array of node-id pairs, which are
#   arrays of [+top+, +bottom+], which are:
#   top [String] the "top" - or most recent - revision's node ID
#   bottom [String] the "bottom" - or oldest - revision's node ID
#
# @return [Array<Array<String>>] for each pair, a list of node IDs that
#   are between +top+ and +bottom+ (at exponentially growing distances)
def between(pairs)
  pairs.map do |top, bottom|
    current = top
    selected = []
    distance = 0
    next_pick = 1

    # Walk first parents from +top+ toward +bottom+, keeping nodes at
    # distances 1, 2, 4, 8, ... from the top.
    until current == bottom || current == NULL_ID
      if distance == next_pick
        selected << current
        next_pick *= 2
      end
      current = changelog.parents_for_node(current).first
      distance += 1
    end

    selected
  end
end
|
526
|
+
|
527
|
+
##
# Pull new changegroups from +remote+
# This does not apply the changes, but pulls them onto
# the local server.
#
# @param [Repository] remote the remote repository object to pull from
# @param [Hash] opts extra options for pulling
# @option [Array<String, Fixnum>] :heads ([]) which repository heads to pull, such as
#   a branch name or a sha-1 identifier
# @option [Boolean] :force (false) force the pull, ignoring any errors or warnings
# @return [Boolean] for success/failure
def pull(remote, opts={:heads => nil, :force => nil})
  lock_store do
    # get the common nodes, missing nodes, and the remote heads
    # this is findcommonincoming in the Python code, for those with both open
    common, fetch, remote_heads = *common_nodes(remote, :heads => opts[:heads],
                                                        :force => opts[:force])

    UI::status 'requesting all changes' if fetch == [NULL_ID]
    if fetch.empty?
      UI::status 'no changes found'
      return 0
    end

    # If no specific heads were requested, pull everything the remote has.
    if (opts[:heads].nil? || opts[:heads].empty?) && remote.capable?('changegroupsubset')
      opts[:heads] = remote_heads
    end
    opts[:heads] ||= []
    cg = if opts[:heads].empty?
           remote.changegroup fetch, :pull
         else
           # check for capabilities
           unless remote.capable? 'changegroupsubset'
             # Fixed: the concatenated message fragments lacked separating
             # spaces, producing "...becausethe other...supportchangegroupsubset".
             raise abort('Partial pull cannot be done because ' +
                         'the other repository doesn\'t support ' +
                         'changegroupsubset')
           end # end unless

           remote.changegroup_subset fetch, opts[:heads], :pull
         end

    # Apply the received group to our store; its result is the return value.
    add_changegroup cg, :pull, remote.url
  end
end
|
571
|
+
|
572
|
+
##
# Add a changegroup to the repo.
#
# Return values:
# - nothing changed or no source: 0
# - more heads than before: 1+added_heads (2..n)
# - fewer heads than before: -1-removed_heads (-2..-n)
# - number of heads stays the same: 1
#
# Don't the first and last conflict? they stay the same if
# nothing has changed...
#
# @param [StringIO] source the stream carrying the changegroup data
#   (assumed StringIO-like: responds to #string -- TODO confirm)
# @param [Symbol] type how the group arrived (e.g. :pull); passed to hooks
# @param [String] url where the group came from; passed to hooks
# @param [Hash] opts :empty, when non-empty, tolerates an empty changelog group
def add_changegroup(source, type, url, opts={:empty => []})
  run_hook :pre_changegroup, :throw => true, :source => type, :url => url
  changesets = files = revisions = 0

  # No data at all -> nothing to do.
  return 0 if source.string.empty?

  # Maps a node to its changelog revision index.
  rev_map = proc {|x| changelog.revision_index_for_node x }
  # Called per incoming changeset; returns the next changelog index.
  cs_map = proc do |x|
    UI::debug "add changeset #{short x}"
    changelog.size
  end

  # write changelog data to temp files so concurrent readers will not
  # see inconsistent view
  changelog.delay_update
  old_heads = changelog.heads.size
  new_heads = nil # scoping
  # NOTE(review): this clobbers the 0 assigned above; if the journal block
  # exits early, the later `changesets > 0` comparison sees nil -- confirm.
  changesets = nil # scoping
  cor = nil # scoping
  cnr = nil # scoping
  heads = nil # scoping

  # All additions run inside a journaled transaction so a failure rolls back.
  Amp::Mercurial::Journal.start(join('journal'), :opener => @store.opener) do |journal|
    UI::status 'adding changeset'

    # pull of the changeset group
    cor = changelog.size - 1
    unless changelog.add_group(source, cs_map, journal) || opts[:empty].any?
      raise abort("received changelog group is empty")
    end

    cnr = changelog.size - 1
    changesets = cnr - cor

    # pull off the manifest group
    UI::status 'adding manifests'

    # No need to check for empty manifest group here:
    # if the result of the merge of 1 and 2 is the same in 3 and 4,
    # no new manifest will be created and the manifest group will be
    # empty during the pull
    manifest.add_group source, rev_map, journal

    # process the files
    UI::status 'adding file changes'

    # Each chunk names a file whose revlog group follows; an empty chunk
    # terminates the stream.
    loop do
      f = Amp::Mercurial::RevlogSupport::ChangeGroup.get_chunk source
      break if f.empty?

      UI::debug "adding #{f} revisions"
      fl = file f
      o = fl.index_size
      unless fl.add_group source, rev_map, journal
        raise abort('received file revlog group is empty')
      end
      revisions += fl.index_size - o
      files += 1
    end # end loop

    new_heads = changelog.heads.size
    heads = ""

    unless old_heads.zero? || new_heads == old_heads
      heads = " (+#{new_heads - old_heads} heads)"
    end

    UI::status("added #{changesets} changesets" +
               " with #{revisions} changes to #{files} files#{heads}")

    if changesets > 0
      changelog.write_pending
      # NOTE(review): `p` is built but never invoked -- presumably a
      # leftover from the Python port; confirm before removing.
      p = proc { changelog.write_pending && root or "" }
      run_hook :pre_txnchangegroup, :throw => true,
               :node => changelog.node_id_for_index(cor+1).hexlify,
               :source => type,
               :url => url
    end

    changelog.finalize journal

  end # end Journal::start

  if changesets > 0
    # forcefully update the on-disk branch cache
    UI::debug 'updating the branch cache'
    branch_tags
    run_hook :post_changegroup, :node => changelog.node_id_for_index(cor+1).hexlify, :source => type, :url => url

    # Fire the :incoming hook once per newly-added changeset.
    ((cor+1)..(cnr+1)).to_a.each do |i|
      run_hook :incoming, :node => changelog.node_id_for_index(i).hexlify,
               :source => type,
               :url => url
    end # end each
  end # end if

  # never return 0 here
  ret = if new_heads < old_heads
          new_heads - old_heads - 1
        else
          new_heads - old_heads + 1
        end # end if

  ret
end # end def
|
688
|
+
|
689
|
+
##
# A changegroup, of some sort.
#
# Builds a changegroup from +base_nodes+ up to this repository's current
# heads by delegating to #changegroup_subset.
#
# @param [Array<String>] base_nodes the base node IDs for the group
# @param [Symbol] source how the group is being requested (e.g. :pull)
def changegroup(base_nodes, source)
  changegroup_subset(base_nodes, heads, source)
end
|
694
|
+
|
695
|
+
##
# Prints information about the changegroup we are going to receive.
# Currently only reports the changeset count for bundles.
#
# @param [Array<String>] nodes the list of node IDs we are receiving
# @param [Symbol] source how are we receiving the changegroup?
# @todo add more debug info
def changegroup_info(nodes, source)
  return unless source == :bundle
  UI.status("#{nodes.size} changesets found")
end
|
708
|
+
|
709
|
+
##
# Faster version of changegroup_subset. Useful when pushing working dir.
#
# Generate a changegroup of all nodes that we have that a recipient
# doesn't.
#
# This is much easier than the previous function as we can assume that
# the recipient has any changeset node we aren't sending them.
#
# @param [Array<String>] common the set of common nodes between remote and self
# @param [Amp::Repository] source origin marker passed to the outgoing hooks
# @return [StringIO] the serialized changegroup, rewound and ready to read
def get_changegroup(common, source)
  # Call the hooks
  run_hook :pre_outgoing, :throw => true, :source => source

  # Nodes the recipient lacks, and the set of their changelog revisions.
  nodes = changelog.find_missing common
  revset = Hash.with_keys(nodes.map {|n| changelog.rev(n)})

  changegroup_info nodes, source

  identity = proc {|x| x }

  # ok so this method goes through the generic revlog, and looks for nodes
  # in the changeset(s) we're pushing. Works by the link_rev - basically,
  # the changelog says "hey we're at revision 35", and any changes to any
  # files in any revision logs for that commit will have a link_revision
  # of 35. So we just look for 35!
  gen_node_list = proc do |log|
    log.select {|r| revset[r.link_rev] }.map {|r| r.node_id }
  end

  # Goal of this proc: We need to update the changed_files hash to reflect
  # which files (typically file logs) have changed since the last push.
  #
  # How it works: it generates a proc that takes a node_id. That node_id
  # will be looked up in the changelog.i file, which happens to store a
  # list of files that were changed in that commit! So really, this method
  # just takes a node_id, and adds filenames to the list of changed files.
  changed_file_collector = proc do |changed_fileset|
    proc do |cl_node|
      c = changelog.read cl_node
      # c[3] is the list of files touched by this commit.
      c[3].each {|fname| changed_fileset[fname] = true }
    end
  end

  # Builds a per-revlog proc that maps a node in that revlog back to the
  # changelog node it was committed in (via link_rev).
  lookup_revlink_func = proc do |revlog|
    # given a revision, return the node
    lookup_revlink = proc do |n|
      changelog.node revlog[n].link_rev
    end
  end

  # This constructs a changegroup, or a list of all changed files.
  # If you're here, looking at this code, this bears repeating:
  # - Changelog
  # -- ChangeSet+
  #
  # A Changelog (history of a branch) is an array of ChangeSets,
  # and a ChangeSet is just a single revision, containing what files
  # were changed, who did it, and the commit message. THIS IS JUST A
  # RECEIPT!!!
  #
  # The REASON we construct a changegroup here is because this is called
  # when we push, and we push a changelog (usually bundled to conserve
  # space). This is where we make that receipt, called a changegroup.
  generate_group = proc do
    result = []
    changed_files = {}

    coll = changed_file_collector[changed_files]
    # get the changelog's changegroups
    changelog.group(nodes, identity, coll) {|chunk| result << chunk }

    node_iter = gen_node_list[manifest]
    look = lookup_revlink_func[manifest]
    # get the manifest's changegroups
    manifest.group(node_iter, look) {|chunk| result << chunk }

    # Emit each changed file's revlog group, preceded by a header chunk
    # carrying the filename.
    changed_files.keys.sort.each do |fname|
      file_revlog = file fname
      if file_revlog.index_size.zero?
        raise abort("empty or missing revlog for #{fname}")
      end

      node_list = gen_node_list[file_revlog]

      if node_list.any?
        result << Amp::Mercurial::RevlogSupport::ChangeGroup.chunk_header(fname.size)
        result << fname

        lookup = lookup_revlink_func[file_revlog] # Proc#call
        # more changegroups
        file_revlog.group(node_list, lookup) {|chunk| result << chunk }
      end
    end
    result << Amp::Mercurial::RevlogSupport::ChangeGroup.closing_chunk

    run_hook :post_outgoing, :node => nodes[0].hexlify, :source => source

    result
  end

  # Serialize the chunks into a binary-safe StringIO and rewind it.
  s = StringIO.new "",(ruby_19? ? "w+:ASCII-8BIT" : "w+")
  generate_group[].each {|chunk| s.write chunk }
  s.rewind
  s
end
|
825
|
+
|
826
|
+
##
# This function generates a changegroup consisting of all the nodes
# that are descendents of any of the bases, and ancestors of any of
# the heads.
#
# It is fairly complex in determining which filenodes and which
# manifest nodes need to be included for the changeset to be complete
# is non-trivial.
#
# Another wrinkle is doing the reverse, figuring out which changeset in
# the changegroup a particular filenode or manifestnode belongs to.
#
# The caller can specify some nodes that must be included in the
# changegroup using the extranodes argument. It should be a dict
# where the keys are the filenames (or 1 for the manifest), and the
# values are lists of (node, linknode) tuples, where node is a wanted
# node and linknode is the changelog node that should be transmitted as
# the linkrev.
#
# MAD SHOUTZ to Eric Hopper, who actually had the balls to document a
# good chunk of this code in the Python. He is a really great man, and
# deserves whatever thanks we can give him. *Peace*
#
# @param [Array<String>] bases nodes assumed to be known to the recipient
# @param [Array<String>] new_heads heads of the subset being generated
# @param [Symbol, String] source passed through to the outgoing hooks
# @param [String => [(String, String)]] extra_nodes the key is a filename
#   and the value is a list of (node, link_node) tuples
# @return [StringIO] a rewound stream containing the changegroup data
#   (NOTE(review): the last expression is `s.seek(0, IO::SEEK_SET)`, which
#   returns 0, not the stream — compare get_changegroup, which returns `s`;
#   verify the intended return value)
def changegroup_subset(bases, new_heads, source, extra_nodes=nil)
  unless extra_nodes
    # With no extra nodes requested and identical head sets on both sides,
    # we can fall back to the simpler get_changegroup path.
    if new_heads.sort! == heads.sort!
      common = []

      # parents of bases are known from both sides
      bases.each do |base|
        changelog.parents_for_node(base).each do |parent|
          common << parent unless parent.null? # == NULL_ID
        end # end each
      end # end each

      # BAIL
      return get_changegroup(common, source)
    end # end if
  end # end unless

  run_hook :pre_outgoing, :throw => true, :source => source # call dem hooks

  # missing changelog list, bases, and heads
  #
  # Some bases may turn out to be superfluous, and some heads may be as
  # well. #nodes_between will return the minimal set of bases and heads
  # necessary to recreate the changegroup.
  # missing_cl_list, bases, heads = changelog.nodes_between(bases, heads)
  btw = changelog.nodes_between(bases, heads)
  missing_cl_list, bases, heads = btw[:between], btw[:roots], btw[:heads]
  changegroup_info missing_cl_list, source

  # Known heads are the list of heads about which it is assumed the recipient
  # of this changegroup will know.
  known_heads = []

  # We assume that all parents of bases are known heads.
  bases.each do |base|
    changelog.parents_for_node(base).each do |parent|
      known_heads << parent
    end # end each
  end # end each

  if known_heads.any? # unless known_heads.empty?
    # Now that we know what heads are known, we can compute which
    # changesets are known. The recipient must know about all
    # changesets required to reach the known heads from the null
    # changeset.
    has_cl_set = changelog.nodes_between(nil, known_heads)[:between]

    # cast to a hash for latter usage
    has_cl_set = Hash.with_keys has_cl_set
  else
    # If there were no known heads, the recipient cannot be assumed to
    # know about any changesets.
    has_cl_set = {}
  end

  # We don't know which manifests are missing yet
  missing_mf_set = {}
  # Nor do we know which filenodes are missing.
  missing_fn_set = {}

  ########
  # Here are procs for further usage

  # A changeset always belongs to itself, so the changenode lookup
  # function for a changenode is +identity+
  identity = proc {|x| x }

  # A function generating function. Sets up an enviroment for the
  # inner function.
  cmp_by_rev_function = proc do |rvlg|
    # Compare two nodes by their revision number in the environment's
    # revision history. Since the revision number both represents the
    # most efficient order to read the nodes in, and represents a
    # topological sorting of the nodes, this function if often useful.
    proc {|a, b| rvlg.rev(a) <=> rvlg.rev(b) }
  end

  # If we determine that a particular file or manifest node must be a
  # node that the recipient of the changegroup will already have, we can
  # also assume the recipient will have all the parents. This function
  # prunes them from the set of missing nodes.
  prune_parents = proc do |rvlg, hasses, missing|
    has_list = hasses.keys
    # NOTE(review): `cmp_by_rev_function(rvlg)` invokes a *method* named
    # cmp_by_rev_function (parens on a local proc do not call it); the
    # rest of this file uses `cmp_by_rev_function[...]` — confirm this
    # should be `cmp_by_rev_function[rvlg]`.
    has_list.sort!(&cmp_by_rev_function(rvlg))

    # NOTE(review): `parent_list` is first assigned inside this block, so
    # it is not visible in the `while` loop below (block-local scope);
    # also `revlog`/`parent_for_node` look like typos for the `rvlg`
    # parameter and `parents_for_node` — verify against the Python original,
    # which accumulates parents across all of has_list.
    has_list.each do |node|
      parent_list = revlog.parent_for_node(node).select {|p| p.not_null? }
    end

    while parent_list.any?
      n = parent_list.pop
      unless hasses.include? n
        hasses[n] = 1
        # NOTE(review): `node` here refers to the loop variable of the
        # each-block above and is out of scope at this point — suspect
        # this should be `n`.
        p = revlog.parent_for_node(node).select {|p| p.not_null? }
        parent_list += p
      end
    end

    # NOTE(review): `hasses` is a Hash, so `|n|` receives a [key, value]
    # pair, and Hash#slice! is not the analogue of Python's
    # `missing.pop(n, None)` — suspect `missing.delete(key)` was intended.
    hasses.each do |n|
      missing.slice!(n - 1, 1) # pop(n, None)
    end
  end

  # This is a function generating function used to set up an environment
  # for the inner funciont to execute in.
  manifest_and_file_collector = proc do |changed_fileset|
    # This is an information gathering function that gathers
    # information from each changeset node that goes out as part of
    # the changegroup. The information gathered is a list of which
    # manifest nodes are potentially required (the recipient may already
    # have them) and total list of all files which were changed in any
    # changeset in the changegroup.
    #
    # We also remember the first changenode we saw any manifest
    # referenced by so we can later determine which changenode owns
    # the manifest.

    # this is what we're returning
    proc do |cl_node|
      c = changelog.read cl_node
      c[3].each do |f|
        # This is to make sure we only have one instance of each
        # filename string for each filename
        changed_fileset[f] ||= f
      end # end each

      missing_mf_set[c[0]] ||= cl_node
    end # end proc
  end # end proc

  # Figure out which manifest nodes (of the ones we think might be part
  # of the changegroup) the recipients must know about and remove them
  # from the changegroup.
  prune_manifest = proc do
    has_mnfst_set = {}
    missing_mf_set.values.each do |node|
      # If a 'missing' manifest thinks it belongs to a changenode
      # the recipient is assumed to have, obviously the recipient
      # must have the manifest.
      link_node = changelog.node manifest.link_rev(manifest.revision_index_for_node(node))
      # NOTE(review): `n` is not defined in this scope — suspect the loop
      # variable `node` was intended here.
      has_mnfst_set[n] = 1 if has_cl_set.include? link_node
    end # end each

    prune_parents[manifest, has_mnfst_set, missing_mf_set] # Proc#call
  end # end proc

  # Use the information collected in collect_manifests_and_files to say
  # which changenode any manifestnode belongs to.
  lookup_manifest_link = proc {|node| missing_mf_set[node] }

  # A function generating function that sets up the initial environment
  # the inner function.
  filenode_collector = proc do |changed_files|
    next_rev = []

    # This gathers information from each manifestnode included in the
    # changegroup about which filenodes the manifest node references
    # so we can include those in the changegroup too.
    #
    # It also remembers which changenode each filenode belongs to. It
    # does this by assuming the a filenode belongs to the changenode
    # the first manifest that references it belongs to.
    collect_missing_filenodes = proc do |node|
      r = manifest.rev node

      if r == next_rev[0]

        # If the last rev we looked at was the one just previous,
        # we only need to see a diff.
        delta_manifest = manifest.read_delta node

        # For each line in the delta
        delta_manifest.each do |f, fnode|
          f = changed_files[f]

          # And if the file is in the list of files we care
          # about.
          if f
            # Get the changenode this manifest belongs to
            cl_node = missing_mf_set[node]

            # Create the set of filenodes for the file if
            # there isn't one already.
            ndset = missing_fn_set[f] ||= {}

            # And set the filenode's changelog node to the
            # manifest's if it hasn't been set already.
            ndset[fnode] ||= cl_node
          end
        end
      else
        # Otherwise we need a full manifest.
        m = manifest.read node

        # For every file in we care about.
        changed_files.each do |f|
          fnode = m[f]

          # If it's in the manifest
          if fnode
            # See comments above.
            # NOTE(review): neither `msng_mnfst_set` nor `mnfstnode` is
            # defined anywhere in this method — the Python names leaked in;
            # suspect `missing_mf_set[node]` was intended, as in the
            # delta branch above.
            cl_node = msng_mnfst_set[mnfstnode]
            ndset = missing_fn_set[f] ||= {}
            ndset[fnode] ||= cl_node
          end
        end
      end

      # Remember the revision we hope to see next.
      next_rev[0] = r + 1
    end # end proc
  end # end proc

  # We have a list of filenodes we think need for a file, let's remove
  # all those we know the recipient must have.
  prune_filenodes = proc do |f, f_revlog|
    missing_set = missing_fn_set[f]
    hasset = {}

    # If a 'missing' filenode thinks it belongs to a changenode we
    # assume the recipient must have, the the recipient must have
    # that filenode.
    # NOTE(review): `missing_set` is a Hash, so `|n|` receives a
    # [filenode, linknode] pair here — `f_revlog[n]` probably wants just
    # the filenode key; verify.
    missing_set.each do |n|
      cl_node = changelog.node f_revlog[n].link_rev
      hasset[n] = true if has_cl_set.include? cl_node
    end

    prune_parents[f_revlog, hasset, missing_set] # Proc#call
  end # end proc

  # Function that returns a function.
  lookup_filenode_link_func = proc do |name|
    missing_set = missing_fn_set[name]

    # lookup the changenode the filenode belongs to
    lookup_filenode_link = proc do |node|
      missing_set[node]
    end # end proc
  end # end proc

  # add the nodes that were explicitly requested.
  # NOTE(review): `return` inside a (non-lambda) proc returns from the
  # enclosing *method*, not just the proc — `next` is the usual idiom here;
  # verify whether an early method exit is really intended.
  add_extra_nodes = proc do |name, nodes|
    return unless extra_nodes && extra_nodes[name]

    extra_nodes[name].each do |node, link_node|
      nodes[node] = link_node unless nodes[node]
    end

  end

  # Now that we have all theses utility functions to help out and
  # logically divide up the task, generate the group.
  # NOTE(review): the `yield`s inside this proc deliver chunks to the block
  # passed to *changegroup_subset itself*, not to the block given to
  # `generate_group.call` at the bottom — confirm the chunks actually reach
  # the StringIO writer below.
  generate_group = proc do
    changed_files = {}
    group = changelog.group(missing_cl_list, identity, &manifest_and_file_collector[changed_files])
    group.each { |chunk| yield chunk }
    # NOTE(review): the proc above is named `prune_manifest` (singular) —
    # `prune_manifests` is undefined here.
    prune_manifests.call
    # NOTE(review): `msng_mnfst_set` / `msng_mnfst_lst` are never
    # initialized in this method — suspect these should all be
    # `missing_mf_set`, which the collector procs populate.
    add_extra_nodes[1, msng_mnfst_set]
    msng_mnfst_lst = msng_mnfst_set.keys

    msng_mnfst_lst.sort!(&cmp_by_rev_function[manifest])

    # NOTE(review): `lookup_filenode_link` is only a local inside
    # lookup_filenode_link_func's proc and is not in scope here — suspect
    # `lookup_manifest_link` (defined above) was intended.
    group = manifest.group(msng_mnfst_lst, lookup_filenode_link,
                           filenode_collector[changed_files])

    group.each {|chunk| yield chunk }

    msng_mnfst_lst = nil
    msng_mnfst_set.clear

    if extra_nodes
      extra_nodes.each do |fname|
        next if fname.kind_of?(Integer)
        msng_mnfst_set[fname] ||= {}
        changed_files[fname] = true
      end
    end

    # NOTE(review): `changed_files` is a Hash, so `.sort.each` yields
    # [filename, filename] pairs — `fname` is a 2-element Array here,
    # not a String; verify against the Python, which iterates sorted keys.
    changed_files.sort.each do |fname|
      file_revlog = file(fname)
      unless file_revlog.size > 0
        raise abort("empty or missing revlog for #{fname}")
      end

      if msng_mnfst_set[fname]
        prune_filenodes[fname, file_revlog]
        add_extra_nodes[fname, missing_fn_set[fname]]
        missing_fn_list = missing_fn_set[fname].keys
      else
        missing_fn_list = []
      end

      if missing_fn_list.size > 0
        yield ChangeGroup.chunk_header(fname.size)
        yield fname
        missing_fn_list.sort!(&cmp_by_rev_function[file_revlog])
        group = file_revlog.group(missing_fn_list,
                                  lookup_filenode_link_func[fname])
        group.each {|chunk| yield chunk }
      end
      if missing_fn_set[fname]
        missing_fn_set.delete fname
      end
    end

    # NOTE(review): elsewhere in this file the terminator is spelled
    # `ChangeGroup.closing_chunk` — confirm `close_chunk` exists.
    yield ChangeGroup.close_chunk

    if missing_cl_list
      run_hook :post_outgoing
    end
  end # end proc

  # Buffer the generated chunks into a binary-safe StringIO.
  s = StringIO.new "",(ruby_19? ? "w+:ASCII-8BIT" : "w+")
  generate_group.call do |chunk|
    s.write chunk
  end
  s.seek(0, IO::SEEK_SET)

end # end def
|
1171
|
+
|
1172
|
+
##
# Revert a file or group of files to +revision+, rewriting their contents
# in the working directory from that changeset.
#
# @param [Array<String>] files a list of files to revert (nil reverts
#   everything the status report touches)
# @param [Hash] opts revision selection (:revision / :rev / :to) plus
#   :include / :exclude matcher options
# @return [Boolean] a success marker
def revert(files=nil, opts={})
  # Working-directory parents; a live second parent means an
  # uncommitted merge is in progress.
  parent, p2 = dirstate.parents

  # The target revision may arrive under any of these option keys.
  rev = opts[:revision] || opts[:rev] || opts[:to]

  # Reverting during an uncommitted merge is ambiguous without an
  # explicit revision.
  if rev.nil? && p2 != Amp::Mercurial::RevlogSupport::Node::NULL_ID
    raise abort("uncommitted merge - please provide a specific revision")
  end

  # Build a matcher only when specific files were requested; a nil
  # matcher passed as :match => nil behaves as though absent.
  matcher = nil
  if files
    matcher = Amp::Match.create(:files    => files,
                                :includer => opts[:include],
                                :excluder => opts[:exclude])
  end

  # The changeset whose contents we restore from.
  changeset = self[rev]

  # Diff the working directory against +rev+ to find what to touch.
  stats = status :node_1 => rev, :match => matcher

  # Helper: overwrite +path+ with its data as of the target changeset.
  restore_file = lambda do |path|
    File.open path, 'w' do |file|
      file.write changeset.get_file(path).data
    end
  end

  # MODIFIED and DELETED: write the old data back over the files.
  (stats[:modified] + stats[:deleted]).each do |path|
    restore_file[path]
    UI::status "restored\t#{path}"
  end

  # REMOVED: scheduled for removal and already gone from disk —
  # recreate them and tell the dirstate they are clean again.
  stats[:removed].each do |path|
    restore_file[path]
    dirstate.normal path # pretend nothing happened
    UI::status "saved\t#{path}"
  end

  # ADDED: these files appeared after +rev+, so they must go away.
  stats[:added].each do |path|
    remove path
    UI::status "destroyed\t#{path}"
  end

  true # success marker
end
|
1248
|
+
|
1249
|
+
##
# Return list of roots of the subsets of missing nodes from remote
#
# If base dict is specified, assume that these nodes and their parents
# exist on the remote side and that no child of a node of base exists
# in both remote and self.
# Furthermore base will be updated to include the nodes that exists
# in self and remote but no children exists in self and remote.
# If a list of heads is specified, return only nodes which are heads
# or ancestors of these heads.
#
# All the ancestors of base are in self and in remote.
# All the descendants of the list returned are missing in self.
# (and so we know that the rest of the nodes are missing in remote, see
# outgoing)
#
# @return [Array<String>] the nodes that are missing from the local repository
#   but are present in the foreign repo. These are the nodes that will be
#   coming in over the wire.
def find_incoming_roots(remote, opts={:base => nil, :heads => nil,
                                      :force => false})
  # common_nodes returns [common, missing, remote_heads]; the missing
  # nodes (index 1) are the incoming roots.
  # (The original default hash listed `:base => nil` twice, which is
  # redundant and triggers a duplicate-key warning on modern Rubies.)
  common_nodes(remote, opts)[1]
end
|
1272
|
+
|
1273
|
+
##
# Find the common nodes, missing nodes, and remote heads.
#
# So in this code, we use opts[:base] and fetch as hashes
# instead of arrays. We could very well use arrays, but hashes have
# O(1) lookup time, and since these could get RFH (Really Fucking
# Huge), we decided to take the liberty and just use hash for now.
#
# If opts[:base] (Hash) is specified, assume that these nodes and their parents
# exist on the remote side and that no child of a node of base exists
# in both remote and self.
# Furthermore base will be updated to include the nodes that exists
# in self and remote but no children exists in self and remote.
# If a list of heads is specified, return only nodes which are heads
# or ancestors of these heads.
#
# All the ancestors of base are in self and in remote.
#
# @param [Amp::Repository] remote the repository we're pulling from
# @return [(Array<String>, Array<String>, Array<String>)] the common nodes,
#   missing nodes, and remote heads
def common_nodes(remote, opts={:heads => nil, :force => nil, :base => nil})
  # variable prep!
  node_map = changelog.node_map
  search = []                   # incomplete branches to binary-search later
  unknown = []                  # heads the local repo has never seen
  fetch = {}                    # nodes we have determined we must fetch
  seen = {}                     # branch heads already examined
  seen_branch = {}              # whole branch tuples already examined
  opts[:base] ||= {}
  opts[:heads] ||= remote.heads

  # if we've got nothing...
  if changelog.tip == NULL_ID
    opts[:base][NULL_ID] = true # 1 is stored in the Python

    # Empty local repo: everything the remote has is missing.
    return [NULL_ID], [NULL_ID], opts[:heads].dup unless opts[:heads] == [NULL_ID]
    return [NULL_ID], [], [] # if we didn't trip ^, we're returning this
  end

  # assume we're closer to the tip than the root
  # and start by examining heads
  UI::status 'searching for changes'

  # Partition remote heads into ones we already know (common) and
  # ones we have to investigate.
  opts[:heads].each do |head|
    if !node_map.include?(head)
      unknown << head
    else
      opts[:base][head] = true # 1 is stored in the Python
    end
  end

  opts[:heads] = unknown # the ol' switcheroo
  return opts[:base].keys, [], [] if unknown.empty? # BAIL

  # make a hash with keys of unknown
  requests = Hash.with_keys unknown
  count = 0

  # Search through the remote branches
  # a branch here is a linear part of history, with 4 (four)
  # parts:
  #
  # head, root, first parent, second parent
  # (a branch always has two parents (or none) by definition)
  #
  # Here's where we start using the Hashes instead of Arrays
  # trick. Keep an eye out for opts[:base] and opts[:heads]!
  unknown = remote.branches(*unknown)
  until unknown.empty?
    r = []  # parents we still need branch info for in this round

    while node = unknown.shift
      next if seen.include?(node[0])
      UI::debug "examining #{short node[0]}:#{short node[1]}"

      if node[0] == NULL_ID
        # Do nothing...
      elsif seen_branch.include? node
        UI::debug 'branch already found'
        next
      elsif node_map.include? node[1]
        # We know the root but not the head: the boundary lies somewhere
        # inside this branch — remember it for the binary search below.
        UI::debug "found incomplete branch #{short node[0]}:#{short node[1]}"
        search << node[0..1]
        seen_branch[node] = true # 1 in the python
      else
        unless seen.include?(node[1]) || fetch.include?(node[1])
          # Both parents known locally means this branch root is a brand
          # new changeset we must fetch.
          if node_map.include?(node[2]) and node_map.include?(node[3])
            UI::debug "found new changset #{short node[1]}"
            fetch[node[1]] = true # 1 in the python
          end # end if

          node[2..3].each do |p|
            opts[:base][p] = true if node_map.include? p
          end
        end # end unless

        # Queue unknown parents for the next round of branch requests.
        node[2..3].each do |p|
          unless requests.include?(p) || node_map.include?(p)
            r << p
            requests[p] = true # 1 in the python
          end # end unless
        end # end each
      end # end if

      seen[node[0]] = true # 1 in the python
    end # end while

    unless r.empty?
      count += 1

      UI::debug "request #{count}: #{r.map{|i| short i }}"

      # Ask the remote about parents in batches of ten.
      # NOTE(review): this passes the slice as a single Array argument,
      # while the earlier call splats (`remote.branches(*unknown)`) and
      # #branches is declared as (*nodes) — confirm a splat isn't needed.
      (0 .. (r.size-1)).step(10) do |p|
        remote.branches(r[p..(p+9)]).each do |b|
          UI::debug "received #{short b[0]}:#{short b[1]}"
          unknown << b
        end
      end
    end # end unless
  end # end until

  # sorry for the ambiguous variable names
  # the python doesn't name them either, which
  # means I have no clue what these are
  # (item1 is a node to fetch; item2 is a node both sides share.)
  find_proc = proc do |item1, item2|
    fetch[item1] = true
    opts[:base][item2] = true
  end

  # do a binary search on the branches we found
  search, new_count = *binary_search(:find => search,
                                     :repo => remote,
                                     :node_map => node_map,
                                     :on_find => find_proc)
  count += new_count # keep keeping track of the total

  # sanity check, because this method is sooooo fucking long
  fetch.keys.each do |f|
    if node_map.include? f
      raise RepoError.new("already have changeset #{short f[0..3]}")
    end
  end

  # Only the null node in common means the two repos share no history.
  if opts[:base].keys == [NULL_ID]
    if opts[:force]
      UI::warn 'repository is unrelated'
    else
      raise RepoError.new('repository is unrelated')
    end
  end

  UI::debug "found new changesets starting at #{fetch.keys.map{|f| short f }.join ' '}"
  UI::debug "#{count} total queries"

  # on with the show!
  [opts[:base].keys, fetch.keys, opts[:heads]]
end
|
1431
|
+
|
1432
|
+
##
# Returns the number of revisions the repository is tracking.
#
# @return [Integer] how many revisions there have been
def size
  # The changelog has exactly one entry per revision, so its size is
  # the repository's revision count.
  changelog.size
end
|
1439
|
+
|
1440
|
+
##
# Forgets an added file or files from the repository. Doesn't delete the
# files, it just says "don't add this on the next commit."
#
# Please note that this has different semantics from {DirState#forget}
#
# @param [Array, String] list a file path (or list of file paths) to
#   "forget".
# @return [Boolean] success marker
def forget(list)
  lock_working do
    forgot_any = false

    # (The original used `list.any? { ... }`, which short-circuits after
    # the first file successfully forgotten, silently skipping the rest
    # of the list. Iterate the whole list instead.)
    [*list].each do |f|
      if dirstate[f].status != :added
        # Only files in the "added" state can be forgotten.
        UI.warn "#{f} not being added! can't forget it"
      else
        dirstate.forget f
        forgot_any = true
      end
    end

    # Persist the dirstate only if something actually changed.
    dirstate.write if forgot_any
  end

  true
end
|
1468
|
+
|
1469
|
+
##
# Returns the parents that aren't NULL_ID
#
# @return [Array<String>] the dirstate parents, minus the null node
def living_parents
  # Drop the null sentinel; whatever remains is a real parent.
  dirstate.parents.reject {|node| node == NULL_ID }
end
|
1474
|
+
|
1475
|
+
##
# There are two ways to push to remote repo:
#
# addchangegroup assumes local user can lock remote
# repo (local filesystem, old ssh servers).
#
# unbundle assumes local user cannot lock remote repo (new ssh
# servers, http servers).
#
# @param [Repository] remote_repo the remote repository object to push to
# @param [Hash] options extra options for pushing
# @option options [Boolean] :force (false) Force pushing, even if it would create
#   new heads (or some other error arises)
# @option options [Array<Fixnum, String>] :revs ([]) specify which revisions to push
# @return [Boolean] for success/failure
def push(remote_repo, opts={:force => false, :revs => nil})
  # Servers advertising "unbundle" can't be locked by us; everything
  # else takes a changegroup directly.
  strategy = remote_repo.capable?("unbundle") ? :push_unbundle : :push_add_changegroup
  send strategy, remote_repo, opts
end
|
1497
|
+
|
1498
|
+
##
# Push and add a changegroup
# (used when we are allowed to lock the remote repository directly)
# @todo -- add default values for +opts+
def push_add_changegroup(remote, opts={})
  # No local locking is performed for this strategy.
  result = pre_push(remote, opts)
  changegroup = result.first

  if changegroup
    # pre_push succeeded: hand the changegroup straight to the remote.
    remote.add_changegroup changegroup, :push, url
  else
    # pre_push signalled failure; its second element is the outcome.
    result[1]
  end
end
|
1512
|
+
|
1513
|
+
##
# Push an unbundled dohickey
# @todo -- add default values for +opts+
def push_unbundle(remote, opts={})
  # local repo finds heads on server, finds out what revs it
  # must push. once revs transferred, if server finds it has
  # different heads (someone else won commit/push race), server
  # aborts.
  result = pre_push(remote, opts)
  changegroup = result.first

  # pre_push signalled failure: pass its outcome through.
  return result[1] unless changegroup

  remote_heads = result[1]
  # A forced push tells the server to skip the push-race head check.
  remote_heads = ['force'] if opts[:force]
  remote.unbundle changegroup, remote_heads, :push
end
|
1532
|
+
|
1533
|
+
##
# Return list of nodes that are roots of subsets not in remote
#
# If base dict is specified, assume that these nodes and their parents
# exist on the remote side.
# If a list of heads is specified, return only nodes which are heads
# or ancestors of these heads, and return a second element which
# contains all remote heads which get new children.
#
# @param [Amp::Repository] remote the repository we're pushing to
# @return [Array<String>, (Array<String>, Array<String>)] the outgoing
#   roots; when :heads was given, a pair of [roots, updated remote heads]
def find_outgoing_roots(remote, opts={:base => nil, :heads => nil, :force => false})
  base, heads, force = opts[:base], opts[:heads], opts[:force]
  if base.nil?
    # No common-node information supplied: compute it. find_incoming_roots
    # fills the +base+ hash in place via opts[:base].
    base = {}
    find_incoming_roots remote, :base => base, :heads => heads, :force => force
  end

  UI::debug("common changesets up to "+base.keys.map {|k| k.short_hex}.join(" "))

  # Start with every local node, then prune away everything the
  # remote already has.
  remain = Hash.with_keys changelog.node_map.keys, nil

  # prune everything remote has from the tree
  remain.delete NULL_ID
  remove = base.keys
  # Walk ancestors of the common nodes, removing them from +remain+.
  while remove.any?
    node = remove.shift
    if remain.include? node
      remain.delete node
      changelog.parents_for_node(node).each {|p| remove << p }
    end
  end

  # find every node whose parents have been pruned
  subset = []
  # find every remote head that will get new children
  updated_heads = {}
  remain.keys.each do |n|
    p1, p2 = changelog.parents_for_node n
    # Neither parent survives the pruning => +n+ is a root of an
    # outgoing subset.
    subset << n unless remain.include?(p1) || remain.include?(p2)
    if heads && heads.any?
      updated_heads[p1] = true if heads.include? p1
      updated_heads[p2] = true if heads.include? p2
    end
  end

  # this is the set of all roots we have to push
  if heads && heads.any?
    return subset, updated_heads.keys
  else
    return subset
  end
end
|
1583
|
+
|
1584
|
+
##
# The branches available in this repository.
#
# Each entry is a 4-tuple: [starting node, branch anchor, parent1, parent2].
#
# @param [Array<String>] nodes the list of nodes. this can be optionally left empty
# @return [Array<Array<String>>] the branches, active and inactive!
def branches(*nodes)
  nodes = [changelog.tip] if nodes.empty?

  # For each starting node, walk down the first-parent chain:
  #
  #        node
  #       /    \
  #   parent1  parent2
  #     ....    ....
  #
  # We stop at the first merge (a real second parent) or at a root
  # (null first parent); that node anchors the branch.
  nodes.map do |node|
    current = node
    entry = nil
    while entry.nil?
      p1, p2 = changelog.parents_for_node current
      if p2 != NULL_ID || p1 == NULL_ID
        entry = [node, current, p1, p2]
      else
        current = p1 # keep following the left-hand side
      end
    end
    entry
  end
end
|
1616
|
+
|
1617
|
+
##
# Undelete a file. For instance, if you remove something and then
# find out that you NEED that file, you can use this command.
#
# @param [[String]] list the files to be undeleted
def undelete(list)
  # Manifests of the living (non-null) dirstate parents — restored
  # contents come from whichever parent knows the file.
  manifests = living_parents.map do |p|
    manifest.read changelog.read(p).first
  end

  # now we actually restore the files
  # (The original block variable was named +file+ — shadowing the +file+
  # revlog accessor — and the body then referenced an undefined variable
  # +f+, raising NameError at runtime. The loop variable is renamed and
  # used consistently.)
  list.each do |filename|
    unless dirstate[filename].removed?
      UI.warn "#{filename} isn't being removed!"
    else
      m = manifests[0] || manifests[1]
      data = file(filename).read m[filename]
      add_file filename, data, m.flags(filename) # add_file is wwrite in the python
      dirstate.normal filename # we know it's clean, we just restored it
    end
  end
end
|
1639
|
+
alias_method :restore, :undelete
|
1640
|
+
|
1641
|
+
##
# Write data to a file in the CODE repo, not the .hg
#
# @param [String] file_name
# @param [String] data (no trailing newlines are appended)
# @param [[String]] flags we're really just looking for links
#   and executables, here
def add_file(file_name, data, flags)
  decoded = filter "decode", file_name, data
  target = working_join file_name

  # Clear out whatever currently occupies the path; a missing file is fine.
  File.unlink target rescue nil

  unless flags.include? 'l'
    # Regular file: write the decoded contents, then mark executable
    # if the 'x' flag is present.
    @file_opener.open(target, 'w') {|f| f.write decoded }
    File.set_flag target, false, true if flags.include? 'x'
  else
    # Symbolic link: the decoded data is the link target.
    @file_opener.symlink target, decoded
  end
end
|
1661
|
+
|
1662
|
+
##
# Returns the node_id's of the heads of the repository, ordered with the
# highest revision number first.
#
# @param [String] start (nil) optional node to begin searching from
# @param [Hash] options
# @option options [Boolean] :closed (true) include heads of closed branches?
def heads(start=nil, options={:closed => true})
  candidates = changelog.heads(start)

  unless options[:closed]
    # Drop heads whose changeset extras mark the branch as closed.
    candidates = candidates.reject {|head| changelog.read(head)[5]["close"] }
  end

  # Highest revision number first.
  candidates.sort_by {|head| -changelog.rev(head) }
end
|
1677
|
+
|
1678
|
+
##
|
1679
|
+
# Walk recursively through the directory tree (or a changeset)
|
1680
|
+
# finding all files matched by the match function
|
1681
|
+
#
|
1682
|
+
# @param [String, Integer] node selects which changeset to walk
|
1683
|
+
# @param [Amp::Match] match the matcher decides how to pick the files
|
1684
|
+
# @param [Array<String>] an array of filenames
|
1685
|
+
def walk(node=nil, match = Match.create({}) { true })
  # delegate the actual traversal to Changeset#walk
  changeset = self[node]
  changeset.walk match
end
|
1688
|
+
|
1689
|
+
##
|
1690
|
+
# Returns the requested file at the given revision annotated by
|
1691
|
+
# line number, so you can see who committed which lines in the file's
|
1692
|
+
# history.
|
1693
|
+
#
|
1694
|
+
# @param file The name of the file to annotate
|
1695
|
+
# @param [Integer, String] rev (nil) The revision to look at for
|
1696
|
+
# annotation
|
1697
|
+
def annotate(file, revision=nil, opts={})
  # look the file up in the requested changeset and hand off to
  # the versioned file's own annotate
  self[revision][file].annotate opts[:follow_copies], opts[:line_numbers]
end
|
1701
|
+
|
1702
|
+
##
|
1703
|
+
# Clone a repository.
|
1704
|
+
#
|
1705
|
+
# Here is what this does, pretty much:
|
1706
|
+
# % amp init monkey
|
1707
|
+
# % cd monkey
|
1708
|
+
# % amp pull http://monkey
|
1709
|
+
#
|
1710
|
+
# It's so simple it's not even funny.
|
1711
|
+
#
|
1712
|
+
# @param [Amp::Repository] remote repository to pull from
|
1713
|
+
# @param [Array<String>] heads list of revs to clone (forces use of pull)
|
1714
|
+
# @param [Boolean] stream do we stream from the remote source?
|
1715
|
+
def clone(remote, opts={:revs => [], :stream => false})
  # now, all clients that can request uncompressed clones can
  # read repo formats supported by all servers that can serve
  # them.

  # The streaming case:
  # if revlog format changes, client will have to check version
  # and format flags on "stream" capability, and use
  # uncompressed only if compatible.
  #
  # An explicit list of revisions forces the use of pull (we can't
  # stream a partial clone), so stream only when NO revs were given.
  # The previous test used `opts[:revs].any?`, which inverted that rule.
  if opts[:stream] && opts[:revs].empty? && remote.capable?('stream')
    stream_in remote
  else
    pull remote, :revs => opts[:revs]
  end
end
|
1730
|
+
|
1731
|
+
##
|
1732
|
+
# Stream in the data from +remote+.
|
1733
|
+
#
|
1734
|
+
# @param [Amp::Repository] remote repository to pull from
|
1735
|
+
# @return [Integer] the number of heads in the repository minus 1
|
1736
|
+
def stream_in(remote)
  remote.stream_out do |f|
    l = f.gets # this should be the server response code

    # Kernel#Integer raises on non-numeric input rather than returning
    # false/nil, so the old `unless Integer(l)` guard could never fire;
    # translate the parse failure into the intended ResponseError.
    code = begin
             Integer(l)
           rescue ArgumentError, TypeError
             raise ResponseError.new("Unexpected response from server: #{l}")
           end

    case code
    when 1
      raise RepoError.new("operation forbidden by server")
    when 2
      raise RepoError.new("locking the remote repository failed")
    end

    UI::status "streaming all changes"

    # header line is effectively [total_files, total_bytes].join ' '
    l = f.gets
    total_files, total_bytes = *l.split(' ').map {|i| i.to_i }[0..1]
    UI::status "#{total_files} file#{total_files == 1 ? '' : 's' } to transfer, #{total_bytes.to_human} of data"

    start = Time.now
    total_files.times do |i|
      # per-file header: name and size separated by a NUL byte
      l = f.gets
      name, size = *l.split("\0")[0..1]
      size = size.to_i
      UI::debug "adding #{name} (#{size.to_human})"

      # NOTE(review): +name+ is not passed to the opener here -- presumably
      # the store opener tracks the destination itself; verify.
      @store.opener.open do |store_file|
        chunk = f.read size # will return nil if at EOF
        store_file.write chunk if chunk
      end
    end

    elapsed = Time.now - start
    elapsed = 0.001 if elapsed <= 0 # guard the division below

    # (fixed: a space was missing between the elapsed time and "second")
    UI::status("transferred #{total_bytes.to_human} in #{elapsed} " +
               "second#{elapsed == 1.0 ? '' : 's' } (#{total_bytes.to_f / elapsed}/sec)")

    invalidate!
    heads.size - 1
  end
end
|
1780
|
+
|
1781
|
+
##
|
1782
|
+
# Invalidate the repository: delete things and reset others.
|
1783
|
+
def invalidate!
  # drop the memoized changelog and manifest so the next access
  # reloads them
  [:@changelog, :@manifest].each do |ivar|
    instance_variable_set ivar, nil
  end

  invalidate_tag_cache!
  invalidate_branch_cache!
end
|
1790
|
+
|
1791
|
+
##
|
1792
|
+
# Commits a changeset or set of files to the repository. You will quite often
|
1793
|
+
# use this method since it's basically the basis of version control systems.
|
1794
|
+
#
|
1795
|
+
# @api
|
1796
|
+
# @param [Hash] opts the options to this method are all optional, so it's a very
|
1797
|
+
# flexible method. Options listed below.
|
1798
|
+
# @option opts [Array] :modified ([]) which files have been added or modified
|
1799
|
+
# that you want to be added as a changeset.
|
1800
|
+
# @option opts [Array] :removed ([]) which files should be removed in this
|
1801
|
+
# commit?
|
1802
|
+
# @option opts [Hash] :extra ({}) any extra data, such as "close" => true
|
1803
|
+
# will close the active branch.
|
1804
|
+
# @option opts [String] :message ("") the message for the commit. An editor
|
1805
|
+
# will be opened if this is not provided.
|
1806
|
+
# @option opts [Boolean] :force (false) Forces the commit, ignoring minor details
|
1807
|
+
# like when you try to commit when no files have been changed.
|
1808
|
+
# @option opts [Match] :match (nil) A match object to specify how to pick files
|
1809
|
+
# to commit. These are useful so you don't accidentally commit ignored files,
|
1810
|
+
# for example.
|
1811
|
+
# @option opts [Array<String>] :parents (nil) the node IDs of the parents under
|
1812
|
+
# which this changeset will be committed. No more than 2 for mercurial.
|
1813
|
+
# @option opts [Boolean] :empty_ok (false) Is an empty commit message a-ok?
|
1814
|
+
# @option opts [Boolean] :force_editor (false) Do we force the editor to be
|
1815
|
+
# opened, even if :message is provided?
|
1816
|
+
# @option opts [String] :user (ENV["HGUSER"]) the username to associate with the commit.
|
1817
|
+
# Defaults to AmpConfig#username.
|
1818
|
+
# @option opts [DateTime, Time, Date] :date (Time.now) the date to mark with
|
1819
|
+
# the commit. Useful if you miss a deadline and want to pretend that you actually
|
1820
|
+
# made it!
|
1821
|
+
# @return [String] the digest referring to this entry in the changelog
|
1822
|
+
def commit(options={})
  # all of the heavy lifting (locking, dirstate handling) happens in
  # pre_commit; we just tell the prepared changeset to commit itself
  pre_commit(options) do |changeset, opts|
    changeset.commit opts
  end
end
|
1825
|
+
|
1826
|
+
##
|
1827
|
+
# Prepares a local changeset to be committed. It must take a block
|
1828
|
+
# and yield to it the changeset and any options to be passed to
|
1829
|
+
# {AbstractChangeset#commit}. This is only ever called by
|
1830
|
+
# {AbstractLocalRepository#commit}.
|
1831
|
+
#
|
1832
|
+
# @example def pre_commit(opts={})
|
1833
|
+
# cs = MyChangeset.new :text => 'asdf', :diff => "asdfasd"
|
1834
|
+
# raise "fail" if something_happens
|
1835
|
+
# yield cs, opts # well MTV, dis where da magic happen
|
1836
|
+
# true
|
1837
|
+
# end
|
1838
|
+
#
|
1839
|
+
# @yield [String] the result of {AbstractChangeset#commit}
|
1840
|
+
# @yieldparam [AbstractChangeset] the changeset to commit
|
1841
|
+
# @yieldparam [Hash] any options to be passed to {AbstractChangeset#commit}
|
1842
|
+
# @return [Boolean] success/failure
|
1843
|
+
def pre_commit(opts={})
  [:parents, :modified, :removed].each {|sym| opts[sym] ||= [] }
  opts[:extra] ||= {}
  opts[:force] = true if opts[:extra]["close"]

  # lock the working directory and store
  lock_working_and_store do
    opts[:use_dirstate] = opts[:parents][0].nil?

    # do we use the dirstate?
    if opts[:use_dirstate]
      p1, p2 = dirstate.parents

      if opts[:force] && # we're forcing the commit
         p2 != NULL_ID && # but we're merging two branches
         opts[:match] # and we have a partial matcher
        # was `raise StandardError("...")` -- not a constructor call, so it
        # raised NoMethodError instead of the intended error
        raise StandardError.new("cannot partially commit a merge")
      end

      opts[:update_dirstate] = opts[:modified].any?
    else
      p1, p2 = opts[:parents]
      p2 ||= NULL_ID

      opts[:update_dirstate] = dirstate.parents[0] == p1
    end

    # refuse to commit over unresolved merge conflicts
    opts[:modified].each do |file|
      if merge_state.unresolved? file
        raise StandardError.new("unresolved merge conflicts (see `amp resolve`)")
      end
    end

    changes = {:modified => opts[:modified], :removed => opts[:removed]}
    changeset = Amp::Mercurial::WorkingDirectoryChangeset.new self, :parents => [p1, p2],
                                                                   :text    => opts[:message],
                                                                   :user    => opts[:user],
                                                                   :date    => opts[:date],
                                                                   :extra   => opts[:extra],
                                                                   :changes => changes

    tailored_hash = opts.only :force, :force_editor, :empty_ok,
                              :use_dirstate, :update_dirstate
    revision = yield changeset, tailored_hash

    merge_state.reset
    return revision
  end # unlock working dir + store
end
|
1893
|
+
|
1894
|
+
private
|
1895
|
+
|
1896
|
+
##
|
1897
|
+
# Make the dummy changelog at .hg/00changelog.i
|
1898
|
+
def make_changelog
  # write a placeholder 00changelog.i so tools don't treat this as the
  # old repository format
  @hg_opener.open "00changelog.i", "w" do |changelog_file|
    changelog_file.write "\0\0\0\2" # magic bytes representing revlogv2
    changelog_file.write " dummy changelog to avoid using the old repo type"
  end
end
|
1904
|
+
|
1905
|
+
##
|
1906
|
+
# Write the requirements file. This returns the requirements passed
|
1907
|
+
# so that it can be the final method call in #init
|
1908
|
+
def write_requires(requirements)
  # one requirement per line in .hg/requires
  @hg_opener.open "requires", "w" do |requires_file|
    requirements.each do |requirement|
      requires_file.puts requirement
    end
  end
  # hand the list back so this can be the last call in #init
  requirements
end
|
1914
|
+
|
1915
|
+
|
1916
|
+
##
|
1917
|
+
# do a binary search
|
1918
|
+
# used by common_nodes
|
1919
|
+
#
|
1920
|
+
# Hash info!
|
1921
|
+
# :find => the stuff we're searching through
|
1922
|
+
# :on_find => what to do when we've got something new
|
1923
|
+
# :repo => usually the remote repo where we get new info from
|
1924
|
+
# :node_map => the nodes in the current changelog
|
1925
|
+
def binary_search(opts={})
  # Repeatedly narrows [known, unknown] node pairs by asking the remote
  # (via #between) for nodes at exponentially-spaced gaps, until each
  # boundary is pinpointed.  Used by common_nodes (per the header above).
  #
  # opts[:find]     => list of [known, unknown] node pairs still to narrow
  # opts[:on_find]  => callback invoked with (p, item) when a pair's
  #                    boundary has been found exactly
  # opts[:repo]     => the repo (usually remote) queried with #between
  # opts[:node_map] => node map of the local changelog -- membership here
  #                    means "we already know this node"
  #
  # Returns [remaining_search_list (empty on exit), number_of_passes].
  count = 0 # number of narrowing passes performed

  until opts[:find].empty?
    new_search = []
    count += 1

    # pair each search entry n with the remote's sample list for it
    zipped = opts[:find].zip opts[:repo].between(opts[:find])
    zipped.each do |(n, list)|
      list << n[1]
      p = n[0] # last node on the "known" side of the boundary so far
      f = 1    # gap size; doubles each step (exponential probing)

      list.each do |item|
        UI::debug "narrowing #{f}:#{list.size} #{short item}"

        if opts[:node_map].include? item
          if f <= 2
            # gap of <= 2 nodes: boundary located exactly
            opts[:on_find].call(p, item)
          else
            # still a gap between p and item -- narrow it next pass
            UI::debug "narrowed branch search to #{short p}:#{short item}"
            new_search << [p, item]
          end
          break
        end

        p, f = item, f*2
      end
    end

    opts[:find] = new_search
  end

  [opts[:find], count]
end
|
1963
|
+
|
1964
|
+
##
|
1965
|
+
# this is called before every push
|
1966
|
+
# @todo -- add default values for +opts+
|
1967
|
+
def pre_push(remote, opts={})
  common = {}
  remote_heads = remote.heads
  inc = common_nodes remote, :base => common, :heads => remote_heads, :force => true
  inc = inc[1]
  update, updated_heads = find_outgoing_roots remote, :base => common, :heads => remote_heads

  if opts[:revs]
    btw = changelog.nodes_between(update, opts[:revs])
    missing_cl, bases, heads = btw[:between], btw[:roots], btw[:heads]
  else
    bases, heads = update, changelog.heads
  end

  if bases.empty?
    UI::status 'no changes found'
    return nil, 1
  elsif !opts[:force]
    # check if we're creating new remote heads
    # to be a remote head after push, node must be either
    # - unknown locally
    # - a local outgoing head descended from update
    # - a remote head that's known locally and not
    #   ancestral to an outgoing head

    warn = false
    if remote_heads == [NULL_ID]
      warn = false
    elsif (opts[:revs].nil? || opts[:revs].empty?) && heads.size > remote_heads.size
      warn = true
    else
      # was `new_heads = heads`, which aliased +heads+ and so mutated it
      # when appending below; copy instead
      new_heads = heads.dup
      remote_heads.each do |r|
        if changelog.node_map.include? r
          desc = changelog.heads r, heads
          l = heads.select {|h| desc.include? h }

          new_heads << r if l.empty?
        else
          new_heads << r
        end
      end

      warn = true if new_heads.size > remote_heads.size
    end

    if warn
      UI::status 'abort: push creates new remote heads!'
      # (typo fix: was "use push -f to forge")
      UI::status '(did you forget to merge? use push -f to force)'
      return nil, 0
    elsif inc.any?
      UI::note 'unsynced remote changes!'
    end
  end

  if opts[:revs].nil?
    # use the fast path, no race possible on push
    cg = get_changegroup common.keys, :push
  else
    # was a bare `revs` (NameError); the revisions live in the option hash
    cg = changegroup_subset update, opts[:revs], :push
  end

  [cg, remote_heads]
end
|
2030
|
+
|
2031
|
+
end # localrepo
|
2032
|
+
end # repo
|
2033
|
+
end
|
2034
|
+
end
|