amp-pure 0.5.0

Files changed (295)
  1. data/.gitignore +1 -0
  2. data/.hgignore +26 -0
  3. data/AUTHORS +2 -0
  4. data/History.txt +6 -0
  5. data/LICENSE +37 -0
  6. data/MANIFESTO +7 -0
  7. data/Manifest.txt +294 -0
  8. data/README.md +116 -0
  9. data/Rakefile +102 -0
  10. data/SCHEDULE.markdown +12 -0
  11. data/STYLE +27 -0
  12. data/TODO.markdown +149 -0
  13. data/ampfile.rb +47 -0
  14. data/bin/amp +30 -0
  15. data/bin/amp1.9 +30 -0
  16. data/ext/amp/bz2/README.txt +39 -0
  17. data/ext/amp/bz2/bz2.c +1582 -0
  18. data/ext/amp/bz2/extconf.rb +77 -0
  19. data/ext/amp/bz2/mkmf.log +29 -0
  20. data/ext/amp/mercurial_patch/extconf.rb +5 -0
  21. data/ext/amp/mercurial_patch/mpatch.c +405 -0
  22. data/ext/amp/priority_queue/extconf.rb +5 -0
  23. data/ext/amp/priority_queue/priority_queue.c +947 -0
  24. data/ext/amp/support/extconf.rb +5 -0
  25. data/ext/amp/support/support.c +250 -0
  26. data/lib/amp.rb +200 -0
  27. data/lib/amp/commands/command.rb +507 -0
  28. data/lib/amp/commands/command_support.rb +137 -0
  29. data/lib/amp/commands/commands/config.rb +143 -0
  30. data/lib/amp/commands/commands/help.rb +29 -0
  31. data/lib/amp/commands/commands/init.rb +10 -0
  32. data/lib/amp/commands/commands/templates.rb +137 -0
  33. data/lib/amp/commands/commands/version.rb +7 -0
  34. data/lib/amp/commands/commands/workflow.rb +28 -0
  35. data/lib/amp/commands/commands/workflows/git/add.rb +65 -0
  36. data/lib/amp/commands/commands/workflows/git/copy.rb +27 -0
  37. data/lib/amp/commands/commands/workflows/git/mv.rb +23 -0
  38. data/lib/amp/commands/commands/workflows/git/rm.rb +60 -0
  39. data/lib/amp/commands/commands/workflows/hg/add.rb +53 -0
  40. data/lib/amp/commands/commands/workflows/hg/addremove.rb +86 -0
  41. data/lib/amp/commands/commands/workflows/hg/annotate.rb +46 -0
  42. data/lib/amp/commands/commands/workflows/hg/archive.rb +126 -0
  43. data/lib/amp/commands/commands/workflows/hg/branch.rb +28 -0
  44. data/lib/amp/commands/commands/workflows/hg/branches.rb +30 -0
  45. data/lib/amp/commands/commands/workflows/hg/bundle.rb +115 -0
  46. data/lib/amp/commands/commands/workflows/hg/clone.rb +95 -0
  47. data/lib/amp/commands/commands/workflows/hg/commit.rb +42 -0
  48. data/lib/amp/commands/commands/workflows/hg/copy.rb +31 -0
  49. data/lib/amp/commands/commands/workflows/hg/debug/dirstate.rb +32 -0
  50. data/lib/amp/commands/commands/workflows/hg/debug/index.rb +36 -0
  51. data/lib/amp/commands/commands/workflows/hg/default.rb +9 -0
  52. data/lib/amp/commands/commands/workflows/hg/diff.rb +30 -0
  53. data/lib/amp/commands/commands/workflows/hg/forget.rb +11 -0
  54. data/lib/amp/commands/commands/workflows/hg/heads.rb +25 -0
  55. data/lib/amp/commands/commands/workflows/hg/identify.rb +23 -0
  56. data/lib/amp/commands/commands/workflows/hg/import.rb +135 -0
  57. data/lib/amp/commands/commands/workflows/hg/incoming.rb +85 -0
  58. data/lib/amp/commands/commands/workflows/hg/info.rb +18 -0
  59. data/lib/amp/commands/commands/workflows/hg/log.rb +21 -0
  60. data/lib/amp/commands/commands/workflows/hg/manifest.rb +13 -0
  61. data/lib/amp/commands/commands/workflows/hg/merge.rb +53 -0
  62. data/lib/amp/commands/commands/workflows/hg/move.rb +28 -0
  63. data/lib/amp/commands/commands/workflows/hg/outgoing.rb +61 -0
  64. data/lib/amp/commands/commands/workflows/hg/pull.rb +74 -0
  65. data/lib/amp/commands/commands/workflows/hg/push.rb +20 -0
  66. data/lib/amp/commands/commands/workflows/hg/remove.rb +45 -0
  67. data/lib/amp/commands/commands/workflows/hg/resolve.rb +83 -0
  68. data/lib/amp/commands/commands/workflows/hg/revert.rb +53 -0
  69. data/lib/amp/commands/commands/workflows/hg/root.rb +13 -0
  70. data/lib/amp/commands/commands/workflows/hg/serve.rb +38 -0
  71. data/lib/amp/commands/commands/workflows/hg/status.rb +116 -0
  72. data/lib/amp/commands/commands/workflows/hg/tag.rb +69 -0
  73. data/lib/amp/commands/commands/workflows/hg/tags.rb +27 -0
  74. data/lib/amp/commands/commands/workflows/hg/tip.rb +13 -0
  75. data/lib/amp/commands/commands/workflows/hg/update.rb +27 -0
  76. data/lib/amp/commands/commands/workflows/hg/verify.rb +9 -0
  77. data/lib/amp/commands/commands/workflows/hg/view.rb +36 -0
  78. data/lib/amp/commands/dispatch.rb +181 -0
  79. data/lib/amp/commands/hooks.rb +81 -0
  80. data/lib/amp/dependencies/amp_support.rb +1 -0
  81. data/lib/amp/dependencies/amp_support/ruby_amp_support.rb +103 -0
  82. data/lib/amp/dependencies/minitar.rb +979 -0
  83. data/lib/amp/dependencies/priority_queue.rb +18 -0
  84. data/lib/amp/dependencies/priority_queue/c_priority_queue.rb +1 -0
  85. data/lib/amp/dependencies/priority_queue/poor_priority_queue.rb +46 -0
  86. data/lib/amp/dependencies/priority_queue/ruby_priority_queue.rb +525 -0
  87. data/lib/amp/dependencies/python_config.rb +211 -0
  88. data/lib/amp/dependencies/trollop.rb +713 -0
  89. data/lib/amp/dependencies/zip/ioextras.rb +155 -0
  90. data/lib/amp/dependencies/zip/stdrubyext.rb +111 -0
  91. data/lib/amp/dependencies/zip/tempfile_bugfixed.rb +186 -0
  92. data/lib/amp/dependencies/zip/zip.rb +1850 -0
  93. data/lib/amp/dependencies/zip/zipfilesystem.rb +609 -0
  94. data/lib/amp/dependencies/zip/ziprequire.rb +90 -0
  95. data/lib/amp/encoding/base85.rb +97 -0
  96. data/lib/amp/encoding/binary_diff.rb +82 -0
  97. data/lib/amp/encoding/difflib.rb +166 -0
  98. data/lib/amp/encoding/mercurial_diff.rb +378 -0
  99. data/lib/amp/encoding/mercurial_patch.rb +1 -0
  100. data/lib/amp/encoding/patch.rb +292 -0
  101. data/lib/amp/encoding/pure_ruby/ruby_mercurial_patch.rb +123 -0
  102. data/lib/amp/extensions/ditz.rb +41 -0
  103. data/lib/amp/extensions/lighthouse.rb +167 -0
  104. data/lib/amp/graphs/ancestor.rb +147 -0
  105. data/lib/amp/graphs/copies.rb +261 -0
  106. data/lib/amp/merges/merge_state.rb +164 -0
  107. data/lib/amp/merges/merge_ui.rb +322 -0
  108. data/lib/amp/merges/simple_merge.rb +450 -0
  109. data/lib/amp/profiling_hacks.rb +36 -0
  110. data/lib/amp/repository/branch_manager.rb +234 -0
  111. data/lib/amp/repository/dir_state.rb +950 -0
  112. data/lib/amp/repository/journal.rb +203 -0
  113. data/lib/amp/repository/lock.rb +207 -0
  114. data/lib/amp/repository/repositories/bundle_repository.rb +214 -0
  115. data/lib/amp/repository/repositories/http_repository.rb +377 -0
  116. data/lib/amp/repository/repositories/local_repository.rb +2661 -0
  117. data/lib/amp/repository/repository.rb +94 -0
  118. data/lib/amp/repository/store.rb +485 -0
  119. data/lib/amp/repository/tag_manager.rb +319 -0
  120. data/lib/amp/repository/updatable.rb +532 -0
  121. data/lib/amp/repository/verification.rb +431 -0
  122. data/lib/amp/repository/versioned_file.rb +475 -0
  123. data/lib/amp/revlogs/bundle_revlogs.rb +246 -0
  124. data/lib/amp/revlogs/changegroup.rb +217 -0
  125. data/lib/amp/revlogs/changelog.rb +338 -0
  126. data/lib/amp/revlogs/changeset.rb +521 -0
  127. data/lib/amp/revlogs/file_log.rb +165 -0
  128. data/lib/amp/revlogs/index.rb +493 -0
  129. data/lib/amp/revlogs/manifest.rb +195 -0
  130. data/lib/amp/revlogs/node.rb +18 -0
  131. data/lib/amp/revlogs/revlog.rb +1032 -0
  132. data/lib/amp/revlogs/revlog_support.rb +126 -0
  133. data/lib/amp/server/amp_user.rb +44 -0
  134. data/lib/amp/server/extension/amp_extension.rb +396 -0
  135. data/lib/amp/server/extension/authorization.rb +201 -0
  136. data/lib/amp/server/fancy_http_server.rb +252 -0
  137. data/lib/amp/server/fancy_views/_browser.haml +28 -0
  138. data/lib/amp/server/fancy_views/_diff_file.haml +13 -0
  139. data/lib/amp/server/fancy_views/_navbar.haml +17 -0
  140. data/lib/amp/server/fancy_views/changeset.haml +31 -0
  141. data/lib/amp/server/fancy_views/commits.haml +32 -0
  142. data/lib/amp/server/fancy_views/file.haml +35 -0
  143. data/lib/amp/server/fancy_views/file_diff.haml +23 -0
  144. data/lib/amp/server/fancy_views/harshcss/all_hallows_eve.css +72 -0
  145. data/lib/amp/server/fancy_views/harshcss/amy.css +147 -0
  146. data/lib/amp/server/fancy_views/harshcss/twilight.css +138 -0
  147. data/lib/amp/server/fancy_views/stylesheet.sass +175 -0
  148. data/lib/amp/server/http_server.rb +140 -0
  149. data/lib/amp/server/repo_user_management.rb +287 -0
  150. data/lib/amp/support/amp_config.rb +164 -0
  151. data/lib/amp/support/amp_ui.rb +287 -0
  152. data/lib/amp/support/docs.rb +54 -0
  153. data/lib/amp/support/generator.rb +78 -0
  154. data/lib/amp/support/ignore.rb +144 -0
  155. data/lib/amp/support/loaders.rb +93 -0
  156. data/lib/amp/support/logger.rb +103 -0
  157. data/lib/amp/support/match.rb +151 -0
  158. data/lib/amp/support/multi_io.rb +87 -0
  159. data/lib/amp/support/openers.rb +121 -0
  160. data/lib/amp/support/ruby_19_compatibility.rb +66 -0
  161. data/lib/amp/support/support.rb +1095 -0
  162. data/lib/amp/templates/blank.commit.erb +23 -0
  163. data/lib/amp/templates/blank.log.erb +18 -0
  164. data/lib/amp/templates/default.commit.erb +23 -0
  165. data/lib/amp/templates/default.log.erb +26 -0
  166. data/lib/amp/templates/template.rb +165 -0
  167. data/site/Rakefile +24 -0
  168. data/site/src/about/ampfile.haml +57 -0
  169. data/site/src/about/commands.haml +106 -0
  170. data/site/src/about/index.haml +33 -0
  171. data/site/src/about/performance.haml +31 -0
  172. data/site/src/about/workflows.haml +34 -0
  173. data/site/src/contribute/index.haml +65 -0
  174. data/site/src/contribute/style.haml +297 -0
  175. data/site/src/css/active4d.css +114 -0
  176. data/site/src/css/all_hallows_eve.css +72 -0
  177. data/site/src/css/all_themes.css +3299 -0
  178. data/site/src/css/amp.css +260 -0
  179. data/site/src/css/amy.css +147 -0
  180. data/site/src/css/blackboard.css +88 -0
  181. data/site/src/css/brilliance_black.css +605 -0
  182. data/site/src/css/brilliance_dull.css +599 -0
  183. data/site/src/css/cobalt.css +149 -0
  184. data/site/src/css/cur_amp.css +185 -0
  185. data/site/src/css/dawn.css +121 -0
  186. data/site/src/css/eiffel.css +121 -0
  187. data/site/src/css/espresso_libre.css +109 -0
  188. data/site/src/css/idle.css +62 -0
  189. data/site/src/css/iplastic.css +80 -0
  190. data/site/src/css/lazy.css +73 -0
  191. data/site/src/css/mac_classic.css +123 -0
  192. data/site/src/css/magicwb_amiga.css +104 -0
  193. data/site/src/css/pastels_on_dark.css +188 -0
  194. data/site/src/css/reset.css +55 -0
  195. data/site/src/css/slush_poppies.css +85 -0
  196. data/site/src/css/spacecadet.css +51 -0
  197. data/site/src/css/sunburst.css +180 -0
  198. data/site/src/css/twilight.css +137 -0
  199. data/site/src/css/zenburnesque.css +91 -0
  200. data/site/src/get/index.haml +32 -0
  201. data/site/src/helpers.rb +121 -0
  202. data/site/src/images/amp_logo.png +0 -0
  203. data/site/src/images/carbonica.png +0 -0
  204. data/site/src/images/revolution.png +0 -0
  205. data/site/src/images/tab-bg.png +0 -0
  206. data/site/src/images/tab-sliding-left.png +0 -0
  207. data/site/src/images/tab-sliding-right.png +0 -0
  208. data/site/src/include/_footer.haml +22 -0
  209. data/site/src/include/_header.haml +17 -0
  210. data/site/src/index.haml +104 -0
  211. data/site/src/learn/index.haml +46 -0
  212. data/site/src/scripts/jquery-1.3.2.min.js +19 -0
  213. data/site/src/scripts/jquery.cookie.js +96 -0
  214. data/tasks/stats.rake +155 -0
  215. data/tasks/yard.rake +171 -0
  216. data/test/dirstate_tests/dirstate +0 -0
  217. data/test/dirstate_tests/hgrc +5 -0
  218. data/test/dirstate_tests/test_dir_state.rb +192 -0
  219. data/test/functional_tests/resources/.hgignore +2 -0
  220. data/test/functional_tests/resources/STYLE.txt +25 -0
  221. data/test/functional_tests/resources/command.rb +372 -0
  222. data/test/functional_tests/resources/commands/annotate.rb +57 -0
  223. data/test/functional_tests/resources/commands/experimental/lolcats.rb +17 -0
  224. data/test/functional_tests/resources/commands/heads.rb +22 -0
  225. data/test/functional_tests/resources/commands/manifest.rb +12 -0
  226. data/test/functional_tests/resources/commands/status.rb +90 -0
  227. data/test/functional_tests/resources/version2/.hgignore +5 -0
  228. data/test/functional_tests/resources/version2/STYLE.txt +25 -0
  229. data/test/functional_tests/resources/version2/command.rb +372 -0
  230. data/test/functional_tests/resources/version2/commands/annotate.rb +45 -0
  231. data/test/functional_tests/resources/version2/commands/experimental/lolcats.rb +17 -0
  232. data/test/functional_tests/resources/version2/commands/heads.rb +22 -0
  233. data/test/functional_tests/resources/version2/commands/manifest.rb +12 -0
  234. data/test/functional_tests/resources/version2/commands/status.rb +90 -0
  235. data/test/functional_tests/resources/version3/.hgignore +5 -0
  236. data/test/functional_tests/resources/version3/STYLE.txt +31 -0
  237. data/test/functional_tests/resources/version3/command.rb +376 -0
  238. data/test/functional_tests/resources/version3/commands/annotate.rb +45 -0
  239. data/test/functional_tests/resources/version3/commands/experimental/lolcats.rb +17 -0
  240. data/test/functional_tests/resources/version3/commands/heads.rb +22 -0
  241. data/test/functional_tests/resources/version3/commands/manifest.rb +12 -0
  242. data/test/functional_tests/resources/version3/commands/status.rb +90 -0
  243. data/test/functional_tests/resources/version4/.hgignore +5 -0
  244. data/test/functional_tests/resources/version4/STYLE.txt +31 -0
  245. data/test/functional_tests/resources/version4/command.rb +376 -0
  246. data/test/functional_tests/resources/version4/commands/experimental/lolcats.rb +17 -0
  247. data/test/functional_tests/resources/version4/commands/heads.rb +22 -0
  248. data/test/functional_tests/resources/version4/commands/manifest.rb +12 -0
  249. data/test/functional_tests/resources/version4/commands/stats.rb +25 -0
  250. data/test/functional_tests/resources/version4/commands/status.rb +90 -0
  251. data/test/functional_tests/resources/version5_1/.hgignore +5 -0
  252. data/test/functional_tests/resources/version5_1/STYLE.txt +2 -0
  253. data/test/functional_tests/resources/version5_1/command.rb +374 -0
  254. data/test/functional_tests/resources/version5_1/commands/experimental/lolcats.rb +17 -0
  255. data/test/functional_tests/resources/version5_1/commands/heads.rb +22 -0
  256. data/test/functional_tests/resources/version5_1/commands/manifest.rb +12 -0
  257. data/test/functional_tests/resources/version5_1/commands/stats.rb +25 -0
  258. data/test/functional_tests/resources/version5_1/commands/status.rb +90 -0
  259. data/test/functional_tests/resources/version5_2/.hgignore +5 -0
  260. data/test/functional_tests/resources/version5_2/STYLE.txt +14 -0
  261. data/test/functional_tests/resources/version5_2/command.rb +376 -0
  262. data/test/functional_tests/resources/version5_2/commands/experimental/lolcats.rb +17 -0
  263. data/test/functional_tests/resources/version5_2/commands/manifest.rb +12 -0
  264. data/test/functional_tests/resources/version5_2/commands/newz.rb +12 -0
  265. data/test/functional_tests/resources/version5_2/commands/stats.rb +25 -0
  266. data/test/functional_tests/resources/version5_2/commands/status.rb +90 -0
  267. data/test/functional_tests/test_functional.rb +604 -0
  268. data/test/localrepo_tests/test_local_repo.rb +121 -0
  269. data/test/localrepo_tests/testrepo.tar.gz +0 -0
  270. data/test/manifest_tests/00manifest.i +0 -0
  271. data/test/manifest_tests/test_manifest.rb +72 -0
  272. data/test/merge_tests/base.txt +10 -0
  273. data/test/merge_tests/expected.local.txt +16 -0
  274. data/test/merge_tests/local.txt +11 -0
  275. data/test/merge_tests/remote.txt +11 -0
  276. data/test/merge_tests/test_merge.rb +26 -0
  277. data/test/revlog_tests/00changelog.i +0 -0
  278. data/test/revlog_tests/revision_added_changelog.i +0 -0
  279. data/test/revlog_tests/test_adding_index.i +0 -0
  280. data/test/revlog_tests/test_revlog.rb +333 -0
  281. data/test/revlog_tests/testindex.i +0 -0
  282. data/test/store_tests/store.tar.gz +0 -0
  283. data/test/store_tests/test_fncache_store.rb +122 -0
  284. data/test/test_amp.rb +9 -0
  285. data/test/test_base85.rb +14 -0
  286. data/test/test_bdiff.rb +42 -0
  287. data/test/test_commands.rb +122 -0
  288. data/test/test_difflib.rb +50 -0
  289. data/test/test_helper.rb +15 -0
  290. data/test/test_journal.rb +29 -0
  291. data/test/test_match.rb +134 -0
  292. data/test/test_mdiff.rb +74 -0
  293. data/test/test_mpatch.rb +14 -0
  294. data/test/test_support.rb +24 -0
  295. metadata +382 -0
@@ -0,0 +1,377 @@
1
+ require 'uri'
2
+
3
+ # to shut up those fucking warnings!
4
+ # taken from http://www.5dollarwhitebox.org/drupal/node/64
5
+ class Net::HTTP
6
+ alias_method :old_initialize, :initialize
7
+ def initialize(*args)
8
+ old_initialize(*args)
9
+ require 'openssl' unless defined? OpenSSL
10
+ @ssl_context = OpenSSL::SSL::SSLContext.new
11
+ @ssl_context.verify_mode = OpenSSL::SSL::VERIFY_NONE
12
+ end
13
+ end
14
+
15
+ module Amp
16
+ module Repositories
17
+ ##
18
+ # = This is the class for connecting to an HTTP[S]-based repository.
19
+ # The protocol's pretty simple - just ?cmd="command", and any other
20
+ # args you need. Should be pretty easy.
21
+ class HTTPRepository < Repository
22
+ include RevlogSupport::Node
23
+
24
+ DEFAULT_HEADERS = {"User-agent" => "Amp-#{Amp::VERSION}",
25
+ "Accept" => "Application/Mercurial-0.1"}
26
+
27
+ ##
28
+ # The URL we connect to for this repository
29
+ attr_reader :url
30
+
31
+ ##
32
+ # Should the repository connect via SSL?
33
+ attr_accessor :secure
34
+
35
+ ##
36
+ # Returns whether the repository is local or not. Which it isn't. Because
37
+ # we're connecting over HTTP.
38
+ #
39
+ # @return [Boolean] +false+. Because the repo isn't local.
40
+ def local?; false; end
41
+
42
+ ##
43
+ # Standard initializer for a repository. However, "create" is a no-op.
44
+ #
45
+ # @param [String] path the URL for the repository.
46
+ # @param [Boolean] create this is useless since we can't create remote repos
47
+ # @param [Amp::AmpConfig] config the configuration for Amp right now.
48
+ def initialize(path="", create=false, config=nil)
49
+ @url, @config = URI.parse(path), config
50
+ @auth_mode = :none
51
+ raise InvalidArgumentError.new("Invalid URL for an HTTP repo!") if @url.nil?
52
+ end
53
+
54
+ ##
55
+ # Loads the capabilities from the server when necessary. (Lazy loading)
56
+ #
57
+ # @return [Hash] the capabilities of the server, in the form:
58
+ # { capability => true }
59
+ # or
60
+ # { capability => "capability;settings;"}
61
+ def get_capabilities
62
+ return @capabilities if @capabilities
63
+ begin
64
+ @capabilities = {}
65
+ do_read("capabilities").first.split.each do |k|
66
+ if k.include? "="
67
+ key, value = k.split("=", 2)
68
+ @capabilities[key] = value
69
+ else
70
+ @capabilities[k] = true
71
+ end
72
+ end
73
+ rescue
74
+ @capabilities = []
75
+ end
76
+ @capabilities
77
+ end
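+ # Illustrative sketch: a parsed capabilities hash might look like the
+ # following (the URL and capability values here are hypothetical):
+ #
+ #   repo = Amp::Repositories::HTTPRepository.new("http://example.com/repo")
+ #   repo.get_capabilities
+ #   # => {"lookup" => true, "changegroupsubset" => true,
+ #   #     "unbundle" => "HG10GZ,HG10BZ,HG10UN"}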
78
+
79
+ ##
80
+ # Unsupported - raises an error.
81
+ def lock; raise RepoError.new("You can't lock an HTTP repo."); end
82
+
83
+ ##
84
+ # Looks up a node with the given key. The key could be a node ID (full or
85
+ # partial), an index number (though this is slightly risky as it might
86
+ # match a node ID partially), "tip", and so on. See {LocalRepository#[]}.
87
+ #
88
+ # @param [String] key the key to look up - could be node ID, revision index,
89
+ # and so on.
90
+ # @return [String] the full node ID of the requested node on the remote server
91
+ def lookup(key)
92
+ require_capability("lookup", "Look up Remote Revision")
93
+ data = do_read("lookup", :key => key).first
94
+ code, data = data.chomp.split(" ", 2)
95
+
96
+ return data.unhexlify if code.to_i > 0
97
+ raise RepoError.new("Unknown Revision #{data}")
98
+ end
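+ # Illustrative sketch (the keys below are examples): the server answers
+ # with "<code> <data>"; a positive code means +data+ is the hex node ID,
+ # which is then unhexlified into binary form.
+ #
+ #   repo.lookup("tip")   # => 20-byte binary node ID of the remote tip
+ #   repo.lookup("0")     # => node ID for revision index 0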
99
+
100
+ ##
101
+ # Gets all the heads of the repository. Returned in binary form.
102
+ #
103
+ # @return [Array<String>] the full, binary node_ids of all the heads on
104
+ # the remote server.
105
+ def heads
106
+ data = do_read("heads").first
107
+ data.chomp.split(" ").map {|h| h.unhexlify }
108
+ end
109
+
110
+ ##
111
+ # Gets the node IDs of all the branch roots in the repository. Uses
112
+ # the supplied nodes to use to search for branches.
113
+ #
114
+ # @param [Array<String>] nodes the nodes to use as heads to search for
115
+ # branches. The search starts at each supplied node (or the tip, if
116
+ # left empty), and goes to that tree's root, and returns the relevant
117
+ # information for the branch.
118
+ # @return [Array<Array<String>>] An array of arrays of strings. Each array
119
+ # has 4 components: [head, root, parent1, parent2].
120
+ def branches(nodes)
121
+ n = nodes.map {|n| n.hexlify }.join(" ")
122
+ data = do_read("branches", :nodes => n).first
123
+ data.split("\n").map do |b|
124
+ b.split(" ").map {|b| b.unhexlify}
125
+ end
126
+ end
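+ # Illustrative sketch of the wire format parsed above (node IDs fabricated
+ # and truncated): one branch per line, four hex node IDs per line.
+ #
+ #   "deadbeef... 00000000... aaaa... bbbb...\n..."
+ #   # => [[head, root, parent1, parent2], ...]  (each in binary form)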
127
+
128
+ ##
129
+ # Asks the server to bundle up the given nodes into a changegroup, and returns it
130
+ # uncompressed. This is for pulls.
131
+ #
132
+ # @todo figure out what the +kind+ parameter is for
133
+ # @param [Array<String>] nodes the nodes to package into the changegroup
134
+ # @param [NilClass] kind (UNUSED)
135
+ # @return [StringIO] the uncompressed changegroup as a stream
136
+ def changegroup(nodes, kind)
137
+ n = nodes.map{|i| i.hexlify }.join ' '
138
+ f = do_read('changegroup', n.empty? ? {} : {:roots => n}).first
139
+
140
+ s = StringIO.new "",(ruby_19? ? "w+:ASCII-8BIT" : "w+")
141
+ s.write Zlib::Inflate.inflate(f)
142
+ s.pos = 0
143
+ s
144
+ end
145
+
146
+ ##
147
+ # Asks the server to bundle up all the necessary nodes between the lists
148
+ # bases and heads. It is returned as a stream that reads it in a decompressed
149
+ # fashion. This is for pulls.
150
+ #
151
+ # @param [Array<String>] bases the base nodes of the subset we're requesting.
152
+ # Should be an array (or any Enumerable) of node ids.
153
+ # @param [Array<String>] heads the heads of the subset we're requesting.
154
+ # These nodes will be retrieved as well. Should be an array of node IDs.
155
+ # @param [NilClass] source i have no idea (UNUSED)
156
+ # @return [StringIO] the uncompressed changegroup subset as a stream.
157
+ def changegroup_subset(bases, heads, source)
158
+ #require_capability 'changegroupsubset', 'look up remote changes'
159
+ base_list = bases.map {|n| n.hexlify }.join ' '
160
+ head_list = heads.map {|n| n.hexlify }.join ' '
161
+ # p base_list, head_list
162
+ f, code = *do_read("changegroupsubset", :bases => base_list, :heads => head_list)
163
+
164
+ s = StringIO.new "",(ruby_19? ? "w+:ASCII-8BIT" : "w+")
165
+ s.write Zlib::Inflate.inflate(f)
166
+ s.rewind
167
+ s
168
+ end
169
+
170
+ ##
171
+ # Sends a bundled up changegroup to the server, who will add it to its repository.
172
+ # Uses the bundle format.
173
+ #
174
+ # @param [StringIO] cg the changegroup to push as a stream.
175
+ # @param [Array<String>] heads the heads of the changegroup being sent
176
+ # @param [NilClass] source no idea UNUSED
177
+ # @return [Fixnum] the response code from the server (1 indicates success)
178
+ def unbundle(cg, heads, source)
179
+ # have to stream bundle to a temp file because we do not have
180
+ # http 1.1 chunked transfer
181
+
182
+ type = ''
183
+ types = capable? 'unbundle'
184
+
185
+ # servers older than d1b16a746db6 will send 'unbundle' as a boolean
186
+ # capability
187
+ # this will be a list of allowed bundle compression types
188
+ types = types.split ',' rescue ['']
189
+
190
+ # pick a compression format
191
+ types.each do |x|
192
+ (type = x and break) if RevlogSupport::ChangeGroup::BUNDLE_HEADERS.include? x
193
+ end
194
+
195
+ # compress and create the bundle
196
+ data = RevlogSupport::ChangeGroup.write_bundle cg, type
197
+
198
+ # send the data
199
+ resp = do_read 'unbundle', :data => data.string,
200
+ :headers => {'Content-Type' => 'application/octet-stream'},
201
+ :heads => heads.map{|h| h.hexlify }.join(' ')
202
+ # parse output
203
+ resp_code, output = resp.first.split "\n"
204
+
205
+ # make sure the response was in an expected format (i.e. with a response code)
206
+ unless resp_code.to_i.to_s == resp_code
207
+ raise abort("push failed (unexpected response): #{resp}")
208
+ end
209
+
210
+ # output any text from the server
211
+ UI::say output
212
+ # return 1 for success, 0 for failure
213
+ resp_code.to_i
214
+ end
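+ # A sketch of the negotiation above, under the assumption described in the
+ # comments: an old server reports "unbundle" as a bare boolean, a newer one
+ # as a comma-separated list of bundle types (values shown are examples).
+ #
+ #   capable? 'unbundle'   # => true, or e.g. "HG10GZ,HG10BZ,HG10UN"
+ #   # the first type that appears in BUNDLE_HEADERS is chosen, and
+ #   # write_bundle compresses the changegroup with it before the POST.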
215
+
216
+ def stream_out
217
+ do_cmd 'stream_out'
218
+ end
219
+
220
+ ##
221
+ # For each provided pair of nodes, return the nodes between the pair.
222
+ #
223
+ # @param [Array<Array<String>>] pairs an array of node pairs, i.e. an array of arrays
224
+ # of strings. The first node is the head, the second node is the root of the pair.
225
+ # @return [Array<Array<String>>] for each pair, we return 1 array, which contains
226
+ # the node IDs of every node between the pair.
227
+ # add lstrip to split_newlines to fix but not cure bug
228
+ def between(pairs)
229
+ batch = 8
230
+ ret = []
231
+
232
+ (0..(pairs.size)).step(batch) do |i|
233
+ n = pairs[i..(i+batch-1)].map {|p| p.map {|k| k.hexlify }.join("-") }.join(" ")
234
+ d, code = *do_read("between", :pairs => n)
235
+
236
+ raise RepoError.new("unexpected code: #{code}") unless code == 200
237
+
238
+ ret += d.lstrip.split_newlines.map {|l| (l && l.split(" ").map{|i| i.unhexlify }) || []}
239
+ end
240
+ Amp::UI.debug "between returns: #{ret.inspect}"
241
+ ret
242
+ end
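+ # Worked sketch of the batching above (hypothetical input): with batch = 8,
+ # 20 pairs become three "between" requests of 8, 8 and 4 pairs; each pair is
+ # encoded as "<top-hex>-<bottom-hex>" and the pairs are joined with spaces.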
243
+
244
+ private
245
+
246
+ ##
247
+ # Runs the given command by the server, gets the response. Takes the name of the command,
248
+ # the data, headers, etc. The command is assumed to be a GET request, unless args[:data] is
249
+ # set, in which case it is sent via POST.
250
+ #
251
+ # @param [String] command the command to send to the server, such as "heads"
252
+ # @param [Hash] args the arguments you need to provide - for lookup, it
253
+ # might be the revision indices.
254
+ # @return [String] the response data from the server.
255
+ def do_cmd(command, args={})
256
+ require 'net/http'
257
+
258
+ # Be safe for recursive calls
259
+ work_args = args.dup
260
+ # grab data, but don't leave it in, or it'll be added to the query string
261
+ data = work_args.delete(:data) || nil
262
+ # and headers, but don't leave it in, or it'll be added to the query string
263
+ headers = work_args.delete(:headers) || {}
264
+
265
+ # Our query string is "cmd => command" plus any other parts of the args hash
266
+ query = { "cmd" => command }
267
+ query.merge! work_args
268
+
269
+ # break it up, make a query
270
+ host = @url.host
271
+ path = @url.path
272
+ # Was having trouble with this... should be safe now
273
+ path += "?" + URI.escape(query.map {|k,v| "#{k}=#{v}"}.join("&"), /[^-_!~*'()a-zA-Z\d;\/?:@&=+$,\[\]]/n)
274
+
275
+ # silly scoping
276
+ response = nil
277
+ # Create an HTTP object so we can send our request. static methods aren't flexible
278
+ # enough for us
279
+ sess = Net::HTTP.new host, @url.port
280
+ # Use SSL if necessary
281
+ sess.use_ssl = true if secure
282
+ # Let's send our request!
283
+ sess.start do |http|
284
+ # if we have data, it's a POST
285
+ if data
286
+ req = Net::HTTP::Post.new(path)
287
+ req.body = data
288
+ else
289
+ # otherwise, it's a GET
290
+ req = Net::HTTP::Get.new(path)
291
+ end
292
+ if @auth_mode == :digest
293
+ # Set digest headers
294
+ req.digest_auth @username, @password, @auth_digest
295
+ elsif @auth_mode == :basic
296
+ # Set basic auth headers
297
+ req.basic_auth @username, @password
298
+ end
299
+ # Copy over the default headers
300
+ DEFAULT_HEADERS.each {|k, v| req[k] = v}
301
+ # Then overwrite them (and add new ones) from our arguments
302
+ headers.each {|k, v| req[k] = v}
303
+ # And send the request!
304
+ response = http.request(req)
305
+ end
306
+ # Case on response - we'll be using the kind_of? style of switch statement
307
+ # here
308
+ case response
309
+ when Net::HTTPRedirection
310
+ # Redirect to a new URL - grab the new URL...
311
+ newurl = response["Location"]
312
+ @url = URI.parse(newurl)
313
+ # and try that again.
314
+ do_cmd(command, args)
315
+ when Net::HTTPUnauthorized
316
+ if @auth_mode == :digest
317
+ # no other handlers!
318
+ raise AuthorizationError.new("Failed to authenticate to local repository!")
319
+ elsif @auth_mode == :basic
320
+ # failed to authenticate via basic, so escalate to digest mode
321
+ @auth_mode = :digest
322
+ @auth_digest = response
323
+ do_cmd command, args
324
+ else
325
+ # They want a username and password. A few routes:
326
+ # First, check the URL for the username:password@host format
327
+ @username ||= @url.user
328
+ @password ||= @url.password
329
+ # and start off with basic authentication
330
+ @auth_mode = :basic
331
+ # If the URL didn't contain the username AND password, ask the user for them.
332
+ unless @username && @password
333
+ UI::say "==> HTTP Authentication Required"
334
+
335
+ @username = UI::ask 'username: '
336
+ @password = UI::ask 'password: ', :password
337
+ end
338
+
339
+ # Recursively call the command
340
+ do_cmd command, args
341
+ end
342
+ else
343
+ # We got a successful response! Woo!
344
+ response
345
+ end
346
+ end
347
+
348
+ ##
349
+ # This is a helper for do_cmd - it splits up the response object into
350
+ # two relevant parts: the response body, and the response code.
351
+ #
352
+ # @param [String] command the remote command to execute, such as "heads"
353
+ # @param [Hash] args the arguments to pass to the request. Takes some special values. All
354
+ # other values are sent in the query string.
355
+ # @option args [String] :data (nil) the POST data to send
356
+ # @option args [Hash] :headers ({}) the headers to send with the request, not including
357
+ # any authentication or user-agent headers.
358
+ # @return [Array] the response data, in the form [body, response_code]
359
+ def do_read(command, args={})
360
+ response = do_cmd(command, args)
361
+ [response.body, response.code.to_i]
362
+ end
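+ # Illustrative examples of the argument conventions above (values made up):
+ # :data switches the request to a POST, :headers is merged over
+ # DEFAULT_HEADERS, and everything else ends up in the query string.
+ #
+ #   do_read("lookup", :key => "tip")
+ #   # GET  ...?cmd=lookup&key=tip                 # => [body, 200]
+ #   do_read("unbundle", :data => bundle_data, :heads => hex_heads)
+ #   # POST ...?cmd=unbundle&heads=...             # => [body, 200]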
363
+ end
364
+
365
+ ##
366
+ # A special form of the HTTPRepository, except that it is secured over SSL (HTTPS).
367
+ # Other than that, nothing fancy about it.
368
+ class HTTPSRepository < HTTPRepository
369
+ def initialize(*args)
370
+ require 'net/https'
371
+
372
+ super(*args)
373
+ self.secure = true
374
+ end
375
+ end
376
+ end
377
+ end
@@ -0,0 +1,2661 @@
1
+ require 'fileutils'
2
+
3
+ module Amp
4
+ module Repositories
5
+
6
+ ##
7
+ # A Local Repository is a repository that works on local repos, such
8
+ # as your working directory. This makes it pretty damn important, and also
9
+ # pretty damn complicated. Have fun!
10
+ class LocalRepository < Repository
11
+ include Amp::RevlogSupport::Node
12
+ include Amp::Repositories::BranchManager
13
+ include Amp::Repositories::TagManager
14
+ include Amp::Repositories::Updatable
15
+ include Amp::Repositories::Verification
16
+
17
+ # The config is an {AmpConfig} for this repo (and uses .hg/hgrc)
18
+ attr_accessor :config
19
+
20
+ attr_reader :root
21
+ attr_reader :root_pathname # save some computation here
22
+ attr_reader :hg
23
+ attr_reader :hg_opener
24
+ attr_reader :branch_manager
25
+ attr_reader :store_opener
26
+ attr_reader :store
27
+
28
+ ##
29
+ # Initializes a new directory to the given path, and with the current
30
+ # configuration.
31
+ #
32
+ # @param [String] path a path to the Repository.
33
+ # @param [Boolean] create Should we create a new one? Usually for
34
+ # the "amp init" command.
35
+ # @param [Amp::AmpConfig] config the configuration loaded from the user's
36
+ # system. Will have some settings overwritten by the repo's hgrc.
37
+ def initialize(path="", create=false, config=nil)
38
+ @capabilities = {}
39
+ @root = path[-1, 1] == '/' ? path[0..-2] : path # no ending slashes
40
+ @root = File.expand_path @root
41
+ @hg = File.join @root, ".hg"
42
+ @file_opener = Amp::Opener.new @root
43
+ @file_opener.default = :open_file # these two are the same, pretty much
44
+ @hg_opener = Amp::Opener.new @root
45
+ @hg_opener.default = :open_hg # just with different defaults
46
+ @filters = {}
47
+ @changelog = nil
48
+ @manifest = nil
49
+ @dirstate = nil
50
+ requirements = []
51
+
52
+ # make a repo if necessary
53
+ unless File.directory? @hg
54
+ if create
55
+ then requirements = init config
56
+ else raise RepoError.new("Repository #{path} not found")
57
+ end
58
+ end
59
+
60
+ # no point in reading what we _just_ wrote...
61
+ unless create
62
+ # read requires
63
+ # save it if something's up
64
+ @hg_opener.open("requires", 'r') {|f| f.each {|r| requirements << r.strip } } rescue nil
65
+ end
66
+
67
+ @store = Stores.pick requirements, @hg, Amp::Opener
68
+ @config = Amp::AmpConfig.new :parent_config => config
69
+ @config.read_file File.join(@hg,"hgrc")
70
+ end
71
+
72
+ def local?; true; end
73
+
74
+ def relative_join(file, cur_dir=FileUtils.pwd)
75
+ @root_pathname ||= Pathname.new(@root)
76
+ Pathname.new(File.expand_path(File.join(cur_dir, file))).relative_path_from(@root_pathname).to_s
77
+ end
78
+
79
+
80
+ def inspect; "#<LocalRepository @root=#{@root.inspect}>"; end
81
+
82
+ ##
83
+ # Creates this repository's folders and structure.
84
+ #
85
+ # @param [AmpConfig] config the configuration for this user so
86
+ # we know what neato features to use (like filename cache)
87
+ # @return [Array<String>] the requirements that we found are returned,
88
+ # so further configuration can go down.
89
+ def init(config=@config)
90
+ # make the directory if it's not there
91
+ FileUtils.makedirs @hg
92
+
93
+ requirements = ["revlogv1"]
94
+
95
+ # add some requirements
96
+ if config["format"]["usestore", Boolean] || true
97
+ FileUtils.mkdir "#{@hg}/store"
98
+ requirements << "store"
99
+ requirements << "fncache" if config["format"]["usefncache", Boolean, true]
100
+
101
+ # add the changelog
102
+ make_changelog
103
+ end
104
+
105
+
106
+ # write the requires file
107
+ write_requires requirements
108
+ end
109
+
110
+ ##
111
+ # Is the repository pristine (unchanged since the last commit)?
112
+ # Returns true if there are NO outstanding changes or uncommitted merges.
113
+ #
114
+ # @return [Boolean] is the repo pristine
115
+ def pristine?
116
+ dirstate.parents.last == RevlogSupport::Node::NULL_ID &&
117
+ status(:only => [:modified, :added, :removed, :deleted]).all? {|_, v| v.empty? }
118
+ end
119
+
120
+ ##
121
+ # @see pristine?
122
+ def changed?; !pristine?; end
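+ # Hedged usage sketch (the path is an example):
+ #
+ #   repo = Amp::Repositories::LocalRepository.new("/path/to/repo")
+ #   repo.pristine?   # => true when nothing is modified/added/removed/deleted
+ #   repo.changed?    # => the opposite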
123
+
124
+ ##
125
+ # Effectively FileUtils.pwd
126
+ #
127
+ # @return [String] the current location
128
+ def cwd
129
+ dirstate.cwd
130
+ end
131
+ alias_method :pwd, :cwd
132
+
133
+ ##
134
+ # Returns the relative path from +src+ to +dest+.
135
+ #
136
+ # @param [String] src This is a directory! If this is relative,
137
+ # it is assumed to be relative to the root.
138
+ # @param [String] dest This MUST be within root! It also is a file.
139
+ # @return [String] the relative path
140
+ def path_to(src, dest)
141
+ dirstate.path_to src, dest
142
+ end
143
+
144
+ ##
145
+ # Gets the changeset at the given revision.
146
+ #
147
+ # @param [String, Integer] rev the revision index (Integer) or
148
+ # node_id (String) that we want to access. if nil, returns
149
+ # the working directory. if the string is 'tip', it returns the
150
+ # latest head. Can be either a string or an integer;
151
+ # this shit is smart.
152
+ # @return [Changeset] the changeset at the given revision index or node
153
+ # id. Could be working directory.
154
+ def [](rev)
155
+ if rev.nil?
156
+ return WorkingDirectoryChangeset.new(self)
157
+ end
158
+ rev = rev.to_i if rev.to_i.to_s == rev
159
+ return Changeset.new(self, rev)
160
+ end
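+ # Usage sketch (the indices are examples):
+ #
+ #   repo[nil]     # => WorkingDirectoryChangeset for the working directory
+ #   repo[3]       # => Changeset at revision index 3
+ #   repo["3"]     # => same; numeric strings are converted to integers
+ #   repo["tip"]   # => Changeset for the latest head, as described above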
161
+
162
+ ##
163
+ # Iterates over each changeset in the repository, from oldest to newest.
164
+ #
165
+ # @yield each changeset in the repository is yielded to the caller, in order
166
+ # from oldest to newest. (Actually, lowest revision # to highest revision #)
167
+ def each(&block)
168
+ 0.upto(size - 1) { |i| yield self[i]}
169
+ end
170
+
171
+ ##
172
+ # Creates a lock at the given path. At first it tries to just make it straight away.
173
+ # If this fails, we then sleep for up to a given amount of time (defaults to 10 minutes!)
174
+ # and continually try to acquire the lock.
175
+ #
176
+ # @raise [LockHeld] if the lock cannot be acquired, this exception is raised
177
+ # @param [String] lockname the name of the lock to create
178
+ # @param [Boolean] wait should we wait for the lock to be released?
179
+ # @param [Proc, #call] release_proc a proc to run when the lock is released
180
+ # @param [Proc, #call] acquire_proc a proc to run when we get the lock
181
+ # @param [String] desc the description of the lock to show if someone stomps on it
182
+ # @return [Lock] a lock at the given location.
183
+ def make_lock(lockname, wait, release_proc, acquire_proc, desc)
184
+ begin
185
+ lock = Lock.new(lockname, :timeout => 0, :release_fxn => release_proc, :desc => desc)
186
+ rescue LockHeld => err
187
+ raise unless wait
188
+ UI.warn("waiting for lock on #{desc} held by #{err.locker}")
189
+ lock = Lock.new(lockname, :timeout => @config["ui","timeout","600"].to_i,
190
+ :release_proc => release_proc, :desc => desc)
191
+ end
192
+ acquire_proc.call if acquire_proc
193
+ return lock
194
+ end
195
+
196
+ ##
197
+ # Locks the repository's .hg/store directory. Returns the lock, or if a block is given,
198
+ # runs the block with the lock, and clears the lock afterward.
199
+ #
200
+ # @yield When a block is given, that block is executed under locked
201
+ # conditions. That code can be guaranteed it is the only code running on the
202
+ # store in a destructive manner.
203
+ # @param [Boolean] wait (true) wait for the lock to expire?
204
+ # @return [Lock] the lock on the .hg/store directory
205
+ def lock_store(wait = true)
206
+ return @lock_ref if @lock_ref && @lock_ref.weakref_alive?
207
+
208
+ lock = make_lock(store_join("lock"), wait, nil, nil, "repository #{root}")
209
+ @lock_ref = WeakRef.new(lock)
210
+ if block_given?
211
+ begin
212
+ yield
213
+ ensure
214
+ @lock_ref = nil
215
+ lock.release
216
+ end
217
+ else
218
+ return lock
219
+ end
220
+ end
221
+
222
+ ##
223
+ # Locks the repository's .hg/store directory. Returns the lock, or if a block is given,
224
+ # runs the block with the lock, and clears the lock afterward.
225
+ #
226
+ # @yield When a block is given, that block is executed under locked
227
+ # conditions. That code can be guaranteed it is the only code running on the
228
+ # working directory in a destructive manner.
229
+ # @param [Boolean] wait (true) wait for the lock to expire?
230
+ # @return [Lock] the lock on the .hg/store directory
231
+ def lock_working(wait = true)
232
+ return @working_lock_ref if @working_lock_ref && @working_lock_ref.weakref_alive?
233
+
234
+ lock = make_lock(join("wlock"), wait, nil, nil, "working directory of #{root}")
235
+ @working_lock_ref = WeakRef.new(lock)
236
+ if block_given?
237
+ begin
238
+ yield
239
+ ensure
240
+ @working_lock_ref = nil
241
+ lock.release
242
+ end
243
+ else
244
+ return lock
245
+ end
246
+ end
247
+
248
+ ##
249
+ # Takes a block, and runs that block with both the store and the working directory locked.
250
+ #
251
+ # @param [Boolean] wait (true) should we wait for locks, or just give up early?
252
+ def lock_working_and_store(wait=true)
253
+ lock_store(wait) do
254
+ lock_working(wait) do
255
+ yield
256
+ end
257
+ end
258
+ end
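+ # Illustrative use of the nested locks above: both locks are held for the
+ # duration of the block and released afterward (the block body is made up).
+ #
+ #   repo.lock_working_and_store do
+ #     # safe to modify both the working directory state and the store here
+ #   end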
259
+
260
+ ##
261
+ # Gets the file-log for the given path, so we can look at an individual
262
+ # file's history, for example.
263
+ #
264
+ # @param [String] f the path to the file
265
+ # @return [FileLog] a filelog (a type of revision log) for the given file
266
+ def file(f)
267
+ f = f[1..-1] if f[0, 1] == "/"
268
+ FileLog.new @store.opener, f
269
+ end
270
+
271
+ ##
272
+ # Returns the parent changesets of the specified changeset. Defaults to the
273
+ # working directory, if +change_id+ is unspecified.
274
+ #
275
+ # @param [Integer, String] change_id the ID (or index) of the requested changeset
276
+ # @return [Array<Changeset>] the parent changesets of the requested changeset
277
+ def parents(change_id = nil)
278
+ self[change_id].parents
279
+ end
280
+
281
+ ##
282
+ # Gets a versioned file for the given path, so we can look at the individual
283
+ # file's history with the file object itself.
284
+ #
285
+ # @param [String] path the path to the file
286
+ # @param [Hash] opts the options for creating the versioned file
287
+ # @option [String] opts change_id (nil) The ID of the changeset in question
288
+ # @option [String, Integer] opts file_id (nil) the revision # or node ID of
289
+ # into the file_log
290
+ def versioned_file(path, opts={})
291
+ VersionedFile.new(self, path, opts)
292
+ end
293
+
294
+ ##
295
+ # Gets a versioned file, but using the working directory, so we are looking
296
+ # past the last commit. Important because it uses a different class. Duh.
297
+ #
298
+ # @param [String] path the path to the file
299
+ # @param [Hash] opts the options for creating the versioned file
300
+ # @option [String] opts change_id (nil) The ID of the changeset in question
301
+ # @option [String, Integer] opts file_id (nil) the revision # or node ID of
302
+ # into the file_log
303
+ def working_file(path, opts={})
304
+ VersionedWorkingFile.new(self, path, opts)
305
+ end
306
+
307
+ ##
308
+ # Reads from a file, but in the working directory.
309
+ # Uses encoding if we are set up to do so.
310
+ #
311
+ # @param [String] filename the file to read from the working directory
312
+ # @return [String] the data read from the file, encoded if we are set
313
+ # up to do so.
314
+ def working_read(filename)
315
+ data = @file_opener.open(filename, "r") {|f| f.read }
316
+ data = @filters["encode"].call(filename, data) if @filters["encode"]
317
+ data
318
+ end
319
+
320
+ ##
321
+ # Writes to a file, but in the working directory. Uses encoding if we are
322
+ # set up to do so. Also handles symlinks and executables. Ugh.
323
+ #
324
+ # @param [String] path the path to the file to write to
325
+ # @param [String] data the data to write
326
+ # @param [String] flags the flags to set
327
+ def working_write(path, data, flags)
328
+ @file_opener.open(path, "w") do |file|
329
+ file.write(data)
330
+ end
331
+ if flags && flags.include?('x')
332
+ File.amp_set_executable(working_join(path), true)
333
+ end
334
+ end
335
+
336
+ ##
337
+ # Returns the changelog for this repository. This changelog basically
338
+ # is the history of all commits.
339
+ #
340
+ # @return [ChangeLog] the commit history object for the entire repo.
341
+ def changelog
342
+ return @changelog if @changelog
343
+
344
+ @changelog = ChangeLog.new @store.opener
345
+
346
+ if path = ENV['HG_PENDING']
347
+ if path =~ /^#{root}/
348
+ @changelog.read_pending('00changelog.i.a')
349
+ end
350
+ end
351
+
352
+ @changelog
353
+ end
354
+
355
+ ##
356
+ # Returns the merge state for this repository. The merge state keeps track
357
+ # of what files need to be merged for an update to be successfully completed.
358
+ #
359
+ # @return [MergeState] the repository's merge state.
360
+ def merge_state
361
+ @merge_state ||= Amp::Merges::MergeState.new(self)
362
+ end
363
+
364
+ ##
365
+ # Returns the manifest for this repository. The manifest keeps track
366
+ # of what files exist at what times, and if they have certain flags
367
+ # (such as executable, or is it a symlink).
368
+ #
369
+ # @return [Manifest] the manifest for the repository
370
+ def manifest
371
+ return @manifest if @manifest
372
+
373
+ changelog #load the changelog
374
+ @manifest = Manifest.new @store.opener
375
+ end
376
+
377
+ ##
378
+ # Returns the dirstate for this repository. The dirstate keeps track
379
+ # of files status, such as removed, added, merged, and so on. It also
380
+ # keeps track of the working directory.
381
+ #
382
+ # @return [DirState] the dirstate for this local repository.
383
+ def dirstate
384
+ return @dirstate if @dirstate
385
+
386
+ opener = Amp::Opener.new @root
387
+ opener.default = :open_hg
388
+
389
+ @dirstate = DirState.new(@root, @config, opener)
390
+ @dirstate.read!
391
+ end
392
+
393
+ ##
394
+ # Returns the URL of this repository. Uses the "file:" scheme as such.
395
+ #
396
+ # @return [String] the URL pointing to this repo
397
+ def url; "file:#{@root}"; end
398
+
399
+ ##
400
+ # Opens a file using our opener. Can only access files in .hg/
401
+ def open(*args, &block)
402
+ @hg_opener.open(*args, &block)
403
+ end
404
+
405
+ ##
406
+ # Joins the path to the repo's root (not .hg, the working dir root)
407
+ #
408
+ # @param path the path we're joining
409
+ # @return [String] the path joined to the working directory's root
410
+ def working_join(path)
411
+ File.join(@root, path)
412
+ end
413
+
414
+ ##
415
+ # Joins the path from this repo's path (.hg), to the file provided.
416
+ #
417
+ # @param file the file we need the path for
418
+ # @return [String] the repo's root, joined with the file's path
419
+ def join(file)
420
+ File.join(@hg, file)
421
+ end
422
+
423
+ ##
424
+ # Joins the path, with a bunch of other args, to the store's directory.
425
+ # Used for opening {FileLog}s and whatnot.
426
+ #
427
+ # @param file the path to the file
428
+ # @return [String] the path to the file from the store.
429
+ def store_join(file)
430
+ @store.join file
431
+ end
432
+
433
+ ##
434
+ # Looks up an identifier for a revision in the commit history. This
435
+ # key could be an integer (specifying a revision number), "." for
436
+ # the latest revision, "null" for the null revision, "tip" for
437
+ # the tip of the repository, a node_id (in hex or binary form) for
438
+ # a revision in the changelog. Yeah. It's a flexible method.
439
+ #
440
+ # @param key the key to lookup in the history of the repo
441
+ # @return [String] a node_id into the changelog for the requested revision
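+ # @example Accepted key forms (values are illustrative)
+ #   lookup(10)         # revision index
+ #   lookup(".")        # first parent of the working directory
+ #   lookup("null")     # NULL_ID
+ #   lookup("tip")      # changelog.tip
+ #   lookup("a1b2c3")   # full/partial node ID, or a tag or branch name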
442
+ def lookup(key)
443
+ key = key.to_i if key.to_i.to_s == key.to_s # casting for things like "10"
444
+ case key
445
+ when Fixnum, Bignum, Integer
446
+ changelog.node_id_for_index(key)
447
+ when "."
448
+ dirstate.parents().first
449
+ when "null", nil
450
+ NULL_ID
451
+ when "tip"
452
+ changelog.tip
453
+ else
454
+
455
+ n = changelog.id_match(key)
456
+ return n if n
457
+
458
+ return tags[key] if tags[key]
459
+ return branch_tags[key] if branch_tags[key]
460
+
461
+ n = changelog.partial_id_match(key)
462
+ return n if n
463
+
464
+ # bail
465
+ raise RepoError.new("unknown revision #{key}")
466
+ end
467
+ end
468
+
469
+ ##
470
+ # Finds the nodes between two nodes - this algorithm is ported from the
471
+ # python for mercurial (localrepo.py:1247, for 1.2.1 source). Since this
472
+ # is used by servers, it implements their algorithm... which seems to
473
+ # intentionally not return every node between +top+ and +bottom+.
474
+ # Each one is twice as far from +top+ as the previous.
475
+ #
476
+ # @param [Array<String, String>] pairs an array of node-id pairs, which are arrays
477
+ # of [+top+, +bottom+], which are:
478
+ # top [String] the "top" - or most recent - revision's node ID
479
+ # bottom [String] the "bottom" - or oldest - revision's node ID
480
+ #
481
+ # @return [Array<Array<String>>] for each pair, the node IDs found between +top+ and +bottom+
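+ #
+ # @example Spacing of the returned nodes (hypothetical ten-node chain)
+ #   # for top = n9 -> n8 -> ... -> n0 = bottom, the nodes collected are
+ #   # those 1, 2, 4 and 8 steps below +top+: n8, n7, n5 and n1.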
482
+ def between(pairs)
483
+ pairs.map do |top, bottom|
484
+ node, list, counter = top, [], 0
485
+ add_me = 1
486
+ while node != bottom && node != NULL_ID
487
+ if counter == add_me
488
+ list << node
489
+ add_me *= 2
490
+ end
491
+ parent = changelog.parents_for_node(node).first
492
+ node = parent
493
+ counter += 1
494
+ end
495
+ list
496
+ end
497
+ end
498
+
499
+ ##
500
+ # Pull new changegroups from +remote+
501
+ # This does not apply the changes, but pulls them onto
502
+ # the local server.
503
+ #
504
+ # @param [String] remote the path of the remote source (will either be
505
+ # an HTTP repo or an SSH repo)
506
+ # @param [{Symbol => [String] or Boolean}] this reads two parameters from
507
+ # opts -- heads and force. heads is the changesets to collect. If this
508
+ # is empty, it will pull from tip.
509
+ def pull(remote, opts={:heads => nil, :force => nil})
510
+ lock_store do
511
+ # get the common nodes, missing nodes, and the remote heads
512
+ # this is findcommonincoming in the Python code, for those with both open
513
+ common, fetch, remote_heads = *common_nodes(remote, :heads => opts[:heads],
514
+ :force => opts[:force])
515
+
516
+ UI::status 'requesting all changes' if fetch == [NULL_ID]
517
+ if fetch.empty?
518
+ UI::status 'no changes found'
519
+ return 0
520
+ end
521
+
522
+ if (opts[:heads].nil? || opts[:heads].empty?) && remote.capable?('changegroupsubset')
523
+ opts[:heads] = remote_heads
524
+ end
525
+ opts[:heads] ||= []
526
+ cg = if opts[:heads].empty?
527
+ remote.changegroup fetch, :pull
528
+ else
529
+ # check for capabilities
530
+ unless remote.capable? 'changegroupsubset'
531
+ raise abort('Partial pull cannot be done because ' +
532
+ 'the other repository doesn\'t support ' +
533
+ 'changegroupsubset')
534
+ end # end unless
535
+
536
+ remote.changegroup_subset fetch, opts[:heads], :pull
537
+ end
538
+
539
+ add_changegroup cg, :pull, remote.url
540
+ end
541
+ end
542
+
543
+ ##
544
+ # Add a changegroup to the repo.
545
+ #
546
+ # Return values:
547
+ # - nothing changed or no source: 0
548
+ # - more heads than before: 1+added_heads (2..n)
549
+ # - fewer heads than before: -1-removed_heads (-2..-n)
550
+ # - number of heads stays the same: 1
551
+ #
552
+ # Don't the first and last conflict? No: 0 is only returned when the
553
+ # source is empty; otherwise the result is adjusted so it is never 0.
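+ #
+ # @example Reading the return value (numbers are illustrative)
+ #   # growing from 1 head to 3 heads  => 3   (1 + 2 added heads)
+ #   # shrinking from 3 heads to 2     => -2  (-1 - 1 removed head)
+ #   # empty changegroup source        => 0
+ #   # the returned Integer also responds to #success?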
554
+ def add_changegroup(source, type, url, opts={:empty => []})
555
+ run_hook :pre_changegroup, :throw => true, :source => type, :url => url
556
+ changesets = files = revisions = 0
557
+
558
+ return 0 if source.string.empty?
559
+
560
+ rev_map = proc {|x| changelog.revision_index_for_node x }
561
+ cs_map = proc do |x|
562
+ UI::debug "add changeset #{short x}"
563
+ changelog.size
564
+ end
565
+
566
+ # write changelog data to temp files so concurrent readers will not
567
+ # see inconsistent view
568
+ changelog.delay_update
569
+ old_heads = changelog.heads.size
570
+ new_heads = nil # scoping
571
+ changesets = nil # scoping
572
+ cor = nil # scoping
573
+ cnr = nil # scoping
574
+ heads = nil # scoping
575
+
576
+ Journal::start join('journal') do |journal|
577
+ UI::status 'adding changeset'
578
+
579
+ # pull of the changeset group
580
+ cor = changelog.size - 1
581
+ unless changelog.add_group(source, cs_map, journal) || opts[:empty].any?
582
+ raise abort("received changelog group is empty")
583
+ end
584
+
585
+ cnr = changelog.size - 1
586
+ changesets = cnr - cor
587
+
588
+ # pull off the manifest group
589
+ UI::status 'adding manifests'
590
+
591
+ # No need to check for empty manifest group here:
592
+ # if the result of the merge of 1 and 2 is the same in 3 and 4,
593
+ # no new manifest will be created and the manifest group will be
594
+ # empty during the pull
595
+ manifest.add_group source, rev_map, journal
596
+
597
+ # process the files
598
+ UI::status 'adding file changes'
599
+
600
+ loop do
601
+ f = Amp::RevlogSupport::ChangeGroup.get_chunk source
602
+ break if f.empty?
603
+
604
+ UI::debug "adding #{f} revisions"
605
+ fl = file f
606
+ o = fl.index_size
607
+ unless fl.add_group source, rev_map, journal
608
+ raise abort('received file revlog group is empty')
609
+ end
610
+ revisions += fl.index_size - o
611
+ files += 1
612
+ end # end loop
613
+
614
+ new_heads = changelog.heads.size
615
+ heads = ""
616
+
617
+ unless old_heads.zero? || new_heads == old_heads
618
+ heads = " (+#{new_heads - old_heads} heads)"
619
+ end
620
+
621
+ UI::status("added #{changesets} changesets" +
622
+ " with #{revisions} changes to #{files} files#{heads}")
623
+
624
+ if changesets > 0
625
+ changelog.write_pending
626
+ p = proc { changelog.write_pending && root or "" }
627
+ run_hook :pre_txnchangegroup, :throw => true,
628
+ :node => changelog.node_id_for_index(cor+1).hexlify,
629
+ :source => type,
630
+ :url => url
631
+ end
632
+
633
+ changelog.finalize journal
634
+
635
+ end # end Journal::start
636
+
637
+ if changesets > 0
638
+ # forcefully update the on-disk branch cache
639
+ UI::debug 'updating the branch cache'
640
+ branch_tags
641
+ run_hook :post_changegroup, :node => changelog.node_id_for_index(cor+1).hexlify, :source => type, :url => url
642
+
643
+ ((cor+1)..(cnr+1)).to_a.each do |i|
644
+ run_hook :incoming, :node => changelog.node_id_for_index(i).hexlify,
645
+ :source => type,
646
+ :url => url
647
+ end # end each
648
+ end # end if
649
+
650
+ hdz = branch_heads
651
+ # never return 0 here
652
+ ret = if new_heads < old_heads
653
+ new_heads - old_heads - 1
654
+ else
655
+ new_heads - old_heads + 1
656
+ end # end if
657
+
658
+ class << ret
659
+ def success?; self <= 1 || hdz.size == 1; end
660
+ end
661
+
662
+ ret
663
+ end # end def
664
+
665
+ ##
666
+ # A changegroup, of some sort.
667
+ def changegroup(base_nodes, source)
668
+ changegroup_subset(base_nodes, heads, source)
669
+ end
670
+
671
+ ##
672
+ # Prints information about the changegroup we are going to receive.
673
+ #
674
+ # @param [Array<String>] nodes the list of node IDs we are receiving
675
+ # @param [Symbol] source how are we receiving the changegroup?
676
+ # @todo add more debug info
677
+ def changegroup_info(nodes, source)
678
+ # print info
679
+ if source == :bundle
680
+ UI.status("#{nodes.size} changesets found")
681
+ end
682
+ # debug stuff
683
+ end
684
+
685
+ ##
686
+ # Faster version of changegroup_subset. Useful when pushing working dir.
687
+ #
688
+ # Generate a changegroup of all nodes that we have that a recipient
689
+ # doesn't
690
+ #
691
+ # This is much easier than the previous function as we can assume that
692
+ # the recipient has any changenode we aren't sending them.
693
+ #
694
+ # @param [[String]] common the set of common nodes between remote and self
695
+ # @param [Amp::Repository] source
696
+ def get_changegroup(common, source)
697
+ # Call the hooks
698
+ run_hook :pre_outgoing, :throw => true, :source => source
699
+
700
+ nodes = changelog.find_missing common
701
+ revset = Hash.with_keys(nodes.map {|n| changelog.rev(n)})
702
+
703
+ changegroup_info nodes, source
704
+
705
+ identity = proc {|x| x }
706
+
707
+ # ok so this method goes through the generic revlog, and looks for nodes
708
+ # in the changeset(s) we're pushing. Works by the link_rev - basically,
709
+ # the changelog says "hey we're at revision 35", and any changes to any
710
+ # files in any revision logs for that commit will have a link_revision
711
+ # of 35. So we just look for 35!
712
+ gen_node_list = proc do |log|
713
+ log.select {|r| revset[r.link_rev] }.map {|r| r.node_id }
714
+ end
715
+
716
+ # Ok.... I've tried explaining this 3 times and failed.
717
+ #
718
+ # Goal of this proc: We need to update the changed_files hash to reflect
719
+ # which files (typically file logs) have changed since the last push.
720
+ #
721
+ # How it works: it generates a proc that takes a node_id. That node_id
722
+ # will be looked up in the changelog.i file, which happens to store a
723
+ # list of files that were changed in that commit! So really, this method
724
+ # just takes a node_id, and adds filenamess to the list of changed files.
725
+ changed_file_collector = proc do |changed_fileset|
726
+ proc do |cl_node|
727
+ c = changelog.read cl_node
728
+ c[3].each {|fname| changed_fileset[fname] = true }
729
+ end
730
+ end
731
+
732
+ lookup_revlink_func = proc do |revlog|
733
+ # given a revision, return the node
734
+ # good thing the python has a description of what this does
735
+ #
736
+ # *snort*
737
+ lookup_revlink = proc do |n|
738
+ changelog.node revlog[n].link_rev
739
+ end
740
+ end
741
+
742
+ # This constructs a changegroup, or a list of all changed files.
743
+ # If you're here, looking at this code, this bears repeating:
744
+ # - Changelog
745
+ # -- ChangeSet+
746
+ #
747
+ # A Changelog (history of a branch) is an array of ChangeSets,
748
+ # and a ChangeSet is just a single revision, containing what files
749
+ # were changed, who did it, and the commit message. THIS IS JUST A
750
+ # RECEIPT!!!
751
+ #
752
+ # The REASON we construct a changegroup here is because this is called
753
+ # when we push, and we push a changelog (usually bundled to conserve
754
+ # space). This is where we make that receipt, called a changegroup.
755
+ #
756
+ # 'nuff tangent, time to fucking code
757
+ generate_group = proc do
758
+ result = []
759
+ changed_files = {}
760
+
761
+ coll = changed_file_collector[changed_files]
762
+ # get the changelog's changegroups
763
+ changelog.group(nodes, identity, coll) {|chunk| result << chunk }
764
+
765
+
766
+ node_iter = gen_node_list[manifest]
767
+ look = lookup_revlink_func[manifest]
768
+ # get the manifest's changegroups
769
+ manifest.group(node_iter, look) {|chunk| result << chunk }
770
+
771
+ changed_files.keys.sort.each do |fname|
772
+ file_revlog = file fname
773
+ # warning: useless comment
774
+ if file_revlog.index_size.zero?
775
+ raise abort("empty or missing revlog for #{fname}")
776
+ end
777
+
778
+ node_list = gen_node_list[file_revlog]
779
+
780
+ if node_list.any?
781
+ result << RevlogSupport::ChangeGroup.chunk_header(fname.size)
782
+ result << fname
783
+
784
+ lookup = lookup_revlink_func[file_revlog] # Proc#call
785
+ # more changegroups
786
+ file_revlog.group(node_list, lookup) {|chunk| result << chunk }
787
+ end
788
+ end
789
+ result << RevlogSupport::ChangeGroup.closing_chunk
790
+
791
+ run_hook :post_outgoing, :node => nodes[0].hexlify, :source => source
792
+
793
+ result
794
+ end
795
+
796
+ s = StringIO.new "",(ruby_19? ? "w+:ASCII-8BIT" : "w+")
797
+ generate_group[].each {|chunk| s.write chunk }
798
+ s.rewind
799
+ s
800
+ end
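+
+ # Illustrative sketch (assumes `repo` is an open local repository and
+ # `remote` is a peer; both names are hypothetical):
+ #
+ # @example Bundle everything the remote side is missing
+ # common, _missing, _remote_heads = repo.common_nodes(remote)
+ # bundle_io = repo.get_changegroup(common, :push) # => rewound StringIO
+ # raw_changegroup = bundle_io.read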
801
+
802
+ ##
803
+ # This function generates a changegroup consisting of all the nodes
804
+ # that are descendants of any of the bases, and ancestors of any of
805
+ # the heads.
806
+ #
807
+ # It is fairly complex, as determining which filenodes and which
808
+ # manifest nodes need to be included for the changeset to be complete
809
+ # is non-trivial.
810
+ #
811
+ # Another wrinkle is doing the reverse, figuring out which changeset in
812
+ # the changegroup a particular filenode or manifestnode belongs to.
813
+ #
814
+ # The caller can specify some nodes that must be included in the
815
+ # changegroup using the extranodes argument. It should be a dict
816
+ # where the keys are the filenames (or 1 for the manifest), and the
817
+ # values are lists of (node, linknode) tuples, where node is a wanted
818
+ # node and linknode is the changelog node that should be transmitted as
819
+ # the linkrev.
820
+ #
821
+ # MAD SHOUTZ to Eric Hopper, who actually had the balls to document a
822
+ # good chunk of this code in the Python. He is a really great man, and
823
+ # deserves whatever thanks we can give him. *Peace*
824
+ #
825
+ # @param [String => [(String, String)]] extra_nodes the key is a filename
826
+ # and the value is a list of (node, link_node) tuples
827
+ def changegroup_subset(bases, new_heads, source, extra_nodes=nil)
828
+ unless extra_nodes
829
+ if new_heads.sort == heads.sort
830
+ common = []
831
+
832
+ # parents of bases are known from both sides
833
+ bases.each do |base|
834
+ changelog.parents_for_node(base).each do |parent|
835
+ common << parent unless parent.null? # == NULL_ID
836
+ end # end each
837
+ end # end each
838
+
839
+ # BAIL
840
+ return get_changegroup(common, source)
841
+ end # end if
842
+ end # end unless
843
+
844
+ run_hook :pre_outgoing, :throw => true, :source => source # call dem hooks
845
+
846
+
847
+ # missing changelog list, bases, and heads
848
+ #
849
+ # Some bases may turn out to be superfluous, and some heads may be as
850
+ # well. #nodes_between will return the minimal set of bases and heads
851
+ # necessary to recreate the changegroup.
852
+ # missing_cl_list, bases, heads = changelog.nodes_between(bases, heads)
853
+ btw = changelog.nodes_between(bases, heads)
854
+ missing_cl_list, bases, heads = btw[:between], btw[:roots], btw[:heads]
855
+ changegroup_info missing_cl_list, source
856
+
857
+ # Known heads are the list of heads about which it is assumed the recipient
858
+ # of this changegroup will know.
859
+ known_heads = []
860
+
861
+ # We assume that all parents of bases are known heads.
862
+ bases.each do |base|
863
+ changelog.parents_for_node(base).each do |parent|
864
+ known_heads << parent
865
+ end # end each
866
+ end # end each
867
+
868
+ if known_heads.any? # unless known_heads.empty?
869
+ # Now that we know what heads are known, we can compute which
870
+ # changesets are known. The recipient must know about all
871
+ # changesets required to reach the known heads from the null
872
+ # changeset.
873
+ has_cl_set = changelog.nodes_between(nil, known_heads)[:between]
874
+
875
+ # cast to a hash for later use
876
+ has_cl_set = Hash.with_keys has_cl_set
877
+ else
878
+ # If there were no known heads, the recipient cannot be assumed to
879
+ # know about any changesets.
880
+ has_cl_set = {}
881
+ end
882
+
883
+ # We don't know which manifests are missing yet
884
+ missing_mf_set = {}
885
+ # Nor do we know which filenodes are missing.
886
+ missing_fn_set = {}
887
+
888
+ ########
889
+ # Here are procs for further usage
890
+
891
+ # A changeset always belongs to itself, so the changenode lookup
892
+ # function for a changenode is +identity+
893
+ identity = proc {|x| x }
894
+
895
+ # A function generating function. Sets up an environment for the
896
+ # inner function.
897
+ cmp_by_rev_function = proc do |rvlg|
898
+ # Compare two nodes by their revision number in the environment's
899
+ # revision history. Since the revision number both represents the
900
+ # most efficient order to read the nodes in, and represents a
901
+ # topological sorting of the nodes, this function is often useful.
902
+ proc {|a, b| rvlg.rev(a) <=> rvlg.rev(b) }
903
+ end
904
+
905
+ # If we determine that a particular file or manifest node must be a
906
+ # node that the recipient of the changegroup will already have, we can
907
+ # also assume the recipient will have all the parents. This function
908
+ # prunes them from the set of missing nodes.
909
+ prune_parents = proc do |rvlg, hasses, missing|
910
+ has_list = hasses.keys
911
+ has_list.sort!(&cmp_by_rev_function[rvlg])
912
+
913
+ has_list.each do |node|
914
+ parent_list = rvlg.parents_for_node(node).select {|p| p.not_null? }
915
+
916
+ while parent_list.any?
917
+ n = parent_list.pop
918
+ unless hasses.include? n
919
+ hasses[n] = 1
920
+ p = rvlg.parents_for_node(n).select {|par| par.not_null? }
921
+ parent_list += p
922
+ end
923
+ end
924
+ end
925
+
926
+ hasses.keys.each do |n|
927
+ missing.delete n # pop(n, None) in the Python
928
+ end
929
+ end
930
+
931
+ # This is a function generating function used to set up an environment
932
+ # for the inner function to execute in.
933
+ manifest_and_file_collector = proc do |changed_fileset|
934
+ # This is an information gathering function that gathers
935
+ # information from each changeset node that goes out as part of
936
+ # the changegroup. The information gathered is a list of which
937
+ # manifest nodes are potentially required (the recipient may already
938
+ # have them) and total list of all files which were changed in any
939
+ # changeset in the changegroup.
940
+ #
941
+ # We also remember the first changenode that referenced each manifest,
942
+ # so we can later determine which changenode owns
943
+ # the manifest.
944
+
945
+ # this is what we're returning
946
+ proc do |cl_node|
947
+ c = changelog.read cl_node
948
+ c[3].each do |f|
949
+ # This is to make sure we only have one instance of each
950
+ # filename string for each filename
951
+ changed_fileset[f] ||= f
952
+ end # end each
953
+
954
+ missing_mf_set[c[0]] ||= cl_node
955
+ end # end proc
956
+ end # end proc
957
+
958
+ # Figure out which manifest nodes (of the ones we think might be part
959
+ # of the changegroup) the recipients must know about and remove them
960
+ # from the changegroup.
961
+ prune_manifest = proc do
962
+ has_mnfst_set = {}
963
+ missing_mf_set.values.each do |node|
964
+ # If a 'missing' manifest thinks it belongs to a changenode
965
+ # the recipient is assumed to have, obviously the recipient
966
+ # must have the manifest.
967
+ link_node = changelog.node manifest.link_rev(manifest.revision_index_for_node(node))
968
+ has_mnfst_set[node] = 1 if has_cl_set.include? link_node
969
+ end # end each
970
+
971
+ prune_parents[manifest, has_mnfst_set, missing_mf_set] # Proc#call
972
+ end # end proc
973
+
974
+ # Use the information collected in collect_manifests_and_files to say
975
+ # which changenode any manifestnode belongs to.
976
+ lookup_manifest_link = proc {|node| missing_mf_set[node] }
977
+
978
+ # A function generating function that sets up the initial environment
979
+ # for the inner function.
980
+ filenode_collector = proc do |changed_files|
981
+ next_rev = []
982
+
983
+ # This gathers information from each manifestnode included in the
984
+ # changegroup about which filenodes the manifest node references
985
+ # so we can include those in the changegroup too.
986
+ #
987
+ # It also remembers which changenode each filenode belongs to. It
988
+ # does this by assuming that a filenode belongs to the changenode
989
+ # the first manifest that references it belongs to.
990
+ collect_missing_filenodes = proc do |node|
991
+ r = manifest.rev node
992
+
993
+ if r == next_rev[0]
994
+
995
+ # If the last rev we looked at was the one just previous,
996
+ # we only need to see a diff.
997
+ delta_manifest = manifest.read_delta node
998
+
999
+ # For each line in the delta
1000
+ delta_manifest.each do |f, fnode|
1001
+ f = changed_files[f]
1002
+
1003
+ # And if the file is in the list of files we care
1004
+ # about.
1005
+ if f
1006
+ # Get the changenode this manifest belongs to
1007
+ cl_node = missing_mf_set[node]
1008
+
1009
+ # Create the set of filenodes for the file if
1010
+ # there isn't one already.
1011
+ ndset = missing_fn_set[f] ||= {}
1012
+
1013
+ # And set the filenode's changelog node to the
1014
+ # manifest's if it hasn't been set already.
1015
+ ndset[fnode] ||= cl_node
1016
+ end
1017
+ end
1018
+ else
1019
+ # Otherwise we need a full manifest.
1020
+ m = manifest.read node
1021
+
1022
+ # For every file we care about.
1023
+ changed_files.keys.each do |f|
1024
+ fnode = m[f]
1025
+
1026
+ # If it's in the manifest
1027
+ if fnode
1028
+ # See comments above.
1029
+ cl_node = missing_mf_set[node]
1030
+ ndset = missing_fn_set[f] ||= {}
1031
+ ndset[fnode] ||= cl_node
1032
+ end
1033
+ end
1034
+ end
1035
+
1036
+ # Remember the revision we hope to see next.
1037
+ next_rev[0] = r + 1
1038
+ end # end proc
1039
+ end # end proc
1040
+
1041
+ # We have a list of filenodes we think we need for a file; let's remove
1042
+ # all those we know the recipient must have.
1043
+ prune_filenodes = proc do |f, f_revlog|
1044
+ missing_set = missing_fn_set[f]
1045
+ hasset = {}
1046
+
1047
+ # If a 'missing' filenode thinks it belongs to a changenode we
1048
+ # assume the recipient must have, then the recipient must have
1049
+ # that filenode.
1050
+ missing_set.keys.each do |n|
1051
+ cl_node = changelog.node f_revlog[n].link_rev
1052
+ hasset[n] = true if has_cl_set.include? cl_node
1053
+ end
1054
+
1055
+ prune_parents[f_revlog, hasset, missing_set] # Proc#call
1056
+ end # end proc
1057
+
1058
+ # Function that returns a function.
1059
+ lookup_filenode_link_func = proc do |name|
1060
+ missing_set = missing_fn_set[name]
1061
+
1062
+ # lookup the changenode the filenode belongs to
1063
+ lookup_filenode_link = proc do |node|
1064
+ missing_set[node]
1065
+ end # end proc
1066
+ end # end proc
1067
+
1068
+ # add the nodes that were explicitly requested.
1069
+ add_extra_nodes = proc do |name, nodes|
1070
+ next unless extra_nodes && extra_nodes[name]
1071
+
1072
+ extra_nodes[name].each do |node, link_node|
1073
+ nodes[node] = link_node unless nodes[node]
1074
+ end
1075
+
1076
+ end
1077
+
1078
+ # Now that we have all theses utility functions to help out and
1079
+ # logically divide up the task, generate the group.
1080
+ generate_group = proc do |writer|
1081
+ changed_files = {}
1082
+ group = changelog.group(missing_cl_list, identity, &manifest_and_file_collector[changed_files])
1083
+ group.each { |chunk| writer.call chunk }
1084
+ prune_manifest.call
1085
+ add_extra_nodes[1, missing_mf_set]
1086
+ msng_mnfst_lst = missing_mf_set.keys
1087
+
1088
+ msng_mnfst_lst.sort!(&cmp_by_rev_function[manifest])
1089
+
1090
+ group = manifest.group(msng_mnfst_lst, lookup_manifest_link,
1091
+ filenode_collector[changed_files])
1092
+
1093
+ group.each {|chunk| writer.call chunk }
1094
+
1095
+ msng_mnfst_lst = nil
1096
+ missing_mf_set.clear
1097
+
1098
+ if extra_nodes
1099
+ extra_nodes.keys.each do |fname|
1100
+ next if fname.kind_of?(Integer)
1101
+ missing_fn_set[fname] ||= {}
1102
+ changed_files[fname] = true
1103
+ end
1104
+ end
1105
+
1106
+ changed_files.keys.sort.each do |fname|
1107
+ file_revlog = file(fname)
1108
+ unless file_revlog.size > 0
1109
+ raise abort("empty or missing revlog for #{fname}")
1110
+ end
1111
+
1112
+ if missing_fn_set[fname]
1113
+ prune_filenodes[fname, file_revlog]
1114
+ add_extra_nodes[fname, missing_fn_set[fname]]
1115
+ missing_fn_list = missing_fn_set[fname].keys
1116
+ else
1117
+ missing_fn_list = []
1118
+ end
1119
+
1120
+ if missing_fn_list.size > 0
1121
+ writer.call ChangeGroup.chunk_header(fname.size)
1122
+ writer.call fname
1123
+ missing_fn_list.sort!(&cmp_by_rev_function[file_revlog])
1124
+ group = file_revlog.group(missing_fn_list,
1125
+ lookup_filenode_link_func[fname])
1126
+ group.each {|chunk| writer.call chunk }
1127
+ end
1128
+ if missing_fn_set[fname]
1129
+ missing_fn_set.delete fname
1130
+ end
1131
+ end
1132
+
1133
+ writer.call ChangeGroup.close_chunk
1134
+
1135
+ if missing_cl_list
1136
+ run_hook :post_outgoing
1137
+ end
1138
+ end # end proc
1139
+
1140
+ s = StringIO.new "",(ruby_19? ? "w+:ASCII-8BIT" : "w+")
1141
+ writer = proc {|chunk| s.write chunk }
1142
+ generate_group.call writer
1143
1144
+ s.seek(0, IO::SEEK_SET)
1145
+ s
1146
+ end # end def
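+
+ # Illustrative sketch (assumes `repo` is an open local repository; `bases`
+ # and `wanted_heads` are hypothetical node IDs):
+ #
+ # @example Build a changegroup between known bases and the heads to send
+ # cg_io = repo.changegroup_subset(bases, wanted_heads, :push)
+ # cg_io.read # changelog chunks, then manifest chunks, then filelog chunks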
1147
+
1148
+ ##
1149
+ # Revert a file or group of files to +revision+. If +opts[:unlink]+
1150
+ # is true, then the files
1151
+ #
1152
+ # @param [Array<String>] files a list of files to revert
1153
+ # @return [Boolean] a success marker
1154
+ def revert(files, opts={})
1155
+ # get the parents - used in checking if we have an uncommitted merge
1156
+ parent, p2 = dirstate.parents
1157
+
1158
+ # get the revision
1159
+ rev = opts[:revision] || opts[:rev] || opts[:to]
1160
+
1161
+ # check to make sure it's logically possible
1162
+ unless rev || p2 == RevlogSupport::Node::NULL_ID
1163
+ raise abort("uncommitted merge - please provide a specific revision")
1164
+ end
1165
+
1166
+ # if we have anything here, then create a matcher
1167
+ matcher = if files
1168
+ Amp::Match.create :files => files ,
1169
+ :includer => opts[:include],
1170
+ :excluder => opts[:exclude]
1171
+ else
1172
+ # else just return nil
1173
+ # we can return nil because when it gets used in :match => matcher,
1174
+ # it will be as though it's not even there
1175
+ nil
1176
+ end
1177
+
1178
+ # the changeset we use as a guide
1179
+ changeset = self[rev]
1180
+
1181
+ # get the files that need to be changed
1182
+ stats = status :node_1 => rev, :match => matcher
1183
+
1184
+ ###
1185
+ # now make the changes
1186
+ ###
1187
+
1188
+ ##########
1189
+ # MODIFIED and DELETED
1190
+ ##########
1191
+ # Just write the old data to the files
1192
+ (stats[:modified] + stats[:deleted]).each do |path|
1193
+ File.open path, 'w' do |file|
1194
+ file.write changeset.get_file(path).data
1195
+ end
1196
+ UI::status "restored\t#{path}"
1197
+ end
1198
+
1199
+ ##########
1200
+ # REMOVED
1201
+ ##########
1202
+ # these files are set to be removed, and have thus far been dropped from the filesystem
1203
+ # we restore them and we alert the repo
1204
+ stats[:removed].each do |path|
1205
+ File.open path, 'w' do |file|
1206
+ file.write changeset.get_file(path).data
1207
+ end
1208
+
1209
+ dirstate.normal path # pretend nothing happened
1210
+ UI::status "saved\t#{path}"
1211
+ end
1212
+
1213
+ ##########
1214
+ # ADDED
1215
+ ##########
1216
+ # these files have been added SINCE +rev+
1217
+ stats[:added].each do |path|
1218
+ remove path
1219
+ UI::status "destroyed\t#{path}"
1220
+ end # pretend these files were never even there
1221
+
1222
+ true # success marker
1223
+ end
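+
+ # Illustrative sketch (assumes `repo` is an open local repository; the
+ # paths and revision are hypothetical):
+ #
+ # @example Restore two files to their state as of revision 42
+ # repo.revert ["lib/foo.rb", "README.md"], :revision => 42
+ # # :rev and :to are accepted as aliases for :revision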
1224
+
1225
+ # Return list of roots of the subsets of missing nodes from remote
1226
+ #
1227
+ # If base dict is specified, assume that these nodes and their parents
1228
+ # exist on the remote side and that no child of a node of base exists
1229
+ # in both remote and self.
1230
+ # Furthermore, base will be updated to include the nodes that exist
1231
+ # in self and remote but whose children do not exist in self and remote.
1232
+ # If a list of heads is specified, return only nodes which are heads
1233
+ # or ancestors of these heads.
1234
+ #
1235
+ # All the ancestors of base are in self and in remote.
1236
+ # All the descendants of the list returned are missing in self.
1237
+ # (and so we know that the rest of the nodes are missing in remote, see
1238
+ # outgoing)
1239
+ def find_incoming_roots(remote, opts={:base => nil, :heads => nil,
1240
+ :force => false})
1241
+ common_nodes(remote, opts)[1]
1242
+ end
1243
+
1244
+ ##
1245
+ # Find the common nodes, missing nodes, and remote heads.
1246
+ #
1247
+ # So in this code, we use opts[:base] and fetch as hashes
1248
+ # instead of arrays. We could very well use arrays, but hashes have
1249
+ # O(1) lookup time, and since these could get RFH (Really Fucking
1250
+ # Huge), we decided to take the liberty and just use hash for now.
1251
+ #
1252
+ # If opts[:base] (Hash) is specified, assume that these nodes and their parents
1253
+ # exist on the remote side and that no child of a node of base exists
1254
+ # in both remote and self.
1255
+ # Furthermore, base will be updated to include the nodes that exist
1256
+ # in self and remote but whose children do not exist in self and remote.
1257
+ # If a list of heads is specified, return only nodes which are heads
1258
+ # or ancestors of these heads.
1259
+ #
1260
+ # All the ancestors of base are in self and in remote.
1261
+ #
1262
+ # @param [Amp::Repository] remote the repository we're pulling from
1263
+ # @return [(Array<String>, Array<String>, Array<String>)] the common nodes,
1264
+ # missing nodes, and remote heads
1265
+ def common_nodes(remote, opts={:heads => nil, :force => nil, :base => nil})
1266
+ # variable prep!
1267
+ node_map = changelog.node_map
1268
+ search = []
1269
+ unknown = []
1270
+ fetch = {}
1271
+ seen = {}
1272
+ seen_branch = {}
1273
+ opts[:base] ||= {}
1274
+ opts[:heads] ||= remote.heads
1275
+
1276
+ # if we've got nothing...
1277
+ if changelog.tip == NULL_ID
1278
+ opts[:base][NULL_ID] = true # 1 is stored in the Python
1279
+
1280
+ return [NULL_ID], [NULL_ID], opts[:heads].dup unless opts[:heads] == [NULL_ID]
1281
+ return [NULL_ID], [], [] # if we didn't trip ^, we're returning this
1282
+ end
1283
+
1284
+ # assume we're closer to the tip than the root
1285
+ # and start by examining heads
1286
+ UI::status 'searching for changes'
1287
+
1288
+ opts[:heads].each do |head|
1289
+ if !node_map.include?(head)
1290
+ unknown << head
1291
+ else
1292
+ opts[:base][head] = true # 1 is stored in the Python
1293
+ end
1294
+ end
1295
+
1296
+ opts[:heads] = unknown # the ol' switcheroo
1297
+ return opts[:base].keys, [], [] if unknown.empty? # BAIL
1298
+
1299
+ # make a hash with keys of unknown
1300
+ requests = Hash.with_keys unknown
1301
+ count = 0
1302
+ # Search through the remote branches
1303
+ # a branch here is a linear part of history, with 4 (four)
1304
+ # parts:
1305
+ #
1306
+ # head, root, first parent, second parent
1307
+ # (a branch always has two parents (or none) by definition)
1308
+ #
1309
+ # Here's where we start using the Hashes instead of Arrays
1310
+ # trick. Keep an eye out for opts[:base] and opts[:heads]!
1311
+ unknown = remote.branches(*unknown)
1312
+ until unknown.empty?
1313
+ r = []
1314
+
1315
+ while node = unknown.shift
1316
+ next if seen.include?(node[0])
1317
+ UI::debug "examining #{short node[0]}:#{short node[1]}"
1318
+
1319
+ if node[0] == NULL_ID
1320
+ # Do nothing...
1321
+ elsif seen_branch.include? node
1322
+ UI::debug 'branch already found'
1323
+ next
1324
+ elsif node_map.include? node[1]
1325
+ UI::debug "found incomplete branch #{short node[0]}:#{short node[1]}"
1326
+ search << node[0..1]
1327
+ seen_branch[node] = true # 1 in the python
1328
+ else
1329
+ unless seen.include?(node[1]) || fetch.include?(node[1])
1330
+ if node_map.include?(node[2]) and node_map.include?(node[3])
1331
+ UI::debug "found new changset #{short node[1]}"
1332
+ fetch[node[1]] = true # 1 in the python
1333
+ end # end if
1334
+
1335
+ node[2..3].each do |p|
1336
+ opts[:base][p] = true if node_map.include? p
1337
+ end
1338
+ end # end unless
1339
+
1340
+ node[2..3].each do |p|
1341
+ unless requests.include?(p) || node_map.include?(p)
1342
+ r << p
1343
+ requests[p] = true # 1 in the python
1344
+ end # end unless
1345
+ end # end each
1346
+ end # end if
1347
+
1348
+ seen[node[0]] = true # 1 in the python
1349
+ end # end while
1350
+
1351
+ unless r.empty?
1352
+ count += 1
1353
+
1354
+ UI::debug "request #{count}: #{r.map{|i| short i }}"
1355
+
1356
+ (0..(r.size-1)).step(10) do |p|
1357
+ remote.branches(r[p..(p+9)]).each do |b|
1358
+ UI::debug "received #{short b[0]}:#{short b[1]}"
1359
+ unknown << b
1360
+ end
1361
+ end
1362
+ end # end unless
1363
+ end # end until
1364
+
1365
+ # sorry for the ambiguous variable names
1366
+ # the python doesn't name them either, which
1367
+ # means I have no clue what these are
1368
+ find_proc = proc do |item1, item2|
1369
+ fetch[item1] = true
1370
+ opts[:base][item2] = true
1371
+ end
1372
+
1373
+ # do a binary search on the branches we found
1374
+ search, new_count = *binary_search(:find => search,
1375
+ :repo => remote,
1376
+ :node_map => node_map,
1377
+ :on_find => find_proc)
1378
+ count += new_count # keep keeping track of the total
1379
+
1380
+ # sanity check, because this method is sooooo fucking long
1381
+ fetch.keys.each do |f|
1382
+ if node_map.include? f
1383
+ raise RepoError.new("already have changeset #{short f[0..3]}")
1384
+ end
1385
+ end
1386
+
1387
+ if opts[:base].keys == [NULL_ID]
1388
+ if opts[:force]
1389
+ UI::warn 'repository is unrelated'
1390
+ else
1391
+ raise RepoError.new('repository is unrelated')
1392
+ end
1393
+ end
1394
+
1395
+ UI::debug "found new changesets starting at #{fetch.keys.map{|f| short f }.join ' '}"
1396
+ UI::debug "#{count} total queries"
1397
+
1398
+ # on with the show!
1399
+ [opts[:base].keys, fetch.keys, opts[:heads]]
1400
+ end
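+
+ # Illustrative sketch (assumes `repo` is an open local repository and
+ # `remote` is a hypothetical peer):
+ #
+ # @example See what a pull would have to fetch
+ # common, missing, remote_heads = repo.common_nodes(remote)
+ # # common - nodes known to both sides
+ # # missing - roots of the changesets we still need
+ # # remote_heads - the remote heads that were examined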
1401
+
1402
+ ##
1403
+ # Call the hooks that run under +call+
1404
+ #
1405
+ # @param [Symbol] call the location in the system where the hooks
1406
+ # are to be called
1407
+ def run_hook(call, opts={:throw => false})
1408
+ Hook.run_hook(call, opts)
1409
+ end
1410
+
1411
+ ##
1412
+ # Adds a list of file paths to the repository for the next commit.
1413
+ #
1414
+ # @param [String, Array<String>] paths the paths of the files we need to
1415
+ # add to the next commit
1416
+ # @return [Array<String>] which files WEREN'T added
1417
+ def add(*paths)
1418
+ lock_working do
1419
+ rejected = []
1420
+ paths.flatten!
1421
+
1422
+ paths.each do |file|
1423
+ path = working_join file
1424
+
1425
+ st = File.lstat(path) rescue nil
1426
+
1427
+ unless st
1428
+ UI.warn "#{file} does not exist!"
1429
+ rejected << file
1430
+ next
1431
+ end
1432
+
1433
+ if st.size > 10.mb
1434
+ UI.warn "#{file}: files over 10MB may cause memory and" +
1435
+ "performance problems\n" +
1436
+ "(use 'amp revert #{file}' to unadd the file)\n"
1437
+ end
1438
+
1439
+
1440
+ state = dirstate[file]
1441
+
1442
+
1443
+ if File.ftype(path) != 'file' && File.ftype(path) != 'link'
1444
+ # fail if it's not a file or link
1445
+ UI.warn "#{file} not added: only files and symlinks supported. Type is #{File.ftype path}"
1446
+ rejected << path
1447
+ elsif state.added? || state.modified? || state.normal?
1448
+ # fail if it's being tracked
1449
+ UI.warn "#{file} already tracked!"
1450
+ elsif state.removed?
1451
+ # check back on it if it's being removed
1452
+ dirstate.normal_lookup file
1453
+ else
1454
+ # else add it
1455
+ dirstate.add file
1456
+ #Amp::Logger.info("added #{file}")
1457
+ end
1458
+ end
1459
+
1460
+ dirstate.write unless rejected.size == paths.size
1461
+ return rejected
1462
+ end
1463
+ end
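+
+ # Illustrative sketch (assumes `repo` is an open local repository; the
+ # paths are hypothetical):
+ #
+ # @example Schedule files for the next commit
+ # rejected = repo.add "lib/foo.rb", "docs/guide.md"
+ # UI.warn "not added: #{rejected.join(', ')}" if rejected.any?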
1464
+
1465
+ ##
1466
+ # Returns the number of revisions the repository is tracking.
1467
+ #
1468
+ # @return [Integer] how many revisions there have been
1469
+ def size
1470
+ changelog.size
1471
+ end
1472
+
1473
+ ##
1474
+ # Forgets an added file or files from the repository. Doesn't delete the
1475
+ # files, it just says "don't add this on the next commit."
1476
+ #
1477
+ # @param [Array, String] list a file path (or list of file paths) to
1478
+ # "forget".
1479
+ # @return [Boolean] success marker
1480
+ def forget(list)
1481
+ lock_working do
1482
+ list = [*list]
1483
+
1484
+ successful = list.map do |f|
1485
+ if dirstate[f].status != :added
1486
+ UI.warn "#{f} not being added! can't forget it"
1487
+ false
1488
+ else
1489
+ dirstate.forget f
1490
+ true
1491
+ end
1492
+ end.any?
1493
+
1494
+ dirstate.write if successful
1495
+ end
1496
+
1497
+ true
1498
+ end
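+
+ # Illustrative sketch (assumes `repo` is an open local repository; the
+ # path is hypothetical):
+ #
+ # @example Un-schedule a freshly added file without touching it on disk
+ # repo.add "notes.txt"
+ # repo.forget "notes.txt" # the file stays in the working directory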
1499
+
1500
+ ##
1501
+ # Removes the file (or files) from the repository. Marks them as removed
1502
+ # in the DirState, and if the :unlink option is provided, the files are
1503
+ # deleted from the filesystem.
1504
+ #
1505
+ # @param list the list of files. Could also just be 1 file as a string.
1506
+ # should be paths.
1507
+ # @param opts the options for this removal.
1508
+ # @option [Boolean] opts :unlink (false) whether or not to delete the
1509
+ # files from the filesystem after marking them as removed from the
1510
+ # DirState.
1511
+ # @return [Boolean] success?
1512
+ def remove(list, opts={})
1513
+ list = [*list]
1514
+
1515
+ # Should we delete the filez?
1516
+ if opts[:unlink]
1517
+ list.each do |f|
1518
+ ignore_missing_files do
1519
+ FileUtils.safe_unlink working_join(f)
1520
+ end
1521
+ end
1522
+ end
1523
+
1524
+ lock_working do
1525
+ # Save ourselves a dirstate write
1526
+ successful = list.map do |f|
1527
+ if opts[:unlink] && File.exists?(working_join(f))
1528
+ # Uh, why is the file still there? Don't remove it from the dirstate
1529
+ UI.warn("#{f} still exists!")
1530
+ false # no success
1531
+ elsif dirstate[f].added?
1532
+ # Is it already added? if so, forgettaboutit
1533
+ dirstate.forget f
1534
+ #Amp::Logger.info("forgot #{f}")
1535
+ true # success!
1536
+ elsif !dirstate.tracking?(f)
1537
+ # Are we not even tracking this file? dumbass
1538
+ UI.warn("#{f} not being tracked!")
1539
+ false # no success
1540
+ else
1541
+ # Woooo we can delete it
1542
+ dirstate.remove f
1543
+ #Amp::Logger.info("removed #{f}")
1544
+ true
1545
+ end
1546
+ end.any?
1547
+
1548
+ # Write 'em out boss
1549
+ dirstate.write if successful
1550
+ end
1551
+
1552
+ true
1553
+ end
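+
+ # Illustrative sketch (assumes `repo` is an open local repository; the
+ # paths are hypothetical):
+ #
+ # @example Mark files as removed and delete them from disk
+ # repo.remove ["lib/old_api.rb", "TODO.old"], :unlink => true
+ # @example Mark a file as removed but keep the working copy
+ # repo.remove "lib/old_api.rb"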
1554
+
1555
+ ##
1556
+ # Returns the parents that aren't NULL_ID
1557
+ def living_parents
1558
+ dirstate.parents.select {|p| p != NULL_ID }
1559
+ end
1560
+
1561
+ ##
1562
+ # There are two ways to push to remote repo:
1563
+ #
1564
+ # addchangegroup assumes local user can lock remote
1565
+ # repo (local filesystem, old ssh servers).
1566
+ #
1567
+ # unbundle assumes local user cannot lock remote repo (new ssh
1568
+ # servers, http servers).
1569
+ def push(remote_repo, opts={:force => false, :revs => nil})
1570
+ if remote_repo.capable? "unbundle"
1571
+ push_unbundle remote_repo, opts
1572
+ else
1573
+ push_add_changegroup remote_repo, opts
1574
+ end
1575
+ end
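+
+ # Illustrative sketch (assumes `repo` is an open local repository and
+ # `remote` is a hypothetical peer; the code path taken depends on whether
+ # the peer advertises the "unbundle" capability):
+ #
+ # @example
+ # repo.push remote
+ # repo.push remote, :force => true, :revs => nil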
1576
+
1577
+ ##
1578
+ # Push and add a changegroup
1579
+ # @todo -- add default values for +opts+
1580
+ def push_add_changegroup(remote, opts={})
1581
+ # no locking cuz we rockz
1582
+ ret = pre_push remote, opts
1583
+
1584
+ if ret[0]
1585
+ cg, remote_heads = *ret
1586
+ remote.add_changegroup cg, :push, url
1587
+ else
1588
+ ret[1]
1589
+ end
1590
+ end
1591
+
1592
+ ##
1593
+ # Push an unbundled dohickey
1594
+ # @todo -- add default values for +opts+
1595
+ def push_unbundle(remote, opts={})
1596
+ # local repo finds heads on server, finds out what revs it
1597
+ # must push. once revs transferred, if server finds it has
1598
+ # different heads (someone else won commit/push race), server
1599
+ # aborts.
1600
+
1601
+ ret = pre_push remote, opts
1602
+
1603
+ if ret[0]
1604
+ cg, remote_heads = *ret
1605
+ remote_heads = ['force'] if opts[:force]
1606
+ remote.unbundle cg, remote_heads, :push
1607
+ else
1608
+ ret[1]
1609
+ end
1610
+ end
1611
+
1612
+ ##
1613
+ # Return list of nodes that are roots of subsets not in remote
1614
+ #
1615
+ # If base dict is specified, assume that these nodes and their parents
1616
+ # exist on the remote side.
1617
+ # If a list of heads is specified, return only nodes which are heads
1618
+ # or ancestors of these heads, and return a second element which
1619
+ # contains all remote heads which get new children.
1620
+ def find_outgoing_roots(remote, opts={:base => nil, :heads => nil, :force => false})
1621
+ base, heads, force = opts[:base], opts[:heads], opts[:force]
1622
+ if base.nil?
1623
+ base = {}
1624
+ find_incoming_roots remote, :base => base, :heads => heads, :force => force
1625
+ end
1626
+
1627
+ UI::debug("common changesets up to "+base.keys.map {|k| k.short_hex}.join(" "))
1628
+
1629
+ remain = Hash.with_keys changelog.node_map.keys, nil
1630
+
1631
+ # prune everything remote has from the tree
1632
+ remain.delete NULL_ID
1633
+ remove = base.keys
1634
+ while remove.any?
1635
+ node = remove.shift
1636
+ if remain.include? node
1637
+ remain.delete node
1638
+ changelog.parents_for_node(node).each {|p| remove << p }
1639
+ end
1640
+ end
1641
+
1642
+ # find every node whose parents have been pruned
1643
+ subset = []
1644
+ # find every remote head that will get new children
1645
+ updated_heads = {}
1646
+ remain.keys.each do |n|
1647
+ p1, p2 = changelog.parents_for_node n
1648
+ subset << n unless remain.include?(p1) || remain.include?(p2)
1649
+ if heads && heads.any?
1650
+ updated_heads[p1] = true if heads.include? p1
1651
+ updated_heads[p2] = true if heads.include? p2
1652
+ end
1653
+ end
1654
+
1655
+ # this is the set of all roots we have to push
1656
+ if heads && heads.any?
1657
+ return subset, updated_heads.keys
1658
+ else
1659
+ return subset
1660
+ end
1661
+ end
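+
+ # Illustrative sketch (assumes `repo` is an open local repository and
+ # `remote` is a hypothetical peer):
+ #
+ # @example Preview what an outgoing push would send
+ # roots = repo.find_outgoing_roots(remote)
+ # roots, updated = repo.find_outgoing_roots(remote, :heads => remote.heads)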
1662
+
1663
+ ##
1664
+ # The branches available in this repository.
1665
+ #
1666
+ # @param [Array<String>] nodes the list of nodes. this can be optionally left empty
1667
+ # @return [Array<String>] the branches, active and inactive!
1668
+ def branches(*nodes)
1669
+ branches = []
1670
+ nodes = [changelog.tip] if nodes.empty?
1671
+
1672
+ # for each node, find its first parent (adam and eve, basically)
1673
+ # -- that's our branch!
1674
+ nodes.each do |node|
1675
+ t = node
1676
+ # traverse the tree, staying to the left side
1677
+ # node
1678
+ # / \
1679
+ # parent1 parent2
1680
+ # .... ....
1681
+ # This will get us the first parent. When it's finally NULL_ID,
1682
+ # we have a root -- this is the basis for our branch.
1683
+ loop do
1684
+ parents = changelog.parents_for_node t
1685
+ if parents[1] != NULL_ID || parents[0] == NULL_ID
1686
+ branches << [node, t, *parents]
1687
+ break
1688
+ end
1689
+ t = parents.first # get the first parent and start again
1690
+ end
1691
+ end
1692
+
1693
+ branches
1694
+ end
1695
+
1696
+ ##
1697
+ # Copies a file from +source+ to +destination+, while being careful of the
1698
+ # specified options. This method will perform all necessary file manipulation
1699
+ # and dirstate changes and so forth. Just give 'er a source and a destination.
1700
+ #
1701
+ # @param [String] source the path to the source file
1702
+ # @param [String] destination the path to the destination file
1703
+ # @param [Hash] opts the options for the copy
1704
+ # @option [Boolean] opts :after (false) should the file be deleted?
1705
+ # @return [Boolean] success?
1706
+ def copy(source, destination, opts)
1707
+ # Traverse repository subdirectories
1708
+ src = relative_join source
1709
+ target = relative_join destination
1710
+
1711
+ # Is there a tracked file at our destination? If so, get its state.
1712
+ state = dirstate[target].status
1713
+ # abstarget is the full path to the target. Needed for system calls
1714
+ # (just to be safe)
1715
+ abstarget = working_join target
1716
+
1717
+ # If true, we're copying into a directory, so be smart about it.
1718
+ if File.directory? abstarget
1719
+ abstarget = File.join abstarget, File.basename(src)
1720
+ target = File.join target, File.basename(src)
1721
+ end
1722
+ abssrc = working_join(src)
1723
+
1724
+
1725
+ exists = File.exist? abstarget
1726
+ # If the file's there, and we aren't forcing the copy, then we should let
1727
+ # the user know they might overwrite an existing file in the repo.
1728
+ if (!opts[:after] && exists || opts[:after] && [:merged, :normal].include?(state))
1729
+ unless opts[:force]
1730
+ Amp::UI.warn "#{target} not overwriting, file exists"
1731
+ return false
1732
+ end
1733
+ end
1734
+
1735
+ return if opts[:after] && !exists
1736
+ unless opts[:"dry-run"]
1737
+ # Performs the actual file copy from one location to another.
1738
+ # Overwrites file if it's there.
1739
+ begin
1740
+ File.safe_unlink(abstarget) if exists
1741
+
1742
+ target_dir = File.dirname abstarget
1743
+ File.makedirs target_dir unless File.directory? target_dir
1744
+ File.copy(abssrc, abstarget)
1745
+ rescue Errno::ENOENT
1746
+ # This happens if the file has been deleted between the check up above
1747
+ # (exists = File.exist? abstarget) and the call to File.safe_unlink.
1748
+ Amp::UI.warn("#{target}: deleted in working copy in the last 2 microseconds")
1749
+ rescue StandardError => e
1750
+ Amp::UI.warn("#{target} - cannot copy: #{e}")
1751
+ return false
1752
+ end
1753
+ end
1754
+
1755
+ # Be nice and give the user some output
1756
+ if opts[:verbose] || opts[:"dry-run"]
1757
+ action = opts[:rename] ? "moving" : "copying"
1758
+ Amp::UI.status("#{action} #{src} to #{target}")
1759
+ end
1760
+ return false if opts[:"dry-run"]
1761
+
1762
+ # in case the source of the copy is marked as the destination of a
1763
+ # different copy (that hasn't yet been committed either), we should
1764
+ # do some extra handling
1765
+ origsrc = dirstate.copy_map[src] || src
1766
+ if target == origsrc
1767
+ # We're copying back to our original location! D'oh.
1768
+ unless [:merged, :normal].include?(state)
1769
+ dirstate.maybe_dirty target
1770
+ end
1771
+ else
1772
+ if dirstate[origsrc].added? && origsrc == src
1773
+ # are we copying an added (but uncommitted) file?
1774
+ UI.warn("#{origsrc} has not been committed yet, so no copy data " +
1775
+ "will be stored for #{target}")
1776
+ if [:untracked, :removed].include?(dirstate[target].status)
1777
+ add [target]
1778
+ end
1779
+ else
1780
+ dirstate_copy src, target
1781
+ end
1782
+ end
1783
+
1784
+ # Clean up if we're doing a move, and not a copy.
1785
+ remove([src], :unlink => !(opts[:after])) if opts[:rename]
1786
+ end
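+
+ # Illustrative sketch (assumes `repo` is an open local repository; the
+ # paths and options are hypothetical):
+ #
+ # @example Copy a tracked file and record the copy in the dirstate
+ # repo.copy "lib/foo.rb", "lib/foo_v2.rb", :verbose => true
+ # @example Rename instead (copy, then remove the source)
+ # repo.copy "lib/foo.rb", "lib/bar.rb", :rename => true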
1787
+
1788
+ ##
1789
+ # Copy a file from +source+ to +dest+. Really simple, peeps.
1790
+ # The reason this shit is even *slightly* complicated is because
1791
+ # it deals with file types. Otherwise I could write this
1792
+ # in, what, 3 lines?
1793
+ #
1794
+ # @param [String] source the from
1795
+ # @param [String] dest the to
1796
+ def dirstate_copy(source, dest)
1797
+ path = working_join dest
1798
+
1799
+ if !File.exist?(path) && !File.symlink?(path)
1800
+ UI::warn "#{dest} doesn't exist!"
1801
+ elsif not (File.ftype(path) == 'file' || File.ftype(path) == 'link')
1802
+ UI::warn "copy failed: #{dest} is neither a file nor a symlink"
1803
+ else
1804
+ lock_working do
1805
+ # HOME FREE!!!!!!! i love getting out of school before noon :-D
1806
+ # add it if it makes sense (like it was previously removed or untracked)
1807
+ # and then copy da hoe
1808
+ state = dirstate[dest].status
1809
+ dirstate.add dest if [:untracked, :removed].include?(state)
1810
+ dirstate.copy source => dest
1811
+ dirstate.write
1812
+
1813
+ #Amp::Logger.info("copy #{source} -> #{dest}")
1814
+ end
1815
+ end
1816
+ end
1817
+
1818
+ ##
1819
+ # Undelete a file. For instance, if you remove something and then
1820
+ # find out that you NEED that file, you can use this command.
1821
+ #
1822
+ # @param [[String]] list the files to be undeleted
1823
+ def undelete(list)
1824
+ manifests = living_parents.map do |p|
1825
+ manifest.read changelog.read(p).first
1826
+ end
1827
+
1828
+ # now we actually restore the files
1829
+ list.each do |file|
1830
+ unless dirstate[file].removed?
1831
+ UI.warn "#{file} isn't being removed!"
1832
+ else
1833
+ m = manifests[0] || manifests[1]
1834
+ data = self.file(file).read m[file]
1835
+ add_file file, data, m.flags(file) # add_file is wwrite in the python
1836
+ dirstate.normal file # we know it's clean, we just restored it
1837
+ end
1838
+ end
1839
+ end
1840
+ alias_method :restore, :undelete
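+
+ # Illustrative sketch (assumes `repo` is an open local repository; the
+ # path is hypothetical):
+ #
+ # @example Bring back a file that was marked as removed
+ # repo.remove "config.yml"
+ # repo.undelete ["config.yml"] # or: repo.restore ["config.yml"]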
1841
+
1842
+ ##
1843
+ # Write data to a file in the CODE repo, not the .hg
1844
+ #
1845
+ # @param [String] file_name
1846
+ # @param [String] data (no trailing newlines are appended)
1847
+ # @param [[String]] flags we're really just looking for links
1848
+ # and executables, here
1849
+ def add_file(file_name, data, flags)
1850
+ data = filter "decode", file_name, data
1851
+ path = "#{@root}/#{file_name}"
1852
+
1853
+ File.unlink path rescue nil
1854
+
1855
+ if flags.include? 'l' # if it's a link
1856
+ @file_opener.symlink path, data
1857
+ else
1858
+ @file_opener.open(path, 'w') {|f| f.write data }
1859
+ File.set_flag path, false, true if flags.include? 'x'
1860
+ end
1861
+ end
1862
+
1863
+ ##
1864
+ # Returns the node_id's of the heads of the repository.
1865
+ def heads(start=nil, options={:closed => true})
1866
+ heads = changelog.heads(start)
1867
+ should_show = lambda do |head|
1868
+ return true if options[:closed]
1869
+
1870
+ extras = changelog.read(head)[5]
1871
+ return !(extras["close"])
1872
+ end
1873
+ heads = heads.select {|h| should_show[h] }
1874
+ heads.map! {|h| [changelog.rev(h), h] }
1875
+ heads.sort! {|arr1, arr2| arr2[0] <=> arr1[0] }
1876
+ heads.map! {|r, n| n}
1877
+ end
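+
+ # Illustrative sketch (assumes `repo` is an open local repository):
+ #
+ # @example List heads, newest first
+ # repo.heads # every head, including closed branch heads
+ # repo.heads(nil, :closed => false) # skip heads whose extras mark them closed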
1878
+
1879
+ ##
1880
+ # Walk recursively through the directory tree (or a changeset)
1881
+ # finding all files matched by the match function
1882
+ #
1883
+ # @param [String, Integer] node selects which changeset to walk
1884
+ # @param [Amp::Match] match the matcher decides how to pick the files
1885
+ # @return [Array<String>] an array of filenames
1886
+ def walk(node=nil, match = Match.create({}) { true })
1887
+ self[node].walk(match) # calls Changeset#walk
1888
+ end
1889
+
1890
+ ##
1891
+ # Returns the requested file at the given revision annotated by
1892
+ # line number, so you can see who committed which lines in the file's
1893
+ # history.
1894
+ #
1895
+ # @param file The name of the file to annotate
1896
+ # @param [Integer, String] rev (nil) The revision to look at for
1897
+ # annotation
1898
+ def annotate(file, revision=nil, opts={})
1899
+ changeset = self[revision]
1900
+ file = changeset.get_file(file)
1901
+ return file.annotate(opts[:follow_copies], opts[:line_numbers])
1902
+ end
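+
+ # Illustrative sketch (assumes `repo` is an open local repository; the
+ # file name and revision are hypothetical):
+ #
+ # @example Who last touched each line of lib/foo.rb as of revision 10?
+ # repo.annotate "lib/foo.rb", 10, :line_numbers => true
+ # repo.annotate "lib/foo.rb", nil, :follow_copies => true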
1903
+
1904
+ ##
1905
+ # This gives the status of the repository, comparing 2 node in
1906
+ # its history. Now, with no parameters, it's going to compare the
1907
+ # last revision with the working directory, which is the most common
1908
+ # usage - that answers "what is the current status of the repository,
1909
+ # compared to the last time a commit happened?". However, given any
1910
+ # two revisions, it can compare them.
1911
+ #
1912
+ # @example @repo.status # => {:unknown => ['code/smthng.rb'], :added => [], ...}
1913
+ # @param [Hash] opts the options for this command. there's a bunch.
1914
+ # @option [String, Integer] opts :node_1 (".") an identifier for the starting
1915
+ # revision
1916
+ # @option [String, Integer] opts :node_2 (nil) an identifier for the ending
1917
+ # revision. Defaults to the working directory.
1918
+ # @option [Proc] opts :match (proc { true }) a proc that will match
1919
+ # a file, so we know if we're interested in it.
1920
+ # @option [Boolean] opts :ignored (false) do we want to see files we're
1921
+ # ignoring?
1922
+ # @option [Boolean] opts :clean (false) do we want to see files that are
1923
+ # totally unchanged?
1924
+ # @option [Boolean] opts :unknown (false) do we want to see files we've
1925
+ # never seen before (i.e. files the user forgot to add to the repo)?
1926
+ # @return [Hash<Symbol => Array<String>>] no, I'm not kidding. the keys are:
1927
+ # :modified, :added, :removed, :deleted, :unknown, :ignored, :clean. The
1928
+ # keys are the type of change, and the values are arrays of filenames
1929
+ # (local to the root) that are under each key.
1930
+ def status(opts={:node_1 => '.'})
1931
+ run_hook :status
1932
+
1933
+ node1, node2, match = opts[:node_1], opts[:node_2], opts[:match]
1934
+
1935
+ match = Match.create({}) { true } unless match
1936
+
1937
+ node1 = self[node1] unless node1.kind_of? Changeset # get changeset objects
1938
+ node2 = self[node2] unless node2.kind_of? Changeset
1939
+
1940
+ write_dirstate = false
1941
+
1942
+ # are we working with working directories?
1943
+ working = node2.revision == nil
1944
+ parent_working = working && node1 == self["."]
1945
+
1946
+ # load the working directory's manifest
1947
+ node2.manifest if !working && node2.revision < node1.revision
1948
+
1949
+ if working
1950
+ # get the dirstate's latest status
1951
+ status = dirstate.status(opts[:ignored], opts[:clean], opts[:unknown], match)
1952
+
1953
+ # this case is run about 99% of the time
1954
+ # do we need to do hashes on any files to see if they've changed?
1955
+ if parent_working && status[:lookup].any?
1956
+ # lookup.any? is a shortcut for !lookup.empty?
1957
+ clean, modified, write_dirstate = *fix_files(status[:lookup], node1, node2)
1958
+
1959
+ status[:clean] += clean
1960
+ status[:modified] += modified
1961
+ end
1962
+ else
1963
+ status = {:clean => [], :modified => [], :lookup => [], :unknown => [], :ignored => [],
1964
+ :removed => [], :added => [], :deleted => []}
1965
+ end
1966
+ # if we're working with old revisions...
1967
+ unless parent_working
1968
+ # get the older revision manifest
1969
+ mf1 = node1.manifest.dup
1970
+
1971
+ if working
1972
+ # get the working directory manifest. note, it's a tweaked
1973
+ # manifest to reflect working directory files
1974
+ mf2 = self["."].manifest.dup
1975
+
1976
+ # mark them as not in the manifest to force checking later
1977
+ files_for_later = status[:lookup] + status[:modified] + status[:added]
1978
+ files_for_later.each {|file| mf2.mark_for_later file, node2 }
1979
+
1980
+ # remove any files we've marked as removed from the '.' manifest
1981
+ status[:removed].each {|file| mf2.delete file }
1982
+ else
1983
+ # if we aren't working with the working directory, then we'll
1984
+ # just use the old revision's information
1985
+ status[:removed], status[:unknown], status[:ignored] = [], [], []
1986
+ mf2 = node2.manifest.dup
1987
+ end
1988
+
1989
+ # Every file in the later revision (or working directory)
1990
+ mf2.each do |file, node|
1991
+ # Does it exist in the old manifest? If so, it wasn't added.
1992
+ if mf1[file]
1993
+ # the tests to run
1994
+ tests = [ mf1.flags[file] != mf2.flags[file] ,
1995
+ mf1[file] != mf2[file] &&
1996
+ (mf2[file] || node1[file] === node2[file]) ]
1997
+
1998
+ # It's in the old manifest, so let's check whether it's been changed.
1999
+ # Otherwise, it must be unchanged.
2000
+ if tests.any?
2001
+ status[:modified] << file
2002
+ elsif opts[:clean]
2003
+ status[:clean] << file
+ end
2004
+
2005
+ # Remove that file from the old manifest, since we've checked it
2006
+ mf1.delete file
2007
+ else
2008
+ # if it's not in the old manifest, it's been added
2009
+ status[:added] << file
2010
+ end
2011
+ end
2012
+
2013
+ # Anything left in the old manifest is a file we've removed since the
2014
+ # first revision.
2015
+ status[:removed] = mf1.keys
2016
+ end
2017
+
2018
+ # We're done!
2019
+ status.delete :lookup # because nobody cares about it
2020
+ delta = status.delete :delta
2021
+
2022
+ status = status.map {|k, v| [k, v.sort] }.to_hash # sort dem fuckers
2023
+ status[:delta] = delta
2024
+ status.select {|k, _| opts[:only] ? opts[:only].include?(k) : true }.to_hash
2025
+ end
2026
+
2027
+ ##
2028
+ # Clone a repository.
2029
+ #
2030
+ # Here is what this does, pretty much:
2031
+ # % amp init monkey
2032
+ # % cd monkey
2033
+ # % amp pull http://monkey
2034
+ #
2035
+ # It's so simple it's not even funny.
2036
+ #
2037
+ # @param [Amp::Repository] remote repository to pull from
2038
+ # @param [Array<String>] heads list of revs to clone (forces use of pull)
2039
+ # @param [Boolean] stream do we stream from the remote source?
2040
+ def clone(remote, opts={:revs => [], :stream => false})
2041
+ # now, all clients that can request uncompressed clones can
2042
+ # read repo formats supported by all servers that can serve
2043
+ # them.
2044
+
2045
+ # The streaming case:
2046
+ # if revlog format changes, client will have to check version
2047
+ # and format flags on "stream" capability, and use
2048
+ # uncompressed only if compatible.
2049
+ if opts[:stream] && opts[:revs].empty? && remote.capable?('stream')
2050
+ stream_in remote
2051
+ else
2052
+ pull remote, :revs => opts[:revs]
2053
+ end
2054
+ end
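+
+ # Illustrative sketch (assumes `repo` is a freshly initialized local
+ # repository; `remote`, `node_a`, and `node_b` are hypothetical):
+ #
+ # @example Stream when possible, otherwise fall back to a pull
+ # repo.clone remote, :revs => [], :stream => true
+ # @example Clone only up to specific revisions (always pulls)
+ # repo.clone remote, :revs => [node_a, node_b], :stream => false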
2055
+
2056
+ ##
2057
+ # Stream in the data from +remote+.
2058
+ #
2059
+ # @param [Amp::Repository] remote repository to pull from
2060
+ # @return [Integer] the number of heads in the repository minus 1
2061
+ def stream_in(remote)
2062
+ remote.stream_out do |f|
2063
+ l = f.gets # this should be the server code
2064
+
2065
+ unless l.to_s.strip =~ /\A\d+\z/
2066
+ raise ResponseError.new("Unexpected response from server: #{l}")
2067
+ end
2068
+
2069
+ case l.to_i
2070
+ when 1
2071
+ raise RepoError.new("operation forbidden by server")
2072
+ when 2
2073
+ raise RepoError.new("locking the remote repository failed")
2074
+ end
2075
+
2076
+ UI::status "streaming all changes"
2077
+
2078
+ l = f.gets # this is effectively [total_files, total_bytes].join ' '
2079
+ total_files, total_bytes = *l.split(' ').map {|i| i.to_i }[0..1]
2080
+ UI::status "#{total_files} file#{total_files == 1 ? '' : 's' } to transfer, #{total_bytes.to_human} of data"
2081
+
2082
+ start = Time.now
2083
+ total_files.times do |i|
2084
+ l = f.gets
2085
+ name, size = *l.split("\0")[0..1]
2086
+ size = size.to_i
2087
+ UI::debug "adding #{name} (#{size.to_human})"
2088
+
2089
+ @store.opener.open(name, 'w') do |store_file|
2090
+ chunk = f.read size # will return nil if at EOF
2091
+ store_file.write chunk if chunk
2092
+ end
2093
+ end
2094
+
2095
+ elapsed = Time.now - start
2096
+ elapsed = 0.001 if elapsed <= 0
2097
+
2098
+ UI::status("transferred #{total_bytes.to_human} in #{elapsed}" +
2099
+ "second#{elapsed == 1.0 ? '' : 's' } (#{total_bytes.to_f / elapsed}/sec)")
2100
+
2101
+ invalidate!
2102
+ heads.size - 1
2103
+ end
2104
+ end
2105
+
2106
+ ##
2107
+ # Invalidate the repository: delete things and reset others.
2108
+ def invalidate!
2109
+ @changelog = nil
2110
+ @manifest = nil
2111
+
2112
+ invalidate_tag_cache!
2113
+ invalidate_branch_cache!
2114
+ end
2115
+
2116
+ ##
2117
+ # Commits a changeset or set of files to the repository. You will quite often
2118
+ # use this method since it's basically the basis of version control systems.
2119
+ #
2120
+ # @param [Hash] opts the options to this method are all optional, so it's a very
2121
+ # flexible method. Options listed below.
2122
+ # @option [Array] opts :files ([]) the specific files to commit - if this is
2123
+ # not provided, the current status of the working directory is used.
2124
+ # @option [Hash] opts :extra ({}) any extra data, such as "close" => true
2125
+ # will close the active branch.
2126
+ # @option [String] opts :message ("") the message for the commit. An editor
2127
+ # will be opened if this is not provided.
2128
+ # @option [Boolean] opts :force (false) Forces the commit, ignoring minor details
2129
+ # like when you try to commit when no files have been changed.
2130
+ # @option [Match] opts :match (nil) A match object to specify how to pick files
2131
+ # to commit. These are useful so you don't accidentally commit ignored files,
2132
+ # for example.
2133
+ # @option [Boolean] opts :empty_ok (false) Is an empty commit message a-ok?
2134
+ # @option [Boolean] opts :force_editor (false) Do we force the editor to be
2135
+ # opened, even if :message is provided?
2136
+ # @option [String] opts :user ($USER) the username to associate with the commit.
2137
+ # Defaults to AmpConfig#username.
2138
+ # @option [DateTime, Time, Date] opts :date (Time.now) the date to mark with
2139
+ # the commit. Useful if you miss a deadline and want to pretend that you actually
2140
+ # made it!
2141
+ # @return [String] the digest referring to this entry in the revlog
2142
+ def commit(opts={:message => "", :extra => {}, :files => []})
2143
+ opts[:extra] ||= {}
2144
+ opts[:force] = true if opts[:extra]["close"]
2145
+ opts[:files] ||= []
2146
+ opts[:files].uniq!
2147
+
2148
+ use_dirstate = opts[:p1] == nil
2149
+ changes = {}
2150
+ lock_working_and_store do
2151
+ if use_dirstate
2152
+ p1, p2 = dirstate.parents
2153
+ update_dirstate = true
2154
+
2155
+ tests = [opts[:force] ,
2156
+ p2 != NULL_ID,
2157
+ opts[:match] ]
2158
+
2159
+ raise StandardError.new("cannot partially commit a merge") if tests.all?
2160
+
2161
+ if opts[:files].any?
2162
+ changes = {:modified => [], :removed => []}
2163
+
2164
+ # split the files up so we can deal with them appropriately
2165
+ opts[:files].each do |file|
2166
+ state = dirstate[file]
2167
+ if state.normal? || state.merged? || state.added?
2168
+ changes[:modified] << file
2169
+ elsif state.removed?
2170
+ changes[:removed] << file
2171
+ elsif state.untracked?
2172
+ UI.warn "#{file} not tracked!"
2173
+ else
2174
+ UI.err "#{file} has unknown state #{state[0]}"
2175
+ end
2176
+ end
2177
+
2178
+ else
2179
+ changes = status(:match => opts[:match])
2180
+ end
2181
+ else
2182
+ p1, p2 = opts[:p1], (opts[:p2] || NULL_ID)
2183
+ update_dirstate = dirstate.parents[0] == p1
2184
+ changes = {:modified => opts[:files]}
2185
+ end
2186
+
2187
+
2188
+ merge_state = Amp::Merges::MergeState.new self # merge state!
2189
+
2190
+ changes[:modified].each do |file|
2191
+ if merge_state[file] && merge_state[file] == "u"
2192
+ raise StandardError.new("unresolved merge conflicts (see `amp resolve`)")
2193
+ end
2194
+ end
2195
+
2196
+ changeset = WorkingDirectoryChangeset.new self, :parents => [p1, p2] ,
2197
+ :text => opts[:message],
2198
+ :user => opts[:user] ,
2199
+ :date => opts[:date] ,
2200
+ :extra => opts[:extra] ,
2201
+ :changes => changes
2202
+
2203
+ revision = commit_changeset changeset, :force => opts[:force] ,
2204
+ :force_editor => opts[:force_editor],
2205
+ :empty_ok => opts[:empty_ok] ,
2206
+ :use_dirstate => use_dirstate ,
2207
+ :update_dirstate => update_dirstate
2208
+
2209
+ merge_state.reset
2210
+ return revision
2211
+ end
2212
+ end
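+
+ # Illustrative sketch (assumes `repo` is an open local repository; the
+ # message, user, and file list are hypothetical):
+ #
+ # @example Commit two changed files with an explicit author and date
+ # repo.commit :files => ["lib/foo.rb", "README.md"],
+ # :message => "fix foo edge case",
+ # :user => "Jane Doe <jane@example.com>",
+ # :date => Time.now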
2213
+
2214
+ ##
2215
+ # Commits the given changeset to the repository.
2216
+ #
2217
+ # @param changeset the changeset to commit. Could be working dir, for
2218
+ # example.
2219
+ # @param opts the options for committing the changeset.
2220
+ # @option [Boolean] opts :force (false) force the commit, even though
2221
+ # nothing has changed.
2222
+ # @option [Boolean] opts :force_editor (false) force the user to open
2223
+ # their editor, even though they provided a message already
2224
+ # @option [Boolean] opts :empty_ok (false) is it ok if they have no
2225
+ # description of the commit?
2226
+ # @option [Boolean] opts :use_dirstate (true) use the DirState for this
2227
+ # commit? Used if you're committing the working directory (typical)
2228
+ # @option [Boolean] opts :update_dirstate (true) should we update the
2229
+ # DirState after the commit? Used if you're committing the working
2230
+ # directory.
2231
+ # @return [String] the digest referring to this entry in the revlog
2232
+ def commit_changeset(changeset, opts = {:use_dirstate => true,
2233
+ :update_dirstate => true})
2234
+ journal = nil
2235
+ valid = false #don't update the DirState if this is set!
2236
+
2237
+ commit = ((changeset.modified || []) + (changeset.added || [])).sort
2238
+ remove = changeset.removed
2239
+ extra = changeset.extra.dup
2240
+ branchname = extra["branch"]
2241
+ user = changeset.user
2242
+ text = changeset.description
2243
+
2244
+ p1, p2 = changeset.parents.map {|p| p.node}
2245
+ c1 = changelog.read(p1) # 1 parent's changeset as an array
2246
+ c2 = changelog.read(p2) # 2nd parent's changeset as an array
2247
+ m1 = manifest.read(c1[0]).dup # 1st parent's manifest
2248
+ m2 = manifest.read(c2[0]) # 2nd parent's manifest
2249
+
2250
+ if opts[:use_dirstate]
2251
+ oldname = c1[5]["branch"]
2252
+ tests = [ commit.empty?, remove.empty?, ! opts[:force],
2253
+ p2 == NULL_ID, branchname == oldname ]
2254
+
2255
+ if tests.all?
2256
+ UI::status "nothing changed"
2257
+ return nil
2258
+ end
2259
+ end
2260
+
2261
+ xp1 = p1.hexlify
2262
+ xp2 = (p2 == NULL_ID) ? "" : p2.hexlify
2263
+
2264
+ run_hook :pre_commit
2265
+ journal = Journal.new
2266
+
2267
+ fresh = {} # new = reserved haha
2268
+ changed = []
2269
+ link_rev = self.size
2270
+
2271
+ (commit + (remove || [])).each {|file| UI::status file }
2272
+
2273
+ #Amp::Logger.info("<changeset commit>").indent
2274
+
2275
+ commit.each do |file|
2276
+ # begin
2277
+
2278
+ versioned_file = changeset.get_file(file)
2279
+ newflags = versioned_file.flags
2280
+
2281
+ fresh[file] = commit_file(versioned_file, m1, m2, link_rev,
2282
+ journal, changed)
2283
+ if [ changed.empty? || changed.last != file,
2284
+ m2[file] != fresh[file] ].all?
2285
+ changed << file if m1.flags[file] != newflags
2286
+ end
2287
+ m1.flags[file] = newflags
2288
+
2289
+ dirstate.normal file if opts[:use_dirstate]
2290
+ #Amp::Logger.section("committing: #{file}") do
2291
+ #Amp::Logger.info("flags: #{newflags.inspect}")
2292
+ #Amp::Logger.info("total changes: #{changed.inspect}")
2293
+ #end
2294
+ # rescue
2295
+ # if opts[:use_dirstate]
2296
+ # UI.warn("trouble committing #{file}")
2297
+ # raise
2298
+ # else
2299
+ # remove << file
2300
+ # end
2301
+ # end
2302
+ end
2303
+
2304
+ updated, added = [], []
2305
+ changed.sort.each do |file|
2306
+ if m1[file] || m2[file]
2307
+ updated << file
2308
+ else
2309
+ added << file
2310
+ end
2311
+ end
2312
+
2313
+ m1.merge!(fresh)
2314
+
2315
+ removed = remove.sort.select {|f| m1[f] || m2[f]}
2316
+ removed_1 = []
2317
+ removed.select {|f| m1[f]}.each do |f|
2318
+ m1.delete f
2319
+ removed_1 << f
2320
+ #Amp::Logger.info("Removed: #{f}")
2321
+ end
2322
+
2323
+ fresh = fresh.map {|k, v| (v) ? k : nil}.reject {|k| k.nil? }
2324
+ man_entry = manifest.add(m1, journal, link_rev, c1[0], c2[0],
2325
+ [fresh, removed_1])
2326
+ #Amp::Logger.info("Adding/modifying: #{fresh.inspect}")
2327
+ #Amp::Logger.info("Removing: #{removed_1.inspect}")
2328
+ #Amp::Logger.section("New Manifest") do
2329
+ #manifest.read(:tip).each do |file, _|
2330
+ #Amp::Logger.info(file)
2331
+ #end
2332
+ #end
2333
+ if !opts[:empty_ok] && !text
2334
+ template_opts = {:added => added, :updated => updated,
2335
+ :removed => removed, :template_type => :commit }
2336
+ edit_text = changeset.to_templated_s(template_opts)
2337
+ text = UI.edit(edit_text, user)
2338
+ end
2339
+
2340
+ lines = text.rstrip.split("\n").map {|r| r.rstrip}.reject {|l| l.empty?}
2341
+ if lines.empty? && opts[:use_dirstate]
2342
+ raise abort("empty commit message")
2343
+ end
2344
+ text = lines.join("\n")
2345
+
2346
+        changelog.delay_update
+        n = changelog.add(man_entry, changed + removed_1, text, journal, p1, p2, user,
+                          changeset.date, extra)
+        #Amp::Logger.section("changelog") do
+        #  Amp::Logger.info("manifest entry: #{man_entry.inspect}")
+        #  Amp::Logger.info("files: #{(changed + removed_1).inspect}")
+        #  Amp::Logger.info("text: #{text.inspect}")
+        #  Amp::Logger.info("p1: #{p1.inspect}")
+        #  Amp::Logger.info("p2: #{p2.inspect}")
+        #  Amp::Logger.info("user: #{user.inspect}")
+        #  Amp::Logger.info("date: #{changeset.date.inspect}")
+        #  Amp::Logger.info("extra: #{extra.inspect}")
+        #end
+        self.changelog.write_pending()
+        changelog.finalize(journal)
+        #Amp::Logger.outdent.info("</changeset commit>")
+        # branchtags
+
+        if opts[:use_dirstate] || opts[:update_dirstate]
+          dirstate.parents = n
+          removed.each {|f| dirstate.forget(f) } if opts[:use_dirstate]
+          dirstate.write
+        end
+
+        valid = true
+        journal.close
+        run_hook :post_commit, :added => added, :modified => updated, :removed => removed,
+                               :user => user, :date => changeset.date, :text => text,
+                               :revision => changelog.index_size
+        return n
+      rescue StandardError => e
+        dirstate.invalidate! unless valid
+        if e.kind_of?(AbortError)
+          UI::warn "Abort: #{e}"
+        else
+          UI::warn "Got an exception while committing: #{e}"
+          UI::warn e.backtrace.join("\n")
+        end
+        journal.delete if journal
+      end
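As an aside (not part of the gem), here is how the commit-message clean-up in the commit method above behaves on a sample string; it is plain Ruby, so the values can be checked in irb:

    text  = "First line  \n\n   \nSecond line\n\n"
    lines = text.rstrip.split("\n").map {|r| r.rstrip }.reject {|l| l.empty? }
    lines.join("\n")   # => "First line\nSecond line"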
+
+
+      ##
+      # Commits a file as part of a larger transaction.
+      #
+      # @param file the versioned-file to commit
+      # @param manifest1 the manifest of the first parent
+      # @param manifest2 the manifest of the second parent
+      # @param link_revision the revision index we'll be adding this under
+      # @param journal the journal for aborting failed commits
+      # @param change_list the list of all the files changed during the commit
+      #
+      def commit_file(file, manifest1, manifest2, link_revision, journal, change_list)
+        filename = file.path
+        text     = file.data
+        curfile  = self.file filename
+
+        fp1 = manifest1[filename] || NULL_ID
+        fp2 = manifest2[filename] || NULL_ID
+
+        metadata = {}
+        copied   = file.renamed
+        if copied && copied[0] != filename
+          # Mark the new revision of this file as a copy of another
+          # file. This copy data will effectively act as a parent
+          # of this new revision. If this is a merge, the first
+          # parent will be the nullid (meaning "look up the copy data")
+          # and the second one will be the other parent. For example:
+          #
+          # 0 --- 1 --- 3   rev1 changes file foo
+          #   \       /     rev2 renames foo to bar and changes it
+          #    \- 2 -/      rev3 should have bar with all changes and
+          #                      should record that bar descends from
+          #                      bar in rev2 and foo in rev1
+          #
+          # this allows this merge to succeed:
+          #
+          # 0 --- 1 --- 3   rev4 reverts the content change from rev2
+          #  \       /      merging rev3 and rev4 should use bar@rev2
+          #   \- 2 --- 4    as the merge base
+
+          copied_file     = copied[0]
+          copied_revision = manifest1[copied_file]
+          new_fp          = fp2
+
+          if manifest2 # branch merge
+            if fp2 == NULL_ID || copied_revision == nil # copied on remote side
+              if manifest2[copied_file]
+                copied_revision = manifest2[copied_file]
+                new_fp = fp1
+              end
+            end
+          end
+
+          if copied_revision.nil? || copied_revision.empty?
+            self["."].ancestors.each do |a|
+              if a[copied_file]
+                copied_revision = a[copied_file].file_node
+                break
+              end
+            end
+          end
+
+          UI::say "#{filename}: copy #{copied_file}:#{copied_revision.hexlify}"
+          metadata["copy"]    = copied_file
+          metadata["copyrev"] = copied_revision.hexlify
+          fp1, fp2 = NULL_ID, new_fp
+        elsif fp2 != NULL_ID
+          # if one parent is an ancestor of the other in the filelog,
+          # collapse down to a single parent
+          fpa = curfile.ancestor(fp1, fp2)
+
+          fp1, fp2 = fp2, NULL_ID if fpa == fp1
+          fp2 = NULL_ID if fpa != fp1 && fpa == fp2
+        end
+
+        # unchanged relative to fp1 and no copy metadata: reuse the
+        # existing file revision rather than adding a new one
+        if fp2 == NULL_ID && !(curfile.cmp(fp1, text)) && metadata.empty?
+          return fp1
+        end
+
+        change_list << filename
+        return curfile.add(text, metadata, journal, link_revision, fp1, fp2)
+      end
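For readers following commit_file, a small sketch (illustrative only, not from the gem) of the copy metadata recorded when a file was renamed; the path and hex node here are made-up values:

    metadata = {
      "copy"    => "foo",                        # hypothetical path the file was renamed from
      "copyrev" => "5d0f...(40 hex characters)"  # hexlified file node of that source revision
    }
    # commit_file then passes this hash to curfile.add, with NULL_ID and
    # new_fp as the file-level parents.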
+
+      private
+
+      ##
+      # Make the dummy changelog at .hg/00changelog.i
+      def make_changelog
+        @hg_opener.open "00changelog.i", "w" do |file|
+          file.write "\0\0\0\2" # represents revlogv2
+          file.write " dummy changelog to avoid using the old repo type"
+        end
+      end
+
+      ##
+      # Write the requirements file. This returns the requirements passed
+      # so that it can be the final method call in #init
+      def write_requires(requirements)
+        @hg_opener.open "requires", "w" do |require_file|
+          requirements.each {|r| require_file.puts r }
+        end
+        requirements
+      end
+
+      ##
+      # Looks up the files in +lookup+ to determine whether they have really
+      # changed. Normally we can tell whether two files are the same by
+      # looking at their sizes, but sometimes we can't; this method hashes
+      # the files to verify their integrity.
+      #
+      # @param [String] lookup files to look up
+      # @param node1
+      # @param node2
+      # @return [[String], [String], Boolean] clean files, modified files, and
+      #   whether or not to write the dirstate
+      def fix_files(lookup, node1, node2)
+        write_dirstate = false # this gets returned
+        modified       = []    # and this
+        fixup          = []    # files that haven't actually changed but are
+                               # marked wrong in the dirstate; also returned
+
+        lookup.each do |file|
+          # check whether the file has really been modified, comparing
+          # flags and hashed contents
+          tests = [ node1.include?(file),
+                    node2.flags(file) == node1.flags(file),
+                    node1[file] === node2[file] ]
+
+          if tests.all?
+            fixup << file # mark the file as clean
+          else
+            modified << file
+          end
+        end
+
+        # mark every fixed-up file as clean in the dirstate
+        begin
+          lock_working do
+            fixup.each do |file|
+              write_dirstate = true
+              dirstate.normal file
+              modified.delete file
+            end
+          end
+        rescue LockError
+        end
+        dirstate.write if write_dirstate
+
+        # the fixups are actually clean
+        [fixup, modified, write_dirstate]
+      end
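A hypothetical caller of fix_files (illustration only, not from the gem; `suspect_files` is a made-up name for the lookup list), just to show how the returned triple lines up with the doc comment above:

    clean, modified, wrote_dirstate = fix_files(suspect_files, node1, node2)
    # `clean` were only suspected of changing, `modified` really differ,
    # and `wrote_dirstate` reports whether the dirstate was rewritten.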
+
+      ##
+      # Does a binary search over pairs of nodes, narrowing down where known
+      # history ends and unknown history begins. Used by common_nodes.
+      #
+      # Option hash:
+      #   :find     => the node pairs we're searching through
+      #   :on_find  => what to do when we've got something new
+      #   :repo     => usually the remote repo where we get new info from
+      #   :node_map => the nodes in the current changelog
+      def binary_search(opts={})
+        count = 0
+
+        until opts[:find].empty?
+          new_search = []
+          count += 1
+
+          #puts opts[:find].inspect #killme
+
+          zipped = opts[:find].zip opts[:repo].between(opts[:find])
+          zipped.each do |(n, list)|
+            list << n[1]
+            p = n[0] # the most recent node along this chain not known locally
+            f = 1    # distance stepped so far; doubles on each probe
+
+            list.each do |item|
+              UI::debug "narrowing #{f}:#{list.size} #{short item}"
+
+              if opts[:node_map].include? item
+                if f <= 2
+                  opts[:on_find].call(p, item)
+                else
+                  UI::debug "narrowed branch search to #{short p}:#{short item}"
+                  new_search << [p, item]
+                end
+                break
+              end
+
+              p, f = item, f*2
+            end
+          end
+
+          opts[:find] = new_search
+        end
+
+        [opts[:find], count]
+      end
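An illustrative call to binary_search matching the option hash described above; it is not from the gem, and `remote`, `unknown_head`, `known_base`, and `found` are hypothetical names:

    found = []
    binary_search :find     => [[unknown_head, known_base]],
                  :repo     => remote,
                  :node_map => changelog.node_map,
                  :on_find  => proc {|p, node| found << [p, node] }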
+
+      ##
+      # This is called before every push.
+      #
+      # @todo add default values for +opts+
+      def pre_push(remote, opts={})
+        common = {}
+        remote_heads = remote.heads
+        inc = common_nodes remote, :base => common, :heads => remote_heads, :force => true
+        inc = inc[1]
+        update, updated_heads = find_outgoing_roots remote, :base => common, :heads => remote_heads
+
+        if opts[:revs]
+          btw = changelog.nodes_between(update, opts[:revs])
+          missing_cl, bases, heads = btw[:between], btw[:roots], btw[:heads]
+        else
+          bases, heads = update, changelog.heads
+        end
+
+        if bases.empty?
+          UI::status 'no changes found'
+          return nil, 1
+        elsif !opts[:force]
+          # Check whether we're creating new remote heads. To be a remote
+          # head after push, a node must be either:
+          #   - unknown locally
+          #   - a local outgoing head descended from update
+          #   - a remote head that's known locally and not
+          #     ancestral to an outgoing head
+          warn = false
+          if remote_heads == [NULL_ID]
+            warn = false
+          elsif (opts[:revs].nil? || opts[:revs].empty?) && heads.size > remote_heads.size
+            warn = true
+          else
+            new_heads = heads
+            remote_heads.each do |r|
+              if changelog.node_map.include? r
+                desc = changelog.heads r, heads
+                l = heads.select {|h| desc.include? h }
+
+                new_heads << r if l.empty?
+              else
+                new_heads << r
+              end
+            end
+
+            warn = true if new_heads.size > remote_heads.size
+          end
+
+          if warn
+            UI::status 'abort: push creates new remote heads!'
+            UI::status '(did you forget to merge? use push -f to force)'
+            return nil, 0
+          elsif inc.any?
+            UI::note 'unsynced remote changes!'
+          end
+        end
+
+        if opts[:revs].nil?
+          # use the fast path, no race possible on push
+          cg = get_changegroup common.keys, :push
+        else
+          cg = changegroup_subset update, opts[:revs], :push
+        end
+
+        [cg, remote_heads]
+      end
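Finally, a sketch of how a push command might consume pre_push's return value (hypothetical caller, not from the gem); a nil changegroup signals that there is nothing to push or that the push was refused:

    cg, remote_heads = pre_push(remote, :force => opts[:force], :revs => opts[:revs])
    return unless cg
    # ...send the changegroup to the remote here...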
+
+    end # localrepo
+  end # repo
+end