amp 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/.gitignore +1 -0
- data/.hgignore +26 -0
- data/AUTHORS +2 -0
- data/History.txt +6 -0
- data/LICENSE +37 -0
- data/MANIFESTO +7 -0
- data/Manifest.txt +294 -0
- data/README.md +129 -0
- data/Rakefile +102 -0
- data/SCHEDULE.markdown +12 -0
- data/STYLE +27 -0
- data/TODO.markdown +149 -0
- data/ampfile.rb +47 -0
- data/bin/amp +30 -0
- data/bin/amp1.9 +30 -0
- data/ext/amp/bz2/README.txt +39 -0
- data/ext/amp/bz2/bz2.c +1582 -0
- data/ext/amp/bz2/extconf.rb +77 -0
- data/ext/amp/bz2/mkmf.log +29 -0
- data/ext/amp/mercurial_patch/extconf.rb +5 -0
- data/ext/amp/mercurial_patch/mpatch.c +405 -0
- data/ext/amp/priority_queue/extconf.rb +5 -0
- data/ext/amp/priority_queue/priority_queue.c +947 -0
- data/ext/amp/support/extconf.rb +5 -0
- data/ext/amp/support/support.c +250 -0
- data/lib/amp.rb +200 -0
- data/lib/amp/commands/command.rb +507 -0
- data/lib/amp/commands/command_support.rb +137 -0
- data/lib/amp/commands/commands/config.rb +143 -0
- data/lib/amp/commands/commands/help.rb +29 -0
- data/lib/amp/commands/commands/init.rb +10 -0
- data/lib/amp/commands/commands/templates.rb +137 -0
- data/lib/amp/commands/commands/version.rb +7 -0
- data/lib/amp/commands/commands/workflow.rb +28 -0
- data/lib/amp/commands/commands/workflows/git/add.rb +65 -0
- data/lib/amp/commands/commands/workflows/git/copy.rb +27 -0
- data/lib/amp/commands/commands/workflows/git/mv.rb +23 -0
- data/lib/amp/commands/commands/workflows/git/rm.rb +60 -0
- data/lib/amp/commands/commands/workflows/hg/add.rb +53 -0
- data/lib/amp/commands/commands/workflows/hg/addremove.rb +86 -0
- data/lib/amp/commands/commands/workflows/hg/annotate.rb +46 -0
- data/lib/amp/commands/commands/workflows/hg/archive.rb +126 -0
- data/lib/amp/commands/commands/workflows/hg/branch.rb +28 -0
- data/lib/amp/commands/commands/workflows/hg/branches.rb +30 -0
- data/lib/amp/commands/commands/workflows/hg/bundle.rb +115 -0
- data/lib/amp/commands/commands/workflows/hg/clone.rb +95 -0
- data/lib/amp/commands/commands/workflows/hg/commit.rb +42 -0
- data/lib/amp/commands/commands/workflows/hg/copy.rb +31 -0
- data/lib/amp/commands/commands/workflows/hg/debug/dirstate.rb +32 -0
- data/lib/amp/commands/commands/workflows/hg/debug/index.rb +36 -0
- data/lib/amp/commands/commands/workflows/hg/default.rb +9 -0
- data/lib/amp/commands/commands/workflows/hg/diff.rb +30 -0
- data/lib/amp/commands/commands/workflows/hg/forget.rb +11 -0
- data/lib/amp/commands/commands/workflows/hg/heads.rb +25 -0
- data/lib/amp/commands/commands/workflows/hg/identify.rb +23 -0
- data/lib/amp/commands/commands/workflows/hg/import.rb +135 -0
- data/lib/amp/commands/commands/workflows/hg/incoming.rb +85 -0
- data/lib/amp/commands/commands/workflows/hg/info.rb +18 -0
- data/lib/amp/commands/commands/workflows/hg/log.rb +21 -0
- data/lib/amp/commands/commands/workflows/hg/manifest.rb +13 -0
- data/lib/amp/commands/commands/workflows/hg/merge.rb +53 -0
- data/lib/amp/commands/commands/workflows/hg/move.rb +28 -0
- data/lib/amp/commands/commands/workflows/hg/outgoing.rb +61 -0
- data/lib/amp/commands/commands/workflows/hg/pull.rb +74 -0
- data/lib/amp/commands/commands/workflows/hg/push.rb +20 -0
- data/lib/amp/commands/commands/workflows/hg/remove.rb +45 -0
- data/lib/amp/commands/commands/workflows/hg/resolve.rb +83 -0
- data/lib/amp/commands/commands/workflows/hg/revert.rb +53 -0
- data/lib/amp/commands/commands/workflows/hg/root.rb +13 -0
- data/lib/amp/commands/commands/workflows/hg/serve.rb +38 -0
- data/lib/amp/commands/commands/workflows/hg/status.rb +116 -0
- data/lib/amp/commands/commands/workflows/hg/tag.rb +69 -0
- data/lib/amp/commands/commands/workflows/hg/tags.rb +27 -0
- data/lib/amp/commands/commands/workflows/hg/tip.rb +13 -0
- data/lib/amp/commands/commands/workflows/hg/update.rb +27 -0
- data/lib/amp/commands/commands/workflows/hg/verify.rb +9 -0
- data/lib/amp/commands/commands/workflows/hg/view.rb +36 -0
- data/lib/amp/commands/dispatch.rb +181 -0
- data/lib/amp/commands/hooks.rb +81 -0
- data/lib/amp/dependencies/amp_support.rb +1 -0
- data/lib/amp/dependencies/amp_support/ruby_amp_support.rb +103 -0
- data/lib/amp/dependencies/minitar.rb +979 -0
- data/lib/amp/dependencies/priority_queue.rb +18 -0
- data/lib/amp/dependencies/priority_queue/c_priority_queue.rb +1 -0
- data/lib/amp/dependencies/priority_queue/poor_priority_queue.rb +46 -0
- data/lib/amp/dependencies/priority_queue/ruby_priority_queue.rb +525 -0
- data/lib/amp/dependencies/python_config.rb +211 -0
- data/lib/amp/dependencies/trollop.rb +713 -0
- data/lib/amp/dependencies/zip/ioextras.rb +155 -0
- data/lib/amp/dependencies/zip/stdrubyext.rb +111 -0
- data/lib/amp/dependencies/zip/tempfile_bugfixed.rb +186 -0
- data/lib/amp/dependencies/zip/zip.rb +1850 -0
- data/lib/amp/dependencies/zip/zipfilesystem.rb +609 -0
- data/lib/amp/dependencies/zip/ziprequire.rb +90 -0
- data/lib/amp/encoding/base85.rb +97 -0
- data/lib/amp/encoding/binary_diff.rb +82 -0
- data/lib/amp/encoding/difflib.rb +166 -0
- data/lib/amp/encoding/mercurial_diff.rb +378 -0
- data/lib/amp/encoding/mercurial_patch.rb +1 -0
- data/lib/amp/encoding/patch.rb +292 -0
- data/lib/amp/encoding/pure_ruby/ruby_mercurial_patch.rb +123 -0
- data/lib/amp/extensions/ditz.rb +41 -0
- data/lib/amp/extensions/lighthouse.rb +167 -0
- data/lib/amp/graphs/ancestor.rb +147 -0
- data/lib/amp/graphs/copies.rb +261 -0
- data/lib/amp/merges/merge_state.rb +164 -0
- data/lib/amp/merges/merge_ui.rb +322 -0
- data/lib/amp/merges/simple_merge.rb +450 -0
- data/lib/amp/profiling_hacks.rb +36 -0
- data/lib/amp/repository/branch_manager.rb +234 -0
- data/lib/amp/repository/dir_state.rb +950 -0
- data/lib/amp/repository/journal.rb +203 -0
- data/lib/amp/repository/lock.rb +207 -0
- data/lib/amp/repository/repositories/bundle_repository.rb +214 -0
- data/lib/amp/repository/repositories/http_repository.rb +377 -0
- data/lib/amp/repository/repositories/local_repository.rb +2661 -0
- data/lib/amp/repository/repository.rb +94 -0
- data/lib/amp/repository/store.rb +485 -0
- data/lib/amp/repository/tag_manager.rb +319 -0
- data/lib/amp/repository/updatable.rb +532 -0
- data/lib/amp/repository/verification.rb +431 -0
- data/lib/amp/repository/versioned_file.rb +475 -0
- data/lib/amp/revlogs/bundle_revlogs.rb +246 -0
- data/lib/amp/revlogs/changegroup.rb +217 -0
- data/lib/amp/revlogs/changelog.rb +338 -0
- data/lib/amp/revlogs/changeset.rb +521 -0
- data/lib/amp/revlogs/file_log.rb +165 -0
- data/lib/amp/revlogs/index.rb +493 -0
- data/lib/amp/revlogs/manifest.rb +195 -0
- data/lib/amp/revlogs/node.rb +18 -0
- data/lib/amp/revlogs/revlog.rb +1032 -0
- data/lib/amp/revlogs/revlog_support.rb +126 -0
- data/lib/amp/server/amp_user.rb +44 -0
- data/lib/amp/server/extension/amp_extension.rb +396 -0
- data/lib/amp/server/extension/authorization.rb +201 -0
- data/lib/amp/server/fancy_http_server.rb +252 -0
- data/lib/amp/server/fancy_views/_browser.haml +28 -0
- data/lib/amp/server/fancy_views/_diff_file.haml +13 -0
- data/lib/amp/server/fancy_views/_navbar.haml +17 -0
- data/lib/amp/server/fancy_views/changeset.haml +31 -0
- data/lib/amp/server/fancy_views/commits.haml +32 -0
- data/lib/amp/server/fancy_views/file.haml +35 -0
- data/lib/amp/server/fancy_views/file_diff.haml +23 -0
- data/lib/amp/server/fancy_views/harshcss/all_hallows_eve.css +72 -0
- data/lib/amp/server/fancy_views/harshcss/amy.css +147 -0
- data/lib/amp/server/fancy_views/harshcss/twilight.css +138 -0
- data/lib/amp/server/fancy_views/stylesheet.sass +175 -0
- data/lib/amp/server/http_server.rb +140 -0
- data/lib/amp/server/repo_user_management.rb +287 -0
- data/lib/amp/support/amp_config.rb +164 -0
- data/lib/amp/support/amp_ui.rb +287 -0
- data/lib/amp/support/docs.rb +54 -0
- data/lib/amp/support/generator.rb +78 -0
- data/lib/amp/support/ignore.rb +144 -0
- data/lib/amp/support/loaders.rb +93 -0
- data/lib/amp/support/logger.rb +103 -0
- data/lib/amp/support/match.rb +151 -0
- data/lib/amp/support/multi_io.rb +87 -0
- data/lib/amp/support/openers.rb +121 -0
- data/lib/amp/support/ruby_19_compatibility.rb +66 -0
- data/lib/amp/support/support.rb +1095 -0
- data/lib/amp/templates/blank.commit.erb +23 -0
- data/lib/amp/templates/blank.log.erb +18 -0
- data/lib/amp/templates/default.commit.erb +23 -0
- data/lib/amp/templates/default.log.erb +26 -0
- data/lib/amp/templates/template.rb +165 -0
- data/site/Rakefile +24 -0
- data/site/src/about/ampfile.haml +57 -0
- data/site/src/about/commands.haml +106 -0
- data/site/src/about/index.haml +33 -0
- data/site/src/about/performance.haml +31 -0
- data/site/src/about/workflows.haml +34 -0
- data/site/src/contribute/index.haml +65 -0
- data/site/src/contribute/style.haml +297 -0
- data/site/src/css/active4d.css +114 -0
- data/site/src/css/all_hallows_eve.css +72 -0
- data/site/src/css/all_themes.css +3299 -0
- data/site/src/css/amp.css +260 -0
- data/site/src/css/amy.css +147 -0
- data/site/src/css/blackboard.css +88 -0
- data/site/src/css/brilliance_black.css +605 -0
- data/site/src/css/brilliance_dull.css +599 -0
- data/site/src/css/cobalt.css +149 -0
- data/site/src/css/cur_amp.css +185 -0
- data/site/src/css/dawn.css +121 -0
- data/site/src/css/eiffel.css +121 -0
- data/site/src/css/espresso_libre.css +109 -0
- data/site/src/css/idle.css +62 -0
- data/site/src/css/iplastic.css +80 -0
- data/site/src/css/lazy.css +73 -0
- data/site/src/css/mac_classic.css +123 -0
- data/site/src/css/magicwb_amiga.css +104 -0
- data/site/src/css/pastels_on_dark.css +188 -0
- data/site/src/css/reset.css +55 -0
- data/site/src/css/slush_poppies.css +85 -0
- data/site/src/css/spacecadet.css +51 -0
- data/site/src/css/sunburst.css +180 -0
- data/site/src/css/twilight.css +137 -0
- data/site/src/css/zenburnesque.css +91 -0
- data/site/src/get/index.haml +32 -0
- data/site/src/helpers.rb +121 -0
- data/site/src/images/amp_logo.png +0 -0
- data/site/src/images/carbonica.png +0 -0
- data/site/src/images/revolution.png +0 -0
- data/site/src/images/tab-bg.png +0 -0
- data/site/src/images/tab-sliding-left.png +0 -0
- data/site/src/images/tab-sliding-right.png +0 -0
- data/site/src/include/_footer.haml +22 -0
- data/site/src/include/_header.haml +17 -0
- data/site/src/index.haml +104 -0
- data/site/src/learn/index.haml +46 -0
- data/site/src/scripts/jquery-1.3.2.min.js +19 -0
- data/site/src/scripts/jquery.cookie.js +96 -0
- data/tasks/stats.rake +155 -0
- data/tasks/yard.rake +171 -0
- data/test/dirstate_tests/dirstate +0 -0
- data/test/dirstate_tests/hgrc +5 -0
- data/test/dirstate_tests/test_dir_state.rb +192 -0
- data/test/functional_tests/resources/.hgignore +2 -0
- data/test/functional_tests/resources/STYLE.txt +25 -0
- data/test/functional_tests/resources/command.rb +372 -0
- data/test/functional_tests/resources/commands/annotate.rb +57 -0
- data/test/functional_tests/resources/commands/experimental/lolcats.rb +17 -0
- data/test/functional_tests/resources/commands/heads.rb +22 -0
- data/test/functional_tests/resources/commands/manifest.rb +12 -0
- data/test/functional_tests/resources/commands/status.rb +90 -0
- data/test/functional_tests/resources/version2/.hgignore +5 -0
- data/test/functional_tests/resources/version2/STYLE.txt +25 -0
- data/test/functional_tests/resources/version2/command.rb +372 -0
- data/test/functional_tests/resources/version2/commands/annotate.rb +45 -0
- data/test/functional_tests/resources/version2/commands/experimental/lolcats.rb +17 -0
- data/test/functional_tests/resources/version2/commands/heads.rb +22 -0
- data/test/functional_tests/resources/version2/commands/manifest.rb +12 -0
- data/test/functional_tests/resources/version2/commands/status.rb +90 -0
- data/test/functional_tests/resources/version3/.hgignore +5 -0
- data/test/functional_tests/resources/version3/STYLE.txt +31 -0
- data/test/functional_tests/resources/version3/command.rb +376 -0
- data/test/functional_tests/resources/version3/commands/annotate.rb +45 -0
- data/test/functional_tests/resources/version3/commands/experimental/lolcats.rb +17 -0
- data/test/functional_tests/resources/version3/commands/heads.rb +22 -0
- data/test/functional_tests/resources/version3/commands/manifest.rb +12 -0
- data/test/functional_tests/resources/version3/commands/status.rb +90 -0
- data/test/functional_tests/resources/version4/.hgignore +5 -0
- data/test/functional_tests/resources/version4/STYLE.txt +31 -0
- data/test/functional_tests/resources/version4/command.rb +376 -0
- data/test/functional_tests/resources/version4/commands/experimental/lolcats.rb +17 -0
- data/test/functional_tests/resources/version4/commands/heads.rb +22 -0
- data/test/functional_tests/resources/version4/commands/manifest.rb +12 -0
- data/test/functional_tests/resources/version4/commands/stats.rb +25 -0
- data/test/functional_tests/resources/version4/commands/status.rb +90 -0
- data/test/functional_tests/resources/version5_1/.hgignore +5 -0
- data/test/functional_tests/resources/version5_1/STYLE.txt +2 -0
- data/test/functional_tests/resources/version5_1/command.rb +374 -0
- data/test/functional_tests/resources/version5_1/commands/experimental/lolcats.rb +17 -0
- data/test/functional_tests/resources/version5_1/commands/heads.rb +22 -0
- data/test/functional_tests/resources/version5_1/commands/manifest.rb +12 -0
- data/test/functional_tests/resources/version5_1/commands/stats.rb +25 -0
- data/test/functional_tests/resources/version5_1/commands/status.rb +90 -0
- data/test/functional_tests/resources/version5_2/.hgignore +5 -0
- data/test/functional_tests/resources/version5_2/STYLE.txt +14 -0
- data/test/functional_tests/resources/version5_2/command.rb +376 -0
- data/test/functional_tests/resources/version5_2/commands/experimental/lolcats.rb +17 -0
- data/test/functional_tests/resources/version5_2/commands/manifest.rb +12 -0
- data/test/functional_tests/resources/version5_2/commands/newz.rb +12 -0
- data/test/functional_tests/resources/version5_2/commands/stats.rb +25 -0
- data/test/functional_tests/resources/version5_2/commands/status.rb +90 -0
- data/test/functional_tests/test_functional.rb +604 -0
- data/test/localrepo_tests/test_local_repo.rb +121 -0
- data/test/localrepo_tests/testrepo.tar.gz +0 -0
- data/test/manifest_tests/00manifest.i +0 -0
- data/test/manifest_tests/test_manifest.rb +72 -0
- data/test/merge_tests/base.txt +10 -0
- data/test/merge_tests/expected.local.txt +16 -0
- data/test/merge_tests/local.txt +11 -0
- data/test/merge_tests/remote.txt +11 -0
- data/test/merge_tests/test_merge.rb +26 -0
- data/test/revlog_tests/00changelog.i +0 -0
- data/test/revlog_tests/revision_added_changelog.i +0 -0
- data/test/revlog_tests/test_adding_index.i +0 -0
- data/test/revlog_tests/test_revlog.rb +333 -0
- data/test/revlog_tests/testindex.i +0 -0
- data/test/store_tests/store.tar.gz +0 -0
- data/test/store_tests/test_fncache_store.rb +122 -0
- data/test/test_amp.rb +9 -0
- data/test/test_base85.rb +14 -0
- data/test/test_bdiff.rb +42 -0
- data/test/test_commands.rb +122 -0
- data/test/test_difflib.rb +50 -0
- data/test/test_helper.rb +15 -0
- data/test/test_journal.rb +29 -0
- data/test/test_match.rb +134 -0
- data/test/test_mdiff.rb +74 -0
- data/test/test_mpatch.rb +14 -0
- data/test/test_support.rb +24 -0
- metadata +385 -0
|
@@ -0,0 +1,195 @@
|
|
|
1
|
+
module Amp
|
|
2
|
+
|
|
3
|
+
##
# = ManifestEntry
# A hash-like dictionary mapping tracked filenames to their node IDs,
# kept alongside a parallel table of per-file flags. All Hash behavior
# is delegated to the underlying mapping.
class ManifestEntry < DelegateClass(Hash)

  ##
  # Initializes the dictionary. It can be empty, by initializing with no
  # arguments, or with more data by assigning them.
  #
  # It is a hash of Filename => node_id
  #
  # @param [Hash] mapping the initial settings of the dictionary
  # @param [Hash] flags the flag settings of the dictionary
  def initialize(mapping=nil, flags=nil)
    @source_hash = mapping || {}
    # BUGFIX(cleanup): was `super(@source_hash || {})` -- @source_hash can
    # never be nil at this point, so the fallback was dead code.
    super(@source_hash)
    @flags = flags || {}
  end

  ##
  # Human-readable dump of both the mapping and its flags.
  def inspect
    "#<ManifestEntry " + @source_hash.inspect + "\n" +
    " " + @flags.inspect + ">"
  end

  ##
  # Returns the flags for one file, or the whole flag table when called
  # without an argument.
  #
  # @param [String, nil] file the file whose flags should be looked up
  # @return [String, Hash] the file's flags, or the complete flag hash
  def flags(file=nil)
    file ? @flags[file] : @flags
  end

  ##
  # The list of tracked filenames.
  def files; keys; end

  ##
  # Removes a file from the mapping and from the flag table.
  def delete(*args)
    super(*args)
    flags.delete(*args)
  end

  ##
  # Clones the dictionary
  def clone
    self.class.new @source_hash.dup, @flags.dup
  end

  # @see clone
  alias_method :dup, :clone

  ##
  # Mark a file to be checked later on
  #
  # @param [String] file the file to be marked for later checking
  # @param [#flags] node the node whose flags for +file+ are recorded
  #   (BUGFIX(doc): this @param was previously left blank)
  def mark_for_later(file, node)
    self[file] = nil # notice how we DIDN'T use `self.delete file`
    flags[file] = node.flags file
  end

end
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
##
# = Manifest
# A Manifest is a special type of revision log. It stores lists of files
# that are being tracked, with some flags associated with each one. The
# manifest is where you can go to find what files a revision changed,
# and any extra information about the file via its flags.
class Manifest < Revlog

  attr_accessor :manifest_list

  ##
  # Parses a bunch of text and interprets it as a manifest entry.
  # It then maps them onto a ManifestEntry that stores the real
  # info. Each line has the form "filename\0<40 hex chars><flags>".
  #
  # @param [String] lines the string that contains the information
  #   we need to parse.
  # @return [ManifestEntry] file => binary node ID mapping, plus flags
  def self.parse(lines)
    mf_dict = ManifestEntry.new

    lines.split("\n").each do |line|
      f, n = line.split("\0")
      if n.size > 40
        # anything after the 40-char hex digest is the file's flags
        mf_dict.flags[f] = n[40..-1]
        mf_dict[f] = n[0..39].unhexlify
      else
        mf_dict[f] = n.unhexlify
      end
    end

    mf_dict
  end

  ##
  # Opens the manifest revlog ("00manifest.i") through the given opener.
  #
  # @param [Amp::Opener] opener handles access to the repository's files
  def initialize(opener)
    @map_cache = nil   # [node, ManifestEntry] from the most recent read
    @list_cache = nil  # raw manifest text most recently read or written
    super(opener, "00manifest.i")
  end

  ##
  # Reads the difference between the given node and the revision
  # before that.
  #
  # @param [String] node the node_id of the revision to diff
  # @return [ManifestEntry] the dictionary with the info between
  #   the given revision and the one before that
  def read_delta(node)
    r = self.revision_index_for_node node
    return self.class.parse(Diffs::MercurialDiff.patch_text(self.revision_diff(r-1, r)))
  end

  ##
  # Parses the manifest's data at a given revision's node_id
  #
  # @param [String, Symbol] node the node_id of the revision, or :tip
  # @return [ManifestEntry] the dictionary mapping the
  #   flags, filenames, digests, etc from the parsed data
  def read(node)
    node = tip if node == :tip

    return ManifestEntry.new if node == NULL_ID
    return @map_cache[1] if @map_cache && @map_cache[0] == node

    text = decompress_revision node

    @list_cache = text
    mapping = self.class.parse(text)
    @map_cache = [node, mapping]
    mapping
  end

  ##
  # Digs up the information about how a file changed in the revision
  # specified by the provided node_id.
  #
  # @param [String] node the node_id of the revision we're interested in
  # @param [String] f the path to the file we're interested in
  # @return [[String, String], [nil, nil]] The data stored in the manifest about the
  #   file. The first String is a digest, the second String is the extra
  #   info stored alongside the file. Returns [nil, nil] if the node is not there
  def find(node, f)
    if @map_cache && node == @map_cache[0]
      return [@map_cache[1][f], @map_cache[1].flags[f]]
    end
    mapping = read(node)
    return [mapping[f], (mapping.flags[f] || "")]
  end

  ##
  # Checks the list for files invalid characters that aren't allowed in
  # filenames.
  #
  # @raise [RevlogSupport::RevlogError] if the path contains an invalid
  #   character, raise.
  def check_forbidden(list)
    list.each do |f|
      if f =~ /\n/ || f =~ /\r/
        raise RevlogSupport::RevlogError.new("\\r and \\n are disallowed in "+
                                             "filenames")
      end
    end
  end

  ##
  # Encodes a single manifest line: "file\0<hex digest><flags>\n".
  def encode_file(file, manifest)
    "#{file}\000#{manifest[file].hexlify}#{manifest.flags[file]}\n"
  end

  ##
  # Adds a revision to the manifest's revlog.
  #
  # @param [ManifestEntry] map the file => binary node ID mapping to store
  # @param [Amp::Journal] journal the transaction journal
  # @param [Integer] link the link revision (changelog index)
  # @param [String] p1 first parent node ID, if any
  # @param [String] p2 second parent node ID, if any
  # @param [[Array, Array]] changed the ([added+modified], [removed]) file
  #   lists, enabling an incremental re-encode when possible
  # @return [String] the node ID of the newly added revision
  def add(map, journal, link, p1=nil, p2=nil, changed=nil)
    # The incremental path below is only valid when we know exactly which
    # files changed, we still have the parent's text cached, and that
    # cache belongs to +p1+. Otherwise re-encode the whole map.
    #
    # BUGFIX: this guard used to read `changed || changed.empty? || ...`,
    # which always took the slow path when +changed+ was supplied and
    # raised NoMethodError on nil when it wasn't.
    if changed.nil? || changed.empty? || @list_cache.nil? ||
       @list_cache.empty? || p1.nil? || @map_cache.nil? || @map_cache[0] != p1
      check_forbidden map
      @list_cache = map.map {|f,n| f}.sort.map {|f| encode_file f, map }.join

      n = add_revision(@list_cache, journal, link, p1, p2)
      @map_cache = [n, map]

      return n
    end

    check_forbidden changed[0] # added files, check if they're forbidden

    # patch the cached parent text instead of re-encoding everything
    mapping = Manifest.parse(@list_cache)

    changed[0].each do |x|
      # BUGFIX: store the binary node ID -- encode_file hexlifies it
      # itself, so storing `map[x].hexlify` here double-encoded digests.
      mapping[x] = map[x]
      mapping.flags[x] = map.flags[x]
    end

    changed[1].each {|x| mapping.delete x }
    @list_cache = mapping.map {|k, v| k}.sort.map {|fn| encode_file(fn, mapping)}.join

    n = add_revision(@list_cache, journal, link, p1, p2)
    @map_cache = [n, map]

    n
  end
end
|
|
195
|
+
end
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
module Amp
  module RevlogSupport
    ##
    # Constants and helpers shared by everything that handles revlog
    # node IDs (20-byte binary SHA-1 digests).
    module Node
      # The null node ID: twenty NUL bytes.
      NULL_ID = "\0" * 20
      # The null revision index (also addresses the sentinel entry at the
      # end of an index).
      NULL_REV = -1

      ##
      # Abbreviates a node to its short hexadecimal form: only the first
      # 6 bytes, rendered as 12 hex characters.
      #
      # @param [String] node the full binary node ID
      # @return [String] the abbreviated hexadecimal representation
      def short(node)
        node.short_hex
      end
    end
  end
end
|
|
@@ -0,0 +1,1032 @@
|
|
|
1
|
+
require 'set'
|
|
2
|
+
|
|
3
|
+
module Amp
|
|
4
|
+
|
|
5
|
+
##
|
|
6
|
+
# = Revlog
|
|
7
|
+
# A revlog is a generic file that represents a revision history. This
|
|
8
|
+
# class, while generic, is extremely importantly and highly functional.
|
|
9
|
+
# While the {Amp::Manifest} and {Amp::ChangeLog} classes inherit
|
|
10
|
+
# from Revlog, one can open either file using the base Revlog class.
|
|
11
|
+
#
|
|
12
|
+
# A Revision log is based on two things: an index, which stores some
|
|
13
|
+
# meta-data about each revision in the repository's history, and
|
|
14
|
+
# some data associated with each revision. The data is stored as
|
|
15
|
+
# a (possibly zlib-compressed) diff.
|
|
16
|
+
#
|
|
17
|
+
# There are two versions of revision logs - version 0 and version NG.
|
|
18
|
+
# This information is handled by the {Amp::RevlogSupport:Index} classes.
|
|
19
|
+
#
|
|
20
|
+
# Sometimes the data is stored in a separate file from the index. This
|
|
21
|
+
# is up to the system to decide.
|
|
22
|
+
#
|
|
23
|
+
class Revlog
|
|
24
|
+
include Enumerable
|
|
25
|
+
include RevlogSupport::Node
|
|
26
|
+
|
|
27
|
+
# the file paths to the index and data files
|
|
28
|
+
attr_reader :index_file, :data_file
|
|
29
|
+
# The actual {Index} object.
|
|
30
|
+
attr_reader :index
|
|
31
|
+
|
|
32
|
+
##
# Initializes the revision log with an opener object (which handles how
# the interface to opening the files) and the path to the index itself.
#
# @param [Amp::Opener] opener an object that will handle opening the file
# @param [String] indexfile the path to the index file
def initialize(opener, indexfile)
  @opener = opener
  @index_file = indexfile
  # the data file shares the index's basename: "00foo.i" -> "00foo.d"
  @data_file = indexfile[0..-3] + ".d"
  @chunk_cache = nil
  # parse the on-disk index; the concrete Index subclass is chosen by
  # RevlogSupport::Index based on the file's version/contents
  @index = RevlogSupport::Index.parse(opener, indexfile)

  # add the null, terminating index entry if it isn't already there
  if @index.index.empty? || @index.is_a?(RevlogSupport::LazyIndex) ||
     @index.index[-1].node_id.not_null?
    # the use of @index.index is deliberate!
    @index.index << RevlogSupport::IndexEntry.new(0,0,0,-1,-1,-1,-1,NULL_ID)
  end
end
|
|
52
|
+
alias_method :revlog_initialize, :initialize
|
|
53
|
+
|
|
54
|
+
##
# Opens +path+ through this revlog's opener object.
#
# @param [String] path the file path to open
# @param [String] mode the IO mode string (read-only by default)
# @return whatever the opener's +open+ yields
def open(path, mode="r")
  @opener.open path, mode
end
|
|
59
|
+
|
|
60
|
+
##
# Returns the requested node as an IndexEntry. Takes either a string or
# a fixnum index value.
#
# @param [String, Fixnum] idx the index or node ID to look up in the revlog
# @return [IndexEntry] the requested index entry.
def [](idx)
  if idx.is_a? String
    # node ID: translate through the node map first
    return @index[@index.node_map[idx]]
  elsif idx.is_a? Array
    # arrays pass through untouched
    # BUGFIX: removed a leftover `STDERR.puts idx.inspect # KILLME`
    # debugging statement that polluted stderr on every Array lookup.
    idx
  else
    return @index[idx]
  end
end
|
|
76
|
+
|
|
77
|
+
##
# Looks up the unique node ID (a string) of the revision at _index_.
#
# @param [Fixnum] index the revision number, from 0 to num_revisions - 1
# @return [String] the node's ID
# @raise [RevlogSupport::LookupError] when no entry exists at +index+
def node_id_for_index(index)
  entry = @index[index]
  unless entry
    raise RevlogSupport::LookupError.new("Couldn't find node for id '#{index}'")
  end
  entry.node_id
end
|
|
88
|
+
|
|
89
|
+
# @see node_id_for_index
|
|
90
|
+
alias_method :node, :node_id_for_index
|
|
91
|
+
|
|
92
|
+
##
# Returns the index number for the given node ID.
#
# @param [String] id the node_id to lookup
# @return [Integer] the index into the revision index where you can find
#   the requested node.
# @raise [StandardError] when the node is not in this revlog
def revision_index_for_node(id)
  unless @index.node_map[id]
    # BUGFIX: removed a stray debugging `p id` that dumped the raw node
    # ID to stdout before raising.
    raise StandardError.new("Couldn't find node for id '#{id}'")
  end
  @index.node_map[id]
end
|
|
105
|
+
|
|
106
|
+
##
|
|
107
|
+
# @see revision_index_for_node
|
|
108
|
+
alias_method :rev, :revision_index_for_node
|
|
109
|
+
|
|
110
|
+
##
# Returns the "link revision" index recorded for the revision at _index_.
def link_revision_for_index(index)
  entry = self[index]
  entry.link_rev
end
|
|
115
|
+
|
|
116
|
+
##
# Returns the node IDs of the (1 or 2) parents of the given node ID.
#
# @param [String] id the node whose parents we want
# @return [Array<String>] a two-element array of parent node IDs
def parents_for_node(id)
  entry = self[id]
  first_parent  = @index[entry.parent_one_rev]
  second_parent = @index[entry.parent_two_rev]
  [first_parent.node_id, second_parent.node_id]
end
|
|
124
|
+
alias_method :parents, :parents_for_node
|
|
125
|
+
|
|
126
|
+
##
# Returns the indices of the (1 or 2) parents of the revision at _index_.
def parent_indices_for_index(index)
  entry = self[index]
  [entry.parent_one_rev, entry.parent_two_rev]
end
|
|
132
|
+
|
|
133
|
+
##
# Returns the stored (compressed) size of the data for the revision
# at _index_.
def data_size_for_index(index)
  entry = self[index]
  entry.compressed_len
end
|
|
138
|
+
|
|
139
|
+
##
# Returns the uncompressed size of the data for the revision at _index_.
# When the index doesn't record the size (negative value), the revision
# is decompressed to measure it.
def uncompressed_size_for_index(index)
  cached_len = self[index].uncompressed_len
  return cached_len if cached_len >= 0

  decompress_revision(node_id_for_index(index)).size
end
|
|
148
|
+
|
|
149
|
+
##
# Returns the offset at which the data begins for the revision at
# _index_ (decoded from the entry's packed offset/flags field).
def data_start_for_index(index)
  RevlogSupport::Support.get_offset(self[index].offset_flags)
end
|
|
154
|
+
|
|
155
|
+
##
# Returns the offset at which the data ends for the revision at _index_:
# its start plus its stored (compressed) length.
def data_end_for_index(index)
  start = data_start_for_index(index)
  start + self[index].compressed_len
end
|
|
160
|
+
|
|
161
|
+
##
# Returns the "base revision" index for the revision at _index_ (where
# its delta chain begins).
def base_revision_for_index(index)
  entry = self[index]
  entry.base_rev
end
|
|
166
|
+
|
|
167
|
+
##
# Returns the node ID of the index's tip-most revision. The final index
# entry is the null sentinel, hence size - 2 rather than size - 1.
def tip
  node_id_for_index(@index.size - 2)
end
|
|
172
|
+
|
|
173
|
+
##
# Returns the number of real revisions in this revision log; the
# trailing null sentinel entry is not counted.
def size
  @index.size - 1
end
|
|
178
|
+
alias_method :index_size, :size
|
|
179
|
+
|
|
180
|
+
##
# True when this revision log holds no revisions at all.
def empty?
  index_size == 0
end
|
|
185
|
+
|
|
186
|
+
##
# Yields each revision as an {Amp::RevlogSupport::IndexEntry}, skipping
# the extra trailing null sentinel entry (revision -1).
def each(&block)
  @index[0..-2].each(&block)
end
|
|
190
|
+
|
|
191
|
+
##
# Returns all of the indices for all revisions.
#
# NOTE(review): the range is inclusive of +size+, so this produces
# size + 1 entries, the last of which addresses the null sentinel --
# confirm callers expect that.
#
# @return [Array] all indices
def all_indices
  (0..size).entries
end
|
|
198
|
+
|
|
199
|
+
##
# Returns a hash of all _ancestral_ nodes that can be reached from
# the given node ID. Index the result with a node ID to test
# reachability.
#
# @param [String] node the node ID to walk backwards from
# @param [String, nil] stop optional node at which to stop descending
# @return [Hash] node ID => true for every reachable ancestor
def reachable_nodes_for_node(node, stop=nil)
  seen  = { node => true }
  queue = [node]
  floor = stop ? revision_index_for_node(stop) : 0

  until queue.empty?
    current = queue.shift
    next if current == stop || current.null?
    parents_for_node(current).each do |parent|
      # anything below the stop revision is out of range
      next if revision_index_for_node(parent) < floor
      unless seen[parent]
        seen[parent] = true
        queue << parent
      end
    end
  end

  seen
end
|
|
223
|
+
|
|
224
|
+
##
# Operates on all the ancestors of the given revision indices: pass a
# block to be called once per ancestor, and/or use the returned Set.
#
# @param [Array<Fixnum>, Fixnum] revisions the revision indices to start from
# @return [Set] every ancestor index reached (NULL_REV excluded)
def ancestors(revisions)
  revisions = [revisions] unless revisions.kind_of? Array
  pending = revisions.dup
  visited = Set.new([NULL_REV])
  until pending.empty?
    parent_indices_for_index(pending.shift).each do |parent_idx|
      next if visited.include? parent_idx
      pending << parent_idx
      visited << parent_idx
      yield parent_idx if block_given?
    end
  end
  visited.delete NULL_REV
  visited
end
|
|
243
|
+
|
|
244
|
+
##
# Operates on all the descendants of the given revision indices: pass a
# block to be called once per descendant, and/or use the returned Set.
#
# NOTE(review): the scan runs up to `self.size`, which addresses the
# trailing null sentinel; its parents are NULL_REV so it is filtered
# out below -- confirm that is intended.
#
# @param [Array<Fixnum>] revisions the revision indices to start from
# @return [Set] every descendant index found (the inputs excluded)
def descendants(revisions)
  found = Set.new revisions
  lowest = revisions.min + 1
  lowest.upto self.size do |idx|
    parent_indices_for_index(idx).each do |par|
      if par != NULL_REV && found.include?(par)
        found << idx
        yield idx if block_given?
        break
      end
    end
  end
  found - revisions
end
|
|
262
|
+
|
|
263
|
+
##
|
|
264
|
+
# Returns the topologically sorted list of nodes from the set:
|
|
265
|
+
# missing = (ancestors(heads) \ ancestors(common))
|
|
266
|
+
def find_missing(common=[NULL_ID], heads=self.heads)
|
|
267
|
+
common.map! {|r| revision_index_for_node r}
|
|
268
|
+
heads.map! {|r| revision_index_for_node r}
|
|
269
|
+
|
|
270
|
+
has = {}
|
|
271
|
+
ancestors(common) {|a| has[a] = true}
|
|
272
|
+
has[NULL_REV] = true
|
|
273
|
+
common.each {|r| has[r] = true}
|
|
274
|
+
|
|
275
|
+
missing = {}
|
|
276
|
+
to_visit = heads.reject {|r| has[r]}
|
|
277
|
+
until to_visit.empty?
|
|
278
|
+
r = to_visit.shift
|
|
279
|
+
next if missing.include? r
|
|
280
|
+
missing[r] = true
|
|
281
|
+
parent_indices_for_index(r).each do |p|
|
|
282
|
+
to_visit << p unless has[p]
|
|
283
|
+
end
|
|
284
|
+
end
|
|
285
|
+
|
|
286
|
+
missing.keys.sort.map {|rev| node_id_for_index rev}
|
|
287
|
+
end
|
|
288
|
+
|
|
289
|
+
##
|
|
290
|
+
# Return a tuple containing three elements. Elements 1 and 2 contain
|
|
291
|
+
# a final list bases and heads after all the unreachable ones have been
|
|
292
|
+
# pruned. Element 0 contains a topologically sorted list of all
|
|
293
|
+
#
|
|
294
|
+
# nodes that satisfy these constraints:
|
|
295
|
+
# 1. All nodes must be descended from a node in roots (the nodes on
|
|
296
|
+
# roots are considered descended from themselves).
|
|
297
|
+
# 2. All nodes must also be ancestors of a node in heads (the nodes in
|
|
298
|
+
# heads are considered to be their own ancestors).
|
|
299
|
+
#
|
|
300
|
+
# If roots is unspecified, nullid is assumed as the only root.
|
|
301
|
+
# If heads is unspecified, it is taken to be the output of the
|
|
302
|
+
# heads method (i.e. a list of all nodes in the repository that
|
|
303
|
+
# have no children).
|
|
304
|
+
#
|
|
305
|
+
# @param [Array<String>] roots
|
|
306
|
+
# @param [Array<String>] heads
|
|
307
|
+
# @return [{:heads => Array<String>, :roots => Array<String>, :between => Array<String>}]
|
|
308
|
+
def nodes_between(roots=nil, heads=nil)
|
|
309
|
+
no_nodes = {:roots => [], :heads => [], :between => []}
|
|
310
|
+
return no_nodes if roots != nil && roots.empty?
|
|
311
|
+
return no_nodes if heads != nil && heads.empty?
|
|
312
|
+
|
|
313
|
+
if roots.nil?
|
|
314
|
+
roots = [NULL_ID] # Everybody's a descendent of nullid
|
|
315
|
+
lowest_rev = NULL_REV
|
|
316
|
+
else
|
|
317
|
+
roots = roots.dup
|
|
318
|
+
lowest_rev = roots.map {|r| revision_index_for_node r}.min
|
|
319
|
+
end
|
|
320
|
+
|
|
321
|
+
if lowest_rev == NULL_REV && heads.nil?
|
|
322
|
+
# We want _all_ the nodes!
|
|
323
|
+
return {:between => all_indices.map {|i| node_id_for_index i },
|
|
324
|
+
:roots => [NULL_ID], :heads => self.heads}
|
|
325
|
+
end
|
|
326
|
+
|
|
327
|
+
if heads.nil?
|
|
328
|
+
# All nodes are ancestors, so the latest ancestor is the last
|
|
329
|
+
# node.
|
|
330
|
+
highest_rev = self.size - 1
|
|
331
|
+
# Set ancestors to None to signal that every node is an ancestor.
|
|
332
|
+
ancestors = nil
|
|
333
|
+
# Set heads to an empty dictionary for later discovery of heads
|
|
334
|
+
heads = {}
|
|
335
|
+
else
|
|
336
|
+
heads = heads.dup
|
|
337
|
+
ancestors = {}
|
|
338
|
+
|
|
339
|
+
# Turn heads into a dictionary so we can remove 'fake' heads.
|
|
340
|
+
# Also, later we will be using it to filter out the heads we can't
|
|
341
|
+
# find from roots.
|
|
342
|
+
heads = Hash.with_keys heads, false
|
|
343
|
+
|
|
344
|
+
# Start at the top and keep marking parents until we're done.
|
|
345
|
+
nodes_to_tag = heads.keys
|
|
346
|
+
highest_rev = nodes_to_tag.map {|r| revision_index_for_node r }.max
|
|
347
|
+
|
|
348
|
+
until nodes_to_tag.empty?
|
|
349
|
+
# grab a node to tag
|
|
350
|
+
node = nodes_to_tag.pop
|
|
351
|
+
# Never tag nullid
|
|
352
|
+
next if node.null?
|
|
353
|
+
|
|
354
|
+
# A node's revision number represents its place in a
|
|
355
|
+
# topologically sorted list of nodes.
|
|
356
|
+
r = revision_index_for_node node
|
|
357
|
+
if r >= lowest_rev
|
|
358
|
+
if !ancestors.include?(node)
|
|
359
|
+
# If we are possibly a descendent of one of the roots
|
|
360
|
+
# and we haven't already been marked as an ancestor
|
|
361
|
+
ancestors[node] = true # mark as ancestor
|
|
362
|
+
# Add non-nullid parents to list of nodes to tag.
|
|
363
|
+
nodes_to_tag += parents_for_node(node).reject {|p| p.null? }
|
|
364
|
+
elsif heads.include? node # We've seen it before, is it a fake head?
|
|
365
|
+
# So it is, real heads should not be the ancestors of
|
|
366
|
+
# any other heads.
|
|
367
|
+
heads.delete_at node
|
|
368
|
+
end
|
|
369
|
+
end
|
|
370
|
+
end
|
|
371
|
+
|
|
372
|
+
return no_nodes if ancestors.empty?
|
|
373
|
+
|
|
374
|
+
# Now that we have our set of ancestors, we want to remove any
|
|
375
|
+
# roots that are not ancestors.
|
|
376
|
+
|
|
377
|
+
# If one of the roots was nullid, everything is included anyway.
|
|
378
|
+
if lowest_rev > NULL_REV
|
|
379
|
+
# But, since we weren't, let's recompute the lowest rev to not
|
|
380
|
+
# include roots that aren't ancestors.
|
|
381
|
+
|
|
382
|
+
# Filter out roots that aren't ancestors of heads
|
|
383
|
+
roots = roots.select {|rev| ancestors.include? rev}
|
|
384
|
+
|
|
385
|
+
return no_nodes if roots.empty? # No more roots? Return empty list
|
|
386
|
+
|
|
387
|
+
# Recompute the lowest revision
|
|
388
|
+
lowest_rev = roots.map {|rev| revision_index_for_node rev}.min
|
|
389
|
+
else
|
|
390
|
+
lowest_rev = NULL_REV
|
|
391
|
+
roots = [NULL_ID]
|
|
392
|
+
end
|
|
393
|
+
end
|
|
394
|
+
|
|
395
|
+
# Transform our roots list into a 'set' (i.e. a dictionary where the
|
|
396
|
+
# values don't matter.
|
|
397
|
+
descendents = Hash.with_keys roots
|
|
398
|
+
|
|
399
|
+
# Also, keep the original roots so we can filter out roots that aren't
|
|
400
|
+
# 'real' roots (i.e. are descended from other roots).
|
|
401
|
+
roots = descendents.dup
|
|
402
|
+
|
|
403
|
+
# Our topologically sorted list of output nodes.
|
|
404
|
+
ordered_output = []
|
|
405
|
+
|
|
406
|
+
# Don't start at nullid since we don't want nullid in our output list,
|
|
407
|
+
# and if nullid shows up in descedents, empty parents will look like
|
|
408
|
+
# they're descendents.
|
|
409
|
+
[lowest_rev, 0].max.upto(highest_rev) do |rev|
|
|
410
|
+
node = node_id_for_index rev
|
|
411
|
+
is_descendent = false
|
|
412
|
+
|
|
413
|
+
if lowest_rev == NULL_REV # Everybody is a descendent of nullid
|
|
414
|
+
is_descendent = true
|
|
415
|
+
elsif descendents.include? node
|
|
416
|
+
# n is already a descendent
|
|
417
|
+
is_descendent = true
|
|
418
|
+
|
|
419
|
+
# This check only needs to be done here because all the roots
|
|
420
|
+
# will start being marked is descendents before the loop.
|
|
421
|
+
if roots.include? node
|
|
422
|
+
# If n was a root, check if it's a 'real' root.
|
|
423
|
+
par = parents_for_node node
|
|
424
|
+
# If any of its parents are descendents, it's not a root.
|
|
425
|
+
if descendents.include?(par[0]) || descendents.include?(par[1])
|
|
426
|
+
roots.delete_at node
|
|
427
|
+
end
|
|
428
|
+
end
|
|
429
|
+
else
|
|
430
|
+
# A node is a descendent if either of its parents are
|
|
431
|
+
# descendents. (We seeded the dependents list with the roots
|
|
432
|
+
# up there, remember?)
|
|
433
|
+
par = parents_for_node node
|
|
434
|
+
if descendents.include?(par[0]) || descendents.include?(par[1])
|
|
435
|
+
descendents[node] = true
|
|
436
|
+
is_descendent = true
|
|
437
|
+
end
|
|
438
|
+
end
|
|
439
|
+
|
|
440
|
+
if is_descendent && (ancestors.nil? || ancestors.include?(node))
|
|
441
|
+
# Only include nodes that are both descendents and ancestors.
|
|
442
|
+
ordered_output << node
|
|
443
|
+
if !ancestors.nil? && heads.include?(node)
|
|
444
|
+
# We're trying to figure out which heads are reachable
|
|
445
|
+
# from roots.
|
|
446
|
+
# Mark this head as having been reached
|
|
447
|
+
heads[node] = true
|
|
448
|
+
elsif ancestors.nil?
|
|
449
|
+
# Otherwise, we're trying to discover the heads.
|
|
450
|
+
# Assume this is a head because if it isn't, the next step
|
|
451
|
+
# will eventually remove it.
|
|
452
|
+
heads[node] = true
|
|
453
|
+
|
|
454
|
+
# But, obviously its parents aren't.
|
|
455
|
+
parents_for_node(node).each {|parent| heads.delete parent }
|
|
456
|
+
end
|
|
457
|
+
end
|
|
458
|
+
end
|
|
459
|
+
|
|
460
|
+
heads = heads.keys.select {|k| heads[k] }
|
|
461
|
+
roots = roots.keys
|
|
462
|
+
{:heads => heads, :roots => roots, :between => ordered_output}
|
|
463
|
+
end
|
|
464
|
+
|
|
465
|
+
##
|
|
466
|
+
# Return the list of all nodes that have no children.
|
|
467
|
+
#
|
|
468
|
+
# if start is specified, only heads that are descendants of
|
|
469
|
+
# start will be returned
|
|
470
|
+
# if stop is specified, it will consider all the revs from stop
|
|
471
|
+
# as if they had no children
|
|
472
|
+
def heads(start=nil, stop=nil)
|
|
473
|
+
if start.nil? && stop.nil?
|
|
474
|
+
count = self.size
|
|
475
|
+
return [NULL_ID] if count == 0
|
|
476
|
+
is_head = [true] * (count + 1)
|
|
477
|
+
count.times do |r|
|
|
478
|
+
e = @index[r]
|
|
479
|
+
is_head[e.parent_one_rev] = is_head[e.parent_two_rev] = false
|
|
480
|
+
end
|
|
481
|
+
return (0..(count-1)).to_a.select {|r| is_head[r]}.map {|r| node_id_for_index r}
|
|
482
|
+
end
|
|
483
|
+
start = NULL_ID if start.nil?
|
|
484
|
+
stop = [] if stop.nil?
|
|
485
|
+
stop_revs = {}
|
|
486
|
+
stop.each {|r| stop_revs[revision_index_for_node(r)] = true }
|
|
487
|
+
start_rev = revision_index_for_node start
|
|
488
|
+
reachable = {start_rev => 1}
|
|
489
|
+
heads = {start_rev => 1}
|
|
490
|
+
(start_rev + 1).upto(self.size - 1) do |r|
|
|
491
|
+
parent_indices_for_index(r).each do |p|
|
|
492
|
+
if reachable[p]
|
|
493
|
+
reachable[r] = 1 unless stop_revs[r]
|
|
494
|
+
heads[r] = 1
|
|
495
|
+
end
|
|
496
|
+
heads.delete p if heads[p] && stop_revs[p].nil?
|
|
497
|
+
end
|
|
498
|
+
end
|
|
499
|
+
|
|
500
|
+
heads.map {|k,v| node_id_for_index k}
|
|
501
|
+
end
|
|
502
|
+
|
|
503
|
+
##
|
|
504
|
+
# Returns the children of the node with ID _node_.
|
|
505
|
+
def children(node)
|
|
506
|
+
c = []
|
|
507
|
+
p = revision_index_for_node node
|
|
508
|
+
(p+1).upto(self.size - 1) do |r|
|
|
509
|
+
prevs = parent_indices_for_index(r).select {|pr| pr != NULL_REV}
|
|
510
|
+
prevs.each {|pr| c << node_id_for_index(r) if pr == p} if prevs.any?
|
|
511
|
+
c << node_id_for_index(r) if p == NULL_REV
|
|
512
|
+
end
|
|
513
|
+
c
|
|
514
|
+
end
|
|
515
|
+
|
|
516
|
+
##
|
|
517
|
+
# Tries to find an exact match for a node with ID _id_. If no match is,
|
|
518
|
+
# found, then the id is treated as an index number - if that doesn't work,
|
|
519
|
+
# the revlog will try treating the ID supplied as node_id in hex form.
|
|
520
|
+
def id_match(id)
|
|
521
|
+
return node_id_for_index(id) if id.is_a? Integer
|
|
522
|
+
return id if id.size == 20 && revision_index_for_node(id)
|
|
523
|
+
rev = id.to_i
|
|
524
|
+
rev = self.size + rev if rev < 0
|
|
525
|
+
if id.size == 40
|
|
526
|
+
node = id.unhexlify
|
|
527
|
+
r = revision_index_for_node node
|
|
528
|
+
return node if r
|
|
529
|
+
end
|
|
530
|
+
nil
|
|
531
|
+
end
|
|
532
|
+
|
|
533
|
+
##
|
|
534
|
+
# Tries to find a partial match for a node_id in hex form.
|
|
535
|
+
def partial_id_match(id)
|
|
536
|
+
return nil if id.size >= 40
|
|
537
|
+
l = id.size / 2
|
|
538
|
+
bin_id = id[0..(l*2 - 1)].unhexlify
|
|
539
|
+
nl = @index.node_map.keys.select {|k| k[0..(l-1)] == bin_id}
|
|
540
|
+
nl = nl.select {|n| n.hexlify =~ /^#{id}/}
|
|
541
|
+
return nl.first if nl.size == 1
|
|
542
|
+
raise RevlogSupport::LookupError.new("ambiguous ID #{id}") if nl.size > 1
|
|
543
|
+
nil
|
|
544
|
+
end
|
|
545
|
+
|
|
546
|
+
##
|
|
547
|
+
# This method will, given an id (or an index) or an ID in hex form,
|
|
548
|
+
# try to find the given node in the index.
|
|
549
|
+
def lookup_id(id)
|
|
550
|
+
n = id_match id
|
|
551
|
+
return n unless n.nil?
|
|
552
|
+
n = partial_id_match id
|
|
553
|
+
return n unless n.nil?
|
|
554
|
+
raise RevlogSupport::LookupError.new("no match found #{id}")
|
|
555
|
+
end
|
|
556
|
+
|
|
557
|
+
##
|
|
558
|
+
# Compares a node with the provided text, as a consistency check. Works
|
|
559
|
+
# using <=> semantics.
|
|
560
|
+
def cmp(node, text)
|
|
561
|
+
|
|
562
|
+
p1, p2 = parents_for_node node
|
|
563
|
+
return RevlogSupport::Support.history_hash(text, p1, p2) != node
|
|
564
|
+
end
|
|
565
|
+
|
|
566
|
+
##
|
|
567
|
+
# Loads a block of data into the cache.
|
|
568
|
+
def load_cache(data_file, start, cache_length)
|
|
569
|
+
|
|
570
|
+
if data_file.nil?
|
|
571
|
+
data_file = open(@index_file) if @index.inline?
|
|
572
|
+
data_file = open(@data_file) unless @index.inline?
|
|
573
|
+
end
|
|
574
|
+
|
|
575
|
+
# data_file.seek(start, IO::SEEK_SET)
|
|
576
|
+
# sz = data_file.read.length
|
|
577
|
+
# data_file.seek(0, IO::SEEK_SET)
|
|
578
|
+
# $zs = data_file.read.length
|
|
579
|
+
# puts(@index.inline? ? "------- INLINE" : "-------NOT INLINE") #killme
|
|
580
|
+
# puts "------- CACHE_LENGTH = #{cache_length}" # KILLME
|
|
581
|
+
# puts "===" # KILLME
|
|
582
|
+
# puts "We are going to read #{cache_length} bytes starting at #{start}" # KILLME
|
|
583
|
+
# puts "Wait a minute... on Ari's machine, there's only #{sz} bytes to read..." # KILLME
|
|
584
|
+
# puts "Filesize: #{$zs}" # KILLME
|
|
585
|
+
# puts "===" # KILLME
|
|
586
|
+
|
|
587
|
+
data_file.seek(start, IO::SEEK_SET)
|
|
588
|
+
@chunk_cache = [start, data_file.read(cache_length)]
|
|
589
|
+
data_file
|
|
590
|
+
end
|
|
591
|
+
|
|
592
|
+
##
|
|
593
|
+
# Gets a chunk of data from the datafile (or, if inline, from the index
|
|
594
|
+
# file). Just give it a revision index and which data file to use
|
|
595
|
+
#
|
|
596
|
+
# @param [Fixnum] rev the revision index to extract
|
|
597
|
+
# @param [IO] data_file The IO file descriptor for loading data
|
|
598
|
+
# @return [String] the raw data from the index (possibly compressed)
|
|
599
|
+
def get_chunk(rev, data_file = nil)
|
|
600
|
+
begin
|
|
601
|
+
start, length = self.data_start_for_index(rev), self[rev].compressed_len
|
|
602
|
+
rescue
|
|
603
|
+
Amp::UI.debug "Failed get_chunk: #{@index_file}:#{rev}"
|
|
604
|
+
raise
|
|
605
|
+
end
|
|
606
|
+
|
|
607
|
+
#puts "The starting point for the data is: #{data_start_for_index(rev)}" # KILLME
|
|
608
|
+
#puts "We're reading #{length} bytes. Look at data_start_for_index" # KILLME
|
|
609
|
+
|
|
610
|
+
start += ((rev + 1) * @index.entry_size) if @index.inline?
|
|
611
|
+
|
|
612
|
+
endpt = start + length
|
|
613
|
+
offset = 0
|
|
614
|
+
if @chunk_cache.nil?
|
|
615
|
+
cache_length = [65536, length].max
|
|
616
|
+
data_file = load_cache data_file, start, cache_length
|
|
617
|
+
else
|
|
618
|
+
cache_start = @chunk_cache[0]
|
|
619
|
+
cache_length = @chunk_cache[1].size
|
|
620
|
+
cache_end = cache_start + cache_length
|
|
621
|
+
if start >= cache_start && endpt <= cache_end
|
|
622
|
+
offset = start - cache_start
|
|
623
|
+
else
|
|
624
|
+
cache_length = [65536, length].max
|
|
625
|
+
data_file = load_cache data_file, start, cache_length
|
|
626
|
+
end
|
|
627
|
+
end
|
|
628
|
+
|
|
629
|
+
c = @chunk_cache[1]
|
|
630
|
+
return "" if c.nil? || c.empty? || length == 0
|
|
631
|
+
c = c[offset..(offset + length - 1)] if cache_length != length
|
|
632
|
+
|
|
633
|
+
RevlogSupport::Support.decompress c
|
|
634
|
+
end
|
|
635
|
+
|
|
636
|
+
##
|
|
637
|
+
# Unified diffs 2 revisions, based on their indices. They are returned in a sexified
|
|
638
|
+
# unified diff format.
|
|
639
|
+
def unified_revision_diff(rev1, rev2)
|
|
640
|
+
Diffs::MercurialDiff.unified_diff( decompress_revision(self.node_id_for_index(rev1)),
|
|
641
|
+
decompress_revision(self.node_id_for_index(rev2)))
|
|
642
|
+
end
|
|
643
|
+
|
|
644
|
+
##
|
|
645
|
+
# Diffs 2 revisions, based on their indices. They are returned in
|
|
646
|
+
# BinaryDiff format.
|
|
647
|
+
#
|
|
648
|
+
# @param [Fixnum] rev1 the index of the source revision
|
|
649
|
+
# @param [Fixnum] rev2 the index of the destination revision
|
|
650
|
+
# @return [String] The diff of the 2 revisions.
|
|
651
|
+
def revision_diff(rev1, rev2)
|
|
652
|
+
return get_chunk(rev2) if (rev1 + 1 == rev2) &&
|
|
653
|
+
self[rev1].base_rev == self[rev2].base_rev
|
|
654
|
+
Diffs::MercurialDiff.text_diff( decompress_revision(node_id_for_index(rev1)),
|
|
655
|
+
decompress_revision(node_id_for_index(rev2)))
|
|
656
|
+
end
|
|
657
|
+
|
|
658
|
+
##
|
|
659
|
+
# Given a node ID, extracts that revision and decompresses it. What you get
|
|
660
|
+
# back will the pristine revision data!
|
|
661
|
+
#
|
|
662
|
+
# @param [String] node the Node ID of the revision to extract.
|
|
663
|
+
# @return [String] the pristine revision data.
|
|
664
|
+
def decompress_revision(node)
|
|
665
|
+
return "" if node.nil? || node.null?
|
|
666
|
+
return @index.cache[2] if @index.cache && @index.cache[0] == node
|
|
667
|
+
|
|
668
|
+
|
|
669
|
+
text = nil
|
|
670
|
+
rev = revision_index_for_node node
|
|
671
|
+
base = @index[rev].base_rev
|
|
672
|
+
|
|
673
|
+
if @index[rev].offset_flags & 0xFFFF > 0
|
|
674
|
+
raise RevlogSupport::RevlogError.new("incompatible revision flag %x" %
|
|
675
|
+
(self.index[rev].offset_flags & 0xFFFF))
|
|
676
|
+
end
|
|
677
|
+
data_file = nil
|
|
678
|
+
|
|
679
|
+
if @index.cache && @index.cache[1].is_a?(Numeric) && @index.cache[1] >= base && @index.cache[1] < rev
|
|
680
|
+
base = @index.cache[1]
|
|
681
|
+
text = @index.cache[2]
|
|
682
|
+
# load the index if we're lazy (base, rev + 1)
|
|
683
|
+
end
|
|
684
|
+
data_file = open(@data_file) if !(@index.inline?) && rev > base + 1
|
|
685
|
+
text = get_chunk(base, data_file) if text.nil?
|
|
686
|
+
bins = ((base + 1)..rev).map {|r| get_chunk(r, data_file)}
|
|
687
|
+
text = Diffs::MercurialPatch.apply_patches(text, bins)
|
|
688
|
+
|
|
689
|
+
p1, p2 = parents_for_node node
|
|
690
|
+
if node != RevlogSupport::Support.history_hash(text, p1, p2)
|
|
691
|
+
raise RevlogSupport::RevlogError.new("integrity check failed on %s:%d, data:%s" %
|
|
692
|
+
[(@index.inline? ? @index_file : @data_file), rev, text.inspect])
|
|
693
|
+
end
|
|
694
|
+
@index.cache = [node, rev, text]
|
|
695
|
+
text
|
|
696
|
+
end
|
|
697
|
+
|
|
698
|
+
############ TODO
|
|
699
|
+
# @todo FINISH THIS METHOD
|
|
700
|
+
# @todo FIXME
|
|
701
|
+
# FINISH THIS METHOD
|
|
702
|
+
# TODO
|
|
703
|
+
# FIXME
|
|
704
|
+
def check_inline_size(tr, fp=nil)
|
|
705
|
+
return unless @index.inline?
|
|
706
|
+
if fp.nil?
|
|
707
|
+
fp = open(@index_file, "r")
|
|
708
|
+
fp.seek(0, IO::SEEK_END)
|
|
709
|
+
end
|
|
710
|
+
size = fp.tell
|
|
711
|
+
return if size < 131072
|
|
712
|
+
|
|
713
|
+
trinfo = tr.find(@index_file)
|
|
714
|
+
if trinfo.nil?
|
|
715
|
+
raise RevlogSupport::RevlogError.new("#{@index_file} not found in the"+
|
|
716
|
+
"transaction")
|
|
717
|
+
end
|
|
718
|
+
trindex = trinfo[:data]
|
|
719
|
+
data_offset = data_start_for_index trindex
|
|
720
|
+
tr.add @data_file, data_offset
|
|
721
|
+
df = open(@data_file, 'w')
|
|
722
|
+
begin
|
|
723
|
+
calc = @index.entry_size
|
|
724
|
+
self.size.times do |r|
|
|
725
|
+
start = data_start_for_index(r) + (r + 1) * calc
|
|
726
|
+
length = self[r].compressed_len
|
|
727
|
+
fp.seek(start)
|
|
728
|
+
d = fp.read length
|
|
729
|
+
df.write d
|
|
730
|
+
end
|
|
731
|
+
ensure
|
|
732
|
+
df.close
|
|
733
|
+
end
|
|
734
|
+
fp.close
|
|
735
|
+
|
|
736
|
+
############ TODO
|
|
737
|
+
# FINISH THIS METHOD
|
|
738
|
+
############ TODO
|
|
739
|
+
end
|
|
740
|
+
|
|
741
|
+
##
|
|
742
|
+
# add a revision to the log
|
|
743
|
+
#
|
|
744
|
+
# @param [String] text the revision data to add
|
|
745
|
+
# @param transaction the transaction object used for rollback
|
|
746
|
+
# @param link the linkrev data to add
|
|
747
|
+
# @param [String] p1 the parent nodeids of the revision
|
|
748
|
+
# @param [String] p2 the parent nodeids of the revision
|
|
749
|
+
# @param d an optional precomputed delta
|
|
750
|
+
# @return [String] the digest ID referring to the node in the log
|
|
751
|
+
def add_revision(text, transaction, link, p1, p2, d=nil, index_file_handle=nil)
|
|
752
|
+
node = RevlogSupport::Support.history_hash(text, p1, p2)
|
|
753
|
+
return node if @index.node_map[node]
|
|
754
|
+
curr = index_size
|
|
755
|
+
prev = curr - 1
|
|
756
|
+
base = self[prev].base_rev
|
|
757
|
+
offset = data_end_for_index prev
|
|
758
|
+
if curr > 0
|
|
759
|
+
if d.nil? || d.empty?
|
|
760
|
+
ptext = decompress_revision node_id_for_index(prev)
|
|
761
|
+
d = Diffs::MercurialDiff.text_diff(ptext, text)
|
|
762
|
+
end
|
|
763
|
+
data = RevlogSupport::Support.compress d
|
|
764
|
+
len = data[:compression].size + data[:text].size
|
|
765
|
+
dist = len + offset - data_start_for_index(base)
|
|
766
|
+
end
|
|
767
|
+
# Compressed diff > size of actual file
|
|
768
|
+
if curr == 0 || dist > text.size * 2
|
|
769
|
+
data = RevlogSupport::Support.compress text
|
|
770
|
+
len = data[:compression].size + data[:text].size
|
|
771
|
+
base = curr
|
|
772
|
+
end
|
|
773
|
+
entry = RevlogSupport::IndexEntry.new(RevlogSupport::Support.offset_version(offset, 0),
|
|
774
|
+
len, text.size, base, link, rev(p1), rev(p2), node)
|
|
775
|
+
@index << entry
|
|
776
|
+
@index.node_map[node] = curr
|
|
777
|
+
@index.write_entry(@index_file, entry, transaction, data, index_file_handle)
|
|
778
|
+
@index.cache = [node, curr, text]
|
|
779
|
+
node
|
|
780
|
+
end
|
|
781
|
+
|
|
782
|
+
##
|
|
783
|
+
# Finds the most-recent common ancestor for the two nodes.
|
|
784
|
+
def ancestor(a, b)
|
|
785
|
+
parent_func = proc do |rev|
|
|
786
|
+
self.parent_indices_for_index(rev).select {|i| i != NULL_REV }
|
|
787
|
+
end
|
|
788
|
+
c = Graphs::AncestorCalculator.ancestors(revision_index_for_node(a),
|
|
789
|
+
revision_index_for_node(b),
|
|
790
|
+
parent_func)
|
|
791
|
+
return NULL_ID if c.nil?
|
|
792
|
+
node_id_for_index c
|
|
793
|
+
end
|
|
794
|
+
|
|
795
|
+
##
|
|
796
|
+
# Yields chunks of change-group data for writing to disk, given
|
|
797
|
+
# a nodelist, a method to lookup stuff. Given a list of changset
|
|
798
|
+
# revs, return a set of deltas and metadata corresponding to nodes.
|
|
799
|
+
# the first delta is parent(nodes[0]) -> nodes[0] the receiver is
|
|
800
|
+
# guaranteed to have this parent as it has all history before these
|
|
801
|
+
# changesets. parent is parent[0]
|
|
802
|
+
#
|
|
803
|
+
# FIXME -- could be the cause of our failures with #pre_push!
|
|
804
|
+
# @param [[String]] nodelist
|
|
805
|
+
# @param [Proc, #[], #call] lookup
|
|
806
|
+
# @param [Proc, #[], #call] info_collect can be left nil
|
|
807
|
+
def group(nodelist, lookup, info_collect=nil)
|
|
808
|
+
revs = nodelist.map {|n| rev n }
|
|
809
|
+
|
|
810
|
+
# if we don't have any revisions touched by these changesets, bail
|
|
811
|
+
if revs.empty?
|
|
812
|
+
yield RevlogSupport::ChangeGroup.closing_chunk
|
|
813
|
+
return
|
|
814
|
+
end
|
|
815
|
+
|
|
816
|
+
# add the parent of the first rev
|
|
817
|
+
parent1 = parents_for_node(node(revs[0]))[0]
|
|
818
|
+
revs.unshift rev(parent1)
|
|
819
|
+
|
|
820
|
+
# build deltas
|
|
821
|
+
0.upto(revs.size - 2) do |d|
|
|
822
|
+
a, b = revs[d], revs[d + 1]
|
|
823
|
+
nb = node b
|
|
824
|
+
|
|
825
|
+
info_collect[nb] if info_collect
|
|
826
|
+
|
|
827
|
+
p = parents(nb)
|
|
828
|
+
meta = nb + p[0] + p[1] + lookup[nb]
|
|
829
|
+
|
|
830
|
+
if a == -1
|
|
831
|
+
data = decompress_revision nb
|
|
832
|
+
meta += Diffs::MercurialDiff.trivial_diff_header(d.size)
|
|
833
|
+
else
|
|
834
|
+
|
|
835
|
+
data = revision_diff(a, b)
|
|
836
|
+
end
|
|
837
|
+
|
|
838
|
+
yield RevlogSupport::ChangeGroup.chunk_header(meta.size + data.size)
|
|
839
|
+
yield meta
|
|
840
|
+
if data.size > 1048576
|
|
841
|
+
pos = 0
|
|
842
|
+
while pos < data.size
|
|
843
|
+
pos2 = pos + 262144
|
|
844
|
+
yield data[pos..(pos2-1)]
|
|
845
|
+
pos = pos2
|
|
846
|
+
end
|
|
847
|
+
else
|
|
848
|
+
yield data
|
|
849
|
+
end
|
|
850
|
+
end
|
|
851
|
+
yield RevlogSupport::ChangeGroup.closing_chunk
|
|
852
|
+
end
|
|
853
|
+
|
|
854
|
+
# Adds a changelog to the index
|
|
855
|
+
#
|
|
856
|
+
# @param [StringIO, #string] revisions something we can iterate over (Usually a StringIO)
|
|
857
|
+
# @param [Proc, #call, #[]] link_mapper
|
|
858
|
+
# @param [Amp::Journal] journal to start a transaction
|
|
859
|
+
def add_group(revisions, link_mapper, journal)
|
|
860
|
+
r = index_size
|
|
861
|
+
t = r - 1
|
|
862
|
+
node = nil
|
|
863
|
+
|
|
864
|
+
base = prev = RevlogSupport::Node::NULL_REV
|
|
865
|
+
start = endpt = text_len = 0
|
|
866
|
+
endpt = data_end_for_index t if r != 0
|
|
867
|
+
|
|
868
|
+
index_file_handle = open(@index_file, "a+")
|
|
869
|
+
index_size = r * @index.entry_size
|
|
870
|
+
if @index.inline?
|
|
871
|
+
journal << [@index_file, endpt + index_size, r]
|
|
872
|
+
data_file_handle = nil
|
|
873
|
+
else
|
|
874
|
+
journal << [@index_file, index_size, r]
|
|
875
|
+
journal << [@data_file, endpt]
|
|
876
|
+
data_file_handle = open(@data_file, "a")
|
|
877
|
+
end
|
|
878
|
+
|
|
879
|
+
begin #errors abound here i guess
|
|
880
|
+
chain = nil
|
|
881
|
+
|
|
882
|
+
Amp::RevlogSupport::ChangeGroup.each_chunk(revisions) do |chunk|
|
|
883
|
+
node, parent1, parent2, cs = chunk[0..79].unpack("a20a20a20a20")
|
|
884
|
+
link = link_mapper.call(cs)
|
|
885
|
+
|
|
886
|
+
if @index.node_map[node]
|
|
887
|
+
chain = node
|
|
888
|
+
next
|
|
889
|
+
end
|
|
890
|
+
delta = chunk[80..-1]
|
|
891
|
+
[parent1, parent2].each do |parent|
|
|
892
|
+
unless @index.node_map[parent]
|
|
893
|
+
raise RevlogSupport::LookupError.new("unknown parent #{parent}"+
|
|
894
|
+
" in #{@index_file}")
|
|
895
|
+
end
|
|
896
|
+
end
|
|
897
|
+
|
|
898
|
+
unless chain
|
|
899
|
+
chain = parent1
|
|
900
|
+
unless @index.node_map[chain]
|
|
901
|
+
raise RevlogSupport::LookupError.new("unknown parent #{chain}"+
|
|
902
|
+
" from #{chain} in #{@index_file}")
|
|
903
|
+
end
|
|
904
|
+
end
|
|
905
|
+
|
|
906
|
+
if chain == prev
|
|
907
|
+
cdelta = RevlogSupport::Support.compress delta
|
|
908
|
+
cdeltalen = cdelta[:compression].size + cdelta[:text].size
|
|
909
|
+
text_len = Diffs::MercurialPatch.patched_size text_len, delta
|
|
910
|
+
end
|
|
911
|
+
|
|
912
|
+
if chain != prev || (endpt - start + cdeltalen) > text_len * 2
|
|
913
|
+
#flush our writes here so we can read it in revision
|
|
914
|
+
data_file_handle.flush if data_file_handle
|
|
915
|
+
index_file_handle.flush
|
|
916
|
+
text = decompress_revision(chain)
|
|
917
|
+
if text.size == 0
|
|
918
|
+
text = delta[12..-1]
|
|
919
|
+
else
|
|
920
|
+
text = Diffs::MercurialPatch.apply_patches(text, [delta])
|
|
921
|
+
end
|
|
922
|
+
chk = add_revision(text, journal, link, parent1, parent2,
|
|
923
|
+
nil, index_file_handle)
|
|
924
|
+
# if (! data_file_handle) && (! @index.inline?)
|
|
925
|
+
# data_file_handle = open(@data_file, "a")
|
|
926
|
+
# index_file_handle = open(@index_file, "a")
|
|
927
|
+
# end
|
|
928
|
+
if chk != node
|
|
929
|
+
raise RevlogSupport::RevlogError.new("consistency error "+
|
|
930
|
+
"adding group")
|
|
931
|
+
end
|
|
932
|
+
text_len = text.size
|
|
933
|
+
else
|
|
934
|
+
entry = RevlogSupport::IndexEntry.new(RevlogSupport::Support.offset_version(endpt, 0),
|
|
935
|
+
cdeltalen,text_len, base, link, rev(parent1), rev(parent2), node)
|
|
936
|
+
@index << entry
|
|
937
|
+
@index.node_map[node] = r
|
|
938
|
+
@index.write_entry(@index_file, entry, journal, cdelta, index_file_handle)
|
|
939
|
+
end
|
|
940
|
+
|
|
941
|
+
|
|
942
|
+
t, r, chain, prev = r, r + 1, node, node
|
|
943
|
+
base = self[t].base_rev
|
|
944
|
+
start = data_start_for_index base
|
|
945
|
+
endpt = data_end_for_index t
|
|
946
|
+
end
|
|
947
|
+
rescue Exception => e
|
|
948
|
+
puts e
|
|
949
|
+
puts e.backtrace
|
|
950
|
+
ensure
|
|
951
|
+
if data_file_handle && !(data_file_handle.closed?)
|
|
952
|
+
data_file_handle.close
|
|
953
|
+
end
|
|
954
|
+
index_file_handle.close
|
|
955
|
+
end
|
|
956
|
+
node
|
|
957
|
+
end
|
|
958
|
+
|
|
959
|
+
##
|
|
960
|
+
# Strips all revisions after (and including) a given link_index
|
|
961
|
+
def strip(min_link)
|
|
962
|
+
return if size == 0
|
|
963
|
+
|
|
964
|
+
load_index_map if @index.is_a? RevlogSupport::LazyIndex
|
|
965
|
+
|
|
966
|
+
rev = 0
|
|
967
|
+
all_indices.each {|_rev| rev = _rev; break if @index[rev].link_rev >= min_link }
|
|
968
|
+
return if rev > all_indices.max
|
|
969
|
+
|
|
970
|
+
endpt = data_start_for_index rev
|
|
971
|
+
unless @index.inline?
|
|
972
|
+
df = File.open(@data_file, "a")
|
|
973
|
+
df.truncate(endpt)
|
|
974
|
+
endpt = rev * @index.entry_size
|
|
975
|
+
else
|
|
976
|
+
endpt += rev * @index.entry_size
|
|
977
|
+
end
|
|
978
|
+
|
|
979
|
+
indexf = File.open(@index_file, "a")
|
|
980
|
+
indexf.truncate(endpt)
|
|
981
|
+
|
|
982
|
+
@cache = @index.cache = nil
|
|
983
|
+
@chunk_cache = nil
|
|
984
|
+
rev.upto(self.size-1) {|x| @index.node_map.delete(self.node(x)) }
|
|
985
|
+
@index.index = @index.index[0..rev-1]
|
|
986
|
+
end
|
|
987
|
+
|
|
988
|
+
##
|
|
989
|
+
# Checks to make sure our data and index files are the right size.
|
|
990
|
+
# Returns the differences between expected and actual sizes.
|
|
991
|
+
def checksize
|
|
992
|
+
expected = 0
|
|
993
|
+
expected = [0, data_end_for_index(self.index_size - 1)].max if self.index_size > 0
|
|
994
|
+
|
|
995
|
+
|
|
996
|
+
|
|
997
|
+
|
|
998
|
+
f = open(@index_file)
|
|
999
|
+
f.seek(0, IO::SEEK_END)
|
|
1000
|
+
actual = f.tell
|
|
1001
|
+
s = @index.entry_size
|
|
1002
|
+
i = [0, actual / s].max
|
|
1003
|
+
di = actual - (i * s)
|
|
1004
|
+
|
|
1005
|
+
if @index.inline?
|
|
1006
|
+
databytes = 0
|
|
1007
|
+
self.index_size.times do |r|
|
|
1008
|
+
databytes += [0, self[r].compressed_len].max
|
|
1009
|
+
end
|
|
1010
|
+
dd = 0
|
|
1011
|
+
di = actual - (self.index_size * s) - databytes
|
|
1012
|
+
else
|
|
1013
|
+
f = open(@data_file)
|
|
1014
|
+
f.seek(0, IO::SEEK_END)
|
|
1015
|
+
actual = f.tell
|
|
1016
|
+
dd = actual - expected
|
|
1017
|
+
f.close
|
|
1018
|
+
end
|
|
1019
|
+
|
|
1020
|
+
return {:data_diff => dd, :index_diff => di}
|
|
1021
|
+
end
|
|
1022
|
+
|
|
1023
|
+
##
|
|
1024
|
+
# Returns all the files this object is concerned with.
|
|
1025
|
+
def files
|
|
1026
|
+
res = [ @index_file ]
|
|
1027
|
+
res << @data_file unless @index.inline?
|
|
1028
|
+
res
|
|
1029
|
+
end
|
|
1030
|
+
|
|
1031
|
+
end
|
|
1032
|
+
end
|