Soks 0.0.2
This diff shows the content of publicly released package versions, and the changes between them, as they appear in their respective public registries. It is provided for informational purposes only.
- data/LICENSE.txt +60 -0
- data/README.txt +65 -0
- data/bin/soks-create-wiki.rb +41 -0
- data/contrib/diff/lcs.rb +1105 -0
- data/contrib/diff/lcs/array.rb +21 -0
- data/contrib/diff/lcs/block.rb +51 -0
- data/contrib/diff/lcs/callbacks.rb +322 -0
- data/contrib/diff/lcs/change.rb +169 -0
- data/contrib/diff/lcs/hunk.rb +257 -0
- data/contrib/diff/lcs/ldiff.rb +226 -0
- data/contrib/diff/lcs/string.rb +19 -0
- data/contrib/diff_licence.txt +76 -0
- data/contrib/redcloth-2.0.11.rb +894 -0
- data/contrib/redcloth-3.0.1.rb +1019 -0
- data/contrib/redcloth_license.txt +27 -0
- data/lib/authenticators.rb +79 -0
- data/lib/soks-helpers.rb +321 -0
- data/lib/soks-model.rb +208 -0
- data/lib/soks-servlet.rb +125 -0
- data/lib/soks-utils.rb +80 -0
- data/lib/soks-view.rb +424 -0
- data/lib/soks.rb +19 -0
- data/template/attachment/logo.png +0 -0
- data/template/attachment/stylesheet.css +63 -0
- data/template/content/How%20to%20export%20a%20site%20from%20this%20wiki.textile +5 -0
- data/template/content/How%20to%20hack%20soks.textile +60 -0
- data/template/content/How%20to%20import%20a%20site%20from%20instiki.textile +13 -0
- data/template/content/Improving%20the%20style%20of%20this%20wiki.textile +30 -0
- data/template/content/Picture%20of%20a%20pair%20of%20soks.textile +1 -0
- data/template/content/Pointers%20on%20adjusting%20the%20settings.textile +39 -0
- data/template/content/Pointers%20on%20how%20to%20use%20this%20wiki.textile +21 -0
- data/template/content/Recent%20Changes%20to%20This%20Site.textile +203 -0
- data/template/content/Soks%20Licence.textile +64 -0
- data/template/content/home%20page.textile +18 -0
- data/template/start.rb +74 -0
- data/template/views/AttachmentPage_edit.rhtml +36 -0
- data/template/views/ImagePage_edit.rhtml +36 -0
- data/template/views/Page_content.rhtml +1 -0
- data/template/views/Page_edit.rhtml +34 -0
- data/template/views/Page_print.rhtml +5 -0
- data/template/views/Page_revisions.rhtml +18 -0
- data/template/views/Page_rss.rhtml +34 -0
- data/template/views/Page_search_results.rhtml +19 -0
- data/template/views/Page_view.rhtml +3 -0
- data/template/views/frame.rhtml +34 -0
- metadata +88 -0
data/lib/soks-servlet.rb
ADDED
@@ -0,0 +1,125 @@
#!/usr/local/bin/ruby
require 'authenticators'

Thread.abort_on_exception = true
Socket.do_not_reverse_lookup = true

class WikiServlet < WEBrick::HTTPServlet::AbstractServlet

  def initialize( server, wiki, name, wikiurl, authenticator = nil, no_authentication_for = [] )
    @server, @wiki, @name, @wikiurl, @authenticator, @no_authentication_for = server, wiki, name, wikiurl, authenticator, no_authentication_for
  end

  def service( request, response )
    leadingslash, verb, *pagename = request.path.split('/')
    redirect( response, "Home Page" ) unless verb
    if verb == 'find' # The search term is from a form
      pagename = [ request.query['regex'] ]
    elsif pagename.empty? # There was no verb. Use the pagename instead
      verb, pagename = 'view', [ verb ]
    end
    @authenticator.authenticate( request, response ) unless @no_authentication_for.include? verb

    if self.respond_to?( "do#{verb.capitalize}" )
      self.send( "do#{verb.capitalize}", request, response, pagename.join('/'), request.user )
    else
      response.body = @wiki.view( pagename.join('/'), verb, request.user )
    end
    response['Content-Type'] ||= "text/html"
  end

  def redirect( response, pagename, verb = 'view' )
    pagename = pagename.url_encode # Reformat into url encoding
    response.set_redirect(WEBrick::HTTPStatus::Found, verb ? "#{@wikiurl}/#{verb}/#{pagename}" : "#{@wikiurl}/#{pagename}")
  end

  def doFind( request, response, pagename, person )
    response.body = @wiki.find( pagename )
  end

  def doSave( request, response, pagename, person )
    content = request.query["content"].to_s
    newpagename = request.query["titleprefix"].to_s + request.query["newtitle"].to_s
    @wiki.revise( pagename, content.gsub(/\r\n/,"\n"), person, newpagename) if content
    redirect( response, newpagename )
  end

  def doRollback( request, response, pagename, person )
    revision = request.query['revision'] ? request.query['revision'].to_i : nil
    @wiki.rollback( pagename, revision, person ) if revision
    redirect( response, pagename )
  end

  def doDelete( request, response, pagename, person )
    @wiki.delete( pagename, person )
    redirect( response, pagename )
  end

  def doUpload( request, response, pagename, person )
    newpagename = request.query["titleprefix"].to_s + request.query["newtitle"].to_s
    unless request.query['file'] == ""
      @wiki.revise( pagename, upload( request.query['file'].filename[/[0-9A-Za-z.]*$/], request.query['file'] ), person, newpagename )
    else
      @wiki.move( pagename, person, newpagename )
    end
    redirect( response, newpagename )
  end

  def doRss( request, response, pagename, person )
    response.body = @wiki.view( pagename, 'rss', person )
    response["Content-Type"] = "text/xml"
  end

  private

  def upload( filename, filedata )
    path = "#{$SETTINGS[:root_directory]}/attachment/"
    filename = unique_filename( path, filename )
    File.open( File.join( path, filename ), 'w' ) { |file| filedata.list.each { |data| file << data } }
    "/attachment/#{filename}"
  end

  def unique_filename( path, filename )
    bits = filename.split(/\./)
    while File.exist? File.join( path, filename )
      if bits.length >= 1
        bits[-2].succ!
      else
        bits[0].succ!
      end
      filename = bits.join('.')
    end
    filename
  end
end

$SETTINGS = {
  :name => 'test',
  :description => 'A Soks Wiki',
  :root_directory => 'wiki',
  :url => 'http://localhost:8000',
  :port => 8000,
  :authenticator => WEBrick::HTTPAuth::NotAuthentication.new( 'No password, just enter a name'),
  :dont_authenticate => ['view','rss']
}


def start_wiki( settings = {}, &automatic_agents )
  $SETTINGS.merge! settings

  wiki = Wiki.new( "#{$SETTINGS[:root_directory]}/content" )
  view = View.new( wiki, $SETTINGS[:name] )

  Thread.new( automatic_agents, view ) do |block, view|
    block.call( view )
  end.priority = -2

  server = WEBrick::HTTPServer.new( { :Port => $SETTINGS[:port] } )
  server.mount("/attachment", WEBrick::HTTPServlet::FileHandler, "#{ $SETTINGS[:root_directory] }/attachment", true)
  server.mount("/", WikiServlet, view, $SETTINGS[:name], $SETTINGS[:url], $SETTINGS[:authenticator], $SETTINGS[:dont_authenticate] )

  trap("INT") { server.shutdown }

  server.start
end
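For orientation, a minimal sketch of how the start_wiki method defined above might be invoked from a start script. The settings keys mirror the $SETTINGS defaults in this file, and the block is the optional automatic_agents hook, which is handed the View in a low-priority background thread; the watch_for call relies on the Notify mixin from soks-utils.rb. This is illustrative only and is not copied from the package's own template/start.rb.

# Illustrative usage sketch (not part of the package). Assumes soks.rb pulls in
# the library files, and that the root directory contains the content/,
# attachment/ and views/ subdirectories this file expects.
require 'soks'

start_wiki(
  :name           => 'My Wiki',
  :root_directory => '/var/wikis/mywiki',
  :url            => 'http://localhost:8000',
  :port           => 8000
) do |view|
  # Optional automatic agents: this block runs in a background thread and
  # receives the View, so it can subscribe to events via the Notify mixin.
  view.watch_for( :page_revised ) do |event, page, revision|
    $stderr.puts "#{page.name} revised"
  end
end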
data/lib/soks-utils.rb
ADDED
@@ -0,0 +1,80 @@
class EventQueue

  def initialize
    @queue = Queue.new
    Thread.new do
      loop do
        check_for_events
      end
    end.priority = -1
  end

  def event( event, messages )
    @queue.enq [ event, messages ]
  end

  def watch_for( events, action_block )
    events.each { |event| watchers_for(event) << action_block }
  end

  private

  def check_for_events
    event, messages = @queue.deq
    notify( event, *messages )
  end

  def notify( event, *messages)
    watchers_for( event ).each { |action_block|
      action_block.call(event, *messages)
    }
  end

  def watchers_for( event )
    watchers[ event ] ||= []
  end

  def watchers
    @watchers ||= {}
  end
end

# This is a bit like observable, but for events
module Notify

  def watch_for( *events, &action_block )
    self.event_queue.watch_for( events, action_block )
  end

  def notify( event, *messages)
    self.event_queue.event( event, messages )
  end

  def event_queue
    @event_queue ||= EventQueue.new
  end
end

class String
  # Return the left bit of a string e.g. "String".left(2) => "St"
  def left( length ) self.slice( 0, length ) end

  # Encode the string so it can be used in urls (code coppied from CGI)
  def url_encode
    self.gsub(/([^a-zA-Z0-9_.-]+)/n) do
      '%' + $1.unpack('H2' * $1.size).join('%').upcase
    end.tr(' ', '+')
  end

  # Decode a string url encoded so it can be used in urls (code coppied from CGI)
  def url_decode
    self.gsub(/((?:%[0-9a-fA-F]{2})+)/n) do
      [$1.delete('%')].pack('H*')
    end
  end

  # Return the first n lines of the string
  def first_lines( lines = 1 )
    self.split("\n")[0,lines].join("\n")
  end
end
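A short usage sketch for the helpers above: the Notify mixin gives a class an asynchronous event queue through watch_for and notify, and the String additions handle URL encoding, decoding and truncation. The Recorder class below is hypothetical, invented purely to illustrate the mixin; it assumes soks-utils.rb (and Ruby's thread library, for Queue) is loaded.

# Hypothetical class mixing in Notify to publish events.
class Recorder
  include Notify

  def record( name )
    notify( :recorded, name ) # queued, dispatched later on the EventQueue thread
  end
end

recorder = Recorder.new
recorder.watch_for( :recorded ) { |event, name| puts "Saw #{name}" }
recorder.record( 'home page' )
sleep 0.1 # dispatch is asynchronous, so give the queue thread a moment

# String helpers defined above:
'How to hack soks'.url_encode                     # => "How%20to%20hack%20soks"
'home%20page'.url_decode                          # => "home page"
"line one\nline two\nline three".first_lines( 2 ) # => "line one\nline two"
'String'.left( 2 )                                # => "St"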
data/lib/soks-view.rb
ADDED
@@ -0,0 +1,424 @@
class Links
  def initialize
    @links = Hash.new
  end

  def links_from( page ) ; return @links[ page ] ; end

  def set_links_from( page, linkarray )
    page.links_from = @links[ page ] = linkarray.uniq
    page.links_to = links_to( page )
    page.links_from.each { |linked| linked.links_to = links_to( linked ) }
  end

  def links_to( thispage )
    linksto = Array.new
    @links.each do | pagefrom, pagesto |
      next if pagefrom == thispage
      unless [ "site index", "recent changes to this site" ].include? pagefrom.name.downcase
        if pagesto.include? thispage
          linksto << pagefrom
        end
      end
    end
    return linksto.uniq.sort
  end
end

# This class matches against a text finding the longest matches it can.
# Note that it isn't that sophisticated, and will tend to match
# the first thing it can, rather than skipping a shorter match to enable
# a longer one that starts at a later point in the text
class RollingMatch
  WORD_REGEX = /^\w*/
  SPACE_REGEX = /^\W*/
  IGNORE_CASE = true

  def initialize
    @firstwords, @matches = Hash.new, Hash.new
  end

  def []( title ) @matches[ lower_case(title) ] end
  alias :object_for :[]

  def delete( title )
    @matches.delete( title )
  end

  # Use this to add a string to match and an associated object to return
  # if an object is matched.
  def []=( title, object )
    @matches[lower_case(title)] = object
    unless lengths_for_first_word_of( title ).include? title.length
      (lengths_for_first_word_of( title ) << title.length).sort!
    end
  end

  # This returns the longest match that starts with the first word in the text
  def longest_match( text, *dont_match )
    lengths_for_first_word_of( text ).reverse_each do |length|
      break if length > text.length
      if object_for(text.left(length)) && !dont_match.include?( lower_case( text.left(length) ) )
        return text.left(length)
      end
    end
    nil
  end

  # This iterates through the text returning the longest matches
  def rolling_match( text, *dont_match )
    text = text.dup
    skipped_text = ""
    until text.empty?
      matching_title = longest_match( text, *dont_match )
      if matching_title
        yield skipped_text, matching_title, object_for( matching_title )
        text.slice!( 0, matching_title.length ) # Skip to the end of the matched bit
        skipped_text = text.slice!( SPACE_REGEX ) # Skip to the end of the next space
      else
        skipped_text << text.slice!( WORD_REGEX ) << text.slice!( SPACE_REGEX ) # Skip to the begining of the next word
      end
    end
    yield skipped_text, nil, nil
  end

  private

  def lengths_for_first_word_of(title)
    @firstwords[ lower_case( title ).slice( WORD_REGEX ) ] ||= Array.new
  end

  def lower_case( text )
    IGNORE_CASE ? text.downcase : text
  end
end

# This adds some extra match types to (a bodged version of) RedCloth
#
# Specifically:
# * Inserting other pages
# * Square bracketed wiki links
# * Automaticlly links anytime the title of another page appears in the text
# * Automatically links things that look like email addresses and urls
class WikiRedCloth < RedCloth
  def initialize( wiki, page, string, restrictions = [] )
    super(string, restrictions)
    @wiki, @page = wiki, page
    @rules = [ :soks, :textile ]
  end

  if RedCloth::VERSION == '2.0.11'
    def to_html( lite = nil )

      @lite = lite unless lite.nil?

      # make our working copy
      text = self.dup

      @urlrefs = {}
      @shelf = []
      @internal_links_from_page = [] # NEW

      insert_sub_strings text # NEW

      incoming_entities text
      ## encode_entities text
      ## fix_entities text
      clean_white_space text

      get_refs text

      no_textile text

      pre_list = rip_offtags text # NEW wasteful to duplicate, but reduces overloading
      hide_textile_links text # NEW
      hide_textile_image_tags text # NEW
      inline_soks_external_link text # NEW
      inline_soks_bracketed_link text # NEW
      inline_soks_automatic_link text # NEW
      @wiki.links.set_links_from( @page, @internal_links_from_page ) # NEW
      unhide_textile text # NEW
      smooth_offtags text, pre_list # NEW wasteful to duplicate

      inline text

      unless @lite
        fold text
        block text
      end

      retrieve text

      text.gsub!( /<\/?notextile>/, '' )
      text.gsub!( /x%x%/, '&' )
      text.gsub!( /<br \/>/, "<br />\n" )
      text.strip!
      text
    end
  end

  if RedCloth::VERSION == '3.0.1'
    def to_html( *rules )
      rules = @rules if rules.empty?
      # make our working copy
      text = self.dup

      @urlrefs = {}
      @shelf = []
      @internal_links_from_page = []

      textile_rules = [:refs_textile, :block_textile_table, :block_textile_lists,
        :block_textile_prefix, :inline_textile_image, :inline_textile_link,
        :inline_textile_code, :inline_textile_glyphs, :inline_textile_span]
      markdown_rules = [:refs_markdown, :block_markdown_setext, :block_markdown_atx, :block_markdown_rule,
        :block_markdown_bq, :block_markdown_lists,
        :inline_markdown_reflink, :inline_markdown_link]
      soks_rules = [ :hide_textile_links, :hide_textile_image_tags, :inline_soks_external_link, :inline_soks_bracketed_link, :inline_soks_automatic_link, :unhide_textile ]
      @rules = rules.collect do |rule|
        case rule
        when :markdown
          markdown_rules
        when :textile
          textile_rules
        when :soks
          soks_rules
        else
          rule
        end
      end.flatten

      # insert sub pages
      insert_sub_strings text

      # standard clean up
      incoming_entities text
      clean_white_space text

      # start processor
      pre_list = rip_offtags text
      refs text
      blocks text
      inline text
      smooth_offtags text, pre_list

      retrieve text

      @wiki.links.set_links_from( @page, @internal_links_from_page )

      text.gsub!( /<\/?notextile>/, '' )
      text.gsub!( /x%x%/, '&' )
      text.strip!
      text
    end
  end

  private

  def insert_sub_strings( text, count = 0 )
    return text if count > 5 # Stops us getting locked into a cycle if people mess up the insert
    text.gsub!(/\[\[\s*insert (.+?)\]\]/i) do |m|
      inserted_page = @wiki.page( $1.to_s.strip )
      if @wiki.exists? inserted_page.name
        @internal_links_from_page << inserted_page
        inserted_page.is_inserted_into( @page )
      end
      insert_sub_strings( "#{inserted_page.textile}\n", count + 1 )
    end
    "\n\n#{text}\n\n"
  end

  def inline_soks_external_link( text )
    text.gsub!(/http:\/\/[^ \n<]+/i) { |m| link m }
    text.gsub!(/https:\/\/[^ \n<]+/i) { |m| link m }
    text.gsub!(/www.[^ \n<]*/i) { |m| link( "http://#{m}", m) }
    text.gsub!(/[A-Za-z0-9.]+?@[A-Za-z0-9.]+/) { |m| link( "mailto:#{m}", m) }
  end

  def inline_soks_bracketed_link( text )
    text.gsub!(/\[\[([^\]]+)\]\]/) do |m|
      title, *pagename = m[2..-3].split(':').map { |t| t.strip }
      pagename = pagename.empty? ? title : pagename.join(':')
      case pagename
      when /^www./i ; link("http://#{pagename}", title )
      when /[A-Za-z0-9.]+?@[A-Za-z0-9.]+/ ; link("mailto:#{pagename}",title)
      when /^http/i ; link(pagename,title)
      else ; wiki_link( pagename, title )
      end
    end
  end

  def inline_soks_automatic_link( text )
    linkedtext = ""
    @wiki.rollingmatch.rolling_match( text, @page.name.downcase ) do |skip, title, page|
      linkedtext << skip
      linkedtext << wiki_link( page.name, title, 'automatic' ) if title
    end
    text.replace( linkedtext )
  end

  def wiki_link( pagename, title, css = nil )
    if @wiki.exists? pagename
      @internal_links_from_page << @wiki.page( pagename )
      link(pagename,title, css || '' )
    else
      link(pagename,title, css || 'missing')
    end
  end

  def link( url, title = url, css = '' )
    shelve "<a href='#{url}' class='#{css}'>#{title}</a>"
  end

  def hide_textile_image_tags( text )
    text.gsub!( IMAGE_RE ) do |match|
      hide_textile match
    end
    text
  end

  def hide_textile_links( text )
    text.gsub!( LINK_RE ) do |match|
      hide_textile match
    end
    text
  end

  def unhide_textile( text )
    hidden_textile.each_with_index do |r, i|
      text.gsub!( " <$%#{ i + 1 }%$> ", r )
    end
    text
  end

  def hide_textile( text )
    hidden_textile << text
    " <$%#{hidden_textile.length}%$> "
  end

  def hidden_textile
    @hidden_textile_links ||= []
  end

end

class View
  include Notify
  attr_reader :rollingmatch, :links

  def initialize( wiki, name )
    @wikiname = name
    @rollingmatch, @links, @redcloth_cache, @erb_cache = RollingMatch.new, Links.new, Hash.new, Hash.new
    @wiki = wiki
  end

  def view( pagename, view = 'view', person = nil )
    page = @wiki.page pagename
    renderedview = redcloth( page )
    content_of_page = html( page.class, view, binding )
    if should_frame? view
      frame_erb.result binding
    else
      content_of_page
    end
  end

  def find( pagename )
    return view( pagename ) if @wiki.exists?( pagename )
    search_term = /#{pagename}/i
    title_results = @wiki.select { |name,page| name =~ search_term }
    text_results = @wiki.select { |name,page| page.content =~ search_term }
    content_of_page = html( Page, 'search_results', binding )
    page = nil
    frame_erb.result binding
  end

  def revise( pagename, content, person, newpagename = pagename )
    if @wiki.exists? pagename
      unless newpagename == pagename
        mutate( pagename ) { @wiki.revise( pagename, "Content moved to [[#{newpagename}]]", person ) }
        mutate( newpagename ) { @wiki.revise( newpagename, "Content moved from [[#{pagename}]]", person ) }
      end
    end
    mutate( newpagename ) { @wiki.revise( newpagename, content, person ) }
  end

  def move( oldpagename, person, newpagename )
    unless newpagename == oldpagename
      mutate( newpagename ) { @wiki.revise( newpagename, "Content moved from [[#{pagename}]]", person ) }
      mutate( newpagename ) { @wiki.revise( newpagename, @wiki.page( oldpagename ).content, person ) }
      mutate( oldpagename ) { @wiki.revise( oldpagename, "Content moved to [[#{newpagename}]]", person ) }
    end
  end

  def rollback( pagename, revision, person )
    mutate( pagename ) { @wiki.rollback( pagename, revision, person ) }
  end

  def delete( pagename, person )
    mutate( pagename ) { @wiki.revise( pagename, 'Page deleted', person ) }
  end

  def mutate( pagename )
    didexist = @wiki.exists? pagename
    yield
    page = @wiki.page( pagename )
    clear_redcloth_cache( page )
    notify :page_revised, page, page.revisions.last
    if page.deleted?
      notify :page_deleted, page
    elsif !didexist
      notify :page_created, page
    end
  end

  def refresh_redcloth( page )
    $stderr.puts "Refreshing #{page}"
    clear_redcloth_cache( page )
    redcloth( page )
  end

  def redcloth( page )
    @redcloth_cache[page] = "" if page.textile.strip == ""
    @redcloth_cache[ page ] ||= WikiRedCloth.new( self, page, page.textile ).to_html
  end

  def clear_redcloth_cache( page = :all_pages )
    ( page == :all_pages ) ? @redcloth_cache.clear : @redcloth_cache.delete( page )
  end

  def html( klass, view, _binding )
    #@erb_cache.clear
    ( @erb_cache[ path_for( klass, view ) ] ||= ERB.new( IO.readlines( erb_filename( klass, view ) ).join ) ).result( _binding )
  end

  def erb_filename( klass, view )
    $stderr.puts "Looking for #{path_for( klass, view)}"
    until File.exists?( path_for( klass, view ) )
      klass = klass.superclass
    end
    path_for( klass, view )
  end

  def path_for( klass, view ) "#{$SETTINGS[:root_directory]}/views/#{klass}_#{view}.rhtml" end

  def should_frame?( view )
    return true unless ['print','rss'].include? view.downcase
  end

  def frame_erb
    @frame_erb ||= load_frame_erb
  end

  def load_frame_erb
    if File.exists? "#{$SETTINGS[:root_directory]}/views/frame.rhtml"
      ERB.new( IO.readlines( "#{$SETTINGS[:root_directory]}/views/frame.rhtml" ).join )
    else
      ERB.new( "<%= content_of_page %>" )
    end
  end

  def method_missing( method, *args, &block )
    @wiki.send( method, *args, &block )
  end
end
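To make the automatic-linking pass above concrete, here is a standalone sketch of RollingMatch: page titles map to arbitrary objects, and rolling_match walks a text yielding the skipped text plus the longest registered title found at each word boundary. Plain symbols stand in for wiki Page objects, and the String#left and String#url_encode helpers from soks-utils.rb are assumed to be loaded. Illustrative only.

matcher = RollingMatch.new
matcher['home']         = :home       # title => object returned on a match
matcher['home page']    = :home_page
matcher['soks licence'] = :licence

text = "See the home page and the Soks Licence for details"
html = ""
matcher.rolling_match( text ) do |skipped, title, object|
  html << skipped
  html << "<a href='/view/#{title.url_encode}'>#{title}</a>" if title
end
# Matching is case-insensitive and prefers the longest title starting at each
# word: "home page" wins over "home", and "Soks Licence" is matched despite
# the different capitalisation, keeping the casing used in the text.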