sup 0.14.1.1 → 0.15.0

@@ -168,6 +168,32 @@ EOS
     matchset.matches_estimated
   end

+  ## check if a message is part of a killed thread
+  ## (warning: duplicates code below)
+  ## NOTE: We can be more efficient if we assume every
+  ## killed message that hasn't been initially added
+  ## to the index is this way
+  def message_joining_killed? m
+    return false unless doc = find_doc(m.id)
+    queue = doc.value(THREAD_VALUENO).split(',')
+    seen_threads = Set.new
+    seen_messages = Set.new [m.id]
+    while not queue.empty?
+      thread_id = queue.pop
+      next if seen_threads.member? thread_id
+      return true if thread_killed?(thread_id)
+      seen_threads << thread_id
+      docs = term_docids(mkterm(:thread, thread_id)).map { |x| @xapian.document x }
+      docs.each do |doc|
+        msgid = doc.value MSGID_VALUENO
+        next if seen_messages.member? msgid
+        seen_messages << msgid
+        queue.concat doc.value(THREAD_VALUENO).split(',')
+      end
+    end
+    false
+  end
+
   ## yield all messages in the thread containing 'm' by repeatedly
   ## querying the index. yields pairs of message ids and
   ## message-building lambdas, so that building an unwanted message
@@ -248,11 +274,11 @@ EOS

   ## Yield each message-id matching query
   EACH_ID_PAGE = 100
-  def each_id query={}
+  def each_id query={}, ignore_neg_terms = true
     offset = 0
     page = EACH_ID_PAGE

-    xapian_query = build_xapian_query query
+    xapian_query = build_xapian_query query, ignore_neg_terms
     while true
       ids = run_query_ids xapian_query, offset, (offset+page)
       ids.each { |id| yield id }
@@ -262,8 +288,12 @@ EOS
   end

   ## Yield each message matching query
-  def each_message query={}, &b
-    each_id query do |id|
+  ## The ignore_neg_terms parameter is used to display results even if
+  ## they carry "forbidden" labels such as :deleted; it is used in
+  ## Poll#poll_from when we need to get the location of a message that
+  ## may carry these labels
+  def each_message query={}, ignore_neg_terms = true, &b
+    each_id query, ignore_neg_terms do |id|
       yield build_message(id)
     end
   end
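
A rough usage sketch of the new parameter (the query hash and the block body are illustrative, not taken from this diff): passing false as ignore_neg_terms skips the :spam/:deleted/:killed exclusion terms, so messages carrying those labels are yielded as well. Index.instance and each_message are the names used in the hunks above, inside the Redwood namespace.

    # Hypothetical caller:
    Index.instance.each_message({ :label => :inbox }, false) do |m|
      puts m.id   # also yields messages labelled :deleted, :spam or :killed
    end
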
@@ -313,9 +343,9 @@ EOS
   ## Yields (in lexicographical order) the source infos of all locations from
   ## the given source with the given source_info prefix
   def each_source_info source_id, prefix='', &b
-    prefix = mkterm :location, source_id, prefix
-    each_prefixed_term prefix do |x|
-      yield x[prefix.length..-1]
+    p = mkterm :location, source_id, prefix
+    each_prefixed_term p do |x|
+      yield prefix + x[p.length..-1]
     end
   end
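
The practical effect of this fix, illustrated with a made-up filename: previously the caller's prefix was overwritten by the full location term and then stripped from the yielded value, so the "cur/" part disappeared from the infos.

    # Hypothetical illustration:
    Index.instance.each_source_info(source.id, "cur/") do |info|
      # before the fix: "1234567.abcdef.host:2,S"      (prefix lost)
      # after the fix:  "cur/1234567.abcdef.host:2,S"  (matches the "#{d}/" ids Maildir#poll uses)
    end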
 
@@ -492,7 +522,7 @@ EOS
       raise ParseError, "xapian query parser error: #{e}"
     end

-    debug "parsed xapian query: #{Util::Query.describe(xapian_query)}"
+    debug "parsed xapian query: #{Util::Query.describe(xapian_query, subs)}"

     raise ParseError if xapian_query.nil? or xapian_query.empty?
     query[:qobj] = xapian_query
@@ -500,14 +530,18 @@ EOS
     query
   end

+  def save_message m
+    if @sync_worker
+      @sync_queue << m
+    else
+      update_message_state m
+    end
+    m.clear_dirty
+  end
+
   def save_thread t
     t.each_dirty_message do |m|
-      if @sync_worker
-        @sync_queue << m
-      else
-        update_message_state m
-      end
-      m.clear_dirty
+      save_message m
     end
   end

@@ -614,7 +648,7 @@ EOS
   end

   Q = Xapian::Query
-  def build_xapian_query opts
+  def build_xapian_query opts, ignore_neg_terms = true
     labels = ([opts[:label]] + (opts[:labels] || [])).compact
     neglabels = [:spam, :deleted, :killed].reject { |l| (labels.include? l) || opts.member?("load_#{l}".intern) }
     pos_terms, neg_terms = [], []
@@ -630,7 +664,7 @@ EOS
       pos_terms << Q.new(Q::OP_OR, participant_terms)
     end

-    neg_terms.concat(neglabels.map { |l| mkterm(:label,l) })
+    neg_terms.concat(neglabels.map { |l| mkterm(:label,l) }) if ignore_neg_terms

     pos_query = Q.new(Q::OP_AND, pos_terms)
     neg_query = Q.new(Q::OP_OR, neg_terms)
@@ -643,6 +677,10 @@ EOS
   end

   def sync_message m, overwrite
+    ## TODO: we should not save the message if the sync_back failed
+    ## since it would overwrite the location field
+    m.sync_back
+
     doc = synchronize { find_doc(m.id) }
     existed = doc != nil
     doc ||= Xapian::Document.new
@@ -7,10 +7,10 @@ class LabelManager

   ## labels that have special semantics. user will be unable to
   ## add/remove these via normal label mechanisms.
-  RESERVED_LABELS = [ :starred, :spam, :draft, :unread, :killed, :sent, :deleted, :inbox, :attachment ]
+  RESERVED_LABELS = [ :starred, :spam, :draft, :unread, :killed, :sent, :deleted, :inbox, :attachment, :forwarded, :replied ]

   ## labels that will typically be hidden from the user
-  HIDDEN_RESERVED_LABELS = [ :starred, :unread, :attachment ]
+  HIDDEN_RESERVED_LABELS = [ :starred, :unread, :attachment, :forwarded, :replied ]

   def initialize fn
     @fn = fn
@@ -1,4 +1,5 @@
 require 'uri'
+require 'set'

 module Redwood

@@ -7,8 +8,8 @@ class Maildir < Source
   MYHOSTNAME = Socket.gethostname

   ## remind me never to use inheritance again.
-  yaml_properties :uri, :usual, :archived, :id, :labels
-  def initialize uri, usual=true, archived=false, id=nil, labels=[]
+  yaml_properties :uri, :usual, :archived, :sync_back, :id, :labels
+  def initialize uri, usual=true, archived=false, sync_back=true, id=nil, labels=[]
     super uri, usual, archived, id
     @expanded_uri = Source.expand_filesystem_uri(uri)
     uri = URI(@expanded_uri)
@@ -17,16 +18,28 @@ class Maildir < Source
     raise ArgumentError, "maildir URI cannot have a host: #{uri.host}" if uri.host
     raise ArgumentError, "maildir URI must have a path component" unless uri.path

+    @sync_back = sync_back
+    # sync by default if not specified
+    @sync_back = true if @sync_back.nil?
+
     @dir = uri.path
     @labels = Set.new(labels || [])
     @mutex = Mutex.new
-    @mtimes = { 'cur' => Time.at(0), 'new' => Time.at(0) }
+    @ctimes = { 'cur' => Time.at(0), 'new' => Time.at(0) }
   end

   def file_path; @dir end
   def self.suggest_labels_for path; [] end
   def is_source_for? uri; super || (uri == @expanded_uri); end

+  def supported_labels?
+    [:draft, :starred, :forwarded, :replied, :unread, :deleted]
+  end
+
+  def sync_back_enabled?
+    @sync_back
+  end
+
   def store_message date, from_email, &block
     stored = false
     new_fn = new_maildir_basefn + ':2,S'
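
An illustrative construction of a maildir source with sync-back disabled via the new fourth positional argument (the path is made up; sources loaded from an older sources.yaml get nil here, which is coerced to true):

    # sketch only: uri, usual, archived, sync_back, id, labels
    archive = Redwood::Maildir.new "maildir:/home/user/mail/archive", true, false, false
    archive.sync_back_enabled?   #=> false
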
@@ -44,7 +57,7 @@ class Maildir < Source
           f.fsync
         end

-        File.link tmp_path, new_path
+        File.safe_link tmp_path, new_path
         stored = true
       ensure
         File.unlink tmp_path if File.exists? tmp_path
@@ -71,6 +84,14 @@ class Maildir < Source
     with_file_for(id) { |f| RMail::Parser.read f }
   end

+  def sync_back id, labels
+    synchronize do
+      debug "syncing back maildir message #{id} with flags #{labels.to_a}"
+      flags = maildir_reconcile_flags id, labels
+      maildir_mark_file id, flags
+    end
+  end
+
   def raw_header id
     ret = ""
     with_file_for(id) do |f|
@@ -87,41 +108,78 @@ class Maildir < Source

   ## XXX use less memory
   def poll
-    @mtimes.each do |d,prev_mtime|
+    added = []
+    deleted = []
+    updated = []
+    @ctimes.each do |d,prev_ctime|
       subdir = File.join @dir, d
       debug "polling maildir #{subdir}"
       raise FatalSourceError, "#{subdir} not a directory" unless File.directory? subdir
-      mtime = File.mtime subdir
-      next if prev_mtime >= mtime
-      @mtimes[d] = mtime
+      ctime = File.ctime subdir
+      next if prev_ctime >= ctime
+      @ctimes[d] = ctime

       old_ids = benchmark(:maildir_read_index) { Enumerator.new(Index.instance, :each_source_info, self.id, "#{d}/").to_a }
-      new_ids = benchmark(:maildir_read_dir) { Dir.glob("#{subdir}/*").map { |x| File.basename x }.sort }
-      added = new_ids - old_ids
-      deleted = old_ids - new_ids
+      new_ids = benchmark(:maildir_read_dir) { Dir.glob("#{subdir}/*").map { |x| File.join(d,File.basename(x)) }.sort }
+      added += new_ids - old_ids
+      deleted += old_ids - new_ids
       debug "#{old_ids.size} in index, #{new_ids.size} in filesystem"
-      debug "#{added.size} added, #{deleted.size} deleted"
+    end

-      added.each_with_index do |id,i|
-        yield :add,
-          :info => File.join(d,id),
-          :labels => @labels + maildir_labels(id) + [:inbox],
-          :progress => i.to_f/(added.size+deleted.size)
-      end
+    ## find updated mails by checking if an id is in both the added and
+    ## deleted arrays, meaning that its flags changed or that it has
+    ## been moved; these ids need to be removed from added and deleted
+    add_to_delete = del_to_delete = []
+    map = Hash.new { |hash, key| hash[key] = [] }
+    deleted.each do |id_del|
+      map[maildir_data(id_del)[0]].push id_del
+    end
+    added.each do |id_add|
+      map[maildir_data(id_add)[0]].each do |id_del|
+        updated.push [ id_del, id_add ]
+        add_to_delete.push id_add
+        del_to_delete.push id_del
+      end
+    end
+    added -= add_to_delete
+    deleted -= del_to_delete
+    debug "#{added.size} added, #{deleted.size} deleted, #{updated.size} updated"
+    total_size = added.size+deleted.size+updated.size

-      deleted.each_with_index do |id,i|
-        yield :delete,
-          :info => File.join(d,id),
-          :progress => (i.to_f+added.size)/(added.size+deleted.size)
-      end
+    added.each_with_index do |id,i|
+      yield :add,
+        :info => id,
+        :labels => @labels + maildir_labels(id) + [:inbox],
+        :progress => i.to_f/total_size
+    end
+
+    deleted.each_with_index do |id,i|
+      yield :delete,
+        :info => id,
+        :progress => (i.to_f+added.size)/total_size
+    end
+
+    updated.each_with_index do |id,i|
+      yield :update,
+        :old_info => id[0],
+        :new_info => id[1],
+        :labels => @labels + maildir_labels(id[1]),
+        :progress => (i.to_f+added.size+deleted.size)/total_size
     end
     nil
   end

+  def labels? id
+    maildir_labels id
+  end
+
   def maildir_labels id
     (seen?(id) ? [] : [:unread]) +
       (trashed?(id) ? [:deleted] : []) +
-      (flagged?(id) ? [:starred] : [])
+      (flagged?(id) ? [:starred] : []) +
+      (passed?(id) ? [:forwarded] : []) +
+      (replied?(id) ? [:replied] : []) +
+      (draft?(id) ? [:draft] : [])
   end

   def draft? id; maildir_data(id)[2].include? "D"; end
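
A rough illustration of the pairing logic with made-up filenames: maildir_data strips the directory and the flag suffix, so a message that merely changed flags or moved between new/ and cur/ shows up under the same key in both lists and is reported as a single :update rather than a delete plus an add.

    deleted = ["new/1234567.abcdef.host:2,"]    # old location, no longer on disk
    added   = ["cur/1234567.abcdef.host:2,S"]   # same message, now seen and in cur/
    maildir_data(deleted.first)[0]  #=> "1234567.abcdef.host"
    maildir_data(added.first)[0]    #=> "1234567.abcdef.host"
    # => poll yields :update with :old_info => deleted.first, :new_info => added.first
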
@@ -131,13 +189,6 @@ class Maildir < Source
   def seen? id; maildir_data(id)[2].include? "S"; end
   def trashed? id; maildir_data(id)[2].include? "T"; end

-  def mark_draft id; maildir_mark_file id, "D" unless draft? id; end
-  def mark_flagged id; maildir_mark_file id, "F" unless flagged? id; end
-  def mark_passed id; maildir_mark_file id, "P" unless passed? id; end
-  def mark_replied id; maildir_mark_file id, "R" unless replied? id; end
-  def mark_seen id; maildir_mark_file id, "S" unless seen? id; end
-  def mark_trashed id; maildir_mark_file id, "T" unless trashed? id; end
-
   def valid? id
     File.exists? File.join(@dir, id)
   end
@@ -159,25 +210,47 @@ private
   end

   def maildir_data id
-    id =~ %r{^([^:]+):([12]),([DFPRST]*)$}
+    id = File.basename id
+    # Flags we recognize are DFPRST
+    id =~ %r{^([^:]+):([12]),([A-Za-z]*)$}
     [($1 || id), ($2 || "2"), ($3 || "")]
   end

-  ## not thread-safe on msg
-  def maildir_mark_file msg, flag
-    orig_path = @ids_to_fns[msg]
-    orig_base, orig_fn = File.split(orig_path)
-    new_base = orig_base.slice(0..-4) + 'cur'
-    tmp_base = orig_base.slice(0..-4) + 'tmp'
-    md_base, md_ver, md_flags = maildir_data msg
-    md_flags += flag; md_flags = md_flags.split(//).sort.join.squeeze
-    new_path = File.join new_base, "#{md_base}:#{md_ver},#{md_flags}"
-    tmp_path = File.join tmp_base, "#{md_base}:#{md_ver},#{md_flags}"
-    File.link orig_path, tmp_path
-    File.unlink orig_path
-    File.link tmp_path, new_path
-    File.unlink tmp_path
-    @ids_to_fns[msg] = new_path
+  def maildir_reconcile_flags id, labels
+    new_flags = Set.new( maildir_data(id)[2].each_char )
+
+    # Set flags based on labels for the six flags we recognize
+    if labels.member? :draft then new_flags.add?( "D" ) else new_flags.delete?( "D" ) end
+    if labels.member? :starred then new_flags.add?( "F" ) else new_flags.delete?( "F" ) end
+    if labels.member? :forwarded then new_flags.add?( "P" ) else new_flags.delete?( "P" ) end
+    if labels.member? :replied then new_flags.add?( "R" ) else new_flags.delete?( "R" ) end
+    if not labels.member? :unread then new_flags.add?( "S" ) else new_flags.delete?( "S" ) end
+    if labels.member? :deleted or labels.member? :killed then new_flags.add?( "T" ) else new_flags.delete?( "T" ) end
+
+    ## Flags must be stored in ASCII order according to Maildir
+    ## documentation
+    new_flags.to_a.sort.join
+  end
+
+  def maildir_mark_file orig_path, flags
+    @mutex.synchronize do
+      new_base = (flags.include?("S")) ? "cur" : "new"
+      md_base, md_ver, md_flags = maildir_data orig_path
+
+      return if md_flags == flags
+
+      new_loc = File.join new_base, "#{md_base}:#{md_ver},#{flags}"
+      orig_path = File.join @dir, orig_path
+      new_path = File.join @dir, new_loc
+      tmp_path = File.join @dir, "tmp", "#{md_base}:#{md_ver},#{flags}"
+
+      File.safe_link orig_path, tmp_path
+      File.unlink orig_path
+      File.safe_link tmp_path, new_path
+      File.unlink tmp_path
+
+      new_loc
+    end
   end
 end
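
A worked example of the reconciliation (filename and labels are made up): starting from flags "FS", the missing :starred label removes "F", :replied adds "R", :deleted adds "T", and the absence of :unread keeps "S"; the result is sorted into ASCII order and the file is relinked accordingly.

    flags = maildir_reconcile_flags "cur/1234567.abcdef.host:2,FS", Set.new([:replied, :deleted])
    flags  #=> "RST"
    maildir_mark_file "cur/1234567.abcdef.host:2,FS", flags
    #=> relinks the message as "cur/1234567.abcdef.host:2,RST" and returns that new location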
 
@@ -120,7 +120,7 @@ class MBox < Source
   ## into memory with raw_message.
   ##
   ## i hoped never to have to move shit around on disk but
-  ## sup-sync-back has to do it.
+  ## sup-sync-back-mbox has to do it.
   def each_raw_message_line offset
     @mutex.synchronize do
       ensure_open
@@ -291,6 +291,32 @@ EOS
     location.each_raw_message_line &b
   end

+  def sync_back
+    @locations.map { |l| l.sync_back @labels, self }.any? do
+      UpdateManager.relay self, :updated, self
+    end
+  end
+
+  def merge_labels_from_locations merge_labels
+    ## Get all labels from all locations
+    location_labels = Set.new([])
+
+    @locations.each do |l|
+      if l.valid?
+        location_labels = location_labels.union(l.labels?)
+      end
+    end
+
+    ## Add to the message labels the intersection between all location
+    ## labels and those we want to merge
+    location_labels = location_labels.intersection(merge_labels.to_set)
+
+    if not location_labels.empty?
+      @labels = @labels.union(location_labels)
+      @dirty = true
+    end
+  end
+
   ## returns all the content from a message that will be indexed
   def indexable_content
     load_from_source!
@@ -545,10 +571,18 @@ private
   ## (and possible signed) inline GPG messages
   def inline_gpg_to_chunks body, encoding_to, encoding_from
     lines = body.split("\n")
+
+    # First case: Message is enclosed between
+    #
+    # -----BEGIN PGP SIGNED MESSAGE-----
+    # and
+    # -----END PGP SIGNED MESSAGE-----
+    #
+    # In some cases, END PGP SIGNED MESSAGE doesn't appear
     gpg = lines.between(GPG_SIGNED_START, GPG_SIGNED_END)
     # between does not check if GPG_END actually exists
     # Reference: http://permalink.gmane.org/gmane.mail.sup.devel/641
-    if !gpg.empty? && !lines.index(GPG_END).nil?
+    if !gpg.empty?
       msg = RMail::Message.new
       msg.body = gpg.join("\n")

@@ -560,14 +594,21 @@ private
       before = startidx != 0 ? lines[0 .. startidx-1] : []
       after = endidx ? lines[endidx+1 .. lines.size] : []

+      # sig contains BEGIN PGP SIGNED MESSAGE and END PGP SIGNATURE, so
+      # we ditch them. sig may also contain the hash used by PGP (with a
+      # newline), so we also skip them
+      sig_start = sig[1].match(/^Hash:/) ? 3 : 1
+      sig_end = sig.size-2
       payload = RMail::Message.new
-      payload.body = sig[1, sig.size-2].join("\n")
+      payload.body = sig[sig_start, sig_end].join("\n")

       return [text_to_chunks(before, false),
               CryptoManager.verify(nil, msg, false),
               message_to_chunks(payload),
               text_to_chunks(after, false)].flatten.compact
     end

+    # Second case: Message is encrypted
+
     gpg = lines.between(GPG_START, GPG_END)
     # between does not check if GPG_END actually exists
     if !gpg.empty? && !lines.index(GPG_END).nil?
@@ -704,6 +745,24 @@ class Location
     source.raw_message info
   end

+  def sync_back labels, message
+    synced = false
+    return synced unless sync_back_enabled? and valid?
+    source.synchronize do
+      new_info = source.sync_back(@info, labels)
+      if new_info
+        @info = new_info
+        Index.sync_message message, true
+        synced = true
+      end
+    end
+    synced
+  end
+
+  def sync_back_enabled?
+    source.respond_to? :sync_back and $config[:sync_back_to_maildir] and source.sync_back_enabled?
+  end
+
   ## much faster than raw_message
   def each_raw_message_line &b
     source.each_raw_message_line info, &b
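
Roughly, the end-to-end flow when a message's labels change (the call site is illustrative; only the method names come from this diff): Message#sync_back walks its locations, each enabled Location asks its source to rename the maildir file, and on success it records the new path and re-indexes the message.

    # sketch: assumes m is a Message already carrying the updated labels,
    # sync_back_to_maildir is enabled, and the source has sync_back true
    m.sync_back
    # -> Location#sync_back -> Maildir#sync_back(@info, labels)
    #    -> maildir_reconcile_flags / maildir_mark_file rename the file
    # -> @info is updated to the new location and Index.sync_message(m, true) re-indexes it
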
@@ -717,6 +776,10 @@ class Location
     source.valid? info
   end

+  def labels?
+    source.labels? info
+  end
+
   def == o
     o.source.id == source.id and o.info == info
   end