murder 0.0.0.pre

Files changed (65)
  1. data/.gitignore +1 -0
  2. data/LICENSE +17 -0
  3. data/README +224 -0
  4. data/Rakefile +52 -0
  5. data/VERSION +1 -0
  6. data/dist/BitTornado/BT1/Choker.py +128 -0
  7. data/dist/BitTornado/BT1/Connecter.py +288 -0
  8. data/dist/BitTornado/BT1/Downloader.py +594 -0
  9. data/dist/BitTornado/BT1/DownloaderFeedback.py +155 -0
  10. data/dist/BitTornado/BT1/Encrypter.py +333 -0
  11. data/dist/BitTornado/BT1/FileSelector.py +245 -0
  12. data/dist/BitTornado/BT1/Filter.py +12 -0
  13. data/dist/BitTornado/BT1/HTTPDownloader.py +251 -0
  14. data/dist/BitTornado/BT1/NatCheck.py +95 -0
  15. data/dist/BitTornado/BT1/PiecePicker.py +320 -0
  16. data/dist/BitTornado/BT1/Rerequester.py +426 -0
  17. data/dist/BitTornado/BT1/Statistics.py +177 -0
  18. data/dist/BitTornado/BT1/Storage.py +584 -0
  19. data/dist/BitTornado/BT1/StorageWrapper.py +1045 -0
  20. data/dist/BitTornado/BT1/StreamCheck.py +135 -0
  21. data/dist/BitTornado/BT1/T2T.py +193 -0
  22. data/dist/BitTornado/BT1/Uploader.py +145 -0
  23. data/dist/BitTornado/BT1/__init__.py +1 -0
  24. data/dist/BitTornado/BT1/btformats.py +100 -0
  25. data/dist/BitTornado/BT1/fakeopen.py +89 -0
  26. data/dist/BitTornado/BT1/makemetafile.py +263 -0
  27. data/dist/BitTornado/BT1/track.py +1067 -0
  28. data/dist/BitTornado/ConfigDir.py +401 -0
  29. data/dist/BitTornado/ConfigReader.py +1068 -0
  30. data/dist/BitTornado/ConnChoice.py +31 -0
  31. data/dist/BitTornado/CreateIcons.py +105 -0
  32. data/dist/BitTornado/CurrentRateMeasure.py +37 -0
  33. data/dist/BitTornado/HTTPHandler.py +167 -0
  34. data/dist/BitTornado/PSYCO.py +5 -0
  35. data/dist/BitTornado/RateLimiter.py +153 -0
  36. data/dist/BitTornado/RateMeasure.py +75 -0
  37. data/dist/BitTornado/RawServer.py +195 -0
  38. data/dist/BitTornado/ServerPortHandler.py +188 -0
  39. data/dist/BitTornado/SocketHandler.py +375 -0
  40. data/dist/BitTornado/__init__.py +63 -0
  41. data/dist/BitTornado/bencode.py +319 -0
  42. data/dist/BitTornado/bitfield.py +162 -0
  43. data/dist/BitTornado/clock.py +27 -0
  44. data/dist/BitTornado/download_bt1.py +882 -0
  45. data/dist/BitTornado/inifile.py +169 -0
  46. data/dist/BitTornado/iprangeparse.py +194 -0
  47. data/dist/BitTornado/launchmanycore.py +381 -0
  48. data/dist/BitTornado/natpunch.py +254 -0
  49. data/dist/BitTornado/parseargs.py +137 -0
  50. data/dist/BitTornado/parsedir.py +150 -0
  51. data/dist/BitTornado/piecebuffer.py +86 -0
  52. data/dist/BitTornado/selectpoll.py +109 -0
  53. data/dist/BitTornado/subnetparse.py +218 -0
  54. data/dist/BitTornado/torrentlistparse.py +38 -0
  55. data/dist/BitTornado/zurllib.py +100 -0
  56. data/dist/murder_client.py +291 -0
  57. data/dist/murder_make_torrent.py +46 -0
  58. data/dist/murder_tracker.py +28 -0
  59. data/doc/examples/Capfile +28 -0
  60. data/lib/capistrano/recipes/deploy/strategy/murder.rb +52 -0
  61. data/lib/murder.rb +43 -0
  62. data/lib/murder/admin.rb +47 -0
  63. data/lib/murder/murder.rb +121 -0
  64. data/murder.gemspec +101 -0
  65. metadata +129 -0
data/dist/BitTornado/BT1/FileSelector.py
@@ -0,0 +1,245 @@
+ # Written by John Hoffman
+ # see LICENSE.txt for license information
+
+ from random import shuffle
+ from traceback import print_exc
+ try:
+     True
+ except:
+     True = 1
+     False = 0
+
+
+ class FileSelector:
+     def __init__(self, files, piece_length, bufferdir,
+                  storage, storagewrapper, sched, failfunc):
+         self.files = files
+         self.storage = storage
+         self.storagewrapper = storagewrapper
+         self.sched = sched
+         self.failfunc = failfunc
+         self.downloader = None
+         self.picker = None
+
+         storage.set_bufferdir(bufferdir)
+
+         self.numfiles = len(files)
+         self.priority = [1] * self.numfiles
+         self.new_priority = None
+         self.new_partials = None
+         self.filepieces = []
+         total = 0L
+         for file, length in files:
+             if not length:
+                 self.filepieces.append(())
+             else:
+                 pieces = range( int(total/piece_length),
+                                 int((total+length-1)/piece_length)+1 )
+                 self.filepieces.append(tuple(pieces))
+             total += length
+         self.numpieces = int((total+piece_length-1)/piece_length)
+         self.piece_priority = [1] * self.numpieces
+
+
+
+     def init_priority(self, new_priority):
+         try:
+             assert len(new_priority) == self.numfiles
+             for v in new_priority:
+                 assert type(v) in (type(0),type(0L))
+                 assert v >= -1
+                 assert v <= 2
+         except:
+             # print_exc()
+             return False
+         try:
+             files_updated = False
+             for f in xrange(self.numfiles):
+                 if new_priority[f] < 0:
+                     self.storage.disable_file(f)
+                     files_updated = True
+             if files_updated:
+                 self.storage.reset_file_status()
+             self.new_priority = new_priority
+         except (IOError, OSError), e:
+             self.failfunc("can't open partial file for "
+                           + self.files[f][0] + ': ' + str(e))
+             return False
+         return True
+
+     '''
+     d['priority'] = [file #1 priority [,file #2 priority...] ]
+                     a list of download priorities for each file.
+                     Priority may be -1, 0, 1, 2. -1 = download disabled,
+                     0 = highest, 1 = normal, 2 = lowest.
+     Also see Storage.pickle and StorageWrapper.pickle for additional keys.
+     '''
+     def unpickle(self, d):
+         if d.has_key('priority'):
+             if not self.init_priority(d['priority']):
+                 return
+         pieces = self.storage.unpickle(d)
+         if not pieces:  # don't bother, nothing restoreable
+             return
+         new_piece_priority = self._get_piece_priority_list(self.new_priority)
+         self.storagewrapper.reblock([i == -1 for i in new_piece_priority])
+         self.new_partials = self.storagewrapper.unpickle(d, pieces)
+
+
+     def tie_in(self, picker, cancelfunc, requestmorefunc, rerequestfunc):
+         self.picker = picker
+         self.cancelfunc = cancelfunc
+         self.requestmorefunc = requestmorefunc
+         self.rerequestfunc = rerequestfunc
+
+         if self.new_priority:
+             self.priority = self.new_priority
+             self.new_priority = None
+             self.new_piece_priority = self._set_piece_priority(self.priority)
+
+         if self.new_partials:
+             shuffle(self.new_partials)
+             for p in self.new_partials:
+                 self.picker.requested(p)
+         self.new_partials = None
+
+
+     def _set_files_disabled(self, old_priority, new_priority):
+         old_disabled = [p == -1 for p in old_priority]
+         new_disabled = [p == -1 for p in new_priority]
+         data_to_update = []
+         for f in xrange(self.numfiles):
+             if new_disabled[f] != old_disabled[f]:
+                 data_to_update.extend(self.storage.get_piece_update_list(f))
+         buffer = []
+         for piece, start, length in data_to_update:
+             if self.storagewrapper.has_data(piece):
+                 data = self.storagewrapper.read_raw(piece, start, length)
+                 if data is None:
+                     return False
+                 buffer.append((piece, start, data))
+
+         files_updated = False
+         try:
+             for f in xrange(self.numfiles):
+                 if new_disabled[f] and not old_disabled[f]:
+                     self.storage.disable_file(f)
+                     files_updated = True
+                 if old_disabled[f] and not new_disabled[f]:
+                     self.storage.enable_file(f)
+                     files_updated = True
+         except (IOError, OSError), e:
+             if new_disabled[f]:
+                 msg = "can't open partial file for "
+             else:
+                 msg = 'unable to open '
+             self.failfunc(msg + self.files[f][0] + ': ' + str(e))
+             return False
+         if files_updated:
+             self.storage.reset_file_status()
+
+         changed_pieces = {}
+         for piece, start, data in buffer:
+             if not self.storagewrapper.write_raw(piece, start, data):
+                 return False
+             data.release()
+             changed_pieces[piece] = 1
+         if not self.storagewrapper.doublecheck_data(changed_pieces):
+             return False
+
+         return True
+
+
+     def _get_piece_priority_list(self, file_priority_list):
+         l = [-1] * self.numpieces
+         for f in xrange(self.numfiles):
+             if file_priority_list[f] == -1:
+                 continue
+             for i in self.filepieces[f]:
+                 if l[i] == -1:
+                     l[i] = file_priority_list[f]
+                     continue
+                 l[i] = min(l[i],file_priority_list[f])
+         return l
+
+
+     def _set_piece_priority(self, new_priority):
+         was_complete = self.storagewrapper.am_I_complete()
+         new_piece_priority = self._get_piece_priority_list(new_priority)
+         pieces = range(self.numpieces)
+         shuffle(pieces)
+         new_blocked = []
+         new_unblocked = []
+         for piece in pieces:
+             self.picker.set_priority(piece,new_piece_priority[piece])
+             o = self.piece_priority[piece] == -1
+             n = new_piece_priority[piece] == -1
+             if n and not o:
+                 new_blocked.append(piece)
+             if o and not n:
+                 new_unblocked.append(piece)
+         if new_blocked:
+             self.cancelfunc(new_blocked)
+         self.storagewrapper.reblock([i == -1 for i in new_piece_priority])
+         if new_unblocked:
+             self.requestmorefunc(new_unblocked)
+         if was_complete and not self.storagewrapper.am_I_complete():
+             self.rerequestfunc()
+
+         return new_piece_priority
+
+
+     def set_priorities_now(self, new_priority = None):
+         if not new_priority:
+             new_priority = self.new_priority
+             self.new_priority = None    # potential race condition
+             if not new_priority:
+                 return
+         old_priority = self.priority
+         self.priority = new_priority
+         if not self._set_files_disabled(old_priority, new_priority):
+             return
+         self.piece_priority = self._set_piece_priority(new_priority)
+
+     def set_priorities(self, new_priority):
+         self.new_priority = new_priority
+         self.sched(self.set_priorities_now)
+
+     def set_priority(self, f, p):
+         new_priority = self.get_priorities()
+         new_priority[f] = p
+         self.set_priorities(new_priority)
+
+     def get_priorities(self):
+         priority = self.new_priority
+         if not priority:
+             priority = self.priority    # potential race condition
+         return [i for i in priority]
+
+     def __setitem__(self, index, val):
+         self.set_priority(index, val)
+
+     def __getitem__(self, index):
+         try:
+             return self.new_priority[index]
+         except:
+             return self.priority[index]
+
+
+     def finish(self):
+         for f in xrange(self.numfiles):
+             if self.priority[f] == -1:
+                 self.storage.delete_file(f)
+
+     def pickle(self):
+         d = {'priority': self.priority}
+         try:
+             s = self.storage.pickle()
+             sw = self.storagewrapper.pickle()
+             for k in s.keys():
+                 d[k] = s[k]
+             for k in sw.keys():
+                 d[k] = sw[k]
+         except (IOError, OSError):
+             pass
+         return d
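
For reference, the docstring above defines the priority convention used in this file: per-file priorities of -1 (disabled), 0 (highest), 1 (normal) and 2 (lowest) are folded into per-piece priorities, where each piece takes the most urgent priority of any enabled file that overlaps it. The sketch below reproduces that mapping as a standalone helper so the behaviour can be checked in isolation; the function name and the sample file sizes are illustrative only and not part of the gem.

    def piece_priorities(file_lengths, file_priorities, piece_length):
        # Standalone mirror of FileSelector._get_piece_priority_list plus the
        # piece-range bookkeeping done in __init__: a piece stays at -1 only if
        # no enabled file touches it; otherwise the lowest number (most urgent
        # priority) among overlapping files wins.
        total = sum(file_lengths)
        numpieces = (total + piece_length - 1) // piece_length
        result = [-1] * numpieces
        offset = 0
        for length, prio in zip(file_lengths, file_priorities):
            if length and prio != -1:
                first = offset // piece_length
                last = (offset + length - 1) // piece_length
                for i in range(first, last + 1):
                    result[i] = prio if result[i] == -1 else min(result[i], prio)
            offset += length
        return result

    # Hypothetical layout: three files over 16-byte pieces, middle file disabled.
    print(piece_priorities([40, 20, 30], [1, -1, 0], 16))
    # -> [1, 1, 1, 0, 0, 0]; pieces shared with the disabled file are still
    #    fetched because an enabled file overlaps them.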
data/dist/BitTornado/BT1/Filter.py
@@ -0,0 +1,12 @@
+ class Filter:
+     def __init__(self, callback):
+         self.callback = callback
+
+     def check(self, ip, paramslist, headers):
+
+         def params(key, default = None, l = paramslist):
+             if l.has_key(key):
+                 return l[key][0]
+             return default
+
+         return None
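
The Filter stub above is deliberately a no-op: check() always returns None, and the nested params helper only exists for subclasses that want to inspect announce parameters. A rough sketch of a custom filter follows, under the assumption that the tracker treats a non-None return value from check() as a rejection reason; the class name, ban list, and message are invented for illustration and not part of the gem.

    class IPBlockFilter(Filter):
        # Illustrative only; not shipped with murder or BitTornado.
        BANNED = ('10.0.0.66', '10.0.0.67')

        def check(self, ip, paramslist, headers):
            if ip in self.BANNED:
                # Assumption: a returned string is reported back as the refusal reason.
                return 'requests from this address are not accepted'
            return None  # None lets the request through, matching the stub above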
data/dist/BitTornado/BT1/HTTPDownloader.py
@@ -0,0 +1,251 @@
+ # Written by John Hoffman
+ # see LICENSE.txt for license information
+
+ from BitTornado.CurrentRateMeasure import Measure
+ from random import randint
+ from urlparse import urlparse
+ from httplib import HTTPConnection
+ from urllib import quote
+ from threading import Thread
+ from BitTornado.__init__ import product_name,version_short
+ try:
+     True
+ except:
+     True = 1
+     False = 0
+
+ EXPIRE_TIME = 60 * 60
+
+ VERSION = product_name+'/'+version_short
+
+ class haveComplete:
+     def complete(self):
+         return True
+     def __getitem__(self, x):
+         return True
+ haveall = haveComplete()
+
+ class SingleDownload:
+     def __init__(self, downloader, url):
+         self.downloader = downloader
+         self.baseurl = url
+         try:
+             (scheme, self.netloc, path, pars, query, fragment) = urlparse(url)
+         except:
+             self.downloader.errorfunc('cannot parse http seed address: '+url)
+             return
+         if scheme != 'http':
+             self.downloader.errorfunc('http seed url not http: '+url)
+             return
+         try:
+             self.connection = HTTPConnection(self.netloc)
+         except:
+             self.downloader.errorfunc('cannot connect to http seed: '+url)
+             return
+         self.seedurl = path
+         if pars:
+             self.seedurl += ';'+pars
+         self.seedurl += '?'
+         if query:
+             self.seedurl += query+'&'
+         self.seedurl += 'info_hash='+quote(self.downloader.infohash)
+
+         self.measure = Measure(downloader.max_rate_period)
+         self.index = None
+         self.url = ''
+         self.requests = []
+         self.request_size = 0
+         self.endflag = False
+         self.error = None
+         self.retry_period = 30
+         self._retry_period = None
+         self.errorcount = 0
+         self.goodseed = False
+         self.active = False
+         self.cancelled = False
+         self.resched(randint(2,10))
+
+     def resched(self, len = None):
+         if len is None:
+             len = self.retry_period
+         if self.errorcount > 3:
+             len = len * (self.errorcount - 2)
+         self.downloader.rawserver.add_task(self.download, len)
+
+     def _want(self, index):
+         if self.endflag:
+             return self.downloader.storage.do_I_have_requests(index)
+         else:
+             return self.downloader.storage.is_unstarted(index)
+
+     def download(self):
+         self.cancelled = False
+         if self.downloader.picker.am_I_complete():
+             self.downloader.downloads.remove(self)
+             return
+         self.index = self.downloader.picker.next(haveall, self._want)
+         if ( self.index is None and not self.endflag
+                      and not self.downloader.peerdownloader.has_downloaders() ):
+             self.endflag = True
+             self.index = self.downloader.picker.next(haveall, self._want)
+         if self.index is None:
+             self.endflag = True
+             self.resched()
+         else:
+             self.url = ( self.seedurl+'&piece='+str(self.index) )
+             self._get_requests()
+             if self.request_size < self.downloader.storage._piecelen(self.index):
+                 self.url += '&ranges='+self._request_ranges()
+             rq = Thread(target = self._request)
+             rq.setDaemon(False)
+             rq.start()
+             self.active = True
+
+     def _request(self):
+         import encodings.ascii
+         import encodings.punycode
+         import encodings.idna
+
+         self.error = None
+         self.received_data = None
+         try:
+             self.connection.request('GET',self.url, None,
+                                     {'User-Agent': VERSION})
+             r = self.connection.getresponse()
+             self.connection_status = r.status
+             self.received_data = r.read()
+         except Exception, e:
+             self.error = 'error accessing http seed: '+str(e)
+             try:
+                 self.connection.close()
+             except:
+                 pass
+             try:
+                 self.connection = HTTPConnection(self.netloc)
+             except:
+                 self.connection = None  # will cause an exception and retry next cycle
+         self.downloader.rawserver.add_task(self.request_finished)
+
+     def request_finished(self):
+         self.active = False
+         if self.error is not None:
+             if self.goodseed:
+                 self.downloader.errorfunc(self.error)
+             self.errorcount += 1
+         if self.received_data:
+             self.errorcount = 0
+             if not self._got_data():
+                 self.received_data = None
+         if not self.received_data:
+             self._release_requests()
+             self.downloader.peerdownloader.piece_flunked(self.index)
+         if self._retry_period:
+             self.resched(self._retry_period)
+             self._retry_period = None
+             return
+         self.resched()
+
+     def _got_data(self):
+         if self.connection_status == 503:   # seed is busy
+             try:
+                 self.retry_period = max(int(self.received_data),5)
+             except:
+                 pass
+             return False
+         if self.connection_status != 200:
+             self.errorcount += 1
+             return False
+         self._retry_period = 1
+         if len(self.received_data) != self.request_size:
+             if self.goodseed:
+                 self.downloader.errorfunc('corrupt data from http seed - redownloading')
+             return False
+         self.measure.update_rate(len(self.received_data))
+         self.downloader.measurefunc(len(self.received_data))
+         if self.cancelled:
+             return False
+         if not self._fulfill_requests():
+             return False
+         if not self.goodseed:
+             self.goodseed = True
+             self.downloader.seedsfound += 1
+         if self.downloader.storage.do_I_have(self.index):
+             self.downloader.picker.complete(self.index)
+             self.downloader.peerdownloader.check_complete(self.index)
+             self.downloader.gotpiecefunc(self.index)
+         return True
+
+     def _get_requests(self):
+         self.requests = []
+         self.request_size = 0L
+         while self.downloader.storage.do_I_have_requests(self.index):
+             r = self.downloader.storage.new_request(self.index)
+             self.requests.append(r)
+             self.request_size += r[1]
+         self.requests.sort()
+
+     def _fulfill_requests(self):
+         start = 0L
+         success = True
+         while self.requests:
+             begin, length = self.requests.pop(0)
+             if not self.downloader.storage.piece_came_in(self.index, begin,
+                             self.received_data[start:start+length]):
+                 success = False
+                 break
+             start += length
+         return success
+
+     def _release_requests(self):
+         for begin, length in self.requests:
+             self.downloader.storage.request_lost(self.index, begin, length)
+         self.requests = []
+
+     def _request_ranges(self):
+         s = ''
+         begin, length = self.requests[0]
+         for begin1, length1 in self.requests[1:]:
+             if begin + length == begin1:
+                 length += length1
+                 continue
+             else:
+                 if s:
+                     s += ','
+                 s += str(begin)+'-'+str(begin+length-1)
+                 begin, length = begin1, length1
+         if s:
+             s += ','
+         s += str(begin)+'-'+str(begin+length-1)
+         return s
+
+
+ class HTTPDownloader:
+     def __init__(self, storage, picker, rawserver,
+                  finflag, errorfunc, peerdownloader,
+                  max_rate_period, infohash, measurefunc, gotpiecefunc):
+         self.storage = storage
+         self.picker = picker
+         self.rawserver = rawserver
+         self.finflag = finflag
+         self.errorfunc = errorfunc
+         self.peerdownloader = peerdownloader
+         self.infohash = infohash
+         self.max_rate_period = max_rate_period
+         self.gotpiecefunc = gotpiecefunc
+         self.measurefunc = measurefunc
+         self.downloads = []
+         self.seedsfound = 0
+
+     def make_download(self, url):
+         self.downloads.append(SingleDownload(self, url))
+         return self.downloads[-1]
+
+     def get_downloads(self):
+         if self.finflag.isSet():
+             return []
+         return self.downloads
+
+     def cancel_piece_download(self, pieces):
+         for d in self.downloads:
+             if d.active and d.index in pieces:
+                 d.cancelled = True
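
A note on the ranges= parameter built by _request_ranges above: the per-block (begin, length) requests for a piece are already sorted by _get_requests, and adjacent blocks are merged into comma-separated start-end byte spans before being appended to the seed URL. The helper below re-implements just that coalescing step so it can be exercised on its own; the function name and the sample block sizes are illustrative, not part of the gem.

    def coalesce_ranges(requests):
        # Same merging as SingleDownload._request_ranges: contiguous
        # (begin, length) blocks collapse into one "start-end" span and
        # the spans are joined with commas. Assumes the list is sorted
        # by offset, as _get_requests guarantees.
        spans = []
        begin, length = requests[0]
        for begin1, length1 in requests[1:]:
            if begin + length == begin1:
                length += length1                  # contiguous: extend current span
            else:
                spans.append('%d-%d' % (begin, begin + length - 1))
                begin, length = begin1, length1
        spans.append('%d-%d' % (begin, begin + length - 1))
        return ','.join(spans)

    # Hypothetical request list: three 16 KiB blocks, the first two contiguous.
    print(coalesce_ranges([(0, 16384), (16384, 16384), (49152, 16384)]))
    # -> 0-32767,49152-65535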