murder 0.0.0.pre

Files changed (65)
  1. data/.gitignore +1 -0
  2. data/LICENSE +17 -0
  3. data/README +224 -0
  4. data/Rakefile +52 -0
  5. data/VERSION +1 -0
  6. data/dist/BitTornado/BT1/Choker.py +128 -0
  7. data/dist/BitTornado/BT1/Connecter.py +288 -0
  8. data/dist/BitTornado/BT1/Downloader.py +594 -0
  9. data/dist/BitTornado/BT1/DownloaderFeedback.py +155 -0
  10. data/dist/BitTornado/BT1/Encrypter.py +333 -0
  11. data/dist/BitTornado/BT1/FileSelector.py +245 -0
  12. data/dist/BitTornado/BT1/Filter.py +12 -0
  13. data/dist/BitTornado/BT1/HTTPDownloader.py +251 -0
  14. data/dist/BitTornado/BT1/NatCheck.py +95 -0
  15. data/dist/BitTornado/BT1/PiecePicker.py +320 -0
  16. data/dist/BitTornado/BT1/Rerequester.py +426 -0
  17. data/dist/BitTornado/BT1/Statistics.py +177 -0
  18. data/dist/BitTornado/BT1/Storage.py +584 -0
  19. data/dist/BitTornado/BT1/StorageWrapper.py +1045 -0
  20. data/dist/BitTornado/BT1/StreamCheck.py +135 -0
  21. data/dist/BitTornado/BT1/T2T.py +193 -0
  22. data/dist/BitTornado/BT1/Uploader.py +145 -0
  23. data/dist/BitTornado/BT1/__init__.py +1 -0
  24. data/dist/BitTornado/BT1/btformats.py +100 -0
  25. data/dist/BitTornado/BT1/fakeopen.py +89 -0
  26. data/dist/BitTornado/BT1/makemetafile.py +263 -0
  27. data/dist/BitTornado/BT1/track.py +1067 -0
  28. data/dist/BitTornado/ConfigDir.py +401 -0
  29. data/dist/BitTornado/ConfigReader.py +1068 -0
  30. data/dist/BitTornado/ConnChoice.py +31 -0
  31. data/dist/BitTornado/CreateIcons.py +105 -0
  32. data/dist/BitTornado/CurrentRateMeasure.py +37 -0
  33. data/dist/BitTornado/HTTPHandler.py +167 -0
  34. data/dist/BitTornado/PSYCO.py +5 -0
  35. data/dist/BitTornado/RateLimiter.py +153 -0
  36. data/dist/BitTornado/RateMeasure.py +75 -0
  37. data/dist/BitTornado/RawServer.py +195 -0
  38. data/dist/BitTornado/ServerPortHandler.py +188 -0
  39. data/dist/BitTornado/SocketHandler.py +375 -0
  40. data/dist/BitTornado/__init__.py +63 -0
  41. data/dist/BitTornado/bencode.py +319 -0
  42. data/dist/BitTornado/bitfield.py +162 -0
  43. data/dist/BitTornado/clock.py +27 -0
  44. data/dist/BitTornado/download_bt1.py +882 -0
  45. data/dist/BitTornado/inifile.py +169 -0
  46. data/dist/BitTornado/iprangeparse.py +194 -0
  47. data/dist/BitTornado/launchmanycore.py +381 -0
  48. data/dist/BitTornado/natpunch.py +254 -0
  49. data/dist/BitTornado/parseargs.py +137 -0
  50. data/dist/BitTornado/parsedir.py +150 -0
  51. data/dist/BitTornado/piecebuffer.py +86 -0
  52. data/dist/BitTornado/selectpoll.py +109 -0
  53. data/dist/BitTornado/subnetparse.py +218 -0
  54. data/dist/BitTornado/torrentlistparse.py +38 -0
  55. data/dist/BitTornado/zurllib.py +100 -0
  56. data/dist/murder_client.py +291 -0
  57. data/dist/murder_make_torrent.py +46 -0
  58. data/dist/murder_tracker.py +28 -0
  59. data/doc/examples/Capfile +28 -0
  60. data/lib/capistrano/recipes/deploy/strategy/murder.rb +52 -0
  61. data/lib/murder.rb +43 -0
  62. data/lib/murder/admin.rb +47 -0
  63. data/lib/murder/murder.rb +121 -0
  64. data/murder.gemspec +101 -0
  65. metadata +129 -0
@@ -0,0 +1 @@
+# placeholder
data/dist/BitTornado/BT1/btformats.py
@@ -0,0 +1,100 @@
+# Written by Bram Cohen
+# see LICENSE.txt for license information
+
+from types import StringType, LongType, IntType, ListType, DictType
+from re import compile
+
+reg = compile(r'^[^/\\.~][^/\\]*$')
+
+ints = (LongType, IntType)
+
+def check_info(info):
+    if type(info) != DictType:
+        raise ValueError, 'bad metainfo - not a dictionary'
+    pieces = info.get('pieces')
+    if type(pieces) != StringType or len(pieces) % 20 != 0:
+        raise ValueError, 'bad metainfo - bad pieces key'
+    piecelength = info.get('piece length')
+    if type(piecelength) not in ints or piecelength <= 0:
+        raise ValueError, 'bad metainfo - illegal piece length'
+    name = info.get('name')
+    if type(name) != StringType:
+        raise ValueError, 'bad metainfo - bad name'
+    if not reg.match(name):
+        raise ValueError, 'name %s disallowed for security reasons' % name
+    if info.has_key('files') == info.has_key('length'):
+        raise ValueError, 'single/multiple file mix'
+    if info.has_key('length'):
+        length = info.get('length')
+        if type(length) not in ints or length < 0:
+            raise ValueError, 'bad metainfo - bad length'
+    else:
+        files = info.get('files')
+        if type(files) != ListType:
+            raise ValueError
+        for f in files:
+            if type(f) != DictType:
+                raise ValueError, 'bad metainfo - bad file value'
+            length = f.get('length')
+            if type(length) not in ints or length < 0:
+                raise ValueError, 'bad metainfo - bad length'
+            path = f.get('path')
+            if type(path) != ListType or path == []:
+                raise ValueError, 'bad metainfo - bad path'
+            for p in path:
+                if type(p) != StringType:
+                    raise ValueError, 'bad metainfo - bad path dir'
+                if not reg.match(p):
+                    raise ValueError, 'path %s disallowed for security reasons' % p
+        for i in xrange(len(files)):
+            for j in xrange(i):
+                if files[i]['path'] == files[j]['path']:
+                    raise ValueError, 'bad metainfo - duplicate path'
+
+def check_message(message):
+    if type(message) != DictType:
+        raise ValueError
+    check_info(message.get('info'))
+    if type(message.get('announce')) != StringType:
+        raise ValueError
+
+def check_peers(message):
+    if type(message) != DictType:
+        raise ValueError
+    if message.has_key('failure reason'):
+        if type(message['failure reason']) != StringType:
+            raise ValueError
+        return
+    peers = message.get('peers')
+    if type(peers) == ListType:
+        for p in peers:
+            if type(p) != DictType:
+                raise ValueError
+            if type(p.get('ip')) != StringType:
+                raise ValueError
+            port = p.get('port')
+            if type(port) not in ints or port <= 0:
+                raise ValueError
+            if p.has_key('peer id'):
+                id = p['peer id']
+                if type(id) != StringType or len(id) != 20:
+                    raise ValueError
+    elif type(peers) != StringType or len(peers) % 6 != 0:
+        raise ValueError
+    interval = message.get('interval', 1)
+    if type(interval) not in ints or interval <= 0:
+        raise ValueError
+    minint = message.get('min interval', 1)
+    if type(minint) not in ints or minint <= 0:
+        raise ValueError
+    if type(message.get('tracker id', '')) != StringType:
+        raise ValueError
+    npeers = message.get('num peers', 0)
+    if type(npeers) not in ints or npeers < 0:
+        raise ValueError
+    dpeers = message.get('done peers', 0)
+    if type(dpeers) not in ints or dpeers < 0:
+        raise ValueError
+    last = message.get('last', 0)
+    if type(last) not in ints or last < 0:
+        raise ValueError
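
For orientation, a minimal usage sketch (not one of the gem's files; Python 2 assumed; the torrent file name below is a placeholder): bdecode a .torrent with BitTornado's bencode module and hand the result to check_message, which raises ValueError on malformed metainfo.

from BitTornado.bencode import bdecode
from BitTornado.BT1.btformats import check_message

def is_valid_torrent(path):
    # Read the raw .torrent bytes and bdecode them into a plain dict.
    data = open(path, 'rb').read()
    try:
        check_message(bdecode(data))  # ValueError on bad bencoding or bad metainfo
        return True
    except ValueError:
        return False

print is_valid_torrent('deploy.tar.gz.torrent')  # hypothetical file name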
data/dist/BitTornado/BT1/fakeopen.py
@@ -0,0 +1,89 @@
+# Written by Bram Cohen
+# see LICENSE.txt for license information
+
+from string import join
+
+class FakeHandle:
+    def __init__(self, name, fakeopen):
+        self.name = name
+        self.fakeopen = fakeopen
+        self.pos = 0
+
+    def flush(self):
+        pass
+
+    def close(self):
+        pass
+
+    def seek(self, pos):
+        self.pos = pos
+
+    def read(self, amount = None):
+        old = self.pos
+        f = self.fakeopen.files[self.name]
+        if self.pos >= len(f):
+            return ''
+        if amount is None:
+            self.pos = len(f)
+            return join(f[old:], '')
+        else:
+            self.pos = min(len(f), old + amount)
+            return join(f[old:self.pos], '')
+
+    def write(self, s):
+        f = self.fakeopen.files[self.name]
+        while len(f) < self.pos:
+            f.append(chr(0))
+        self.fakeopen.files[self.name][self.pos : self.pos + len(s)] = list(s)
+        self.pos += len(s)
+
+class FakeOpen:
+    def __init__(self, initial = {}):
+        self.files = {}
+        for key, value in initial.items():
+            self.files[key] = list(value)
+
+    def open(self, filename, mode):
+        """currently treats everything as rw - doesn't support append"""
+        self.files.setdefault(filename, [])
+        return FakeHandle(filename, self)
+
+    def exists(self, file):
+        return self.files.has_key(file)
+
+    def getsize(self, file):
+        return len(self.files[file])
+
+def test_normal():
+    f = FakeOpen({'f1': 'abcde'})
+    assert f.exists('f1')
+    assert not f.exists('f2')
+    assert f.getsize('f1') == 5
+    h = f.open('f1', 'rw')
+    assert h.read(3) == 'abc'
+    assert h.read(1) == 'd'
+    assert h.read() == 'e'
+    assert h.read(2) == ''
+    h.write('fpq')
+    h.seek(4)
+    assert h.read(2) == 'ef'
+    h.write('ghij')
+    h.seek(0)
+    assert h.read() == 'abcdefghij'
+    h.seek(2)
+    h.write('p')
+    h.write('q')
+    assert h.read(1) == 'e'
+    h.seek(1)
+    assert h.read(5) == 'bpqef'
+
+    h2 = f.open('f2', 'rw')
+    assert h2.read() == ''
+    h2.write('mnop')
+    h2.seek(1)
+    assert h2.read() == 'nop'
+
+    assert f.exists('f1')
+    assert f.exists('f2')
+    assert f.getsize('f1') == 10
+    assert f.getsize('f2') == 4
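
The test_normal function above already doubles as usage documentation; a shorter sketch (hypothetical file name, Python 2) of FakeOpen standing in for real file I/O:

from BitTornado.BT1.fakeopen import FakeOpen

fake = FakeOpen({'notes.txt': 'hello'})
h = fake.open('notes.txt', 'rw')  # handles share FakeOpen's backing dict of char lists
h.seek(5)
h.write(' world')                 # writing past the current end extends the in-memory file
h.seek(0)
assert h.read() == 'hello world'
assert fake.getsize('notes.txt') == 11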
data/dist/BitTornado/BT1/makemetafile.py
@@ -0,0 +1,263 @@
+# Written by Bram Cohen
+# multitracker extensions by John Hoffman
+# see LICENSE.txt for license information
+
+from os.path import getsize, split, join, abspath, isdir
+from os import listdir
+from sha import sha
+from copy import copy
+from string import strip
+from BitTornado.bencode import bencode
+from btformats import check_info
+from threading import Event
+from time import time
+from traceback import print_exc
+try:
+    from sys import getfilesystemencoding
+    ENCODING = getfilesystemencoding()
+except:
+    from sys import getdefaultencoding
+    ENCODING = getdefaultencoding()
+
+defaults = [
+    ('announce_list', '',
+        'a list of announce URLs - explained below'),
+    ('httpseeds', '',
+        'a list of http seed URLs - explained below'),
+    ('piece_size_pow2', 0,
+        "which power of 2 to set the piece size to (0 = automatic)"),
+    ('comment', '',
+        "optional human-readable comment to put in .torrent"),
+    ('filesystem_encoding', '',
+        "optional specification for filesystem encoding " +
+        "(set automatically in recent Python versions)"),
+    ('target', '',
+        "optional target file for the torrent")
+    ]
+
+default_piece_len_exp = 18
+
+ignore = ['core', 'CVS']
+
+def print_announcelist_details():
+    print (' announce_list = optional list of redundant/backup tracker URLs, in the format:')
+    print (' url[,url...][|url[,url...]...]')
+    print (' where URLs separated by commas are all tried first')
+    print (' before the next group of URLs separated by the pipe is checked.')
+    print (" If none is given, it is assumed you don't want one in the metafile.")
+    print (' If announce_list is given, clients which support it')
+    print (' will ignore the <announce> value.')
+    print (' Examples:')
+    print (' http://tracker1.com|http://tracker2.com|http://tracker3.com')
+    print (' (tries trackers 1-3 in order)')
+    print (' http://tracker1.com,http://tracker2.com,http://tracker3.com')
+    print (' (tries trackers 1-3 in a randomly selected order)')
+    print (' http://tracker1.com|http://backup1.com,http://backup2.com')
+    print (' (tries tracker 1 first, then tries between the 2 backups randomly)')
+    print ('')
+    print (' httpseeds = optional list of http-seed URLs, in the format:')
+    print (' url[|url...]')
+
+def make_meta_file(file, url, params = {}, flag = Event(),
+                   progress = lambda x: None, progress_percent = 1):
+    if params.has_key('piece_size_pow2'):
+        piece_len_exp = params['piece_size_pow2']
+    else:
+        piece_len_exp = default_piece_len_exp
+    if params.has_key('target') and params['target'] != '':
+        f = params['target']
+    else:
+        a, b = split(file)
+        if b == '':
+            f = a + '.torrent'
+        else:
+            f = join(a, b + '.torrent')
+
+    if piece_len_exp == 0:  # automatic
+        size = calcsize(file)
+        if size > 8L*1024*1024*1024:  # > 8 gig =
+            piece_len_exp = 21  # 2 meg pieces
+        elif size > 2*1024*1024*1024:  # > 2 gig =
+            piece_len_exp = 20  # 1 meg pieces
+        elif size > 512*1024*1024:  # > 512M =
+            piece_len_exp = 19  # 512K pieces
+        elif size > 64*1024*1024:  # > 64M =
+            piece_len_exp = 18  # 256K pieces
+        elif size > 16*1024*1024:  # > 16M =
+            piece_len_exp = 17  # 128K pieces
+        elif size > 4*1024*1024:  # > 4M =
+            piece_len_exp = 16  # 64K pieces
+        else:  # < 4M =
+            piece_len_exp = 15  # 32K pieces
+    piece_length = 2 ** piece_len_exp
+
+    encoding = None
+    if params.has_key('filesystem_encoding'):
+        encoding = params['filesystem_encoding']
+    if not encoding:
+        encoding = ENCODING
+    if not encoding:
+        encoding = 'ascii'
+
+    info = makeinfo(file, piece_length, encoding, flag, progress, progress_percent)
+    if flag.isSet():
+        return
+    check_info(info)
+    h = open(f, 'wb')
+    data = {'info': info, 'announce': strip(url), 'creation date': long(time())}
+
+    if params.has_key('comment') and params['comment']:
+        data['comment'] = params['comment']
+
+    if params.has_key('real_announce_list'):  # shortcut for progs calling in from outside
+        data['announce-list'] = params['real_announce_list']
+    elif params.has_key('announce_list') and params['announce_list']:
+        l = []
+        for tier in params['announce_list'].split('|'):
+            l.append(tier.split(','))
+        data['announce-list'] = l
+
+    if params.has_key('real_httpseeds'):  # shortcut for progs calling in from outside
+        data['httpseeds'] = params['real_httpseeds']
+    elif params.has_key('httpseeds') and params['httpseeds']:
+        data['httpseeds'] = params['httpseeds'].split('|')
+
+    h.write(bencode(data))
+    h.close()
+
+def calcsize(file):
+    if not isdir(file):
+        return getsize(file)
+    total = 0L
+    for s in subfiles(abspath(file)):
+        total += getsize(s[1])
+    return total
+
+
+def uniconvertl(l, e):
+    r = []
+    try:
+        for s in l:
+            r.append(uniconvert(s, e))
+    except UnicodeError:
+        raise UnicodeError('bad filename: '+join(l))
+    return r
+
+def uniconvert(s, e):
+    try:
+        s = unicode(s,e)
+    except UnicodeError:
+        raise UnicodeError('bad filename: '+s)
+    return s.encode('utf-8')
+
+def makeinfo(file, piece_length, encoding, flag, progress, progress_percent=1):
+    file = abspath(file)
+    if isdir(file):
+        subs = subfiles(file)
+        subs.sort()
+        pieces = []
+        sh = sha()
+        done = 0L
+        fs = []
+        totalsize = 0.0
+        totalhashed = 0L
+        for p, f in subs:
+            totalsize += getsize(f)
+
+        for p, f in subs:
+            pos = 0L
+            size = getsize(f)
+            fs.append({'length': size, 'path': uniconvertl(p, encoding)})
+            h = open(f, 'rb')
+            while pos < size:
+                a = min(size - pos, piece_length - done)
+                sh.update(h.read(a))
+                if flag.isSet():
+                    return
+                done += a
+                pos += a
+                totalhashed += a
+
+                if done == piece_length:
+                    pieces.append(sh.digest())
+                    done = 0
+                    sh = sha()
+                if progress_percent:
+                    progress(totalhashed / totalsize)
+                else:
+                    progress(a)
+            h.close()
+        if done > 0:
+            pieces.append(sh.digest())
+        return {'pieces': ''.join(pieces),
+            'piece length': piece_length, 'files': fs,
+            'name': uniconvert(split(file)[1], encoding) }
+    else:
+        size = getsize(file)
+        pieces = []
+        p = 0L
+        h = open(file, 'rb')
+        while p < size:
+            x = h.read(min(piece_length, size - p))
+            if flag.isSet():
+                return
+            pieces.append(sha(x).digest())
+            p += piece_length
+            if p > size:
+                p = size
+            if progress_percent:
+                progress(float(p) / size)
+            else:
+                progress(min(piece_length, size - p))
+        h.close()
+        return {'pieces': ''.join(pieces),
+            'piece length': piece_length, 'length': size,
+            'name': uniconvert(split(file)[1], encoding) }
+
+def subfiles(d):
+    r = []
+    stack = [([], d)]
+    while len(stack) > 0:
+        p, n = stack.pop()
+        if isdir(n):
+            for s in listdir(n):
+                if s not in ignore and s[:1] != '.':
+                    stack.append((copy(p) + [s], join(n, s)))
+        else:
+            r.append((p, n))
+    return r
+
+
+def completedir(dir, url, params = {}, flag = Event(),
+                vc = lambda x: None, fc = lambda x: None):
+    files = listdir(dir)
+    files.sort()
+    ext = '.torrent'
+    if params.has_key('target'):
+        target = params['target']
+    else:
+        target = ''
+
+    togen = []
+    for f in files:
+        if f[-len(ext):] != ext and (f + ext) not in files:
+            togen.append(join(dir, f))
+
+    total = 0
+    for i in togen:
+        total += calcsize(i)
+
+    subtotal = [0]
+    def callback(x, subtotal = subtotal, total = total, vc = vc):
+        subtotal[0] += x
+        vc(float(subtotal[0]) / total)
+    for i in togen:
+        fc(i)
+        try:
+            t = split(i)[-1]
+            if t not in ignore and t[0] != '.':
+                if target != '':
+                    params['target'] = join(target,t+ext)
+                make_meta_file(i, url, params, flag, progress = callback, progress_percent = 0)
+        except ValueError:
+            print_exc()
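
In this gem the real entry point for torrent creation is data/dist/murder_make_torrent.py; purely as a sketch (Python 2; the tracker URL, path, and comment below are placeholders), make_meta_file can also be driven directly:

from BitTornado.BT1.makemetafile import make_meta_file

def report(frac):
    # for a single file, progress is called with the hashed fraction (0.0 - 1.0)
    print 'hashed %3d%%' % int(frac * 100)

make_meta_file('/tmp/build.tar.gz',                         # file (or directory) to share
               'http://tracker.example.com:8998/announce',  # announce URL written into the metafile
               params={'comment': 'deploy build'},
               progress=report)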