murder 0.0.0.pre

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. data/.gitignore +1 -0
  2. data/LICENSE +17 -0
  3. data/README +224 -0
  4. data/Rakefile +52 -0
  5. data/VERSION +1 -0
  6. data/dist/BitTornado/BT1/Choker.py +128 -0
  7. data/dist/BitTornado/BT1/Connecter.py +288 -0
  8. data/dist/BitTornado/BT1/Downloader.py +594 -0
  9. data/dist/BitTornado/BT1/DownloaderFeedback.py +155 -0
  10. data/dist/BitTornado/BT1/Encrypter.py +333 -0
  11. data/dist/BitTornado/BT1/FileSelector.py +245 -0
  12. data/dist/BitTornado/BT1/Filter.py +12 -0
  13. data/dist/BitTornado/BT1/HTTPDownloader.py +251 -0
  14. data/dist/BitTornado/BT1/NatCheck.py +95 -0
  15. data/dist/BitTornado/BT1/PiecePicker.py +320 -0
  16. data/dist/BitTornado/BT1/Rerequester.py +426 -0
  17. data/dist/BitTornado/BT1/Statistics.py +177 -0
  18. data/dist/BitTornado/BT1/Storage.py +584 -0
  19. data/dist/BitTornado/BT1/StorageWrapper.py +1045 -0
  20. data/dist/BitTornado/BT1/StreamCheck.py +135 -0
  21. data/dist/BitTornado/BT1/T2T.py +193 -0
  22. data/dist/BitTornado/BT1/Uploader.py +145 -0
  23. data/dist/BitTornado/BT1/__init__.py +1 -0
  24. data/dist/BitTornado/BT1/btformats.py +100 -0
  25. data/dist/BitTornado/BT1/fakeopen.py +89 -0
  26. data/dist/BitTornado/BT1/makemetafile.py +263 -0
  27. data/dist/BitTornado/BT1/track.py +1067 -0
  28. data/dist/BitTornado/ConfigDir.py +401 -0
  29. data/dist/BitTornado/ConfigReader.py +1068 -0
  30. data/dist/BitTornado/ConnChoice.py +31 -0
  31. data/dist/BitTornado/CreateIcons.py +105 -0
  32. data/dist/BitTornado/CurrentRateMeasure.py +37 -0
  33. data/dist/BitTornado/HTTPHandler.py +167 -0
  34. data/dist/BitTornado/PSYCO.py +5 -0
  35. data/dist/BitTornado/RateLimiter.py +153 -0
  36. data/dist/BitTornado/RateMeasure.py +75 -0
  37. data/dist/BitTornado/RawServer.py +195 -0
  38. data/dist/BitTornado/ServerPortHandler.py +188 -0
  39. data/dist/BitTornado/SocketHandler.py +375 -0
  40. data/dist/BitTornado/__init__.py +63 -0
  41. data/dist/BitTornado/bencode.py +319 -0
  42. data/dist/BitTornado/bitfield.py +162 -0
  43. data/dist/BitTornado/clock.py +27 -0
  44. data/dist/BitTornado/download_bt1.py +882 -0
  45. data/dist/BitTornado/inifile.py +169 -0
  46. data/dist/BitTornado/iprangeparse.py +194 -0
  47. data/dist/BitTornado/launchmanycore.py +381 -0
  48. data/dist/BitTornado/natpunch.py +254 -0
  49. data/dist/BitTornado/parseargs.py +137 -0
  50. data/dist/BitTornado/parsedir.py +150 -0
  51. data/dist/BitTornado/piecebuffer.py +86 -0
  52. data/dist/BitTornado/selectpoll.py +109 -0
  53. data/dist/BitTornado/subnetparse.py +218 -0
  54. data/dist/BitTornado/torrentlistparse.py +38 -0
  55. data/dist/BitTornado/zurllib.py +100 -0
  56. data/dist/murder_client.py +291 -0
  57. data/dist/murder_make_torrent.py +46 -0
  58. data/dist/murder_tracker.py +28 -0
  59. data/doc/examples/Capfile +28 -0
  60. data/lib/capistrano/recipes/deploy/strategy/murder.rb +52 -0
  61. data/lib/murder.rb +43 -0
  62. data/lib/murder/admin.rb +47 -0
  63. data/lib/murder/murder.rb +121 -0
  64. data/murder.gemspec +101 -0
  65. metadata +129 -0
@@ -0,0 +1,162 @@
1
+ # Written by Bram Cohen, Uoti Urpala, and John Hoffman
2
+ # see LICENSE.txt for license information
3
+
4
# Compatibility shims for ancient interpreters (Python < 2.3) that lack the
# True/False builtins and sum().  On any modern interpreter both try-blocks
# succeed, so the fallback branches are never executed.
try:
    True
except NameError:
    # Assign through globals() so the module stays syntactically valid on
    # Python 3, where a literal `True = 1` is a compile-time error.
    globals()['True'] = 1
    globals()['False'] = 0
    globals()['bool'] = lambda x: not not x

try:
    sum([1])
    # negsum(a): count of falsy entries in the boolean list `a`.
    negsum = lambda a: len(a) - sum(a)
except NameError:
    # Pre-2.3 fallback without sum(): accumulate `not y` over the list.
    negsum = lambda a: reduce(lambda x, y: x + (not y), a, 0)
16
+
17
+ def _int_to_booleans(x):
18
+ r = []
19
+ for i in range(8):
20
+ r.append(bool(x & 0x80))
21
+ x <<= 1
22
+ return tuple(r)
23
+
24
+ lookup_table = []
25
+ reverse_lookup_table = {}
26
+ for i in xrange(256):
27
+ x = _int_to_booleans(i)
28
+ lookup_table.append(x)
29
+ reverse_lookup_table[x] = chr(i)
30
+
31
+
32
class Bitfield:
    """Fixed-length bit array tracking which torrent pieces are present.

    Maintains ``numfalse``, the running count of unset bits, so that
    complete() is O(1).
    """

    def __init__(self, length = None, bitstring = None, copyfrom = None):
        # copyfrom wins: clone another Bitfield outright.
        if copyfrom is not None:
            self.length = copyfrom.length
            self.array = copyfrom.array[:]
            self.numfalse = copyfrom.numfalse
            return
        if length is None:
            # raise ValueError(...) (call form) replaces the Python-2-only
            # `raise ValueError, "..."` statement; behavior is identical.
            raise ValueError("length must be provided unless copying from another array")
        self.length = length
        if bitstring is not None:
            # The string must be exactly ceil(length/8) bytes, and any
            # trailing pad bits must be zero.
            extra = len(bitstring) * 8 - length
            if extra < 0 or extra >= 8:
                raise ValueError
            t = lookup_table
            r = []
            for c in bitstring:
                r.extend(t[ord(c)])
            if extra > 0:
                if r[-extra:] != [0] * extra:
                    raise ValueError
                del r[-extra:]
            self.array = r
            self.numfalse = negsum(r)
        else:
            self.array = [False] * length
            self.numfalse = length

    def __setitem__(self, index, val):
        val = bool(val)
        # old value minus new value keeps numfalse consistent whether the
        # bit flips 0->1, 1->0, or stays the same.
        self.numfalse += self.array[index] - val
        self.array[index] = val

    def __getitem__(self, index):
        return self.array[index]

    def __len__(self):
        return self.length

    def tostring(self):
        """Serialize to the wire format: packed bytes, MSB first, zero-padded."""
        booleans = self.array
        t = reverse_lookup_table
        s = len(booleans) % 8
        r = [ t[tuple(booleans[x:x+8])] for x in range(0, len(booleans)-s, 8) ]
        if s:
            # Partial final byte: pad with zero bits on the right.
            r += t[tuple(booleans[-s:] + ([0] * (8-s)))]
        return ''.join(r)

    def complete(self):
        """True when every bit is set."""
        return not self.numfalse
82
+
83
+
84
def test_bitfield():
    """Exercise Bitfield construction, mutation and serialization."""
    # The constructor must reject any bitstring whose byte length or
    # trailing padding is inconsistent with the requested bit count.
    bad = [
        (7, 'ab'), (7, 'ab'), (9, 'abc'), (0, 'a'), (1, ''),
        (7, ''), (8, ''), (9, 'a'), (7, chr(1)),
        (9, chr(0) + chr(0x40)),
    ]
    for length, bits in bad:
        try:
            Bitfield(length, bits)
            assert False
        except ValueError:
            pass

    # Well-formed bitstrings round-trip through tostring() unchanged.
    roundtrips = [
        (0, ''), (1, chr(0x80)), (7, chr(0x02)),
        (8, chr(0xFF)), (9, chr(0) + chr(0x80)),
    ]
    for length, bits in roundtrips:
        assert Bitfield(length, bits).tostring() == bits

    # numfalse bookkeeping: setting an already-set bit is a no-op.
    x = Bitfield(1)
    assert x.numfalse == 1
    x[0] = 1
    assert x.numfalse == 0
    x[0] = 1
    assert x.numfalse == 0
    assert x.tostring() == chr(0x80)

    x = Bitfield(7)
    assert len(x) == 7
    x[6] = 1
    assert x.numfalse == 6
    assert x.tostring() == chr(0x02)

    x = Bitfield(8)
    x[7] = 1
    assert x.tostring() == chr(1)

    x = Bitfield(9)
    x[8] = 1
    assert x.numfalse == 8
    assert x.tostring() == chr(0) + chr(0x80)

    x = Bitfield(8, chr(0xC4))
    assert len(x) == 8
    assert x.numfalse == 5
    assert x.tostring() == chr(0xC4)
@@ -0,0 +1,27 @@
1
+ # Written by John Hoffman
2
+ # see LICENSE.txt for license information
3
+
4
+ from time import *
5
+ import sys
6
+
7
# Clamp limits for the monotonicity correction in RelativeTime.get_time().
_MAXFORWARD = 100
_FUDGE = 1

class RelativeTime:
    """Wrap time() so the reported clock never runs backwards.

    If the system clock jumps backwards, or leaps more than _MAXFORWARD
    seconds ahead, the reading is replaced by the previous value plus
    _FUDGE and the discrepancy is folded into a persistent offset.
    """

    def __init__(self):
        # Last value handed out, and the cumulative correction applied
        # to raw time() readings.
        self.time = time()
        self.offset = 0

    def get_time(self):
        now = time() + self.offset
        if self.time <= now <= self.time + _MAXFORWARD:
            # Normal case: the adjusted clock advanced sanely.
            self.time = now
            return now
        # Clock jumped; creep forward by a small fudge and remember the
        # shift so future readings stay continuous.
        self.time += _FUDGE
        self.offset += self.time - now
        return self.time
23
+
24
# On Windows, keep the clock() pulled in by `from time import *`.
# Elsewhere, shadow it with a never-backwards wrapper around time().
if sys.platform != 'win32':
    _RTIME = RelativeTime()
    def clock():
        return _RTIME.get_time()
@@ -0,0 +1,882 @@
1
+ # Written by Bram Cohen
2
+ # see LICENSE.txt for license information
3
+
4
+ from zurllib import urlopen
5
+ from urlparse import urlparse
6
+ from BT1.btformats import check_message
7
+ from BT1.Choker import Choker
8
+ from BT1.Storage import Storage
9
+ from BT1.StorageWrapper import StorageWrapper
10
+ from BT1.FileSelector import FileSelector
11
+ from BT1.Uploader import Upload
12
+ from BT1.Downloader import Downloader
13
+ from BT1.HTTPDownloader import HTTPDownloader
14
+ from BT1.Connecter import Connecter
15
+ from RateLimiter import RateLimiter
16
+ from BT1.Encrypter import Encoder
17
+ from RawServer import RawServer, autodetect_ipv6, autodetect_socket_style
18
+ from BT1.Rerequester import Rerequester
19
+ from BT1.DownloaderFeedback import DownloaderFeedback
20
+ from RateMeasure import RateMeasure
21
+ from CurrentRateMeasure import Measure
22
+ from BT1.PiecePicker import PiecePicker
23
+ from BT1.Statistics import Statistics
24
+ from ConfigDir import ConfigDir
25
+ from bencode import bencode, bdecode
26
+ from natpunch import UPnP_test
27
+ from sha import sha
28
+ from os import path, makedirs, listdir
29
+ from parseargs import parseargs, formatDefinitions, defaultargs
30
+ from socket import error as socketerror
31
+ from random import seed
32
+ from threading import Thread, Event
33
+ from clock import clock
34
+ from __init__ import createPeerID
35
+
36
# Compatibility shim for pre-2.3 Pythons that lack the True/False builtins.
# On any modern interpreter the try succeeds and nothing is (re)defined.
try:
    True
except NameError:
    # globals() assignment keeps the module importable on Python 3, where
    # a literal `True = 1` is a compile-time syntax error.
    globals()['True'] = 1
    globals()['False'] = 0
41
+
42
# Command-line / config defaults: (name, default value, help text) triples
# consumed by parseargs() and formatDefinitions().
defaults = [
    ('max_uploads', 1,
        "the maximum number of uploads to allow at once."),
    ('keepalive_interval', 120.0,
        'number of seconds to pause between sending keepalives'),
    ('download_slice_size', 2 ** 14,
        "How many bytes to query for per request."),
    ('upload_unit_size', 1460,
        "when limiting upload rate, how many bytes to send at a time"),
    ('request_backlog', 10,
        "maximum number of requests to keep in a single pipe at once."),
    ('max_message_length', 2 ** 23,
        "maximum length prefix encoding you'll accept over the wire - larger values get the connection dropped."),
    ('ip', '',
        "ip to report you have to the tracker."),
    ('minport', 10000, 'minimum port to listen on, counts up if unavailable'),
    ('maxport', 60000, 'maximum port to listen on'),
    ('random_port', 1, 'whether to choose randomly inside the port range ' +
        'instead of counting up linearly'),
    ('responsefile', '',
        'file the server response was stored in, alternative to url'),
    ('url', '',
        'url to get file from, alternative to responsefile'),
    ('selector_enabled', 1,
        'whether to enable the file selector and fast resume function'),
    ('expire_cache_data', 10,
        'the number of days after which you wish to expire old cache data ' +
        '(0 = disabled)'),
    ('priority', '',
        'a list of file priorities separated by commas, must be one per file, ' +
        '0 = highest, 1 = normal, 2 = lowest, -1 = download disabled'),
    ('saveas', '',
        'local file name to save the file as, null indicates query user'),
    ('timeout', 2.0,
        'time to wait between closing sockets which nothing has been received on'),
    ('timeout_check_interval', 1.0,
        'time to wait between checking if any connections have timed out'),
    ('max_slice_length', 2 ** 30,
        "maximum length slice to send to peers, larger requests are ignored"),
    ('max_rate_period', 20.0,
        "maximum amount of time to guess the current rate estimate represents"),
    ('bind', '',
        'comma-separated list of ips/hostnames to bind to locally'),
    # ('ipv6_enabled', autodetect_ipv6(),
    ('ipv6_enabled', 0,
        'allow the client to connect to peers via IPv6'),
    ('ipv6_binds_v4', autodetect_socket_style(),
        "set if an IPv6 server socket won't also field IPv4 connections"),
    ('upnp_nat_access', 0,
        'attempt to autoconfigure a UPnP router to forward a server port ' +
        '(0 = disabled, 1 = mode 1 [fast], 2 = mode 2 [slow])'),
    ('upload_rate_fudge', 5.0,
        'time equivalent of writing to kernel-level TCP buffer, for rate adjustment'),
    ('tcp_ack_fudge', 0.03,
        'how much TCP ACK download overhead to add to upload rate calculations ' +
        '(0 = disabled)'),
    ('display_interval', .5,
        'time between updates of displayed information'),
    ('rerequest_interval', 15,
        'time to wait between requesting more peers'),
    ('min_peers', 20,
        'minimum number of peers to not do rerequesting'),
    ('http_timeout', 5,
        'number of seconds to wait before assuming that an http connection has timed out'),
    ('max_initiate', 40,
        'number of peers at which to stop initiating new connections'),
    ('check_hashes', 1,
        'whether to check hashes on disk'),
    ('max_upload_rate', 10240,
        'maximum kB/s to upload at (0 = no limit, -1 = automatic)'),
    ('max_download_rate', 10240,
        'maximum kB/s to download at (0 = no limit)'),
    ('alloc_type', 'normal',
        'allocation type (may be normal, background, pre-allocate or sparse)'),
    ('alloc_rate', 2.0,
        'rate (in MiB/s) to allocate space at using background allocation'),
    ('buffer_reads', 1,
        'whether to buffer disk reads'),
    ('write_buffer_size', 16,
        'the maximum amount of space to use for buffering disk writes ' +
        '(in megabytes, 0 = disabled)'),
    ('breakup_seed_bitfield', 0,
        'sends an incomplete bitfield and then fills with have messages, '
        'in order to get around stupid ISP manipulation'),
    ('snub_time', 10.0,
        "seconds to wait for data to come in over a connection before assuming it's semi-permanently choked"),
    ('spew', 0,
        "whether to display diagnostic info to stdout"),
    ('rarest_first_cutoff', 2,
        "number of downloads at which to switch from random to rarest first"),
    ('rarest_first_priority_cutoff', 5,
        'the number of peers which need to have a piece before other partials take priority over rarest first'),
    ('min_uploads', 4,
        "the number of uploads to fill out to with extra optimistic unchokes"),
    ('max_files_open', 50,
        'the maximum number of files to keep open at a time, 0 means no limit'),
    ('round_robin_period', 30,
        "the number of seconds between the client's switching upload targets"),
    ('super_seeder', 0,
        "whether to use special upload-efficiency-maximizing routines (only for dedicated seeds)"),
    ('security', 0,
        "whether to enable extra security features intended to prevent abuse"),
    ('max_connections', 64,
        "the absolute maximum number of peers to connect with (0 = no limit)"),
    ('auto_kick', 0,
        "whether to allow the client to automatically kick/ban peers that send bad data"),
    ('double_check', 0,
        "whether to double-check data being written to the disk for errors (may increase CPU load)"),
    ('triple_check', 0,
        "whether to thoroughly check data being written to the disk (may slow disk access)"),
    ('lock_files', 0,
        "whether to lock files the client is working with"),
    ('lock_while_reading', 0,
        "whether to lock access to files being read"),
    ('auto_flush', 0,
        "minutes between automatic flushes to disk (0 = disabled)"),
    ('dedicated_seed_id', '',
        "code to send to tracker identifying as a dedicated seed"),
    ]

# Prefix for the generated usage/help text.
argslistheader = 'Arguments are:\n\n'
163
+
164
+
165
+ def _failfunc(x):
166
+ print x
167
+
168
# old-style downloader
def download(params, filefunc, statusfunc, finfunc, errorfunc, doneflag, cols,
             pathFunc = None, presets = {}, exchandler = None,
             failed = _failfunc, paramfunc = None):
    """Parse params, set up networking and storage, and run one download.

    Blocks in rawserver.listen_forever() until doneflag is set; every
    failure path reports through failed/errorfunc and returns early.
    """
    try:
        config = parse_params(params, presets)
    except ValueError, e:
        failed('error: ' + str(e) + '\nrun with no args for parameter explanations')
        return
    if not config:
        # Empty params: show usage instead of starting.
        errorfunc(get_usage())
        return

    myid = createPeerID()
    seed(myid)  # seed the RNG from the (unique) peer id

    rawserver = RawServer(doneflag, config['timeout_check_interval'],
                          config['timeout'], ipv6_enable = config['ipv6_enabled'],
                          failfunc = failed, errorfunc = exchandler)

    upnp_type = UPnP_test(config['upnp_nat_access'])
    try:
        listen_port = rawserver.find_and_bind(config['minport'], config['maxport'],
                        config['bind'], ipv6_socket_style = config['ipv6_binds_v4'],
                        upnp = upnp_type, randomizer = config['random_port'])
    except socketerror, e:
        failed("Couldn't listen - " + str(e))
        return

    # Torrent metainfo, from a local .torrent file or a URL.
    response = get_response(config['responsefile'], config['url'], failed)
    if not response:
        return

    infohash = sha(bencode(response['info'])).digest()

    d = BT1Download(statusfunc, finfunc, errorfunc, exchandler, doneflag,
                    config, response, infohash, myid, rawserver, listen_port)

    if not d.saveAs(filefunc):
        return

    if pathFunc:
        pathFunc(d.getFilename())

    # old_style: initFiles returns a callable that runs the hash check
    # synchronously and reports success.
    hashcheck = d.initFiles(old_style = True)
    if not hashcheck:
        return
    if not hashcheck():
        return
    if not d.startEngine():
        return
    d.startRerequester()
    d.autoStats()

    statusfunc(activity = 'connecting to peers')

    if paramfunc:
        # Hand the caller live control hooks and identifying info.
        paramfunc({ 'max_upload_rate' : d.setUploadRate,  # change_max_upload_rate(<int KiB/sec>)
                    'max_uploads': d.setConns, # change_max_uploads(<int max uploads>)
                    'listen_port' : listen_port, # int
                    'peer_id' : myid, # string
                    'info_hash' : infohash, # string
                    'start_connection' : d._startConnection, # start_connection((<string ip>, <int port>), <peer id>)
                    })

    rawserver.listen_forever(d.getPortHandler())

    d.shutdown()
237
+
238
+
239
def parse_params(params, presets = {}):
    """Parse command-line params into a config dict.

    Returns None when params is empty (caller shows usage).  Exactly one
    of responsefile/url must end up set, either from a bare positional
    argument or an explicit option; anything else raises ValueError.
    """
    if len(params) == 0:
        return None
    config, args = parseargs(params, defaults, 0, 1, presets = presets)
    if args:
        if config['responsefile'] or config['url']:
            raise ValueError,'must have responsefile or url as arg or parameter, not both'
        # A positional arg is a local .torrent path if it exists, else a URL.
        if path.isfile(args[0]):
            config['responsefile'] = args[0]
        else:
            try:
                urlparse(args[0])
            except:
                raise ValueError, 'bad filename or url'
            config['url'] = args[0]
    elif (config['responsefile'] == '') == (config['url'] == ''):
        # Both empty or both set - either way it's ambiguous.
        raise ValueError, 'need responsefile or url, must have one, cannot have both'
    return config
257
+
258
+
259
def get_usage(defaults = defaults, cols = 100, presets = {}):
    """Return the usage header followed by the formatted option list."""
    formatted = formatDefinitions(defaults, cols, presets)
    return argslistheader + formatted
261
+
262
+
263
def get_response(file, url, errorfunc):
    """Fetch and bdecode torrent metainfo from a local file or a URL.

    Returns the decoded response dict, or None after reporting the
    problem through errorfunc.
    """
    try:
        if file:
            h = open(file, 'rb')
            try:
                line = h.read(10)   # quick test to see if responsefile contains a dict
                front,garbage = line.split(':',1)
                assert front[0] == 'd'
                int(front[1:])
            except:
                errorfunc(file+' is not a valid responsefile')
                return None
            try:
                h.seek(0)
            except:
                # Unseekable handle: reopen the file from scratch instead.
                try:
                    h.close()
                except:
                    pass
                h = open(file, 'rb')
        else:
            try:
                h = urlopen(url)
            except:
                errorfunc(url+' bad url')
                return None
        response = h.read()

    except IOError, e:
        errorfunc('problem getting response info - ' + str(e))
        return None
    try:
        h.close()
    except:
        pass
    try:
        try:
            response = bdecode(response)
        except:
            # Retry leniently; some sources emit slightly malformed bencoding.
            errorfunc("warning: bad data in responsefile")
            response = bdecode(response, sloppy=1)
        check_message(response)
    except ValueError, e:
        errorfunc("got bad file info - " + str(e))
        return None

    return response
310
+
311
+
312
class BT1Download:
    """Drives a single torrent: storage, choking, peers and tracker."""

    def __init__(self, statusfunc, finfunc, errorfunc, excfunc, doneflag,
                 config, response, infohash, id, rawserver, port,
                 appdataobj = None):
        # UI and lifecycle callbacks supplied by the frontend.
        self.statusfunc = statusfunc
        self.finfunc = finfunc
        self.errorfunc = errorfunc
        self.excfunc = excfunc
        self.doneflag = doneflag
        self.config = config
        self.response = response
        self.infohash = infohash
        self.myid = id
        self.rawserver = rawserver
        self.port = port

        self.info = self.response['info']
        # 'pieces' is a flat string of concatenated 20-byte SHA1 digests.
        self.pieces = [self.info['pieces'][x:x+20]
                       for x in xrange(0, len(self.info['pieces']), 20)]
        self.len_pieces = len(self.pieces)
        self.argslistheader = argslistheader
        self.unpauseflag = Event()
        self.unpauseflag.set()
        self.downloader = None
        self.storagewrapper = None
        self.fileselector = None
        self.super_seeding_active = False
        self.filedatflag = Event()
        self.spewflag = Event()
        self.superseedflag = Event()
        self.whenpaused = None
        self.finflag = Event()
        self.rerequest = None
        self.tcp_ack_fudge = config['tcp_ack_fudge']

        self.selector_enabled = config['selector_enabled']
        if appdataobj:
            self.appdataobj = appdataobj
        elif self.selector_enabled:
            # Fast-resume needs a config dir; purge stale cache entries
            # for every torrent except this one.
            self.appdataobj = ConfigDir()
            self.appdataobj.deleteOldCacheData( config['expire_cache_data'],
                                                [self.infohash] )

        self.excflag = self.rawserver.get_exception_flag()
        self.failed = False
        self.checking = False
        self.started = False

        self.picker = PiecePicker(self.len_pieces, config['rarest_first_cutoff'],
                                  config['rarest_first_priority_cutoff'])
        self.choker = Choker(config, rawserver.add_task,
                             self.picker, self.finflag.isSet)
364
+
365
+
366
+ def checkSaveLocation(self, loc):
367
+ if self.info.has_key('length'):
368
+ return path.exists(loc)
369
+ for x in self.info['files']:
370
+ if path.exists(path.join(loc, x['path'][0])):
371
+ return True
372
+ return False
373
+
374
+
375
    def saveAs(self, filefunc, pathfunc = None):
        """Ask the UI (via filefunc) where to save, then create dirs/files.

        Sets self.filename, self.files and self.datalength; returns the
        chosen path, or None if the user cancelled or creation failed.
        """
        try:
            def make(f, forcedir = False):
                # Ensure the parent directory (or f itself, when forcedir)
                # exists before any file is opened there.
                if not forcedir:
                    f = path.split(f)[0]
                if f != '' and not path.exists(f):
                    makedirs(f)

            if self.info.has_key('length'):
                # Single-file torrent.
                file_length = self.info['length']
                file = filefunc(self.info['name'], file_length,
                                self.config['saveas'], False)
                if file is None:
                    return None
                make(file)
                files = [(file, file_length)]
            else:
                # Multi-file torrent: total length is the sum of all files.
                file_length = 0L
                for x in self.info['files']:
                    file_length += x['length']
                file = filefunc(self.info['name'], file_length,
                                self.config['saveas'], True)
                if file is None:
                    return None

                # if this path exists, and no files from the info dict exist, we assume it's a new download and
                # the user wants to create a new directory with the default name
                existing = 0
                if path.exists(file):
                    if not path.isdir(file):
                        self.errorfunc(file + 'is not a dir')
                        return None
                    if len(listdir(file)) > 0:  # if it's not empty
                        for x in self.info['files']:
                            if path.exists(path.join(file, x['path'][0])):
                                existing = 1
                        if not existing:
                            file = path.join(file, self.info['name'])
                            if path.exists(file) and not path.isdir(file):
                                # Strip a stray .torrent suffix and retry.
                                if file[-8:] == '.torrent':
                                    file = file[:-8]
                                if path.exists(file) and not path.isdir(file):
                                    self.errorfunc("Can't create dir - " + self.info['name'])
                                    return None
                make(file, True)

                # alert the UI to any possible change in path
                if pathfunc != None:
                    pathfunc(file)

                files = []
                for x in self.info['files']:
                    n = file
                    for i in x['path']:
                        n = path.join(n, i)
                    files.append((n, x['length']))
                    make(n)
        except OSError, e:
            self.errorfunc("Couldn't allocate dir - " + str(e))
            return None

        self.filename = file
        self.files = files
        self.datalength = file_length

        return file
441
+
442
+
443
    def getFilename(self):
        """Return the save path chosen during saveAs()."""
        return self.filename
445
+
446
+
447
    def _finished(self):
        """Called by StorageWrapper when the last piece checks out."""
        self.finflag.set()
        try:
            self.storage.set_readonly()
        except (IOError, OSError), e:
            self.errorfunc('trouble setting readonly at end - ' + str(e))
        if self.superseedflag.isSet():
            self._set_super_seed()
        # Seeds rotate unchokes more slowly; scale the period with piece size.
        self.choker.set_round_robin_period(
            max( self.config['round_robin_period'],
                 self.config['round_robin_period'] *
                 self.info['piece length'] / 200000 ) )
        self.rerequest_complete()
        self.finfunc()
461
+
462
    def _data_flunked(self, amount, index):
        """A piece failed its hash check; discount its bytes and warn."""
        self.ratemeasure_datarejected(amount)
        if not self.doneflag.isSet():
            self.errorfunc('piece %d failed hash check, re-downloading it' % index)
466
+
467
    def _failed(self, reason):
        """Record an unrecoverable failure and trigger shutdown."""
        self.failed = True
        self.doneflag.set()
        if reason is not None:
            self.errorfunc(reason)
472
+
473
+
474
    def initFiles(self, old_style = False, statusfunc = None):
        """Create Storage/StorageWrapper and (optionally) the FileSelector.

        Returns a hash-check entry point (old_style: a callable already
        run; otherwise the initialize method to schedule), or None on
        error or shutdown.
        """
        if self.doneflag.isSet():
            return None
        if not statusfunc:
            statusfunc = self.statusfunc

        disabled_files = None
        if self.selector_enabled:
            # Parse the optional per-file priority list from the config;
            # each entry must be in -1..2 and there must be one per file.
            self.priority = self.config['priority']
            if self.priority:
                try:
                    self.priority = self.priority.split(',')
                    assert len(self.priority) == len(self.files)
                    self.priority = [int(p) for p in self.priority]
                    for p in self.priority:
                        assert p >= -1
                        assert p <= 2
                except:
                    self.errorfunc('bad priority list given, ignored')
                    self.priority = None

            # Cached resume data (if any) records which files were disabled.
            data = self.appdataobj.getTorrentData(self.infohash)
            try:
                d = data['resume data']['priority']
                assert len(d) == len(self.files)
                disabled_files = [x == -1 for x in d]
            except:
                try:
                    disabled_files = [x == -1 for x in self.priority]
                except:
                    pass

        try:
            try:
                self.storage = Storage(self.files, self.info['piece length'],
                                       self.doneflag, self.config, disabled_files)
            except IOError, e:
                self.errorfunc('trouble accessing files - ' + str(e))
                return None
            if self.doneflag.isSet():
                return None

            self.storagewrapper = StorageWrapper(self.storage, self.config['download_slice_size'],
                    self.pieces, self.info['piece length'], self._finished, self._failed,
                    statusfunc, self.doneflag, self.config['check_hashes'],
                    self._data_flunked, self.rawserver.add_task,
                    self.config, self.unpauseflag)

        except ValueError, e:
            self._failed('bad data - ' + str(e))
        except IOError, e:
            self._failed('IOError - ' + str(e))
        if self.doneflag.isSet():
            return None

        if self.selector_enabled:
            self.fileselector = FileSelector(self.files, self.info['piece length'],
                                             self.appdataobj.getPieceDir(self.infohash),
                                             self.storage, self.storagewrapper,
                                             self.rawserver.add_task,
                                             self._failed)
            if data:
                data = data.get('resume data')
                if data:
                    # Restore previous piece/priority state for fast resume.
                    self.fileselector.unpickle(data)

        self.checking = True
        if old_style:
            return self.storagewrapper.old_style_init()
        return self.storagewrapper.initialize
544
+
545
+
546
    def getCachedTorrentData(self):
        """Return any cached resume/config data for this torrent."""
        return self.appdataobj.getTorrentData(self.infohash)
548
+
549
+
550
    def _make_upload(self, connection, ratelimiter, totalup):
        # Factory handed to Connecter: one Upload per peer connection.
        return Upload(connection, ratelimiter, totalup,
                      self.choker, self.storagewrapper, self.picker,
                      self.config)
554
+
555
    def _kick_peer(self, connection):
        """Close a misbehaving peer's connection on the next event-loop pass."""
        # Default argument pins the current connection in the closure.
        def k(connection = connection):
            connection.close()
        self.rawserver.add_task(k,0)
559
+
560
    def _ban_peer(self, ip):
        """Refuse further connections from ip (delegates to the Encoder)."""
        self.encoder_ban(ip)
562
+
563
    def _received_raw_data(self, x):
        # Charge a fraction of received bytes against the upload rate
        # limiter, approximating outgoing TCP ACK overhead.
        if self.tcp_ack_fudge:
            x = int(x*self.tcp_ack_fudge)
            self.ratelimiter.adjust_sent(x)
#            self.upmeasure.update_rate(x)
568
+
569
    def _received_data(self, x):
        # Feed the download rate meter and the time-remaining estimator.
        self.downmeasure.update_rate(x)
        self.ratemeasure.data_came_in(x)
572
+
573
    def _received_http_data(self, x):
        """Like _received_data, but also credits the HTTP-seed downloader."""
        self.downmeasure.update_rate(x)
        self.ratemeasure.data_came_in(x)
        self.downloader.external_data_received(x)
577
+
578
    def _cancelfunc(self, pieces):
        # FileSelector callback: stop downloading deprioritized pieces.
        self.downloader.cancel_piece_download(pieces)
        self.httpdownloader.cancel_piece_download(pieces)
581
    def _reqmorefunc(self, pieces):
        # FileSelector callback: re-queue pieces that became wanted again.
        self.downloader.requeue_piece_download(pieces)
583
+
584
    def startEngine(self, ratelimiter = None, statusfunc = None):
        """Wire up downloader/uploader/encoder once hash-checking is done.

        Returns True on success, False if shutdown was already requested.
        """
        if self.doneflag.isSet():
            return False
        if not statusfunc:
            statusfunc = self.statusfunc

        self.checking = False

        # Tell the picker which pieces the hash check found on disk.
        for i in xrange(self.len_pieces):
            if self.storagewrapper.do_I_have(i):
                self.picker.complete(i)
        self.upmeasure = Measure(self.config['max_rate_period'],
                                 self.config['upload_rate_fudge'])
        self.downmeasure = Measure(self.config['max_rate_period'])

        if ratelimiter:
            # Caller-supplied (shared) limiter, e.g. from launchmany.
            self.ratelimiter = ratelimiter
        else:
            self.ratelimiter = RateLimiter(self.rawserver.add_task,
                                           self.config['upload_unit_size'],
                                           self.setConns)
            self.ratelimiter.set_upload_rate(self.config['max_upload_rate'])

        self.ratemeasure = RateMeasure()
        self.ratemeasure_datarejected = self.ratemeasure.data_rejected

        self.downloader = Downloader(self.storagewrapper, self.picker,
            self.config['request_backlog'], self.config['max_rate_period'],
            self.len_pieces, self.config['download_slice_size'],
            self._received_data, self.config['snub_time'], self.config['auto_kick'],
            self._kick_peer, self._ban_peer)
        self.downloader.set_download_rate(self.config['max_download_rate'])
        self.connecter = Connecter(self._make_upload, self.downloader, self.choker,
                            self.len_pieces, self.upmeasure, self.config,
                            self.ratelimiter, self.rawserver.add_task)
        self.encoder = Encoder(self.connecter, self.rawserver,
            self.myid, self.config['max_message_length'], self.rawserver.add_task,
            self.config['keepalive_interval'], self.infohash,
            self._received_raw_data, self.config)
        self.encoder_ban = self.encoder.ban

        self.httpdownloader = HTTPDownloader(self.storagewrapper, self.picker,
            self.rawserver, self.finflag, self.errorfunc, self.downloader,
            self.config['max_rate_period'], self.infohash, self._received_http_data,
            self.connecter.got_piece)
        # HTTP seeds are only useful while still downloading.
        if self.response.has_key('httpseeds') and not self.finflag.isSet():
            for u in self.response['httpseeds']:
                self.httpdownloader.make_download(u)

        if self.selector_enabled:
            self.fileselector.tie_in(self.picker, self._cancelfunc,
                    self._reqmorefunc, self.rerequest_ondownloadmore)
            if self.priority:
                self.fileselector.set_priorities_now(self.priority)
            self.appdataobj.deleteTorrentData(self.infohash)
                                # erase old data once you've started modifying it

        if self.config['super_seeder']:
            self.set_super_seed()

        self.started = True
        return True
646
+
647
+
648
    def rerequest_complete(self):
        """Announce 'completed' to the tracker (event code 1)."""
        if self.rerequest:
            self.rerequest.announce(1)
651
+
652
    def rerequest_stopped(self):
        """Announce 'stopped' to the tracker (event code 2)."""
        if self.rerequest:
            self.rerequest.announce(2)
655
+
656
    def rerequest_lastfailed(self):
        """Whether the most recent tracker announce failed."""
        if self.rerequest:
            return self.rerequest.last_failed
        return False
660
+
661
    def rerequest_ondownloadmore(self):
        """Nudge the tracker requester when more peers are wanted."""
        if self.rerequest:
            self.rerequest.hit()
664
+
665
    def startRerequester(self, seededfunc = None, force_rapid_update = False):
        """Create and start the periodic tracker announcer."""
        # Prefer the multitracker extension's tiered list when present.
        if self.response.has_key('announce-list'):
            trackerlist = self.response['announce-list']
        else:
            trackerlist = [[self.response['announce']]]

        self.rerequest = Rerequester(trackerlist, self.config['rerequest_interval'],
            self.rawserver.add_task, self.connecter.how_many_connections,
            self.config['min_peers'], self.encoder.start_connections,
            self.rawserver.add_task, self.storagewrapper.get_amount_left,
            self.upmeasure.get_total, self.downmeasure.get_total, self.port, self.config['ip'],
            self.myid, self.infohash, self.config['http_timeout'],
            self.errorfunc, self.excfunc, self.config['max_initiate'],
            self.doneflag, self.upmeasure.get_rate, self.downmeasure.get_rate,
            self.unpauseflag, self.config['dedicated_seed_id'],
            seededfunc, force_rapid_update )

        self.rerequest.start()
683
+
684
+
685
def _init_stats(self):
    """Build the Statistics object and wire it to the live rate measures."""
    self.statistics = Statistics(self.upmeasure, self.downmeasure,
        self.connecter, self.httpdownloader, self.ratelimiter,
        self.rerequest_lastfailed, self.filedatflag)
    # Multi-file torrents ('files' key in the info dict) also get per-file
    # stats; 'in' replaces the deprecated dict.has_key().
    if 'files' in self.info:
        self.statistics.set_dirstats(self.files, self.info['piece length'])
    if self.config['spew']:
        self.spewflag.set()
693
+
694
def autoStats(self, displayfunc = None):
    """Start periodic statistics reporting via DownloaderFeedback.

    displayfunc -- callback receiving the stats display; falls back to
        self.statusfunc when not given.
    """
    displayfunc = displayfunc or self.statusfunc

    self._init_stats()
    DownloaderFeedback(self.choker, self.httpdownloader, self.rawserver.add_task,
                       self.upmeasure.get_rate, self.downmeasure.get_rate,
                       self.ratemeasure, self.storagewrapper.get_stats,
                       self.datalength, self.finflag, self.spewflag, self.statistics,
                       displayfunc, self.config['display_interval'])
704
+
705
def startStats(self):
    """Initialize statistics and return a callable that gathers a snapshot."""
    self._init_stats()
    feedback = DownloaderFeedback(
        self.choker, self.httpdownloader, self.rawserver.add_task,
        self.upmeasure.get_rate, self.downmeasure.get_rate,
        self.ratemeasure, self.storagewrapper.get_stats,
        self.datalength, self.finflag, self.spewflag, self.statistics)
    return feedback.gather
712
+
713
+
714
def getPortHandler(self):
    """Return the encoder object that handles the listening port."""
    return self.encoder
716
+
717
+
718
def shutdown(self, torrentdata = None):
    """Flush state and shut the torrent down.

    torrentdata -- optional dict persisted together with resume data.
        A fresh dict is created per call when omitted; the old mutable
        default argument ({}) was mutated below and therefore shared
        across calls.

    Returns True on a clean shutdown; if it returns False (failure or an
    exception flag set), the caller may wish to auto-restart the torrent.
    """
    if torrentdata is None:
        torrentdata = {}
    if self.checking or self.started:
        self.storagewrapper.sync()
        self.storage.close()
        self.rerequest_stopped()
    if self.fileselector and self.started:
        if not self.failed:
            self.fileselector.finish()
            torrentdata['resume data'] = self.fileselector.pickle()
        try:
            self.appdataobj.writeTorrentData(self.infohash,torrentdata)
        except Exception:
            # best-effort persistence: on any write failure drop stale data
            # (was a bare except:, which also swallowed SystemExit etc.)
            self.appdataobj.deleteTorrentData(self.infohash) # clear it
    return not self.failed and not self.excflag.isSet()
733
+
734
+
735
def setUploadRate(self, rate):
    """Asynchronously apply a new maximum upload rate.

    The change is queued onto the rawserver thread; silently ignored when
    the rawserver does not exist yet (AttributeError).
    """
    def apply_cap(self = self, rate = rate):
        self.config['max_upload_rate'] = rate
        self.ratelimiter.set_upload_rate(rate)
    try:
        self.rawserver.add_task(apply_cap)
    except AttributeError:
        pass
743
+
744
def setConns(self, conns, conns2 = None):
    """Asynchronously set min_uploads (conns) and max_uploads (conns2).

    conns2 defaults to conns; large values (>30) also raise max_initiate.
    Silently ignored when the rawserver does not exist yet.
    """
    if not conns2:
        conns2 = conns
    def apply_conns(self = self, conns = conns, conns2 = conns2):
        self.config['min_uploads'] = conns
        self.config['max_uploads'] = conns2
        if conns > 30:
            self.config['max_initiate'] = conns + 10
    try:
        self.rawserver.add_task(apply_conns)
    except AttributeError:
        pass
756
+
757
def setDownloadRate(self, rate):
    """Asynchronously apply a new maximum download rate.

    Queued onto the rawserver thread; silently ignored when the rawserver
    does not exist yet (AttributeError).
    """
    def apply_cap(self = self, rate = rate):
        self.config['max_download_rate'] = rate
        self.downloader.set_download_rate(rate)
    try:
        self.rawserver.add_task(apply_cap)
    except AttributeError:
        pass
765
+
766
def startConnection(self, ip, port, id):
    """Initiate an outgoing peer connection to ip:port expecting peer *id*."""
    address = (ip, port)
    self.encoder._start_connection(address, id)
768
+
769
+ def _startConnection(self, ipandport, id):
770
+ self.encoder._start_connection(ipandport, id)
771
+
772
def setInitiate(self, initiate):
    """Asynchronously set the maximum number of connections to initiate.

    Silently ignored when the rawserver does not exist yet.
    """
    def apply_initiate(self = self, initiate = initiate):
        self.config['max_initiate'] = initiate
    try:
        self.rawserver.add_task(apply_initiate)
    except AttributeError:
        pass
779
+
780
def getConfig(self):
    """Return the live configuration dict for this download."""
    return self.config
782
+
783
def getDefaults(self):
    """Return the module-level default arguments wrapped by defaultargs()."""
    return defaultargs(defaults)
785
+
786
def getUsageText(self):
    """Return the header text describing the argument list."""
    return self.argslistheader
788
+
789
def reannounce(self, special = None):
    """Asynchronously force a tracker reannounce.

    special -- optional specific tracker URL to announce to
        (passed as specialurl).
    Silently ignored when the rawserver does not exist yet.
    """
    def do_announce(self = self, special = special):
        if special is None:
            self.rerequest.announce()
        else:
            self.rerequest.announce(specialurl = special)
    try:
        self.rawserver.add_task(do_announce)
    except AttributeError:
        pass
799
+
800
def getResponse(self):
    """Return the decoded metainfo response dict, or None when not loaded.

    Only AttributeError (self.response not yet set) maps to None; the
    original bare except: also hid unrelated errors.
    """
    try:
        return self.response
    except AttributeError:
        return None
805
+
806
+ # def Pause(self):
807
+ # try:
808
+ # if self.storagewrapper:
809
+ # self.rawserver.add_task(self._pausemaker, 0)
810
+ # except:
811
+ # return False
812
+ # self.unpauseflag.clear()
813
+ # return True
814
+ #
815
+ # def _pausemaker(self):
816
+ # self.whenpaused = clock()
817
+ # self.unpauseflag.wait() # sticks a monkey wrench in the main thread
818
+ #
819
+ # def Unpause(self):
820
+ # self.unpauseflag.set()
821
+ # if self.whenpaused and clock()-self.whenpaused > 60:
822
+ # def r(self = self):
823
+ # self.rerequest.announce(3) # rerequest automatically if paused for >60 seconds
824
+ # self.rawserver.add_task(r)
825
+
826
def Pause(self):
    """Pause the torrent; returns False when storage is not initialized yet.

    Clears the unpause flag immediately; the component pausing itself runs
    on the rawserver thread via onPause.
    """
    if self.storagewrapper:
        self.unpauseflag.clear()
        self.rawserver.add_task(self.onPause)
        return True
    return False
832
+
833
def onPause(self):
    """Rawserver-thread half of Pause: stamp the pause time, halt components.

    Records clock() so Unpause can detect long pauses; does nothing further
    if the downloader is not up yet.
    """
    self.whenpaused = clock()
    downloader = self.downloader
    if not downloader:
        return
    for component in (downloader, self.encoder, self.choker):
        component.pause(True)
840
+
841
def Unpause(self):
    """Resume the torrent; the real work runs on the rawserver thread."""
    self.unpauseflag.set()
    self.rawserver.add_task(self.onUnpause)
844
+
845
def onUnpause(self):
    """Rawserver-thread half of Unpause: resume components, maybe reannounce.

    Does nothing if the downloader is not up yet.
    """
    if not self.downloader:
        return
    for component in (self.downloader, self.encoder, self.choker):
        component.pause(False)
    if self.rerequest and self.whenpaused and clock()-self.whenpaused > 60:
        self.rerequest.announce(3) # rerequest automatically if paused for >60 seconds
853
+
854
def set_super_seed(self):
    """Enable super-seed mode; takes effect once the download is finished.

    Silently ignored (AttributeError) when components are not created yet.
    """
    try:
        self.superseedflag.set()
        def activate(self = self):
            if self.finflag.isSet():
                self._set_super_seed()
        self.rawserver.add_task(activate)
    except AttributeError:
        pass
863
+
864
+ def _set_super_seed(self):
865
+ if not self.super_seeding_active:
866
+ self.super_seeding_active = True
867
+ self.errorfunc(' ** SUPER-SEED OPERATION ACTIVE **\n' +
868
+ ' please set Max uploads so each peer gets 6-8 kB/s')
869
+ def s(self = self):
870
+ self.downloader.set_super_seed()
871
+ self.choker.set_super_seed()
872
+ self.rawserver.add_task(s)
873
+ if self.finflag.isSet(): # mode started when already finished
874
+ def r(self = self):
875
+ self.rerequest.announce(3) # so after kicking everyone off, reannounce
876
+ self.rawserver.add_task(r)
877
+
878
def am_I_finished(self):
    """Return whether the finished flag has been set."""
    return self.finflag.isSet()
880
+
881
def get_transfer_stats(self):
    """Return the (upload total, download total) pair from the rate measures."""
    uploaded = self.upmeasure.get_total()
    downloaded = self.downmeasure.get_total()
    return uploaded, downloaded