tradedangerous 12.7.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87) hide show
  1. py.typed +1 -0
  2. trade.py +49 -0
  3. tradedangerous/__init__.py +43 -0
  4. tradedangerous/cache.py +1381 -0
  5. tradedangerous/cli.py +136 -0
  6. tradedangerous/commands/TEMPLATE.py +74 -0
  7. tradedangerous/commands/__init__.py +244 -0
  8. tradedangerous/commands/buildcache_cmd.py +102 -0
  9. tradedangerous/commands/buy_cmd.py +427 -0
  10. tradedangerous/commands/commandenv.py +372 -0
  11. tradedangerous/commands/exceptions.py +94 -0
  12. tradedangerous/commands/export_cmd.py +150 -0
  13. tradedangerous/commands/import_cmd.py +222 -0
  14. tradedangerous/commands/local_cmd.py +243 -0
  15. tradedangerous/commands/market_cmd.py +207 -0
  16. tradedangerous/commands/nav_cmd.py +252 -0
  17. tradedangerous/commands/olddata_cmd.py +270 -0
  18. tradedangerous/commands/parsing.py +221 -0
  19. tradedangerous/commands/rares_cmd.py +298 -0
  20. tradedangerous/commands/run_cmd.py +1521 -0
  21. tradedangerous/commands/sell_cmd.py +262 -0
  22. tradedangerous/commands/shipvendor_cmd.py +60 -0
  23. tradedangerous/commands/station_cmd.py +68 -0
  24. tradedangerous/commands/trade_cmd.py +181 -0
  25. tradedangerous/commands/update_cmd.py +67 -0
  26. tradedangerous/corrections.py +55 -0
  27. tradedangerous/csvexport.py +234 -0
  28. tradedangerous/db/__init__.py +27 -0
  29. tradedangerous/db/adapter.py +192 -0
  30. tradedangerous/db/config.py +107 -0
  31. tradedangerous/db/engine.py +259 -0
  32. tradedangerous/db/lifecycle.py +332 -0
  33. tradedangerous/db/locks.py +208 -0
  34. tradedangerous/db/orm_models.py +500 -0
  35. tradedangerous/db/paths.py +113 -0
  36. tradedangerous/db/utils.py +661 -0
  37. tradedangerous/edscupdate.py +565 -0
  38. tradedangerous/edsmupdate.py +474 -0
  39. tradedangerous/formatting.py +210 -0
  40. tradedangerous/fs.py +156 -0
  41. tradedangerous/gui.py +1146 -0
  42. tradedangerous/mapping.py +133 -0
  43. tradedangerous/mfd/__init__.py +103 -0
  44. tradedangerous/mfd/saitek/__init__.py +3 -0
  45. tradedangerous/mfd/saitek/directoutput.py +678 -0
  46. tradedangerous/mfd/saitek/x52pro.py +195 -0
  47. tradedangerous/misc/checkpricebounds.py +287 -0
  48. tradedangerous/misc/clipboard.py +49 -0
  49. tradedangerous/misc/coord64.py +83 -0
  50. tradedangerous/misc/csvdialect.py +57 -0
  51. tradedangerous/misc/derp-sentinel.py +35 -0
  52. tradedangerous/misc/diff-system-csvs.py +159 -0
  53. tradedangerous/misc/eddb.py +81 -0
  54. tradedangerous/misc/eddn.py +349 -0
  55. tradedangerous/misc/edsc.py +437 -0
  56. tradedangerous/misc/edsm.py +121 -0
  57. tradedangerous/misc/importeddbstats.py +54 -0
  58. tradedangerous/misc/prices-json-exp.py +179 -0
  59. tradedangerous/misc/progress.py +194 -0
  60. tradedangerous/plugins/__init__.py +249 -0
  61. tradedangerous/plugins/edcd_plug.py +371 -0
  62. tradedangerous/plugins/eddblink_plug.py +861 -0
  63. tradedangerous/plugins/edmc_batch_plug.py +133 -0
  64. tradedangerous/plugins/spansh_plug.py +2647 -0
  65. tradedangerous/prices.py +211 -0
  66. tradedangerous/submit-distances.py +422 -0
  67. tradedangerous/templates/Added.csv +37 -0
  68. tradedangerous/templates/Category.csv +17 -0
  69. tradedangerous/templates/RareItem.csv +143 -0
  70. tradedangerous/templates/TradeDangerous.sql +338 -0
  71. tradedangerous/tools.py +40 -0
  72. tradedangerous/tradecalc.py +1302 -0
  73. tradedangerous/tradedb.py +2320 -0
  74. tradedangerous/tradeenv.py +313 -0
  75. tradedangerous/tradeenv.pyi +109 -0
  76. tradedangerous/tradeexcept.py +131 -0
  77. tradedangerous/tradeorm.py +183 -0
  78. tradedangerous/transfers.py +192 -0
  79. tradedangerous/utils.py +243 -0
  80. tradedangerous/version.py +16 -0
  81. tradedangerous-12.7.6.dist-info/METADATA +106 -0
  82. tradedangerous-12.7.6.dist-info/RECORD +87 -0
  83. tradedangerous-12.7.6.dist-info/WHEEL +5 -0
  84. tradedangerous-12.7.6.dist-info/entry_points.txt +3 -0
  85. tradedangerous-12.7.6.dist-info/licenses/LICENSE +373 -0
  86. tradedangerous-12.7.6.dist-info/top_level.txt +2 -0
  87. tradegui.py +24 -0
@@ -0,0 +1,159 @@
1
+ #! /usr/bin/env python
2
+ # Small tool for comparing two system.csv files.
3
+ # Usage:
4
+ # misc/diff-system-csvs.py <old file> <new file>
5
+ # Checks for stars that changed names, stars that moved
6
+ # and conflicts where two stars occupy the same space.
7
+
8
+ # Writes output suitable for "corrections.py" to stderr,
9
+ # so you could do something like:
10
+ # $ diff-system-csvs.py oldSystem.csv newSystem.csv 2>corrections.txt
11
+
12
+ import sys
13
+
14
# Validate the command line up front, before doing any heavier work.
if len(sys.argv) != 3:
    raise SystemExit("Usage: {} <old file> <new file>".format(
        sys.argv[0]
    ))
18
+
19
+ import csv
20
+ import re
21
+ from pathlib import Path
22
+ from collections import namedtuple
23
+
24
class Loc(namedtuple('Loc', ('x', 'y', 'z'))):
    """A 3D star coordinate; renders as "x,y,z" for reports."""
    
    def __str__(self):
        return "{},{},{}".format(self.x, self.y, self.z)
27
+
28
+
29
class Item(namedtuple('Item', ('norm', 'name', 'loc'))):
    """One system entry: normalized lookup key, original name, location."""
31
+
32
+
33
# Matches every character that is NOT a letter, digit, apostrophe or
# space; used to strip punctuation when normalizing names for lookup.
normalizeRe = re.compile(r"[^A-Za-z0-9' ]")
34
+
35
+
36
def readFile(filename):
    """
    Load a System.csv-style file and index its rows two ways.
    
    Returns a (names, locs) pair: names maps normalized-uppercase name
    to Item, locs maps Loc to Item. Name clashes and duplicate
    locations are reported to stdout as they are encountered.
    """
    path = Path(filename)
    if not path.exists():
        raise SystemExit("File not found: {}".format(filename))
    
    names, locs = {}, {}
    
    with path.open("r", encoding="utf-8") as fh:
        csvin = csv.reader(fh, delimiter=',', quotechar='\'', doublequote=True)
        next(csvin)  # skip the heading row
        
        for row in csvin:
            name = row[0]
            x, y, z = float(row[1]), float(row[2]), float(row[3])
            
            normalized = normalizeRe.sub('', name).upper()
            prevEntry = names.get(normalized)
            if prevEntry is not None:
                print("Name clash: {}, this entry: {}, prev entry: {}".format(
                    normalized,
                    name,
                    prevEntry.name
                ))
            item = Item(normalized, name, Loc(x, y, z))
            names[normalized] = item
            
            dupe = locs.get(item.loc)
            if dupe is not None:
                print("{}: Duplicate location: {} and {} at {}".format(
                    filename, dupe.name, name, item.loc
                ))
            else:
                locs[item.loc] = item
    
    return names, locs
75
+
76
+
77
oldNames, oldLocs = readFile(sys.argv[1])
newNames, newLocs = readFile(sys.argv[2])

# Compare every old entry against the new data set, reporting renames,
# moves and removals. Correction entries go to stderr so the caller can
# redirect them into a corrections file.
for oldItem in oldNames.values():
    try:
        # Look the item up in the new names dict
        newItem = newNames[oldItem.norm]
    except KeyError:
        pass
    else:
        if oldItem.name != newItem.name:
            # BUGFIX: the original compared oldItem.name.upper() with
            # itself, which is always True, so the corrections branch
            # below was unreachable. Compare against the NEW name.
            if oldItem.name.upper() == newItem.name.upper():
                # Case changed, we can live with this.
                print("CAUTION: {} changed to {}".format(
                    oldItem.name,
                    newItem.name,
                ))
            else:
                # Punctuation or something else, we need
                # a correction.
                print("{} became {}".format(
                    oldItem.name, newItem.name
                ))
                print(" \"{}\": \"{}\",".format(
                    oldItem.name.upper(),
                    newItem.name,
                ), file=sys.stderr)
        
        # Name didn't change, did the position?
        if oldItem.loc != newItem.loc:
            print("{} moved from {} -> {}".format(
                oldItem.name, oldItem.loc, newItem.loc
            ))
        
        # We don't need to do a location check on this one.
        newLocs.pop(newItem.loc, None)
        continue
    
    # We didn't find the old name in the new list, check
    # to see if there is a new star at the old position.
    try:
        newItem = newLocs[oldItem.loc]
    except KeyError:
        pass
    else:
        # We found something at the exact loc, which we assume means
        # the name changed; consume the entry so it isn't reported as
        # "added" in the final loop. (We just looked it up, so a plain
        # del cannot raise here.)
        del newLocs[oldItem.loc]
        print("{} ({}) changed name to {}".format(
            oldItem.name,
            oldItem.loc,
            newItem.name
        ))
        print(" \"{}\": \"{}\",".format(
            oldItem.name.upper(),
            newItem.name,
        ), file=sys.stderr)
        continue
    
    # We didn't find it, so as best we can tell it has been removed.
    # There's no easy way for us to catch the case of a rename combined
    # with a minor relocation.
    print("{} ({}) was removed".format(
        oldItem.name,
        oldItem.loc
    ))
    print(" \"{}\": DELETED,".format(
        oldItem.name.upper(),
    ), file=sys.stderr)


# Anything left in the new locations that wasn't present in the old
# data is a genuinely new star.
for newLoc, newItem in newLocs.items():
    if newLoc in oldLocs:
        continue
    print("{} ({}) was added".format(newItem.name, newLoc))
@@ -0,0 +1,81 @@
1
+ #! /usr/bin/env python
2
+
3
+ """
4
+ Provides a simple API for streaming data from EDDB in essentially raw format.
5
+
6
+ Example:
7
+ import misc.eddb
8
+ for sysdata in misc.eddb.SystemsQuery():
9
+ print(sysdata)
10
+
11
+ Original author: oliver@kfs.org
12
+ """
13
+
14
+ import transfers
15
+
16
# Base URL for EDDB's v3 JSON archive endpoints.
BASE_URL = "http://eddb.io/archive/v3/"
COMMODITIES_JSON = BASE_URL + "commodities.json"        # commodity definitions
SYSTEMS_JSON = BASE_URL + "systems.json"                # star systems
STATIONS_EXT_JSON = BASE_URL + "stations.json"          # stations, incl. trade data
STATIONS_LITE_JSON = BASE_URL + "stations_lite.json"    # stations, no trade data
21
+
22
+
23
class EDDBQuery:
    """
    Abstract base for streaming one EDDB JSON data set.
    
    Derived classes supply 'url'. Constructing an instance downloads
    the JSON document; iterating the instance yields its entries.
    
    Example:
        for entity in EDDBQuery():
            print(entity)
    """
    
    url = None  # Define in derived classes
    
    def __init__(self):
        assert self.url
        self.jsonData = transfers.get_json_data(self.url)
    
    def __iter__(self):
        yield from self.jsonData
41
+
42
class CommoditiesQuery(EDDBQuery):
    """
    Streams the EDDB commodity definitions.
    
    Example:
        for comm in CommoditiesQuery():
            print(comm['name'])
    """
    url = COMMODITIES_JSON
51
+
52
class SystemsQuery(EDDBQuery):
    """
    Streams the EDDB star-system records.
    
    Example:
        for system in SystemsQuery():
            print(system['name'])
    """
    url = SYSTEMS_JSON
61
+
62
class StationsQuery(EDDBQuery):
    """
    Streams the EDDB station records, excluding trade data.
    
    Example:
        for station in StationsQuery():
            print(station['name'])
    """
    url = STATIONS_LITE_JSON
71
+
72
class StationsExtQuery(StationsQuery):
    """
    Streams the extended EDDB station records, including trade data.
    
    Example:
        for station in StationsExtQuery():
            print(station['name'])
    """
    url = STATIONS_EXT_JSON
+
@@ -0,0 +1,349 @@
1
+ """
2
+ Utilities for reading from the Elite Dangerous Data Network.
3
+
4
+ Example usages:
5
+
6
+ # Simple:
7
+ import eddn
8
+ listener = eddn.Listener()
9
+ while True:
10
+ batch = listener.get_batch()
11
+ if batch:
12
+ print("Got batch of %d" % len(batch))
13
+
14
+ # Advanced:
15
+ import eddn
16
+
17
+ listener = eddn.Listener(
18
+ minBatchTime=3, # Allow at least 3-s for a batch,
19
+ maxBatchTime=5, # But allow upto 5s,
20
+ reconnectTimeout=300, # Reconnect after 5 minutes without data,
21
+ burstLimit=500, # Drain upto 500 prices between polls,
22
+ )
23
+
24
+ def handle_listener_error(e):
25
+ print("Listener Error:", e)
26
+
27
+ def process_batch(batch):
28
+ stations = set()
29
+ items = set()
30
+ software = set()
31
+ for price in batch:
32
+ stations.add(price.station)
33
+ items.add(price.item)
34
+ software.add(price.software + ":" + price.version)
35
+ print("Batch: %d entries" % len(batch))
36
+ print("Stations: %s" % (','.join(stations)))
37
+ print("Items: %s" % (','.join(items)))
38
+
39
+ print("Listening for 100 batches")
40
+ while listener.stats['batches'] < 100:
41
+ batch = listener.get_batch(onerror=handle_listener_error)
42
+ if batch:
43
+ process_batch(batch)
44
+ stats, errors = listener.stats, listener.errors
45
+ if errors or (listener.stats['batches'] % 5) == 0:
46
+ print("Stats:")
47
+ for stat in sorted(stats.keys()):
48
+ print(" {:<20s} {:>10n}".format(stat, stats[stat]))
49
+ if errors:
50
+ print("ERRORS:")
51
+ for error in sorted(errors.keys()):
52
+ print(" {:<20s} {:>10n}".format(error, errors[error]))
53
+ listener.clear_errors()
54
+
55
+ listener.reset_counters()
56
+ """
57
+
58
+ # Copyright (C) Oliver 'kfsone' Smith <oliver@kfs.org> 2015
59
+ #
60
+ # Conditional permission to copy, modify, refactor or use this
61
+ # code is granted so long as attribution to the original author
62
+ # is included.
63
+
64
+ import json
65
+ import time
66
+ import zlib
67
+ import zmq
68
+
69
+ from collections import defaultdict
70
+ from collections import namedtuple
71
+
72
+
73
class MarketPrice(namedtuple('MarketPrice', (
    'system', 'station', 'item',
    'buy', 'sell',
    'demand', 'supply',
    'timestamp',
    'uploader', 'software', 'version',
))):
    """
    One market record from the EDDN firehose: where it was seen
    (system/station/item), the prices and quantities, when it was
    reported, and which uploader/software reported it.
    """
87
+
88
+
89
class Listener:
    """
    Provides an object that will listen to the Elite Dangerous Data Network
    firehose and capture messages for later consumption.
    
    Rather than returning individual updates, prices are captured across
    a window of between minBatchTime and maxBatchTime seconds and
    returned to the caller in batches.
    
    Attributes:
        zmqContext          Context this object is associated with,
        minBatchTime        Allow at least this long for a batch (seconds),
        maxBatchTime        Don't allow a batch to run longer than this (seconds),
        reconnectTimeout    Reconnect the socket after this long with no data (seconds),
        burstLimit          Read a maximum of this many messages between
                            timer checks
        
        subscriber          ZMQ socket we're using
        stats               Counters of nominal events
        errors              Counters of off-nominal events
        lastRecv            time of the last receive (or 0)
    """
    
    # The EDDN relay endpoint and the only schema this listener accepts.
    uri = 'tcp://eddn-relay.elite-markets.net:9500'
    supportedSchema = 'http://schemas.elite-markets.net/eddn/commodity/1'
    
    def __init__(
        self,
        zmqContext=None,
        minBatchTime=5.,        # seconds
        maxBatchTime=10.,       # seconds
        reconnectTimeout=180.,  # seconds
        burstLimit=200,
    ):
        assert burstLimit > 0
        if not zmqContext:
            zmqContext = zmq.Context()
        self.zmqContext = zmqContext
        self.subscriber = None
        
        self.minBatchTime = minBatchTime
        self.maxBatchTime = maxBatchTime
        self.reconnectTimeout = reconnectTimeout
        self.burstLimit = burstLimit
        
        self.reset_counters()
        self.connect()
    
    
    def connect(self):
        """
        (Re-)establish the SUB socket connection to the EDDN relay,
        closing any previous socket first.
        """
        # tear down any previous connection first
        if self.subscriber:
            self.subscriber.close()
            del self.subscriber
        self.subscriber = newsub = self.zmqContext.socket(zmq.SUB)
        newsub.setsockopt(zmq.SUBSCRIBE, b"")
        newsub.connect(self.uri)
        self.lastRecv = time.time()
        self.lastJsData = None
    
    
    def disconnect(self):
        # NOTE(review): this only drops our reference -- it does not
        # close the socket, and the 'subscriber' attribute no longer
        # exists afterwards, so connect() must be called before the
        # listener is used again. Confirm this is intentional.
        del self.subscriber
    
    
    def clear_errors(self):
        """Reset the off-nominal (error) counters."""
        self.errors = defaultdict(int)
    
    
    def reset_counters(self):
        """Reset both the nominal and the error counters."""
        self.clear_errors()
        self.stats = defaultdict(int)
    
    
    def wait_for_data(self, softCutoff, hardCutoff):
        """
        Poll the subscriber socket for incoming data.
        
        Waits until data is available or the earlier of softCutoff /
        hardCutoff (absolute times, seconds) is reached, reconnecting
        first if nothing has been received for reconnectTimeout seconds.
        
        Returns True if data is waiting to be read, False on timeout.
        """
        
        now = time.time()
        
        cutoff = min(softCutoff, hardCutoff)
        if self.lastRecv < now - self.reconnectTimeout:
            # lastRecv is 0 only before the first connect; don't count
            # the initial connection as a reconnect.
            if self.lastRecv:
                self.errors['reconnects'] += 1
            self.connect()
            now = time.time()
        
        nextCutoff = min(now + self.minBatchTime, cutoff)
        if now > nextCutoff:
            return False
        
        timeout = (nextCutoff - now) * 1000  # milliseconds
        
        # Wait for an event
        events = self.subscriber.poll(timeout=timeout)
        if events == 0:
            return False
        return True
    
    
    def get_batch(self, onerror=None):
        """
        Greedily collect deduped prices from the firehose over a
        period of between minBatchTime and maxBatchTime, with
        built-in auto-reconnection if there is nothing from the
        firehose for a period of time.
        
        As json data is decoded, it is stored in self.lastJsData.
        
        Parameters:
            onerror
                None or a function/lambda that takes an error
                string and deals with it.
        
        Returns:
            A list of MarketPrice entries based on the data read.
            Prices are deduped per System+Station+Item, so that
            if two entries are received for the same combination,
            only the most recent with the newest timestamp is kept.
        
        Errors:
            Errors are accumulated in the .errors dictionary. If you
            supply an 'onerror' function they are also passed to it.
        """
        now = time.time()
        hardCutoff = now + self.maxBatchTime
        softCutoff = now + self.minBatchTime
        
        # hoists: bind to locals once, outside the hot receive loop
        supportedSchema = self.supportedSchema
        sub = self.subscriber
        stats, errors = self.stats, self.errors
        
        # Prices are stored as a dictionary of
        # (sys,stn,item) => [MarketPrice]
        # The list thing is a trick to save us having to do
        # the dictionary lookup twice.
        batch = defaultdict(list)
        
        while self.wait_for_data(softCutoff, hardCutoff):
            # When wait_for_data returns True, there is some data waiting,
            # possibly multiple messages. At this point we can afford to
            # suck down whatever is waiting in "nonblocking" mode until
            # we reach the burst limit or we get EAGAIN.
            bursts = 0
            for _ in range(self.burstLimit):
                self.lastJsData = None
                try:
                    zdata = sub.recv(flags=zmq.NOBLOCK, copy=False)
                    stats['recvs'] += 1
                except zmq.error.Again:
                    break
                # NOTE(review): self.lastRecv is never refreshed here (it
                # is only assigned in connect()), so wait_for_data() will
                # force a reconnect every reconnectTimeout seconds even
                # while data is flowing -- confirm whether that's intended.
                
                bursts += 1
                
                try:
                    jsdata = zlib.decompress(zdata)
                except Exception as e:
                    errors['deflate'] += 1
                    if onerror:
                        onerror("zlib.decompress: %s: %s"%(type(e), e))
                    continue
                
                bdata = jsdata.decode()
                
                try:
                    data = json.loads(bdata)
                except ValueError as e:
                    errors['loads'] += 1
                    if onerror:
                        onerror("json.loads: %s: %s"%(type(e), e))
                    continue
                
                self.lastJsData = jsdata
                
                try:
                    schema = data["$schemaRef"]
                except KeyError:
                    errors['schemaref'] += 1
                    if onerror:
                        onerror("missing schema ref")
                    continue
                if schema != supportedSchema:
                    errors['schema'] += 1
                    if onerror:
                        onerror("unsupported schema: "+schema)
                    continue
                try:
                    header = data["header"]
                    message = data["message"]
                    system = message["systemName"].upper()
                    station = message["stationName"].upper()
                    item = message["itemName"].upper()
                    buy = int(message["buyPrice"])
                    sell = int(message["sellPrice"])
                    demand = message["demand"]
                    supply = message["stationStock"]
                    timestamp = message["timestamp"]
                    uploader = header["uploaderID"]
                    software = header["softwareName"]
                    swVersion = header["softwareVersion"]
                except (KeyError, ValueError) as e:
                    errors['json'] += 1
                    if onerror:
                        onerror("invalid json: %s: %s"%(type(e), e))
                    continue
                
                # We've received real data.
                stats['prices'] += 1
                
                # Normalize timestamps to "YYYY-MM-DD HH:MM:SS" form.
                timestamp = timestamp.replace("T"," ").replace("+00:00","")
                
                # We'll get either an empty list or a list containing
                # a MarketPrice. This saves us having to do the expensive
                # index operation twice.
                oldEntryList = batch[(system, station, item)]
                if oldEntryList:
                    if oldEntryList[0].timestamp > timestamp:
                        stats['timeseq'] += 1
                        continue
                    # NOTE(review): the next line is a no-op expression --
                    # it only touches the defaultdict key. It looks like
                    # an increment was intended; confirm.
                    stats['timeseq']
                else:
                    # Add a blank entry to make the list size > 0
                    oldEntryList.append(None)
                
                # Here we're replacing the contents of the list.
                # This simple array lookup is several hundred times less
                # expensive than looking up a potentially large dictionary
                # by STATION/SYSTEM:ITEM...
                oldEntryList[0] = MarketPrice(
                    system, station, item,
                    buy, sell,
                    demand, supply,
                    timestamp,
                    uploader, software, swVersion,
                )
            
            # For the edge-case where we wait 4.999 seconds and then
            # get a burst of data: stick around a little longer.
            if bursts >= self.burstLimit:
                stats['numburst'] += 1
                stats['maxburst'] = max(stats['maxburst'], bursts)
                softCutoff = min(softCutoff, time.time() + 0.5)
        
        # to get average batch length, divide batchlen/batches.
        # (stats['prices'] counts every message received pre-dedup,
        # so prices/batches would over-count duplicates.)
        stats['batches'] += 1
        if not batch:
            stats['emptybatches'] += 1
        else:
            stats['batchlen'] += len(batch)
        
        return [ entry[0] for entry in batch.values() ]