dexscreen-0.0.1-py3-none-any.whl
- dexscreen/__init__.py +31 -0
- dexscreen/api/__init__.py +3 -0
- dexscreen/api/client.py +672 -0
- dexscreen/config/__init__.py +0 -0
- dexscreen/core/__init__.py +27 -0
- dexscreen/core/http.py +460 -0
- dexscreen/core/models.py +106 -0
- dexscreen/stream/__init__.py +3 -0
- dexscreen/stream/polling.py +462 -0
- dexscreen/utils/__init__.py +4 -0
- dexscreen/utils/browser_selector.py +57 -0
- dexscreen/utils/filters.py +226 -0
- dexscreen/utils/ratelimit.py +65 -0
- dexscreen-0.0.1.dist-info/METADATA +278 -0
- dexscreen-0.0.1.dist-info/RECORD +17 -0
- dexscreen-0.0.1.dist-info/WHEEL +4 -0
- dexscreen-0.0.1.dist-info/licenses/LICENSE +21 -0
dexscreen/stream/polling.py
@@ -0,0 +1,462 @@

```python
"""
Unified streaming interface
"""

import asyncio
import logging
from abc import ABC, abstractmethod
from typing import Callable, Optional

from ..core.models import TokenPair

logger = logging.getLogger(__name__)


class StreamingClient(ABC):
    """Base class for streaming data"""

    def __init__(self):
        self.subscriptions: dict[str, set[Callable]] = {}
        self.running = False
        self.callback_errors: dict[str, int] = {}  # Track errors per subscription

    @abstractmethod
    async def connect(self):
        """Establish connection"""
        pass

    @abstractmethod
    async def disconnect(self):
        """Close connection"""
        pass

    @abstractmethod
    async def subscribe(
        self,
        chain_id: str,
        address: str,
        callback: Callable[[TokenPair], None],
        interval: Optional[float] = None,
    ):
        """Subscribe to pair updates"""
        pass

    async def unsubscribe(self, chain_id: str, address: str, callback: Optional[Callable] = None):
        """Unsubscribe from pair updates"""
        key = f"{chain_id}:{address}"
        if key in self.subscriptions:
            if callback:
                self.subscriptions[key].discard(callback)
                if not self.subscriptions[key]:
                    del self.subscriptions[key]
                    await self._on_last_unsubscription(chain_id, address)
            else:
                del self.subscriptions[key]
                await self._on_last_unsubscription(chain_id, address)

    async def _emit(self, chain_id: str, address: str, pair: TokenPair):
        """Emit update to all subscribers"""
        key = f"{chain_id}:{address}"
        if key in self.subscriptions:
            for callback in self.subscriptions[key].copy():
                try:
                    if asyncio.iscoroutinefunction(callback):
                        await callback(pair)
                    else:
                        callback(pair)
                except Exception as e:
                    # Log the error but continue processing other callbacks
                    logger.exception("Callback error for subscription %s: %s", key, type(e).__name__)
                    # Track error count
                    if key not in self.callback_errors:
                        self.callback_errors[key] = 0
                    self.callback_errors[key] += 1

    @abstractmethod
    async def _on_new_subscription(self, chain_id: str, address: str):
        """Called when first subscriber for a pair"""
        pass

    @abstractmethod
    async def _on_last_unsubscription(self, chain_id: str, address: str):
        """Called when last subscriber removed"""
        pass

    def get_callback_error_count(self, chain_id: Optional[str] = None, address: Optional[str] = None) -> int:
        """Get the number of callback errors for a specific subscription or all subscriptions"""
        if chain_id and address:
            key = f"{chain_id}:{address}"
            return self.callback_errors.get(key, 0)
        return sum(self.callback_errors.values())


class PollingStream(StreamingClient):
    """Polling implementation with streaming interface"""

    def __init__(self, dexscreener_client, interval: float = 1.0, filter_changes: bool = True):
        super().__init__()
        self.dexscreener_client = dexscreener_client  # The main DexscreenerClient instance
        self.interval = interval  # Default interval
        self.filter_changes = filter_changes  # Whether to filter for changes
        self.tasks: dict[str, asyncio.Task] = {}
        self._cache: dict[str, Optional[TokenPair]] = {}

        # Data structures for chain-based polling (max 30 per chain)
        self._chain_subscriptions: dict[str, set[str]] = {}  # chain -> set of addresses
        self._chain_tasks: dict[str, asyncio.Task] = {}  # chain -> polling task
        self._subscription_intervals: dict[str, float] = {}  # subscription_key -> interval
        self._chain_intervals: dict[str, float] = {}  # chain -> minimum interval

        # Token subscription data structures
        self._token_subscriptions: dict[str, set[Callable]] = {}  # chain:token_address -> set of callbacks
        self._token_tasks: dict[str, asyncio.Task] = {}  # chain:token_address -> polling task
        self._token_intervals: dict[str, float] = {}  # chain:token_address -> interval

    async def connect(self):
        """Start streaming service"""
        self.running = True

    async def disconnect(self):
        """Stop all polling tasks"""
        self.running = False
        for task in self.tasks.values():
            task.cancel()
        self.tasks.clear()

        # Stop chain polling tasks
        for task in self._chain_tasks.values():
            task.cancel()
        self._chain_tasks.clear()
        self._chain_subscriptions.clear()

        # Stop token polling tasks
        for task in self._token_tasks.values():
            task.cancel()
        self._token_tasks.clear()
        self._token_subscriptions.clear()
        self._token_intervals.clear()

    async def subscribe(
        self,
        chain_id: str,
        address: str,
        callback: Callable[[TokenPair], None],
        interval: Optional[float] = None,
    ):
        """Subscribe to pair updates"""
        key = f"{chain_id}:{address}"
        if interval is None:
            interval = self.interval  # Use default if not specified

        # Store the interval for this subscription
        self._subscription_intervals[key] = interval

        if key not in self.subscriptions:
            self.subscriptions[key] = set()
            await self._on_new_subscription(chain_id, address)
        self.subscriptions[key].add(callback)

    async def _on_new_subscription(self, chain_id: str, address: str):
        """Start polling for a new pair"""
        # Add to chain subscriptions
        if chain_id not in self._chain_subscriptions:
            self._chain_subscriptions[chain_id] = set()
        self._chain_subscriptions[chain_id].add(address)

        # Update chain interval to be the minimum of all subscriptions
        self._update_chain_interval(chain_id)

        # Restart chain polling task if needed
        await self._restart_chain_polling(chain_id)

    async def _on_last_unsubscription(self, chain_id: str, address: str):
        """Stop polling for a pair"""
        key = f"{chain_id}:{address}"
        if key in self._cache:
            del self._cache[key]

        # Remove interval data
        if key in self._subscription_intervals:
            del self._subscription_intervals[key]

        # Remove from chain subscriptions
        if chain_id in self._chain_subscriptions:
            self._chain_subscriptions[chain_id].discard(address)

            # If no more addresses for this chain, stop the chain task
            if not self._chain_subscriptions[chain_id]:
                del self._chain_subscriptions[chain_id]
                if chain_id in self._chain_tasks:
                    self._chain_tasks[chain_id].cancel()
                    del self._chain_tasks[chain_id]
                if chain_id in self._chain_intervals:
                    del self._chain_intervals[chain_id]
            else:
                # Update chain interval and restart polling
                self._update_chain_interval(chain_id)
                await self._restart_chain_polling(chain_id)

    def _update_chain_interval(self, chain_id: str):
        """Update the chain interval to be the minimum of all subscriptions"""
        if chain_id not in self._chain_subscriptions:
            return

        # Find the minimum interval for all subscriptions in this chain
        min_interval = float("inf")
        for address in self._chain_subscriptions[chain_id]:
            key = f"{chain_id}:{address}"
            if key in self._subscription_intervals:
                min_interval = min(min_interval, self._subscription_intervals[key])

        # Use default interval if no subscriptions found
        if min_interval == float("inf"):
            min_interval = self.interval

        self._chain_intervals[chain_id] = min_interval

    async def _restart_chain_polling(self, chain_id: str):
        """Restart polling for a chain with updated addresses"""
        # Cancel existing task if any
        if chain_id in self._chain_tasks:
            self._chain_tasks[chain_id].cancel()

        # Start new polling task for this chain
        if self._chain_subscriptions.get(chain_id):
            task = asyncio.create_task(self._poll_chain(chain_id))
            self._chain_tasks[chain_id] = task

    async def _poll_chain(self, chain_id: str):
        """Poll all pairs for a specific chain (max 30 per chain)"""
        import time

        next_poll_time = time.time()

        while self.running and chain_id in self._chain_subscriptions:
            # Get the current interval for this chain
            interval = self._chain_intervals.get(chain_id, self.interval)

            # Calculate next poll time at the beginning
            next_poll_time += interval

            # Create a task for fetching pairs (non-blocking)
            asyncio.create_task(self._batch_fetch_and_emit(chain_id))

            # Calculate how long to sleep to maintain fixed interval
            current_time = time.time()
            sleep_time = max(0, next_poll_time - current_time)

            if sleep_time > 0:
                await asyncio.sleep(sleep_time)
            else:
                # If we're behind schedule, adjust but don't accumulate delay
                next_poll_time = current_time + interval

    async def _batch_fetch_and_emit(self, chain_id: str):
        """Fetch multiple pairs for a chain and emit updates"""
        import time

        if chain_id not in self._chain_subscriptions:
            return

        addresses = list(self._chain_subscriptions[chain_id])
        if not addresses:
            return

        # Check if we have too many subscriptions for a single chain
        max_subscriptions = 30
        if len(addresses) > max_subscriptions:
            logger.warning(
                "Subscription limit exceeded for chain %s: %d addresses requested, limiting to %d",
                chain_id,
                len(addresses),
                max_subscriptions,
            )
            addresses = addresses[:max_subscriptions]

        try:
            # Log API request time
            request_start = time.time()

            # Fetch all pairs in one request (max 30 due to limit above)
            pairs = await self.dexscreener_client.get_pairs_by_pairs_addresses_async(chain_id, addresses)

            request_end = time.time()
            request_duration = request_end - request_start

            logger.debug(
                "Batch fetch completed for chain %s: %d addresses, %d pairs returned in %.2fms",
                chain_id,
                len(addresses),
                len(pairs),
                request_duration * 1000,
            )

            # Create a mapping for quick lookup
            pairs_map = {pair.pair_address.lower(): pair for pair in pairs}

            # Process each address
            for address in addresses:
                key = f"{chain_id}:{address}"
                pair = pairs_map.get(address.lower())

                if pair:
                    # Add request timing info to the pair object for debugging
                    pair._request_duration = request_duration
                    pair._request_time = request_end

                    # Check if we should filter for changes
                    if self.filter_changes:
                        # Only emit if data changed
                        if self._has_changed(key, pair):
                            self._cache[key] = pair
                            await self._emit(chain_id, address, pair)
                    else:
                        # Raw mode: emit every update
                        await self._emit(chain_id, address, pair)

        except Exception:
            logger.exception(
                "Polling error for chain %s with %d addresses", chain_id, len(addresses) if addresses else 0
            )

    def _has_changed(self, key: str, new_pair: TokenPair) -> bool:
        """Check if pair data has changed"""
        old_pair = self._cache.get(key)
        if not old_pair:
            return True

        return (
            old_pair.price_usd != new_pair.price_usd
            or old_pair.price_native != new_pair.price_native
            or old_pair.volume.h24 != new_pair.volume.h24
            or old_pair.liquidity != new_pair.liquidity
        )

    def has_subscription(self, chain_id: str, address: str) -> bool:
        """Check if there's an active subscription for a pair"""
        key = f"{chain_id}:{address}"
        return key in self.subscriptions

    async def close(self):
        """Alias for disconnect"""
        await self.disconnect()

    # Token subscription methods
    async def subscribe_token(
        self,
        chain_id: str,
        token_address: str,
        callback: Callable[[list[TokenPair]], None],
        interval: float = 0.2,
    ):
        """Subscribe to all pairs of a token"""
        key = f"{chain_id}:{token_address}"
        if key not in self._token_subscriptions:
            self._token_subscriptions[key] = set()
            self._token_intervals[key] = interval
            # Start polling for this token
            await self._start_token_polling(chain_id, token_address)

        self._token_subscriptions[key].add(callback)

    async def unsubscribe_token(self, chain_id: str, token_address: str):
        """Unsubscribe from token updates"""
        key = f"{chain_id}:{token_address}"
        if key in self._token_subscriptions:
            del self._token_subscriptions[key]

        # Stop the polling task
        if key in self._token_tasks:
            self._token_tasks[key].cancel()
            del self._token_tasks[key]

        # Clear interval
        if key in self._token_intervals:
            del self._token_intervals[key]

    def has_token_subscription(self, chain_id: str, token_address: str) -> bool:
        """Check if there's an active token subscription"""
        key = f"{chain_id}:{token_address}"
        return key in self._token_subscriptions

    async def _start_token_polling(self, chain_id: str, token_address: str):
        """Start polling for a token"""
        key = f"{chain_id}:{token_address}"
        # Cancel existing task if any
        if key in self._token_tasks:
            self._token_tasks[key].cancel()

        # Start new polling task
        task = asyncio.create_task(self._poll_token(chain_id, token_address))
        self._token_tasks[key] = task

    async def _poll_token(self, chain_id: str, token_address: str):
        """Poll all pairs for a specific token"""
        import time

        key = f"{chain_id}:{token_address}"
        next_poll_time = time.time()

        while self.running and key in self._token_subscriptions:
            # Get the interval for this token
            interval = self._token_intervals.get(key, self.interval)

            # Calculate next poll time at the beginning
            next_poll_time += interval

            # Create a task for fetching (non-blocking)
            asyncio.create_task(self._fetch_and_emit_token(chain_id, token_address))

            # Calculate how long to sleep to maintain fixed interval
            current_time = time.time()
            sleep_time = max(0, next_poll_time - current_time)

            if sleep_time > 0:
                await asyncio.sleep(sleep_time)
            else:
                # If we're behind schedule, adjust but don't accumulate delay
                next_poll_time = current_time + interval

    async def _fetch_and_emit_token(self, chain_id: str, token_address: str):
        """Fetch all pairs for a token and emit updates"""
        import time

        key = f"{chain_id}:{token_address}"
        if key not in self._token_subscriptions:
            return

        try:
            # Log API request time
            request_start = time.time()

            # Fetch all pairs for this token
            pairs = await self.dexscreener_client.get_pairs_by_token_address_async(chain_id, token_address)

            request_end = time.time()
            request_duration = request_end - request_start

            logger.debug(
                "Token fetch completed for %s:%s - %d pairs returned in %.2fms",
                chain_id,
                token_address,
                len(pairs),
                request_duration * 1000,
            )

            # Add timing info for debugging
            for pair in pairs:
                pair._request_duration = request_duration
                pair._request_time = request_end

            # Emit to all callbacks
            for callback in self._token_subscriptions[key].copy():
                try:
                    if asyncio.iscoroutinefunction(callback):
                        await callback(pairs)
                    else:
                        callback(pairs)
                except Exception as e:
                    logger.exception("Token callback error for %s:%s - %s", chain_id, token_address, type(e).__name__)

        except Exception:
            logger.exception("Token polling error for %s:%s", chain_id, token_address)
```
dexscreen/utils/browser_selector.py
@@ -0,0 +1,57 @@

```python
"""
Simplified browser selector using only browser types
"""

import random
from typing import Optional

# Browser type market share
BROWSER_TYPES = [
    "chrome",  # ~65% market share
    "safari",  # ~20% market share
    "firefox",  # ~10% market share
    "edge",  # ~5% market share
]

# Browser list distributed by weight
WEIGHTED_BROWSERS = ["chrome"] * 65 + ["safari"] * 20 + ["firefox"] * 10 + ["edge"] * 5


def get_random_browser() -> str:
    """
    Get random browser type

    Returns:
        Browser type string
    """
    return random.choice(WEIGHTED_BROWSERS)


def get_browser(browser_type: Optional[str] = None) -> str:
    """
    Get browser type

    Args:
        browser_type: Specified browser type, if None then randomly select

    Returns:
        Browser type string
    """
    if browser_type and browser_type in BROWSER_TYPES:
        return browser_type
    return get_random_browser()


# Export simplified browser list
AVAILABLE_BROWSERS = BROWSER_TYPES


if __name__ == "__main__":
    # Test code

    # Test random selection
    for _i in range(10):
        browser = get_random_browser()

    for _browser_type in BROWSER_TYPES:
        pass
```