prosperity3bt-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- prosperity3bt/__init__.py +0 -0
- prosperity3bt/__main__.py +302 -0
- prosperity3bt/data.py +125 -0
- prosperity3bt/datamodel.py +153 -0
- prosperity3bt/file_reader.py +44 -0
- prosperity3bt/models.py +103 -0
- prosperity3bt/resources/__init__.py +0 -0
- prosperity3bt/resources/round0/__init__.py +0 -0
- prosperity3bt/resources/round0/prices_round_0_day_-2.csv +4001 -0
- prosperity3bt/resources/round0/trades_round_0_day_-2_nn.csv +1089 -0
- prosperity3bt/runner.py +303 -0
- prosperity3bt-0.1.0.dist-info/LICENSE +21 -0
- prosperity3bt-0.1.0.dist-info/METADATA +129 -0
- prosperity3bt-0.1.0.dist-info/RECORD +17 -0
- prosperity3bt-0.1.0.dist-info/WHEEL +5 -0
- prosperity3bt-0.1.0.dist-info/entry_points.txt +2 -0
- prosperity3bt-0.1.0.dist-info/top_level.txt +1 -0
prosperity3bt/__init__.py
File without changes

prosperity3bt/__main__.py
ADDED
@@ -0,0 +1,302 @@
import sys
import webbrowser
from argparse import ArgumentParser
from collections import defaultdict
from datetime import datetime
from functools import partial, reduce
from http.server import HTTPServer, SimpleHTTPRequestHandler
from importlib import import_module, metadata, reload
from pathlib import Path
from typing import Any, Optional

from prosperity3bt.data import has_day_data
from prosperity3bt.file_reader import FileReader, FileSystemReader, PackageResourcesReader
from prosperity3bt.models import BacktestResult
from prosperity3bt.runner import run_backtest


def parse_algorithm(algorithm: str) -> Any:
    algorithm_path = Path(algorithm).expanduser().resolve()
    if not algorithm_path.is_file():
        raise ModuleNotFoundError(f"{algorithm_path} is not a file")

    sys.path.append(str(algorithm_path.parent))
    return import_module(algorithm_path.stem)


def parse_data(data_root: Optional[str]) -> FileReader:
    if data_root is not None:
        return FileSystemReader(Path(data_root).expanduser().resolve())
    else:
        return PackageResourcesReader()


def parse_days(file_reader: FileReader, days: list[str]) -> list[tuple[int, int]]:
    parsed_days = []

    for arg in days:
        if "-" in arg:
            round_num, day_num = map(int, arg.split("-", 1))

            if not has_day_data(file_reader, round_num, day_num):
                print(f"Warning: no data found for round {round_num} day {day_num}")
                continue

            parsed_days.append((round_num, day_num))
        else:
            round_num = int(arg)

            parsed_days_in_round = []
            for day_num in range(-5, 6):
                if has_day_data(file_reader, round_num, day_num):
                    parsed_days_in_round.append((round_num, day_num))

            if len(parsed_days_in_round) == 0:
                print(f"Warning: no data found for round {round_num}")
                continue

            parsed_days.extend(parsed_days_in_round)

    if len(parsed_days) == 0:
        print("Error: did not find data for any requested round/day")
        sys.exit(1)

    return parsed_days


def parse_out(out: Optional[str], no_out: bool) -> Optional[Path]:
    if out is not None:
        return Path(out).expanduser().resolve()

    if no_out:
        return None

    timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    return Path.cwd() / "backtests" / f"{timestamp}.log"


def print_day_summary(result: BacktestResult) -> None:
    last_timestamp = result.activity_logs[-1].timestamp

    product_lines = []
    total_profit = 0

    for row in reversed(result.activity_logs):
        if row.timestamp != last_timestamp:
            break

        product = row.columns[2]
        profit = row.columns[-1]

        product_lines.append(f"{product}: {profit:,.0f}")
        total_profit += profit

    print(*reversed(product_lines), sep="\n")
    print(f"Total profit: {total_profit:,.0f}")


def merge_results(
    a: BacktestResult, b: BacktestResult, merge_profit_loss: bool, merge_timestamps: bool
) -> BacktestResult:
    sandbox_logs = a.sandbox_logs[:]
    activity_logs = a.activity_logs[:]
    trades = a.trades[:]

    if merge_timestamps:
        a_last_timestamp = a.activity_logs[-1].timestamp
        timestamp_offset = a_last_timestamp + 100
    else:
        timestamp_offset = 0

    sandbox_logs.extend([row.with_offset(timestamp_offset) for row in b.sandbox_logs])
    trades.extend([row.with_offset(timestamp_offset) for row in b.trades])

    if merge_profit_loss:
        profit_loss_offsets = defaultdict(float)
        for row in reversed(a.activity_logs):
            if row.timestamp != a_last_timestamp:
                break

            profit_loss_offsets[row.columns[2]] = row.columns[-1]

        activity_logs.extend(
            [row.with_offset(timestamp_offset, profit_loss_offsets[row.columns[2]]) for row in b.activity_logs]
        )
    else:
        activity_logs.extend([row.with_offset(timestamp_offset, 0) for row in b.activity_logs])

    return BacktestResult(a.round_num, a.day_num, sandbox_logs, activity_logs, trades)


def write_output(output_file: Path, merged_results: BacktestResult) -> None:
    output_file.parent.mkdir(parents=True, exist_ok=True)
    with output_file.open("w+", encoding="utf-8") as file:
        file.write("Sandbox logs:\n")
        for row in merged_results.sandbox_logs:
            file.write(str(row))

        file.write("\n\n\nActivities log:\n")
        file.write(
            "day;timestamp;product;bid_price_1;bid_volume_1;bid_price_2;bid_volume_2;bid_price_3;bid_volume_3;ask_price_1;ask_volume_1;ask_price_2;ask_volume_2;ask_price_3;ask_volume_3;mid_price;profit_and_loss\n"
        )
        file.write("\n".join(map(str, merged_results.activity_logs)))

        file.write("\n\n\n\n\nTrade History:\n")
        file.write("[\n")
        file.write(",\n".join(map(str, merged_results.trades)))
        file.write("]")


def print_overall_summary(results: list[BacktestResult]) -> None:
    print("Profit summary:")

    total_profit = 0
    for result in results:
        last_timestamp = result.activity_logs[-1].timestamp

        profit = 0
        for row in reversed(result.activity_logs):
            if row.timestamp != last_timestamp:
                break

            profit += row.columns[-1]

        print(f"Round {result.round_num} day {result.day_num}: {profit:,.0f}")
        total_profit += profit

    print(f"Total profit: {total_profit:,.0f}")


class HTTPRequestHandler(SimpleHTTPRequestHandler):
    def end_headers(self) -> None:
        self.send_header("Access-Control-Allow-Origin", "*")
        return super().end_headers()

    def log_message(self, format: str, *args: Any) -> None:
        return


def open_visualizer(output_file: Path, no_requests: int) -> None:
    http_handler = partial(HTTPRequestHandler, directory=output_file.parent)
    http_server = HTTPServer(("localhost", 0), http_handler)

    webbrowser.open(
        f"https://jmerle.github.io/imc-prosperity-3-visualizer/?open=http://localhost:{http_server.server_port}/{output_file.name}"
    )

    # Chrome makes 2 requests: 1 OPTIONS request to check for CORS headers and 1 GET request to get the data
    # Some users reported their browser only makes 1 request, which is covered by the --vis-requests option
    for _ in range(no_requests):
        http_server.handle_request()


def format_path(path: Path) -> str:
    cwd = Path.cwd()
    if path.is_relative_to(cwd):
        return str(path.relative_to(cwd))
    else:
        return str(path)


def main() -> None:
    parser = ArgumentParser(prog="prosperity3bt", description="Run a backtest.")
    parser.add_argument("algorithm", type=str, help="path to the Python file containing the algoritm to backtest")
    parser.add_argument(
        "days",
        type=str,
        nargs="+",
        help="the days to backtest on (<round>-<day> for a single day, <round> for all days in a round)",
    )
    parser.add_argument("--merge-pnl", action="store_true", help="merge profit and loss across days")
    parser.add_argument("--vis", action="store_true", help="open backtest result in visualizer when done")
    parser.add_argument("--out", type=str, help="path to save output log to (defaults to backtests/<timestamp>.log)")
    parser.add_argument(
        "--data",
        type=str,
        help="path to data directory (must look similar in structure to https://github.com/jmerle/imc-prosperity-3-backtester/tree/master/prosperity3bt/resources)",
    )
    parser.add_argument("--print", action="store_true", help="print the trader's output to stdout while it's running")
    parser.add_argument(
        "--no-trades-matching", action="store_true", help="disable matching orders against market trades"
    )
    parser.add_argument("--no-out", action="store_true", help="skip saving the output log to a file")
    parser.add_argument("--no-progress", action="store_true", help="don't show progress bars")
    parser.add_argument(
        "--vis-requests",
        type=int,
        default=2,
        help="number of requests the visualizer is expected to make to the backtester's HTTP server when using --vis",
    )
    parser.add_argument(
        "--original-timestamps",
        action="store_true",
        help="preserve original timestamps in output log rather than making them increase across days",
    )
    # parser.add_argument(
    #     "--no-names", action="store_true", help="don't use de-anonymized trades data, even if it exists"
    # )
    parser.add_argument("-v", "--version", action="version", version=f"%(prog)s {metadata.version(__package__)}")

    args = parser.parse_args()

    if args.vis and args.no_out:
        print("Error: --vis and --no-out are mutually exclusive")
        sys.exit(1)

    if args.out is not None and args.no_out:
        print("Error: --out and --no-out are mutually exclusive")
        sys.exit(1)

    try:
        trader_module = parse_algorithm(args.algorithm)
    except ModuleNotFoundError as e:
        print(f"{args.algorithm} is not a valid algorithm file: {e}")
        sys.exit(1)

    if not hasattr(trader_module, "Trader"):
        print(f"{args.algorithm} does not expose a Trader class")
        sys.exit(1)

    file_reader = parse_data(args.data)
    days = parse_days(file_reader, args.days)
    output_file = parse_out(args.out, args.no_out)

    show_progress_bars = not args.no_progress and not args.print

    results = []
    for round_num, day_num in days:
        print(f"Backtesting {args.algorithm} on round {round_num} day {day_num}")

        reload(trader_module)

        result = run_backtest(
            trader_module.Trader(),
            file_reader,
            round_num,
            day_num,
            args.print,
            args.no_trades_matching,
            True,  # args.no_names,
            show_progress_bars,
        )

        print_day_summary(result)
        if len(days) > 1:
            print()

        results.append(result)

    if len(days) > 1:
        print_overall_summary(results)

    if output_file is not None:
        merged_results = reduce(lambda a, b: merge_results(a, b, args.merge_pnl, not args.original_timestamps), results)
        write_output(output_file, merged_results)
        print(f"\nSuccessfully saved backtest results to {format_path(output_file)}")

        if args.vis:
            open_visualizer(output_file, args.vis_requests)


if __name__ == "__main__":
    main()
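For reference, the day arguments accepted by the CLI are resolved by parse_days above. A minimal sketch (assuming the wheel is installed and only the bundled round 0, day -2 data is present, as in the resources listing) of how the two argument forms expand:

from prosperity3bt.__main__ import parse_days
from prosperity3bt.file_reader import PackageResourcesReader

reader = PackageResourcesReader()

# "<round>" expands to every day in that round that has price data.
print(parse_days(reader, ["0"]))     # [(0, -2)] with only the bundled data
# "<round>-<day>" selects one day; splitting on the first "-" keeps negative day numbers intact.
print(parse_days(reader, ["0--2"]))  # [(0, -2)]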
prosperity3bt/data.py
ADDED
@@ -0,0 +1,125 @@
from collections import defaultdict
from dataclasses import dataclass
from typing import Optional

from prosperity3bt.datamodel import Symbol, Trade
from prosperity3bt.file_reader import FileReader

LIMITS = {
    "RAINFOREST_RESIN": 50,
    "KELP": 50,
}


@dataclass
class PriceRow:
    day: int
    timestamp: int
    product: Symbol
    bid_prices: list[int]
    bid_volumes: list[int]
    ask_prices: list[int]
    ask_volumes: list[int]
    mid_price: float
    profit_loss: float


def get_column_values(columns: list[str], indices: list[int]) -> list[int]:
    values = []

    for index in indices:
        value = columns[index]
        if value == "":
            break

        values.append(int(value))

    return values


@dataclass
class BacktestData:
    round_num: int
    day_num: int

    prices: dict[int, dict[Symbol, PriceRow]]
    trades: dict[int, dict[Symbol, list[Trade]]]
    products: list[Symbol]
    profit_loss: dict[Symbol, int]


def create_backtest_data(round_num: int, day_num: int, prices: list[PriceRow], trades: list[Trade]) -> BacktestData:
    prices_by_timestamp: dict[int, dict[Symbol, PriceRow]] = defaultdict(dict)
    for row in prices:
        prices_by_timestamp[row.timestamp][row.product] = row

    trades_by_timestamp: dict[int, dict[Symbol, list[Trade]]] = defaultdict(lambda: defaultdict(list))
    for trade in trades:
        trades_by_timestamp[trade.timestamp][trade.symbol].append(trade)

    products = sorted(set(row.product for row in prices))
    profit_loss = {product: 0 for product in products}

    return BacktestData(
        round_num=round_num,
        day_num=day_num,
        prices=prices_by_timestamp,
        trades=trades_by_timestamp,
        products=products,
        profit_loss=profit_loss,
    )


def has_day_data(file_reader: FileReader, round_num: int, day_num: int) -> bool:
    with file_reader.file([f"round{round_num}", f"prices_round_{round_num}_day_{day_num}.csv"]) as file:
        return file is not None


def read_day_data(file_reader: FileReader, round_num: int, day_num: int, no_names: bool) -> Optional[BacktestData]:
    prices = []
    with file_reader.file([f"round{round_num}", f"prices_round_{round_num}_day_{day_num}.csv"]) as file:
        if file is None:
            return None

        for line in file.read_text(encoding="utf-8").splitlines()[1:]:
            columns = line.split(";")

            prices.append(
                PriceRow(
                    day=int(columns[0]),
                    timestamp=int(columns[1]),
                    product=columns[2],
                    bid_prices=get_column_values(columns, [3, 5, 7]),
                    bid_volumes=get_column_values(columns, [4, 6, 8]),
                    ask_prices=get_column_values(columns, [9, 11, 13]),
                    ask_volumes=get_column_values(columns, [10, 12, 14]),
                    mid_price=float(columns[15]),
                    profit_loss=float(columns[16]),
                )
            )

    trades = []
    trades_suffixes = ["nn"] if no_names else ["wn", "nn"]

    for suffix in trades_suffixes:
        with file_reader.file([f"round{round_num}", f"trades_round_{round_num}_day_{day_num}_{suffix}.csv"]) as file:
            if file is None:
                continue

            for line in file.read_text(encoding="utf-8").splitlines()[1:]:
                columns = line.split(";")

                trades.append(
                    Trade(
                        symbol=columns[3],
                        price=int(float(columns[5])),
                        quantity=int(columns[6]),
                        buyer=columns[1],
                        seller=columns[2],
                        timestamp=int(columns[0]),
                    )
                )

        break

    return create_backtest_data(round_num, day_num, prices, trades)
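A minimal sketch of how this module is typically driven, assuming the wheel is installed and using the bundled round 0, day -2 data:

from prosperity3bt.data import read_day_data
from prosperity3bt.file_reader import PackageResourcesReader

data = read_day_data(PackageResourcesReader(), round_num=0, day_num=-2, no_names=True)
assert data is not None  # the round 0 / day -2 CSVs ship inside the wheel

# Prices and trades are keyed by timestamp, then by symbol.
first_timestamp = min(data.prices)
for symbol, row in data.prices[first_timestamp].items():
    print(symbol, row.bid_prices, row.ask_prices, row.mid_price)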
prosperity3bt/datamodel.py
ADDED
@@ -0,0 +1,153 @@
import json
from json import JSONEncoder
from typing import Dict, List

import jsonpickle

Time = int
Symbol = str
Product = str
Position = int
UserId = str
ObservationValue = int


class Listing:
    def __init__(self, symbol: Symbol, product: Product, denomination: Product):
        self.symbol = symbol
        self.product = product
        self.denomination = denomination


class ConversionObservation:
    def __init__(
        self,
        bidPrice: float,
        askPrice: float,
        transportFees: float,
        exportTariff: float,
        importTariff: float,
        sugarPrice: float,
        sunlightIndex: float,
    ):
        self.bidPrice = bidPrice
        self.askPrice = askPrice
        self.transportFees = transportFees
        self.exportTariff = exportTariff
        self.importTariff = importTariff
        self.sugarPrice = sugarPrice
        self.sunlightIndex = sunlightIndex


class Observation:
    def __init__(
        self,
        plainValueObservations: Dict[Product, ObservationValue],
        conversionObservations: Dict[Product, ConversionObservation],
    ) -> None:
        self.plainValueObservations = plainValueObservations
        self.conversionObservations = conversionObservations

    def __str__(self) -> str:
        return (
            "(plainValueObservations: "
            + jsonpickle.encode(self.plainValueObservations)
            + ", conversionObservations: "
            + jsonpickle.encode(self.conversionObservations)
            + ")"
        )


class Order:
    def __init__(self, symbol: Symbol, price: int, quantity: int) -> None:
        self.symbol = symbol
        self.price = price
        self.quantity = quantity

    def __str__(self) -> str:
        return "(" + self.symbol + ", " + str(self.price) + ", " + str(self.quantity) + ")"

    def __repr__(self) -> str:
        return "(" + self.symbol + ", " + str(self.price) + ", " + str(self.quantity) + ")"


class OrderDepth:
    def __init__(self):
        self.buy_orders: Dict[int, int] = {}
        self.sell_orders: Dict[int, int] = {}


class Trade:
    def __init__(
        self, symbol: Symbol, price: int, quantity: int, buyer: UserId = None, seller: UserId = None, timestamp: int = 0
    ) -> None:
        self.symbol = symbol
        self.price: int = price
        self.quantity: int = quantity
        self.buyer = buyer
        self.seller = seller
        self.timestamp = timestamp

    def __str__(self) -> str:
        return (
            "("
            + self.symbol
            + ", "
            + self.buyer
            + " << "
            + self.seller
            + ", "
            + str(self.price)
            + ", "
            + str(self.quantity)
            + ", "
            + str(self.timestamp)
            + ")"
        )

    def __repr__(self) -> str:
        return (
            "("
            + self.symbol
            + ", "
            + self.buyer
            + " << "
            + self.seller
            + ", "
            + str(self.price)
            + ", "
            + str(self.quantity)
            + ", "
            + str(self.timestamp)
            + ")"
        )


class TradingState(object):
    def __init__(
        self,
        traderData: str,
        timestamp: Time,
        listings: Dict[Symbol, Listing],
        order_depths: Dict[Symbol, OrderDepth],
        own_trades: Dict[Symbol, List[Trade]],
        market_trades: Dict[Symbol, List[Trade]],
        position: Dict[Product, Position],
        observations: Observation,
    ):
        self.traderData = traderData
        self.timestamp = timestamp
        self.listings = listings
        self.order_depths = order_depths
        self.own_trades = own_trades
        self.market_trades = market_trades
        self.position = position
        self.observations = observations

    def toJSON(self):
        return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True)


class ProsperityEncoder(JSONEncoder):
    def default(self, o):
        return o.__dict__
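To illustrate the types above, a small sketch with made-up values (the volume sign conventions noted in the comments are assumptions about Prosperity data, not something this file enforces):

from prosperity3bt.datamodel import Order, OrderDepth, Trade

depth = OrderDepth()
depth.buy_orders = {9998: 10, 9996: 5}       # price -> volume on the buy side
depth.sell_orders = {10002: -10, 10004: -5}  # sell-side volumes are usually stored as negative numbers

best_bid = max(depth.buy_orders)
best_ask = min(depth.sell_orders)

order = Order("RAINFOREST_RESIN", best_bid + 1, 10)  # a positive quantity is treated as a buy
trade = Trade("RAINFOREST_RESIN", 10000, 3, buyer="SUBMISSION", seller="", timestamp=0)
print(order, trade)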
prosperity3bt/file_reader.py
ADDED
@@ -0,0 +1,44 @@
from abc import abstractmethod
from contextlib import contextmanager
from importlib import resources
from pathlib import Path
from typing import ContextManager, Optional


@contextmanager
def wrap_in_context_manager(value):
    yield value


class FileReader:
    @abstractmethod
    def file(self, path_parts: list[str]) -> ContextManager[Optional[Path]]:
        """Given a path to a file, yields a single Path object to the file or None if the file does not exist."""
        raise NotImplementedError()


class FileSystemReader(FileReader):
    def __init__(self, root: Path) -> None:
        self._root = root

    def file(self, path_parts: list[str]) -> ContextManager[Optional[Path]]:
        file = self._root
        for part in path_parts:
            file = file / part

        if not file.is_file():
            return wrap_in_context_manager(None)

        return wrap_in_context_manager(file)


class PackageResourcesReader(FileReader):
    def file(self, path_parts: list[str]) -> ContextManager[Optional[Path]]:
        try:
            container = resources.files(f"prosperity3bt.resources.{'.'.join(path_parts[:-1])}")
            if not (container / path_parts[-1]).is_file():
                return wrap_in_context_manager(None)

            return resources.as_file(container / path_parts[-1])
        except Exception:
            return wrap_in_context_manager(None)
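As a usage sketch of the two readers above (the ./my-data directory is only an example path): both yield a Path, or None, through a context manager, which is exactly how data.py opens its CSV files.

from pathlib import Path

from prosperity3bt.file_reader import FileSystemReader, PackageResourcesReader

# Data bundled inside the wheel.
with PackageResourcesReader().file(["round0", "prices_round_0_day_-2.csv"]) as path:
    if path is not None:
        print(path.read_text(encoding="utf-8").splitlines()[0])  # prints the CSV header row

# A local directory laid out like prosperity3bt/resources (see the --data option in __main__.py).
with FileSystemReader(Path("./my-data")).file(["round0", "prices_round_0_day_-2.csv"]) as path:
    print("found" if path is not None else "missing")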