spells-mtg 0.0.5__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


spells/__init__.py CHANGED
@@ -1,5 +1,5 @@
  from spells import columns
  from spells import enums
- from spells.draft_data import summon
+ from spells.draft_data import summon, card_df
 
- __all__ = ["summon", "enums", "columns"]
+ __all__ = ["summon", "card_df", "enums", "columns"]
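For orientation, the updated `__all__` means both entry points can now be imported directly from the package root. A minimal sketch of 0.2.1 usage:

```python
# Both public entry points are exported from the package root as of 0.2.1.
from spells import summon, card_df
```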
spells/cards.py CHANGED
@@ -21,6 +21,8 @@ class CardAttr(StrEnum):
  TOUGHNESS = ColName.TOUGHNESS
  IS_BONUS_SHEET = ColName.IS_BONUS_SHEET
  IS_DFC = ColName.IS_DFC
+ ORACLE_TEXT = ColName.ORACLE_TEXT
+ CARD_JSON = ColName.CARD_JSON
 
 
  MTG_JSON_TEMPLATE = "https://mtgjson.com/api/v5/{set_code}.json"
@@ -66,6 +68,10 @@ def _extract_value(set_code: str, name: str, card_dict: dict, field: CardAttr):
  return card_dict.get("setCode", set_code) != set_code
  case CardAttr.IS_DFC:
  return len(card_dict.get("otherFaceIds", [])) > 0
+ case CardAttr.ORACLE_TEXT:
+ return card_dict.get("text", "")
+ case CardAttr.CARD_JSON:
+ return card_dict.get("json", "")
 
 
  def card_df(draft_set_code: str, names: list[str]) -> pl.DataFrame:
@@ -81,6 +87,9 @@ def card_df(draft_set_code: str, names: list[str]) -> pl.DataFrame:
  card_data = draft_set_json["data"]["cards"]
 
  card_data.reverse() # prefer front face for split cards
+ for item in card_data:
+ item["json"] = json.dumps(item)
+
  face_name_cards = [item for item in card_data if "faceName" in item]
  card_data_map.update({item["faceName"]: item for item in face_name_cards})
  card_data_map.update({item["name"]: item for item in card_data})
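The new attributes above mean each card row now carries its plain rules text (MTGJSON's `"text"` field) plus the full MTGJSON entry serialized into the `"json"` field. A minimal sketch of recovering the structured entry from the resulting `card_json` column; the DataFrame literal here is a stand-in, not real card data:

```python
import json

import polars as pl

# Stand-in frame with the shape produced by the card file: one row per card,
# card_json holding the serialized MTGJSON entry.
df = pl.DataFrame({
    "name": ["Example Card"],
    "card_json": ['{"name": "Example Card", "manaValue": 2.0, "rarity": "common"}'],
})

# Parse the JSON back into dicts keyed by card name.
cards = {row["name"]: json.loads(row["card_json"]) for row in df.iter_rows(named=True)}
print(cards["Example Card"]["rarity"])  # common
```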
spells/columns.py CHANGED
@@ -12,7 +12,7 @@ class ColumnSpec:
  col_type: ColType
  expr: pl.Expr | None = None
  exprMap: Callable[[str], pl.Expr] | None = None
- views: tuple[View, ...] = ()
+ views: list[View] | None = None
  dependencies: list[str] | None = None
  version: str | None = (
  None # only needed for user-defined functions with python functions in expr
@@ -49,76 +49,70 @@ _column_specs = [
  ColumnSpec(
  name=ColName.NAME,
  col_type=ColType.GROUP_BY,
- views=(),
  # handled by internals, derived from both 'pick' and "name mapped" columns
  ),
  ColumnSpec(
  name=ColName.EXPANSION,
  col_type=ColType.GROUP_BY,
- views=(View.GAME, View.DRAFT),
+ views=[View.GAME, View.DRAFT],
  ),
  ColumnSpec(
  name=ColName.EVENT_TYPE,
  col_type=ColType.GROUP_BY,
- views=(View.GAME, View.DRAFT),
+ views=[View.GAME, View.DRAFT],
  ),
  ColumnSpec(
  name=ColName.DRAFT_ID,
- views=(View.GAME, View.DRAFT),
+ views=[View.GAME, View.DRAFT],
  col_type=ColType.FILTER_ONLY,
  ),
  ColumnSpec(
  name=ColName.DRAFT_TIME,
  col_type=ColType.FILTER_ONLY,
- views=(View.GAME, View.DRAFT),
+ views=[View.GAME, View.DRAFT],
  ),
  ColumnSpec(
  name=ColName.DRAFT_DATE,
  col_type=ColType.GROUP_BY,
- views=(View.GAME, View.DRAFT),
  expr=pl.col("draft_time").str.to_datetime("%Y-%m-%d %H:%M:%S").dt.date(),
  dependencies=[ColName.DRAFT_TIME],
  ),
  ColumnSpec(
  name=ColName.DRAFT_DAY_OF_WEEK,
  col_type=ColType.GROUP_BY,
- views=(View.GAME, View.DRAFT),
  expr=pl.col("draft_time").str.to_datetime("%Y-%m-%d %H:%M:%S").dt.weekday(),
  dependencies=[ColName.DRAFT_TIME],
  ),
  ColumnSpec(
  name=ColName.DRAFT_HOUR,
  col_type=ColType.GROUP_BY,
- views=(View.GAME, View.DRAFT),
  expr=pl.col("draft_time").str.to_datetime("%Y-%m-%d %H:%M:%S").dt.hour(),
  dependencies=[ColName.DRAFT_TIME],
  ),
  ColumnSpec(
  name=ColName.DRAFT_WEEK,
  col_type=ColType.GROUP_BY,
- views=(View.GAME, View.DRAFT),
  expr=pl.col("draft_time").str.to_datetime("%Y-%m-%d %H:%M:%S").dt.week(),
  dependencies=[ColName.DRAFT_TIME],
  ),
  ColumnSpec(
  name=ColName.RANK,
  col_type=ColType.GROUP_BY,
- views=(View.GAME, View.DRAFT),
+ views=[View.GAME, View.DRAFT],
  ),
  ColumnSpec(
  name=ColName.USER_N_GAMES_BUCKET,
  col_type=ColType.GROUP_BY,
- views=(View.DRAFT, View.GAME),
+ views=[View.DRAFT, View.GAME],
  ),
  ColumnSpec(
  name=ColName.USER_GAME_WIN_RATE_BUCKET,
  col_type=ColType.GROUP_BY,
- views=(View.DRAFT, View.GAME),
+ views=[View.DRAFT, View.GAME],
  ),
  ColumnSpec(
  name=ColName.PLAYER_COHORT,
  col_type=ColType.GROUP_BY,
- views=(View.DRAFT, View.GAME),
  expr=pl.when(pl.col("user_n_games_bucket") < 100)
  .then(pl.lit("Other"))
  .otherwise(
@@ -135,45 +129,41 @@ _column_specs = [
  ColumnSpec(
  name=ColName.EVENT_MATCH_WINS,
  col_type=ColType.GROUP_BY,
- views=(View.DRAFT,),
+ views=[View.DRAFT],
  ),
  ColumnSpec(
  name=ColName.EVENT_MATCH_WINS_SUM,
  col_type=ColType.PICK_SUM,
- views=(View.DRAFT,),
+ views=[View.DRAFT],
  expr=pl.col(ColName.EVENT_MATCH_WINS),
  dependencies=[ColName.EVENT_MATCH_WINS],
  ),
  ColumnSpec(
  name=ColName.EVENT_MATCH_LOSSES,
  col_type=ColType.GROUP_BY,
- views=(View.DRAFT,),
+ views=[View.DRAFT],
  ),
  ColumnSpec(
  name=ColName.EVENT_MATCH_LOSSES_SUM,
  col_type=ColType.PICK_SUM,
- views=(View.DRAFT,),
  expr=pl.col(ColName.EVENT_MATCH_LOSSES),
  dependencies=[ColName.EVENT_MATCH_LOSSES],
  ),
  ColumnSpec(
  name=ColName.EVENT_MATCHES,
  col_type=ColType.GROUP_BY,
- views=(View.DRAFT,),
  expr=pl.col("event_match_wins") + pl.col("event_match_losses"),
  dependencies=[ColName.EVENT_MATCH_WINS, ColName.EVENT_MATCH_LOSSES],
  ),
  ColumnSpec(
  name=ColName.EVENT_MATCHES_SUM,
  col_type=ColType.PICK_SUM,
- views=(View.DRAFT,),
  expr=pl.col(ColName.EVENT_MATCHES),
  dependencies=[ColName.EVENT_MATCHES],
  ),
  ColumnSpec(
  name=ColName.IS_TROPHY,
  col_type=ColType.GROUP_BY,
- views=(View.DRAFT,),
  expr=pl.when(pl.col("event_type") == "Traditional")
  .then(pl.col("event_match_wins") == 3)
  .otherwise(pl.col("event_match_wins") == 7),
@@ -182,45 +172,40 @@ _column_specs = [
  ColumnSpec(
  name=ColName.IS_TROPHY_SUM,
  col_type=ColType.PICK_SUM,
- views=(View.DRAFT,),
  expr=pl.col(ColName.IS_TROPHY),
  dependencies=[ColName.IS_TROPHY],
  ),
  ColumnSpec(
  name=ColName.PACK_NUMBER,
  col_type=ColType.FILTER_ONLY, # use pack_num
- views=(View.DRAFT,),
+ views=[View.DRAFT],
  ),
  ColumnSpec(
  name=ColName.PACK_NUM,
  col_type=ColType.GROUP_BY,
- views=(View.DRAFT,),
  expr=pl.col("pack_number") + 1,
  dependencies=[ColName.PACK_NUMBER],
  ),
  ColumnSpec(
  name=ColName.PICK_NUMBER,
  col_type=ColType.FILTER_ONLY, # use pick_num
- views=(View.DRAFT,),
+ views=[View.DRAFT],
  ),
  ColumnSpec(
  name=ColName.PICK_NUM,
  col_type=ColType.GROUP_BY,
- views=(View.DRAFT,),
  expr=pl.col("pick_number") + 1,
  dependencies=[ColName.PICK_NUMBER],
  ),
  ColumnSpec(
  name=ColName.TAKEN_AT,
  col_type=ColType.PICK_SUM,
- views=(View.DRAFT,),
  expr=pl.col(ColName.PICK_NUM),
  dependencies=[ColName.PICK_NUM],
  ),
  ColumnSpec(
  name=ColName.NUM_TAKEN,
  col_type=ColType.PICK_SUM,
- views=(View.DRAFT,),
  expr=pl.when(pl.col(ColName.PICK).is_not_null())
  .then(1)
  .otherwise(0), # a literal returns one row under select alone
@@ -229,27 +214,26 @@ _column_specs = [
  ColumnSpec(
  name=ColName.PICK,
  col_type=ColType.FILTER_ONLY, # aggregated as "name"
- views=(View.DRAFT,),
+ views=[View.DRAFT],
  ),
  ColumnSpec(
  name=ColName.PICK_MAINDECK_RATE,
  col_type=ColType.PICK_SUM,
- views=(View.DRAFT,),
+ views=[View.DRAFT],
  ),
  ColumnSpec(
  name=ColName.PICK_SIDEBOARD_IN_RATE,
  col_type=ColType.PICK_SUM,
- views=(View.DRAFT,),
+ views=[View.DRAFT],
  ),
  ColumnSpec(
  name=ColName.PACK_CARD,
  col_type=ColType.NAME_SUM,
- views=(View.DRAFT,),
+ views=[View.DRAFT],
  ),
  ColumnSpec(
  name=ColName.LAST_SEEN,
  col_type=ColType.NAME_SUM,
- views=(View.DRAFT,),
  exprMap=lambda name: pl.col(f"pack_card_{name}")
  * pl.min_horizontal("pick_num", 8),
  dependencies=[ColName.PACK_CARD, ColName.PICK_NUM],
@@ -257,117 +241,107 @@ _column_specs = [
  ColumnSpec(
  name=ColName.NUM_SEEN,
  col_type=ColType.NAME_SUM,
- views=(View.DRAFT,),
  exprMap=lambda name: pl.col(f"pack_card_{name}") * (pl.col("pick_num") <= 8),
  dependencies=[ColName.PACK_CARD, ColName.PICK_NUM],
  ),
  ColumnSpec(
  name=ColName.POOL,
  col_type=ColType.NAME_SUM,
- views=(View.DRAFT,),
+ views=[View.DRAFT],
  ),
  ColumnSpec(
  name=ColName.GAME_TIME,
  col_type=ColType.FILTER_ONLY,
- views=(View.GAME,),
+ views=[View.GAME],
  ),
  ColumnSpec(
  name=ColName.GAME_DATE,
  col_type=ColType.GROUP_BY,
- views=(View.GAME,),
  expr=pl.col("game_time").str.to_datetime("%Y-%m-%d %H-%M-%S").dt.date(),
  dependencies=[ColName.GAME_TIME],
  ),
  ColumnSpec(
  name=ColName.GAME_DAY_OF_WEEK,
  col_type=ColType.GROUP_BY,
- views=(View.GAME,),
  expr=pl.col("game_time").str.to_datetime("%Y-%m-%d %H-%M-%S").dt.weekday(),
  dependencies=[ColName.GAME_TIME],
  ),
  ColumnSpec(
  name=ColName.GAME_HOUR,
  col_type=ColType.GROUP_BY,
- views=(View.GAME,),
  expr=pl.col("game_time").str.to_datetime("%Y-%m-%d %H-%M-%S").dt.hour(),
  dependencies=[ColName.GAME_TIME],
  ),
  ColumnSpec(
  name=ColName.GAME_WEEK,
  col_type=ColType.GROUP_BY,
- views=(View.GAME,),
  expr=pl.col("game_time").str.to_datetime("%Y-%m-%d %H-%M-%S").dt.week(),
  dependencies=[ColName.GAME_TIME],
  ),
  ColumnSpec(
  name=ColName.BUILD_INDEX,
  col_type=ColType.GROUP_BY,
- views=(View.GAME,),
+ views=[View.GAME],
  ),
  ColumnSpec(
  name=ColName.MATCH_NUMBER,
  col_type=ColType.GROUP_BY,
- views=(View.GAME,),
+ views=[View.GAME],
  ),
  ColumnSpec(
  name=ColName.GAME_NUMBER,
  col_type=ColType.GROUP_BY,
- views=(View.GAME,),
+ views=[View.GAME],
  ),
  ColumnSpec(
  name=ColName.NUM_GAMES,
  col_type=ColType.GAME_SUM,
- views=(View.GAME,),
  expr=pl.col(ColName.GAME_NUMBER).is_not_null(),
  dependencies=[ColName.GAME_NUMBER],
  ),
  ColumnSpec(
  name=ColName.NUM_MATCHES,
  col_type=ColType.GAME_SUM,
- views=(View.GAME,),
  expr=pl.col(ColName.GAME_NUMBER) == 1,
  dependencies=[ColName.GAME_NUMBER],
  ),
  ColumnSpec(
  name=ColName.NUM_EVENTS,
  col_type=ColType.GAME_SUM,
- views=(View.GAME,),
  expr=(pl.col(ColName.GAME_NUMBER) == 1) & (pl.col(ColName.MATCH_NUMBER) == 1),
  dependencies=[ColName.GAME_NUMBER, ColName.MATCH_NUMBER],
  ),
  ColumnSpec(
  name=ColName.OPP_RANK,
  col_type=ColType.GROUP_BY,
- views=(View.GAME,),
+ views=[View.GAME],
  ),
  ColumnSpec(
  name=ColName.MAIN_COLORS,
  col_type=ColType.GROUP_BY,
- views=(View.GAME,),
+ views=[View.GAME],
  ),
  ColumnSpec(
  name=ColName.NUM_COLORS,
  col_type=ColType.GROUP_BY,
- views=(View.GAME,),
  expr=pl.col(ColName.MAIN_COLORS).str.len_chars(),
  dependencies=[ColName.MAIN_COLORS],
  ),
  ColumnSpec(
  name=ColName.SPLASH_COLORS,
  col_type=ColType.GROUP_BY,
- views=(View.GAME,),
+ views=[View.GAME],
  ),
  ColumnSpec(
  name=ColName.HAS_SPLASH,
  col_type=ColType.GROUP_BY,
- views=(View.GAME,),
  expr=pl.col(ColName.SPLASH_COLORS).str.len_chars() > 0,
  dependencies=[ColName.SPLASH_COLORS],
  ),
  ColumnSpec(
  name=ColName.ON_PLAY,
  col_type=ColType.GROUP_BY,
- views=(View.GAME,),
+ views=[View.GAME],
  ),
  ColumnSpec(
  name=ColName.NUM_ON_PLAY,
@@ -378,120 +352,110 @@ _column_specs = [
  ColumnSpec(
  name=ColName.NUM_MULLIGANS,
  col_type=ColType.GROUP_BY,
- views=(View.GAME,),
+ views=[View.GAME],
  ),
  ColumnSpec(
  name=ColName.NUM_MULLIGANS_SUM,
  col_type=ColType.GAME_SUM,
- views=(View.GAME,),
  expr=pl.col(ColName.NUM_MULLIGANS),
  dependencies=[ColName.NUM_MULLIGANS],
  ),
  ColumnSpec(
  name=ColName.OPP_NUM_MULLIGANS,
  col_type=ColType.GAME_SUM,
- views=(View.GAME,),
+ views=[View.GAME],
  ),
  ColumnSpec(
  name=ColName.OPP_NUM_MULLIGANS_SUM,
  col_type=ColType.GAME_SUM,
- views=(View.GAME,),
  expr=pl.col(ColName.OPP_NUM_MULLIGANS),
  dependencies=[ColName.OPP_NUM_MULLIGANS],
  ),
  ColumnSpec(
  name=ColName.OPP_COLORS,
  col_type=ColType.GROUP_BY,
- views=(View.GAME,),
+ views=[View.GAME],
  ),
  ColumnSpec(
  name=ColName.NUM_TURNS,
  col_type=ColType.GROUP_BY,
- views=(View.GAME,),
+ views=[View.GAME],
  ),
  ColumnSpec(
  name=ColName.NUM_TURNS_SUM,
  col_type=ColType.GAME_SUM,
- views=(View.GAME,),
  expr=pl.col(ColName.NUM_TURNS),
  dependencies=[ColName.NUM_TURNS],
  ),
  ColumnSpec(
  name=ColName.WON,
  col_type=ColType.GROUP_BY,
- views=(View.GAME,),
+ views=[View.GAME],
  ),
  ColumnSpec(
  name=ColName.NUM_WON,
  col_type=ColType.GAME_SUM,
- views=(View.GAME,),
  expr=pl.col(ColName.WON),
  dependencies=[ColName.WON],
  ),
  ColumnSpec(
  name=ColName.OPENING_HAND,
  col_type=ColType.NAME_SUM,
- views=(View.GAME,),
+ views=[View.GAME],
  ),
  ColumnSpec(
  name=ColName.WON_OPENING_HAND,
  col_type=ColType.NAME_SUM,
- views=(View.GAME,),
  exprMap=lambda name: pl.col(f"opening_hand_{name}") * pl.col(ColName.WON),
  dependencies=[ColName.OPENING_HAND, ColName.WON],
  ),
  ColumnSpec(
  name=ColName.DRAWN,
  col_type=ColType.NAME_SUM,
- views=(View.GAME,),
+ views=[View.GAME],
  ),
  ColumnSpec(
  name=ColName.WON_DRAWN,
  col_type=ColType.NAME_SUM,
- views=(View.GAME,),
  exprMap=lambda name: pl.col(f"drawn_{name}") * pl.col(ColName.WON),
  dependencies=[ColName.DRAWN, ColName.WON],
  ),
  ColumnSpec(
  name=ColName.TUTORED,
  col_type=ColType.NAME_SUM,
- views=(View.GAME,),
+ views=[View.GAME],
  ),
  ColumnSpec(
  name=ColName.WON_TUTORED,
  col_type=ColType.NAME_SUM,
- views=(View.GAME,),
  exprMap=lambda name: pl.col(f"tutored_{name}") * pl.col(ColName.WON),
  dependencies=[ColName.TUTORED, ColName.WON],
  ),
  ColumnSpec(
  name=ColName.DECK,
  col_type=ColType.NAME_SUM,
- views=(View.GAME,),
+ views=[View.GAME],
  ),
  ColumnSpec(
  name=ColName.WON_DECK,
  col_type=ColType.NAME_SUM,
- views=(View.GAME,),
  exprMap=lambda name: pl.col(f"deck_{name}") * pl.col(ColName.WON),
  dependencies=[ColName.DECK, ColName.WON],
  ),
  ColumnSpec(
  name=ColName.SIDEBOARD,
  col_type=ColType.NAME_SUM,
- views=(View.GAME,),
+ views=[View.GAME],
  ),
  ColumnSpec(
  name=ColName.WON_SIDEBOARD,
  col_type=ColType.NAME_SUM,
- views=(View.GAME,),
  exprMap=lambda name: pl.col(f"sideboard_{name}") * pl.col(ColName.WON),
  dependencies=[ColName.SIDEBOARD, ColName.WON],
  ),
  ColumnSpec(
  name=ColName.NUM_GNS,
  col_type=ColType.NAME_SUM,
- views=(View.GAME,),
  exprMap=lambda name: pl.max_horizontal(
  0,
  pl.col(f"deck_{name}")
@@ -509,69 +473,64 @@ _column_specs = [
  ColumnSpec(
  name=ColName.WON_NUM_GNS,
  col_type=ColType.NAME_SUM,
- views=(View.GAME,),
  exprMap=lambda name: pl.col(ColName.WON) * pl.col(f"num_gns_{name}"),
  dependencies=[ColName.NUM_GNS, ColName.WON],
  ),
  ColumnSpec(
  name=ColName.SET_CODE,
  col_type=ColType.CARD_ATTR,
- views=(View.CARD,),
  ),
  ColumnSpec(
  name=ColName.COLOR,
  col_type=ColType.CARD_ATTR,
- views=(View.CARD,),
  ),
  ColumnSpec(
  name=ColName.RARITY,
  col_type=ColType.CARD_ATTR,
- views=(View.CARD,),
  ),
  ColumnSpec(
  name=ColName.COLOR_IDENTITY,
  col_type=ColType.CARD_ATTR,
- views=(View.CARD,),
  ),
  ColumnSpec(
  name=ColName.CARD_TYPE,
  col_type=ColType.CARD_ATTR,
- views=(View.CARD,),
  ),
  ColumnSpec(
  name=ColName.SUBTYPE,
  col_type=ColType.CARD_ATTR,
- views=(View.CARD,),
  ),
  ColumnSpec(
  name=ColName.MANA_VALUE,
  col_type=ColType.CARD_ATTR,
- views=(View.CARD,),
  ),
  ColumnSpec(
  name=ColName.MANA_COST,
  col_type=ColType.CARD_ATTR,
- views=(View.CARD,),
  ),
  ColumnSpec(
  name=ColName.POWER,
  col_type=ColType.CARD_ATTR,
- views=(View.CARD,),
  ),
  ColumnSpec(
  name=ColName.TOUGHNESS,
  col_type=ColType.CARD_ATTR,
- views=(View.CARD,),
  ),
  ColumnSpec(
  name=ColName.IS_BONUS_SHEET,
  col_type=ColType.CARD_ATTR,
- views=(View.CARD,),
  ),
  ColumnSpec(
  name=ColName.IS_DFC,
  col_type=ColType.CARD_ATTR,
- views=(View.CARD,),
+ ),
+ ColumnSpec(
+ name=ColName.ORACLE_TEXT,
+ col_type=ColType.CARD_ATTR,
+ ),
+ ColumnSpec(
+ name=ColName.CARD_JSON,
+ col_type=ColType.CARD_ATTR,
  ),
  ColumnSpec(
  name=ColName.PICKED_MATCH_WR,
@@ -594,7 +553,6 @@ _column_specs = [
  ColumnSpec(
  name=ColName.ALSA,
  col_type=ColType.AGG,
- views=(),
  expr=pl.col(ColName.LAST_SEEN) / pl.col(ColName.NUM_SEEN),
  dependencies=[ColName.LAST_SEEN, ColName.NUM_SEEN],
  ),
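The net effect of the hunks above is that `views` is now optional metadata: built-in specs either declare it or let it be inferred, and `AGG`/`CARD_ATTR` columns no longer declare it at all. A hedged sketch of what a custom extension looks like under the new shape; the extension name and formula here are illustrative, not part of the library:

```python
import polars as pl

from spells.columns import ColumnSpec
from spells.enums import ColName, ColType

# Hypothetical AGG extension: no `views` argument is needed any more; the
# hydration step infers views from the dependencies (see draft_data.py below).
turns_per_mulligan = ColumnSpec(
    name="turns_per_mulligan",  # illustrative name, not a built-in column
    col_type=ColType.AGG,
    expr=pl.col(ColName.NUM_TURNS_SUM) / pl.col(ColName.NUM_MULLIGANS_SUM),
    dependencies=[ColName.NUM_TURNS_SUM, ColName.NUM_MULLIGANS_SUM],
)
```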
spells/draft_data.py CHANGED
@@ -53,6 +53,23 @@ def _get_names(set_code: str) -> tuple[str, ...]:
 
 
  def _hydrate_col_defs(set_code: str, col_spec_map: dict[str, ColumnSpec]):
+ def get_views(spec: ColumnSpec) -> list[View]:
+ if spec.name == ColName.NAME or spec.col_type == ColType.AGG:
+ return []
+ if spec.col_type == ColType.CARD_ATTR:
+ return [View.CARD]
+ if spec.views is not None:
+ return spec.views
+ assert (
+ spec.dependencies is not None
+ ), f"Col {spec.name} should have dependencies"
+
+ views = []
+ for dep in spec.dependencies:
+ views.extend(get_views(col_spec_map[dep]))
+
+ return list(set(views))
+
  names = _get_names(set_code)
  assert len(names) > 0, "there should be names"
  hydrated = {}
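To make the recursion above concrete: a spec with `views=None` inherits the union of its dependencies' views. A standalone toy version of that idea, using plain dicts whose values mirror the specs in columns.py (this is an illustration, not the library's code):

```python
# Toy specs mirroring columns.py: pack_card/pick_number are DRAFT columns,
# pick_num and num_seen leave views unset and rely on their dependencies.
SPECS = {
    "pack_card": {"views": ["draft"], "deps": []},
    "pick_number": {"views": ["draft"], "deps": []},
    "pick_num": {"views": None, "deps": ["pick_number"]},
    "num_seen": {"views": None, "deps": ["pack_card", "pick_num"]},
}


def infer_views(name: str) -> list[str]:
    spec = SPECS[name]
    if spec["views"] is not None:
        return spec["views"]
    views: list[str] = []
    for dep in spec["deps"]:
        views.extend(infer_views(dep))
    return sorted(set(views))  # de-duplicate, as get_views does with set()


print(infer_views("num_seen"))  # ['draft']
```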
@@ -87,19 +104,21 @@ def _hydrate_col_defs(set_code: str, col_spec_map: dict[str, ColumnSpec]):
  expr_sig = str(datetime.datetime.now)
 
  dependencies = tuple(spec.dependencies or ())
+ views = get_views(spec)
  signature = str(
  (
  spec.name,
  spec.col_type.value,
  expr_sig,
- tuple(view.value for view in spec.views),
+ tuple(view.value for view in views),
  dependencies,
  )
  )
+
  cdef = ColumnDefinition(
  name=spec.name,
  col_type=spec.col_type,
- views=spec.views,
+ views=tuple(views),
  expr=expr,
  dependencies=dependencies,
  signature=signature,
@@ -246,6 +265,28 @@ def _base_agg_df(
  )
 
 
+ def card_df(
+ set_code: str,
+ extensions: list[ColumnSpec] | None = None,
+ ):
+ col_spec_map = dict(spells.columns.col_spec_map)
+ if extensions is not None:
+ for spec in extensions:
+ col_spec_map[spec.name] = spec
+
+ col_def_map = _hydrate_col_defs(set_code, col_spec_map)
+
+ columns = [ColName.NAME] + [
+ c for c, cdef in col_def_map.items() if cdef.col_type == ColType.CARD_ATTR
+ ]
+ fp = data_file_path(set_code, View.CARD)
+ card_df = pl.read_parquet(fp)
+ select_df = _view_select(
+ card_df, frozenset(columns), col_def_map, is_agg_view=False
+ )
+ return select_df.select(columns)
+
+
  def summon(
  set_code: str,
  columns: list[str] | None = None,
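Taken together with the export in `__init__.py`, this adds a second public entry point alongside `summon`. A hedged usage sketch, assuming the set's card file has already been fetched (for example via `spells add DSK`):

```python
import spells
from spells.enums import ColName

# One row per card name, with every CARD_ATTR column, including the new
# oracle_text and card_json columns.
cards = spells.card_df("DSK")
print(cards.select([ColName.NAME, ColName.RARITY, ColName.ORACLE_TEXT]).head())
```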
spells/enums.py CHANGED
@@ -120,6 +120,8 @@ class ColName(StrEnum):
  TOUGHNESS = "toughness"
  IS_BONUS_SHEET = "is_bonus_sheet"
  IS_DFC = "is_dfc"
+ ORACLE_TEXT = "oracle_text"
+ CARD_JSON = "card_json"
  # agg extensions
  PICKED_MATCH_WR = "picked_match_wr"
  TROPHY_RATE = "trophy_rate"
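Because `ColName` is a `StrEnum`, the two new members can be used anywhere a column name string is accepted. For example, a hedged sketch of attaching oracle text to a standard per-card aggregation (grouping by a `CARD_ATTR` alongside `name` follows the README's `group_by` rules below; the exact output is not reproduced here):

```python
import spells
from spells.enums import ColName

# Group by card name and its oracle text; ALSA is a built-in AGG column.
df = spells.summon(
    "DSK",
    columns=[ColName.ALSA],
    group_by=[ColName.NAME, ColName.ORACLE_TEXT],
)
```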
spells/external.py CHANGED
@@ -268,7 +268,7 @@ def download_data_set(
  mode, "Unzipping and transforming to parquet (this might take a few minutes)..."
  )
  _process_zipped_file(dataset_path, target_path)
- cache.spells_print(mode, f"File {target_path} written")
+ cache.spells_print(mode, f"Wrote file {target_path}")
  if clear_set_cache:
  cache.clear(set_code)
 
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: spells-mtg
- Version: 0.0.5
+ Version: 0.2.1
  Summary: analaysis of 17Lands.com public datasets
  Author-Email: Joel Barnes <oelarnes@gmail.com>
  License: MIT
@@ -19,16 +19,16 @@ $ spells add DSK
 
  🪄 add ✨ Downloading draft dataset from 17Lands.com
  100% [......................................................................] 250466473 / 250466473
- 🪄 add ✨ Unzipping and transforming to parquet...
- 🪄 add ✨ File /Users/joel/.local/share/spells/external/DSK/DSK_PremierDraft_draft.parquet written
+ 🪄 add ✨ Unzipping and transforming to parquet (this might take a few minutes)...
+ 🪄 add ✨ Wrote file /Users/joel/.local/share/spells/external/DSK/DSK_PremierDraft_draft.parquet
  🪄 clean ✨ No local cache found for set DSK
+ 🪄 add ✨ Fetching card data from mtgjson.com and writing card parquet file
+ 🪄 add ✨ Wrote file /Users/joel/.local/share/spells/external/DSK/DSK_card.parquet
  🪄 add ✨ Downloading game dataset from 17Lands.com
  100% [........................................................................] 77145600 / 77145600
- 🪄 add ✨ Unzipping and transforming to parquet...
- 🪄 add ✨ File /Users/joel/.local/share/spells/external/DSK/DSK_PremierDraft_game.parquet written
+ 🪄 add ✨ Unzipping and transforming to parquet (this might take a few minutes)...
+ 🪄 add ✨ Wrote file /Users/joel/.local/share/spells/external/DSK/DSK_PremierDraft_game.parquet
  🪄 clean ✨ No local cache found for set DSK
- 🪄 add ✨ Fetching card data from mtgjson.com and writing card parquet file
- 🪄 add ✨ Wrote 287 lines to file /Users/joel/.local/share/spells/external/DSK/DSK_card.parquet
  $ ipython
  ```
 
@@ -69,19 +69,20 @@ Spells is not affiliated with 17Lands. Please review the Usage Guidelines for 17
  ## spells
 
  - Uses [Polars](https://docs.pola.rs/) for high-performance, multi-threaded aggregations of large datasets
- - Uses Polars to power an expressive query language for specifying custom extensions and optimizing complex queries
+ - Uses Polars to power an expressive query language for specifying custom extensions
  - Converts csv datasets to parquet for 10x faster calculations and 20x smaller file sizes
  - Supports calculating the standard aggregations and measures out of the box with no arguments (ALSA, GIH WR, etc)
  - Caches aggregate DataFrames in the local file system automatically for instantaneous reproduction of previous analysis
  - Manages grouping and filtering by built-in and custom columns at the row level
- - Provides 116 explicitly specified, enumerated, documented column definitions
+ - Provides 118 explicitly specified, enumerated, documented column definitions
  - Supports "Deck Color Data" aggregations with built-in column definitions.
  - Provides a CLI tool `spells [add|refresh|clean|remove|info] [SET]` to download and manage external files
  - Downloads and manages public datasets from 17Lands
- - Downloads and models booster configuration and card data from [MTGJSON](https://mtgjson.com/)
+ - Retrieves and models booster configuration and card data from [MTGJSON](https://mtgjson.com/)
  - Is fully typed, linted, and statically analyzed for support of advanced IDE features
  - Provides optional enums for all base columns and built-in extensions, as well as for custom extension parameters
  - Uses Polars expressions to support second-stage aggregations and beyond like game-weighted z-scores with one call to summon
+ - Works on MacOS, Linux, and Windows
 
  ## summon
 
@@ -132,7 +133,7 @@ Spells is not affiliated with 17Lands. Please review the Usage Guidelines for 17
  - `filter_spec` specifies a row-level filter for the dataset, using an intuitive custom query formulation
  ```python
  >>> from spells.enums import ColName
- >>> spells.summon('BLB', columns=["game_wr"], group_by=["player_cohort"], filter_spec={'lhs': 'num_mulligans', 'op': '>', 'rhs': 0})
+ >>> spells.summon('BLB', columns=[ColName.GAME_WR], group_by=[ColName.PLAYER_COHORT], filter_spec={'lhs': ColName.NUM_MULLIGANS, 'op': '>', 'rhs': 0})
  shape: (4, 2)
  ┌───────────────┬──────────┐
  │ player_cohort ┆ game_wr │
@@ -204,19 +205,19 @@ If you're interested in the fruits of my DEq research, or in checking my work, k
 
  ## Performance
 
- Spells provides several features out of the box to optimize performance to the degree possible given its generality.
+ Spells provides several features to optimize performance.
 
  ### Parquet Transformation
 
- The most significant optimization used by Spells is the simplest: the csv files are scanned and streamed to Parquet files by Polars. This allows 10x faster compute times with 20x less storage space and lower memory usage compared to csv. Yes, the files are twenty times smaller and ten times faster!
+ The most significant optimization used by Spells is the simplest: the csv files are scanned and streamed to Parquet files by Polars. This allows 10x faster compute times with 20x less storage space and lower memory usage compared to csv. Yes, it's twenty times smaller and ten times faster!
 
  ### Query Optimization
 
- Firstly, it is built on top of Polars, a modern, well-supported DataFrame engine written for performance in Rust that enables declarative query plans and lazy evaluation, allowing for automatic performance optimization in the execution of the query plan. Spells selects only the necessary columns for your analysis using an optimized recursive selection algorithm traversing the dependency tree.
+ Spells is built on top of Polars, a modern, well-supported DataFrame engine written for performance in Rust that enables declarative query plans and lazy evaluation, allowing for automatic performance optimization in the execution of the query plan. Spells selects only the necessary columns for your analysis, recursively traversing the dependency tree.
 
  ### Local Caching
 
- Additionally, by default, Spells caches the results of expensive aggregations in the local file system as parquet files, which by default are found under the `data/local` path from the execution directory, which can be configured using the environment variable `SPELLS_PROJECT_DIR`. Query plans which request the same set of first-stage aggregations (sums over base rows) will attempt to locate the aggregate data in the cache before calculating. This guarantees that a repeated call to `summon` returns instantaneously.
+ Spells caches the results of expensive aggregations in the local file system as parquet files, which by default are found under the `data/local` path from the execution directory, which can be configured using the environment variable `SPELLS_PROJECT_DIR`. Query plans which request the same set of first-stage aggregations (sums over base rows) will attempt to locate the aggregate data in the cache before calculating. This guarantees that a repeated call to `summon` returns instantaneously.
 
  When refreshing a given set's data files from 17Lands using the provided cli, the cache for that set is automatically cleared. The `spells` CLI gives additional tools for managing the local and external caches.
 
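The caching behavior described above can also be steered from code: the cache root via the `SPELLS_PROJECT_DIR` environment variable, and per call via the new `read_cache`/`write_cache` flags on `summon` documented further down. A hedged sketch:

```python
import os

# Point the local aggregation cache somewhere explicit before using Spells.
os.environ["SPELLS_PROJECT_DIR"] = "/tmp/spells-project"

import spells

df = spells.summon("DSK")        # first call computes the aggregation and caches it
df_again = spells.summon("DSK")  # same request is served from the cache

# Bypass the cache entirely (e.g. while debugging a custom extension).
fresh = spells.summon("DSK", read_cache=False, write_cache=False)
```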
@@ -249,7 +250,7 @@ So that's it, that's what Spells does from a high level. `summon` will hand off
 
  Spells includes a command-line interface `spells` to manage your external data files and local cache. Spells will download files to an appropriate file location on your system,
  typically `~/.local/share/spells` on Unix-like platforms and `C:\Users\{Username}\AppData\Local\Spells` on Windows.
- To use `spells`, make sure Spells in installed in your environment using pip or a package manager, and type `spells help` into your shell, or dive in with `spells add DSK` or your favorite set.
+ To use `spells`, make sure Spells in installed in your environment using pip or a package manager, and type `spells help` into your shell, or dive in with `spells add DSK` or your favorite set. If Spells is installed globally using pipx, any local version of Spells will be able to read the managed files.
 
  ## API
 
@@ -263,12 +264,14 @@ summon(
  group_by: list[str] | None = None,
  filter_spec: dict | None = None,
  extensions: list[str] | None = None,
+ read_cache: bool = True,
+ write_cache: bool = True,
  ) -> polars.DataFrame
  ```
 
  #### parameters
 
- - columns: a list of string or `ColName` values to select as non-grouped columns. Valid `ColTypes` are `PICK_SUM`, `NAME_SUM`, `GAME_SUM`, `CARD_ATTR`, `AGG`. Min/Max/Unique
+ - columns: a list of string or `ColName` values to select as non-grouped columns. Valid `ColTypes` are `PICK_SUM`, `NAME_SUM`, `GAME_SUM`, `CARD_ATTR`, and `AGG`. Min/Max/Unique
  aggregations of non-numeric (or numeric) data types are not supported. If `None`, use a set of columns modeled on the commonly used values on 17Lands.com/card_data.
 
  - group_by: a list of string or `ColName` values to display as grouped columns. Valid `ColTypes` are `GROUP_BY` and `CARD_ATTR`. By default, group by "name" (card name).
@@ -282,13 +285,15 @@ aggregations of non-numeric (or numeric) data types are not supported. If `None`
 
  - extensions: a list of `spells.columns.ColumnSpec` objects, which are appended to the definitions built-in columns described below. A name not in the enum `ColName` can be used in this way if it is the name of a provided extension. Existing names can also be redefined using extensions.
 
+ - read_cache/write_cache: Use the local file system to cache and retrieve aggregations to minimize expensive reads of the large datasets. You shouldn't need to touch these arguments unless you are debugging.
+
  ### Enums
 
  ```python
  from spells.enums import ColName, ColType, View
  ```
 
- Recommended to import `ColName` for any usage of `summon`, and to import `ColType` and `View` when defining custom extensions.
+ Recommended to import `ColName` for any usage of `summon`, and to import `ColType` when defining custom extensions. You shouldn't need `VIEW`.
 
  ### ColumnSpec
 
@@ -297,12 +302,12 @@ from spells.columns import ColumnSpec
 
  ColumnSpec(
  name: str,
- col_type: spells.enums.ColType,
- views: tuple(spells.enums.View...) = (),
+ col_type: ColType,
  expr: pl.Expr | None = None,
  exprMap: Callable[[str], pl.Expr] | None = None
  dependencies: list[str] | None = None
  version: str | None = None
+ views: list[View] | None = None,
  )
  ```
 
@@ -314,8 +319,6 @@ Used to define extensions in `summon`
 
  - `col_type`: one of the `ColType` enum values, `FILTER_ONLY`, `GROUP_BY`, `PICK_SUM`, `NAME_SUM`, `GAME_SUM`, `CARD_ATTR`, and `AGG`. See documentation for `summon` for usage. All columns except `CARD_ATTR` and `AGG` must be derivable at the individual row level on one or both base views. `CARD_ATTR` must be derivable at the individual row level from the card file. `AGG` can depend on any column present after summing over groups, and can include polars Expression aggregations. Arbitrarily long chains of aggregate dependencies are supported.
 
- - `views`: For a column defined at the row level on a view (see col_types above), the views on which it is supported. All col_types except `AGG` must specify at least one base view. For `CARD_ATTR` columns, `views` must be exactly `(View.CARD,)`.
-
  - `expr`: A polars expression giving the derivation of the column value at the first level where it is defined. For `NAME_SUM` columns the `exprMap` attribute must be used instead. `AGG` columns that depend on `NAME_SUM` columns reference the prefix (`cdef.name`) only, since the unpivot has occured prior to selection.
 
  - `exprMap`: A function of card name that returns the expression for a `NAME_SUM` column.
@@ -325,6 +328,8 @@ Used to define extensions in `summon`
  - `version`: When defining a column using a python function, as opposed to Polars expressions, add a unique version number so that the unique hashed signature of the column specification can be derived
  for caching purposes, since Polars cannot generate a serialization natively. When changing the definition, be sure to increment the version value. Otherwise you do not need to use this parameter.
 
+ - `views`: Not needed for custom columns.
+
  ### Columns
 
  A table of all included columns. Columns can be referenced by enum or by string value in arguments and filter specs. The string value is always the lowercase version of the enum attribute.
@@ -417,6 +422,8 @@ A table of all included columns. Columns can be referenced by enum or by string
  | `TOUGHNESS` | `"toughness"` | `CARD` | `CARD_ATTR` | | Float |
  | `IS_BONUS_SHEET` | `"is_bonus_sheet"` | `CARD` | `CARD_ATTR` | `SET_CODE` != `EXPANSION` | Boolean |
  | `IS_DFC` | `"is_dfc"` | `CARD` | `CARD_ATTR` | Includes split cards | Boolean |
+ | `ORACLE_TEXT` | `"oracle_text"` | `CARD` | `CARD_ATTR` | | String |
+ | `CARD_JSON` | `"card_json"` | `CARD` | `CARD_ATTR` | The full dump of the mtgjson entry for the card as printed in the draft booster | String |
  | `PICKED_MATCH_WR` | `"picked_match_wr"` | | `AGG` | `EVENT_MATCH_WINS` / `EVENT_MATCHES` | Float |
  | `TROPHY_RATE` | `"trophy_rate"` | | `AGG` || Float |
  | `GAME_WR` | `"game_wr"` | | `AGG` | `NUM_WON` / `NUM_GAMES` | Float |
@@ -443,7 +450,7 @@ A table of all included columns. Columns can be referenced by enum or by string
  | `GP_WR_Z` | `"gp_wr_z"` | | `AGG` | `GP_WR_EXCESS` / `GP_WR_STDEV` | Float |
  | `GIH_TOTAL` | `"gih_total"` | | `AGG` | Sum `NUM_GIH` over all rows and broadcast back to row level| Float |
  | `WON_GIH_TOTAL` | `"won_gih_total"` | | `AGG` | | Float |
- | `GIH_WR_MEAN` | `"gih_wr_mean"` | | `AGG` | `GIH_WR - GIH_WR_MEAN` | Float |
+ | `GIH_WR_MEAN` | `"gih_wr_mean"` | | `AGG` | `WON_GIH_TOTAL` / `GIH_TOTAL` | Float |
  | `GIH_WR_EXCESS` | `"gih_wr_excess"` | | `AGG` | `GIH_WR - GIH_WR_MEAN` | Float |
  | `GIH_WR_VAR` | `"gih_wr_var"` | | `AGG` | Game-weighted Variance | Float |
  | `GIH_WR_STDEV` | `"gh_wr_stdev"` | | `AGG` | Sqrt of `GIH_WR_VAR` | Float |
@@ -452,6 +459,7 @@ A table of all included columns. Columns can be referenced by enum or by string
  # Roadmap to 1.0
 
  - [ ] Support Traditional and Premier datasets (currently only Premier is supported)
+ - [ ] Group by all
  - [ ] Enable configuration using $XDG_CONFIG_HOME/cfg.toml
  - [ ] Support min and max aggregations over base views
  - [ ] Enhanced profiling
@@ -0,0 +1,15 @@
+ spells/__init__.py,sha256=QCPWQySUK2SZtCU-mSZLsn7vrNLJMDsRwil8gmAzmdk,151
+ spells/cache.py,sha256=4v7h8D3TtaT0R_EdiRNhdcQrXzdH_CukezO6oAXvNEY,2956
+ spells/cards.py,sha256=6seKpgI4TlJxI20bvcgt5VpxzsbCnnjWneDWpneJJ6Y,3497
+ spells/columns.py,sha256=Zw7IIR-HX2HCdeYBGn0RmPGzPA1juGfMl050ndtWhlc,22052
+ spells/draft_data.py,sha256=xIr0Jpzv3ktp5AptVFuXlSqTqwGcHE-j1sXNtf064c0,10497
+ spells/enums.py,sha256=BQIZvCSAopiscnKY-Qm_j2BgmPy9R5oj1yeIrjMQM1o,4507
+ spells/external.py,sha256=qe6wOBDhPN4CZNQvYRq6G-OpIZcWTZzJjySgnf2Gu1o,10258
+ spells/filter.py,sha256=J-YTOOAzOQpvIX29tviYL04RVoOUlfsbjBXoQBDCEdQ,3380
+ spells/manifest.py,sha256=XEJwwYJzOEWrQQwka39F9SgZo38hH_Kj4MPY7GOuLvY,6903
+ spells/schema.py,sha256=z8Qn2SiHG4T6YfPsz8xHLGMjU_Ofm76-Vrquh3b9B64,6422
+ spells_mtg-0.2.1.dist-info/METADATA,sha256=nVQ6mCUVv6TYGHaUCGtQD4ABOM3ja4OOEWuFzZhnHnc,41267
+ spells_mtg-0.2.1.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
+ spells_mtg-0.2.1.dist-info/entry_points.txt,sha256=a9Y1omdl9MdnKuIj3aOodgrp-zZII6OCdvqwgP6BFvI,63
+ spells_mtg-0.2.1.dist-info/licenses/LICENSE,sha256=tS54XYbJSgmq5zuHhbsQGbNQLJPVgXqhF5nu2CSRMig,1068
+ spells_mtg-0.2.1.dist-info/RECORD,,
@@ -1,15 +0,0 @@
- spells/__init__.py,sha256=EcI7ijXYvPA8jj7wUZqs6CSWr__MD8AOXhkex-Hj37E,131
- spells/cache.py,sha256=4v7h8D3TtaT0R_EdiRNhdcQrXzdH_CukezO6oAXvNEY,2956
- spells/cards.py,sha256=CP8f3rUBuFQlj6xj2W72-mDQPuzozEcMu3PkRtBeX1Q,3191
- spells/columns.py,sha256=BEYKDpZYkRgV1kBqIq6XC6kXvVjOih7jGthIpy-YVSw,23339
- spells/draft_data.py,sha256=xoGn6QGaAKgIInEzi2pnvk4CH5xNrwYq4mAj3r07GfA,9272
- spells/enums.py,sha256=Idpv17rCAHDF5bENU8I8V0rpL9W5KB2GM6lhrN4GunU,4447
- spells/external.py,sha256=Apv_l32swO9bF8ihWV73cd7zpzHgxqFy8ZZa1n3XUUE,10260
- spells/filter.py,sha256=J-YTOOAzOQpvIX29tviYL04RVoOUlfsbjBXoQBDCEdQ,3380
- spells/manifest.py,sha256=XEJwwYJzOEWrQQwka39F9SgZo38hH_Kj4MPY7GOuLvY,6903
- spells/schema.py,sha256=z8Qn2SiHG4T6YfPsz8xHLGMjU_Ofm76-Vrquh3b9B64,6422
- spells_mtg-0.0.5.dist-info/METADATA,sha256=2vVu-vT2tl0Sq-A4E10uOXl2io56NSFNnIiLtPeA3yg,40918
- spells_mtg-0.0.5.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
- spells_mtg-0.0.5.dist-info/entry_points.txt,sha256=a9Y1omdl9MdnKuIj3aOodgrp-zZII6OCdvqwgP6BFvI,63
- spells_mtg-0.0.5.dist-info/licenses/LICENSE,sha256=tS54XYbJSgmq5zuHhbsQGbNQLJPVgXqhF5nu2CSRMig,1068
- spells_mtg-0.0.5.dist-info/RECORD,,