architect-py 5.0.0b1__py3-none-any.whl → 5.0.0b3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- architect_py/__init__.py +10 -3
- architect_py/async_client.py +291 -174
- architect_py/client_interface.py +19 -18
- architect_py/common_types/order_dir.py +12 -6
- architect_py/graphql_client/__init__.py +2 -0
- architect_py/graphql_client/enums.py +5 -0
- architect_py/grpc/__init__.py +25 -7
- architect_py/grpc/client.py +13 -5
- architect_py/grpc/models/Accounts/AccountsRequest.py +4 -1
- architect_py/grpc/models/Algo/AlgoOrder.py +114 -0
- architect_py/grpc/models/Algo/{ModifyAlgoOrderRequestForTwapAlgo.py → AlgoOrderRequest.py} +11 -10
- architect_py/grpc/models/Algo/AlgoOrdersRequest.py +72 -0
- architect_py/grpc/models/Algo/AlgoOrdersResponse.py +27 -0
- architect_py/grpc/models/Algo/CreateAlgoOrderRequest.py +56 -0
- architect_py/grpc/models/Algo/PauseAlgoRequest.py +42 -0
- architect_py/grpc/models/Algo/PauseAlgoResponse.py +20 -0
- architect_py/grpc/models/Algo/StartAlgoRequest.py +42 -0
- architect_py/grpc/models/Algo/StartAlgoResponse.py +20 -0
- architect_py/grpc/models/Algo/StopAlgoRequest.py +42 -0
- architect_py/grpc/models/Algo/StopAlgoResponse.py +20 -0
- architect_py/grpc/models/Boss/DepositsRequest.py +40 -0
- architect_py/grpc/models/Boss/DepositsResponse.py +27 -0
- architect_py/grpc/models/Boss/RqdAccountStatisticsRequest.py +42 -0
- architect_py/grpc/models/Boss/RqdAccountStatisticsResponse.py +25 -0
- architect_py/grpc/models/Boss/StatementUrlRequest.py +40 -0
- architect_py/grpc/models/Boss/StatementUrlResponse.py +23 -0
- architect_py/grpc/models/Boss/StatementsRequest.py +40 -0
- architect_py/grpc/models/Boss/StatementsResponse.py +27 -0
- architect_py/grpc/models/Boss/WithdrawalsRequest.py +40 -0
- architect_py/grpc/models/Boss/WithdrawalsResponse.py +27 -0
- architect_py/grpc/models/Boss/__init__.py +2 -0
- architect_py/grpc/models/Folio/HistoricalFillsRequest.py +4 -1
- architect_py/grpc/models/Marketdata/L1BookSnapshot.py +16 -2
- architect_py/grpc/models/Oms/Cancel.py +67 -19
- architect_py/grpc/models/Oms/Order.py +4 -11
- architect_py/grpc/models/Oms/PlaceOrderRequest.py +13 -20
- architect_py/grpc/models/OptionsMarketdata/OptionsChain.py +30 -0
- architect_py/grpc/models/OptionsMarketdata/OptionsChainGreeks.py +30 -0
- architect_py/grpc/models/OptionsMarketdata/OptionsChainGreeksRequest.py +47 -0
- architect_py/grpc/models/OptionsMarketdata/OptionsChainRequest.py +45 -0
- architect_py/grpc/models/OptionsMarketdata/OptionsExpirations.py +29 -0
- architect_py/grpc/models/OptionsMarketdata/OptionsExpirationsRequest.py +42 -0
- architect_py/grpc/models/OptionsMarketdata/__init__.py +2 -0
- architect_py/grpc/models/Symbology/ExecutionInfoRequest.py +47 -0
- architect_py/grpc/models/Symbology/ExecutionInfoResponse.py +27 -0
- architect_py/grpc/models/definitions.py +457 -790
- architect_py/grpc/resolve_endpoint.py +4 -1
- architect_py/internal_utils/__init__.py +0 -0
- architect_py/internal_utils/no_pandas.py +3 -0
- architect_py/tests/conftest.py +11 -6
- architect_py/tests/test_marketdata.py +19 -19
- architect_py/tests/test_orderflow.py +31 -28
- {architect_py-5.0.0b1.dist-info → architect_py-5.0.0b3.dist-info}/METADATA +2 -3
- {architect_py-5.0.0b1.dist-info → architect_py-5.0.0b3.dist-info}/RECORD +72 -42
- {architect_py-5.0.0b1.dist-info → architect_py-5.0.0b3.dist-info}/WHEEL +1 -1
- examples/book_subscription.py +2 -3
- examples/candles.py +3 -3
- examples/common.py +29 -20
- examples/external_cpty.py +4 -4
- examples/funding_rate_mean_reversion_algo.py +14 -20
- examples/order_sending.py +32 -33
- examples/stream_l1_marketdata.py +2 -2
- examples/stream_l2_marketdata.py +1 -3
- examples/trades.py +2 -2
- examples/tutorial_async.py +9 -7
- examples/tutorial_sync.py +5 -5
- scripts/generate_functions_md.py +3 -1
- scripts/generate_sync_interface.py +30 -11
- scripts/postprocess_grpc.py +21 -11
- scripts/preprocess_grpc_schema.py +174 -113
- architect_py/grpc/models/Algo/AlgoOrderForTwapAlgo.py +0 -61
- architect_py/grpc/models/Algo/CreateAlgoOrderRequestForTwapAlgo.py +0 -59
- {architect_py-5.0.0b1.dist-info → architect_py-5.0.0b3.dist-info}/licenses/LICENSE +0 -0
- {architect_py-5.0.0b1.dist-info → architect_py-5.0.0b3.dist-info}/top_level.txt +0 -0
scripts/preprocess_grpc_schema.py

@@ -1,24 +1,21 @@
 import argparse
 import json
+import logging
 import os
 import re
-from typing import Any, Dict, List
+from typing import Any, Dict, List
+
+logging.basicConfig(level=logging.WARN)

 # ---------------------------------------------------------------------
 # Constants and Regular Expressions
 # ---------------------------------------------------------------------

-
-SINGLE_ALL_OF_DECIMAL = re.compile(
-    r'"allOf":\s*\[\s*\{\s*'
-    r'(?P<indent>[ \t]*)"type":\s*"number",\s*'
-    r'(?P=indent)"format":\s*"decimal"\s*'
-    r"\}\s*\]",
-    flags=re.MULTILINE,
-)
-CAMEL_CASE_RE = re.compile(r"(?<!^)(?=[A-Z])")
+SNAKE_CASE_RE = re.compile(r"(?<!^)(?=[A-Z])")
 EXTRACT_REF_RE = re.compile(r'("\$ref":\s*"#/definitions/)([^"]+)(")')

+TAG_PATTERN = re.compile(r"<!--\s*py:\s*(.*?)\s*-->")
+

 def replace_and_indent(match: re.Match) -> str:
     indent = match.group(1)
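The regex itself is unchanged by the CAMEL_CASE_RE → SNAKE_CASE_RE rename; it is the usual zero-width split before interior capitals. A quick sanity check of what it does (the input string is arbitrary):

import re

SNAKE_CASE_RE = re.compile(r"(?<!^)(?=[A-Z])")

# Insert an underscore before each capital that is not at the start, then lowercase.
assert SNAKE_CASE_RE.sub("_", "AlgoOrderRequest").lower() == "algo_order_request"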
@@ -36,18 +33,6 @@ def _apply_type_fixes_to_text(
     """
     Perform string replacements and regex substitutions on the JSON text.
     """
-    replacements = {
-        "uint32": "default",
-        "uint64": "default",
-        '"format": "int"': '"format": "default"',
-        '"format": "partial-date-time"': '"format": "time"',
-    }
-    for old, new in replacements.items():
-        text = text.replace(old, new)
-
-    # Fix Decimal references with proper indenting.
-    text = DECIMAL_RE.sub(replace_and_indent, text)
-    text = SINGLE_ALL_OF_DECIMAL.sub(replace_and_indent, text)

     def replace_ref(match: re.Match) -> str:
         prefix, class_title, suffix = match.groups()
@@ -86,15 +71,31 @@ def apply_type_fixes(
 # ---------------------------------------------------------------------


-def parse_class_description(text: str) -> Tuple[Dict[str, str], str]:
+def clean_metadata_from_description(schema: Dict):
+    text = schema.get("description", "")
+    new_description = TAG_PATTERN.sub("", text).strip()
+
+    if new_description:
+        schema["description"] = new_description
+    else:
+        schema.pop("description", None)
+
+    for definitions in schema.get("definitions", {}).values():
+        definitions["description"] = TAG_PATTERN.sub(
+            "", definitions.get("description", "")
+        ).strip()
+        if not definitions["description"]:
+            definitions.pop("description", None)
+
+
+def parse_class_description(text: str) -> Dict[str, str]:
     """
     Parse metadata from a special comment in the description.
     Expected format: <!-- py: key1=value1, key2=value2 -->
     """
-
-    match = re.search(pattern, text, re.DOTALL)
+    match = TAG_PATTERN.search(text)
     if not match:
-
+        return {}

     metadata_str = match.group(1)
     metadata: Dict[str, str] = {}
@@ -107,8 +108,7 @@ def parse_class_description(text: str) -> Tuple[Dict[str, str], str]:
         key, value = map(str.strip, pair.split("=", 1))
         metadata[key] = value

-
-    return metadata, cleaned_text
+    return metadata


 def correct_flattened_types(schema: Dict[str, Any]) -> None:
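For orientation: the new TAG_PATTERN / parse_class_description pair reads build metadata out of a schema description comment of the form <!-- py: key1=value1, key2=value2 -->. A minimal self-contained sketch of that parsing (the loop over comma-separated pairs follows the docstring's stated format; the sample description string is made up):

import re
from typing import Dict

TAG_PATTERN = re.compile(r"<!--\s*py:\s*(.*?)\s*-->")


def parse_class_description(text: str) -> Dict[str, str]:
    # Pull key=value pairs out of the <!-- py: ... --> comment, if present.
    match = TAG_PATTERN.search(text)
    if not match:
        return {}
    metadata: Dict[str, str] = {}
    for pair in match.group(1).split(","):
        key, value = map(str.strip, pair.split("=", 1))
        metadata[key] = value
    return metadata


# Hypothetical description string, for illustration only.
description = "Order type. <!-- py: tag=t, schema=repr -->"
assert parse_class_description(description) == {"tag": "t", "schema": "repr"}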
@@ -120,6 +120,9 @@ def correct_flattened_types(schema: Dict[str, Any]) -> None:
     while we want 1 type.

     Removes the oneOf list and merges common keys and additional properties.
+
+    if the flatten field is not the tag, enum_variant_to_other_required_keys
+    will be in the json but will be empty.
     """
     if "oneOf" not in schema or "required" not in schema:
         return
@@ -127,12 +130,19 @@
     one_of: List[Dict[str, Any]] = schema.pop("oneOf")
     additional_properties: Dict[str, Any] = {}

-    enum_tag: str = ""
     enum_value_to_required: Dict[str, List[str]] = {}
-
+
+    description = schema.get("description", "")
+    metadata = parse_class_description(description)
+
+    try:
+        field_name, field_title, enum_type_name = metadata["unflatten"].split("/")
+    except KeyError:
+        raise KeyError(f"Missing 'flatten' metadata in {schema['title']}")
+
     enum_tag_property: Dict[str, Any] = {
         "type": "string",
-        "title":
+        "title": field_title,
         "enum": [],
     }

@@ -144,10 +154,6 @@ def correct_flattened_types(schema: Dict[str, Any]) -> None:
         required = item.get("required", [])
         for key, prop in properties.items():
             if "enum" in prop:
-                if not enum_tag:
-                    enum_tag = key
-                else:
-                    assert enum_tag == key, f"Enum field mismatch in {schema['title']}"
                 [enum_value] = prop["enum"]
                 enum_tag_property["enum"].append(enum_value)
                 enum_value_to_required[enum_value] = required
@@ -159,18 +165,25 @@ def correct_flattened_types(schema: Dict[str, Any]) -> None:
             else:
                 additional_properties[key] = prop

-    if not
+    if not field_name:
         raise ValueError(f"Enum value not found in {schema['title']}")

-    sets = [set(group["required"]) for group in one_of]
+    sets: list[set[str]] = [set(group["required"]) for group in one_of]
     common_keys: list[str] = list(set.intersection(*sets)) if sets else []
     common_keys.sort()
     schema["required"].extend(common_keys)

+    if tag_field := metadata.get("tag"):
+        schema["tag_field"] = tag_field
+
     schema["properties"].update(additional_properties)
-    schema["properties"][
-
-
+    schema["properties"][field_name] = {
+        "title": field_title,
+        "$ref": f"#/definitions/{enum_type_name}",
+    }
+
+    schema["definitions"][enum_type_name] = enum_tag_property
+    schema["enum_variant_to_other_required_keys"] = enum_value_to_required


 def correct_variant_types(
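The unflatten metadata consumed by correct_flattened_types packs three names into one /-separated string: the property to materialize, its title, and the name of the generated enum definition. A sketch of the split and the resulting $ref wiring, using a made-up kind/Kind/KindEnum value and invented enum values:

from typing import Any, Dict

# Hypothetical metadata as parse_class_description would return it.
metadata = {"unflatten": "kind/Kind/KindEnum"}

field_name, field_title, enum_type_name = metadata["unflatten"].split("/")

# enum_tag_property is accumulated from the oneOf variants' single-value enums;
# the values here are invented for illustration.
enum_tag_property: Dict[str, Any] = {
    "type": "string",
    "title": field_title,
    "enum": ["limit", "market"],
}

schema: Dict[str, Any] = {"properties": {}, "definitions": {}}
schema["properties"][field_name] = {
    "title": field_title,
    "$ref": f"#/definitions/{enum_type_name}",
}
schema["definitions"][enum_type_name] = enum_tag_property

assert schema["properties"]["kind"]["$ref"] == "#/definitions/KindEnum"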
@@ -196,20 +209,15 @@
     }
     """

-
-    if one_of_key not in schema or "required" in schema:
+    if "oneOf" not in schema or "required" in schema:
         return

     description = schema.get("description", "")
-    metadata
-    if new_description.strip():
-        schema["description"] = new_description.strip()
-    else:
-        schema.pop("description", None)
+    metadata = parse_class_description(description)

-    tag_field = metadata["tag"]
+    tag_field: str = metadata["tag"]
     new_one_of: List[Dict[str, Any]] = []
-    for item in schema[
+    for item in schema["oneOf"]:
         item["required"].remove(tag_field)
         [tag_value] = item["properties"].pop(tag_field)["enum"]
         title = item.pop("title")
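As a concrete picture of the per-variant handling above: the tag metadata names the internally-tagged discriminator, which is dropped from each variant's required list and popped out of its properties to recover the single tag value. A sketch on one invented variant:

from typing import Any, Dict

tag_field = "k"  # hypothetical value of metadata["tag"]

# One oneOf variant in the internally-tagged shape the generator emits (values invented).
item: Dict[str, Any] = {
    "title": "Limit",
    "type": "object",
    "required": ["k", "limit_price"],
    "properties": {
        "k": {"type": "string", "enum": ["limit"]},
        "limit_price": {"type": "number"},
    },
}

# Mirrors the three lines shown above.
item["required"].remove(tag_field)
[tag_value] = item["properties"].pop(tag_field)["enum"]
title = item.pop("title")

assert (tag_value, title, item["required"]) == ("limit", "Limit", ["limit_price"])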
@@ -248,7 +256,7 @@ def correct_variant_types(

         new_one_of.append(enum_ref)

-    schema[
+    schema["oneOf"] = new_one_of
     schema["tag_field"] = tag_field

@@ -348,24 +356,9 @@ def correct_enums_with_multiple_titles(schema: Dict[str, Any]) -> None:
     }


-def correct_enums_with_x_enumNames(schema: Dict[str, Any]) -> None:
+def correct_repr_enums_with_x_enumNames(schema: Dict[str, Any]) -> None:
     """
-
-    "FillKind": {
-        "type": "integer",
-        "enum": [
-            0,
-            1,
-            2
-        ],
-        "x-enumNames": [
-            "Normal",
-            "Reversal",
-            "Correction"
-        ]
-    },
-    this should actually be a string enum, the values of the integers actually do not matter
-    the names and values should be x-enumNames
+    This should currently not do anything
     """
     if "definitions" not in schema:
         return
@@ -374,16 +367,20 @@ def correct_enums_with_x_enumNames(schema: Dict[str, Any]) -> None:
     for t, definition in definitions.items():
         if "x-enumNames" not in definition:
             continue
-
-
-
-
-
-
-
-
-
-
+        description = definition.get("description", "")
+        metadata = parse_class_description(description)
+
+        if metadata.get("schema") == "repr":
+            assert definition["type"] == "integer"
+            enum_names: list[str] = definition["x-enumNames"]
+            enum_ints: list[int] = definition.pop("enum")
+            definition["old_enum"] = enum_ints
+            if len(enum_names) != len(enum_ints):
+                raise ValueError(
+                    f"Enum names and values length mismatch in {t} in {schema['title']}"
+                )
+            definition["enum"] = enum_names
+            definition["type"] = "string"


 def correct_enums_with_descriptions(schema: Dict[str, Any]) -> None:
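The deleted docstring used FillKind as the motivating case: an integer enum whose x-enumNames (Normal, Reversal, Correction) are the values of interest. The new schema=repr branch rewrites such a definition into a string enum, keeping the integers as old_enum; note the call is commented out in process_schema_definitions below, so the pass is currently inert. A sketch of the rewrite on that sample definition:

from typing import Any, Dict

# Integer enum as described in the removed docstring.
definition: Dict[str, Any] = {
    "type": "integer",
    "enum": [0, 1, 2],
    "x-enumNames": ["Normal", "Reversal", "Correction"],
}

# Same steps as the new repr branch.
enum_names = definition["x-enumNames"]
enum_ints = definition.pop("enum")
definition["old_enum"] = enum_ints
if len(enum_names) != len(enum_ints):
    raise ValueError("Enum names and values length mismatch")
definition["enum"] = enum_names
definition["type"] = "string"

assert definition == {
    "type": "string",
    "x-enumNames": ["Normal", "Reversal", "Correction"],
    "old_enum": [0, 1, 2],
    "enum": ["Normal", "Reversal", "Correction"],
}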
@@ -457,6 +454,8 @@ def correct_null_types_with_constraints(schema: Dict[str, Any]) -> None:
     if "properties" in schema:
         properties = schema["properties"]
         for prop_def in properties.values():
+            if isinstance(prop_def, bool):
+                continue
             if "type" in prop_def and "null" in prop_def["type"]:
                 for constraint in constraints:
                     if constraint in prop_def:
@@ -467,12 +466,56 @@ def correct_null_types_with_constraints(schema: Dict[str, Any]) -> None:
         for definition in definitions.values():
             properties = definition.get("properties", {})
             for prop_def in properties.values():
+                if isinstance(prop_def, bool):
+                    continue
                 if "type" in prop_def and "null" in prop_def["type"]:
                     for constraint in constraints:
                         if constraint in prop_def:
                             prop_def.pop(constraint)


+def correct_type_from_description(schema: Dict[str, Any]) -> None:
+    """
+    on the rust side, due to
+    derive SerializeDisplay and DeserializeFromStr
+    classes can have different types than normal
+    in the case of OrderId, it is a string
+
+    "OrderId": {
+        "description": "System-unique, persistent order identifiers",
+        "type": "object",
+        "required": [
+            "seqid",
+            "seqno"
+        ],
+        "properties": {
+            "seqid": {
+                "type": "string",
+                "format": "uuid"
+            },
+            "seqno": {
+                "type": "integer",
+                "format": "default",
+                "minimum": 0.0
+            }
+        }
+    },
+    """
+    if "definitions" not in schema:
+        return
+
+    definitions: dict[str, Any] = schema["definitions"]
+    for t, definition in definitions.items():
+        description = definition.get("description", "")
+        metadata = parse_class_description(description)
+
+        new_type = metadata.get("type")
+        if new_type is not None:
+            definition["type"] = new_type
+            definition.pop("required")
+            definition.pop("properties")
+
+
 def process_schema_definitions(
     schema: Dict[str, Any],
     definitions: Dict[str, Any],
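The OrderId JSON in the docstring above is the motivating case: on the Rust side it serializes as a plain string (SerializeDisplay/DeserializeFromStr), so a type entry in the description metadata overrides the object schema. A sketch of that override, assuming the metadata carries type=string (the OrderId shape comes from the docstring; the metadata value is an assumption):

from typing import Any, Dict

# OrderId definition as shown in the docstring above.
definition: Dict[str, Any] = {
    "description": "System-unique, persistent order identifiers",
    "type": "object",
    "required": ["seqid", "seqno"],
    "properties": {
        "seqid": {"type": "string", "format": "uuid"},
        "seqno": {"type": "integer", "format": "default", "minimum": 0.0},
    },
}

# Assumed metadata extracted from the description comment.
metadata = {"type": "string"}

new_type = metadata.get("type")
if new_type is not None:
    # Collapse the object schema to the scalar type the Rust side actually emits.
    definition["type"] = new_type
    definition.pop("required")
    definition.pop("properties")

assert definition["type"] == "string"
assert "properties" not in definition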
@@ -480,20 +523,19 @@ def process_schema_definitions(
 ) -> None:
     """
     Extract and process definitions from a schema.
-    Updates the "Decimal" format and applies enum corrections.
     """
-    if "definitions" not in schema:
-        return
-
-    if "Decimal" in schema["definitions"]:
-        schema["definitions"]["Decimal"]["format"] = "decimal"
-
     correct_enums_with_multiple_titles(schema)
-
+    # correct_repr_enums_with_x_enumNames(schema)
     correct_enums_with_descriptions(schema)
     correct_variant_types(schema, definitions, type_to_json_file)
     correct_flattened_types(schema)
     correct_null_types_with_constraints(schema)
+    correct_type_from_description(schema)
+
+    clean_metadata_from_description(schema)
+
+    if "definitions" not in schema:
+        return

     new_defs: dict[str, Any] = schema.pop("definitions")
     for t, definition in new_defs.items():
@@ -517,8 +559,7 @@ def add_info_to_schema(services: List[Dict[str, Any]]) -> Dict[str, str]:
     """
     type_to_json_file: Dict[str, str] = {}
     for service in services:
-        service_name = service["name"]
-        service["service_name"] = service_name
+        service_name = service["name"]

         for rpc in service["rpcs"]:
             req_schema = rpc["request_type"]
@@ -554,7 +595,7 @@ def write_json_file(data: Dict[Any, Any], path: str) -> None:
         json.dump(data, out_file, indent=2)


-def process_schema(
+def preprocess_rpc_schema(
     schema: Dict[str, Any],
     definitions: Dict[str, Any],
     type_to_json_file: Dict[str, str],
@@ -567,23 +608,26 @@ def process_schema(
     return apply_type_fixes(schema, is_definitions_file, type_to_json_file)


-def process_service(
+def preprocess_service_schemas(
     service: Dict[str, Any],
     output_dir: str,
     definitions: Dict[str, Any],
     type_to_json_file: Dict[str, str],
 ) -> None:
     """
+    Pre-process each service schema.
     Process each RPC within a service (both request and response) and write the resulting schema files.
     """
-    service_name = service["
+    service_name = service["name"]
     service_dir = os.path.join(output_dir, service_name)
     os.makedirs(service_dir, exist_ok=True)

     for rpc in service["rpcs"]:
+        logging.debug(f"Processing {service_name} {rpc['type']} RPC: {rpc['route']}")
         for key in ["request_type", "response_type"]:
             schema = rpc[key]
-
+            logging.debug(f"Processing {schema['title']}")
+            processed_schema = preprocess_rpc_schema(
                 schema, definitions, type_to_json_file, False
             )
             schema_title = processed_schema["title"]
@@ -591,20 +635,47 @@ def process_service(
         write_json_file(processed_schema, file_path)


-def preprocess_json(input_file: str, output_dir: str) -> None:
+def preprocess_schemas(input_file: str, output_dir: str) -> None:
     """
-
-
+    Rust build generates a JSON file containing RPC definitions and schemas
+    for each service. The top level object is an array of service definitions.
+
+    We split out each service and each schema within each service into separate
+    JSON schema files for post-processing. Simple corrections to the schema
+    are made here as well.
     """
     os.makedirs(output_dir, exist_ok=True)
+
     with open(input_file, "r") as f:
-
+        input_s = f.read()
+
+    replacements = {
+        # Suppress UserWarning: format of 'uint32' not understood for 'integer' - using default
+        "uint32": "default",
+        "uint64": "default",
+        # Suppress UserWarning: format of 'int' not understood for 'integer' - using default
+        '"format": "int",': '"format": "default",',
+        '"format": "int"': '"format": "default"',
+        # Manually map partial-date-time format to time format (python str -> datetime.time)
+        '"format": "partial-date-time",': '"format": "time",',
+        '"format": "partial-date-time"': '"format": "time"',
+        # Add decimal format hint to all regex patterns that look like decimals;
+        # drop the original string pattern validation as redundant.
+        '"pattern": "^-?[0-9]+(\\\\.[0-9]+)?$",': '"format": "decimal",',
+        '"pattern": "^-?[0-9]+(\\\\.[0-9]+)?$"': '"format": "decimal"',
+    }
+
+    for old, new in replacements.items():
+        input_s = input_s.replace(old, new)
+
+    services = json.loads(input_s)

     type_to_json_file = add_info_to_schema(services)
     definitions: Dict[str, Any] = {}

     for service in services:
-
+        logging.debug(f"Processing service {service['name']}")
+        preprocess_service_schemas(service, output_dir, definitions, type_to_json_file)

     # we look at the line level to fix definitions
     fixed_definitions = apply_type_fixes(definitions, True, type_to_json_file)
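These replacements run on the raw schema text before json.loads, downgrading integer formats the Python codegen does not understand and mapping partial-date-time to time. A quick illustration on a made-up fragment:

import json

# Invented fragment in the shape the Rust build emits.
raw = '{"start": {"type": "string", "format": "partial-date-time"}, "count": {"type": "integer", "format": "int"}}'

replacements = {
    '"format": "partial-date-time"': '"format": "time"',
    '"format": "int"': '"format": "default"',
}
for old, new in replacements.items():
    raw = raw.replace(old, new)

fixed = json.loads(raw)
assert fixed["start"]["format"] == "time"
assert fixed["count"]["format"] == "default"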
@@ -617,30 +688,20 @@ def preprocess_json(input_file: str, output_dir: str) -> None:
     write_json_file(fixed_definitions, definitions_path)


-# ---------------------------------------------------------------------
-# Main Entry Point
-# ---------------------------------------------------------------------
-
-
 def main() -> None:
-    parser = argparse.ArgumentParser(description="
+    parser = argparse.ArgumentParser(description="Preprocess gRPC JSON schema file.")
     parser.add_argument(
-        "--
-        type=str,
-        default="~/architect",
-        help="Path to the architect directory containing the api/schema.json file.",
+        "--schema", type=str, required=True, help="JSON schema file to preprocess."
     )
     parser.add_argument(
-        "--
+        "--output-dir",
         type=str,
-
-        help="
+        required=True,
+        help="Output path for preprocessed schema.",
     )
     args = parser.parse_args()
-
-
-    input_file = os.path.join(architect_dir, "api/schema.json")
-    preprocess_json(input_file, args.output_dir)
+    schema_file = os.path.expanduser(args.schema)
+    preprocess_schemas(schema_file, args.output_dir)


 if __name__ == "__main__":
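With both flags now required, the script no longer assumes an ~/architect checkout containing api/schema.json; a typical invocation would look something like the following (paths are placeholders):

python scripts/preprocess_grpc_schema.py --schema path/to/schema.json --output-dir path/to/processed_schemas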
architect_py/grpc/models/Algo/AlgoOrderForTwapAlgo.py (deleted)

@@ -1,61 +0,0 @@
-# generated by datamodel-codegen:
-# filename: Algo/AlgoOrderForTwapAlgo.json
-
-from __future__ import annotations
-
-from datetime import datetime
-from typing import List, Optional
-
-from msgspec import Struct
-
-from .. import definitions
-
-
-class AlgoOrderForTwapAlgo(Struct, omit_defaults=True):
-    account: str
-    algo_name: str
-    algo_order_id: definitions.OrderId
-    create_time: datetime
-    params: definitions.TwapParams
-    state: definitions.AlgoState
-    status: definitions.TwapStatus
-    trader: definitions.UserId
-    display_symbols: Optional[List[str]] = None
-    last_error: Optional[str] = None
-    last_error_time: Optional[datetime] = None
-    parent_order_id: Optional[definitions.OrderId] = None
-
-    # Constructor that takes all field titles as arguments for convenience
-    @classmethod
-    def new(
-        cls,
-        account: str,
-        algo_name: str,
-        algo_order_id: definitions.OrderId,
-        create_time: datetime,
-        params: definitions.TwapParams,
-        state: definitions.AlgoState,
-        status: definitions.TwapStatus,
-        trader: definitions.UserId,
-        display_symbols: Optional[List[str]] = None,
-        last_error: Optional[str] = None,
-        last_error_time: Optional[datetime] = None,
-        parent_order_id: Optional[definitions.OrderId] = None,
-    ):
-        return cls(
-            account,
-            algo_name,
-            algo_order_id,
-            create_time,
-            params,
-            state,
-            status,
-            trader,
-            display_symbols,
-            last_error,
-            last_error_time,
-            parent_order_id,
-        )
-
-    def __str__(self) -> str:
-        return f"AlgoOrderForTwapAlgo(account={self.account},algo_name={self.algo_name},algo_order_id={self.algo_order_id},create_time={self.create_time},params={self.params},state={self.state},status={self.status},trader={self.trader},display_symbols={self.display_symbols},last_error={self.last_error},last_error_time={self.last_error_time},parent_order_id={self.parent_order_id})"
architect_py/grpc/models/Algo/CreateAlgoOrderRequestForTwapAlgo.py (deleted)

@@ -1,59 +0,0 @@
-# generated by datamodel-codegen:
-# filename: Algo/CreateAlgoOrderRequestForTwapAlgo.json
-
-from __future__ import annotations
-from architect_py.grpc.models.Algo.AlgoOrderForTwapAlgo import AlgoOrderForTwapAlgo
-
-from typing import Optional
-
-from msgspec import Struct
-
-from .. import definitions
-
-
-class CreateAlgoOrderRequestForTwapAlgo(Struct, omit_defaults=True):
-    algo_name: str
-    params: definitions.TwapParams
-    account: Optional[str] = None
-    algo_order_id: Optional[definitions.OrderId] = None
-    parent_order_id: Optional[definitions.OrderId] = None
-    trader: Optional[definitions.UserId] = None
-
-    # Constructor that takes all field titles as arguments for convenience
-    @classmethod
-    def new(
-        cls,
-        algo_name: str,
-        params: definitions.TwapParams,
-        account: Optional[str] = None,
-        algo_order_id: Optional[definitions.OrderId] = None,
-        parent_order_id: Optional[definitions.OrderId] = None,
-        trader: Optional[definitions.UserId] = None,
-    ):
-        return cls(
-            algo_name,
-            params,
-            account,
-            algo_order_id,
-            parent_order_id,
-            trader,
-        )
-
-    def __str__(self) -> str:
-        return f"CreateAlgoOrderRequestForTwapAlgo(algo_name={self.algo_name},params={self.params},account={self.account},algo_order_id={self.algo_order_id},parent_order_id={self.parent_order_id},trader={self.trader})"
-
-    @staticmethod
-    def get_response_type():
-        return AlgoOrderForTwapAlgo
-
-    @staticmethod
-    def get_unannotated_response_type():
-        return AlgoOrderForTwapAlgo
-
-    @staticmethod
-    def get_route() -> str:
-        return "/json.architect.Algo/CreateTwapAlgoOrder"
-
-    @staticmethod
-    def get_rpc_method():
-        return "unary"
File without changes
File without changes