architect-py 3.2.2__py3-none-any.whl → 5.0.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- architect_py/__init__.py +8 -2
- architect_py/async_client.py +879 -576
- architect_py/client.py +25 -26
- architect_py/client_interface.py +62 -0
- architect_py/common_types/__init__.py +6 -0
- architect_py/common_types/order_dir.py +85 -0
- architect_py/common_types/scalars.py +25 -0
- architect_py/common_types/tradable_product.py +59 -0
- architect_py/graphql_client/client.py +3 -6
- architect_py/graphql_client/fragments.py +3 -6
- architect_py/graphql_client/get_fills_query.py +2 -1
- architect_py/graphql_client/search_symbols_query.py +2 -1
- architect_py/graphql_client/subscribe_orderflow.py +2 -1
- architect_py/graphql_client/subscribe_trades.py +2 -1
- architect_py/grpc/__init__.py +125 -0
- architect_py/grpc/client.py +86 -0
- architect_py/{grpc_client → grpc/models}/Accounts/AccountsRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Accounts/AccountsResponse.py +1 -1
- architect_py/{grpc_client → grpc/models}/Accounts/__init__.py +1 -1
- architect_py/{grpc_client → grpc/models}/Algo/AlgoOrderForTwapAlgo.py +1 -1
- architect_py/{grpc_client → grpc/models}/Algo/CreateAlgoOrderRequestForTwapAlgo.py +2 -2
- architect_py/{grpc_client → grpc/models}/Algo/ModifyAlgoOrderRequestForTwapAlgo.py +2 -2
- architect_py/{grpc_client → grpc/models}/Algo/__init__.py +1 -1
- architect_py/grpc/models/Auth/CreateJwtRequest.py +47 -0
- architect_py/grpc/models/Auth/CreateJwtResponse.py +23 -0
- architect_py/{grpc_client/Cpty → grpc/models/Auth}/__init__.py +1 -1
- architect_py/grpc/models/Core/ConfigRequest.py +37 -0
- architect_py/grpc/models/Core/ConfigResponse.py +25 -0
- architect_py/{grpc_client/Folio → grpc/models/Core}/__init__.py +1 -1
- architect_py/{grpc_client → grpc/models}/Cpty/CptyRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Cpty/CptyResponse.py +3 -3
- architect_py/{grpc_client → grpc/models}/Cpty/CptyStatus.py +1 -1
- architect_py/{grpc_client → grpc/models}/Cpty/CptyStatusRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Cpty/CptysRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Cpty/CptysResponse.py +1 -1
- architect_py/grpc/models/Cpty/__init__.py +2 -0
- architect_py/{grpc_client → grpc/models}/Folio/AccountHistoryRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Folio/AccountHistoryResponse.py +1 -1
- architect_py/{grpc_client → grpc/models}/Folio/AccountSummariesRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Folio/AccountSummariesResponse.py +1 -1
- architect_py/{grpc_client → grpc/models}/Folio/AccountSummary.py +1 -1
- architect_py/{grpc_client → grpc/models}/Folio/AccountSummaryRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Folio/HistoricalFillsRequest.py +3 -3
- architect_py/{grpc_client → grpc/models}/Folio/HistoricalFillsResponse.py +1 -1
- architect_py/{grpc_client → grpc/models}/Folio/HistoricalOrdersRequest.py +3 -3
- architect_py/{grpc_client → grpc/models}/Folio/HistoricalOrdersResponse.py +1 -1
- architect_py/grpc/models/Folio/__init__.py +2 -0
- architect_py/{grpc_client → grpc/models}/Health/HealthCheckRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Health/HealthCheckResponse.py +1 -1
- architect_py/grpc/models/Health/__init__.py +2 -0
- architect_py/{grpc_client → grpc/models}/Marketdata/Candle.py +1 -1
- architect_py/{grpc_client → grpc/models}/Marketdata/HistoricalCandlesRequest.py +11 -8
- architect_py/{grpc_client → grpc/models}/Marketdata/HistoricalCandlesResponse.py +1 -1
- architect_py/{grpc_client → grpc/models}/Marketdata/L1BookSnapshot.py +36 -3
- architect_py/{grpc_client → grpc/models}/Marketdata/L1BookSnapshotRequest.py +8 -3
- architect_py/{grpc_client → grpc/models}/Marketdata/L1BookSnapshotsRequest.py +6 -3
- architect_py/{grpc_client → grpc/models}/Marketdata/L2BookSnapshot.py +1 -1
- architect_py/{grpc_client → grpc/models}/Marketdata/L2BookSnapshotRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Marketdata/Liquidation.py +2 -2
- architect_py/{grpc_client → grpc/models}/Marketdata/MarketStatus.py +1 -1
- architect_py/{grpc_client → grpc/models}/Marketdata/MarketStatusRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Marketdata/SubscribeCandlesRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Marketdata/SubscribeCurrentCandlesRequest.py +3 -4
- architect_py/{grpc_client → grpc/models}/Marketdata/SubscribeL1BookSnapshotsRequest.py +6 -3
- architect_py/{grpc_client → grpc/models}/Marketdata/SubscribeL2BookUpdatesRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Marketdata/SubscribeLiquidationsRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Marketdata/SubscribeManyCandlesRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Marketdata/SubscribeTickersRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Marketdata/SubscribeTradesRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Marketdata/Ticker.py +1 -1
- architect_py/{grpc_client → grpc/models}/Marketdata/TickerRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Marketdata/TickersRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Marketdata/TickersResponse.py +1 -1
- architect_py/{grpc_client → grpc/models}/Marketdata/Trade.py +2 -2
- architect_py/grpc/models/Marketdata/__init__.py +2 -0
- architect_py/{grpc_client → grpc/models}/Oms/Cancel.py +1 -1
- architect_py/{grpc_client → grpc/models}/Oms/CancelAllOrdersRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Oms/CancelAllOrdersResponse.py +1 -1
- architect_py/{grpc_client → grpc/models}/Oms/CancelOrderRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Oms/OpenOrdersRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Oms/OpenOrdersResponse.py +1 -1
- architect_py/{grpc_client → grpc/models}/Oms/Order.py +2 -2
- architect_py/{grpc_client → grpc/models}/Oms/PendingCancelsRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Oms/PendingCancelsResponse.py +1 -1
- architect_py/{grpc_client → grpc/models}/Oms/PlaceOrderRequest.py +3 -3
- architect_py/grpc/models/Oms/__init__.py +2 -0
- architect_py/{grpc_client → grpc/models}/Orderflow/DropcopyRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Orderflow/OrderflowRequest.py +1 -1
- architect_py/{grpc_client → grpc/models}/Orderflow/SubscribeOrderflowRequest.py +2 -2
- architect_py/grpc/models/Orderflow/__init__.py +2 -0
- architect_py/grpc/models/Symbology/DownloadProductCatalogRequest.py +42 -0
- architect_py/grpc/models/Symbology/DownloadProductCatalogResponse.py +27 -0
- architect_py/{grpc_client → grpc/models}/Symbology/PruneExpiredSymbolsRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Symbology/PruneExpiredSymbolsResponse.py +1 -1
- architect_py/{grpc_client → grpc/models}/Symbology/SubscribeSymbology.py +1 -1
- architect_py/{grpc_client → grpc/models}/Symbology/SymbologyRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Symbology/SymbologySnapshot.py +7 -2
- architect_py/{grpc_client → grpc/models}/Symbology/SymbologyUpdate.py +9 -2
- architect_py/{grpc_client → grpc/models}/Symbology/SymbolsRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Symbology/SymbolsResponse.py +1 -1
- architect_py/grpc/models/Symbology/UploadProductCatalogRequest.py +49 -0
- architect_py/grpc/models/Symbology/UploadProductCatalogResponse.py +20 -0
- architect_py/{grpc_client → grpc/models}/Symbology/UploadSymbologyRequest.py +2 -2
- architect_py/{grpc_client → grpc/models}/Symbology/UploadSymbologyResponse.py +1 -1
- architect_py/grpc/models/Symbology/__init__.py +2 -0
- architect_py/grpc/models/__init__.py +2 -0
- architect_py/{grpc_client → grpc/models}/definitions.py +248 -66
- architect_py/grpc/resolve_endpoint.py +67 -0
- architect_py/{grpc_client/grpc_server.py → grpc/server.py} +9 -6
- architect_py/grpc/utils.py +32 -0
- architect_py/tests/conftest.py +86 -87
- architect_py/tests/test_book_building.py +49 -50
- architect_py/tests/test_marketdata.py +168 -0
- architect_py/tests/test_order_entry.py +37 -0
- architect_py/tests/test_orderflow.py +38 -0
- architect_py/tests/test_portfolio_management.py +23 -0
- architect_py/tests/test_rounding.py +28 -28
- architect_py/tests/test_symbology.py +37 -30
- architect_py/utils/nearest_tick.py +2 -5
- architect_py/utils/nearest_tick_2.py +1 -2
- architect_py/utils/orderbook.py +35 -0
- architect_py/utils/pandas.py +44 -0
- architect_py/utils/price_bands.py +0 -3
- architect_py/utils/symbol_parsing.py +29 -0
- architect_py-5.0.0b1.dist-info/METADATA +124 -0
- architect_py-5.0.0b1.dist-info/RECORD +184 -0
- {architect_py-3.2.2.dist-info → architect_py-5.0.0b1.dist-info}/WHEEL +2 -1
- architect_py-5.0.0b1.dist-info/top_level.txt +4 -0
- examples/__init__.py +0 -0
- examples/book_subscription.py +53 -0
- examples/candles.py +30 -0
- examples/common.py +107 -0
- examples/external_cpty.py +77 -0
- examples/funding_rate_mean_reversion_algo.py +192 -0
- examples/order_sending.py +92 -0
- examples/stream_l1_marketdata.py +25 -0
- examples/stream_l2_marketdata.py +40 -0
- examples/trades.py +21 -0
- examples/tutorial_async.py +84 -0
- examples/tutorial_sync.py +95 -0
- scripts/generate_functions_md.py +164 -0
- scripts/generate_sync_interface.py +207 -0
- scripts/postprocess_grpc.py +594 -0
- scripts/preprocess_grpc_schema.py +647 -0
- templates/exceptions.py +83 -0
- templates/juniper_base_client.py +371 -0
- architect_py/client_protocol.py +0 -53
- architect_py/grpc_client/Health/__init__.py +0 -2
- architect_py/grpc_client/Marketdata/__init__.py +0 -2
- architect_py/grpc_client/Oms/__init__.py +0 -2
- architect_py/grpc_client/Orderflow/__init__.py +0 -2
- architect_py/grpc_client/Symbology/__init__.py +0 -2
- architect_py/grpc_client/__init__.py +0 -2
- architect_py/grpc_client/grpc_client.py +0 -413
- architect_py/scalars.py +0 -172
- architect_py/tests/test_accounts.py +0 -31
- architect_py/tests/test_client.py +0 -29
- architect_py/tests/test_grpc_client.py +0 -30
- architect_py/tests/test_order_sending.py +0 -65
- architect_py/tests/test_snapshots.py +0 -52
- architect_py/tests/test_subscriptions.py +0 -126
- architect_py-3.2.2.dist-info/METADATA +0 -191
- architect_py-3.2.2.dist-info/RECORD +0 -148
- /architect_py/{grpc_client → grpc/models}/Marketdata/ArrayOfL1BookSnapshot.py +0 -0
- /architect_py/{grpc_client → grpc/models}/Marketdata/L2BookUpdate.py +0 -0
- /architect_py/{grpc_client → grpc/models}/Marketdata/TickerUpdate.py +0 -0
- /architect_py/{grpc_client → grpc/models}/Orderflow/Dropcopy.py +0 -0
- /architect_py/{grpc_client → grpc/models}/Orderflow/Orderflow.py +0 -0
- {architect_py-3.2.2.dist-info → architect_py-5.0.0b1.dist-info/licenses}/LICENSE +0 -0
scripts/preprocess_grpc_schema.py
@@ -0,0 +1,647 @@
import argparse
import json
import os
import re
from typing import Any, Dict, List, Tuple

# ---------------------------------------------------------------------
# Constants and Regular Expressions
# ---------------------------------------------------------------------

DECIMAL_RE = re.compile(r'(^\s*)"\$ref": "#/definitions/Decimal"', flags=re.MULTILINE)
SINGLE_ALL_OF_DECIMAL = re.compile(
    r'"allOf":\s*\[\s*\{\s*'
    r'(?P<indent>[ \t]*)"type":\s*"number",\s*'
    r'(?P=indent)"format":\s*"decimal"\s*'
    r"\}\s*\]",
    flags=re.MULTILINE,
)
CAMEL_CASE_RE = re.compile(r"(?<!^)(?=[A-Z])")
EXTRACT_REF_RE = re.compile(r'("\$ref":\s*"#/definitions/)([^"]+)(")')


def replace_and_indent(match: re.Match) -> str:
    indent = match.group(1)
    return f'{indent}"type": "number",\n{indent}"format": "decimal"'


# ---------------------------------------------------------------------
# Type And Ref Fixing Functions
# ---------------------------------------------------------------------


def _apply_type_fixes_to_text(
    text: str, is_definitions_file: bool, type_to_json_file: Dict[str, str]
) -> str:
    """
    Perform string replacements and regex substitutions on the JSON text.
    """
    replacements = {
        "uint32": "default",
        "uint64": "default",
        '"format": "int"': '"format": "default"',
        '"format": "partial-date-time"': '"format": "time"',
    }
    for old, new in replacements.items():
        text = text.replace(old, new)

    # Fix Decimal references with proper indenting.
    text = DECIMAL_RE.sub(replace_and_indent, text)
    text = SINGLE_ALL_OF_DECIMAL.sub(replace_and_indent, text)

    def replace_ref(match: re.Match) -> str:
        prefix, class_title, suffix = match.groups()
        if class_title in type_to_json_file:
            ref = (
                f"{type_to_json_file[class_title]}/#"
                if is_definitions_file
                else f"../{type_to_json_file[class_title]}/#"
            )
        else:
            ref = (
                f"#/{class_title}"
                if is_definitions_file
                else f"../definitions.json#/{class_title}"
            )
        return f'"$ref": "{ref}"'

    return EXTRACT_REF_RE.sub(replace_ref, text)


def apply_type_fixes(
    schema: Dict, is_definitions_file: bool, type_to_json_file: Dict[str, str]
) -> Dict:
    """
    Convert the schema dictionary to text, apply type fixes, and convert it back.
    """
    json_text = json.dumps(schema, indent=2)
    fixed_text = _apply_type_fixes_to_text(
        json_text, is_definitions_file, type_to_json_file
    )
    return json.loads(fixed_text)


# ---------------------------------------------------------------------
# Schema Metadata and Enum Correction Functions
# ---------------------------------------------------------------------


def parse_class_description(text: str) -> Tuple[Dict[str, str], str]:
    """
    Parse metadata from a special comment in the description.
    Expected format: <!-- py: key1=value1, key2=value2 -->
    """
    pattern = r"<!--\s*py:\s*(.*?)\s*-->"
    match = re.search(pattern, text, re.DOTALL)
    if not match:
        raise ValueError("No valid 'py:' comment found in the text.")

    metadata_str = match.group(1)
    metadata: Dict[str, str] = {}
    for pair in metadata_str.split(","):
        pair = pair.strip()
        if not pair:
            continue
        if "=" not in pair:
            raise ValueError(f"Malformed key-value pair: '{pair}'")
        key, value = map(str.strip, pair.split("=", 1))
        metadata[key] = value

    cleaned_text = re.sub(pattern, "", text, flags=re.DOTALL)
    return metadata, cleaned_text
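
# A quick illustration of the "py:" metadata comment parsed above; the input
# string here is made up, and "tag" is the only key this script itself reads
# (see correct_variant_types below).
#
#   >>> parse_class_description("An order message. <!-- py: tag=t -->")
#   ({'tag': 't'}, 'An order message. ')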


def correct_flattened_types(schema: Dict[str, Any]) -> None:
    """
    Process any type that uses
    #[serde(flatten)]

    on the Rust side, since that would otherwise generate several types in the
    JSON where we want a single one.

    Removes the oneOf list and merges common keys and additional properties.
    """
    if "oneOf" not in schema or "required" not in schema:
        return

    one_of: List[Dict[str, Any]] = schema.pop("oneOf")
    additional_properties: Dict[str, Any] = {}

    enum_tag: str = ""
    enum_value_to_required: Dict[str, List[str]] = {}
    enum_tag_title = CAMEL_CASE_RE.sub("_", schema["title"]).lower()
    enum_tag_property: Dict[str, Any] = {
        "type": "string",
        "title": f"{enum_tag_title}_type",
        "enum": [],
    }

    for item in one_of:
        assert item.get("type") == "object", (
            f"Expected object type in {schema['title']}"
        )
        properties = item.get("properties", {})
        required = item.get("required", [])
        for key, prop in properties.items():
            if "enum" in prop:
                if not enum_tag:
                    enum_tag = key
                else:
                    assert enum_tag == key, f"Enum field mismatch in {schema['title']}"
                [enum_value] = prop["enum"]
                enum_tag_property["enum"].append(enum_value)
                enum_value_to_required[enum_value] = required
            else:
                if key in additional_properties:
                    assert additional_properties[key] == prop, (
                        f"Conflicting properties for {key} in {schema['title']}"
                    )
                else:
                    additional_properties[key] = prop

    if not enum_tag:
        raise ValueError(f"Enum value not found in {schema['title']}")

    sets = [set(group["required"]) for group in one_of]
    common_keys: list[str] = list(set.intersection(*sets)) if sets else []
    common_keys.sort()
    schema["required"].extend(common_keys)

    schema["properties"].update(additional_properties)
    schema["properties"][enum_tag] = enum_tag_property
    schema["enum_tag"] = enum_tag
    schema["enum_tag_to_other_required_keys"] = enum_value_to_required
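
# Rough sketch of the flattening (hypothetical schema, not taken from the real
# API): a schema titled "MyRequest" with top-level "properties"/"required" and
# oneOf variants
#   {"type": "object", "required": ["kind", "a"],
#    "properties": {"kind": {"type": "string", "enum": ["x"]}, "a": {...}}}
#   {"type": "object", "required": ["kind", "b"],
#    "properties": {"kind": {"type": "string", "enum": ["y"]}, "b": {...}}}
# collapses into a single object gaining properties "a", "b" and a string
# property "kind" (title "my_request_type", enum ["x", "y"]), plus
# schema["enum_tag"] = "kind" and schema["enum_tag_to_other_required_keys"]
# mapping each tag value to that variant's required keys.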


def correct_variant_types(
    schema: Dict[str, Any],
    definitions: Dict[str, Any],
    type_to_json_file: Dict[str, str],
) -> None:
    """
    Process types that were enums on the Rust side.

    Each variant needs both the variant name and the actual type name, e.g.:

    pub enum Dropcopy {
        #[serde(rename = "o")]
        #[schemars(title = "Order|Order")]
        Order(Order),
        #[schemars(title = "Fill|Fill")]
        #[serde(rename = "f")]
        Fill(Fill),
        #[serde(rename = "af")]
        #[schemars(title = "AberrantFill|AberrantFill")]
        AberrantFill(AberrantFill),
    }
    """

    one_of_key = "oneOf"
    if one_of_key not in schema or "required" in schema:
        return

    description = schema.get("description", "")
    metadata, new_description = parse_class_description(description)
    if new_description.strip():
        schema["description"] = new_description.strip()
    else:
        schema.pop("description", None)

    tag_field = metadata["tag"]
    new_one_of: List[Dict[str, Any]] = []
    for item in schema[one_of_key]:
        item["required"].remove(tag_field)
        [tag_value] = item["properties"].pop(tag_field)["enum"]
        title = item.pop("title")

        enum_ref = {
            "tag_value": tag_value,
        }
        if "|" not in title:
            type_name = title
            if type_name in type_to_json_file:
                enum_ref["variant_name"] = f"Tagged{title}"
                enum_ref["$ref"] = f"../{type_to_json_file[type_name]}/#"
            else:
                enum_ref["variant_name"] = title
                enum_ref.update(item)
                enum_ref["title"] = title
        else:
            variant_name, type_name = title.split("|", 1)
            enum_ref["title"] = type_name
            if type_name in definitions:
                if definitions[type_name] != item:
                    raise ValueError(f"Conflicting definitions for {type_name}.")
            elif type_name in type_to_json_file:
                pass
            else:
                definitions[type_name] = item

            if type_name in type_to_json_file:
                ref = f"../{type_to_json_file[type_name]}/#"
            else:
                ref = f"../definitions.json#/{type_name}"
            enum_ref["$ref"] = ref
            enum_ref["variant_name"] = (
                f"Tagged{variant_name}" if variant_name == type_name else variant_name
            )

        new_one_of.append(enum_ref)

    schema[one_of_key] = new_one_of
    schema["tag_field"] = tag_field
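
# For a "Variant|Type" title such as "Order|Order" above, each oneOf entry is
# rewritten to roughly
#   {"tag_value": "o", "variant_name": "TaggedOrder", "title": "Order",
#    "$ref": "../definitions.json#/Order"}
# (the "$ref" points into <Service>/<Type>.json instead when the type is a
# top-level request/response), and the tag key itself is recorded as
# schema["tag_field"] for post-processing.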


def correct_enums_with_multiple_titles(schema: Dict[str, Any]) -> None:
    """
    "MinOrderQuantityUnit": {
      "oneOf": [
        {
          "title": "Base",
          "type": "object",
          "required": [
            "unit"
          ],
          "properties": {
            "unit": {
              "type": "string",
              "enum": [
                "base"
              ]
            }
          }
        },
        {
          "title": "Quote",
          "type": "object",
          "required": [
            "unit"
          ],
          "properties": {
            "unit": {
              "type": "string",
              "enum": [
                "quote"
              ]
            }
          }
        }
      ]
    },
    A definition like the above used to generate
    class Base(Struct, omit_defaults=True):
        unit: Literal["base"]
    class Quote(Struct, omit_defaults=True):
        unit: Literal["quote"]
    which was redundant. This collapses the variants into one class named after
    the enclosing type.
    """
    if "definitions" not in schema:
        return

    for type_name, definition in schema["definitions"].items():
        one_of = definition.get("oneOf")
        if not one_of or len(one_of) <= 1:
            continue

        first = one_of[0]
        if not all(
            item.get("type") == first["type"]
            and item.get("required") == first.get("required")
            and "properties" in item
            and item["properties"].keys() == first["properties"].keys()
            for item in one_of
        ):
            continue

        prop_keys = first["properties"].keys()
        if not all(
            all(
                item["properties"][key].get("type")
                == first["properties"][key].get("type")
                and "enum" in item["properties"][key]
                for key in prop_keys
            )
            for item in one_of
        ):
            continue

        # Consolidate the definition
        merged_props = {
            key: {
                "type": first["properties"][key]["type"],
                "enum": sorted(
                    {
                        enum_val
                        for item in one_of
                        for enum_val in item["properties"][key]["enum"]
                    }
                ),
            }
            for key in prop_keys
        }

        schema["definitions"][type_name] = {
            "title": type_name,
            "type": first["type"],
            "required": first["required"],
            "properties": merged_props,
        }
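
# With the MinOrderQuantityUnit example from the docstring above, the two
# single-variant objects collapse into roughly:
#   {"title": "MinOrderQuantityUnit", "type": "object", "required": ["unit"],
#    "properties": {"unit": {"type": "string", "enum": ["base", "quote"]}}}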


def correct_enums_with_x_enumNames(schema: Dict[str, Any]) -> None:
    """
    Process enums that have x-enumNames in the schema.
    "FillKind": {
      "type": "integer",
      "enum": [
        0,
        1,
        2
      ],
      "x-enumNames": [
        "Normal",
        "Reversal",
        "Correction"
      ]
    },
    This should actually be a string enum: the integer values do not matter,
    and the enum names and values should come from x-enumNames.
    """
    if "definitions" not in schema:
        return

    definitions: dict[str, Any] = schema["definitions"]
    for t, definition in definitions.items():
        if "x-enumNames" not in definition:
            continue
        assert definition["type"] == "integer"
        enum_names: list[str] = definition["x-enumNames"]
        enum_ints: list[int] = definition.pop("enum")
        definition["old_enum"] = enum_ints
        if len(enum_names) != len(enum_ints):
            raise ValueError(
                f"Enum names and values length mismatch in {t} in {schema['title']}"
            )
        definition["enum"] = enum_names
        definition["type"] = "string"
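
# After this pass, the FillKind definition from the docstring becomes roughly:
#   {"type": "string", "enum": ["Normal", "Reversal", "Correction"],
#    "old_enum": [0, 1, 2], "x-enumNames": ["Normal", "Reversal", "Correction"]}
# i.e. the integer codes survive only as "old_enum" bookkeeping.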


def correct_enums_with_descriptions(schema: Dict[str, Any]) -> None:
    """
    Process enums that have descriptions in the schema.

    If an enum value has a description, the schema generator splits the enum
    into separate oneOf entries; this merges the plain string values back
    together.

    See TimeInForce for an example.
    """
    if "definitions" not in schema:
        return

    definitions: dict[str, Any] = schema["definitions"]
    for t, definition in definitions.items():
        if "oneOf" not in definition:
            continue
        one_of: list[dict[str, Any]] = definition["oneOf"]

        new_enum = {
            "enum": [],
            "type": "string",
        }
        new_one_of = []
        for item in one_of:
            if "enum" not in item:
                new_one_of.append(item)
                continue

            if item["type"] != "string":
                raise ValueError(
                    f"Expected string type for enum in {t} in {schema['title']}"
                )
            new_enum["enum"].extend(item["enum"])

        if len(new_enum["enum"]) > 0:
            new_one_of.append(new_enum)

        if len(new_one_of) == 1:
            definition.pop("oneOf")
            definition.update(new_one_of[0])
        else:
            definition["oneOf"] = new_one_of
            new_enum["title"] = f"{t}Enum"


def correct_null_types_with_constraints(schema: Dict[str, Any]) -> None:
    """
    "title": "recv_time_ns",
    "type": [
      "integer",
      "null"
    ],
    "format": "default",
    "minimum": 0.0
    In a case like this, there is an error downstream when the type is
    potentially null and a constraint is also present, so such constraints are
    dropped.
    """
    constraints = (
        "exclusiveMinimum",
        "minimum",
        "exclusiveMaximum",
        "maximum",
        "multipleOf",
        "minItems",
        "maxItems",
        "minLength",
        "maxLength",
        "pattern",
    )

    if "properties" in schema:
        properties = schema["properties"]
        for prop_def in properties.values():
            if "type" in prop_def and "null" in prop_def["type"]:
                for constraint in constraints:
                    if constraint in prop_def:
                        prop_def.pop(constraint)

    if "definitions" in schema:
        definitions: dict[str, Any] = schema["definitions"]
        for definition in definitions.values():
            properties = definition.get("properties", {})
            for prop_def in properties.values():
                if "type" in prop_def and "null" in prop_def["type"]:
                    for constraint in constraints:
                        if constraint in prop_def:
                            prop_def.pop(constraint)
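
# E.g. the nullable recv_time_ns property from the docstring is left as
#   {"title": "recv_time_ns", "type": ["integer", "null"], "format": "default"}
# with the "minimum": 0.0 constraint stripped.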


def process_schema_definitions(
    schema: Dict[str, Any],
    definitions: Dict[str, Any],
    type_to_json_file: Dict[str, str],
) -> None:
    """
    Extract and process definitions from a schema.
    Updates the "Decimal" format and applies enum corrections.
    """
    if "definitions" not in schema:
        return

    if "Decimal" in schema["definitions"]:
        schema["definitions"]["Decimal"]["format"] = "decimal"

    correct_enums_with_multiple_titles(schema)
    correct_enums_with_x_enumNames(schema)
    correct_enums_with_descriptions(schema)
    correct_variant_types(schema, definitions, type_to_json_file)
    correct_flattened_types(schema)
    correct_null_types_with_constraints(schema)

    new_defs: dict[str, Any] = schema.pop("definitions")
    for t, definition in new_defs.items():
        if t in type_to_json_file:
            continue
        definitions[t] = definition


# ---------------------------------------------------------------------
# Utility Functions for Service and File Handling
# ---------------------------------------------------------------------


def capitalize_first_letter(word: str) -> str:
    return word[0].upper() + word[1:] if word else word


def add_info_to_schema(services: List[Dict[str, Any]]) -> Dict[str, str]:
    """
    Enrich service definitions with metadata and build a mapping from type names to file paths.
    """
    type_to_json_file: Dict[str, str] = {}
    for service in services:
        service_name = service["name"].replace(" ", "_")
        service["service_name"] = service_name

        for rpc in service["rpcs"]:
            req_schema = rpc["request_type"]
            resp_schema = rpc["response_type"]

            # Add service-specific metadata.
            req_schema["route"] = rpc["route"]
            req_schema["rpc_method"] = rpc["type"]
            req_schema["service"] = service_name

            # Standardize titles.
            response_type_name = "".join(
                capitalize_first_letter(word)
                for word in resp_schema["title"].split("_")
            )
            req_schema["response_type"] = response_type_name
            resp_schema["title"] = response_type_name

            req_title = "".join(
                capitalize_first_letter(word) for word in req_schema["title"].split("_")
            )
            req_schema["title"] = req_title

            type_to_json_file[req_title] = f"{service_name}/{req_title}.json"
            type_to_json_file[response_type_name] = (
                f"{service_name}/{response_type_name}.json"
            )
    return type_to_json_file
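
# Sketch of the mapping built above (service and RPC names are illustrative):
# for a service named "Marketdata" whose RPC request/response titles are
# "ticker_request" / "ticker_response", the titles become "TickerRequest" /
# "TickerResponse" and type_to_json_file gains
#   {"TickerRequest": "Marketdata/TickerRequest.json",
#    "TickerResponse": "Marketdata/TickerResponse.json"}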


def write_json_file(data: Dict[Any, Any], path: str) -> None:
    with open(path, "w") as out_file:
        json.dump(data, out_file, indent=2)


def process_schema(
    schema: Dict[str, Any],
    definitions: Dict[str, Any],
    type_to_json_file: Dict[str, str],
    is_definitions_file: bool = False,
) -> Dict[str, Any]:
    """
    Process a schema by handling definitions and applying type fixes.
    """
    process_schema_definitions(schema, definitions, type_to_json_file)
    return apply_type_fixes(schema, is_definitions_file, type_to_json_file)


def process_service(
    service: Dict[str, Any],
    output_dir: str,
    definitions: Dict[str, Any],
    type_to_json_file: Dict[str, str],
) -> None:
    """
    Process each RPC within a service (both request and response) and write the resulting schema files.
    """
    service_name = service["service_name"]
    service_dir = os.path.join(output_dir, service_name)
    os.makedirs(service_dir, exist_ok=True)

    for rpc in service["rpcs"]:
        for key in ["request_type", "response_type"]:
            schema = rpc[key]
            processed_schema = process_schema(
                schema, definitions, type_to_json_file, False
            )
            schema_title = processed_schema["title"]
            file_path = os.path.join(service_dir, f"{schema_title}.json")
            write_json_file(processed_schema, file_path)


def preprocess_json(input_file: str, output_dir: str) -> None:
    """
    Preprocess the gRPC JSON file by splitting each RPC's request and response schemas
    into separate JSON files and creating a unified definitions file.
    """
    os.makedirs(output_dir, exist_ok=True)
    with open(input_file, "r") as f:
        services = json.load(f)

    type_to_json_file = add_info_to_schema(services)
    definitions: Dict[str, Any] = {}

    for service in services:
        process_service(service, output_dir, definitions, type_to_json_file)

    # The type fixes operate on the serialized JSON text, so run them over the
    # pooled definitions as well.
    fixed_definitions = apply_type_fixes(definitions, True, type_to_json_file)
    for t, definition in fixed_definitions.items():
        if "enum_tag" in definition:
            raise ValueError(
                f"Enum tag found in definitions: {t}, please account for this in post-processing"
            )
    definitions_path = os.path.join(output_dir, "definitions.json")
    write_json_file(fixed_definitions, definitions_path)


# ---------------------------------------------------------------------
# Main Entry Point
# ---------------------------------------------------------------------


def main() -> None:
    parser = argparse.ArgumentParser(description="Process a gRPC JSON schema file.")
    parser.add_argument(
        "--architect_dir",
        type=str,
        default="~/architect",
        help="Path to the architect directory containing the api/schema.json file.",
    )
    parser.add_argument(
        "--output_dir",
        type=str,
        default="processed_schema",
        help="Path to output the extracted schema files.",
    )
    args = parser.parse_args()

    architect_dir = os.path.expanduser(args.architect_dir)
    input_file = os.path.join(architect_dir, "api/schema.json")
    preprocess_json(input_file, args.output_dir)


if __name__ == "__main__":
    main()
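
# Example invocation (illustrative; paths depend on your checkout layout):
#
#   python scripts/preprocess_grpc_schema.py \
#       --architect_dir ~/architect --output_dir processed_schema
#
# This reads ~/architect/api/schema.json and writes one
# <Service>/<RequestTitle>.json and <Service>/<ResponseTitle>.json per RPC,
# plus a shared definitions.json, under processed_schema/. These files are
# presumably the input for scripts/postprocess_grpc.py, which this release
# also adds.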