finbourne-sdk-utils 0.0.24__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- features/__init__.py +0 -0
- features/main.py +11 -0
- finbourne_sdk_utils/__init__.py +8 -0
- finbourne_sdk_utils/cocoon/__init__.py +34 -0
- finbourne_sdk_utils/cocoon/async_tools.py +94 -0
- finbourne_sdk_utils/cocoon/cocoon.py +1862 -0
- finbourne_sdk_utils/cocoon/cocoon_printer.py +455 -0
- finbourne_sdk_utils/cocoon/config/domain_settings.json +125 -0
- finbourne_sdk_utils/cocoon/config/seed_sample_data.json +36 -0
- finbourne_sdk_utils/cocoon/dateorcutlabel.py +198 -0
- finbourne_sdk_utils/cocoon/instruments.py +482 -0
- finbourne_sdk_utils/cocoon/properties.py +442 -0
- finbourne_sdk_utils/cocoon/seed_sample_data.py +137 -0
- finbourne_sdk_utils/cocoon/systemConfiguration.py +92 -0
- finbourne_sdk_utils/cocoon/transaction_type_upload.py +136 -0
- finbourne_sdk_utils/cocoon/utilities.py +1877 -0
- finbourne_sdk_utils/cocoon/validator.py +243 -0
- finbourne_sdk_utils/extract/__init__.py +1 -0
- finbourne_sdk_utils/extract/group_holdings.py +400 -0
- finbourne_sdk_utils/iam/__init__.py +1 -0
- finbourne_sdk_utils/iam/roles.py +74 -0
- finbourne_sdk_utils/jupyter_tools/__init__.py +2 -0
- finbourne_sdk_utils/jupyter_tools/hide_code_button.py +23 -0
- finbourne_sdk_utils/jupyter_tools/stop_execution.py +14 -0
- finbourne_sdk_utils/logger/LusidLogger.py +41 -0
- finbourne_sdk_utils/logger/__init__.py +1 -0
- finbourne_sdk_utils/lpt/__init__.py +0 -0
- finbourne_sdk_utils/lpt/back_compat.py +20 -0
- finbourne_sdk_utils/lpt/cash_ladder.py +191 -0
- finbourne_sdk_utils/lpt/connect_lusid.py +64 -0
- finbourne_sdk_utils/lpt/connect_none.py +5 -0
- finbourne_sdk_utils/lpt/connect_token.py +9 -0
- finbourne_sdk_utils/lpt/dfq.py +321 -0
- finbourne_sdk_utils/lpt/either.py +65 -0
- finbourne_sdk_utils/lpt/get_instruments.py +101 -0
- finbourne_sdk_utils/lpt/lpt.py +374 -0
- finbourne_sdk_utils/lpt/lse.py +188 -0
- finbourne_sdk_utils/lpt/map_instruments.py +164 -0
- finbourne_sdk_utils/lpt/pager.py +32 -0
- finbourne_sdk_utils/lpt/record.py +13 -0
- finbourne_sdk_utils/lpt/refreshing_token.py +43 -0
- finbourne_sdk_utils/lpt/search_instruments.py +48 -0
- finbourne_sdk_utils/lpt/stdargs.py +154 -0
- finbourne_sdk_utils/lpt/txn_config.py +128 -0
- finbourne_sdk_utils/lpt/txn_config_yaml.py +493 -0
- finbourne_sdk_utils/pandas_utils/__init__.py +0 -0
- finbourne_sdk_utils/pandas_utils/lusid_pandas.py +128 -0
- finbourne_sdk_utils-0.0.24.dist-info/LICENSE +21 -0
- finbourne_sdk_utils-0.0.24.dist-info/METADATA +25 -0
- finbourne_sdk_utils-0.0.24.dist-info/RECORD +52 -0
- finbourne_sdk_utils-0.0.24.dist-info/WHEEL +5 -0
- finbourne_sdk_utils-0.0.24.dist-info/top_level.txt +2 -0
|
@@ -0,0 +1,442 @@
|
|
|
1
|
+
from typing import List
|
|
2
|
+
|
|
3
|
+
from finbourne_sdk_utils.cocoon.utilities import checkargs
|
|
4
|
+
from finbourne_sdk_utils import cocoon
|
|
5
|
+
import lusid
|
|
6
|
+
import pandas as pd
|
|
7
|
+
import logging
|
|
8
|
+
from http import HTTPStatus
|
|
9
|
+
import numpy as np
|
|
10
|
+
|
|
11
|
+
# Map Numpy data types to LUSID data types
# NOTE(review): only float64/int64 map to the LUSID "number" type; booleans
# and tz-aware datetimes are stored as LUSID strings, so round-tripping them
# through LUSID loses their native dtype.
global_constants = {
    "data_type_mapping": {
        "object": "string",
        "float64": "number",
        "int64": "number",
        "bool": "string",
        "datetime64[ns, UTC]": "string",
    }
}
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@checkargs
def check_property_definitions_exist_in_scope_single(
    api_factory: lusid.SyncApiClientFactory, property_key: str
) -> "tuple[bool, str | None]":
    """
    Checks whether a single property definition already exists inside LUSID.

    Parameters
    ----------
    api_factory : lusid.SyncApiClientFactory
        The ApiFactory to use
    property_key : str
        The property key, in "domain/scope/code" form, to get from LUSID

    Returns
    -------
    exists : bool
        Whether or not the property definition exists
    data_type : str | None
        The property definition's data type code, or None when the
        definition does not exist

    Raises
    ------
    lusid.exceptions.ApiException
        Re-raised for any API failure other than 404 Not Found
    """

    data_type = None

    # Split the key once instead of three times; keys are "domain/scope/code"
    key_parts = property_key.split("/")

    try:
        response = api_factory.build(
            lusid.PropertyDefinitionsApi
        ).get_property_definition(
            domain=key_parts[0],
            scope=key_parts[1],
            code=key_parts[2],
        )

        exists = True
        data_type = response.data_type_id.code

    except lusid.exceptions.ApiException as ex:
        # 404 simply means the definition does not exist; any other status
        # (auth failure, throttling, ...) is a genuine error and is re-raised.
        if ex.status == HTTPStatus.NOT_FOUND:
            exists = False
        else:
            raise

    return exists, data_type
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
@checkargs
def check_property_definitions_exist_in_scope(
    api_factory: lusid.SyncApiClientFactory,
    domain: str,
    data_frame: pd.DataFrame,
    target_columns: list,
    column_to_scope: dict,
):
    """
    This function identifies which property definitions are missing from LUSID

    Parameters
    ----------
    api_factory : lusid.SyncApiClientFactory
        The Api Factory to use
    domain : str
        The domain to check for property definitions in
    data_frame : pd.DataFrame
        The dataframe to check properties for
    target_columns : list[str]
        The columns to add properties for
    column_to_scope : dict[str:str]
        Column name to scope

    Returns
    -------
    missing_property_columns : list[str]
        The columns missing properties in LUSID
    data_frame : pd.DataFrame
        The input DataFrame with column dtypes updated (in place) to match
        any existing LUSID property definitions
    """

    # LUSID data type -> Pandas dtype, used to coerce mismatched columns
    data_type_update_map = {"number": "float64", "string": "object"}

    # Initialise a set to hold the missing properties
    missing_keys = set([])

    # Property key -> originating column name, so that missing keys can be
    # mapped back to their source columns at the end
    column_property_mapping = {}

    # Iterate over the column names
    for column_name, data_type in data_frame.loc[:, target_columns].dtypes.items():

        # Create the property key
        property_key = f"{domain}/{column_to_scope[column_name]}/{cocoon.utilities.make_code_lusid_friendly(column_name)}"

        column_property_mapping[property_key] = column_name

        # Get a tuple with the first value being True/False key is missing, second is data type of the key
        exists, data_type_lusid = check_property_definitions_exist_in_scope_single(
            api_factory=api_factory, property_key=property_key
        )

        # If the key is missing add it to the set
        if not exists:
            missing_keys.add(property_key)

        # If it is not missing check that the data type of the property matches the dataframe
        else:
            # If the data type does not match
            if data_type_lusid != global_constants["data_type_mapping"][str(data_type)]:
                logging.warning(
                    f"Data types don't match for column {column_name} it is {data_type_lusid} in LUSID and {data_type} in file"
                )
                # LUSID data types without an entry in the update map fall
                # back to the Pandas "object" dtype
                try:
                    updated_data_type = data_type_update_map[data_type_lusid]
                except KeyError:
                    updated_data_type = "object"

                # Update the data type in the dataframe if possible
                data_frame[column_name] = data_frame[column_name].astype(
                    updated_data_type, copy=False
                )

                logging.info(f"Updated {column_name} to {updated_data_type}")

    missing_property_columns = [
        column_property_mapping[property_key] for property_key in missing_keys
    ]

    return missing_property_columns, data_frame
|
|
147
|
+
|
|
148
|
+
|
|
149
|
+
@checkargs
def create_property_definitions_from_file(
    api_factory: lusid.SyncApiClientFactory,
    domain: str,
    data_frame: pd.DataFrame,
    missing_property_columns: list,
    column_to_scope: dict,
):
    """
    Creates the property definitions for all the columns in a file

    Parameters
    ----------
    api_factory : lusid.SyncApiClientFactory
        The ApiFactory to use
    domain : str
        The domain to create the property definitions in
    data_frame : pd.DataFrame
        The dataframe whose column dtypes drive the definitions
    missing_property_columns : list[str]
        The columns that property definitions are missing for
    column_to_scope : dict[str:str]
        Column name to scope

    Returns
    -------
    property_key_mapping : dict
        A mapping of data_frame columns to property keys
    data_frame : pd.DataFrame
        The input DataFrame (all-null columns coerced to "object" in place)

    Raises
    ------
    TypeError
        If any column dtype has no entry in the global data type mapping
    """

    missing_property_data_frame = data_frame.loc[:, missing_property_columns]

    # Ensure that all data types in the file have been mapped
    actual_data_types = set(
        [str(data_type) for data_type in missing_property_data_frame.dtypes]
    )
    allowed_data_types = set(global_constants["data_type_mapping"])
    if not (actual_data_types <= allowed_data_types):
        unmapped_data_types = [
            np.dtype(value) for value in actual_data_types - allowed_data_types
        ]
        unmapped_columns = missing_property_data_frame.dtypes[
            missing_property_data_frame.dtypes.isin(unmapped_data_types)
        ]
        raise TypeError(
            invalid_columns_error_message(unmapped_columns, allowed_data_types)
        )

    # Initialise a dictionary to hold the keys
    property_key_mapping = {}

    # Iterate over the each column and its data type
    for column_name, data_type in missing_property_data_frame.dtypes.items():

        # Make the column name LUSID friendly
        lusid_friendly_code = cocoon.utilities.make_code_lusid_friendly(column_name)

        # If there is no data Pandas infers a type of float, would prefer to infer object
        if missing_property_data_frame[column_name].isnull().all():
            logging.warning(
                f"{column_name} is null, no type can be inferred it will be treated as a string"
            )
            data_type = "object"
            data_frame[column_name] = data_frame[column_name].astype(
                "object", copy=False
            )

        # Create a request to define the property, assumes value_required is false for all
        property_request = lusid.models.CreatePropertyDefinitionRequest(
            domain=domain,
            scope=column_to_scope[column_name],
            code=lusid_friendly_code,
            value_required=False,
            display_name=column_name,
            data_type_id=lusid.models.ResourceId(
                scope="system",
                code=global_constants["data_type_mapping"][str(data_type)],
            ),
        )

        # Call LUSID to create the new property
        property_response = api_factory.build(
            lusid.PropertyDefinitionsApi
        ).create_property_definition(
            create_property_definition_request=property_request
        )

        logging.info(
            f"Created - {property_response.key} - with datatype {property_response.data_type_id.code}"
        )

        # Grab the key off the response to use when referencing this property in other LUSID calls
        property_key_mapping[column_name] = property_response.key

    return property_key_mapping, data_frame
|
|
244
|
+
|
|
245
|
+
|
|
246
|
+
@checkargs
def create_missing_property_definitions_from_file(
    api_factory: lusid.SyncApiClientFactory,
    properties_scope: str,
    data_frame: pd.DataFrame,
    property_columns: list,
    domain: str,
):
    """
    Ensures the property definitions for the given property columns exist in
    LUSID, creating any that are missing, and returns the (possibly mutated)
    DataFrame.

    Parameters
    ----------
    api_factory : lusid.SyncApiClientFactory
        The ApiFactory to use
    properties_scope : str
        The default scope for properties whose column config has no "scope"
    data_frame : pd.DataFrame
        The dataframe containing the property columns
    property_columns : list[dict]
        Column configs; each has a "source" key and optional "target" and
        "scope" keys
    domain : str
        The domain to create the property definitions in; when None (or when
        property_columns is empty) nothing is checked or created

    Returns
    -------
    data_frame : pd.DataFrame
        The input DataFrame, with target columns copied from source columns
        and dtypes updated where definitions already existed
    """
    # If there are property columns
    if len(property_columns) > 0 and domain is not None:

        source_columns = [column["source"] for column in property_columns]
        source_to_target = {
            column1["source"]: column1.get("target", column1.get("source"))
            for column1 in property_columns
        }

        # Copy each source column onto its target column name
        for column in source_columns:
            data_frame.loc[:, source_to_target[column]] = data_frame[column]

        target_columns = [
            column.get("target", column.get("source")) for column in property_columns
        ]
        # Fall back to the function-level properties_scope when a column has
        # no explicit "scope" configured
        column_to_scope = {
            column.get("target", column.get("source")): column.get(
                "scope", properties_scope
            )
            for column in property_columns
        }

        # Identify which property definitions are missing
        (
            missing_property_columns,
            data_frame,
        ) = cocoon.properties.check_property_definitions_exist_in_scope(
            api_factory=api_factory,
            domain=domain,
            data_frame=data_frame,
            target_columns=target_columns,
            column_to_scope=column_to_scope,
        )

        logging.info(
            f"Check for missing {domain} properties complete. {len(missing_property_columns)} missing properties found"
        )

        # If there are missing property definitions
        if len(missing_property_columns) > 0:
            logging.info(
                f"The {domain} properties {str(missing_property_columns)} will be added in the scope {properties_scope}"
            )

            # Create property definitions for all of the columns in the file that have missing definitions
            (
                property_key_mapping,
                data_frame,
            ) = cocoon.properties.create_property_definitions_from_file(
                api_factory=api_factory,
                domain=domain,
                data_frame=data_frame,
                missing_property_columns=missing_property_columns,
                column_to_scope=column_to_scope,
            )

    return data_frame
|
|
311
|
+
|
|
312
|
+
|
|
313
|
+
def invalid_columns_error_message(unmapped_columns, allowed_data_types):
    """
    Build the error message raised when DataFrame columns have dtypes with no
    LUSID data type mapping.

    Parameters
    ----------
    unmapped_columns : pd.Series
        Column name -> dtype for each column lacking a mapping
    allowed_data_types : set[str]
        The dtype names that LUSID supports

    Returns
    -------
    str
        A human-readable description of the unmapped columns
    """
    # Render each dtype as its string name so the message stays readable
    formatted_unmapped_columns = {
        column: str(dtype) for column, dtype in unmapped_columns.to_dict().items()
    }
    return f"""The following columns in the data_frame have not been mapped to LUSID data types: {formatted_unmapped_columns}.
    LUSID supports the following data types: {allowed_data_types}.
    Please ensure that all data types have been mapped before retrying."""
|
|
320
|
+
|
|
321
|
+
|
|
322
|
+
@checkargs
def create_property_values(
    row: pd.Series, column_to_scope: dict, scope: str, domain: str, dtypes: pd.Series
) -> dict:
    """
    This function generates the property values for a row in a file

    Parameters
    ----------
    row : pd.Series
        The current row of the data frame to create property values for
    column_to_scope : dict {str, str}
        The scope for a column name
    scope : str
        The fallback scope used for any column without an entry in
        column_to_scope
    domain : str
        The domain to create the property values in
    dtypes : pd.Series
        The data types of each column to create property values for

    Returns
    -------
    properties : dict {str, models.PerpetualProperty}
        Keyed by full property key. NOTE: for the "instrument" domain the
        values are returned as a list instead of a dict.

    Raises
    ------
    TypeError
        If any column dtype has no entry in the global data type mapping
    """

    actual_data_types = set([str(data_type) for data_type in dtypes])
    allowed_data_types = set(global_constants["data_type_mapping"])

    # Ensure that all data types in the file have been mapped
    if not (actual_data_types <= allowed_data_types):
        unmapped_data_types = actual_data_types - allowed_data_types
        unmapped_columns = dtypes[dtypes.isin(unmapped_data_types)]
        raise TypeError(
            invalid_columns_error_message(unmapped_columns, allowed_data_types)
        )

    # Initialise the empty properties dictionary
    properties = {}

    # Iterate over each column name and data type
    for column_name, data_type in dtypes.items():

        # Set the data type to be a string so that it is easier to work with
        string_data_type = str(data_type)
        # Convert the numpy data type to a LUSID data type using the global mapping
        lusid_data_type = global_constants["data_type_mapping"][string_data_type]
        # Get the value of the column from the row
        row_value = row[column_name]

        # Use the correct LUSID property value based on the data type;
        # null values are skipped rather than written as "nan"
        if lusid_data_type == "string":
            if pd.isna(row_value):
                continue
            property_value = lusid.models.PropertyValue(label_value=str(row_value))

        if lusid_data_type == "number":
            # Handle null values given the input null value override
            if pd.isnull(row_value):
                continue
            property_value = lusid.models.PropertyValue(
                metric_value=lusid.models.MetricValue(value=row_value)
            )

        # Set the property, falling back to the default scope when the column
        # has no specific scope configured
        property_key = f"{domain}/{column_to_scope.get(column_name, scope)}/{cocoon.utilities.make_code_lusid_friendly(column_name)}"
        properties[property_key] = lusid.models.PerpetualProperty(
            key=property_key, value=property_value
        )

    # Instrument properties are returned as a list of values rather than a
    # keyed dict
    if domain.lower() == "instrument":
        properties = list(properties.values())

    return properties
|
|
395
|
+
|
|
396
|
+
|
|
397
|
+
def _infer_full_property_keys(
    partial_keys: list, properties_scope: str, domain: str
) -> list:
    """
    Completes a list of partially specified property keys into full
    "domain/scope/code" keys, made LUSID friendly component by component.

    Parameters
    ----------
    partial_keys : list[str]
        The partial keys; each may be "code", "scope/code" or
        "domain/scope/code"
    properties_scope : str
        The scope used when a key specifies only its code
    domain : str
        The domain used when a key omits it
    Returns
    -------
    list[str]
        A list of full property keys
    """

    def _complete(parts):
        # Three components: already a full key. Two: prepend the domain.
        # One: prepend both domain and default scope.
        if len(parts) == 3:
            return "/".join(parts)
        if len(parts) == 2:
            return f"{domain}/{parts[0]}/{parts[1]}"
        if len(parts) == 1:
            return f"{domain}/{properties_scope}/{parts[0]}"
        # Any other shape is silently dropped (matches original behaviour)
        return None

    completed = [_complete(partial.split("/")) for partial in partial_keys]

    # Ensure that the returned keys are LUSID friendly
    return [
        "/".join(
            cocoon.utilities.make_code_lusid_friendly(component)
            for component in full_key.split("/")
        )
        for full_key in completed
        if full_key is not None
    ]
|
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
import pandas as pd
|
|
2
|
+
from finbourne_sdk_utils.cocoon.cocoon import load_from_data_frame
|
|
3
|
+
from finbourne_sdk_utils.cocoon.utilities import load_json_file
|
|
4
|
+
import logging
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
# Root logger for this module
logger = logging.getLogger()

# Default domain -> mapping configuration, loaded once at import time from the
# packaged JSON file. NOTE(review): seed_data updates the nested mapping dicts
# in place, so this module-level default is shared mutable state — confirm
# that cross-call leakage is acceptable.
default_mappings = dict(load_json_file("config/seed_sample_data.json"))
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def seed_data(
    api_factory,
    domains,
    scope: str,
    transaction_file,
    file_type: str,
    mappings: dict = None,
    sub_holding_keys: list = None,
):
    """
    This function allows users to seed their LUSID environment with some core data (e.g. instruments,
    portfolios, and transaction) from one file.

    Parameters
    ----------
    api_factory : lusid.SyncApiClientFactory
        The api factory to use
    domains : list[str]
        A list of the file_types for upload.
    scope : str
        The scope of the transaction portfolio.
    transaction_file : Union[str, pandas.DataFrame]
        The absolute or relative path to source file of transaction data or a
        pandas DataFrame (in which case file_type must be "DataFrame")
    file_type : str
        the file extension (e.g. "csv" for "test_transaction.csv") or
        "DataFrame"; extensions are matched case-insensitively.
    mappings : dict
        a mapping of DataFrame headers to LUSID headers, keyed by domain.
        Defaults to a fresh deep copy of the packaged default mappings.
    sub_holding_keys : list
        a list of sub-holding keys for grouping. Defaults to no keys.

    Returns
    -------
    overall_results : dict
        An object containing the responses for each domain upload.

    Raises
    ------
    ValueError
        If the file extension is inconsistent/unsupported or a domain has no
        mapping.
    """
    from copy import deepcopy

    # BUG FIX: both defaults were previously mutable module-level objects
    # (the shared default_mappings dict and a shared []). Because this
    # function updates the mapping dicts in place, one call's defaults leaked
    # into every later call. Resolve per-call copies instead.
    if mappings is None:
        mappings = deepcopy(default_mappings)
    if sub_holding_keys is None:
        sub_holding_keys = []

    if file_type == "DataFrame" and isinstance(transaction_file, pd.DataFrame):
        data_frame = transaction_file

    else:

        # Gather a dictionary of supported files; normalise the extension so
        # that "CSV" and "csv" are treated identically throughout
        supported_files = {"csv": "csv", "xlsx": "excel"}
        file_extension = file_type.lower()

        if Path(transaction_file).suffix != "." + file_extension:
            raise ValueError(
                f"""Inconsistent file and file extensions passed: {str(transaction_file)} does not have file extension {file_type}"""
            )

        if file_extension not in supported_files:
            raise ValueError(
                f"Unsupported file type, please upload one of the following: {list(supported_files.keys())}"
            )

        data_frame = getattr(pd, f"read_{supported_files[file_extension]}")(
            transaction_file
        )

    def check_or_set_default_value(mapping, check_key, default_value):
        """
        Ensure every domain mapping carries check_key, defaulting it when
        absent, so load_from_data_frame always receives the keys it expects.

        Parameters
        ----------
        mapping : dict
            a file containing mapping of DataFrame headers to LUSID headers.

        check_key : str
            a string that represents the key to be checked

        default_value : obj
            an object such as an empty dictionary or list

        Returns
        -------
        mapping : dict
            the mapping, mutated in place where keys were missing.
        """

        for value in mapping.values():
            if isinstance(value, dict) and check_key not in value.keys():
                value.update({check_key: default_value})

        return mapping

    def generic_load_from_data_frame(file_type):
        """Upload the DataFrame into LUSID using the mapping for one domain."""
        # BUG FIX: a stray trailing comma previously wrapped this call's
        # result in a one-element tuple; return the response directly.
        return load_from_data_frame(
            api_factory=api_factory,
            file_type=file_type,
            scope=scope,
            data_frame=data_frame,
            mapping_required=mappings[file_type]["required"],
            mapping_optional=mappings[file_type]["optional"],
            identifier_mapping=mappings[file_type]["identifier_mapping"],
            property_columns=mappings[file_type]["properties"],
            properties_scope=scope,
            sub_holding_keys=sub_holding_keys,
        )

    overall_results = {}

    # Fill in the optional mapping sections every upload expects
    mappings = check_or_set_default_value(mappings, "optional", {})
    mappings = check_or_set_default_value(mappings, "identifier_mapping", {})
    mappings = check_or_set_default_value(mappings, "properties", [])

    for domain in domains:

        if domain not in mappings:
            raise ValueError(
                f"The provided file_type of {domain} has no associated mapping"
            )

        logging.info(f"Loading {domain} DataFrame into LUSID...")

        response = generic_load_from_data_frame(domain)

        logging.info(f"Loading of {domain} is COMPLETED")

        overall_results[domain] = response

    return overall_results
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
import lusid
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
def set_transaction_mapping(client, transaction_mapping):
    """
    Sets the transaction mapping in LUSID so that the system can resolve the transactions into movements

    Parameters
    ----------
    client : ApiFactory
        The LusidApi client to use
    transaction_mapping : dict
        The transaction mapping configuration; expects a "values" list where
        each entry carries "aliases" and "movements"

    Returns
    -------
    response : lusid.models.ResourceListOfTransactionConfigurationData
        The response from LUSID
    """

    # Initialise your list of configuration requests, one for each transaction type
    configuration_requests = []

    # Iterate over your configurations in the default mapping
    for configuration in transaction_mapping["values"]:

        # Build the aliases for this configuration
        aliases = [
            lusid.models.TransactionConfigurationTypeAlias(
                type=alias["type"],
                description=alias["description"],
                transaction_class=alias["transactionClass"],
                transaction_group=alias["transactionGroup"],
                transaction_roles=alias["transactionRoles"],
            )
            for alias in configuration["aliases"]
        ]

        # Build the movements for this configuration
        movements = []

        for movement in configuration["movements"]:

            # BUG FIX: previously only the FIRST entry of "properties" and
            # "mappings" was applied ([0]) and a missing key raised KeyError;
            # now every entry is included and missing keys mean "none".
            properties = {
                prop["key"]: lusid.models.PerpetualProperty(
                    key=prop["key"],
                    value=lusid.models.PropertyValue(label_value=prop["value"]),
                )
                for prop in movement.get("properties", [])
            }

            mappings = [
                lusid.models.TransactionPropertyMappingRequest(
                    property_key=mapping["propertyKey"],
                    set_to=mapping["setTo"],
                )
                for mapping in movement.get("mappings", [])
            ]

            # Append the movement to your list
            movements.append(
                lusid.models.TransactionConfigurationMovementDataRequest(
                    movement_types=movement["movementTypes"],
                    side=movement["side"],
                    direction=movement["direction"],
                    properties=properties,
                    mappings=mappings,
                )
            )

        # Build your configuration for this transaction type
        configuration_requests.append(
            lusid.models.TransactionConfigurationDataRequest(
                aliases=aliases, movements=movements, properties=None
            )
        )

    # Call LUSID to set your configuration for our transaction types
    response = client.system_configuration.set_configuration_transaction_types(
        types=configuration_requests
    )

    return response
|