finbourne_sdk_utils-0.0.24-py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
Files changed (52)
  1. features/__init__.py +0 -0
  2. features/main.py +11 -0
  3. finbourne_sdk_utils/__init__.py +8 -0
  4. finbourne_sdk_utils/cocoon/__init__.py +34 -0
  5. finbourne_sdk_utils/cocoon/async_tools.py +94 -0
  6. finbourne_sdk_utils/cocoon/cocoon.py +1862 -0
  7. finbourne_sdk_utils/cocoon/cocoon_printer.py +455 -0
  8. finbourne_sdk_utils/cocoon/config/domain_settings.json +125 -0
  9. finbourne_sdk_utils/cocoon/config/seed_sample_data.json +36 -0
  10. finbourne_sdk_utils/cocoon/dateorcutlabel.py +198 -0
  11. finbourne_sdk_utils/cocoon/instruments.py +482 -0
  12. finbourne_sdk_utils/cocoon/properties.py +442 -0
  13. finbourne_sdk_utils/cocoon/seed_sample_data.py +137 -0
  14. finbourne_sdk_utils/cocoon/systemConfiguration.py +92 -0
  15. finbourne_sdk_utils/cocoon/transaction_type_upload.py +136 -0
  16. finbourne_sdk_utils/cocoon/utilities.py +1877 -0
  17. finbourne_sdk_utils/cocoon/validator.py +243 -0
  18. finbourne_sdk_utils/extract/__init__.py +1 -0
  19. finbourne_sdk_utils/extract/group_holdings.py +400 -0
  20. finbourne_sdk_utils/iam/__init__.py +1 -0
  21. finbourne_sdk_utils/iam/roles.py +74 -0
  22. finbourne_sdk_utils/jupyter_tools/__init__.py +2 -0
  23. finbourne_sdk_utils/jupyter_tools/hide_code_button.py +23 -0
  24. finbourne_sdk_utils/jupyter_tools/stop_execution.py +14 -0
  25. finbourne_sdk_utils/logger/LusidLogger.py +41 -0
  26. finbourne_sdk_utils/logger/__init__.py +1 -0
  27. finbourne_sdk_utils/lpt/__init__.py +0 -0
  28. finbourne_sdk_utils/lpt/back_compat.py +20 -0
  29. finbourne_sdk_utils/lpt/cash_ladder.py +191 -0
  30. finbourne_sdk_utils/lpt/connect_lusid.py +64 -0
  31. finbourne_sdk_utils/lpt/connect_none.py +5 -0
  32. finbourne_sdk_utils/lpt/connect_token.py +9 -0
  33. finbourne_sdk_utils/lpt/dfq.py +321 -0
  34. finbourne_sdk_utils/lpt/either.py +65 -0
  35. finbourne_sdk_utils/lpt/get_instruments.py +101 -0
  36. finbourne_sdk_utils/lpt/lpt.py +374 -0
  37. finbourne_sdk_utils/lpt/lse.py +188 -0
  38. finbourne_sdk_utils/lpt/map_instruments.py +164 -0
  39. finbourne_sdk_utils/lpt/pager.py +32 -0
  40. finbourne_sdk_utils/lpt/record.py +13 -0
  41. finbourne_sdk_utils/lpt/refreshing_token.py +43 -0
  42. finbourne_sdk_utils/lpt/search_instruments.py +48 -0
  43. finbourne_sdk_utils/lpt/stdargs.py +154 -0
  44. finbourne_sdk_utils/lpt/txn_config.py +128 -0
  45. finbourne_sdk_utils/lpt/txn_config_yaml.py +493 -0
  46. finbourne_sdk_utils/pandas_utils/__init__.py +0 -0
  47. finbourne_sdk_utils/pandas_utils/lusid_pandas.py +128 -0
  48. finbourne_sdk_utils-0.0.24.dist-info/LICENSE +21 -0
  49. finbourne_sdk_utils-0.0.24.dist-info/METADATA +25 -0
  50. finbourne_sdk_utils-0.0.24.dist-info/RECORD +52 -0
  51. finbourne_sdk_utils-0.0.24.dist-info/WHEEL +5 -0
  52. finbourne_sdk_utils-0.0.24.dist-info/top_level.txt +2 -0

finbourne_sdk_utils/lpt/lpt.py
@@ -0,0 +1,374 @@
+ import re
+ import pandas as pd
+ from collections import defaultdict
+ from functools import reduce
+ from .record import Rec
+ from .either import Either
+ from . import back_compat
+
+ type_re = re.compile(r"(.*)\((.*),(.*)\)")
+
+
+ # Convert an iterable dataset to a DataFrame
+ def to_df(data, columns):
+     if isinstance(data, Rec):
+         data = data.content
+
+     # Create a record by accessing each column
+     def to_record(obj):
+         return {col: access(obj, col) for col in columns}
+
+     def property_value(p):
+         return p.metric_value.value if p.label_value is None else p.label_value
+
+     # Traverse dot notation to flatten sub-objects
+     def access(obj, col):
+         if col.startswith("P:"):
+             try:
+                 props = getattr(obj[1], "properties")
+                 if isinstance(props, list):
+                     return property_value(
+                         [p.value for p in props if p.key == col[2:]][0]
+                     )
+                 return property_value(props[col[2:]].value)
+             except Exception:
+                 return None
+
+         if col.startswith("SHK:"):
+             try:
+                 props = getattr(obj[1], "sub_holding_keys")
+                 return property_value(props[col[4:]].value)
+             except Exception:
+                 return None
+
+         for fld in col.split("."):
+             if fld.startswith("KEY:"):
+                 obj = obj.get(fld[4:]) if obj else None
+             else:
+                 obj = getattr(obj, fld) if obj else None
+         return obj
+
+     # Try standard representations
+     try:
+         iterator = iter(data)
+         if "BaseModel.__iter" in str(iterator):
+             raise Exception
+     except Exception:
+         iterator = iter(data.values)
+
+     records = [to_record(o) for o in iterator]
+
+     if len(records) == 0:
+         return pd.DataFrame({col: [] for col in columns})[columns]
+
+     return pd.DataFrame.from_records(records)[columns]
+
+
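+ # Illustrative column syntax for to_df (key names hypothetical):
+ #   "id.code"                      - dot notation walks nested attributes
+ #   "KEY:quantity"                 - dictionary lookup instead of attribute access
+ #   "P:Instrument/default/Name"    - LUSID property value by key
+ #   "SHK:Transaction/default/Fund" - sub-holding-key value
+
+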
+ # Utilities to convert YYYY-MM-DD strings to and from UTC dates
+ def to_date(date, **kwargs):
+     return pd.to_datetime(date, utc=True, **kwargs) if date is not None else None
+
+
+ def from_date(date):
+     return date.strftime("%Y-%m-%d") if date else None
+
+
+ def add_days(date, days):
+     return to_date(date) + pd.Timedelta("{} days".format(days))
+
+
+ # Display a DataFrame with no cropping
+ def display_df(df, decimals=2):
+     fmt = "{:,." + str(decimals) + "f}"
+     pd.options.display.float_format = fmt.format
+     pd.set_option("max_colwidth", None)
+
+     try:
+         if len(df) == 1 and len(df.columns) > 5:
+             df = df.T
+         with pd.option_context("display.width", None, "display.max_rows", 1000):
+             print(df.fillna(""))
+     except Exception:
+         print(df)
+
+
+ # Create API objects from a dataframe
+ def from_df(
+     df, record_type, complex_types, related=None, columns=None, date_fields=None
+ ):
+     simple_columns = []
+     complex_columns = defaultdict(list)
+     properties = []
+
+     date_fields = set(
+         (date_fields or [])
+         + [k for (k, v) in record_type.openapi_types.items() if v == "datetime"]
+         + [c for c in df.columns if pd.api.types.is_datetime64_any_dtype(df[c])]
+     )
+
+     if len(date_fields) > 0:
+         df = df.copy()
+         for col in date_fields:
+             df[col] = pd.to_datetime(df[col], utc=True).map(
+                 lambda x: None if pd.isna(x) else x
+             )
+
+     for col in columns or df.columns.values:
+         if col.startswith("P:"):
+             properties.append(col[2:])
+         elif "." in col:
+             (k, v) = col.split(".")
+             complex_columns[k].append(v)
+         else:
+             simple_columns.append(col)
+
+     def build_complex_type(row, col, fields):
+         d = {f: row["{}.{}".format(col, f)] for f in fields}
+         col_type = complex_types[record_type.openapi_types[col]]
+         return col_type(**d)
+
+     def build_properties(row):
+         ptype_tpl = type_re.findall(record_type.openapi_types["properties"])[0]
+
+         ptype = complex_types[ptype_tpl[2].strip()]
+
+         def prop_builder(property_key, value):
+             if isinstance(value, str):
+                 return ptype(
+                     key=property_key,
+                     value=complex_types["PropertyValue"](label_value=value),
+                 )
+             elif not pd.isna(value):
+                 return ptype(
+                     key=property_key,
+                     value=complex_types["PropertyValue"](
+                         metric_value=complex_types["MetricValue"](value)
+                     ),
+                 )
+             else:
+                 return None
+
+         props = [
+             (col, prop_builder(col, row["P:{}".format(col)])) for col in properties
+         ]
+         d = dict([p for p in props if p[1] is not None])
+         return d
+
+     def to_type(i, row):
+         fields = {col: row[col] for col in simple_columns}
+         fields.update(
+             {
+                 col: build_complex_type(row, col, flds)
+                 for col, flds in complex_columns.items()
+             }
+         )
+
+         if "properties" in record_type.openapi_types and len(properties) > 0:
+             fields["properties"] = build_properties(row)
+
+         if related is not None:
+             if callable(related):
+                 fields = related(i, row, fields)
+             else:
+                 # Dict type
+                 fields.update(related.get(i, {}))
+
+         # Quick and dirty instrument_uid handling.
+         # This should change to allow the full
+         # instrument resolution logic to apply.
+         if "instrument_uid" in fields.keys():
+             fields["instrument_identifiers"] = to_instrument_identifiers(
+                 fields["instrument_uid"]
+             )
+             del fields["instrument_uid"]
+
+         # Remove any 'noise' from the dataframe
+         allowed = set(record_type.openapi_types.keys())
+         trimmed = dict([tpl for tpl in fields.items() if tpl[0] in allowed])
+
+         return record_type(**trimmed)
+
+     return [to_type(i, row) for (i, row) in df.iterrows()]
+
+
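+ # Illustrative from_df input (column names hypothetical): "code" fills a simple
+ # field, "transaction_price.price" builds the nested complex type, and
+ # "P:Trade/default/Strategy" becomes an entry in the properties dictionary.
+
+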
+ def to_instrument_identifiers(uid):
+     if uid.startswith("CCY_"):
+         return {"Instrument/default/Currency": uid[4:]}
+     if uid.startswith("Ccy:"):
+         return {"Instrument/default/Currency": uid[4:]}
+     if uid.startswith("Currency:"):
+         return {"Instrument/default/Currency": uid[9:]}
+     elif uid.startswith("ClientInternal:"):
+         return {"Instrument/default/ClientInternal": uid[15:]}
+     elif uid.startswith("Figi:"):
+         return {"Instrument/default/Figi": uid[5:]}
+     elif uid.startswith("RIC:"):
+         return {"Instrument/default/RIC": uid[4:]}
+     else:
+         parts = uid.split(":")
+         if len(parts) == 2 and parts[0].startswith("Instrument"):
+             return {parts[0]: parts[1]}
+         return {"Instrument/default/LusidInstrumentId": uid}
+
+
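+ # Example mappings (identifier values illustrative):
+ #   "CCY_USD"            -> {"Instrument/default/Currency": "USD"}
+ #   "Figi:BBG000B9XRY4"  -> {"Instrument/default/Figi": "BBG000B9XRY4"}
+ #   any unprefixed value -> {"Instrument/default/LusidInstrumentId": uid}
+
+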
+ # Convert an iterable of Record to a DataFrame
+ def records_to_df(records):
+     return pd.DataFrame([r.to_dict() for r in records])
+
+
+ # Break a dataframe down into batches
+ def chunk(seq, size):
+     return (seq[pos : pos + size] for pos in range(0, len(seq), size))
+
+
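+ # e.g. chunk(df, 500) yields successive slices df[0:500], df[500:1000], ...
+
+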
+ # Write statistics out
+ def dump_stats(filename, stats, columns):
+     if len(stats) > 0:
+         df = records_to_df(stats)[columns]
+         if filename == "-":
+             display_df(df.drop(["startTime", "endTime"], axis=1))
+         else:
+             df.to_csv(filename, index=False)
+
+
+ # Limit the length of a DataFrame
+ def trim_df(df, limit, **kwargs):
+     if kwargs.get("sort", None) is not None:
+         df = df.sort_values(kwargs["sort"]).reset_index(drop=True)
+     return df[:limit] if 0 < limit < len(df) else df
+
+
+ # Template 'program'
+ def standard_flow(parser, connector, executor, display_df=display_df):
+     args = parser()
+     api = connector(args)
+
+     either = Either(executor(api, args))
+
+     # Called if the executor returns a success
+     def success(df):
+         # Query-type programs will return a dataframe
+         if df is not None:
+             fn = args.__dict__.get("filename", None)
+             if fn is not None:
+                 if ".xls" in fn.lower():
+                     df.to_excel(fn, index=False)
+                 elif fn.endswith(".pk"):
+                     df.to_pickle(fn)
+                 else:
+                     df.to_csv(fn, index=False)
+             else:
+                 if "dfq" in args and args.dfq:
+                     from . import dfq
+
+                     dfq.dfq(dfq.parse(False, args.dfq), df)
+                 else:
+                     return display_df(df)
+
+     rv = either.match(left=display_error, right=success)
+
+     api.dump_stats()
+
+     return rv
+
+
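+ # Typical wiring (parser/executor names hypothetical):
+ #   standard_flow(lambda: parse_args(), lse.connect, run_query)
+ # parses the arguments, connects, runs the executor, then either saves/displays
+ # the resulting dataframe or reports the failure via display_error.
+
+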
+ # Nicely display an error from LUSID
+ def display_error(error, response=False):
+     try:
+         print(
+             "ERROR: {} Reason:{}, Code:{}\n".format(
+                 error.status, error.reason, error.code
+             )
+         )
+         print("MESSAGE: {}\n".format(error.message))
+         print("DETAILS: {}\n".format(error.detailed_message))
+         print("Instance: {}\n".format(error.instance))
+
+         if len(error.items) > 0:
+             df = records_to_df(
+                 [Rec(Id=key, Detail=item) for key, item in error.items.items()]
+             )
+             print("ITEMS (max 50)")
+             display_df(df[:50])
+     except Exception:
+         print(str(error))
+     return response
+
+
+ # Backwards compatibility
+ def read_csv(path, frame_type=None, **kwargs):
+     return read_input(path, frame_type, **kwargs)
+
+
+ # Read in a data file and apply any backwards-compatibility settings
+ def read_input(path, frame_type=None, mappings=None, **kwargs):
+     # Pop so the sheet name is not passed to read_excel twice
+     sheet = kwargs.pop("sheet_name", 0)
+     if is_path_supported_excel_with_sheet(path):
+         path, sheet = path.rsplit(":", 1)
+
+     if ".xls" in path.lower():
+         df = pd.read_excel(path, sheet_name=sheet, engine="openpyxl", **kwargs)
+     else:
+         df = pd.read_csv(path, **kwargs)
+
+     if mappings is not None:
+         df = df.rename(columns=mappings)
+         df = df[list(set(mappings.values()) & set(df.columns))]
+
+     return back_compat.convert(frame_type, df)
+
+
+ # Check if a path is a supported Excel file with a suffixed sheet name
+ def is_path_supported_excel_with_sheet(path):
+     return re.match(r".*\.(xls|xlsx|xlsm|xlsb):", path)
+
+
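+ # e.g. read_input("holdings.xlsx:Positions") loads the "Positions" sheet of
+ # holdings.xlsx (file and sheet names illustrative).
+
+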
+ # Create a PerpetualProperties request from prefixed columns in a dataframe
+ def perpetual_upsert(models, df, prefix="P:"):
+     offset = len(prefix)
+
+     def make_property(properties, key):
+         return properties + [
+             (
+                 key[offset:],
+                 models.PerpetualProperty(
+                     key[offset:], models.PropertyValue(label_value=str(value))
+                 ),
+             )
+             for value in df[key].dropna().head(1).tolist()
+         ]
+
+     return dict(
+         reduce(
+             make_property, [c for c in df.columns.values if c.startswith(prefix)], []
+         )
+     )
+
+
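+ # e.g. a column "P:Portfolio/default/Manager" yields one PerpetualProperty keyed
+ # "Portfolio/default/Manager", built from the column's first non-null value.
+
+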
+ # Return the serialised representation of the object
+ def Serialise(api, body, bodyType):
+     return api.api._serialize.body(body, bodyType)
+
+
+ def process_input(aliases, api, args, fn):
+     df = pd.concat(
+         [read_input(input_file, dtype=str) for input_file in args.input],
+         ignore_index=True,
+         sort=False,
+     )
+     if args.mappings:
+         df.rename(
+             columns=dict(
+                 [
+                     (s[1], aliases.get(s[0], s[0]))
+                     for s in [m.split("=") for m in args.mappings]
+                 ]
+             ),
+             inplace=True,
+         )
+     prop_keys = [col for col in df.columns.values if col.startswith("P:")]
+     identifiers = [col for col in df.columns.values if col in args.identifiers]
+     # Identifiers have to be unique
+     df = df.drop_duplicates(identifiers)
+
+     return fn(api, args, df, identifiers, prop_keys)

finbourne_sdk_utils/lpt/lse.py
@@ -0,0 +1,188 @@
+ import datetime
+ import importlib
+ import inspect
+ import json
+ import os
+
+ from . import lpt
+ from .either import Either
+ from .record import Rec
+ from lusid.api_response import ApiResponse
+
+
+ # Build a flat record of API methods, keyed by name without the
+ # "_with_http_info" suffix (15 characters) that the SDK appends
+ def ApiConverter(connection, swagger_api):
+     def check(f):
+         return (inspect.isfunction(f) and f.__name__.endswith("_with_http_info")) or (
+             inspect.ismethod(f) and f.__name__.endswith("_with_http_info")
+         )
+
+     d = {}
+
+     for api in [i[1] for i in swagger_api.__dict__.items() if i[0].endswith("Api")]:
+         jlh = api(connection)
+         for n, v in inspect.getmembers(jlh, predicate=check):
+             d[n[:-15]] = v
+     f = Rec(**d)
+
+     return f
+
+
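+ # e.g. PortfoliosApi.get_portfolio_with_http_info is exposed as api.get_portfolio
+ # (method name illustrative).
+
+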
+ class ExtendedAPI:
+     # Constructor
+     def __init__(self, config, api, lusid, custom_headers=None):
+         class dummy(Exception):
+             pass
+
+         self.api = ApiConverter(api, lusid)
+         self.models = lusid.models
+         self.lusid = lusid
+         self.stats_file = config.get("stats", "")
+         self.stats = [] if self.stats_file != "" else None
+
+         # See if the SDK provides ApiException; fall back to a dummy class
+         exc = (
+             lusid.rest.ApiException if "ApiException" in lusid.rest.__dict__ else dummy
+         )
+
+         self.call = Caller(self.api, self.stats, exc)
+         self.call.custom_headers = custom_headers
+         self.call.as_at = config.get("asat")
+
+     # Create an array of objects using the models
+     def from_df(self, df, model, related=None):
+         return lpt.from_df(df, model, self.models.__dict__, related)
+
+     def dump_stats(self):
+         if self.stats is not None:
+             lpt.dump_stats(
+                 self.stats_file,
+                 self.stats,
+                 [
+                     "startTime",
+                     "endTime",
+                     "name",
+                     "requestId",
+                     "duration",
+                     "elapsed",
+                     "status",
+                 ],
+             )
+
+
+ # Wrapper class to call an API function, returning the stats
+ class Caller:
+     def __init__(self, api, stats, exceptionClass):
+         self.api = api
+         self.stats = stats
+         self.exceptionClass = exceptionClass
+         self.custom_headers = None
+         self.as_at = None
+
+     def __getattr__(self, name):
+         fn = getattr(self.api, name)
+
+         # Function that will call the target function
+         # and return an Either
+         def callApiFn(*args, **kwargs):
+             # Add the 'raw' parameter and custom headers
+             # adjKwargs = dict(raw=True, custom_headers=self.custom_headers)
+             # adjKwargs.update(kwargs)
+             adjKwargs = dict([v for v in dict(kwargs).items() if v[1] is not None])
+
+             # Add the as_at parameter if provided and valid
+             if self.as_at:  # and signature(fn).parameters.get('as_at'):
+                 adjKwargs.update({"as_at": lpt.to_date(self.as_at)})
+
+             # Measure execution time of the call
+             startTime = datetime.datetime.now()
+             try:
+                 result = fn(*args, **adjKwargs)
+                 request_id = result.headers.get("lusid-meta-requestId", "n/a")
+             except self.exceptionClass as err:
+                 data = {} if err.body == "" or err.body == b"" else json.loads(err.body)
+
+                 instance = data.get("instance", "n/a")
+                 s = instance.split("insights/logs/")
+                 request_id = "n/a" if len(s) != 2 else s[1]
+
+                 result = ApiResponse(
+                     data=Rec(
+                         status=err.status,
+                         reason=err.reason,
+                         code=data.get("code", "n/a"),
+                         message=data.get("title", "n/a"),
+                         detailed_message=data.get("detail", "n/a"),
+                         items=data.get("errorDetails", []),
+                         instance=instance,
+                         requestId=request_id,
+                     ),
+                     status_code=err.status,
+                     headers={},
+                 )
+
+             endTime = datetime.datetime.now()
+
+             statistics = Rec(
+                 name=name,
+                 startTime=startTime,
+                 endTime=endTime,
+                 duration=(endTime - startTime).total_seconds(),
+                 elapsed=float(result.headers.get("lusid-meta-duration", 0)) / 1000,
+                 status=result.status_code,
+                 requestId=request_id,
+             )
+
+             if self.stats is not None:
+                 self.stats.append(statistics)
+
+             # If successful, return the output as a 'right'
+             if result.headers.get("lusid-meta-success", "False") == "True":
+                 return Either.Right(Rec(stats=statistics, content=result.data))
+             # Otherwise return a 'left' (a failure)
+             return Either.Left(result.data)
+
+         return callApiFn
+
+
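+ # e.g. api.call.get_portfolio("Scope", "Code") resolves to an Either: a 'right'
+ # carrying Rec(stats=..., content=...) on success, or a 'left' carrying the error
+ # (endpoint and arguments illustrative).
+
+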
+ # Connect to the appropriate LUSID environment
+ def connect(*args, **kwargs):
+     # Start with no overrides
+     overrides = {}
+
+     # Include dictionary types
+     for i in args:
+         overrides.update(i.__dict__)
+
+     # ... and keyword overrides
+     overrides.update(kwargs)
+
+     # Get basic settings from the secrets file
+     try:
+         with open(overrides.get("secrets", "secrets.json"), "r") as secrets:
+             settings = json.load(secrets)
+         # Apply the overrides
+         settings.update(overrides)
+     except Exception:
+         settings = overrides  # connection may not require secrets. Don't fail (yet)
+
+     # Import the appropriate environment for the connection
+     env = settings.get("env", ["lusid"])
+     connect = importlib.import_module(".connect_{}".format(env[0]), "finbourne_sdk_utils.lpt")
+
+     api = ExtendedAPI(settings, *connect.connect(settings))
+
+     # For debugging purposes, this will display the serialised request bodies
+     if os.getenv("LSE_DEBUG_BODY", "N") == "Y":
+
+         def serializer(standard):
+             def fn(request, model):
+                 s = standard(request, model)
+                 print(s)
+                 return s
+
+             return fn
+
+         api.api._serialize.body = serializer(api.api._serialize.body)
+
+     return api
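+
+
+ # Minimal usage sketch (assumes a secrets.json in the working directory):
+ #   from finbourne_sdk_utils.lpt import lse
+ #   api = lse.connect()
+ #   api.call.<endpoint>(...)  # returns an Either of stats/content or an error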