finbourne-sdk-utils 0.0.24__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- features/__init__.py +0 -0
- features/main.py +11 -0
- finbourne_sdk_utils/__init__.py +8 -0
- finbourne_sdk_utils/cocoon/__init__.py +34 -0
- finbourne_sdk_utils/cocoon/async_tools.py +94 -0
- finbourne_sdk_utils/cocoon/cocoon.py +1862 -0
- finbourne_sdk_utils/cocoon/cocoon_printer.py +455 -0
- finbourne_sdk_utils/cocoon/config/domain_settings.json +125 -0
- finbourne_sdk_utils/cocoon/config/seed_sample_data.json +36 -0
- finbourne_sdk_utils/cocoon/dateorcutlabel.py +198 -0
- finbourne_sdk_utils/cocoon/instruments.py +482 -0
- finbourne_sdk_utils/cocoon/properties.py +442 -0
- finbourne_sdk_utils/cocoon/seed_sample_data.py +137 -0
- finbourne_sdk_utils/cocoon/systemConfiguration.py +92 -0
- finbourne_sdk_utils/cocoon/transaction_type_upload.py +136 -0
- finbourne_sdk_utils/cocoon/utilities.py +1877 -0
- finbourne_sdk_utils/cocoon/validator.py +243 -0
- finbourne_sdk_utils/extract/__init__.py +1 -0
- finbourne_sdk_utils/extract/group_holdings.py +400 -0
- finbourne_sdk_utils/iam/__init__.py +1 -0
- finbourne_sdk_utils/iam/roles.py +74 -0
- finbourne_sdk_utils/jupyter_tools/__init__.py +2 -0
- finbourne_sdk_utils/jupyter_tools/hide_code_button.py +23 -0
- finbourne_sdk_utils/jupyter_tools/stop_execution.py +14 -0
- finbourne_sdk_utils/logger/LusidLogger.py +41 -0
- finbourne_sdk_utils/logger/__init__.py +1 -0
- finbourne_sdk_utils/lpt/__init__.py +0 -0
- finbourne_sdk_utils/lpt/back_compat.py +20 -0
- finbourne_sdk_utils/lpt/cash_ladder.py +191 -0
- finbourne_sdk_utils/lpt/connect_lusid.py +64 -0
- finbourne_sdk_utils/lpt/connect_none.py +5 -0
- finbourne_sdk_utils/lpt/connect_token.py +9 -0
- finbourne_sdk_utils/lpt/dfq.py +321 -0
- finbourne_sdk_utils/lpt/either.py +65 -0
- finbourne_sdk_utils/lpt/get_instruments.py +101 -0
- finbourne_sdk_utils/lpt/lpt.py +374 -0
- finbourne_sdk_utils/lpt/lse.py +188 -0
- finbourne_sdk_utils/lpt/map_instruments.py +164 -0
- finbourne_sdk_utils/lpt/pager.py +32 -0
- finbourne_sdk_utils/lpt/record.py +13 -0
- finbourne_sdk_utils/lpt/refreshing_token.py +43 -0
- finbourne_sdk_utils/lpt/search_instruments.py +48 -0
- finbourne_sdk_utils/lpt/stdargs.py +154 -0
- finbourne_sdk_utils/lpt/txn_config.py +128 -0
- finbourne_sdk_utils/lpt/txn_config_yaml.py +493 -0
- finbourne_sdk_utils/pandas_utils/__init__.py +0 -0
- finbourne_sdk_utils/pandas_utils/lusid_pandas.py +128 -0
- finbourne_sdk_utils-0.0.24.dist-info/LICENSE +21 -0
- finbourne_sdk_utils-0.0.24.dist-info/METADATA +25 -0
- finbourne_sdk_utils-0.0.24.dist-info/RECORD +52 -0
- finbourne_sdk_utils-0.0.24.dist-info/WHEEL +5 -0
- finbourne_sdk_utils-0.0.24.dist-info/top_level.txt +2 -0
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
import json
|
|
2
|
+
|
|
3
|
+
import lusid
|
|
4
|
+
import finbourne_access
|
|
5
|
+
import finbourne_identity
|
|
6
|
+
|
|
7
|
+
from finbourne_access import models as access_models
|
|
8
|
+
from finbourne_identity import models as identity_models
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
from finbourne_sdk_utils.cocoon.utilities import (
|
|
12
|
+
checkargs,
|
|
13
|
+
)
|
|
14
|
+
import logging
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
@checkargs
def create_role(
    access_api_factory: finbourne_access.extensions.SyncApiClientFactory,
    identity_api_factory: finbourne_identity.extensions.SyncApiClientFactory,
    access_role_creation_request: access_models.RoleCreationRequest,
) -> None:
    """
    Creates a role through both the access and identity APIs.

    The call is idempotent: if the role already exists in either system the
    "already exists" error is swallowed and logged instead of raised.

    Parameters
    ----------
    access_api_factory : finbourne_access.extensions.SyncApiClientFactory
        Factory used to build the access RolesApi client
    identity_api_factory : finbourne_identity.extensions.SyncApiClientFactory
        Factory used to build the identity RolesApi client
    access_role_creation_request : access_models.RoleCreationRequest
        The role creation request to use

    Returns
    -------
    responses: None

    Raises
    ------
    finbourne_access.ApiException
        For any access error other than "role already exists"
    finbourne_identity.ApiException
        For any identity error other than "role already exists"
    """

    access_roles_api = access_api_factory.build(finbourne_access.RolesApi)
    identity_roles_api = identity_api_factory.build(finbourne_identity.RolesApi)

    # Create the role using the access API.
    try:
        access_roles_api.create_role(access_role_creation_request)
        logging.info(
            f"Role with code {access_role_creation_request.code} has been created via the access API"
        )
    except finbourne_access.ApiException as e:
        detail = json.loads(e.body)
        # NOTE(review): 612/613/615 are treated as "role already exists"
        # variants — confirm against the access API error-code reference.
        if detail["code"] not in (612, 613, 615):  # RoleWithCodeAlreadyExists
            # Bare raise preserves the original traceback unchanged.
            raise
        logging.info(
            f"Role with code {access_role_creation_request.code} has already been created via the access API"
        )

    # Create the same role using the identity API, keyed by the same code.
    identity_role_creation_request = identity_models.CreateRoleRequest(
        name=access_role_creation_request.code
    )
    try:
        identity_roles_api.create_role(identity_role_creation_request)
        logging.info(
            f"Role with code {access_role_creation_request.code} has been created via the identity API"
        )
    except finbourne_identity.ApiException as e:
        detail = json.loads(e.body)
        if detail["code"] != 157:  # RoleWithCodeAlreadyExists
            raise
        logging.info(
            f"Role with code {access_role_creation_request.code} has already been created via the identity API"
        )
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
from IPython.core.display import display, HTML
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
def toggle_code(button_message):
    """Render an HTML button in a Jupyter notebook that shows or hides the
    source of the currently selected code cell.

    :param button_message: Label displayed on the toggle button
    :return: The IPython display handle for the injected HTML
    """

    # The visible button; clicking it invokes the code_toggle() JS function.
    toggle_code_str = f"""
    <form action="javascript:code_toggle()"><input type="submit" id="toggleButton" value="{button_message}"></form>
    """

    # The script defining code_toggle(): flips the display of the selected
    # cell's input area. NOTE(review): relies on classic-notebook DOM classes
    # and jQuery ($) being present — presumably does not work in JupyterLab.
    toggle_code_prepare_str = """
    <script>
    function code_toggle() {
        if ($('div.cell.code_cell.rendered.selected div.input').css('display')!='none'){
            $('div.cell.code_cell.rendered.selected div.input').hide();
        } else {
            $('div.cell.code_cell.rendered.selected div.input').show();
        }
    }
    </script>

    """

    return display(HTML(toggle_code_str + toggle_code_prepare_str))
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
from IPython import display
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class StopExecution(Exception):
    """
    This object is used to stop a notebook from running.
    Example: raise StopExecution("Portfolio missing")
    """

    def __init__(self, message):
        # Human-readable reason displayed in place of a traceback.
        self.message = message

    def _render_traceback_(self):
        """IPython hook invoked instead of printing a full traceback.

        BUG FIX: `from IPython import display` binds the IPython.display
        *module*, which is not callable, so the original
        `display(self.message)` raised TypeError. Call the module's
        display() function explicitly instead.
        """
        display.display(self.message)
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import coloredlogs
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class LusidLogger:
    """Configures the root logger (level, optional file, colored output)
    on construction.

    Example: LusidLogger("debug") or LusidLogger("info", "run.log")
    """

    def __init__(self, log_level="info", logging_file=None):
        # All work is delegated; the instance itself holds no state.
        self.begin_logger(log_level, logging_file)

    @staticmethod
    def begin_logger(log_level, logging_file) -> None:
        """
        This function gets an instance of the root logger and sets the log_level.
        :param log_level: A string defining what log level to set logger at
        :param logging_file: Path to file to store log messages
        :return:
        """
        set_logger_level = {
            "notset": logging.NOTSET,
            "info": logging.INFO,
            "debug": logging.DEBUG,
            # "warning" was previously missing even though it is a standard
            # stdlib level; added for completeness (backward compatible).
            "warning": logging.WARNING,
            "error": logging.ERROR,
            "critical": logging.CRITICAL,
        }

        # Treat falsy values (None, "") as the default level.
        if not log_level:
            log_level = "info"

        if log_level not in set_logger_level:
            raise Exception(
                f"logging level provided ({log_level}) is not in list of valid logging levels {list(set_logger_level.keys())}"
            )

        # Route log records to a file when one is requested.
        if logging_file:
            print(f"Logging to {logging_file}")
            logging.basicConfig(filename=logging_file)

        root_logger = logging.getLogger()
        root_logger.setLevel(set_logger_level[log_level])
        coloredlogs.install(level=set_logger_level[log_level], logger=root_logger)
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
from finbourne_sdk_utils.logger.LusidLogger import LusidLogger
|
|
File without changes
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
# Make backwards compatibility adjustments
|
|
2
|
+
# When reading old files
|
|
3
|
+
def convert(frame_type, df):
    """Upgrade a data frame read from an old-format file to the current schema.

    :param frame_type: Kind of file being read (e.g. "div" for dividends,
        "prc" for prices); gates the type-specific fix-ups below.
    :param df: The data frame to adjust (modified in place).
    :return: The same data frame, adjusted.
    """

    # Security is now known as instrument_uid
    if "security_uid" in df.columns.values:
        df.rename(columns={"security_uid": "instrument_uid"}, inplace=True)

    # Some older dividend files may not have the pay date column
    # In this case we duplicate the record date
    if frame_type == "div":
        if "payment_date" not in df.columns.values:
            df["payment_date"] = df["record_date"]

    # Some older pricing files call the instr column 'sec'
    if frame_type == "prc":
        if "instr" not in df.columns.values:
            # BUG FIX: the original assigned df["sec"] = df["instr"], reading
            # the very column this branch just established is absent (KeyError).
            # Derive the modern 'instr' column from the legacy 'sec' column.
            df["instr"] = df["sec"]

    return df
|
|
@@ -0,0 +1,191 @@
|
|
|
1
|
+
import pandas as pd
|
|
2
|
+
import numpy as np
|
|
3
|
+
import dateutil
|
|
4
|
+
import sys
|
|
5
|
+
import argparse
|
|
6
|
+
|
|
7
|
+
# Column names of the holdings frame returned by the API.
SDATE = "settlement_date"
CCY = "security_uid"  # for cash holdings this column identifies the currency
QTY = "units"
TYPE = "holding_type"
# Working columns added while building the ladder.
CUM = "cum"  # running cash balance per currency
ORDER = "sort"  # row ordering: 1=opening balance, 2-5=activity, 6=summary
JOIN = "join"  # dummy key for the currency x date cross-join

TOOLTIP = "Demo Cash-Ladder report"
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def cash_ladder(api, scope, portfolio, date):
    """Build a pivoted cash-ladder report for a portfolio.

    For each currency, shows an opening balance row, cash-activity rows
    (trades, dividends, receivables, forward FX) and a summary row, with
    settlement dates pivoted into columns.

    :param api: Connected API wrapper exposing qry_holdings()
    :param scope: Scope of the portfolio
    :param portfolio: Portfolio code
    :param date: Report date (anything pd.to_datetime accepts)
    :return: Pivoted DataFrame of quantities indexed by (ccy, order, type)
    """

    qry_date = pd.to_datetime(date, utc=True)

    # Function to make sure there are cash positions.
    # NOTE(review): calls exit() rather than raising, so an empty portfolio
    # terminates the whole process. Reads the enclosing start_date, which is
    # assigned below but before the first call.
    def check_contents(df):
        if len(df) == 0:
            print(
                "Portfolio {} in scope {} contains no cash on {:%Y-%m-%d}".format(
                    portfolio, scope, start_date
                )
            )
            exit()

    # Run one-day earlier, this gives us the beginning of day for the
    # required qry_date
    start_date = qry_date + pd.DateOffset(days=-1)
    df = api.qry_holdings(scope, portfolio, start_date)
    check_contents(df)

    # To convert holdings data frame into cash ladder
    # we need to filter out Position types
    df = df[df[TYPE] != "P"].copy()
    check_contents(df)

    # Set date for current balances: undated rows settle on the start date.
    df[SDATE] = pd.to_datetime(df[SDATE].fillna(start_date), utc=True).dt.date

    # Consolidate to one row per (currency, date, holding type)
    df = df[[CCY, SDATE, TYPE, QTY]].groupby([CCY, SDATE, TYPE], as_index=False).sum()

    # Populate BOD/EOD records

    start_date = start_date.date()  # change form for working with frame data
    # Get unique list of dates, but make sure it includes the qry_date
    dates = pd.concat(
        [df[[SDATE]], pd.DataFrame({SDATE: [qry_date.date()]})], ignore_index=True
    ).drop_duplicates()
    dates = dates[dates[SDATE] > start_date]
    ccys = df[[CCY]].drop_duplicates()

    # Cross-join currencies x dates via a constant key to create one
    # opening-balance (ORDER=1) and one summary (ORDER=6) row per pair.
    ccys[JOIN] = 1
    dates[JOIN] = 1
    dates[QTY] = 0
    dates[ORDER] = 1
    dates[TYPE] = "Opening Cash Balance"
    bod = ccys.merge(dates, on=JOIN)
    eod = bod.copy()
    eod[ORDER] = 6
    # NOTE(review): slice(4) presumably strips a 4-char prefix such as
    # "CCY_" from the currency id — confirm against qry_holdings output.
    eod[TYPE] = eod[CCY].str.slice(4) + " Summary"

    # Activity rows sort between opening (1) and summary (6).
    df[ORDER] = df[TYPE].map({"C": 2, "A": 3, "R": 4, "F": 5})
    df[TYPE] = df[TYPE].map(
        {
            "C": "Trades to settle",
            "R": "Receivables",
            "A": "Dividends",
            "F": "Forward Fx",
        }
    )

    df = (
        pd.concat([bod, eod, df], ignore_index=True)
        .sort_values([CCY, SDATE, ORDER])
        .reset_index(drop=True)
    )

    # Calculate cumulative quantity (running balance per currency;
    # depends on the sort order established just above)
    df[CUM] = df[[CCY, QTY]].groupby([CCY], as_index=False).cumsum()[QTY]

    # Put cumulative balance onto BOD/EOD records
    subset = df[df[ORDER].isin([1, 6])]
    df.loc[subset.index, QTY] = subset[CUM]

    # Filter out T-1 balances (just used to provide BOD balance)

    df = df[df[SDATE] > start_date]

    # Pivot the data: settlement dates become columns
    data = df.set_index([CCY, ORDER, TYPE, SDATE], drop=True).unstack(fill_value=0)

    return data[QTY]
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
def alt_cash_ladder(api, scope, portfolio, date):
    """Build a flat (non-pivoted) cash ladder: one row per cash movement
    with a running balance per currency.

    :param api: Connected API wrapper exposing qry_holdings()
    :param scope: Scope of the portfolio
    :param portfolio: Portfolio code
    :param date: Report date (anything pd.to_datetime accepts)
    :return: DataFrame with renamed, report-friendly column headings
    """
    qry_date = pd.to_datetime(date, utc=True)
    # Run one-day earlier, this gives us the beginning of day for the
    # required qry_date
    start_date = qry_date + pd.DateOffset(days=-1)
    df = api.qry_holdings(scope, portfolio, start_date)

    # filter out Position types
    df = df[df["holding_type"] != "P"]

    # Undated rows are treated as settling on the query date itself.
    df["settlement_date"] = pd.to_datetime(
        df["settlement_date"].fillna(qry_date), utc=True
    ).dt.date
    # Sort so the cumulative sum below yields a chronological running balance.
    df = df.sort_values(["security_uid", "settlement_date"])
    df["balance"] = (
        df[["security_uid", "units"]]
        .groupby(["security_uid"], as_index=False)
        .cumsum()["units"]
    )

    # NOTE(review): assumes all of these columns exist in the holdings
    # frame (e.g. "commitment", "commitment_security_uid") — a frame
    # without them would raise KeyError here; verify against qry_holdings.
    columns = [
        "security_uid",
        "settlement_date",
        "commitment",
        "holding_type",
        "commitment_security_uid",
        "units",
        "balance",
    ]

    df = df[columns].rename(
        columns={
            "security_uid": "Currency",
            "settlement_date": "Cash Date",
            "commitment": "Transaction Type",
            "holding_type": "Cash Type",
            "units": "Local Cash Amount",
        }
    )
    return df
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
def parse(extend=None):
    """Build the command-line parser for the cash-ladder report and parse argv.

    :param extend: Optional callable given the parser to add extra arguments
    :return: The parsed argparse.Namespace
    """
    parser = argparse.ArgumentParser(description="Get Transactions")

    # (args, kwargs) specs for every standard argument, in display order.
    argument_specs = (
        (("scope",), {"help": "Scope"}),
        (("portfolio",), {"help": "Portfolio id"}),
        (("date",), {"metavar": "YYYY-MM-DD", "help": "date"}),
        (
            ("-f", "--filename"),
            {"metavar": "filename.csv", "help": "write to this file"},
        ),
        (
            ("-a", "--alternative"),
            {"action": "store_true", "help": "alternative view"},
        ),
        (
            ("--secrets-file",),
            {"dest": "secrets", "default": "secrets.json", "help": "path to secrets file"},
        ),
    )
    for names, options in argument_specs:
        parser.add_argument(*names, **options)

    # Let the caller bolt on extra arguments before parsing.
    if extend:
        extend(parser)
    return parser.parse_args()
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
def process_args(args):
    """Run the selected cash-ladder report and write it to file or stdout.

    :param args: Parsed namespace from parse() (scope, portfolio, date,
        alternative, filename, secrets)
    """
    api = lse.api(args.secrets)

    # Pick the report flavour requested on the command line.
    builder = alt_cash_ladder if args.alternative else cash_ladder
    result = builder(api, args.scope, args.portfolio, args.date)

    if args.filename:
        result.to_csv(args.filename)
        return

    # No output file: print the whole frame, nicely formatted.
    pd.set_option("display.width", None)
    pd.options.display.float_format = "{:,.2f}".format
    pd.set_option("display.max_rows", 1000)
    print(result)
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
def main():
    """Script entry point: parse command-line arguments and run the report."""
    process_args(parse())


if __name__ == "__main__":
    # Run directly as a script: lse is expected alongside on sys.path.
    import lse

    main()
else:
    # Imported as part of the package: use the package-relative module.
    from . import lse
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
import os
|
|
2
|
+
from urllib.request import quote
|
|
3
|
+
|
|
4
|
+
import lusid
|
|
5
|
+
|
|
6
|
+
from .refreshing_token import RefreshingToken
|
|
7
|
+
|
|
8
|
+
# Environment variable -> secrets-file key for every required setting.
config_mapping = {
    "FBN_TOKEN_URL": "tokenUrl",
    "FBN_USERNAME": "username",
    "FBN_PASSWORD": "password",
    "FBN_CLIENT_ID": "clientId",
    "FBN_CLIENT_SECRET": "clientSecret",
    "FBN_LUSID_API_URL": "apiUrl",
}


def check_for_missing_config(config):
    """Return a record for every required setting that is set neither as an
    environment variable nor in the secrets-file config.

    :param config: Parsed secrets file; its "api" section is consulted as
        the fallback for each environment variable
    :return: List of {"Env variable": ..., "Secrets file key": ...} dicts,
        empty when nothing is missing
    """
    missing = []
    for env_var, config_key in config_mapping.items():
        # Environment wins; the secrets-file value is only the fallback.
        if not os.getenv(env_var, config["api"][config_key]):
            missing.append({"Env variable": env_var, "Secrets file key": config_key})
    return missing
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def connect(config, **kwargs):
    """Build an authenticated LUSID API client from config and environment.

    Each setting is taken from its FBN_* environment variable first, falling
    back to the secrets-file config; password/client credentials are
    URL-quoted for the token request body.

    :param config: Parsed secrets file (dict); an "api" section is created
        or completed in place if absent
    :param kwargs: Accepted for interface compatibility; unused
    :return: Tuple of (lusid.SyncApiClient, lusid module)
    :raises Exception: When any required setting is missing everywhere
    """
    # Ensure the "api" section exists with every expected key, so the
    # os.getenv(..., default) lookups below never raise KeyError.
    # (The original only populated the section when it was absent entirely;
    # a partially-filled section would have raised KeyError.)
    api_section = config.setdefault("api", {})
    for config_key in config_mapping.values():
        api_section.setdefault(config_key, None)

    missing_config = check_for_missing_config(config)
    if len(missing_config) > 0:
        raise Exception(f"Missing the following config: {missing_config}")

    token_url = os.getenv("FBN_TOKEN_URL", config["api"]["tokenUrl"])
    username = os.getenv("FBN_USERNAME", config["api"]["username"])
    # Quote credentials for the x-www-form-urlencoded body; '*' and '!' are
    # deliberately left unescaped (safe characters).
    password = quote(os.getenv("FBN_PASSWORD", config["api"]["password"]), "*!")
    client_id = quote(os.getenv("FBN_CLIENT_ID", config["api"]["clientId"]), "*!")
    client_secret = quote(
        os.getenv("FBN_CLIENT_SECRET", config["api"]["clientSecret"]), "*!"
    )
    api_url = os.getenv("FBN_LUSID_API_URL", config["api"]["apiUrl"])

    # OAuth2 resource-owner password-credentials grant.
    token_request_body = (
        "grant_type=password&username={0}".format(username)
        + "&password={0}&scope=openid client groups".format(password)
        + "&client_id={0}&client_secret={1}".format(client_id, client_secret)
    )

    headers = {
        "Accept": "application/json",
        "Content-Type": "application/x-www-form-urlencoded",
    }

    # Use a distinct name: the original rebound the `config` parameter here,
    # shadowing the caller's settings dict.
    sdk_config = lusid.Configuration()
    # RefreshingToken transparently renews the access token on expiry.
    sdk_config.access_token = RefreshingToken(token_url, token_request_body, headers)
    sdk_config.host = api_url

    return (lusid.SyncApiClient(configuration=sdk_config), lusid)
|