irp-integration 0.2.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36) hide show
  1. irp_integration-0.2.0/.github/workflows/ci.yml +27 -0
  2. irp_integration-0.2.0/.github/workflows/publish-test.yml +70 -0
  3. irp_integration-0.2.0/.github/workflows/publish.yml +69 -0
  4. irp_integration-0.2.0/.gitignore +14 -0
  5. irp_integration-0.2.0/CITATION.cff +9 -0
  6. irp_integration-0.2.0/LICENSE +21 -0
  7. irp_integration-0.2.0/MANIFEST.in +6 -0
  8. irp_integration-0.2.0/PKG-INFO +214 -0
  9. irp_integration-0.2.0/README.md +187 -0
  10. irp_integration-0.2.0/docs/api.md +2700 -0
  11. irp_integration-0.2.0/irp_integration/__init__.py +59 -0
  12. irp_integration-0.2.0/irp_integration/analysis.py +1598 -0
  13. irp_integration-0.2.0/irp_integration/client.py +395 -0
  14. irp_integration-0.2.0/irp_integration/constants.py +110 -0
  15. irp_integration-0.2.0/irp_integration/databridge.py +786 -0
  16. irp_integration-0.2.0/irp_integration/edm.py +634 -0
  17. irp_integration-0.2.0/irp_integration/exceptions.py +101 -0
  18. irp_integration-0.2.0/irp_integration/export_job.py +165 -0
  19. irp_integration-0.2.0/irp_integration/import_job.py +187 -0
  20. irp_integration-0.2.0/irp_integration/mri_import.py +192 -0
  21. irp_integration-0.2.0/irp_integration/portfolio.py +582 -0
  22. irp_integration-0.2.0/irp_integration/py.typed +0 -0
  23. irp_integration-0.2.0/irp_integration/rdm.py +758 -0
  24. irp_integration-0.2.0/irp_integration/reference_data.py +687 -0
  25. irp_integration-0.2.0/irp_integration/risk_data_job.py +189 -0
  26. irp_integration-0.2.0/irp_integration/s3.py +519 -0
  27. irp_integration-0.2.0/irp_integration/treaty.py +391 -0
  28. irp_integration-0.2.0/irp_integration/utils.py +134 -0
  29. irp_integration-0.2.0/irp_integration/validators.py +152 -0
  30. irp_integration-0.2.0/irp_integration.egg-info/PKG-INFO +214 -0
  31. irp_integration-0.2.0/irp_integration.egg-info/SOURCES.txt +34 -0
  32. irp_integration-0.2.0/irp_integration.egg-info/dependency_links.txt +1 -0
  33. irp_integration-0.2.0/irp_integration.egg-info/requires.txt +7 -0
  34. irp_integration-0.2.0/irp_integration.egg-info/top_level.txt +1 -0
  35. irp_integration-0.2.0/pyproject.toml +43 -0
  36. irp_integration-0.2.0/setup.cfg +4 -0
@@ -0,0 +1,27 @@
1
+ name: CI
2
+
3
+ on:
4
+ pull_request:
5
+ branches: [main]
6
+
7
+ jobs:
8
+ test:
9
+ name: Test (Python ${{ matrix.python-version }})
10
+ runs-on: ubuntu-latest
11
+ strategy:
12
+ matrix:
13
+ python-version: ["3.10", "3.11", "3.12"]
14
+ steps:
15
+ - uses: actions/checkout@v4
16
+ with:
17
+ fetch-depth: 0
18
+
19
+ - uses: actions/setup-python@v5
20
+ with:
21
+ python-version: ${{ matrix.python-version }}
22
+
23
+ - name: Install package
24
+ run: pip install .
25
+
26
+ - name: Smoke test - verify import
27
+ run: python -c "from irp_integration import IRPClient; print(f'irp_integration imported successfully on Python {__import__(\"sys\").version}')"
@@ -0,0 +1,70 @@
1
+ name: Build & Publish to TestPyPI
2
+
3
+ on:
4
+ workflow_dispatch:
5
+
6
+ jobs:
7
+ test:
8
+ name: Test (Python ${{ matrix.python-version }})
9
+ runs-on: ubuntu-latest
10
+ strategy:
11
+ matrix:
12
+ python-version: ["3.10", "3.11", "3.12"]
13
+ steps:
14
+ - uses: actions/checkout@v4
15
+ with:
16
+ fetch-depth: 0
17
+
18
+ - uses: actions/setup-python@v5
19
+ with:
20
+ python-version: ${{ matrix.python-version }}
21
+
22
+ - name: Install package
23
+ run: pip install .
24
+
25
+ - name: Smoke test - verify import
26
+ run: python -c "from irp_integration import IRPClient; print(f'irp_integration imported successfully on Python {__import__(\"sys\").version}')"
27
+
28
+ build:
29
+ name: Build distribution
30
+ needs: test
31
+ runs-on: ubuntu-latest
32
+ steps:
33
+ - uses: actions/checkout@v4
34
+ with:
35
+ fetch-depth: 0
36
+
37
+ - uses: actions/setup-python@v5
38
+ with:
39
+ python-version: "3.12"
40
+
41
+ - name: Install build tools
42
+ run: pip install build
43
+
44
+ - name: Build sdist and wheel
45
+ run: python -m build
46
+
47
+ - name: Upload artifacts
48
+ uses: actions/upload-artifact@v4
49
+ with:
50
+ name: dist
51
+ path: dist/
52
+
53
+ publish:
54
+ name: Publish to TestPyPI
55
+ needs: build
56
+ runs-on: ubuntu-latest
57
+ environment: testpypi
58
+ permissions:
59
+ id-token: write
60
+ steps:
61
+ - name: Download artifacts
62
+ uses: actions/download-artifact@v4
63
+ with:
64
+ name: dist
65
+ path: dist/
66
+
67
+ - name: Publish to TestPyPI
68
+ uses: pypa/gh-action-pypi-publish@release/v1
69
+ with:
70
+ repository-url: https://test.pypi.org/legacy/
@@ -0,0 +1,69 @@
1
+ name: Build & Publish to PyPI
2
+
3
+ on:
4
+ release:
5
+ types: [published]
6
+
7
+ jobs:
8
+ test:
9
+ name: Test (Python ${{ matrix.python-version }})
10
+ runs-on: ubuntu-latest
11
+ strategy:
12
+ matrix:
13
+ python-version: ["3.10", "3.11", "3.12"]
14
+ steps:
15
+ - uses: actions/checkout@v4
16
+ with:
17
+ fetch-depth: 0
18
+
19
+ - uses: actions/setup-python@v5
20
+ with:
21
+ python-version: ${{ matrix.python-version }}
22
+
23
+ - name: Install package
24
+ run: pip install .
25
+
26
+ - name: Smoke test - verify import
27
+ run: python -c "from irp_integration import IRPClient; print(f'irp_integration imported successfully on Python {__import__(\"sys\").version}')"
28
+
29
+ build:
30
+ name: Build distribution
31
+ needs: test
32
+ runs-on: ubuntu-latest
33
+ steps:
34
+ - uses: actions/checkout@v4
35
+ with:
36
+ fetch-depth: 0
37
+
38
+ - uses: actions/setup-python@v5
39
+ with:
40
+ python-version: "3.12"
41
+
42
+ - name: Install build tools
43
+ run: pip install build
44
+
45
+ - name: Build sdist and wheel
46
+ run: python -m build
47
+
48
+ - name: Upload artifacts
49
+ uses: actions/upload-artifact@v4
50
+ with:
51
+ name: dist
52
+ path: dist/
53
+
54
+ publish:
55
+ name: Publish to PyPI
56
+ needs: build
57
+ runs-on: ubuntu-latest
58
+ environment: pypi
59
+ permissions:
60
+ id-token: write
61
+ steps:
62
+ - name: Download artifacts
63
+ uses: actions/download-artifact@v4
64
+ with:
65
+ name: dist
66
+ path: dist/
67
+
68
+ - name: Publish to PyPI
69
+ uses: pypa/gh-action-pypi-publish@release/v1
@@ -0,0 +1,14 @@
1
+ # Claude files
2
+ CLAUDE.md
3
+
4
+ # Env File
5
+ .env
6
+
7
+ # Pycache
8
+ __pycache__/
9
+
10
+ # Build/packaging artifacts
11
+ dist/
12
+ build/
13
+ *.egg-info/
14
+ *.egg
@@ -0,0 +1,9 @@
1
+ cff-version: 1.2.0
2
+ message: "If you use this software, please cite it as below."
3
+ authors:
4
+ - name: "PremiumIQ LLC"
5
+ website: "https://premiumiq.com"
6
+ title: "irp-integration"
7
+ version: 0.2.0
8
+ date-released: 2026-02-11
9
+ url: "https://github.com/premiumiq/irp-integration"
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 PremiumIQ LLC
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,6 @@
1
+ include LICENSE
2
+ include README.md
3
+ include CITATION.cff
4
+ include irp_integration/py.typed
5
+ recursive-exclude * __pycache__
6
+ recursive-exclude * *.py[cod]
@@ -0,0 +1,214 @@
1
+ Metadata-Version: 2.4
2
+ Name: irp-integration
3
+ Version: 0.2.0
4
+ Summary: Python client library for Moody's Intelligent Risk Platform (IRP) APIs
5
+ Author-email: Ben Bailey <bbailey@premiumiq.com>, Anil Venugopal <avenugopal@premiumiq.com>
6
+ License-Expression: MIT
7
+ Project-URL: Homepage, https://github.com/premiumiq/irp-integration
8
+ Project-URL: Repository, https://github.com/premiumiq/irp-integration
9
+ Project-URL: Issues, https://github.com/premiumiq/irp-integration/issues
10
+ Classifier: Development Status :: 4 - Beta
11
+ Classifier: Intended Audience :: Developers
12
+ Classifier: Operating System :: OS Independent
13
+ Classifier: Programming Language :: Python :: 3
14
+ Classifier: Programming Language :: Python :: 3.10
15
+ Classifier: Programming Language :: Python :: 3.11
16
+ Classifier: Programming Language :: Python :: 3.12
17
+ Requires-Python: >=3.10
18
+ Description-Content-Type: text/markdown
19
+ License-File: LICENSE
20
+ Requires-Dist: requests>=2.28.0
21
+ Requires-Dist: boto3>=1.26.0
22
+ Provides-Extra: databridge
23
+ Requires-Dist: pyodbc>=4.0.0; extra == "databridge"
24
+ Requires-Dist: pandas>=1.5.0; extra == "databridge"
25
+ Requires-Dist: numpy>=1.23.0; extra == "databridge"
26
+ Dynamic: license-file
27
+
28
+ # irp-integration
29
+
30
+ A Python client library for the [Moody's Intelligent Risk Platform (IRP) APIs](https://developer.rms.com/). Built to serve as a foundation for larger Moody's integration projects — use it with Jupyter Notebooks, Azure Functions, or any orchestration layer to build end-to-end risk analysis workflows.
31
+
32
+ Not all Moody's API functionality is covered yet, but the most common operations are available and the library is actively maintained. Contributions are welcome — feel free to fork and modify to fit your project's needs.
33
+
34
+ ## Installation
35
+
36
+ ```bash
37
+ pip install irp-integration
38
+ ```
39
+
40
+ To include Data Bridge (SQL Server) support:
41
+
42
+ ```bash
43
+ pip install irp-integration[databridge]
44
+ ```
45
+
46
+ > **Note:** Data Bridge requires [Microsoft ODBC Driver 18 for SQL Server](https://learn.microsoft.com/en-us/sql/connect/odbc/download-odbc-driver-for-sql-server) to be installed on your system.
47
+
48
+ ## Quick Start
49
+
50
+ ```python
51
+ from irp_integration import IRPClient
52
+
53
+ # Requires environment variables (see Configuration below)
54
+ client = IRPClient()
55
+
56
+ # Search EDMs
57
+ edms = client.edm.search_edms(filter = 'exposureName = "my_edm"')
58
+
59
+ # Get portfolios for an EDM
60
+ edm = edms[0]
61
+ exposure_id = edm['exposureId']
62
+ portfolios = client.portfolio.search_portfolios(exposure_id = exposure_id)
63
+
64
+ # Run analysis on a portfolio
65
+ edm_name = edm['exposureName']
66
+ portfolio = portfolios[0]
67
+ portfolio_name = portfolio['portfolioName']
68
+ client.analysis.submit_portfolio_analysis_job(
69
+ edm_name=edm_name,
70
+ portfolio_name=portfolio_name,
71
+ job_name="Readme Analysis",
72
+ model_profile_id=4418,
73
+ output_profile_id=123,
74
+ event_rate_scheme_id=739,
75
+ treaty_names=['Working Excess Treaty 1'],
76
+ tag_names=['Tag1', 'Tag2']
77
+ )
78
+ ```
79
+
80
+ ## Configuration
81
+
82
+ The library reads configuration from environment variables:
83
+
84
+ | Variable | Required | Description |
85
+ |----------|----------|-------------|
86
+ | `RISK_MODELER_BASE_URL` | Yes | Moody's Risk Modeler API base URL |
87
+ | `RISK_MODELER_API_KEY` | Yes | API authentication key |
88
+ | `RISK_MODELER_RESOURCE_GROUP_ID` | Yes | Resource group ID for your organization |
89
+
90
+ You can set these in your shell, or use a `.env` file with [python-dotenv](https://pypi.org/project/python-dotenv/):
91
+
92
+ ```python
93
+ from dotenv import load_dotenv
94
+ load_dotenv()
95
+
96
+ from irp_integration import IRPClient
97
+ client = IRPClient()
98
+ ```
99
+
100
+ ### Data Bridge Configuration
101
+
102
+ The Data Bridge module (`client.databridge`) connects directly to Moody's SQL Server databases via ODBC. It requires separate setup from the REST API.
103
+
104
+ **Prerequisites:**
105
+
106
+ 1. Install the optional dependency: `pip install irp-integration[databridge]`
107
+ 2. Install [Microsoft ODBC Driver 18 for SQL Server](https://learn.microsoft.com/en-us/sql/connect/odbc/download-odbc-driver-for-sql-server):
108
+ - **Windows:** Download and run the MSI installer from Microsoft
109
+ - **Linux (Debian/Ubuntu):** `sudo apt-get install -y unixodbc-dev && sudo ACCEPT_EULA=Y apt-get install -y msodbcsql18`
110
+ - **macOS:** `brew install microsoft/mssql-release/msodbcsql18`
111
+
112
+ **Environment variables (per connection):**
113
+
114
+ Each named connection uses the prefix `MSSQL_{CONNECTION_NAME}_`:
115
+
116
+ | Variable | Required | Description |
117
+ |----------|----------|-------------|
118
+ | `MSSQL_DATABRIDGE_SERVER` | Yes | Server hostname or IP |
119
+ | `MSSQL_DATABRIDGE_USER` | Yes | SQL Server username |
120
+ | `MSSQL_DATABRIDGE_PASSWORD` | Yes | SQL Server password |
121
+ | `MSSQL_DATABRIDGE_PORT` | No | Port (default: 1433) |
122
+
123
+ **Global settings:**
124
+
125
+ | Variable | Default | Description |
126
+ |----------|---------|-------------|
127
+ | `MSSQL_DRIVER` | `ODBC Driver 18 for SQL Server` | ODBC driver name |
128
+ | `MSSQL_TRUST_CERT` | `yes` | Trust server certificate |
129
+ | `MSSQL_TIMEOUT` | `30` | Connection timeout in seconds |
130
+
131
+ **Example:**
132
+
133
+ ```bash
134
+ # .env file
135
+ MSSQL_DATABRIDGE_SERVER=databridge.company.com
136
+ MSSQL_DATABRIDGE_USER=svc_account
137
+ MSSQL_DATABRIDGE_PASSWORD=secretpassword
138
+ ```
139
+
140
+ ```python
141
+ from irp_integration.databridge import DataBridgeManager
142
+
143
+ dbm = DataBridgeManager()
144
+
145
+ # Inline query with parameters
146
+ df = dbm.execute_query(
147
+ "SELECT * FROM portfolios WHERE value > {{ min_value }}",
148
+ params={'min_value': 1000000},
149
+ database='DataWarehouse'
150
+ )
151
+
152
+ # Execute SQL script from file
153
+ results = dbm.execute_query_from_file(
154
+ 'C:/sql/extract_policies.sql',
155
+ params={'cycle_name': 'Q1-2025'},
156
+ database='AnalyticsDB'
157
+ )
158
+ ```
159
+
160
+ ## Features
161
+
162
+ - **Automatic retry** with exponential backoff for transient errors (429, 5xx)
163
+ - **Workflow polling** — submit long-running operations and automatically poll to completion
164
+ - **Batch workflow execution** — run multiple workflows in parallel and wait for all to finish
165
+ - **Structured logging** via Python's `logging` module for visibility into API calls and workflow progress
166
+ - **Connection pooling** via persistent HTTP sessions
167
+ - **Input validation** with descriptive error messages
168
+ - **Custom exception hierarchy** for structured error handling
169
+ - **S3 upload/download** with multipart transfer support
170
+ - **Data Bridge (SQL Server)** — direct SQL execution against Moody's Data Bridge with parameterized queries and file-based scripts
171
+ - **Type hints** on all public methods
172
+
173
+ ## Modules
174
+
175
+ | Manager | Description |
176
+ |---------|-------------|
177
+ | `client.edm` | Exposure Data Manager — create, upgrade, duplicate, and delete EDMs |
178
+ | `client.portfolio` | Portfolio CRUD, geocoding, and hazard processing |
179
+ | `client.mri_import` | MRI (CSV) data import workflow — bucket creation, file upload, mapping, and execution |
180
+ | `client.treaty` | Reinsurance treaty creation, LOB assignment, and reference data |
181
+ | `client.analysis` | Risk analysis execution, profiles, event rate schemes, and analysis groups |
182
+ | `client.rdm` | Results Data Mart — export analysis results to RDM |
183
+ | `client.risk_data_job` | Risk data job status tracking |
184
+ | `client.import_job` | Platform import job management (EDM/RDM imports) |
185
+ | `client.export_job` | Platform export job management — status, polling, and result download |
186
+ | `client.databridge` | Data Bridge (SQL Server) — parameterized queries, file-based SQL execution |
187
+ | `client.reference_data` | Tags, currencies, and other reference data lookups |
188
+
189
+ ## Error Handling
190
+
191
+ The library uses a custom exception hierarchy:
192
+
193
+ ```python
194
+ from irp_integration.exceptions import (
195
+ IRPIntegrationError, # Base exception
196
+ IRPAPIError, # HTTP/API errors
197
+ IRPValidationError, # Input validation failures
198
+ IRPWorkflowError, # Workflow execution failures
199
+ IRPReferenceDataError, # Reference data lookup failures
200
+ IRPFileError, # File operation failures
201
+ IRPJobError, # Job management errors
202
+ IRPDataBridgeError, # Data Bridge base error
203
+ IRPDataBridgeConnectionError, # SQL Server connection failures
204
+ IRPDataBridgeQueryError, # SQL query execution failures
205
+ )
206
+ ```
207
+
208
+ ## API Documentation
209
+
210
+ For detailed API endpoint documentation, see [docs/api.md](https://github.com/premiumiq/irp-integration/blob/main/docs/api.md).
211
+
212
+ ## License
213
+
214
+ This project is licensed under the MIT License — see the [LICENSE](https://github.com/premiumiq/irp-integration/blob/main/LICENSE) file for details.
@@ -0,0 +1,187 @@
1
+ # irp-integration
2
+
3
+ A Python client library for the [Moody's Intelligent Risk Platform (IRP) APIs](https://developer.rms.com/). Built to serve as a foundation for larger Moody's integration projects — use it with Jupyter Notebooks, Azure Functions, or any orchestration layer to build end-to-end risk analysis workflows.
4
+
5
+ Not all Moody's API functionality is covered yet, but the most common operations are available and the library is actively maintained. Contributions are welcome — feel free to fork and modify to fit your project's needs.
6
+
7
+ ## Installation
8
+
9
+ ```bash
10
+ pip install irp-integration
11
+ ```
12
+
13
+ To include Data Bridge (SQL Server) support:
14
+
15
+ ```bash
16
+ pip install irp-integration[databridge]
17
+ ```
18
+
19
+ > **Note:** Data Bridge requires [Microsoft ODBC Driver 18 for SQL Server](https://learn.microsoft.com/en-us/sql/connect/odbc/download-odbc-driver-for-sql-server) to be installed on your system.
20
+
21
+ ## Quick Start
22
+
23
+ ```python
24
+ from irp_integration import IRPClient
25
+
26
+ # Requires environment variables (see Configuration below)
27
+ client = IRPClient()
28
+
29
+ # Search EDMs
30
+ edms = client.edm.search_edms(filter = 'exposureName = "my_edm"')
31
+
32
+ # Get portfolios for an EDM
33
+ edm = edms[0]
34
+ exposure_id = edm['exposureId']
35
+ portfolios = client.portfolio.search_portfolios(exposure_id = exposure_id)
36
+
37
+ # Run analysis on a portfolio
38
+ edm_name = edm['exposureName']
39
+ portfolio = portfolios[0]
40
+ portfolio_name = portfolio['portfolioName']
41
+ client.analysis.submit_portfolio_analysis_job(
42
+ edm_name=edm_name,
43
+ portfolio_name=portfolio_name,
44
+ job_name="Readme Analysis",
45
+ model_profile_id=4418,
46
+ output_profile_id=123,
47
+ event_rate_scheme_id=739,
48
+ treaty_names=['Working Excess Treaty 1'],
49
+ tag_names=['Tag1', 'Tag2']
50
+ )
51
+ ```
52
+
53
+ ## Configuration
54
+
55
+ The library reads configuration from environment variables:
56
+
57
+ | Variable | Required | Description |
58
+ |----------|----------|-------------|
59
+ | `RISK_MODELER_BASE_URL` | Yes | Moody's Risk Modeler API base URL |
60
+ | `RISK_MODELER_API_KEY` | Yes | API authentication key |
61
+ | `RISK_MODELER_RESOURCE_GROUP_ID` | Yes | Resource group ID for your organization |
62
+
63
+ You can set these in your shell, or use a `.env` file with [python-dotenv](https://pypi.org/project/python-dotenv/):
64
+
65
+ ```python
66
+ from dotenv import load_dotenv
67
+ load_dotenv()
68
+
69
+ from irp_integration import IRPClient
70
+ client = IRPClient()
71
+ ```
72
+
73
+ ### Data Bridge Configuration
74
+
75
+ The Data Bridge module (`client.databridge`) connects directly to Moody's SQL Server databases via ODBC. It requires separate setup from the REST API.
76
+
77
+ **Prerequisites:**
78
+
79
+ 1. Install the optional dependency: `pip install irp-integration[databridge]`
80
+ 2. Install [Microsoft ODBC Driver 18 for SQL Server](https://learn.microsoft.com/en-us/sql/connect/odbc/download-odbc-driver-for-sql-server):
81
+ - **Windows:** Download and run the MSI installer from Microsoft
82
+ - **Linux (Debian/Ubuntu):** `sudo apt-get install -y unixodbc-dev && sudo ACCEPT_EULA=Y apt-get install -y msodbcsql18`
83
+ - **macOS:** `brew install microsoft/mssql-release/msodbcsql18`
84
+
85
+ **Environment variables (per connection):**
86
+
87
+ Each named connection uses the prefix `MSSQL_{CONNECTION_NAME}_`:
88
+
89
+ | Variable | Required | Description |
90
+ |----------|----------|-------------|
91
+ | `MSSQL_DATABRIDGE_SERVER` | Yes | Server hostname or IP |
92
+ | `MSSQL_DATABRIDGE_USER` | Yes | SQL Server username |
93
+ | `MSSQL_DATABRIDGE_PASSWORD` | Yes | SQL Server password |
94
+ | `MSSQL_DATABRIDGE_PORT` | No | Port (default: 1433) |
95
+
96
+ **Global settings:**
97
+
98
+ | Variable | Default | Description |
99
+ |----------|---------|-------------|
100
+ | `MSSQL_DRIVER` | `ODBC Driver 18 for SQL Server` | ODBC driver name |
101
+ | `MSSQL_TRUST_CERT` | `yes` | Trust server certificate |
102
+ | `MSSQL_TIMEOUT` | `30` | Connection timeout in seconds |
103
+
104
+ **Example:**
105
+
106
+ ```bash
107
+ # .env file
108
+ MSSQL_DATABRIDGE_SERVER=databridge.company.com
109
+ MSSQL_DATABRIDGE_USER=svc_account
110
+ MSSQL_DATABRIDGE_PASSWORD=secretpassword
111
+ ```
112
+
113
+ ```python
114
+ from irp_integration.databridge import DataBridgeManager
115
+
116
+ dbm = DataBridgeManager()
117
+
118
+ # Inline query with parameters
119
+ df = dbm.execute_query(
120
+ "SELECT * FROM portfolios WHERE value > {{ min_value }}",
121
+ params={'min_value': 1000000},
122
+ database='DataWarehouse'
123
+ )
124
+
125
+ # Execute SQL script from file
126
+ results = dbm.execute_query_from_file(
127
+ 'C:/sql/extract_policies.sql',
128
+ params={'cycle_name': 'Q1-2025'},
129
+ database='AnalyticsDB'
130
+ )
131
+ ```
132
+
133
+ ## Features
134
+
135
+ - **Automatic retry** with exponential backoff for transient errors (429, 5xx)
136
+ - **Workflow polling** — submit long-running operations and automatically poll to completion
137
+ - **Batch workflow execution** — run multiple workflows in parallel and wait for all to finish
138
+ - **Structured logging** via Python's `logging` module for visibility into API calls and workflow progress
139
+ - **Connection pooling** via persistent HTTP sessions
140
+ - **Input validation** with descriptive error messages
141
+ - **Custom exception hierarchy** for structured error handling
142
+ - **S3 upload/download** with multipart transfer support
143
+ - **Data Bridge (SQL Server)** — direct SQL execution against Moody's Data Bridge with parameterized queries and file-based scripts
144
+ - **Type hints** on all public methods
145
+
146
+ ## Modules
147
+
148
+ | Manager | Description |
149
+ |---------|-------------|
150
+ | `client.edm` | Exposure Data Manager — create, upgrade, duplicate, and delete EDMs |
151
+ | `client.portfolio` | Portfolio CRUD, geocoding, and hazard processing |
152
+ | `client.mri_import` | MRI (CSV) data import workflow — bucket creation, file upload, mapping, and execution |
153
+ | `client.treaty` | Reinsurance treaty creation, LOB assignment, and reference data |
154
+ | `client.analysis` | Risk analysis execution, profiles, event rate schemes, and analysis groups |
155
+ | `client.rdm` | Results Data Mart — export analysis results to RDM |
156
+ | `client.risk_data_job` | Risk data job status tracking |
157
+ | `client.import_job` | Platform import job management (EDM/RDM imports) |
158
+ | `client.export_job` | Platform export job management — status, polling, and result download |
159
+ | `client.databridge` | Data Bridge (SQL Server) — parameterized queries, file-based SQL execution |
160
+ | `client.reference_data` | Tags, currencies, and other reference data lookups |
161
+
162
+ ## Error Handling
163
+
164
+ The library uses a custom exception hierarchy:
165
+
166
+ ```python
167
+ from irp_integration.exceptions import (
168
+ IRPIntegrationError, # Base exception
169
+ IRPAPIError, # HTTP/API errors
170
+ IRPValidationError, # Input validation failures
171
+ IRPWorkflowError, # Workflow execution failures
172
+ IRPReferenceDataError, # Reference data lookup failures
173
+ IRPFileError, # File operation failures
174
+ IRPJobError, # Job management errors
175
+ IRPDataBridgeError, # Data Bridge base error
176
+ IRPDataBridgeConnectionError, # SQL Server connection failures
177
+ IRPDataBridgeQueryError, # SQL query execution failures
178
+ )
179
+ ```
180
+
181
+ ## API Documentation
182
+
183
+ For detailed API endpoint documentation, see [docs/api.md](https://github.com/premiumiq/irp-integration/blob/main/docs/api.md).
184
+
185
+ ## License
186
+
187
+ This project is licensed under the MIT License — see the [LICENSE](https://github.com/premiumiq/irp-integration/blob/main/LICENSE) file for details.