flow-platform-sdk 0.1.6__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flow_platform_sdk-0.1.6/.bumpversion.toml +26 -0
- flow_platform_sdk-0.1.6/.gitignore +135 -0
- flow_platform_sdk-0.1.6/.vscode/settings.json +11 -0
- flow_platform_sdk-0.1.6/PKG-INFO +130 -0
- flow_platform_sdk-0.1.6/README.md +119 -0
- flow_platform_sdk-0.1.6/RELEASE.md +66 -0
- flow_platform_sdk-0.1.6/pyproject.toml +48 -0
- flow_platform_sdk-0.1.6/release.sh +105 -0
- flow_platform_sdk-0.1.6/src/flow_platform_sdk/__init__.py +16 -0
- flow_platform_sdk-0.1.6/src/flow_platform_sdk/models/__init__.py +8 -0
- flow_platform_sdk-0.1.6/src/flow_platform_sdk/models/channel.py +24 -0
- flow_platform_sdk-0.1.6/src/flow_platform_sdk/py.typed +0 -0
- flow_platform_sdk-0.1.6/src/flow_platform_sdk/services/__init__.py +7 -0
- flow_platform_sdk-0.1.6/src/flow_platform_sdk/services/base.py +14 -0
- flow_platform_sdk-0.1.6/src/flow_platform_sdk/services/platform_api.py +123 -0
- flow_platform_sdk-0.1.6/src/flow_platform_sdk/session/channel/__init__.py +12 -0
- flow_platform_sdk-0.1.6/src/flow_platform_sdk/session/channel/base.py +12 -0
- flow_platform_sdk-0.1.6/src/flow_platform_sdk/session/channel/stdio.py +79 -0
- flow_platform_sdk-0.1.6/src/flow_platform_sdk/session/manager.py +16 -0
- flow_platform_sdk-0.1.6/src/flow_platform_sdk/session/protocol.py +21 -0
- flow_platform_sdk-0.1.6/src/flow_platform_sdk/session/session.py +36 -0
- flow_platform_sdk-0.1.6/tests/usage/basic_usage.py +44 -0
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
[tool.bumpversion]
|
|
2
|
+
current_version = "0.1.6"
|
|
3
|
+
parse = "(?P<major>\\d+)\\.(?P<minor>\\d+)\\.(?P<patch>\\d+)"
|
|
4
|
+
serialize = ["{major}.{minor}.{patch}"]
|
|
5
|
+
search = "{current_version}"
|
|
6
|
+
replace = "{new_version}"
|
|
7
|
+
regex = false
|
|
8
|
+
ignore_missing_version = false
|
|
9
|
+
tag = false
|
|
10
|
+
sign_tags = false
|
|
11
|
+
tag_name = "release/v{new_version}"
|
|
12
|
+
tag_message = "Bump version: {current_version} → {new_version}"
|
|
13
|
+
allow_dirty = false
|
|
14
|
+
commit = false
|
|
15
|
+
message = "Bump version: {current_version} → {new_version}"
|
|
16
|
+
commit_args = ""
|
|
17
|
+
|
|
18
|
+
[[tool.bumpversion.files]]
|
|
19
|
+
filename = "pyproject.toml"
|
|
20
|
+
search = 'version = "{current_version}"'
|
|
21
|
+
replace = 'version = "{new_version}"'
|
|
22
|
+
|
|
23
|
+
[[tool.bumpversion.files]]
|
|
24
|
+
filename = "src/flow_platform_sdk/__init__.py"
|
|
25
|
+
search = '__version__ = "{current_version}"'
|
|
26
|
+
replace = '__version__ = "{new_version}"'
|
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
# Byte-compiled / optimized / DLL files
|
|
2
|
+
__pycache__/
|
|
3
|
+
*.py[cod]
|
|
4
|
+
*$py.class
|
|
5
|
+
|
|
6
|
+
# C extensions
|
|
7
|
+
*.so
|
|
8
|
+
|
|
9
|
+
# Distribution / packaging
|
|
10
|
+
.Python
|
|
11
|
+
build/
|
|
12
|
+
develop-eggs/
|
|
13
|
+
dist/
|
|
14
|
+
downloads/
|
|
15
|
+
eggs/
|
|
16
|
+
.eggs/
|
|
17
|
+
lib/
|
|
18
|
+
lib64/
|
|
19
|
+
parts/
|
|
20
|
+
sdist/
|
|
21
|
+
var/
|
|
22
|
+
wheels/
|
|
23
|
+
pip-wheel-metadata/
|
|
24
|
+
share/python-wheels/
|
|
25
|
+
*.egg-info/
|
|
26
|
+
.installed.cfg
|
|
27
|
+
*.egg
|
|
28
|
+
MANIFEST
|
|
29
|
+
|
|
30
|
+
# PyInstaller
|
|
31
|
+
# Usually these files are written by a python script from a template
|
|
32
|
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
|
33
|
+
*.manifest
|
|
34
|
+
*.spec
|
|
35
|
+
|
|
36
|
+
# Installer logs
|
|
37
|
+
pip-log.txt
|
|
38
|
+
pip-delete-this-directory.txt
|
|
39
|
+
|
|
40
|
+
# Unit test / coverage reports
|
|
41
|
+
htmlcov/
|
|
42
|
+
.tox/
|
|
43
|
+
.nox/
|
|
44
|
+
.coverage
|
|
45
|
+
.coverage.*
|
|
46
|
+
.cache
|
|
47
|
+
nosetests.xml
|
|
48
|
+
coverage.xml
|
|
49
|
+
*.cover
|
|
50
|
+
*.py,cover
|
|
51
|
+
.hypothesis/
|
|
52
|
+
.pytest_cache/
|
|
53
|
+
.DS_Store
|
|
54
|
+
|
|
55
|
+
# Translations
|
|
56
|
+
*.mo
|
|
57
|
+
*.pot
|
|
58
|
+
|
|
59
|
+
# Django stuff:
|
|
60
|
+
*.log
|
|
61
|
+
local_settings.py
|
|
62
|
+
db.sqlite3
|
|
63
|
+
db.sqlite3-journal
|
|
64
|
+
|
|
65
|
+
# Flask stuff:
|
|
66
|
+
instance/
|
|
67
|
+
.webassets-cache
|
|
68
|
+
|
|
69
|
+
# Scrapy stuff:
|
|
70
|
+
.scrapy
|
|
71
|
+
|
|
72
|
+
# Sphinx documentation
|
|
73
|
+
docs/_build/
|
|
74
|
+
|
|
75
|
+
# PyBuilder
|
|
76
|
+
target/
|
|
77
|
+
|
|
78
|
+
# Jupyter Notebook
|
|
79
|
+
.ipynb_checkpoints
|
|
80
|
+
|
|
81
|
+
# IPython
|
|
82
|
+
profile_default/
|
|
83
|
+
ipython_config.py
|
|
84
|
+
|
|
85
|
+
# pyenv
|
|
86
|
+
.python-version
|
|
87
|
+
|
|
88
|
+
# pipenv
|
|
89
|
+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
|
90
|
+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
|
91
|
+
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
|
92
|
+
# install all needed dependencies.
|
|
93
|
+
#Pipfile.lock
|
|
94
|
+
|
|
95
|
+
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
|
|
96
|
+
__pypackages__/
|
|
97
|
+
|
|
98
|
+
# Celery stuff
|
|
99
|
+
celerybeat-schedule
|
|
100
|
+
celerybeat.pid
|
|
101
|
+
|
|
102
|
+
# SageMath parsed files
|
|
103
|
+
*.sage.py
|
|
104
|
+
|
|
105
|
+
# Environments
|
|
106
|
+
.env
|
|
107
|
+
.venv
|
|
108
|
+
env/
|
|
109
|
+
venv/
|
|
110
|
+
ENV/
|
|
111
|
+
env.bak/
|
|
112
|
+
venv.bak/
|
|
113
|
+
|
|
114
|
+
# Spyder project settings
|
|
115
|
+
.spyderproject
|
|
116
|
+
.spyproject
|
|
117
|
+
|
|
118
|
+
# Rope project settings
|
|
119
|
+
.ropeproject
|
|
120
|
+
|
|
121
|
+
# mkdocs documentation
|
|
122
|
+
/site
|
|
123
|
+
|
|
124
|
+
# mypy
|
|
125
|
+
.mypy_cache/
|
|
126
|
+
.dmypy.json
|
|
127
|
+
dmypy.json
|
|
128
|
+
|
|
129
|
+
# Pyre type checker
|
|
130
|
+
.pyre/
|
|
131
|
+
|
|
132
|
+
# Other
|
|
133
|
+
.garden
|
|
134
|
+
test-reports/
|
|
135
|
+
runtime_workspace/
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
{
|
|
2
|
+
"python.analysis.extraPaths": [
|
|
3
|
+
"${workspaceFolder}/src"
|
|
4
|
+
],
|
|
5
|
+
"python.autoComplete.extraPaths": [
|
|
6
|
+
"${workspaceFolder}/src"
|
|
7
|
+
],
|
|
8
|
+
"python.defaultInterpreterPath": "${workspaceFolder}/../.venv/bin/python",
|
|
9
|
+
"python.terminal.activateEnvironment": true
|
|
10
|
+
}
|
|
11
|
+
|
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: flow-platform-sdk
|
|
3
|
+
Version: 0.1.6
|
|
4
|
+
Summary: Python SDK for Uniphore Flow Platform APIs
|
|
5
|
+
Author-email: Uniphore <support@uniphore.com>
|
|
6
|
+
Requires-Python: >=3.10
|
|
7
|
+
Requires-Dist: httpx>=0.27.0
|
|
8
|
+
Requires-Dist: pydantic>=2.0.0
|
|
9
|
+
Requires-Dist: typing-extensions>=4.0.0
|
|
10
|
+
Description-Content-Type: text/markdown
|
|
11
|
+
|
|
12
|
+
# Flow Platform SDK
|
|
13
|
+
|
|
14
|
+
Python SDK for interacting with Uniphore Flow Platform services via async RPC.
|
|
15
|
+
|
|
16
|
+
## Usage
|
|
17
|
+
|
|
18
|
+
```python
|
|
19
|
+
import asyncio
|
|
20
|
+
from flow_platform_sdk import platform_api
|
|
21
|
+
|
|
22
|
+
async def main():
|
|
23
|
+
# List data connectors
|
|
24
|
+
connectors = await platform_api.list_connectors()
|
|
25
|
+
|
|
26
|
+
# Execute SQL query
|
|
27
|
+
result = await platform_api.execute_query(
|
|
28
|
+
connector_id="conn_123",
|
|
29
|
+
sql_query="SELECT * FROM customers LIMIT 10",
|
|
30
|
+
max_rows=10
|
|
31
|
+
)
|
|
32
|
+
|
|
33
|
+
# Query knowledge base
|
|
34
|
+
answer = await platform_api.query_knowledge_base(
|
|
35
|
+
knowledge_base_id="kb_123",
|
|
36
|
+
query="What is the revenue?"
|
|
37
|
+
)
|
|
38
|
+
|
|
39
|
+
asyncio.run(main())
|
|
40
|
+
```
|
|
41
|
+
|
|
42
|
+
## API Reference
|
|
43
|
+
|
|
44
|
+
### Data Connectors
|
|
45
|
+
|
|
46
|
+
```python
|
|
47
|
+
# List all connectors
|
|
48
|
+
await platform_api.list_connectors()
|
|
49
|
+
|
|
50
|
+
# Get schema
|
|
51
|
+
await platform_api.get_schema(connector_id="conn_123")
|
|
52
|
+
|
|
53
|
+
# Execute query
|
|
54
|
+
await platform_api.execute_query(
|
|
55
|
+
connector_id="conn_123",
|
|
56
|
+
sql_query="SELECT * FROM table",
|
|
57
|
+
max_rows=1000
|
|
58
|
+
)
|
|
59
|
+
|
|
60
|
+
# Discover schema
|
|
61
|
+
await platform_api.discover_schema(
|
|
62
|
+
connector_id="conn_123",
|
|
63
|
+
include_sample_data=True,
|
|
64
|
+
table_filter="customer*"
|
|
65
|
+
)
|
|
66
|
+
|
|
67
|
+
# Get table info
|
|
68
|
+
await platform_api.get_table_info(
|
|
69
|
+
connector_id="conn_123",
|
|
70
|
+
table_name="customers"
|
|
71
|
+
)
|
|
72
|
+
|
|
73
|
+
# Get sample data
|
|
74
|
+
await platform_api.get_sample_data(
|
|
75
|
+
connector_id="conn_123",
|
|
76
|
+
table_name="customers",
|
|
77
|
+
limit=5
|
|
78
|
+
)
|
|
79
|
+
```
|
|
80
|
+
|
|
81
|
+
### Knowledge Base
|
|
82
|
+
|
|
83
|
+
```python
|
|
84
|
+
# List knowledge bases
|
|
85
|
+
await platform_api.list_knowledge_bases()
|
|
86
|
+
|
|
87
|
+
# Query knowledge base
|
|
88
|
+
await platform_api.query_knowledge_base(
|
|
89
|
+
knowledge_base_id="kb_123",
|
|
90
|
+
query="What are the main features?"
|
|
91
|
+
)
|
|
92
|
+
```
|
|
93
|
+
|
|
94
|
+
### Agent Evaluation
|
|
95
|
+
|
|
96
|
+
```python
|
|
97
|
+
# Invoke agent with draft skill
|
|
98
|
+
result = await platform_api.invoke_agent(
|
|
99
|
+
agent_spec_id="agent_123",
|
|
100
|
+
draft_skill_id="draft_456",
|
|
101
|
+
input_message="What is the weather today?",
|
|
102
|
+
context={"location": "San Francisco"},
|
|
103
|
+
timeout=60
|
|
104
|
+
)
|
|
105
|
+
```
|
|
106
|
+
|
|
107
|
+
## Architecture
|
|
108
|
+
|
|
109
|
+
The SDK uses stdio-based RPC for communication with backend services:
|
|
110
|
+
|
|
111
|
+
- Requests sent to **stdout**: `REMOTE_SERVICE_CALL:{...}`
|
|
112
|
+
- Responses read from **stdin**: `REMOTE_SERVICE_RESULT:{...}`
|
|
113
|
+
|
|
114
|
+
Designed for sandboxed execution environments.
|
|
115
|
+
|
|
116
|
+
## Configuration
|
|
117
|
+
|
|
118
|
+
- Default timeout: 120 seconds per request
|
|
119
|
+
- Daemon threads for clean exit on timeout
|
|
120
|
+
|
|
121
|
+
## Development
|
|
122
|
+
|
|
123
|
+
```bash
|
|
124
|
+
# Setup
|
|
125
|
+
uv sync
|
|
126
|
+
|
|
127
|
+
# Format code
|
|
128
|
+
uvx ruff format .
|
|
129
|
+
uvx ruff check .
|
|
130
|
+
```
|
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
# Flow Platform SDK
|
|
2
|
+
|
|
3
|
+
Python SDK for interacting with Uniphore Flow Platform services via async RPC.
|
|
4
|
+
|
|
5
|
+
## Usage
|
|
6
|
+
|
|
7
|
+
```python
|
|
8
|
+
import asyncio
|
|
9
|
+
from flow_platform_sdk import platform_api
|
|
10
|
+
|
|
11
|
+
async def main():
|
|
12
|
+
# List data connectors
|
|
13
|
+
connectors = await platform_api.list_connectors()
|
|
14
|
+
|
|
15
|
+
# Execute SQL query
|
|
16
|
+
result = await platform_api.execute_query(
|
|
17
|
+
connector_id="conn_123",
|
|
18
|
+
sql_query="SELECT * FROM customers LIMIT 10",
|
|
19
|
+
max_rows=10
|
|
20
|
+
)
|
|
21
|
+
|
|
22
|
+
# Query knowledge base
|
|
23
|
+
answer = await platform_api.query_knowledge_base(
|
|
24
|
+
knowledge_base_id="kb_123",
|
|
25
|
+
query="What is the revenue?"
|
|
26
|
+
)
|
|
27
|
+
|
|
28
|
+
asyncio.run(main())
|
|
29
|
+
```
|
|
30
|
+
|
|
31
|
+
## API Reference
|
|
32
|
+
|
|
33
|
+
### Data Connectors
|
|
34
|
+
|
|
35
|
+
```python
|
|
36
|
+
# List all connectors
|
|
37
|
+
await platform_api.list_connectors()
|
|
38
|
+
|
|
39
|
+
# Get schema
|
|
40
|
+
await platform_api.get_schema(connector_id="conn_123")
|
|
41
|
+
|
|
42
|
+
# Execute query
|
|
43
|
+
await platform_api.execute_query(
|
|
44
|
+
connector_id="conn_123",
|
|
45
|
+
sql_query="SELECT * FROM table",
|
|
46
|
+
max_rows=1000
|
|
47
|
+
)
|
|
48
|
+
|
|
49
|
+
# Discover schema
|
|
50
|
+
await platform_api.discover_schema(
|
|
51
|
+
connector_id="conn_123",
|
|
52
|
+
include_sample_data=True,
|
|
53
|
+
table_filter="customer*"
|
|
54
|
+
)
|
|
55
|
+
|
|
56
|
+
# Get table info
|
|
57
|
+
await platform_api.get_table_info(
|
|
58
|
+
connector_id="conn_123",
|
|
59
|
+
table_name="customers"
|
|
60
|
+
)
|
|
61
|
+
|
|
62
|
+
# Get sample data
|
|
63
|
+
await platform_api.get_sample_data(
|
|
64
|
+
connector_id="conn_123",
|
|
65
|
+
table_name="customers",
|
|
66
|
+
limit=5
|
|
67
|
+
)
|
|
68
|
+
```
|
|
69
|
+
|
|
70
|
+
### Knowledge Base
|
|
71
|
+
|
|
72
|
+
```python
|
|
73
|
+
# List knowledge bases
|
|
74
|
+
await platform_api.list_knowledge_bases()
|
|
75
|
+
|
|
76
|
+
# Query knowledge base
|
|
77
|
+
await platform_api.query_knowledge_base(
|
|
78
|
+
knowledge_base_id="kb_123",
|
|
79
|
+
query="What are the main features?"
|
|
80
|
+
)
|
|
81
|
+
```
|
|
82
|
+
|
|
83
|
+
### Agent Evaluation
|
|
84
|
+
|
|
85
|
+
```python
|
|
86
|
+
# Invoke agent with draft skill
|
|
87
|
+
result = await platform_api.invoke_agent(
|
|
88
|
+
agent_spec_id="agent_123",
|
|
89
|
+
draft_skill_id="draft_456",
|
|
90
|
+
input_message="What is the weather today?",
|
|
91
|
+
context={"location": "San Francisco"},
|
|
92
|
+
timeout=60
|
|
93
|
+
)
|
|
94
|
+
```
|
|
95
|
+
|
|
96
|
+
## Architecture
|
|
97
|
+
|
|
98
|
+
The SDK uses stdio-based RPC for communication with backend services:
|
|
99
|
+
|
|
100
|
+
- Requests sent to **stdout**: `REMOTE_SERVICE_CALL:{...}`
|
|
101
|
+
- Responses read from **stdin**: `REMOTE_SERVICE_RESULT:{...}`
|
|
102
|
+
|
|
103
|
+
Designed for sandboxed execution environments.
|
|
104
|
+
|
|
105
|
+
## Configuration
|
|
106
|
+
|
|
107
|
+
- Default timeout: 120 seconds per request
|
|
108
|
+
- Daemon threads for clean exit on timeout
|
|
109
|
+
|
|
110
|
+
## Development
|
|
111
|
+
|
|
112
|
+
```bash
|
|
113
|
+
# Setup
|
|
114
|
+
uv sync
|
|
115
|
+
|
|
116
|
+
# Format code
|
|
117
|
+
uvx ruff format .
|
|
118
|
+
uvx ruff check .
|
|
119
|
+
```
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
# Release Process
|
|
2
|
+
|
|
3
|
+
## Prerequisites
|
|
4
|
+
|
|
5
|
+
1. Install uv: `curl -LsSf https://astral.sh/uv/install.sh | sh`
|
|
6
|
+
2. Install GitHub CLI: `brew install gh`
|
|
7
|
+
3. Authenticate: `gh auth login`
|
|
8
|
+
4. Configure PyPI trusted publishing (see below)
|
|
9
|
+
|
|
10
|
+
## Creating a Release
|
|
11
|
+
|
|
12
|
+
Run the release script from the `flow-platform-sdk` directory:
|
|
13
|
+
|
|
14
|
+
```bash
|
|
15
|
+
./release.sh [patch|minor|major]
|
|
16
|
+
```
|
|
17
|
+
|
|
18
|
+
Example:
|
|
19
|
+
```bash
|
|
20
|
+
./release.sh patch # 0.1.0 -> 0.1.1
|
|
21
|
+
./release.sh minor # 0.1.0 -> 0.2.0
|
|
22
|
+
./release.sh major # 0.1.0 -> 1.0.0
|
|
23
|
+
```
|
|
24
|
+
|
|
25
|
+
### Dry Run
|
|
26
|
+
|
|
27
|
+
Test without making changes:
|
|
28
|
+
|
|
29
|
+
```bash
|
|
30
|
+
./release.sh patch --dry-run
|
|
31
|
+
```
|
|
32
|
+
|
|
33
|
+
## Release Workflow
|
|
34
|
+
|
|
35
|
+
1. **Run release script** → Bumps version, creates release branch, opens PR
|
|
36
|
+
2. **Review and merge PR** → Automatically creates git tag and triggers deployment
|
|
37
|
+
3. **GitHub Actions** → Builds package and publishes to PyPI
|
|
38
|
+
|
|
39
|
+
The workflow only triggers if:
|
|
40
|
+
- PR is merged from a `release/v*` branch
|
|
41
|
+
- SDK files (`flow-platform-sdk/**`) were modified
|
|
42
|
+
|
|
43
|
+
## PyPI Trusted Publishing Setup
|
|
44
|
+
|
|
45
|
+
1. Go to https://pypi.org/manage/account/publishing/
|
|
46
|
+
2. Add a new trusted publisher:
|
|
47
|
+
- **PyPI Project Name**: `flow-platform-sdk`
|
|
48
|
+
- **Owner**: `uniphore`
|
|
49
|
+
- **Repository**: `flow-agent`
|
|
50
|
+
- **Workflow name**: `sdk-release.yml`
|
|
51
|
+
- **Environment name**: (leave blank)
|
|
52
|
+
|
|
53
|
+
## Manual Trigger (if auto-deployment fails)
|
|
54
|
+
|
|
55
|
+
If the automatic deployment doesn't trigger after merging:
|
|
56
|
+
|
|
57
|
+
```bash
|
|
58
|
+
gh workflow run sdk-release.yml -f tag=release/v0.1.4
|
|
59
|
+
```
|
|
60
|
+
|
|
61
|
+
## Manual Publishing (emergency only)
|
|
62
|
+
|
|
63
|
+
```bash
|
|
64
|
+
uv build
|
|
65
|
+
uv run twine upload dist/*
|
|
66
|
+
```
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "flow-platform-sdk"
|
|
3
|
+
version = "0.1.6"
|
|
4
|
+
description = "Python SDK for Uniphore Flow Platform APIs"
|
|
5
|
+
readme = "README.md"
|
|
6
|
+
authors = [
|
|
7
|
+
{ name = "Uniphore", email = "support@uniphore.com" }
|
|
8
|
+
]
|
|
9
|
+
requires-python = ">=3.10"
|
|
10
|
+
|
|
11
|
+
dependencies = [
|
|
12
|
+
"httpx>=0.27.0",
|
|
13
|
+
"pydantic>=2.0.0",
|
|
14
|
+
"typing-extensions>=4.0.0",
|
|
15
|
+
]
|
|
16
|
+
|
|
17
|
+
[dependency-groups]
|
|
18
|
+
dev = [
|
|
19
|
+
"pytest>=8.0.0",
|
|
20
|
+
"pytest-asyncio>=0.23.0",
|
|
21
|
+
"pytest-cov>=4.1.0",
|
|
22
|
+
"mypy>=1.8.0",
|
|
23
|
+
"ruff>=0.2.0",
|
|
24
|
+
"respx>=0.21.0",
|
|
25
|
+
"bump-my-version>=1.2.0",
|
|
26
|
+
]
|
|
27
|
+
|
|
28
|
+
[build-system]
|
|
29
|
+
requires = ["hatchling"]
|
|
30
|
+
build-backend = "hatchling.build"
|
|
31
|
+
|
|
32
|
+
[tool.ruff]
|
|
33
|
+
line-length = 100
|
|
34
|
+
target-version = "py310"
|
|
35
|
+
|
|
36
|
+
[tool.ruff.lint]
|
|
37
|
+
select = ["E", "F", "I", "N", "W", "UP"]
|
|
38
|
+
ignore = []
|
|
39
|
+
|
|
40
|
+
[tool.mypy]
|
|
41
|
+
python_version = "3.10"
|
|
42
|
+
strict = true
|
|
43
|
+
warn_return_any = true
|
|
44
|
+
warn_unused_configs = true
|
|
45
|
+
|
|
46
|
+
[tool.pytest.ini_options]
|
|
47
|
+
testpaths = ["tests"]
|
|
48
|
+
asyncio_mode = "auto"
|
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
#!/bin/bash
|
|
2
|
+
set -e
|
|
3
|
+
|
|
4
|
+
# Check if gh CLI is installed
|
|
5
|
+
if ! command -v gh &> /dev/null; then
|
|
6
|
+
echo "Error: GitHub CLI (gh) is not installed."
|
|
7
|
+
exit 1
|
|
8
|
+
fi
|
|
9
|
+
|
|
10
|
+
# Check if version type is provided
|
|
11
|
+
if [ $# -eq 0 ]; then
|
|
12
|
+
echo "Usage: ./release.sh [patch|minor|major] [--dry-run]"
|
|
13
|
+
exit 1
|
|
14
|
+
fi
|
|
15
|
+
|
|
16
|
+
# Check if we're on main branch
|
|
17
|
+
CURRENT_BRANCH=$(git branch --show-current)
|
|
18
|
+
if [ "$CURRENT_BRANCH" != "main" ]; then
|
|
19
|
+
echo "Error: Release script can only be run from the main branch."
|
|
20
|
+
echo "Current branch: $CURRENT_BRANCH"
|
|
21
|
+
echo "Please checkout main and try again: git checkout main"
|
|
22
|
+
exit 1
|
|
23
|
+
fi
|
|
24
|
+
|
|
25
|
+
# Check if workspace is clean
|
|
26
|
+
if [ -n "$(git status --porcelain)" ]; then
|
|
27
|
+
echo "Error: Working directory is not clean. Please commit or stash your changes first."
|
|
28
|
+
git status --short
|
|
29
|
+
exit 1
|
|
30
|
+
fi
|
|
31
|
+
|
|
32
|
+
# Ensure main is up to date
|
|
33
|
+
echo "Pulling latest changes from main..."
|
|
34
|
+
git pull origin main
|
|
35
|
+
|
|
36
|
+
VERSION_TYPE=$1
|
|
37
|
+
DRY_RUN=${2:-""}
|
|
38
|
+
|
|
39
|
+
if [ "$DRY_RUN" = "--dry-run" ]; then
|
|
40
|
+
echo "DRY RUN MODE - No actual changes will be made"
|
|
41
|
+
CURRENT_VERSION=$(grep 'current_version = ' .bumpversion.toml | sed 's/current_version = "\(.*\)"/\1/')
|
|
42
|
+
echo "Current version: $CURRENT_VERSION"
|
|
43
|
+
echo "Would bump $VERSION_TYPE version..."
|
|
44
|
+
uv run bump-my-version bump $VERSION_TYPE --dry-run --allow-dirty --verbose
|
|
45
|
+
exit 0
|
|
46
|
+
fi
|
|
47
|
+
|
|
48
|
+
# Bump version without creating commit or tag
|
|
49
|
+
echo "Bumping $VERSION_TYPE version (without commit)..."
|
|
50
|
+
uv run bump-my-version bump $VERSION_TYPE
|
|
51
|
+
|
|
52
|
+
# Get the new version
|
|
53
|
+
NEW_VERSION=$(grep 'current_version = ' .bumpversion.toml | sed 's/current_version = "\(.*\)"/\1/')
|
|
54
|
+
echo "New version: $NEW_VERSION"
|
|
55
|
+
|
|
56
|
+
# Create release branch with new version number
|
|
57
|
+
RELEASE_BRANCH="release/v${NEW_VERSION}"
|
|
58
|
+
echo "Creating release branch: $RELEASE_BRANCH"
|
|
59
|
+
|
|
60
|
+
# Delete branch if it exists locally
|
|
61
|
+
if git show-ref --verify --quiet refs/heads/$RELEASE_BRANCH; then
|
|
62
|
+
echo "Branch $RELEASE_BRANCH already exists locally. Deleting..."
|
|
63
|
+
git branch -D $RELEASE_BRANCH
|
|
64
|
+
fi
|
|
65
|
+
|
|
66
|
+
git checkout -b $RELEASE_BRANCH
|
|
67
|
+
|
|
68
|
+
# Commit the version changes
|
|
69
|
+
echo "Committing version changes..."
|
|
70
|
+
git add -A
|
|
71
|
+
git commit -m "Release v$NEW_VERSION"
|
|
72
|
+
|
|
73
|
+
# Push release branch
|
|
74
|
+
echo "Pushing release branch..."
|
|
75
|
+
git push --force origin $RELEASE_BRANCH
|
|
76
|
+
|
|
77
|
+
# Create PR for release (manual review required)
|
|
78
|
+
echo "Creating PR for release..."
|
|
79
|
+
|
|
80
|
+
# Ensure gh is authenticated
|
|
81
|
+
if ! gh auth status >/dev/null 2>&1; then
|
|
82
|
+
echo "GitHub CLI not authenticated. Starting authentication..."
|
|
83
|
+
gh auth login
|
|
84
|
+
fi
|
|
85
|
+
|
|
86
|
+
gh pr create --title "Release v${NEW_VERSION}" \
|
|
87
|
+
--body "## Pull Request Details
|
|
88
|
+
**JIRA/EBUG Ticket:** https://uniphore.atlassian.net/browse/FOR-0000
|
|
89
|
+
|
|
90
|
+
## Description
|
|
91
|
+
**What changed:** Release version ${NEW_VERSION}
|
|
92
|
+
|
|
93
|
+
**Impact:** New SDK version published to PyPI
|
|
94
|
+
|
|
95
|
+
## Testing & Validation
|
|
96
|
+
**How tested:** Automated version bump and build validation
|
|
97
|
+
|
|
98
|
+
## Deployment
|
|
99
|
+
**Plan for deployment:** Automated PyPI release via GitHub Actions" \
|
|
100
|
+
--base main
|
|
101
|
+
|
|
102
|
+
echo "Release PR created!"
|
|
103
|
+
echo "- Release branch: $RELEASE_BRANCH"
|
|
104
|
+
echo "- PR created for manual review and merge"
|
|
105
|
+
echo "- Tag and release will be created automatically when PR is merged"
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
"""Flow Platform SDK - Python SDK for interacting with the Uniphore Platform API.

Public API:
    from flow_platform_sdk import platform_api

    # Use the service directly
    result = await platform_api.list_connectors()
"""

from .services.platform_api import PlatformApiService

# Singleton service instance
platform_api = PlatformApiService()

# NOTE: must match pyproject.toml's `version` (managed by bump-my-version);
# fixed here from the stale "0.1.5" that shipped in the 0.1.6 release.
__version__ = "0.1.6"
__all__ = ["platform_api"]
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
"""Data models for channel communication."""
|
|
2
|
+
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
from pydantic import BaseModel, Field
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class ChannelRequest(BaseModel):
    """Request message structure for channel communication.

    Serialized to JSON and written to the transport; the ``id`` is echoed
    back by the peer so responses can be correlated with requests.
    """

    id: str = Field(..., description="Unique identifier for the request")
    operation: str = Field(..., description="Type of the operation/command")
    # Fixed grammar in the description ("belong" -> "belongs").
    service: str = Field(..., description="Type of the service the operation belongs to")
    input: dict[str, Any] = Field(
        default_factory=dict, description="Input parameters for the request"
    )
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class ChannelResponse(BaseModel):
    """Response message structure for channel communication."""

    # Correlates this response with the originating ChannelRequest.
    id: str = Field(..., description="Request ID this response corresponds to")
    # On success `result` is populated; on failure `error` carries details.
    result: dict[str, Any] | None = Field(default=None, description="Result data if successful")
    error: dict[str, Any] | None = Field(default=None, description="Error details if failed")
|
|
File without changes
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
from typing import Any
|
|
2
|
+
|
|
3
|
+
from ..session.manager import get_session
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class ServiceProxy:
    """Base class for remote platform services.

    Subclasses name a remote service and funnel every call through
    :meth:`_invoke`, which routes via the process-wide session.
    """

    def __init__(self, service_name: str):
        # Remote service identifier stamped onto every RPC request.
        self.service_name = service_name

    async def _invoke(self, operation: str, **params: Any) -> Any:
        """Dispatch *operation* with *params* through the shared session."""
        return await get_session().invoke(self.service_name, operation, **params)
|
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
from typing import Any
|
|
2
|
+
|
|
3
|
+
from .base import ServiceProxy
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class PlatformApiService(ServiceProxy):
    """Typed async facade over the remote ``platform_api`` service.

    Every method simply forwards its arguments to
    :meth:`ServiceProxy._invoke`, which routes the call through the
    active session's channel.
    """

    def __init__(self):
        super().__init__("platform_api")

    # ---- Data Connectors ----

    async def list_connectors(self) -> dict[str, Any]:
        """List the data connectors available to this session."""
        return await self._invoke("list_connectors")

    async def get_schema(self, connector_id: str) -> dict[str, Any]:
        """Fetch the schema for the connector *connector_id*."""
        return await self._invoke("get_schema", connector_id=connector_id)

    async def execute_query(
        self,
        connector_id: str,
        sql_query: str,
        max_rows: int = 1000,
    ) -> dict[str, Any]:
        """Run *sql_query* on a connector, returning at most *max_rows* rows."""
        return await self._invoke(
            "execute_query",
            connector_id=connector_id,
            sql_query=sql_query,
            max_rows=max_rows,
        )

    async def discover_schema(
        self,
        connector_id: str,
        include_sample_data: bool = False,
        table_filter: str | None = None,
    ) -> dict[str, Any]:
        """Discover tables on a connector, optionally filtered and sampled."""
        return await self._invoke(
            "discover_schema",
            connector_id=connector_id,
            include_sample_data=include_sample_data,
            table_filter=table_filter,
        )

    async def get_table_info(
        self,
        connector_id: str,
        table_name: str,
    ) -> dict[str, Any]:
        """Describe a single table on a connector."""
        return await self._invoke(
            "get_table_info",
            connector_id=connector_id,
            table_name=table_name,
        )

    async def get_sample_data(
        self,
        connector_id: str,
        table_name: str,
        limit: int = 5,
    ) -> dict[str, Any]:
        """Fetch up to *limit* sample rows from a table (capped at 100)."""
        return await self._invoke(
            "get_sample_data",
            connector_id=connector_id,
            table_name=table_name,
            limit=min(limit, 100),  # cap to keep payloads small
        )

    # ---- Knowledge Base ----

    async def list_knowledge_bases(self) -> dict[str, Any]:
        """List the knowledge bases available to this session."""
        return await self._invoke("list_knowledge_bases")

    async def query_knowledge_base(
        self,
        knowledge_base_id: str,
        query: str,
    ) -> dict[str, Any]:
        """Ask *query* against the knowledge base *knowledge_base_id*."""
        return await self._invoke(
            "query_knowledge_base",
            knowledge_base_id=knowledge_base_id,
            query=query,
        )

    # ---- Agent Evaluation ----

    async def invoke_agent(
        self,
        agent_spec_id: str,
        draft_skill_id: str | None = None,
        input_message: str = "",
        context: dict[str, Any] | None = None,
        timeout: int = 60,
    ) -> dict[str, Any]:
        """
        Invoke an agent with optional draft skill for evaluation.

        Args:
            agent_spec_id: Agent specification ID to invoke
            draft_skill_id: Optional draft skill ID for testing
            input_message: User input message/query
            context: Optional context dictionary
            timeout: Request timeout in seconds

        Returns:
            {
                "conversation_id": str,
                "output": str,
                "latency_ms": float,
                "trace": List[Dict],
                "error": Optional[str]
            }
        """
        return await self._invoke(
            "invoke_agent",
            agent_spec_id=agent_spec_id,
            draft_skill_id=draft_skill_id,
            input_message=input_message,
            context=context or {},  # never forward None as context
            timeout=timeout,
        )
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
"""Channel implementations for session communication."""
|
|
2
|
+
|
|
3
|
+
from ...models.channel import ChannelRequest, ChannelResponse
|
|
4
|
+
from .base import Channel
|
|
5
|
+
from .stdio import StdioChannel
|
|
6
|
+
|
|
7
|
+
__all__ = [
|
|
8
|
+
"Channel",
|
|
9
|
+
"ChannelRequest",
|
|
10
|
+
"ChannelResponse",
|
|
11
|
+
"StdioChannel",
|
|
12
|
+
]
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
from abc import ABC, abstractmethod
|
|
2
|
+
|
|
3
|
+
from ...models.channel import ChannelRequest, ChannelResponse
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class Channel(ABC):
    """Base class for RPC communication channels.

    A channel transmits one request and returns the matching response;
    how requests and responses are framed and matched is up to the
    concrete implementation (see StdioChannel).
    """

    @abstractmethod
    async def request(self, message: ChannelRequest) -> ChannelResponse:
        """Send a request and wait for a response.

        Args:
            message: The request to transmit.

        Returns:
            The peer's response corresponding to this request.
        """
        raise NotImplementedError
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import sys
|
|
3
|
+
import threading
|
|
4
|
+
|
|
5
|
+
from ...models.channel import ChannelRequest, ChannelResponse
|
|
6
|
+
from ..protocol import encode_call, try_decode_result
|
|
7
|
+
from .base import Channel
|
|
8
|
+
|
|
9
|
+
TIMEOUT = 120 # seconds
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class StdioChannel(Channel):
    """Standard I/O based channel implementation for inter-process communication.

    Requests are emitted on stdout and responses are read back from stdin.
    Each request starts its own reader task that waits for the matching
    response.  Blocking ``readline`` calls run on daemon threads so the
    program can exit even if stdin is blocked.
    """

    def _read_stdin_line(self) -> str:
        """Blocking read of one line from stdin (separated out for testability)."""
        return sys.stdin.readline()

    @staticmethod
    def _resolve(future: asyncio.Future, line: str | None, exc: Exception | None) -> None:
        """Complete *future* with a line or an exception (event-loop thread only)."""
        if future.cancelled():
            # The waiting task was torn down; drop the outcome silently
            # instead of raising InvalidStateError inside the loop callback.
            return
        if exc is not None:
            future.set_exception(exc)
        else:
            future.set_result(line)

    def _start_read(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future:
        """Kick off one blocking stdin read on a daemon thread.

        Returns a future that is resolved on *loop* with the line read
        (or with the exception raised by the read).
        """
        future: asyncio.Future = loop.create_future()

        def read_in_thread() -> None:
            try:
                line = self._read_stdin_line()
            except Exception as exc:
                loop.call_soon_threadsafe(self._resolve, future, None, exc)
            else:
                loop.call_soon_threadsafe(self._resolve, future, line, None)

        threading.Thread(target=read_in_thread, daemon=True).start()
        return future

    async def _read_response(self, call_id: str, result_queue: asyncio.Queue) -> None:
        """Read stdin until the response for *call_id* arrives, then queue it.

        A single pending read is reused across the 1-second wake-ups.  The
        previous implementation spawned a NEW reader thread on every
        timeout while the old one was still blocked in ``readline``; a
        stale thread could then consume the next incoming line and deliver
        it to an abandoned (cancelled) future, silently losing the
        response.  ``asyncio.shield`` keeps the pending read alive when
        ``wait_for`` times out.
        """
        loop = asyncio.get_running_loop()
        pending: asyncio.Future | None = None

        try:
            while True:
                if pending is None:
                    pending = self._start_read(loop)

                try:
                    # Short timeout so task cancellation is noticed promptly.
                    line = await asyncio.wait_for(asyncio.shield(pending), timeout=1.0)
                except asyncio.TimeoutError:
                    continue
                pending = None

                if not line:
                    # EOF on stdin: no further responses can ever arrive.
                    # Stop instead of busy-spawning reader threads forever.
                    break

                payload = try_decode_result(line.rstrip("\n"))
                if not payload:
                    continue  # not a RESULT frame (or malformed) — ignore

                if payload.get("id") == call_id:
                    await result_queue.put(payload)
                    break

        except asyncio.CancelledError:
            pass
        finally:
            # Release any still-pending read so its eventual outcome is dropped.
            if pending is not None:
                pending.cancel()

    async def request(self, message: ChannelRequest) -> ChannelResponse:
        """Send request via stdout and wait for response via stdin.

        Raises:
            asyncio.TimeoutError: if no matching response arrives within
                ``TIMEOUT`` seconds.
        """
        call_id = message.id
        result_queue: asyncio.Queue = asyncio.Queue()

        reader_task = asyncio.create_task(self._read_response(call_id, result_queue))

        try:
            # flush=True so the peer sees the call without stdio buffering delay.
            print(encode_call(message.model_dump()), flush=True)

            response_dict = await asyncio.wait_for(result_queue.get(), timeout=TIMEOUT)

            return ChannelResponse(**response_dict)

        finally:
            reader_task.cancel()
            try:
                await reader_task
            except asyncio.CancelledError:
                pass
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
from .channel import StdioChannel
|
|
2
|
+
from .session import Session
|
|
3
|
+
|
|
4
|
+
_session: Session | None = None
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def get_session() -> Session:
    """Get or create singleton session with stdio channel."""
    global _session

    if _session is None:
        # Lazily build the process-wide session on first use.
        _session = Session(StdioChannel())

    return _session
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import json
|
|
2
|
+
from typing import Any
|
|
3
|
+
|
|
4
|
+
CALL_PREFIX = "REMOTE_SERVICE_CALL:"
|
|
5
|
+
RESULT_PREFIX = "REMOTE_SERVICE_RESULT:"
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def encode_call(payload: dict[str, Any]) -> str:
|
|
9
|
+
"""Encode call message to JSON string."""
|
|
10
|
+
return CALL_PREFIX + json.dumps(payload, separators=(",", ":"))
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def try_decode_result(line: str) -> dict[str, Any] | None:
|
|
14
|
+
"""Decode result message from JSON string."""
|
|
15
|
+
if not line.startswith(RESULT_PREFIX):
|
|
16
|
+
return None
|
|
17
|
+
|
|
18
|
+
try:
|
|
19
|
+
return json.loads(line[len(RESULT_PREFIX) :])
|
|
20
|
+
except json.JSONDecodeError:
|
|
21
|
+
return None
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import uuid
|
|
2
|
+
from typing import Any
|
|
3
|
+
|
|
4
|
+
from ..models.channel import ChannelRequest, ChannelResponse
|
|
5
|
+
from .channel.base import Channel
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class Session:
    """Session for making RPC calls through a channel."""

    def __init__(self, channel: Channel):
        self.channel = channel

    async def invoke(self, service: str, operation: str, **kwargs: Any) -> Any:
        """Invoke a remote service operation.

        Args:
            service: Target service name.
            operation: Operation name on that service.
            **kwargs: Input parameters forwarded to the operation.

        Returns:
            The operation's result payload (may be None).

        Raises:
            RuntimeError: if the transport fails or the service reports an
                error.  (Previously a service error was raised inside the
                same ``try`` that caught ``Exception``, so it got re-wrapped
                into "Operation 'x' failed: Operation 'x' failed: ...".)
        """
        request = ChannelRequest(
            id=str(uuid.uuid4()),
            service=service,
            operation=operation,
            input=kwargs,
        )

        try:
            response: ChannelResponse = await self.channel.request(request)
        except Exception as e:
            # Transport-level failure (timeout, broken pipe, decode error).
            raise RuntimeError(f"Operation '{operation}' failed: {e}") from e

        if response.error:
            # Prefer the structured message; fall back to the raw error dict.
            error_msg = response.error.get("message", str(response.error))
            raise RuntimeError(f"Operation '{operation}' failed: {error_msg}")

        return response.result
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
"""Basic usage example - shows how to use the SDK."""
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
|
|
5
|
+
from flow_platform_sdk import platform_api
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
async def main():
    """Basic SDK usage example."""
    # --- Data connector operations ---
    print("=== Listing Connectors ===")
    available = await platform_api.list_connectors()
    print(f"Available connectors: {available}\n")

    print("=== Getting Schema ===")
    conn_schema = await platform_api.get_schema(connector_id="conn_123")
    print(f"Schema: {conn_schema}\n")

    print("=== Executing Query ===")
    query_result = await platform_api.execute_query(
        connector_id="conn_123",
        sql_query="SELECT * FROM customers LIMIT 10",
        max_rows=10,
    )
    print(f"Query result: {query_result}\n")

    # --- Knowledge base operations ---
    print("=== Listing Knowledge Bases ===")
    kb_list = await platform_api.list_knowledge_bases()
    print(f"Knowledge bases: {kb_list}\n")

    print("=== Querying Knowledge Base ===")
    kb_answer = await platform_api.query_knowledge_base(
        knowledge_base_id="kb_123",
        query="What is the company revenue?",
    )
    print(f"Answer: {kb_answer}")


if __name__ == "__main__":
    asyncio.run(main())
|