fw-nodes-postgres 0.0.1a1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fw_nodes_postgres-0.0.1a1/.forgejo/workflows/tag-release.yml +41 -0
- fw_nodes_postgres-0.0.1a1/.gitignore +12 -0
- fw_nodes_postgres-0.0.1a1/PKG-INFO +25 -0
- fw_nodes_postgres-0.0.1a1/README.md +11 -0
- fw_nodes_postgres-0.0.1a1/fw_nodes_postgres/__init__.py +1 -0
- fw_nodes_postgres-0.0.1a1/fw_nodes_postgres/credentials.py +23 -0
- fw_nodes_postgres-0.0.1a1/fw_nodes_postgres/nodes/__init__.py +0 -0
- fw_nodes_postgres-0.0.1a1/fw_nodes_postgres/nodes/delete.py +113 -0
- fw_nodes_postgres-0.0.1a1/fw_nodes_postgres/nodes/insert.py +146 -0
- fw_nodes_postgres-0.0.1a1/fw_nodes_postgres/nodes/raw_query.py +79 -0
- fw_nodes_postgres-0.0.1a1/fw_nodes_postgres/nodes/select.py +151 -0
- fw_nodes_postgres-0.0.1a1/fw_nodes_postgres/nodes/update.py +129 -0
- fw_nodes_postgres-0.0.1a1/fw_nodes_postgres/utils/__init__.py +0 -0
- fw_nodes_postgres-0.0.1a1/fw_nodes_postgres/utils/db.py +255 -0
- fw_nodes_postgres-0.0.1a1/justfile +53 -0
- fw_nodes_postgres-0.0.1a1/pyproject.toml +58 -0
- fw_nodes_postgres-0.0.1a1/tests/__init__.py +0 -0
- fw_nodes_postgres-0.0.1a1/tests/conftest.py +95 -0
- fw_nodes_postgres-0.0.1a1/tests/test_db_utils.py +178 -0
- fw_nodes_postgres-0.0.1a1/tests/test_delete.py +41 -0
- fw_nodes_postgres-0.0.1a1/tests/test_insert.py +83 -0
- fw_nodes_postgres-0.0.1a1/tests/test_node_execution.py +205 -0
- fw_nodes_postgres-0.0.1a1/tests/test_raw_query.py +81 -0
- fw_nodes_postgres-0.0.1a1/tests/test_select.py +114 -0
- fw_nodes_postgres-0.0.1a1/tests/test_update.py +52 -0
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
# CI workflow: on every push to main, read the version from pyproject.toml,
# create a matching git tag, and publish the package with uv.
name: Tag & Publish

on:
  push:
    branches:
      - main

jobs:
  tag-and-publish:
    runs-on: ubuntu-latest
    steps:
      # fetch-depth: 0 pulls full history so existing tags are visible
      # to the "tag already exists" check below.
      - uses: Public-Mirrors/actions_checkout@v6
        with:
          fetch-depth: 0

      # Parse the first `version = "..."` line out of pyproject.toml and
      # expose it as a step output for the later steps.
      - name: Extract version from pyproject.toml
        id: version
        run: |
          VERSION=$(grep '^version' pyproject.toml | head -1 | sed 's/.*"\(.*\)".*/\1/')
          echo "version=$VERSION" >> "$GITHUB_OUTPUT"
          echo "Detected version: $VERSION"

      # Fail fast when the version was not bumped; prevents re-tagging and
      # attempting to re-publish an existing release.
      - name: Check if tag already exists
        run: |
          if git rev-parse "v${{ steps.version.outputs.version }}" >/dev/null 2>&1; then
            echo "::error::Tag v${{ steps.version.outputs.version }} already exists. Bump the version in pyproject.toml before pushing to main."
            exit 1
          fi

      # NOTE(review): the tag is pushed before publish; if the publish step
      # fails, the tag remains — confirm this is the intended behavior.
      - name: Create and push tag
        run: |
          git tag "v${{ steps.version.outputs.version }}"
          git push origin "v${{ steps.version.outputs.version }}"

      - name: Install uv
        run: curl -LsSf https://astral.sh/uv/install.sh | sh

      - name: Build and publish
        run: |
          uv build
          uv publish --token ${{ secrets.PYPI_API_TOKEN }}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: fw-nodes-postgres
|
|
3
|
+
Version: 0.0.1a1
|
|
4
|
+
Summary: PostgreSQL database nodes for Flowire workflow automation
|
|
5
|
+
License-Expression: MIT
|
|
6
|
+
Requires-Python: >=3.13
|
|
7
|
+
Requires-Dist: asyncpg>=0.30.0
|
|
8
|
+
Requires-Dist: flowire-sdk>=0.0.1a1
|
|
9
|
+
Provides-Extra: dev
|
|
10
|
+
Requires-Dist: pytest-asyncio>=0.23.0; extra == 'dev'
|
|
11
|
+
Requires-Dist: pytest>=8.0.0; extra == 'dev'
|
|
12
|
+
Requires-Dist: ruff>=0.4.0; extra == 'dev'
|
|
13
|
+
Description-Content-Type: text/markdown
|
|
14
|
+
|
|
15
|
+
# fw-nodes-postgres
|
|
16
|
+
|
|
17
|
+
PostgreSQL database nodes for Flowire workflow automation.
|
|
18
|
+
|
|
19
|
+
## Nodes
|
|
20
|
+
|
|
21
|
+
- **Postgres Raw Query** — Execute raw SQL with a code editor and parameterized values
|
|
22
|
+
- **Postgres Select** — Structured query builder for SELECT operations
|
|
23
|
+
- **Postgres Insert** — Insert single or bulk rows
|
|
24
|
+
- **Postgres Update** — Update rows with required WHERE conditions
|
|
25
|
+
- **Postgres Delete** — Delete rows with required WHERE conditions
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
# fw-nodes-postgres
|
|
2
|
+
|
|
3
|
+
PostgreSQL database nodes for Flowire workflow automation.
|
|
4
|
+
|
|
5
|
+
## Nodes
|
|
6
|
+
|
|
7
|
+
- **Postgres Raw Query** — Execute raw SQL with a code editor and parameterized values
|
|
8
|
+
- **Postgres Select** — Structured query builder for SELECT operations
|
|
9
|
+
- **Postgres Insert** — Insert single or bulk rows
|
|
10
|
+
- **Postgres Update** — Update rows with required WHERE conditions
|
|
11
|
+
- **Postgres Delete** — Delete rows with required WHERE conditions
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"""PostgreSQL nodes for Flowire workflow automation."""
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
"""Shared credential schema for PostgreSQL nodes."""
|
|
2
|
+
|
|
3
|
+
from typing import ClassVar
|
|
4
|
+
|
|
5
|
+
from pydantic import BaseModel, Field
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class PostgresCredentialSchema(BaseModel):
    """Credential schema for PostgreSQL database connections.

    Shared across all PostgreSQL nodes in this package.
    """

    # Display metadata for the credential type (ClassVar: not model fields).
    credential_name: ClassVar[str] = "PostgreSQL"
    credential_description: ClassVar[str] = "PostgreSQL database connection details"
    credential_icon: ClassVar[str | None] = "database"

    # Connection parameters; `password` is registered as a secret by the nodes.
    host: str = Field(..., description="Database host (e.g., localhost or db.example.com)")
    port: int = Field(default=5432, description="Database port")
    database: str = Field(..., description="Database name")
    user: str = Field(..., description="Database user")
    password: str = Field(..., description="Database password")
    ssl: bool = Field(default=False, description="Use SSL/TLS connection")
|
|
File without changes
|
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
"""Structured DELETE node for PostgreSQL."""
|
|
2
|
+
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
from flowire_sdk import BaseNode, BaseNodeOutput, InputField, NodeExecutionContext, NodeMetadata
|
|
6
|
+
from flowire_sdk.node_base import FieldOption
|
|
7
|
+
from pydantic import BaseModel, Field
|
|
8
|
+
|
|
9
|
+
from fw_nodes_postgres.credentials import PostgresCredentialSchema
|
|
10
|
+
from fw_nodes_postgres.utils.db import (
|
|
11
|
+
WhereCondition,
|
|
12
|
+
build_where_clause,
|
|
13
|
+
execute_query,
|
|
14
|
+
pg_field_options,
|
|
15
|
+
qualify_table,
|
|
16
|
+
sanitize_identifier,
|
|
17
|
+
serialize_rows,
|
|
18
|
+
)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class DeleteInput(BaseModel):
    """Input schema for the Postgres Delete node."""

    # Credential reference; resolved at execution time via the node context.
    credential_id: str = InputField(..., description="PostgreSQL credential")
    # NOTE(review): a field named `schema` shadows pydantic's BaseModel.schema
    # attribute and may trigger a shadowing warning — confirm against the SDK.
    schema: str | None = InputField(
        default=None,
        description="PostgreSQL schema (defaults to search_path, usually 'public')",
        dynamic_options=["credential_id"],
    )
    # Options for this field are fetched dynamically from the live database.
    table: str = InputField(
        ...,
        description="Table to delete from",
        dynamic_options=["credential_id", "schema"],
    )
    # min_length=1 makes an unconditional DELETE impossible by construction.
    where: list[WhereCondition] = InputField(
        ...,
        min_length=1,
        description="WHERE conditions (at least one required to prevent accidental full-table deletes)",
    )
    returning: list[str] = InputField(
        default_factory=list,
        description="Columns to return from deleted rows",
    )
    timeout: int = InputField(default=30, ge=1, le=300, description="Query timeout in seconds")
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class DeleteOutput(BaseNodeOutput):
    """Result payload emitted by DeleteNode."""

    rows: list[dict[str, Any]] = Field(..., description="Returned rows (if RETURNING was specified)")
    affected_count: int = Field(..., description="Number of rows deleted")
    success: bool = Field(..., description="Whether the delete executed successfully")
    raw_sql: str = Field(..., description="The generated SQL query (with $N placeholders)")
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
class DeleteNode(BaseNode):
    """Delete rows from a PostgreSQL table."""

    input_schema = DeleteInput
    output_schema = DeleteOutput
    credential_schema = PostgresCredentialSchema

    metadata = NodeMetadata(
        name="Postgres Delete",
        description="Delete rows from a PostgreSQL table",
        category="database",
        icon="database",
        color="#336791",
    )

    async def get_field_options(
        self,
        field_name: str,
        credential_data: dict[str, Any] | None = None,
        field_values: dict[str, Any] | None = None,
    ) -> list[FieldOption]:
        # Schema/table suggestions come from the shared Postgres helper.
        return await pg_field_options(field_name, credential_data, field_values)

    async def execute_logic(
        self,
        validated_inputs: dict[str, Any],
        context: NodeExecutionContext,
    ) -> DeleteOutput:
        """Resolve credentials, build the DELETE statement, and run it."""
        creds = await context.resolve_credential(
            credential_id=validated_inputs["credential_id"],
            credential_type=self.get_credential_type(),
        )
        # Keep the password out of logs and node outputs.
        context.register_secret(creds["password"])

        sql, sql_params = _build_delete_query(validated_inputs)
        result_rows, affected = await execute_query(
            creds,
            sql,
            sql_params,
            timeout=validated_inputs.get("timeout", 30),
        )
        return DeleteOutput(
            rows=serialize_rows(result_rows),
            affected_count=affected,
            success=True,
            raw_sql=sql,
        )
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def _build_delete_query(inputs: dict[str, Any]) -> tuple[str, list[Any]]:
    """Build a parameterized DELETE query from structured inputs.

    Returns the SQL text (with $N placeholders) and the ordered parameter list.
    """
    target = qualify_table(inputs["table"], inputs.get("schema"))

    # WHERE is mandatory for deletes (enforced by the input schema), so the
    # clause is always present.
    where_sql, params, _ = build_where_clause(inputs["where"])

    pieces = [f"DELETE FROM {target}", where_sql]

    # Optional RETURNING clause; identifiers are sanitized, never interpolated raw.
    returning_cols = inputs.get("returning") or []
    if returning_cols:
        cols = ", ".join(sanitize_identifier(c) for c in returning_cols)
        pieces.append(f"RETURNING {cols}")

    return " ".join(pieces), params
|
|
@@ -0,0 +1,146 @@
|
|
|
1
|
+
"""Structured INSERT node for PostgreSQL."""
|
|
2
|
+
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
from flowire_sdk import BaseNode, BaseNodeOutput, InputField, NodeExecutionContext, NodeMetadata
|
|
6
|
+
from flowire_sdk.node_base import FieldOption
|
|
7
|
+
from pydantic import BaseModel, Field
|
|
8
|
+
|
|
9
|
+
from fw_nodes_postgres.credentials import PostgresCredentialSchema
|
|
10
|
+
from fw_nodes_postgres.utils.db import (
|
|
11
|
+
execute_query,
|
|
12
|
+
pg_field_options,
|
|
13
|
+
qualify_table,
|
|
14
|
+
sanitize_identifier,
|
|
15
|
+
serialize_rows,
|
|
16
|
+
)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class InsertInput(BaseModel):
    """Input schema for the Postgres Insert node."""

    # Credential reference; resolved at execution time via the node context.
    credential_id: str = InputField(..., description="PostgreSQL credential")
    # NOTE(review): a field named `schema` shadows pydantic's BaseModel.schema
    # attribute and may trigger a shadowing warning — confirm against the SDK.
    schema: str | None = InputField(
        default=None,
        description="PostgreSQL schema (defaults to search_path, usually 'public')",
        dynamic_options=["credential_id"],
    )
    # Options for this field are fetched dynamically from the live database.
    table: str = InputField(
        ...,
        description="Table to insert into",
        dynamic_options=["credential_id", "schema"],
    )
    # A single dict inserts one row; a list of dicts performs a bulk insert.
    data: dict[str, Any] | list[dict[str, Any]] = InputField(
        ...,
        description="Row data as key-value pairs, or a list of rows for bulk insert",
    )
    returning: list[str] = InputField(
        default_factory=list,
        description="Columns to return from inserted rows (e.g., ['id', 'created_at'])",
    )
    timeout: int = InputField(default=30, ge=1, le=300, description="Query timeout in seconds")
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class InsertOutput(BaseNodeOutput):
    """Result payload emitted by InsertNode."""

    rows: list[dict[str, Any]] = Field(..., description="Returned rows (if RETURNING was specified)")
    affected_count: int = Field(..., description="Number of rows inserted")
    success: bool = Field(..., description="Whether the insert executed successfully")
    raw_sql: str = Field(..., description="The generated SQL query (with $N placeholders)")
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
class InsertNode(BaseNode):
    """Insert rows into a PostgreSQL table."""

    input_schema = InsertInput
    output_schema = InsertOutput
    credential_schema = PostgresCredentialSchema

    metadata = NodeMetadata(
        name="Postgres Insert",
        description="Insert rows into a PostgreSQL table",
        category="database",
        icon="database",
        color="#336791",
    )

    async def get_field_options(
        self,
        field_name: str,
        credential_data: dict[str, Any] | None = None,
        field_values: dict[str, Any] | None = None,
    ) -> list[FieldOption]:
        # Schema/table suggestions come from the shared Postgres helper.
        return await pg_field_options(field_name, credential_data, field_values)

    async def execute_logic(
        self,
        validated_inputs: dict[str, Any],
        context: NodeExecutionContext,
    ) -> InsertOutput:
        """Resolve credentials, build the INSERT statement, and run it."""
        creds = await context.resolve_credential(
            credential_id=validated_inputs["credential_id"],
            credential_type=self.get_credential_type(),
        )
        # Keep the password out of logs and node outputs.
        context.register_secret(creds["password"])

        sql, sql_params = _build_insert_query(validated_inputs)
        result_rows, affected = await execute_query(
            creds,
            sql,
            sql_params,
            timeout=validated_inputs.get("timeout", 30),
        )
        return InsertOutput(
            rows=serialize_rows(result_rows),
            affected_count=affected,
            success=True,
            raw_sql=sql,
        )
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def _build_insert_query(inputs: dict[str, Any]) -> tuple[str, list[Any]]:
    """Build a parameterized INSERT query from structured inputs.

    Accepts a single row dict or a list of rows (bulk insert); all rows must
    share the same column set. Returns the SQL text (with $N placeholders)
    and the ordered parameter list.

    Raises:
        ValueError: on empty data, empty first row, or inconsistent columns.
    """
    target = qualify_table(inputs["table"], inputs.get("schema"))
    payload = inputs["data"]

    # Normalize a single-row dict into a one-element list.
    rows_data = payload if isinstance(payload, list) else [payload]
    if not rows_data:
        raise ValueError("No data provided for insert")

    # Column order is dictated by the first row.
    columns = list(rows_data[0].keys())
    if not columns:
        raise ValueError("No columns provided for insert")

    # Every subsequent row must expose exactly the same column set;
    # row numbering starts at 2 because row 1 defines the schema.
    expected = set(columns)
    for i, row in enumerate(rows_data[1:], start=2):
        actual = set(row.keys())
        if actual == expected:
            continue
        problems = []
        missing = expected - actual
        extra = actual - expected
        if missing:
            problems.append(f"missing {missing}")
        if extra:
            problems.append(f"unexpected {extra}")
        raise ValueError(f"Row {i} has inconsistent columns: {', '.join(problems)}")

    # One ($a, $b, ...) group per row; len(params) doubles as the running
    # placeholder counter since parameters are appended in placeholder order.
    params: list[Any] = []
    value_groups: list[str] = []
    for row in rows_data:
        group = []
        for col in columns:
            params.append(row.get(col))
            group.append(f"${len(params)}")
        value_groups.append(f"({', '.join(group)})")

    col_str = ", ".join(sanitize_identifier(c) for c in columns)
    query = f"INSERT INTO {target} ({col_str}) VALUES {', '.join(value_groups)}"

    # Optional RETURNING clause; identifiers are sanitized, never interpolated raw.
    returning_cols = inputs.get("returning") or []
    if returning_cols:
        ret = ", ".join(sanitize_identifier(c) for c in returning_cols)
        query += f" RETURNING {ret}"

    return query, params
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
"""Raw SQL query node for executing arbitrary SQL against PostgreSQL."""
|
|
2
|
+
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
from flowire_sdk import BaseNode, BaseNodeOutput, CodeEditorContract, InputField, NodeExecutionContext, NodeMetadata
|
|
6
|
+
from pydantic import BaseModel, Field
|
|
7
|
+
|
|
8
|
+
from fw_nodes_postgres.credentials import PostgresCredentialSchema
|
|
9
|
+
from fw_nodes_postgres.utils.db import execute_query, serialize_rows
|
|
10
|
+
|
|
11
|
+
SQL_DEFAULT = "SELECT * FROM my_table\nWHERE id = $1\nLIMIT 100;"
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class RawQueryInput(BaseModel):
    """Input schema for the Postgres Raw Query node."""

    # Credential reference; resolved at execution time via the node context.
    credential_id: str = InputField(..., description="PostgreSQL credential")
    # code=True renders this field with the SDK's code editor component.
    query: str = InputField(
        default=SQL_DEFAULT,
        description="SQL query to execute. Use $1, $2, etc. for parameterized values.",
        code=True,
    )
    # Positional parameters bound to $1, $2, ... — values are never
    # interpolated into the SQL text.
    params: list[Any] = InputField(
        default_factory=list,
        description="Query parameters (matched to $1, $2, etc. in the query)",
    )
    timeout: int = InputField(default=30, ge=1, le=300, description="Query timeout in seconds")
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class RawQueryOutput(BaseNodeOutput):
    """Result payload emitted by RawQueryNode."""

    rows: list[dict[str, Any]] = Field(..., description="Query result rows (empty for non-SELECT queries)")
    row_count: int = Field(..., description="Number of rows returned or affected")
    success: bool = Field(..., description="Whether the query executed successfully")
    raw_sql: str = Field(..., description="The SQL query that was executed")
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class RawQueryNode(BaseNode):
    """Execute raw SQL queries against a PostgreSQL database."""

    input_schema = RawQueryInput
    output_schema = RawQueryOutput
    credential_schema = PostgresCredentialSchema

    metadata = NodeMetadata(
        name="Postgres Raw Query",
        description="Execute raw SQL queries against PostgreSQL",
        category="database",
        icon="database",
        color="#336791",
        display_component="code",
        code_editor=CodeEditorContract(
            code_field="query",
            language_field=None,
            code_format="code",
            language_defaults={"sql": SQL_DEFAULT},
            language_options=["sql"],
        ),
    )

    async def execute_logic(
        self,
        validated_inputs: dict[str, Any],
        context: NodeExecutionContext,
    ) -> RawQueryOutput:
        """Resolve credentials and run the user-supplied SQL verbatim."""
        creds = await context.resolve_credential(
            credential_id=validated_inputs["credential_id"],
            credential_type=self.get_credential_type(),
        )
        # Keep the password out of logs and node outputs.
        context.register_secret(creds["password"])

        sql = validated_inputs["query"]
        result_rows, count = await execute_query(
            creds,
            sql,
            validated_inputs.get("params") or [],
            timeout=validated_inputs.get("timeout", 30),
        )
        return RawQueryOutput(
            rows=serialize_rows(result_rows),
            row_count=count,
            success=True,
            raw_sql=sql,
        )
|
|
@@ -0,0 +1,151 @@
|
|
|
1
|
+
"""Structured SELECT node for PostgreSQL."""
|
|
2
|
+
|
|
3
|
+
from enum import StrEnum
|
|
4
|
+
from typing import Any
|
|
5
|
+
|
|
6
|
+
from flowire_sdk import BaseNode, BaseNodeOutput, InputField, NodeExecutionContext, NodeMetadata
|
|
7
|
+
from flowire_sdk.node_base import FieldOption
|
|
8
|
+
from pydantic import BaseModel, Field
|
|
9
|
+
|
|
10
|
+
from fw_nodes_postgres.credentials import PostgresCredentialSchema
|
|
11
|
+
from fw_nodes_postgres.utils.db import (
|
|
12
|
+
WhereCondition,
|
|
13
|
+
build_where_clause,
|
|
14
|
+
execute_query,
|
|
15
|
+
pg_field_options,
|
|
16
|
+
qualify_table,
|
|
17
|
+
sanitize_identifier,
|
|
18
|
+
serialize_rows,
|
|
19
|
+
)
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class SortDirection(StrEnum):
    """Allowed ORDER BY directions; values are the literal SQL keywords."""

    ASC = "ASC"
    DESC = "DESC"
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class SortClause(BaseModel):
    """A single ORDER BY entry: column plus sort direction."""

    column: str = InputField(..., description="Column to sort by")
    direction: SortDirection = InputField(default=SortDirection.ASC, description="Sort direction")
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class SelectInput(BaseModel):
    """Input schema for the Postgres Select node."""

    # Credential reference; resolved at execution time via the node context.
    credential_id: str = InputField(..., description="PostgreSQL credential")
    # NOTE(review): a field named `schema` shadows pydantic's BaseModel.schema
    # attribute and may trigger a shadowing warning — confirm against the SDK.
    schema: str | None = InputField(
        default=None,
        description="PostgreSQL schema (defaults to search_path, usually 'public')",
        dynamic_options=["credential_id"],
    )
    # Options for this field are fetched dynamically from the live database.
    table: str = InputField(
        ...,
        description="Table name to query",
        dynamic_options=["credential_id", "schema"],
    )
    # "*" is passed through verbatim; everything else is sanitized.
    columns: list[str] = InputField(
        default_factory=lambda: ["*"],
        description="Columns to select (defaults to all)",
    )
    where: list[WhereCondition] = InputField(
        default_factory=list,
        description="WHERE conditions (combined with AND)",
    )
    order_by: list[SortClause] = InputField(default_factory=list, description="ORDER BY clauses")
    limit: int | None = InputField(default=None, ge=1, description="Maximum rows to return")
    offset: int | None = InputField(default=None, ge=0, description="Number of rows to skip")
    timeout: int = InputField(default=30, ge=1, le=300, description="Query timeout in seconds")
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
class SelectOutput(BaseNodeOutput):
    """Result payload emitted by SelectNode."""

    rows: list[dict[str, Any]] = Field(..., description="Query result rows")
    row_count: int = Field(..., description="Number of rows returned")
    success: bool = Field(..., description="Whether the query executed successfully")
    raw_sql: str = Field(..., description="The generated SQL query (with $N placeholders)")
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
class SelectNode(BaseNode):
    """Query rows from a PostgreSQL table with a structured interface."""

    input_schema = SelectInput
    output_schema = SelectOutput
    credential_schema = PostgresCredentialSchema

    metadata = NodeMetadata(
        name="Postgres Select",
        description="Query rows from a PostgreSQL table",
        category="database",
        icon="database",
        color="#336791",
    )

    async def get_field_options(
        self,
        field_name: str,
        credential_data: dict[str, Any] | None = None,
        field_values: dict[str, Any] | None = None,
    ) -> list[FieldOption]:
        # Schema/table suggestions come from the shared Postgres helper.
        return await pg_field_options(field_name, credential_data, field_values)

    async def execute_logic(
        self,
        validated_inputs: dict[str, Any],
        context: NodeExecutionContext,
    ) -> SelectOutput:
        """Resolve credentials, build the SELECT statement, and run it."""
        creds = await context.resolve_credential(
            credential_id=validated_inputs["credential_id"],
            credential_type=self.get_credential_type(),
        )
        # Keep the password out of logs and node outputs.
        context.register_secret(creds["password"])

        sql, sql_params = _build_select_query(validated_inputs)
        result_rows, count = await execute_query(
            creds,
            sql,
            sql_params,
            timeout=validated_inputs.get("timeout", 30),
        )
        return SelectOutput(
            rows=serialize_rows(result_rows),
            row_count=count,
            success=True,
            raw_sql=sql,
        )
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
def _build_select_query(inputs: dict[str, Any]) -> tuple[str, list[Any]]:
    """Build a parameterized SELECT query from structured inputs.

    Returns the SQL text (with $N placeholders) and the ordered parameter list.
    """
    target = qualify_table(inputs["table"], inputs.get("schema"))

    # "*" is passed through verbatim; every other column name is sanitized.
    requested = inputs.get("columns") or ["*"]
    col_sql = ", ".join("*" if c == "*" else sanitize_identifier(c) for c in requested)

    pieces = [f"SELECT {col_sql} FROM {target}"]
    params: list[Any] = []
    next_param = 1

    # WHERE (optional; conditions are combined by the shared helper).
    conditions = inputs.get("where") or []
    if conditions:
        where_sql, where_params, next_param = build_where_clause(conditions, next_param)
        pieces.append(where_sql)
        params.extend(where_params)

    # ORDER BY — direction keyword is whitelisted; anything else becomes ASC.
    sort_clauses = inputs.get("order_by") or []
    if sort_clauses:
        rendered = []
        for clause in sort_clauses:
            col = sanitize_identifier(clause["column"])
            direction = clause.get("direction", "ASC").upper()
            if direction not in ("ASC", "DESC"):
                direction = "ASC"
            rendered.append(f"{col} {direction}")
        pieces.append("ORDER BY " + ", ".join(rendered))

    # LIMIT then OFFSET, each bound as its own parameter.
    for keyword in ("limit", "offset"):
        value = inputs.get(keyword)
        if value is not None:
            pieces.append(f"{keyword.upper()} ${next_param}")
            params.append(value)
            next_param += 1

    return " ".join(pieces), params
|