iflow-mcp_simple-psql-mcp 1.0.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,14 @@
+ # Python-generated files
+ __pycache__/
+ *.py[oc]
+ build/
+ dist/
+ wheels/
+ *.egg-info
+
+ # Virtual environments
+ .venv
+
+ .idea
+ .DS_Store
+ mcp_config.json
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2025 Netanel Bollag
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
@@ -0,0 +1,187 @@
+ Metadata-Version: 2.4
+ Name: iflow-mcp_simple-psql-mcp
+ Version: 1.0.0
+ Summary: A PostgreSQL MCP server project
+ Author-email: Netanel Bollag <nbollag@gmail.com>
+ License-File: LICENSE
+ Requires-Python: >=3.13
+ Requires-Dist: aiohttp>=3.11.13
+ Requires-Dist: mcp[cli]>=1.3.0
+ Requires-Dist: pyyaml>=6.0.2
+ Description-Content-Type: text/markdown
+
+ # Simple PostgreSQL MCP Server
+
+ This is a template project for those looking to build their own MCP servers. I designed it to be dead simple to understand and adapt: the code is straightforward, and the MCP docs are attached so you can get up to speed quickly.
+
+ ## What is MCP?
+
+ *TL;DR - It's a way to write plugins for AI*
+
+ Model Context Protocol (MCP) is a standard way for LLMs to interact with external tools and data. In a nutshell:
+
+ - **Tools** allow the LLM to execute commands (like running a database query)
+ - **Resources** are data you can attach to conversations (like attaching a file to a prompt)
+ - **Prompts** are templates that generate consistent LLM instructions
+
+ ## Features
+
+ This PostgreSQL MCP server implements:
+
+ 1. **Tools**
+    - `execute_query` - Run SQL queries against your database
+    - `test_connection` - Verify the database connection is working
+
+ 2. **Resources**
+    - `db://tables` - List of all tables in the schema
+    - `db://tables/{table_name}` - Schema information for a specific table
+    - `db://schema` - Complete schema information for all tables in the database
+
+ 3. **Prompts**
+    - Query generation templates
+    - Analytical query builders
+    - Based on the templates in this repo
+
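+ To give a sense of how these pieces are wired up, here is a minimal sketch using the `FastMCP` helper from the `mcp` Python SDK. It mirrors the names listed above, but it is an illustration rather than the actual code in this package; the real server also handles connections, errors, and schema introspection.
+
+ ```python
+ # sketch.py - illustrative only; names mirror the feature list above
+ from mcp.server.fastmcp import FastMCP
+
+ mcp = FastMCP("postgres")
+
+ @mcp.tool()
+ async def test_connection() -> str:
+     """Verify the database connection is working."""
+     return "ok"  # the real server would open a connection and run SELECT 1
+
+ @mcp.resource("db://tables")
+ async def list_tables() -> str:
+     """List of all tables in the schema."""
+     return "users, addresses"  # the real server queries information_schema
+
+ @mcp.prompt()
+ def generate_query(question: str) -> str:
+     """Prompt template that asks the LLM to turn a question into SQL."""
+     return f"Write a SELECT query that answers: {question}"
+
+ if __name__ == "__main__":
+     mcp.run()  # serves over stdio by default
+ ```
+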
+ ## Prerequisites
+
+ - Python 3.13+
+ - [uv](https://github.com/astral-sh/uv) - Modern Python package manager and installer
+ - npx (included with Node.js)
+ - A PostgreSQL database you can connect to
+
+ ## Quick Setup
+
+ 1. **Create a virtual environment and install dependencies:**
+ ```bash
+ # Create a virtual environment with uv
+ uv venv
+
+ # Activate the virtual environment
+ source .venv/bin/activate  # On Windows: .venv\Scripts\activate
+
+ # Install dependencies
+ uv pip install -r requirements.txt
+ ```
+
+ 2. **Run the server with the MCP Inspector:**
+ ```bash
+ # Replace with YOUR actual database credentials
+ npx @modelcontextprotocol/inspector uv --directory . run postgres -e DSN=postgresql://username:password@hostname:port/database -e SCHEMA=public
+ ```
+
+ > Note: If this is your first time running the Inspector with npx, you'll be prompted to approve the installation. Type 'y' to proceed.
+
+ After running this command, the MCP Inspector interface opens in your browser. You should see a message like:
+ ```
+ MCP Inspector is up and running at http://localhost:5173
+ ```
+
+ If the browser doesn't open automatically, copy and paste the URL into your browser. You should see something like this:
+
+ ![MCP Inspector Interface](inspector-screenshot.png)
+
+ 3. **Using the Inspector:**
+ - Click the "Connect" button in the interface (unless there's an error message in the console at the bottom left)
+ - Explore the "Tools", "Resources", and "Prompts" tabs to see the available functionality
+ - Try clicking on listed commands or typing resource names to retrieve resources and prompts
+ - The interface lets you test queries and see how the MCP server responds
+
+ 4. **Take a look at the official docs**
+
+ Official server developer guide: https://modelcontextprotocol.io/quickstart/server
+
+ More on the Inspector: https://modelcontextprotocol.io/docs/tools/inspector
+
+ ## Connect Your AI Tool to the Server
+
+ You can configure the MCP server for your AI assistant by creating an MCP configuration file:
+
+ ```json
+ {
+   "mcpServers": {
+     "postgres": {
+       "command": "/path/to/uv",
+       "args": [
+         "--directory",
+         "/path/to/simple-psql-mcp",
+         "run",
+         "postgres"
+       ],
+       "env": {
+         "DSN": "postgresql://username:password@localhost:5432/my-db",
+         "SCHEMA": "public"
+       }
+     }
+   }
+ }
+ ```
+
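+ The `env` block is how the server receives its connection settings. Assuming the server reads them directly from the environment (which is what this config implies), the lookup on the server side is roughly:
+
+ ```python
+ # Hypothetical sketch: how DSN and SCHEMA from the "env" block reach the server
+ import os
+
+ dsn = os.environ["DSN"]                      # e.g. postgresql://user:pass@host:5432/db
+ schema = os.environ.get("SCHEMA", "public")  # falls back to the default schema
+ ```
+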
+ Alternatively, you can generate this config file using the included script:
+
+ ```bash
+ # Make the script executable
+ chmod +x generate_mcp_config.sh
+
+ # Run the configuration generator
+ ./generate_mcp_config.sh
+ ```
+
+ When prompted, enter your PostgreSQL DSN and schema name.
+
+ ### How to use it
+
+ You can now ask the LLM questions about your data in natural language:
+ - "What are all the tables in my database?"
+ - "Show me the top 5 users by creation date"
+ - "Count addresses by state"
+
+ For testing, Claude Desktop supports MCP natively and works with all features (tools, resources, and prompts) right out of the box.
+
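+ Behind the scenes, each of those questions ends up as a call to the `execute_query` tool. If you want a programmatic test without Claude Desktop, a client sketch using the MCP Python SDK's stdio client could look like the following; the tool argument name (`query` here) is an assumption, so check the tool's schema in the Inspector first:
+
+ ```python
+ # client_sketch.py - illustrative client; verify tool argument names in the Inspector
+ import asyncio
+
+ from mcp import ClientSession, StdioServerParameters
+ from mcp.client.stdio import stdio_client
+
+ server = StdioServerParameters(
+     command="uv",  # use the full path to uv if it isn't found, as in the JSON config above
+     args=["--directory", "/path/to/simple-psql-mcp", "run", "postgres"],
+     env={"DSN": "postgresql://postgres:postgres@localhost:5432/user_database",
+          "SCHEMA": "public"},
+ )
+
+ async def main() -> None:
+     async with stdio_client(server) as (read, write):
+         async with ClientSession(read, write) as session:
+             await session.initialize()
+             # "Count addresses by state" expressed as SQL
+             result = await session.call_tool(
+                 "execute_query",
+                 arguments={"query": "SELECT state, COUNT(*) FROM addresses GROUP BY state"},
+             )
+             print(result.content)
+
+ asyncio.run(main())
+ ```
+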
+ ## Example Database (Optional)
+
+ If you don't have a database ready or encounter connection issues, you can use the included example database:
+
+ ```bash
+ # Make the script executable
+ chmod +x example-db/create-db.sh
+
+ # Run the database setup script
+ ./example-db/create-db.sh
+ ```
+
+ This script creates a Docker container with a PostgreSQL database pre-populated with sample `users` and `addresses` tables. After running it, you can connect using:
+
+ ```bash
+ npx @modelcontextprotocol/inspector uv --directory . run postgres -e DSN=postgresql://postgres:postgres@localhost:5432/user_database -e SCHEMA=public
+ ```
+
+ ## Next Steps
+
+ To extend this project with your own MCP servers:
+
+ 1. Create a new directory under `/src` (e.g., `/src/my_new_mcp` - use underscores, since hyphens aren't valid in Python module names)
+ 2. Implement your MCP server following the PostgreSQL example (a minimal skeleton is sketched below)
+ 3. Add your new MCP to `pyproject.toml`:
+
+ ```toml
+ [project.scripts]
+ postgres = "src.postgres:main"
+ my-new-mcp = "src.my_new_mcp:main"
+ ```
+
+ You can then run your new MCP with:
+
+ ```bash
+ npx @modelcontextprotocol/inspector uv --directory . run my-new-mcp
+ ```
+
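+ For step 2, the smallest server that satisfies the `src.my_new_mcp:main` entry point above is roughly the following. This is a sketch modeled on the FastMCP pattern, not a copy of the existing `src/postgres` code:
+
+ ```python
+ # src/my_new_mcp/__init__.py - minimal skeleton for a new MCP server (illustrative)
+ from mcp.server.fastmcp import FastMCP
+
+ mcp = FastMCP("my-new-mcp")
+
+ @mcp.tool()
+ def ping() -> str:
+     """Trivial tool so the Inspector has something to call."""
+     return "pong"
+
+ def main() -> None:
+     # Entry point referenced by pyproject.toml: my-new-mcp = "src.my_new_mcp:main"
+     mcp.run()
+ ```
+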
+ ## Documentation
+
+ - MCP docs are included in the repo to make LLM-assisted development easier
+ - Based on the approach at: https://modelcontextprotocol.io/tutorials/building-mcp-with-llms
+
+ ## Security
+
+ This is an experimental project meant to empower developers to create their own MCP server. I did the minimum to make sure it won't die immediately when you try it, but be careful - it's very easy to end up with SQL injection through this tool. The server checks whether the query starts with SELECT, but beyond that nothing is guaranteed. TL;DR - don't run it in production unless you're the founder and there are no paying clients.
+
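+ For illustration, a guard along these lines (a sketch, not necessarily the exact check the server performs) still lets plenty of harmful statements through, so the permissions of the database user in your DSN remain the real boundary:
+
+ ```python
+ # guard_sketch.py - illustrative; not necessarily the exact check the server performs
+ def is_select_query(sql: str) -> bool:
+     """Allow only statements that start with SELECT (after trimming whitespace)."""
+     return sql.lstrip().lower().startswith("select")
+
+ # Weakness: a query like
+ #   "SELECT 1; DROP TABLE users"
+ # still passes this check, so the database user's own privileges are the real limit.
+ ```
+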
+ ## License
+
+ MIT
@@ -0,0 +1,98 @@
+ #!/bin/bash
+
+ # Script to set up PostgreSQL container and initialize database with users and addresses
+
+ # Define container name and credentials
+ CONTAINER_NAME="postgres_local"
+ DB_NAME="user_database"
+ DB_USER="postgres"
+ DB_PASSWORD="postgres"
+ DB_PORT="5432"
+
+ # Stop and remove container if it already exists
+ echo "Stopping and removing existing container if it exists..."
+ docker stop $CONTAINER_NAME 2>/dev/null
+ docker rm $CONTAINER_NAME 2>/dev/null
+
+ # Run PostgreSQL container
+ echo "Starting PostgreSQL container..."
+ docker run --name $CONTAINER_NAME \
+     -e POSTGRES_PASSWORD=$DB_PASSWORD \
+     -e POSTGRES_DB=$DB_NAME \
+     -p $DB_PORT:5432 \
+     -d postgres:14
+
+ # Wait for PostgreSQL to initialize
+ echo "Waiting for PostgreSQL to initialize..."
+ sleep 10
+
+ # Create SQL script for database setup
+ cat > init_db.sql << EOF
+ -- Create addresses table
+ CREATE TABLE addresses (
+     address_id SERIAL PRIMARY KEY,
+     street_address VARCHAR(100) NOT NULL,
+     city VARCHAR(50) NOT NULL,
+     state VARCHAR(50) NOT NULL,
+     postal_code VARCHAR(20) NOT NULL,
+     country VARCHAR(50) NOT NULL,
+     created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+ );
+
+ -- Create users table
+ CREATE TABLE users (
+     user_id SERIAL PRIMARY KEY,
+     first_name VARCHAR(50) NOT NULL,
+     last_name VARCHAR(50) NOT NULL,
+     email VARCHAR(100) UNIQUE NOT NULL,
+     phone VARCHAR(20),
+     address_id INTEGER REFERENCES addresses(address_id),
+     created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+ );
+
+ -- Insert 10 addresses
+ INSERT INTO addresses (street_address, city, state, postal_code, country) VALUES
+ ('123 Oak Street', 'New York', 'NY', '10001', 'USA'),
+ ('456 Pine Avenue', 'Los Angeles', 'CA', '90001', 'USA'),
+ ('789 Maple Road', 'Chicago', 'IL', '60007', 'USA'),
+ ('321 Cedar Lane', 'Houston', 'TX', '77001', 'USA'),
+ ('654 Birch Boulevard', 'Philadelphia', 'PA', '19019', 'USA'),
+ ('987 Spruce Court', 'Phoenix', 'AZ', '85001', 'USA'),
+ ('135 Willow Way', 'San Antonio', 'TX', '78201', 'USA'),
+ ('246 Redwood Drive', 'San Diego', 'CA', '92101', 'USA'),
+ ('579 Elm Street', 'Dallas', 'TX', '75201', 'USA'),
+ ('864 Aspen Circle', 'San Jose', 'CA', '95101', 'USA');
+
+ -- Insert 10 users linked to addresses
+ INSERT INTO users (first_name, last_name, email, phone, address_id) VALUES
+ ('John', 'Doe', 'john.doe@example.com', '212-555-1234', 1),
+ ('Jane', 'Smith', 'jane.smith@example.com', '310-555-2345', 2),
+ ('Michael', 'Johnson', 'michael.johnson@example.com', '312-555-3456', 3),
+ ('Emily', 'Williams', 'emily.williams@example.com', '713-555-4567', 4),
+ ('Robert', 'Brown', 'robert.brown@example.com', '267-555-5678', 5),
+ ('Sarah', 'Miller', 'sarah.miller@example.com', '602-555-6789', 6),
+ ('David', 'Wilson', 'david.wilson@example.com', '210-555-7890', 7),
+ ('Jessica', 'Taylor', 'jessica.taylor@example.com', '619-555-8901', 8),
+ ('Christopher', 'Anderson', 'christopher.anderson@example.com', '214-555-9012', 9),
+ ('Amanda', 'Martinez', 'amanda.martinez@example.com', '408-555-0123', 10);
+ EOF
+
+ # Execute SQL script
+ echo "Initializing database with sample data..."
+ docker exec -i $CONTAINER_NAME psql -U $DB_USER -d $DB_NAME < init_db.sql
+
+ # Verify data was inserted
+ echo "Verifying data insertion..."
+ docker exec -i $CONTAINER_NAME psql -U $DB_USER -d $DB_NAME -c "SELECT COUNT(*) FROM users;"
+ docker exec -i $CONTAINER_NAME psql -U $DB_USER -d $DB_NAME -c "SELECT COUNT(*) FROM addresses;"
+
+ echo "Database setup complete!"
+ echo "PostgreSQL is running on localhost:$DB_PORT"
+ echo "Database: $DB_NAME"
+ echo "Username: $DB_USER"
+ echo "Password: $DB_PASSWORD"
+
+ # Display connection command example
+ echo ""
+ echo "To connect directly to the PostgreSQL client, run:"
+ echo "docker exec -it $CONTAINER_NAME psql -U $DB_USER -d $DB_NAME"
@@ -0,0 +1,140 @@
+ -- PostgreSQL seed file to generate 1000 additional users and addresses
+ -- This builds on the existing schema with users and addresses tables
+
+ -- Function to generate random strings for names, streets, etc.
+ CREATE OR REPLACE FUNCTION random_string(length INTEGER) RETURNS TEXT AS
+ $$
+ DECLARE
+     chars TEXT := 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz';
+     result TEXT := '';
+     i INTEGER := 0;
+ BEGIN
+     FOR i IN 1..length LOOP
+         result := result || substr(chars, floor(random() * length(chars) + 1)::INTEGER, 1);
+     END LOOP;
+     RETURN result;
+ END;
+ $$ LANGUAGE plpgsql;
+
+ -- Function to generate random phone numbers
+ CREATE OR REPLACE FUNCTION random_phone() RETURNS TEXT AS
+ $$
+ BEGIN
+     RETURN
+         LPAD(floor(random() * 900 + 100)::TEXT, 3, '0') || '-' ||
+         LPAD(floor(random() * 900 + 100)::TEXT, 3, '0') || '-' ||
+         LPAD(floor(random() * 9000 + 1000)::TEXT, 4, '0');
+ END;
+ $$ LANGUAGE plpgsql;
+
+ -- Array of common street types
+ CREATE TEMP TABLE street_types AS
+ SELECT unnest(ARRAY['Street', 'Avenue', 'Boulevard', 'Road', 'Lane', 'Drive', 'Way', 'Court', 'Plaza', 'Terrace']) AS name;
+
+ -- Array of common first names
+ CREATE TEMP TABLE first_names AS
+ SELECT unnest(ARRAY[
+     'James', 'Mary', 'John', 'Patricia', 'Robert', 'Jennifer', 'Michael', 'Linda', 'William', 'Elizabeth',
+     'David', 'Susan', 'Richard', 'Jessica', 'Joseph', 'Sarah', 'Thomas', 'Karen', 'Charles', 'Nancy',
+     'Christopher', 'Lisa', 'Daniel', 'Margaret', 'Matthew', 'Betty', 'Anthony', 'Sandra', 'Mark', 'Ashley',
+     'Donald', 'Kimberly', 'Steven', 'Emily', 'Paul', 'Donna', 'Andrew', 'Michelle', 'Joshua', 'Dorothy',
+     'Kenneth', 'Carol', 'Kevin', 'Amanda', 'Brian', 'Melissa', 'George', 'Deborah', 'Edward', 'Stephanie'
+ ]) AS name;
+
+ -- Array of common last names
+ CREATE TEMP TABLE last_names AS
+ SELECT unnest(ARRAY[
+     'Smith', 'Johnson', 'Williams', 'Jones', 'Brown', 'Davis', 'Miller', 'Wilson', 'Moore', 'Taylor',
+     'Anderson', 'Thomas', 'Jackson', 'White', 'Harris', 'Martin', 'Thompson', 'Garcia', 'Martinez', 'Robinson',
+     'Clark', 'Rodriguez', 'Lewis', 'Lee', 'Walker', 'Hall', 'Allen', 'Young', 'Hernandez', 'King',
+     'Wright', 'Lopez', 'Hill', 'Scott', 'Green', 'Adams', 'Baker', 'Gonzalez', 'Nelson', 'Carter',
+     'Mitchell', 'Perez', 'Roberts', 'Turner', 'Phillips', 'Campbell', 'Parker', 'Evans', 'Edwards', 'Collins'
+ ]) AS name;
+
+ -- Array of cities
+ CREATE TEMP TABLE cities AS
+ SELECT unnest(ARRAY[
+     'New York', 'Los Angeles', 'Chicago', 'Houston', 'Phoenix', 'Philadelphia', 'San Antonio', 'San Diego',
+     'Dallas', 'San Jose', 'Austin', 'Jacksonville', 'Fort Worth', 'Columbus', 'Indianapolis', 'Charlotte',
+     'San Francisco', 'Seattle', 'Denver', 'Washington', 'Boston', 'El Paso', 'Nashville', 'Detroit', 'Portland'
+ ]) AS city,
+ unnest(ARRAY[
+     'NY', 'CA', 'IL', 'TX', 'AZ', 'PA', 'TX', 'CA',
+     'TX', 'CA', 'TX', 'FL', 'TX', 'OH', 'IN', 'NC',
+     'CA', 'WA', 'CO', 'DC', 'MA', 'TX', 'TN', 'MI', 'OR'
+ ]) AS state;
+
+ -- Begin transaction
+ BEGIN;
+
+ -- Get the current max IDs to know where to start
+ DO $$
+ DECLARE
+     max_address_id INTEGER;
+     max_user_id INTEGER;
+     address_id INTEGER;
+     first_name TEXT;
+     last_name TEXT;
+     street_num INTEGER;
+     street_name TEXT;
+     street_type TEXT;
+     city_rec RECORD;
+     postal_code TEXT;
+     email TEXT;
+     i INTEGER;
+ BEGIN
+     -- Get max IDs
+     SELECT COALESCE(MAX(addresses.address_id), 0) INTO max_address_id FROM addresses;
+     SELECT COALESCE(MAX(user_id), 0) INTO max_user_id FROM users;
+
+     -- Generate 1000 new records
+     FOR i IN 1..1000 LOOP
+         -- Generate address data
+         address_id := max_address_id + i;
+         street_num := floor(random() * 9900 + 100)::INTEGER;
+         street_name := (SELECT name FROM first_names ORDER BY random() LIMIT 1);
+         street_type := (SELECT name FROM street_types ORDER BY random() LIMIT 1);
+         SELECT * INTO city_rec FROM cities ORDER BY random() LIMIT 1;
+         postal_code := LPAD(floor(random() * 90000 + 10000)::TEXT, 5, '0');
+
+         -- Insert address
+         INSERT INTO addresses (street_address, city, state, postal_code, country)
+         VALUES (
+             street_num || ' ' || street_name || ' ' || street_type,
+             city_rec.city,
+             city_rec.state,
+             postal_code,
+             'USA'
+         );
+
+         -- Generate user data
+         first_name := (SELECT name FROM first_names ORDER BY random() LIMIT 1);
+         last_name := (SELECT name FROM last_names ORDER BY random() LIMIT 1);
+         email := lower(first_name) || '.' || lower(last_name) || i || '@example.com';
+
+         -- Insert user with reference to the new address
+         INSERT INTO users (first_name, last_name, email, phone, address_id)
+         VALUES (
+             first_name,
+             last_name,
+             email,
+             random_phone(),
+             address_id
+         );
+     END LOOP;
+ END $$;
+
+ -- Verify the data was inserted
+ SELECT COUNT(*) AS total_addresses FROM addresses;
+ SELECT COUNT(*) AS total_users FROM users;
+
+ -- Clean up temporary functions and tables
+ DROP FUNCTION IF EXISTS random_string;
+ DROP FUNCTION IF EXISTS random_phone;
+ DROP TABLE IF EXISTS street_types;
+ DROP TABLE IF EXISTS first_names;
+ DROP TABLE IF EXISTS last_names;
+ DROP TABLE IF EXISTS cities;
+
+ COMMIT;
+
@@ -0,0 +1,39 @@
+ #!/bin/bash
+
+ # Find the uv executable path
+ UV_PATH=$(which uv)
+ if [ -z "$UV_PATH" ]; then
+     echo "Error: 'uv' executable not found in PATH"
+     exit 1
+ fi
+
+ # Get current working directory
+ CURRENT_DIR=$(pwd)
+
+ # Prompt for DSN and schema
+ read -p "Enter PostgreSQL DSN (e.g., postgresql://postgres:postgres@localhost:5432/user_database): " DSN
+ read -p "Enter schema name (press Enter for public): " DB_SCHEMA
+ DB_SCHEMA=${DB_SCHEMA:-public}
+
+ # Create the JSON configuration
+ cat > mcp_config.json << EOF
+ {
+   "mcpServers": {
+     "postgres": {
+       "command": "${UV_PATH}",
+       "args": [
+         "--directory",
+         "${CURRENT_DIR}",
+         "run",
+         "postgres"
+       ],
+       "env": {
+         "DSN": "${DSN}",
+         "SCHEMA": "${DB_SCHEMA}"
+       }
+     }
+   }
+ }
+ EOF
+
+ cat mcp_config.json