pggm-mcp-snowflake-server 0.1.0 (tar.gz)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
+++ PKG-INFO
@@ -0,0 +1,34 @@
+ Metadata-Version: 2.4
+ Name: pggm-mcp-snowflake-server
+ Version: 0.1.0
+ Summary: Custom Model Context Protocol server for Snowflake
+ Classifier: Programming Language :: Python :: 3
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Operating System :: OS Independent
+ Requires-Python: >=3.8
+ Description-Content-Type: text/markdown
+ Requires-Dist: mcp
+ Requires-Dist: pydantic
+ Requires-Dist: snowflake-snowpark-python
+ Requires-Dist: pyyaml
+
+ # PGGM MCP Snowflake Server
+
+ A customized Model Context Protocol (MCP) server for Snowflake integration, allowing AI assistants to interact with Snowflake databases.
+
+ ## Features
+
+ - Connect to Snowflake databases and execute queries
+ - Support for various SQL operations and schema exploration
+ - Data insights collection
+ - Customized filters and configurations
+
+ ## Installation
+
+ ```bash
+ pip install pggm-mcp-snowflake-server
+ ```
+
+ ## Usage
+
+ This package is designed to be used with MCP-compatible AI assistants for database interactions.
+++ README.md
@@ -0,0 +1,20 @@
+ # PGGM MCP Snowflake Server
+
+ A customized Model Context Protocol (MCP) server for Snowflake integration, allowing AI assistants to interact with Snowflake databases.
+
+ ## Features
+
+ - Connect to Snowflake databases and execute queries
+ - Support for various SQL operations and schema exploration
+ - Data insights collection
+ - Customized filters and configurations
+
+ ## Installation
+
+ ```bash
+ pip install pggm-mcp-snowflake-server
+ ```
+
+ ## Usage
+
+ This package is designed to be used with MCP-compatible AI assistants for database interactions.
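For orientation, a minimal launch sketch: `main`, `allow_write`, and `connection_args` come from the package itself, while the connection keys are the standard `snowflake-snowpark-python` session parameters and the placeholder values are purely illustrative.

```python
# Minimal sketch, assuming standard Snowpark connection parameters; in normal
# operation an MCP client spawns this server over stdio rather than a user
# running it by hand.
import asyncio

from pggm_mcp_snowflake_server import main

connection_args = {
    "account": "<account_identifier>",  # placeholders, not real credentials
    "user": "<user>",
    "password": "<password>",
    "warehouse": "<warehouse>",
}

asyncio.run(main(allow_write=False, connection_args=connection_args))
```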
+++ pggm_mcp_snowflake_server/__init__.py
@@ -0,0 +1,3 @@
+ from .server import main
+
+ __all__ = ["main"]
+++ pggm_mcp_snowflake_server/server.py
@@ -0,0 +1,623 @@
+ import importlib.metadata
+ import json
+ import logging
+ import os
+ import time
+ import uuid
+ from functools import wraps
+ from typing import Any, Callable
+
+ import mcp.server.stdio
+ import mcp.types as types
+ import yaml
+ from mcp.server import NotificationOptions, Server
+ from mcp.server.models import InitializationOptions
+ from pydantic import AnyUrl, BaseModel
+ from snowflake.snowpark import Session
+
+ from .write_detector import SQLWriteDetector
+
+ # Configure logging
+ logging.basicConfig(
+     level=logging.INFO,
+     format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+     handlers=[logging.StreamHandler()],
+ )
+ logger = logging.getLogger("pggm_mcp_snowflake_server")
+
+
+ def data_to_yaml(data: Any) -> str:
+     return yaml.dump(data, indent=2, sort_keys=False)
+
+
+ class SnowflakeDB:
+     AUTH_EXPIRATION_TIME = 1800  # seconds before the session is re-created
+
+     def __init__(self, connection_config: dict):
+         self.connection_config = connection_config
+         self.session = None
+         self.insights: list[str] = []
+         self.auth_time = 0
+
+     def _init_database(self):
+         """Initialize connection to the Snowflake database"""
+         try:
+             # Create session without setting a specific database or schema
+             self.session = Session.builder.configs(self.connection_config).create()
+
+             # Set the initial warehouse if provided, but don't set database or
+             # schema; .collect() forces the lazy Snowpark statement to execute
+             if "warehouse" in self.connection_config:
+                 self.session.sql(
+                     f"USE WAREHOUSE {self.connection_config['warehouse'].upper()}"
+                 ).collect()
+
+             self.auth_time = time.time()
+         except Exception as e:
+             raise ValueError(f"Failed to connect to Snowflake database: {e}")
+
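+     # Sessions are created lazily: execute_query() re-creates the session once
+     # AUTH_EXPIRATION_TIME has elapsed, so callers never manage authentication
+     # directly.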
+     def execute_query(self, query: str) -> tuple[list[dict[str, Any]], str]:
+         """Execute a SQL query and return results as a list of dictionaries"""
+         if not self.session or time.time() - self.auth_time > self.AUTH_EXPIRATION_TIME:
+             self._init_database()
+
+         logger.debug(f"Executing query: {query}")
+         try:
+             result = self.session.sql(query).to_pandas()
+             result_rows = result.to_dict(orient="records")
+             data_id = str(uuid.uuid4())
+
+             return result_rows, data_id
+
+         except Exception as e:
+             logger.error(f'Database error executing "{query}": {e}')
+             raise
+
+     def add_insight(self, insight: str) -> None:
+         """Add a new insight to the collection"""
+         self.insights.append(insight)
+
+     def get_memo(self) -> str:
+         """Generate a formatted memo from collected insights"""
+         if not self.insights:
+             return "No data insights have been discovered yet."
+
+         memo = "📊 Data Intelligence Memo 📊\n\n"
+         memo += "Key Insights Discovered:\n\n"
+         memo += "\n".join(f"- {insight}" for insight in self.insights)
+
+         if len(self.insights) > 1:
+             memo += f"\n\nSummary:\nAnalysis has revealed {len(self.insights)} key data insights that suggest opportunities for strategic optimization and growth."
+
+         return memo
+
+
+ def handle_tool_errors(func: Callable) -> Callable:
+     """Decorator to standardize tool error handling"""
+
+     @wraps(func)
+     async def wrapper(*args, **kwargs) -> list[types.TextContent]:
+         try:
+             return await func(*args, **kwargs)
+         except Exception as e:
+             logger.error(f"Error in {func.__name__}: {str(e)}")
+             return [types.TextContent(type="text", text=f"Error: {str(e)}")]
+
+     return wrapper
+
+
+ class Tool(BaseModel):
+     name: str
+     description: str
+     input_schema: dict[str, Any]
+     handler: Callable[
+         [str, dict[str, Any] | None],
+         list[types.TextContent | types.ImageContent | types.EmbeddedResource],
+     ]
+     tags: list[str] = []
+
+
+ # Tool handlers
+ async def handle_list_databases(arguments, db, *_, exclusion_config=None):
+     query = "SELECT DATABASE_NAME FROM INFORMATION_SCHEMA.DATABASES"
+     data, data_id = db.execute_query(query)
+
+     # Filter out excluded databases
+     if exclusion_config and "databases" in exclusion_config and exclusion_config["databases"]:
+         filtered_data = []
+         for item in data:
+             db_name = item.get("DATABASE_NAME", "")
+             exclude = False
+             for pattern in exclusion_config["databases"]:
+                 if pattern.lower() in db_name.lower():
+                     exclude = True
+                     break
+             if not exclude:
+                 filtered_data.append(item)
+         data = filtered_data
+
+     output = {
+         "type": "data",
+         "data_id": data_id,
+         "data": data,
+     }
+     yaml_output = data_to_yaml(output)
+     json_output = json.dumps(output)
+     return [
+         types.TextContent(type="text", text=yaml_output),
+         types.EmbeddedResource(
+             type="resource",
+             resource=types.TextResourceContents(
+                 uri=f"data://{data_id}", text=json_output, mimeType="application/json"
+             ),
+         ),
+     ]
+
+
+ async def handle_list_schemas(arguments, db, *_, exclusion_config=None):
+     if not arguments or "database" not in arguments:
+         raise ValueError("Missing required 'database' parameter")
+
+     database = arguments["database"]
+     query = f"SELECT SCHEMA_NAME FROM {database.upper()}.INFORMATION_SCHEMA.SCHEMATA"
+     data, data_id = db.execute_query(query)
+
+     # Filter out excluded schemas
+     if exclusion_config and "schemas" in exclusion_config and exclusion_config["schemas"]:
+         filtered_data = []
+         for item in data:
+             schema_name = item.get("SCHEMA_NAME", "")
+             exclude = False
+             for pattern in exclusion_config["schemas"]:
+                 if pattern.lower() in schema_name.lower():
+                     exclude = True
+                     break
+             if not exclude:
+                 filtered_data.append(item)
+         data = filtered_data
+
+     output = {
+         "type": "data",
+         "data_id": data_id,
+         "database": database,
+         "data": data,
+     }
+     yaml_output = data_to_yaml(output)
+     json_output = json.dumps(output)
+     return [
+         types.TextContent(type="text", text=yaml_output),
+         types.EmbeddedResource(
+             type="resource",
+             resource=types.TextResourceContents(
+                 uri=f"data://{data_id}", text=json_output, mimeType="application/json"
+             ),
+         ),
+     ]
+
+
+ async def handle_list_tables(arguments, db, *_, exclusion_config=None):
+     if not arguments or "database" not in arguments or "schema" not in arguments:
+         raise ValueError("Missing required 'database' and 'schema' parameters")
+
+     database = arguments["database"]
+     schema = arguments["schema"]
+
+     query = f"""
+         SELECT table_catalog, table_schema, table_name, comment
+         FROM {database}.information_schema.tables
+         WHERE table_schema = '{schema.upper()}'
+     """
+     data, data_id = db.execute_query(query)
+
+     # Filter out excluded tables
+     if exclusion_config and "tables" in exclusion_config and exclusion_config["tables"]:
+         filtered_data = []
+         for item in data:
+             table_name = item.get("TABLE_NAME", "")
+             exclude = False
+             for pattern in exclusion_config["tables"]:
+                 if pattern.lower() in table_name.lower():
+                     exclude = True
+                     break
+             if not exclude:
+                 filtered_data.append(item)
+         data = filtered_data
+
+     output = {
+         "type": "data",
+         "data_id": data_id,
+         "database": database,
+         "schema": schema,
+         "data": data,
+     }
+     yaml_output = data_to_yaml(output)
+     json_output = json.dumps(output)
+     return [
+         types.TextContent(type="text", text=yaml_output),
+         types.EmbeddedResource(
+             type="resource",
+             resource=types.TextResourceContents(
+                 uri=f"data://{data_id}", text=json_output, mimeType="application/json"
+             ),
+         ),
+     ]
+
+
+ async def handle_describe_table(arguments, db, *_):
+     if not arguments or "table_name" not in arguments:
+         raise ValueError("Missing table_name argument")
+
+     table_spec = arguments["table_name"]
+     split_identifier = table_spec.split(".")
+
+     # Parse the fully qualified table name
+     if len(split_identifier) < 3:
+         raise ValueError("Table name must be fully qualified as 'database.schema.table'")
+
+     database_name = split_identifier[0].upper()
+     schema_name = split_identifier[1].upper()
+     table_name = split_identifier[2].upper()
+
+     query = f"""
+         SELECT column_name, column_default, is_nullable, data_type, comment
+         FROM {database_name}.information_schema.columns
+         WHERE table_schema = '{schema_name}' AND table_name = '{table_name}'
+     """
+     data, data_id = db.execute_query(query)
+
+     output = {
+         "type": "data",
+         "data_id": data_id,
+         "database": database_name,
+         "schema": schema_name,
+         "table": table_name,
+         "data": data,
+     }
+     yaml_output = data_to_yaml(output)
+     json_output = json.dumps(output)
+     return [
+         types.TextContent(type="text", text=yaml_output),
+         types.EmbeddedResource(
+             type="resource",
+             resource=types.TextResourceContents(
+                 uri=f"data://{data_id}", text=json_output, mimeType="application/json"
+             ),
+         ),
+     ]
+
+
+ async def handle_read_query(arguments, db, write_detector, *_):
+     if not arguments or "query" not in arguments:
+         raise ValueError("Missing query argument")
+
+     if write_detector.analyze_query(arguments["query"])["contains_write"]:
+         raise ValueError("Calls to read_query should not contain write operations")
+
+     data, data_id = db.execute_query(arguments["query"])
+     output = {
+         "type": "data",
+         "data_id": data_id,
+         "data": data,
+     }
+     yaml_output = data_to_yaml(output)
+     json_output = json.dumps(output)
+     return [
+         types.TextContent(type="text", text=yaml_output),
+         types.EmbeddedResource(
+             type="resource",
+             resource=types.TextResourceContents(
+                 uri=f"data://{data_id}", text=json_output, mimeType="application/json"
+             ),
+         ),
+     ]
+
+
+ async def handle_append_insight(arguments, db, _, __, server):
+     if not arguments or "insight" not in arguments:
+         raise ValueError("Missing insight argument")
+
+     db.add_insight(arguments["insight"])
+     await server.request_context.session.send_resource_updated(AnyUrl("memo://insights"))
+     return [types.TextContent(type="text", text="Insight added to memo")]
+
+
+ async def handle_write_query(arguments, db, _, allow_write, __):
+     if not allow_write:
+         raise ValueError("Write operations are not allowed for this data connection")
+     if not arguments or "query" not in arguments:
+         raise ValueError("Missing query argument")
+     if arguments["query"].strip().upper().startswith("SELECT"):
+         raise ValueError("SELECT queries are not allowed for write_query")
+
+     results, data_id = db.execute_query(arguments["query"])
+     return [types.TextContent(type="text", text=str(results))]
+
+
+ async def handle_create_table(arguments, db, _, allow_write, __):
+     if not allow_write:
+         raise ValueError("Write operations are not allowed for this data connection")
+     if not arguments or "query" not in arguments:
+         raise ValueError("Missing query argument")
+     if not arguments["query"].strip().upper().startswith("CREATE TABLE"):
+         raise ValueError("Only CREATE TABLE statements are allowed")
+
+     results, data_id = db.execute_query(arguments["query"])
+     return [types.TextContent(type="text", text=f"Table created successfully. data_id = {data_id}")]
+
+
+ async def main(
+     allow_write: bool = False,
+     connection_args: dict | None = None,
+     log_dir: str | None = None,
+     log_level: str = "INFO",
+     exclude_tools: list[str] | None = None,
+     config_file: str = "runtime_config.json",
+     exclude_patterns: dict | None = None,
+ ):
+     # Avoid a mutable default argument for the exclusion list
+     exclude_tools = exclude_tools or []
+
+     # Setup logging
+     if log_dir:
+         os.makedirs(log_dir, exist_ok=True)
+         logger.handlers.append(
+             logging.FileHandler(os.path.join(log_dir, "pggm_mcp_snowflake_server.log"))
+         )
+     if log_level:
+         logger.setLevel(log_level)
+
+     logger.info("Starting Snowflake MCP Server")
+     logger.info("Allow write operations: %s", allow_write)
+     logger.info("Excluded tools: %s", exclude_tools)
+
+     # Load configuration from file if provided
+     config = {}
+     if config_file:
+         try:
+             with open(config_file, "r") as f:
+                 config = json.load(f)
+             logger.info(f"Loaded configuration from {config_file}")
+         except Exception as e:
+             logger.error(f"Error loading configuration file: {e}")
+
+     # Merge exclude_patterns from parameters with config file
+     exclusion_config = config.get("exclude_patterns", {})
+     if exclude_patterns:
+         # Merge patterns from parameters with those from the config file
+         for key, patterns in exclude_patterns.items():
+             if key in exclusion_config:
+                 exclusion_config[key].extend(patterns)
+             else:
+                 exclusion_config[key] = patterns
+
+     # Set default patterns if none are specified
+     if not exclusion_config:
+         exclusion_config = {"databases": [], "schemas": [], "tables": []}
+
+     # Ensure all keys exist in the exclusion config
+     for key in ["databases", "schemas", "tables"]:
+         if key not in exclusion_config:
+             exclusion_config[key] = []
+
+     logger.info(f"Exclusion patterns: {exclusion_config}")
+
+     db = SnowflakeDB(connection_args)
+     server = Server("snowflake-manager")
+     write_detector = SQLWriteDetector()
+
+     tables_info = {}
+     tables_brief = ""
+
+     all_tools = [
+         Tool(
+             name="list_databases",
+             description="List all available databases in Snowflake",
+             input_schema={
+                 "type": "object",
+                 "properties": {},
+             },
+             handler=handle_list_databases,
+         ),
+         Tool(
+             name="list_schemas",
+             description="List all schemas in a database",
+             input_schema={
+                 "type": "object",
+                 "properties": {
+                     "database": {
+                         "type": "string",
+                         "description": "Database name to list schemas from",
+                     },
+                 },
+                 "required": ["database"],
+             },
+             handler=handle_list_schemas,
+         ),
+         Tool(
+             name="list_tables",
+             description="List all tables in a specific database and schema",
+             input_schema={
+                 "type": "object",
+                 "properties": {
+                     "database": {"type": "string", "description": "Database name"},
+                     "schema": {"type": "string", "description": "Schema name"},
+                 },
+                 "required": ["database", "schema"],
+             },
+             handler=handle_list_tables,
+         ),
+         Tool(
+             name="describe_table",
+             description="Get the schema information for a specific table",
+             input_schema={
+                 "type": "object",
+                 "properties": {
+                     "table_name": {
+                         "type": "string",
+                         "description": "Fully qualified table name in the format 'database.schema.table'",
+                     },
+                 },
+                 "required": ["table_name"],
+             },
+             handler=handle_describe_table,
+         ),
+         Tool(
+             name="read_query",
+             description="Execute a SELECT query.",
+             input_schema={
+                 "type": "object",
+                 "properties": {
+                     "query": {"type": "string", "description": "SELECT SQL query to execute"}
+                 },
+                 "required": ["query"],
+             },
+             handler=handle_read_query,
+         ),
+         Tool(
+             name="append_insight",
+             description="Add a data insight to the memo",
+             input_schema={
+                 "type": "object",
+                 "properties": {
+                     "insight": {
+                         "type": "string",
+                         "description": "Data insight discovered from analysis",
+                     }
+                 },
+                 "required": ["insight"],
+             },
+             handler=handle_append_insight,
+             tags=["resource_based"],
+         ),
+         Tool(
+             name="write_query",
+             description="Execute an INSERT, UPDATE, or DELETE query on the Snowflake database",
+             input_schema={
+                 "type": "object",
+                 "properties": {"query": {"type": "string", "description": "SQL query to execute"}},
+                 "required": ["query"],
+             },
+             handler=handle_write_query,
+             tags=["write"],
+         ),
+         Tool(
+             name="create_table",
+             description="Create a new table in the Snowflake database",
+             input_schema={
+                 "type": "object",
+                 "properties": {
+                     "query": {"type": "string", "description": "CREATE TABLE SQL statement"}
+                 },
+                 "required": ["query"],
+             },
+             handler=handle_create_table,
+             tags=["write"],
+         ),
+     ]
+
+     exclude_tags = []
+     if not allow_write:
+         exclude_tags.append("write")
+     allowed_tools = [
+         tool
+         for tool in all_tools
+         if tool.name not in exclude_tools and not any(tag in exclude_tags for tag in tool.tags)
+     ]
+
+     logger.info("Allowed tools: %s", [tool.name for tool in allowed_tools])
+
+     # Register handlers
+     @server.list_resources()
+     async def handle_list_resources() -> list[types.Resource]:
+         resources = [
+             types.Resource(
+                 uri=AnyUrl("memo://insights"),
+                 name="Data Insights Memo",
+                 description="A living document of discovered data insights",
+                 mimeType="text/plain",
+             )
+         ]
+         table_brief_resources = [
+             types.Resource(
+                 uri=AnyUrl(f"context://table/{table_name}"),
+                 name=f"{table_name} table",
+                 description=f"Description of the {table_name} table",
+                 mimeType="text/plain",
+             )
+             for table_name in tables_info.keys()
+         ]
+         resources += table_brief_resources
+         return resources
+
+     @server.read_resource()
+     async def handle_read_resource(uri: AnyUrl) -> str:
+         if str(uri) == "memo://insights":
+             return db.get_memo()
+         elif str(uri).startswith("context://table"):
+             table_name = str(uri).split("/")[-1]
+             if table_name in tables_info:
+                 return data_to_yaml(tables_info[table_name])
+             else:
+                 raise ValueError(f"Unknown table: {table_name}")
+         else:
+             raise ValueError(f"Unknown resource: {uri}")
+
+     @server.list_prompts()
+     async def handle_list_prompts() -> list[types.Prompt]:
+         return []
+
+     @server.get_prompt()
+     async def handle_get_prompt(
+         name: str, arguments: dict[str, str] | None
+     ) -> types.GetPromptResult:
+         raise ValueError(f"Unknown prompt: {name}")
+
+     @server.call_tool()
+     @handle_tool_errors
+     async def handle_call_tool(
+         name: str, arguments: dict[str, Any] | None
+     ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]:
+         if name in exclude_tools:
+             return [
+                 types.TextContent(
+                     type="text", text=f"Tool {name} is excluded from this data connection"
+                 )
+             ]
+
+         handler = next((tool.handler for tool in allowed_tools if tool.name == name), None)
+         if not handler:
+             raise ValueError(f"Unknown tool: {name}")
+
+         # Pass exclusion_config to the handler if it is a listing function
+         if name in ["list_databases", "list_schemas", "list_tables"]:
+             return await handler(
+                 arguments,
+                 db,
+                 write_detector,
+                 allow_write,
+                 server,
+                 exclusion_config=exclusion_config,
+             )
+         else:
+             return await handler(arguments, db, write_detector, allow_write, server)
+
+     @server.list_tools()
+     async def handle_list_tools() -> list[types.Tool]:
+         logger.info("Listing tools")
+         logger.debug("Allowed tools: %s", [tool.name for tool in allowed_tools])
+         tools = [
+             types.Tool(
+                 name=tool.name,
+                 description=tool.description,
+                 inputSchema=tool.input_schema,
+             )
+             for tool in allowed_tools
+         ]
+         return tools
+
+     # Start server
+     async with mcp.server.stdio.stdio_server() as (read_stream, write_stream):
+         logger.info("Server running with stdio transport")
+         await server.run(
+             read_stream,
+             write_stream,
+             InitializationOptions(
+                 server_name="snowflake",
+                 server_version=importlib.metadata.version("pggm_mcp_snowflake_server"),
+                 capabilities=server.get_capabilities(
+                     notification_options=NotificationOptions(),
+                     experimental_capabilities={},
+                 ),
+             ),
+         )
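To make the exclusion merging in `main()` concrete: a hypothetical `runtime_config.json` could carry patterns like the dict below (shown as the object `json.load` returns), and the three list handlers drop any name containing a pattern, case-insensitively.

```python
# Hypothetical runtime_config.json payload (names are illustrative only),
# shown as the dict json.load(f) produces inside main().
config = {
    "exclude_patterns": {
        "databases": ["temp"],               # hides e.g. TEMP_DB
        "schemas": ["information_schema"],
        "tables": ["_staging"],
    }
}

# Matching mirrors the filter loops in the list_* handlers: case-insensitive
# substring containment, not glob or regex.
patterns = config["exclude_patterns"]["tables"]
excluded = any(p.lower() in "ORDERS_STAGING".lower() for p in patterns)  # True
```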
+++ pggm_mcp_snowflake_server/write_detector.py
@@ -0,0 +1,36 @@
+ import re
+
+
+ class SQLWriteDetector:
+     """Utility class to detect write operations in SQL queries."""
+
+     def __init__(self):
+         self.write_keywords = [
+             "INSERT", "UPDATE", "DELETE", "DROP", "CREATE", "ALTER", "TRUNCATE",
+             "GRANT", "REVOKE", "MERGE", "UPSERT", "REPLACE",
+         ]
+
+     def analyze_query(self, query: str) -> dict:
+         """
+         Analyze an SQL query to determine if it contains write operations.
+
+         Args:
+             query: SQL query string to analyze
+
+         Returns:
+             Dictionary with analysis results
+         """
+         result = {
+             "contains_write": False,
+             "write_operations": [],
+             "query_type": "READ",
+         }
+
+         # Convert to uppercase for case-insensitive comparison
+         upper_query = query.upper()
+
+         # Check for write keywords on word boundaries, so punctuation-adjacent
+         # keywords (e.g. ";DROP") are caught, unlike a whitespace-split check
+         for keyword in self.write_keywords:
+             if re.search(rf"\b{keyword}\b", upper_query):
+                 result["contains_write"] = True
+                 result["write_operations"].append(keyword)
+                 result["query_type"] = "WRITE"
+
+         return result
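A quick usage sketch for the detector, mirroring the guard in `handle_read_query` (a query is rejected when `contains_write` is true):

```python
from pggm_mcp_snowflake_server.write_detector import SQLWriteDetector

detector = SQLWriteDetector()

print(detector.analyze_query("SELECT * FROM orders"))
# {'contains_write': False, 'write_operations': [], 'query_type': 'READ'}

print(detector.analyze_query("DELETE FROM orders WHERE id = 1"))
# {'contains_write': True, 'write_operations': ['DELETE'], 'query_type': 'WRITE'}
```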
+++ pggm_mcp_snowflake_server.egg-info/PKG-INFO
@@ -0,0 +1,34 @@
+ Metadata-Version: 2.4
+ Name: pggm-mcp-snowflake-server
+ Version: 0.1.0
+ Summary: Custom Model Context Protocol server for Snowflake
+ Classifier: Programming Language :: Python :: 3
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Operating System :: OS Independent
+ Requires-Python: >=3.8
+ Description-Content-Type: text/markdown
+ Requires-Dist: mcp
+ Requires-Dist: pydantic
+ Requires-Dist: snowflake-snowpark-python
+ Requires-Dist: pyyaml
+
+ # PGGM MCP Snowflake Server
+
+ A customized Model Context Protocol (MCP) server for Snowflake integration, allowing AI assistants to interact with Snowflake databases.
+
+ ## Features
+
+ - Connect to Snowflake databases and execute queries
+ - Support for various SQL operations and schema exploration
+ - Data insights collection
+ - Customized filters and configurations
+
+ ## Installation
+
+ ```bash
+ pip install pggm-mcp-snowflake-server
+ ```
+
+ ## Usage
+
+ This package is designed to be used with MCP-compatible AI assistants for database interactions.
+++ pggm_mcp_snowflake_server.egg-info/SOURCES.txt
@@ -0,0 +1,11 @@
+ README.md
+ pyproject.toml
+ pggm_mcp_snowflake_server/__init__.py
+ pggm_mcp_snowflake_server/server.py
+ pggm_mcp_snowflake_server/write_detector.py
+ pggm_mcp_snowflake_server.egg-info/PKG-INFO
+ pggm_mcp_snowflake_server.egg-info/SOURCES.txt
+ pggm_mcp_snowflake_server.egg-info/dependency_links.txt
+ pggm_mcp_snowflake_server.egg-info/entry_points.txt
+ pggm_mcp_snowflake_server.egg-info/requires.txt
+ pggm_mcp_snowflake_server.egg-info/top_level.txt
+++ pggm_mcp_snowflake_server.egg-info/entry_points.txt
@@ -0,0 +1,2 @@
+ [console_scripts]
+ pggm_mcp_snowflake_server = pggm_mcp_snowflake_server.server:main
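One packaging detail: setuptools console scripts call their target synchronously, while `server:main` is an `async def`; a small synchronous wrapper along these lines (hypothetical, not part of this release) is the usual bridge.

```python
# Hypothetical wrapper, not in the package: asyncio.run() drives the coroutine
# that the console-script machinery would otherwise only instantiate.
import asyncio

from pggm_mcp_snowflake_server.server import main


def cli() -> None:
    asyncio.run(main())  # connection_args etc. would still need to be supplied


if __name__ == "__main__":
    cli()
```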
+++ pggm_mcp_snowflake_server.egg-info/requires.txt
@@ -0,0 +1,4 @@
+ mcp
+ pydantic
+ snowflake-snowpark-python
+ pyyaml
+++ pggm_mcp_snowflake_server.egg-info/top_level.txt
@@ -0,0 +1 @@
+ pggm_mcp_snowflake_server
+++ pyproject.toml
@@ -0,0 +1,24 @@
+ [build-system]
+ requires = ["setuptools>=42", "wheel"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "pggm-mcp-snowflake-server"
+ version = "0.1.0"
+ description = "Custom Model Context Protocol server for Snowflake"
+ readme = "README.md"
+ classifiers = [
+     "Programming Language :: Python :: 3",
+     "License :: OSI Approved :: MIT License",
+     "Operating System :: OS Independent",
+ ]
+ requires-python = ">=3.8"
+ dependencies = [
+     "mcp",
+     "pydantic",
+     "snowflake-snowpark-python",
+     "pyyaml"
+ ]
+
+ [project.scripts]
+ pggm_mcp_snowflake_server = "pggm_mcp_snowflake_server.server:main"
+++ setup.cfg
@@ -0,0 +1,4 @@
+ [egg_info]
+ tag_build =
+ tag_date = 0
+