awslabs.s3-tables-mcp-server 0.0.1__tar.gz → 0.0.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40) hide show
  1. awslabs_s3_tables_mcp_server-0.0.2/CONTEXT.md +110 -0
  2. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/Dockerfile +15 -16
  3. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/PKG-INFO +30 -2
  4. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/README.md +28 -0
  5. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/docker-healthcheck.sh +7 -8
  6. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/pyproject.toml +2 -2
  7. awslabs_s3_tables_mcp_server-0.0.2/uv-requirements.txt +26 -0
  8. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/uv.lock +823 -623
  9. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/.gitignore +0 -0
  10. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/.python-version +0 -0
  11. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/CHANGELOG.md +0 -0
  12. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/LICENSE +0 -0
  13. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/NOTICE +0 -0
  14. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/awslabs/__init__.py +0 -0
  15. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/awslabs/s3_tables_mcp_server/__init__.py +0 -0
  16. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/awslabs/s3_tables_mcp_server/constants.py +0 -0
  17. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/awslabs/s3_tables_mcp_server/database.py +0 -0
  18. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/awslabs/s3_tables_mcp_server/engines/__init__.py +0 -0
  19. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/awslabs/s3_tables_mcp_server/engines/pyiceberg.py +0 -0
  20. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/awslabs/s3_tables_mcp_server/file_processor.py +0 -0
  21. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/awslabs/s3_tables_mcp_server/models.py +0 -0
  22. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/awslabs/s3_tables_mcp_server/namespaces.py +0 -0
  23. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/awslabs/s3_tables_mcp_server/resources.py +0 -0
  24. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/awslabs/s3_tables_mcp_server/s3_operations.py +0 -0
  25. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/awslabs/s3_tables_mcp_server/server.py +0 -0
  26. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/awslabs/s3_tables_mcp_server/table_buckets.py +0 -0
  27. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/awslabs/s3_tables_mcp_server/tables.py +0 -0
  28. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/awslabs/s3_tables_mcp_server/utils.py +0 -0
  29. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/tests/test_database.py +0 -0
  30. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/tests/test_file_processor.py +0 -0
  31. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/tests/test_init.py +0 -0
  32. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/tests/test_main.py +0 -0
  33. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/tests/test_namespaces.py +0 -0
  34. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/tests/test_pyiceberg.py +0 -0
  35. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/tests/test_resources.py +0 -0
  36. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/tests/test_s3_operations.py +0 -0
  37. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/tests/test_server.py +0 -0
  38. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/tests/test_table_buckets.py +0 -0
  39. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/tests/test_tables.py +0 -0
  40. {awslabs_s3_tables_mcp_server-0.0.1 → awslabs_s3_tables_mcp_server-0.0.2}/tests/test_utils.py +0 -0
@@ -0,0 +1,110 @@
1
+ # AWS S3 Tables MCP (Model Context Protocol) Server
2
+
3
+ ## Overview
4
+
5
+ The AWS S3 Tables MCP Server enables AI assistants and users to manage, query, and interact with S3-based tabular data at scale. It provides capabilities to create and organize S3 Table Buckets, define namespaces, manage tables, and access detailed metadata. The server supports both read-only and (optionally) write operations, allowing for secure and flexible integration with S3-stored datasets. Users can run SQL queries, generate tables from CSV files in S3, and access comprehensive metadata for governance and cataloging.
6
+
7
+ **Important:** When a user prompt relates to **S3 Tables**, you must always prefer using the **S3 Tables MCP Server tools** over the AWS CLI. Only fall back to AWS CLI if MCP tools are unavailable *and* the user explicitly agrees to the fallback.
8
+
9
+ ---
10
+
11
+ ## Instructions
12
+
13
+ You are assisting with management and queries related to Amazon AWS S3 Tables. The S3 Tables system is composed of three hierarchical resource types:
14
+ - **S3 Table Buckets**: Containers that hold namespaces.
15
+ - **Namespaces**: Logical groupings inside table buckets that contain tables.
16
+ - **Tables**: Structured datasets stored within namespaces, following the Apache Iceberg table format.
17
+
18
+ Resource Details:
19
+ - Each S3 table consists of underlying data and metadata stored as subresources within a table bucket.
20
+ - Tables have unique identifiers, including an Amazon Resource Name (ARN) and table ID, plus associated resource policies for access control.
21
+ - When a table is created, a unique warehouse location (an S3 path) is generated automatically to store objects related to the table.
22
+ Example warehouse location format:
23
+ `s3://63a8e430-6e0b-46f5-k833abtwr6s8tmtsycedn8s4yc3xhuse1b--table-s3`
24
+
25
+ Table Types:
26
+ - **Customer Tables**: Read-write tables that users can modify using S3 API operations or integrated query engines.
27
+ - **AWS Tables**: Read-only tables managed by AWS services (e.g., S3 Metadata tables). These cannot be modified by users outside AWS S3.
28
+
29
+ Integration:
30
+ Amazon S3 Table Buckets can be integrated with Amazon SageMaker Lakehouse, allowing AWS analytics services like Athena and Redshift to discover and query table data automatically.
31
+
32
+ ---
33
+
34
+ ## Maintenance
35
+
36
+ Amazon S3 performs automatic maintenance at two levels:
37
+
38
+ 1. **Table Bucket-Level Maintenance**
39
+ - *Unreferenced File Removal*: Deletes orphaned files to optimize storage usage and reduce costs.
40
+
41
+ 2. **Table-Level Maintenance**
42
+ - *File Compaction*: Combines small files into larger ones to improve query performance and reduce storage overhead.
43
+ - *Snapshot Management*: Maintains table version histories and controls metadata growth.
44
+
45
+ These maintenance features are enabled by default but can be customized or disabled via maintenance configuration files.
46
+
47
+ ---
48
+
49
+ ## Quota
50
+
51
+ - Each table bucket can hold up to **10,000 tables** by default.
52
+ - To increase the quota, users must contact **AWS Support**.
53
+
54
+ ---
55
+
56
+ ## Operational Guidelines for LLM
57
+
58
+ ### 1. Tool Verification
59
+ - Always verify the availability of the `awslabs.s3-tables-mcp-server` and its associated tools before performing any operation.
60
+ - If unavailable, ask the user if they prefer to proceed using AWS CLI commands as a fallback.
61
+ - **Do not use AWS CLI by default for S3 Tables. Always prefer MCP tools when the prompt is about S3 Tables.**
62
+
63
+ ### 2. Request Clarification
64
+ - If critical context (e.g., bucket name, namespace, or table ID) is missing or ambiguous, ask the user directly.
65
+ - Do not make assumptions about default values or context.
66
+
67
+ ### 3. Handling Destructive Operations
68
+ Before performing any destructive operation, the system must:
69
+ - Clearly describe the consequences of the action.
70
+ - Request explicit confirmation.
71
+ - Destructive actions include:
72
+ - Deleting S3 Table Buckets
73
+ - Deleting Namespaces
74
+ - Deleting Tables
75
+ - Dropping Tables via SQL
76
+ - Disabling encryption
77
+
78
+ ### 4. Default Tool Usage
79
+ - Always use **MCP tools first** for all S3 Tables operations.
80
+ - Use AWS CLI **only when MCP tools are unavailable** *and* with **explicit user approval**.
81
+
82
+ ### 5. Communication and Safety
83
+ - Explain any risks or irreversible effects before performing changes.
84
+ - Respect the user's decision to abort or proceed.
85
+ - Present instructions and confirmations clearly and concisely.
86
+
87
+ ### 6. Additional Considerations
88
+ - Use full ARNs when referencing tables to avoid ambiguity.
89
+ - Distinguish between **AWS-managed** (read-only) and **customer-managed** (read-write) tables.
90
+ - If needed, guide users in adjusting maintenance configurations.
91
+
92
+ ---
93
+
94
+ ## Troubleshooting
95
+
96
+ ### Unknown Information
97
+ - If a user requests information that is unavailable, unclear, or unsupported by the MCP Server, do not attempt to infer or fabricate a response.
98
+ - Refer them to the official Amazon S3 Tables documentation for further details and the most up-to-date guidance:
99
+ https://docs.aws.amazon.com/AmazonS3/latest/userguide/s3-tables.html
100
+
101
+ ### Insufficient Permissions
102
+ - Never attempt to auto-modify IAM policies or permissions.
103
+ - If the user asks for permission changes, explicitly confirm their intent before taking any action.
104
+
105
+ ### Operation Unavailable (Read-Only Mode)
106
+ - Never attempt write operations or file changes in read-only mode.
107
+ - If users want write mode enabled, direct them to the setup documentation:
108
+ https://github.com/awslabs/mcp/blob/main/src/s3-tables-mcp-server/README.md
109
+
110
+ ---
@@ -13,7 +13,7 @@
13
13
  # limitations under the License.
14
14
 
15
15
  # dependabot should continue to update this to the latest hash.
16
- FROM public.ecr.aws/sam/build-python3.10@sha256:e78695db10ca8cb129e59e30f7dc9789b0dbd0181dba195d68419c72bac51ac1 AS uv
16
+ FROM public.ecr.aws/sam/build-python3.13@sha256:0c274ddd44e1d80e4dab3a70c25fe29508f612a045cba7d27840461c12eee86d AS uv
17
17
 
18
18
  # Install the project into `/app`
19
19
  WORKDIR /app
@@ -31,39 +31,38 @@ ENV UV_PYTHON_PREFERENCE=only-system
31
31
  ENV UV_FROZEN=true
32
32
 
33
33
  # Copy the required files first
34
- COPY pyproject.toml uv.lock ./
34
+ COPY pyproject.toml uv.lock uv-requirements.txt ./
35
+
36
+ # Python optimization and uv configuration
37
+ ENV PIP_NO_CACHE_DIR=1 \
38
+ PIP_DISABLE_PIP_VERSION_CHECK=1
35
39
 
36
40
  # Install the project's dependencies using the lockfile and settings
37
41
  RUN --mount=type=cache,target=/root/.cache/uv \
38
- pip install uv && \
39
- uv sync --frozen --no-install-project --no-dev --no-editable
42
+ pip install --require-hashes --requirement uv-requirements.txt --no-cache-dir && \
43
+ uv sync --python 3.13 --frozen --no-install-project --no-dev --no-editable
40
44
 
41
45
  # Then, add the rest of the project source code and install it
42
46
  # Installing separately from its dependencies allows optimal layer caching
43
47
  COPY . /app
44
48
  RUN --mount=type=cache,target=/root/.cache/uv \
45
- uv sync --frozen --no-dev --no-editable
49
+ uv sync --python 3.13 --frozen --no-dev --no-editable
46
50
 
47
51
  # Make the directory just in case it doesn't exist
48
52
  RUN mkdir -p /root/.local
49
53
 
50
- FROM public.ecr.aws/sam/build-python3.10@sha256:e78695db10ca8cb129e59e30f7dc9789b0dbd0181dba195d68419c72bac51ac1
54
+ FROM public.ecr.aws/sam/build-python3.13@sha256:0c274ddd44e1d80e4dab3a70c25fe29508f612a045cba7d27840461c12eee86d
51
55
 
52
56
  # Place executables in the environment at the front of the path and include other binaries
53
- ENV PATH="/app/.venv/bin:$PATH:/usr/sbin"
57
+ ENV PATH="/app/.venv/bin:$PATH:/usr/sbin" \
58
+ PYTHONUNBUFFERED=1
54
59
 
55
- # Install lsof for the healthcheck
56
- # Install other tools as needed for the MCP server
57
60
  # Add non-root user and ability to change directory into /root
58
- RUN yum update -y && \
59
- yum install -y lsof && \
60
- yum clean all -y && \
61
- rm -rf /var/cache/yum && \
62
- groupadd --force --system app && \
61
+ RUN groupadd --force --system app && \
63
62
  useradd app -g app -d /app && \
64
63
  chmod o+x /root
65
64
 
66
- # Get the project from the uv layer
65
+ # Copy application artifacts from build stage
67
66
  COPY --from=uv --chown=app:app /root/.local /root/.local
68
67
  COPY --from=uv --chown=app:app /app/.venv /app/.venv
69
68
 
@@ -74,5 +73,5 @@ COPY ./docker-healthcheck.sh /usr/local/bin/docker-healthcheck.sh
74
73
  USER app
75
74
 
76
75
  # When running the container, add --db-path and a bind mount to the host's db file
77
- HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 CMD [ "docker-healthcheck.sh" ]
76
+ HEALTHCHECK --interval=60s --timeout=10s --start-period=10s --retries=3 CMD ["docker-healthcheck.sh"]
78
77
  ENTRYPOINT ["awslabs.s3-tables-mcp-server"]
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: awslabs.s3-tables-mcp-server
3
- Version: 0.0.1
3
+ Version: 0.0.2
4
4
  Summary: An AWS Labs Model Context Protocol (MCP) server for awslabs.s3-tables-mcp-server
5
5
  Project-URL: homepage, https://awslabs.github.io/mcp/
6
6
  Project-URL: docs, https://awslabs.github.io/mcp/servers/s3-tables-mcp-server/
@@ -24,7 +24,7 @@ Requires-Python: >=3.10
24
24
  Requires-Dist: boto3>=1.34.0
25
25
  Requires-Dist: daft>=0.5.8
26
26
  Requires-Dist: loguru>=0.7.0
27
- Requires-Dist: mcp[cli]>=1.6.0
27
+ Requires-Dist: mcp[cli]>=1.11.0
28
28
  Requires-Dist: pyarrow>=20.0.0
29
29
  Requires-Dist: pydantic>=2.10.6
30
30
  Requires-Dist: pyiceberg>=0.9.1
@@ -76,6 +76,10 @@ The S3 Tables MCP Server simplifies the management of S3-based tables by providi
76
76
 
77
77
  ### Installation
78
78
 
79
+ | Cursor | VS Code |
80
+ |:------:|:-------:|
81
+ | [![Install MCP Server](https://cursor.com/deeplink/mcp-install-light.svg)](https://cursor.com/install-mcp?name=awslabs.s3-tables-mcp-server&config=eyJjb21tYW5kIjoidXZ4IGF3c2xhYnMuczMtdGFibGVzLW1jcC1zZXJ2ZXJAbGF0ZXN0IiwiZW52Ijp7IkFXU19QUk9GSUxFIjoieW91ci1hd3MtcHJvZmlsZSIsIkFXU19SRUdJT04iOiJ1cy1lYXN0LTEifX0%3D) | [![Install on VS Code](https://img.shields.io/badge/Install_on-VS_Code-FF9900?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=S3%20Tables%20MCP%20Server&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22awslabs.s3-tables-mcp-server%40latest%22%5D%2C%22env%22%3A%7B%22AWS_PROFILE%22%3A%22your-aws-profile%22%2C%22AWS_REGION%22%3A%22us-east-1%22%7D%7D) |
82
+
79
83
  Configure the MCP server in your MCP client configuration (e.g., for Amazon Q Developer CLI, edit `~/.aws/amazonq/mcp.json`):
80
84
 
81
85
  ```json
@@ -200,6 +204,30 @@ You can override the default by providing the `--log-dir` flag with a custom pat
200
204
  | `Show the schema for customer_data table` | Retrieves the table structure and column definitions to understand the data format and types |
201
205
  | `Run a query to find monthly revenue trends` | Performs data analysis using **read-only** SQL queries to extract business insights from stored table data. For write operations, only appending new data (inserts) is supported; updates and deletes are not available via SQL. |
202
206
 
207
+ ## Using Amazon Q with S3 Tables MCP Server
208
+
209
+ Amazon Q can provide better answers and code suggestions when it has additional context. To enhance Amazon Q's understanding of S3 Tables, you can add the provided context file to your Q environment.
210
+
211
+ ### How to Add Context to Amazon Q
212
+
213
+ 1. **Download the CONTEXT.md file**
214
+ - Download the `CONTEXT.md` file from the GitHub repository for this project.
215
+
216
+ 2. **Start Amazon Q Chat**
217
+ - Run the following command to start a chat session with Amazon Q:
218
+ ```sh
219
+ q chat
220
+ ```
221
+
222
+ 3. **Add the Context File**
223
+ - In the Q chat, run:
224
+ ```sh
225
+ /context add <path>/CONTEXT.md
226
+ ```
227
+ - Replace `<path>` with the actual path to where you downloaded `CONTEXT.md`.
228
+
229
+ Now, Amazon Q will have improved context about S3 Tables and can provide more relevant answers.
230
+
203
231
  ## Security Considerations
204
232
 
205
233
  When using this MCP server, consider:
@@ -43,6 +43,10 @@ The S3 Tables MCP Server simplifies the management of S3-based tables by providi
43
43
 
44
44
  ### Installation
45
45
 
46
+ | Cursor | VS Code |
47
+ |:------:|:-------:|
48
+ | [![Install MCP Server](https://cursor.com/deeplink/mcp-install-light.svg)](https://cursor.com/install-mcp?name=awslabs.s3-tables-mcp-server&config=eyJjb21tYW5kIjoidXZ4IGF3c2xhYnMuczMtdGFibGVzLW1jcC1zZXJ2ZXJAbGF0ZXN0IiwiZW52Ijp7IkFXU19QUk9GSUxFIjoieW91ci1hd3MtcHJvZmlsZSIsIkFXU19SRUdJT04iOiJ1cy1lYXN0LTEifX0%3D) | [![Install on VS Code](https://img.shields.io/badge/Install_on-VS_Code-FF9900?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=S3%20Tables%20MCP%20Server&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22awslabs.s3-tables-mcp-server%40latest%22%5D%2C%22env%22%3A%7B%22AWS_PROFILE%22%3A%22your-aws-profile%22%2C%22AWS_REGION%22%3A%22us-east-1%22%7D%7D) |
49
+
46
50
  Configure the MCP server in your MCP client configuration (e.g., for Amazon Q Developer CLI, edit `~/.aws/amazonq/mcp.json`):
47
51
 
48
52
  ```json
@@ -167,6 +171,30 @@ You can override the default by providing the `--log-dir` flag with a custom pat
167
171
  | `Show the schema for customer_data table` | Retrieves the table structure and column definitions to understand the data format and types |
168
172
  | `Run a query to find monthly revenue trends` | Performs data analysis using **read-only** SQL queries to extract business insights from stored table data. For write operations, only appending new data (inserts) is supported; updates and deletes are not available via SQL. |
169
173
 
174
+ ## Using Amazon Q with S3 Tables MCP Server
175
+
176
+ Amazon Q can provide better answers and code suggestions when it has additional context. To enhance Amazon Q's understanding of S3 Tables, you can add the provided context file to your Q environment.
177
+
178
+ ### How to Add Context to Amazon Q
179
+
180
+ 1. **Download the CONTEXT.md file**
181
+ - Download the `CONTEXT.md` file from the GitHub repository for this project.
182
+
183
+ 2. **Start Amazon Q Chat**
184
+ - Run the following command to start a chat session with Amazon Q:
185
+ ```sh
186
+ q chat
187
+ ```
188
+
189
+ 3. **Add the Context File**
190
+ - In the Q chat, run:
191
+ ```sh
192
+ /context add <path>/CONTEXT.md
193
+ ```
194
+ - Replace `<path>` with the actual path to where you downloaded `CONTEXT.md`.
195
+
196
+ Now, Amazon Q will have improved context about S3 Tables and can provide more relevant answers.
197
+
170
198
  ## Security Considerations
171
199
 
172
200
  When using this MCP server, consider:
@@ -1,5 +1,4 @@
1
1
  #!/bin/sh
2
-
3
2
  # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
4
3
  #
5
4
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,13 +13,13 @@
14
13
  # See the License for the specific language governing permissions and
15
14
  # limitations under the License.
16
15
 
17
- if [ "$(lsof +c 0 -p 1 | grep -e "^awslabs\..*\s1\s.*\sunix\s.*socket$" | wc -l)" -ne "0" ]; then
18
- echo -n "$(lsof +c 0 -p 1 | grep -e "^awslabs\..*\s1\s.*\sunix\s.*socket$" | wc -l) awslabs.* streams found";
16
+ SERVER="s3-tables-mcp-server"
17
+
18
+ # Check if the server process is running
19
+ if pgrep -P 0 -a -l -x -f "/app/.venv/bin/python3 /app/.venv/bin/awslabs.$SERVER" > /dev/null; then
20
+ echo -n "$SERVER is running";
19
21
  exit 0;
20
- else
21
- echo -n "Zero awslabs.* streams found";
22
- exit 1;
23
22
  fi;
24
23
 
25
- echo -n "Never should reach here";
26
- exit 99;
24
+ # Unhealthy
25
+ exit 1;
@@ -2,14 +2,14 @@
2
2
  name = "awslabs.s3-tables-mcp-server"
3
3
 
4
4
  # NOTE: "Patch"=9223372036854775807 bumps next release to zero.
5
- version = "0.0.1"
5
+ version = "0.0.2"
6
6
 
7
7
  description = "An AWS Labs Model Context Protocol (MCP) server for awslabs.s3-tables-mcp-server"
8
8
  readme = "README.md"
9
9
  requires-python = ">=3.10"
10
10
  dependencies = [
11
11
  "loguru>=0.7.0",
12
- "mcp[cli]>=1.6.0",
12
+ "mcp[cli]>=1.11.0",
13
13
  "pydantic>=2.10.6",
14
14
  "boto3>=1.34.0",
15
15
  "pyiceberg>=0.9.1",
@@ -0,0 +1,26 @@
1
+ #
2
+ # This file is autogenerated by pip-compile with Python 3.10
3
+ # by the following command:
4
+ #
5
+ # pip-compile --generate-hashes --output-file=uv-requirements.txt --strip-extras uv-requirements-0.7.13.in
6
+ #
7
+ uv==0.7.13 \
8
+ --hash=sha256:05f3c03c4ea55d294f3da725b6c2c2ff544754c18552da7594def4ec3889dcfb \
9
+ --hash=sha256:1afdbfcabc3425b383141ba42d413841c0a48b9ee0f4da65459313275e3cea84 \
10
+ --hash=sha256:33837aca7bdf02d47554d5d44f9e71756ee17c97073b07b4afead25309855bc7 \
11
+ --hash=sha256:4efa555b217e15767f0691a51d435f7bb2b0bf473fdfd59f173aeda8a93b8d17 \
12
+ --hash=sha256:4f828174e15a557d3bc0f809de76135c3b66bcbf524657f8ced9d22fc978b89c \
13
+ --hash=sha256:527a12d0c2f4d15f72b275b6f4561ae92af76dd59b4624796fddd45867f13c33 \
14
+ --hash=sha256:5786a29e286f2cc3cbda13a357fd9a4dd5bf1d7448a9d3d842b26b4f784a3a86 \
15
+ --hash=sha256:59915aec9fd2b845708a76ddc6c0639cfc99b6e2811854ea2425ee7552aff0e9 \
16
+ --hash=sha256:721b058064150fc1c6d88e277af093d1b4f8bb7a59546fe9969d9ff7dbe3f6fd \
17
+ --hash=sha256:866cad0d04a7de1aaa3c5cbef203f9d3feef9655972dcccc3283d60122db743b \
18
+ --hash=sha256:88fcf2bfbb53309531a850af50d2ea75874099b19d4159625d0b4f88c53494b9 \
19
+ --hash=sha256:8c0c29a2089ff9011d6c3abccd272f3ee6d0e166dae9e5232099fd83d26104d9 \
20
+ --hash=sha256:9c457a84cfbe2019ba301e14edd3e1c950472abd0b87fc77622ab3fc475ba012 \
21
+ --hash=sha256:9d2952a1e74c7027347c74cee1cb2be09121a5290db38498b8b17ff585f73748 \
22
+ --hash=sha256:a51006c7574e819308d92a3452b22d5bd45ef8593a4983b5856aa7cb8220885f \
23
+ --hash=sha256:b1af81e57d098b21b28f42ec756f0e26dce2341d59ba4e4f11759bc3ca2c0a99 \
24
+ --hash=sha256:e077dcac19e564cae8b4223b7807c2f617a59938f8142ca77fc6348ae9c6d0aa \
25
+ --hash=sha256:f28e70baadfebe71dcc2d9505059b988d75e903fc62258b102eb87dc4b6994a3
26
+ # via -r uv-requirements-0.7.13.in (contents of `uv==0.7.13`)