nvidia-nat-s3 1.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nat/plugins/s3/__init__.py +0 -0
- nat/plugins/s3/object_store.py +49 -0
- nat/plugins/s3/register.py +22 -0
- nat/plugins/s3/s3_object_store.py +168 -0
- nvidia_nat_s3-1.2.0.dist-info/METADATA +10 -0
- nvidia_nat_s3-1.2.0.dist-info/RECORD +9 -0
- nvidia_nat_s3-1.2.0.dist-info/WHEEL +5 -0
- nvidia_nat_s3-1.2.0.dist-info/entry_points.txt +2 -0
- nvidia_nat_s3-1.2.0.dist-info/top_level.txt +1 -0
File without changes
|
@@ -0,0 +1,49 @@
|
|
1
|
+
# SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
3
|
+
#
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
5
|
+
# you may not use this file except in compliance with the License.
|
6
|
+
# You may obtain a copy of the License at
|
7
|
+
#
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
9
|
+
#
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
13
|
+
# See the License for the specific language governing permissions and
|
14
|
+
# limitations under the License.
|
15
|
+
|
16
|
+
import os
|
17
|
+
from typing import ClassVar
|
18
|
+
|
19
|
+
from pydantic import Field
|
20
|
+
|
21
|
+
from nat.builder.builder import Builder
|
22
|
+
from nat.cli.register_workflow import register_object_store
|
23
|
+
from nat.data_models.object_store import ObjectStoreBaseConfig
|
24
|
+
|
25
|
+
|
26
|
+
class S3ObjectStoreClientConfig(ObjectStoreBaseConfig, name="s3"):
    """
    Object store that stores objects in an S3 bucket.

    Credentials may be given explicitly or, if omitted, are read from the
    environment variables named by ``ACCESS_KEY_ENV`` / ``SECRET_KEY_ENV``.
    """

    # Environment variable names consulted when keys are not given explicitly.
    ACCESS_KEY_ENV: ClassVar[str] = "NAT_S3_OBJECT_STORE_ACCESS_KEY"
    SECRET_KEY_ENV: ClassVar[str] = "NAT_S3_OBJECT_STORE_SECRET_KEY"

    bucket_name: str = Field(..., description="The name of the bucket to use for the object store")
    endpoint_url: str | None = Field(default=None, description="The URL of the S3 server to connect to")
    # default_factory (rather than default=os.environ.get(...)) so the environment
    # is read when a config instance is created, not once at import time — env vars
    # set after the module is imported are now picked up correctly.
    access_key: str | None = Field(default_factory=lambda env=ACCESS_KEY_ENV: os.environ.get(env),
                                   description=f"Access key. If omitted, reads from {ACCESS_KEY_ENV}")
    secret_key: str | None = Field(default_factory=lambda env=SECRET_KEY_ENV: os.environ.get(env),
                                   description=f"Secret key. If omitted, reads from {SECRET_KEY_ENV}")
    region: str | None = Field(default=None, description="Region to access (or none if unspecified)")
|
41
|
+
|
42
|
+
|
43
|
+
@register_object_store(config_type=S3ObjectStoreClientConfig)
async def s3_object_store_client(config: S3ObjectStoreClientConfig, builder: Builder):
    """Yield an S3-backed object store built from the given config.

    The store's connection is opened on entry and released when the
    surrounding workflow tears the generator down.
    """
    # Imported lazily so the aioboto3 dependency is only required when an
    # S3 object store is actually configured.
    from nat.plugins.s3.s3_object_store import S3ObjectStore

    store = S3ObjectStore(config)
    async with store:
        yield store
|
@@ -0,0 +1,22 @@
|
|
1
|
+
# SPDX-FileCopyrightText: Copyright (c) 2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
3
|
+
#
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
5
|
+
# you may not use this file except in compliance with the License.
|
6
|
+
# You may obtain a copy of the License at
|
7
|
+
#
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
9
|
+
#
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
13
|
+
# See the License for the specific language governing permissions and
|
14
|
+
# limitations under the License.
|
15
|
+
|
16
|
+
# pylint: disable=unused-import
|
17
|
+
# flake8: noqa
|
18
|
+
# isort:skip_file
|
19
|
+
|
20
|
+
# Import any providers which need to be automatically registered here
|
21
|
+
|
22
|
+
from . import object_store
|
@@ -0,0 +1,168 @@
|
|
1
|
+
# SPDX-FileCopyrightText: Copyright (c) 2024-2025, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
3
|
+
#
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
5
|
+
# you may not use this file except in compliance with the License.
|
6
|
+
# You may obtain a copy of the License at
|
7
|
+
#
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
9
|
+
#
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
13
|
+
# See the License for the specific language governing permissions and
|
14
|
+
# limitations under the License.
|
15
|
+
|
16
|
+
import logging
|
17
|
+
|
18
|
+
import aioboto3
|
19
|
+
from botocore.client import BaseClient
|
20
|
+
from botocore.exceptions import ClientError
|
21
|
+
|
22
|
+
from nat.data_models.object_store import KeyAlreadyExistsError
|
23
|
+
from nat.data_models.object_store import NoSuchKeyError
|
24
|
+
from nat.object_store.interfaces import ObjectStore
|
25
|
+
from nat.object_store.models import ObjectStoreItem
|
26
|
+
from nat.plugins.s3.object_store import S3ObjectStoreClientConfig
|
27
|
+
|
28
|
+
logger = logging.getLogger(__name__)
|
29
|
+
|
30
|
+
|
31
|
+
class S3ObjectStore(ObjectStore):
    """
    ObjectStore implementation that uses an S3 (or S3-compatible) bucket as
    the underlying storage.

    Must be used as an async context manager: the S3 client is created in
    ``__aenter__`` (which also creates the bucket if it is missing) and
    released in ``__aexit__``. All object operations raise ``RuntimeError``
    when called outside the context.
    """

    def __init__(self, config: S3ObjectStoreClientConfig):
        """
        Args:
            config: Bucket name, endpoint, credentials and region.

        Raises:
            ValueError: If the access key or secret key is missing.
        """
        super().__init__()

        self.bucket_name = config.bucket_name
        self.session = aioboto3.Session()
        self._client: BaseClient | None = None
        self._client_context = None

        # Fail fast on missing credentials instead of deferring to a
        # harder-to-diagnose botocore error at connection time.
        # BUGFIX: these messages were plain strings, so the "{...}" placeholder
        # was printed literally; they are now f-strings.
        if not config.access_key:
            raise ValueError("Access key is not set. Please specify it in the environment variable "
                             f"'{S3ObjectStoreClientConfig.ACCESS_KEY_ENV}'.")

        if not config.secret_key:
            raise ValueError("Secret key is not set. Please specify it in the environment variable "
                             f"'{S3ObjectStoreClientConfig.SECRET_KEY_ENV}'.")

        self._client_args = {
            "aws_access_key_id": config.access_key,
            "aws_secret_access_key": config.secret_key,
            "region_name": config.region,
            "endpoint_url": config.endpoint_url,
        }

    async def __aenter__(self):
        """Create the S3 client and ensure the target bucket exists."""
        if self._client_context is not None:
            raise RuntimeError("Connection already established")

        self._client_context = self.session.client("s3", **self._client_args)
        if self._client_context is None:
            raise RuntimeError("Connection unable to be established")
        self._client = await self._client_context.__aenter__()
        if self._client is None:
            raise RuntimeError("Connection unable to be established")

        # Ensure the bucket exists, creating it on first use.
        try:
            await self._client.head_bucket(Bucket=self.bucket_name)
        except ClientError as e:
            if e.response['Error']['Code'] == '404':
                await self._client.create_bucket(Bucket=self.bucket_name)
                logger.info("Created bucket %s", self.bucket_name)
            else:
                # BUGFIX: non-404 errors (e.g. 403 Forbidden) were previously
                # swallowed silently; re-raise so callers see them.
                raise

        return self

    async def __aexit__(self, exc_type, exc_value, traceback):
        """Close the S3 client, forwarding any in-flight exception info."""
        if self._client_context is None:
            raise RuntimeError("Connection not established")

        # BUGFIX: forward the real exception triple instead of (None, None, None)
        # so the underlying client context sees failures raised inside the block.
        await self._client_context.__aexit__(exc_type, exc_value, traceback)
        self._client = None
        self._client_context = None

    def _put_args(self, key: str, item: ObjectStoreItem) -> dict:
        """Assemble the keyword arguments for an S3 ``put_object`` call."""
        args = {
            "Bucket": self.bucket_name,
            "Key": key,
            "Body": item.data,
        }
        if item.content_type:
            args["ContentType"] = item.content_type
        if item.metadata:
            args["Metadata"] = item.metadata
        return args

    async def put_object(self, key: str, item: ObjectStoreItem) -> None:
        """Store *item* under *key*, failing if the key already exists.

        Raises:
            RuntimeError: If called outside the async context.
            KeyAlreadyExistsError: If an object with *key* is already stored.
        """
        if self._client is None:
            raise RuntimeError("Connection not established")

        try:
            await self._client.put_object(
                **self._put_args(key, item),
                IfNoneMatch='*'  # only succeed if the key does not already exist
            )
        except ClientError as e:
            http_status_code = e.response.get("ResponseMetadata", {}).get("HTTPStatusCode", None)
            if http_status_code == 412:  # Precondition Failed -> key already exists
                raise KeyAlreadyExistsError(
                    key=key, additional_message=f"S3 object {self.bucket_name}/{key} already exists") from e
            # Other errors — rethrow untouched.
            raise

    async def upsert_object(self, key: str, item: ObjectStoreItem) -> None:
        """Store *item* under *key*, overwriting any existing object.

        Raises:
            RuntimeError: If called outside the async context.
        """
        if self._client is None:
            raise RuntimeError("Connection not established")

        await self._client.put_object(**self._put_args(key, item))

    async def get_object(self, key: str) -> ObjectStoreItem:
        """Fetch the object stored under *key*.

        Raises:
            RuntimeError: If called outside the async context.
            NoSuchKeyError: If no object exists under *key*.
        """
        if self._client is None:
            raise RuntimeError("Connection not established")

        try:
            response = await self._client.get_object(Bucket=self.bucket_name, Key=key)
            data = await response["Body"].read()
            return ObjectStoreItem(data=data, content_type=response['ContentType'], metadata=response['Metadata'])
        except ClientError as e:
            if e.response['Error']['Code'] == 'NoSuchKey':
                raise NoSuchKeyError(key=key, additional_message=str(e)) from e
            raise

    async def delete_object(self, key: str) -> None:
        """Delete the object stored under *key*.

        Raises:
            RuntimeError: If called outside the async context.
            NoSuchKeyError: If no object exists under *key*.
        """
        if self._client is None:
            raise RuntimeError("Connection not established")

        # Existence check via HEAD instead of GET so the object body is not
        # downloaded just to be discarded. HEAD reports a missing key as a
        # bare 404 (no "NoSuchKey" code), so both spellings are accepted.
        # NOTE(review): this check/delete pair is inherently racy — a
        # concurrent delete between the two calls is not detected.
        try:
            await self._client.head_object(Bucket=self.bucket_name, Key=key)
        except ClientError as e:
            if e.response['Error']['Code'] in ('404', 'NoSuchKey'):
                raise NoSuchKeyError(key=key, additional_message=str(e)) from e
            raise

        results = await self._client.delete_object(Bucket=self.bucket_name, Key=key)

        if results.get('DeleteMarker', False):
            raise NoSuchKeyError(key=key, additional_message="Object was a delete marker")
|
@@ -0,0 +1,10 @@
|
|
1
|
+
Metadata-Version: 2.4
|
2
|
+
Name: nvidia-nat-s3
|
3
|
+
Version: 1.2.0
|
4
|
+
Summary: Subpackage for S3-compatible integration in NeMo Agent toolkit
|
5
|
+
Keywords: ai,agents,memory,data store
|
6
|
+
Classifier: Programming Language :: Python
|
7
|
+
Requires-Python: <3.13,>=3.11
|
8
|
+
Description-Content-Type: text/markdown
|
9
|
+
Requires-Dist: nvidia-nat==v1.2.0
|
10
|
+
Requires-Dist: aioboto3>=11.0.0
|
@@ -0,0 +1,9 @@
|
|
1
|
+
nat/plugins/s3/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
2
|
+
nat/plugins/s3/object_store.py,sha256=yg-YBd4DgQC7MPIznuMfCNoW8XBeb5fpSS3Aj6bKr_Q,2134
|
3
|
+
nat/plugins/s3/register.py,sha256=7gqnwyDrYttIlEaa7lo9AASYt-2GrZJE0YT2jpKjepo,845
|
4
|
+
nat/plugins/s3/s3_object_store.py,sha256=jFWFS_Ocl5Mil_WPB-zfmfi6SflyTBGvRf5ib-uU5dw,6250
|
5
|
+
nvidia_nat_s3-1.2.0.dist-info/METADATA,sha256=GnNEQGwyCltl2LUDqhfUkrWwYjuvj7vbPys02sDN6dg,346
|
6
|
+
nvidia_nat_s3-1.2.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
7
|
+
nvidia_nat_s3-1.2.0.dist-info/entry_points.txt,sha256=HSc9lsaEu-3DyVezRMR-VZrfhWnDtA9llVaWE2CYZNw,63
|
8
|
+
nvidia_nat_s3-1.2.0.dist-info/top_level.txt,sha256=8-CJ2cP6-f0ZReXe5Hzqp-5pvzzHz-5Ds5H2bGqh1-U,4
|
9
|
+
nvidia_nat_s3-1.2.0.dist-info/RECORD,,
|
@@ -0,0 +1 @@
|
|
1
|
+
nat
|