otdf-python 0.1.10__py3-none-any.whl → 0.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- otdf_python/__init__.py +25 -0
- otdf_python/__main__.py +12 -0
- otdf_python/address_normalizer.py +84 -0
- otdf_python/aesgcm.py +55 -0
- otdf_python/assertion_config.py +84 -0
- otdf_python/asym_crypto.py +85 -0
- otdf_python/asym_decryption.py +53 -0
- otdf_python/asym_encryption.py +75 -0
- otdf_python/auth_headers.py +21 -0
- otdf_python/autoconfigure_utils.py +113 -0
- otdf_python/cli.py +570 -0
- otdf_python/collection_store.py +41 -0
- otdf_python/collection_store_impl.py +22 -0
- otdf_python/config.py +69 -0
- otdf_python/connect_client.py +0 -0
- otdf_python/constants.py +1 -0
- otdf_python/crypto_utils.py +78 -0
- otdf_python/dpop.py +81 -0
- otdf_python/ecc_mode.py +32 -0
- otdf_python/eckeypair.py +75 -0
- otdf_python/header.py +143 -0
- otdf_python/invalid_zip_exception.py +8 -0
- otdf_python/kas_client.py +603 -0
- otdf_python/kas_connect_rpc_client.py +207 -0
- otdf_python/kas_info.py +25 -0
- otdf_python/kas_key_cache.py +52 -0
- otdf_python/key_type.py +31 -0
- otdf_python/key_type_constants.py +43 -0
- otdf_python/manifest.py +215 -0
- otdf_python/nanotdf.py +553 -0
- otdf_python/nanotdf_ecdsa_struct.py +132 -0
- otdf_python/nanotdf_type.py +43 -0
- otdf_python/policy_binding_serializer.py +39 -0
- otdf_python/policy_info.py +78 -0
- otdf_python/policy_object.py +22 -0
- otdf_python/policy_stub.py +2 -0
- otdf_python/resource_locator.py +44 -0
- otdf_python/sdk.py +528 -0
- otdf_python/sdk_builder.py +448 -0
- otdf_python/sdk_exceptions.py +16 -0
- otdf_python/symmetric_and_payload_config.py +30 -0
- otdf_python/tdf.py +479 -0
- otdf_python/tdf_reader.py +153 -0
- otdf_python/tdf_writer.py +23 -0
- otdf_python/token_source.py +34 -0
- otdf_python/version.py +57 -0
- otdf_python/zip_reader.py +47 -0
- otdf_python/zip_writer.py +70 -0
- otdf_python-0.3.1.dist-info/METADATA +231 -0
- otdf_python-0.3.1.dist-info/RECORD +137 -0
- {otdf_python-0.1.10.dist-info → otdf_python-0.3.1.dist-info}/WHEEL +1 -2
- {otdf_python-0.1.10.dist-info → otdf_python-0.3.1.dist-info/licenses}/LICENSE +1 -1
- otdf_python_proto/__init__.py +37 -0
- otdf_python_proto/authorization/__init__.py +1 -0
- otdf_python_proto/authorization/authorization_pb2.py +80 -0
- otdf_python_proto/authorization/authorization_pb2.pyi +161 -0
- otdf_python_proto/authorization/authorization_pb2_connect.py +191 -0
- otdf_python_proto/authorization/v2/authorization_pb2.py +105 -0
- otdf_python_proto/authorization/v2/authorization_pb2.pyi +134 -0
- otdf_python_proto/authorization/v2/authorization_pb2_connect.py +233 -0
- otdf_python_proto/common/__init__.py +1 -0
- otdf_python_proto/common/common_pb2.py +52 -0
- otdf_python_proto/common/common_pb2.pyi +61 -0
- otdf_python_proto/entity/__init__.py +1 -0
- otdf_python_proto/entity/entity_pb2.py +47 -0
- otdf_python_proto/entity/entity_pb2.pyi +50 -0
- otdf_python_proto/entityresolution/__init__.py +1 -0
- otdf_python_proto/entityresolution/entity_resolution_pb2.py +57 -0
- otdf_python_proto/entityresolution/entity_resolution_pb2.pyi +55 -0
- otdf_python_proto/entityresolution/entity_resolution_pb2_connect.py +149 -0
- otdf_python_proto/entityresolution/v2/entity_resolution_pb2.py +55 -0
- otdf_python_proto/entityresolution/v2/entity_resolution_pb2.pyi +55 -0
- otdf_python_proto/entityresolution/v2/entity_resolution_pb2_connect.py +149 -0
- otdf_python_proto/kas/__init__.py +9 -0
- otdf_python_proto/kas/kas_pb2.py +103 -0
- otdf_python_proto/kas/kas_pb2.pyi +170 -0
- otdf_python_proto/kas/kas_pb2_connect.py +192 -0
- otdf_python_proto/legacy_grpc/__init__.py +1 -0
- otdf_python_proto/legacy_grpc/authorization/authorization_pb2_grpc.py +163 -0
- otdf_python_proto/legacy_grpc/authorization/v2/authorization_pb2_grpc.py +206 -0
- otdf_python_proto/legacy_grpc/common/common_pb2_grpc.py +4 -0
- otdf_python_proto/legacy_grpc/entity/entity_pb2_grpc.py +4 -0
- otdf_python_proto/legacy_grpc/entityresolution/entity_resolution_pb2_grpc.py +122 -0
- otdf_python_proto/legacy_grpc/entityresolution/v2/entity_resolution_pb2_grpc.py +120 -0
- otdf_python_proto/legacy_grpc/kas/kas_pb2_grpc.py +172 -0
- otdf_python_proto/legacy_grpc/logger/audit/test_pb2_grpc.py +4 -0
- otdf_python_proto/legacy_grpc/policy/actions/actions_pb2_grpc.py +249 -0
- otdf_python_proto/legacy_grpc/policy/attributes/attributes_pb2_grpc.py +873 -0
- otdf_python_proto/legacy_grpc/policy/kasregistry/key_access_server_registry_pb2_grpc.py +602 -0
- otdf_python_proto/legacy_grpc/policy/keymanagement/key_management_pb2_grpc.py +251 -0
- otdf_python_proto/legacy_grpc/policy/namespaces/namespaces_pb2_grpc.py +427 -0
- otdf_python_proto/legacy_grpc/policy/objects_pb2_grpc.py +4 -0
- otdf_python_proto/legacy_grpc/policy/registeredresources/registered_resources_pb2_grpc.py +524 -0
- otdf_python_proto/legacy_grpc/policy/resourcemapping/resource_mapping_pb2_grpc.py +516 -0
- otdf_python_proto/legacy_grpc/policy/selectors_pb2_grpc.py +4 -0
- otdf_python_proto/legacy_grpc/policy/subjectmapping/subject_mapping_pb2_grpc.py +551 -0
- otdf_python_proto/legacy_grpc/policy/unsafe/unsafe_pb2_grpc.py +485 -0
- otdf_python_proto/legacy_grpc/wellknownconfiguration/wellknown_configuration_pb2_grpc.py +77 -0
- otdf_python_proto/logger/__init__.py +1 -0
- otdf_python_proto/logger/audit/test_pb2.py +43 -0
- otdf_python_proto/logger/audit/test_pb2.pyi +45 -0
- otdf_python_proto/policy/__init__.py +1 -0
- otdf_python_proto/policy/actions/actions_pb2.py +75 -0
- otdf_python_proto/policy/actions/actions_pb2.pyi +87 -0
- otdf_python_proto/policy/actions/actions_pb2_connect.py +275 -0
- otdf_python_proto/policy/attributes/attributes_pb2.py +234 -0
- otdf_python_proto/policy/attributes/attributes_pb2.pyi +328 -0
- otdf_python_proto/policy/attributes/attributes_pb2_connect.py +863 -0
- otdf_python_proto/policy/kasregistry/key_access_server_registry_pb2.py +266 -0
- otdf_python_proto/policy/kasregistry/key_access_server_registry_pb2.pyi +450 -0
- otdf_python_proto/policy/kasregistry/key_access_server_registry_pb2_connect.py +611 -0
- otdf_python_proto/policy/keymanagement/key_management_pb2.py +79 -0
- otdf_python_proto/policy/keymanagement/key_management_pb2.pyi +87 -0
- otdf_python_proto/policy/keymanagement/key_management_pb2_connect.py +275 -0
- otdf_python_proto/policy/namespaces/namespaces_pb2.py +117 -0
- otdf_python_proto/policy/namespaces/namespaces_pb2.pyi +147 -0
- otdf_python_proto/policy/namespaces/namespaces_pb2_connect.py +443 -0
- otdf_python_proto/policy/objects_pb2.py +150 -0
- otdf_python_proto/policy/objects_pb2.pyi +464 -0
- otdf_python_proto/policy/registeredresources/registered_resources_pb2.py +139 -0
- otdf_python_proto/policy/registeredresources/registered_resources_pb2.pyi +196 -0
- otdf_python_proto/policy/registeredresources/registered_resources_pb2_connect.py +527 -0
- otdf_python_proto/policy/resourcemapping/resource_mapping_pb2.py +139 -0
- otdf_python_proto/policy/resourcemapping/resource_mapping_pb2.pyi +194 -0
- otdf_python_proto/policy/resourcemapping/resource_mapping_pb2_connect.py +527 -0
- otdf_python_proto/policy/selectors_pb2.py +57 -0
- otdf_python_proto/policy/selectors_pb2.pyi +90 -0
- otdf_python_proto/policy/subjectmapping/subject_mapping_pb2.py +127 -0
- otdf_python_proto/policy/subjectmapping/subject_mapping_pb2.pyi +189 -0
- otdf_python_proto/policy/subjectmapping/subject_mapping_pb2_connect.py +569 -0
- otdf_python_proto/policy/unsafe/unsafe_pb2.py +113 -0
- otdf_python_proto/policy/unsafe/unsafe_pb2.pyi +145 -0
- otdf_python_proto/policy/unsafe/unsafe_pb2_connect.py +485 -0
- otdf_python_proto/wellknownconfiguration/__init__.py +1 -0
- otdf_python_proto/wellknownconfiguration/wellknown_configuration_pb2.py +51 -0
- otdf_python_proto/wellknownconfiguration/wellknown_configuration_pb2.pyi +32 -0
- otdf_python_proto/wellknownconfiguration/wellknown_configuration_pb2_connect.py +107 -0
- otdf_python/_gotdf_python.cpython-312-darwin.so +0 -0
- otdf_python/build.py +0 -190
- otdf_python/go.py +0 -1478
- otdf_python/gotdf_python.py +0 -383
- otdf_python-0.1.10.dist-info/METADATA +0 -149
- otdf_python-0.1.10.dist-info/RECORD +0 -10
- otdf_python-0.1.10.dist-info/top_level.txt +0 -1
otdf_python/cli.py
ADDED
|
@@ -0,0 +1,570 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
OpenTDF Python CLI
|
|
4
|
+
|
|
5
|
+
A command-line interface for encrypting and decrypting files using OpenTDF.
|
|
6
|
+
Provides encrypt, decrypt, and inspect commands similar to the otdfctl CLI.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
import argparse
|
|
10
|
+
import contextlib
|
|
11
|
+
import json
|
|
12
|
+
import logging
|
|
13
|
+
import sys
|
|
14
|
+
from dataclasses import asdict
|
|
15
|
+
from importlib import metadata
|
|
16
|
+
from io import BytesIO
|
|
17
|
+
from pathlib import Path
|
|
18
|
+
|
|
19
|
+
from otdf_python.config import KASInfo, NanoTDFConfig, TDFConfig
|
|
20
|
+
from otdf_python.sdk import SDK
|
|
21
|
+
from otdf_python.sdk_builder import SDKBuilder
|
|
22
|
+
from otdf_python.sdk_exceptions import SDKException
|
|
23
|
+
from otdf_python.tdf import TDFReaderConfig
|
|
24
|
+
|
|
25
|
+
try:
    # Resolve the installed distribution's version for the --version flag.
    __version__ = metadata.version("otdf-python")
except metadata.PackageNotFoundError:
    # package is not installed, e.g., in development
    __version__ = "0.0.0"
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
# Set up logging
|
|
33
|
+
logger = logging.getLogger(__name__)
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class CLIError(Exception):
|
|
37
|
+
"""Custom exception for CLI errors."""
|
|
38
|
+
|
|
39
|
+
def __init__(self, level: str, message: str, cause: Exception | None = None):
|
|
40
|
+
self.level = level
|
|
41
|
+
self.message = message
|
|
42
|
+
self.cause = cause
|
|
43
|
+
super().__init__(message)
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def setup_logging(level: str = "INFO", silent: bool = False):
    """Configure root logging for the CLI.

    ``silent`` overrides ``level`` and suppresses everything below CRITICAL.
    Unrecognized level names fall back to INFO. Log output goes to stderr so
    it never mixes with payload data written to stdout.
    """
    effective = "CRITICAL" if silent else level
    resolved = getattr(logging, effective.upper(), logging.INFO)
    logging.basicConfig(
        level=resolved,
        format="%(levelname)s: %(message)s",
        handlers=[logging.StreamHandler(sys.stderr)],
    )
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def validate_file_exists(file_path: str) -> Path:
    """Return ``file_path`` as a Path, raising CLIError unless it names an existing regular file."""
    candidate = Path(file_path)
    if not candidate.exists():
        raise CLIError("CRITICAL", f"File does not exist: {file_path}")
    if not candidate.is_file():
        raise CLIError("CRITICAL", f"Path is not a file: {file_path}")
    return candidate
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def parse_attributes(attributes_str: str) -> list[str]:
    """Split a comma-separated attribute string into trimmed, non-empty values."""
    if not attributes_str:
        return []
    trimmed = (piece.strip() for piece in attributes_str.split(","))
    return [piece for piece in trimmed if piece]
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def parse_kas_endpoints(kas_str: str) -> list[str]:
    """Split a comma-separated KAS endpoint string into trimmed, non-empty URLs."""
    if not kas_str:
        return []
    cleaned = map(str.strip, kas_str.split(","))
    return [endpoint for endpoint in cleaned if endpoint]
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def load_client_credentials(creds_file_path: str) -> tuple[str, str]:
    """Load OAuth client credentials from a JSON file.

    The file must contain non-empty ``clientId`` and ``clientSecret`` fields.

    Args:
        creds_file_path: Path to the JSON credentials file.

    Returns:
        A ``(client_id, client_secret)`` tuple.

    Raises:
        CLIError: If the file is missing, contains invalid JSON, cannot be
            read, or lacks either required field.
    """
    try:
        creds_path = Path(creds_file_path)
        if not creds_path.exists():
            raise CLIError(
                "CRITICAL", f"Credentials file does not exist: {creds_file_path}"
            )

        with open(creds_path) as f:
            creds = json.load(f)

        client_id = creds.get("clientId")
        client_secret = creds.get("clientSecret")

        if not client_id or not client_secret:
            raise CLIError(
                "CRITICAL",
                f"Credentials file must contain 'clientId' and 'clientSecret' fields: {creds_file_path}",
            )

        return client_id, client_secret

    except CLIError:
        # Bug fix: previously the generic handler below caught our own
        # CLIErrors and re-wrapped them, destroying the specific message.
        raise
    except json.JSONDecodeError as e:
        raise CLIError(
            "CRITICAL", f"Invalid JSON in credentials file {creds_file_path}: {e}"
        ) from e
    except Exception as e:
        raise CLIError(
            "CRITICAL", f"Error reading credentials file {creds_file_path}: {e}"
        ) from e
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
def build_sdk(args) -> SDK:
    """Construct an SDK instance from parsed CLI arguments.

    Applies endpoint, credential, and transport-security options to an
    SDKBuilder in order; raises CLIError when no usable credentials are given.
    """
    builder = SDKBuilder()

    if args.platform_url:
        builder.set_platform_endpoint(args.platform_url)

        # An http:// platform URL implies plaintext transport even when the
        # user did not pass --plaintext explicitly.
        plaintext_requested = getattr(args, "plaintext", False)
        if args.platform_url.startswith("http://") and not plaintext_requested:
            logger.debug(
                f"Auto-detected HTTP URL {args.platform_url}, enabling plaintext mode"
            )
            builder.use_insecure_plaintext_connection(True)

    if args.oidc_endpoint:
        builder.set_issuer_endpoint(args.oidc_endpoint)

    creds_file = getattr(args, "with_client_creds_file", None)
    combined_auth = getattr(args, "auth", None)
    if args.client_id and args.client_secret:
        builder.client_secret(args.client_id, args.client_secret)
    elif creds_file:
        # Load credentials from a JSON file.
        client_id, client_secret = load_client_credentials(creds_file)
        builder.client_secret(client_id, client_secret)
    elif combined_auth:
        # Legacy combined form: <clientId>:<clientSecret>
        pieces = combined_auth.split(":")
        if len(pieces) != 2:
            raise CLIError(
                "CRITICAL",
                f"Auth expects <clientId>:<clientSecret>, received {combined_auth}",
            )
        builder.client_secret(pieces[0], pieces[1])
    else:
        raise CLIError(
            "CRITICAL",
            "Authentication required: provide --with-client-creds-file OR --client-id and --client-secret",
        )

    if getattr(args, "plaintext", False):
        builder.use_insecure_plaintext_connection(True)

    if args.insecure:
        builder.use_insecure_skip_verify(True)

    return builder.build()
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
def create_tdf_config(sdk: SDK, args) -> TDFConfig:
    """Build a TDFConfig from CLI arguments, layered on the SDK's defaults."""
    attr_arg = getattr(args, "attributes", None)
    attributes = parse_attributes(attr_arg) if attr_arg else []

    config = sdk.new_tdf_config(attributes=attributes)

    kas_arg = getattr(args, "kas_endpoint", None)
    if kas_arg:
        # Register every requested KAS endpoint with the config.
        config.kas_info_list.extend(
            KASInfo(url=endpoint) for endpoint in parse_kas_endpoints(kas_arg)
        )

    mime_arg = getattr(args, "mime_type", None)
    if mime_arg:
        config.mime_type = mime_arg

    if getattr(args, "autoconfigure", None) is not None:
        config.autoconfigure = args.autoconfigure

    return config
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
def create_nano_tdf_config(sdk: SDK, args) -> NanoTDFConfig:
    """Build a NanoTDFConfig from CLI arguments.

    NOTE(review): ``sdk`` is currently unused here; it is kept for signature
    symmetry with create_tdf_config.
    """
    attr_arg = getattr(args, "attributes", None)
    config = NanoTDFConfig(attributes=parse_attributes(attr_arg) if attr_arg else [])

    kas_arg = getattr(args, "kas_endpoint", None)
    if kas_arg:
        # Register every requested KAS endpoint with the config.
        config.kas_info_list.extend(
            KASInfo(url=endpoint) for endpoint in parse_kas_endpoints(kas_arg)
        )

    binding = getattr(args, "policy_binding", None)
    if binding:
        # Only "ecdsa" selects ECDSA binding; anything else uses the default GMAC.
        config.ecc_mode = "ecdsa" if binding.lower() == "ecdsa" else "gmac"

    return config
|
|
213
|
+
|
|
214
|
+
|
|
215
|
+
def _write_container(sdk, payload: bytes, args, output_file):
    """Encrypt ``payload`` per ``args`` and write the resulting container to ``output_file``.

    Extracted to remove the verbatim duplication between the file-output and
    stdout branches of cmd_encrypt.
    """
    container_type = getattr(args, "container_type", "tdf")
    output_stream = BytesIO()

    if container_type == "nano":
        logger.debug("Creating NanoTDF")
        config = create_nano_tdf_config(sdk, args)
        size = sdk.create_nano_tdf(BytesIO(payload), output_stream, config)
        output_file.write(output_stream.getvalue())
        logger.info(f"Created NanoTDF of size {size} bytes")
    else:
        logger.debug("Creating TDF")
        config = create_tdf_config(sdk, args)
        manifest, size, _ = sdk.create_tdf(BytesIO(payload), config, output_stream)
        output_file.write(output_stream.getvalue())
        logger.info(f"Created TDF of size {size} bytes")


def cmd_encrypt(args):
    """Handle encrypt command: read the input file, encrypt it, write the container.

    Writes to ``args.output`` when given (removing a partially-written file on
    failure), otherwise to stdout.
    """
    logger.info("Running encrypt command")

    # Validate input file
    input_path = validate_file_exists(args.file)

    # Build SDK
    sdk = build_sdk(args)

    try:
        with open(input_path, "rb") as input_file:
            payload = input_file.read()

        if args.output:
            output_path = Path(args.output)
            with open(output_path, "wb") as output_file:
                try:
                    _write_container(sdk, payload, args, output_file)
                except Exception:
                    # Clean up the output file if there was an error
                    with contextlib.suppress(Exception):
                        output_path.unlink()
                    raise
        else:
            _write_container(sdk, payload, args, sys.stdout.buffer)

    finally:
        sdk.close()
|
|
286
|
+
|
|
287
|
+
|
|
288
|
+
def _decrypt_to_stream(sdk, encrypted_data: bytes, args, output_file):
    """Decrypt ``encrypted_data`` (ZIP-style TDF or NanoTDF) into ``output_file``.

    Extracted to remove the verbatim duplication between the file-output and
    stdout branches of cmd_decrypt. Regular TDFs are ZIP archives (b"PK"
    magic); anything else is assumed to be a NanoTDF.
    """
    if encrypted_data.startswith(b"PK"):
        logger.debug("Decrypting TDF")
        reader_config = TDFReaderConfig()
        tdf_reader = sdk.load_tdf_with_config(encrypted_data, reader_config)
        # Access payload directly from TDFReader
        output_file.write(tdf_reader.payload)
        logger.info("Successfully decrypted TDF")
    else:
        logger.debug("Decrypting NanoTDF")
        config = create_nano_tdf_config(sdk, args)
        sdk.read_nano_tdf(BytesIO(encrypted_data), output_file, config)
        logger.info("Successfully decrypted NanoTDF")


def cmd_decrypt(args):
    """Handle decrypt command: read a container file and write the plaintext.

    Writes to ``args.output`` when given (removing a partially-written file on
    failure), otherwise to stdout.
    """
    logger.info("Running decrypt command")

    # Validate input file
    input_path = validate_file_exists(args.file)

    # Build SDK
    sdk = build_sdk(args)

    try:
        with open(input_path, "rb") as input_file:
            encrypted_data = input_file.read()

        if args.output:
            output_path = Path(args.output)
            with open(output_path, "wb") as output_file:
                try:
                    _decrypt_to_stream(sdk, encrypted_data, args, output_file)
                except Exception:
                    # Clean up the output file if there was an error
                    output_path.unlink(missing_ok=True)
                    raise
        else:
            _decrypt_to_stream(sdk, encrypted_data, args, sys.stdout.buffer)

    finally:
        sdk.close()
|
|
353
|
+
|
|
354
|
+
|
|
355
|
+
def cmd_inspect(args):
    """Handle inspect command.

    Prints a JSON description of the container to stdout. Full TDF manifest
    inspection requires a working SDK (and therefore credentials); when the
    SDK cannot be built or used, falls back to basic type/size information.
    """
    logger.info("Running inspect command")

    # Validate input file
    input_path = validate_file_exists(args.file)

    try:
        sdk = build_sdk(args)

        try:
            # Read encrypted file
            with open(input_path, "rb") as input_file:
                encrypted_data = input_file.read()

            if encrypted_data.startswith(b"PK"):
                # Regular TDF (ZIP archive magic number)
                logger.debug("Inspecting TDF")
                reader_config = TDFReaderConfig()
                tdf_reader = sdk.load_tdf_with_config(
                    BytesIO(encrypted_data), reader_config
                )
                manifest = tdf_reader.manifest

                # Try to get data attributes
                try:
                    data_attributes = []  # This would need to be implemented in the SDK
                    inspection_result = {
                        "manifest": asdict(manifest),
                        "dataAttributes": data_attributes,
                    }
                except Exception as e:
                    logger.warning(f"Could not retrieve data attributes: {e}")
                    inspection_result = {"manifest": asdict(manifest)}

                # default=str stringifies any non-JSON-serializable manifest fields
                print(json.dumps(inspection_result, indent=2, default=str))
            else:
                # NanoTDF - for now just show basic info
                logger.debug("Inspecting NanoTDF")
                print(
                    json.dumps(
                        {
                            "type": "NanoTDF",
                            "size": len(encrypted_data),
                            "note": "NanoTDF inspection not fully implemented",
                        },
                        indent=2,
                    )
                )

        finally:
            sdk.close()

    except Exception as e:
        # If we can't inspect due to auth issues, show what we can
        logger.warning(f"Limited inspection due to: {e}")
        with open(input_path, "rb") as input_file:
            encrypted_data = input_file.read()

        file_type = "TDF" if encrypted_data.startswith(b"PK") else "NanoTDF"
        print(
            json.dumps(
                {
                    "type": file_type,
                    "size": len(encrypted_data),
                    "note": "Full inspection requires authentication",
                },
                indent=2,
            )
        )
|
|
425
|
+
|
|
426
|
+
|
|
427
|
+
def create_parser() -> argparse.ArgumentParser:
    """Create the argument parser.

    Defines global connection/auth/security options plus the encrypt,
    decrypt, and inspect subcommands. Each subcommand takes the target file
    as a positional argument.
    """
    parser = argparse.ArgumentParser(
        description="OpenTDF CLI - Encrypt and decrypt files using OpenTDF",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        # Bug fix: the examples previously used "--file plain.txt", but every
        # subcommand declares "file" as a positional argument, so the
        # documented invocations were rejected by argparse.
        epilog="""
Examples:
  %(prog)s encrypt plain.txt --with-client-creds-file creds.json --platform-url https://platform.example.com
  %(prog)s decrypt encrypted.tdf --with-client-creds-file creds.json --platform-url https://platform.example.com
  %(prog)s inspect encrypted.tdf

Where creds.json contains:
  {"clientId": "your-client-id", "clientSecret": "your-client-secret"}
""",
    )

    # Global options
    parser.add_argument(
        "--version", action="version", version=f"OpenTDF Python SDK {__version__}"
    )
    parser.add_argument(
        "--log-level",
        choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
        default="INFO",
        help="Set logging level",
    )
    parser.add_argument("--silent", action="store_true", help="Disable logging")

    # Server endpoints
    server_group = parser.add_argument_group("Server Endpoints")
    server_group.add_argument("--platform-url", help="OpenTDF platform URL")
    server_group.add_argument(
        "--kas-endpoint", help="KAS endpoint URL (comma-separated for multiple)"
    )
    server_group.add_argument("--oidc-endpoint", help="OIDC endpoint URL")

    # Authentication
    auth_group = parser.add_argument_group("Authentication")
    auth_group.add_argument(
        "--with-client-creds-file",
        help="Path to JSON file containing OAuth credentials (clientId and clientSecret)",
    )
    auth_group.add_argument("--client-id", help="OAuth client ID")
    auth_group.add_argument("--client-secret", help="OAuth client secret")

    # Security options
    security_group = parser.add_argument_group("Security")
    security_group.add_argument(
        "--plaintext", action="store_true", help="Use HTTP instead of HTTPS"
    )
    security_group.add_argument(
        "--insecure", action="store_true", help="Skip TLS verification"
    )

    # Subcommands
    subparsers = parser.add_subparsers(dest="command", help="Available commands")

    # Encrypt command
    encrypt_parser = subparsers.add_parser("encrypt", help="Encrypt a file")
    encrypt_parser.add_argument("file", help="Path to file to encrypt")
    encrypt_parser.add_argument(
        "--output", "-o", help="Output file path (default: stdout)"
    )
    encrypt_parser.add_argument(
        "--attributes", help="Data attributes (comma-separated)"
    )
    encrypt_parser.add_argument(
        "--container-type",
        choices=["tdf", "nano"],
        default="tdf",
        help="Container format",
    )
    encrypt_parser.add_argument("--mime-type", help="MIME type of the input file")
    encrypt_parser.add_argument(
        "--autoconfigure",
        action="store_true",
        help="Enable automatic configuration from attributes",
    )
    encrypt_parser.add_argument(
        "--policy-binding",
        choices=["ecdsa", "gmac"],
        default="gmac",
        help="Policy binding type (nano only)",
    )

    # Decrypt command
    decrypt_parser = subparsers.add_parser("decrypt", help="Decrypt a file")
    decrypt_parser.add_argument("file", help="Path to encrypted file")
    decrypt_parser.add_argument(
        "--output", "-o", help="Output file path (default: stdout)"
    )

    # Inspect command
    inspect_parser = subparsers.add_parser(
        "inspect", help="Inspect encrypted file metadata"
    )
    inspect_parser.add_argument("file", help="Path to encrypted file")

    return parser
|
|
526
|
+
|
|
527
|
+
|
|
528
|
+
def main():
    """Main CLI entry point: parse arguments, dispatch to the chosen subcommand.

    Exits with status 1 on any error; error details go to the logger on stderr.
    """
    parser = create_parser()
    args = parser.parse_args()

    # Configure logging before any command runs.
    setup_logging(args.log_level, args.silent)

    if not args.command:
        parser.print_help()
        sys.exit(1)

    # Dispatch table keeps the command routing in one place.
    handlers = {
        "encrypt": cmd_encrypt,
        "decrypt": cmd_decrypt,
        "inspect": cmd_inspect,
    }

    try:
        handler = handlers.get(args.command)
        if handler is None:
            parser.print_help()
            sys.exit(1)
        handler(args)

    except CLIError as e:
        logger.error(f"{e.level}: {e.message}")
        if e.cause:
            logger.debug(f"Caused by: {e.cause}")
        sys.exit(1)
    except SDKException as e:
        logger.error(f"SDK Error: {e}")
        sys.exit(1)
    except KeyboardInterrupt:
        logger.info("Interrupted by user")
        sys.exit(1)
    except Exception as e:
        logger.error(f"Unexpected error: {e}")
        logger.debug("", exc_info=True)
        sys.exit(1)
|
|
567
|
+
|
|
568
|
+
|
|
569
|
+
# Allow running the CLI directly as a script.
if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
from collections import OrderedDict
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class CollectionKey:
|
|
5
|
+
def __init__(self, key: bytes | None):
|
|
6
|
+
self.key = key
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class CollectionStore:
    """Abstract cache mapping NanoTDF collection headers to their keys."""

    # Sentinel returned when no key is cached for a header.
    NO_PRIVATE_KEY = CollectionKey(None)

    def store(self, header, key: CollectionKey):
        """Remember ``key`` for ``header``. Subclasses must override."""
        raise NotImplementedError

    def get_key(self, header) -> CollectionKey:
        """Return the cached key for ``header``. Subclasses must override."""
        raise NotImplementedError
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class NoOpCollectionStore(CollectionStore):
    """A CollectionStore that caches nothing and never yields a key."""

    def store(self, header, key: CollectionKey):
        # Intentionally discard everything.
        pass

    def get_key(self, header) -> CollectionKey:
        # Always report "no key cached".
        return self.NO_PRIVATE_KEY
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class CollectionStoreImpl(OrderedDict, CollectionStore):
    """Bounded header→key cache with FIFO eviction, keyed by serialized header."""

    MAX_SIZE_STORE = 500

    def __init__(self):
        super().__init__()

    def store(self, header, key: CollectionKey):
        serialized = header.to_bytes()
        self[serialized] = key
        # Evict the oldest entry once the bound is exceeded.
        if len(self) > self.MAX_SIZE_STORE:
            self.popitem(last=False)

    def get_key(self, header) -> CollectionKey:
        return self.get(header.to_bytes(), self.NO_PRIVATE_KEY)
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
from collections import OrderedDict
|
|
2
|
+
from threading import RLock
|
|
3
|
+
|
|
4
|
+
# Maximum number of cached entries before FIFO eviction kicks in.
MAX_SIZE_STORE = 500


class CollectionStoreImpl(OrderedDict):
    """Thread-safe, bounded header→key cache with FIFO eviction."""

    def __init__(self):
        super().__init__()
        # Reentrant lock guards every mutation/lookup of the mapping.
        self._lock = RLock()

    def store(self, header, key):
        serialized = header.to_bytes()  # Assumes header has a to_bytes() method
        with self._lock:
            self[serialized] = key
            if len(self) > MAX_SIZE_STORE:
                self.popitem(last=False)

    def get_key(self, header, no_private_key=None):
        serialized = header.to_bytes()
        with self._lock:
            return self.get(serialized, no_private_key)
|
otdf_python/config.py
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
from dataclasses import dataclass, field
|
|
2
|
+
from enum import Enum
|
|
3
|
+
from typing import Any
|
|
4
|
+
from urllib.parse import urlparse, urlunparse
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class TDFFormat(Enum):
    """Serialization format for a TDF container."""
    JSONFormat = "JSONFormat"
    XMLFormat = "XMLFormat"
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class IntegrityAlgorithm(Enum):
    """Algorithms usable for TDF root/segment integrity signatures."""
    HS256 = "HS256"
    GMAC = "GMAC"
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
@dataclass
|
|
18
|
+
class KASInfo:
|
|
19
|
+
url: str
|
|
20
|
+
public_key: str | None = None
|
|
21
|
+
kid: str | None = None
|
|
22
|
+
default: bool | None = None
|
|
23
|
+
algorithm: str | None = None
|
|
24
|
+
|
|
25
|
+
def __str__(self):
|
|
26
|
+
return f"KASInfo{{URL:'{self.url}', PublicKey:'{self.public_key}', KID:'{self.kid}', Default:{self.default}, Algorithm:'{self.algorithm}'}}"
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
@dataclass
class TDFConfig:
    """Options controlling standard (ZIP-based) TDF creation."""
    autoconfigure: bool = True  # derive KAS/split configuration from attributes
    default_segment_size: int = 2 * 1024 * 1024  # payload segment size in bytes (2 MiB)
    enable_encryption: bool = True
    tdf_format: TDFFormat = TDFFormat.JSONFormat
    tdf_public_key: str | None = None
    tdf_private_key: str | None = None
    meta_data: str | None = None
    integrity_algorithm: IntegrityAlgorithm = IntegrityAlgorithm.HS256  # root signature
    segment_integrity_algorithm: IntegrityAlgorithm = IntegrityAlgorithm.GMAC  # per-segment
    attributes: list[str] = field(default_factory=list)  # data attribute FQNs
    kas_info_list: list[KASInfo] = field(default_factory=list)
    mime_type: str = "application/octet-stream"
    # NOTE(review): annotated Optional but defaults to [] — confirm whether
    # None and empty list are meant to be distinguishable to consumers.
    split_plan: list[str] | None = field(default_factory=list)
    wrapping_key_type: str | None = None
    hex_encode_root_and_segment_hashes: bool = False
    render_version_info_in_manifest: bool = True
    policy_object: Any | None = None
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
@dataclass
class NanoTDFConfig:
    """Options controlling NanoTDF creation and reading."""
    ecc_mode: str | None = None  # policy binding: "ecdsa" or "gmac" (see cli.py)
    cipher: str | None = None
    config: str | None = None
    attributes: list[str] = field(default_factory=list)  # data attribute FQNs
    kas_info_list: list[KASInfo] = field(default_factory=list)
    collection_config: str | None = None
    policy_type: str | None = None
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
# Utility function to normalize KAS URLs (Python equivalent)
|
|
62
|
+
def get_kas_address(kas_url: str) -> str:
    """Normalize a KAS URL to canonical ``scheme://host:port`` form.

    Bare hostnames are assumed to be https; any path/query is dropped.
    NOTE(review): a missing port defaults to 443 even for http:// URLs —
    confirm this matches the platform SDK's intent.
    """
    target = kas_url if "://" in kas_url else f"https://{kas_url}"
    parts = urlparse(target)
    authority = f"{parts.hostname or ''}:{parts.port or 443}"
    return urlunparse((parts.scheme or "https", authority, "", "", "", ""))
|
|
File without changes
|