otdf-python 0.1.10__py3-none-any.whl → 0.3.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (144) hide show
  1. otdf_python/__init__.py +25 -0
  2. otdf_python/__main__.py +12 -0
  3. otdf_python/address_normalizer.py +84 -0
  4. otdf_python/aesgcm.py +55 -0
  5. otdf_python/assertion_config.py +84 -0
  6. otdf_python/asym_crypto.py +198 -0
  7. otdf_python/auth_headers.py +33 -0
  8. otdf_python/autoconfigure_utils.py +113 -0
  9. otdf_python/cli.py +569 -0
  10. otdf_python/collection_store.py +41 -0
  11. otdf_python/collection_store_impl.py +22 -0
  12. otdf_python/config.py +69 -0
  13. otdf_python/connect_client.py +0 -0
  14. otdf_python/constants.py +1 -0
  15. otdf_python/crypto_utils.py +78 -0
  16. otdf_python/dpop.py +81 -0
  17. otdf_python/ecc_constants.py +176 -0
  18. otdf_python/ecc_mode.py +83 -0
  19. otdf_python/ecdh.py +317 -0
  20. otdf_python/eckeypair.py +75 -0
  21. otdf_python/header.py +181 -0
  22. otdf_python/invalid_zip_exception.py +8 -0
  23. otdf_python/kas_client.py +709 -0
  24. otdf_python/kas_connect_rpc_client.py +213 -0
  25. otdf_python/kas_info.py +25 -0
  26. otdf_python/kas_key_cache.py +52 -0
  27. otdf_python/key_type.py +31 -0
  28. otdf_python/key_type_constants.py +43 -0
  29. otdf_python/manifest.py +215 -0
  30. otdf_python/nanotdf.py +863 -0
  31. otdf_python/nanotdf_ecdsa_struct.py +132 -0
  32. otdf_python/nanotdf_type.py +43 -0
  33. otdf_python/policy_binding_serializer.py +39 -0
  34. otdf_python/policy_info.py +55 -0
  35. otdf_python/policy_object.py +22 -0
  36. otdf_python/policy_stub.py +2 -0
  37. otdf_python/resource_locator.py +172 -0
  38. otdf_python/sdk.py +436 -0
  39. otdf_python/sdk_builder.py +416 -0
  40. otdf_python/sdk_exceptions.py +16 -0
  41. otdf_python/symmetric_and_payload_config.py +30 -0
  42. otdf_python/tdf.py +480 -0
  43. otdf_python/tdf_reader.py +153 -0
  44. otdf_python/tdf_writer.py +23 -0
  45. otdf_python/token_source.py +34 -0
  46. otdf_python/version.py +57 -0
  47. otdf_python/zip_reader.py +47 -0
  48. otdf_python/zip_writer.py +70 -0
  49. otdf_python-0.3.5.dist-info/METADATA +153 -0
  50. otdf_python-0.3.5.dist-info/RECORD +137 -0
  51. {otdf_python-0.1.10.dist-info → otdf_python-0.3.5.dist-info}/WHEEL +1 -2
  52. {otdf_python-0.1.10.dist-info → otdf_python-0.3.5.dist-info/licenses}/LICENSE +1 -1
  53. otdf_python_proto/__init__.py +37 -0
  54. otdf_python_proto/authorization/__init__.py +1 -0
  55. otdf_python_proto/authorization/authorization_pb2.py +80 -0
  56. otdf_python_proto/authorization/authorization_pb2.pyi +161 -0
  57. otdf_python_proto/authorization/authorization_pb2_connect.py +191 -0
  58. otdf_python_proto/authorization/v2/authorization_pb2.py +105 -0
  59. otdf_python_proto/authorization/v2/authorization_pb2.pyi +134 -0
  60. otdf_python_proto/authorization/v2/authorization_pb2_connect.py +233 -0
  61. otdf_python_proto/common/__init__.py +1 -0
  62. otdf_python_proto/common/common_pb2.py +52 -0
  63. otdf_python_proto/common/common_pb2.pyi +61 -0
  64. otdf_python_proto/entity/__init__.py +1 -0
  65. otdf_python_proto/entity/entity_pb2.py +47 -0
  66. otdf_python_proto/entity/entity_pb2.pyi +50 -0
  67. otdf_python_proto/entityresolution/__init__.py +1 -0
  68. otdf_python_proto/entityresolution/entity_resolution_pb2.py +57 -0
  69. otdf_python_proto/entityresolution/entity_resolution_pb2.pyi +55 -0
  70. otdf_python_proto/entityresolution/entity_resolution_pb2_connect.py +149 -0
  71. otdf_python_proto/entityresolution/v2/entity_resolution_pb2.py +55 -0
  72. otdf_python_proto/entityresolution/v2/entity_resolution_pb2.pyi +55 -0
  73. otdf_python_proto/entityresolution/v2/entity_resolution_pb2_connect.py +149 -0
  74. otdf_python_proto/kas/__init__.py +9 -0
  75. otdf_python_proto/kas/kas_pb2.py +103 -0
  76. otdf_python_proto/kas/kas_pb2.pyi +170 -0
  77. otdf_python_proto/kas/kas_pb2_connect.py +192 -0
  78. otdf_python_proto/legacy_grpc/__init__.py +1 -0
  79. otdf_python_proto/legacy_grpc/authorization/authorization_pb2_grpc.py +163 -0
  80. otdf_python_proto/legacy_grpc/authorization/v2/authorization_pb2_grpc.py +206 -0
  81. otdf_python_proto/legacy_grpc/common/common_pb2_grpc.py +4 -0
  82. otdf_python_proto/legacy_grpc/entity/entity_pb2_grpc.py +4 -0
  83. otdf_python_proto/legacy_grpc/entityresolution/entity_resolution_pb2_grpc.py +122 -0
  84. otdf_python_proto/legacy_grpc/entityresolution/v2/entity_resolution_pb2_grpc.py +120 -0
  85. otdf_python_proto/legacy_grpc/kas/kas_pb2_grpc.py +172 -0
  86. otdf_python_proto/legacy_grpc/logger/audit/test_pb2_grpc.py +4 -0
  87. otdf_python_proto/legacy_grpc/policy/actions/actions_pb2_grpc.py +249 -0
  88. otdf_python_proto/legacy_grpc/policy/attributes/attributes_pb2_grpc.py +873 -0
  89. otdf_python_proto/legacy_grpc/policy/kasregistry/key_access_server_registry_pb2_grpc.py +602 -0
  90. otdf_python_proto/legacy_grpc/policy/keymanagement/key_management_pb2_grpc.py +251 -0
  91. otdf_python_proto/legacy_grpc/policy/namespaces/namespaces_pb2_grpc.py +427 -0
  92. otdf_python_proto/legacy_grpc/policy/objects_pb2_grpc.py +4 -0
  93. otdf_python_proto/legacy_grpc/policy/registeredresources/registered_resources_pb2_grpc.py +524 -0
  94. otdf_python_proto/legacy_grpc/policy/resourcemapping/resource_mapping_pb2_grpc.py +516 -0
  95. otdf_python_proto/legacy_grpc/policy/selectors_pb2_grpc.py +4 -0
  96. otdf_python_proto/legacy_grpc/policy/subjectmapping/subject_mapping_pb2_grpc.py +551 -0
  97. otdf_python_proto/legacy_grpc/policy/unsafe/unsafe_pb2_grpc.py +485 -0
  98. otdf_python_proto/legacy_grpc/wellknownconfiguration/wellknown_configuration_pb2_grpc.py +77 -0
  99. otdf_python_proto/logger/__init__.py +1 -0
  100. otdf_python_proto/logger/audit/test_pb2.py +43 -0
  101. otdf_python_proto/logger/audit/test_pb2.pyi +45 -0
  102. otdf_python_proto/policy/__init__.py +1 -0
  103. otdf_python_proto/policy/actions/actions_pb2.py +75 -0
  104. otdf_python_proto/policy/actions/actions_pb2.pyi +87 -0
  105. otdf_python_proto/policy/actions/actions_pb2_connect.py +275 -0
  106. otdf_python_proto/policy/attributes/attributes_pb2.py +234 -0
  107. otdf_python_proto/policy/attributes/attributes_pb2.pyi +328 -0
  108. otdf_python_proto/policy/attributes/attributes_pb2_connect.py +863 -0
  109. otdf_python_proto/policy/kasregistry/key_access_server_registry_pb2.py +266 -0
  110. otdf_python_proto/policy/kasregistry/key_access_server_registry_pb2.pyi +450 -0
  111. otdf_python_proto/policy/kasregistry/key_access_server_registry_pb2_connect.py +611 -0
  112. otdf_python_proto/policy/keymanagement/key_management_pb2.py +79 -0
  113. otdf_python_proto/policy/keymanagement/key_management_pb2.pyi +87 -0
  114. otdf_python_proto/policy/keymanagement/key_management_pb2_connect.py +275 -0
  115. otdf_python_proto/policy/namespaces/namespaces_pb2.py +117 -0
  116. otdf_python_proto/policy/namespaces/namespaces_pb2.pyi +147 -0
  117. otdf_python_proto/policy/namespaces/namespaces_pb2_connect.py +443 -0
  118. otdf_python_proto/policy/objects_pb2.py +150 -0
  119. otdf_python_proto/policy/objects_pb2.pyi +464 -0
  120. otdf_python_proto/policy/registeredresources/registered_resources_pb2.py +139 -0
  121. otdf_python_proto/policy/registeredresources/registered_resources_pb2.pyi +196 -0
  122. otdf_python_proto/policy/registeredresources/registered_resources_pb2_connect.py +527 -0
  123. otdf_python_proto/policy/resourcemapping/resource_mapping_pb2.py +139 -0
  124. otdf_python_proto/policy/resourcemapping/resource_mapping_pb2.pyi +194 -0
  125. otdf_python_proto/policy/resourcemapping/resource_mapping_pb2_connect.py +527 -0
  126. otdf_python_proto/policy/selectors_pb2.py +57 -0
  127. otdf_python_proto/policy/selectors_pb2.pyi +90 -0
  128. otdf_python_proto/policy/subjectmapping/subject_mapping_pb2.py +127 -0
  129. otdf_python_proto/policy/subjectmapping/subject_mapping_pb2.pyi +189 -0
  130. otdf_python_proto/policy/subjectmapping/subject_mapping_pb2_connect.py +569 -0
  131. otdf_python_proto/policy/unsafe/unsafe_pb2.py +113 -0
  132. otdf_python_proto/policy/unsafe/unsafe_pb2.pyi +145 -0
  133. otdf_python_proto/policy/unsafe/unsafe_pb2_connect.py +485 -0
  134. otdf_python_proto/wellknownconfiguration/__init__.py +1 -0
  135. otdf_python_proto/wellknownconfiguration/wellknown_configuration_pb2.py +51 -0
  136. otdf_python_proto/wellknownconfiguration/wellknown_configuration_pb2.pyi +32 -0
  137. otdf_python_proto/wellknownconfiguration/wellknown_configuration_pb2_connect.py +107 -0
  138. otdf_python/_gotdf_python.cpython-312-darwin.so +0 -0
  139. otdf_python/build.py +0 -190
  140. otdf_python/go.py +0 -1478
  141. otdf_python/gotdf_python.py +0 -383
  142. otdf_python-0.1.10.dist-info/METADATA +0 -149
  143. otdf_python-0.1.10.dist-info/RECORD +0 -10
  144. otdf_python-0.1.10.dist-info/top_level.txt +0 -1
otdf_python/cli.py ADDED
@@ -0,0 +1,569 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ OpenTDF Python CLI
4
+
5
+ A command-line interface for encrypting and decrypting files using OpenTDF.
6
+ Provides encrypt, decrypt, and inspect commands similar to the otdfctl CLI.
7
+ """
8
+
9
+ import argparse
10
+ import contextlib
11
+ import json
12
+ import logging
13
+ import sys
14
+ from dataclasses import asdict
15
+ from importlib import metadata
16
+ from io import BytesIO
17
+ from pathlib import Path
18
+
19
+ from otdf_python.config import KASInfo, NanoTDFConfig, TDFConfig
20
+ from otdf_python.sdk import SDK
21
+ from otdf_python.sdk_builder import SDKBuilder
22
+ from otdf_python.sdk_exceptions import SDKException
23
+
24
try:
    # Resolve the version of the installed "otdf-python" distribution.
    __version__ = metadata.version("otdf-python")
except metadata.PackageNotFoundError:
    # package is not installed, e.g., in development
    __version__ = "0.0.0"
29
+
30
+
31
+ # Set up logging
32
+ logger = logging.getLogger(__name__)
33
+
34
+
35
+ class CLIError(Exception):
36
+ """Custom exception for CLI errors."""
37
+
38
+ def __init__(self, level: str, message: str, cause: Exception | None = None):
39
+ self.level = level
40
+ self.message = message
41
+ self.cause = cause
42
+ super().__init__(message)
43
+
44
+
45
def setup_logging(level: str = "INFO", silent: bool = False):
    """Configure root logging to write to stderr.

    Args:
        level: Logging level name; unrecognized names fall back to INFO.
        silent: When True, only CRITICAL messages are emitted.
    """
    effective = "CRITICAL" if silent else level
    resolved = getattr(logging, effective.upper(), logging.INFO)
    logging.basicConfig(
        level=resolved,
        format="%(levelname)s: %(message)s",
        handlers=[logging.StreamHandler(sys.stderr)],
    )
56
+
57
+
58
def validate_file_exists(file_path: str) -> Path:
    """Return a Path for *file_path*, raising CLIError unless it is an existing regular file."""
    candidate = Path(file_path)
    if not candidate.exists():
        raise CLIError("CRITICAL", f"File does not exist: {file_path}")
    if not candidate.is_file():
        raise CLIError("CRITICAL", f"Path is not a file: {file_path}")
    return candidate
66
+
67
+
68
def parse_attributes(attributes_str: str) -> list[str]:
    """Split a comma-separated attribute list, trimming whitespace and dropping empty entries."""
    if not attributes_str:
        return []
    trimmed = (piece.strip() for piece in attributes_str.split(","))
    return [piece for piece in trimmed if piece]
73
+
74
+
75
def parse_kas_endpoints(kas_str: str) -> list[str]:
    """Split a comma-separated KAS endpoint list, trimming whitespace and dropping empty entries."""
    if not kas_str:
        return []
    trimmed = (endpoint.strip() for endpoint in kas_str.split(","))
    return [endpoint for endpoint in trimmed if endpoint]
80
+
81
+
82
def load_client_credentials(creds_file_path: str) -> tuple[str, str]:
    """Load OAuth client credentials from a JSON file.

    The file must be a JSON object containing "clientId" and "clientSecret".

    Args:
        creds_file_path: Path to the JSON credentials file.

    Returns:
        A (client_id, client_secret) tuple.

    Raises:
        CLIError: If the file is missing, unreadable, not valid JSON, or
            does not provide both required fields.
    """
    # Bug fix: previously the specific CLIErrors raised here (missing file,
    # missing fields) were swallowed by a broad `except Exception` and
    # re-wrapped as a generic "Error reading credentials file" message.
    creds_path = Path(creds_file_path)
    if not creds_path.exists():
        raise CLIError(
            "CRITICAL", f"Credentials file does not exist: {creds_file_path}"
        )

    try:
        with creds_path.open() as f:
            creds = json.load(f)
    except json.JSONDecodeError as e:
        raise CLIError(
            "CRITICAL", f"Invalid JSON in credentials file {creds_file_path}: {e}"
        ) from e
    except OSError as e:
        raise CLIError(
            "CRITICAL", f"Error reading credentials file {creds_file_path}: {e}"
        ) from e

    # A non-object JSON document (list, string, ...) cannot hold credentials.
    if not isinstance(creds, dict):
        raise CLIError(
            "CRITICAL",
            f"Credentials file must contain 'clientId' and 'clientSecret' fields: {creds_file_path}",
        )

    client_id = creds.get("clientId")
    client_secret = creds.get("clientSecret")

    if not client_id or not client_secret:
        raise CLIError(
            "CRITICAL",
            f"Credentials file must contain 'clientId' and 'clientSecret' fields: {creds_file_path}",
        )

    return client_id, client_secret
113
+
114
+
115
def build_sdk(args) -> SDK:
    """Build an SDK instance from parsed CLI arguments.

    Args:
        args: argparse.Namespace carrying platform, auth, and security options.

    Returns:
        A configured SDK.

    Raises:
        CLIError: If no usable authentication option was supplied, or the
            legacy --auth string is not of the form clientId:clientSecret.
    """
    # Consistency fix: the original mixed hasattr-guarded access with raw
    # attribute access (args.platform_url, args.oidc_endpoint, args.insecure),
    # which raises AttributeError for namespaces missing optional options.
    # All optional options now use getattr with a safe default.
    builder = SDKBuilder()

    platform_url = getattr(args, "platform_url", None)
    if platform_url:
        builder.set_platform_endpoint(platform_url)

        # Auto-detect HTTP URLs and enable plaintext mode, unless the user
        # already requested it explicitly via --plaintext.
        if platform_url.startswith("http://") and not getattr(args, "plaintext", False):
            logger.debug(
                f"Auto-detected HTTP URL {platform_url}, enabling plaintext mode"
            )
            builder.use_insecure_plaintext_connection(True)

    oidc_endpoint = getattr(args, "oidc_endpoint", None)
    if oidc_endpoint:
        builder.set_issuer_endpoint(oidc_endpoint)

    client_id = getattr(args, "client_id", None)
    client_secret = getattr(args, "client_secret", None)
    creds_file = getattr(args, "with_client_creds_file", None)
    auth = getattr(args, "auth", None)

    if client_id and client_secret:
        builder.client_secret(client_id, client_secret)
    elif creds_file:
        # Load credentials from file
        file_id, file_secret = load_client_credentials(creds_file)
        builder.client_secret(file_id, file_secret)
    elif auth:
        # Parse combined auth string (clientId:clientSecret) - legacy support
        auth_parts = auth.split(":")
        if len(auth_parts) != 2:
            raise CLIError(
                "CRITICAL",
                f"Auth expects <clientId>:<clientSecret>, received {auth}",
            )
        builder.client_secret(auth_parts[0], auth_parts[1])
    else:
        raise CLIError(
            "CRITICAL",
            "Authentication required: provide --with-client-creds-file OR --client-id and --client-secret",
        )

    if getattr(args, "plaintext", False):
        builder.use_insecure_plaintext_connection(True)

    if getattr(args, "insecure", False):
        builder.use_insecure_skip_verify(True)

    return builder.build()
162
+
163
+
164
def create_tdf_config(sdk: SDK, args) -> TDFConfig:
    """Create TDF configuration from CLI arguments."""
    if hasattr(args, "attributes") and args.attributes:
        attributes = parse_attributes(args.attributes)
    else:
        attributes = []

    config = sdk.new_tdf_config(attributes=attributes)

    # Append any explicitly requested KAS endpoints.
    if hasattr(args, "kas_endpoint") and args.kas_endpoint:
        for kas_url in parse_kas_endpoints(args.kas_endpoint):
            config.kas_info_list.append(KASInfo(url=kas_url))

    if hasattr(args, "mime_type") and args.mime_type:
        config.mime_type = args.mime_type

    if hasattr(args, "autoconfigure") and args.autoconfigure is not None:
        config.autoconfigure = args.autoconfigure

    return config
187
+
188
+
189
def create_nano_tdf_config(sdk: SDK, args) -> NanoTDFConfig:
    """Create NanoTDF configuration from CLI arguments."""
    if hasattr(args, "attributes") and args.attributes:
        attributes = parse_attributes(args.attributes)
    else:
        attributes = []

    config = NanoTDFConfig(attributes=attributes)

    if hasattr(args, "kas_endpoint") and args.kas_endpoint:
        # Explicitly provided KAS endpoints take precedence.
        for kas_url in parse_kas_endpoints(args.kas_endpoint):
            config.kas_info_list.append(KASInfo(url=kas_url))
    elif args.platform_url:
        # No explicit KAS endpoint: derive one from the platform URL using
        # the default "/kas" path convention.
        derived = args.platform_url.rstrip("/") + "/kas"
        logger.debug(f"Deriving KAS endpoint from platform URL: {derived}")
        config.kas_info_list.append(KASInfo(url=derived))

    if hasattr(args, "policy_binding") and args.policy_binding:
        # "ecdsa" selects ECDSA policy binding; anything else gets the
        # default "gmac".
        config.ecc_mode = (
            "ecdsa" if args.policy_binding.lower() == "ecdsa" else "gmac"
        )

    return config
219
+
220
+
221
def _encrypt_payload(sdk, args, payload: bytes, output_file) -> None:
    """Encrypt *payload* per *args* and write the resulting container to *output_file*."""
    container_type = getattr(args, "container_type", "tdf")
    output_stream = BytesIO()

    if container_type == "nano":
        logger.debug("Creating NanoTDF")
        config = create_nano_tdf_config(sdk, args)
        size = sdk.create_nano_tdf(BytesIO(payload), output_stream, config)
        output_file.write(output_stream.getvalue())
        logger.info(f"Created NanoTDF of size {size} bytes")
    else:
        logger.debug("Creating TDF")
        config = create_tdf_config(sdk, args)
        _manifest, size, _ = sdk.create_tdf(BytesIO(payload), config, output_stream)
        output_file.write(output_stream.getvalue())
        logger.info(f"Created TDF of size {size} bytes")


def cmd_encrypt(args):
    """Handle encrypt command.

    Reads args.file, encrypts it as a TDF or NanoTDF (per --container-type),
    and writes the container to args.output or stdout.
    """
    # Decomposition: the original duplicated the whole encrypt sequence in
    # the file-output and stdout branches; both now share _encrypt_payload.
    logger.info("Running encrypt command")

    input_path = validate_file_exists(args.file)
    sdk = build_sdk(args)

    try:
        payload = input_path.read_bytes()

        if args.output:
            output_path = Path(args.output)
            with output_path.open("wb") as output_file:
                try:
                    _encrypt_payload(sdk, args, payload, output_file)
                except Exception:
                    # Clean up the partial output file if there was an error.
                    with contextlib.suppress(Exception):
                        output_path.unlink()
                    raise
        else:
            _encrypt_payload(sdk, args, payload, sys.stdout.buffer)
    finally:
        sdk.close()
292
+
293
+
294
def _decrypt_payload(sdk, args, encrypted_data: bytes, output_file) -> None:
    """Decrypt *encrypted_data* and write the plaintext to *output_file*.

    Regular TDFs are ZIP archives (magic bytes "PK"); anything else is
    assumed to be a NanoTDF.
    """
    if encrypted_data.startswith(b"PK"):
        logger.debug("Decrypting TDF")
        tdf_reader = sdk.load_tdf(encrypted_data)
        # Access payload directly from TDFReader
        output_file.write(tdf_reader.payload)
        logger.info("Successfully decrypted TDF")
    else:
        logger.debug("Decrypting NanoTDF")
        config = create_nano_tdf_config(sdk, args)
        sdk.read_nano_tdf(BytesIO(encrypted_data), output_file, config)
        logger.info("Successfully decrypted NanoTDF")


def cmd_decrypt(args):
    """Handle decrypt command.

    Reads the encrypted args.file and writes the decrypted payload to
    args.output or stdout.
    """
    # Decomposition: the original duplicated the whole decrypt sequence in
    # the file-output and stdout branches; both now share _decrypt_payload.
    logger.info("Running decrypt command")

    input_path = validate_file_exists(args.file)
    sdk = build_sdk(args)

    try:
        encrypted_data = input_path.read_bytes()

        if args.output:
            output_path = Path(args.output)
            with output_path.open("wb") as output_file:
                try:
                    _decrypt_payload(sdk, args, encrypted_data, output_file)
                except Exception:
                    # Clean up the partial output file if there was an error.
                    output_path.unlink(missing_ok=True)
                    raise
        else:
            _decrypt_payload(sdk, args, encrypted_data, sys.stdout.buffer)
    finally:
        sdk.close()
355
+
356
+
357
def _print_basic_info(encrypted_data: bytes, note: str) -> None:
    """Print a minimal JSON summary for a container we cannot fully parse."""
    file_type = "TDF" if encrypted_data.startswith(b"PK") else "NanoTDF"
    print(
        json.dumps(
            {"type": file_type, "size": len(encrypted_data), "note": note},
            indent=2,
        )
    )


def cmd_inspect(args):
    """Handle inspect command.

    Prints a JSON description of the encrypted file: the full manifest for a
    regular TDF, or a minimal summary for NanoTDFs and when full inspection
    fails (e.g. authentication problems).
    """
    # Decomposition: the basic-info JSON printing was duplicated in the
    # NanoTDF branch and the fallback path; both now use _print_basic_info.
    logger.info("Running inspect command")

    input_path = validate_file_exists(args.file)

    try:
        sdk = build_sdk(args)

        try:
            encrypted_data = input_path.read_bytes()

            if encrypted_data.startswith(b"PK"):
                # Regular TDF (ZIP container): dump the manifest.
                logger.debug("Inspecting TDF")
                # NOTE(review): cmd_decrypt calls sdk.load_tdf(encrypted_data)
                # with raw bytes while this wraps them in BytesIO — confirm
                # load_tdf accepts both forms.
                tdf_reader = sdk.load_tdf(BytesIO(encrypted_data))
                manifest = tdf_reader.manifest

                try:
                    # Data-attribute retrieval is not implemented in the SDK yet.
                    data_attributes = []
                    inspection_result = {
                        "manifest": asdict(manifest),
                        "dataAttributes": data_attributes,
                    }
                except Exception as e:
                    logger.warning(f"Could not retrieve data attributes: {e}")
                    inspection_result = {"manifest": asdict(manifest)}

                print(json.dumps(inspection_result, indent=2, default=str))
            else:
                # NanoTDF - for now just show basic info
                logger.debug("Inspecting NanoTDF")
                _print_basic_info(
                    encrypted_data, "NanoTDF inspection not fully implemented"
                )

        finally:
            sdk.close()

    except Exception as e:
        # If we can't inspect due to auth issues, show what we can.
        logger.warning(f"Limited inspection due to: {e}")
        encrypted_data = input_path.read_bytes()
        _print_basic_info(encrypted_data, "Full inspection requires authentication")
424
+
425
+
426
def create_parser() -> argparse.ArgumentParser:
    """Build the CLI argument parser: global flags plus encrypt/decrypt/inspect subcommands."""
    root = argparse.ArgumentParser(
        description="OpenTDF CLI - Encrypt and decrypt files using OpenTDF",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  %(prog)s encrypt --file plain.txt --with-client-creds-file creds.json --platform-url https://platform.example.com
  %(prog)s decrypt --file encrypted.tdf --with-client-creds-file creds.json --platform-url https://platform.example.com
  %(prog)s inspect --file encrypted.tdf

Where creds.json contains:
  {"clientId": "your-client-id", "clientSecret": "your-client-secret"}
""",
    )

    # Global options.
    root.add_argument(
        "--version", action="version", version=f"OpenTDF Python SDK {__version__}"
    )
    root.add_argument(
        "--log-level",
        choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
        default="INFO",
        help="Set logging level",
    )
    root.add_argument("--silent", action="store_true", help="Disable logging")

    # Server endpoints.
    endpoints = root.add_argument_group("Server Endpoints")
    endpoints.add_argument("--platform-url", help="OpenTDF platform URL")
    endpoints.add_argument(
        "--kas-endpoint", help="KAS endpoint URL (comma-separated for multiple)"
    )
    endpoints.add_argument("--oidc-endpoint", help="OIDC endpoint URL")

    # Authentication.
    auth = root.add_argument_group("Authentication")
    auth.add_argument(
        "--with-client-creds-file",
        help="Path to JSON file containing OAuth credentials (clientId and clientSecret)",
    )
    auth.add_argument("--client-id", help="OAuth client ID")
    auth.add_argument("--client-secret", help="OAuth client secret")

    # Security options.
    security = root.add_argument_group("Security")
    security.add_argument(
        "--plaintext", action="store_true", help="Use HTTP instead of HTTPS"
    )
    security.add_argument(
        "--insecure", action="store_true", help="Skip TLS verification"
    )

    # Subcommands.
    commands = root.add_subparsers(dest="command", help="Available commands")

    encrypt_cmd = commands.add_parser("encrypt", help="Encrypt a file")
    encrypt_cmd.add_argument("file", help="Path to file to encrypt")
    encrypt_cmd.add_argument(
        "--output", "-o", help="Output file path (default: stdout)"
    )
    encrypt_cmd.add_argument(
        "--attributes", help="Data attributes (comma-separated)"
    )
    encrypt_cmd.add_argument(
        "--container-type",
        choices=["tdf", "nano"],
        default="tdf",
        help="Container format",
    )
    encrypt_cmd.add_argument("--mime-type", help="MIME type of the input file")
    encrypt_cmd.add_argument(
        "--autoconfigure",
        action="store_true",
        help="Enable automatic configuration from attributes",
    )
    encrypt_cmd.add_argument(
        "--policy-binding",
        choices=["ecdsa", "gmac"],
        default="gmac",
        help="Policy binding type (nano only)",
    )

    decrypt_cmd = commands.add_parser("decrypt", help="Decrypt a file")
    decrypt_cmd.add_argument("file", help="Path to encrypted file")
    decrypt_cmd.add_argument(
        "--output", "-o", help="Output file path (default: stdout)"
    )

    inspect_cmd = commands.add_parser(
        "inspect", help="Inspect encrypted file metadata"
    )
    inspect_cmd.add_argument("file", help="Path to encrypted file")

    return root
525
+
526
+
527
def main():
    """CLI entry point: parse arguments, dispatch the subcommand, and map failures to exit code 1."""
    parser = create_parser()
    args = parser.parse_args()

    setup_logging(args.log_level, args.silent)

    # No subcommand given: show usage and fail.
    if not args.command:
        parser.print_help()
        sys.exit(1)

    # Dispatch table instead of an if/elif chain.
    handlers = {
        "encrypt": cmd_encrypt,
        "decrypt": cmd_decrypt,
        "inspect": cmd_inspect,
    }

    try:
        handler = handlers.get(args.command)
        if handler is None:
            parser.print_help()
            sys.exit(1)
        handler(args)

    except CLIError as e:
        logger.error(f"{e.level}: {e.message}")
        if e.cause:
            logger.debug(f"Caused by: {e.cause}")
        sys.exit(1)
    except SDKException as e:
        logger.error(f"SDK Error: {e}")
        sys.exit(1)
    except KeyboardInterrupt:
        logger.info("Interrupted by user")
        sys.exit(1)
    except Exception as e:
        logger.error(f"Unexpected error: {e}")
        logger.error("", exc_info=True)  # Always print traceback for unexpected errors
        sys.exit(1)
566
+
567
+
568
# Script entry point (also reachable via `python -m otdf_python.cli`).
if __name__ == "__main__":
    main()
@@ -0,0 +1,41 @@
1
+ from collections import OrderedDict
2
+
3
+
4
+ class CollectionKey:
5
+ def __init__(self, key: bytes | None):
6
+ self.key = key
7
+
8
+
9
class CollectionStore:
    """Interface for caching collection keys, looked up by NanoTDF header."""

    # Sentinel returned when no key is cached for a header.
    NO_PRIVATE_KEY = CollectionKey(None)

    def store(self, header, key: CollectionKey):
        """Cache *key* for *header*. Subclasses must override."""
        raise NotImplementedError

    def get_key(self, header) -> CollectionKey:
        """Return the cached key for *header*. Subclasses must override."""
        raise NotImplementedError
17
+
18
+
19
class NoOpCollectionStore(CollectionStore):
    """A CollectionStore that never caches anything."""

    def store(self, header, key: CollectionKey):
        # Deliberately discard the key.
        pass

    def get_key(self, header) -> CollectionKey:
        # Nothing is ever cached, so always report the "no key" sentinel.
        return self.NO_PRIVATE_KEY
25
+
26
+
27
class CollectionStoreImpl(OrderedDict, CollectionStore):
    """Bounded FIFO cache mapping serialized headers to CollectionKeys.

    Relies on OrderedDict insertion order: once more than MAX_SIZE_STORE
    entries exist, the oldest entry is evicted.
    """

    MAX_SIZE_STORE = 500

    def __init__(self):
        super().__init__()

    def store(self, header, key: CollectionKey):
        """Cache *key* under the header's serialized bytes, evicting the oldest entry when full."""
        self[header.to_bytes()] = key
        while len(self) > self.MAX_SIZE_STORE:
            self.popitem(last=False)

    def get_key(self, header) -> CollectionKey:
        """Return the cached key for *header*, or NO_PRIVATE_KEY when absent."""
        return self.get(header.to_bytes(), self.NO_PRIVATE_KEY)
@@ -0,0 +1,22 @@
1
+ from collections import OrderedDict
2
+ from threading import RLock
3
+
4
# Maximum number of cached entries before the oldest is evicted.
MAX_SIZE_STORE = 500


class CollectionStoreImpl(OrderedDict):
    """Thread-safe bounded FIFO cache mapping serialized headers to keys."""

    def __init__(self):
        super().__init__()
        # Reentrant lock guarding every mutation and lookup.
        self._lock = RLock()

    def store(self, header, key):
        """Cache *key* under header.to_bytes(), evicting the oldest entry when over capacity."""
        serialized = header.to_bytes()  # Assumes header has a to_bytes() method
        with self._lock:
            self[serialized] = key
            if len(self) > MAX_SIZE_STORE:
                self.popitem(last=False)

    def get_key(self, header, no_private_key=None):
        """Return the cached key for *header*, or *no_private_key* when absent."""
        serialized = header.to_bytes()
        with self._lock:
            return self.get(serialized, no_private_key)
otdf_python/config.py ADDED
@@ -0,0 +1,69 @@
1
+ from dataclasses import dataclass, field
2
+ from enum import Enum
3
+ from typing import Any
4
+ from urllib.parse import urlparse, urlunparse
5
+
6
+
7
class TDFFormat(Enum):
    """Serialization format of a TDF container."""

    JSONFormat = "JSONFormat"
    XMLFormat = "XMLFormat"
10
+
11
+
12
class IntegrityAlgorithm(Enum):
    """Integrity algorithm choices used for TDF root and segment signatures."""

    HS256 = "HS256"
    GMAC = "GMAC"
15
+
16
+
17
@dataclass
class KASInfo:
    """Details for a single Key Access Server (KAS)."""

    # KAS endpoint URL.
    url: str
    # Public key for this KAS, when known ahead of time.
    public_key: str | None = None
    # Key identifier — presumably the key ID advertised by the KAS; confirm.
    kid: str | None = None
    # NOTE(review): presumably marks the default KAS entry — confirm against callers.
    default: bool | None = None
    # Key algorithm — presumably the wrapping-key algorithm; confirm.
    algorithm: str | None = None

    def __str__(self):
        return f"KASInfo{{URL:'{self.url}', PublicKey:'{self.public_key}', KID:'{self.kid}', Default:{self.default}, Algorithm:'{self.algorithm}'}}"
27
+
28
+
29
@dataclass
class TDFConfig:
    """Options controlling standard TDF creation.

    NOTE(review): ``split_plan`` is typed ``list[str] | None`` but defaults
    to an empty list — confirm whether None is a meaningful value.
    """

    # Derive configuration automatically (from attributes) — see cli.py's
    # --autoconfigure flag.
    autoconfigure: bool = True
    # Payload segment size; presumably bytes (2 MiB) — confirm.
    default_segment_size: int = 2 * 1024 * 1024
    enable_encryption: bool = True
    tdf_format: TDFFormat = TDFFormat.JSONFormat
    tdf_public_key: str | None = None
    tdf_private_key: str | None = None
    meta_data: str | None = None
    # Root-signature integrity algorithm.
    integrity_algorithm: IntegrityAlgorithm = IntegrityAlgorithm.HS256
    # Per-segment integrity algorithm.
    segment_integrity_algorithm: IntegrityAlgorithm = IntegrityAlgorithm.GMAC
    # Data attributes applied to the TDF policy.
    attributes: list[str] = field(default_factory=list)
    # KAS servers used to wrap the payload key.
    kas_info_list: list[KASInfo] = field(default_factory=list)
    mime_type: str = "application/octet-stream"
    split_plan: list[str] | None = field(default_factory=list)
    wrapping_key_type: str | None = None
    hex_encode_root_and_segment_hashes: bool = False
    render_version_info_in_manifest: bool = True
    policy_object: Any | None = None
48
+
49
+
50
@dataclass
class NanoTDFConfig:
    """Options controlling NanoTDF creation and reading."""

    # Policy-binding mode; cli.py sets "ecdsa" or "gmac".
    ecc_mode: str | None = None
    cipher: str | None = None
    config: str | None = None
    # Data attributes applied to the NanoTDF policy.
    attributes: list[str] = field(default_factory=list)
    # KAS servers used to wrap the payload key.
    kas_info_list: list[KASInfo] = field(default_factory=list)
    collection_config: str | None = None
    policy_type: str | None = None
59
+
60
+
61
# Utility function to normalize KAS URLs (Python equivalent)
def get_kas_address(kas_url: str) -> str:
    """Normalize a KAS URL to the canonical ``scheme://host:port`` form.

    A bare host gets an ``https://`` scheme. A missing port defaults to the
    scheme's standard port (80 for http, 443 otherwise). Any path, query,
    or fragment is dropped, and the host is lower-cased by urlparse.

    Args:
        kas_url: KAS endpoint, e.g. "kas.example.com" or "http://kas:8080/kas".

    Returns:
        The normalized URL string.
    """
    if "://" not in kas_url:
        kas_url = "https://" + kas_url
    parsed = urlparse(kas_url)
    scheme = parsed.scheme or "https"
    host = parsed.hostname or ""
    # Bug fix: the port must default per scheme; previously a plain
    # "http://host" URL was normalized to "http://host:443".
    default_port = 80 if scheme == "http" else 443
    port = parsed.port or default_port
    return urlunparse((scheme, f"{host}:{port}", "", "", "", ""))
File without changes