emergent_translator-1.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- emergent_translator/__init__.py +126 -0
- emergent_translator/adaptive_codebook.py +342 -0
- emergent_translator/api_server.py +4988 -0
- emergent_translator/batch_encoder.py +555 -0
- emergent_translator/chunk_collector.py +978 -0
- emergent_translator/chunk_coordinator.py +738 -0
- emergent_translator/claude_compression.py +375 -0
- emergent_translator/cli.py +413 -0
- emergent_translator/client_sdk.py +903 -0
- emergent_translator/code_skeleton.py +448 -0
- emergent_translator/core.py +1081 -0
- emergent_translator/emergent_symbols.py +690 -0
- emergent_translator/format_handlers.py +901 -0
- emergent_translator/gpu_batch_encoder.py +848 -0
- emergent_translator/intelligent_router.py +509 -0
- emergent_translator/metrics.py +436 -0
- emergent_translator/py.typed +0 -0
- emergent_translator-1.1.0.dist-info/METADATA +568 -0
- emergent_translator-1.1.0.dist-info/RECORD +23 -0
- emergent_translator-1.1.0.dist-info/WHEEL +5 -0
- emergent_translator-1.1.0.dist-info/entry_points.txt +2 -0
- emergent_translator-1.1.0.dist-info/licenses/LICENSE +82 -0
- emergent_translator-1.1.0.dist-info/top_level.txt +1 -0
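The two-line entry_points.txt listed above is what exposes the emergent-translator command used throughout the examples in cli.py. Its contents are not shown in this diff, but under the standard console_scripts convention it would presumably read something like the following (hypothetical reconstruction, not taken from the package):

[console_scripts]
emergent-translator = emergent_translator.cli:main

The cli.py module that such an entry point would target is shown in full below.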
@@ -0,0 +1,413 @@
+#!/usr/bin/env python3
+"""
+Emergent Language Translator CLI
+
+Command-line interface for the Emergent Language Translator.
+Provides easy access to compression and decompression functionality.
+"""
+
+import argparse
+import json
+import sys
+import time
+from typing import Any, Dict
+from pathlib import Path
+
+from . import TranslatorSDK, __version__
+
+
+def main():
+    """Main CLI entry point."""
+    parser = argparse.ArgumentParser(
+        description="Emergent Language Translator CLI - 60x compression efficiency for AI communication",
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        epilog="""
+Examples:
+  # Compress a JSON file
+  emergent-translator compress data.json
+
+  # Compress JSON from stdin
+  echo '{"message": "hello"}' | emergent-translator compress -
+
+  # Decompress a file
+  emergent-translator decompress compressed.bin
+
+  # Check API health
+  emergent-translator health
+
+  # Benchmark compression
+  emergent-translator benchmark --size 1000
+"""
+    )
+
+    parser.add_argument(
+        "--version",
+        action="version",
+        version=f"emergent-translator {__version__}"
+    )
+
+    parser.add_argument(
+        "--api-url",
+        default="http://149.28.33.118:8001",
+        help="API base URL (default: %(default)s)"
+    )
+
+    parser.add_argument(
+        "--api-key",
+        default="eudaimonia-translator-demo",
+        help="API key for authentication"
+    )
+
+    parser.add_argument(
+        "--verbose", "-v",
+        action="store_true",
+        help="Verbose output"
+    )
+
+    subparsers = parser.add_subparsers(dest="command", help="Available commands")
+
+    # Compress command
+    compress_parser = subparsers.add_parser("compress", help="Compress data")
+    compress_parser.add_argument(
+        "input",
+        help="Input file (JSON) or '-' for stdin"
+    )
+    compress_parser.add_argument(
+        "--output", "-o",
+        help="Output file (default: input.compressed)"
+    )
+    compress_parser.add_argument(
+        "--format", "-f",
+        choices=["json", "csv", "jsonl", "yaml", "toml", "ini", "xml",
+                 "msgpack", "protobuf", "parquet", "arrow",
+                 "bson", "cbor", "xlsx"],
+        default=None,
+        help="Input format (auto-detected from extension if omitted)"
+    )
+    compress_parser.add_argument(
+        "--intent",
+        default="general",
+        choices=["general", "work", "governance", "social", "resource"],
+        help="Compression intent (default: %(default)s)"
+    )
+
+    # Decompress command
+    decompress_parser = subparsers.add_parser("decompress", help="Decompress data")
+    decompress_parser.add_argument(
+        "input",
+        help="Input compressed file or '-' for stdin"
+    )
+    decompress_parser.add_argument(
+        "--output", "-o",
+        help="Output file (default: stdout)"
+    )
+    decompress_parser.add_argument(
+        "--format", "-f",
+        choices=["json", "csv", "jsonl", "yaml", "toml", "ini", "xml",
+                 "msgpack", "protobuf", "parquet", "arrow",
+                 "bson", "cbor", "xlsx"],
+        default=None,
+        help="Output format (auto-detected from output extension if omitted)"
+    )
+
+    # Health command
+    health_parser = subparsers.add_parser("health", help="Check API health")
+
+    # Benchmark command
+    benchmark_parser = subparsers.add_parser("benchmark", help="Run performance benchmarks")
+    benchmark_parser.add_argument(
+        "--size",
+        type=int,
+        default=100,
+        help="Data size for benchmark (default: %(default)s)"
+    )
+    benchmark_parser.add_argument(
+        "--iterations",
+        type=int,
+        default=10,
+        help="Number of iterations (default: %(default)s)"
+    )
+
+    # Stats command
+    stats_parser = subparsers.add_parser("stats", help="Get API statistics")
+
+    args = parser.parse_args()
+
+    if not args.command:
+        parser.print_help()
+        return 1
+
+    try:
+        # Initialize SDK
+        sdk = TranslatorSDK(api_url=args.api_url, api_key=args.api_key)
+
+        if args.command == "compress":
+            return compress_command(sdk, args)
+        elif args.command == "decompress":
+            return decompress_command(sdk, args)
+        elif args.command == "health":
+            return health_command(sdk, args)
+        elif args.command == "benchmark":
+            return benchmark_command(sdk, args)
+        elif args.command == "stats":
+            return stats_command(sdk, args)
+
+    except KeyboardInterrupt:
+        print("\n❌ Operation cancelled", file=sys.stderr)
+        return 1
+    except Exception as e:
+        print(f"❌ Error: {e}", file=sys.stderr)
+        return 1
+
+
+def compress_command(sdk: TranslatorSDK, args) -> int:
+    """Handle compress command."""
+    # Determine format
+    fmt = getattr(args, "format", None)
+    if fmt is None and args.input != "-":
+        try:
+            from .format_handlers import detect_format
+            fmt = detect_format(args.input)
+        except (ImportError, ValueError):
+            fmt = "json"
+    fmt = fmt or "json"
+
+    # Read input data (binary or text depending on format)
+    try:
+        from .format_handlers import is_binary_format
+        binary_input = is_binary_format(fmt)
+    except ImportError:
+        binary_input = False
+
+    if args.input == "-":
+        input_data = sys.stdin.buffer.read() if binary_input else sys.stdin.read()
+    else:
+        input_path = Path(args.input)
+        if not input_path.exists():
+            print(f"❌ Input file not found: {args.input}", file=sys.stderr)
+            return 1
+        input_data = input_path.read_bytes() if binary_input else input_path.read_text()
+
+    # Parse input according to format
+    try:
+        if fmt == "json":
+            data = json.loads(input_data)
+        else:
+            from .format_handlers import get_handler
+            parse_fn, _ = get_handler(fmt)
+            data = parse_fn(input_data)
+    except (json.JSONDecodeError, ValueError) as e:
+        print(f"❌ Invalid {fmt.upper()} input: {e}", file=sys.stderr)
+        return 1
+
+    # Compress
+    if args.verbose:
+        print(f"🔧 Compressing {fmt.upper()} data with intent: {args.intent}")
+        print(f" Original size: {len(input_data)} bytes")
+
+    start_time = time.time()
+    compressed = sdk.compress(data, intent=args.intent)
+    compression_time = (time.time() - start_time) * 1000
+
+    if args.verbose:
+        compressed_size = len(compressed)
+        efficiency = (1 - compressed_size / len(input_data)) * 100
+        print(f" Compressed size: {compressed_size} bytes")
+        print(f" Efficiency: {efficiency:.1f}%")
+        print(f" Compression time: {compression_time:.2f}ms")
+
+    # Write output
+    if args.output:
+        output_path = Path(args.output)
+    else:
+        if args.input == "-":
+            # Write to stdout for stdin input
+            sys.stdout.buffer.write(compressed)
+            return 0
+        else:
+            output_path = Path(args.input).with_suffix(".compressed")
+
+    output_path.write_bytes(compressed)
+    print(f"✅ Compressed data written to: {output_path}")
+    return 0
+
+
+def decompress_command(sdk: TranslatorSDK, args) -> int:
+    """Handle decompress command."""
+    # Read compressed data
+    if args.input == "-":
+        compressed = sys.stdin.buffer.read()
+    else:
+        input_path = Path(args.input)
+        if not input_path.exists():
+            print(f"❌ Input file not found: {args.input}", file=sys.stderr)
+            return 1
+        compressed = input_path.read_bytes()
+
+    # Decompress
+    if args.verbose:
+        print(f"🔧 Decompressing data")
+        print(f" Compressed size: {len(compressed)} bytes")
+
+    start_time = time.time()
+    decompressed = sdk.decompress(compressed)
+    decompress_time = (time.time() - start_time) * 1000
+
+    # Determine output format
+    fmt = getattr(args, "format", None)
+    if fmt is None and args.output:
+        try:
+            from .format_handlers import detect_format
+            fmt = detect_format(args.output)
+        except (ImportError, ValueError):
+            fmt = "json"
+    fmt = fmt or "json"
+
+    # Serialize to output format
+    try:
+        from .format_handlers import is_binary_format
+        binary_output = is_binary_format(fmt)
+    except ImportError:
+        binary_output = False
+
+    if fmt == "json":
+        output_data = json.dumps(decompressed, indent=2)
+    else:
+        from .format_handlers import get_handler
+        _, serialize_fn = get_handler(fmt)
+        data_list = decompressed if isinstance(decompressed, list) else [decompressed]
+        output_data = serialize_fn(data_list)
+
+    if args.verbose:
+        print(f" Decompressed size: {len(output_data)} bytes")
+        print(f" Output format: {fmt.upper()}")
+        print(f" Decompression time: {decompress_time:.2f}ms")
+
+    # Write output
+    if args.output:
+        output_path = Path(args.output)
+        if binary_output:
+            output_path.write_bytes(output_data)
+        else:
+            output_path.write_text(output_data)
+        print(f"✅ Decompressed data written to: {output_path}")
+    else:
+        if binary_output:
+            sys.stdout.buffer.write(output_data)
+        else:
+            print(output_data)
+
+    return 0
+
+
+def health_command(sdk: TranslatorSDK, args) -> int:
+    """Handle health command."""
+    if args.verbose:
+        print(f"🔍 Checking API health at: {sdk.api_url}")
+
+    try:
+        health = sdk.get_health()
+
+        if health.get("status") == "healthy":
+            print("✅ API is healthy")
+        else:
+            print("⚠️ API health check failed")
+            return 1
+
+        if args.verbose:
+            print(f" Version: {health.get('version', 'unknown')}")
+            print(f" Emergent language: {'✅' if health.get('emergent_language_available') else '❌'}")
+            print(f" Oracle: {'✅' if health.get('oracle_available') else '❌'}")
+            print(f" Uptime: {health.get('uptime_seconds', 0):.1f} seconds")
+            print(f" Memory usage: {health.get('memory_usage_mb', 0):.1f} MB")
+
+        return 0
+
+    except Exception as e:
+        print(f"❌ Health check failed: {e}", file=sys.stderr)
+        return 1
+
+
+def benchmark_command(sdk: TranslatorSDK, args) -> int:
+    """Handle benchmark command."""
+    print(f"⚡ Running compression benchmark")
+    print(f" Data size: {args.size} items")
+    print(f" Iterations: {args.iterations}")
+    print("-" * 50)
+
+    # Generate test data
+    test_data = {
+        "benchmark": True,
+        "items": [f"item_{i}" for i in range(args.size)],
+        "metadata": {f"key_{i}": f"value_{i}" for i in range(min(args.size, 100))}
+    }
+
+    original_size = len(json.dumps(test_data))
+    print(f"📊 Original data size: {original_size} bytes")
+
+    # Warmup
+    if args.verbose:
+        print("🔥 Warming up...")
+    for _ in range(3):
+        sdk.compress(test_data)
+
+    # Benchmark compression
+    compression_times = []
+    compressed_sizes = []
+
+    for i in range(args.iterations):
+        start_time = time.time()
+        compressed = sdk.compress(test_data)
+        end_time = time.time()
+
+        compression_time = (end_time - start_time) * 1000
+        compression_times.append(compression_time)
+        compressed_sizes.append(len(compressed))
+
+        if args.verbose:
+            print(f" Iteration {i+1}: {compression_time:.2f}ms")
+
+    # Calculate statistics
+    avg_compression_time = sum(compression_times) / len(compression_times)
+    min_compression_time = min(compression_times)
+    max_compression_time = max(compression_times)
+
+    avg_compressed_size = sum(compressed_sizes) / len(compressed_sizes)
+    efficiency = (1 - avg_compressed_size / original_size) * 100
+
+    print("\n📊 Benchmark Results:")
+    print(f" Average compression time: {avg_compression_time:.2f}ms")
+    print(f" Min compression time: {min_compression_time:.2f}ms")
+    print(f" Max compression time: {max_compression_time:.2f}ms")
+    print(f" Average compressed size: {avg_compressed_size:.0f} bytes")
+    print(f" Compression efficiency: {efficiency:.1f}%")
+    print(f" Throughput: {1000/avg_compression_time:.1f} compressions/second")
+
+    return 0
+
+
+def stats_command(sdk: TranslatorSDK, args) -> int:
+    """Handle stats command."""
+    if args.verbose:
+        print(f"📊 Getting API statistics from: {sdk.api_url}")
+
+    try:
+        stats = sdk.get_stats()
+
+        print("📊 API Statistics:")
+        print(f" Total translations: {stats.get('total_translations', 0)}")
+        print(f" Data processed: {stats.get('total_data_processed', 0)} bytes")
+        print(f" Compression savings: {stats.get('total_compression_savings', 0)} bytes")
+        print(f" Average compression ratio: {stats.get('average_compression_ratio', 0):.3f}")
+        print(f" Requests per minute: {stats.get('requests_per_minute', 0):.1f}")
+        print(f" Uptime: {stats.get('uptime_seconds', 0) / 3600:.1f} hours")
+
+        return 0
+
+    except Exception as e:
+        print(f"❌ Failed to get stats: {e}", file=sys.stderr)
+        return 1
+
+
+if __name__ == "__main__":
+    sys.exit(main())
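The CLI above is a thin wrapper around the TranslatorSDK client it imports from the package (most likely defined in client_sdk.py, per the file listing). For programmatic use, the same surface cli.py relies on (TranslatorSDK(api_url=..., api_key=...), compress(data, intent=...), decompress(...), get_health(), get_stats()) can be called directly. A minimal sketch, assuming the SDK behaves exactly as cli.py uses it; the URL, key, and payload are placeholders:

import json

from emergent_translator import TranslatorSDK

# Placeholder endpoint and key; substitute a reachable translator API instance.
sdk = TranslatorSDK(api_url="http://localhost:8001", api_key="my-api-key")

payload = {"message": "hello", "items": [f"item_{i}" for i in range(10)]}

# Compress with one of the intents the CLI exposes
# (general, work, governance, social, resource).
compressed = sdk.compress(payload, intent="general")
print(f"Compressed to {len(compressed)} bytes")

# Recover a Python structure from the compressed bytes, mirroring what the
# decompress subcommand does before serializing to the chosen output format.
restored = sdk.decompress(compressed)
print(json.dumps(restored, indent=2))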