pltr-cli 0.11.0__py3-none-any.whl → 0.13.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pltr/__init__.py +1 -1
- pltr/cli.py +40 -0
- pltr/commands/admin.py +565 -11
- pltr/commands/aip_agents.py +333 -0
- pltr/commands/connectivity.py +309 -1
- pltr/commands/cp.py +103 -0
- pltr/commands/dataset.py +104 -4
- pltr/commands/functions.py +503 -0
- pltr/commands/language_models.py +515 -0
- pltr/commands/mediasets.py +176 -0
- pltr/commands/models.py +362 -0
- pltr/commands/ontology.py +44 -13
- pltr/commands/orchestration.py +167 -11
- pltr/commands/project.py +231 -22
- pltr/commands/resource.py +416 -17
- pltr/commands/space.py +25 -303
- pltr/commands/sql.py +54 -7
- pltr/commands/streams.py +616 -0
- pltr/commands/third_party_applications.py +82 -0
- pltr/services/admin.py +331 -3
- pltr/services/aip_agents.py +147 -0
- pltr/services/base.py +104 -1
- pltr/services/connectivity.py +139 -0
- pltr/services/copy.py +391 -0
- pltr/services/dataset.py +77 -4
- pltr/services/folder.py +6 -1
- pltr/services/functions.py +223 -0
- pltr/services/language_models.py +281 -0
- pltr/services/mediasets.py +144 -9
- pltr/services/models.py +179 -0
- pltr/services/ontology.py +48 -1
- pltr/services/orchestration.py +133 -1
- pltr/services/project.py +213 -39
- pltr/services/resource.py +229 -60
- pltr/services/space.py +24 -175
- pltr/services/sql.py +44 -20
- pltr/services/streams.py +290 -0
- pltr/services/third_party_applications.py +53 -0
- pltr/utils/formatting.py +195 -1
- pltr/utils/pagination.py +325 -0
- {pltr_cli-0.11.0.dist-info → pltr_cli-0.13.0.dist-info}/METADATA +55 -4
- pltr_cli-0.13.0.dist-info/RECORD +70 -0
- {pltr_cli-0.11.0.dist-info → pltr_cli-0.13.0.dist-info}/WHEEL +1 -1
- pltr_cli-0.11.0.dist-info/RECORD +0 -55
- {pltr_cli-0.11.0.dist-info → pltr_cli-0.13.0.dist-info}/entry_points.txt +0 -0
- {pltr_cli-0.11.0.dist-info → pltr_cli-0.13.0.dist-info}/licenses/LICENSE +0 -0
pltr/commands/streams.py
ADDED
@@ -0,0 +1,616 @@
"""
Streams management commands for Foundry.
Provides commands for managing streaming datasets and publishing records.
"""

import typer
import json
from typing import Optional
from pathlib import Path
from rich.console import Console

from ..services.streams import StreamsService
from ..utils.formatting import OutputFormatter
from ..utils.progress import SpinnerProgressTracker
from ..auth.base import ProfileNotFoundError, MissingCredentialsError
from ..utils.completion import (
    complete_rid,
    complete_profile,
    complete_output_format,
)

# Create main app and sub-apps
app = typer.Typer(help="Manage streaming datasets and streams")
dataset_app = typer.Typer(help="Manage streaming datasets")
stream_app = typer.Typer(help="Manage streams and publish records")

# Add sub-apps
app.add_typer(dataset_app, name="dataset")
app.add_typer(stream_app, name="stream")

console = Console()
formatter = OutputFormatter(console)


def parse_json_or_file(data_str: Optional[str]) -> Optional[dict]:
    """
    Parse JSON from string or file.

    Supports:
    - Inline JSON: '{"key": "value"}'
    - File reference: @data.json

    Args:
        data_str: JSON string or file reference

    Returns:
        Parsed dictionary or None

    Raises:
        FileNotFoundError: If file reference doesn't exist
        json.JSONDecodeError: If JSON is invalid
    """
    if not data_str:
        return None

    # Handle file reference
    if data_str.startswith("@"):
        file_path = Path(data_str[1:])
        if not file_path.exists():
            raise FileNotFoundError(f"File not found: {file_path}")

        with open(file_path, "r") as f:
            return json.load(f)

    # Handle inline JSON
    return json.loads(data_str)


@dataset_app.command("create")
def create_dataset(
    name: str = typer.Argument(
        ...,
        help="Dataset name",
    ),
    parent_folder_rid: str = typer.Option(
        ...,
        "--folder",
        "-f",
        help="Parent folder RID (e.g., ri.compass.main.folder.xxx)",
        autocompletion=complete_rid,
    ),
    schema: str = typer.Option(
        ...,
        "--schema",
        "-s",
        help="Stream schema as JSON or @file.json. Format: {'fieldSchemaList': [{'name': 'field', 'type': 'STRING'}]}",
    ),
    branch: Optional[str] = typer.Option(
        None,
        "--branch",
        "-b",
        help="Branch name (default: master)",
    ),
    compressed: Optional[bool] = typer.Option(
        None,
        "--compressed",
        help="Enable compression",
    ),
    partitions: Optional[int] = typer.Option(
        None,
        "--partitions",
        help="Number of partitions (default: 1). Each partition handles ~5 MB/s.",
    ),
    stream_type: Optional[str] = typer.Option(
        None,
        "--type",
        help="Stream type: HIGH_THROUGHPUT or LOW_LATENCY (default: LOW_LATENCY)",
    ),
    profile: Optional[str] = typer.Option(
        None,
        "--profile",
        "-p",
        help="Profile name",
        autocompletion=complete_profile,
    ),
    format: str = typer.Option(
        "table",
        "--format",
        help="Output format (table, json, csv)",
        autocompletion=complete_output_format,
    ),
    preview: bool = typer.Option(
        False,
        "--preview",
        help="Enable preview mode",
    ),
):
    """
    Create a new streaming dataset with an initial stream.

    The schema defines the structure of records in the stream.
    Each field must have a 'name' and 'type' (STRING, INTEGER, DOUBLE, BOOLEAN, etc.).

    Examples:

        # Create basic streaming dataset
        pltr streams dataset create my-stream \\
            --folder ri.compass.main.folder.xxx \\
            --schema '{"fieldSchemaList": [{"name": "value", "type": "STRING"}]}'

        # Create from schema file
        pltr streams dataset create sensor-data \\
            --folder ri.compass.main.folder.xxx \\
            --schema @schema.json \\
            --partitions 5 \\
            --type HIGH_THROUGHPUT

        # With specific branch
        pltr streams dataset create my-stream \\
            --folder ri.compass.main.folder.xxx \\
            --schema @schema.json \\
            --branch develop
    """
    try:
        # Parse schema
        schema_dict = parse_json_or_file(schema)
        if schema_dict is None:
            console.print("[red]Error: Schema is required[/red]")
            raise typer.Exit(1)

        with SpinnerProgressTracker().track_spinner("Creating streaming dataset"):
            service = StreamsService(profile=profile)
            result = service.create_dataset(
                name=name,
                parent_folder_rid=parent_folder_rid,
                schema=schema_dict,
                branch_name=branch,
                compressed=compressed,
                partitions_count=partitions,
                stream_type=stream_type,
                preview=preview,
            )

        console.print(
            f"[green]✓[/green] Created streaming dataset: {result.get('name')}"
        )
        console.print(f" Dataset RID: {result.get('rid')}")
        console.print(f" Stream RID: {result.get('streamRid')}")

        formatter.format_output(result, format)

    except (ProfileNotFoundError, MissingCredentialsError) as e:
        console.print(f"[red]Authentication Error: {e}[/red]")
        raise typer.Exit(1)
    except (FileNotFoundError, json.JSONDecodeError) as e:
        console.print(f"[red]Error parsing schema: {e}[/red]")
        raise typer.Exit(1)
    except Exception as e:
        console.print(f"[red]Error: {e}[/red]")
        raise typer.Exit(1)


@stream_app.command("create")
def create_stream(
    dataset_rid: str = typer.Argument(
        ...,
        help="Dataset RID (e.g., ri.foundry.main.dataset.xxx)",
        autocompletion=complete_rid,
    ),
    branch: str = typer.Option(
        ...,
        "--branch",
        "-b",
        help="Branch name to create stream on",
    ),
    schema: str = typer.Option(
        ...,
        "--schema",
        "-s",
        help="Stream schema as JSON or @file.json",
    ),
    compressed: Optional[bool] = typer.Option(
        None,
        "--compressed",
        help="Enable compression",
    ),
    partitions: Optional[int] = typer.Option(
        None,
        "--partitions",
        help="Number of partitions (default: 1)",
    ),
    stream_type: Optional[str] = typer.Option(
        None,
        "--type",
        help="Stream type: HIGH_THROUGHPUT or LOW_LATENCY",
    ),
    profile: Optional[str] = typer.Option(
        None,
        "--profile",
        "-p",
        help="Profile name",
        autocompletion=complete_profile,
    ),
    format: str = typer.Option(
        "table",
        "--format",
        help="Output format (table, json, csv)",
        autocompletion=complete_output_format,
    ),
    preview: bool = typer.Option(
        False,
        "--preview",
        help="Enable preview mode",
    ),
):
    """
    Create a new stream on a branch of an existing streaming dataset.

    Creates a new branch and stream in one operation.

    Examples:

        # Create stream on new branch
        pltr streams stream create ri.foundry.main.dataset.xxx \\
            --branch feature-branch \\
            --schema '{"fieldSchemaList": [{"name": "id", "type": "INTEGER"}]}'

        # High-throughput stream
        pltr streams stream create ri.foundry.main.dataset.xxx \\
            --branch production \\
            --schema @schema.json \\
            --partitions 10 \\
            --type HIGH_THROUGHPUT
    """
    try:
        # Parse schema
        schema_dict = parse_json_or_file(schema)
        if schema_dict is None:
            console.print("[red]Error: Schema is required[/red]")
            raise typer.Exit(1)

        with SpinnerProgressTracker().track_spinner("Creating stream"):
            service = StreamsService(profile=profile)
            result = service.create_stream(
                dataset_rid=dataset_rid,
                branch_name=branch,
                schema=schema_dict,
                compressed=compressed,
                partitions_count=partitions,
                stream_type=stream_type,
                preview=preview,
            )

        console.print(f"[green]✓[/green] Created stream on branch: {branch}")
        console.print(f" Stream RID: {result.get('streamRid')}")

        formatter.format_output(result, format)

    except (ProfileNotFoundError, MissingCredentialsError) as e:
        console.print(f"[red]Authentication Error: {e}[/red]")
        raise typer.Exit(1)
    except (FileNotFoundError, json.JSONDecodeError) as e:
        console.print(f"[red]Error parsing schema: {e}[/red]")
        raise typer.Exit(1)
    except Exception as e:
        console.print(f"[red]Error: {e}[/red]")
        raise typer.Exit(1)


@stream_app.command("get")
def get_stream(
    dataset_rid: str = typer.Argument(
        ...,
        help="Dataset RID",
        autocompletion=complete_rid,
    ),
    branch: str = typer.Option(
        ...,
        "--branch",
        "-b",
        help="Stream branch name",
    ),
    profile: Optional[str] = typer.Option(
        None,
        "--profile",
        "-p",
        help="Profile name",
        autocompletion=complete_profile,
    ),
    format: str = typer.Option(
        "table",
        "--format",
        help="Output format (table, json, csv)",
        autocompletion=complete_output_format,
    ),
    preview: bool = typer.Option(
        False,
        "--preview",
        help="Enable preview mode",
    ),
):
    """
    Get information about a stream.

    Retrieves stream metadata including schema and configuration.

    Examples:

        # Get stream on master branch
        pltr streams stream get ri.foundry.main.dataset.xxx --branch master

        # Get stream as JSON
        pltr streams stream get ri.foundry.main.dataset.xxx \\
            --branch feature-branch \\
            --format json
    """
    try:
        with SpinnerProgressTracker().track_spinner("Fetching stream information"):
            service = StreamsService(profile=profile)
            result = service.get_stream(
                dataset_rid=dataset_rid,
                stream_branch_name=branch,
                preview=preview,
            )

        formatter.format_output(result, format)

    except (ProfileNotFoundError, MissingCredentialsError) as e:
        console.print(f"[red]Authentication Error: {e}[/red]")
        raise typer.Exit(1)
    except Exception as e:
        console.print(f"[red]Error: {e}[/red]")
        raise typer.Exit(1)


@stream_app.command("publish")
def publish_record(
    dataset_rid: str = typer.Argument(
        ...,
        help="Dataset RID",
        autocompletion=complete_rid,
    ),
    branch: str = typer.Option(
        ...,
        "--branch",
        "-b",
        help="Stream branch name",
    ),
    record: str = typer.Option(
        ...,
        "--record",
        "-r",
        help="Record data as JSON or @file.json",
    ),
    view_rid: Optional[str] = typer.Option(
        None,
        "--view",
        help="View RID for partitioning",
        autocompletion=complete_rid,
    ),
    profile: Optional[str] = typer.Option(
        None,
        "--profile",
        "-p",
        help="Profile name",
        autocompletion=complete_profile,
    ),
    preview: bool = typer.Option(
        False,
        "--preview",
        help="Enable preview mode",
    ),
):
    """
    Publish a single record to a stream.

    The record must match the stream's schema.

    Examples:

        # Publish inline record
        pltr streams stream publish ri.foundry.main.dataset.xxx \\
            --branch master \\
            --record '{"id": 123, "name": "test", "timestamp": 1234567890}'

        # Publish from file
        pltr streams stream publish ri.foundry.main.dataset.xxx \\
            --branch master \\
            --record @record.json
    """
    try:
        # Parse record
        record_dict = parse_json_or_file(record)
        if record_dict is None:
            console.print("[red]Error: Record is required[/red]")
            raise typer.Exit(1)

        with SpinnerProgressTracker().track_spinner("Publishing record"):
            service = StreamsService(profile=profile)
            service.publish_record(
                dataset_rid=dataset_rid,
                stream_branch_name=branch,
                record=record_dict,
                view_rid=view_rid,
                preview=preview,
            )

        console.print("[green]✓[/green] Record published successfully")

    except (ProfileNotFoundError, MissingCredentialsError) as e:
        console.print(f"[red]Authentication Error: {e}[/red]")
        raise typer.Exit(1)
    except (FileNotFoundError, json.JSONDecodeError) as e:
        console.print(f"[red]Error parsing record: {e}[/red]")
        raise typer.Exit(1)
    except Exception as e:
        console.print(f"[red]Error: {e}[/red]")
        raise typer.Exit(1)


@stream_app.command("publish-batch")
def publish_records(
    dataset_rid: str = typer.Argument(
        ...,
        help="Dataset RID",
        autocompletion=complete_rid,
    ),
    branch: str = typer.Option(
        ...,
        "--branch",
        "-b",
        help="Stream branch name",
    ),
    records: str = typer.Option(
        ...,
        "--records",
        "-r",
        help="Records as JSON array or @file.json",
    ),
    view_rid: Optional[str] = typer.Option(
        None,
        "--view",
        help="View RID for partitioning",
        autocompletion=complete_rid,
    ),
    profile: Optional[str] = typer.Option(
        None,
        "--profile",
        "-p",
        help="Profile name",
        autocompletion=complete_profile,
    ),
    preview: bool = typer.Option(
        False,
        "--preview",
        help="Enable preview mode",
    ),
):
    """
    Publish multiple records to a stream in a batch.

    More efficient than publishing records individually.

    Examples:

        # Publish multiple records inline
        pltr streams stream publish-batch ri.foundry.main.dataset.xxx \\
            --branch master \\
            --records '[{"id": 1, "name": "alice"}, {"id": 2, "name": "bob"}]'

        # Publish from file
        pltr streams stream publish-batch ri.foundry.main.dataset.xxx \\
            --branch master \\
            --records @records.json
    """
    try:
        # Parse records
        records_list = parse_json_or_file(records)
        if not records_list or not isinstance(records_list, list):
            console.print("[red]Error: Records must be a JSON array[/red]")
            raise typer.Exit(1)

        with SpinnerProgressTracker().track_spinner("Publishing records"):
            service = StreamsService(profile=profile)
            service.publish_records(
                dataset_rid=dataset_rid,
                stream_branch_name=branch,
                records=records_list,
                view_rid=view_rid,
                preview=preview,
            )

        console.print(
            f"[green]✓[/green] Published {len(records_list)} records successfully"
        )

    except (ProfileNotFoundError, MissingCredentialsError) as e:
        console.print(f"[red]Authentication Error: {e}[/red]")
        raise typer.Exit(1)
    except (FileNotFoundError, json.JSONDecodeError) as e:
        console.print(f"[red]Error parsing records: {e}[/red]")
        raise typer.Exit(1)
    except Exception as e:
        console.print(f"[red]Error: {e}[/red]")
        raise typer.Exit(1)


@stream_app.command("reset")
def reset_stream(
    dataset_rid: str = typer.Argument(
        ...,
        help="Dataset RID",
        autocompletion=complete_rid,
    ),
    branch: str = typer.Option(
        ...,
        "--branch",
        "-b",
        help="Stream branch name to reset",
    ),
    profile: Optional[str] = typer.Option(
        None,
        "--profile",
        "-p",
        help="Profile name",
        autocompletion=complete_profile,
    ),
    format: str = typer.Option(
        "table",
        "--format",
        help="Output format (table, json, csv)",
        autocompletion=complete_output_format,
    ),
    preview: bool = typer.Option(
        False,
        "--preview",
        help="Enable preview mode",
    ),
    confirm: bool = typer.Option(
        False,
        "--confirm",
        help="Skip confirmation prompt",
    ),
):
    """
    Reset a stream, clearing all existing data.

    WARNING: This operation is irreversible and will delete all records.

    Examples:

        # Reset with confirmation
        pltr streams stream reset ri.foundry.main.dataset.xxx --branch master

        # Skip confirmation
        pltr streams stream reset ri.foundry.main.dataset.xxx \\
            --branch master \\
            --confirm
    """
    if not confirm:
        proceed = typer.confirm(
            f"⚠️ This will delete all data in stream on branch '{branch}'. Continue?"
        )
        if not proceed:
            console.print("Operation cancelled")
            raise typer.Exit(0)

    try:
        with SpinnerProgressTracker().track_spinner("Resetting stream"):
            service = StreamsService(profile=profile)
            result = service.reset_stream(
                dataset_rid=dataset_rid,
                stream_branch_name=branch,
                preview=preview,
            )

        console.print(f"[green]✓[/green] Stream reset successfully on branch: {branch}")

        formatter.format_output(result, format)

    except (ProfileNotFoundError, MissingCredentialsError) as e:
        console.print(f"[red]Authentication Error: {e}[/red]")
        raise typer.Exit(1)
    except Exception as e:
        console.print(f"[red]Error: {e}[/red]")
        raise typer.Exit(1)
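The --schema, --record, and --records options above all go through parse_json_or_file, so each accepts either inline JSON or an @file.json reference. A minimal sketch of preparing a schema file for --schema @schema.json, assuming only the fieldSchemaList shape quoted in the option's help text; the file name and field names here are illustrative, not part of the package:

import json

# Shape taken from the --schema help text above; field names are illustrative.
schema = {
    "fieldSchemaList": [
        {"name": "sensor_id", "type": "STRING"},
        {"name": "reading", "type": "DOUBLE"},
    ]
}

# Write it out so it can be passed as --schema @schema.json
with open("schema.json", "w") as f:
    json.dump(schema, f)

# parse_json_or_file (defined above) resolves both forms to the same dict:
#   parse_json_or_file('{"fieldSchemaList": [...]}')
#   parse_json_or_file("@schema.json")

The same @-prefix convention applies to the record payloads passed to the publish and publish-batch commands.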
pltr/commands/third_party_applications.py
ADDED
@@ -0,0 +1,82 @@
"""
Third-party applications management commands for Foundry.
"""

import typer
from typing import Optional
from rich.console import Console

from ..services.third_party_applications import ThirdPartyApplicationsService
from ..utils.formatting import OutputFormatter
from ..utils.progress import SpinnerProgressTracker
from ..auth.base import ProfileNotFoundError, MissingCredentialsError
from ..utils.completion import (
    complete_rid,
    complete_profile,
    complete_output_format,
    cache_rid,
)

app = typer.Typer(help="Manage third-party applications in Foundry")
console = Console()
formatter = OutputFormatter(console)


@app.command("get")
def get_application(
    application_rid: str = typer.Argument(
        ...,
        help="Third-party application Resource Identifier",
        autocompletion=complete_rid,
    ),
    profile: Optional[str] = typer.Option(
        None,
        "--profile",
        "-p",
        help="Profile name",
        autocompletion=complete_profile,
    ),
    format: str = typer.Option(
        "table",
        "--format",
        "-f",
        help="Output format (table, json, csv)",
        autocompletion=complete_output_format,
    ),
    output: Optional[str] = typer.Option(
        None, "--output", "-o", help="Output file path"
    ),
    preview: bool = typer.Option(
        False,
        "--preview",
        help="Enable preview mode",
    ),
):
    """Get detailed information about a third-party application."""
    try:
        # Cache the RID for future completions
        cache_rid(application_rid)

        service = ThirdPartyApplicationsService(profile=profile)

        with SpinnerProgressTracker().track_spinner(
            f"Fetching third-party application {application_rid}..."
        ):
            application = service.get_application(application_rid, preview=preview)

        # Format output
        if output:
            formatter.save_to_file(application, output, format)
            formatter.print_success(f"Application information saved to {output}")
        else:
            formatter.display(application, format)

    except (ProfileNotFoundError, MissingCredentialsError) as e:
        formatter.print_error(f"Authentication error: {e}")
        raise typer.Exit(1)
    except ValueError as e:
        formatter.print_error(f"Invalid request: {e}")
        raise typer.Exit(1)
    except Exception as e:
        formatter.print_error(f"Failed to get third-party application: {e}")
        raise typer.Exit(1)
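Because the module exposes a standard Typer app, the new get command can be exercised with Typer's test runner rather than the installed pltr entry point. A minimal sketch, assuming a configured profile with valid credentials; the RID below is a placeholder, not a real application identifier:

from typer.testing import CliRunner

from pltr.commands.third_party_applications import app

runner = CliRunner()
# Placeholder RID; substitute a real third-party application RID.
result = runner.invoke(app, ["get", "ri.example.placeholder", "--format", "json"])
print(result.output)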