pltr-cli 0.7.0__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pltr/__init__.py CHANGED
@@ -1 +1 @@
-__version__ = "0.7.0"
+__version__ = "0.9.0"
pltr/commands/dataset.py CHANGED
@@ -93,11 +93,17 @@ def get_schema(
         None, "--output", "-o", help="Output file path"
     ),
 ):
-    """Get the schema of a dataset."""
+    """Get the schema of a dataset (requires API preview access)."""
     try:
         cache_rid(dataset_rid)
         service = DatasetService(profile=profile)

+        formatter.print_warning(
+            "Note: This command requires API preview access. "
+            "If you encounter an 'ApiFeaturePreviewUsageOnly' error, "
+            "use 'pltr dataset schema apply' instead to infer/apply schema."
+        )
+
         with SpinnerProgressTracker().track_spinner(
             f"Fetching schema for {dataset_rid}..."
         ):
@@ -120,7 +126,54 @@ def get_schema(
         formatter.print_error(f"Authentication error: {e}")
         raise typer.Exit(1)
     except Exception as e:
-        formatter.print_error(f"Failed to get schema: {e}")
+        if "ApiFeaturePreviewUsageOnly" in str(e):
+            formatter.print_error(
+                "This command requires API preview access. "
+                "Please use 'pltr dataset schema apply' instead."
+            )
+        else:
+            formatter.print_error(f"Failed to get schema: {e}")
+        raise typer.Exit(1)
+
+
+@schema_app.command("apply")
+def apply_schema(
+    dataset_rid: str = typer.Argument(
+        ..., help="Dataset Resource Identifier", autocompletion=complete_rid
+    ),
+    branch: str = typer.Option("master", "--branch", "-b", help="Dataset branch name"),
+    profile: Optional[str] = typer.Option(
+        None, "--profile", "-p", help="Profile name", autocompletion=complete_profile
+    ),
+    format: str = typer.Option(
+        "table",
+        "--format",
+        "-f",
+        help="Output format (table, json, csv)",
+        autocompletion=complete_output_format,
+    ),
+):
+    """Apply/infer schema for a dataset."""
+    try:
+        cache_rid(dataset_rid)
+        service = DatasetService(profile=profile)
+
+        with SpinnerProgressTracker().track_spinner(
+            f"Applying schema to dataset {dataset_rid} on branch '{branch}'..."
+        ):
+            result = service.apply_schema(dataset_rid, branch)
+
+        formatter.print_success(f"Schema applied successfully to branch '{branch}'")
+
+        # Display result if available
+        if result.get("result"):
+            formatter._format_json(result.get("result"))
+
+    except (ProfileNotFoundError, MissingCredentialsError) as e:
+        formatter.print_error(f"Authentication error: {e}")
+        raise typer.Exit(1)
+    except Exception as e:
+        formatter.print_error(f"Failed to apply schema: {e}")
         raise typer.Exit(1)


@@ -585,9 +638,30 @@ def upload_file(

     except (ProfileNotFoundError, MissingCredentialsError) as e:
         formatter.print_error(f"Authentication error: {e}")
+        raise typer.Exit(1)
+    except FileNotFoundError as e:
+        formatter.print_error(f"File error: {e}")
+        raise typer.Exit(1)
+    except RuntimeError as e:
+        # RuntimeError from our service layer contains detailed error info
+        error_msg = str(e)
+        formatter.print_error(f"Upload failed: {error_msg}")
+
+        # If it looks like our enhanced error message, extract the suggestion part
+        if ". Suggestions: " in error_msg:
+            main_error, suggestions = error_msg.split(". Suggestions: ", 1)
+            formatter.print_error(main_error)
+            formatter.print_info(f"💡 Suggestions: {suggestions}")
+
         raise typer.Exit(1)
     except Exception as e:
-        formatter.print_error(f"Failed to upload file: {e}")
+        # Fallback for any other exceptions
+        formatter.print_error(
+            f"Unexpected error during file upload: {type(e).__name__}: {e}"
+        )
+        formatter.print_info(
+            "💡 Try running the command again or check your connection"
+        )
         raise typer.Exit(1)


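A quick way to smoke-test the new 'pltr dataset schema apply' command added above is to drive the Typer app in-process. The following is a hedged sketch, not code from the wheel: it assumes pltr.cli exposes the Typer application object as app (the diff does not show this), and the dataset RID is a placeholder.

from typer.testing import CliRunner

from pltr.cli import app  # assumed: the Typer app object lives in pltr/cli.py

runner = CliRunner()
# "ri.foundry.main.dataset.xxxx" is a placeholder RID, not a real dataset
result = runner.invoke(
    app,
    ["dataset", "schema", "apply", "ri.foundry.main.dataset.xxxx", "--branch", "master"],
)
print(result.exit_code)
print(result.output)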
pltr/services/base.py CHANGED
@@ -2,10 +2,13 @@
 Base service class for Foundry API wrappers.
 """

-from typing import Any, Optional
+from typing import Any, Optional, Dict
 from abc import ABC, abstractmethod
+import requests

 from ..auth.manager import AuthManager
+from ..auth.storage import CredentialStorage
+from ..config.profiles import ProfileManager


 class BaseService(ABC):
@@ -60,3 +63,67 @@ class BaseService(ABC):
             Configured service instance
         """
         return self._get_service()
+
+    def _make_request(
+        self,
+        method: str,
+        endpoint: str,
+        data: Optional[Dict] = None,
+        json_data: Optional[Dict] = None,
+        headers: Optional[Dict] = None,
+    ) -> requests.Response:
+        """
+        Make a direct HTTP request to Foundry API.
+
+        Args:
+            method: HTTP method (GET, POST, PUT, DELETE)
+            endpoint: API endpoint path (e.g., '/foundry-schema-inference/api/...')
+            data: Form data to send
+            json_data: JSON data to send
+            headers: Additional headers
+
+        Returns:
+            Response object
+
+        Raises:
+            requests.HTTPError: If request fails
+        """
+        # Get credentials for authentication
+        storage = CredentialStorage()
+        profile_manager = ProfileManager()
+        profile_name = self.profile or profile_manager.get_active_profile()
+        if not profile_name:
+            from ..auth.base import ProfileNotFoundError
+
+            raise ProfileNotFoundError(
+                "No profile specified and no default profile configured. "
+                "Run 'pltr configure configure' to set up authentication."
+            )
+        credentials = storage.get_profile(profile_name)
+
+        # Build full URL
+        host = credentials.get("host", "").rstrip("/")
+        url = f"{host}{endpoint}"
+
+        # Set up headers with authentication
+        request_headers = {
+            "Authorization": f"Bearer {credentials.get('token')}",
+            "Content-Type": "application/json",
+            "Accept": "application/json",
+        }
+        if headers:
+            request_headers.update(headers)
+
+        # Make the request
+        response = requests.request(
+            method=method,
+            url=url,
+            data=data,
+            json=json_data,
+            headers=request_headers,
+        )
+
+        # Raise an error for bad status codes
+        response.raise_for_status()
+
+        return response
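For orientation, here is a hedged sketch of how a subclass could use the new _make_request helper; it is not code from this release. ExampleService, its endpoint, and the assumption that _get_service() is the abstract hook a subclass must implement are all illustrative.

from typing import Any, Dict

from pltr.services.base import BaseService


class ExampleService(BaseService):
    """Illustrative subclass; not part of pltr-cli."""

    def _get_service(self) -> Any:
        # Assumed abstract hook: a real service returns a configured SDK client here.
        raise NotImplementedError

    def ping(self) -> Dict[str, Any]:
        # Hypothetical endpoint; _make_request adds auth headers and calls raise_for_status().
        response = self._make_request("GET", "/api/v2/example/ping")
        return response.json() if response.text else {}

Assuming BaseService accepts a profile keyword (as DatasetService(profile=profile) elsewhere in this diff suggests), ExampleService(profile="dev").ping() would then issue an authenticated GET against the host stored for that profile.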
pltr/services/dataset.py CHANGED
@@ -56,6 +56,33 @@ class DatasetService(BaseService):
         except Exception as e:
             raise RuntimeError(f"Failed to get schema for dataset {dataset_rid}: {e}")

+    def apply_schema(self, dataset_rid: str, branch: str = "master") -> Dict[str, Any]:
+        """
+        Apply/infer schema for a dataset using the schema inference API.
+
+        Args:
+            dataset_rid: Dataset Resource Identifier
+            branch: Dataset branch name (default: "master")
+
+        Returns:
+            Schema inference result
+        """
+        try:
+            endpoint = f"/foundry-schema-inference/api/datasets/{dataset_rid}/branches/{branch}/schema"
+            response = self._make_request("POST", endpoint, json_data={})
+
+            # Parse the response
+            result = response.json() if response.text else {}
+
+            return {
+                "dataset_rid": dataset_rid,
+                "branch": branch,
+                "status": "Schema applied successfully",
+                "result": result,
+            }
+        except Exception as e:
+            raise RuntimeError(f"Failed to apply schema for dataset {dataset_rid}: {e}")
+
     def put_schema(
         self,
         dataset_rid: str,
@@ -319,9 +346,93 @@ class DatasetService(BaseService):
                 "transaction_rid": getattr(result, "transaction_rid", transaction_rid),
             }
         except Exception as e:
-            raise RuntimeError(
-                f"Failed to upload file {file_path} to dataset {dataset_rid}: {e}"
-            )
+            # Try to extract more detailed error information
+            error_msg = str(e).strip()
+            error_type = type(e).__name__
+
+            # Check for common HTTP/API errors
+            if hasattr(e, "response") and hasattr(e.response, "status_code"):
+                status_code = e.response.status_code
+                if hasattr(e.response, "text"):
+                    response_text = e.response.text[:500]  # Limit to 500 chars
+                    error_details = f"HTTP {status_code}: {response_text}"
+                else:
+                    error_details = f"HTTP {status_code}"
+                error_msg = f"{error_details} ({error_type}: {error_msg})"
+            elif hasattr(e, "status_code"):
+                error_msg = f"HTTP {e.status_code}: {error_msg}"
+            elif hasattr(e, "message"):
+                error_msg = f"{error_type}: {e.message}"
+            else:
+                if error_msg:
+                    error_msg = f"{error_type}: {error_msg}"
+                else:
+                    error_msg = f"{error_type} (no additional details available)"
+
+            # Add context about what might have gone wrong
+            context_hints = []
+            error_lower = error_msg.lower()
+
+            if (
+                "permission" in error_lower
+                or "forbidden" in error_lower
+                or "401" in error_msg
+                or "403" in error_msg
+            ):
+                context_hints.append(
+                    "Check your authentication credentials and dataset permissions"
+                )
+            if "not found" in error_lower or "404" in error_msg:
+                context_hints.append(
+                    "Verify the dataset RID and transaction RID are correct"
+                )
+            if "transaction" in error_lower:
+                context_hints.append(
+                    "Check if the transaction is still open and not expired"
+                )
+            if "schema" in error_lower or "validation" in error_lower:
+                context_hints.append(
+                    "The file might not match the expected dataset schema"
+                )
+            if (
+                "invalidparametercombination" in error_lower
+                or "invalid parameter" in error_lower
+            ):
+                context_hints.append(
+                    "The combination of parameters (dataset RID, transaction RID, branch) may be invalid"
+                )
+                context_hints.append(
+                    "Try without --transaction-rid, or verify the transaction belongs to this dataset"
+                )
+            if (
+                "opentransactionalreadyexists" in error_lower
+                or "transaction already exists" in error_lower
+            ):
+                context_hints.append(
+                    "There's already an open transaction for this dataset"
+                )
+                context_hints.append(
+                    "Use the existing transaction with --transaction-rid, or commit/abort it first"
+                )
+                context_hints.append(
+                    "List transactions with: pltr dataset transactions list "
+                    + dataset_rid
+                )
+
+            # Try to get more detailed error information from the exception
+            if hasattr(e, "__dict__"):
+                for attr in ["detail", "details", "error_message", "description"]:
+                    if hasattr(e, attr):
+                        detail = getattr(e, attr)
+                        if detail and str(detail).strip():
+                            error_msg += f" - {detail}"
+                            break
+
+            full_error = f"Failed to upload file {file_path.name} to dataset {dataset_rid}: {error_msg}"
+            if context_hints:
+                full_error += f". Suggestions: {'; '.join(context_hints)}"
+
+            raise RuntimeError(full_error)

     def download_file(
         self,
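One detail worth noting from the two hunks above is the informal contract between the service and command layers: the service appends hints after a literal ". Suggestions: " separator, and the upload command in commands/dataset.py splits on that same marker. Below is a minimal sketch of that round trip; the file name, RID, and error text are made-up placeholders, not values from this release.

# Service side: build the enhanced error string (placeholder values).
hints = [
    "There's already an open transaction for this dataset",
    "Use the existing transaction with --transaction-rid, or commit/abort it first",
]
full_error = "Failed to upload file data.csv to dataset ri.xxxx: HTTP 409"
if hints:
    full_error += f". Suggestions: {'; '.join(hints)}"

# Command side: recover the main error and the hint list for display.
if ". Suggestions: " in full_error:
    main_error, suggestions = full_error.split(". Suggestions: ", 1)
    print(main_error)
    print(f"Suggestions: {suggestions}")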
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pltr-cli
-Version: 0.7.0
+Version: 0.9.0
 Summary: Command-line interface for Palantir Foundry APIs
 Project-URL: Homepage, https://github.com/anjor/pltr-cli
 Project-URL: Repository, https://github.com/anjor/pltr-cli
@@ -1,4 +1,4 @@
-pltr/__init__.py,sha256=RaANGbRu5e-vehwXI1-Qe2ggPPfs1TQaZj072JdbLk4,22
+pltr/__init__.py,sha256=H9NWRZb7NbeRRPLP_V1fARmLNXranorVM-OOY-8_2ug,22
 pltr/__main__.py,sha256=HWJ49UoAYBQCf8kjuySPmBTuUjTZrOx-y6PzMTyS1KE,879
 pltr/cli.py,sha256=DikRsWsU7QWvRWHgB6wZIct916ebWyaub7PlAjKJXws,2664
 pltr/auth/__init__.py,sha256=G0V-Rh25FaJsH2nhrf146XQQG_ApdbyPJNuHJC25kgk,38
@@ -13,7 +13,7 @@ pltr/commands/alias.py,sha256=r9xMsQNrGvaixSlspzoO2IXQ44LFXuZM4itt8vC0dRc,6862
 pltr/commands/completion.py,sha256=YTxaRL4-rDs5n7aXf3ogFsxbHVJUBo_HiBbd0fbBPZ0,10870
 pltr/commands/configure.py,sha256=oYj-VlOEj3MDwtB2RC4bYOYzI_sXTanPnz7y1GmMTqY,4800
 pltr/commands/connectivity.py,sha256=m8_BYwHij_5IbrYFTU_SYYtbqLCjxA8VIQpbdlWJqHs,14758
-pltr/commands/dataset.py,sha256=NqGGF5IGhLGuy6FZxG-hd0p6yWlrVblzNwVqNjv3z20,50536
+pltr/commands/dataset.py,sha256=zuYtBXAGcfRjxE7cP9Hsz2tqSlsdNzdIflGKwytHbVI,53346
 pltr/commands/folder.py,sha256=IAPPA3Smk1IWqThneEtZ08Zp79vDKVUabSkL_nDvUWk,10679
 pltr/commands/mediasets.py,sha256=FXq7OtYU9wLgUxQFcS_fkA4i_CozGnsYKxh8GOSI0ok,15342
 pltr/commands/ontology.py,sha256=zUgSrmv8xi26SQK7GsM3qusgR9Wuka0GyzE7L8DkduE,18317
@@ -31,9 +31,9 @@ pltr/config/profiles.py,sha256=XMUIp6Ez5LNC6rGXZe2JLH7IKepXhARtuc8ASUA9FYA,3431
 pltr/config/settings.py,sha256=bfIiosPqH_W73TOHS71DvgZdAHka4fJDopU1SvBRFuQ,2908
 pltr/services/__init__.py,sha256=zQpgrqPdAkZI-nobi33mctU2-iGNgazzvjBVY8YRbSQ,101
 pltr/services/admin.py,sha256=8FjExmDeIKeVqkAxM83SVvpp_pH9W-Q33cgVs6BHxLQ,9957
-pltr/services/base.py,sha256=R2G781FI-sXtjUyLd91bVnmLb4cYZI3G8U5ndR9NLA4,1593
+pltr/services/base.py,sha256=JF9cyYf7njZuj1ldOLdgzIDhJjOfazBvXPNR-gKVnMY,3682
 pltr/services/connectivity.py,sha256=34kazXhue5gNi1_2s2R5Ma4VQe6jP25CO-ztiPhCeZw,10548
-pltr/services/dataset.py,sha256=23EBeJrFZkUBOU8EJcf5uMZyu-10rmIwMVGDZ4RZ2lI,38827
+pltr/services/dataset.py,sha256=UuPJ11OryJZg2zUSRdtwJq_pj9unE24-mYGr5oBWobU,43602
 pltr/services/folder.py,sha256=mWElyvn-wXPB5sv8Ik_dLeW5JM6jZg3g9KKBk6UcrlQ,5389
 pltr/services/mediasets.py,sha256=HgHNFWoG9r-5xupANVOxHg_h5EKsBDl6PsO8hwdbm28,9854
 pltr/services/ontology.py,sha256=iW7qRK8ptlw-u4eAwLNC-mdzLoLZzh7SRqJyok2c3GU,14883
@@ -48,8 +48,8 @@ pltr/utils/alias_resolver.py,sha256=DIF7P1UnUU8kqocJfIDEWjYq4s8_0KfqRZBbECeZEh8,
 pltr/utils/completion.py,sha256=bjeqjleEfB2YcQFpcxvF0GoQ763F6KBbULSZC4FWY_g,4980
 pltr/utils/formatting.py,sha256=38g3G2T5WUFKCCKo42NIBR8_Rdl4pp0ytCEZURoV3l4,50275
 pltr/utils/progress.py,sha256=BKYbiLO61uhQbibabU7pxvvbAWMRLRmqk4pZldBQK_g,9053
-pltr_cli-0.7.0.dist-info/METADATA,sha256=yuvIf5pOqN1pl_I_s6czGO8IeXVfEgZuyLGFsKjC8mw,17714
-pltr_cli-0.7.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-pltr_cli-0.7.0.dist-info/entry_points.txt,sha256=8tvEcW04kA_oAE2Dwwu-Og9efjl4ESJvs4AzlP2KBdQ,38
-pltr_cli-0.7.0.dist-info/licenses/LICENSE,sha256=6VUFd_ytnOBD2O1tmkKrA-smigi9QEhYr_tge4h4z8Y,1070
-pltr_cli-0.7.0.dist-info/RECORD,,
+pltr_cli-0.9.0.dist-info/METADATA,sha256=Fddmpgwz08LfEHN4sjHymbv0a69Pi9Ss7hA7t-Oxx08,17714
+pltr_cli-0.9.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+pltr_cli-0.9.0.dist-info/entry_points.txt,sha256=8tvEcW04kA_oAE2Dwwu-Og9efjl4ESJvs4AzlP2KBdQ,38
+pltr_cli-0.9.0.dist-info/licenses/LICENSE,sha256=6VUFd_ytnOBD2O1tmkKrA-smigi9QEhYr_tge4h4z8Y,1070
+pltr_cli-0.9.0.dist-info/RECORD,,