pointblank 0.14.0__py3-none-any.whl → 0.15.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- pointblank/_constants.py +63 -0
- pointblank/_interrogation.py +883 -1
- pointblank/_spec_utils.py +1015 -0
- pointblank/_utils.py +14 -4
- pointblank/_utils_ai.py +28 -3
- pointblank/assistant.py +1 -1
- pointblank/data/api-docs.txt +1599 -76
- pointblank/draft.py +52 -3
- pointblank/validate.py +1686 -275
- {pointblank-0.14.0.dist-info → pointblank-0.15.0.dist-info}/METADATA +2 -1
- {pointblank-0.14.0.dist-info → pointblank-0.15.0.dist-info}/RECORD +15 -14
- {pointblank-0.14.0.dist-info → pointblank-0.15.0.dist-info}/WHEEL +0 -0
- {pointblank-0.14.0.dist-info → pointblank-0.15.0.dist-info}/entry_points.txt +0 -0
- {pointblank-0.14.0.dist-info → pointblank-0.15.0.dist-info}/licenses/LICENSE +0 -0
- {pointblank-0.14.0.dist-info → pointblank-0.15.0.dist-info}/top_level.txt +0 -0
pointblank/draft.py
CHANGED
@@ -38,10 +38,15 @@ class DraftValidation:
         The data to be used for drafting a validation plan.
     model
         The model to be used. This should be in the form of `provider:model` (e.g.,
-        `"anthropic:claude-
+        `"anthropic:claude-sonnet-4-5"`). Supported providers are `"anthropic"`, `"openai"`,
         `"ollama"`, and `"bedrock"`.
     api_key
         The API key to be used for the model.
+    verify_ssl
+        Whether to verify SSL certificates when making requests to the LLM provider. Set to `False`
+        to disable SSL verification (e.g., when behind a corporate firewall with self-signed
+        certificates). Defaults to `True`. Use with caution as disabling SSL verification can pose
+        security risks.

     Returns
     -------
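
The docstring above documents the new `verify_ssl` parameter alongside the existing `data`, `model`, and `api_key` arguments. A minimal sketch of the documented call surface with the default (secure) setting; the model string and dataset come from the examples later in this diff, and the explicit keyword values are illustrative only:

```python
import pointblank as pb

# Model strings use the "provider:model" form described above
data = pb.load_dataset(dataset="nycflights", tbl_type="duckdb")

pb.DraftValidation(
    data=data,
    model="anthropic:claude-sonnet-4-5",  # provider "anthropic", model "claude-sonnet-4-5"
    api_key=None,      # default; the key can instead come from an .env file
    verify_ssl=True,   # default; only set False behind SSL-intercepting proxies
)
```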
@@ -83,6 +88,33 @@ class DraftValidation:
     There's no need to have the `python-dotenv` package installed when using `.env` files in this
     way.

+    Notes on SSL Certificate Verification
+    --------------------------------------
+    By default, SSL certificate verification is enabled for all requests to LLM providers. However,
+    in certain network environments (such as corporate networks with self-signed certificates or
+    firewall proxies), you may encounter SSL certificate verification errors.
+
+    To disable SSL verification, set the `verify_ssl` parameter to `False`:
+
+    ```python
+    import pointblank as pb
+
+    data = pb.load_dataset(dataset="nycflights", tbl_type="duckdb")
+
+    # Disable SSL verification for networks with self-signed certificates
+    pb.DraftValidation(
+        data=data,
+        model="anthropic:claude-sonnet-4-5",
+        verify_ssl=False
+    )
+    ```
+
+    :::{.callout-warning}
+    Disabling SSL verification (through `verify_ssl=False`) can expose your API keys and data to
+    man-in-the-middle attacks. Only use this option in trusted network environments and when
+    absolutely necessary.
+    :::
+
     Notes on Data Sent to the Model Provider
     ----------------------------------------
     The data sent to the model provider is a JSON summary of the table. This data summary is
@@ -109,7 +141,7 @@ class DraftValidation:
     Let's look at how the `DraftValidation` class can be used to draft a validation plan for a
     table. The table to be used is `"nycflights"`, which is available here via the
     [`load_dataset()`](`pointblank.load_dataset`) function. The model to be used is
-    `"anthropic:claude-
+    `"anthropic:claude-sonnet-4-5"` (which performs very well compared to other LLMs). The
     example assumes that the API key is stored in an `.env` file as `ANTHROPIC_API_KEY`.

     ```python
@@ -119,7 +151,7 @@ class DraftValidation:
     data = pb.load_dataset(dataset="nycflights", tbl_type="duckdb")

     # Draft a validation plan for the "nycflights" table
-    pb.DraftValidation(data=data, model="anthropic:claude-
+    pb.DraftValidation(data=data, model="anthropic:claude-sonnet-4-5")
     ```

     The output will be a drafted validation plan for the `"nycflights"` table and this will appear
@@ -194,6 +226,7 @@ class DraftValidation:
    data: FrameT | Any
    model: str
    api_key: str | None = None
+   verify_ssl: bool = True
    response: str = field(init=False)

    def __post_init__(self):
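
Pieced together from the fields visible in this hunk, the dataclass surface after the change looks roughly as follows; the real class carries decorators, other members, and imports that sit outside this diff:

```python
from __future__ import annotations

from dataclasses import dataclass, field
from typing import Any


@dataclass
class DraftValidation:
    data: Any                    # FrameT | Any in the source
    model: str                   # "provider:model" string, e.g. "anthropic:claude-sonnet-4-5"
    api_key: str | None = None
    verify_ssl: bool = True      # new field; SSL verification stays on by default
    response: str = field(init=False)  # set by __post_init__ with the drafted plan
```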
@@ -280,6 +313,18 @@ class DraftValidation:
            " per line)"
        )

+       # Create httpx client with SSL verification settings
+       # This will be passed to the LLM provider's chat client
+       try:
+           import httpx  # noqa
+       except ImportError:  # pragma: no cover
+           raise ImportError(  # pragma: no cover
+               "The `httpx` package is required for SSL configuration. "
+               "Please install it using `pip install httpx`."
+           )
+
+       http_client = httpx.AsyncClient(verify=self.verify_ssl)
+
        if provider == "anthropic":  # pragma: no cover
            # Check that the anthropic package is installed
            try:
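
`httpx.AsyncClient(verify=...)` accepts more than a boolean: `verify` can also take an `ssl.SSLContext` (or a CA bundle path), so trusting a specific corporate CA is a tighter option than switching verification off. The diff does not add this; the sketch below only illustrates the underlying httpx API, and the CA path is a hypothetical placeholder:

```python
import ssl

import httpx

# What the new pointblank code does: a plain boolean toggle
insecure_client = httpx.AsyncClient(verify=False)

# Stricter alternative supported by httpx: trust a specific CA bundle instead
# ("/path/to/corporate-ca.pem" is a placeholder, not a pointblank option)
ctx = ssl.create_default_context(cafile="/path/to/corporate-ca.pem")
pinned_client = httpx.AsyncClient(verify=ctx)
```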
@@ -296,6 +341,7 @@ class DraftValidation:
                model=model_name,
                system_prompt="You are a terse assistant and a Python expert.",
                api_key=self.api_key,
+               kwargs={"http_client": http_client},
            )

        if provider == "openai":  # pragma: no cover
@@ -314,6 +360,7 @@ class DraftValidation:
                model=model_name,
                system_prompt="You are a terse assistant and a Python expert.",
                api_key=self.api_key,
+               kwargs={"http_client": http_client},
            )

        if provider == "ollama":  # pragma: no cover
@@ -331,6 +378,7 @@ class DraftValidation:
            chat = ChatOllama(  # pragma: no cover
                model=model_name,
                system_prompt="You are a terse assistant and a Python expert.",
+               kwargs={"http_client": http_client},
            )

        if provider == "bedrock":  # pragma: no cover
@@ -339,6 +387,7 @@ class DraftValidation:
            chat = ChatBedrockAnthropic(  # pragma: no cover
                model=model_name,
                system_prompt="You are a terse assistant and a Python expert.",
+               kwargs={"http_client": http_client},
            )

        self.response = str(chat.chat(prompt, stream=False, echo="none"))  # pragma: no cover
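
Each provider branch now passes `kwargs={"http_client": http_client}` into its chat constructor. Assuming the `Chat*` classes here come from the chatlas package (the imports sit outside this diff) and that chatlas forwards `kwargs` to the underlying provider SDK client, the end-to-end wiring looks roughly like the sketch below; the model name is the `model_name` half of the `provider:model` string, and the one-line prompt stands in for the much longer prompt the class actually builds:

```python
import httpx
from chatlas import ChatAnthropic  # assumption: the Chat* constructors used above are chatlas'

# The httpx client built from `verify_ssl` rides along on every request the chat makes
http_client = httpx.AsyncClient(verify=False)

chat = ChatAnthropic(
    model="claude-sonnet-4-5",
    system_prompt="You are a terse assistant and a Python expert.",
    kwargs={"http_client": http_client},  # forwarded to the provider's SDK client
)

# Same call shape as the final line of the diff, with a placeholder prompt
response = str(chat.chat("Draft a validation plan for this table.", stream=False, echo="none"))
```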
|