ostruct-cli 0.4.0__py3-none-any.whl → 0.6.0__py3-none-any.whl

@@ -0,0 +1,43 @@
+ """Token estimation utilities."""
+
+ from typing import Any, Dict, List, Union
+
+ import tiktoken
+
+
+ def estimate_tokens_with_encoding(
+     messages: Union[str, Dict[str, str], List[Dict[str, str]]],
+     model: str,
+     encoder: Any = None,
+ ) -> int:
+     """Estimate the number of tokens in a chat completion.
+
+     Args:
+         messages: Message content - can be string, single message dict, or list of messages
+         model: Model name
+         encoder: Optional tiktoken encoder for testing
+
+     Returns:
+         int: Estimated token count
+     """
+     if encoder is None:
+         # Use o200k_base for gpt-4o, o1, and o3 models
+         if model.startswith(("gpt-4o", "o1", "o3")):
+             encoder = tiktoken.get_encoding("o200k_base")
+         else:
+             encoder = tiktoken.get_encoding("cl100k_base")
+
+     if isinstance(messages, str):
+         return len(encoder.encode(messages))
+     elif isinstance(messages, dict):
+         return len(encoder.encode(str(messages.get("content", ""))))
+     else:
+         num_tokens = 0
+         for message in messages:
+             num_tokens += 4  # message overhead
+             for key, value in message.items():
+                 num_tokens += len(encoder.encode(str(value)))
+                 if key == "name":
+                     num_tokens -= 1  # role is omitted
+             num_tokens += 2  # reply priming
+         return num_tokens
@@ -0,0 +1,109 @@
+ """Validators for CLI options and arguments."""
+
+ import json
+ from pathlib import Path
+ from typing import Any, List, Optional, Tuple, Union
+
+ import click
+
+ from .errors import InvalidJSONError, VariableNameError
+
+
+ def validate_name_path_pair(
+     ctx: click.Context,
+     param: click.Parameter,
+     value: List[Tuple[str, Union[str, Path]]],
+ ) -> List[Tuple[str, Union[str, Path]]]:
+     """Validate name/path pairs for files and directories.
+
+     Args:
+         ctx: Click context
+         param: Click parameter
+         value: List of (name, path) tuples
+
+     Returns:
+         List of validated (name, Path) tuples
+
+     Raises:
+         click.BadParameter: If validation fails
+     """
+     if not value:
+         return value
+
+     result: List[Tuple[str, Union[str, Path]]] = []
+     for name, path in value:
+         if not name.isidentifier():
+             raise click.BadParameter(f"Invalid variable name: {name}")
+         result.append((name, Path(path)))
+     return result
+
+
+ def validate_variable(
+     ctx: click.Context, param: click.Parameter, value: Optional[List[str]]
+ ) -> Optional[List[Tuple[str, str]]]:
+     """Validate name=value format for simple variables.
+
+     Args:
+         ctx: Click context
+         param: Click parameter
+         value: List of "name=value" strings
+
+     Returns:
+         List of validated (name, value) tuples
+
+     Raises:
+         click.BadParameter: If validation fails
+     """
+     if not value:
+         return None
+
+     result = []
+     for var in value:
+         if "=" not in var:
+             raise click.BadParameter(
+                 f"Variable must be in format name=value: {var}"
+             )
+         name, val = var.split("=", 1)
+         if not name.isidentifier():
+             raise click.BadParameter(f"Invalid variable name: {name}")
+         result.append((name, val))
+     return result
+
+
+ def validate_json_variable(
+     ctx: click.Context, param: click.Parameter, value: Optional[List[str]]
+ ) -> Optional[List[Tuple[str, Any]]]:
+     """Validate JSON variable format.
+
+     Args:
+         ctx: Click context
+         param: Click parameter
+         value: List of "name=json_string" values
+
+     Returns:
+         List of validated (name, parsed_json) tuples
+
+     Raises:
+         InvalidJSONError or VariableNameError: If validation fails
+     """
+     if not value:
+         return None
+
+     result = []
+     for var in value:
+         if "=" not in var:
+             raise InvalidJSONError(
+                 f'JSON variable must be in format name=\'{{"json":"value"}}\': {var}'
+             )
+         name, json_str = var.split("=", 1)
+         if not name.isidentifier():
+             raise VariableNameError(f"Invalid variable name: {name}")
+         try:
+             json_value = json.loads(json_str)
+             result.append((name, json_value))
+         except json.JSONDecodeError as e:
+             raise InvalidJSONError(
+                 f"Invalid JSON value for variable {name!r}: {json_str!r}",
+                 context={"variable_name": name},
+             ) from e
+     return result
@@ -0,0 +1,404 @@
+ Metadata-Version: 2.3
+ Name: ostruct-cli
+ Version: 0.6.0
+ Summary: CLI for OpenAI Structured Output
+ Author: Yaniv Golan
+ Author-email: yaniv@golan.name
+ Requires-Python: >=3.10,<4.0
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
+ Requires-Dist: cachetools (>=5.3.2,<6.0.0)
+ Requires-Dist: chardet (>=5.0.0,<6.0.0)
+ Requires-Dist: click (>=8.1.7,<9.0.0)
+ Requires-Dist: ijson (>=3.2.3,<4.0.0)
+ Requires-Dist: jsonschema (>=4.23.0,<5.0.0)
+ Requires-Dist: openai (>=1.0.0,<2.0.0)
+ Requires-Dist: openai-structured (>=2.0.0,<3.0.0)
+ Requires-Dist: pydantic (>=2.6.3,<3.0.0)
+ Requires-Dist: pyyaml (>=6.0.2,<7.0.0)
+ Requires-Dist: tiktoken (==0.9.0)
+ Requires-Dist: tomli (>=2.0.1,<3.0.0) ; python_version < "3.11"
+ Requires-Dist: typing-extensions (>=4.9.0,<5.0.0)
+ Requires-Dist: werkzeug (>=3.1.3,<4.0.0)
+ Description-Content-Type: text/markdown
+
+ # ostruct-cli
+
+ [![PyPI version](https://badge.fury.io/py/ostruct-cli.svg)](https://badge.fury.io/py/ostruct-cli)
+ [![Python Versions](https://img.shields.io/pypi/pyversions/ostruct-cli.svg)](https://pypi.org/project/ostruct-cli)
+ [![Documentation Status](https://readthedocs.org/projects/ostruct/badge/?version=latest)](https://ostruct.readthedocs.io/en/latest/?badge=latest)
+ [![CI](https://github.com/yaniv-golan/ostruct/actions/workflows/ci.yml/badge.svg)](https://github.com/yaniv-golan/ostruct/actions/workflows/ci.yml)
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
+
+ Command-line interface for working with OpenAI models and structured output, powered by the [openai-structured](https://github.com/yaniv-golan/openai-structured) library.
+
+ ## Features
+
+ - Generate structured JSON output from natural language using OpenAI models and a JSON schema
+ - Rich template system for defining prompts (Jinja2-based)
+ - Automatic token counting and context window management
+ - Streaming support for real-time output
+ - Secure handling of sensitive data
+
+ ## Installation
+
+ ### For Users
+
+ To install the latest stable version from PyPI:
+
+ ```bash
+ pip install ostruct-cli
+ ```
+
+ ### For Developers
+
+ If you plan to contribute to the project, see the [Development Setup](#development-setup) section below for instructions on setting up the development environment with Poetry.
+
+ ## Shell Completion
+
+ ostruct-cli supports shell completion for Bash, Zsh, and Fish shells. To enable it:
+
+ ### Bash
+
+ Add this to your `~/.bashrc`:
+
+ ```bash
+ eval "$(_OSTRUCT_COMPLETE=bash_source ostruct)"
+ ```
+
+ ### Zsh
+
+ Add this to your `~/.zshrc`:
+
+ ```bash
+ eval "$(_OSTRUCT_COMPLETE=zsh_source ostruct)"
+ ```
+
+ ### Fish
+
+ Add this to your `~/.config/fish/completions/ostruct.fish`:
+
+ ```fish
+ eval (env _OSTRUCT_COMPLETE=fish_source ostruct)
+ ```
+
+ After adding the appropriate line, restart your shell or source the configuration file.
+ Shell completion will help you with:
+
+ - Command options and their arguments
+ - File paths for template and schema files
+ - Directory paths for `-d` and `--base-dir` options
+ - And more!
+
+ ## Quick Start
+
+ 1. Set your OpenAI API key:
+
+ ```bash
+ export OPENAI_API_KEY=your-api-key
+ ```
+
+ ### Example 1: Using stdin (Simplest)
+
+ 1. Create a template file `extract_person.j2`:
+
+ ```jinja
+ Extract information about the person from this text: {{ stdin }}
+ ```
+
+ 2. Create a schema file `schema.json`:
+
+ ```json
+ {
+   "type": "object",
+   "properties": {
+     "person": {
+       "type": "object",
+       "properties": {
+         "name": {
+           "type": "string",
+           "description": "The person's full name"
+         },
+         "age": {
+           "type": "integer",
+           "description": "The person's age"
+         },
+         "occupation": {
+           "type": "string",
+           "description": "The person's job or profession"
+         }
+       },
+       "required": ["name", "age", "occupation"],
+       "additionalProperties": false
+     }
+   },
+   "required": ["person"],
+   "additionalProperties": false
+ }
+ ```
+
+ 3. Run the CLI:
+
+ ```bash
+ # Basic usage
+ echo "John Smith is a 35-year-old software engineer" | ostruct run extract_person.j2 schema.json
+
+ # For longer text using heredoc
+ cat << EOF | ostruct run extract_person.j2 schema.json
+ John Smith is a 35-year-old software engineer
+ working at Tech Corp. He has been programming
+ for over 10 years.
+ EOF
+
+ # With advanced options
+ echo "John Smith is a 35-year-old software engineer" | \
+   ostruct run extract_person.j2 schema.json \
+   --model gpt-4o \
+   --sys-prompt "Extract precise information about the person" \
+   --temperature 0.7
+ ```
+
+ The command will output:
+
+ ```json
+ {
+   "person": {
+     "name": "John Smith",
+     "age": 35,
+     "occupation": "software engineer"
+   }
+ }
+ ```
+
+ ### Example 2: Processing a Single File
+
+ 1. Create a template file `extract_from_file.j2`:
+
+ ```jinja
+ Extract information about the person from this text: {{ text.content }}
+ ```
+
+ 2. Use the same schema file `schema.json` as above.
+
+ 3. Run the CLI:
+
+ ```bash
+ # Basic usage
+ ostruct run extract_from_file.j2 schema.json -f text input.txt
+
+ # With advanced options
+ ostruct run extract_from_file.j2 schema.json \
+   -f text input.txt \
+   --model gpt-4o \
+   --max-output-tokens 1000 \
+   --temperature 0.7
+ ```
+
+ The command will output:
+
+ ```json
+ {
+   "person": {
+     "name": "John Smith",
+     "age": 35,
+     "occupation": "software engineer"
+   }
+ }
+ ```
+
+ ### Example 3: Processing Multiple Files
+
+ 1. Create a template file `extract_from_profiles.j2`:
+
+ ```jinja
+ Extract information about the people from this data:
+
+ {% for profile in profiles %}
+ == {{ profile.name }}
+
+ {{ profile.content }}
+
+ {% endfor %}
+ ```
+
+ 2. Use the same schema file `schema.json` as above, but updated for multiple people:
+
+ ```json
+ {
+   "type": "object",
+   "properties": {
+     "people": {
+       "type": "array",
+       "items": {
+         "type": "object",
+         "properties": {
+           "name": {
+             "type": "string",
+             "description": "The person's full name"
+           },
+           "age": {
+             "type": "integer",
+             "description": "The person's age"
+           },
+           "occupation": {
+             "type": "string",
+             "description": "The person's job or profession"
+           }
+         },
+         "required": ["name", "age", "occupation"],
+         "additionalProperties": false
+       }
+     }
+   },
+   "required": ["people"],
+   "additionalProperties": false
+ }
+ ```
+
+ 3. Run the CLI:
+
+ ```bash
+ # Basic usage
+ ostruct run extract_from_profiles.j2 schema.json -p profiles "profiles/*.txt"
+
+ # With advanced options
+ ostruct run extract_from_profiles.j2 schema.json \
+   -p profiles "profiles/*.txt" \
+   --model gpt-4o \
+   --sys-prompt "Extract precise information about the person" \
+   --temperature 0.5
+ ```
+
+ The command will output:
+
+ ```json
+ {
+   "people": [
+     {
+       "name": "John Smith",
+       "age": 35,
+       "occupation": "software engineer"
+     },
+     {
+       "name": "Jane Doe",
+       "age": 28,
+       "occupation": "data scientist"
+     }
+   ]
+ }
+ ```
+
+ ### About Template Files
+
+ Template files use the `.j2` extension to indicate they contain Jinja2 template syntax. This convention:
+
+ - Enables proper syntax highlighting in most editors
+ - Makes it clear the file contains template logic
+ - Follows industry standards for Jinja2 templates
+
+ ## CLI Options
+
+ The CLI revolves around a single subcommand called `run`. Basic usage:
+
+ ```bash
+ ostruct run <TASK_TEMPLATE> <SCHEMA_FILE> [OPTIONS]
+ ```
+
+ Common options include (a combined example follows this list):
+
+ - File & Directory Inputs:
+   - `-f <NAME> <PATH>`: Map a single file to a variable name
+   - `-d <NAME> <DIR>`: Map a directory to a variable name
+   - `-p <NAME> <PATTERN>`: Map files matching a glob pattern to a variable name
+   - `-R, --recursive`: Enable recursive directory/pattern scanning
+
+ - Variables:
+   - `-V name=value`: Define a simple string variable
+   - `-J name='{"key":"value"}'`: Define a JSON variable
+
+ - Model Parameters:
+   - `-m, --model MODEL`: Select the OpenAI model (supported: gpt-4o, o1, o3-mini)
+   - `--temperature FLOAT`: Set sampling temperature (0.0-2.0)
+   - `--max-output-tokens INT`: Set maximum output tokens
+   - `--top-p FLOAT`: Set top-p sampling parameter (0.0-1.0)
+   - `--frequency-penalty FLOAT`: Adjust frequency penalty (-2.0-2.0)
+   - `--presence-penalty FLOAT`: Adjust presence penalty (-2.0-2.0)
+   - `--reasoning-effort [low|medium|high]`: Control model reasoning effort
+
+ - System Prompt:
+   - `--sys-prompt TEXT`: Provide system prompt directly
+   - `--sys-file FILE`: Load system prompt from file
+   - `--ignore-task-sysprompt`: Ignore system prompt in template frontmatter
+
+ - API Configuration:
+   - `--api-key KEY`: OpenAI API key (defaults to OPENAI_API_KEY env var)
+   - `--timeout FLOAT`: API timeout in seconds (default: 60.0)
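+
+ The groups above compose freely in a single `run` invocation. A hedged sketch (the template, schema, and input names here are illustrative placeholders, not files shipped with the package):
+
+ ```bash
+ ostruct run analyze.j2 schema.json \
+   -f config config.yaml \
+   -d source src/ -R \
+   -V env=production \
+   -J settings='{"max_items": 10, "strict": true}' \
+   --sys-file system_prompt.txt \
+   --model gpt-4o \
+   --temperature 0.7 \
+   --timeout 120.0
+ ```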
+
+ ## Debug Options
+
+ - `--debug-validation`: Show detailed schema validation debugging
+ - `--debug-openai-stream`: Enable low-level debug output for OpenAI streaming
+ - `--progress-level {none,basic,detailed}`: Set progress reporting level
+   - `none`: No progress indicators
+   - `basic`: Show key operation steps (default)
+   - `detailed`: Show all steps with additional info
+ - `--show-model-schema`: Display the generated Pydantic model schema
+ - `--verbose`: Enable verbose logging
+ - `--dry-run`: Validate and render template without making API calls
+ - `--no-progress`: Disable all progress indicators
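+
+ `--dry-run` is handy for checking a template and schema before spending tokens; a short sketch reusing the files from Example 1 above:
+
+ ```bash
+ # Render the template and validate the schema without calling the API
+ echo "John Smith is a 35-year-old software engineer" | \
+   ostruct run extract_person.j2 schema.json --dry-run --verbose
+
+ # Make the real call with detailed progress reporting
+ echo "John Smith is a 35-year-old software engineer" | \
+   ostruct run extract_person.j2 schema.json --progress-level detailed
+ ```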
+
+ All debug and error logs are written to:
+
+ - `~/.ostruct/logs/ostruct.log`: General application logs
+ - `~/.ostruct/logs/openai_stream.log`: OpenAI streaming operations logs
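+
+ To follow these logs during a run (plain shell tooling, not an ostruct option):
+
+ ```bash
+ tail -f ~/.ostruct/logs/ostruct.log
+ ```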
+
+ For more detailed documentation and examples, visit our [documentation](https://ostruct.readthedocs.io/).
+
+ ## Development
+
+ To contribute or report issues, please visit our [GitHub repository](https://github.com/yaniv-golan/ostruct).
+
+ ## Development Setup
+
+ 1. Clone the repository:
+
+ ```bash
+ git clone https://github.com/yaniv-golan/ostruct.git
+ cd ostruct
+ ```
+
+ 2. Install Poetry if you haven't already:
+
+ ```bash
+ curl -sSL https://install.python-poetry.org | python3 -
+ ```
+
+ 3. Install dependencies:
+
+ ```bash
+ poetry install
+ ```
+
+ 4. Install openai-structured in editable mode:
+
+ ```bash
+ poetry add --editable ../openai-structured # Adjust path as needed
+ ```
+
+ 5. Run tests:
+
+ ```bash
+ poetry run pytest
+ ```
+
+ ## Contributing
+
+ Contributions are welcome! Please feel free to submit a Pull Request.
+
+ ## License
+
+ This project is licensed under the MIT License - see the LICENSE file for details.
+
@@ -0,0 +1,43 @@
+ ostruct/__init__.py,sha256=X6zo6V7ZNMv731Wi388aTVQngD1410ExGwGx4J6lpyo,187
+ ostruct/cli/__init__.py,sha256=sYHKT6o1kFy1acbXejzAvVm8Cy8U91Yf1l4DlzquHKg,409
+ ostruct/cli/base_errors.py,sha256=S1cQxoiALbXKPxzgLo6XdSWpzPRb7RKz0QARmu9Zt4g,5987
+ ostruct/cli/cache_manager.py,sha256=ej3KrRfkKKZ_lEp2JswjbJ5bW2ncsvna9NeJu81cqqs,5192
+ ostruct/cli/cli.py,sha256=wfO5Z8PPoP8eUn5CfhxrjrdMzfbvr4ryo_tsRST0LlU,74588
+ ostruct/cli/click_options.py,sha256=WbRJdB9sO63ChN3fnCP7XWs73DHKl0C1ervfwL11am0,11371
+ ostruct/cli/errors.py,sha256=zJdJ-AyzjCE8glVKbJGAcB-Mz1J1SlzTDJDmhqAVFYc,14930
+ ostruct/cli/exit_codes.py,sha256=uNjvQeUGwU1mlUJYIDrExAn7YlwOXZo603yLAwpqIwk,338
+ ostruct/cli/file_info.py,sha256=ilpT8IuckfhadLF1QQAPLXJp7p8kVpffDEEJ2erHPZU,14485
+ ostruct/cli/file_list.py,sha256=jLuCd1ardoAXX8FNwPgIqEM-ixzr1xP5ZSqXo2lmrj0,11270
+ ostruct/cli/file_utils.py,sha256=J3-6fbEGQ7KD_bU81pAxueHLv9XV0X7f8FSMt_0AJGQ,22537
+ ostruct/cli/path_utils.py,sha256=j44q1OoLkqMErgK-qEuhuIZ1VyzqRIvNgxR1et9PoXA,4813
+ ostruct/cli/progress.py,sha256=rj9nVEco5UeZORMbzd7mFJpFGJjbH9KbBFh5oTE5Anw,3415
+ ostruct/cli/schema_validation.py,sha256=ohEuxJ0KF93qphj0JSZDnrxDn0C2ZU37g-U2JY03onM,8154
+ ostruct/cli/security/__init__.py,sha256=CQpkCgTFYlA1p6atpQeNgIKtE4LZGUKt4EbytbGKpCs,846
+ ostruct/cli/security/allowed_checker.py,sha256=N5UXlpjdj5zAbKk-lRDlHiHV3KtQHtJNhtZI_qGB4zw,1638
+ ostruct/cli/security/base.py,sha256=q9YUdHEj2eg5w8GEw5403E9OQKIjZbEiaWsvYFnCGLw,1359
+ ostruct/cli/security/case_manager.py,sha256=I_ZJSyntLuGx5qVzze559CI-OxsaNPSibkAN8zZ7PvE,2345
+ ostruct/cli/security/errors.py,sha256=VZDOGGD-jYLf6E5gCkKxrE34RJXJP_CPWGOF5jV_r4I,5230
+ ostruct/cli/security/normalization.py,sha256=qevvxW3hHDtD1cVvDym8LJEQD1AKenVB-0ZvjCYjn5E,5242
+ ostruct/cli/security/safe_joiner.py,sha256=PHowCeBAkfHfPqRwuO5Com0OemGuq3cHkdu2p9IYNT0,7107
+ ostruct/cli/security/security_manager.py,sha256=R54CgE7eG_0VybvjXj4fNn1jB-RHMUlnJ6Yw8BOtKKc,13512
+ ostruct/cli/security/symlink_resolver.py,sha256=wtZdJ_T_0FOy6B1P5ty1odEXQk9vr8BzlWeAFD4huJE,16744
+ ostruct/cli/security/types.py,sha256=15yuG_T4CXyAFFFdSWLjVS7ACmDGIPXhQpZ8awcDwCQ,2991
+ ostruct/cli/security/windows_paths.py,sha256=qxC2H2kLwtmQ7YePYde3UrmOJcGnsLEebDLh242sUaI,13453
+ ostruct/cli/serialization.py,sha256=ec0UswDE2onwtZVUoZaMCsGv6zW_tSKdBng2qVo6Ucs,704
+ ostruct/cli/template_env.py,sha256=S2ZvxuMQMicodSVqUhrw0kOzbNmlpQjSHtWlOwjXCms,1538
+ ostruct/cli/template_extensions.py,sha256=tJN3HGAS2yzGI8Up6STPday8NVL0VV6UCClBrtDKYr0,1623
+ ostruct/cli/template_filters.py,sha256=SjuQxlM5S283TS2El_AbrzETGnYoQeTpmA9sv5et3QI,19222
+ ostruct/cli/template_io.py,sha256=yUWO-8rZnSdX97DTMSEX8fG9CP1ISsOhm2NZN3Fab9A,8821
+ ostruct/cli/template_rendering.py,sha256=vp_4gvrYLd_kbQi3TYrYNniXLTeLmTaitGVBQManXvo,13342
+ ostruct/cli/template_schema.py,sha256=ckH4rUZnEgfm_BHS9LnMGr8LtDxRmZ0C6UBVrSp8KTc,19604
+ ostruct/cli/template_utils.py,sha256=Lf1TvonlRA835nxyevEBSPTEbKiz_momvQYM0ZoZDZU,8034
+ ostruct/cli/template_validation.py,sha256=AXa2zmsws1j-0CTFlp7fMiZR43iNLnj4h467up2JdgU,12693
+ ostruct/cli/token_utils.py,sha256=r4KPEO3Sec18Q6mU0aClK6XGShvusgUggXEQgEPPlaA,1369
+ ostruct/cli/utils.py,sha256=1UCl4rHjBWKR5EKugvlVGHiHjO3XXmqvkgeAUSyIPDU,831
+ ostruct/cli/validators.py,sha256=BYFZeebCPZObTUjO1TaAMpsD6h7ROkYAFn9C7uf1Q68,2992
+ ostruct/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ostruct_cli-0.6.0.dist-info/LICENSE,sha256=QUOY6QCYVxAiH8vdrUTDqe3i9hQ5bcNczppDSVpLTjk,1068
+ ostruct_cli-0.6.0.dist-info/METADATA,sha256=Zrq8a-EvLhnZdOQBBlYvONWKo61XmdAR8934_OtHUa4,10426
+ ostruct_cli-0.6.0.dist-info/WHEEL,sha256=XbeZDeTWKc1w7CSIyre5aMDU_-PohRwTQceYnisIYYY,88
+ ostruct_cli-0.6.0.dist-info/entry_points.txt,sha256=NFq9IuqHVTem0j9zKjV8C1si_zGcP1RL6Wbvt9fUDXw,48
+ ostruct_cli-0.6.0.dist-info/RECORD,,
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: poetry-core 2.0.1
+ Generator: poetry-core 2.1.1
  Root-Is-Purelib: true
  Tag: py3-none-any