ostruct-cli 0.5.0__tar.gz → 0.6.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/PKG-INFO +211 -32
  2. ostruct_cli-0.6.1/README.md +376 -0
  3. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/pyproject.toml +2 -2
  4. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/cli.py +119 -374
  5. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/errors.py +63 -18
  6. ostruct_cli-0.6.1/src/ostruct/cli/model_creation.py +507 -0
  7. ostruct_cli-0.6.1/src/ostruct/cli/schema_validation.py +213 -0
  8. ostruct_cli-0.5.0/README.md +0 -197
  9. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/LICENSE +0 -0
  10. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/__init__.py +0 -0
  11. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/__init__.py +0 -0
  12. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/base_errors.py +0 -0
  13. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/cache_manager.py +0 -0
  14. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/click_options.py +0 -0
  15. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/exit_codes.py +0 -0
  16. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/file_info.py +0 -0
  17. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/file_list.py +0 -0
  18. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/file_utils.py +0 -0
  19. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/path_utils.py +0 -0
  20. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/progress.py +0 -0
  21. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/security/__init__.py +0 -0
  22. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/security/allowed_checker.py +0 -0
  23. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/security/base.py +0 -0
  24. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/security/case_manager.py +0 -0
  25. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/security/errors.py +0 -0
  26. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/security/normalization.py +0 -0
  27. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/security/safe_joiner.py +0 -0
  28. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/security/security_manager.py +0 -0
  29. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/security/symlink_resolver.py +0 -0
  30. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/security/types.py +0 -0
  31. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/security/windows_paths.py +0 -0
  32. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/serialization.py +0 -0
  33. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/template_env.py +0 -0
  34. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/template_extensions.py +0 -0
  35. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/template_filters.py +0 -0
  36. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/template_io.py +0 -0
  37. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/template_rendering.py +0 -0
  38. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/template_schema.py +0 -0
  39. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/template_utils.py +0 -0
  40. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/template_validation.py +0 -0
  41. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/token_utils.py +0 -0
  42. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/utils.py +0 -0
  43. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/cli/validators.py +0 -0
  44. {ostruct_cli-0.5.0 → ostruct_cli-0.6.1}/src/ostruct/py.typed +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: ostruct-cli
- Version: 0.5.0
+ Version: 0.6.1
  Summary: CLI for OpenAI Structured Output
  Author: Yaniv Golan
  Author-email: yaniv@golan.name
@@ -19,7 +19,7 @@ Requires-Dist: openai (>=1.0.0,<2.0.0)
  Requires-Dist: openai-structured (>=2.0.0,<3.0.0)
  Requires-Dist: pydantic (>=2.6.3,<3.0.0)
  Requires-Dist: pyyaml (>=6.0.2,<7.0.0)
- Requires-Dist: tiktoken (>=0.9.0,<0.10.0)
+ Requires-Dist: tiktoken (==0.9.0)
  Requires-Dist: tomli (>=2.0.1,<3.0.0) ; python_version < "3.11"
  Requires-Dist: typing-extensions (>=4.9.0,<5.0.0)
  Requires-Dist: werkzeug (>=3.1.3,<4.0.0)
@@ -37,19 +37,62 @@ Command-line interface for working with OpenAI models and structured output, pow

  ## Features

- - Generate structured output from natural language using OpenAI models
- - Rich template system for defining output schemas
+ - Generate structured JSON output from natural language using OpenAI models and a JSON schema
+ - Rich template system for defining prompts (Jinja2-based)
  - Automatic token counting and context window management
  - Streaming support for real-time output
- - Caching system for cost optimization
  - Secure handling of sensitive data

  ## Installation

+ ### For Users
+
+ To install the latest stable version from PyPI:
+
  ```bash
  pip install ostruct-cli
  ```

+ ### For Developers
+
+ If you plan to contribute to the project, see the [Development Setup](#development-setup) section below for instructions on setting up the development environment with Poetry.
+
+ ## Shell Completion
+
+ ostruct-cli supports shell completion for Bash, Zsh, and Fish shells. To enable it:
+
+ ### Bash
+
+ Add this to your `~/.bashrc`:
+
+ ```bash
+ eval "$(_OSTRUCT_COMPLETE=bash_source ostruct)"
+ ```
+
+ ### Zsh
+
+ Add this to your `~/.zshrc`:
+
+ ```bash
+ eval "$(_OSTRUCT_COMPLETE=zsh_source ostruct)"
+ ```
+
+ ### Fish
+
+ Add this to your `~/.config/fish/completions/ostruct.fish`:
+
+ ```fish
+ eval (env _OSTRUCT_COMPLETE=fish_source ostruct)
+ ```
+
+ After adding the appropriate line, restart your shell or source the configuration file.
+ Shell completion will help you with:
+
+ - Command options and their arguments
+ - File paths for template and schema files
+ - Directory paths for `-d` and `--base-dir` options
+ - And more!
+
  ## Quick Start

  1. Set your OpenAI API key:
@@ -58,57 +101,193 @@ pip install ostruct-cli
  export OPENAI_API_KEY=your-api-key
  ```

- 2. Create a task template file `task.j2`:
+ ### Example 1: Using stdin (Simplest)

- ```
- Extract information about the person: {{ stdin }}
+ 1. Create a template file `extract_person.j2`:
+
+ ```jinja
+ Extract information about the person from this text: {{ stdin }}
  ```

- 3. Create a schema file `schema.json`:
+ 2. Create a schema file `schema.json`:

  ```json
  {
    "type": "object",
    "properties": {
-     "name": {
-       "type": "string",
-       "description": "The person's full name"
-     },
-     "age": {
-       "type": "integer",
-       "description": "The person's age"
-     },
-     "occupation": {
-       "type": "string",
-       "description": "The person's job or profession"
+     "person": {
+       "type": "object",
+       "properties": {
+         "name": {
+           "type": "string",
+           "description": "The person's full name"
+         },
+         "age": {
+           "type": "integer",
+           "description": "The person's age"
+         },
+         "occupation": {
+           "type": "string",
+           "description": "The person's job or profession"
+         }
+       },
+       "required": ["name", "age", "occupation"],
+       "additionalProperties": false
      }
    },
-   "required": ["name", "age", "occupation"]
+   "required": ["person"],
+   "additionalProperties": false
+ }
+ ```
+
+ 3. Run the CLI:
+
+ ```bash
+ # Basic usage
+ echo "John Smith is a 35-year-old software engineer" | ostruct run extract_person.j2 schema.json
+
+ # For longer text using heredoc
+ cat << EOF | ostruct run extract_person.j2 schema.json
+ John Smith is a 35-year-old software engineer
+ working at Tech Corp. He has been programming
+ for over 10 years.
+ EOF
+
+ # With advanced options
+ echo "John Smith is a 35-year-old software engineer" | \
+   ostruct run extract_person.j2 schema.json \
+   --model gpt-4o \
+   --sys-prompt "Extract precise information about the person" \
+   --temperature 0.7
+ ```
+
+ The command will output:
+
+ ```json
+ {
+   "person": {
+     "name": "John Smith",
+     "age": 35,
+     "occupation": "software engineer"
+   }
  }
  ```

- 4. Run the CLI:
+ ### Example 2: Processing a Single File
+
+ 1. Create a template file `extract_from_file.j2`:
+
+ ```jinja
+ Extract information about the person from this text: {{ text.content }}
+ ```
+
+ 2. Use the same schema file `schema.json` as above.
+
+ 3. Run the CLI:

  ```bash
- ostruct run task.j2 schema.json
+ # Basic usage
+ ostruct run extract_from_file.j2 schema.json -f text input.txt
+
+ # With advanced options
+ ostruct run extract_from_file.j2 schema.json \
+   -f text input.txt \
+   --model gpt-4o \
+   --max-output-tokens 1000 \
+   --temperature 0.7
  ```

- Or with more options:
+ The command will output:
+
+ ```json
+ {
+   "person": {
+     "name": "John Smith",
+     "age": 35,
+     "occupation": "software engineer"
+   }
+ }
+ ```
+
+ ### Example 3: Processing Multiple Files
+
+ 1. Create a template file `extract_from_profiles.j2`:
+
+ ```jinja
+ Extract information about the people from this data:
+
+ {% for profile in profiles %}
+ == {{ profile.name }}
+
+ {{ profile.content }}
+
+ {% endfor %}
+ ```
+
+ 2. Use the same schema file `schema.json` as above, but updated for multiple people:
+
+ ```json
+ {
+   "type": "object",
+   "properties": {
+     "people": {
+       "type": "array",
+       "items": {
+         "type": "object",
+         "properties": {
+           "name": {
+             "type": "string",
+             "description": "The person's full name"
+           },
+           "age": {
+             "type": "integer",
+             "description": "The person's age"
+           },
+           "occupation": {
+             "type": "string",
+             "description": "The person's job or profession"
+           }
+         },
+         "required": ["name", "age", "occupation"],
+         "additionalProperties": false
+       }
+     }
+   },
+   "required": ["people"],
+   "additionalProperties": false
+ }
+ ```
+
+ 3. Run the CLI:

  ```bash
- ostruct run task.j2 schema.json \
-   -f content input.txt \
-   -m gpt-4o \
-   --sys-prompt "You are an expert content analyzer"
+ # Basic usage
+ ostruct run extract_from_profiles.j2 schema.json -p profiles "profiles/*.txt"
+
+ # With advanced options
+ ostruct run extract_from_profiles.j2 schema.json \
+   -p profiles "profiles/*.txt" \
+   --model gpt-4o \
+   --sys-prompt "Extract precise information about the person" \
+   --temperature 0.5
  ```

- Output:
+ The command will output:

  ```json
  {
-   "name": "John Smith",
-   "age": 35,
-   "occupation": "software engineer"
+   "people": [
+     {
+       "name": "John Smith",
+       "age": 35,
+       "occupation": "software engineer"
+     },
+     {
+       "name": "Jane Doe",
+       "age": 28,
+       "occupation": "data scientist"
+     }
+   ]
  }
  ```

@@ -0,0 +1,376 @@
+ # ostruct-cli
+
+ [![PyPI version](https://badge.fury.io/py/ostruct-cli.svg)](https://badge.fury.io/py/ostruct-cli)
+ [![Python Versions](https://img.shields.io/pypi/pyversions/ostruct-cli.svg)](https://pypi.org/project/ostruct-cli)
+ [![Documentation Status](https://readthedocs.org/projects/ostruct/badge/?version=latest)](https://ostruct.readthedocs.io/en/latest/?badge=latest)
+ [![CI](https://github.com/yaniv-golan/ostruct/actions/workflows/ci.yml/badge.svg)](https://github.com/yaniv-golan/ostruct/actions/workflows/ci.yml)
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
+
+ Command-line interface for working with OpenAI models and structured output, powered by the [openai-structured](https://github.com/yaniv-golan/openai-structured) library.
+
+ ## Features
+
+ - Generate structured JSON output from natural language using OpenAI models and a JSON schema
+ - Rich template system for defining prompts (Jinja2-based)
+ - Automatic token counting and context window management
+ - Streaming support for real-time output
+ - Secure handling of sensitive data
+
+ ## Installation
+
+ ### For Users
+
+ To install the latest stable version from PyPI:
+
+ ```bash
+ pip install ostruct-cli
+ ```
+
+ ### For Developers
+
+ If you plan to contribute to the project, see the [Development Setup](#development-setup) section below for instructions on setting up the development environment with Poetry.
+
+ ## Shell Completion
+
+ ostruct-cli supports shell completion for Bash, Zsh, and Fish shells. To enable it:
+
+ ### Bash
+
+ Add this to your `~/.bashrc`:
+
+ ```bash
+ eval "$(_OSTRUCT_COMPLETE=bash_source ostruct)"
+ ```
+
+ ### Zsh
+
+ Add this to your `~/.zshrc`:
+
+ ```bash
+ eval "$(_OSTRUCT_COMPLETE=zsh_source ostruct)"
+ ```
+
+ ### Fish
+
+ Add this to your `~/.config/fish/completions/ostruct.fish`:
+
+ ```fish
+ eval (env _OSTRUCT_COMPLETE=fish_source ostruct)
+ ```
+
+ After adding the appropriate line, restart your shell or source the configuration file.
+ Shell completion will help you with:
+
+ - Command options and their arguments
+ - File paths for template and schema files
+ - Directory paths for `-d` and `--base-dir` options
+ - And more!
+
+ ## Quick Start
+
+ 1. Set your OpenAI API key:
+
+ ```bash
+ export OPENAI_API_KEY=your-api-key
+ ```
+
+ ### Example 1: Using stdin (Simplest)
+
+ 1. Create a template file `extract_person.j2`:
+
+ ```jinja
+ Extract information about the person from this text: {{ stdin }}
+ ```
+
+ 2. Create a schema file `schema.json`:
+
+ ```json
+ {
+   "type": "object",
+   "properties": {
+     "person": {
+       "type": "object",
+       "properties": {
+         "name": {
+           "type": "string",
+           "description": "The person's full name"
+         },
+         "age": {
+           "type": "integer",
+           "description": "The person's age"
+         },
+         "occupation": {
+           "type": "string",
+           "description": "The person's job or profession"
+         }
+       },
+       "required": ["name", "age", "occupation"],
+       "additionalProperties": false
+     }
+   },
+   "required": ["person"],
+   "additionalProperties": false
+ }
+ ```
+
+ 3. Run the CLI:
+
+ ```bash
+ # Basic usage
+ echo "John Smith is a 35-year-old software engineer" | ostruct run extract_person.j2 schema.json
+
+ # For longer text using heredoc
+ cat << EOF | ostruct run extract_person.j2 schema.json
+ John Smith is a 35-year-old software engineer
+ working at Tech Corp. He has been programming
+ for over 10 years.
+ EOF
+
+ # With advanced options
+ echo "John Smith is a 35-year-old software engineer" | \
+   ostruct run extract_person.j2 schema.json \
+   --model gpt-4o \
+   --sys-prompt "Extract precise information about the person" \
+   --temperature 0.7
+ ```
+
+ The command will output:
+
+ ```json
+ {
+   "person": {
+     "name": "John Smith",
+     "age": 35,
+     "occupation": "software engineer"
+   }
+ }
+ ```
+
+ ### Example 2: Processing a Single File
+
+ 1. Create a template file `extract_from_file.j2`:
+
+ ```jinja
+ Extract information about the person from this text: {{ text.content }}
+ ```
+
+ 2. Use the same schema file `schema.json` as above.
+
+ 3. Run the CLI:
+
+ ```bash
+ # Basic usage
+ ostruct run extract_from_file.j2 schema.json -f text input.txt
+
+ # With advanced options
+ ostruct run extract_from_file.j2 schema.json \
+   -f text input.txt \
+   --model gpt-4o \
+   --max-output-tokens 1000 \
+   --temperature 0.7
+ ```
+
+ The command will output:
+
+ ```json
+ {
+   "person": {
+     "name": "John Smith",
+     "age": 35,
+     "occupation": "software engineer"
+   }
+ }
+ ```
+
+ ### Example 3: Processing Multiple Files
+
+ 1. Create a template file `extract_from_profiles.j2`:
+
+ ```jinja
+ Extract information about the people from this data:
+
+ {% for profile in profiles %}
+ == {{ profile.name }}
+
+ {{ profile.content }}
+
+ {% endfor %}
+ ```
+
+ 2. Use the same schema file `schema.json` as above, but updated for multiple people:
+
+ ```json
+ {
+   "type": "object",
+   "properties": {
+     "people": {
+       "type": "array",
+       "items": {
+         "type": "object",
+         "properties": {
+           "name": {
+             "type": "string",
+             "description": "The person's full name"
+           },
+           "age": {
+             "type": "integer",
+             "description": "The person's age"
+           },
+           "occupation": {
+             "type": "string",
+             "description": "The person's job or profession"
+           }
+         },
+         "required": ["name", "age", "occupation"],
+         "additionalProperties": false
+       }
+     }
+   },
+   "required": ["people"],
+   "additionalProperties": false
+ }
+ ```
+
+ 3. Run the CLI:
+
+ ```bash
+ # Basic usage
+ ostruct run extract_from_profiles.j2 schema.json -p profiles "profiles/*.txt"
+
+ # With advanced options
+ ostruct run extract_from_profiles.j2 schema.json \
+   -p profiles "profiles/*.txt" \
+   --model gpt-4o \
+   --sys-prompt "Extract precise information about the person" \
+   --temperature 0.5
+ ```
+
+ The command will output:
+
+ ```json
+ {
+   "people": [
+     {
+       "name": "John Smith",
+       "age": 35,
+       "occupation": "software engineer"
+     },
+     {
+       "name": "Jane Doe",
+       "age": 28,
+       "occupation": "data scientist"
+     }
+   ]
+ }
+ ```
+
+ ### About Template Files
+
+ Template files use the `.j2` extension to indicate they contain Jinja2 template syntax. This convention:
+
+ - Enables proper syntax highlighting in most editors
+ - Makes it clear the file contains template logic
+ - Follows industry standards for Jinja2 templates
+
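A minimal template file can be sketched from the shell as below. The YAML frontmatter block and its `system_prompt` key are an assumption inferred from the `--ignore-task-sysprompt` option listed under CLI Options; the README itself does not spell out the frontmatter format, so check the documentation before relying on it.

```bash
# Hedged sketch: create a .j2 template whose (assumed) frontmatter carries the system prompt.
# The `system_prompt` key is an assumption, not confirmed by this README.
cat > extract_person.j2 << 'EOF'
---
system_prompt: You are an expert at extracting structured data about people from text.
---
Extract information about the person from this text: {{ stdin }}
EOF
```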
+ ## CLI Options
+
+ The CLI revolves around a single subcommand called `run`. Basic usage:
+
+ ```bash
+ ostruct run <TASK_TEMPLATE> <SCHEMA_FILE> [OPTIONS]
+ ```
+
+ Common options include:
+
+ - File & Directory Inputs:
+   - `-f <NAME> <PATH>`: Map a single file to a variable name
+   - `-d <NAME> <DIR>`: Map a directory to a variable name
+   - `-p <NAME> <PATTERN>`: Map files matching a glob pattern to a variable name
+   - `-R, --recursive`: Enable recursive directory/pattern scanning
+
+ - Variables:
+   - `-V name=value`: Define a simple string variable
+   - `-J name='{"key":"value"}'`: Define a JSON variable
+
+ - Model Parameters:
+   - `-m, --model MODEL`: Select the OpenAI model (supported: gpt-4o, o1, o3-mini)
+   - `--temperature FLOAT`: Set sampling temperature (0.0-2.0)
+   - `--max-output-tokens INT`: Set maximum output tokens
+   - `--top-p FLOAT`: Set top-p sampling parameter (0.0-1.0)
+   - `--frequency-penalty FLOAT`: Adjust frequency penalty (-2.0-2.0)
+   - `--presence-penalty FLOAT`: Adjust presence penalty (-2.0-2.0)
+   - `--reasoning-effort [low|medium|high]`: Control model reasoning effort
+
+ - System Prompt:
+   - `--sys-prompt TEXT`: Provide system prompt directly
+   - `--sys-file FILE`: Load system prompt from file
+   - `--ignore-task-sysprompt`: Ignore system prompt in template frontmatter
+
+ - API Configuration:
+   - `--api-key KEY`: OpenAI API key (defaults to OPENAI_API_KEY env var)
+   - `--timeout FLOAT`: API timeout in seconds (default: 60.0)
+
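The file, variable, and model options listed above can be combined in a single invocation. A sketch follows; the template, schema, directory, and variable names are placeholders, and only the flags themselves come from the list above.

```bash
# Illustrative only: analyze.j2, schema.json, ./src, and the variable names are placeholders.
ostruct run analyze.j2 schema.json \
  -d source ./src \
  -R \
  -V project=ostruct \
  -J settings='{"mode":"strict","max_items":5}' \
  --model gpt-4o \
  --temperature 0.2 \
  --max-output-tokens 1000 \
  --timeout 120
```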
+ ## Debug Options
+
+ - `--debug-validation`: Show detailed schema validation debugging
+ - `--debug-openai-stream`: Enable low-level debug output for OpenAI streaming
+ - `--progress-level {none,basic,detailed}`: Set progress reporting level
+   - `none`: No progress indicators
+   - `basic`: Show key operation steps (default)
+   - `detailed`: Show all steps with additional info
+ - `--show-model-schema`: Display the generated Pydantic model schema
+ - `--verbose`: Enable verbose logging
+ - `--dry-run`: Validate and render template without making API calls
+ - `--no-progress`: Disable all progress indicators
+
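As an example, the debug flags above combine with the Quick Start files like this (a sketch reusing `extract_person.j2` and `schema.json` from earlier):

```bash
# Validate the schema and render the template without making an API call
ostruct run extract_person.j2 schema.json --dry-run --verbose

# Run for real with detailed progress and schema validation debugging
echo "John Smith is a 35-year-old software engineer" | \
  ostruct run extract_person.j2 schema.json \
  --progress-level detailed \
  --debug-validation
```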
+ All debug and error logs are written to:
+
+ - `~/.ostruct/logs/ostruct.log`: General application logs
+ - `~/.ostruct/logs/openai_stream.log`: OpenAI streaming operations logs
+
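To follow these logs while a command runs, a standard `tail` works:

```bash
# Watch the general application log in a second terminal
tail -f ~/.ostruct/logs/ostruct.log
```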
+ For more detailed documentation and examples, visit our [documentation](https://ostruct.readthedocs.io/).
+
+ ## Development
+
+ To contribute or report issues, please visit our [GitHub repository](https://github.com/yaniv-golan/ostruct).
+
+ ## Development Setup
+
+ 1. Clone the repository:
+
+ ```bash
+ git clone https://github.com/yaniv-golan/ostruct.git
+ cd ostruct
+ ```
+
+ 2. Install Poetry if you haven't already:
+
+ ```bash
+ curl -sSL https://install.python-poetry.org | python3 -
+ ```
+
+ 3. Install dependencies:
+
+ ```bash
+ poetry install
+ ```
+
+ 4. Install openai-structured in editable mode:
+
+ ```bash
+ poetry add --editable ../openai-structured # Adjust path as needed
+ ```
+
+ 5. Run tests:
+
+ ```bash
+ poetry run pytest
+ ```
+
+ ## Contributing
+
+ Contributions are welcome! Please feel free to submit a Pull Request.
+
+ ## License
+
+ This project is licensed under the MIT License - see the LICENSE file for details.
@@ -4,7 +4,7 @@

  [tool.poetry]
  name = "ostruct-cli"
- version = "0.5.0"
+ version = "0.6.1"
  description = "CLI for OpenAI Structured Output"
  authors = ["Yaniv Golan <yaniv@golan.name>"]
  readme = "README.md"
@@ -19,13 +19,13 @@
  cachetools = "^5.3.2"
  ijson = "^3.2.3"
  typing-extensions = "^4.9.0"
- tiktoken = "^0.9.0"
  pyyaml = "^6.0.2"
  tomli = {version = "^2.0.1", python = "<3.11"}
  click = "^8.1.7"
  werkzeug = "^3.1.3"
  openai = "^1.0.0"
  openai-structured = "^2.0.0"
+ tiktoken = "0.9.0"

  [tool.poetry.scripts]
  ostruct = "ostruct.cli.cli:main"