openai 0.29.0 → 0.31.0
This diff shows the changes between publicly released versions of the package as they appear in its public registry, and is provided for informational purposes only.
- checksums.yaml +4 -4
- data/CHANGELOG.md +22 -0
- data/README.md +1 -1
- data/lib/openai/client.rb +4 -0
- data/lib/openai/internal/conversation_cursor_page.rb +2 -2
- data/lib/openai/models/all_models.rb +2 -0
- data/lib/openai/models/beta/assistant_create_params.rb +3 -0
- data/lib/openai/models/beta/assistant_update_params.rb +3 -0
- data/lib/openai/models/beta/chatkit/chat_session.rb +94 -0
- data/lib/openai/models/beta/chatkit/chat_session_automatic_thread_titling.rb +22 -0
- data/lib/openai/models/beta/chatkit/chat_session_chatkit_configuration.rb +38 -0
- data/lib/openai/models/beta/chatkit/chat_session_chatkit_configuration_param.rb +125 -0
- data/lib/openai/models/beta/chatkit/chat_session_expires_after_param.rb +30 -0
- data/lib/openai/models/beta/chatkit/chat_session_file_upload.rb +38 -0
- data/lib/openai/models/beta/chatkit/chat_session_history.rb +34 -0
- data/lib/openai/models/beta/chatkit/chat_session_rate_limits.rb +22 -0
- data/lib/openai/models/beta/chatkit/chat_session_rate_limits_param.rb +22 -0
- data/lib/openai/models/beta/chatkit/chat_session_status.rb +20 -0
- data/lib/openai/models/beta/chatkit/chat_session_workflow_param.rb +80 -0
- data/lib/openai/models/beta/chatkit/chatkit_attachment.rb +69 -0
- data/lib/openai/models/beta/chatkit/chatkit_response_output_text.rb +143 -0
- data/lib/openai/models/beta/chatkit/chatkit_thread.rb +145 -0
- data/lib/openai/models/beta/chatkit/chatkit_thread_assistant_message_item.rb +65 -0
- data/lib/openai/models/beta/chatkit/chatkit_thread_item_list.rb +374 -0
- data/lib/openai/models/beta/chatkit/chatkit_thread_user_message_item.rb +183 -0
- data/lib/openai/models/beta/chatkit/chatkit_widget_item.rb +64 -0
- data/lib/openai/models/beta/chatkit/session_cancel_params.rb +18 -0
- data/lib/openai/models/beta/chatkit/session_create_params.rb +63 -0
- data/lib/openai/models/beta/chatkit/thread_delete_params.rb +18 -0
- data/lib/openai/models/beta/chatkit/thread_delete_response.rb +39 -0
- data/lib/openai/models/beta/chatkit/thread_list_items_params.rb +66 -0
- data/lib/openai/models/beta/chatkit/thread_list_params.rb +75 -0
- data/lib/openai/models/beta/chatkit/thread_retrieve_params.rb +18 -0
- data/lib/openai/models/beta/chatkit_upload_file_params.rb +28 -0
- data/lib/openai/models/beta/chatkit_upload_file_response.rb +25 -0
- data/lib/openai/models/beta/chatkit_workflow.rb +78 -0
- data/lib/openai/models/beta/file_part.rb +56 -0
- data/lib/openai/models/beta/image_part.rb +64 -0
- data/lib/openai/models/beta/threads/run_create_params.rb +3 -0
- data/lib/openai/models/chat/completion_create_params.rb +3 -0
- data/lib/openai/models/comparison_filter.rb +29 -6
- data/lib/openai/models/evals/create_eval_completions_run_data_source.rb +3 -0
- data/lib/openai/models/evals/run_cancel_response.rb +6 -0
- data/lib/openai/models/evals/run_create_params.rb +6 -0
- data/lib/openai/models/evals/run_create_response.rb +6 -0
- data/lib/openai/models/evals/run_list_response.rb +6 -0
- data/lib/openai/models/evals/run_retrieve_response.rb +6 -0
- data/lib/openai/models/graders/score_model_grader.rb +3 -0
- data/lib/openai/models/image_edit_params.rb +4 -2
- data/lib/openai/models/image_model.rb +1 -0
- data/lib/openai/models/realtime/realtime_session.rb +4 -0
- data/lib/openai/models/realtime/realtime_session_create_request.rb +12 -0
- data/lib/openai/models/realtime/realtime_session_create_response.rb +12 -0
- data/lib/openai/models/reasoning.rb +3 -0
- data/lib/openai/models/reasoning_effort.rb +3 -0
- data/lib/openai/models/responses/tool.rb +5 -2
- data/lib/openai/models/responses_model.rb +2 -0
- data/lib/openai/models/vector_stores/vector_store_file.rb +3 -3
- data/lib/openai/models/video.rb +122 -0
- data/lib/openai/models/video_create_error.rb +21 -0
- data/lib/openai/models/video_create_params.rb +54 -0
- data/lib/openai/models/video_delete_params.rb +14 -0
- data/lib/openai/models/video_delete_response.rb +35 -0
- data/lib/openai/models/video_download_content_params.rb +34 -0
- data/lib/openai/models/video_list_params.rb +54 -0
- data/lib/openai/models/video_model.rb +15 -0
- data/lib/openai/models/video_remix_params.rb +22 -0
- data/lib/openai/models/video_retrieve_params.rb +14 -0
- data/lib/openai/models/video_seconds.rb +16 -0
- data/lib/openai/models/video_size.rb +17 -0
- data/lib/openai/models.rb +22 -0
- data/lib/openai/resources/beta/chatkit/sessions.rb +71 -0
- data/lib/openai/resources/beta/chatkit/threads.rb +126 -0
- data/lib/openai/resources/beta/chatkit.rb +50 -0
- data/lib/openai/resources/beta.rb +4 -0
- data/lib/openai/resources/files.rb +1 -1
- data/lib/openai/resources/videos.rb +165 -0
- data/lib/openai/version.rb +1 -1
- data/lib/openai.rb +46 -0
- data/rbi/openai/client.rbi +3 -0
- data/rbi/openai/models/all_models.rbi +10 -0
- data/rbi/openai/models/beta/assistant_create_params.rbi +6 -0
- data/rbi/openai/models/beta/assistant_update_params.rbi +6 -0
- data/rbi/openai/models/beta/chatkit/chat_session.rbi +141 -0
- data/rbi/openai/models/beta/chatkit/chat_session_automatic_thread_titling.rbi +35 -0
- data/rbi/openai/models/beta/chatkit/chat_session_chatkit_configuration.rbi +87 -0
- data/rbi/openai/models/beta/chatkit/chat_session_chatkit_configuration_param.rbi +256 -0
- data/rbi/openai/models/beta/chatkit/chat_session_expires_after_param.rbi +43 -0
- data/rbi/openai/models/beta/chatkit/chat_session_file_upload.rbi +61 -0
- data/rbi/openai/models/beta/chatkit/chat_session_history.rbi +52 -0
- data/rbi/openai/models/beta/chatkit/chat_session_rate_limits.rbi +37 -0
- data/rbi/openai/models/beta/chatkit/chat_session_rate_limits_param.rbi +40 -0
- data/rbi/openai/models/beta/chatkit/chat_session_status.rbi +43 -0
- data/rbi/openai/models/beta/chatkit/chat_session_workflow_param.rbi +166 -0
- data/rbi/openai/models/beta/chatkit/chatkit_attachment.rbi +116 -0
- data/rbi/openai/models/beta/chatkit/chatkit_response_output_text.rbi +287 -0
- data/rbi/openai/models/beta/chatkit/chatkit_thread.rbi +220 -0
- data/rbi/openai/models/beta/chatkit/chatkit_thread_assistant_message_item.rbi +94 -0
- data/rbi/openai/models/beta/chatkit/chatkit_thread_item_list.rbi +590 -0
- data/rbi/openai/models/beta/chatkit/chatkit_thread_user_message_item.rbi +324 -0
- data/rbi/openai/models/beta/chatkit/chatkit_widget_item.rbi +87 -0
- data/rbi/openai/models/beta/chatkit/session_cancel_params.rbi +34 -0
- data/rbi/openai/models/beta/chatkit/session_create_params.rbi +136 -0
- data/rbi/openai/models/beta/chatkit/thread_delete_params.rbi +34 -0
- data/rbi/openai/models/beta/chatkit/thread_delete_response.rbi +55 -0
- data/rbi/openai/models/beta/chatkit/thread_list_items_params.rbi +138 -0
- data/rbi/openai/models/beta/chatkit/thread_list_params.rbi +145 -0
- data/rbi/openai/models/beta/chatkit/thread_retrieve_params.rbi +34 -0
- data/rbi/openai/models/beta/chatkit_upload_file_params.rbi +50 -0
- data/rbi/openai/models/beta/chatkit_upload_file_response.rbi +25 -0
- data/rbi/openai/models/beta/chatkit_workflow.rbi +134 -0
- data/rbi/openai/models/beta/file_part.rbi +74 -0
- data/rbi/openai/models/beta/image_part.rbi +82 -0
- data/rbi/openai/models/beta/threads/run_create_params.rbi +6 -0
- data/rbi/openai/models/chat/completion_create_params.rbi +6 -0
- data/rbi/openai/models/comparison_filter.rbi +43 -4
- data/rbi/openai/models/eval_create_response.rbi +4 -4
- data/rbi/openai/models/eval_list_response.rbi +4 -4
- data/rbi/openai/models/eval_retrieve_response.rbi +4 -4
- data/rbi/openai/models/eval_update_response.rbi +4 -4
- data/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi +6 -0
- data/rbi/openai/models/evals/run_cancel_response.rbi +12 -0
- data/rbi/openai/models/evals/run_create_params.rbi +12 -0
- data/rbi/openai/models/evals/run_create_response.rbi +12 -0
- data/rbi/openai/models/evals/run_list_response.rbi +12 -0
- data/rbi/openai/models/evals/run_retrieve_response.rbi +12 -0
- data/rbi/openai/models/graders/score_model_grader.rbi +6 -0
- data/rbi/openai/models/image_edit_params.rbi +6 -3
- data/rbi/openai/models/image_model.rbi +2 -0
- data/rbi/openai/models/realtime/realtime_session.rbi +20 -0
- data/rbi/openai/models/realtime/realtime_session_create_request.rbi +20 -0
- data/rbi/openai/models/realtime/realtime_session_create_response.rbi +20 -0
- data/rbi/openai/models/reasoning.rbi +6 -0
- data/rbi/openai/models/reasoning_effort.rbi +3 -0
- data/rbi/openai/models/responses/tool.rbi +11 -3
- data/rbi/openai/models/responses_model.rbi +10 -0
- data/rbi/openai/models/vector_stores/vector_store_file.rbi +3 -3
- data/rbi/openai/models/video.rbi +143 -0
- data/rbi/openai/models/video_create_error.rbi +26 -0
- data/rbi/openai/models/video_create_params.rbi +87 -0
- data/rbi/openai/models/video_delete_params.rbi +27 -0
- data/rbi/openai/models/video_delete_response.rbi +46 -0
- data/rbi/openai/models/video_download_content_params.rbi +89 -0
- data/rbi/openai/models/video_list_params.rbi +91 -0
- data/rbi/openai/models/video_model.rbi +19 -0
- data/rbi/openai/models/video_remix_params.rbi +40 -0
- data/rbi/openai/models/video_retrieve_params.rbi +27 -0
- data/rbi/openai/models/video_seconds.rbi +20 -0
- data/rbi/openai/models/video_size.rbi +23 -0
- data/rbi/openai/models.rbi +22 -0
- data/rbi/openai/resources/beta/assistants.rbi +6 -0
- data/rbi/openai/resources/beta/chatkit/sessions.rbi +61 -0
- data/rbi/openai/resources/beta/chatkit/threads.rbi +110 -0
- data/rbi/openai/resources/beta/chatkit.rbi +35 -0
- data/rbi/openai/resources/beta/threads/runs.rbi +6 -0
- data/rbi/openai/resources/beta.rbi +3 -0
- data/rbi/openai/resources/chat/completions.rbi +6 -0
- data/rbi/openai/resources/files.rbi +1 -1
- data/rbi/openai/resources/images.rbi +4 -2
- data/rbi/openai/resources/videos.rbi +121 -0
- data/sig/openai/client.rbs +2 -0
- data/sig/openai/models/all_models.rbs +4 -0
- data/sig/openai/models/beta/chatkit/chat_session.rbs +69 -0
- data/sig/openai/models/beta/chatkit/chat_session_automatic_thread_titling.rbs +17 -0
- data/sig/openai/models/beta/chatkit/chat_session_chatkit_configuration.rbs +34 -0
- data/sig/openai/models/beta/chatkit/chat_session_chatkit_configuration_param.rbs +103 -0
- data/sig/openai/models/beta/chatkit/chat_session_expires_after_param.rbs +20 -0
- data/sig/openai/models/beta/chatkit/chat_session_file_upload.rbs +30 -0
- data/sig/openai/models/beta/chatkit/chat_session_history.rbs +19 -0
- data/sig/openai/models/beta/chatkit/chat_session_rate_limits.rbs +18 -0
- data/sig/openai/models/beta/chatkit/chat_session_rate_limits_param.rbs +20 -0
- data/sig/openai/models/beta/chatkit/chat_session_status.rbs +19 -0
- data/sig/openai/models/beta/chatkit/chat_session_workflow_param.rbs +69 -0
- data/sig/openai/models/beta/chatkit/chatkit_attachment.rbs +57 -0
- data/sig/openai/models/beta/chatkit/chatkit_response_output_text.rbs +114 -0
- data/sig/openai/models/beta/chatkit/chatkit_thread.rbs +96 -0
- data/sig/openai/models/beta/chatkit/chatkit_thread_assistant_message_item.rbs +51 -0
- data/sig/openai/models/beta/chatkit/chatkit_thread_item_list.rbs +276 -0
- data/sig/openai/models/beta/chatkit/chatkit_thread_user_message_item.rbs +127 -0
- data/sig/openai/models/beta/chatkit/chatkit_widget_item.rbs +51 -0
- data/sig/openai/models/beta/chatkit/session_cancel_params.rbs +19 -0
- data/sig/openai/models/beta/chatkit/session_create_params.rbs +62 -0
- data/sig/openai/models/beta/chatkit/thread_delete_params.rbs +19 -0
- data/sig/openai/models/beta/chatkit/thread_delete_response.rbs +30 -0
- data/sig/openai/models/beta/chatkit/thread_list_items_params.rbs +66 -0
- data/sig/openai/models/beta/chatkit/thread_list_params.rbs +73 -0
- data/sig/openai/models/beta/chatkit/thread_retrieve_params.rbs +19 -0
- data/sig/openai/models/beta/chatkit_upload_file_params.rbs +26 -0
- data/sig/openai/models/beta/chatkit_upload_file_response.rbs +14 -0
- data/sig/openai/models/beta/chatkit_workflow.rbs +55 -0
- data/sig/openai/models/beta/file_part.rbs +42 -0
- data/sig/openai/models/beta/image_part.rbs +47 -0
- data/sig/openai/models/comparison_filter.rbs +15 -1
- data/sig/openai/models/eval_create_response.rbs +2 -2
- data/sig/openai/models/eval_list_response.rbs +2 -2
- data/sig/openai/models/eval_retrieve_response.rbs +2 -2
- data/sig/openai/models/eval_update_response.rbs +2 -2
- data/sig/openai/models/image_model.rbs +3 -1
- data/sig/openai/models/realtime/realtime_session.rbs +8 -0
- data/sig/openai/models/realtime/realtime_session_create_request.rbs +8 -0
- data/sig/openai/models/realtime/realtime_session_create_response.rbs +8 -0
- data/sig/openai/models/responses/tool.rbs +2 -1
- data/sig/openai/models/responses_model.rbs +4 -0
- data/sig/openai/models/video.rbs +88 -0
- data/sig/openai/models/video_create_error.rbs +15 -0
- data/sig/openai/models/video_create_params.rbs +58 -0
- data/sig/openai/models/video_delete_params.rbs +14 -0
- data/sig/openai/models/video_delete_response.rbs +22 -0
- data/sig/openai/models/video_download_content_params.rbs +40 -0
- data/sig/openai/models/video_list_params.rbs +55 -0
- data/sig/openai/models/video_model.rbs +14 -0
- data/sig/openai/models/video_remix_params.rbs +23 -0
- data/sig/openai/models/video_retrieve_params.rbs +15 -0
- data/sig/openai/models/video_seconds.rbs +15 -0
- data/sig/openai/models/video_size.rbs +16 -0
- data/sig/openai/models.rbs +22 -0
- data/sig/openai/resources/beta/chatkit/sessions.rbs +25 -0
- data/sig/openai/resources/beta/chatkit/threads.rbs +39 -0
- data/sig/openai/resources/beta/chatkit.rbs +18 -0
- data/sig/openai/resources/beta.rbs +2 -0
- data/sig/openai/resources/videos.rbs +45 -0
- metadata +140 -2
data/rbi/openai/models/responses_model.rbi
@@ -78,6 +78,16 @@ module OpenAI
             :"gpt-5-codex",
             OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol
           )
+        GPT_5_PRO =
+          T.let(
+            :"gpt-5-pro",
+            OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol
+          )
+        GPT_5_PRO_2025_10_06 =
+          T.let(
+            :"gpt-5-pro-2025-10-06",
+            OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol
+          )

         sig do
           override.returns(
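The new `:"gpt-5-pro"` and `:"gpt-5-pro-2025-10-06"` symbols join the existing `ResponsesOnlyModel` enum, so they can be passed anywhere a responses model is accepted. A minimal sketch, assuming the standard client constructor and the existing `responses.create` call; only the model symbol is new in this release:

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# :"gpt-5-pro" is one of the ResponsesOnlyModel values added in this diff.
response = client.responses.create(
  model: :"gpt-5-pro",
  input: "Say hello from gpt-5-pro."
)
puts response.id
```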
data/rbi/openai/models/vector_stores/vector_store_file.rbi
@@ -190,7 +190,7 @@ module OpenAI
             )
           end

-          # One of `server_error` or `
+          # One of `server_error`, `unsupported_file`, or `invalid_file`.
           sig do
             returns(
               OpenAI::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol
@@ -212,7 +212,7 @@ module OpenAI
            ).returns(T.attached_class)
           end
           def self.new(
-            # One of `server_error` or `
+            # One of `server_error`, `unsupported_file`, or `invalid_file`.
             code:,
             # A human-readable description of the error.
             message:
@@ -231,7 +231,7 @@ module OpenAI
           def to_hash
           end

-          # One of `server_error` or `
+          # One of `server_error`, `unsupported_file`, or `invalid_file`.
           module Code
             extend OpenAI::Internal::Type::Enum

data/rbi/openai/models/video.rbi
@@ -0,0 +1,143 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    class Video < OpenAI::Internal::Type::BaseModel
+      OrHash = T.type_alias { T.any(OpenAI::Video, OpenAI::Internal::AnyHash) }
+
+      # Unique identifier for the video job.
+      sig { returns(String) }
+      attr_accessor :id
+
+      # Unix timestamp (seconds) for when the job completed, if finished.
+      sig { returns(T.nilable(Integer)) }
+      attr_accessor :completed_at
+
+      # Unix timestamp (seconds) for when the job was created.
+      sig { returns(Integer) }
+      attr_accessor :created_at
+
+      # Error payload that explains why generation failed, if applicable.
+      sig { returns(T.nilable(OpenAI::VideoCreateError)) }
+      attr_reader :error
+
+      sig { params(error: T.nilable(OpenAI::VideoCreateError::OrHash)).void }
+      attr_writer :error
+
+      # Unix timestamp (seconds) for when the downloadable assets expire, if set.
+      sig { returns(T.nilable(Integer)) }
+      attr_accessor :expires_at
+
+      # The video generation model that produced the job.
+      sig { returns(OpenAI::VideoModel::TaggedSymbol) }
+      attr_accessor :model
+
+      # The object type, which is always `video`.
+      sig { returns(Symbol) }
+      attr_accessor :object
+
+      # Approximate completion percentage for the generation task.
+      sig { returns(Integer) }
+      attr_accessor :progress
+
+      # Identifier of the source video if this video is a remix.
+      sig { returns(T.nilable(String)) }
+      attr_accessor :remixed_from_video_id
+
+      # Duration of the generated clip in seconds.
+      sig { returns(OpenAI::VideoSeconds::TaggedSymbol) }
+      attr_accessor :seconds
+
+      # The resolution of the generated video.
+      sig { returns(OpenAI::VideoSize::TaggedSymbol) }
+      attr_accessor :size
+
+      # Current lifecycle status of the video job.
+      sig { returns(OpenAI::Video::Status::TaggedSymbol) }
+      attr_accessor :status
+
+      # Structured information describing a generated video job.
+      sig do
+        params(
+          id: String,
+          completed_at: T.nilable(Integer),
+          created_at: Integer,
+          error: T.nilable(OpenAI::VideoCreateError::OrHash),
+          expires_at: T.nilable(Integer),
+          model: OpenAI::VideoModel::OrSymbol,
+          progress: Integer,
+          remixed_from_video_id: T.nilable(String),
+          seconds: OpenAI::VideoSeconds::OrSymbol,
+          size: OpenAI::VideoSize::OrSymbol,
+          status: OpenAI::Video::Status::OrSymbol,
+          object: Symbol
+        ).returns(T.attached_class)
+      end
+      def self.new(
+        # Unique identifier for the video job.
+        id:,
+        # Unix timestamp (seconds) for when the job completed, if finished.
+        completed_at:,
+        # Unix timestamp (seconds) for when the job was created.
+        created_at:,
+        # Error payload that explains why generation failed, if applicable.
+        error:,
+        # Unix timestamp (seconds) for when the downloadable assets expire, if set.
+        expires_at:,
+        # The video generation model that produced the job.
+        model:,
+        # Approximate completion percentage for the generation task.
+        progress:,
+        # Identifier of the source video if this video is a remix.
+        remixed_from_video_id:,
+        # Duration of the generated clip in seconds.
+        seconds:,
+        # The resolution of the generated video.
+        size:,
+        # Current lifecycle status of the video job.
+        status:,
+        # The object type, which is always `video`.
+        object: :video
+      )
+      end
+
+      sig do
+        override.returns(
+          {
+            id: String,
+            completed_at: T.nilable(Integer),
+            created_at: Integer,
+            error: T.nilable(OpenAI::VideoCreateError),
+            expires_at: T.nilable(Integer),
+            model: OpenAI::VideoModel::TaggedSymbol,
+            object: Symbol,
+            progress: Integer,
+            remixed_from_video_id: T.nilable(String),
+            seconds: OpenAI::VideoSeconds::TaggedSymbol,
+            size: OpenAI::VideoSize::TaggedSymbol,
+            status: OpenAI::Video::Status::TaggedSymbol
+          }
+        )
+      end
+      def to_hash
+      end
+
+      # Current lifecycle status of the video job.
+      module Status
+        extend OpenAI::Internal::Type::Enum
+
+        TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Video::Status) }
+        OrSymbol = T.type_alias { T.any(Symbol, String) }
+
+        QUEUED = T.let(:queued, OpenAI::Video::Status::TaggedSymbol)
+        IN_PROGRESS = T.let(:in_progress, OpenAI::Video::Status::TaggedSymbol)
+        COMPLETED = T.let(:completed, OpenAI::Video::Status::TaggedSymbol)
+        FAILED = T.let(:failed, OpenAI::Video::Status::TaggedSymbol)
+
+        sig { override.returns(T::Array[OpenAI::Video::Status::TaggedSymbol]) }
+        def self.values
+        end
+      end
+    end
+  end
+end
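The `Video` model above is the job object returned by the new videos resource (`data/lib/openai/resources/videos.rb` in the file list). A rough polling sketch; the `videos.retrieve` method name is an assumption inferred from `VideoRetrieveParams`, and `"video_123"` is a placeholder id:

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# Assumption: the videos resource exposes `retrieve`, mirroring VideoRetrieveParams.
video = client.videos.retrieve("video_123")

# Poll until the job leaves the queued/in_progress states (see Video::Status above).
until %i[completed failed].include?(video.status)
  sleep 5
  video = client.videos.retrieve("video_123")
  puts "status=#{video.status} progress=#{video.progress}%"
end
```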
data/rbi/openai/models/video_create_error.rbi
@@ -0,0 +1,26 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    class VideoCreateError < OpenAI::Internal::Type::BaseModel
+      OrHash =
+        T.type_alias do
+          T.any(OpenAI::VideoCreateError, OpenAI::Internal::AnyHash)
+        end
+
+      sig { returns(String) }
+      attr_accessor :code
+
+      sig { returns(String) }
+      attr_accessor :message
+
+      sig { params(code: String, message: String).returns(T.attached_class) }
+      def self.new(code:, message:)
+      end
+
+      sig { override.returns({ code: String, message: String }) }
+      def to_hash
+      end
+    end
+  end
+end
data/rbi/openai/models/video_create_params.rbi
@@ -0,0 +1,87 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    class VideoCreateParams < OpenAI::Internal::Type::BaseModel
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters
+
+      OrHash =
+        T.type_alias do
+          T.any(OpenAI::VideoCreateParams, OpenAI::Internal::AnyHash)
+        end
+
+      # Text prompt that describes the video to generate.
+      sig { returns(String) }
+      attr_accessor :prompt
+
+      # Optional image reference that guides generation.
+      sig { returns(T.nilable(OpenAI::Internal::FileInput)) }
+      attr_reader :input_reference
+
+      sig { params(input_reference: OpenAI::Internal::FileInput).void }
+      attr_writer :input_reference
+
+      # The video generation model to use. Defaults to `sora-2`.
+      sig { returns(T.nilable(OpenAI::VideoModel::OrSymbol)) }
+      attr_reader :model
+
+      sig { params(model: OpenAI::VideoModel::OrSymbol).void }
+      attr_writer :model
+
+      # Clip duration in seconds. Defaults to 4 seconds.
+      sig { returns(T.nilable(OpenAI::VideoSeconds::OrSymbol)) }
+      attr_reader :seconds
+
+      sig { params(seconds: OpenAI::VideoSeconds::OrSymbol).void }
+      attr_writer :seconds
+
+      # Output resolution formatted as width x height. Defaults to 720x1280.
+      sig { returns(T.nilable(OpenAI::VideoSize::OrSymbol)) }
+      attr_reader :size
+
+      sig { params(size: OpenAI::VideoSize::OrSymbol).void }
+      attr_writer :size
+
+      sig do
+        params(
+          prompt: String,
+          input_reference: OpenAI::Internal::FileInput,
+          model: OpenAI::VideoModel::OrSymbol,
+          seconds: OpenAI::VideoSeconds::OrSymbol,
+          size: OpenAI::VideoSize::OrSymbol,
+          request_options: OpenAI::RequestOptions::OrHash
+        ).returns(T.attached_class)
+      end
+      def self.new(
+        # Text prompt that describes the video to generate.
+        prompt:,
+        # Optional image reference that guides generation.
+        input_reference: nil,
+        # The video generation model to use. Defaults to `sora-2`.
+        model: nil,
+        # Clip duration in seconds. Defaults to 4 seconds.
+        seconds: nil,
+        # Output resolution formatted as width x height. Defaults to 720x1280.
+        size: nil,
+        request_options: {}
+      )
+      end
+
+      sig do
+        override.returns(
+          {
+            prompt: String,
+            input_reference: OpenAI::Internal::FileInput,
+            model: OpenAI::VideoModel::OrSymbol,
+            seconds: OpenAI::VideoSeconds::OrSymbol,
+            size: OpenAI::VideoSize::OrSymbol,
+            request_options: OpenAI::RequestOptions
+          }
+        )
+      end
+      def to_hash
+      end
+    end
+  end
+end
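`VideoCreateParams` maps onto a create call on the videos resource. The sketch below assumes the conventional `videos.create` method name and passes the documented defaults (`sora-2`, 4 seconds, 720x1280) explicitly:

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# Assumption: `videos.create` accepts the VideoCreateParams fields as keywords.
video = client.videos.create(
  prompt: "A paper airplane gliding over a city at sunset",
  model: :"sora-2",    # default model per the comment above
  seconds: :"4",       # VideoSeconds enum value
  size: :"720x1280"    # VideoSize enum value
)
puts video.id
```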
data/rbi/openai/models/video_delete_params.rbi
@@ -0,0 +1,27 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    class VideoDeleteParams < OpenAI::Internal::Type::BaseModel
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters
+
+      OrHash =
+        T.type_alias do
+          T.any(OpenAI::VideoDeleteParams, OpenAI::Internal::AnyHash)
+        end
+
+      sig do
+        params(request_options: OpenAI::RequestOptions::OrHash).returns(
+          T.attached_class
+        )
+      end
+      def self.new(request_options: {})
+      end
+
+      sig { override.returns({ request_options: OpenAI::RequestOptions }) }
+      def to_hash
+      end
+    end
+  end
+end
data/rbi/openai/models/video_delete_response.rbi
@@ -0,0 +1,46 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    class VideoDeleteResponse < OpenAI::Internal::Type::BaseModel
+      OrHash =
+        T.type_alias do
+          T.any(OpenAI::Models::VideoDeleteResponse, OpenAI::Internal::AnyHash)
+        end
+
+      # Identifier of the deleted video.
+      sig { returns(String) }
+      attr_accessor :id
+
+      # Indicates that the video resource was deleted.
+      sig { returns(T::Boolean) }
+      attr_accessor :deleted
+
+      # The object type that signals the deletion response.
+      sig { returns(Symbol) }
+      attr_accessor :object
+
+      # Confirmation payload returned after deleting a video.
+      sig do
+        params(id: String, deleted: T::Boolean, object: Symbol).returns(
+          T.attached_class
+        )
+      end
+      def self.new(
+        # Identifier of the deleted video.
+        id:,
+        # Indicates that the video resource was deleted.
+        deleted:,
+        # The object type that signals the deletion response.
+        object: :"video.deleted"
+      )
+      end
+
+      sig do
+        override.returns({ id: String, deleted: T::Boolean, object: Symbol })
+      end
+      def to_hash
+      end
+    end
+  end
+end
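The deletion response carries the video id, a `deleted` flag, and the fixed `:"video.deleted"` object type. A brief sketch, assuming a `videos.delete` method that mirrors `VideoDeleteParams` (placeholder id):

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# Assumption: `videos.delete` exists and returns a VideoDeleteResponse.
result = client.videos.delete("video_123")
puts "deleted #{result.id}" if result.deleted
```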
data/rbi/openai/models/video_download_content_params.rbi
@@ -0,0 +1,89 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    class VideoDownloadContentParams < OpenAI::Internal::Type::BaseModel
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters
+
+      OrHash =
+        T.type_alias do
+          T.any(OpenAI::VideoDownloadContentParams, OpenAI::Internal::AnyHash)
+        end
+
+      # Which downloadable asset to return. Defaults to the MP4 video.
+      sig do
+        returns(
+          T.nilable(OpenAI::VideoDownloadContentParams::Variant::OrSymbol)
+        )
+      end
+      attr_reader :variant
+
+      sig do
+        params(
+          variant: OpenAI::VideoDownloadContentParams::Variant::OrSymbol
+        ).void
+      end
+      attr_writer :variant
+
+      sig do
+        params(
+          variant: OpenAI::VideoDownloadContentParams::Variant::OrSymbol,
+          request_options: OpenAI::RequestOptions::OrHash
+        ).returns(T.attached_class)
+      end
+      def self.new(
+        # Which downloadable asset to return. Defaults to the MP4 video.
+        variant: nil,
+        request_options: {}
+      )
+      end
+
+      sig do
+        override.returns(
+          {
+            variant: OpenAI::VideoDownloadContentParams::Variant::OrSymbol,
+            request_options: OpenAI::RequestOptions
+          }
+        )
+      end
+      def to_hash
+      end
+
+      # Which downloadable asset to return. Defaults to the MP4 video.
+      module Variant
+        extend OpenAI::Internal::Type::Enum
+
+        TaggedSymbol =
+          T.type_alias do
+            T.all(Symbol, OpenAI::VideoDownloadContentParams::Variant)
+          end
+        OrSymbol = T.type_alias { T.any(Symbol, String) }
+
+        VIDEO =
+          T.let(
+            :video,
+            OpenAI::VideoDownloadContentParams::Variant::TaggedSymbol
+          )
+        THUMBNAIL =
+          T.let(
+            :thumbnail,
+            OpenAI::VideoDownloadContentParams::Variant::TaggedSymbol
+          )
+        SPRITESHEET =
+          T.let(
+            :spritesheet,
+            OpenAI::VideoDownloadContentParams::Variant::TaggedSymbol
+          )
+
+        sig do
+          override.returns(
+            T::Array[OpenAI::VideoDownloadContentParams::Variant::TaggedSymbol]
+          )
+        end
+        def self.values
+        end
+      end
+    end
+  end
+end
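The `Variant` enum selects which asset to download (`:video`, `:thumbnail`, or `:spritesheet`), with the MP4 as the default. The method name `download_content` below is an assumption taken from the params class name, and the exact return type is defined in `resources/videos.rb`, which this excerpt does not show:

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# Assumption: a `download_content` method mirrors VideoDownloadContentParams.
# Omit `variant:` for the MP4; pass it for the thumbnail or spritesheet.
content = client.videos.download_content("video_123", variant: :thumbnail)
# Whether `content` is raw bytes or a response wrapper depends on the resource
# implementation, which is not reproduced in this excerpt.
```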
data/rbi/openai/models/video_list_params.rbi
@@ -0,0 +1,91 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    class VideoListParams < OpenAI::Internal::Type::BaseModel
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters
+
+      OrHash =
+        T.type_alias do
+          T.any(OpenAI::VideoListParams, OpenAI::Internal::AnyHash)
+        end
+
+      # Identifier for the last item from the previous pagination request
+      sig { returns(T.nilable(String)) }
+      attr_reader :after
+
+      sig { params(after: String).void }
+      attr_writer :after
+
+      # Number of items to retrieve
+      sig { returns(T.nilable(Integer)) }
+      attr_reader :limit
+
+      sig { params(limit: Integer).void }
+      attr_writer :limit
+
+      # Sort order of results by timestamp. Use `asc` for ascending order or `desc` for
+      # descending order.
+      sig { returns(T.nilable(OpenAI::VideoListParams::Order::OrSymbol)) }
+      attr_reader :order
+
+      sig { params(order: OpenAI::VideoListParams::Order::OrSymbol).void }
+      attr_writer :order
+
+      sig do
+        params(
+          after: String,
+          limit: Integer,
+          order: OpenAI::VideoListParams::Order::OrSymbol,
+          request_options: OpenAI::RequestOptions::OrHash
+        ).returns(T.attached_class)
+      end
+      def self.new(
+        # Identifier for the last item from the previous pagination request
+        after: nil,
+        # Number of items to retrieve
+        limit: nil,
+        # Sort order of results by timestamp. Use `asc` for ascending order or `desc` for
+        # descending order.
+        order: nil,
+        request_options: {}
+      )
+      end
+
+      sig do
+        override.returns(
+          {
+            after: String,
+            limit: Integer,
+            order: OpenAI::VideoListParams::Order::OrSymbol,
+            request_options: OpenAI::RequestOptions
+          }
+        )
+      end
+      def to_hash
+      end
+
+      # Sort order of results by timestamp. Use `asc` for ascending order or `desc` for
+      # descending order.
+      module Order
+        extend OpenAI::Internal::Type::Enum
+
+        TaggedSymbol =
+          T.type_alias { T.all(Symbol, OpenAI::VideoListParams::Order) }
+        OrSymbol = T.type_alias { T.any(Symbol, String) }
+
+        ASC = T.let(:asc, OpenAI::VideoListParams::Order::TaggedSymbol)
+        DESC = T.let(:desc, OpenAI::VideoListParams::Order::TaggedSymbol)
+
+        sig do
+          override.returns(
+            T::Array[OpenAI::VideoListParams::Order::TaggedSymbol]
+          )
+        end
+        def self.values
+        end
+      end
+    end
+  end
+end
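Listing supports cursor pagination (`after`, `limit`) plus the `order` enum. A sketch assuming a `videos.list` method that returns one of the gem's cursor-page objects with the usual `auto_paging_each` helper:

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# Assumption: `videos.list` mirrors VideoListParams and returns a cursor page
# that exposes `auto_paging_each` for transparent pagination.
page = client.videos.list(limit: 10, order: :desc)
page.auto_paging_each do |video|
  puts "#{video.id}: #{video.status}"
end
```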
data/rbi/openai/models/video_model.rbi
@@ -0,0 +1,19 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    module VideoModel
+      extend OpenAI::Internal::Type::Enum
+
+      TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::VideoModel) }
+      OrSymbol = T.type_alias { T.any(Symbol, String) }
+
+      SORA_2 = T.let(:"sora-2", OpenAI::VideoModel::TaggedSymbol)
+      SORA_2_PRO = T.let(:"sora-2-pro", OpenAI::VideoModel::TaggedSymbol)
+
+      sig { override.returns(T::Array[OpenAI::VideoModel::TaggedSymbol]) }
+      def self.values
+      end
+    end
+  end
+end
data/rbi/openai/models/video_remix_params.rbi
@@ -0,0 +1,40 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    class VideoRemixParams < OpenAI::Internal::Type::BaseModel
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters
+
+      OrHash =
+        T.type_alias do
+          T.any(OpenAI::VideoRemixParams, OpenAI::Internal::AnyHash)
+        end
+
+      # Updated text prompt that directs the remix generation.
+      sig { returns(String) }
+      attr_accessor :prompt
+
+      sig do
+        params(
+          prompt: String,
+          request_options: OpenAI::RequestOptions::OrHash
+        ).returns(T.attached_class)
+      end
+      def self.new(
+        # Updated text prompt that directs the remix generation.
+        prompt:,
+        request_options: {}
+      )
+      end
+
+      sig do
+        override.returns(
+          { prompt: String, request_options: OpenAI::RequestOptions }
+        )
+      end
+      def to_hash
+      end
+    end
+  end
+end
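`VideoRemixParams` carries only the replacement prompt; the video being remixed is addressed by id. A sketch assuming a `videos.remix(video_id, prompt:)` signature inferred from the params class (placeholder id):

```ruby
require "openai"

client = OpenAI::Client.new(api_key: ENV.fetch("OPENAI_API_KEY"))

# Assumption: `videos.remix` takes the source video id plus the new prompt.
remix = client.videos.remix(
  "video_123",
  prompt: "Same scene, but at night in the rain"
)
puts remix.remixed_from_video_id # expected to point back at "video_123"
```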
data/rbi/openai/models/video_retrieve_params.rbi
@@ -0,0 +1,27 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    class VideoRetrieveParams < OpenAI::Internal::Type::BaseModel
+      extend OpenAI::Internal::Type::RequestParameters::Converter
+      include OpenAI::Internal::Type::RequestParameters
+
+      OrHash =
+        T.type_alias do
+          T.any(OpenAI::VideoRetrieveParams, OpenAI::Internal::AnyHash)
+        end
+
+      sig do
+        params(request_options: OpenAI::RequestOptions::OrHash).returns(
+          T.attached_class
+        )
+      end
+      def self.new(request_options: {})
+      end
+
+      sig { override.returns({ request_options: OpenAI::RequestOptions }) }
+      def to_hash
+      end
+    end
+  end
+end
data/rbi/openai/models/video_seconds.rbi
@@ -0,0 +1,20 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    module VideoSeconds
+      extend OpenAI::Internal::Type::Enum
+
+      TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::VideoSeconds) }
+      OrSymbol = T.type_alias { T.any(Symbol, String) }
+
+      VIDEO_SECONDS_4 = T.let(:"4", OpenAI::VideoSeconds::TaggedSymbol)
+      VIDEO_SECONDS_8 = T.let(:"8", OpenAI::VideoSeconds::TaggedSymbol)
+      VIDEO_SECONDS_12 = T.let(:"12", OpenAI::VideoSeconds::TaggedSymbol)
+
+      sig { override.returns(T::Array[OpenAI::VideoSeconds::TaggedSymbol]) }
+      def self.values
+      end
+    end
+  end
+end
data/rbi/openai/models/video_size.rbi
@@ -0,0 +1,23 @@
+# typed: strong
+
+module OpenAI
+  module Models
+    module VideoSize
+      extend OpenAI::Internal::Type::Enum
+
+      TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::VideoSize) }
+      OrSymbol = T.type_alias { T.any(Symbol, String) }
+
+      VIDEO_SIZE_720X1280 = T.let(:"720x1280", OpenAI::VideoSize::TaggedSymbol)
+      VIDEO_SIZE_1280X720 = T.let(:"1280x720", OpenAI::VideoSize::TaggedSymbol)
+      VIDEO_SIZE_1024X1792 =
+        T.let(:"1024x1792", OpenAI::VideoSize::TaggedSymbol)
+      VIDEO_SIZE_1792X1024 =
+        T.let(:"1792x1024", OpenAI::VideoSize::TaggedSymbol)
+
+      sig { override.returns(T::Array[OpenAI::VideoSize::TaggedSymbol]) }
+      def self.values
+      end
+    end
+  end
+end