openai 0.29.0 → 0.30.0

This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (178)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +8 -0
  3. data/README.md +1 -1
  4. data/lib/openai/client.rb +4 -0
  5. data/lib/openai/internal/conversation_cursor_page.rb +2 -2
  6. data/lib/openai/models/all_models.rb +2 -0
  7. data/lib/openai/models/beta/chatkit/chat_session.rb +94 -0
  8. data/lib/openai/models/beta/chatkit/chat_session_automatic_thread_titling.rb +22 -0
  9. data/lib/openai/models/beta/chatkit/chat_session_chatkit_configuration.rb +38 -0
  10. data/lib/openai/models/beta/chatkit/chat_session_chatkit_configuration_param.rb +125 -0
  11. data/lib/openai/models/beta/chatkit/chat_session_expires_after_param.rb +30 -0
  12. data/lib/openai/models/beta/chatkit/chat_session_file_upload.rb +38 -0
  13. data/lib/openai/models/beta/chatkit/chat_session_history.rb +34 -0
  14. data/lib/openai/models/beta/chatkit/chat_session_rate_limits.rb +22 -0
  15. data/lib/openai/models/beta/chatkit/chat_session_rate_limits_param.rb +22 -0
  16. data/lib/openai/models/beta/chatkit/chat_session_status.rb +20 -0
  17. data/lib/openai/models/beta/chatkit/chat_session_workflow_param.rb +80 -0
  18. data/lib/openai/models/beta/chatkit/chatkit_attachment.rb +69 -0
  19. data/lib/openai/models/beta/chatkit/chatkit_response_output_text.rb +143 -0
  20. data/lib/openai/models/beta/chatkit/chatkit_thread.rb +145 -0
  21. data/lib/openai/models/beta/chatkit/chatkit_thread_assistant_message_item.rb +65 -0
  22. data/lib/openai/models/beta/chatkit/chatkit_thread_item_list.rb +374 -0
  23. data/lib/openai/models/beta/chatkit/chatkit_thread_user_message_item.rb +183 -0
  24. data/lib/openai/models/beta/chatkit/chatkit_widget_item.rb +64 -0
  25. data/lib/openai/models/beta/chatkit/session_cancel_params.rb +18 -0
  26. data/lib/openai/models/beta/chatkit/session_create_params.rb +63 -0
  27. data/lib/openai/models/beta/chatkit/thread_delete_params.rb +18 -0
  28. data/lib/openai/models/beta/chatkit/thread_delete_response.rb +39 -0
  29. data/lib/openai/models/beta/chatkit/thread_list_items_params.rb +66 -0
  30. data/lib/openai/models/beta/chatkit/thread_list_params.rb +75 -0
  31. data/lib/openai/models/beta/chatkit/thread_retrieve_params.rb +18 -0
  32. data/lib/openai/models/beta/chatkit_upload_file_params.rb +28 -0
  33. data/lib/openai/models/beta/chatkit_upload_file_response.rb +25 -0
  34. data/lib/openai/models/beta/chatkit_workflow.rb +78 -0
  35. data/lib/openai/models/beta/file_part.rb +56 -0
  36. data/lib/openai/models/beta/image_part.rb +64 -0
  37. data/lib/openai/models/image_edit_params.rb +4 -2
  38. data/lib/openai/models/image_model.rb +1 -0
  39. data/lib/openai/models/realtime/realtime_session.rb +4 -0
  40. data/lib/openai/models/realtime/realtime_session_create_request.rb +12 -0
  41. data/lib/openai/models/realtime/realtime_session_create_response.rb +12 -0
  42. data/lib/openai/models/responses/tool.rb +5 -2
  43. data/lib/openai/models/responses_model.rb +2 -0
  44. data/lib/openai/models/video.rb +122 -0
  45. data/lib/openai/models/video_create_error.rb +21 -0
  46. data/lib/openai/models/video_create_params.rb +54 -0
  47. data/lib/openai/models/video_delete_params.rb +14 -0
  48. data/lib/openai/models/video_delete_response.rb +35 -0
  49. data/lib/openai/models/video_download_content_params.rb +34 -0
  50. data/lib/openai/models/video_list_params.rb +54 -0
  51. data/lib/openai/models/video_model.rb +15 -0
  52. data/lib/openai/models/video_remix_params.rb +22 -0
  53. data/lib/openai/models/video_retrieve_params.rb +14 -0
  54. data/lib/openai/models/video_seconds.rb +16 -0
  55. data/lib/openai/models/video_size.rb +17 -0
  56. data/lib/openai/models.rb +22 -0
  57. data/lib/openai/resources/beta/chatkit/sessions.rb +71 -0
  58. data/lib/openai/resources/beta/chatkit/threads.rb +126 -0
  59. data/lib/openai/resources/beta/chatkit.rb +50 -0
  60. data/lib/openai/resources/beta.rb +4 -0
  61. data/lib/openai/resources/videos.rb +165 -0
  62. data/lib/openai/version.rb +1 -1
  63. data/lib/openai.rb +46 -0
  64. data/rbi/openai/client.rbi +3 -0
  65. data/rbi/openai/models/all_models.rbi +10 -0
  66. data/rbi/openai/models/beta/chatkit/chat_session.rbi +141 -0
  67. data/rbi/openai/models/beta/chatkit/chat_session_automatic_thread_titling.rbi +35 -0
  68. data/rbi/openai/models/beta/chatkit/chat_session_chatkit_configuration.rbi +87 -0
  69. data/rbi/openai/models/beta/chatkit/chat_session_chatkit_configuration_param.rbi +256 -0
  70. data/rbi/openai/models/beta/chatkit/chat_session_expires_after_param.rbi +43 -0
  71. data/rbi/openai/models/beta/chatkit/chat_session_file_upload.rbi +61 -0
  72. data/rbi/openai/models/beta/chatkit/chat_session_history.rbi +52 -0
  73. data/rbi/openai/models/beta/chatkit/chat_session_rate_limits.rbi +37 -0
  74. data/rbi/openai/models/beta/chatkit/chat_session_rate_limits_param.rbi +40 -0
  75. data/rbi/openai/models/beta/chatkit/chat_session_status.rbi +43 -0
  76. data/rbi/openai/models/beta/chatkit/chat_session_workflow_param.rbi +166 -0
  77. data/rbi/openai/models/beta/chatkit/chatkit_attachment.rbi +116 -0
  78. data/rbi/openai/models/beta/chatkit/chatkit_response_output_text.rbi +287 -0
  79. data/rbi/openai/models/beta/chatkit/chatkit_thread.rbi +220 -0
  80. data/rbi/openai/models/beta/chatkit/chatkit_thread_assistant_message_item.rbi +94 -0
  81. data/rbi/openai/models/beta/chatkit/chatkit_thread_item_list.rbi +590 -0
  82. data/rbi/openai/models/beta/chatkit/chatkit_thread_user_message_item.rbi +324 -0
  83. data/rbi/openai/models/beta/chatkit/chatkit_widget_item.rbi +87 -0
  84. data/rbi/openai/models/beta/chatkit/session_cancel_params.rbi +34 -0
  85. data/rbi/openai/models/beta/chatkit/session_create_params.rbi +136 -0
  86. data/rbi/openai/models/beta/chatkit/thread_delete_params.rbi +34 -0
  87. data/rbi/openai/models/beta/chatkit/thread_delete_response.rbi +55 -0
  88. data/rbi/openai/models/beta/chatkit/thread_list_items_params.rbi +138 -0
  89. data/rbi/openai/models/beta/chatkit/thread_list_params.rbi +145 -0
  90. data/rbi/openai/models/beta/chatkit/thread_retrieve_params.rbi +34 -0
  91. data/rbi/openai/models/beta/chatkit_upload_file_params.rbi +50 -0
  92. data/rbi/openai/models/beta/chatkit_upload_file_response.rbi +25 -0
  93. data/rbi/openai/models/beta/chatkit_workflow.rbi +134 -0
  94. data/rbi/openai/models/beta/file_part.rbi +74 -0
  95. data/rbi/openai/models/beta/image_part.rbi +82 -0
  96. data/rbi/openai/models/image_edit_params.rbi +6 -3
  97. data/rbi/openai/models/image_model.rbi +2 -0
  98. data/rbi/openai/models/realtime/realtime_session.rbi +20 -0
  99. data/rbi/openai/models/realtime/realtime_session_create_request.rbi +20 -0
  100. data/rbi/openai/models/realtime/realtime_session_create_response.rbi +20 -0
  101. data/rbi/openai/models/responses/tool.rbi +11 -3
  102. data/rbi/openai/models/responses_model.rbi +10 -0
  103. data/rbi/openai/models/video.rbi +143 -0
  104. data/rbi/openai/models/video_create_error.rbi +26 -0
  105. data/rbi/openai/models/video_create_params.rbi +87 -0
  106. data/rbi/openai/models/video_delete_params.rbi +27 -0
  107. data/rbi/openai/models/video_delete_response.rbi +46 -0
  108. data/rbi/openai/models/video_download_content_params.rbi +89 -0
  109. data/rbi/openai/models/video_list_params.rbi +91 -0
  110. data/rbi/openai/models/video_model.rbi +19 -0
  111. data/rbi/openai/models/video_remix_params.rbi +40 -0
  112. data/rbi/openai/models/video_retrieve_params.rbi +27 -0
  113. data/rbi/openai/models/video_seconds.rbi +20 -0
  114. data/rbi/openai/models/video_size.rbi +23 -0
  115. data/rbi/openai/models.rbi +22 -0
  116. data/rbi/openai/resources/beta/chatkit/sessions.rbi +61 -0
  117. data/rbi/openai/resources/beta/chatkit/threads.rbi +110 -0
  118. data/rbi/openai/resources/beta/chatkit.rbi +35 -0
  119. data/rbi/openai/resources/beta.rbi +3 -0
  120. data/rbi/openai/resources/images.rbi +4 -2
  121. data/rbi/openai/resources/videos.rbi +121 -0
  122. data/sig/openai/client.rbs +2 -0
  123. data/sig/openai/models/all_models.rbs +4 -0
  124. data/sig/openai/models/beta/chatkit/chat_session.rbs +69 -0
  125. data/sig/openai/models/beta/chatkit/chat_session_automatic_thread_titling.rbs +17 -0
  126. data/sig/openai/models/beta/chatkit/chat_session_chatkit_configuration.rbs +34 -0
  127. data/sig/openai/models/beta/chatkit/chat_session_chatkit_configuration_param.rbs +103 -0
  128. data/sig/openai/models/beta/chatkit/chat_session_expires_after_param.rbs +20 -0
  129. data/sig/openai/models/beta/chatkit/chat_session_file_upload.rbs +30 -0
  130. data/sig/openai/models/beta/chatkit/chat_session_history.rbs +19 -0
  131. data/sig/openai/models/beta/chatkit/chat_session_rate_limits.rbs +18 -0
  132. data/sig/openai/models/beta/chatkit/chat_session_rate_limits_param.rbs +20 -0
  133. data/sig/openai/models/beta/chatkit/chat_session_status.rbs +19 -0
  134. data/sig/openai/models/beta/chatkit/chat_session_workflow_param.rbs +69 -0
  135. data/sig/openai/models/beta/chatkit/chatkit_attachment.rbs +57 -0
  136. data/sig/openai/models/beta/chatkit/chatkit_response_output_text.rbs +114 -0
  137. data/sig/openai/models/beta/chatkit/chatkit_thread.rbs +96 -0
  138. data/sig/openai/models/beta/chatkit/chatkit_thread_assistant_message_item.rbs +51 -0
  139. data/sig/openai/models/beta/chatkit/chatkit_thread_item_list.rbs +276 -0
  140. data/sig/openai/models/beta/chatkit/chatkit_thread_user_message_item.rbs +127 -0
  141. data/sig/openai/models/beta/chatkit/chatkit_widget_item.rbs +51 -0
  142. data/sig/openai/models/beta/chatkit/session_cancel_params.rbs +19 -0
  143. data/sig/openai/models/beta/chatkit/session_create_params.rbs +62 -0
  144. data/sig/openai/models/beta/chatkit/thread_delete_params.rbs +19 -0
  145. data/sig/openai/models/beta/chatkit/thread_delete_response.rbs +30 -0
  146. data/sig/openai/models/beta/chatkit/thread_list_items_params.rbs +66 -0
  147. data/sig/openai/models/beta/chatkit/thread_list_params.rbs +73 -0
  148. data/sig/openai/models/beta/chatkit/thread_retrieve_params.rbs +19 -0
  149. data/sig/openai/models/beta/chatkit_upload_file_params.rbs +26 -0
  150. data/sig/openai/models/beta/chatkit_upload_file_response.rbs +14 -0
  151. data/sig/openai/models/beta/chatkit_workflow.rbs +55 -0
  152. data/sig/openai/models/beta/file_part.rbs +42 -0
  153. data/sig/openai/models/beta/image_part.rbs +47 -0
  154. data/sig/openai/models/image_model.rbs +3 -1
  155. data/sig/openai/models/realtime/realtime_session.rbs +8 -0
  156. data/sig/openai/models/realtime/realtime_session_create_request.rbs +8 -0
  157. data/sig/openai/models/realtime/realtime_session_create_response.rbs +8 -0
  158. data/sig/openai/models/responses/tool.rbs +2 -1
  159. data/sig/openai/models/responses_model.rbs +4 -0
  160. data/sig/openai/models/video.rbs +88 -0
  161. data/sig/openai/models/video_create_error.rbs +15 -0
  162. data/sig/openai/models/video_create_params.rbs +58 -0
  163. data/sig/openai/models/video_delete_params.rbs +14 -0
  164. data/sig/openai/models/video_delete_response.rbs +22 -0
  165. data/sig/openai/models/video_download_content_params.rbs +40 -0
  166. data/sig/openai/models/video_list_params.rbs +55 -0
  167. data/sig/openai/models/video_model.rbs +14 -0
  168. data/sig/openai/models/video_remix_params.rbs +23 -0
  169. data/sig/openai/models/video_retrieve_params.rbs +15 -0
  170. data/sig/openai/models/video_seconds.rbs +15 -0
  171. data/sig/openai/models/video_size.rbs +16 -0
  172. data/sig/openai/models.rbs +22 -0
  173. data/sig/openai/resources/beta/chatkit/sessions.rbs +25 -0
  174. data/sig/openai/resources/beta/chatkit/threads.rbs +39 -0
  175. data/sig/openai/resources/beta/chatkit.rbs +18 -0
  176. data/sig/openai/resources/beta.rbs +2 -0
  177. data/sig/openai/resources/videos.rbs +45 -0
  178. metadata +140 -2
data/rbi/openai/models/image_edit_params.rbi
@@ -38,7 +38,8 @@ module OpenAI
 
   # Control how much effort the model will exert to match the style and features,
   # especially facial features, of input images. This parameter is only supported
-  # for `gpt-image-1`. Supports `high` and `low`. Defaults to `low`.
+  # for `gpt-image-1`. Unsupported for `gpt-image-1-mini`. Supports `high` and
+  # `low`. Defaults to `low`.
   sig do
     returns(T.nilable(OpenAI::ImageEditParams::InputFidelity::OrSymbol))
   end
@@ -161,7 +162,8 @@ module OpenAI
   background: nil,
   # Control how much effort the model will exert to match the style and features,
   # especially facial features, of input images. This parameter is only supported
-  # for `gpt-image-1`. Supports `high` and `low`. Defaults to `low`.
+  # for `gpt-image-1`. Unsupported for `gpt-image-1-mini`. Supports `high` and
+  # `low`. Defaults to `low`.
   input_fidelity: nil,
   # An additional image whose fully transparent areas (e.g. where alpha is zero)
   # indicate where `image` should be edited. If there are multiple images provided,
@@ -294,7 +296,8 @@ module OpenAI
 
   # Control how much effort the model will exert to match the style and features,
   # especially facial features, of input images. This parameter is only supported
-  # for `gpt-image-1`. Supports `high` and `low`. Defaults to `low`.
+  # for `gpt-image-1`. Unsupported for `gpt-image-1-mini`. Supports `high` and
+  # `low`. Defaults to `low`.
   module InputFidelity
     extend OpenAI::Internal::Type::Enum
 
data/rbi/openai/models/image_model.rbi
@@ -11,6 +11,8 @@ module OpenAI
   DALL_E_2 = T.let(:"dall-e-2", OpenAI::ImageModel::TaggedSymbol)
   DALL_E_3 = T.let(:"dall-e-3", OpenAI::ImageModel::TaggedSymbol)
   GPT_IMAGE_1 = T.let(:"gpt-image-1", OpenAI::ImageModel::TaggedSymbol)
+  GPT_IMAGE_1_MINI =
+    T.let(:"gpt-image-1-mini", OpenAI::ImageModel::TaggedSymbol)
 
   sig { override.returns(T::Array[OpenAI::ImageModel::TaggedSymbol]) }
   def self.values
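The `gpt-image-1-mini` symbol is now accepted wherever an image model is expected, while `input_fidelity` stays restricted to `gpt-image-1`. A hedged usage sketch, not taken from the gem's docs: the prompts and file path are placeholders, and the `images.generate` / `images.edit` argument sets are assumptions based on these params classes.

require "openai"
require "pathname"

client = OpenAI::Client.new # reads OPENAI_API_KEY from the environment

# Generate with the new mini model; the constant is just :"gpt-image-1-mini".
client.images.generate(
  model: OpenAI::ImageModel::GPT_IMAGE_1_MINI,
  prompt: "A watercolor fox in a pine forest" # placeholder prompt
)

# `input_fidelity` remains a gpt-image-1 only option, per the comment change above.
client.images.edit(
  model: :"gpt-image-1",
  image: Pathname("portrait.png"),  # placeholder path
  prompt: "Swap the background for a beach at sunset",
  input_fidelity: :high             # :high or :low; defaults to :low
)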
data/rbi/openai/models/realtime/realtime_session.rbi
@@ -707,6 +707,26 @@ module OpenAI
       :"gpt-4o-mini-realtime-preview-2024-12-17",
       OpenAI::Realtime::RealtimeSession::Model::TaggedSymbol
     )
+  GPT_REALTIME_MINI =
+    T.let(
+      :"gpt-realtime-mini",
+      OpenAI::Realtime::RealtimeSession::Model::TaggedSymbol
+    )
+  GPT_REALTIME_MINI_2025_10_06 =
+    T.let(
+      :"gpt-realtime-mini-2025-10-06",
+      OpenAI::Realtime::RealtimeSession::Model::TaggedSymbol
+    )
+  GPT_AUDIO_MINI =
+    T.let(
+      :"gpt-audio-mini",
+      OpenAI::Realtime::RealtimeSession::Model::TaggedSymbol
+    )
+  GPT_AUDIO_MINI_2025_10_06 =
+    T.let(
+      :"gpt-audio-mini-2025-10-06",
+      OpenAI::Realtime::RealtimeSession::Model::TaggedSymbol
+    )
 
   sig do
     override.returns(
data/rbi/openai/models/realtime/realtime_session_create_request.rbi
@@ -508,6 +508,26 @@ module OpenAI
       :"gpt-4o-mini-realtime-preview-2024-12-17",
       OpenAI::Realtime::RealtimeSessionCreateRequest::Model::TaggedSymbol
     )
+  GPT_REALTIME_MINI =
+    T.let(
+      :"gpt-realtime-mini",
+      OpenAI::Realtime::RealtimeSessionCreateRequest::Model::TaggedSymbol
+    )
+  GPT_REALTIME_MINI_2025_10_06 =
+    T.let(
+      :"gpt-realtime-mini-2025-10-06",
+      OpenAI::Realtime::RealtimeSessionCreateRequest::Model::TaggedSymbol
+    )
+  GPT_AUDIO_MINI =
+    T.let(
+      :"gpt-audio-mini",
+      OpenAI::Realtime::RealtimeSessionCreateRequest::Model::TaggedSymbol
+    )
+  GPT_AUDIO_MINI_2025_10_06 =
+    T.let(
+      :"gpt-audio-mini-2025-10-06",
+      OpenAI::Realtime::RealtimeSessionCreateRequest::Model::TaggedSymbol
+    )
   end
 
   module OutputModality
data/rbi/openai/models/realtime/realtime_session_create_response.rbi
@@ -1308,6 +1308,26 @@ module OpenAI
       :"gpt-4o-mini-realtime-preview-2024-12-17",
       OpenAI::Realtime::RealtimeSessionCreateResponse::Model::TaggedSymbol
     )
+  GPT_REALTIME_MINI =
+    T.let(
+      :"gpt-realtime-mini",
+      OpenAI::Realtime::RealtimeSessionCreateResponse::Model::TaggedSymbol
+    )
+  GPT_REALTIME_MINI_2025_10_06 =
+    T.let(
+      :"gpt-realtime-mini-2025-10-06",
+      OpenAI::Realtime::RealtimeSessionCreateResponse::Model::TaggedSymbol
+    )
+  GPT_AUDIO_MINI =
+    T.let(
+      :"gpt-audio-mini",
+      OpenAI::Realtime::RealtimeSessionCreateResponse::Model::TaggedSymbol
+    )
+  GPT_AUDIO_MINI_2025_10_06 =
+    T.let(
+      :"gpt-audio-mini-2025-10-06",
+      OpenAI::Realtime::RealtimeSessionCreateResponse::Model::TaggedSymbol
+    )
   end
 
   module OutputModality
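All three realtime model enums pick up the same four mini entries. Each constant resolves to a plain tagged symbol at runtime, so either spelling can be passed wherever the realtime model is specified; the session-creation call itself is not part of this diff.

OpenAI::Realtime::RealtimeSession::Model::GPT_REALTIME_MINI
# => :"gpt-realtime-mini"
OpenAI::Realtime::RealtimeSessionCreateRequest::Model::GPT_AUDIO_MINI_2025_10_06
# => :"gpt-audio-mini-2025-10-06"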
data/rbi/openai/models/responses/tool.rbi
@@ -781,7 +781,8 @@ module OpenAI
 
   # Control how much effort the model will exert to match the style and features,
   # especially facial features, of input images. This parameter is only supported
-  # for `gpt-image-1`. Supports `high` and `low`. Defaults to `low`.
+  # for `gpt-image-1`. Unsupported for `gpt-image-1-mini`. Supports `high` and
+  # `low`. Defaults to `low`.
   sig do
     returns(
       T.nilable(
@@ -946,7 +947,8 @@ module OpenAI
   background: nil,
   # Control how much effort the model will exert to match the style and features,
   # especially facial features, of input images. This parameter is only supported
-  # for `gpt-image-1`. Supports `high` and `low`. Defaults to `low`.
+  # for `gpt-image-1`. Unsupported for `gpt-image-1-mini`. Supports `high` and
+  # `low`. Defaults to `low`.
   input_fidelity: nil,
   # Optional mask for inpainting. Contains `image_url` (string, optional) and
   # `file_id` (string, optional).
@@ -1046,7 +1048,8 @@ module OpenAI
 
   # Control how much effort the model will exert to match the style and features,
   # especially facial features, of input images. This parameter is only supported
-  # for `gpt-image-1`. Supports `high` and `low`. Defaults to `low`.
+  # for `gpt-image-1`. Unsupported for `gpt-image-1-mini`. Supports `high` and
+  # `low`. Defaults to `low`.
   module InputFidelity
     extend OpenAI::Internal::Type::Enum
 
@@ -1139,6 +1142,11 @@ module OpenAI
       :"gpt-image-1",
       OpenAI::Responses::Tool::ImageGeneration::Model::TaggedSymbol
     )
+  GPT_IMAGE_1_MINI =
+    T.let(
+      :"gpt-image-1-mini",
+      OpenAI::Responses::Tool::ImageGeneration::Model::TaggedSymbol
+    )
 
   sig do
     override.returns(
data/rbi/openai/models/responses_model.rbi
@@ -78,6 +78,16 @@ module OpenAI
       :"gpt-5-codex",
       OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol
     )
+  GPT_5_PRO =
+    T.let(
+      :"gpt-5-pro",
+      OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol
+    )
+  GPT_5_PRO_2025_10_06 =
+    T.let(
+      :"gpt-5-pro-2025-10-06",
+      OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol
+    )
 
   sig do
     override.returns(
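`gpt-5-pro` and its dated snapshot join the responses-only model list alongside `gpt-5-codex`. A hedged sketch of selecting it through the Responses API; the input string is a placeholder and the `responses.create` argument set is assumed from the gem's existing surface rather than this diff.

require "openai"

client = OpenAI::Client.new # reads OPENAI_API_KEY from the environment

response = client.responses.create(
  model: :"gpt-5-pro", # or OpenAI::ResponsesModel::ResponsesOnlyModel::GPT_5_PRO
  input: "Summarize the openai gem 0.30.0 additions in one sentence." # placeholder input
)
puts response.id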
data/rbi/openai/models/video.rbi
@@ -0,0 +1,143 @@
+ # typed: strong
+
+ module OpenAI
+   module Models
+     class Video < OpenAI::Internal::Type::BaseModel
+       OrHash = T.type_alias { T.any(OpenAI::Video, OpenAI::Internal::AnyHash) }
+
+       # Unique identifier for the video job.
+       sig { returns(String) }
+       attr_accessor :id
+
+       # Unix timestamp (seconds) for when the job completed, if finished.
+       sig { returns(T.nilable(Integer)) }
+       attr_accessor :completed_at
+
+       # Unix timestamp (seconds) for when the job was created.
+       sig { returns(Integer) }
+       attr_accessor :created_at
+
+       # Error payload that explains why generation failed, if applicable.
+       sig { returns(T.nilable(OpenAI::VideoCreateError)) }
+       attr_reader :error
+
+       sig { params(error: T.nilable(OpenAI::VideoCreateError::OrHash)).void }
+       attr_writer :error
+
+       # Unix timestamp (seconds) for when the downloadable assets expire, if set.
+       sig { returns(T.nilable(Integer)) }
+       attr_accessor :expires_at
+
+       # The video generation model that produced the job.
+       sig { returns(OpenAI::VideoModel::TaggedSymbol) }
+       attr_accessor :model
+
+       # The object type, which is always `video`.
+       sig { returns(Symbol) }
+       attr_accessor :object
+
+       # Approximate completion percentage for the generation task.
+       sig { returns(Integer) }
+       attr_accessor :progress
+
+       # Identifier of the source video if this video is a remix.
+       sig { returns(T.nilable(String)) }
+       attr_accessor :remixed_from_video_id
+
+       # Duration of the generated clip in seconds.
+       sig { returns(OpenAI::VideoSeconds::TaggedSymbol) }
+       attr_accessor :seconds
+
+       # The resolution of the generated video.
+       sig { returns(OpenAI::VideoSize::TaggedSymbol) }
+       attr_accessor :size
+
+       # Current lifecycle status of the video job.
+       sig { returns(OpenAI::Video::Status::TaggedSymbol) }
+       attr_accessor :status
+
+       # Structured information describing a generated video job.
+       sig do
+         params(
+           id: String,
+           completed_at: T.nilable(Integer),
+           created_at: Integer,
+           error: T.nilable(OpenAI::VideoCreateError::OrHash),
+           expires_at: T.nilable(Integer),
+           model: OpenAI::VideoModel::OrSymbol,
+           progress: Integer,
+           remixed_from_video_id: T.nilable(String),
+           seconds: OpenAI::VideoSeconds::OrSymbol,
+           size: OpenAI::VideoSize::OrSymbol,
+           status: OpenAI::Video::Status::OrSymbol,
+           object: Symbol
+         ).returns(T.attached_class)
+       end
+       def self.new(
+         # Unique identifier for the video job.
+         id:,
+         # Unix timestamp (seconds) for when the job completed, if finished.
+         completed_at:,
+         # Unix timestamp (seconds) for when the job was created.
+         created_at:,
+         # Error payload that explains why generation failed, if applicable.
+         error:,
+         # Unix timestamp (seconds) for when the downloadable assets expire, if set.
+         expires_at:,
+         # The video generation model that produced the job.
+         model:,
+         # Approximate completion percentage for the generation task.
+         progress:,
+         # Identifier of the source video if this video is a remix.
+         remixed_from_video_id:,
+         # Duration of the generated clip in seconds.
+         seconds:,
+         # The resolution of the generated video.
+         size:,
+         # Current lifecycle status of the video job.
+         status:,
+         # The object type, which is always `video`.
+         object: :video
+       )
+       end
+
+       sig do
+         override.returns(
+           {
+             id: String,
+             completed_at: T.nilable(Integer),
+             created_at: Integer,
+             error: T.nilable(OpenAI::VideoCreateError),
+             expires_at: T.nilable(Integer),
+             model: OpenAI::VideoModel::TaggedSymbol,
+             object: Symbol,
+             progress: Integer,
+             remixed_from_video_id: T.nilable(String),
+             seconds: OpenAI::VideoSeconds::TaggedSymbol,
+             size: OpenAI::VideoSize::TaggedSymbol,
+             status: OpenAI::Video::Status::TaggedSymbol
+           }
+         )
+       end
+       def to_hash
+       end
+
+       # Current lifecycle status of the video job.
+       module Status
+         extend OpenAI::Internal::Type::Enum
+
+         TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Video::Status) }
+         OrSymbol = T.type_alias { T.any(Symbol, String) }
+
+         QUEUED = T.let(:queued, OpenAI::Video::Status::TaggedSymbol)
+         IN_PROGRESS = T.let(:in_progress, OpenAI::Video::Status::TaggedSymbol)
+         COMPLETED = T.let(:completed, OpenAI::Video::Status::TaggedSymbol)
+         FAILED = T.let(:failed, OpenAI::Video::Status::TaggedSymbol)
+
+         sig { override.returns(T::Array[OpenAI::Video::Status::TaggedSymbol]) }
+         def self.values
+         end
+       end
+     end
+   end
+ end
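A Video job moves through `queued`, `in_progress`, `completed`, and `failed`, exposing `progress` along the way. A hedged polling sketch against the new top-level `videos` resource; the `retrieve` method name is inferred from data/lib/openai/resources/videos.rb in the file list above, and the video id is a placeholder.

require "openai"

client = OpenAI::Client.new # reads OPENAI_API_KEY from the environment

video = client.videos.retrieve("video_123") # placeholder id
until %i[completed failed].include?(video.status)
  sleep 5
  video = client.videos.retrieve(video.id)
  puts "#{video.status}: #{video.progress}%"
end
warn video.error&.message if video.status == :failed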
data/rbi/openai/models/video_create_error.rbi
@@ -0,0 +1,26 @@
+ # typed: strong
+
+ module OpenAI
+   module Models
+     class VideoCreateError < OpenAI::Internal::Type::BaseModel
+       OrHash =
+         T.type_alias do
+           T.any(OpenAI::VideoCreateError, OpenAI::Internal::AnyHash)
+         end
+
+       sig { returns(String) }
+       attr_accessor :code
+
+       sig { returns(String) }
+       attr_accessor :message
+
+       sig { params(code: String, message: String).returns(T.attached_class) }
+       def self.new(code:, message:)
+       end
+
+       sig { override.returns({ code: String, message: String }) }
+       def to_hash
+       end
+     end
+   end
+ end
data/rbi/openai/models/video_create_params.rbi
@@ -0,0 +1,87 @@
+ # typed: strong
+
+ module OpenAI
+   module Models
+     class VideoCreateParams < OpenAI::Internal::Type::BaseModel
+       extend OpenAI::Internal::Type::RequestParameters::Converter
+       include OpenAI::Internal::Type::RequestParameters
+
+       OrHash =
+         T.type_alias do
+           T.any(OpenAI::VideoCreateParams, OpenAI::Internal::AnyHash)
+         end
+
+       # Text prompt that describes the video to generate.
+       sig { returns(String) }
+       attr_accessor :prompt
+
+       # Optional image reference that guides generation.
+       sig { returns(T.nilable(OpenAI::Internal::FileInput)) }
+       attr_reader :input_reference
+
+       sig { params(input_reference: OpenAI::Internal::FileInput).void }
+       attr_writer :input_reference
+
+       # The video generation model to use. Defaults to `sora-2`.
+       sig { returns(T.nilable(OpenAI::VideoModel::OrSymbol)) }
+       attr_reader :model
+
+       sig { params(model: OpenAI::VideoModel::OrSymbol).void }
+       attr_writer :model
+
+       # Clip duration in seconds. Defaults to 4 seconds.
+       sig { returns(T.nilable(OpenAI::VideoSeconds::OrSymbol)) }
+       attr_reader :seconds
+
+       sig { params(seconds: OpenAI::VideoSeconds::OrSymbol).void }
+       attr_writer :seconds
+
+       # Output resolution formatted as width x height. Defaults to 720x1280.
+       sig { returns(T.nilable(OpenAI::VideoSize::OrSymbol)) }
+       attr_reader :size
+
+       sig { params(size: OpenAI::VideoSize::OrSymbol).void }
+       attr_writer :size
+
+       sig do
+         params(
+           prompt: String,
+           input_reference: OpenAI::Internal::FileInput,
+           model: OpenAI::VideoModel::OrSymbol,
+           seconds: OpenAI::VideoSeconds::OrSymbol,
+           size: OpenAI::VideoSize::OrSymbol,
+           request_options: OpenAI::RequestOptions::OrHash
+         ).returns(T.attached_class)
+       end
+       def self.new(
+         # Text prompt that describes the video to generate.
+         prompt:,
+         # Optional image reference that guides generation.
+         input_reference: nil,
+         # The video generation model to use. Defaults to `sora-2`.
+         model: nil,
+         # Clip duration in seconds. Defaults to 4 seconds.
+         seconds: nil,
+         # Output resolution formatted as width x height. Defaults to 720x1280.
+         size: nil,
+         request_options: {}
+       )
+       end
+
+       sig do
+         override.returns(
+           {
+             prompt: String,
+             input_reference: OpenAI::Internal::FileInput,
+             model: OpenAI::VideoModel::OrSymbol,
+             seconds: OpenAI::VideoSeconds::OrSymbol,
+             size: OpenAI::VideoSize::OrSymbol,
+             request_options: OpenAI::RequestOptions
+           }
+         )
+       end
+       def to_hash
+       end
+     end
+   end
+ end
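Only `prompt` is required; `model`, `seconds`, and `size` fall back to `sora-2`, 4 seconds, and 720x1280 respectively. A hedged creation sketch: the `create` method name mirrors this params class, the prompt is a placeholder, and the `size:` literal is assumed from the documented default format.

client = OpenAI::Client.new # reads OPENAI_API_KEY from the environment

video = client.videos.create(
  prompt: "A slow dolly shot over a foggy harbor at dawn", # placeholder prompt
  model: :"sora-2",   # optional, already the default
  size: :"720x1280"   # optional, already the default; assumed enum literal
)
puts video.id # poll as in the earlier sketch until it completes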
data/rbi/openai/models/video_delete_params.rbi
@@ -0,0 +1,27 @@
+ # typed: strong
+
+ module OpenAI
+   module Models
+     class VideoDeleteParams < OpenAI::Internal::Type::BaseModel
+       extend OpenAI::Internal::Type::RequestParameters::Converter
+       include OpenAI::Internal::Type::RequestParameters
+
+       OrHash =
+         T.type_alias do
+           T.any(OpenAI::VideoDeleteParams, OpenAI::Internal::AnyHash)
+         end
+
+       sig do
+         params(request_options: OpenAI::RequestOptions::OrHash).returns(
+           T.attached_class
+         )
+       end
+       def self.new(request_options: {})
+       end
+
+       sig { override.returns({ request_options: OpenAI::RequestOptions }) }
+       def to_hash
+       end
+     end
+   end
+ end
data/rbi/openai/models/video_delete_response.rbi
@@ -0,0 +1,46 @@
+ # typed: strong
+
+ module OpenAI
+   module Models
+     class VideoDeleteResponse < OpenAI::Internal::Type::BaseModel
+       OrHash =
+         T.type_alias do
+           T.any(OpenAI::Models::VideoDeleteResponse, OpenAI::Internal::AnyHash)
+         end
+
+       # Identifier of the deleted video.
+       sig { returns(String) }
+       attr_accessor :id
+
+       # Indicates that the video resource was deleted.
+       sig { returns(T::Boolean) }
+       attr_accessor :deleted
+
+       # The object type that signals the deletion response.
+       sig { returns(Symbol) }
+       attr_accessor :object
+
+       # Confirmation payload returned after deleting a video.
+       sig do
+         params(id: String, deleted: T::Boolean, object: Symbol).returns(
+           T.attached_class
+         )
+       end
+       def self.new(
+         # Identifier of the deleted video.
+         id:,
+         # Indicates that the video resource was deleted.
+         deleted:,
+         # The object type that signals the deletion response.
+         object: :"video.deleted"
+       )
+       end
+
+       sig do
+         override.returns({ id: String, deleted: T::Boolean, object: Symbol })
+       end
+       def to_hash
+       end
+     end
+   end
+ end
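Deleting returns this small confirmation object rather than a full Video. A hedged sketch; the `delete` method name is assumed from the params/response pair above, and the id is a placeholder.

client = OpenAI::Client.new # reads OPENAI_API_KEY from the environment

confirmation = client.videos.delete("video_123") # placeholder id
confirmation.deleted # => true
confirmation.object  # => :"video.deleted"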
data/rbi/openai/models/video_download_content_params.rbi
@@ -0,0 +1,89 @@
+ # typed: strong
+
+ module OpenAI
+   module Models
+     class VideoDownloadContentParams < OpenAI::Internal::Type::BaseModel
+       extend OpenAI::Internal::Type::RequestParameters::Converter
+       include OpenAI::Internal::Type::RequestParameters
+
+       OrHash =
+         T.type_alias do
+           T.any(OpenAI::VideoDownloadContentParams, OpenAI::Internal::AnyHash)
+         end
+
+       # Which downloadable asset to return. Defaults to the MP4 video.
+       sig do
+         returns(
+           T.nilable(OpenAI::VideoDownloadContentParams::Variant::OrSymbol)
+         )
+       end
+       attr_reader :variant
+
+       sig do
+         params(
+           variant: OpenAI::VideoDownloadContentParams::Variant::OrSymbol
+         ).void
+       end
+       attr_writer :variant
+
+       sig do
+         params(
+           variant: OpenAI::VideoDownloadContentParams::Variant::OrSymbol,
+           request_options: OpenAI::RequestOptions::OrHash
+         ).returns(T.attached_class)
+       end
+       def self.new(
+         # Which downloadable asset to return. Defaults to the MP4 video.
+         variant: nil,
+         request_options: {}
+       )
+       end
+
+       sig do
+         override.returns(
+           {
+             variant: OpenAI::VideoDownloadContentParams::Variant::OrSymbol,
+             request_options: OpenAI::RequestOptions
+           }
+         )
+       end
+       def to_hash
+       end
+
+       # Which downloadable asset to return. Defaults to the MP4 video.
+       module Variant
+         extend OpenAI::Internal::Type::Enum
+
+         TaggedSymbol =
+           T.type_alias do
+             T.all(Symbol, OpenAI::VideoDownloadContentParams::Variant)
+           end
+         OrSymbol = T.type_alias { T.any(Symbol, String) }
+
+         VIDEO =
+           T.let(
+             :video,
+             OpenAI::VideoDownloadContentParams::Variant::TaggedSymbol
+           )
+         THUMBNAIL =
+           T.let(
+             :thumbnail,
+             OpenAI::VideoDownloadContentParams::Variant::TaggedSymbol
+           )
+         SPRITESHEET =
+           T.let(
+             :spritesheet,
+             OpenAI::VideoDownloadContentParams::Variant::TaggedSymbol
+           )
+
+         sig do
+           override.returns(
+             T::Array[OpenAI::VideoDownloadContentParams::Variant::TaggedSymbol]
+           )
+         end
+         def self.values
+         end
+       end
+     end
+   end
+ end
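`variant` chooses between the rendered MP4 (`:video`, the default), `:thumbnail`, and `:spritesheet`. A hedged sketch: the `download_content` method name mirrors this params class, the id is a placeholder, and no accessor is assumed on the returned content object since that depends on the gem's HTTP layer.

client = OpenAI::Client.new # reads OPENAI_API_KEY from the environment

thumbnail = client.videos.download_content("video_123", variant: :thumbnail) # placeholder id
# `thumbnail` holds the downloaded asset; check the gem README for the exact
# content wrapper before assuming how to read or write it.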
data/rbi/openai/models/video_list_params.rbi
@@ -0,0 +1,91 @@
+ # typed: strong
+
+ module OpenAI
+   module Models
+     class VideoListParams < OpenAI::Internal::Type::BaseModel
+       extend OpenAI::Internal::Type::RequestParameters::Converter
+       include OpenAI::Internal::Type::RequestParameters
+
+       OrHash =
+         T.type_alias do
+           T.any(OpenAI::VideoListParams, OpenAI::Internal::AnyHash)
+         end
+
+       # Identifier for the last item from the previous pagination request
+       sig { returns(T.nilable(String)) }
+       attr_reader :after
+
+       sig { params(after: String).void }
+       attr_writer :after
+
+       # Number of items to retrieve
+       sig { returns(T.nilable(Integer)) }
+       attr_reader :limit
+
+       sig { params(limit: Integer).void }
+       attr_writer :limit
+
+       # Sort order of results by timestamp. Use `asc` for ascending order or `desc` for
+       # descending order.
+       sig { returns(T.nilable(OpenAI::VideoListParams::Order::OrSymbol)) }
+       attr_reader :order
+
+       sig { params(order: OpenAI::VideoListParams::Order::OrSymbol).void }
+       attr_writer :order
+
+       sig do
+         params(
+           after: String,
+           limit: Integer,
+           order: OpenAI::VideoListParams::Order::OrSymbol,
+           request_options: OpenAI::RequestOptions::OrHash
+         ).returns(T.attached_class)
+       end
+       def self.new(
+         # Identifier for the last item from the previous pagination request
+         after: nil,
+         # Number of items to retrieve
+         limit: nil,
+         # Sort order of results by timestamp. Use `asc` for ascending order or `desc` for
+         # descending order.
+         order: nil,
+         request_options: {}
+       )
+       end
+
+       sig do
+         override.returns(
+           {
+             after: String,
+             limit: Integer,
+             order: OpenAI::VideoListParams::Order::OrSymbol,
+             request_options: OpenAI::RequestOptions
+           }
+         )
+       end
+       def to_hash
+       end
+
+       # Sort order of results by timestamp. Use `asc` for ascending order or `desc` for
+       # descending order.
+       module Order
+         extend OpenAI::Internal::Type::Enum
+
+         TaggedSymbol =
+           T.type_alias { T.all(Symbol, OpenAI::VideoListParams::Order) }
+         OrSymbol = T.type_alias { T.any(Symbol, String) }
+
+         ASC = T.let(:asc, OpenAI::VideoListParams::Order::TaggedSymbol)
+         DESC = T.let(:desc, OpenAI::VideoListParams::Order::TaggedSymbol)
+
+         sig do
+           override.returns(
+             T::Array[OpenAI::VideoListParams::Order::TaggedSymbol]
+           )
+         end
+         def self.values
+         end
+       end
+     end
+   end
+ end
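Listing is cursor-based (`after` plus `limit`), with `order` sorting results by timestamp. A hedged sketch: the `list` method name mirrors this params class, and the pagination helper shown follows the gem's usual generated page objects, which is an assumption here.

client = OpenAI::Client.new # reads OPENAI_API_KEY from the environment

page = client.videos.list(limit: 10, order: :desc)
page.auto_paging_each do |video| # `auto_paging_each` assumed from the gem's page objects
  puts "#{video.id} #{video.status} #{video.progress}%"
end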