openai 0.12.0 → 0.13.1
This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +21 -0
- data/README.md +1 -1
- data/lib/openai/helpers/structured_output/json_schema_converter.rb +34 -10
- data/lib/openai/models/audio/transcription.rb +4 -4
- data/lib/openai/models/audio/transcription_verbose.rb +4 -4
- data/lib/openai/models/eval_create_params.rb +50 -5
- data/lib/openai/models/evals/create_eval_completions_run_data_source.rb +50 -5
- data/lib/openai/models/evals/run_cancel_response.rb +48 -5
- data/lib/openai/models/evals/run_create_params.rb +50 -5
- data/lib/openai/models/evals/run_create_response.rb +48 -5
- data/lib/openai/models/evals/run_list_response.rb +48 -5
- data/lib/openai/models/evals/run_retrieve_response.rb +48 -5
- data/lib/openai/models/file_object.rb +5 -4
- data/lib/openai/models/graders/label_model_grader.rb +48 -5
- data/lib/openai/models/graders/score_model_grader.rb +48 -5
- data/lib/openai/models/responses/response_input_file.rb +9 -1
- data/lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb +5 -5
- data/lib/openai/models/responses/response_mcp_call_arguments_done_event.rb +5 -5
- data/lib/openai/models/responses/response_output_text_annotation_added_event.rb +5 -5
- data/lib/openai/models/responses/response_stream_event.rb +3 -3
- data/lib/openai/models/responses/tool.rb +9 -1
- data/lib/openai/version.rb +1 -1
- data/rbi/openai/helpers/structured_output/json_schema_converter.rbi +4 -0
- data/rbi/openai/models/audio/transcription.rbi +4 -4
- data/rbi/openai/models/audio/transcription_verbose.rbi +4 -6
- data/rbi/openai/models/eval_create_params.rbi +76 -7
- data/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi +76 -7
- data/rbi/openai/models/evals/run_cancel_response.rbi +70 -5
- data/rbi/openai/models/evals/run_create_params.rbi +76 -7
- data/rbi/openai/models/evals/run_create_response.rbi +70 -5
- data/rbi/openai/models/evals/run_list_response.rbi +70 -5
- data/rbi/openai/models/evals/run_retrieve_response.rbi +70 -5
- data/rbi/openai/models/file_object.rbi +7 -6
- data/rbi/openai/models/graders/label_model_grader.rbi +74 -7
- data/rbi/openai/models/graders/score_model_grader.rbi +74 -7
- data/rbi/openai/models/responses/response_input_file.rbi +11 -0
- data/rbi/openai/models/responses/response_mcp_call_arguments_delta_event.rbi +3 -3
- data/rbi/openai/models/responses/response_mcp_call_arguments_done_event.rbi +3 -3
- data/rbi/openai/models/responses/response_output_text_annotation_added_event.rbi +3 -3
- data/rbi/openai/models/responses/tool.rbi +12 -1
- data/sig/openai/models/audio/transcription.rbs +4 -4
- data/sig/openai/models/audio/transcription_verbose.rbs +4 -4
- data/sig/openai/models/eval_create_params.rbs +29 -0
- data/sig/openai/models/evals/create_eval_completions_run_data_source.rbs +29 -0
- data/sig/openai/models/evals/run_cancel_response.rbs +33 -0
- data/sig/openai/models/evals/run_create_params.rbs +33 -0
- data/sig/openai/models/evals/run_create_response.rbs +33 -0
- data/sig/openai/models/evals/run_list_response.rbs +33 -0
- data/sig/openai/models/evals/run_retrieve_response.rbs +33 -0
- data/sig/openai/models/file_object.rbs +2 -0
- data/sig/openai/models/graders/label_model_grader.rbs +29 -0
- data/sig/openai/models/graders/score_model_grader.rbs +29 -0
- data/sig/openai/models/responses/response_input_file.rbs +7 -0
- data/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs +4 -4
- data/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs +4 -4
- data/sig/openai/models/responses/response_output_text_annotation_added_event.rbs +4 -4
- data/sig/openai/models/responses/tool.rbs +9 -2
- metadata +2 -2
data/rbi/openai/models/eval_create_params.rbi

@@ -474,13 +474,15 @@ module OpenAI
     )
   end
 
-  #
+  # Inputs to the model - can contain template strings.
   sig do
     returns(
       T.any(
         String,
         OpenAI::Responses::ResponseInputText,
-        OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText
+        OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText,
+        OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage,
+        T::Array[T.anything]
       )
     )
   end
@@ -524,7 +526,9 @@ module OpenAI
       T.any(
         String,
        OpenAI::Responses::ResponseInputText::OrHash,
-        OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText::OrHash
+        OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText::OrHash,
+        OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage::OrHash,
+        T::Array[T.anything]
       ),
     role:
       OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol,
@@ -533,7 +537,7 @@ module OpenAI
   ).returns(T.attached_class)
 end
 def self.new(
-  #
+  # Inputs to the model - can contain template strings.
   content:,
   # The role of the message input. One of `user`, `assistant`, `system`, or
   # `developer`.
@@ -550,7 +554,9 @@ module OpenAI
       T.any(
         String,
         OpenAI::Responses::ResponseInputText,
-        OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText
+        OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText,
+        OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage,
+        T::Array[T.anything]
       ),
     role:
       OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol,
@@ -562,7 +568,7 @@ module OpenAI
   def to_hash
   end
 
-  #
+  # Inputs to the model - can contain template strings.
   module Content
     extend OpenAI::Internal::Type::Union
 
@@ -571,7 +577,9 @@ module OpenAI
       T.any(
        String,
        OpenAI::Responses::ResponseInputText,
-        OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText
+        OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText,
+        OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage,
+        T::Array[T.anything]
      )
    end
 
@@ -609,6 +617,59 @@ module OpenAI
     end
   end
 
+  class InputImage < OpenAI::Internal::Type::BaseModel
+    OrHash =
+      T.type_alias do
+        T.any(
+          OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage,
+          OpenAI::Internal::AnyHash
+        )
+      end
+
+    # The URL of the image input.
+    sig { returns(String) }
+    attr_accessor :image_url
+
+    # The type of the image input. Always `input_image`.
+    sig { returns(Symbol) }
+    attr_accessor :type
+
+    # The detail level of the image to be sent to the model. One of `high`, `low`, or
+    # `auto`. Defaults to `auto`.
+    sig { returns(T.nilable(String)) }
+    attr_reader :detail
+
+    sig { params(detail: String).void }
+    attr_writer :detail
+
+    # An image input to the model.
+    sig do
+      params(
+        image_url: String,
+        detail: String,
+        type: Symbol
+      ).returns(T.attached_class)
+    end
+    def self.new(
+      # The URL of the image input.
+      image_url:,
+      # The detail level of the image to be sent to the model. One of `high`, `low`, or
+      # `auto`. Defaults to `auto`.
+      detail: nil,
+      # The type of the image input. Always `input_image`.
+      type: :input_image
+    )
+    end
+
+    sig do
+      override.returns(
+        { image_url: String, type: Symbol, detail: String }
+      )
+    end
+    def to_hash
+    end
+  end
+
   sig do
     override.returns(
       T::Array[
@@ -618,6 +679,14 @@ module OpenAI
   end
   def self.variants
   end
+
+  AnArrayOfInputTextAndInputImageArray =
+    T.let(
+      OpenAI::Internal::Type::ArrayOf[
+        OpenAI::Internal::Type::Unknown
+      ],
+      OpenAI::Internal::Type::Converter
+    )
 end
 
 # The role of the message input. One of `user`, `assistant`, `system`, or
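The hunks above add two new members to the eval message content union: an InputImage model and a bare array (`T::Array[T.anything]`). A minimal sketch of constructing the new InputImage content directly, based only on the signatures shown above; the URL is a placeholder and the surrounding eval/testing-criterion setup is omitted:

require "openai"

# Hypothetical image content for a label-model grader message. Fields follow
# the InputImage signature added in this release; the URL is an example value.
image_part =
  OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::InputImage.new(
    image_url: "https://example.com/chart.png", # The URL of the image input.
    detail: "auto"                              # Optional: "high", "low", or "auto".
  )

image_part.type    # => :input_image (defaulted by the constructor)
image_part.to_hash # => { image_url: "https://example.com/chart.png", type: :input_image, detail: "auto" }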
data/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi

@@ -515,13 +515,15 @@ module OpenAI
     )
   end
 
-  #
+  # Inputs to the model - can contain template strings.
   sig do
     returns(
       T.any(
         String,
         OpenAI::Responses::ResponseInputText,
-        OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText
+        OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText,
+        OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage,
+        T::Array[T.anything]
       )
     )
   end
@@ -565,7 +567,9 @@ module OpenAI
       T.any(
        String,
        OpenAI::Responses::ResponseInputText::OrHash,
-        OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText::OrHash
+        OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText::OrHash,
+        OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage::OrHash,
+        T::Array[T.anything]
       ),
     role:
       OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol,
@@ -574,7 +578,7 @@ module OpenAI
   ).returns(T.attached_class)
 end
 def self.new(
-  #
+  # Inputs to the model - can contain template strings.
   content:,
   # The role of the message input. One of `user`, `assistant`, `system`, or
   # `developer`.
@@ -591,7 +595,9 @@ module OpenAI
      T.any(
        String,
        OpenAI::Responses::ResponseInputText,
-        OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText
+        OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText,
+        OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage,
+        T::Array[T.anything]
      ),
    role:
      OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol,
@@ -603,7 +609,7 @@ module OpenAI
   def to_hash
   end
 
-  #
+  # Inputs to the model - can contain template strings.
   module Content
     extend OpenAI::Internal::Type::Union
 
@@ -612,7 +618,9 @@ module OpenAI
      T.any(
        String,
        OpenAI::Responses::ResponseInputText,
-        OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText
+        OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText,
+        OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage,
+        T::Array[T.anything]
      )
    end
 
@@ -652,6 +660,59 @@ module OpenAI
     end
   end
 
+  class InputImage < OpenAI::Internal::Type::BaseModel
+    OrHash =
+      T.type_alias do
+        T.any(
+          OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::InputImage,
+          OpenAI::Internal::AnyHash
+        )
+      end
+
+    # The URL of the image input.
+    sig { returns(String) }
+    attr_accessor :image_url
+
+    # The type of the image input. Always `input_image`.
+    sig { returns(Symbol) }
+    attr_accessor :type
+
+    # The detail level of the image to be sent to the model. One of `high`, `low`, or
+    # `auto`. Defaults to `auto`.
+    sig { returns(T.nilable(String)) }
+    attr_reader :detail
+
+    sig { params(detail: String).void }
+    attr_writer :detail
+
+    # An image input to the model.
+    sig do
+      params(
+        image_url: String,
+        detail: String,
+        type: Symbol
+      ).returns(T.attached_class)
+    end
+    def self.new(
+      # The URL of the image input.
+      image_url:,
+      # The detail level of the image to be sent to the model. One of `high`, `low`, or
+      # `auto`. Defaults to `auto`.
+      detail: nil,
+      # The type of the image input. Always `input_image`.
+      type: :input_image
+    )
+    end
+
+    sig do
+      override.returns(
+        { image_url: String, type: Symbol, detail: String }
+      )
+    end
+    def to_hash
+    end
+  end
+
   sig do
     override.returns(
       T::Array[
@@ -661,6 +722,14 @@ module OpenAI
   end
   def self.variants
   end
+
+  AnArrayOfInputTextAndInputImageArray =
+    T.let(
+      OpenAI::Internal::Type::ArrayOf[
+        OpenAI::Internal::Type::Unknown
+      ],
+      OpenAI::Internal::Type::Converter
+    )
 end
 
 # The role of the message input. One of `user`, `assistant`, `system`, or
data/rbi/openai/models/evals/run_cancel_response.rbi

@@ -749,7 +749,7 @@ module OpenAI
     )
   end
 
-  #
+  # Inputs to the model - can contain template strings.
   sig do
     returns(
       OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants
@@ -795,7 +795,9 @@ module OpenAI
      T.any(
        String,
        OpenAI::Responses::ResponseInputText::OrHash,
-        OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash
+        OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash,
+        OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage::OrHash,
+        T::Array[T.anything]
      ),
    role:
      OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::OrSymbol,
@@ -804,7 +806,7 @@ module OpenAI
   ).returns(T.attached_class)
 end
 def self.new(
-  #
+  # Inputs to the model - can contain template strings.
   content:,
   # The role of the message input. One of `user`, `assistant`, `system`, or
   # `developer`.
@@ -829,7 +831,7 @@ module OpenAI
   def to_hash
   end
 
-  #
+  # Inputs to the model - can contain template strings.
   module Content
     extend OpenAI::Internal::Type::Union
 
@@ -838,7 +840,9 @@ module OpenAI
      T.any(
        String,
        OpenAI::Responses::ResponseInputText,
-        OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText
+        OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText,
+        OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage,
+        T::Array[T.anything]
      )
    end
 
@@ -878,6 +882,59 @@ module OpenAI
     end
   end
 
+  class InputImage < OpenAI::Internal::Type::BaseModel
+    OrHash =
+      T.type_alias do
+        T.any(
+          OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage,
+          OpenAI::Internal::AnyHash
+        )
+      end
+
+    # The URL of the image input.
+    sig { returns(String) }
+    attr_accessor :image_url
+
+    # The type of the image input. Always `input_image`.
+    sig { returns(Symbol) }
+    attr_accessor :type
+
+    # The detail level of the image to be sent to the model. One of `high`, `low`, or
+    # `auto`. Defaults to `auto`.
+    sig { returns(T.nilable(String)) }
+    attr_reader :detail
+
+    sig { params(detail: String).void }
+    attr_writer :detail
+
+    # An image input to the model.
+    sig do
+      params(
+        image_url: String,
+        detail: String,
+        type: Symbol
+      ).returns(T.attached_class)
+    end
+    def self.new(
+      # The URL of the image input.
+      image_url:,
+      # The detail level of the image to be sent to the model. One of `high`, `low`, or
+      # `auto`. Defaults to `auto`.
+      detail: nil,
+      # The type of the image input. Always `input_image`.
+      type: :input_image
+    )
+    end
+
+    sig do
+      override.returns(
+        { image_url: String, type: Symbol, detail: String }
+      )
+    end
+    def to_hash
+    end
+  end
+
   sig do
     override.returns(
       T::Array[
@@ -887,6 +944,14 @@ module OpenAI
   end
   def self.variants
   end
+
+  AnArrayOfInputTextAndInputImageArray =
+    T.let(
+      OpenAI::Internal::Type::ArrayOf[
+        OpenAI::Internal::Type::Unknown
+      ],
+      OpenAI::Internal::Type::Converter
+    )
 end
 
 # The role of the message input. One of `user`, `assistant`, `system`, or
data/rbi/openai/models/evals/run_create_params.rbi

@@ -693,13 +693,15 @@ module OpenAI
     )
   end
 
-  #
+  # Inputs to the model - can contain template strings.
   sig do
     returns(
       T.any(
         String,
         OpenAI::Responses::ResponseInputText,
-        OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText
+        OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText,
+        OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage,
+        T::Array[T.anything]
       )
     )
   end
@@ -743,7 +745,9 @@ module OpenAI
      T.any(
        String,
        OpenAI::Responses::ResponseInputText::OrHash,
-        OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash
+        OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash,
+        OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage::OrHash,
+        T::Array[T.anything]
      ),
    role:
      OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol,
@@ -752,7 +756,7 @@ module OpenAI
   ).returns(T.attached_class)
 end
 def self.new(
-  #
+  # Inputs to the model - can contain template strings.
   content:,
   # The role of the message input. One of `user`, `assistant`, `system`, or
   # `developer`.
@@ -769,7 +773,9 @@ module OpenAI
      T.any(
        String,
        OpenAI::Responses::ResponseInputText,
-        OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText
+        OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText,
+        OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage,
+        T::Array[T.anything]
      ),
    role:
      OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol,
@@ -781,7 +787,7 @@ module OpenAI
   def to_hash
   end
 
-  #
+  # Inputs to the model - can contain template strings.
   module Content
     extend OpenAI::Internal::Type::Union
 
@@ -790,7 +796,9 @@ module OpenAI
      T.any(
        String,
        OpenAI::Responses::ResponseInputText,
-        OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText
+        OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText,
+        OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage,
+        T::Array[T.anything]
      )
    end
 
@@ -830,6 +838,59 @@ module OpenAI
     end
   end
 
+  class InputImage < OpenAI::Internal::Type::BaseModel
+    OrHash =
+      T.type_alias do
+        T.any(
+          OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::InputImage,
+          OpenAI::Internal::AnyHash
+        )
+      end
+
+    # The URL of the image input.
+    sig { returns(String) }
+    attr_accessor :image_url
+
+    # The type of the image input. Always `input_image`.
+    sig { returns(Symbol) }
+    attr_accessor :type
+
+    # The detail level of the image to be sent to the model. One of `high`, `low`, or
+    # `auto`. Defaults to `auto`.
+    sig { returns(T.nilable(String)) }
+    attr_reader :detail
+
+    sig { params(detail: String).void }
+    attr_writer :detail
+
+    # An image input to the model.
+    sig do
+      params(
+        image_url: String,
+        detail: String,
+        type: Symbol
+      ).returns(T.attached_class)
+    end
+    def self.new(
+      # The URL of the image input.
+      image_url:,
+      # The detail level of the image to be sent to the model. One of `high`, `low`, or
+      # `auto`. Defaults to `auto`.
+      detail: nil,
+      # The type of the image input. Always `input_image`.
+      type: :input_image
+    )
+    end
+
+    sig do
+      override.returns(
+        { image_url: String, type: Symbol, detail: String }
+      )
+    end
+    def to_hash
+    end
+  end
+
   sig do
     override.returns(
       T::Array[
@@ -839,6 +900,14 @@ module OpenAI
   end
   def self.variants
   end
+
+  AnArrayOfInputTextAndInputImageArray =
+    T.let(
+      OpenAI::Internal::Type::ArrayOf[
+        OpenAI::Internal::Type::Unknown
+      ],
+      OpenAI::Internal::Type::Converter
+    )
 end
 
 # The role of the message input. One of `user`, `assistant`, `system`, or
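The run-creation params gain the same union members, and since the `OrHash` variants are accepted in `params`, the new content can also be supplied as plain hashes when building a responses run data source template. The sketch below shows one such message; the `T::Array[T.anything]` variant (paired with the `AnArrayOfInputTextAndInputImageArray` converter) is what allows a mixed list of text and image parts. The part key names other than `image_url`, `detail`, and `type: "input_image"` are assumptions of typical usage, not taken from this diff:

# Hedged sketch: a template message whose content mixes text and image parts.
# The enclosing run / data-source fields are omitted and assumed; only the
# input_image fields come from the signatures added in this release.
template_message = {
  role: "user",
  content: [
    { type: "input_text", text: "Summarize what this screenshot shows." },              # assumed input_text shape
    { type: "input_image", image_url: "https://example.com/screenshot.png", detail: "low" }
  ]
}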
data/rbi/openai/models/evals/run_create_response.rbi

@@ -749,7 +749,7 @@ module OpenAI
     )
   end
 
-  #
+  # Inputs to the model - can contain template strings.
   sig do
     returns(
       OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants
@@ -795,7 +795,9 @@ module OpenAI
      T.any(
        String,
        OpenAI::Responses::ResponseInputText::OrHash,
-        OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash
+        OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash,
+        OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage::OrHash,
+        T::Array[T.anything]
      ),
    role:
      OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::OrSymbol,
@@ -804,7 +806,7 @@ module OpenAI
   ).returns(T.attached_class)
 end
 def self.new(
-  #
+  # Inputs to the model - can contain template strings.
   content:,
   # The role of the message input. One of `user`, `assistant`, `system`, or
   # `developer`.
@@ -829,7 +831,7 @@ module OpenAI
   def to_hash
   end
 
-  #
+  # Inputs to the model - can contain template strings.
   module Content
     extend OpenAI::Internal::Type::Union
 
@@ -838,7 +840,9 @@ module OpenAI
      T.any(
        String,
        OpenAI::Responses::ResponseInputText,
-        OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText
+        OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText,
+        OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage,
+        T::Array[T.anything]
      )
    end
 
@@ -878,6 +882,59 @@ module OpenAI
     end
   end
 
+  class InputImage < OpenAI::Internal::Type::BaseModel
+    OrHash =
+      T.type_alias do
+        T.any(
+          OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::InputImage,
+          OpenAI::Internal::AnyHash
+        )
+      end
+
+    # The URL of the image input.
+    sig { returns(String) }
+    attr_accessor :image_url
+
+    # The type of the image input. Always `input_image`.
+    sig { returns(Symbol) }
+    attr_accessor :type
+
+    # The detail level of the image to be sent to the model. One of `high`, `low`, or
+    # `auto`. Defaults to `auto`.
+    sig { returns(T.nilable(String)) }
+    attr_reader :detail
+
+    sig { params(detail: String).void }
+    attr_writer :detail
+
+    # An image input to the model.
+    sig do
+      params(
+        image_url: String,
+        detail: String,
+        type: Symbol
+      ).returns(T.attached_class)
+    end
+    def self.new(
+      # The URL of the image input.
+      image_url:,
+      # The detail level of the image to be sent to the model. One of `high`, `low`, or
+      # `auto`. Defaults to `auto`.
+      detail: nil,
+      # The type of the image input. Always `input_image`.
+      type: :input_image
+    )
+    end
+
+    sig do
+      override.returns(
+        { image_url: String, type: Symbol, detail: String }
+      )
+    end
+    def to_hash
+    end
+  end
+
   sig do
     override.returns(
       T::Array[
@@ -887,6 +944,14 @@ module OpenAI
   end
   def self.variants
   end
+
+  AnArrayOfInputTextAndInputImageArray =
+    T.let(
+      OpenAI::Internal::Type::ArrayOf[
+        OpenAI::Internal::Type::Unknown
+      ],
+      OpenAI::Internal::Type::Converter
+    )
 end
 
 # The role of the message input. One of `user`, `assistant`, `system`, or