instructor-rb 0.1.2 → 0.1.3

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: c5f3b630f135c5ab67b275814d8986b732a0deac2a8cb751189444d5774ab169
4
- data.tar.gz: cc25330573de9056fdf7e6c5f14d4fb143777b9f26a7d085e00499df6610c8da
3
+ metadata.gz: f6bc3d3377404642d01aeee121baa7d16b02c279dd966908bb22b3a3bd776d36
4
+ data.tar.gz: 703cb4a43ce2ed3878008eca696059739dd458db97d1de4a6eb12b8487adbda0
5
5
  SHA512:
6
- metadata.gz: 90ce22b6ca7bdc23a063b7fcee1329a194cc493b71803902d2455d0c22d105de7506c7387f6b0e7b682405493dc7eb9c7da68e6da68feedb89bdbcbea598cea2
7
- data.tar.gz: f6ebf426c755b1d55cb809d75c56a38e5861515419e61e235097c2033729abf8e7e9df200146844c742fcc17fb8b984b4b6cd11d751927a1d93760db321f670c
6
+ metadata.gz: d26bb4abc2156e60dd5f7f1d4a76f9a737b8ce31d4a58962a509c0882548e7cda4f90b18e7ccfb26a36b5dec53b21c3a99f8388d68ad6e6e2105906812ac8e6f
7
+ data.tar.gz: 3f2845af3d88a1b581ba09c5a2d1661101670a625abc499972af00391f1a49f604b4043177dcaf687026255773a0df23bd7b981dae6138aa455cf624e134d1c4
data/CHANGELOG.md CHANGED
@@ -1,3 +1,8 @@
1
+ ## [0.1.3] - 2024-05-22
2
+ - Bumped OpenAI client version.
3
+ - Laying the groundwork for more modes. See https://python.useinstructor.com/concepts/patching/ for more information.
4
+ - Allow the OpenAI client to be used normally when you only want to use other client features.
5
+
1
6
  ## [0.1.2] - 2024-05-17
2
7
  - Improved the ability to customize the function name and the LLM function call description (instructions).
3
8
 
@@ -0,0 +1,22 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'ostruct'
4
+
5
+ module Instructor
6
+ # This module defines constants related to different modes of operation.
7
+ # It provides options for tool behavior, function types, and JSON modes.
8
+ # Currently supported modes are:
9
+ # - tools: select between function, auto, required, and none.
10
+ # more modes will be added in the near future.
11
+ module Mode
12
+ tool_options = %w[function auto required none].index_by(&:itself)
13
+ TOOL_BEHAVIOR = OpenStruct.new(tool_options)
14
+
15
+ FUNCTIONS = 'function_call'
16
+ PARALLEL_TOOLS = 'parallel_tool_call'
17
+ TOOLS = TOOL_BEHAVIOR
18
+ JSON = 'json_mode'
19
+ MD_JSON = 'markdown_json_mode'
20
+ JSON_SCHEMA = 'json_schema_mode'
21
+ end
22
+ end
@@ -29,6 +29,8 @@ module Instructor
29
29
  # @param validation_context [Hash] The validation context for the parameters. Optional.
30
30
  # @return [Object] The processed response.
31
31
  def chat(parameters:, response_model: nil, max_retries: 0, validation_context: nil)
32
+ return json_post(path: '/chat/completions', parameters:) if response_model.nil?
33
+
32
34
  with_retries(max_retries, [JSON::ParserError, Instructor::ValidationError, Faraday::ParsingError]) do
33
35
  model = determine_model(response_model)
34
36
  function = build_function(model)
@@ -46,7 +48,22 @@ module Instructor
46
48
  # @return [Hash] The prepared parameters.
47
49
  def prepare_parameters(parameters, validation_context, function)
48
50
  parameters = apply_validation_context(parameters, validation_context)
49
- parameters.merge(tools: [function])
51
+ parameters.merge!(tools: [function])
52
+ tool_choice = resolve_tool_choice(function)
53
+ parameters.merge!(tool_choice:)
54
+ end
55
+
56
+ def resolve_tool_choice(function)
57
+ case Instructor.mode
58
+ when Instructor::Mode::TOOLS.function
59
+ { type: 'function', function: { name: function[:function][:name] } }
60
+ when Instructor::Mode::TOOLS.auto
61
+ 'auto'
62
+ when Instructor::Mode::TOOLS.required
63
+ 'required'
64
+ when Instructor::Mode::TOOLS.none
65
+ 'none'
66
+ end
50
67
  end
51
68
 
52
69
  # Processes the API response.
@@ -56,7 +73,11 @@ module Instructor
56
73
  # @return [Object] The processed response.
57
74
  def process_response(response, model)
58
75
  parsed_response = Response.new(response).parse
59
- iterable? ? process_multiple_responses(parsed_response, model) : process_single_response(parsed_response, model)
76
+ if iterable?(parsed_response)
77
+ process_multiple_responses(parsed_response, model)
78
+ else
79
+ process_single_response(parsed_response, model)
80
+ end
60
81
  end
61
82
 
62
83
  # Processes multiple responses from the API.
@@ -84,7 +105,7 @@ module Instructor
84
105
  # Determines the response model based on the provided value.
85
106
  #
86
107
  # @param response_model [Class] The response model class or typed array.
87
- # @return [Class] The determined response model class.
108
+ # @return [Class] The response model.
88
109
  def determine_model(response_model)
89
110
  if response_model.is_a?(T::Types::TypedArray)
90
111
  @iterable = true
@@ -146,8 +167,8 @@ module Instructor
146
167
  # Checks if the response is iterable.
147
168
  #
148
169
  # @return [Boolean] `true` if the response is iterable, `false` otherwise.
149
- def iterable?
150
- @iterable
170
+ def iterable?(response)
171
+ @iterable && response.is_a?(Array)
151
172
  end
152
173
  end
153
174
  end
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module Instructor
4
- VERSION = '0.1.2'
4
+ VERSION = '0.1.3'
5
5
  end
data/lib/instructor.rb CHANGED
@@ -6,23 +6,32 @@ require 'active_support/all'
6
6
  require_relative 'instructor/version'
7
7
  require_relative 'instructor/openai/patch'
8
8
  require_relative 'instructor/openai/response'
9
+ require_relative 'instructor/mode'
9
10
 
10
11
  # Instructor makes it easy to reliably get structured data like JSON from Large Language Models (LLMs)
11
12
  # like GPT-3.5, GPT-4, GPT-4-Vision
12
13
  module Instructor
14
+ @mode = nil
15
+
13
16
  class Error < ::StandardError; end
14
17
 
15
18
  # The ValidationError class represents an error that occurs during validation.
16
19
  class ValidationError < ::StandardError; end
17
20
 
21
+ def self.mode
22
+ @mode
23
+ end
24
+
18
25
  # Patches the OpenAI client to add the following functionality:
19
26
  # - Retries on exceptions
20
27
  # - Accepts and validates a response model
21
28
  # - Accepts a validation_context argument
22
29
  #
23
30
  # @param openai_client [OpenAI::Client] The OpenAI client to be patched.
31
+ # @param mode [Symbol] The mode to be used. Default is `Instructor::Mode::TOOLS.function`.
24
32
  # @return [OpenAI::Client] The patched OpenAI client.
25
- def self.patch(openai_client)
33
+ def self.patch(openai_client, mode: Instructor::Mode::TOOLS.function)
34
+ @mode = mode
26
35
  openai_client.prepend(Instructor::OpenAI::Patch)
27
36
  end
28
37
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: instructor-rb
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.1.2
4
+ version: 0.1.3
5
5
  platform: ruby
6
6
  authors:
7
7
  - Sergio Bayona
@@ -9,7 +9,7 @@ authors:
9
9
  autorequire:
10
10
  bindir: bin
11
11
  cert_chain: []
12
- date: 2024-05-17 00:00:00.000000000 Z
12
+ date: 2024-05-22 00:00:00.000000000 Z
13
13
  dependencies:
14
14
  - !ruby/object:Gem::Dependency
15
15
  name: activesupport
@@ -45,14 +45,14 @@ dependencies:
45
45
  requirements:
46
46
  - - "~>"
47
47
  - !ruby/object:Gem::Version
48
- version: '6'
48
+ version: '7'
49
49
  type: :runtime
50
50
  prerelease: false
51
51
  version_requirements: !ruby/object:Gem::Requirement
52
52
  requirements:
53
53
  - - "~>"
54
54
  - !ruby/object:Gem::Version
55
- version: '6'
55
+ version: '7'
56
56
  - !ruby/object:Gem::Dependency
57
57
  name: pry-byebug
58
58
  requirement: !ruby/object:Gem::Requirement
@@ -215,6 +215,7 @@ files:
215
215
  - ellipsis.Dockerfile
216
216
  - ellipsis.yaml
217
217
  - lib/instructor.rb
218
+ - lib/instructor/mode.rb
218
219
  - lib/instructor/openai/patch.rb
219
220
  - lib/instructor/openai/response.rb
220
221
  - lib/instructor/version.rb