instructor-rb 0.1.1 → 0.1.3

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: 5baca74deed1078f1695501139db2033ca2cb4b1e0d414ba9898b13100263bed
-   data.tar.gz: b845048bd4b712181eab322d94ba20fd64aae15eb3949560028ff3a4058d594c
+   metadata.gz: f6bc3d3377404642d01aeee121baa7d16b02c279dd966908bb22b3a3bd776d36
+   data.tar.gz: 703cb4a43ce2ed3878008eca696059739dd458db97d1de4a6eb12b8487adbda0
  SHA512:
-   metadata.gz: a949d892bf2fef53c9c34fd06da30b143e08acdfc8967e428b262eedc7eabcc7147421f1b41dcde2007fe58677c808a1ca6bb9dc1be72a4f9c20c3a5474a91bb
-   data.tar.gz: ccfc2bf5c2c702b226972895eb290c68f2a625b688936979d9f5093e39e185f1c8423aedc4f40537b3b1ad2d313ea73e4f058491bbacadb075ad3cca1cacaae7
+   metadata.gz: d26bb4abc2156e60dd5f7f1d4a76f9a737b8ce31d4a58962a509c0882548e7cda4f90b18e7ccfb26a36b5dec53b21c3a99f8388d68ad6e6e2105906812ac8e6f
+   data.tar.gz: 3f2845af3d88a1b581ba09c5a2d1661101670a625abc499972af00391f1a49f604b4043177dcaf687026255773a0df23bd7b981dae6138aa455cf624e134d1c4
data/CHANGELOG.md CHANGED
@@ -1,3 +1,11 @@
+ ## [0.1.3] - 2024-05-22
+ - Bumped the OpenAI client version.
+ - Laying the groundwork for more modes. See https://python.useinstructor.com/concepts/patching/ for more information.
+ - Allow the OpenAI client to be used normally when you just want to use other client features.
+
+ ## [0.1.2] - 2024-05-17
+ - Improved the ability to customize the function name and the LLM function call description (instructions).
+
  ## [0.1.1] - 2024-05-07
  - Improved documentation in /docs folder.
  - Readme updates.
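
As a quick orientation for the 0.1.2/0.1.3 entries above, here is a hedged usage sketch. Only `Instructor.patch`, the `mode:` keyword, the `response_model:` keyword, and the pass-through behaviour when `response_model` is omitted come from this diff; the `UserDetail` model, the access-token handling, and the message contents are illustrative assumptions.

# Sketch only: UserDetail is a hypothetical response model defined with the
# gem's schema DSL (not shown here); prompts and token handling are assumptions.
require 'openai'
require 'instructor'

client = Instructor.patch(OpenAI::Client, mode: Instructor::Mode::TOOLS.function)
                   .new(access_token: ENV['OPENAI_API_KEY'])

# With a response_model, the patched #chat validates the LLM output against the model.
user = client.chat(
  parameters: {
    model: 'gpt-3.5-turbo',
    messages: [{ role: 'user', content: 'Extract: Jason is 25 years old' }]
  },
  response_model: UserDetail
)

# Without a response_model, the call now falls straight through to the regular
# chat completions endpoint, so the client can still be used as a plain OpenAI client.
raw = client.chat(
  parameters: {
    model: 'gpt-3.5-turbo',
    messages: [{ role: 'user', content: 'Say hello' }]
  }
)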
data/lib/instructor/mode.rb ADDED
@@ -0,0 +1,22 @@
+ # frozen_string_literal: true
+
+ require 'ostruct'
+
+ module Instructor
+   # This module defines constants related to different modes of operation.
+   # It provides options for tool behavior, function types, and JSON modes.
+   # Currently supported modes are:
+   # - tools: select between function, auto, required, and none.
+   # more modes will be added in the near future.
+   module Mode
+     tool_options = %w[function auto required none].index_by(&:itself)
+     TOOL_BEHAVIOR = OpenStruct.new(tool_options)
+
+     FUNCTIONS = 'function_call'
+     PARALLEL_TOOLS = 'parallel_tool_call'
+     TOOLS = TOOL_BEHAVIOR
+     JSON = 'json_mode'
+     MD_JSON = 'markdown_json_mode'
+     JSON_SCHEMA = 'json_schema_mode'
+   end
+ end
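
For readers skimming the new Mode module: a small sketch (assuming ActiveSupport is loaded, which the gem already requires for `index_by`) of what the `TOOL_BEHAVIOR` OpenStruct evaluates to.

require 'ostruct'
require 'active_support/all'

tool_options = %w[function auto required none].index_by(&:itself)
# => {"function"=>"function", "auto"=>"auto", "required"=>"required", "none"=>"none"}

TOOL_BEHAVIOR = OpenStruct.new(tool_options)
TOOL_BEHAVIOR.function # => "function"
TOOL_BEHAVIOR.required # => "required"

# So Instructor::Mode::TOOLS.auto is just the string "auto"; the patch below
# maps these strings onto OpenAI's tool_choice parameter.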
data/lib/instructor/openai/patch.rb CHANGED
@@ -29,6 +29,8 @@ module Instructor
        # @param validation_context [Hash] The validation context for the parameters. Optional.
        # @return [Object] The processed response.
        def chat(parameters:, response_model: nil, max_retries: 0, validation_context: nil)
+         return json_post(path: '/chat/completions', parameters:) if response_model.nil?
+
          with_retries(max_retries, [JSON::ParserError, Instructor::ValidationError, Faraday::ParsingError]) do
            model = determine_model(response_model)
            function = build_function(model)
@@ -46,7 +48,22 @@ module Instructor
        # @return [Hash] The prepared parameters.
        def prepare_parameters(parameters, validation_context, function)
          parameters = apply_validation_context(parameters, validation_context)
-         parameters.merge(tools: [function])
+         parameters.merge!(tools: [function])
+         tool_choice = resolve_tool_choice(function)
+         parameters.merge!(tool_choice:)
+       end
+
+       def resolve_tool_choice(function)
+         case Instructor.mode
+         when Instructor::Mode::TOOLS.function
+           { type: 'function', function: { name: function[:function][:name] } }
+         when Instructor::Mode::TOOLS.auto
+           'auto'
+         when Instructor::Mode::TOOLS.required
+           'required'
+         when Instructor::Mode::TOOLS.none
+           'none'
+         end
        end

        # Processes the API response.
@@ -56,7 +73,11 @@
        # @return [Object] The processed response.
        def process_response(response, model)
          parsed_response = Response.new(response).parse
-         iterable? ? process_multiple_responses(parsed_response, model) : process_single_response(parsed_response, model)
+         if iterable?(parsed_response)
+           process_multiple_responses(parsed_response, model)
+         else
+           process_single_response(parsed_response, model)
+         end
        end

        # Processes multiple responses from the API.
@@ -84,7 +105,7 @@
        # Determines the response model based on the provided value.
        #
        # @param response_model [Class] The response model class or typed array.
-       # @return [Class] The determined response model class.
+       # @return [Class] The response model.
        def determine_model(response_model)
          if response_model.is_a?(T::Types::TypedArray)
            @iterable = true
@@ -118,26 +139,36 @@
          {
            type: 'function',
            function: {
-             name: model.name.humanize.titleize,
+             name: generate_function_name(model),
              description: generate_description(model),
              parameters: model.json_schema
            }
          }
        end

+       def generate_function_name(model)
+         model.schema.fetch(:title, model.name)
+       end
+
        # Generates the description for the function.
        #
        # @param model [Class] The response model class.
        # @return [String] The generated description.
        def generate_description(model)
-         "Correctly extracted `#{model.name}` with all the required parameters with correct types"
+         if model.respond_to?(:instructions)
+           raise Instructor::Error, 'The instructions must be a string' unless model.instructions.is_a?(String)
+
+           model.instructions
+         else
+           "Correctly extracted `#{model.name}` with all the required parameters with correct types"
+         end
        end

        # Checks if the response is iterable.
        #
        # @return [Boolean] `true` if the response is iterable, `false` otherwise.
-       def iterable?
-         @iterable
+       def iterable?(response)
+         @iterable && response.is_a?(Array)
        end
      end
    end
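
To make the new customization hooks concrete, here is a hypothetical response model showing where `generate_function_name` and `generate_description` pull their values from. Only the class-level methods touched by this patch are sketched; a real model for this gem would also define its full schema and validation behaviour.

# Hypothetical model; the class name, title, and instructions text are illustrative.
class UserDetail
  # generate_function_name uses schema[:title] when present and falls back to the class name.
  def self.schema
    { title: 'user_detail' }
  end

  # build_function sends this as the tool's `parameters` payload.
  def self.json_schema
    {
      type: 'object',
      properties: { name: { type: 'string' }, age: { type: 'integer' } },
      required: %w[name age]
    }
  end

  # When defined, this string replaces the default "Correctly extracted ..."
  # description; anything other than a String raises Instructor::Error.
  def self.instructions
    'Extract the user name and age from the given text.'
  end
end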
data/lib/instructor/version.rb CHANGED
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module Instructor
-   VERSION = '0.1.1'
+   VERSION = '0.1.3'
  end
data/lib/instructor.rb CHANGED
@@ -6,23 +6,32 @@ require 'active_support/all'
  require_relative 'instructor/version'
  require_relative 'instructor/openai/patch'
  require_relative 'instructor/openai/response'
+ require_relative 'instructor/mode'

  # Instructor makes it easy to reliably get structured data like JSON from Large Language Models (LLMs)
  # like GPT-3.5, GPT-4, GPT-4-Vision
  module Instructor
+   @mode = nil
+
    class Error < ::StandardError; end

    # The ValidationError class represents an error that occurs during validation.
    class ValidationError < ::StandardError; end

+   def self.mode
+     @mode
+   end
+
    # Patches the OpenAI client to add the following functionality:
    # - Retries on exceptions
    # - Accepts and validates a response model
    # - Accepts a validation_context argument
    #
    # @param openai_client [OpenAI::Client] The OpenAI client to be patched.
+   # @param mode [Symbol] The mode to be used. Default is `Instructor::Mode::TOOLS.function`.
    # @return [OpenAI::Client] The patched OpenAI client.
-   def self.patch(openai_client)
+   def self.patch(openai_client, mode: Instructor::Mode::TOOLS.function)
+     @mode = mode
      openai_client.prepend(Instructor::OpenAI::Patch)
    end
  end
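
A short, hedged example of the new `mode:` keyword and the `Instructor.mode` reader added above; the access-token handling is an assumption, and the strings shown are the OpenStruct values from lib/instructor/mode.rb.

require 'openai'
require 'instructor'

# Default mode pins tool_choice to the generated function.
client = Instructor.patch(OpenAI::Client).new(access_token: ENV['OPENAI_API_KEY'])
Instructor.mode # => "function"

# Force the model to call a tool, but let it choose which one.
Instructor.patch(OpenAI::Client, mode: Instructor::Mode::TOOLS.required)
Instructor.mode # => "required"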
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: instructor-rb
  version: !ruby/object:Gem::Version
-   version: 0.1.1
+   version: 0.1.3
  platform: ruby
  authors:
  - Sergio Bayona
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2024-05-07 00:00:00.000000000 Z
+ date: 2024-05-22 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: activesupport
@@ -45,14 +45,14 @@ dependencies:
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: '6'
+         version: '7'
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: '6'
+         version: '7'
  - !ruby/object:Gem::Dependency
    name: pry-byebug
    requirement: !ruby/object:Gem::Requirement
@@ -215,6 +215,7 @@ files:
  - ellipsis.Dockerfile
  - ellipsis.yaml
  - lib/instructor.rb
+ - lib/instructor/mode.rb
  - lib/instructor/openai/patch.rb
  - lib/instructor/openai/response.rb
  - lib/instructor/version.rb