dspy 0.29.0 → 0.30.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. checksums.yaml +4 -4
  2. data/LICENSE +45 -0
  3. data/README.md +121 -101
  4. data/lib/dspy/callbacks.rb +74 -19
  5. data/lib/dspy/context.rb +49 -4
  6. data/lib/dspy/errors.rb +19 -1
  7. data/lib/dspy/{datasets.rb → evals/version.rb} +2 -3
  8. data/lib/dspy/{evaluate.rb → evals.rb} +373 -110
  9. data/lib/dspy/mixins/instruction_updatable.rb +22 -0
  10. data/lib/dspy/observability.rb +40 -182
  11. data/lib/dspy/predict.rb +10 -2
  12. data/lib/dspy/propose/dataset_summary_generator.rb +28 -18
  13. data/lib/dspy/re_act.rb +21 -0
  14. data/lib/dspy/schema/sorbet_json_schema.rb +302 -0
  15. data/lib/dspy/schema/version.rb +7 -0
  16. data/lib/dspy/schema.rb +4 -0
  17. data/lib/dspy/structured_outputs_prompt.rb +48 -0
  18. data/lib/dspy/support/warning_filters.rb +27 -0
  19. data/lib/dspy/teleprompt/gepa.rb +9 -588
  20. data/lib/dspy/teleprompt/instruction_updates.rb +94 -0
  21. data/lib/dspy/teleprompt/teleprompter.rb +6 -6
  22. data/lib/dspy/teleprompt/utils.rb +5 -65
  23. data/lib/dspy/type_system/sorbet_json_schema.rb +2 -299
  24. data/lib/dspy/version.rb +1 -1
  25. data/lib/dspy.rb +33 -7
  26. metadata +14 -60
  27. data/lib/dspy/code_act.rb +0 -477
  28. data/lib/dspy/datasets/ade.rb +0 -90
  29. data/lib/dspy/observability/async_span_processor.rb +0 -250
  30. data/lib/dspy/observability/observation_type.rb +0 -65
  31. data/lib/dspy/optimizers/gaussian_process.rb +0 -141
  32. data/lib/dspy/teleprompt/mipro_v2.rb +0 -1423
  33. data/lib/gepa/api.rb +0 -61
  34. data/lib/gepa/core/engine.rb +0 -226
  35. data/lib/gepa/core/evaluation_batch.rb +0 -26
  36. data/lib/gepa/core/result.rb +0 -92
  37. data/lib/gepa/core/state.rb +0 -231
  38. data/lib/gepa/logging/experiment_tracker.rb +0 -54
  39. data/lib/gepa/logging/logger.rb +0 -57
  40. data/lib/gepa/logging.rb +0 -9
  41. data/lib/gepa/proposer/base.rb +0 -27
  42. data/lib/gepa/proposer/merge_proposer.rb +0 -424
  43. data/lib/gepa/proposer/reflective_mutation/base.rb +0 -48
  44. data/lib/gepa/proposer/reflective_mutation/reflective_mutation.rb +0 -188
  45. data/lib/gepa/strategies/batch_sampler.rb +0 -91
  46. data/lib/gepa/strategies/candidate_selector.rb +0 -97
  47. data/lib/gepa/strategies/component_selector.rb +0 -57
  48. data/lib/gepa/strategies/instruction_proposal.rb +0 -120
  49. data/lib/gepa/telemetry.rb +0 -122
  50. data/lib/gepa/utils/pareto.rb +0 -119
  51. data/lib/gepa.rb +0 -21
data/lib/dspy/teleprompt/teleprompter.rb CHANGED
@@ -1,7 +1,7 @@
  # frozen_string_literal: true

  require 'sorbet-runtime'
- require_relative '../evaluate'
+ require_relative '../evals'
  require_relative '../example'

  module DSPy
@@ -125,7 +125,7 @@ module DSPy
  sig { returns(T.nilable(T.proc.params(arg0: T.untyped, arg1: T.untyped).returns(T.untyped))) }
  attr_reader :metric

- sig { returns(T.nilable(DSPy::Evaluate)) }
+ sig { returns(T.nilable(DSPy::Evals)) }
  attr_reader :evaluator

  sig do
@@ -183,12 +183,12 @@ module DSPy
  end

  # Create evaluator for given examples and metric
- sig { params(examples: T::Array[T.untyped]).returns(DSPy::Evaluate) }
+ sig { params(examples: T::Array[T.untyped]).returns(DSPy::Evals) }
  def create_evaluator(examples)
  # Use provided metric or create a default one for DSPy::Example objects
  evaluation_metric = @metric || default_metric_for_examples(examples)

- @evaluator = DSPy::Evaluate.new(
+ @evaluator = DSPy::Evals.new(
  nil, # Program will be set during evaluation
  metric: evaluation_metric,
  num_threads: @config.num_threads,
@@ -202,12 +202,12 @@ module DSPy
  program: T.untyped,
  examples: T::Array[T.untyped],
  metric: T.nilable(T.proc.params(arg0: T.untyped, arg1: T.untyped).returns(T.untyped))
- ).returns(DSPy::Evaluate::BatchEvaluationResult)
+ ).returns(DSPy::Evals::BatchEvaluationResult)
  end
  def evaluate_program(program, examples, metric: nil)
  evaluation_metric = metric || @metric || default_metric_for_examples(examples)

- evaluator = DSPy::Evaluate.new(
+ evaluator = DSPy::Evals.new(
  program,
  metric: evaluation_metric,
  num_threads: @config.num_threads,
data/lib/dspy/teleprompt/utils.rb CHANGED
@@ -2,7 +2,7 @@

  require 'sorbet-runtime'
  require 'fileutils'
- require_relative '../evaluate'
+ require_relative '../evals'
  require_relative '../example'
  require_relative 'data_handler'

@@ -13,66 +13,6 @@ module DSPy
  module Utils
  extend T::Sig

- # Wrapper class that provides Python-compatible signature API
- # Wraps a Predict instance to provide signature access and modification
- class SignatureWrapper
- extend T::Sig
-
- sig { returns(T.untyped) }
- attr_reader :predictor
-
- sig { params(predictor: T.untyped).void }
- def initialize(predictor)
- @predictor = predictor
- end
-
- sig { returns(String) }
- def instructions
- # Get instructions from the predictor's prompt
- @predictor.prompt.instruction
- end
-
- sig { params(new_instructions: String).returns(SignatureWrapper) }
- def with_instructions(new_instructions)
- # Return a new wrapper that will apply new instructions when set
- updated_wrapper = SignatureWrapper.new(@predictor)
- updated_wrapper.instance_variable_set(:@pending_instructions, new_instructions)
- updated_wrapper
- end
-
- sig { returns(T.nilable(String)) }
- def pending_instructions
- @pending_instructions
- end
- end
-
- # Get signature information from a predictor (Python compatibility)
- # Returns a wrapper that provides Python-like signature API
- #
- # @param predictor [Predict] The predictor to get signature from
- # @return [SignatureWrapper] Wrapper providing signature access
- sig { params(predictor: T.untyped).returns(SignatureWrapper) }
- def self.get_signature(predictor)
- SignatureWrapper.new(predictor)
- end
-
- # Set signature on a predictor (Python compatibility)
- # Updates the predictor's prompt with new instructions
- #
- # @param predictor [Predict] The predictor to update
- # @param updated_signature [SignatureWrapper] The updated signature wrapper
- sig { params(predictor: T.untyped, updated_signature: SignatureWrapper).void }
- def self.set_signature(predictor, updated_signature)
- # Extract pending instructions from the wrapper
- new_instructions = updated_signature.pending_instructions
-
- if new_instructions
- # Update the predictor's prompt with new instructions
- # We mutate the prompt's instruction directly for MIPROv2 compatibility
- predictor.prompt.instance_variable_set(:@instruction, new_instructions)
- end
- end
-
  # Create a minibatch from the trainset using random sampling
  # This function is compatible with Python DSPy's MIPROv2 implementation
  #
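This hunk drops the Python-compat SignatureWrapper along with Utils.get_signature and Utils.set_signature; per the file list, instruction rewriting now goes through mixins/instruction_updatable.rb and teleprompt/instruction_updates.rb instead. For reference, a minimal sketch of how the removed helpers were used, reconstructed from the deleted lines above (the module nesting and the MySignature class are assumed for illustration):

    # Sketch of the API removed in 0.30.0; MySignature is a placeholder DSPy::Signature subclass.
    predictor = DSPy::Predict.new(MySignature)

    sig = DSPy::Teleprompt::Utils.get_signature(predictor)          # wrap the predictor
    updated = sig.with_instructions("Answer concisely and cite sources.")
    DSPy::Teleprompt::Utils.set_signature(predictor, updated)       # mutated the prompt's @instruction

    predictor.prompt.instruction
    # => "Answer concisely and cite sources."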
@@ -459,7 +399,7 @@ module DSPy
  examples: T::Array[T.untyped],
  config: BootstrapConfig,
  metric: T.nilable(T.proc.params(arg0: T.untyped, arg1: T.untyped).returns(T::Boolean))
- ).returns(DSPy::Evaluate::BatchEvaluationResult)
+ ).returns(DSPy::Evals::BatchEvaluationResult)
  end
  def self.eval_candidate_program(program, examples, config: BootstrapConfig.new, metric: nil)
  # Use minibatch evaluation for large datasets
@@ -477,7 +417,7 @@ module DSPy
  examples: T::Array[T.untyped],
  config: BootstrapConfig,
  metric: T.nilable(T.proc.params(arg0: T.untyped, arg1: T.untyped).returns(T::Boolean))
- ).returns(DSPy::Evaluate::BatchEvaluationResult)
+ ).returns(DSPy::Evals::BatchEvaluationResult)
  end
  def self.eval_candidate_program_minibatch(program, examples, config, metric)
  DSPy::Context.with_span(
@@ -502,11 +442,11 @@ module DSPy
  examples: T::Array[T.untyped],
  config: BootstrapConfig,
  metric: T.nilable(T.proc.params(arg0: T.untyped, arg1: T.untyped).returns(T::Boolean))
- ).returns(DSPy::Evaluate::BatchEvaluationResult)
+ ).returns(DSPy::Evals::BatchEvaluationResult)
  end
  def self.eval_candidate_program_full(program, examples, config, metric)
  # Create evaluator with proper configuration
- evaluator = DSPy::Evaluate.new(
+ evaluator = DSPy::Evals.new(
  program,
  metric: metric || default_metric_for_examples(examples),
  num_threads: config.num_threads,
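Together with the evaluate.rb to evals.rb rename in the file list, the hunks above replace DSPy::Evaluate with DSPy::Evals throughout the teleprompters, and batch results move to DSPy::Evals::BatchEvaluationResult. A hedged sketch of constructing the renamed evaluator, using only the constructor arguments visible in these hunks (the metric lambda shape and the evaluate entry point are assumptions; check lib/dspy/evals.rb for the actual API):

    # Minimal sketch, assuming the evaluator keeps its pre-0.30.0 calling convention.
    metric = ->(example, prediction) { prediction.answer == example.expected_answer }

    evaluator = DSPy::Evals.new(   # was DSPy::Evaluate.new before 0.30.0
      program,                     # an already-built DSPy module, e.g. a DSPy::Predict instance
      metric: metric,
      num_threads: 4
    )

    result = evaluator.evaluate(examples)  # assumed entry point; returns DSPy::Evals::BatchEvaluationResult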
data/lib/dspy/type_system/sorbet_json_schema.rb CHANGED
@@ -1,301 +1,4 @@
- # typed: strict
+ # typed: false
  # frozen_string_literal: true

- require 'sorbet-runtime'
-
- module DSPy
- module TypeSystem
- # Unified module for converting Sorbet types to JSON Schema
- # Extracted from Signature class to ensure consistency across Tools, Toolsets, and Signatures
- module SorbetJsonSchema
- extend T::Sig
- extend T::Helpers
-
- # Convert a Sorbet type to JSON Schema format
- sig { params(type: T.untyped, visited: T.nilable(T::Set[T.untyped])).returns(T::Hash[Symbol, T.untyped]) }
- def self.type_to_json_schema(type, visited = nil)
- visited ||= Set.new
-
- # Handle T::Boolean type alias first
- if type == T::Boolean
- return { type: "boolean" }
- end
-
- # Handle type aliases by resolving to their underlying type
- if type.is_a?(T::Private::Types::TypeAlias)
- return self.type_to_json_schema(type.aliased_type, visited)
- end
-
- # Handle raw class types first
- if type.is_a?(Class)
- if type < T::Enum
- # Get all enum values
- values = type.values.map(&:serialize)
- { type: "string", enum: values }
- elsif type == String
- { type: "string" }
- elsif type == Integer
- { type: "integer" }
- elsif type == Float
- { type: "number" }
- elsif type == Numeric
- { type: "number" }
- elsif type == Date
- { type: "string", format: "date" }
- elsif type == DateTime
- { type: "string", format: "date-time" }
- elsif type == Time
- { type: "string", format: "date-time" }
- elsif [TrueClass, FalseClass].include?(type)
- { type: "boolean" }
- elsif type < T::Struct
- # Handle custom T::Struct classes by generating nested object schema
- # Check for recursion
- if visited.include?(type)
- # Return a reference to avoid infinite recursion
- {
- "$ref" => "#/definitions/#{type.name.split('::').last}",
- description: "Recursive reference to #{type.name}"
- }
- else
- self.generate_struct_schema(type, visited)
- end
- else
- { type: "string" } # Default fallback
- end
- elsif type.is_a?(T::Types::Simple)
- case type.raw_type.to_s
- when "String"
- { type: "string" }
- when "Integer"
- { type: "integer" }
- when "Float"
- { type: "number" }
- when "Numeric"
- { type: "number" }
- when "Date"
- { type: "string", format: "date" }
- when "DateTime"
- { type: "string", format: "date-time" }
- when "Time"
- { type: "string", format: "date-time" }
- when "TrueClass", "FalseClass"
- { type: "boolean" }
- when "T::Boolean"
- { type: "boolean" }
- else
- # Check if it's an enum
- if type.raw_type < T::Enum
- # Get all enum values
- values = type.raw_type.values.map(&:serialize)
- { type: "string", enum: values }
- elsif type.raw_type < T::Struct
- # Handle custom T::Struct classes
- if visited.include?(type.raw_type)
- {
- "$ref" => "#/definitions/#{type.raw_type.name.split('::').last}",
- description: "Recursive reference to #{type.raw_type.name}"
- }
- else
- generate_struct_schema(type.raw_type, visited)
- end
- else
- { type: "string" } # Default fallback
- end
- end
- elsif type.is_a?(T::Types::TypedArray)
- # Handle arrays properly with nested item type
- {
- type: "array",
- items: self.type_to_json_schema(type.type, visited)
- }
- elsif type.is_a?(T::Types::TypedHash)
- # Handle hashes as objects with additionalProperties
- # TypedHash has keys and values methods to access its key and value types
- key_schema = self.type_to_json_schema(type.keys, visited)
- value_schema = self.type_to_json_schema(type.values, visited)
-
- # Create a more descriptive schema for nested structures
- {
- type: "object",
- propertyNames: key_schema, # Describe key constraints
- additionalProperties: value_schema,
- # Add a more explicit description of the expected structure
- description: "A mapping where keys are #{key_schema[:type]}s and values are #{value_schema[:description] || value_schema[:type]}s"
- }
- elsif type.is_a?(T::Types::FixedHash)
- # Handle fixed hashes (from type aliases like { "key" => Type })
- properties = {}
- required = []
-
- type.types.each do |key, value_type|
- properties[key] = self.type_to_json_schema(value_type, visited)
- required << key
- end
-
- {
- type: "object",
- properties: properties,
- required: required,
- additionalProperties: false
- }
- elsif type.class.name == "T::Private::Types::SimplePairUnion"
- # Handle T.nilable types (T::Private::Types::SimplePairUnion)
- # This is the actual implementation of T.nilable(SomeType)
- has_nil = type.respond_to?(:types) && type.types.any? do |t|
- (t.respond_to?(:raw_type) && t.raw_type == NilClass) ||
- (t.respond_to?(:name) && t.name == "NilClass")
- end
-
- if has_nil
- # Find the non-nil type
- non_nil_type = type.types.find do |t|
- !(t.respond_to?(:raw_type) && t.raw_type == NilClass) &&
- !(t.respond_to?(:name) && t.name == "NilClass")
- end
-
- if non_nil_type
- base_schema = self.type_to_json_schema(non_nil_type, visited)
- if base_schema[:type].is_a?(String)
- # Convert single type to array with null
- { type: [base_schema[:type], "null"] }.merge(base_schema.except(:type))
- else
- # For complex schemas, use anyOf to allow null
- { anyOf: [base_schema, { type: "null" }] }
- end
- else
- { type: "string" } # Fallback
- end
- else
- # Not nilable SimplePairUnion - this is a regular T.any() union
- # Generate oneOf schema for all types
- if type.respond_to?(:types) && type.types.length > 1
- {
- oneOf: type.types.map { |t| self.type_to_json_schema(t, visited) },
- description: "Union of multiple types"
- }
- else
- # Single type or fallback
- first_type = type.respond_to?(:types) ? type.types.first : type
- self.type_to_json_schema(first_type, visited)
- end
- end
- elsif type.is_a?(T::Types::Union)
- # Check if this is a nilable type (contains NilClass)
- is_nilable = type.types.any? { |t| t == T::Utils.coerce(NilClass) }
- non_nil_types = type.types.reject { |t| t == T::Utils.coerce(NilClass) }
-
- # Special case: check if we have TrueClass + FalseClass (T.nilable(T::Boolean))
- if non_nil_types.size == 2 && is_nilable
- true_class_type = non_nil_types.find { |t| t.respond_to?(:raw_type) && t.raw_type == TrueClass }
- false_class_type = non_nil_types.find { |t| t.respond_to?(:raw_type) && t.raw_type == FalseClass }
-
- if true_class_type && false_class_type
- # This is T.nilable(T::Boolean) - treat as nilable boolean
- return { type: ["boolean", "null"] }
- end
- end
-
- if non_nil_types.size == 1 && is_nilable
- # This is T.nilable(SomeType) - generate proper schema with null allowed
- base_schema = self.type_to_json_schema(non_nil_types.first, visited)
- if base_schema[:type].is_a?(String)
- # Convert single type to array with null
- { type: [base_schema[:type], "null"] }.merge(base_schema.except(:type))
- else
- # For complex schemas, use anyOf to allow null
- { anyOf: [base_schema, { type: "null" }] }
- end
- elsif non_nil_types.size == 1
- # Non-nilable single type union (shouldn't happen in practice)
- self.type_to_json_schema(non_nil_types.first, visited)
- elsif non_nil_types.size > 1
- # Handle complex unions with oneOf for better JSON schema compliance
- base_schema = {
- oneOf: non_nil_types.map { |t| self.type_to_json_schema(t, visited) },
- description: "Union of multiple types"
- }
- if is_nilable
- # Add null as an option for complex nilable unions
- base_schema[:oneOf] << { type: "null" }
- end
- base_schema
- else
- { type: "string" } # Fallback for complex unions
- end
- elsif type.is_a?(T::Types::ClassOf)
- # Handle T.class_of() types
- {
- type: "string",
- description: "Class name (T.class_of type)"
- }
- else
- { type: "string" } # Default fallback
- end
- end
-
- # Generate JSON schema for custom T::Struct classes
- sig { params(struct_class: T.class_of(T::Struct), visited: T.nilable(T::Set[T.untyped])).returns(T::Hash[Symbol, T.untyped]) }
- def self.generate_struct_schema(struct_class, visited = nil)
- visited ||= Set.new
-
- return { type: "string", description: "Struct (schema introspection not available)" } unless struct_class.respond_to?(:props)
-
- # Add this struct to visited set to detect recursion
- visited.add(struct_class)
-
- properties = {}
- required = []
-
- # Check if struct already has a _type field
- if struct_class.props.key?(:_type)
- raise DSPy::ValidationError, "_type field conflict: #{struct_class.name} already has a _type field defined. " \
- "DSPy uses _type for automatic type detection in union types."
- end
-
- # Add automatic _type field for type detection
- properties[:_type] = {
- type: "string",
- const: struct_class.name.split('::').last # Use the simple class name
- }
- required << "_type"
-
- struct_class.props.each do |prop_name, prop_info|
- prop_type = prop_info[:type_object] || prop_info[:type]
- properties[prop_name] = self.type_to_json_schema(prop_type, visited)
-
- # A field is required if it's not fully optional
- # fully_optional is true for nilable prop fields
- # immutable const fields are required unless nilable
- unless prop_info[:fully_optional]
- required << prop_name.to_s
- end
- end
-
- # Remove this struct from visited set after processing
- visited.delete(struct_class)
-
- {
- type: "object",
- properties: properties,
- required: required,
- description: "#{struct_class.name} struct"
- }
- end
-
- private
-
- # Extensions to Hash for Rails-like except method if not available
- # This ensures compatibility with the original code
- unless Hash.method_defined?(:except)
- Hash.class_eval do
- def except(*keys)
- dup.tap do |hash|
- keys.each { |key| hash.delete(key) }
- end
- end
- end
- end
- end
- end
- end
+ require 'dspy/schema'
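The old type_system/sorbet_json_schema.rb is reduced to a shim that loads dspy/schema; per the file list, the full implementation now lives in data/lib/dspy/schema/sorbet_json_schema.rb. Based on the deleted implementation above, a sketch of the conversions it provided; whether the DSPy::TypeSystem::SorbetJsonSchema constant still resolves through the shim is an assumption:

    # Behaviour sketch derived from the removed code; constant resolution via the shim is assumed.
    DSPy::TypeSystem::SorbetJsonSchema.type_to_json_schema(String)
    # => { type: "string" }

    DSPy::TypeSystem::SorbetJsonSchema.type_to_json_schema(T::Array[Integer])
    # => { type: "array", items: { type: "integer" } }

    DSPy::TypeSystem::SorbetJsonSchema.type_to_json_schema(T.nilable(Float))
    # => { type: ["number", "null"] }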
data/lib/dspy/version.rb CHANGED
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module DSPy
- VERSION = "0.29.0"
+ VERSION = "0.30.0"
  end
data/lib/dspy.rb CHANGED
@@ -1,4 +1,5 @@
  # frozen_string_literal: true
+ require_relative 'dspy/support/warning_filters'
  require 'sorbet-runtime'
  require 'dry-configurable'
  require 'dry/logger'
@@ -8,7 +9,6 @@ require_relative 'dspy/version'
  require_relative 'dspy/errors'
  require_relative 'dspy/type_serializer'
  require_relative 'dspy/observability'
- require_relative 'dspy/observability/observation_type'
  require_relative 'dspy/context'
  require_relative 'dspy/events'
  require_relative 'dspy/events/types'
@@ -30,6 +30,10 @@ module DSPy
  @logger ||= create_logger
  end

+ # Writes structured output to the configured logger. Use this for human-readable
+ # logs only—listeners and telemetry exporters are not triggered by `DSPy.log`.
+ # Prefer `DSPy.event` whenever you want consumers (or Langfuse/OpenTelemetry)
+ # to react to what happened.
  def self.log(event_name, **attributes)
  # Return nil early if logger is not configured (backward compatibility)
  return nil unless logger
@@ -41,6 +45,10 @@ module DSPy
  nil
  end

+ # Emits a structured event that flows through DSPy's event bus, fires any
+ # subscribed listeners, and creates OpenTelemetry spans when observability
+ # is enabled. Use this for anything that should be tracked, instrumented,
+ # or forwarded to Langfuse.
  def self.event(event_name_or_object, attributes = {})
  # Handle typed event objects
  if event_name_or_object.respond_to?(:name) && event_name_or_object.respond_to?(:to_attributes)
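The new doc comments draw the line between DSPy.log (logger output only) and DSPy.event (event bus, listeners, optional OpenTelemetry spans). A short usage sketch based on the method signatures shown in this hunk; the event names and attribute keys below are illustrative, not part of the gem:

    # Illustrative names only.
    DSPy.log('prompt.rendered', signature: 'QA', tokens: 812)
    # writes to the configured logger; no listeners or exporters fire

    DSPy.event('prediction.completed', { signature: 'QA', tokens: 812 })
    # flows through the event bus, fires subscribers, and can create an OTel span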
@@ -108,7 +116,8 @@ module DSPy
  'observability.error',
  'observability.span_error',
  'observability.span_finish_error',
- 'event.span_creation_error'
+ 'event.span_creation_error',
+ 'lm.tokens'
  ].freeze

  def self.create_event_span(event_name, attributes)
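This hunk adds 'lm.tokens' to the frozen list consulted around create_event_span; the diff does not show how the list is used, so its exact effect on span creation is not visible here. For context, a hedged sketch of emitting a token-usage event through the public DSPy.event API shown above (the attribute keys are hypothetical, not the gem's actual lm.tokens payload):

    # Hypothetical attribute keys; check the gem for the real lm.tokens payload.
    DSPy.event('lm.tokens', {
      'gen_ai.usage.prompt_tokens' => 512,
      'gen_ai.usage.completion_tokens' => 87
    })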
@@ -199,7 +208,6 @@ require_relative 'dspy/signature'
  require_relative 'dspy/few_shot_example'
  require_relative 'dspy/prompt'
  require_relative 'dspy/example'
- require_relative 'dspy/datasets'
  require_relative 'dspy/lm'
  require_relative 'dspy/image'
  require_relative 'dspy/prediction'
@@ -208,16 +216,34 @@ require_relative 'dspy/events/subscribers'
  require_relative 'dspy/events/subscriber_mixin'
  require_relative 'dspy/chain_of_thought'
  require_relative 'dspy/re_act'
- require_relative 'dspy/code_act'
- require_relative 'dspy/evaluate'
+ require_relative 'dspy/evals'
  require_relative 'dspy/teleprompt/teleprompter'
  require_relative 'dspy/teleprompt/utils'
  require_relative 'dspy/teleprompt/data_handler'
- require_relative 'dspy/teleprompt/gepa'
  require_relative 'dspy/propose/grounded_proposer'
- require_relative 'dspy/teleprompt/mipro_v2'
+ begin
+ require 'dspy/o11y/langfuse'
+ rescue LoadError
+ end
+ begin
+ require 'dspy/gepa'
+ rescue LoadError
+ end
+ begin
+ require 'dspy/code_act'
+ rescue LoadError
+ end
+ begin
+ require 'dspy/miprov2'
+ rescue LoadError
+ end
  require_relative 'dspy/tools'
  require_relative 'dspy/memory'
+
+ begin
+ require 'dspy/datasets'
+ rescue LoadError
+ end
  require_relative 'dspy/storage/program_storage'
  require_relative 'dspy/storage/storage_manager'
  require_relative 'dspy/registry/signature_registry'
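With this hunk, CodeAct, the GEPA and MIPROv2 optimizers, the dataset helpers, and Langfuse observability become optional requires guarded by rescue LoadError, so the core gem loads without them. A sketch of how an application would opt back in; the companion gem names are assumptions inferred from the require paths ('dspy/gepa', 'dspy/miprov2', 'dspy/datasets', 'dspy/o11y/langfuse'), not confirmed by this diff:

    # Gemfile - gem names below are hypothetical; check the project's docs for the published names.
    gem 'dspy'
    gem 'dspy-gepa'       # would provide 'dspy/gepa'
    gem 'dspy-datasets'   # would provide 'dspy/datasets'

    # Application code: if a companion gem is absent, `require 'dspy'` still succeeds
    # and only the optional constants (e.g. the GEPA teleprompter) are unavailable.
    require 'dspy'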