omniai 2.8.2 → 2.8.4

This diff shows the differences between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: b6f5ce2ea16526510c60f251aae959edd1f9d509e17736bf04c34c20fc484f7c
-  data.tar.gz: 6713edfadd57f58f75faeae63e1b39059aa576fd65fb5e2a133e1033d8061b33
+  metadata.gz: 533dd085c62bb70b08d71d16025423b981d96cbffe7a1280072908f175800d28
+  data.tar.gz: c40684be74123a132c11ecc70a620ee49d4b6e949daf08f91c8f5a360d0ea56f
 SHA512:
-  metadata.gz: e428cf0d49f1f076942f301b6a2e66c774101f0f54bcaa6fae3fa305be6d8af8017153fea31bccaed73fe046b0c49883eace49bd50dbb5730f774c8615d907e7
-  data.tar.gz: fa4e525fc536d34cbd1e3fdc7883d6f655ed2f5fc032bf10d8e124ceed6bd8ec34a0925b73a6c5390c77e1836d1b27a7cce976f5aaab7a824929b316fd2921bd
+  metadata.gz: 355abe067bdc6a6156300ea305b441a14388239506565a753f12dfc7824fe2d1a7acee4772fe8556fec5dbee2a9a3a9e25506ddb9faa7d78d3838bf647d17424
+  data.tar.gz: 03d292f24d8345c4f4043f185b4c5e01ce09894afc7e9cd72e81f251cd9c394c8af2165f2c054bcb0cff60f50f75498efb90f4eb79e724e829b86561420819ff
data/lib/omniai/client.rb CHANGED
@@ -207,7 +207,7 @@ module OmniAI
     # @param language [String, nil] optional
     # @param prompt [String, nil] optional
     # @param temperature [Float, nil] optional
-    # @param format [Symbol] :text, :srt, :vtt, or :json (default)
+    # @param format [String] 'text', 'srt', 'vtt', 'json' (default), or 'verbose_json'
     #
     # @return [OmniAI::Transcribe::Transcription]
     def transcribe(io, model:, language: nil, prompt: nil, temperature: nil, format: nil)
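
The docstring change above tracks the switch from symbol to string formats and the new 'verbose_json' option. A minimal usage sketch, assuming a provider client such as OmniAI::OpenAI::Client from the omniai-openai gem and an OpenAI-style "whisper-1" model name (both are assumptions, not part of this diff):

    require "omniai/openai" # assumed provider gem; any client implementing #transcribe works

    client = OmniAI::OpenAI::Client.new

    File.open("audio.wav", "rb") do |io|
      # "whisper-1" and the file name are placeholder values.
      transcription = client.transcribe(io, model: "whisper-1", format: "verbose_json")
      puts transcription.text
    end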
@@ -19,6 +19,8 @@ module OmniAI
 
     # @param transport [OmniAI::MCP::Transport]
     def run(transport: OmniAI::MCP::Transport::Stdio.new)
+      @logger&.info("#{self.class}#run: running")
+
       loop do
         message = transport.gets
         break if message.nil?
@@ -29,9 +31,9 @@ module OmniAI
 
         transport.puts(response) if response
       end
-    end
 
-    private
+      @logger&.info("#{self.class}#run: finished")
+    end
 
     # @param message [String]
     #
@@ -68,7 +70,9 @@ module OmniAI
           name: @name,
           version: @version,
         },
-        capabilities: {},
+        capabilities: {
+          tools: { listChanged: true },
+        },
       })
     end
 
@@ -84,7 +88,7 @@ module OmniAI
     #
     # @return [JRPC::Response]
     def process_tools_list(request:)
-      result = @tools.map do |tool|
+      tools = @tools.map do |tool|
         {
           name: tool.name,
           description: tool.description,
@@ -92,7 +96,7 @@ module OmniAI
         }
       end
 
-      JRPC::Response.new(id: request.id, result:)
+      JRPC::Response.new(id: request.id, result: { tools: })
     end
 
     # @param request [JRPC::Request]
@@ -104,14 +108,21 @@ module OmniAI
       name = request.params["name"]
      tool = @tools.find { |tool| tool.name.eql?(name) }
 
-      result =
+      text =
        begin
-          tool.call(request.params["input"])
+          tool.call(request.params["arguments"])
        rescue StandardError => e
          raise JRPC::Error.new(code: JRPC::Error::Code::INTERNAL_ERROR, message: e.message)
        end
 
-      JRPC::Response.new(id: request.id, result:)
+      JRPC::Response.new(id: request.id, result: {
+        content: [
+          {
+            type: "text",
+            text:,
+          },
+        ],
+      })
     end
   end
 end
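
Taken together, the server hunks above change the wire format: tools/list now nests its array under a tools key, initialize advertises a tools capability, and tools/call reads "arguments" and wraps the tool output in MCP-style content parts. A rough sketch of the JSON-RPC reply a tools/call now yields, assuming JRPC::Response serializes the standard envelope (the id and text values are invented):

    # Illustrative only; "jsonrpc"/"id" come from the assumed JSON-RPC envelope.
    reply = {
      "jsonrpc" => "2.0",
      "id" => 7,
      "result" => {
        "content" => [
          { "type" => "text", "text" => "42" },
        ],
      },
    }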
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+module OmniAI
+  # An error that wraps a JSON::ParserError when using schemas.
+  class ParseError < Error
+  end
+end
@@ -62,9 +62,13 @@ module OmniAI
     #
     # @param text [String]
     #
+    # @raise OmniAI::ParseError
+    #
     # @return [Hash]
     def parse(text)
       schema.parse(JSON.parse(text))
+    rescue JSON::ParserError => e
+      raise OmniAI::ParseError, "Unable to parse JSON text=#{text.inspect} message=#{e.message.inspect}."
     end
   end
 end
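
With the new OmniAI::ParseError, callers can rescue malformed JSON from a schema-backed response instead of catching JSON::ParserError directly. A minimal sketch; schema_format is a placeholder name for whatever object exposes the patched #parse(text):

    raw_text = '{"name": "Ringo"' # deliberately truncated JSON
    begin
      data = schema_format.parse(raw_text) # schema_format: placeholder, see note above
    rescue OmniAI::ParseError => e
      # The message includes the offending text and the underlying JSON error.
      warn "unparseable JSON: #{e.message}"
      data = nil
    end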
@@ -16,18 +16,59 @@ module OmniAI
     # @return [String]
     attr_accessor :format
 
+    # @!attribute [rw] duration
+    # @return [Float, nil]
+    attr_accessor :duration
+
+    # @!attribute [rw] segments
+    # @return [Array<Hash>, nil]
+    attr_accessor :segments
+
+    # @!attribute [rw] language
+    # @return [String, nil]
+    attr_accessor :language
+
+    # @param data [Hash, String]
+    # @param model [String]
+    # @param format [String]
+    #
+    # @return [OmniAI::Transcribe::Transcription]
+    def self.parse(data:, model:, format:)
+      data = { "text" => data } if data.is_a?(String)
+
+      text = data["text"]
+      duration = data["duration"]
+      segments = data["segments"]
+      language = data["language"]
+
+      new(text:, model:, format:, duration:, segments:, language:)
+    end
+
     # @param text [String]
     # @param model [String]
     # @param format [String]
-    def initialize(text:, model:, format:)
+    # @param duration [Float, nil]
+    # @param segments [Array, nil]
+    # @param language [String, nil]
+    def initialize(text:, model:, format:, duration: nil, segments: nil, language: nil)
       @text = text
       @model = model
       @format = format
+      @duration = duration
+      @segments = segments
+      @language = language
     end
 
     # @return [String]
     def inspect
-      "#<#{self.class} text=#{text.inspect}>"
+      attrs = [
+        ("text=#{@text.inspect}" if @text),
+        ("model=#{@model.inspect}" if @model),
+        ("format=#{@format.inspect}" if @format),
+        ("duration=#{@duration.inspect}" if @duration),
+      ].compact.join(" ")
+
+      "#<#{self.class} #{attrs}>"
     end
   end
 end
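
The new Transcription.parse accepts either a plain string (for text/srt/vtt responses) or a hash (for json/verbose_json) and surfaces duration, segments, and language. A sketch with invented values (the segment keys mirror a typical verbose_json payload and are not taken from this diff):

    data = {
      "text" => "Hello there.",
      "duration" => 1.4,
      "language" => "english",
      "segments" => [{ "id" => 0, "start" => 0.0, "end" => 1.4, "text" => "Hello there." }],
    }

    transcription = OmniAI::Transcribe::Transcription.parse(data:, model: "whisper-1", format: "verbose_json")
    transcription.duration # => 1.4
    transcription.inspect  # => includes text, model, format, and duration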
@@ -92,6 +92,7 @@ module OmniAI
       TEXT = "text"
       VTT = "vtt"
       SRT = "srt"
+      VERBOSE_JSON = "verbose_json"
     end
 
     def self.process!(...)
@@ -105,29 +106,40 @@ module OmniAI
     # @param prompt [String, nil] optional
     # @param temperature [Float, nil] optional
     # @param format [String, nil] optional
-    def initialize(io, client:, model:, language: nil, prompt: nil, temperature: nil, format: Format::JSON)
+    def initialize(io, client:, model:, language: nil, prompt: nil, temperature: nil, format: nil)
       @io = io
+      @client = client
       @model = model
       @language = language
       @prompt = prompt
       @temperature = temperature
-      @format = format
-      @client = client
+      @format = format || Format::JSON
     end
 
     # @raise [HTTPError]
+    #
     # @return [OmniAI::Transcribe::Transcription]
     def process!
       response = request!
-
       raise HTTPError, response.flush unless response.status.ok?
 
-      text = @format.nil? || @format.eql?(Format::JSON) ? response.parse["text"] : String(response.body)
-      Transcription.new(text:, model: @model, format: @format)
+      data = json? || verbose_json? ? response.parse : String(response.body)
+
+      Transcription.parse(model: @model, format: @format, data:)
     end
 
     protected
 
+    # @return [Boolean]
+    def json?
+      String(@format).eql?(Format::JSON)
+    end
+
+    # @return [Boolean]
+    def verbose_json?
+      String(@format).eql?(Format::VERBOSE_JSON)
+    end
+
     # @return [Hash]
     def payload
       {
@@ -136,9 +148,13 @@ module OmniAI
         language: @language,
         prompt: @prompt,
         temperature: @temperature,
+        response_format: @format,
+        timestamp_granularities: verbose_json? ? %w[segment] : nil,
       }.compact
     end
 
+    private
+
     # @return [String]
     def path
       raise NotImplementedError, "#{self.class.name}#path undefined"
@@ -148,7 +164,7 @@
     def request!
       @client
         .connection
-        .accept(@format.eql?(Format::JSON) ? :json : :text)
+        .accept(json? || verbose_json? ? :json : :text)
         .post(path, form: payload)
     end
   end
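
The payload hunk means the request now sends response_format explicitly and asks for segment-level timestamps only when verbose_json is selected. Roughly, for a verbose_json request (illustrative values; keys outside the hunk, such as the audio file itself, are omitted, and nil entries are dropped by #compact):

    form = {
      language: "en",
      prompt: nil,
      temperature: 0.7,
      response_format: "verbose_json",
      timestamp_granularities: %w[segment],
    }.compact
    # => { language: "en", temperature: 0.7, response_format: "verbose_json",
    #      timestamp_granularities: ["segment"] }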
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module OmniAI
-  VERSION = "2.8.2"
+  VERSION = "2.8.4"
 end
metadata CHANGED
@@ -1,13 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: omniai
 version: !ruby/object:Gem::Version
-  version: 2.8.2
+  version: 2.8.4
 platform: ruby
 authors:
 - Kevin Sylvestre
 bindir: exe
 cert_chain: []
-date: 2025-06-09 00:00:00.000000000 Z
+date: 1980-01-02 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: base64
@@ -136,6 +136,7 @@ files:
 - lib/omniai/mcp/server.rb
 - lib/omniai/mcp/transport/base.rb
 - lib/omniai/mcp/transport/stdio.rb
+- lib/omniai/parse_error.rb
 - lib/omniai/schema.rb
 - lib/omniai/schema/array.rb
 - lib/omniai/schema/format.rb
@@ -168,7 +169,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.6.3
+rubygems_version: 3.7.2
 specification_version: 4
 summary: A generalized framework for interacting with many AI services
 test_files: []