luo 0.2.5 → 0.2.7

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 4078773adb559f48cefc4e2bf43e64c0476bd4eb6f4e9ccfa215558c93c2d346
-  data.tar.gz: 4732dba7c93e21066cc828bc8042617ad29c93a6b50c799c518b8d96332cd640
+  metadata.gz: 4569a6fd6ae87797597f59bc73f8c1b6ac8d805f3410ecc716ffc7e1ddf428c1
+  data.tar.gz: 9695305bfe27cec9708fbc6fea8d1b000ff8450b17befdaa1a0c822da815f157
 SHA512:
-  metadata.gz: 521b30ee3642e76b70ff2ddd6d906b0394e7ad3ecee9ff4e69eb4155482c385947ab3051aafd1c38083c9e7e8aca90ca28b1edf8144d662ef76c8e9659be70a3
-  data.tar.gz: 325450266621354fcda412c1b40c4df88079c2eb985690e43f1b6fae4d7f87a5bf6de7a04ea077c5426a61e5e76c9d93df3908aa560189135228218382e9619e
+  metadata.gz: 281fca6621c8aaccc3c21559c4a0e8ef80738fdf648bb5db68fd639b3692bb4a5f5890c5ad20b94dab9159ae27a5569b99c1a3cf1408603af5894be76f315586
+  data.tar.gz: 80a2b4894e507b1874e21ba0928bc8b5c16cc88fda9870e5ee2c8b333dd1dcc0e2d80c3d5f954ce6abf8f35b56f93127d1aad1ebca34278c7d7427241429c904
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
 PATH
   remote: .
   specs:
-    luo (0.2.5)
+    luo (0.2.7)
       dotenv (~> 2.8, >= 2.8.1)
       dry-configurable (~> 1.0, >= 1.0.1)
       dry-schema (~> 1.13, >= 1.13.1)
data/lib/luo/open_ai.rb CHANGED
@@ -46,7 +46,12 @@ module Luo
       end
       params = EMBEDDING_PARAMS.call(input: text, model: model)
       return params.errors unless params.success?
-      embeddings(params).body.dig("data").map { |v| v["embedding"] }
+      response = embeddings(params)
+      if response.success?
+        response.body.dig("data").map { |v| v["embedding"] }
+      else
+        raise "create_embeddings failed: #{response.body}"
+      end
     end
 
     def chat(messages, temperature: nil)
@@ -59,7 +64,12 @@ module Luo
         messages: messages
       )
       return params.errors unless params.success?
-      chat_completions(params).body.dig("choices", 0, "message", "content")
+      response = chat_completions(params)
+      if response.success?
+        response.body.dig("choices", 0, "message", "content")
+      else
+        raise "request_chat failed: #{response.body}"
+      end
     end
 
     class << self
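The change above makes both OpenAI helpers check response.success? and raise a RuntimeError carrying the response body, instead of digging into an error payload as if it were a normal result. A minimal usage sketch under assumptions (the Luo::OpenAI constant and the full create_embeddings signature are inferred from the file path and the raise message, not shown in this hunk):

  # Hypothetical caller; assumes the gem is configured with valid OpenAI credentials.
  client = Luo::OpenAI.new

  begin
    vectors = client.create_embeddings("hello world")        # => [[0.0123, ...], ...]
    reply   = client.chat([{ role: "user", content: "Hi" }]) # => "Hello!"
  rescue RuntimeError => e
    # Non-successful HTTP responses now surface here, with the body in the message.
    warn "OpenAI request failed: #{e.message}"
  end

Note that schema-validation failures still return params.errors rather than raising, so strict callers may also want to check the return value.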
data/lib/luo/version.rb CHANGED
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module Luo
-  VERSION = "0.2.5"
+  VERSION = "0.2.7"
 end
data/lib/luo/xinghuo.rb CHANGED
@@ -23,15 +23,35 @@ module Luo
       optional(:max_tokens).maybe(:integer)
       optional(:random_threshold).maybe(:float)
       optional(:uid).maybe(:string)
+      optional(:stream).maybe(:bool)
+    end
+
+    EMBEDDING_PARAMS = Dry::Schema.Params do
+      required(:input).filled(:string)
     end
 
     # header uid max length is 32 todo
 
-    def request_chat(params)
-      client.post('/v1/spark/completions', params.to_h)
+    def request_chat(params, &block)
+      client.post('/v1/spark/completions', params.to_h, &block)
     end
 
-    def chat(messages, random_threshold: nil)
+    def embedding(params)
+      client.post('/v1/embedding', params.to_h)
+    end
+
+    def create_embedding(text, model: 'text-embedding-ada-002')
+      params = EMBEDDING_PARAMS.call(input: text)
+      return params.errors unless params.success?
+      response = embedding(params)
+      if response.success?
+        response.body.dig("data")
+      else
+        raise "create_embeddings failed: #{response.body}"
+      end
+    end
+
+    def chat(messages, random_threshold: nil, &block)
       if messages.is_a?(Messages)
         messages = messages.to_a
       end
@@ -41,10 +61,36 @@ module Luo
         messages: messages,
         max_tokens: config.max_tokens,
         random_threshold: random_threshold || config.random_threshold,
-        uid: config.uid.call
+        uid: config.uid.call,
+        stream: block_given?
       )
       return params.errors unless params.success?
+
+      body = {}
+      if block_given?
+        content = ""
+        response = request_chat(params) do |req|
+          req.options.on_data = Proc.new do |chunk, *|
+            if chunk =~ /data: (.+?)\n(?!data: \[DONE\])/
+              json = JSON.parse($1)
+              content += json.dig('choices', 0, 'delta', 'content')
+              body.merge!(json)
+            end
+            block.call(chunk)
+          end
+        end
+        body['choices'][0]['delta']['content'] = content
+        body['choices'][0]['message'] = body['choices'][0].delete('delta')
+      else
+        response = request_chat(params)
+      end
+
+      if response.success?
+        body = response.body if body.empty?
+        body.dig('choices', 0, 'message', 'content')
+      else
+        raise "request_chat failed: #{response.body}"
+      end
-      request_chat(params).body.dig('choices', 0, 'message', 'content')
     end
 
     class << self
@@ -54,6 +100,15 @@ module Luo
           client.chat(messages, random_threshold: temperature)
         end
       end
+
+      def llm_func_adapter_stream
+        client = self.new
+        Proc.new do |messages, temperature|
+          client.chat(messages, random_threshold: temperature) do |chunk|
+            yield chunk
+          end
+        end
+      end
     end
 
   end
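With these changes Xinghuo#chat accepts a block: when one is given the request is sent with stream: true, each raw SSE chunk is handed to the block via the request's on_data callback, and the assembled message content is still returned at the end. A hedged sketch of how the new surface might be used (the Luo::Xinghuo constant comes from the diff; credentials and the exact message format are assumptions):

  client = Luo::Xinghuo.new

  # Streaming: every raw "data: {...}" chunk is passed to the block as it arrives.
  full_reply = client.chat([{ role: "user", content: "Tell me a story" }]) do |chunk|
    print chunk
  end

  # Non-streaming call, unchanged apart from the new success check.
  reply = client.chat([{ role: "user", content: "Hello" }])

  # New embedding helper introduced in this release; returns the raw "data" array.
  data = client.create_embedding("hello world")

In the streaming branch the client collects the delta.content fragments it can parse out of the chunks, rewrites the last parsed JSON so that choices[0].message.content holds the full text, and returns that string, mirroring the non-streaming return value.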
@@ -10,17 +10,26 @@ module Luo
       setting :response_error, default: Luo::Prompts.xinghuo_response_error
     end
     setting :client, default: Luo::Xinghuo.new
+    setting :stream_callback, default: nil
+
+    def request(messages)
+      if config.stream_callback&.respond_to? :call
+        client.chat(messages, &config.stream_callback)
+      else
+        client.chat(messages)
+      end
+    end
 
     on_request do
       context.messages = Messages.create(history: context.histories.search(context.user_input))
                                  .user(prompt: config.prompts.input, context: {agents: self.class.agents, last_user_input: context.user_input})
-      response = client.chat(context.messages)
+      response = request(context.messages)
       if response.split("\n").select { |line| line.size > 1 }.size > 1
         message = Messages.create(history: context.histories.search(context.user_input))
                           .user(prompt: config.prompts.input, context: {agents: self.class.agents, last_user_input: context.user_input})
                           .assistant(text: response)
                           .user(prompt: config.prompts.response_error, context: {agents: self.class.agents, last_user_input: context.user_input})
-        context.response = client.chat(message)
+        context.response = request(message)
       else
         context.response = response
       end
@@ -37,7 +46,7 @@ module Luo
         add_agent(agent)
       else
         messages = Messages.create(history: context.histories.search(context.user_input)).user(text: context.user_input)
-        context.final_result = client.chat(messages)
+        context.final_result = request(messages)
       end
     end
 
@@ -5,6 +5,9 @@ Luo.app_setup do |loader|
 end
 
 class Runner < XinghuoAgentRunner
+
+  setting :stream_callback, default: ->(chunk) { puts chunk }
+
   register WeatherAgent
   register TimeAgent
   register XinghuoFinalAgent
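The Runner example above wires the two halves together: because stream_callback responds to call, the runner's new request helper passes it to client.chat as a block, so each raw chunk is printed by puts while the agents still receive the fully assembled response. A hedged variant (only XinghuoAgentRunner and the setting name come from this diff; the subclass is illustrative):

  # Hypothetical subclass streaming raw SSE chunks straight to stdout.
  class StreamingRunner < XinghuoAgentRunner
    setting :stream_callback, default: ->(chunk) { $stdout.write(chunk) }
  end

Leaving stream_callback at its nil default restores the previous behavior: client.chat is called without a block and no streaming request is made.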
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: luo
 version: !ruby/object:Gem::Version
-  version: 0.2.5
+  version: 0.2.7
 platform: ruby
 authors:
 - MJ
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2023-05-31 00:00:00.000000000 Z
+date: 2023-06-01 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: zeitwerk