luo 0.2.4 → 0.2.6

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 6b2af1ba5e628a075900879b4f2b54efceac535fe816fe9863535c51c33e6de1
- data.tar.gz: a8a7e2c548676dfd4adb6ca35e41e3cd3451930f845be02d06af7d38ca0c4e44
+ metadata.gz: 8d99e88006e5445420b8659fd680122ca1b047e15d9e97e7b62b6fd2144930b9
+ data.tar.gz: e7b6e686e73e1daa3bc3d345e8a1523267220e768bdd474d345dc263f2e9ab6b
  SHA512:
- metadata.gz: b0fa9249b55d4ee157c57966e2ac362f5827fce4cb2b82382d9f6ee7637b29ac6084a04130d67d4d91a1b91bfbbc1088b14f45d85419b3702332b9d9d3118fbd
- data.tar.gz: fcb78e2300c5dddce4a32ce6eadb208af15453751d49cab20b6e8c8ab7b9521ef4f08cd6eb231452e82f87ecf375b52ac99c5b189da9f9b869621cf8e17d8f60
+ metadata.gz: 20eafa8f4fafc7483e3acf28f532729eb6a993dad888d93f5c28d7bd7608643909d7fe771db4772c0e896658d9c3a6587760c9e85c115e81e7d29ea4813afc7a
+ data.tar.gz: 0b6cf883f769c6cbad3a178faa16ae6cd02dc7f240335e2b17b30f096b1d4f2b126a07cde18ecf8b5dacf4d8ead7d7a0add0f308abbeb4dd406f818c9778fe68
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
  PATH
  remote: .
  specs:
- luo (0.2.4)
+ luo (0.2.6)
  dotenv (~> 2.8, >= 2.8.1)
  dry-configurable (~> 1.0, >= 1.0.1)
  dry-schema (~> 1.13, >= 1.13.1)
@@ -6,6 +6,8 @@ module Luo
  include Configurable

  setting :language, default: "en"
+ setting :client, default: nil
+ setting :context_adapter, default: -> { Luo::AgentRunnerContext.new }

  def initialize(histories: nil)
  context.histories = histories unless histories.nil?
@@ -15,13 +17,18 @@ module Luo
  def on_init
  end

+ def client
+ raise Luo::ClientNotSetError, "client not set" if self.class.config.client.nil?
+ self.class.config.client
+ end
+
  def context
- @context ||= Luo::AgentRunnerContext.new
+ @context ||= config.context_adapter.call
  end

  def reset_context
  histories = context.histories
- @context = Luo::AgentRunnerContext.new
+ @context = config.context_adapter.call
  @context.histories = histories
  @context
  end
@@ -59,6 +66,16 @@ module Luo
  context.have_running_agents << agent
  end

+ def save_history
+ context.histories.save(context.user_input, context.final_result) if save_history?
+ end
+
+ # @private
+ private
+ def save_history?
+ true
+ end
+
  class << self

  def agents
@@ -85,6 +102,12 @@ module Luo
  end
  end

+ def disable_history
+ define_method(:save_history?) do
+ false
+ end
+ end
+
  end
  end
  end
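The two new settings and the disable_history macro above change how a runner is wired up. A minimal sketch of a subclass using them (MyRunner is a hypothetical name; the runner subclasses later in this diff use the same setting-override idiom):

class MyRunner < Luo::AgentRunner
  # client now comes from configuration; reading it while unset raises Luo::ClientNotSetError
  setting :client, default: Luo::OpenAI.new
  # context_adapter is a callable, so a custom context class could be swapped in here
  setting :context_adapter, default: -> { Luo::AgentRunnerContext.new }

  # defines save_history? to return false, turning save_history into a no-op
  disable_history
end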
@@ -3,7 +3,7 @@
  module Luo
  class AgentRunnerContext
  include Configurable
- setting :history_adapter, default: MemoryHistory
+ setting :history_adapter, default: ->(context) { MemoryHistory.new(context) }

  attr_accessor :user_input, :action_input, :response, :agent_results, :final_result, :messages, :retries

@@ -13,7 +13,7 @@ module Luo
  end

  def histories
- @histories ||= config.history_adapter.new
+ @histories ||= config.history_adapter.call(self)
  end

  def histories=(histories)
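Sketch of what the callable history_adapter means at runtime, assuming the default settings: the context now passes itself into the adapter, so the history object can reach back into it.

ctx = Luo::AgentRunnerContext.new
ctx.histories                      # => config.history_adapter.call(ctx), a Luo::MemoryHistory
ctx.histories.context.equal?(ctx)  # => true, via the new attr_reader :context on MemoryHistory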
@@ -0,0 +1,30 @@
+ # frozen_string_literal: true
+
+ module Luo
+ module LLMFunc
+ class Cain
+ include Dry::Configurable
+
+ setting :system, default: nil
+ setting :prompt
+ setting :adapter, default: Luo::OpenAI.llm_func_adapter
+ setting :temperature, default: 0
+
+
+ def call(env)
+ temperature = env.fetch(:temperature, nil)
+ history = env.fetch(:history, nil)
+ messages = Messages.create(history: history).user(prompt: config.prompt, context: env.to_h)
+
+ if config.system
+ messages = messages.system(text: config.system)
+ end
+
+ output = config.adapter.call(messages, temperature || config.temperature)
+ env.set(:output, output)
+ end
+
+ end
+
+ end
+ end
@@ -0,0 +1,65 @@
+ # frozen_string_literal: true
+
+ module Luo
+ module LLMFunc
+ class CainBuilder
+ def initialize
+ @system = nil
+ @prompt = nil
+ @adapter = Luo::OpenAI.llm_func_adapter
+ @temperature = 0
+
+ @middlewares = []
+ end
+
+ def system(text=nil)
+ @system = text
+ self
+ end
+
+ def prompt(prompt=nil)
+ @prompt = prompt
+ self
+ end
+
+ def adapter(adapter=nil)
+ @adapter = adapter
+ self
+ end
+
+ def temperature(temperature=nil)
+ @temperature = temperature
+ self
+ end
+
+ def use(middleware)
+ @middlewares << middleware
+ self
+ end
+
+ def build
+ cain = Cain.new.configure do |c|
+ c.system = @system
+ c.prompt = @prompt
+ c.adapter = @adapter
+ c.temperature = @temperature
+ end
+
+ _next_ = cain
+ @middlewares.reverse_each do |middleware|
+ _next_ = middleware.new(_next_)
+ end
+
+ Proc.new do |**input|
+ env = Luo::Middleware::Env.new(**input)
+ _next_.call(env)
+ end
+ end
+
+ def call(**input)
+ build.call(**input)
+ end
+
+ end
+ end
+ end
@@ -0,0 +1,12 @@
+ # frozen_string_literal: true
+
+ module Luo
+ module LLMFunc
+ extend self
+
+ def cain
+ CainBuilder.new
+ end
+
+ end
+ end
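A hedged usage sketch of the new builder entry point, pieced together from the classes above; the prompt template string and the :text key are illustrative, not part of the gem:

chain = Luo::LLMFunc.cain
          .system("You are a concise assistant.")
          .prompt("Answer briefly: <%= text %>")    # rendered with the env hash; template format assumed
          .adapter(Luo::OpenAI.llm_func_adapter)    # the default adapter, shown explicitly
          .temperature(0)
          .build

env = chain.call(text: "What is Zeitwerk?")
env.output   # => the adapter's completion, stored by Cain#call via env.set(:output, ...)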
@@ -7,12 +7,14 @@ module Luo
  class MemoryHistory
  include Configurable

- setting :max_size, default: 12
+ setting :max_size, default: 6

+ attr_reader :context
  ##
  # Initialize a queue
  # @param [Integer] max_size maximum length of the queue
- def initialize(max_size = config.max_size)
+ def initialize(context = nil, max_size: config.max_size)
+ @context = context
  @queue = []
  @max_size = max_size
  end
@@ -30,6 +32,15 @@ module Luo
  Marshal.load(Marshal.dump(self))
  end

+ def save(input, output)
+ @context_model ||= true
+ enqueue({input: input, output: output})
+ end
+
+ def context_model
+ @context_model
+ end
+
  def user(content)
  enqueue({role: "user", content: content})
  end
@@ -49,7 +60,17 @@ module Luo
  end

  def to_a
- @queue
+ return @queue unless context_model
+
+ @queue.reduce([]) do |rt, node|
+ rt << {role: "user", content: node[:input]}
+ rt << {role: "assistant", content: node[:output]}
+ rt
+ end
+ end
+
+ def search(_input)
+ to_a
  end

  def to_json
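Sketch of the pair-based mode added above: save switches the history into context mode, and to_a then expands each saved pair back into user/assistant messages.

history = Luo::MemoryHistory.new
history.save("What is 2 + 2?", "4")
history.to_a
# => [{role: "user", content: "What is 2 + 2?"},
#     {role: "assistant", content: "4"}]
history.search("anything")   # currently just delegates to to_a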
data/lib/luo/messages.rb CHANGED
@@ -41,8 +41,12 @@ module Luo
  end

  class << self
- def create(history: [])
- self.new(history: history)
+ def create(history: nil)
+ if history
+ self.new(history: history)
+ else
+ self.new(history: [])
+ end
  end
  end
@@ -0,0 +1,61 @@
+ # frozen_string_literal: true
+
+ module Luo
+ module Middleware
+ class Base
+ extend Dry::Configurable
+ def initialize(app)
+ raise ArgumentError, "app must respond to `call`" unless app.respond_to? :call
+ @app = app
+ end
+
+ def call(env)
+ env = Env.validate_env!(env)
+ env = Env.validate_env! _before_call_(env)
+ env = Env.validate_env! _call_(env)
+ env = Env.validate_env! @app.call(env)
+ Env.validate_env! _after_call_(env)
+ end
+
+ def _before_call_(env)
+ env
+ end
+
+ def _after_call_(env)
+ env
+ end
+
+ def _call_(env)
+ env
+ end
+
+ class << self
+
+ def create_method(name, &block)
+ define_method name do |env|
+ _env_ = block.call(env)
+ if _env_.is_a? Env
+ _env_
+ else
+ env
+ end
+ end
+ end
+
+ def before(&block)
+ create_method :_before_call_, &block
+ end
+
+ def after(&block)
+ create_method :_after_call_, &block
+ end
+
+ def call(&block)
+ create_method :_call_, &block
+ end
+
+ end
+
+ end
+ end
+ end
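A hedged sketch of a middleware built on this DSL (Timer is a made-up example): each hook receives the Env, and a hook that returns an Env replaces it, otherwise the original Env is passed along unchanged.

class Timer < Luo::Middleware::Base
  before do |env|
    env.set(:started_at, Time.now)    # Env#set returns the Env, so this result is kept
  end

  after do |env|
    env.set(:elapsed, Time.now - env.get(:started_at))
  end
end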
@@ -0,0 +1,56 @@
+ # frozen_string_literal: true
+
+ module Luo
+ module Middleware
+ class Env
+ attr_accessor :meta
+ def initialize(**meta)
+ @meta = meta
+ end
+
+ def set(key, value)
+ @meta[key] = value
+ self
+ end
+
+ def get(key)
+ @meta[key]
+ end
+
+ def input
+ @meta[:input]
+ end
+
+ def output
+ @meta[:output]
+ end
+
+ def fetch(key, default=nil)
+ @meta.fetch(key, default)
+ end
+
+ def fetch_and_delete!(key, default=nil)
+ @meta.fetch(key, default).tap do
+ @meta.delete(key)
+ end
+ end
+
+ def to_s
+ @meta.to_s
+ end
+
+ def to_h
+ @meta
+ end
+
+ def create_method(name, &block)
+ self.class.send(:define_method, name, &block)
+ end
+
+ def self.validate_env!(env)
+ raise ArgumentError, "env must be a Luo::Middleware::Env" unless env.is_a? Luo::Middleware::Env
+ env
+ end
+ end
+ end
+ end
@@ -0,0 +1,19 @@
+ # frozen_string_literal: true
+ module Luo
+ module Middleware
+ class Logger < Base
+
+ setting :level, default: ::Logger::INFO
+ setting :logger, default: ::Logger.new(STDOUT)
+
+ call do |env|
+ logger = config.logger
+ logger.level = config.level
+ env.create_method(:logger) do
+ logger
+ end
+ end
+
+ end
+ end
+ end
@@ -0,0 +1,28 @@
+ # frozen_string_literal: true
+
+ module Luo
+ module Middleware
+ module MemoryHistory
+ extend self
+
+ def create(history)
+ Class.new(Base) do
+ @history = history
+ def initialize(app)
+ @app = app
+ end
+
+ before do |env|
+ env.set(:history, @history)
+ end
+
+ after do |env|
+ @history.user env.fetch(:input)
+ @history.assistant env.fetch(:output)
+ end
+
+ end
+ end
+ end
+ end
+ end
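Putting the pieces together, a hedged composition sketch: the Logger middleware and a MemoryHistory-backed middleware wrapped around a Cain chain (the prompt template and the :input key are illustrative assumptions).

history = Luo::MemoryHistory.new
chat = Luo::LLMFunc.cain
         .prompt("<%= input %>")
         .use(Luo::Middleware::Logger)
         .use(Luo::Middleware::MemoryHistory.create(history))
         .build

chat.call(input: "Hello there")
history.to_a   # the after hook recorded the :input / :output pair as user/assistant messages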
@@ -0,0 +1,6 @@
+ # frozen_string_literal: true
+
+ module Luo
+ module Middleware
+ end
+ end
data/lib/luo/open_ai.rb CHANGED
@@ -46,7 +46,12 @@ module Luo
  end
  params = EMBEDDING_PARAMS.call(input: text, model: model)
  return params.errors unless params.success?
- embeddings(params).body.dig("data").map { |v| v["embedding"] }
+ response = embeddings(params)
+ if response.success?
+ response.body.dig("data").map { |v| v["embedding"] }
+ else
+ raise "create_embeddings failed: #{response.body}"
+ end
  end

  def chat(messages, temperature: nil)
@@ -59,9 +64,24 @@ module Luo
  messages: messages
  )
  return params.errors unless params.success?
- chat_completions(params).body.dig("choices", 0, "message", "content")
+ response = chat_completions(params)
+ if response.success?
+ response.body.dig("choices", 0, "message", "content")
+ else
+ raise "request_chat failed: #{response.body}"
+ end
+ end
+
+ class << self
+ def llm_func_adapter
+ client = self.new
+ Proc.new do |messages, temperature|
+ client.chat(messages, temperature: temperature)
+ end
+ end
  end

+
  end

  end
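The new class-level llm_func_adapter builds one client and wraps it in a two-argument Proc (messages, temperature), which is the shape LLMFunc::Cain expects. A minimal sketch, with an assumed messages payload:

adapter = Luo::OpenAI.llm_func_adapter
adapter.call([{role: "user", content: "ping"}], 0)   # => the chat completion text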
@@ -12,19 +12,15 @@ module Luo
  setting :tool_input, default: Luo::Prompts.agent_tool_input
  end

- on_init do
- @openai = OpenAI.new
- end
+ setting :client, default: Luo::OpenAI.new

  on_request do
- context.messages = Messages.create(history: context.histories)
+ context.messages = Messages.create(history: context.histories.search(context.user_input))
  .system(prompt: config.prompts.system)
  .user(prompt: config.prompts.input, context: {agents: self.class.agents, last_user_input: context.user_input})
- context.response = @openai.chat(context.messages)
+ context.response = client.chat(context.messages)
  end

- ##
- # TODO: parse the response with a markdown parsing library
  on_result do
  begin
  actions = JSON.parse(context.response)
@@ -36,13 +32,13 @@ module Luo
  agent = self.class.agents[action['action']]&.new(
  context: context,
  action_input: action['action_input'],
- client: @openai
+ client: client
  )
  add_agent(agent) if agent
  if action['action'] == "Final Answer"
  context.final_result = action['action_input']
- context.histories.user(context.user_input)
- context.histories.assistant(context.final_result)
+
+ save_history
  end
  end
  end
@@ -55,7 +51,7 @@ module Luo
  tools_response: context.agent_results
  }
  )
- context.response = @openai.chat(context.messages)
+ context.response = client.chat(context.messages)
  context.retries += 1
  on_result
  on_run
data/lib/luo/version.rb CHANGED
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module Luo
- VERSION = "0.2.4"
+ VERSION = "0.2.6"
  end
data/lib/luo/xinghuo.rb CHANGED
@@ -23,15 +23,16 @@ module Luo
  optional(:max_tokens).maybe(:integer)
  optional(:random_threshold).maybe(:float)
  optional(:uid).maybe(:string)
+ optional(:stream).maybe(:bool)
  end

  # header uid max length is 32 todo

- def request_chat(params)
- client.post('/v1/spark/completions', params.to_h)
+ def request_chat(params, &block)
+ client.post('/v1/spark/completions', params.to_h, &block)
  end

- def chat(messages, random_threshold: nil)
+ def chat(messages, random_threshold: nil, &block)
  if messages.is_a?(Messages)
  messages = messages.to_a
  end
@@ -41,10 +42,54 @@ module Luo
  messages: messages,
  max_tokens: config.max_tokens,
  random_threshold: random_threshold || config.random_threshold,
- uid: config.uid.call
+ uid: config.uid.call,
+ stream: block_given?
  )
  return params.errors unless params.success?
- request_chat(params).body.dig('choices', 0, 'message', 'content')
+
+ body = {}
+ if block_given?
+ content = ""
+ response = request_chat(params) do |req|
+ req.options.on_data = Proc.new do |chunk, *|
+ if chunk =~ /data: (.+?)\n(?!data: \[DONE\])/
+ json = JSON.parse($1)
+ content += json.dig('choices', 0, 'delta', 'content')
+ body.merge!(json)
+ end
+ block.call(chunk)
+ end
+ end
+ body['choices'][0]['delta']['content'] = content
+ body['choices'][0]['message'] = body['choices'][0].delete('delta')
+ else
+ response = request_chat(params)
+ end
+
+ if response.success?
+ body = response.body if body.empty?
+ body.dig('choices', 0, 'message', 'content')
+ else
+ raise "request_chat failed: #{response.body}"
+ end
+ end
+
+ class << self
+ def llm_func_adapter
+ client = self.new
+ Proc.new do |messages, temperature|
+ client.chat(messages, random_threshold: temperature)
+ end
+ end
+
+ def llm_func_adapter_stream
+ client = self.new
+ Proc.new do |messages, temperature|
+ client.chat(messages, random_threshold: temperature) do |chunk|
+ yield chunk
+ end
+ end
+ end
  end

  end
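A hedged sketch of the new streaming path in Xinghuo#chat shown above: passing a block sets stream: true, each raw SSE chunk is yielded to the block, and the assembled message content is still returned at the end.

xinghuo = Luo::Xinghuo.new
answer = xinghuo.chat([{role: "user", content: "Tell me a joke"}]) do |chunk|
  print chunk          # raw "data: ..." chunks as they arrive
end
puts answer            # the concatenated delta content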
@@ -9,21 +9,27 @@ module Luo
  setting :input, default: Luo::Prompts.xinghuo_agent_input
  setting :response_error, default: Luo::Prompts.xinghuo_response_error
  end
+ setting :client, default: Luo::Xinghuo.new
+ setting :stream_callback, default: nil

- on_init do
- @xinghuo = Xinghuo.new
+ def request(messages)
+ if config.stream_callback&.respond_to? :call
+ client.chat(messages, &config.stream_callback)
+ else
+ client.chat(messages)
+ end
  end

  on_request do
- context.messages = Messages.create(history: context.histories)
+ context.messages = Messages.create(history: context.histories.search(context.user_input))
  .user(prompt: config.prompts.input, context: {agents: self.class.agents, last_user_input: context.user_input})
- response = @xinghuo.chat(context.messages)
+ response = request(context.messages)
  if response.split("\n").select { |line| line.size >1 }.size > 1
- message = Messages.create(history: context.histories)
+ message = Messages.create(history: context.histories.search(context.user_input))
  .user(prompt: config.prompts.input, context: {agents: self.class.agents, last_user_input: context.user_input})
  .assistant(text: response)
  .user(prompt: config.prompts.response_error, context: {agents: self.class.agents, last_user_input: context.user_input})
- context.response = @xinghuo.chat(message)
+ context.response = request(message)
  else
  context.response = response
  end
@@ -35,12 +41,12 @@ module Luo
  agent = self.class.agents[agent_name]&.new(
  context: context,
  action_input: context.user_input,
- client: @xinghuo
+ client: client
  )
  add_agent(agent)
  else
- messages = Messages.create(history: context.histories).user(text: context.user_input)
- context.final_result = @xinghuo.chat(messages)
+ messages = Messages.create(history: context.histories.search(context.user_input)).user(text: context.user_input)
+ context.final_result = request(messages)
  end
  end

@@ -50,8 +56,7 @@ module Luo
  context.final_result = answer if answer
  end

- context.histories.user(context.user_input)
- context.histories.assistant(context.final_result)
+ save_history
  end
  end

@@ -60,7 +65,7 @@ module Luo
  agent_desc '你可以问我任何问题,我都会尽力回答你'

  on_call_with_final_result do
- messages = Messages.create(history: context.histories).user(text: context.user_input)
+ messages = Messages.create(history: context.histories.search(context.user_input)).user(text: context.user_input)
  client.chat(messages)
  end
data/lib/luo.rb CHANGED
@@ -16,6 +16,7 @@ require 'dry-initializer'
  require 'uri'
  require 'redcarpet'
  require 'thor'
+ require 'logger'

  require "zeitwerk"
  loader = Zeitwerk::Loader.for_gem
@@ -23,10 +24,12 @@ loader.inflector.inflect("open_ai" => "OpenAI")
  loader.inflector.inflect("aiui" => "AIUI")
  loader.inflector.inflect("cli" => "CLI")
  loader.inflector.inflect("open_ai_agent_runner" => "OpenAIAgentRunner")
+ loader.inflector.inflect('llm_func' => 'LLMFunc')
  loader.setup

  module Luo
  class Error < StandardError; end
+ class ClientNotSetError < StandardError; end
  # Your code goes here...

  module_eval do
@@ -5,6 +5,9 @@ Luo.app_setup do |loader|
  end

  class Runner < XinghuoAgentRunner
+
+ setting :stream_callback, default: ->(chunk) { puts chunk }
+
  register WeatherAgent
  register TimeAgent
  register XinghuoFinalAgent
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: luo
  version: !ruby/object:Gem::Version
- version: 0.2.4
+ version: 0.2.6
  platform: ruby
  authors:
  - MJ
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2023-05-25 00:00:00.000000000 Z
+ date: 2023-06-01 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: zeitwerk
@@ -236,10 +236,18 @@ files:
  - lib/luo/error_handle.rb
  - lib/luo/helpers.rb
  - lib/luo/http_client.rb
+ - lib/luo/llm_func.rb
+ - lib/luo/llm_func/cain.rb
+ - lib/luo/llm_func/cain_builder.rb
  - lib/luo/loader.rb
  - lib/luo/marqo.rb
  - lib/luo/memory_history.rb
  - lib/luo/messages.rb
+ - lib/luo/middleware.rb
+ - lib/luo/middleware/base.rb
+ - lib/luo/middleware/env.rb
+ - lib/luo/middleware/logger.rb
+ - lib/luo/middleware/memory_history.rb
  - lib/luo/open_ai.rb
  - lib/luo/open_ai_agent_runner.rb
  - lib/luo/parser_markdown.rb