llmed 0.1.7 → 0.1.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4)
  1. checksums.yaml +4 -4
  2. data/lib/llm.rb +66 -0
  3. data/lib/llmed.rb +81 -60
  4. metadata +3 -2
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 8e5acda1d0b919ac5e1224b9a08837b45f8cf34f771703c73d4f9118cf2cf7f1
-  data.tar.gz: b8530ee6ca2e7676165a4c05e636894fe3c0e532bd5d9e1b22a340fb99d311b3
+  metadata.gz: 44c211eda3efc57a664ab947ebd9f8a89d7b1baf5fd320d78f54eee9f742dbcc
+  data.tar.gz: c5659d561c6e5e6fa2a62e8a96bfb959b9b8ab017de8c5f36a482461e16b4434
 SHA512:
-  metadata.gz: d4804d236284eb39c49424dbb1b971e36e2c77dc83fc632bc9899599585139ea78c1840782379b653ac942da9e893a32184ec4e4c50c57ab533fd7016af606d1
-  data.tar.gz: 60ae4787f24a46c90106eeff7022cf906e509f31e975cc08046fed84b9b709f55af9a467b4da75467928711830d98d883c31cab380f655ebc4ae2b3555b1be80
+  metadata.gz: f095496d76a624e17f8fa32c055bff45202c4f62a67acf2529a93873d6c03c1c6a7345ea6c3323dcf065c93ce4d056b5e2e4821ef165267ed9074526b383d3f9
+  data.tar.gz: 146a2b89ff296bf65deb7f837ed1348fe26543d709e0a1fb892cb7743ac5f22f8932fa254e38448576838de8b31de29d1accd5b44f9e4da7ec719819a6859e21
data/lib/llm.rb ADDED
@@ -0,0 +1,66 @@
+require 'openai'
+require 'langchain'
+
+Langchain.logger.level = Logger::ERROR
+
+class LLMed
+  module LLM
+    module Message
+      System = Struct.new(:content)
+      User = Struct.new(:content)
+    end
+
+    module Template
+      def self.build(template:, input_variables:)
+        Langchain::Prompt::PromptTemplate.new(template: template, input_variables: input_variables)
+      end
+    end
+
+    class Response
+      def initialize(response, tokens)
+        @response = response
+        @tokens = tokens
+      end
+
+      def source_code
+        @response
+      end
+
+      def total_tokens
+        @tokens
+      end
+    end
+
+    class OpenAI
+      def initialize(**args)
+        @llm = Langchain::LLM::OpenAI.new(**args)
+      end
+
+      def chat(messages: [])
+        messages = messages.map do |m|
+          case m
+          when Message::System
+            { role: 'system', content: m.content }
+          when Message::User
+            { role: 'user', content: m.content }
+          end
+        end
+
+        llm_response = @llm.chat(messages: messages)
+        Response.new(llm_response.chat_completion, llm_response.total_tokens)
+      end
+    end
+
+    class Test
+      def initialize
+        @output = ''
+      end
+
+      def chat(messages: [])
+        @output = messages.map { |m| m[:content] }.join("\n")
+
+        Response.new(@output, 0)
+      end
+    end
+  end
+end
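
The new data/lib/llm.rb isolates the LLM provider behind small wrapper classes: Message structs, a Template builder, a Response value object, and OpenAI/Test adapters. As a rough usage sketch (hypothetical driver code, not part of the gem, based only on the classes added above), the Test adapter simply echoes the joined message contents back and reports zero tokens:

# Minimal sketch, assuming the openai and langchain gems are installed;
# `require 'llmed'` pulls in lib/llm.rb via the require_relative added
# at the end of data/lib/llmed.rb (next file).
require 'llmed'

messages = [
  LLMed::LLM::Message::System.new('You are a software developer...'),
  LLMed::LLM::Message::User.new('print hello world to stdout')
]

llm = LLMed::LLM::Test.new   # swap in LLMed::LLM::OpenAI.new(api_key: ...) for real calls
response = llm.chat(messages: messages)
response.source_code         # => both message contents joined with a newline
response.total_tokens        # => 0

The OpenAI adapter converts the same Message structs into role/content hashes before delegating to Langchain::LLM::OpenAI, so the compiler code in llmed.rb can stay provider-agnostic.
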
data/lib/llmed.rb CHANGED
@@ -2,14 +2,11 @@
 # frozen_string_literal: true
 
 require 'pp'
-require 'openai'
-require 'langchain'
+require 'csv'
 require 'pathname'
 require 'fileutils'
 require 'forwardable'
 
-Langchain.logger.level = Logger::ERROR
-
 class LLMed
   extend Forwardable
 
@@ -34,13 +31,13 @@ class LLMed
     end
 
     def message?
-      not (@message.nil? || @message.empty?)
+      !(@message.nil? || @message.empty?)
     end
 
     # Example:
     # context("files") { sh "ls /etc" }
     def sh(cmd)
-      %x{#{cmd}}
+      `#{cmd}`
     end
 
     # Example:
@@ -59,7 +56,7 @@ class LLMed
 
   class Configuration
     def initialize
-      @prompt = Langchain::Prompt::PromptTemplate.new(template: "
+      @prompt = LLMed::LLM::Template.build(template: "
 You are a software developer and only have knowledge of the programming language {language}.
 Your response must contain only the generated source code, with no additional text.
 All source code must be written in a single file, and you must ensure it runs correctly on the first attempt.
@@ -68,7 +65,7 @@ Always include the properly escaped comment: LLMED-COMPILED.
 You must only modify the following source code:
 {source_code}
 
-", input_variables: ["language", "source_code"])
+", input_variables: %w[language source_code])
     end
 
     def prompt(language:, source_code:)
@@ -78,8 +75,11 @@ You must only modify the following source code:
     # Change the default prompt, input variables: language, source_code
     # Example:
     # set_prompt "my new prompt"
-    def set_prompt(prompt, input_variables: ["language", "source_code"])
-      @prompt = Langchain::Prompt::PromptTemplate.new(template: prompt, input_variables: input_variables)
+    def set_prompt(*arg, input_variables: %w[language source_code], **args)
+      input_variables = {} if args[:file]
+      prompt = File.read(args[:file]) if args[:file]
+      prompt ||= arg.first
+      @prompt = LLMed::LLM::Template.build(template: prompt, input_variables: input_variables)
     end
 
     # Set default language used for all applications.
@@ -97,19 +97,22 @@ You must only modify the following source code:
 
     def language(main)
       lang = main || @language
-      raise "Please assign a language to the application or general with the function set_languag" if lang.nil?
+      raise 'Please assign a language to the application or general with the function set_languag' if lang.nil?
+
       lang
     end
 
-    def llm()
+    def llm
       case @provider
       when :openai
-        Langchain::LLM::OpenAI.new(
+        LLMed::LLM::OpenAI.new(
           api_key: @provider_api_key,
-          default_options: { temperature: 0.7, chat_model: @provider_model}
+          default_options: { temperature: 0.7, chat_model: @provider_model }
         )
+      when :test
+        LLMed::LLM::Test.new
       when nil
-        raise "Please set the provider with `set_llm(provider, api_key, model)`"
+        raise 'Please set the provider with `set_llm(provider, api_key, model)`'
       else
         raise "not implemented provider #{@provider}"
       end
@@ -120,7 +123,7 @@ You must only modify the following source code:
     attr_reader :contexts, :name, :language
 
     def initialize(name:, language:, output_file:, block:, logger:, release:)
-      raise "required language" if language.nil?
+      raise 'required language' if language.nil?
 
       @name = name
       @output_file = output_file
@@ -134,30 +137,32 @@ You must only modify the following source code:
     def context(name, **opts, &block)
       ctx = Context.new(name: name, options: opts)
       output = ctx.instance_eval(&block)
-      unless ctx.message?
-        ctx.llm(output)
-      end
+      ctx.llm(output) unless ctx.message?
 
       @contexts << ctx
     end
 
     def evaluate
-      self.instance_eval(&@block)
+      instance_eval(&@block)
     end
 
     def source_code(output_dir, release_dir)
+      return unless @output_file.is_a?(String)
       return unless @release
-      release_source_code = Pathname.new(release_dir) + "#{@output_file}.r#{@release}#{@language}"
+
+      release_source_code = Pathname.new(release_dir) + "#{@output_file}.r#{@release}#{@language}.cache"
+      release_main_source_code = Pathname.new(release_dir) + "#{@output_file}.release"
       output_file = Pathname.new(output_dir) + @output_file
-      if @release and not File.exist?(release_source_code)
+      if @release && !File.exist?(release_source_code)
         FileUtils.cp(output_file, release_source_code)
+        FileUtils.cp(output_file, release_main_source_code)
         @logger.info("APPLICATION #{@name} RELEASE FILE #{release_source_code}")
       end
-
-      return File.read(release_source_code)
+      @logger.info("APPLICATION #{@name} INPUT RELEASE FILE #{release_main_source_code}")
+      File.read(release_main_source_code)
     end
 
-    def output_file(output_dir, mode = 'w')
+    def output_file(output_dir, mode = 'w', &block)
       if @output_file.respond_to? :write
         yield @output_file
       else
@@ -166,21 +171,31 @@ You must only modify the following source code:
 
         @logger.info("APPLICATION #{@name} OUTPUT FILE #{path}")
 
-        File.open(path, mode) do |file|
-          yield file
-        end
+        File.open(path, mode, &block)
       end
     end
+
+    def write_statistics(release_dir, total_tokens)
+      return unless @output_file.is_a?(String)
+
+      statistics_file = Pathname.new(release_dir) + "#{@output_file}.statistics"
+
+      File.open(statistics_file, 'a') do |file|
+        csv = CSV.new(file)
+        csv << [Time.now.to_i, @name, @release, total_tokens]
+      end
+      @logger.info("APPLICATION #{@name} WROTE STATISTICS FILE #{statistics_file}")
+    end
   end
 
   def initialize(logger:)
     @logger = logger
     @applications = []
-    @configuration = Configuration.new()
+    @configuration = Configuration.new
   end
 
   def eval_source(code)
-    self.instance_eval(code)
+    instance_eval(code)
   end
 
   # changes default language
@@ -190,38 +205,50 @@ You must only modify the following source code:
   # changes default prompt
   def_delegator :@configuration, :set_prompt, :set_prompt
 
-  def application(name, language: nil, release: nil, output_file:, &block)
-    @app = Application.new(name: name, language: @configuration.language(language), output_file: output_file, block: block, logger: @logger, release: release)
+  def application(name, output_file:, language: nil, release: nil, &block)
+    @app = Application.new(name: name, language: @configuration.language(language), output_file: output_file,
+                           block: block, logger: @logger, release: release)
     @applications << @app
   end
 
   def compile(output_dir:, release_dir: nil)
-    release_dir = output_dir unless release_dir
-    @applications.each do |app|
-      @logger.info("APPLICATION #{app.name} COMPILING")
-
-      llm = @configuration.llm()
-      system_content = @configuration.prompt(language: app.language, source_code: app.source_code(output_dir, release_dir))
-      messages = [
-        {role: "system", content: system_content},
-      ]
-      app.evaluate
-      app.contexts.each do |ctx|
-        next if ctx.skip?
-        messages << {role: "user", content: ctx.message}
-      end
+    @applications.each { |app| compile_application(app, output_dir, release_dir) }
+  end
 
-      llm_response = llm.chat(messages: messages)
-      response = llm_response.chat_completion
-      @logger.info("APPLICATION #{app.name} TOTAL TOKENS #{llm_response.total_tokens}")
-      write_output(app, output_dir, source_code(response))
+  private
+
+  def compile_application(app, output_dir, release_dir)
+    release_dir ||= output_dir
+
+    @logger.info("APPLICATION #{app.name} COMPILING")
+
+    llm = @configuration.llm
+    system_content = @configuration.prompt(language: app.language,
+                                           source_code: app.source_code(
+                                             output_dir, release_dir
+                                           ))
+    messages = [LLMed::LLM::Message::System.new(system_content)]
+    app.evaluate
+    app.contexts.each do |ctx|
+      next if ctx.skip?
+
+      messages << LLMed::LLM::Message::User.new(ctx.message)
     end
+
+    llm_response = llm.chat(messages: messages)
+    response = llm_response.source_code
+    @logger.info("APPLICATION #{app.name} TOTAL TOKENS #{llm_response.total_tokens}")
+    write_output(app, output_dir, source_code(response))
+    write_statistics(app, release_dir, llm_response)
   end
 
-  private
   def source_code(content)
     # TODO: by provider?
-    content.gsub('```', '').sub(/^(ruby|python(\d*)|elixir|c(pp)?|perl|bash)/, '')
+    content.gsub('```', '').sub(/^(node(js)?|javascript|ruby|python(\d*)|elixir|perl|bash|c(pp)?)/, '')
+  end
+
+  def write_statistics(app, release_dir, response)
+    app.write_statistics(release_dir, response.total_tokens)
   end
 
   def write_output(app, output_dir, output)
@@ -229,12 +256,6 @@ You must only modify the following source code:
       file.write(output)
     end
   end
-
-  def edit_same_source_code(app, output_dir, messages)
-    content = ""
-    app.output_file(output_dir, 'r') do |file|
-      content = "Codigo fuente a modificar: #{file.read()}"
-    end
-    messages << {role: "user", content: content}
-  end
 end
+
+require_relative 'llm'
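
Taken together, the data/lib/llmed.rb changes move provider handling into the new LLMed::LLM wrappers, add a :test provider, let set_prompt read its template from a file, cache release snapshots in separate files, and record per-compile token counts as CSV rows (timestamp, application name, release, total tokens). A hypothetical definition file exercising the new options could look like the sketch below; set_llm and set_language follow the names referenced in the comments and error messages above, and the file path and context wording are purely illustrative:

# hello.llmed (illustrative sketch only)
set_llm(:test, nil, nil)                   # :test echoes the prompt back; no API key needed
set_language :ruby
set_prompt file: 'prompts/developer.txt'   # new: template loaded from a file (input_variables are cleared)

application 'hello', output_file: 'hello.rb', release: 1 do
  context 'greeting' do
    'print a hello world message to stdout'
  end
end

Token counts go to <output_file>.statistics, and release snapshots are kept as <output_file>.r<release><language>.cache and <output_file>.release, per Application#write_statistics and Application#source_code above.
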
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: llmed
 version: !ruby/object:Gem::Version
-  version: 0.1.7
+  version: 0.1.10
 platform: ruby
 authors:
 - Jovany Leandro G.C
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2025-06-04 00:00:00.000000000 Z
+date: 2025-06-06 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: langchainrb
@@ -76,6 +76,7 @@ extensions: []
 extra_rdoc_files: []
 files:
 - exe/llmed
+- lib/llm.rb
 - lib/llmed.rb
 homepage: https://github.com/bit4bit/llm-labs/tree/main/llmed
 licenses: