monadic-chat 0.3.2 → 0.3.4

This diff shows the changes between publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only.
@@ -5,6 +5,10 @@ class MonadicApp
5
5
  # methods for preparation and updating
6
6
  ##################################################
7
7
 
8
+ def count_tokens(text)
9
+ MonadicChat.tokenize(text).size
10
+ end
11
+
8
12
  def fulfill_placeholders
9
13
  input = nil
10
14
  replacements = []
@@ -52,11 +56,9 @@ class MonadicApp
52
56
  end
53
57
  end
54
58
 
55
- def prepare_params(input)
59
+ def prepare_params(input_role, input)
56
60
  params = @params.dup
57
61
 
58
- @update_proc.call
59
-
60
62
  case @mode
61
63
  when :research
62
64
  messages = +""
@@ -67,19 +69,19 @@ class MonadicApp
67
69
  case role
68
70
  when "system"
69
71
  system << "#{content}\n"
70
- when "assistant", "gpt"
71
- messages << "- #{mes["role"].strip}: #{content}\n"
72
72
  else
73
- messages << "- #{mes["role"].strip}: #{mes["content"]}\n"
73
+ messages << "- #{mes["role"].strip}: #{content}\n"
74
74
  end
75
75
  end
76
76
  template = @template.dup.sub("{{SYSTEM}}", system)
77
77
  .sub("{{PROMPT}}", input)
78
78
  .sub("{{MESSAGES}}", messages.strip)
79
79
 
80
+ @template_tokens = count_tokens(template)
81
+
80
82
  File.open(TEMP_MD, "w") { |f| f.write template }
81
83
 
82
- @messages << { "role" => "user", "content" => input }
84
+ @messages << { "role" => input_role, "content" => input }
83
85
 
84
86
  case @method
85
87
  when "completions"
@@ -89,14 +91,16 @@ class MonadicApp
89
91
  end
90
92
 
91
93
  when :normal
92
- @messages << { "role" => "user", "content" => input }
94
+ @messages << { "role" => input_role, "content" => input }
93
95
  params["messages"] = @messages
94
96
  end
95
97
 
98
+ @update_proc.call unless input_role == "system"
99
+
96
100
  params
97
101
  end
98
102
 
99
- def update_template(res)
103
+ def update_template(res, role)
100
104
  case @mode
101
105
  when :research
102
106
  @metadata = res
@@ -107,15 +111,22 @@ class MonadicApp
107
111
  when :normal
108
112
  @messages << { "role" => "assistant", "content" => res }
109
113
  end
114
+ remove_intermediate_messages if role == "system"
115
+ end
116
+
117
+ def remove_intermediate_messages
118
+ @messages = @messages.reject { |ele| ele["role"] == "assistant" && /SEARCH\(.+\)/m =~ ele["content"] }
119
+ @messages = @messages.reject { |ele| ele["role"] == "system" && /^SEARCH SNIPPETS/ =~ ele["content"] }
110
120
  end
111
121
 
112
122
  ##################################################
113
123
  # function to bind data
114
124
  ##################################################
115
125
 
116
- def bind(input, num_retry: 0)
126
+ def bind(input, role: "user", num_retry: 0)
127
+ @turns += 1 if role == "user"
117
128
  print PROMPT_ASSISTANT.prefix, "\n"
118
- params = prepare_params(input)
129
+ params = prepare_params(role, input)
119
130
  research_mode = @mode == :research
120
131
 
121
132
  escaping = +""
@@ -141,7 +152,41 @@ class MonadicApp
141
152
  print last_chunk
142
153
  print "\n"
143
154
 
144
- update_template(res)
145
- set_html if @html
155
+ webdata = use_tool(res)
156
+ update_template(res, role) unless webdata
157
+ if webdata && role != "system"
158
+ bind(webdata, role: "system", num_retry: num_retry)
159
+ elsif @html
160
+ set_html
161
+ end
162
+ end
163
+
164
+ ##################################################
165
+ # function to have GPT use tools
166
+ ##################################################
167
+
168
+ def use_tool(res)
169
+ case @mode
170
+ when :normal
171
+ text = res
172
+ when :research
173
+ text = res.is_a?(Hash) ? res["response"] : res
174
+ end
175
+
176
+ case text
177
+ when /\bSEARCH_WIKI\((.+?)\)/m
178
+ search_key = Regexp.last_match(1)
179
+ search_keys = search_key.split(",").map do |key|
180
+ key.strip.sub(/^"(.+)"$/, '\1')
181
+ end
182
+ text = "SEARCH SNIPPETS\n#{wikipedia_search(*search_keys)}"
183
+ return text
184
+ when /\bSEARCH_WEB\("?(.+?)"?\)/m
185
+ search_key = Regexp.last_match(1)
186
+ text = "SEARCH SNIPPETS\n#{bing_search(search_key)}"
187
+ return text
188
+ end
189
+
190
+ false
146
191
  end
147
192
  end
@@ -57,6 +57,7 @@ class MonadicApp
57
57
  @params = @params_initial.dup
58
58
  @messages = @messages_initial.dup
59
59
  @template = @template_initial.dup
60
+ @template_tokens = 0
60
61
 
61
62
  if @placeholders.empty?
62
63
  print PROMPT_SYSTEM.prefix
@@ -68,7 +69,7 @@ class MonadicApp
68
69
 
69
70
  def ask_retrial(input, message = nil)
70
71
  print PROMPT_SYSTEM.prefix
71
- print " Error: #{message.capitalize}\n" if message
72
+ print "Error: #{message.capitalize}\n" if message
72
73
  retrial = PROMPT_USER.select("Do you want to try again?",
73
74
  show_help: :never) do |menu|
74
75
  menu.choice "Yes", "yes"
@@ -5,13 +5,12 @@ require "oj"
5
5
  require "net/http"
6
6
  require "uri"
7
7
  require "strscan"
8
- require "parallel"
9
8
  require "tty-progressbar"
10
9
 
11
10
  Oj.mimic_JSON
12
11
 
13
12
  module OpenAI
14
- def self.model_name(research_mode: false)
13
+ def self.default_model(research_mode: false)
15
14
  if research_mode
16
15
  "text-davinci-003"
17
16
  else
@@ -89,18 +88,8 @@ module OpenAI
89
88
  class Completion
90
89
  attr_reader :access_token
91
90
 
92
- def initialize(access_token, normal_mode_model = nil, research_mode_model = nil)
91
+ def initialize(access_token)
93
92
  @access_token = access_token
94
- @normal_mode_model = normal_mode_model || OpenAI.model_name(research_mode: false)
95
- @research_mode_model = research_mode_model || OpenAI.model_name(research_mode: true)
96
- end
97
-
98
- def model_name(research_mode: false)
99
- if research_mode
100
- @research_mode_model
101
- else
102
- @normal_mode_model
103
- end
104
93
  end
105
94
 
106
95
  def models
@@ -127,7 +116,7 @@ module OpenAI
127
116
  when 0
128
117
  raise e
129
118
  else
130
- run(params, num_retry: num_retry - 1, &block)
119
+ run(params, research_mode: research_mode, num_retry: num_retry - 1, &block)
131
120
  end
132
121
  end
133
122
 
@@ -0,0 +1,63 @@
1
+ # frozen_string_literal: true
2
+
3
+ class MonadicApp
4
+ ##################################################
5
+ # method for web search
6
+ ##################################################
7
+
8
+ def bing_search(query, retrial: 5)
9
+ uri = "https://www.bing.com/search"
10
+ css_selector = "#b_results"
11
+
12
+ q = URI.encode_www_form(q: query)
13
+ doc = Nokogiri::HTML(URI.parse([uri, q].join("?")).read)
14
+ doc.css("script, link").each(&:remove)
15
+ doc.css(css_selector).text.squeeze(" \n")
16
+ rescue StandardError
17
+ return "SEARCH ENGINE NOT AVAILABLE" if retrial.zero?
18
+
19
+ sleep 1
20
+ retrial -= 1
21
+ bing_search(query, retrial: retrial)
22
+ end
23
+
24
+ def wikipedia_search(keywords, base_url = nil)
25
+ base_url ||= "https://en.wikipedia.org/w/api.php"
26
+ search_params = {
27
+ action: "query",
28
+ list: "search",
29
+ format: "json",
30
+ srsearch: keywords,
31
+ utf8: 1,
32
+ formatversion: 2
33
+ }
34
+
35
+ search_uri = URI(base_url)
36
+ search_uri.query = URI.encode_www_form(search_params)
37
+ search_response = Net::HTTP.get(search_uri)
38
+ search_data = JSON.parse(search_response)
39
+
40
+ raise if search_data["query"]["search"].empty?
41
+
42
+ title = search_data["query"]["search"][0]["title"]
43
+
44
+ content_params = {
45
+ action: "query",
46
+ prop: "extracts",
47
+ format: "json",
48
+ titles: title,
49
+ explaintext: 1,
50
+ utf8: 1,
51
+ formatversion: 2
52
+ }
53
+
54
+ content_uri = URI(base_url)
55
+ content_uri.query = URI.encode_www_form(content_params)
56
+ content_response = Net::HTTP.get(content_uri)
57
+ content_data = JSON.parse(content_response)
58
+
59
+ content_data["query"]["pages"][0]["extract"][0..1000]
60
+ rescue StandardError
61
+ "SEARCH RESULTS EMPTY"
62
+ end
63
+ end
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module MonadicChat
4
- VERSION = "0.3.2"
4
+ VERSION = "0.3.4"
5
5
  end
data/lib/monadic_chat.rb CHANGED
@@ -1,5 +1,6 @@
1
1
  # frozen_string_literal: true
2
2
 
3
+ require "blingfire"
3
4
  require "tty-cursor"
4
5
  require "tty-screen"
5
6
  require "tty-markdown"
@@ -13,6 +14,9 @@ require "rouge"
13
14
  require "launchy"
14
15
  require "io/console"
15
16
  require "readline"
17
+ require "nokogiri"
18
+ require "open-uri"
19
+ require "wikipedia"
16
20
 
17
21
  require_relative "./monadic_chat/version"
18
22
  require_relative "./monadic_chat/open_ai"
@@ -21,6 +25,10 @@ require_relative "./monadic_chat/helper"
21
25
  Oj.mimic_JSON
22
26
 
23
27
  module MonadicChat
28
+ SETTINGS = {}
29
+ MAX_CHARS_WIKI = 1000
30
+ gpt2model_path = File.absolute_path(File.join(__dir__, "..", "assets", "gpt2.bin"))
31
+ BLINGFIRE = BlingFire.load_model(gpt2model_path)
24
32
  CONFIG = File.join(Dir.home, "monadic_chat.conf")
25
33
  NUM_RETRY = 2
26
34
  MIN_LENGTH = 5
@@ -105,54 +113,74 @@ module MonadicChat
105
113
 
106
114
  def self.open_readme
107
115
  url = "https://github.com/yohasebe/monadic-chat/"
108
- shellscript = <<~SHELL
109
- if [[ "$OSTYPE" == "darwin"* ]]; then
110
- open "#{url}"
111
- elif [[ "$OSTYPE" == "linux-gnu"* ]]; then
112
- if command -v xdg-open >/dev/null 2>&1; then
113
- xdg-open "#{url}"
114
- else
115
- echo "#{url}"
116
- fi
117
- else
118
- echo "#{url}"
119
- fi
120
- SHELL
121
- `#{shellscript}`
116
+ Launchy.open(url)
122
117
  end
123
118
 
124
- def self.authenticate(overwrite: false)
125
- check = lambda do |token, normal_mode_model, research_mode_model|
126
- print "Checking configuration\n"
127
- SPINNER.auto_spin
119
+ def self.mdprint(str)
120
+ print TTY::Markdown.parse(str, indent: 0)
121
+ end
122
+
123
+ def self.authenticate(overwrite: false, message: true)
124
+ check = lambda do |token|
125
+ if message
126
+ print TTY::Cursor.restore
127
+ print TTY::Cursor.clear_screen_down
128
+ print "\n"
129
+ SPINNER.auto_spin
130
+ end
131
+
132
+ if !token || token.strip == ""
133
+ if message
134
+ SPINNER.stop
135
+ print TTY::Cursor.restore
136
+ print "\n"
137
+ mdprint "- Authentication: #{PASTEL.bold.red("Failure")}\n" if message
138
+ end
139
+ return false
140
+ end
141
+
128
142
  begin
129
143
  models = OpenAI.models(token)
130
144
  raise if models.empty?
131
145
 
132
- SPINNER.stop
133
-
134
- print "Success\n"
135
-
136
- if normal_mode_model && !models.map { |m| m["id"] }.index(normal_mode_model)
146
+ if message
137
147
  SPINNER.stop
138
- print "Normal mode model set in config file not available.\n"
139
- normal_mode_model = false
148
+ print TTY::Cursor.restore, "\n"
149
+ mdprint "#{PASTEL.on_green(" System ")} Config file: `#{CONFIG}`\n"
150
+ print "\n"
151
+ mdprint "- Authentication: #{PASTEL.bold.green("Success")}\n"
140
152
  end
141
- normal_mode_model ||= OpenAI.model_name(research_mode: false)
142
- print "Normal mode model: #{normal_mode_model}\n"
143
153
 
144
- if research_mode_model && !models.map { |m| m["id"] }.index(research_mode_model)
145
- SPINNER.stop
146
- print "Normal mode model set in config file not available.\n"
147
- print "Fallback to the default model (#{OpenAI.model_name(research_mode: true)}).\n"
154
+ if SETTINGS["normal_model"] && !models.map { |m| m["id"] }.index(SETTINGS["normal_model"])
155
+ if message
156
+ SPINNER.stop
157
+ mdprint "- Normal mode model specified in config file not available\n"
158
+ mdprint "- Fallback to the default model (`#{OpenAI.default_model(research_mode: false)}`)\n"
159
+ end
160
+ SETTINGS["normal_model"] = false
161
+ end
162
+ SETTINGS["normal_model"] ||= OpenAI.default_model(research_mode: false)
163
+ mdprint "- Normal mode model: `#{SETTINGS["normal_model"]}`\n" if message
164
+
165
+ if SETTINGS["research_model"] && !models.map { |m| m["id"] }.index(SETTINGS["research_model"])
166
+ if message
167
+ SPINNER.stop
168
+ mdprint "- Research mode model specified in config file not available\n"
169
+ mdprint "- Fallback to the default model (`#{OpenAI.default_model(research_mode: true)}`)\n"
170
+ end
171
+ SETTINGS["research_model"] = false
148
172
  end
149
- research_mode_model ||= OpenAI.model_name(research_mode: true)
150
- print "Research mode model: #{research_mode_model}\n"
173
+ SETTINGS["research_model"] ||= OpenAI.default_model(research_mode: true)
174
+ mdprint "- Research mode model: `#{SETTINGS["research_model"]}`\n" if message
151
175
 
152
- OpenAI::Completion.new(token, normal_mode_model, research_mode_model)
176
+ OpenAI::Completion.new(token)
153
177
  rescue StandardError
154
- SPINNER.stop
155
- print "Authentication: failure.\n"
178
+ if message
179
+ SPINNER.stop
180
+ print TTY::Cursor.restore
181
+ print "\n"
182
+ mdprint "- Authentication: #{PASTEL.bold.red("Failure")}\n" if message
183
+ end
156
184
  false
157
185
  end
158
186
  end
@@ -160,16 +188,20 @@ module MonadicChat
160
188
  completion = nil
161
189
 
162
190
  if overwrite
163
- access_token = PROMPT_SYSTEM.ask(" Input your OpenAI access token:")
191
+ access_token = PROMPT_SYSTEM.ask("Input your OpenAI access token:")
164
192
  return false if access_token.to_s == ""
165
193
 
166
- completion = check.call(access_token, nil, nil)
194
+ completion = check.call(access_token)
167
195
 
168
196
  if completion
169
197
  File.open(CONFIG, "w") do |f|
170
- config = { "access_token" => access_token }
198
+ config = {
199
+ "access_token" => access_token,
200
+ "normal_model" => SETTINGS["normal_model"],
201
+ "research_model" => SETTINGS["research_model"]
202
+ }
171
203
  f.write(JSON.pretty_generate(config))
172
- print "New access token has been saved to #{CONFIG}\n"
204
+ print "New access token has been saved to #{CONFIG}\n" if message
173
205
  end
174
206
  end
175
207
  elsif File.exist?(CONFIG)
@@ -180,19 +212,25 @@ module MonadicChat
180
212
  puts "Error: config file does not contain a valid JSON object."
181
213
  exit
182
214
  end
215
+ SETTINGS["normal_model"] = config["normal_model"] if config["normal_model"]
216
+ SETTINGS["research_model"] = config["research_model"] if config["research_model"]
183
217
  access_token = config["access_token"]
184
- normal_mode_model = config["normal_mode_model"]
185
- research_mode_model = config["research_mode_model"]
186
- completion = check.call(access_token, normal_mode_model, research_mode_model)
218
+ completion = check.call(access_token)
187
219
  else
188
- access_token ||= PROMPT_SYSTEM.ask(" Input your OpenAI access token:")
189
- completion = check.call(access_token, nil, nil)
220
+ access_token ||= PROMPT_SYSTEM.ask("Input your OpenAI access token:")
221
+ return false if access_token.to_s == ""
222
+
223
+ completion = check.call(access_token)
190
224
  if completion
191
225
  File.open(CONFIG, "w") do |f|
192
- config = { "access_token" => access_token }
226
+ config = {
227
+ "access_token" => access_token,
228
+ "normal_model" => SETTINGS["normal_model"],
229
+ "research_model" => SETTINGS["research_model"]
230
+ }
193
231
  f.write(JSON.pretty_generate(config))
194
232
  end
195
- print "Access token has been saved to #{CONFIG}\n"
233
+ print "Access token has been saved to #{CONFIG}\n" if message
196
234
  end
197
235
  end
198
236
  completion || authenticate(overwrite: true)
@@ -219,6 +257,10 @@ module MonadicChat
219
257
  "\n#{PASTEL.send(:"on_#{color}", name)}"
220
258
  end
221
259
 
260
+ def self.tokenize(text)
261
+ BLINGFIRE.text_to_ids(text)
262
+ end
263
+
222
264
  PROMPT_USER = TTY::PromptX.new(active_color: :blue, prefix: prompt_user)
223
265
  PROMPT_SYSTEM = TTY::PromptX.new(active_color: :blue, prefix: "#{prompt_system} ")
224
266
  PROMPT_ASSISTANT = TTY::PromptX.new(active_color: :red, prefix: "#{prompt_assistant} ")
data/monadic_chat.gemspec CHANGED
@@ -37,11 +37,11 @@ Gem::Specification.new do |spec|
37
37
  spec.add_development_dependency "rake"
38
38
  spec.add_development_dependency "rspec"
39
39
 
40
+ spec.add_dependency "blingfire"
40
41
  spec.add_dependency "http"
41
42
  spec.add_dependency "kramdown"
42
43
  spec.add_dependency "launchy"
43
44
  spec.add_dependency "oj"
44
- spec.add_dependency "parallel"
45
45
  spec.add_dependency "pastel"
46
46
  spec.add_dependency "rouge"
47
47
  spec.add_dependency "tty-box"
@@ -51,4 +51,5 @@ Gem::Specification.new do |spec|
51
51
  spec.add_dependency "tty-prompt"
52
52
  spec.add_dependency "tty-screen"
53
53
  spec.add_dependency "tty-spinner"
54
+ spec.add_dependency "wikipedia-client"
54
55
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: monadic-chat
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.3.2
4
+ version: 0.3.4
5
5
  platform: ruby
6
6
  authors:
7
7
  - yohasebe
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2023-03-25 00:00:00.000000000 Z
11
+ date: 2023-04-02 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: bundler
@@ -53,7 +53,7 @@ dependencies:
53
53
  - !ruby/object:Gem::Version
54
54
  version: '0'
55
55
  - !ruby/object:Gem::Dependency
56
- name: http
56
+ name: blingfire
57
57
  requirement: !ruby/object:Gem::Requirement
58
58
  requirements:
59
59
  - - ">="
@@ -67,7 +67,7 @@ dependencies:
67
67
  - !ruby/object:Gem::Version
68
68
  version: '0'
69
69
  - !ruby/object:Gem::Dependency
70
- name: kramdown
70
+ name: http
71
71
  requirement: !ruby/object:Gem::Requirement
72
72
  requirements:
73
73
  - - ">="
@@ -81,7 +81,7 @@ dependencies:
81
81
  - !ruby/object:Gem::Version
82
82
  version: '0'
83
83
  - !ruby/object:Gem::Dependency
84
- name: launchy
84
+ name: kramdown
85
85
  requirement: !ruby/object:Gem::Requirement
86
86
  requirements:
87
87
  - - ">="
@@ -95,7 +95,7 @@ dependencies:
95
95
  - !ruby/object:Gem::Version
96
96
  version: '0'
97
97
  - !ruby/object:Gem::Dependency
98
- name: oj
98
+ name: launchy
99
99
  requirement: !ruby/object:Gem::Requirement
100
100
  requirements:
101
101
  - - ">="
@@ -109,7 +109,7 @@ dependencies:
109
109
  - !ruby/object:Gem::Version
110
110
  version: '0'
111
111
  - !ruby/object:Gem::Dependency
112
- name: parallel
112
+ name: oj
113
113
  requirement: !ruby/object:Gem::Requirement
114
114
  requirements:
115
115
  - - ">="
@@ -248,6 +248,20 @@ dependencies:
248
248
  - - ">="
249
249
  - !ruby/object:Gem::Version
250
250
  version: '0'
251
+ - !ruby/object:Gem::Dependency
252
+ name: wikipedia-client
253
+ requirement: !ruby/object:Gem::Requirement
254
+ requirements:
255
+ - - ">="
256
+ - !ruby/object:Gem::Version
257
+ version: '0'
258
+ type: :runtime
259
+ prerelease: false
260
+ version_requirements: !ruby/object:Gem::Requirement
261
+ requirements:
262
+ - - ">="
263
+ - !ruby/object:Gem::Version
264
+ version: '0'
251
265
  description: 'Monadic Chat is a command-line client application program that uses
252
266
  OpenAI''s Text Completion API and Chat API to enable chat-style conversations with
253
267
  OpenAI''s artificial intelligence system in a ChatGPT-like style.
@@ -284,6 +298,7 @@ files:
284
298
  - apps/translate/translate.md
285
299
  - apps/translate/translate.rb
286
300
  - assets/github.css
301
+ - assets/gpt2.bin
287
302
  - assets/pigments-default.css
288
303
  - bin/monadic-chat
289
304
  - doc/img/code-example-time-html.png
@@ -310,6 +325,7 @@ files:
310
325
  - lib/monadic_chat/menu.rb
311
326
  - lib/monadic_chat/open_ai.rb
312
327
  - lib/monadic_chat/parameters.rb
328
+ - lib/monadic_chat/tools.rb
313
329
  - lib/monadic_chat/version.rb
314
330
  - monadic_chat.gemspec
315
331
  homepage: https://github.com/yohasebe/monadic-chat