monadic-chat 0.3.3 → 0.3.4
- checksums.yaml +4 -4
- data/CHANGELOG.md +4 -0
- data/Gemfile.lock +4 -1
- data/README.md +2 -3
- data/apps/chat/chat.json +3 -1
- data/apps/chat/chat.md +3 -4
- data/apps/chat/chat.rb +8 -9
- data/apps/code/code.md +2 -4
- data/apps/code/code.rb +8 -9
- data/apps/linguistic/linguistic.md +0 -4
- data/apps/linguistic/linguistic.rb +8 -9
- data/apps/novel/novel.md +4 -6
- data/apps/novel/novel.rb +8 -9
- data/apps/translate/translate.md +3 -4
- data/apps/translate/translate.rb +8 -9
- data/bin/monadic-chat +2 -1
- data/doc/img/how-research-mode-works.svg +1 -1
- data/doc/img/research-mode-template.svg +1 -1
- data/lib/monadic_app.rb +9 -3
- data/lib/monadic_chat/formatting.rb +8 -1
- data/lib/monadic_chat/internals.rb +54 -13
- data/lib/monadic_chat/menu.rb +2 -1
- data/lib/monadic_chat/open_ai.rb +3 -13
- data/lib/monadic_chat/tools.rb +63 -0
- data/lib/monadic_chat/version.rb +1 -1
- data/lib/monadic_chat.rb +78 -30
- data/monadic_chat.gemspec +1 -0
- metadata +17 -2
data/lib/monadic_chat/internals.rb
CHANGED
@@ -56,11 +56,9 @@ class MonadicApp
     end
   end
 
-  def prepare_params(input)
+  def prepare_params(input_role, input)
     params = @params.dup
 
-    @update_proc.call
-
     case @mode
     when :research
       messages = +""
@@ -71,19 +69,19 @@ class MonadicApp
         case role
         when "system"
           system << "#{content}\n"
-        when "assistant", "gpt"
-          messages << "- #{mes["role"].strip}: #{content}\n"
         else
-          messages << "- #{mes["role"].strip}: #{
+          messages << "- #{mes["role"].strip}: #{content}\n"
         end
       end
       template = @template.dup.sub("{{SYSTEM}}", system)
                           .sub("{{PROMPT}}", input)
                           .sub("{{MESSAGES}}", messages.strip)
 
+      @template_tokens = count_tokens(template)
+
       File.open(TEMP_MD, "w") { |f| f.write template }
 
-      @messages << { "role" =>
+      @messages << { "role" => input_role, "content" => input }
 
       case @method
       when "completions"
@@ -93,14 +91,16 @@ class MonadicApp
       end
 
     when :normal
-      @messages << { "role" =>
+      @messages << { "role" => input_role, "content" => input }
       params["messages"] = @messages
     end
 
+    @update_proc.call unless input_role == "system"
+
     params
   end
 
-  def update_template(res)
+  def update_template(res, role)
     case @mode
     when :research
       @metadata = res
@@ -111,15 +111,22 @@ class MonadicApp
     when :normal
       @messages << { "role" => "assistant", "content" => res }
     end
+    remove_intermediate_messages if role == "system"
+  end
+
+  def remove_intermediate_messages
+    @messages = @messages.reject { |ele| ele["role"] == "assistant" && /SEARCH\(.+\)/m =~ ele["content"] }
+    @messages = @messages.reject { |ele| ele["role"] == "system" && /^SEARCH SNIPPETS/ =~ ele["content"] }
   end
 
   ##################################################
   # function to bind data
   ##################################################
 
-  def bind(input, num_retry: 0)
+  def bind(input, role: "user", num_retry: 0)
+    @turns += 1 if role == "user"
     print PROMPT_ASSISTANT.prefix, "\n"
-    params = prepare_params(input)
+    params = prepare_params(role, input)
     research_mode = @mode == :research
 
     escaping = +""
@@ -145,7 +152,41 @@ class MonadicApp
     print last_chunk
     print "\n"
 
-
-
+    webdata = use_tool(res)
+    update_template(res, role) unless webdata
+    if webdata && role != "system"
+      bind(webdata, role: "system", num_retry: num_retry)
+    elsif @html
+      set_html
+    end
+  end
+
+  ##################################################
+  # function to have GPT use tools
+  ##################################################
+
+  def use_tool(res)
+    case @mode
+    when :normal
+      text = res
+    when :research
+      text = res.is_a?(Hash) ? res["response"] : res
+    end
+
+    case text
+    when /\bSEARCH_WIKI\((.+?)\)/m
+      search_key = Regexp.last_match(1)
+      search_keys = search_key.split(",").map do |key|
+        key.strip.sub(/^"(.+)"$/, '\1')
+      end
+      text = "SEARCH SNIPPETS\n#{wikipedia_search(*search_keys)}"
+      return text
+    when /\bSEARCH_WEB\("?(.+?)"?\)/m
+      search_key = Regexp.last_match(1)
+      text = "SEARCH SNIPPETS\n#{bing_search(search_key)}"
+      return text
+    end
+
+    false
   end
 end
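Note on the changes above: in research mode the assistant can now reply with a SEARCH_WIKI(...) or SEARCH_WEB(...) directive instead of a final answer; `use_tool` extracts the arguments, the matching search helper runs, and `bind` feeds the resulting "SEARCH SNIPPETS" back into the conversation as a "system" turn, after which `remove_intermediate_messages` strips the intermediate tool traffic from `@messages`. A minimal standalone sketch of just the directive parsing (the method name `extract_tool_call` and the sample strings are illustrative, not part of the gem):

    # Sketch of the pattern matching performed by use_tool (stub only, no searches).
    def extract_tool_call(text)
      case text
      when /\bSEARCH_WIKI\((.+?)\)/m
        # split comma-separated arguments and strip surrounding quotes
        keys = Regexp.last_match(1).split(",").map { |k| k.strip.sub(/^"(.+)"$/, '\1') }
        [:wiki, keys]
      when /\bSEARCH_WEB\("?(.+?)"?\)/m
        [:web, Regexp.last_match(1)]
      end
    end

    p extract_tool_call('SEARCH_WIKI("Ruby programming language")')
    # => [:wiki, ["Ruby programming language"]]
    p extract_tool_call('SEARCH_WEB("monadic chat ruby gem")')
    # => [:web, "monadic chat ruby gem"]
    p extract_tool_call("a plain answer, no tool call")
    # => nil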
data/lib/monadic_chat/menu.rb
CHANGED
@@ -57,6 +57,7 @@ class MonadicApp
     @params = @params_initial.dup
     @messages = @messages_initial.dup
     @template = @template_initial.dup
+    @template_tokens = 0
 
     if @placeholders.empty?
       print PROMPT_SYSTEM.prefix
@@ -68,7 +69,7 @@ class MonadicApp
 
   def ask_retrial(input, message = nil)
     print PROMPT_SYSTEM.prefix
-    print "
+    print "Error: #{message.capitalize}\n" if message
     retrial = PROMPT_USER.select("Do you want to try again?",
                                  show_help: :never) do |menu|
       menu.choice "Yes", "yes"
data/lib/monadic_chat/open_ai.rb
CHANGED
@@ -10,7 +10,7 @@ require "tty-progressbar"
 Oj.mimic_JSON
 
 module OpenAI
-  def self.
+  def self.default_model(research_mode: false)
     if research_mode
       "text-davinci-003"
     else
@@ -88,18 +88,8 @@ module OpenAI
   class Completion
     attr_reader :access_token
 
-    def initialize(access_token
+    def initialize(access_token)
       @access_token = access_token
-      @normal_mode_model = normal_mode_model || OpenAI.model_name(research_mode: false)
-      @research_mode_model = research_mode_model || OpenAI.model_name(research_mode: true)
-    end
-
-    def model_name(research_mode: false)
-      if research_mode
-        @research_mode_model
-      else
-        @normal_mode_model
-      end
     end
 
     def models
@@ -126,7 +116,7 @@ module OpenAI
       when 0
         raise e
       else
-        run(params, num_retry: num_retry - 1, &block)
+        run(params, research_mode: research_mode, num_retry: num_retry - 1, &block)
       end
     end
 
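Note: the per-instance model bookkeeping (`model_name` and the extra constructor arguments) is gone; `Completion.new` now takes only the access token, and defaults are resolved through the module-level `OpenAI.default_model`. A short illustration of the new entry points (only the research-mode default, "text-davinci-003", is visible in this hunk; the token below is a placeholder):

    require "monadic_chat"

    OpenAI.default_model(research_mode: true)                   # => "text-davinci-003"
    completion = OpenAI::Completion.new(ENV["OPENAI_API_KEY"])  # token only, no model arguments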
data/lib/monadic_chat/tools.rb
ADDED
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+class MonadicApp
+  ##################################################
+  # method for web search
+  ##################################################
+
+  def bing_search(query, retrial: 5)
+    uri = "https://www.bing.com/search"
+    css_selector = "#b_results"
+
+    q = URI.encode_www_form(q: query)
+    doc = Nokogiri::HTML(URI.parse([uri, q].join("?")).read)
+    doc.css("script, link").each(&:remove)
+    doc.css(css_selector).text.squeeze(" \n")
+  rescue StandardError
+    return "SEARCH ENGINE NOT AVAILABLE" if retrial.zero?
+
+    sleep 1
+    retrial -= 1
+    bing_search(query, retrial: retrial)
+  end
+
+  def wikipedia_search(keywords, base_url = nil)
+    base_url ||= "https://en.wikipedia.org/w/api.php"
+    search_params = {
+      action: "query",
+      list: "search",
+      format: "json",
+      srsearch: keywords,
+      utf8: 1,
+      formatversion: 2
+    }
+
+    search_uri = URI(base_url)
+    search_uri.query = URI.encode_www_form(search_params)
+    search_response = Net::HTTP.get(search_uri)
+    search_data = JSON.parse(search_response)
+
+    raise if search_data["query"]["search"].empty?
+
+    title = search_data["query"]["search"][0]["title"]
+
+    content_params = {
+      action: "query",
+      prop: "extracts",
+      format: "json",
+      titles: title,
+      explaintext: 1,
+      utf8: 1,
+      formatversion: 2
+    }
+
+    content_uri = URI(base_url)
+    content_uri.query = URI.encode_www_form(content_params)
+    content_response = Net::HTTP.get(content_uri)
+    content_data = JSON.parse(content_response)
+
+    content_data["query"]["pages"][0]["extract"][0..1000]
+  rescue StandardError
+    "SEARCH RESULTS EMPTY"
+  end
+end
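The new tools.rb mixes two helpers into MonadicApp: `bing_search` fetches a Bing results page via open-uri, strips script and link tags with Nokogiri, and returns the text of the `#b_results` element, retrying up to five times before giving up with "SEARCH ENGINE NOT AVAILABLE"; `wikipedia_search` runs a MediaWiki `list=search` query, then a `prop=extracts` query for the top hit, and returns roughly the first 1,000 characters of the extract (or "SEARCH RESULTS EMPTY"). A self-contained sketch of the same two-step Wikipedia lookup using only the standard library (the method name `wiki_extract` is illustrative, not the gem's API):

    require "json"
    require "net/http"
    require "uri"

    # Two-step MediaWiki lookup: find the best-matching title, then fetch its plain-text extract.
    def wiki_extract(keywords, limit: 1000)
      base = "https://en.wikipedia.org/w/api.php"

      search = URI(base)
      search.query = URI.encode_www_form(action: "query", list: "search", format: "json",
                                         srsearch: keywords, utf8: 1, formatversion: 2)
      hits = JSON.parse(Net::HTTP.get(search)).dig("query", "search")
      return "SEARCH RESULTS EMPTY" if hits.nil? || hits.empty?

      content = URI(base)
      content.query = URI.encode_www_form(action: "query", prop: "extracts", format: "json",
                                          titles: hits[0]["title"], explaintext: 1,
                                          utf8: 1, formatversion: 2)
      JSON.parse(Net::HTTP.get(content)).dig("query", "pages", 0, "extract").to_s[0...limit]
    end

    puts wiki_extract("monad functional programming")[0..200]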
data/lib/monadic_chat/version.rb
CHANGED
data/lib/monadic_chat.rb
CHANGED
@@ -14,6 +14,9 @@ require "rouge"
 require "launchy"
 require "io/console"
 require "readline"
+require "nokogiri"
+require "open-uri"
+require "wikipedia"
 
 require_relative "./monadic_chat/version"
 require_relative "./monadic_chat/open_ai"
@@ -22,6 +25,8 @@ require_relative "./monadic_chat/helper"
 Oj.mimic_JSON
 
 module MonadicChat
+  SETTINGS = {}
+  MAX_CHARS_WIKI = 1000
   gpt2model_path = File.absolute_path(File.join(__dir__, "..", "assets", "gpt2.bin"))
   BLINGFIRE = BlingFire.load_model(gpt2model_path)
   CONFIG = File.join(Dir.home, "monadic_chat.conf")
@@ -111,38 +116,71 @@ module MonadicChat
     Launchy.open(url)
   end
 
+  def self.mdprint(str)
+    print TTY::Markdown.parse(str, indent: 0)
+  end
+
   def self.authenticate(overwrite: false, message: true)
-    check = lambda do |token
-
-
+    check = lambda do |token|
+      if message
+        print TTY::Cursor.restore
+        print TTY::Cursor.clear_screen_down
+        print "\n"
+        SPINNER.auto_spin
+      end
+
+      if !token || token.strip == ""
+        if message
+          SPINNER.stop
+          print TTY::Cursor.restore
+          print "\n"
+          mdprint "- Authentication: #{PASTEL.bold.red("Failure")}\n" if message
+        end
+        return false
+      end
+
       begin
         models = OpenAI.models(token)
         raise if models.empty?
 
-
-
-        print "Success\n" if message
-
-        if normal_mode_model && !models.map { |m| m["id"] }.index(normal_mode_model)
+        if message
           SPINNER.stop
-          print "
-
+          print TTY::Cursor.restore, "\n"
+          mdprint "#{PASTEL.on_green(" System ")} Config file: `#{CONFIG}`\n"
+          print "\n"
+          mdprint "- Authentication: #{PASTEL.bold.green("Success")}\n"
         end
-        normal_mode_model ||= OpenAI.model_name(research_mode: false)
-        print "Normal mode model: #{normal_mode_model}\n" if message
 
-        if
-
-
-
+        if SETTINGS["normal_model"] && !models.map { |m| m["id"] }.index(SETTINGS["normal_model"])
+          if message
+            SPINNER.stop
+            mdprint "- Normal mode model specified in config file not available\n"
+            mdprint "- Fallback to the default model (`#{OpenAI.default_model(research_mode: false)}`)\n"
+          end
+          SETTINGS["normal_model"] = false
+        end
+        SETTINGS["normal_model"] ||= OpenAI.default_model(research_mode: false)
+        mdprint "- Normal mode model: `#{SETTINGS["normal_model"]}`\n" if message
+
+        if SETTINGS["research_model"] && !models.map { |m| m["id"] }.index(SETTINGS["research_model"])
+          if message
+            SPINNER.stop
+            mdprint "- Research mode model specified in config file not available\n"
+            mdprint "- Fallback to the default model (`#{OpenAI.default_model(research_mode: true)}`)\n"
+          end
+          SETTINGS["research_model"] = false
         end
-
-
+        SETTINGS["research_model"] ||= OpenAI.default_model(research_mode: true)
+        mdprint "- Research mode model: `#{SETTINGS["research_model"]}`\n" if message
 
-        OpenAI::Completion.new(token
+        OpenAI::Completion.new(token)
       rescue StandardError
-
-
+        if message
+          SPINNER.stop
+          print TTY::Cursor.restore
+          print "\n"
+          mdprint "- Authentication: #{PASTEL.bold.red("Failure")}\n" if message
+        end
         false
       end
     end
@@ -150,14 +188,18 @@ module MonadicChat
     completion = nil
 
     if overwrite
-      access_token = PROMPT_SYSTEM.ask("
+      access_token = PROMPT_SYSTEM.ask("Input your OpenAI access token:")
      return false if access_token.to_s == ""
 
-      completion = check.call(access_token
+      completion = check.call(access_token)
 
      if completion
        File.open(CONFIG, "w") do |f|
-          config = {
+          config = {
+            "access_token" => access_token,
+            "normal_model" => SETTINGS["normal_model"],
+            "research_model" => SETTINGS["research_model"]
+          }
          f.write(JSON.pretty_generate(config))
          print "New access token has been saved to #{CONFIG}\n" if message
        end
@@ -170,16 +212,22 @@ module MonadicChat
        puts "Error: config file does not contain a valid JSON object."
        exit
      end
+      SETTINGS["normal_model"] = config["normal_model"] if config["normal_model"]
+      SETTINGS["research_model"] = config["research_model"] if config["research_model"]
      access_token = config["access_token"]
-
-      research_mode_model = config["research_mode_model"]
-      completion = check.call(access_token, normal_mode_model, research_mode_model)
+      completion = check.call(access_token)
    else
-      access_token ||= PROMPT_SYSTEM.ask("
-
+      access_token ||= PROMPT_SYSTEM.ask("Input your OpenAI access token:")
+      return false if access_token.to_s == ""
+
+      completion = check.call(access_token)
      if completion
        File.open(CONFIG, "w") do |f|
-          config = {
+          config = {
+            "access_token" => access_token,
+            "normal_model" => SETTINGS["normal_model"],
+            "research_model" => SETTINGS["research_model"]
+          }
          f.write(JSON.pretty_generate(config))
        end
        print "Access token has been saved to #{CONFIG}\n" if message
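With these changes `~/monadic_chat.conf` carries the preferred models alongside the access token: `authenticate` reads `normal_model` / `research_model` into `SETTINGS`, validates them against the account's model list, and falls back to `OpenAI.default_model` when a configured model is unavailable. A sketch of the JSON the config-writing block produces (the token and the normal-mode model name are placeholders; "text-davinci-003" is the research-mode default shown in open_ai.rb):

    require "json"

    config = {
      "access_token"   => "sk-...",              # placeholder
      "normal_model"   => "gpt-3.5-turbo",       # placeholder
      "research_model" => "text-davinci-003"
    }

    puts JSON.pretty_generate(config)            # shape of File.join(Dir.home, "monadic_chat.conf")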
data/monadic_chat.gemspec
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: monadic-chat
 version: !ruby/object:Gem::Version
-  version: 0.3.3
+  version: 0.3.4
 platform: ruby
 authors:
 - yohasebe
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-
+date: 2023-04-02 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler
@@ -248,6 +248,20 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
+- !ruby/object:Gem::Dependency
+  name: wikipedia-client
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 description: 'Monadic Chat is a command-line client application program that uses
   OpenAI''s Text Completion API and Chat API to enable chat-style conversations with
   OpenAI''s artificial intelligence system in a ChatGPT-like style.
@@ -311,6 +325,7 @@ files:
 - lib/monadic_chat/menu.rb
 - lib/monadic_chat/open_ai.rb
 - lib/monadic_chat/parameters.rb
+- lib/monadic_chat/tools.rb
 - lib/monadic_chat/version.rb
 - monadic_chat.gemspec
 homepage: https://github.com/yohasebe/monadic-chat
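The metadata diff registers wikipedia-client as a new runtime dependency (required as "wikipedia" in monadic_chat.rb) and adds lib/monadic_chat/tools.rb to the file list. A hedged usage sketch of the dependency itself; the accessor names are taken from the wikipedia-client README and are assumptions, not part of this gem's code:

    require "wikipedia"   # provided by the wikipedia-client gem

    page = Wikipedia.find("Monad (functional programming)")
    puts page.title
    puts page.summary     # assumed accessor; see the wikipedia-client documentation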