monadic-chat 0.3.3 → 0.3.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +10 -1
- data/Gemfile.lock +3 -4
- data/README.md +37 -38
- data/apps/chat/chat.json +3 -1
- data/apps/chat/chat.md +12 -14
- data/apps/chat/chat.rb +8 -9
- data/apps/code/code.md +10 -13
- data/apps/code/code.rb +8 -9
- data/apps/novel/novel.md +11 -14
- data/apps/novel/novel.rb +8 -9
- data/apps/translate/translate.md +9 -11
- data/apps/translate/translate.rb +8 -9
- data/bin/monadic-chat +25 -10
- data/doc/img/how-research-mode-works.svg +1 -1
- data/doc/img/research-mode-template.svg +1 -1
- data/lib/monadic_app.rb +10 -4
- data/lib/monadic_chat/authenticate.rb +115 -0
- data/lib/monadic_chat/commands.rb +75 -0
- data/lib/monadic_chat/formatting.rb +8 -1
- data/lib/monadic_chat/interaction.rb +1 -6
- data/lib/monadic_chat/internals.rb +97 -16
- data/lib/monadic_chat/menu.rb +2 -1
- data/lib/monadic_chat/open_ai.rb +9 -19
- data/lib/monadic_chat/tools.rb +87 -0
- data/lib/monadic_chat/version.rb +1 -1
- data/lib/monadic_chat.rb +39 -120
- data/user_apps/boilerplates/boilerplate.json +5 -0
- data/user_apps/boilerplates/boilerplate.md +41 -0
- data/user_apps/boilerplates/boilerplate.rb +85 -0
- data/{apps → user_apps}/linguistic/linguistic.md +7 -12
- data/{apps → user_apps}/linguistic/linguistic.rb +8 -9
- data/user_apps/wikipedia/wikipedia.json +3 -0
- data/user_apps/wikipedia/wikipedia.md +38 -0
- data/user_apps/wikipedia/wikipedia.rb +85 -0
- metadata +14 -5
- /data/{apps → user_apps}/linguistic/linguistic.json +0 -0
@@ -0,0 +1,75 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module MonadicChat
|
4
|
+
def self.open_readme
|
5
|
+
url = "https://github.com/yohasebe/monadic-chat/"
|
6
|
+
Launchy.open(url)
|
7
|
+
end
|
8
|
+
|
9
|
+
def self.mdprint(str)
|
10
|
+
print TTY::Markdown.parse(str, indent: 0)
|
11
|
+
end
|
12
|
+
|
13
|
+
def self.prompt_system
|
14
|
+
box_width = 8
|
15
|
+
name = "System".center(box_width, " ")
|
16
|
+
color = "green"
|
17
|
+
"\n#{PASTEL.send(:"on_#{color}", name)}"
|
18
|
+
end
|
19
|
+
|
20
|
+
def self.prompt_user
|
21
|
+
box_width = 6
|
22
|
+
color = "blue"
|
23
|
+
name = "User".center(box_width, " ")
|
24
|
+
"\n#{PASTEL.send(:"on_#{color}", name)}"
|
25
|
+
end
|
26
|
+
|
27
|
+
def self.prompt_assistant
|
28
|
+
box_width = 5
|
29
|
+
color = "red"
|
30
|
+
name = "GPT".center(box_width, " ")
|
31
|
+
"\n#{PASTEL.send(:"on_#{color}", name)}"
|
32
|
+
end
|
33
|
+
|
34
|
+
def self.tokenize(text)
|
35
|
+
BLINGFIRE.text_to_ids(text)
|
36
|
+
end
|
37
|
+
|
38
|
+
def self.create_app(app_name)
|
39
|
+
app_name = +app_name.downcase
|
40
|
+
user_apps_dir = File.join(HOME, "user_apps")
|
41
|
+
user_app_dir = File.join(user_apps_dir, app_name)
|
42
|
+
FileUtils.mkdir_p(user_app_dir)
|
43
|
+
# replace certain strings in boilerplate files (boilerplate.rb, boilerplate.json, boilerplate.md)
|
44
|
+
[".rb", ".json", ".md"].each do |ext|
|
45
|
+
file = File.join(HOME, "user_apps", "boilerplates", "boilerplate#{ext}")
|
46
|
+
content = File.read(file)
|
47
|
+
content.gsub!("{{APP_NAME}}", app_name)
|
48
|
+
content.gsub!("{{APP_CLASS_NAME}}", app_name.capitalize)
|
49
|
+
File.open(File.join(user_app_dir, "#{app_name}#{ext}"), "w") do |f|
|
50
|
+
f.write(content)
|
51
|
+
end
|
52
|
+
end
|
53
|
+
print PROMPT_SYSTEM.prefix, "Scaffolding of the app created successfully", "\n"
|
54
|
+
print "Edit the app files:", "\n"
|
55
|
+
print HOME, "\n"
|
56
|
+
print "user_apps", "\n"
|
57
|
+
print "└── #{app_name}", "\n"
|
58
|
+
print " ├── #{app_name}.json", "\n"
|
59
|
+
print " ├── #{app_name}.md", "\n"
|
60
|
+
print " └── #{app_name}.rb", "\n"
|
61
|
+
end
|
62
|
+
|
63
|
+
def self.delete_app(app_name)
|
64
|
+
app_name = +app_name.downcase
|
65
|
+
user_apps_dir = File.join(HOME, "user_apps")
|
66
|
+
user_app_dir = File.join(user_apps_dir, app_name)
|
67
|
+
# confirm user wants to delete the app
|
68
|
+
if PROMPT_SYSTEM.yes?("Are you sure you want to delete the app #{app_name}?")
|
69
|
+
FileUtils.rm_rf(user_app_dir)
|
70
|
+
print PROMPT_SYSTEM.prefix, "App deleted successfully", "\n"
|
71
|
+
else
|
72
|
+
print PROMPT_SYSTEM.prefix, "App deletion cancelled", "\n"
|
73
|
+
end
|
74
|
+
end
|
75
|
+
end
|
@@ -4,6 +4,13 @@ class MonadicApp
|
|
4
4
|
##################################################
|
5
5
|
# methods for formatting and presenting
|
6
6
|
##################################################
|
7
|
+
|
8
|
+
def show_template
|
9
|
+
puts "-----------------------------------------"
|
10
|
+
puts @template
|
11
|
+
puts "-----------------------------------------"
|
12
|
+
end
|
13
|
+
|
7
14
|
def format_data
|
8
15
|
contextual = []
|
9
16
|
accumulator = []
|
@@ -14,7 +21,7 @@ class MonadicApp
|
|
14
21
|
|
15
22
|
contextual << "- **#{key.split("_").map(&:capitalize).join(" ")}**: #{val.to_s.strip}"
|
16
23
|
end
|
17
|
-
contextual << "- **Num of Tokens in Template**: #{
|
24
|
+
contextual << "- **Num of Tokens in Template**: #{@template_tokens}"
|
18
25
|
end
|
19
26
|
|
20
27
|
@messages.each do |m|
|
@@ -6,14 +6,9 @@ class MonadicApp
|
|
6
6
|
##################################################
|
7
7
|
|
8
8
|
def user_input(text = "")
|
9
|
-
# if count_lines_below < 1
|
10
|
-
# ask_clear
|
11
|
-
# user_input
|
12
|
-
# else
|
13
9
|
res = PROMPT_USER.readline(text)
|
14
10
|
print TTY::Cursor.clear_line_after
|
15
11
|
res == "" ? nil : res
|
16
|
-
# end
|
17
12
|
end
|
18
13
|
|
19
14
|
def show_greet
|
@@ -32,7 +27,7 @@ class MonadicApp
|
|
32
27
|
end
|
33
28
|
|
34
29
|
def confirm_query(input)
|
35
|
-
if input.size <
|
30
|
+
if input.size < SETTINGS["min_query_size"]
|
36
31
|
PROMPT_SYSTEM.yes?("Would you like to proceed with this (very short) prompt?")
|
37
32
|
else
|
38
33
|
true
|
@@ -56,10 +56,15 @@ class MonadicApp
|
|
56
56
|
end
|
57
57
|
end
|
58
58
|
|
59
|
-
def prepare_params(input)
|
59
|
+
def prepare_params(input_role, input)
|
60
60
|
params = @params.dup
|
61
61
|
|
62
|
-
|
62
|
+
delimited_input = case input_role
|
63
|
+
when "user"
|
64
|
+
"NEW PROMPT: ###\n#{input}\n###"
|
65
|
+
when "system" # i.e. search engine
|
66
|
+
"SEARCH SNIPPETS: ###\n#{input}\n###"
|
67
|
+
end
|
63
68
|
|
64
69
|
case @mode
|
65
70
|
when :research
|
@@ -70,20 +75,22 @@ class MonadicApp
|
|
70
75
|
content = mes["content"]
|
71
76
|
case role
|
72
77
|
when "system"
|
73
|
-
system << "#{content}\n"
|
74
|
-
when "assistant", "gpt"
|
75
|
-
messages << "- #{mes["role"].strip}: #{content}\n"
|
78
|
+
system << "#{content}\n" if system == ""
|
76
79
|
else
|
77
|
-
messages << "- #{mes["role"].strip}: #{
|
80
|
+
messages << "- #{mes["role"].strip}: #{content}\n"
|
78
81
|
end
|
79
82
|
end
|
83
|
+
|
84
|
+
delimited_messages = "MESSAGES: ###\n#{messages}\n###"
|
80
85
|
template = @template.dup.sub("{{SYSTEM}}", system)
|
81
|
-
.sub("{{PROMPT}}",
|
82
|
-
.sub("{{MESSAGES}}",
|
86
|
+
.sub("{{PROMPT}}", delimited_input)
|
87
|
+
.sub("{{MESSAGES}}", delimited_messages.strip)
|
88
|
+
|
89
|
+
@template_tokens = count_tokens(template)
|
83
90
|
|
84
91
|
File.open(TEMP_MD, "w") { |f| f.write template }
|
85
92
|
|
86
|
-
@messages << { "role" =>
|
93
|
+
@messages << { "role" => input_role, "content" => input }
|
87
94
|
|
88
95
|
case @method
|
89
96
|
when "completions"
|
@@ -93,18 +100,20 @@ class MonadicApp
|
|
93
100
|
end
|
94
101
|
|
95
102
|
when :normal
|
96
|
-
@messages << { "role" =>
|
103
|
+
@messages << { "role" => input_role, "content" => input }
|
97
104
|
params["messages"] = @messages
|
98
105
|
end
|
99
106
|
|
107
|
+
@update_proc.call unless input_role == "system"
|
108
|
+
|
100
109
|
params
|
101
110
|
end
|
102
111
|
|
103
|
-
def update_template(res)
|
112
|
+
def update_template(res, role)
|
104
113
|
case @mode
|
105
114
|
when :research
|
106
115
|
@metadata = res
|
107
|
-
@messages << { "role" =>
|
116
|
+
@messages << { "role" => role, "content" => @metadata["response"] }
|
108
117
|
json = @metadata.to_json.strip
|
109
118
|
File.open(TEMP_JSON, "w") { |f| f.write json }
|
110
119
|
@template.sub!(/JSON:\n+```json.+```\n\n/m, "JSON:\n\n```json\n#{json}\n```\n\n")
|
@@ -113,19 +122,42 @@ class MonadicApp
|
|
113
122
|
end
|
114
123
|
end
|
115
124
|
|
125
|
+
##################################################
|
126
|
+
# function to package plain text into a unit
|
127
|
+
##################################################
|
128
|
+
|
129
|
+
def unit(input)
|
130
|
+
if input.instance_of?(Hash)
|
131
|
+
input
|
132
|
+
else
|
133
|
+
@metadata["response"] = input
|
134
|
+
@metadata
|
135
|
+
end
|
136
|
+
end
|
137
|
+
|
116
138
|
##################################################
|
117
139
|
# function to bind data
|
118
140
|
##################################################
|
119
141
|
|
120
|
-
def bind(input,
|
142
|
+
def bind(input, role: "user", num_retrials: 0)
|
143
|
+
case role
|
144
|
+
when "user"
|
145
|
+
@turns += 1
|
146
|
+
when "system" # i.e. search engine
|
147
|
+
input = "\n\n#{input}"
|
148
|
+
end
|
149
|
+
|
121
150
|
print PROMPT_ASSISTANT.prefix, "\n"
|
122
|
-
params = prepare_params(input)
|
151
|
+
params = prepare_params(role, input)
|
123
152
|
research_mode = @mode == :research
|
124
153
|
|
125
154
|
escaping = +""
|
126
155
|
last_chunk = +""
|
127
156
|
|
128
|
-
res = @completion.run(params,
|
157
|
+
res = @completion.run(params,
|
158
|
+
research_mode: research_mode,
|
159
|
+
timeout_sec: SETTINGS["timeout_sec"],
|
160
|
+
num_retrials: num_retrials) do |chunk|
|
129
161
|
if escaping
|
130
162
|
chunk = escaping + chunk
|
131
163
|
escaping = ""
|
@@ -145,7 +177,56 @@ class MonadicApp
|
|
145
177
|
print last_chunk
|
146
178
|
print "\n"
|
147
179
|
|
148
|
-
|
180
|
+
message = case role
|
181
|
+
when "system" # i.e. search engine; the response given above should be by "assistant"
|
182
|
+
{ role: "assistant", content: @mode == :research ? unit(res) : res }
|
183
|
+
when "user" # the response given above should be by "assistant"
|
184
|
+
searched = use_tool(res)
|
185
|
+
# but if the response is a search query, it should be by "system" (search engine)
|
186
|
+
if searched
|
187
|
+
@messages << { "role" => "assistant",
|
188
|
+
"content" => @mode == :research ? unit(res)["response"] : res }
|
189
|
+
if searched == "empty"
|
190
|
+
print PROMPT_SYSTEM.prefix, "Search results are empty", "\n"
|
191
|
+
return
|
192
|
+
else
|
193
|
+
bind(searched, role: "system")
|
194
|
+
return
|
195
|
+
end
|
196
|
+
# otherwise, it should be by "assistant"
|
197
|
+
else
|
198
|
+
{ role: "assistant", content: @mode == :research ? unit(res) : res }
|
199
|
+
end
|
200
|
+
end
|
201
|
+
|
202
|
+
update_template(message[:content], message[:role])
|
203
|
+
|
149
204
|
set_html if @html
|
150
205
|
end
|
206
|
+
|
207
|
+
##################################################
|
208
|
+
# function to have GPT use tools
|
209
|
+
##################################################
|
210
|
+
|
211
|
+
def use_tool(res)
|
212
|
+
case @mode
|
213
|
+
when :normal
|
214
|
+
text = res
|
215
|
+
when :research
|
216
|
+
text = res.is_a?(Hash) ? res["response"] : res
|
217
|
+
end
|
218
|
+
|
219
|
+
case text
|
220
|
+
when /\bSEARCH_WIKI\("?(.+?)"?\)/m
|
221
|
+
@wiki_search_cache ||= {}
|
222
|
+
search_key = Regexp.last_match(1)
|
223
|
+
wikipedia_search(search_key, @wiki_search_cache)
|
224
|
+
when /\bSEARCH_WEB\("?(.+?)"?\)/m
|
225
|
+
@web_search_cache ||= {}
|
226
|
+
search_key = Regexp.last_match(1)
|
227
|
+
bing_search(search_key, @web_search_cache)
|
228
|
+
else
|
229
|
+
false
|
230
|
+
end
|
231
|
+
end
|
151
232
|
end
|
data/lib/monadic_chat/menu.rb
CHANGED
@@ -57,6 +57,7 @@ class MonadicApp
|
|
57
57
|
@params = @params_initial.dup
|
58
58
|
@messages = @messages_initial.dup
|
59
59
|
@template = @template_initial.dup
|
60
|
+
@template_tokens = 0
|
60
61
|
|
61
62
|
if @placeholders.empty?
|
62
63
|
print PROMPT_SYSTEM.prefix
|
@@ -68,7 +69,7 @@ class MonadicApp
|
|
68
69
|
|
69
70
|
def ask_retrial(input, message = nil)
|
70
71
|
print PROMPT_SYSTEM.prefix
|
71
|
-
print "
|
72
|
+
print "Error: #{message.capitalize}\n" if message
|
72
73
|
retrial = PROMPT_USER.select("Do you want to try again?",
|
73
74
|
show_help: :never) do |menu|
|
74
75
|
menu.choice "Yes", "yes"
|
data/lib/monadic_chat/open_ai.rb
CHANGED
@@ -10,9 +10,9 @@ require "tty-progressbar"
|
|
10
10
|
Oj.mimic_JSON
|
11
11
|
|
12
12
|
module OpenAI
|
13
|
-
def self.
|
13
|
+
def self.default_model(research_mode: false)
|
14
14
|
if research_mode
|
15
|
-
"
|
15
|
+
"gpt-3.5-turbo"
|
16
16
|
else
|
17
17
|
"gpt-3.5-turbo"
|
18
18
|
end
|
@@ -88,28 +88,18 @@ module OpenAI
|
|
88
88
|
class Completion
|
89
89
|
attr_reader :access_token
|
90
90
|
|
91
|
-
def initialize(access_token
|
91
|
+
def initialize(access_token)
|
92
92
|
@access_token = access_token
|
93
|
-
@normal_mode_model = normal_mode_model || OpenAI.model_name(research_mode: false)
|
94
|
-
@research_mode_model = research_mode_model || OpenAI.model_name(research_mode: true)
|
95
|
-
end
|
96
|
-
|
97
|
-
def model_name(research_mode: false)
|
98
|
-
if research_mode
|
99
|
-
@research_mode_model
|
100
|
-
else
|
101
|
-
@normal_mode_model
|
102
|
-
end
|
103
93
|
end
|
104
94
|
|
105
95
|
def models
|
106
96
|
OpenAI.models(@access_token)
|
107
97
|
end
|
108
98
|
|
109
|
-
def run(params, research_mode: false,
|
99
|
+
def run(params, research_mode: false, timeout_sec: 60, num_retrials: 1, &block)
|
110
100
|
method = OpenAI.model_to_method(params["model"])
|
111
101
|
|
112
|
-
response = OpenAI.query(@access_token, "post", method,
|
102
|
+
response = OpenAI.query(@access_token, "post", method, timeout_sec, params, &block)
|
113
103
|
if response["error"]
|
114
104
|
raise response["error"]["message"]
|
115
105
|
elsif response["choices"][0]["finish_reason"] == "length"
|
@@ -122,11 +112,11 @@ module OpenAI
|
|
122
112
|
response["choices"][0]["text"]
|
123
113
|
end
|
124
114
|
rescue StandardError => e
|
125
|
-
case
|
115
|
+
case num_retrials
|
126
116
|
when 0
|
127
117
|
raise e
|
128
118
|
else
|
129
|
-
run(params,
|
119
|
+
run(params, research_mode: research_mode, timeout_sec: timeout_sec, num_retrials: num_retrials - 1, &block)
|
130
120
|
end
|
131
121
|
end
|
132
122
|
|
@@ -144,7 +134,7 @@ module OpenAI
|
|
144
134
|
res
|
145
135
|
end
|
146
136
|
|
147
|
-
def run_iteration(params, prompts, template, replace_key = "{{PROMPT}}",
|
137
|
+
def run_iteration(params, prompts, template, replace_key = "{{PROMPT}}", timeout_sec: 60, num_retrials: 0)
|
148
138
|
bar = TTY::ProgressBar.new("[:bar] :current/:total :total_byte :percent ET::elapsed ETA::eta",
|
149
139
|
total: prompts.size,
|
150
140
|
bar_format: :box)
|
@@ -152,7 +142,7 @@ module OpenAI
|
|
152
142
|
json = ""
|
153
143
|
prompts.each do |prompt|
|
154
144
|
params["prompt"] = template.sub(replace_key, prompt)
|
155
|
-
res = run(params,
|
145
|
+
res = run(params, timeout_sec: timeout_sec, num_retrials: num_retrials)
|
156
146
|
json = JSON.pretty_generate(get_json(res))
|
157
147
|
bar.advance(1)
|
158
148
|
template = template.sub(/JSON:\n+```json.+?```\n\n/m, "JSON:\n\n```json\n#{json}\n```\n\n")
|
@@ -0,0 +1,87 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
class MonadicApp
|
4
|
+
##################################################
|
5
|
+
# method for web search
|
6
|
+
##################################################
|
7
|
+
|
8
|
+
def bing_search(query, num_retrial: 3)
|
9
|
+
base_uri = "https://www.bing.com/search?setlang=en"
|
10
|
+
css_selector = "#b_results"
|
11
|
+
|
12
|
+
q = URI.encode_www_form(q: query)
|
13
|
+
doc = Nokogiri::HTML(URI.parse([base_uri, q].join("&")).read)
|
14
|
+
doc.css("script, link").each(&:remove)
|
15
|
+
doc.css(css_selector).text.squeeze(" \n")
|
16
|
+
rescue StandardError
|
17
|
+
num_retrial -= 1
|
18
|
+
if num_retrial.positive?
|
19
|
+
sleep 1
|
20
|
+
bing_search(query, num_retrial: num_retrial)
|
21
|
+
else
|
22
|
+
"empty"
|
23
|
+
end
|
24
|
+
end
|
25
|
+
|
26
|
+
def wikipedia_search(keywords, cache = {}, num_retrial: 10)
|
27
|
+
base_url = "https://en.wikipedia.org/w/api.php"
|
28
|
+
search_params = {
|
29
|
+
action: "query",
|
30
|
+
list: "search",
|
31
|
+
format: "json",
|
32
|
+
srsearch: keywords,
|
33
|
+
utf8: 1,
|
34
|
+
formatversion: 2
|
35
|
+
}
|
36
|
+
|
37
|
+
search_uri = URI(base_url)
|
38
|
+
search_uri.query = URI.encode_www_form(search_params)
|
39
|
+
search_response = Net::HTTP.get(search_uri)
|
40
|
+
search_data = JSON.parse(search_response)
|
41
|
+
|
42
|
+
raise if search_data["query"]["search"].empty?
|
43
|
+
|
44
|
+
title = search_data["query"]["search"][0]["title"]
|
45
|
+
|
46
|
+
return cache[title] if cache.keys.include?(title)
|
47
|
+
|
48
|
+
content_params = {
|
49
|
+
action: "query",
|
50
|
+
prop: "extracts",
|
51
|
+
format: "json",
|
52
|
+
titles: title,
|
53
|
+
explaintext: 1,
|
54
|
+
utf8: 1,
|
55
|
+
formatversion: 2
|
56
|
+
}
|
57
|
+
|
58
|
+
content_uri = URI(base_url)
|
59
|
+
content_uri.query = URI.encode_www_form(content_params)
|
60
|
+
content_response = Net::HTTP.get(content_uri)
|
61
|
+
content_data = JSON.parse(content_response)
|
62
|
+
|
63
|
+
result_data = content_data["query"]["pages"][0]["extract"]
|
64
|
+
tokenized = BLINGFIRE.text_to_ids(result_data)
|
65
|
+
if tokenized.size > SETTINGS["max_tokens_wiki"].to_i
|
66
|
+
ratio = SETTINGS["max_tokens_wiki"].to_f / tokenized.size
|
67
|
+
result_data = result_data[0..(result_data.size * ratio).to_i]
|
68
|
+
end
|
69
|
+
|
70
|
+
text = <<~TEXT
|
71
|
+
```MediaWiki
|
72
|
+
#{result_data}
|
73
|
+
```
|
74
|
+
TEXT
|
75
|
+
cache[title] = text
|
76
|
+
|
77
|
+
text
|
78
|
+
rescue StandardError
|
79
|
+
num_retrial -= 1
|
80
|
+
if num_retrial.positive?
|
81
|
+
sleep 1
|
82
|
+
wikipedia_search(keywords, num_retrial: num_retrial)
|
83
|
+
else
|
84
|
+
"empty"
|
85
|
+
end
|
86
|
+
end
|
87
|
+
end
|
data/lib/monadic_chat/version.rb
CHANGED
data/lib/monadic_chat.rb
CHANGED
@@ -14,26 +14,45 @@ require "rouge"
|
|
14
14
|
require "launchy"
|
15
15
|
require "io/console"
|
16
16
|
require "readline"
|
17
|
+
require "nokogiri"
|
18
|
+
require "open-uri"
|
17
19
|
|
18
20
|
require_relative "./monadic_chat/version"
|
19
21
|
require_relative "./monadic_chat/open_ai"
|
22
|
+
require_relative "./monadic_chat/authenticate"
|
23
|
+
require_relative "./monadic_chat/commands"
|
20
24
|
require_relative "./monadic_chat/helper"
|
21
25
|
|
22
26
|
Oj.mimic_JSON
|
23
27
|
|
24
28
|
module MonadicChat
|
29
|
+
SETTINGS = {
|
30
|
+
"normal_model" => "gpt-3.5-turbo",
|
31
|
+
"research_model" => "gpt-3.5-turbo",
|
32
|
+
"max_tokens_wiki" => 1000,
|
33
|
+
"num_retrials" => 2,
|
34
|
+
"min_query_size" => 5,
|
35
|
+
"timeout_sec" => 120
|
36
|
+
}
|
25
37
|
gpt2model_path = File.absolute_path(File.join(__dir__, "..", "assets", "gpt2.bin"))
|
38
|
+
|
26
39
|
BLINGFIRE = BlingFire.load_model(gpt2model_path)
|
27
40
|
CONFIG = File.join(Dir.home, "monadic_chat.conf")
|
28
|
-
NUM_RETRY = 2
|
29
|
-
MIN_LENGTH = 5
|
30
|
-
TIMEOUT_SEC = 120
|
31
41
|
TITLE_WIDTH = 72
|
32
|
-
|
33
42
|
APPS_DIR = File.absolute_path(File.join(__dir__, "..", "apps"))
|
34
|
-
|
43
|
+
USER_APPS_DIR = File.absolute_path(File.join(__dir__, "..", "user_apps"))
|
44
|
+
|
45
|
+
apps_dir_list = Dir.entries(APPS_DIR)
|
35
46
|
.reject { |entry| /\A\./ =~ entry || /\A_/ =~ entry.split("/").last }
|
36
47
|
.map { |entry| File.join(APPS_DIR, entry) }
|
48
|
+
|
49
|
+
user_apps_dir_list = Dir.entries(USER_APPS_DIR)
|
50
|
+
.reject { |entry| /\A\./ =~ entry || /\A_/ =~ entry.split("/").last }
|
51
|
+
.reject { |entry| /\Aboilerplates/ =~ entry }
|
52
|
+
.map { |entry| File.join(USER_APPS_DIR, entry) }
|
53
|
+
|
54
|
+
APPS_DIR_LIST = apps_dir_list + user_apps_dir_list
|
55
|
+
|
37
56
|
templates = {}
|
38
57
|
APPS_DIR_LIST.each do |app|
|
39
58
|
basename = File.basename(app, ".*")
|
@@ -84,6 +103,14 @@ module MonadicChat
|
|
84
103
|
background-color: #c4ffcb;
|
85
104
|
margin-bottom: 0.5em;
|
86
105
|
}
|
106
|
+
.monadic_search_engine {
|
107
|
+
display:inline-block;
|
108
|
+
padding-left: 0.5em;
|
109
|
+
padding-right: 0.5em;
|
110
|
+
font-weight: bold;
|
111
|
+
background-color: #ffe9c4;
|
112
|
+
margin-bottom: 0.5em;
|
113
|
+
}
|
87
114
|
.monadic_gray {
|
88
115
|
display:inline-block;
|
89
116
|
font-weight: bold;
|
@@ -97,7 +124,14 @@ module MonadicChat
|
|
97
124
|
margin-bottom: 0.5em;
|
98
125
|
}
|
99
126
|
CSS
|
127
|
+
|
100
128
|
GITHUB_STYLE = style
|
129
|
+
PROMPT_USER = TTY::PromptX.new(active_color: :blue, prefix: prompt_user)
|
130
|
+
PROMPT_SYSTEM = TTY::PromptX.new(active_color: :blue, prefix: "#{prompt_system} ")
|
131
|
+
PROMPT_ASSISTANT = TTY::PromptX.new(active_color: :red, prefix: "#{prompt_assistant} ")
|
132
|
+
SPINNER = TTY::Spinner.new(format: :arrow_pulse, clear: true)
|
133
|
+
BULLET = "\e[33m●\e[0m"
|
134
|
+
HOME = File.expand_path(File.join(__dir__, ".."))
|
101
135
|
|
102
136
|
def self.require_apps
|
103
137
|
MonadicChat::APPS_DIR_LIST.each do |app_dir|
|
@@ -105,119 +139,4 @@ module MonadicChat
|
|
105
139
|
require "#{app_dir}/#{basename}"
|
106
140
|
end
|
107
141
|
end
|
108
|
-
|
109
|
-
def self.open_readme
|
110
|
-
url = "https://github.com/yohasebe/monadic-chat/"
|
111
|
-
Launchy.open(url)
|
112
|
-
end
|
113
|
-
|
114
|
-
def self.authenticate(overwrite: false, message: true)
|
115
|
-
check = lambda do |token, normal_mode_model, research_mode_model|
|
116
|
-
print "Checking configuration\n" if message
|
117
|
-
SPINNER.auto_spin
|
118
|
-
begin
|
119
|
-
models = OpenAI.models(token)
|
120
|
-
raise if models.empty?
|
121
|
-
|
122
|
-
SPINNER.stop
|
123
|
-
|
124
|
-
print "Success\n" if message
|
125
|
-
|
126
|
-
if normal_mode_model && !models.map { |m| m["id"] }.index(normal_mode_model)
|
127
|
-
SPINNER.stop
|
128
|
-
print "Normal mode model set in config file not available.\n" if message
|
129
|
-
normal_mode_model = false
|
130
|
-
end
|
131
|
-
normal_mode_model ||= OpenAI.model_name(research_mode: false)
|
132
|
-
print "Normal mode model: #{normal_mode_model}\n" if message
|
133
|
-
|
134
|
-
if research_mode_model && !models.map { |m| m["id"] }.index(research_mode_model)
|
135
|
-
SPINNER.stop
|
136
|
-
print "Normal mode model set in config file not available.\n" if message
|
137
|
-
print "Fallback to the default model (#{OpenAI.model_name(research_mode: true)}).\n" if message
|
138
|
-
end
|
139
|
-
research_mode_model ||= OpenAI.model_name(research_mode: true)
|
140
|
-
print "Research mode model: #{research_mode_model}\n" if message
|
141
|
-
|
142
|
-
OpenAI::Completion.new(token, normal_mode_model, research_mode_model)
|
143
|
-
rescue StandardError
|
144
|
-
SPINNER.stop
|
145
|
-
print "Authentication: failure.\n" if message
|
146
|
-
false
|
147
|
-
end
|
148
|
-
end
|
149
|
-
|
150
|
-
completion = nil
|
151
|
-
|
152
|
-
if overwrite
|
153
|
-
access_token = PROMPT_SYSTEM.ask(" Input your OpenAI access token:")
|
154
|
-
return false if access_token.to_s == ""
|
155
|
-
|
156
|
-
completion = check.call(access_token, nil, nil)
|
157
|
-
|
158
|
-
if completion
|
159
|
-
File.open(CONFIG, "w") do |f|
|
160
|
-
config = { "access_token" => access_token }
|
161
|
-
f.write(JSON.pretty_generate(config))
|
162
|
-
print "New access token has been saved to #{CONFIG}\n" if message
|
163
|
-
end
|
164
|
-
end
|
165
|
-
elsif File.exist?(CONFIG)
|
166
|
-
json = File.read(CONFIG)
|
167
|
-
begin
|
168
|
-
config = JSON.parse(json)
|
169
|
-
rescue JSON::ParserError
|
170
|
-
puts "Error: config file does not contain a valid JSON object."
|
171
|
-
exit
|
172
|
-
end
|
173
|
-
access_token = config["access_token"]
|
174
|
-
normal_mode_model = config["normal_mode_model"]
|
175
|
-
research_mode_model = config["research_mode_model"]
|
176
|
-
completion = check.call(access_token, normal_mode_model, research_mode_model)
|
177
|
-
else
|
178
|
-
access_token ||= PROMPT_SYSTEM.ask(" Input your OpenAI access token:")
|
179
|
-
completion = check.call(access_token, nil, nil)
|
180
|
-
if completion
|
181
|
-
File.open(CONFIG, "w") do |f|
|
182
|
-
config = { "access_token" => access_token }
|
183
|
-
f.write(JSON.pretty_generate(config))
|
184
|
-
end
|
185
|
-
print "Access token has been saved to #{CONFIG}\n" if message
|
186
|
-
end
|
187
|
-
end
|
188
|
-
completion || authenticate(overwrite: true)
|
189
|
-
end
|
190
|
-
|
191
|
-
def self.prompt_system
|
192
|
-
box_width = 8
|
193
|
-
name = "System".center(box_width, " ")
|
194
|
-
color = "green"
|
195
|
-
"\n#{PASTEL.send(:"on_#{color}", name)}"
|
196
|
-
end
|
197
|
-
|
198
|
-
def self.prompt_user
|
199
|
-
box_width = 6
|
200
|
-
color = "blue"
|
201
|
-
name = "User".center(box_width, " ")
|
202
|
-
"\n#{PASTEL.send(:"on_#{color}", name)}"
|
203
|
-
end
|
204
|
-
|
205
|
-
def self.prompt_assistant
|
206
|
-
box_width = 5
|
207
|
-
color = "red"
|
208
|
-
name = "GPT".center(box_width, " ")
|
209
|
-
"\n#{PASTEL.send(:"on_#{color}", name)}"
|
210
|
-
end
|
211
|
-
|
212
|
-
def self.tokenize(text)
|
213
|
-
BLINGFIRE.text_to_ids(text)
|
214
|
-
end
|
215
|
-
|
216
|
-
PROMPT_USER = TTY::PromptX.new(active_color: :blue, prefix: prompt_user)
|
217
|
-
PROMPT_SYSTEM = TTY::PromptX.new(active_color: :blue, prefix: "#{prompt_system} ")
|
218
|
-
PROMPT_ASSISTANT = TTY::PromptX.new(active_color: :red, prefix: "#{prompt_assistant} ")
|
219
|
-
|
220
|
-
SPINNER = TTY::Spinner.new(format: :arrow_pulse, clear: true)
|
221
|
-
|
222
|
-
BULLET = "\e[33m●\e[0m"
|
223
142
|
end
|