monadic-chat 0.3.4 → 0.3.5
- checksums.yaml +4 -4
- data/CHANGELOG.md +8 -3
- data/Gemfile.lock +2 -6
- data/README.md +36 -36
- data/apps/chat/chat.md +9 -10
- data/apps/code/code.md +8 -9
- data/apps/novel/novel.md +7 -8
- data/apps/translate/translate.md +6 -7
- data/bin/monadic-chat +23 -9
- data/doc/img/research-mode-template.svg +1 -1
- data/lib/monadic_app.rb +1 -1
- data/lib/monadic_chat/authenticate.rb +115 -0
- data/lib/monadic_chat/commands.rb +75 -0
- data/lib/monadic_chat/interaction.rb +1 -6
- data/lib/monadic_chat/internals.rb +68 -28
- data/lib/monadic_chat/open_ai.rb +7 -7
- data/lib/monadic_chat/tools.rb +36 -12
- data/lib/monadic_chat/version.rb +1 -1
- data/lib/monadic_chat.rb +37 -166
- data/monadic_chat.gemspec +0 -1
- data/user_apps/boilerplates/boilerplate.json +5 -0
- data/user_apps/boilerplates/boilerplate.md +41 -0
- data/user_apps/boilerplates/boilerplate.rb +85 -0
- data/{apps → user_apps}/linguistic/linguistic.md +7 -8
- data/user_apps/wikipedia/wikipedia.json +3 -0
- data/user_apps/wikipedia/wikipedia.md +38 -0
- data/user_apps/wikipedia/wikipedia.rb +85 -0
- metadata +13 -19
- /data/{apps → user_apps}/linguistic/linguistic.json +0 -0
- /data/{apps → user_apps}/linguistic/linguistic.rb +0 -0
data/lib/monadic_chat/commands.rb
ADDED
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+module MonadicChat
+  def self.open_readme
+    url = "https://github.com/yohasebe/monadic-chat/"
+    Launchy.open(url)
+  end
+
+  def self.mdprint(str)
+    print TTY::Markdown.parse(str, indent: 0)
+  end
+
+  def self.prompt_system
+    box_width = 8
+    name = "System".center(box_width, " ")
+    color = "green"
+    "\n#{PASTEL.send(:"on_#{color}", name)}"
+  end
+
+  def self.prompt_user
+    box_width = 6
+    color = "blue"
+    name = "User".center(box_width, " ")
+    "\n#{PASTEL.send(:"on_#{color}", name)}"
+  end
+
+  def self.prompt_assistant
+    box_width = 5
+    color = "red"
+    name = "GPT".center(box_width, " ")
+    "\n#{PASTEL.send(:"on_#{color}", name)}"
+  end
+
+  def self.tokenize(text)
+    BLINGFIRE.text_to_ids(text)
+  end
+
+  def self.create_app(app_name)
+    app_name = +app_name.downcase
+    user_apps_dir = File.join(HOME, "user_apps")
+    user_app_dir = File.join(user_apps_dir, app_name)
+    FileUtils.mkdir_p(user_app_dir)
+    # replace certain strings in boilerplate files (boilerplate.rb, boilerplate.json, boilerplate.md)
+    [".rb", ".json", ".md"].each do |ext|
+      file = File.join(HOME, "user_apps", "boilerplates", "boilerplate#{ext}")
+      content = File.read(file)
+      content.gsub!("{{APP_NAME}}", app_name)
+      content.gsub!("{{APP_CLASS_NAME}}", app_name.capitalize)
+      File.open(File.join(user_app_dir, "#{app_name}#{ext}"), "w") do |f|
+        f.write(content)
+      end
+    end
+    print PROMPT_SYSTEM.prefix, "Scaffolding of the app created successfully", "\n"
+    print "Edit the app files:", "\n"
+    print HOME, "\n"
+    print "user_apps", "\n"
+    print "└── #{app_name}", "\n"
+    print "    ├── #{app_name}.json", "\n"
+    print "    ├── #{app_name}.md", "\n"
+    print "    └── #{app_name}.rb", "\n"
+  end
+
+  def self.delete_app(app_name)
+    app_name = +app_name.downcase
+    user_apps_dir = File.join(HOME, "user_apps")
+    user_app_dir = File.join(user_apps_dir, app_name)
+    # confirm user wants to delete the app
+    if PROMPT_SYSTEM.yes?("Are you sure you want to delete the app #{app_name}?")
+      FileUtils.rm_rf(user_app_dir)
+      print PROMPT_SYSTEM.prefix, "App deleted successfully", "\n"
+    else
+      print PROMPT_SYSTEM.prefix, "App deletion cancelled", "\n"
+    end
+  end
+end
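For context, here is a minimal standalone sketch of the placeholder substitution that `create_app` performs on the boilerplate files. The `scaffold` helper and the paths below are illustrative assumptions, not part of the gem's API.

```ruby
# Hypothetical stand-in for MonadicChat.create_app's templating step:
# read each boilerplate file, replace the {{APP_NAME}} / {{APP_CLASS_NAME}}
# placeholders, and write the result under <out_root>/<app_name>/.
require "fileutils"

def scaffold(app_name, template_dir, out_root)
  app_name = app_name.downcase
  out_dir = File.join(out_root, app_name)
  FileUtils.mkdir_p(out_dir)
  [".rb", ".json", ".md"].each do |ext|
    src = File.join(template_dir, "boilerplate#{ext}")
    next unless File.exist?(src)

    content = File.read(src)
                  .gsub("{{APP_NAME}}", app_name)
                  .gsub("{{APP_CLASS_NAME}}", app_name.capitalize)
    File.write(File.join(out_dir, "#{app_name}#{ext}"), content)
  end
end

# Example (hypothetical paths):
# scaffold("weather", "user_apps/boilerplates", "user_apps")
```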
data/lib/monadic_chat/interaction.rb
CHANGED
@@ -6,14 +6,9 @@ class MonadicApp
   ##################################################

   def user_input(text = "")
-    # if count_lines_below < 1
-    #   ask_clear
-    #   user_input
-    # else
     res = PROMPT_USER.readline(text)
     print TTY::Cursor.clear_line_after
     res == "" ? nil : res
-    # end
   end

   def show_greet
@@ -32,7 +27,7 @@ class MonadicApp
   end

   def confirm_query(input)
-    if input.size < MIN_LENGTH
+    if input.size < SETTINGS["min_query_size"]
       PROMPT_SYSTEM.yes?("Would you like to proceed with this (very short) prompt?")
     else
       true
data/lib/monadic_chat/internals.rb
CHANGED
@@ -59,6 +59,13 @@ class MonadicApp
   def prepare_params(input_role, input)
     params = @params.dup

+    delimited_input = case input_role
+                      when "user"
+                        "NEW PROMPT: ###\n#{input}\n###"
+                      when "system" # i.e. search engine
+                        "SEARCH SNIPPETS: ###\n#{input}\n###"
+                      end
+
     case @mode
     when :research
       messages = +""
@@ -68,14 +75,16 @@ class MonadicApp
         content = mes["content"]
         case role
         when "system"
-          system << "#{content}\n"
+          system << "#{content}\n" if system == ""
         else
           messages << "- #{mes["role"].strip}: #{content}\n"
         end
       end
+
+      delimited_messages = "MESSAGES: ###\n#{messages}\n###"
       template = @template.dup.sub("{{SYSTEM}}", system)
-                          .sub("{{PROMPT}}",
-                          .sub("{{MESSAGES}}",
+                          .sub("{{PROMPT}}", delimited_input)
+                          .sub("{{MESSAGES}}", delimited_messages.strip)

       @template_tokens = count_tokens(template)

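The new `delimited_input` / `delimited_messages` wrapping can be seen in isolation below; the `delimit` helper and the sample prompt are assumptions added for illustration only.

```ruby
# Sketch of the "###" fencing that prepare_params now applies to user prompts
# and search snippets before substituting them into the research-mode template
# (illustrative helper, not gem API).
def delimit(input_role, input)
  case input_role
  when "user"   then "NEW PROMPT: ###\n#{input}\n###"
  when "system" then "SEARCH SNIPPETS: ###\n#{input}\n###"
  end
end

template = "{{SYSTEM}}\n\n{{PROMPT}}\n\n{{MESSAGES}}"
puts template.sub("{{PROMPT}}", delimit("user", "What is monadic chat?"))
```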
@@ -104,27 +113,40 @@ class MonadicApp
     case @mode
     when :research
       @metadata = res
-      @messages << { "role" =>
+      @messages << { "role" => role, "content" => @metadata["response"] }
       json = @metadata.to_json.strip
       File.open(TEMP_JSON, "w") { |f| f.write json }
       @template.sub!(/JSON:\n+```json.+```\n\n/m, "JSON:\n\n```json\n#{json}\n```\n\n")
     when :normal
       @messages << { "role" => "assistant", "content" => res }
     end
-    remove_intermediate_messages if role == "system"
   end

-
-
-
+  ##################################################
+  # function to package plain text into a unit
+  ##################################################
+
+  def unit(input)
+    if input.instance_of?(Hash)
+      input
+    else
+      @metadata["response"] = input
+      @metadata
+    end
   end

   ##################################################
   # function to bind data
   ##################################################

-  def bind(input, role: "user",
-
+  def bind(input, role: "user", num_retrials: 0)
+    case role
+    when "user"
+      @turns += 1
+    when "system" # i.e. search engine
+      input = "\n\n#{input}"
+    end
+
     print PROMPT_ASSISTANT.prefix, "\n"
     params = prepare_params(role, input)
     research_mode = @mode == :research
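The added `unit` method is the monadic wrapping step: a plain string becomes part of the running metadata hash, while a hash passes through unchanged. A non-mutating sketch of the same idea (the real method writes into `@metadata` in place):

```ruby
# Illustrative, non-mutating version of the "unit" operation: wrap a plain
# string into the state hash under "response"; leave hashes untouched.
def unit(input, metadata)
  return input if input.is_a?(Hash)

  metadata.merge("response" => input)
end

state = { "mode" => "research", "response" => "" }
p unit("Hello!", state)              # => {"mode"=>"research", "response"=>"Hello!"}
p unit({ "response" => "x" }, state) # hash passes through unchanged
```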
@@ -132,7 +154,10 @@ class MonadicApp
     escaping = +""
     last_chunk = +""

-    res = @completion.run(params,
+    res = @completion.run(params,
+                          research_mode: research_mode,
+                          timeout_sec: SETTINGS["timeout_sec"],
+                          num_retrials: num_retrials) do |chunk|
       if escaping
         chunk = escaping + chunk
         escaping = ""
@@ -152,13 +177,31 @@ class MonadicApp
     print last_chunk
     print "\n"

-
-
-
-
-
-
-
+    message = case role
+              when "system" # i.e. search engine; the response given above should be by "assistant"
+                { role: "assistant", content: @mode == :research ? unit(res) : res }
+              when "user" # the response give above should be either "assistant"
+                searched = use_tool(res)
+                # but if the response is a search query, it should be by "system" (search engine)
+                if searched
+                  @messages << { "role" => "assistant",
+                                 "content" => @mode == :research ? unit(res)["response"] : res }
+                  if searched == "empty"
+                    print PROMPT_SYSTEM.prefix, "Search results are empty", "\n"
+                    return
+                  else
+                    bind(searched, role: "system")
+                    return
+                  end
+                # otherwise, it should be by "assistant"
+                else
+                  { role: "assistant", content: @mode == :researh ? unit(res) : res }
+                end
+              end
+
+    update_template(message[:content], message[:role])
+
+    set_html if @html
   end

   ##################################################
@@ -174,19 +217,16 @@ class MonadicApp
     end

     case text
-    when /\bSEARCH_WIKI\((.+?)
+    when /\bSEARCH_WIKI\("?(.+?)"?\)/m
+      @wiki_search_cache ||= {}
       search_key = Regexp.last_match(1)
-
-        key.strip.sub(/^"(.+)"$/, '\1')
-      end
-      text = "SEARCH SNIPPETS\n#{wikipedia_search(*search_keys)}"
-      return text
+      wikipedia_search(search_key, @wiki_search_cache)
     when /\bSEARCH_WEB\("?(.+?)"?\)/m
+      @web_search_cache ||= {}
       search_key = Regexp.last_match(1)
-
-
+      bing_search(search_key, @web_searh_cache)
+    else
+      false
     end
-
-    false
   end
 end
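The rewritten `use_tool` dispatch treats `SEARCH_WIKI("...")` and `SEARCH_WEB("...")` strings in the model output as tool calls. A self-contained sketch of just the pattern matching (the sample strings and the `extract_tool_call` name are made up for illustration):

```ruby
# Sketch of the directive matching in use_tool: extract the quoted (or bare)
# search key and report which search backend it would be routed to.
def extract_tool_call(text)
  case text
  when /\bSEARCH_WIKI\("?(.+?)"?\)/m then [:wikipedia, Regexp.last_match(1)]
  when /\bSEARCH_WEB\("?(.+?)"?\)/m  then [:bing, Regexp.last_match(1)]
  else false
  end
end

p extract_tool_call('SEARCH_WIKI("Ludwig Wittgenstein")') # => [:wikipedia, "Ludwig Wittgenstein"]
p extract_tool_call("A plain answer with no tool call")   # => false
```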
data/lib/monadic_chat/open_ai.rb
CHANGED
@@ -12,7 +12,7 @@ Oj.mimic_JSON
 module OpenAI
   def self.default_model(research_mode: false)
     if research_mode
-      "
+      "gpt-3.5-turbo"
     else
       "gpt-3.5-turbo"
     end
@@ -96,10 +96,10 @@ module OpenAI
       OpenAI.models(@access_token)
     end

-    def run(params, research_mode: false,
+    def run(params, research_mode: false, timeout_sec: 60, num_retrials: 1, &block)
       method = OpenAI.model_to_method(params["model"])

-      response = OpenAI.query(@access_token, "post", method,
+      response = OpenAI.query(@access_token, "post", method, timeout_sec, params, &block)
       if response["error"]
         raise response["error"]["message"]
       elsif response["choices"][0]["finish_reason"] == "length"
@@ -112,11 +112,11 @@ module OpenAI
         response["choices"][0]["text"]
       end
     rescue StandardError => e
-      case
+      case num_retrials
       when 0
         raise e
       else
-        run(params, research_mode: research_mode,
+        run(params, research_mode: research_mode, timeout_sec: timeout_sec, num_retrials: num_retrials - 1, &block)
       end
     end

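The retry handling in `Completion#run` decrements `num_retrials` and recurses until it reaches zero, then re-raises. A generic sketch of the same pattern (the `with_retrials` helper and the simulated failure below are not part of the gem):

```ruby
# Generic retry-by-recursion sketch mirroring Completion#run's rescue clause.
def with_retrials(num_retrials:, &request)
  request.call
rescue StandardError => e
  raise e if num_retrials.zero?

  with_retrials(num_retrials: num_retrials - 1, &request)
end

attempts = 0
with_retrials(num_retrials: 2) do
  attempts += 1
  raise "transient failure" if attempts < 3

  puts "succeeded on attempt #{attempts}"
end
```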
@@ -134,7 +134,7 @@ module OpenAI
       res
     end

-    def run_iteration(params, prompts, template, replace_key = "{{PROMPT}}",
+    def run_iteration(params, prompts, template, replace_key = "{{PROMPT}}", timeout_sec: 60, num_retrials: 0)
       bar = TTY::ProgressBar.new("[:bar] :current/:total :total_byte :percent ET::elapsed ETA::eta",
                                  total: prompts.size,
                                  bar_format: :box)
@@ -142,7 +142,7 @@ module OpenAI
       json = ""
       prompts.each do |prompt|
         params["prompt"] = template.sub(replace_key, prompt)
-        res = run(params,
+        res = run(params, timeout_sec: timeout_sec, num_retrials: num_retrials)
         json = JSON.pretty_generate(get_json(res))
         bar.advance(1)
         template = template.sub(/JSON:\n+```json.+?```\n\n/m, "JSON:\n\n```json\n#{json}\n```\n\n")
data/lib/monadic_chat/tools.rb
CHANGED
@@ -5,24 +5,26 @@ class MonadicApp
   # method for web search
   ##################################################

-  def bing_search(query,
-
+  def bing_search(query, num_retrial: 3)
+    base_uri = "https://www.bing.com/search?setlang=en"
     css_selector = "#b_results"

     q = URI.encode_www_form(q: query)
-    doc = Nokogiri::HTML(URI.parse([
+    doc = Nokogiri::HTML(URI.parse([base_uri, q].join("&")).read)
     doc.css("script, link").each(&:remove)
     doc.css(css_selector).text.squeeze(" \n")
   rescue StandardError
-
-
-
-
-
+    num_retrial -= 1
+    if num_retrial.positive?
+      sleep 1
+      bing_search(keywords, num_retrial: num_retrial)
+    else
+      "empty"
+    end
   end

-  def wikipedia_search(keywords,
-    base_url
+  def wikipedia_search(keywords, cache = {}, num_retrial: 10)
+    base_url = "https://en.wikipedia.org/w/api.php"
     search_params = {
       action: "query",
       list: "search",
@@ -41,6 +43,8 @@ class MonadicApp

     title = search_data["query"]["search"][0]["title"]

+    return cache[title] if cache.keys.include?(title)
+
     content_params = {
       action: "query",
       prop: "extracts",
@@ -56,8 +60,28 @@ class MonadicApp
     content_response = Net::HTTP.get(content_uri)
     content_data = JSON.parse(content_response)

-    content_data["query"]["pages"][0]["extract"]
+    result_data = content_data["query"]["pages"][0]["extract"]
+    tokenized = BLINGFIRE.text_to_ids(result_data)
+    if tokenized.size > SETTINGS["max_tokens_wiki"].to_i
+      ratio = SETTINGS["max_tokens_wiki"].to_f / tokenized.size
+      result_data = result_data[0..(result_data.size * ratio).to_i]
+    end
+
+    text = <<~TEXT
+      ```MediaWiki
+      #{result_data}
+      ```
+    TEXT
+    cache[title] = text
+
+    text
   rescue StandardError
-
+    num_retrial -= 1
+    if num_retrial.positive?
+      sleep 1
+      wikipedia_search(keywords, num_retrial: num_retrial)
+    else
+      "empty"
+    end
   end
 end
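The new Wikipedia handling trims the page extract to the `max_tokens_wiki` budget by scaling the character length with the token ratio. A standalone sketch of that truncation; the gem tokenizes with BlingFire, and a plain whitespace split stands in for it here:

```ruby
# Sketch of the ratio-based truncation applied to long Wikipedia extracts.
def truncate_to_token_budget(text, max_tokens)
  tokens = text.split # stand-in tokenizer; the gem uses BLINGFIRE.text_to_ids
  return text if tokens.size <= max_tokens

  ratio = max_tokens.to_f / tokens.size
  text[0..(text.size * ratio).to_i]
end

sample = "lorem ipsum " * 2_000                            # ~4,000 "tokens"
puts truncate_to_token_budget(sample, 1_000).split.size    # roughly 1,000
```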
data/lib/monadic_chat/version.rb
CHANGED
data/lib/monadic_chat.rb
CHANGED
@@ -16,29 +16,43 @@ require "io/console"
 require "readline"
 require "nokogiri"
 require "open-uri"
-require "wikipedia"

 require_relative "./monadic_chat/version"
 require_relative "./monadic_chat/open_ai"
+require_relative "./monadic_chat/authenticate"
+require_relative "./monadic_chat/commands"
 require_relative "./monadic_chat/helper"

 Oj.mimic_JSON

 module MonadicChat
-  SETTINGS = {
-
+  SETTINGS = {
+    "normal_model" => "gpt-3.5-turbo",
+    "research_model" => "gpt-3.5-turbo",
+    "max_tokens_wiki" => 1000,
+    "num_retrials" => 2,
+    "min_query_size" => 5,
+    "timeout_sec" => 120
+  }
   gpt2model_path = File.absolute_path(File.join(__dir__, "..", "assets", "gpt2.bin"))
+
   BLINGFIRE = BlingFire.load_model(gpt2model_path)
   CONFIG = File.join(Dir.home, "monadic_chat.conf")
-  NUM_RETRY = 2
-  MIN_LENGTH = 5
-  TIMEOUT_SEC = 120
   TITLE_WIDTH = 72
-
   APPS_DIR = File.absolute_path(File.join(__dir__, "..", "apps"))
-
+  USER_APPS_DIR = File.absolute_path(File.join(__dir__, "..", "user_apps"))
+
+  apps_dir_list = Dir.entries(APPS_DIR)
                  .reject { |entry| /\A\./ =~ entry || /\A_/ =~ entry.split("/").last }
                  .map { |entry| File.join(APPS_DIR, entry) }
+
+  user_apps_dir_list = Dir.entries(USER_APPS_DIR)
+                          .reject { |entry| /\A\./ =~ entry || /\A_/ =~ entry.split("/").last }
+                          .reject { |entry| /\Aboilerplates/ =~ entry }
+                          .map { |entry| File.join(USER_APPS_DIR, entry) }
+
+  APPS_DIR_LIST = apps_dir_list + user_apps_dir_list
+
   templates = {}
   APPS_DIR_LIST.each do |app|
     basename = File.basename(app, ".*")
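The new constants split app discovery between the built-in `apps` directory and `user_apps`, skipping hidden entries, underscore-prefixed entries, and the `boilerplates` folder. A small self-contained sketch of that filtering (the directory names below are made up):

```ruby
# Sketch of the user_apps discovery filter that feeds APPS_DIR_LIST.
require "fileutils"
require "tmpdir"

Dir.mktmpdir do |root|
  %w[boilerplates wikipedia _drafts .hidden].each { |d| FileUtils.mkdir_p(File.join(root, d)) }

  user_apps = Dir.entries(root)
                 .reject { |e| e.start_with?(".", "_") || e == "boilerplates" }
                 .map    { |e| File.join(root, e) }

  p user_apps # => only the "wikipedia" entry survives the filter
end
```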
@@ -89,6 +103,14 @@ module MonadicChat
       background-color: #c4ffcb;
       margin-bottom: 0.5em;
     }
+    .monadic_search_engine {
+      display:inline-block;
+      padding-left: 0.5em;
+      padding-right: 0.5em;
+      font-weight: bold;
+      background-color: #ffe9c4;
+      margin-bottom: 0.5em;
+    }
     .monadic_gray {
       display:inline-block;
       font-weight: bold;
@@ -102,7 +124,14 @@ module MonadicChat
       margin-bottom: 0.5em;
     }
   CSS
+
   GITHUB_STYLE = style
+  PROMPT_USER = TTY::PromptX.new(active_color: :blue, prefix: prompt_user)
+  PROMPT_SYSTEM = TTY::PromptX.new(active_color: :blue, prefix: "#{prompt_system} ")
+  PROMPT_ASSISTANT = TTY::PromptX.new(active_color: :red, prefix: "#{prompt_assistant} ")
+  SPINNER = TTY::Spinner.new(format: :arrow_pulse, clear: true)
+  BULLET = "\e[33m●\e[0m"
+  HOME = File.expand_path(File.join(__dir__, ".."))

   def self.require_apps
     MonadicChat::APPS_DIR_LIST.each do |app_dir|
@@ -110,162 +139,4 @@ module MonadicChat
       require "#{app_dir}/#{basename}"
     end
   end
-
-  def self.open_readme
-    url = "https://github.com/yohasebe/monadic-chat/"
-    Launchy.open(url)
-  end
-
-  def self.mdprint(str)
-    print TTY::Markdown.parse(str, indent: 0)
-  end
-
-  def self.authenticate(overwrite: false, message: true)
-    check = lambda do |token|
-      if message
-        print TTY::Cursor.restore
-        print TTY::Cursor.clear_screen_down
-        print "\n"
-        SPINNER.auto_spin
-      end
-
-      if !token || token.strip == ""
-        if message
-          SPINNER.stop
-          print TTY::Cursor.restore
-          print "\n"
-          mdprint "- Authentication: #{PASTEL.bold.red("Failure")}\n" if message
-        end
-        return false
-      end
-
-      begin
-        models = OpenAI.models(token)
-        raise if models.empty?
-
-        if message
-          SPINNER.stop
-          print TTY::Cursor.restore, "\n"
-          mdprint "#{PASTEL.on_green(" System ")} Config file: `#{CONFIG}`\n"
-          print "\n"
-          mdprint "- Authentication: #{PASTEL.bold.green("Success")}\n"
-        end
-
-        if SETTINGS["normal_model"] && !models.map { |m| m["id"] }.index(SETTINGS["normal_model"])
-          if message
-            SPINNER.stop
-            mdprint "- Normal mode model specified in config file not available\n"
-            mdprint "- Fallback to the default model (`#{OpenAI.default_model(research_mode: false)}`)\n"
-          end
-          SETTINGS["normal_model"] = false
-        end
-        SETTINGS["normal_model"] ||= OpenAI.default_model(research_mode: false)
-        mdprint "- Normal mode model: `#{SETTINGS["normal_model"]}`\n" if message
-
-        if SETTINGS["research_model"] && !models.map { |m| m["id"] }.index(SETTINGS["research_model"])
-          if message
-            SPINNER.stop
-            mdprint "- Research mode model specified in config file not available\n"
-            mdprint "- Fallback to the default model (`#{OpenAI.default_model(research_mode: true)}`)\n"
-          end
-          SETTINGS["research_model"] = false
-        end
-        SETTINGS["research_model"] ||= OpenAI.default_model(research_mode: true)
-        mdprint "- Research mode model: `#{SETTINGS["research_model"]}`\n" if message
-
-        OpenAI::Completion.new(token)
-      rescue StandardError
-        if message
-          SPINNER.stop
-          print TTY::Cursor.restore
-          print "\n"
-          mdprint "- Authentication: #{PASTEL.bold.red("Failure")}\n" if message
-        end
-        false
-      end
-    end
-
-    completion = nil
-
-    if overwrite
-      access_token = PROMPT_SYSTEM.ask("Input your OpenAI access token:")
-      return false if access_token.to_s == ""
-
-      completion = check.call(access_token)
-
-      if completion
-        File.open(CONFIG, "w") do |f|
-          config = {
-            "access_token" => access_token,
-            "normal_model" => SETTINGS["normal_model"],
-            "research_model" => SETTINGS["research_model"]
-          }
-          f.write(JSON.pretty_generate(config))
-          print "New access token has been saved to #{CONFIG}\n" if message
-        end
-      end
-    elsif File.exist?(CONFIG)
-      json = File.read(CONFIG)
-      begin
-        config = JSON.parse(json)
-      rescue JSON::ParserError
-        puts "Error: config file does not contain a valid JSON object."
-        exit
-      end
-      SETTINGS["normal_model"] = config["normal_model"] if config["normal_model"]
-      SETTINGS["research_model"] = config["research_model"] if config["research_model"]
-      access_token = config["access_token"]
-      completion = check.call(access_token)
-    else
-      access_token ||= PROMPT_SYSTEM.ask("Input your OpenAI access token:")
-      return false if access_token.to_s == ""
-
-      completion = check.call(access_token)
-      if completion
-        File.open(CONFIG, "w") do |f|
-          config = {
-            "access_token" => access_token,
-            "normal_model" => SETTINGS["normal_model"],
-            "research_model" => SETTINGS["research_model"]
-          }
-          f.write(JSON.pretty_generate(config))
-        end
-        print "Access token has been saved to #{CONFIG}\n" if message
-      end
-    end
-    completion || authenticate(overwrite: true)
-  end
-
-  def self.prompt_system
-    box_width = 8
-    name = "System".center(box_width, " ")
-    color = "green"
-    "\n#{PASTEL.send(:"on_#{color}", name)}"
-  end
-
-  def self.prompt_user
-    box_width = 6
-    color = "blue"
-    name = "User".center(box_width, " ")
-    "\n#{PASTEL.send(:"on_#{color}", name)}"
-  end
-
-  def self.prompt_assistant
-    box_width = 5
-    color = "red"
-    name = "GPT".center(box_width, " ")
-    "\n#{PASTEL.send(:"on_#{color}", name)}"
-  end
-
-  def self.tokenize(text)
-    BLINGFIRE.text_to_ids(text)
-  end
-
-  PROMPT_USER = TTY::PromptX.new(active_color: :blue, prefix: prompt_user)
-  PROMPT_SYSTEM = TTY::PromptX.new(active_color: :blue, prefix: "#{prompt_system} ")
-  PROMPT_ASSISTANT = TTY::PromptX.new(active_color: :red, prefix: "#{prompt_assistant} ")
-
-  SPINNER = TTY::Spinner.new(format: :arrow_pulse, clear: true)
-
-  BULLET = "\e[33m●\e[0m"
 end
data/monadic_chat.gemspec
CHANGED