monadic-chat 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.rspec +3 -0
- data/CHANGELOG.md +9 -0
- data/Gemfile +4 -0
- data/Gemfile.lock +172 -0
- data/LICENSE.txt +21 -0
- data/README.md +652 -0
- data/Rakefile +12 -0
- data/apps/chat/chat.json +4 -0
- data/apps/chat/chat.md +42 -0
- data/apps/chat/chat.rb +79 -0
- data/apps/code/code.json +4 -0
- data/apps/code/code.md +42 -0
- data/apps/code/code.rb +77 -0
- data/apps/novel/novel.json +4 -0
- data/apps/novel/novel.md +36 -0
- data/apps/novel/novel.rb +77 -0
- data/apps/translate/translate.json +4 -0
- data/apps/translate/translate.md +37 -0
- data/apps/translate/translate.rb +81 -0
- data/assets/github.css +1036 -0
- data/assets/pigments-default.css +69 -0
- data/bin/monadic-chat +122 -0
- data/doc/img/code-example-time-html.png +0 -0
- data/doc/img/code-example-time.png +0 -0
- data/doc/img/example-translation.png +0 -0
- data/doc/img/how-research-mode-works.svg +1 -0
- data/doc/img/input-acess-token.png +0 -0
- data/doc/img/langacker-2001.svg +41 -0
- data/doc/img/linguistic-html.png +0 -0
- data/doc/img/monadic-chat-main-menu.png +0 -0
- data/doc/img/monadic-chat.svg +13 -0
- data/doc/img/readme-example-beatles-html.png +0 -0
- data/doc/img/readme-example-beatles.png +0 -0
- data/doc/img/research-mode-template.svg +198 -0
- data/doc/img/select-app-menu.png +0 -0
- data/doc/img/select-feature-menu.png +0 -0
- data/doc/img/state-monad.svg +154 -0
- data/doc/img/syntree-sample.png +0 -0
- data/lib/monadic_app.rb +115 -0
- data/lib/monadic_chat/console.rb +29 -0
- data/lib/monadic_chat/formatting.rb +110 -0
- data/lib/monadic_chat/helper.rb +72 -0
- data/lib/monadic_chat/interaction.rb +41 -0
- data/lib/monadic_chat/internals.rb +269 -0
- data/lib/monadic_chat/menu.rb +189 -0
- data/lib/monadic_chat/open_ai.rb +150 -0
- data/lib/monadic_chat/parameters.rb +109 -0
- data/lib/monadic_chat/version.rb +5 -0
- data/lib/monadic_chat.rb +190 -0
- data/monadic_chat.gemspec +54 -0
- data/samples/linguistic/linguistic.json +17 -0
- data/samples/linguistic/linguistic.md +39 -0
- data/samples/linguistic/linguistic.rb +74 -0
- metadata +343 -0
@@ -0,0 +1,189 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
class MonadicApp
|
4
|
+
##################################################
|
5
|
+
# methods for showing menu and menu items
|
6
|
+
##################################################
|
7
|
+
|
8
|
+
# Displays the in-app function menu and dispatches the selected action.
# Returns false when the user chooses "exit" (back to the main menu),
# true otherwise (keep the current app session running).
# Fix: "currrent" typo in the data/context menu entry.
def show_menu
  clear_screen
  print TTY::Cursor.save
  parameter = PROMPT_SYSTEM.select("Select function:", per_page: 10, cycle: true, filter: true, default: 1, show_help: :never) do |menu|
    menu.choice "#{BULLET} #{PASTEL.bold("cancel/return/escape")} cancel this menu", "cancel"
    menu.choice "#{BULLET} #{PASTEL.bold("params/settings/config")} show and change values of parameters", "params"
    menu.choice "#{BULLET} #{PASTEL.bold("data/context")} show current contextual info", "data"
    menu.choice "#{BULLET} #{PASTEL.bold("html")} view contextual info on the web browser", "html"
    menu.choice "#{BULLET} #{PASTEL.bold("reset")} reset context to original state", "reset"
    menu.choice "#{BULLET} #{PASTEL.bold("save")} save current contextual info to file", "save"
    menu.choice "#{BULLET} #{PASTEL.bold("load")} load current contextual info from file", "load"
    menu.choice "#{BULLET} #{PASTEL.bold("clear/clean")} clear screen", "clear"
    menu.choice "#{BULLET} #{PASTEL.bold("readme/documentation")} open readme/documentation", "readme"
    menu.choice "#{BULLET} #{PASTEL.bold("exit/bye/quit")} go back to main menu", "exit"
  end

  # Redraw from the position saved before the menu was shown.
  print TTY::Cursor.restore
  print TTY::Cursor.clear_screen_down
  print TTY::Cursor.restore

  case parameter
  when "cancel"
    return true
  when "params"
    change_parameter
  when "data"
    show_data
  when "html"
    set_html
  when "reset"
    reset
  when "save"
    save_data
  when "load"
    load_data
  when "clear"
    clear_screen
    print TTY::Cursor.clear_screen_down
  when "readme"
    MonadicChat.open_readme
  when "exit"
    return false
  end
  true
end
|
53
|
+
|
54
|
+
# Restores the request parameters and the conversation template to their
# initial state, then either confirms the reset on screen or, if the
# template still contains unfilled placeholders, asks for them again.
def reset
  @params = @params_original.dup

  case @method
  when "completions"
    # Research mode keeps the template as a plain text string.
    @template = @template_original.dup
  when "chat/completions"
    # Normal mode keeps the template as a parsed JSON structure.
    @template = JSON.parse @template_original
  end

  unless @placeholders.empty?
    fulfill_placeholders
    return
  end

  print PROMPT_SYSTEM.prefix
  print "Context and parameters have been reset.\n"
end
|
71
|
+
|
72
|
+
# Reports an API error (when +message+ is given) and asks whether to retry
# the last input. Returns the original +input+ on "yes", a freshly read
# user input on "no", and re-asks after displaying the context on "show".
def ask_retrial(input, message = nil)
  print PROMPT_SYSTEM.prefix
  print " Error: #{message.capitalize}\n" if message

  choice = PROMPT_USER.select("Do you want to try again?",
                              show_help: :never) do |menu|
    menu.choice "Yes", "yes"
    menu.choice "No", "no"
    menu.choice "Show current contextual data", "show"
  end

  case choice
  when "yes"
    input
  when "no"
    user_input
  when "show"
    show_data
    ask_retrial(input)
  end
end
|
91
|
+
|
92
|
+
# Validates a path entered at the save/load prompt.
# Returns true for an empty string (treated as "cancel"), or for a path
# that ends in ".json" and whose parent directory already exists.
def check_file(path)
  return true if path == ""

  parent_exists = Dir.exist?(File.dirname(File.expand_path(path)))
  path.strip.end_with?(".json") && parent_exists
end
|
96
|
+
|
97
|
+
# Prompts for a *.json path and writes the current contextual data there
# as pretty-printed JSON. Returns true on a successful write, false on a
# write error, nil when the user cancels with an empty path.
def save_data
  input = ""
  loop do
    print TTY::Cursor.save
    path = PROMPT_SYSTEM.readline("Enter the file path for the JSON file (including the file name and .json extension): ")
    # Accept only an empty string (cancel) or a .json path in an existing
    # directory; otherwise clear the prompt line and ask again.
    if check_file(path)
      input = path
      break
    else
      print TTY::Cursor.restore
      print TTY::Cursor.clear_screen_down
    end
  end
  print TTY::Cursor.save

  return if input.to_s == ""

  filepath = File.expand_path(input.strip)

  if File.exist? filepath
    overwrite = PROMPT_SYSTEM.select("#{filepath} already exists.\nOverwrite?",
                                     show_help: :never) do |menu|
      menu.choice "Yes", "yes"
      menu.choice "No", "no"
    end
    return if overwrite == "no"
  end

  # Pre-create the file so an unwritable location is caught early.
  # NOTE(review): on failure this recurses into save_data but then falls
  # through and still attempts the write below — confirm intended.
  FileUtils.touch(filepath)
  unless File.exist? filepath
    print "File cannot be created\n"
    save_data
  end

  begin
    File.open(filepath, "w") do |f|
      case @method
      when "completions"
        # Research mode: the context lives inside a fenced ```json block
        # embedded in the text template; extract it before saving.
        m = /\n\n```json\s*(\{.+\})\s*```\n\n/m.match(@template)
        f.write JSON.pretty_generate(JSON.parse(m[1]))
      when "chat/completions"
        # Normal mode: the template is already a JSON-compatible structure.
        f.write JSON.pretty_generate(@template)
      end

      print "Data has been saved successfully\n"
    end
    true
  rescue StandardError
    print "Error: Something went wrong"
    false
  end
end
|
149
|
+
|
150
|
+
# Prompts for a *.json path and loads it as the current contextual data,
# validating that the file's structure matches this app before applying it.
# Returns true on success, false on a parse/validation failure, nil when
# the user cancels with an empty path.
def load_data
  input = ""
  loop do
    print TTY::Cursor.save
    path = PROMPT_SYSTEM.readline("Enter the file path for the JSON file (press Enter to cancel): ")
    # Same validation as save_data: empty string cancels; otherwise the
    # path must end in .json and sit in an existing directory.
    if check_file(path)
      input = path
      break
    else
      print TTY::Cursor.restore
      print TTY::Cursor.clear_screen_down
    end
  end
  print TTY::Cursor.save

  return if input.to_s == ""

  begin
    filepath = File.expand_path(input.strip)
    json = File.read(filepath)
    data = JSON.parse(json)
    case @method
    when "completions"
      # Research mode: the saved "mode" must name this app (last segment
      # of the class name, lowercased).
      raise unless data["mode"] == self.class.name.downcase.split("::")[-1]

      # Splice the loaded JSON back into the fenced block of the template.
      new_template = @template.sub(/\n\n```json\s*\{.+\}\s*```\n\n/m, "\n\n```json\n#{JSON.pretty_generate(data).strip}\n```\n\n")
      @template = new_template
    when "chat/completions"
      # Normal mode: expect an OpenAI-style "messages" array.
      raise unless data["messages"] && data["messages"][0]["role"]

      @template["messages"] = data["messages"]
    end
    print "Data has been loaded successfully\n"
    true
  rescue StandardError
    print "The data structure is not valid for this app\n"
    false
  end
end
|
189
|
+
end
|
@@ -0,0 +1,150 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
require "http"
|
4
|
+
require "oj"
|
5
|
+
require "net/http"
|
6
|
+
require "uri"
|
7
|
+
require "strscan"
|
8
|
+
require "parallel"
|
9
|
+
require "tty-progressbar"
|
10
|
+
|
11
|
+
Oj.mimic_JSON
|
12
|
+
|
13
|
+
module OpenAI
|
14
|
+
# Default model id for each mode: the legacy text-completion model in
# research mode, otherwise the chat model.
def self.model_name(research_mode: false)
  if research_mode
    "text-davinci-003"
  else
    "gpt-3.5-turbo"
  end
end
|
17
|
+
|
18
|
+
# Maps a model id to the API endpoint that serves it.
# Returns nil for model ids this client does not know about.
def self.model_to_method(model)
  case model
  when "text-davinci-003"
    "completions"
  when "gpt-3.5-turbo", "gpt-3.5-turbo-0301"
    "chat/completions"
  end
end
|
25
|
+
|
26
|
+
# Sends a single request to the OpenAI REST API.
#
# access_token:: API key used in the Authorization header
# mode::        "post" or "get"
# method::      endpoint suffix, e.g. "completions", "chat/completions", "models"
# timeout_sec:: per-request HTTP timeout in seconds
# query::       request payload; when query["stream"] is truthy the
#               response is consumed as a server-sent-event stream
# block::       called with each text fragment while streaming
#
# Returns the parsed JSON response as a Hash. While streaming, the first
# event object is kept and its choices[0]["text"] is grown with every
# fragment, so the caller receives one merged response object.
def self.query(access_token, mode, method, timeout_sec = 60, query = {}, &block)
  target_uri = "https://api.openai.com/v1/#{method}"
  headers = {
    "Content-Type" => "application/json",
    "Authorization" => "Bearer #{access_token}"
  }
  headers["Accept"] = "text/event-stream" if query["stream"]
  http = HTTP.headers(headers)

  case mode
  when "post"
    res = http.timeout(timeout_sec).post(target_uri, json: query)
  when "get"
    res = http.timeout(timeout_sec).get(target_uri)
  end

  if query["stream"]
    json = nil
    res.body.each do |chunk|
      # SSE events are separated by a blank line; each starts with
      # "data: ", so [6..] strips that prefix before parsing.
      chunk.split("\n\n").each do |data|
        content = data.strip[6..]
        break if content == "[DONE]"

        stream = JSON.parse(content)
        fragment = case method
                   when "completions"
                     stream["choices"][0]["text"]
                   when "chat/completions"
                     # Chat deltas may omit "content" (e.g. role-only delta).
                     stream["choices"][0]["delta"]["content"] || ""
                   end
        block&.call fragment
        if !json
          # Keep the first event as the skeleton of the merged response.
          json = stream
        else
          case method
          when "completions"
            json["choices"][0]["text"] << fragment
          when "chat/completions"
            # Chat responses carry no "text" key, so accumulate one here.
            json["choices"][0]["text"] ||= +""
            json["choices"][0]["text"] << fragment
          end
        end
      end
    end
    json
  else
    JSON.parse res.body
  end
end
|
75
|
+
|
76
|
+
# Lists the models available to +access_token+, newest first.
# Returns an empty array when the response carries no "data" key
# (e.g. an error payload).
def self.models(access_token)
  payload = query(access_token, "get", "models")
  model_list = payload.fetch("data", [])
  model_list.sort_by { |entry| -entry["created"] }
end
|
80
|
+
|
81
|
+
# Thin wrapper around the OpenAI HTTP helpers that holds the API key,
# runs (and retries) completion requests, and extracts JSON payloads
# from model output.
#
# Fixes: ambiguous `response ["choices"]` index spacing (parser-warning
# form), redundant second CRLF gsub in #get_json, duplicated case
# branches merged.
class Completion
  attr_reader :access_token

  # access_token:: OpenAI API key
  # tmp_file::     optional path; when set, the raw JSON extracted by
  #                #get_json is mirrored to this file
  def initialize(access_token, tmp_file: nil)
    @access_token = access_token
    @tmp_file = tmp_file
  end

  # Lists the models available to this access token.
  def models
    OpenAI.models(@access_token)
  end

  # Sends +params+ to the endpoint matching params["model"], raising on an
  # API error or a truncated ("length") result. Retries the whole request
  # +num_retry+ more times before re-raising.
  def run(params, num_retry: 1, &block)
    method = OpenAI.model_to_method(params["model"])

    response = OpenAI.query(@access_token, "post", method, 60, params, &block)
    if response["error"]
      raise response["error"]["message"]
    elsif response["choices"][0]["finish_reason"] == "length"
      raise "finished because of length"
    end

    case method
    when "completions"
      get_json response["choices"][0]["text"]
    when "chat/completions"
      # NOTE(review): streamed chat responses built by OpenAI.query carry
      # the accumulated content under "text"; a non-streamed chat response
      # would put it under "message"/"content" — confirm callers always
      # stream.
      response["choices"][0]["text"]
    end
  rescue StandardError => e
    raise e if num_retry.zero?

    run(params, num_retry: num_retry - 1, &block)
  end

  # Extracts a JSON object from model output: prefers a <JSON>...</JSON>
  # wrapper, then falls back to the first {...} span; returns the input
  # unchanged when neither is present. CR/CRLF line endings are normalized
  # before parsing. Mirrors the extracted JSON string to @tmp_file when
  # that path is set.
  def get_json(data)
    case data
    when %r{<JSON>\n*(\{.+\})\n*</JSON>}m, /(\{.+\})/m
      json = Regexp.last_match(1).gsub(/\r\n?/, "\n")
      res = JSON.parse(json)
    else
      res = data
    end
    File.open(@tmp_file, "w") { |f| f.write json } if @tmp_file
    res
  end

  # Runs the model once per prompt, substituting each prompt into
  # +template+ at +replace_key+ and feeding the accumulated JSON state
  # back into the template's fenced ```json block between iterations.
  # Shows a progress bar. Returns the final parsed JSON state; raises a
  # parse error if +prompts+ is empty (no JSON was ever produced).
  def run_iteration(params, prompts, template, replace_key = "{{PROMPT}}", num_retry: 0)
    bar = TTY::ProgressBar.new("[:bar] :current/:total :total_byte :percent ET::elapsed ETA::eta",
                               total: prompts.size,
                               bar_format: :box)
    bar.start
    json = ""
    prompts.each do |prompt|
      params["prompt"] = template.sub(replace_key, prompt)
      res = run(params, num_retry: num_retry)
      json = JSON.pretty_generate(res)
      bar.advance(1)
      template = template.sub(/\n\n```json.+?```\n\n/m, "\n\n```json\n#{json}\n```\n\n")
    end
    bar.finish
    JSON.parse(json)
  end
end
|
150
|
+
end
|
@@ -0,0 +1,109 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
class MonadicApp
|
4
|
+
##################################################
|
5
|
+
# methods for parameter setting
|
6
|
+
##################################################
|
7
|
+
|
8
|
+
# Shows the list of tunable request parameters with their current values,
# prompts for a new value for the chosen one, and stores it in @params.
# Selecting a new model also resets the template, since templates are
# method-specific. Fix: "parmeter" typo in the select prompt.
def change_parameter
  parameter = PROMPT_SYSTEM.select("Select the parameter to be set:", per_page: 7, cycle: true, show_help: :never, filter: true, default: 1) do |menu|
    menu.choice "#{BULLET} Cancel", "cancel"
    menu.choice "#{BULLET} model: #{@params["model"]}", "model"
    menu.choice "#{BULLET} max_tokens: #{@params["max_tokens"]}", "max_tokens"
    menu.choice "#{BULLET} temperature: #{@params["temperature"]}", "temperature"
    menu.choice "#{BULLET} top_p: #{@params["top_p"]}", "top_p"
    menu.choice "#{BULLET} frequency_penalty: #{@params["frequency_penalty"]}", "frequency_penalty"
    menu.choice "#{BULLET} presence_penalty: #{@params["presence_penalty"]}", "presence_penalty"
  end
  return if parameter == "cancel"

  case parameter
  when "model"
    value = change_model
    # A new model may require a different endpoint/template format.
    case @method
    when "completions"
      @template = @template_original.dup
    when "chat/completions"
      @template = JSON.parse @template_original
    end
  when "max_tokens"
    value = change_max_tokens
  when "temperature"
    value = change_temperature
  when "top_p"
    value = change_top_p
  when "frequency_penalty"
    value = change_frequency_penalty
  when "presence_penalty"
    value = change_presence_penalty
  end
  # Each change_* helper returns nil when cancelled, so skip the update.
  @params[parameter] = value if value
  print "Parameter #{parameter} has been set to #{PASTEL.green(value)}\n" if value
end
|
43
|
+
|
44
|
+
# Prompts for a max_tokens value within the accepted range.
# Fix: the out-of-range message said "[1000 to 2048]" while the
# validation range is 1000-8000; the message now matches.
def change_max_tokens
  PROMPT_SYSTEM.ask("Set value of max tokens [1000 to 8000]:", convert: :int) do |q|
    q.in "1000-8000"
    q.messages[:range?] = "Value out of expected range [1000 to 8000]"
  end
end

# Prompts for a sampling temperature between 0.0 and 1.0.
def change_temperature
  PROMPT_SYSTEM.ask("Set value of temperature [0.0 to 1.0]:", convert: :float) do |q|
    q.in "0.0-1.0"
    q.messages[:range?] = "Value out of expected range [0.0 to 1.0]"
  end
end

# Prompts for a nucleus-sampling top_p value between 0.0 and 1.0.
def change_top_p
  PROMPT_SYSTEM.ask("Set value of top_p [0.0 to 1.0]:", convert: :float) do |q|
    q.in "0.0-1.0"
    q.messages[:range?] = "Value out of expected range [0.0 to 1.0]"
  end
end

# Prompts for a frequency penalty between -2.0 and 2.0.
def change_frequency_penalty
  PROMPT_SYSTEM.ask("Set value of frequency penalty [-2.0 to 2.0]:", convert: :float) do |q|
    q.in "-2.0-2.0"
    q.messages[:range?] = "Value out of expected range [-2.0 to 2.0]"
  end
end

# Prompts for a presence penalty between -2.0 and 2.0.
def change_presence_penalty
  PROMPT_SYSTEM.ask("Set value of presence penalty [-2.0 to 2.0]:", convert: :float) do |q|
    q.in "-2.0-2.0"
    q.messages[:range?] = "Value out of expected range [-2.0 to 2.0]"
  end
end
|
78
|
+
|
79
|
+
# Fetches the models usable with the current API method and lets the user
# pick one. Returns the selected model id, or nil when cancelled.
# Fix: TTY::Cursor.save/restore (and SPINNER positioning) return escape
# strings; the originals were bare expression statements and therefore
# no-ops — they must be printed to take effect, as done elsewhere in
# this file.
def change_model
  model = PROMPT_SYSTEM.select("Select a model:", per_page: 10, cycle: false, show_help: :never, filter: true, default: 1) do |menu|
    menu.choice "#{BULLET} Cancel", "cancel"
    print TTY::Cursor.save
    print SPINNER
    models = @completion.models
    go_up_and_clear
    print TTY::Cursor.restore
    # Offer only models served by the endpoint this app uses, newest first.
    models.filter { |m| OpenAI.model_to_method(m["id"]) == @method }.sort_by { |m| -m["created"] }.each do |m|
      menu.choice "#{BULLET} #{m["id"]}", m["id"]
    end
  end
  if model == "cancel"
    nil
  else
    model
  end
end
|
98
|
+
|
99
|
+
# Pretty-prints the current request parameters as Markdown, skipping the
# internal keys that are not user-tunable.
def show_params
  hidden_key = /\A(?:prompt|stream|logprobs|echo|stop)\z/
  params_md = +"# Current Parameter Values\n\n"
  @params.each do |key, val|
    next if hidden_key.match?(key)

    params_md << "- #{key}: #{val}\n"
  end
  print prompt_system, "\n"
  print "#{TTY::Markdown.parse(params_md, indent: 0).strip}\n\n"
end
|
109
|
+
end
|
data/lib/monadic_chat.rb
ADDED
@@ -0,0 +1,190 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
require "tty-cursor"
|
4
|
+
require "tty-screen"
|
5
|
+
require "tty-markdown"
|
6
|
+
require "tty-prompt"
|
7
|
+
require "tty-box"
|
8
|
+
require "pastel"
|
9
|
+
require "oj"
|
10
|
+
require "kramdown"
|
11
|
+
require "rouge"
|
12
|
+
require "launchy"
|
13
|
+
require "io/console"
|
14
|
+
require "readline"
|
15
|
+
|
16
|
+
require_relative "./monadic_chat/version"
|
17
|
+
require_relative "./monadic_chat/open_ai"
|
18
|
+
require_relative "./monadic_chat/helper"
|
19
|
+
|
20
|
+
Oj.mimic_JSON
|
21
|
+
|
22
|
+
module MonadicChat
|
23
|
+
CONFIG = File.join(Dir.home, "monadic_chat.conf")
|
24
|
+
NUM_RETRY = 2
|
25
|
+
MIN_LENGTH = 5
|
26
|
+
TIMEOUT_SEC = 120
|
27
|
+
TITLE_WIDTH = 72
|
28
|
+
|
29
|
+
APPS_DIR = File.absolute_path(File.join(__dir__, "..", "apps"))
|
30
|
+
APPS_DIR_LIST = Dir.entries(APPS_DIR)
|
31
|
+
.reject { |entry| /\A\./ =~ entry || /\A_/ =~ entry.split("/").last }
|
32
|
+
.map { |entry| File.join(APPS_DIR, entry) }
|
33
|
+
templates = {}
|
34
|
+
APPS_DIR_LIST.each do |app|
|
35
|
+
basename = File.basename(app, ".*")
|
36
|
+
normal_mode_template = File.absolute_path(File.join(app, "#{basename}.json"))
|
37
|
+
templates["normal/#{basename}"] = normal_mode_template if File.exist? normal_mode_template
|
38
|
+
research_mode_template = File.absolute_path(File.join(app, "#{basename}.md"))
|
39
|
+
templates["research/#{basename}"] = research_mode_template if File.exist? research_mode_template
|
40
|
+
end
|
41
|
+
APPS = APPS_DIR_LIST.map { |dir| File.basename(dir, ".*") }
|
42
|
+
|
43
|
+
TEMPLATES = templates
|
44
|
+
|
45
|
+
PASTEL = Pastel.new
|
46
|
+
|
47
|
+
TEMP_HTML = File.join(Dir.home, "monadic_chat.html")
|
48
|
+
TEMP_JSON = File.join(Dir.home, "monadic_chat.json")
|
49
|
+
|
50
|
+
style = +File.read(File.join(__dir__, "..", "assets", "github.css")).gsub(".markdown-") { "" }
|
51
|
+
style << File.read(File.join(__dir__, "..", "assets", "pigments-default.css"))
|
52
|
+
style << <<~CSS
|
53
|
+
body {
|
54
|
+
margin: 50px;
|
55
|
+
font-family: "Helvetica Neue", Arial, "Hiragino Kaku Gothic ProN", "Hiragino Sans", Meiryo, sans-serif;
|
56
|
+
color: #333
|
57
|
+
}
|
58
|
+
.monadic_user{
|
59
|
+
display:inline-block;
|
60
|
+
padding-left: 0.5em;
|
61
|
+
padding-right: 0.5em;
|
62
|
+
font-weight: bold;
|
63
|
+
background-color: #c8e5ff;
|
64
|
+
margin-bottom: 0.5em;
|
65
|
+
}
|
66
|
+
.monadic_chat {
|
67
|
+
display:inline-block;
|
68
|
+
padding-left: 0.5em;
|
69
|
+
padding-right: 0.5em;
|
70
|
+
font-weight: bold;
|
71
|
+
background-color: #ffcaca;
|
72
|
+
margin-bottom: 0.5em;
|
73
|
+
}
|
74
|
+
.monadic_system {
|
75
|
+
display:inline-block;
|
76
|
+
padding-left: 0.5em;
|
77
|
+
padding-right: 0.5em;
|
78
|
+
font-weight: bold;
|
79
|
+
background-color: #c4ffcb;
|
80
|
+
margin-bottom: 0.5em;
|
81
|
+
}
|
82
|
+
.monadic_gray {
|
83
|
+
display:inline-block;
|
84
|
+
font-weight: bold;
|
85
|
+
color: #999;
|
86
|
+
margin-bottom: 0.5em;
|
87
|
+
}
|
88
|
+
.monadic_app {
|
89
|
+
display:inline-block;
|
90
|
+
font-weight: bold;
|
91
|
+
color: #EB742B;
|
92
|
+
margin-bottom: 0.5em;
|
93
|
+
}
|
94
|
+
CSS
|
95
|
+
GITHUB_STYLE = style
|
96
|
+
|
97
|
+
# Loads every bundled app by requiring <app_dir>/<app_name>.rb
# (each app directory contains a Ruby file named after the directory).
def self.require_apps
  MonadicChat::APPS_DIR_LIST.each do |app_dir|
    app_name = File.basename(app_dir)
    require File.join(app_dir, app_name)
  end
end
|
103
|
+
|
104
|
+
# Opens the project README in a browser: `open` on macOS, `xdg-open` on
# Linux when available; otherwise just echoes the URL to the terminal.
# NOTE(review): the gem already depends on launchy, which handles this
# portably — consider Launchy.open(url) instead of shelling out.
def self.open_readme
  url = "https://github.com/yohasebe/monadic-chat/"
  shellscript = <<~SHELL
    if [[ "$OSTYPE" == "darwin"* ]]; then
      open "#{url}"
    elif [[ "$OSTYPE" == "linux-gnu"* ]]; then
      if command -v xdg-open >/dev/null 2>&1; then
        xdg-open "#{url}"
      else
        echo "#{url}"
      fi
    else
      echo "#{url}"
    fi
  SHELL
  `#{shellscript}`
end
|
121
|
+
|
122
|
+
# Resolves an OpenAI access token (from ENV, the saved config file, or an
# interactive prompt), validates it against the API, and persists it.
# Returns the OpenAI::Completion client built from the working token.
# Fix: the overwrite/first-run branches previously discarded the client
# returned by the check lambda and returned the File.open result instead;
# the client is now captured and returned consistently.
def self.authenticate(overwrite: false)
  # Validates a token by listing models; returns an OpenAI::Completion on
  # success, or re-prompts (overwrite: true) on failure.
  check = lambda do |token|
    print "Checking configuration #{SPINNER} "
    begin
      raise if OpenAI.models(token).empty?

      print "success\n"
      OpenAI::Completion.new(token, tmp_file: TEMP_JSON)
    rescue StandardError
      print "failure.\n"
      authenticate(overwrite: true)
    end
  end

  # Persists the token to CONFIG and prints the given confirmation.
  # NOTE(review): when check fails and recurses, this still rewrites the
  # config with the token entered at THIS level afterwards, clobbering the
  # token saved by the recursive call — preserved from the original flow;
  # confirm intended.
  save_config = lambda do |token, message|
    File.open(CONFIG, "w") do |f|
      config = { "access_token" => token }
      f.write(JSON.pretty_generate(config))
      print message
    end
  end

  access_token = ENV["OPENAI_API_KEY"]
  if overwrite
    access_token = PROMPT_SYSTEM.ask(" Input your OpenAI access token:")
    completion = check.call(access_token)
    save_config.call(access_token, "New access token has been saved to #{CONFIG}\n")
    completion
  elsif File.exist?(CONFIG)
    json = File.read(CONFIG)
    config = JSON.parse(json)
    check.call(config["access_token"])
  else
    access_token ||= PROMPT_SYSTEM.ask(" Input your OpenAI access token:")
    completion = check.call(access_token)
    save_config.call(access_token, "Access token has been saved to #{CONFIG}\n")
    completion
  end
end
|
163
|
+
|
164
|
+
# Colored "System" label used as the system prompt prefix.
def self.prompt_system
  "\n#{PASTEL.on_green("System".center(8, " "))}"
end

# Colored "User" label used as the user prompt prefix.
def self.prompt_user
  "\n#{PASTEL.on_blue("User".center(6, " "))}"
end

# Colored "GPT" label used as the assistant prompt prefix.
def self.prompt_assistant
  "\n#{PASTEL.on_red("GPT".center(5, " "))}"
end
|
184
|
+
|
185
|
+
PROMPT_USER = TTY::PromptX.new(active_color: :blue, prefix: prompt_user)
|
186
|
+
PROMPT_SYSTEM = TTY::PromptX.new(active_color: :blue, prefix: "#{prompt_system} ")
|
187
|
+
PROMPT_ASSISTANT = TTY::PromptX.new(active_color: :red, prefix: "#{prompt_assistant} ")
|
188
|
+
SPINNER = "▹▹▹▹"
|
189
|
+
BULLET = "\e[33m●\e[0m"
|
190
|
+
end
|
@@ -0,0 +1,54 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
require_relative "lib/monadic_chat/version"
|
4
|
+
|
5
|
+
# Gem specification for monadic-chat.
Gem::Specification.new do |spec|
  spec.name = "monadic-chat"
  spec.version = MonadicChat::VERSION
  spec.authors = ["yohasebe"]
  spec.email = ["yohasebe@gmail.com"]

  spec.summary = "Highly configurable CLI client app for OpenAI chat/text-completion API"
  spec.description = <<~DESC
    Monadic Chat is a command-line client application program that uses OpenAI's Text Completion API and Chat API to enable chat-style conversations with OpenAI's artificial intelligence system in a ChatGPT-like style.
  DESC
  spec.homepage = "https://github.com/yohasebe/monadic-chat"
  spec.license = "MIT"
  spec.required_ruby_version = ">= 2.6.0"

  spec.metadata["allowed_push_host"] = "https://rubygems.org"

  spec.metadata["homepage_uri"] = spec.homepage
  spec.metadata["source_code_uri"] = "https://github.com/yohasebe/monadic-chat"
  spec.metadata["changelog_uri"] = "https://github.com/yohasebe/monadic-chat/CHANGELOG.md"

  # Specify which files should be added to the gem when it is released.
  # The `git ls-files -z` loads the files in the RubyGem that have been added into git.
  # Test/spec/CI files and this gemspec itself are excluded from the package.
  spec.files = Dir.chdir(__dir__) do
    `git ls-files -z`.split("\x0").reject do |f|
      (f == __FILE__) || f.match(%r{\A(?:(?:test|spec|features)/|\.(?:git|circleci)|appveyor)})
    end
  end
  spec.bindir = "bin"
  spec.executables = ["monadic-chat"]
  spec.require_paths = ["lib"]

  # Development-only tooling.
  spec.add_development_dependency "bundler"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rspec"
  spec.add_development_dependency "solargraph"

  # Runtime dependencies: HTTP client, JSON, Markdown/terminal rendering.
  spec.add_dependency "http"
  spec.add_dependency "kramdown"
  spec.add_dependency "launchy"
  spec.add_dependency "oj"
  spec.add_dependency "parallel"
  spec.add_dependency "pastel"
  spec.add_dependency "rouge"
  spec.add_dependency "tty-box"
  spec.add_dependency "tty-cursor"
  spec.add_dependency "tty-markdown"
  spec.add_dependency "tty-progressbar"
  spec.add_dependency "tty-prompt"
  spec.add_dependency "tty-screen"
end
|