monadic-chat 0.2.2 → 0.3.1

Sign up to get free protection for your applications and to get access to all the features.
@@ -39,35 +39,15 @@ class MonadicApp
39
39
  end
40
40
  end
41
41
 
42
- def wait
43
- return self if @threads.empty?
44
-
45
- print TTY::Cursor.save
46
- message = PASTEL.red "Processing contextual data #{SPINNER} "
47
- print message
48
-
49
- TIMEOUT_SEC.times do |i|
50
- raise MonadicError, "Error: something went wrong" if i + 1 == TIMEOUT_SEC
51
-
52
- break if @threads.empty?
53
-
54
- sleep 1
55
- end
56
- print TTY::Cursor.restore
57
- print TTY::Cursor.clear_char(message.size)
58
-
59
- self
60
- end
61
-
62
42
  def objectify
63
- case @method
64
- when RESEARCH_MODE
65
- m = /\n\n```json\s*(\{.+\})\s*```\n\n/m.match(@template)
43
+ case @mode
44
+ when :research
45
+ m = /JSON:\n+```json\s*(\{.+\})\s*```\n\n/m.match(@template)
66
46
  json = m[1].gsub(/(?!\\\\\\)\\\\"/) { '\\\"' }
67
47
  res = JSON.parse(json)
68
48
  res["messages"] = @messages
69
49
  res
70
- when NORMAL_MODE
50
+ when :normal
71
51
  @messages
72
52
  end
73
53
  end
@@ -76,8 +56,9 @@ class MonadicApp
76
56
  params = @params.dup
77
57
 
78
58
  @update_proc.call
79
- case @method
80
- when RESEARCH_MODE
59
+
60
+ case @mode
61
+ when :research
81
62
  messages = +""
82
63
  system = +""
83
64
  @messages.each do |mes|
@@ -87,20 +68,28 @@ class MonadicApp
87
68
  when "system"
88
69
  system << "#{content}\n"
89
70
  when "assistant", "gpt"
90
- system << "- #{mes["role"].strip}: #{content.sub("\n\n###\n\n", "")}\n\n###\n\n"
71
+ messages << "- #{mes["role"].strip}: #{content}\n"
91
72
  else
92
- messages << "- #{mes["role"].strip}: #{mes["content"]}"
73
+ messages << "- #{mes["role"].strip}: #{mes["content"]}\n"
93
74
  end
94
75
  end
95
76
  template = @template.dup.sub("{{SYSTEM}}", system)
96
77
  .sub("{{PROMPT}}", input)
97
78
  .sub("{{MESSAGES}}", messages.strip)
98
79
 
99
- params["prompt"] = template
80
+ File.open(TEMP_MD, "w") { |f| f.write template }
81
+
100
82
  @messages << { "role" => "user", "content" => input }
101
- when NORMAL_MODE
83
+
84
+ case @method
85
+ when "completions"
86
+ params["prompt"] = template
87
+ when "chat/completions"
88
+ params["messages"] = [{ "role" => "system", "content" => template }]
89
+ end
90
+
91
+ when :normal
102
92
  @messages << { "role" => "user", "content" => input }
103
- @update_proc.call
104
93
  params["messages"] = @messages
105
94
  end
106
95
 
@@ -108,36 +97,31 @@ class MonadicApp
108
97
  end
109
98
 
110
99
  def update_template(res)
111
- case @method
112
- when RESEARCH_MODE
100
+ case @mode
101
+ when :research
113
102
  @metadata = res
114
- @messages << { "role" => "assistant", "content" => res["response"] }
115
- json = res.to_json.strip
116
- @template.sub!(/\n\n```json.+```\n\n/m, "\n\n```json\n#{json}\n```\n\n")
117
- when NORMAL_MODE
103
+ @messages << { "role" => "assistant", "content" => @metadata["response"] }
104
+ json = @metadata.to_json.strip
105
+ File.open(TEMP_JSON, "w") { |f| f.write json }
106
+ @template.sub!(/JSON:\n+```json.+```\n\n/m, "JSON:\n\n```json\n#{json}\n```\n\n")
107
+ when :normal
118
108
  @messages << { "role" => "assistant", "content" => res }
119
109
  end
120
110
  end
121
111
 
122
112
  ##################################################
123
- # functions for binding data
113
+ # function to bind data
124
114
  ##################################################
125
115
 
126
- def bind_normal_mode(input, num_retry: 0)
116
+ def bind(input, num_retry: 0)
127
117
  print PROMPT_ASSISTANT.prefix, "\n"
128
- print TTY::Cursor.save
129
-
130
- wait
131
-
132
118
  params = prepare_params(input)
133
-
134
- print TTY::Cursor.save
119
+ research_mode = @mode == :research
135
120
 
136
121
  escaping = +""
137
122
  last_chunk = +""
138
- response = +""
139
- spinning = false
140
- res = @completion.run(params, num_retry: num_retry) do |chunk|
123
+
124
+ res = @completion.run(params, research_mode: research_mode, num_retry: num_retry) do |chunk|
141
125
  if escaping
142
126
  chunk = escaping + chunk
143
127
  escaping = ""
@@ -147,147 +131,17 @@ class MonadicApp
147
131
  escaping += chunk
148
132
  next
149
133
  else
150
- chunk = chunk.gsub('\\n', "\n")
151
- response << chunk
152
- end
153
-
154
- if count_lines_below > 1
155
- print PASTEL.magenta(last_chunk)
156
- elsif !spinning
157
- print PASTEL.red SPINNER
158
- spinning = true
134
+ chunk = chunk.gsub('\\n') { "\n" }
159
135
  end
160
136
 
137
+ print last_chunk
161
138
  last_chunk = chunk
162
139
  end
163
140
 
164
- print TTY::Cursor.restore
165
- print TTY::Cursor.clear_screen_down
166
-
167
- text = response.gsub(/(?<![\\>\s])(?!\n[\n<])\n/m) { "{{NEWLINE}}\n" }
168
- text = text.gsub(/```(.+?)```/m) do
169
- m = Regexp.last_match
170
- "```#{m[1].gsub("{{NEWLINE}}\n") { "\n" }}```"
171
- end
172
- text = text.gsub(/`(.+?)`/) do
173
- m = Regexp.last_match
174
- "`#{m[1].gsub("{{NEWLINE}}\n") { "\n" }}`"
175
- end
176
-
177
- text = text.gsub(/(?!\\\\)\\/) { "" }
178
- print TTY::Markdown.parse(text).gsub("{{NEWLINE}}") { "\n" }.strip
141
+ print last_chunk
179
142
  print "\n"
180
143
 
181
144
  update_template(res)
182
145
  set_html if @html
183
146
  end
184
-
185
- def bind_research_mode(input, num_retry: 0)
186
- print PROMPT_ASSISTANT.prefix, "\n"
187
-
188
- wait
189
-
190
- params = prepare_params(input)
191
-
192
- print TTY::Cursor.save
193
-
194
- @threads << true
195
- Thread.new do
196
- response_all_shown = false
197
- key_start = /"#{@prop_newdata}":\s*"/
198
- key_finish = /\s+###\s*"/m
199
- started = false
200
- escaping = +""
201
- last_chunk = +""
202
- finished = false
203
- response = +""
204
- spinning = false
205
- res = @completion.run(params, num_retry: num_retry, tmp_json_file: TEMP_JSON, tmp_md_file: TEMP_MD) do |chunk|
206
- if finished && !response_all_shown
207
- response_all_shown = true
208
- @responses << response.sub(/\s+###\s*".*/m, "")
209
- if spinning
210
- TTY::Cursor.backword(" ▹▹▹▹▹ ".size)
211
- TTY::Cursor.clear_char(" ▹▹▹▹▹ ".size)
212
- end
213
- end
214
-
215
- unless finished
216
- if escaping
217
- chunk = escaping + chunk
218
- escaping = ""
219
- end
220
-
221
- if /(?:\\\z)/ =~ chunk
222
- escaping += chunk
223
- next
224
- else
225
- chunk = chunk.gsub('\\n', "\n")
226
- response << chunk
227
- end
228
-
229
- if started && !finished
230
- if key_finish =~ response
231
- finished = true
232
- else
233
- if count_lines_below > 1
234
- print PASTEL.magenta(last_chunk)
235
- elsif !spinning
236
- print PASTEL.red SPINNER
237
- spinning = true
238
- end
239
- last_chunk = chunk
240
- end
241
- elsif !started && !finished && key_start =~ response
242
- started = true
243
- response = +""
244
- end
245
- end
246
- end
247
-
248
- unless response_all_shown
249
- if spinning
250
- TTY::Cursor.backword(SPINNER.size)
251
- TTY::Cursor.clear_char(SPINNER.size)
252
- end
253
- @responses << response.sub(/\s+###\s*".*/m, "")
254
- end
255
-
256
- update_template(res)
257
- @threads.clear
258
- rescue StandardError => e
259
- @threads.clear
260
- @responses << <<~ERROR
261
- Error: something went wrong in a thread"
262
- #{e.message}
263
- #{e.backtrace}
264
- ERROR
265
- end
266
-
267
- loop do
268
- if @responses.empty?
269
- sleep 1
270
- else
271
- print TTY::Cursor.restore
272
- print TTY::Cursor.clear_screen_down
273
- text = @responses.pop
274
-
275
- text = text.gsub(/(?<![\\>\s])(?!\n[\n<])\n/m) { "{{NEWLINE}}\n" }
276
- text = text.gsub(/```(.+?)```/m) do
277
- m = Regexp.last_match
278
- "```#{m[1].gsub("{{NEWLINE}}\n") { "\n" }}```"
279
- end
280
- text = text.gsub(/`(.+?)`/) do
281
- m = Regexp.last_match
282
- "`#{m[1].gsub("{{NEWLINE}}\n") { "\n" }}`"
283
- end
284
-
285
- text = text.gsub(/(?!\\\\)\\/) { "" }
286
- print TTY::Markdown.parse(text).gsub("{{NEWLINE}}") { "\n" }.strip
287
- print "\n"
288
- break
289
- end
290
- end
291
- set_html if @html
292
- end
293
147
  end
@@ -127,13 +127,13 @@ class MonadicApp
127
127
 
128
128
  begin
129
129
  File.open(filepath, "w") do |f|
130
- case @method
131
- when RESEARCH_MODE
132
- m = /\n\n```json\s*(\{.+\})\s*```\n\n/m.match(@template)
130
+ case @mode
131
+ when :research
132
+ m = /JSON:\n+```json\s*(\{.+\})\s*```\n\n/m.match(@template)
133
133
  data = JSON.parse(m[1])
134
134
  data["messages"] = @messages
135
135
  f.write JSON.pretty_generate(data)
136
- when NORMAL_MODE
136
+ when :normal
137
137
  f.write JSON.pretty_generate({ "messages" => @messages })
138
138
  end
139
139
 
@@ -167,15 +167,15 @@ class MonadicApp
167
167
  filepath = File.expand_path(input.strip)
168
168
  json = File.read(filepath)
169
169
  data = JSON.parse(json)
170
- case @method
171
- when RESEARCH_MODE
170
+ case @mode
171
+ when :research
172
172
  self.class.name.downcase.split("::")[-1]
173
173
 
174
174
  raise unless data["mode"] == self.class.name.downcase.split("::")[-1]
175
175
 
176
176
  @messages = data.delete "messages"
177
- @template = @template.sub(/\n\n```json\s*\{.+\}\s*```\n\n/m, "\n\n```json\n#{JSON.pretty_generate(data).strip}\n```\n\n")
178
- when NORMAL_MODE
177
+ @template = @template.sub(/JSON:\n+```json\s*\{.+\}\s*```\n\n/m, "JSON:\n\n```json\n#{JSON.pretty_generate(data).strip}\n```\n\n")
178
+ when :normal
179
179
  raise unless data["messages"] && data["messages"][0]["role"]
180
180
 
181
181
  @messages = data["messages"]
@@ -12,7 +12,11 @@ Oj.mimic_JSON
12
12
 
13
13
  module OpenAI
14
14
  def self.model_name(research_mode: false)
15
- research_mode ? "text-davinci-003" : "gpt-3.5-turbo"
15
+ if research_mode
16
+ "text-davinci-003"
17
+ else
18
+ "gpt-3.5-turbo"
19
+ end
16
20
  end
17
21
 
18
22
  def self.model_to_method(model)
@@ -85,15 +89,25 @@ module OpenAI
85
89
  class Completion
86
90
  attr_reader :access_token
87
91
 
88
- def initialize(access_token)
92
+ def initialize(access_token, normal_mode_model = nil, research_mode_model = nil)
89
93
  @access_token = access_token
94
+ @normal_mode_model = normal_mode_model || OpenAI.model_name(research_mode: false)
95
+ @research_mode_model = research_mode_model || OpenAI.model_name(research_mode: true)
96
+ end
97
+
98
+ def model_name(research_mode: false)
99
+ if research_mode
100
+ @research_mode_model
101
+ else
102
+ @normal_mode_model
103
+ end
90
104
  end
91
105
 
92
106
  def models
93
107
  OpenAI.models(@access_token)
94
108
  end
95
109
 
96
- def run(params, num_retry: 1, tmp_json_file: nil, tmp_md_file: nil, &block)
110
+ def run(params, research_mode: false, num_retry: 1, &block)
97
111
  method = OpenAI.model_to_method(params["model"])
98
112
 
99
113
  response = OpenAI.query(@access_token, "post", method, 60, params, &block)
@@ -103,12 +117,10 @@ module OpenAI
103
117
  raise "finished because of length"
104
118
  end
105
119
 
106
- case method
107
- when "completions"
108
- File.open(tmp_md_file, "w") { |f| f.write params["prompt"] } if tmp_md_file
109
- get_json(response["choices"][0]["text"], tmp_json_file: tmp_json_file)
110
- when "chat/completions"
111
- response ["choices"][0]["text"]
120
+ if research_mode
121
+ get_json response["choices"][0]["text"]
122
+ else
123
+ response["choices"][0]["text"]
112
124
  end
113
125
  rescue StandardError => e
114
126
  case num_retry
@@ -119,9 +131,9 @@ module OpenAI
119
131
  end
120
132
  end
121
133
 
122
- def get_json(data, tmp_json_file: nil)
134
+ def get_json(data)
123
135
  case data
124
- when %r{<JSON>\n*(\{.+\})\n*</JSON>}m
136
+ when %r{<JSON>\n*(\{.+?\})\n*</JSON>}m
125
137
  json = Regexp.last_match(1).gsub(/\r\n?/, "\n").gsub(/\r\n/) { "\n" }
126
138
  res = JSON.parse(json)
127
139
  when /(\{.+\})/m
@@ -130,7 +142,6 @@ module OpenAI
130
142
  else
131
143
  res = data
132
144
  end
133
- File.open(tmp_json_file, "w") { |f| f.write json } if tmp_json_file
134
145
  res
135
146
  end
136
147
 
@@ -143,9 +154,9 @@ module OpenAI
143
154
  prompts.each do |prompt|
144
155
  params["prompt"] = template.sub(replace_key, prompt)
145
156
  res = run(params, num_retry: num_retry)
146
- json = JSON.pretty_generate(res)
157
+ json = JSON.pretty_generate(get_json(res))
147
158
  bar.advance(1)
148
- template = template.sub(/\n\n```json.+?```\n\n/m, "\n\n```json\n#{json}\n```\n\n")
159
+ template = template.sub(/JSON:\n+```json.+?```\n\n/m, "JSON:\n\n```json\n#{json}\n```\n\n")
149
160
  end
150
161
  bar.finish
151
162
  JSON.parse(json)
@@ -20,11 +20,7 @@ class MonadicApp
20
20
  case parameter
21
21
  when "model"
22
22
  value = change_model
23
- @messages = @messages_initial.dup
24
- case @method
25
- when RESEARCH_MODE
26
- @template = @template_initial.dup
27
- end
23
+ @method = OpenAI.model_to_method(value)
28
24
  when "max_tokens"
29
25
  value = change_max_tokens
30
26
  when "temperature"
@@ -79,13 +75,19 @@ class MonadicApp
79
75
  model = PROMPT_SYSTEM.select("Select a model:", per_page: 10, cycle: false, show_help: :never, filter: true, default: 1) do |menu|
80
76
  menu.choice "#{BULLET} Cancel", "cancel"
81
77
  TTY::Cursor.save
82
- print SPINNER
78
+ SPINNER.auto_spin
83
79
  models = @completion.models
84
- go_up_and_clear
85
- TTY::Cursor.restore
80
+ SPINNER.stop
86
81
  TTY::Cursor.restore
87
- models.filter { |m| OpenAI.model_to_method(m["id"]) == @method }.sort_by { |m| -m["created"] }.each do |m|
88
- menu.choice "#{BULLET} #{m["id"]}", m["id"]
82
+ case @mode
83
+ when :research
84
+ models.filter { |m| ["completions", "chat/completions"].include? OpenAI.model_to_method(m["id"]) }.sort_by { |m| -m["created"] }.each do |m|
85
+ menu.choice "#{BULLET} #{m["id"]}", m["id"]
86
+ end
87
+ when :normal
88
+ models.filter { |m| OpenAI.model_to_method(m["id"]) == "chat/completions" }.sort_by { |m| -m["created"] }.each do |m|
89
+ menu.choice "#{BULLET} #{m["id"]}", m["id"]
90
+ end
89
91
  end
90
92
  end
91
93
  if model == "cancel"
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module MonadicChat
4
- VERSION = "0.2.2"
4
+ VERSION = "0.3.1"
5
5
  end
data/lib/monadic_chat.rb CHANGED
@@ -3,6 +3,7 @@
3
3
  require "tty-cursor"
4
4
  require "tty-screen"
5
5
  require "tty-markdown"
6
+ require "tty-spinner"
6
7
  require "tty-prompt"
7
8
  require "tty-box"
8
9
  require "pastel"
@@ -25,8 +26,6 @@ module MonadicChat
25
26
  MIN_LENGTH = 5
26
27
  TIMEOUT_SEC = 120
27
28
  TITLE_WIDTH = 72
28
- NORMAL_MODE = "chat/completions"
29
- RESEARCH_MODE = "completions"
30
29
 
31
30
  APPS_DIR = File.absolute_path(File.join(__dir__, "..", "apps"))
32
31
  APPS_DIR_LIST = Dir.entries(APPS_DIR)
@@ -123,15 +122,37 @@ module MonadicChat
123
122
  end
124
123
 
125
124
  def self.authenticate(overwrite: false)
126
- check = lambda do |token|
127
- print "Checking configuration #{SPINNER} "
125
+ check = lambda do |token, normal_mode_model, research_mode_model|
126
+ print "Checking configuration\n"
127
+ SPINNER.auto_spin
128
128
  begin
129
- raise if OpenAI.models(token).empty?
129
+ models = OpenAI.models(token)
130
+ raise if models.empty?
130
131
 
131
- print "success\n"
132
- OpenAI::Completion.new(token)
132
+ SPINNER.stop
133
+
134
+ print "Success\n"
135
+
136
+ if normal_mode_model && !models.map { |m| m["id"] }.index(normal_mode_model)
137
+ SPINNER.stop
138
+ print "Normal mode model set in config file not available.\n"
139
+ normal_mode_model = false
140
+ end
141
+ normal_mode_model ||= OpenAI.model_name(research_mode: false)
142
+ print "Normal mode model: #{normal_mode_model}\n"
143
+
144
+ if research_mode_model && !models.map { |m| m["id"] }.index(research_mode_model)
145
+ SPINNER.stop
146
+ print "Research mode model set in config file not available.\n"
147
+ print "Fallback to the default model (#{OpenAI.model_name(research_mode: true)}).\n"
148
+ end
149
+ research_mode_model ||= OpenAI.model_name(research_mode: true)
150
+ print "Research mode model: #{research_mode_model}\n"
151
+
152
+ OpenAI::Completion.new(token, normal_mode_model, research_mode_model)
133
153
  rescue StandardError
134
- print "failure.\n"
154
+ SPINNER.stop
155
+ print "Authentication: failure.\n"
135
156
  false
136
157
  end
137
158
  end
@@ -142,7 +163,7 @@ module MonadicChat
142
163
  access_token = PROMPT_SYSTEM.ask(" Input your OpenAI access token:")
143
164
  return false if access_token.to_s == ""
144
165
 
145
- completion = check.call(access_token)
166
+ completion = check.call(access_token, nil, nil)
146
167
 
147
168
  if completion
148
169
  File.open(CONFIG, "w") do |f|
@@ -153,12 +174,19 @@ module MonadicChat
153
174
  end
154
175
  elsif File.exist?(CONFIG)
155
176
  json = File.read(CONFIG)
156
- config = JSON.parse(json)
177
+ begin
178
+ config = JSON.parse(json)
179
+ rescue JSON::ParserError
180
+ puts "Error: config file does not contain a valid JSON object."
181
+ exit
182
+ end
157
183
  access_token = config["access_token"]
158
- completion = check.call(access_token)
184
+ normal_mode_model = config["normal_mode_model"]
185
+ research_mode_model = config["research_mode_model"]
186
+ completion = check.call(access_token, normal_mode_model, research_mode_model)
159
187
  else
160
188
  access_token ||= PROMPT_SYSTEM.ask(" Input your OpenAI access token:")
161
- completion = check.call(access_token)
189
+ completion = check.call(access_token, nil, nil)
162
190
  if completion
163
191
  File.open(CONFIG, "w") do |f|
164
192
  config = { "access_token" => access_token }
@@ -194,6 +222,8 @@ module MonadicChat
194
222
  PROMPT_USER = TTY::PromptX.new(active_color: :blue, prefix: prompt_user)
195
223
  PROMPT_SYSTEM = TTY::PromptX.new(active_color: :blue, prefix: "#{prompt_system} ")
196
224
  PROMPT_ASSISTANT = TTY::PromptX.new(active_color: :red, prefix: "#{prompt_assistant} ")
197
- SPINNER = "▹▹▹▹"
225
+
226
+ SPINNER = TTY::Spinner.new(format: :arrow_pulse, clear: true)
227
+
198
228
  BULLET = "\e[33m●\e[0m"
199
229
  end
data/monadic_chat.gemspec CHANGED
@@ -50,4 +50,5 @@ Gem::Specification.new do |spec|
50
50
  spec.add_dependency "tty-progressbar"
51
51
  spec.add_dependency "tty-prompt"
52
52
  spec.add_dependency "tty-screen"
53
+ spec.add_dependency "tty-spinner"
53
54
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: monadic-chat
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.2.2
4
+ version: 0.3.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - yohasebe
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2023-03-21 00:00:00.000000000 Z
11
+ date: 2023-03-24 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: bundler
@@ -234,6 +234,20 @@ dependencies:
234
234
  - - ">="
235
235
  - !ruby/object:Gem::Version
236
236
  version: '0'
237
+ - !ruby/object:Gem::Dependency
238
+ name: tty-spinner
239
+ requirement: !ruby/object:Gem::Requirement
240
+ requirements:
241
+ - - ">="
242
+ - !ruby/object:Gem::Version
243
+ version: '0'
244
+ type: :runtime
245
+ prerelease: false
246
+ version_requirements: !ruby/object:Gem::Requirement
247
+ requirements:
248
+ - - ">="
249
+ - !ruby/object:Gem::Version
250
+ version: '0'
237
251
  description: 'Monadic Chat is a command-line client application program that uses
238
252
  OpenAI''s Text Completion API and Chat API to enable chat-style conversations with
239
253
  OpenAI''s artificial intelligence system in a ChatGPT-like style.
@@ -275,10 +289,8 @@ files:
275
289
  - doc/img/code-example-time-html.png
276
290
  - doc/img/code-example-time.png
277
291
  - doc/img/example-translation.png
278
- - doc/img/extra-template-json.png
279
292
  - doc/img/how-research-mode-works.svg
280
293
  - doc/img/input-acess-token.png
281
- - doc/img/langacker-2001.svg
282
294
  - doc/img/linguistic-html.png
283
295
  - doc/img/monadic-chat.svg
284
296
  - doc/img/readme-example-beatles-html.png
Binary file