scout-ai 1.0.0 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. checksums.yaml +4 -4
  2. data/.vimproject +87 -15
  3. data/README.md +296 -0
  4. data/Rakefile +2 -0
  5. data/VERSION +1 -1
  6. data/doc/Agent.md +279 -0
  7. data/doc/Chat.md +258 -0
  8. data/doc/LLM.md +446 -0
  9. data/doc/Model.md +513 -0
  10. data/doc/RAG.md +129 -0
  11. data/lib/scout/llm/agent/chat.rb +48 -1
  12. data/lib/scout/llm/agent/delegate.rb +51 -0
  13. data/lib/scout/llm/agent/iterate.rb +44 -0
  14. data/lib/scout/llm/agent.rb +43 -22
  15. data/lib/scout/llm/ask.rb +47 -7
  16. data/lib/scout/llm/backends/anthropic.rb +147 -0
  17. data/lib/scout/llm/backends/bedrock.rb +1 -1
  18. data/lib/scout/llm/backends/ollama.rb +27 -30
  19. data/lib/scout/llm/backends/openai.rb +36 -41
  20. data/lib/scout/llm/backends/responses.rb +166 -113
  21. data/lib/scout/llm/chat.rb +270 -102
  22. data/lib/scout/llm/embed.rb +4 -4
  23. data/lib/scout/llm/mcp.rb +28 -0
  24. data/lib/scout/llm/parse.rb +1 -0
  25. data/lib/scout/llm/rag.rb +9 -0
  26. data/lib/scout/llm/tools/call.rb +76 -0
  27. data/lib/scout/llm/tools/knowledge_base.rb +159 -0
  28. data/lib/scout/llm/tools/mcp.rb +59 -0
  29. data/lib/scout/llm/tools/workflow.rb +106 -0
  30. data/lib/scout/llm/tools.rb +98 -141
  31. data/lib/scout-ai.rb +1 -0
  32. data/scout-ai.gemspec +31 -18
  33. data/scout_commands/agent/ask +59 -78
  34. data/scout_commands/documenter +148 -0
  35. data/scout_commands/llm/ask +3 -2
  36. data/scout_commands/llm/server +319 -0
  37. data/share/server/chat.html +138 -0
  38. data/share/server/chat.js +468 -0
  39. data/test/scout/llm/backends/test_anthropic.rb +134 -0
  40. data/test/scout/llm/backends/test_ollama.rb +1 -1
  41. data/test/scout/llm/backends/test_openai.rb +45 -6
  42. data/test/scout/llm/backends/test_responses.rb +124 -0
  43. data/test/scout/llm/test_agent.rb +1 -93
  44. data/test/scout/llm/test_ask.rb +3 -1
  45. data/test/scout/llm/test_chat.rb +43 -1
  46. data/test/scout/llm/test_mcp.rb +29 -0
  47. data/test/scout/llm/tools/test_knowledge_base.rb +22 -0
  48. data/test/scout/llm/tools/test_mcp.rb +11 -0
  49. data/test/scout/llm/tools/test_workflow.rb +39 -0
  50. metadata +56 -17
  51. data/README.rdoc +0 -18
  52. data/python/scout_ai/__pycache__/__init__.cpython-310.pyc +0 -0
  53. data/python/scout_ai/__pycache__/__init__.cpython-311.pyc +0 -0
  54. data/python/scout_ai/__pycache__/huggingface.cpython-310.pyc +0 -0
  55. data/python/scout_ai/__pycache__/huggingface.cpython-311.pyc +0 -0
  56. data/python/scout_ai/__pycache__/util.cpython-310.pyc +0 -0
  57. data/python/scout_ai/__pycache__/util.cpython-311.pyc +0 -0
  58. data/python/scout_ai/atcold/plot_lib.py +0 -141
  59. data/python/scout_ai/atcold/spiral.py +0 -27
  60. data/python/scout_ai/huggingface/train/__pycache__/__init__.cpython-310.pyc +0 -0
  61. data/python/scout_ai/huggingface/train/__pycache__/next_token.cpython-310.pyc +0 -0
  62. data/python/scout_ai/language_model.py +0 -70
  63. /data/{python/scout_ai/atcold/__init__.py → test/scout/llm/tools/test_call.rb} +0 -0
data/lib/scout-ai.rb CHANGED
@@ -5,5 +5,6 @@ require 'scout/resource'
  Path.add_path :scout_ai_lib, File.join(Path.caller_lib_dir(__FILE__), "{TOPLEVEL}/{SUBPATH}")
  
  require 'scout/llm/ask'
+ require 'scout/llm/chat'
  require 'scout/llm/embed'
  require 'scout/llm/agent'
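
The only functional change in this file is that requiring the gem now also loads the chat helpers alongside ask, embed and agent. A minimal sketch of what that exposes (hypothetical usage; LLM.chat and LLM.print are the helpers used by the command scripts further down, and their exact signatures are an assumption):

    require 'scout-ai'

    # Turn a plain question into chat messages and render them back as a transcript.
    # In the scripts below LLM.chat also accepts a chat file path.
    conversation = LLM.chat("What does scout-ai do?")
    puts LLM.print(conversation)
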
data/scout-ai.gemspec CHANGED
@@ -2,37 +2,45 @@
  # DO NOT EDIT THIS FILE DIRECTLY
  # Instead, edit Juwelier::Tasks in Rakefile, and run 'rake gemspec'
  # -*- encoding: utf-8 -*-
- # stub: scout-ai 1.0.0 ruby lib
+ # stub: scout-ai 1.1.0 ruby lib
  
  Gem::Specification.new do |s|
  s.name = "scout-ai".freeze
- s.version = "1.0.0".freeze
+ s.version = "1.1.0".freeze
  
  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
  s.authors = ["Miguel Vazquez".freeze]
- s.date = "2025-06-05"
+ s.date = "1980-01-02"
  s.description = "assorted functionalities to help scouts use AI".freeze
  s.email = "mikisvaz@gmail.com".freeze
  s.executables = ["scout-ai".freeze]
  s.extra_rdoc_files = [
  "LICENSE",
  "LICENSE.txt",
- "README.rdoc"
+ "README.md"
  ]
  s.files = [
  ".document",
  ".vimproject",
  "LICENSE",
  "LICENSE.txt",
- "README.rdoc",
+ "README.md",
  "Rakefile",
  "VERSION",
  "bin/scout-ai",
+ "doc/Agent.md",
+ "doc/Chat.md",
+ "doc/LLM.md",
+ "doc/Model.md",
+ "doc/RAG.md",
  "lib/scout-ai.rb",
  "lib/scout/llm/agent.rb",
  "lib/scout/llm/agent/chat.rb",
+ "lib/scout/llm/agent/delegate.rb",
+ "lib/scout/llm/agent/iterate.rb",
  "lib/scout/llm/ask.rb",
+ "lib/scout/llm/backends/anthropic.rb",
  "lib/scout/llm/backends/bedrock.rb",
  "lib/scout/llm/backends/huggingface.rb",
  "lib/scout/llm/backends/ollama.rb",
@@ -42,9 +50,14 @@ Gem::Specification.new do |s|
  "lib/scout/llm/backends/responses.rb",
  "lib/scout/llm/chat.rb",
  "lib/scout/llm/embed.rb",
+ "lib/scout/llm/mcp.rb",
  "lib/scout/llm/parse.rb",
  "lib/scout/llm/rag.rb",
  "lib/scout/llm/tools.rb",
+ "lib/scout/llm/tools/call.rb",
+ "lib/scout/llm/tools/knowledge_base.rb",
+ "lib/scout/llm/tools/mcp.rb",
+ "lib/scout/llm/tools/workflow.rb",
  "lib/scout/llm/utils.rb",
  "lib/scout/model/base.rb",
  "lib/scout/model/python/base.rb",
@@ -61,37 +74,30 @@ Gem::Specification.new do |s|
  "lib/scout/model/util/run.rb",
  "lib/scout/model/util/save.rb",
  "python/scout_ai/__init__.py",
- "python/scout_ai/__pycache__/__init__.cpython-310.pyc",
- "python/scout_ai/__pycache__/__init__.cpython-311.pyc",
- "python/scout_ai/__pycache__/huggingface.cpython-310.pyc",
- "python/scout_ai/__pycache__/huggingface.cpython-311.pyc",
- "python/scout_ai/__pycache__/util.cpython-310.pyc",
- "python/scout_ai/__pycache__/util.cpython-311.pyc",
- "python/scout_ai/atcold/__init__.py",
- "python/scout_ai/atcold/plot_lib.py",
- "python/scout_ai/atcold/spiral.py",
  "python/scout_ai/huggingface/data.py",
  "python/scout_ai/huggingface/eval.py",
  "python/scout_ai/huggingface/model.py",
  "python/scout_ai/huggingface/rlhf.py",
  "python/scout_ai/huggingface/train/__init__.py",
- "python/scout_ai/huggingface/train/__pycache__/__init__.cpython-310.pyc",
- "python/scout_ai/huggingface/train/__pycache__/next_token.cpython-310.pyc",
  "python/scout_ai/huggingface/train/next_token.py",
- "python/scout_ai/language_model.py",
  "python/scout_ai/util.py",
  "scout-ai.gemspec",
  "scout_commands/agent/ask",
  "scout_commands/agent/kb",
+ "scout_commands/documenter",
  "scout_commands/llm/ask",
  "scout_commands/llm/process",
+ "scout_commands/llm/server",
  "scout_commands/llm/template",
+ "share/server/chat.html",
+ "share/server/chat.js",
  "test/data/cat.jpg",
  "test/data/person/brothers",
  "test/data/person/identifiers",
  "test/data/person/marriages",
  "test/data/person/parents",
  "test/scout/llm/agent/test_chat.rb",
+ "test/scout/llm/backends/test_anthropic.rb",
  "test/scout/llm/backends/test_bedrock.rb",
  "test/scout/llm/backends/test_huggingface.rb",
  "test/scout/llm/backends/test_ollama.rb",
@@ -103,10 +109,15 @@ Gem::Specification.new do |s|
  "test/scout/llm/test_ask.rb",
  "test/scout/llm/test_chat.rb",
  "test/scout/llm/test_embed.rb",
+ "test/scout/llm/test_mcp.rb",
  "test/scout/llm/test_parse.rb",
  "test/scout/llm/test_rag.rb",
  "test/scout/llm/test_tools.rb",
  "test/scout/llm/test_utils.rb",
+ "test/scout/llm/tools/test_call.rb",
+ "test/scout/llm/tools/test_knowledge_base.rb",
+ "test/scout/llm/tools/test_mcp.rb",
+ "test/scout/llm/tools/test_workflow.rb",
  "test/scout/model/python/huggingface/causal/test_next_token.rb",
  "test/scout/model/python/huggingface/test_causal.rb",
  "test/scout/model/python/huggingface/test_classification.rb",
@@ -120,11 +131,13 @@ Gem::Specification.new do |s|
  ]
  s.homepage = "http://github.com/mikisvaz/scout-ai".freeze
  s.licenses = ["MIT".freeze]
- s.rubygems_version = "3.6.6".freeze
+ s.rubygems_version = "3.7.0.dev".freeze
  s.summary = "AI gear for scouts".freeze
  
  s.specification_version = 4
  
  s.add_runtime_dependency(%q<scout-rig>.freeze, [">= 0".freeze])
+ s.add_runtime_dependency(%q<ruby-openai>.freeze, [">= 0".freeze])
+ s.add_runtime_dependency(%q<ruby-mcp-client>.freeze, [">= 0".freeze])
  end
  
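Beyond the packaging updates (README.md instead of README.rdoc, the new doc/, tools/, mcp and server files, removal of the committed .pyc artifacts), the gemspec now declares two extra runtime dependencies. A sketch of what a downstream Gemfile pulls in as a result (version pin and comments illustrative, not taken from the gemspec):

    # Gemfile
    source 'https://rubygems.org'

    gem 'scout-ai', '1.1.0'
    # Resolved transitively as runtime dependencies of scout-ai 1.1.0:
    #   scout-rig       (>= 0)
    #   ruby-openai     (>= 0)  # presumably backing the OpenAI-style backends
    #   ruby-mcp-client (>= 0)  # presumably backing lib/scout/llm/mcp.rb and tools/mcp.rb
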
data/scout_commands/agent/ask CHANGED
@@ -22,8 +22,9 @@ Use STDIN to add context to the question
  -c--chat* Follow a conversation
  -m--model* Model to use
  -e--endpoint* Endpoint to use
- -f--file* Incorporate file at the start
+ -f--file* Incorporate file
  -wt--workflow_tasks* Export these tasks to the agent
+ -i--imports* Chat files to import, separated by comma
  EOF
  if options[:help]
  if defined? scout_usage
@@ -36,86 +37,42 @@ end
  
  Log.severity = options.delete(:log).to_i if options.include? :log
  
- file = options.delete(:file)
-
- agent, *question_parts = ARGV
-
- agent_path = Scout.var.Agent[agent].find_with_extension(:rb)
-
- agent = LLM::Agent.load agent_path
-
- #workflow_tasks = options.delete(:workflow_tasks)
- #
- #workflow = begin
- # if agent_dir.workflow.set_extension('rb').exists?
- # Workflow.require_workflow agent_dir.workflow.set_extension('rb').find
- # else
- # Misc.with_env "SCOUT_WORKFLOW_AUTOINSTALL", false do
- # Workflow.require_workflow agent
- # end
- # end
- # rescue
- # end
- #
- #if workflow_tasks and workflow
- # workflow.clear_exports
- # workflow.export_asynchronous *workflow_tasks.split(',')
- #end
- #
- #knowledge_base = KnowledgeBase.load(agent_dir.knowledge_base) if agent_dir.knowledge_base.exists?
- #knowledge_base ||= begin workflow.knowledge_base rescue nil end || KnowledgeBase.new(agent_dir.knowledge_base)
- #
- #agent = LLM::Agent.new **options.merge(workflow: workflow, knowledge_base: knowledge_base)
-
- #question = question_parts * " "
- #
- #if template = options.delete(:template)
- # if Open.exists?(template)
- # template_question = Open.read(template)
- # else
- # template_question = Scout.questions[template].read
- # end
- # if template_question.include?('???')
- # question = template_question.sub('???', question)
- # else
- # question = template_question
- # end
- #end
- #
- #if question.include?('...')
- # context = file ? Open.read(file) : STDIN.read
- # question = question.sub('...', context)
- #end
- #
- #if chat
- # conversation = Open.exist?(chat)? Open.read(chat) : ""
- # question = question.empty? ? conversation : conversation + "\nuser:\n" + question
- # new = agent.ask(question, options)
- # conversation = question + "\nassistant:\n" + new
- # Open.write(chat, conversation)
- #else
- # puts agent.ask(question, options)
- #end
-
- file, chat, inline, template, dry_run = IndiferentHash.process_options options, :file, :chat, :inline, :template, :dry_run
-
- agent, question = ARGV * " "
+ agent_name, *question_parts = ARGV
+
+ question = question_parts * " "
+
+ file, chat, inline, template, dry_run, imports, endpoint, model = IndiferentHash.process_options options, :file, :chat, :inline, :template, :dry_run, :imports, :endpoint, :model
+
+ file = Path.setup(file) if file
+
+ imports = imports.split(/,\s*/) if imports
  
  agent_name ||= 'default'
- agent_file = Scout.chats[agent_name]
  
- agent_file = agent_file.find_with_extension('rb')
+ agent_file = Scout.workflows[agent_name]
+
+ agent_file = Scout.chats[agent_name] unless agent_file.exists?
+
+ agent_file = agent_file.find_with_extension('rb') unless agent_file.exists?
+
  
  if agent_file.exists?
  if agent_file.directory?
- agent = load agent_file.agent.find_with_extension('rb')
+ if agent_file.agent.find_with_extension('rb').exists?
+ agent = load agent_file.agent.find_with_extension('rb')
+ else
+ agent = LLM::Agent.load_from_path agent_file
+ end
  else
  agent = load agent_file
  end
  else
- raise ParameterException agent_file
+ #raise ParameterException agent_file
  end
  
+ agent.other_options[:endpoint] = endpoint if endpoint
+ agent.other_options[:model] = model if model
+
  if template
  if Open.exists?(template)
  template_question = Open.read(template)
@@ -139,28 +96,47 @@ if question.include?('...')
  context = file ? Open.read(file) : STDIN.read
  question = question.sub('...', context)
  elsif file
- question = "<file basename=#{File.basename file}>[[[\n" + Open.read(file) + "\n]]]</file>"
+ question = "<file basename=#{File.basename file}>\n" + Open.read(file) + "\n</file>\n\n" + question
  end
  
  if chat
- conversation = Open.exist?(chat)? LLM.chat(chat) : []
+ #conversation = Open.exist?(chat)? LLM.chat(chat) : []
+ #convo_options = LLM.options conversation
+ #conversation = question.empty? ? conversation : conversation + LLM.chat(question)
+
+ #if dry_run
+ # ppp LLM.print conversation
+ # exit 0
+ #end
+
+ #new = agent.ask(conversation, convo_options.merge(options.merge(return_messages: true)))
+ #conversation = Open.read(chat) + LLM.print(new)
+ #Open.write(chat, conversation)
+
+ conversation = Open.exist?(chat)? LLM.chat(chat) : []
  convo_options = LLM.options conversation
- conversation = question.empty? ? conversation : conversation + LLM.chat(question)
+ agent.start(Chat.setup(conversation)) if conversation.any?
+ agent.current_chat.concat LLM.chat(question) if question && ! question.empty?
+ imports.each{|import| agent.import import } if imports
  
  if dry_run
  ppp LLM.print conversation
  exit 0
  end
- new = agent.ask(conversation, convo_options.merge(options.merge(return_messages: true)))
- conversation = Open.read(chat) + LLM.print(new)
- Open.write(chat, conversation)
+
+ new = agent.ask(agent.current_chat, convo_options.merge(options.merge(return_messages: true)))
+ conversation += LLM.chat(question) if question
+ conversation += new
+
+ Open.open(chat, mode: 'a'){|f| f.puts LLM.print(new) }
+ puts LLM.purge(new).last[:content]
  elsif inline
  
  file = Open.read inline
  
  new_file = ""
  while true
- pre, question, post =
+ pre, question, post =
  file.partition(/^\s*#\s*ask:(?:.*?)(?=^\s*[^\s#])/smu)
  
  break if post.empty?
@@ -169,7 +145,7 @@ elsif inline
  new_file << question
  clean_question = question.gsub('#', '').gsub(/\s+/,' ').sub(/.*ask:\s*/,'').strip
  chat = [
- {role: :system, content: "Write a succint reply with no commentary and no formatting."},
+ {role: :system, content: "Write a succint reply with no commentary and no formatting."},
  {role: :user, content: "Find the following question as a comment in the file give a response to be placed inline: #{question}"},
  LLM.tag('file', file, inline)
  ]
@@ -184,5 +160,10 @@ elsif inline
  new_file << file
  Open.write(inline, new_file)
  else
- puts agent.ask(question, options)
+ #conversation = Chat.setup(LLM.chat question)
+ #imports.each{|import| conversation.import import } if imports
+ #puts agent.ask(conversation, nil, options)
+ agent.current_chat.concat LLM.chat(question)
+ imports.each{|import| agent.import import } if imports
+ puts agent.chat
  end
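
The rewritten command resolves the agent from Scout.workflows first and Scout.chats second, keeps conversation state in agent.current_chat, and appends each reply to the chat file instead of rewriting it, so an invocation such as scout-ai agent ask default -c project.chat -i notes.chat "question" (file names hypothetical) resumes project.chat, imports notes.chat via the new -i flag, and appends the new exchange. A condensed sketch of the new chat branch, with the method names taken from the diff and their semantics assumed:

    conversation = Open.exist?(chat) ? LLM.chat(chat) : []
    agent.start(Chat.setup(conversation)) if conversation.any?           # resume prior messages
    agent.current_chat.concat LLM.chat(question) unless question.empty?  # add the new question
    imports.each { |import| agent.import import } if imports             # new -i/--imports flag

    new = agent.ask(agent.current_chat, options.merge(return_messages: true))
    Open.open(chat, mode: 'a') { |f| f.puts LLM.print(new) }             # append, don't rewrite
    puts LLM.purge(new).last[:content]                                   # echo the final reply
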
data/scout_commands/documenter ADDED
@@ -0,0 +1,148 @@
+ #!/usr/bin/env ruby
+
+ require 'scout'
+
+ $0 = "scout-ai #{$previous_commands.any? ? $previous_commands*" " + " " : "" }#{ File.basename(__FILE__) }" if $previous_commands
+
+ options = SOPT.setup <<EOF
+ Scout documenter tool
+
+ $ #{$0} [<options>] <topic>
+
+ Generates technical, example-driven documentation for a given Scout topic by
+ analyzing Ruby source and corresponding test files. For a specified topic, it
+ locates main and subtopic files, invokes an LLM agent to synthesize markdown
+ documentation using real test behavior and examples, then outputs comprehensive
+ topic and subtopic documentation files.
+
+ -h--help Print this help
+ EOF
+ if options[:help]
+ if defined? scout_usage
+ scout_usage
+ else
+ puts SOPT.doc
+ end
+ exit 0
+ end
+
+ require 'scout-ai'
+
+ topic = ARGV.first
+
+ raise MissingParameterException if topic.nil?
+
+ src_files = Scout.lib.scout.glob("#{topic}*/**/*") + Scout.lib.scout.glob("#{topic}.rb")
+ src_files.collect! do |file|
+ file.sub(Dir.pwd, '.')
+ end
+
+ bin_files = Scout.scout_commands.glob("**/*").select{|file| file.include?(topic) && ! file.directory? }
+
+ main = src_files.select{|f| f.split("/").length == 4}.first
+ subtopics = src_files.select{|f| f.split("/").length == 5}.collect{|f| File.basename(f).sub(".rb",'') }.uniq
+
+ def source_to_test(file)
+ file.sub(%r{\A./lib/}, './test/').sub(%r{([^/]+)\.rb\z}, 'test_\1.rb')
+ end
+
+ documenter = LLM::Agent.new
+
+ documenter.start_chat.system <<-EOF
+ You are a world-class Ruby documentation author. For each (source, test) file
+ pair given, produce technically precise module- and file-level documentation,
+ incorporating specific code usage and behavior from the test file as worked
+ examples, code idioms, and edge-case handling.
+
+ Never insert your own example code: always use live content from the tests as examples.
+
+ Integrate documentation and test-derived examples smoothly.
+
+ You will be given first the main topic documentation for the main file and
+ test_file, then you will be asked to produce documentation for a subtopic.
+
+ Finally you will be ask to aggregate all the documentation portions into
+ a final topic documentation file
+
+ User markdown
+
+ Avoid initial and final comments like: Certainly! I'll do this and that
+ EOF
+
+ documenter.start_chat.file main
+ documenter.start_chat.file source_to_test(main)
+
+ documenter.start_chat.user <<-EOF
+ This is the basic topic file. Write the markdown documentation for it.
+ EOF
+
+ docs = {}
+ subtopics.each do |subtopic|
+ src = src_files.select{|f| f.include? subtopic}.select{|f| f.end_with?(".rb") }
+ test = src.collect{|f| source_to_test(f) }.select{|f| Open.exists? f }.select{|f| f.end_with?(".rb")}
+
+ documenter.start
+ (src + test + bin_files).each do |file|
+ documenter.file file
+ end
+
+ documenter.start_chat.user <<-EOF
+ Write documentation for topic #{topic} subtopic #{subtopic}
+ EOF
+ docs[subtopic] = documenter.respond
+ end
+
+ documenter.start
+
+ docs.each do |subtopic, documentation|
+ documenter.user <<-EOF
+ Please construct a comprehensive documentation on topic #{topic}. For each
+ subtopic reproduce all the most important from the original documentation
+ files. The subtopic documentation files will not be available anymore, so
+ don't leave anything imporant out
+
+ <file subtopic=#{subtopic}>
+ #{documentation}
+ <file>
+ EOF
+ end
+
+ main_documentation = documenter.chat
+
+ documenter.start_chat.user <<-EOF
+ This is the revise documentation for the topic:
+
+ ---
+
+ #{main_documentation}
+
+ ---
+ EOF
+
+ revised_subtopics = {}
+ docs.each do |subtopic, documentation|
+ documenter.start
+
+ src = src_files.select{|f| f.include? subtopic}.select{|f| f.end_with?(".rb") }
+ test = src.collect{|f| source_to_test(f) }.select{|f| Open.exists? f }.select{|f| f.end_with?(".rb")}
+
+ documenter.start
+ (src + test + bin_files).each do |file|
+ documenter.file file
+ end
+
+ documenter.user <<-EOF
+ Please revise the subtopic documentation in light of the revised main_documentation
+
+ <file subtopic=#{subtopic}>
+ #{documentation}
+ <file>
+ EOF
+ revised_subtopics[subtopic] = documenter.respond
+ end
+
+
+ Open.write Scout.doc.lib.scout[topic + '.md'].find(:current), main_documentation
+ revised_subtopics.each do |subtopic,documentation|
+ Open.write Scout.doc.lib.scout[topic][subtopic + '.md'].find(:current), documentation
+ end
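
Besides generating the doc/*.md files shipped in this release, the documenter script doubles as a compact example of the Agent chat API introduced here: start_chat holds persistent instructions and attachments, start opens a fresh working chat on top of it, file attaches sources, and respond/chat produce replies. A stripped-down sketch of that pattern (file paths hypothetical; method semantics inferred from the script above):

    agent = LLM::Agent.new

    agent.start_chat.system "You are a Ruby documentation author."  # persistent system prompt
    agent.start_chat.file "lib/scout/llm/chat.rb"                   # context kept across chats

    agent.start                               # reset the working chat, keep start_chat
    agent.file "test/scout/llm/test_chat.rb"  # attach per-task context
    agent.user "Document this module using the tests as worked examples."
    markdown = agent.respond                  # single reply for the current chat
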
data/scout_commands/llm/ask CHANGED
@@ -78,6 +78,7 @@ if chat
  ppp LLM.print conversation
  exit 0
  end
+
  new = LLM.ask(conversation, convo_options.merge(options.merge(return_messages: true)))
  conversation = Open.read(chat) + LLM.print(new)
  Open.write(chat, conversation)
@@ -88,9 +89,9 @@ elsif inline
  new_file = ""
  while true
  pre, question, post =
- file.partition(/^\s*#\s*ask:(?:.*?)(?=^\s*[^\s#])/smu)
+ file.partition(/^\s*#\s*ask:(?:.*?)(?=^\s*[^\s#]|\z)/smu)
  
- break if post.empty?
+ break if question.empty?
  
  new_file << pre
  new_file << question
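
The regex change adds |\z to the lookahead so an ask comment at the very end of the file is still captured, and the loop now stops when no ask comment is matched (question.empty?) rather than when nothing follows it (post.empty?). A quick illustration of the difference, using throwaway strings rather than package content:

    ask_re = /^\s*#\s*ask:(?:.*?)(?=^\s*[^\s#]|\z)/smu

    with_code    = "x = 1\n# ask: what does this do?\ny = 2\n"
    trailing_ask = "x = 1\n# ask: what does this do?\n"

    p with_code.partition(ask_re)
    # => ["x = 1\n", "# ask: what does this do?\n", "y = 2\n"]
    p trailing_ask.partition(ask_re)
    # => ["x = 1\n", "# ask: what does this do?\n", ""]   # the old regex found no match here
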