aia 0.3.19 → 0.4.1

Sign up to get free protection for your applications and to get access to all the features.
data/lib/aia/tools.rb CHANGED
@@ -1,77 +1,52 @@
1
1
  # lib/aia/tools.rb
2
2
 
3
- class AIA::Tools
4
- @@subclasses = []
5
-
6
- def self.inherited(subclass)
7
- @@subclasses << subclass
8
- end
3
+ require 'hashie'
9
4
 
10
- attr_accessor :role, :name, :description, :url, :install
11
-
5
+ class AIA::Tools
6
+ @@catalog = []
12
7
 
13
- def initialize(*)
14
- @role = :role
15
- @name = self.class.name.split('::').last.downcase
16
- @description = "description"
17
- @url = "URL"
18
- @install = "brew install #{name}"
19
- end
8
+ def meta = self.class::meta
20
9
 
10
+ class << self
11
+ def inherited(subclass)
12
+ subclass_meta = Hashie::Mash.new(klass: subclass)
13
+ subclass.instance_variable_set(:@_metadata, subclass_meta)
21
14
 
22
- def installed?
23
- path = `which #{name}`.chomp
24
- !path.empty? && File.executable?(path)
25
- end
15
+ @@catalog << subclass_meta
16
+ end
26
17
 
27
18
 
28
- def help
29
- `#{name} --help`
30
- end
31
-
19
+ def meta(metadata = nil)
20
+ return @_metadata if metadata.nil?
32
21
 
33
- def version
34
- `#{name} --version`
35
- end
22
+ @_metadata = Hashie::Mash.new(metadata)
23
+ entry = @@catalog.detect { |item| item[:klass] == self }
24
+
25
+ entry.merge!(metadata) if entry
26
+ end
36
27
 
37
28
 
38
- #########################################
39
- class << self
40
- def tools
41
- @@subclasses.map(&:name)
29
+ def get_meta
30
+ @_metadata
42
31
  end
43
32
 
44
33
 
45
- def verify_tools
46
- missing_tools = @@subclasses.map(&:new).reject(&:installed?)
47
- unless missing_tools.empty?
48
- puts format_missing_tools_response(missing_tools)
34
+ def search_for(criteria = {})
35
+ @@catalog.select do |meta|
36
+ criteria.all? { |k, v| meta[k] == v }
49
37
  end
50
38
  end
51
39
 
52
40
 
53
- def format_missing_tools_response(missing_tools)
54
- response = <<~EOS
55
-
56
- WARNING: AIA makes use of external CLI tools that are missing.
57
-
58
- Please install the following tools:
41
+ def catalog
42
+ @@catalog
43
+ end
59
44
 
60
- EOS
61
45
 
62
- missing_tools.each do |tool|
63
- response << " #{tool.name}: install from #{tool.url}\n"
46
+ def load_tools
47
+ Dir.glob(File.join(File.dirname(__FILE__), 'tools', '*.rb')).each do |file|
48
+ require file
64
49
  end
65
-
66
- response
67
50
  end
68
51
  end
69
52
  end
70
-
71
-
72
- (Pathname.new(__dir__)+"tools")
73
- .glob('*.rb')
74
- .each do |tool|
75
- require_relative "tools/#{tool.basename.to_s}"
76
- end
77
-
data/lib/aia.rb CHANGED
@@ -5,7 +5,8 @@ include DebugMe
5
5
 
6
6
  require 'hashie'
7
7
  require 'pathname'
8
- require 'readline'
8
+ require 'reline'
9
+ require 'shellwords'
9
10
  require 'tempfile'
10
11
 
11
12
  require 'prompt_manager'
data/man/aia.1 CHANGED
@@ -22,68 +22,67 @@ This is an optional argument\. One or more files can be added to the prompt as
22
22
  External options are optional\. Anything that follow \[lq] \-\- \[lq] will be sent to the backend gen\-AI tool\. For example \[lq]\-\- \-C \-m gpt4\-128k\[rq] will send the options \[lq]\-C \-m gpt4\-128k\[rq] to the backend gen\-AI tool\. \fBaia\fR will not validate these external options before sending them to the backend gen\-AI tool\.
23
23
  .SH OPTIONS
24
24
  .TP
25
- \fB\-c\fR, \fB\-\-config\fR \fIPATH\[ru]TO\[ru]CONFIG\[ru]FILE\fP
26
- Load Config File \- default: nil
25
+ \fB\-\-chat\fR
26
+ begin a chat session with the backend after the initial prompt response; will set \-\-no\-output so that the backend response comes to STDOUT\.
27
+ .TP
28
+ \fB\-\-completion\fR \fISHELL\[ru]NAME\fP
27
29
  .TP
28
30
  \fB\-\-dump\fR \fIFORMAT\fP
29
31
  .TP
30
- \fB\-e\fR, \fB\-\-edit\fR
31
- Edit the Prompt File \- default: false
32
+ \fB\-\-model\fR \fINAME\fP
33
+ Name of the LLM model to use \- default is gpt\-4\-1106\-preview
32
34
  .TP
33
- \fB\-d\fR, \fB\-\-debug\fR
34
- Turn On Debugging \- default: false
35
+ \fB\-\-speak\fR
36
+ Simple implementation\. Uses the \[lq]say\[rq] command to speak the response\. Fun with \-\-chat
35
37
  .TP
36
- \fB\-v\fR, \fB\-\-verbose\fR
37
- Be Verbose \- default: false
38
+ \fB\-\-terse\fR
39
+ Add a clause to the prompt text that instructs the backend to be terse in its response\.
38
40
  .TP
39
41
  \fB\-\-version\fR
40
- Print Version \- default: false
42
+ Print Version \- default is false
41
43
  .TP
42
- \fB\-h\fR, \fB\-\-help\fR
43
- Show Usage \- default: false
44
+ \fB\-b\fR, \fB\-\-\[lB]no\[rB]\-backend\fR \fILLM TOOL\fP
45
+ Specify the backend prompt resolver \- default is mods
44
46
  .TP
45
- \fB\-s\fR, \fB\-\-search\fR \fITERM\fP
46
- Search for prompts contain TERM \- default: nil
47
+ \fB\-c\fR, \fB\-\-config\fR \fIPATH\[ru]TO\[ru]CONFIG\[ru]FILE\fP
48
+ Load Config File \- default is nil
47
49
  .TP
48
- \fB\-f\fR, \-\-fuzzy\`
49
- Use Fuzzy Matching when searching for a prompt \- default: false
50
+ \fB\-d\fR, \fB\-\-debug\fR
51
+ Turn On Debugging \- default is false
50
52
  .TP
51
- \fB\-\-completion\fR \fISHELL\[ru]NAME\fP
53
+ \fB\-e\fR, \fB\-\-edit\fR
54
+ Edit the Prompt File \- default is false
52
55
  .TP
53
- \fB\-o\fR, \fB\-\-\[lB]no\[rB]\-output\fR \fIPATH\[ru]TO\[ru]OUTPUT\[ru]FILE\fP
54
- Out FILENAME \- default: \.\[sl]temp\.md
56
+ \fB\-f\fR, \fB\-\-fuzzy\fR
57
+ Use Fuzzy Matching when searching for a prompt \- default is false
58
+ .TP
59
+ \fB\-h\fR, \fB\-\-help\fR
60
+ Show Usage \- default is false
55
61
  .TP
56
62
  \fB\-l\fR, \fB\-\-\[lB]no\[rB]\-log\fR \fIPATH\[ru]TO\[ru]LOG\[ru]FILE\fP
57
- Log FILEPATH \- default: \[Do]HOME\[sl]\.prompts\[sl]prompts\.log
63
+ Log FILEPATH \- default is \[Do]HOME\[sl]\.prompts\[sl]prompts\.log
58
64
  .TP
59
65
  \fB\-m\fR, \fB\-\-\[lB]no\[rB]\-markdown\fR
60
- Format with Markdown \- default: true
66
+ Format with Markdown \- default is true
61
67
  .TP
62
- \fB\-\-model\fR \fINAME\fP
63
- Name of the LLM model to use \- default: gpt\-4\-1106\-preview
68
+ \fB\-o\fR, \fB\-\-\[lB]no\[rB]\-output\fR \fIPATH\[ru]TO\[ru]OUTPUT\[ru]FILE\fP
69
+ Out FILENAME \- default is \.\[sl]temp\.md
64
70
  .TP
65
71
  \fB\-p\fR, \fB\-\-prompts\fR \fIPATH\[ru]TO\[ru]DIRECTORY\fP
66
- Directory containing the prompt files \- default: \[ti]\[sl]\.prompts
72
+ Directory containing the prompt files \- default is \[ti]\[sl]\.prompts
67
73
  .TP
68
- \fB\-b\fR, \fB\-\-\[lB]no\[rB]\-backend\fR \fILLM TOOL\fP
69
- Specify the backend prompt resolver \- default: :mods
70
- .SH ENVIRONMENT
74
+ \fB\-v\fR, \fB\-\-verbose\fR
75
+ Be Verbose \- default is false
76
+ .SH CONFIGURATION HIERARCHY
71
77
  .PP
72
- The aia CLI uses the following environment variables:
73
- .RS
74
- .IP \(bu 2
75
- \fBAIA\[ru]PROMPTS\[ru]DIR\fR: Path to the directory containing prompts files \- default: \fB\[Do]HOME\[sl]\.prompts\[ru]dir\fR
76
- .IP \(bu 2
77
- \fBAIA\[ru]BACKEND\fR: The AI command\-line program used \- default: \fBmods\fR
78
- .IP \(bu 2
79
- \fBEDITOR\fR: The text editor used by the edit option \- default: edit
80
- .IP \(bu 2
81
- \fBAIA\[ru]MODEL\fR: The AI model specification \- default: \fBgpt\-4\-1106\-preview\fR
82
- .IP \(bu 2
83
- \fBAIA\[ru]OUTPUT\fR: The default filename for output \- default: \fB\.\[sl]temp\.md\fR
84
- .IP \(bu 2
85
- \fBAIA\[ru]PROMPT\[ru]LOG\fR: The default filepath for the prompts log \- default: \fB\[Do]HOME\[sl]\.prompts\[sl]\[ru]prompts\.log\fR
86
- .RE
78
+ System Environment Variables (envars) that are all uppercase and begin with \[lq]AIA\[ru]\[rq] can be used to over\-ride the default configuration settings\. For example setting \[lq]export AIA\[ru]PROMPTS\[ru]DIR\[eq]\[ti]\[sl]Documents\[sl]prompts\[rq] will over\-ride the default configuration; however, a config value provided by a command line option will over\-ride an envar setting\.
79
+ .PP
80
+ Configuration values found in a config file will over\-ride all other values set for a config item\.
81
+ .PP
82
+ \[rq]\[sl]\[sl]config\[rq] directives found inside a prompt file over\-rides that config item regardless of where the value was set\.
83
+ .PP
84
+ For example \[lq]\[sl]\[sl]config chat? \[eq] true\[rq] within a prompt will set up the back and forth chat session for that specific prompt regardless of the command line options or the envar AIA\[ru]CHAT settings
85
+ .SH OpenAI ACCOUNT IS REQUIRED
87
86
  .PP
88
87
  Additionally, the program requires an OpenAI access key, which can be specified using one of the following environment variables:
89
88
  .RS
@@ -98,9 +97,19 @@ Currently there is not specific standard for name of the OpenAI key\. Some prog
98
97
  To acquire an OpenAI access key, first create an account on the OpenAI platform, where further documentation is available\.
99
98
  .SH USAGE NOTES
100
99
  .PP
101
- \fBaia\fR is designed for flexibility, allowing users to pass prompt ids and context files as arguments\. Some options change the behavior of the output, such as \fB\-\-output\fR for specifying a file or \fB\-\-no\-output\fR for disabling file output in favor of standard output\.
100
+ \fBaia\fR is designed for flexibility, allowing users to pass prompt ids and context files as arguments\. Some options change the behavior of the output, such as \fB\-\-output\fR for specifying a file or \fB\-\-no\-output\fR for disabling file output in favor of standard output (STDOUT)\.
102
101
  .PP
103
102
  The \fB\-\-completion\fR option displays a script that enables prompt ID auto\-completion for bash, zsh, or fish shells\. It\[cq]s crucial to integrate the script into the shell\[cq]s runtime to take effect\.
103
+ .PP
104
+ The \fB\-\-dump\fR options will send the current configuration to STDOUT in the format requested\. Both YAML and TOML formats are supported\.
105
+ .SH PROMPT DIRECTIVES
106
+ .PP
107
+ Within a prompt text file any line that begins with \[lq]\[sl]\[sl]\[rq] is considered a prompt directive\. There are numerous prompt directives available\. In the discussion above on the configuration you learned about the \[lq]\[sl]\[sl]config\[rq] directive\.
108
+ .PP
109
+ Detailed discussion on individual prompt directives is TBD\. Most likely it will be handled in the GitHub wiki
110
+ .UR https:\[sl]\[sl]github\.com\[sl]MadBomber\[sl]aia
111
+ .UE
112
+ \.
104
113
  .SH SEE ALSO
105
114
  .RS
106
115
  .IP \(bu 2
data/man/aia.1.md CHANGED
@@ -1,9 +1,11 @@
1
1
  # aia 1 "2024-01-01" AIA "User Manuals"
2
2
 
3
3
  ## NAME
4
+
4
5
  aia - command-line interface for an AI assistant
5
6
 
6
7
  ## SYNOPSIS
8
+
7
9
  aia [options]* PROMPT_ID [CONTEXT_FILE]* [-- EXTERNAL_OPTIONS+]
8
10
 
9
11
  ## DESCRIPTION
@@ -23,63 +25,72 @@ The aia command-line tool is an interface for interacting with an AI model backe
23
25
 
24
26
  ## OPTIONS
25
27
 
26
- `-c`, `--config` *PATH_TO_CONFIG_FILE*
27
- : Load Config File - default: nil
28
+ `--chat`
29
+ : begin a chat session with the backend after the initial prompt response; will set --no-output so that the backend response comes to STDOUT.
30
+
31
+ `--completion` *SHELL_NAME*
32
+ : Show completion script for bash|zsh|fish - default is nil
28
33
 
29
34
  `--dump` *FORMAT*
30
- : Dump a Config File in [yaml | toml] to STDOUT - default: nil
35
+ : Dump a Config File in [yaml | toml] to STDOUT - default is nil
31
36
 
32
- `-e`, `--edit`
33
- : Edit the Prompt File - default: false
37
+ `--model` *NAME*
38
+ : Name of the LLM model to use - default is gpt-4-1106-preview
34
39
 
35
- `-d`, `--debug`
36
- : Turn On Debugging - default: false
40
+ `--speak`
41
+ : Simple implementation. Uses the "say" command to speak the response. Fun with --chat
37
42
 
38
- `-v`, `--verbose`
39
- : Be Verbose - default: false
43
+ `--terse`
44
+ : Add a clause to the prompt text that instructs the backend to be terse in its response.
40
45
 
41
46
  `--version`
42
- : Print Version - default: false
47
+ : Print Version - default is false
43
48
 
44
- `-h`, `--help`
45
- : Show Usage - default: false
49
+ `-b`, `--[no]-backend` *LLM TOOL*
50
+ : Specify the backend prompt resolver - default is mods
46
51
 
47
- `-s`, `--search` *TERM*
48
- : Search for prompts contain TERM - default: nil
52
+ `-c`, `--config` *PATH_TO_CONFIG_FILE*
53
+ : Load Config File - default is nil
49
54
 
50
- `-f`, --fuzzy`
51
- : Use Fuzzy Matching when searching for a prompt - default: false
55
+ `-d`, `--debug`
56
+ : Turn On Debugging - default is false
52
57
 
53
- `--completion` *SHELL_NAME*
54
- : Show completion script for bash|zsh|fish - default: nil
58
+ `-e`, `--edit`
59
+ : Edit the Prompt File - default is false
55
60
 
56
- `-o`, `--[no]-output` *PATH_TO_OUTPUT_FILE*
57
- : Out FILENAME - default: ./temp.md
61
+ `-f`, `--fuzzy`
62
+ : Use Fuzzy Matching when searching for a prompt - default is false
63
+
64
+ `-h`, `--help`
65
+ : Show Usage - default is false
58
66
 
59
67
  `-l`, `--[no]-log` *PATH_TO_LOG_FILE*
60
- : Log FILEPATH - default: $HOME/.prompts/prompts.log
68
+ : Log FILEPATH - default is $HOME/.prompts/prompts.log
61
69
 
62
70
  `-m`, `--[no]-markdown`
63
- : Format with Markdown - default: true
71
+ : Format with Markdown - default is true
64
72
 
65
- `--model` *NAME*
66
- : Name of the LLM model to use - default: gpt-4-1106-preview
73
+ `-o`, `--[no]-output` *PATH_TO_OUTPUT_FILE*
74
+ : Out FILENAME - default is ./temp.md
67
75
 
68
76
  `-p`, `--prompts` *PATH_TO_DIRECTORY*
69
- : Directory containing the prompt files - default: ~/.prompts
77
+ : Directory containing the prompt files - default is ~/.prompts
70
78
 
71
- `-b`, `--[no]-backend` *LLM TOOL*
72
- : Specify the backend prompt resolver - default: :mods
79
+ `-v`, `--verbose`
80
+ : Be Verbose - default is false
81
+
82
+
83
+ ## CONFIGURATION HIERARCHY
84
+
85
+ System Environment Variables (envars) that are all uppercase and begin with "AIA_" can be used to over-ride the default configuration settings. For example setting "export AIA_PROMPTS_DIR=~/Documents/prompts" will over-ride the default configuration; however, a config value provided by a command line option will over-ride an envar setting.
73
86
 
74
- ## ENVIRONMENT
75
- The aia CLI uses the following environment variables:
87
+ Configuration values found in a config file will over-ride all other values set for a config item.
76
88
 
77
- - `AIA_PROMPTS_DIR`: Path to the directory containing prompts files - default: `$HOME/.prompts_dir`
78
- - `AIA_BACKEND`: The AI command-line program used - default: `mods`
79
- - `EDITOR`: The text editor used by the edit option - default: edit
80
- - `AIA_MODEL`: The AI model specification - default: `gpt-4-1106-preview`
81
- - `AIA_OUTPUT`: The default filename for output - default: `./temp.md`
82
- - `AIA_PROMPT_LOG`: The default filepath for the prompts log - default: `$HOME/.prompts/_prompts.log`
89
+ "//config" directives found inside a prompt file over-rides that config item regardless of where the value was set.
90
+
91
+ For example "//config chat? = true" within a prompt will set up the back and forth chat session for that specific prompt regardless of the command line options or the envar AIA_CHAT settings
92
+
93
+ ## OpenAI ACCOUNT IS REQUIRED
83
94
 
84
95
  Additionally, the program requires an OpenAI access key, which can be specified using one of the following environment variables:
85
96
 
@@ -90,12 +101,22 @@ Currently there is not specific standard for name of the OpenAI key. Some progr
90
101
 
91
102
  To acquire an OpenAI access key, first create an account on the OpenAI platform, where further documentation is available.
92
103
 
93
- ## USAGE NOTES
94
- `aia` is designed for flexibility, allowing users to pass prompt ids and context files as arguments. Some options change the behavior of the output, such as `--output` for specifying a file or `--no-output` for disabling file output in favor of standard output.
104
+ ## USAGE NOTES
105
+
106
+ `aia` is designed for flexibility, allowing users to pass prompt ids and context files as arguments. Some options change the behavior of the output, such as `--output` for specifying a file or `--no-output` for disabling file output in favor of standard output (STDOUT).
95
107
 
96
108
  The `--completion` option displays a script that enables prompt ID auto-completion for bash, zsh, or fish shells. It's crucial to integrate the script into the shell's runtime to take effect.
97
109
 
98
- ## SEE ALSO
110
+ The `--dump` options will send the current configuration to STDOUT in the format requested. Both YAML and TOML formats are supported.
111
+
112
+ ## PROMPT DIRECTIVES
113
+
114
+ Within a prompt text file any line that begins with "//" is considered a prompt directive. There are numerous prompt directives available. In the discussion above on the configuration you learned about the "//config" directive.
115
+
116
+ Detailed discussion on individual prompt directives is TBD. Most likely it will be handled in the [github wiki](https://github.com/MadBomber/aia).
117
+
118
+ ## SEE ALSO
119
+
99
120
  - [OpenAI Platform Documentation](https://platform.openai.com/docs/overview) for more information on [obtaining access tokens](https://platform.openai.com/account/api-keys) and working with OpenAI models.
100
121
 
101
122
  - [mods](https://github.com/charmbracelet/mods) for more information on `mods` - AI for the command line, built for pipelines. LLM based AI is really good at interpreting the output of commands and returning the results in CLI friendly text formats like Markdown. Mods is a simple tool that makes it super easy to use AI on the command line and in your pipelines. Mods works with [OpenAI](https://platform.openai.com/account/api-keys) and [LocalAI](https://github.com/go-skynet/LocalAI)
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: aia
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.3.19
4
+ version: 0.4.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Dewayne VanHoozer
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2023-12-26 00:00:00.000000000 Z
11
+ date: 2023-12-31 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: hashie
@@ -26,6 +26,20 @@ dependencies:
26
26
  version: '0'
27
27
  - !ruby/object:Gem::Dependency
28
28
  name: prompt_manager
29
+ requirement: !ruby/object:Gem::Requirement
30
+ requirements:
31
+ - - ">="
32
+ - !ruby/object:Gem::Version
33
+ version: 0.4.1
34
+ type: :runtime
35
+ prerelease: false
36
+ version_requirements: !ruby/object:Gem::Requirement
37
+ requirements:
38
+ - - ">="
39
+ - !ruby/object:Gem::Version
40
+ version: 0.4.1
41
+ - !ruby/object:Gem::Dependency
42
+ name: reline
29
43
  requirement: !ruby/object:Gem::Requirement
30
44
  requirements:
31
45
  - - ">="
@@ -52,6 +66,20 @@ dependencies:
52
66
  - - ">="
53
67
  - !ruby/object:Gem::Version
54
68
  version: '0'
69
+ - !ruby/object:Gem::Dependency
70
+ name: shellwords
71
+ requirement: !ruby/object:Gem::Requirement
72
+ requirements:
73
+ - - ">="
74
+ - !ruby/object:Gem::Version
75
+ version: '0'
76
+ type: :runtime
77
+ prerelease: false
78
+ version_requirements: !ruby/object:Gem::Requirement
79
+ requirements:
80
+ - - ">="
81
+ - !ruby/object:Gem::Version
82
+ version: '0'
55
83
  - !ruby/object:Gem::Dependency
56
84
  name: toml-rb
57
85
  requirement: !ruby/object:Gem::Requirement
@@ -164,10 +192,12 @@ files:
164
192
  - lib/aia/aia_completion.zsh
165
193
  - lib/aia/cli.rb
166
194
  - lib/aia/config.rb
195
+ - lib/aia/directives.rb
167
196
  - lib/aia/logging.rb
168
197
  - lib/aia/main.rb
169
- - lib/aia/prompt_processing.rb
198
+ - lib/aia/prompt.rb
170
199
  - lib/aia/tools.rb
200
+ - lib/aia/tools/backend_common.rb
171
201
  - lib/aia/tools/editor.rb
172
202
  - lib/aia/tools/mods.rb
173
203
  - lib/aia/tools/sgpt.rb