personality 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CLAUDE.md +88 -0
- data/PLAN.md +621 -0
- data/README.md +35 -0
- data/Rakefile +10 -0
- data/TODO.md +65 -0
- data/docs/mcp-ruby-sdk.md +193 -0
- data/exe/psn +6 -0
- data/exe/psn-mcp +7 -0
- data/lib/personality/cart.rb +75 -0
- data/lib/personality/chunker.rb +27 -0
- data/lib/personality/cli/cart.rb +61 -0
- data/lib/personality/cli/context.rb +67 -0
- data/lib/personality/cli/hooks.rb +120 -0
- data/lib/personality/cli/index.rb +147 -0
- data/lib/personality/cli/memory.rb +130 -0
- data/lib/personality/cli/tts.rb +140 -0
- data/lib/personality/cli.rb +54 -0
- data/lib/personality/context.rb +73 -0
- data/lib/personality/db.rb +148 -0
- data/lib/personality/embedding.rb +44 -0
- data/lib/personality/hooks.rb +143 -0
- data/lib/personality/indexer.rb +211 -0
- data/lib/personality/init.rb +257 -0
- data/lib/personality/mcp/server.rb +314 -0
- data/lib/personality/memory.rb +125 -0
- data/lib/personality/tts.rb +191 -0
- data/lib/personality/version.rb +5 -0
- data/lib/personality.rb +17 -0
- metadata +269 -0
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "net/http"
require "json"
require "uri"

module Personality
  # Thin client for generating text embeddings via a local Ollama server.
  module Embedding
    DEFAULT_URL = "http://localhost:11434"
    DEFAULT_MODEL = "nomic-embed-text"
    # Input is truncated to this many characters before being sent.
    MAX_INPUT_LENGTH = 8000
    # Vector size produced by the default model.
    DIMENSIONS = 768

    class Error < Personality::Error; end

    # Returns the embedding vector (Array of Floats) for +text+.
    #
    # text  - anything responding to #to_s; truncated to MAX_INPUT_LENGTH.
    # model - optional model name (falls back to OLLAMA_MODEL env, then default).
    # url   - optional server base URL (falls back to OLLAMA_URL env, then default).
    #
    # Returns [] for empty input. Raises Error on a non-2xx response or a
    # response body without a non-empty "embedding" array.
    def self.generate(text, model: nil, url: nil)
      prompt = text.to_s[0, MAX_INPUT_LENGTH]
      return [] if prompt.empty?

      base = url || ENV.fetch("OLLAMA_URL", DEFAULT_URL)
      chosen_model = model || ENV.fetch("OLLAMA_MODEL", DEFAULT_MODEL)

      endpoint = URI.join(base, "/api/embeddings")
      payload = JSON.generate(model: chosen_model, prompt: prompt)

      response = Net::HTTP.post(endpoint, payload, "Content-Type" => "application/json")
      unless response.is_a?(Net::HTTPSuccess)
        raise Error, "Ollama returned #{response.code}: #{response.body}"
      end

      vector = JSON.parse(response.body)["embedding"]
      unless vector.is_a?(Array) && !vector.empty?
        raise Error, "Unexpected Ollama response: missing embedding"
      end

      vector
    end
  end
end
|
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "json"
require "fileutils"
require "time" # Time#iso8601 (used in .log) is defined by the "time" stdlib

module Personality
  # Claude Code hook handlers: structured JSONL event logging (with value
  # truncation) and the hooks.json template that wires `psn` into Claude.
  module Hooks
    LOG_DIR = File.join(Dir.home, ".config", "psn")
    LOG_FILE = File.join(LOG_DIR, "hooks.jsonl")
    CONFIG_FILE = File.join(LOG_DIR, "logging.toml")

    # Truncation defaults; overridable via the [truncation] table of logging.toml.
    DEFAULT_MAX_LENGTH = 50
    DEFAULT_PRESERVE_FIELDS = %w[path file_path cwd transcript_path file directory].freeze
    DEFAULT_PRESERVE_SUFFIXES = %w[_path _dir].freeze

    # Hook configuration emitted verbatim (as JSON) by generate_hooks_json.
    HOOKS_JSON_TEMPLATE = {
      hooks: {
        PreToolUse: [{hooks: [{type: "command", command: "psn hooks pre-tool-use", timeout: 5000}]}],
        PostToolUse: [
          {matcher: "Read", hooks: [{type: "command", command: "psn context track-read", timeout: 5000}]},
          {matcher: "Write|Edit", hooks: [{type: "command", command: "psn index hook", timeout: 30_000}]}
        ],
        Stop: [{hooks: [
          {type: "command", command: "psn tts mark-natural-stop", timeout: 1000},
          {type: "command", command: "psn memory save", timeout: 5000}
        ]}],
        SubagentStop: [{hooks: [{type: "command", command: "psn hooks subagent-stop", timeout: 5000}]}],
        SessionStart: [{hooks: [{type: "command", command: "psn hooks session-start", timeout: 5000}]}],
        SessionEnd: [{hooks: [
          {type: "command", command: "psn hooks session-end", timeout: 5000},
          {type: "command", command: "psn tts stop", timeout: 1000}
        ]}],
        UserPromptSubmit: [{hooks: [
          {type: "command", command: "psn hooks user-prompt-submit", timeout: 5000},
          {type: "command", command: "psn tts interrupt-check", timeout: 1000}
        ]}],
        PreCompact: [{hooks: [{type: "command", command: "psn memory save", timeout: 5000}]}],
        Notification: [{hooks: [{type: "command", command: "psn hooks notification", timeout: 5000}]}]
      }
    }.freeze

    class << self
      # Append one JSONL entry for +event+ to LOG_FILE, merging in the (truncated)
      # +data+ hash. Any failure is swallowed so a logging problem can never break
      # a hook invocation.
      #
      # BUGFIX: Time#iso8601 needs `require "time"`; without it this raised
      # NoMethodError, which the bare rescue silently swallowed — so nothing was
      # ever logged. The require at the top of the file fixes that.
      def log(event, data = nil)
        FileUtils.mkdir_p(LOG_DIR)

        entry = {
          ts: Time.now.utc.iso8601,
          event: event,
          session: ENV.fetch("CLAUDE_SESSION_ID", ""),
          cwd: Dir.pwd
        }

        if data.is_a?(Hash)
          data.each do |key, value|
            next if key.to_s == "hook_event_name" # redundant with `event`
            entry[key.to_sym] = process_value(key.to_s, value)
          end
        end

        File.open(LOG_FILE, "a") { |f| f.puts(JSON.generate(entry)) }
      rescue
        nil # Silent fail — don't break hooks on logging errors
      end

      # Parse the hook payload Claude pipes in on stdin.
      # Returns nil on a TTY (no payload), bad JSON, or EOF.
      def read_stdin_json
        return nil if $stdin.tty?
        JSON.parse($stdin.read)
      rescue JSON::ParserError, EOFError
        nil
      end

      # Pretty-printed JSON for HOOKS_JSON_TEMPLATE.
      def generate_hooks_json
        JSON.pretty_generate(HOOKS_JSON_TEMPLATE)
      end

      # Truncate +value+ to the configured max length, marking the cut with "...".
      def truncate(value, max_length: nil)
        max = max_length || config[:max_length]
        return value if value.length <= max
        "#{value[0, max - 3]}..."
      end

      # True when +key+ should never be truncated (exact match or configured suffix).
      def preserved_key?(key)
        key_lower = key.downcase
        return true if config[:preserve_fields].include?(key_lower)
        config[:preserve_suffixes].any? { |suffix| key_lower.end_with?(suffix) }
      end

      # Recursively shrink +value+ for logging: scalars pass through, strings are
      # truncated unless their key is preserved, hashes recurse per-key, and
      # arrays are capped at 5 elements with a "...+N more" marker.
      def process_value(key, value)
        case value
        when nil then nil
        when true, false then value
        when Integer, Float then value
        when String
          preserved_key?(key) ? value : truncate(value)
        when Hash
          value.transform_keys(&:to_s).each_with_object({}) do |(k, v), h|
            h[k] = process_value(k, v)
          end
        when Array
          processed = value.first(5).map { |item| process_value(key, item) }
          processed << "...+#{value.length - 5} more" if value.length > 5
          processed
        else
          truncate(value.to_s)
        end
      end

      # Memoized truncation config (defaults merged with logging.toml, if present).
      def config
        @config ||= load_config
      end

      # Drop the memoized config so the next #config call re-reads logging.toml.
      def reset_config!
        @config = nil
      end

      private

      # Build the config hash: defaults, optionally overridden by the
      # [truncation] table of CONFIG_FILE. Any read/parse error falls back
      # to defaults (hooks must never fail because of a bad config file).
      def load_config
        cfg = {
          max_length: DEFAULT_MAX_LENGTH,
          preserve_fields: DEFAULT_PRESERVE_FIELDS.dup,
          preserve_suffixes: DEFAULT_PRESERVE_SUFFIXES.dup
        }

        if File.exist?(CONFIG_FILE)
          begin
            require "toml-rb"
            file_config = TomlRB.load_file(CONFIG_FILE)
            if (t = file_config["truncation"])
              cfg[:max_length] = t["max_length"] if t["max_length"]
              cfg[:preserve_fields] = t["preserve_fields"] if t["preserve_fields"]
              cfg[:preserve_suffixes] = t["preserve_suffixes"] if t["preserve_suffixes"]
            end
          rescue
            # Use defaults on error
          end
        end

        cfg
      end
    end
  end
end
|
|
@@ -0,0 +1,211 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "json"
require "set" # Enumerable#to_set (used for the extension constants) lives in the
              # "set" stdlib; it is only autoloaded from Ruby 3.2 onward.
require_relative "db"
require_relative "embedding"
require_relative "chunker"

module Personality
  # Indexes code and documentation files into sqlite-vec tables and serves
  # nearest-neighbour searches over their embeddings.
  class Indexer
    CODE_EXTENSIONS = %w[.py .rs .rb .js .ts .go .java .c .cpp .h].to_set.freeze
    DOC_EXTENSIONS = %w[.md .txt .rst .adoc].to_set.freeze

    # Recursively index code files under +path+ into code_chunks/vec_code.
    # extensions may be given with or without a leading dot.
    # Returns {indexed:, project:, errors:}.
    def index_code(path:, project: nil, extensions: nil)
      dir = File.expand_path(path)
      proj = project || File.basename(dir)
      exts = extensions ? extensions.map { |e| e.start_with?(".") ? e : ".#{e}" }.to_set : CODE_EXTENSIONS

      index_files(dir, proj, exts, table: "code_chunks", vec_table: "vec_code", language: true)
    end

    # Recursively index documentation files under +path+ into doc_chunks/vec_docs.
    def index_docs(path:, project: nil)
      dir = File.expand_path(path)
      proj = project || File.basename(dir)

      index_files(dir, proj, DOC_EXTENSIONS, table: "doc_chunks", vec_table: "vec_docs", language: false)
    end

    # Embed +query+ and return the nearest chunks across the requested index
    # type(s), merged and re-sorted by vector distance. Returns {results: [...]}.
    def search(query:, type: :all, project: nil, limit: 10)
      embedding = Embedding.generate(query)
      return {results: []} if embedding.empty?

      results = []
      db = DB.connection

      if type == :all || type == :code
        results.concat(
          search_table(db, "code_chunks", "vec_code", embedding, project: project, limit: limit, type: :code)
        )
      end

      if type == :all || type == :docs
        results.concat(
          search_table(db, "doc_chunks", "vec_docs", embedding, project: project, limit: limit, type: :docs)
        )
      end

      results.sort_by! { |r| r[:distance] }
      {results: results.first(limit)}
    end

    # Per-project chunk counts for both indexes.
    def status(project: nil)
      db = DB.connection

      code_stats = count_by_project(db, "code_chunks", project)
      doc_stats = count_by_project(db, "doc_chunks", project)

      {code_index: code_stats, doc_index: doc_stats}
    end

    # Delete indexed chunks (and their vectors) for +project+, or everything
    # when project is nil.
    def clear(project: nil, type: :all)
      db = DB.connection

      if type == :all || type == :code
        clear_table(db, "code_chunks", "vec_code", project)
      end

      if type == :all || type == :docs
        clear_table(db, "doc_chunks", "vec_docs", project)
      end

      {cleared: type.to_s, project: project || "all"}
    end

    # Re-index one file (used by the PostToolUse Write|Edit hook). Silently
    # ignores missing files and unrecognized extensions.
    def index_single_file(file_path:, project: nil)
      path = File.expand_path(file_path)
      return unless File.exist?(path)

      ext = File.extname(path).downcase
      proj = project || File.basename(Dir.pwd)

      if CODE_EXTENSIONS.include?(ext)
        index_one_file(path, proj, "code_chunks", "vec_code", language: ext[1..])
      elsif DOC_EXTENSIONS.include?(ext)
        index_one_file(path, proj, "doc_chunks", "vec_docs", language: nil)
      end
    end

    private

    # Walk +dir+, indexing every file whose extension is in +extensions+.
    # Collects per-file errors instead of aborting the whole run.
    def index_files(dir, project, extensions, table:, vec_table:, language:)
      indexed = 0
      errors = []

      Dir.glob(File.join(dir, "**", "*")).each do |file_path|
        next unless File.file?(file_path)
        next unless extensions.include?(File.extname(file_path).downcase)

        begin
          lang = language ? File.extname(file_path).downcase[1..] : nil
          count = index_one_file(file_path, project, table, vec_table, language: lang)
          indexed += count
        rescue => e
          errors << "#{file_path}: #{e.message}"
        end
      end

      {indexed: indexed, project: project, errors: errors.first(5)}
    end

    # Chunk, embed, and store a single file, replacing any chunks previously
    # indexed for the same (path, project). Returns the number of chunks stored.
    def index_one_file(file_path, project, table, vec_table, language: nil)
      content = File.read(file_path, encoding: "UTF-8")
      return 0 if content.length < Chunker::MIN_LENGTH

      db = DB.connection
      chunks = Chunker.split(content)
      count = 0

      # Remove old chunks (and their vectors) for this file
      old_ids = db.execute(
        "SELECT id FROM #{table} WHERE path = ? AND project = ?", [file_path, project]
      ).map { |r| r["id"] }

      old_ids.each do |id|
        db.execute("DELETE FROM #{vec_table} WHERE chunk_id = ?", [id])
        db.execute("DELETE FROM #{table} WHERE id = ?", [id])
      end

      chunks.each_with_index do |chunk, idx|
        embedding = Embedding.generate(chunk)
        next if embedding.empty?

        if language
          db.execute(
            "INSERT INTO #{table} (path, content, language, project, chunk_index) VALUES (?, ?, ?, ?, ?)",
            [file_path, chunk, language, project, idx]
          )
        else
          db.execute(
            "INSERT INTO #{table} (path, content, project, chunk_index) VALUES (?, ?, ?, ?)",
            [file_path, chunk, project, idx]
          )
        end

        chunk_id = db.last_insert_row_id
        db.execute(
          "INSERT INTO #{vec_table} (chunk_id, embedding) VALUES (?, ?)",
          [chunk_id, embedding.to_json]
        )
        count += 1
      end

      count
    end

    # KNN query against one vec table, optionally filtered by project.
    # (sqlite-vec: `embedding MATCH ? AND k = ?` selects the k nearest rows.)
    def search_table(db, table, vec_table, embedding, project:, limit:, type:)
      project_filter = project ? "AND c.project = ?" : ""
      params = [embedding.to_json, limit]
      params << project if project

      rows = db.execute(<<~SQL, params)
        SELECT c.id, c.path, c.content, c.project, v.distance
        FROM #{vec_table} v
        INNER JOIN #{table} c ON c.id = v.chunk_id
        WHERE v.embedding MATCH ? AND k = ?
        #{project_filter}
        ORDER BY v.distance
      SQL

      rows.map do |r|
        {
          type: type,
          id: r["id"],
          path: r["path"],
          content: r["content"]&.slice(0, 500), # cap payload returned to callers
          project: r["project"],
          distance: r["distance"]
        }
      end
    end

    # [{project:, count:}, ...] for +table+, optionally restricted to one project.
    def count_by_project(db, table, project)
      if project
        db.execute(
          "SELECT project, COUNT(*) AS count FROM #{table} WHERE project = ? GROUP BY project",
          [project]
        )
      else
        db.execute("SELECT project, COUNT(*) AS count FROM #{table} GROUP BY project")
      end.map { |r| {project: r["project"], count: r["count"]} }
    end

    # Delete rows from +table+ and their vectors from +vec_table+.
    def clear_table(db, table, vec_table, project)
      if project
        ids = db.execute("SELECT id FROM #{table} WHERE project = ?", [project]).map { |r| r["id"] }
        ids.each do |id|
          db.execute("DELETE FROM #{vec_table} WHERE chunk_id = ?", [id])
        end
        db.execute("DELETE FROM #{table} WHERE project = ?", [project])
      else
        db.execute("DELETE FROM #{vec_table}")
        db.execute("DELETE FROM #{table}")
      end
    end
  end
end
|
|
@@ -0,0 +1,257 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "open3"
require "fileutils"
require "pastel"
require "tty-spinner"
require_relative "db"

module Personality
  # Interactive `psn init` setup: creates the sqlite-vec database and
  # checks/installs the external tools (ollama, nomic-embed-text, uv, piper-tts),
  # printing a colored summary at the end.
  class Init
    DB_PATH = DB::DB_PATH

    attr_reader :pastel, :auto_yes

    # auto_yes: skip all confirmation prompts (answer "yes" to everything).
    def initialize(auto_yes: false)
      @pastel = Pastel.new
      @auto_yes = auto_yes
    end

    # Run every setup step in order and print the summary.
    def run
      puts pastel.bold("Personality Init")
      puts pastel.dim("=" * 40)
      puts

      results = []
      results << setup_database
      results << check_ollama
      results << check_nomic_embed
      results << check_uv
      results << check_piper

      puts
      puts pastel.bold("Summary")
      puts pastel.dim("-" * 40)
      results.each { |label, status| print_result(label, status) }
      puts
    end

    private

    # Step 1: Create sqlite-vec database.
    # Returns [label, :exists | :skipped | :installed | :failed].
    def setup_database
      label = "sqlite-vec database"
      puts pastel.bold("\n1. #{label}")

      if File.exist?(DB_PATH)
        puts "  #{pastel.green("exists")} #{DB_PATH}"
        return [label, :exists]
      end

      unless confirm?("Create database at #{DB_PATH}?")
        return [label, :skipped]
      end

      spinner = spin("Creating database")
      begin
        DB.migrate!
        spinner.success(pastel.green("done"))
        [label, :installed]
      rescue => e
        spinner.error(pastel.red("failed"))
        puts "  #{pastel.red(e.message)}"
        [label, :failed]
      end
    end

    # Step 2: Check for (or install) Ollama, and make sure the server is up.
    # Remembers whether ollama pre-existed so step 3 can auto-pull the model.
    def check_ollama
      label = "ollama"
      puts pastel.bold("\n2. #{label}")

      version = command_version("ollama", "--version")
      if version
        puts "  #{pastel.green("found")} #{version}"
        @ollama_was_present = true
        ensure_ollama_running
        return [label, :exists]
      end

      @ollama_was_present = false
      install_cmd = brew_available? ? "brew install ollama" : "curl -fsSL https://ollama.com/install.sh | sh"

      unless confirm?("Ollama not found. Install via `#{install_cmd}`?")
        return [label, :skipped]
      end

      spinner = spin("Installing ollama")
      if run_command(install_cmd)
        spinner.success(pastel.green("done"))
        ensure_ollama_running
        [label, :installed]
      else
        spinner.error(pastel.red("failed"))
        [label, :failed]
      end
    end

    # Step 3: Pull the nomic-embed-text embedding model.
    def check_nomic_embed
      label = "nomic-embed-text"
      puts pastel.bold("\n3. #{label}")

      unless command_exists?("ollama")
        puts "  #{pastel.yellow("skipped")} ollama not available"
        return [label, :skipped]
      end

      if model_installed?("nomic-embed-text")
        puts "  #{pastel.green("found")} nomic-embed-text"
        return [label, :exists]
      end

      # Auto-pull if ollama was just installed in step 2, otherwise prompt.
      ollama_just_installed = @ollama_was_present == false
      unless ollama_just_installed || confirm?("Pull nomic-embed-text model?")
        return [label, :skipped]
      end

      spinner = spin("Pulling nomic-embed-text")
      if run_command("ollama pull nomic-embed-text")
        spinner.success(pastel.green("done"))
        [label, :installed]
      else
        spinner.error(pastel.red("failed"))
        [label, :failed]
      end
    end

    # Step 4a: Check for (or install) uv, needed to install piper-tts.
    def check_uv
      label = "uv"
      puts pastel.bold("\n4a. #{label}")

      version = command_version("uv", "--version")
      if version
        puts "  #{pastel.green("found")} #{version}"
        return [label, :exists]
      end

      install_cmd = brew_available? ? "brew install uv" : "curl -LsSf https://astral.sh/uv/install.sh | sh"

      unless confirm?("uv not found. Install via `#{install_cmd}`?")
        return [label, :skipped]
      end

      spinner = spin("Installing uv")
      if run_command(install_cmd)
        spinner.success(pastel.green("done"))
        [label, :installed]
      else
        spinner.error(pastel.red("failed"))
        [label, :failed]
      end
    end

    # Step 4b: Install piper-tts via `uv tool install`.
    def check_piper
      label = "piper-tts"
      puts pastel.bold("\n4b. #{label}")

      version = command_version("piper", "--help")
      if version
        puts "  #{pastel.green("found")} piper"
        return [label, :exists]
      end

      uv_bin = find_executable("uv")
      unless uv_bin
        puts "  #{pastel.yellow("skipped")} uv not available"
        return [label, :skipped]
      end

      unless confirm?("piper-tts not found. Install via `uv tool install`?")
        return [label, :skipped]
      end

      spinner = spin("Installing piper-tts")
      if run_command("#{uv_bin} tool install piper-tts --with pathvalidate")
        spinner.success(pastel.green("done"))
        [label, :installed]
      else
        spinner.error(pastel.red("failed"))
        [label, :failed]
      end
    end

    # Helpers

    # Prompt the user; empty answer, "y", or "yes" means yes.
    # BUGFIX: `$stdin.gets` returns nil on EOF (e.g. stdin piped/closed), which
    # previously crashed on `nil.empty?`. EOF now falls through to the default
    # answer (yes), matching an empty line.
    def confirm?(message)
      return true if auto_yes

      print "  #{message} #{pastel.dim("[Y/n]")} "
      response = $stdin.gets&.strip&.downcase
      response.nil? || response.empty? || response == "y" || response == "yes"
    end

    # Start an auto-spinning TTY spinner with the given message.
    def spin(message)
      TTY::Spinner.new("  :spinner #{message}...", format: :dots)
        .tap(&:auto_spin)
    end

    # True when +cmd+ resolves on PATH (via `which`).
    def command_exists?(cmd)
      _, status = Open3.capture2e("which", cmd)
      status.success?
    end

    # First output line of `cmd flag`, or nil when the command is missing/fails.
    def command_version(cmd, flag)
      stdout, status = Open3.capture2e(cmd, flag)
      status.success? ? stdout.strip.lines.first&.strip : nil
    rescue Errno::ENOENT
      nil
    end

    # Absolute path of +cmd+ from `which`, or nil.
    def find_executable(cmd)
      path, status = Open3.capture2e("which", cmd)
      status.success? ? path.strip : nil
    end

    # Run a shell command, discarding output; true on exit status 0.
    def run_command(cmd)
      _, status = Open3.capture2e(cmd)
      status.success?
    end

    def brew_available?
      command_exists?("brew")
    end

    # True when `ollama list` shows a model whose line contains +name+.
    def model_installed?(name)
      stdout, status = Open3.capture2e("ollama", "list")
      return false unless status.success?

      stdout.lines.any? { |line| line.include?(name) }
    rescue Errno::ENOENT
      false
    end

    # If the ollama CLI can't reach a server, start `ollama serve` in the
    # background and give it a moment to come up. Best-effort: missing binary
    # is ignored.
    def ensure_ollama_running
      stdout, status = Open3.capture2e("ollama", "list")
      return if status.success?

      puts "  #{pastel.yellow("starting")} ollama serve"
      pid = spawn("ollama serve", [:out, :err] => "/dev/null")
      Process.detach(pid) # reap in the background so the child never zombifies
      sleep 2
    rescue Errno::ENOENT
      nil
    end

    # Print one colored summary line for a [label, status] pair.
    def print_result(label, status)
      icon = case status
      when :exists then pastel.green("[OK]")
      when :installed then pastel.cyan("[INSTALLED]")
      when :skipped then pastel.yellow("[SKIPPED]")
      when :failed then pastel.red("[FAILED]")
      end
      puts "  #{icon} #{label}"
    end
  end
end
|