personality 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CLAUDE.md +88 -0
- data/PLAN.md +621 -0
- data/README.md +35 -0
- data/Rakefile +10 -0
- data/TODO.md +65 -0
- data/docs/mcp-ruby-sdk.md +193 -0
- data/exe/psn +6 -0
- data/exe/psn-mcp +7 -0
- data/lib/personality/cart.rb +75 -0
- data/lib/personality/chunker.rb +27 -0
- data/lib/personality/cli/cart.rb +61 -0
- data/lib/personality/cli/context.rb +67 -0
- data/lib/personality/cli/hooks.rb +120 -0
- data/lib/personality/cli/index.rb +147 -0
- data/lib/personality/cli/memory.rb +130 -0
- data/lib/personality/cli/tts.rb +140 -0
- data/lib/personality/cli.rb +54 -0
- data/lib/personality/context.rb +73 -0
- data/lib/personality/db.rb +148 -0
- data/lib/personality/embedding.rb +44 -0
- data/lib/personality/hooks.rb +143 -0
- data/lib/personality/indexer.rb +211 -0
- data/lib/personality/init.rb +257 -0
- data/lib/personality/mcp/server.rb +314 -0
- data/lib/personality/memory.rb +125 -0
- data/lib/personality/tts.rb +191 -0
- data/lib/personality/version.rb +5 -0
- data/lib/personality.rb +17 -0
- metadata +269 -0
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "thor"

module Personality
  class CLI < Thor
    # `psn index` subcommands: build, query and manage the semantic index of
    # code and documentation chunks stored in the local vector database.
    #
    # Each command lazily requires its dependencies and runs pending DB
    # migrations before touching the index (see #boot).
    class Index < Thor
      desc "code PATH", "Index code files in a directory"
      option :project, type: :string, desc: "Project name"
      def code(path)
        index_with_spinner(:index_code, path, "Indexing code...")
      end

      desc "docs PATH", "Index documentation files"
      option :project, type: :string, desc: "Project name"
      def docs(path)
        index_with_spinner(:index_docs, path, "Indexing docs...")
      end

      desc "search QUERY", "Semantic search across indexed code and docs"
      option :type, type: :string, default: "all", desc: "Search type: code, docs, all"
      option :project, type: :string, desc: "Filter by project"
      option :limit, type: :numeric, default: 10, desc: "Max results"
      def search(query)
        pastel = boot
        result = Personality::Indexer.new.search(
          query: query, type: options[:type].to_sym, project: options[:project], limit: options[:limit]
        )

        if result[:results].empty?
          puts pastel.dim("No results found")
          return
        end

        result[:results].each do |r|
          puts "#{pastel.cyan(r[:type].to_s)} #{pastel.bold(r[:path])} #{pastel.dim("(dist: #{r[:distance]&.round(4)})")}"
          puts " #{r[:content]&.slice(0, 150)}"
          puts
        end
      end

      desc "status", "Show indexing statistics"
      option :project, type: :string, desc: "Filter by project"
      def status
        require "tty-table"
        pastel = boot
        result = Personality::Indexer.new.status(project: options[:project])

        # Merge code and doc stats into uniform [project, count, type] rows.
        all_stats = result[:code_index].map { |s| [s[:project], s[:count], "code"] } +
          result[:doc_index].map { |s| [s[:project], s[:count], "docs"] }

        if all_stats.empty?
          puts pastel.dim("No indexed content")
          return
        end

        table = TTY::Table.new(
          header: %w[Project Chunks Type],
          rows: all_stats
        )
        puts table.render(:unicode, padding: [0, 1])
      end

      desc "clear", "Clear indexed content"
      option :project, type: :string, desc: "Project to clear (omit for all)"
      option :type, type: :string, default: "all", desc: "What to clear: code, docs, all"
      def clear
        pastel = boot
        result = Personality::Indexer.new.clear(project: options[:project], type: options[:type].to_sym)
        puts pastel.green("Cleared #{result[:cleared]} for #{result[:project]}")
      end

      desc "hook", "Re-index a file (PostToolUse hook, reads JSON from stdin)"
      def hook
        require_relative "../indexer"
        require_relative "../hooks"
        require_relative "../db"

        DB.migrate!
        data = Personality::Hooks.read_stdin_json
        return unless data

        file_path = data.dig("tool_input", "file_path")
        return unless file_path

        # Derive the project name from the hook's working directory.
        cwd = data["cwd"] || Dir.pwd
        project = File.basename(cwd)

        Personality::Indexer.new.index_single_file(file_path: file_path, project: project)
      end

      def self.exit_on_failure?
        true
      end

      private

      # Load runtime dependencies, run pending DB migrations, and return a
      # Pastel instance for colored output. Private, so Thor does not expose
      # it as a command.
      def boot
        require_relative "../indexer"
        require_relative "../db"
        require "pastel"
        DB.migrate!
        Pastel.new
      end

      # Shared implementation for `code` and `docs`: runs the given Indexer
      # method under a spinner and reports chunk counts and errors.
      def index_with_spinner(indexer_method, path, label)
        require "tty-spinner"
        pastel = boot
        spinner = TTY::Spinner.new(" :spinner #{label}", format: :dots)
        spinner.auto_spin

        result = Personality::Indexer.new.public_send(indexer_method, path: path, project: options[:project])

        spinner.success(pastel.green("done"))
        puts " #{pastel.bold(result[:project])}: #{result[:indexed]} chunks indexed"
        return unless result[:errors].any?

        puts pastel.yellow(" Errors (#{result[:errors].length}):")
        result[:errors].each { |e| puts " #{e}" }
      end
    end
  end
end
|
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "thor"

module Personality
  class CLI < Thor
    # `psn memory` subcommands: store, recall, search, list and delete
    # persistent memories backed by the local vector database.
    #
    # Each command lazily requires its dependencies and runs pending DB
    # migrations before touching storage (see #boot).
    class Memory < Thor
      desc "store SUBJECT CONTENT", "Store a memory"
      def store(subject, content)
        pastel = boot
        result = Personality::Memory.new.store(subject: subject, content: content)
        puts "#{pastel.green("Stored:")} #{result[:subject]} (id: #{result[:id]})"
      end

      desc "recall QUERY", "Recall memories by semantic similarity"
      option :limit, type: :numeric, default: 5, desc: "Max results"
      option :subject, type: :string, desc: "Filter by subject"
      def recall(query)
        pastel = boot
        result = Personality::Memory.new.recall(
          query: query, limit: options[:limit], subject: options[:subject]
        )
        print_memories(pastel, result[:memories], with_distance: true)
      end

      desc "search", "Search memories by subject"
      option :subject, type: :string, desc: "Filter by subject"
      option :limit, type: :numeric, default: 20, desc: "Max results"
      def search
        pastel = boot
        result = Personality::Memory.new.search(subject: options[:subject], limit: options[:limit])
        print_memories(pastel, result[:memories], with_distance: false)
      end

      desc "forget ID", "Delete a memory"
      def forget(id)
        pastel = boot
        result = Personality::Memory.new.forget(id: id.to_i)
        if result[:deleted]
          puts pastel.green("Deleted memory ##{id}")
        else
          puts pastel.yellow("Memory ##{id} not found")
        end
      end

      desc "list", "List memory subjects"
      def list
        require "tty-table"
        pastel = boot
        result = Personality::Memory.new.list

        if result[:subjects].empty?
          puts pastel.dim("No memories stored")
          return
        end

        table = TTY::Table.new(
          header: %w[Subject Count],
          rows: result[:subjects].map { |s| [s[:subject], s[:count]] }
        )
        puts table.render(:unicode, padding: [0, 1])
      end

      desc "save", "Save memories from Stop hook (reads JSON from stdin)"
      def save
        require_relative "../memory"
        require_relative "../hooks"
        require_relative "../db"

        DB.migrate!
        data = Personality::Hooks.read_stdin_json
        return unless data

        transcript_path = data["transcript_path"]
        return unless transcript_path && File.exist?(transcript_path)

        # Extract learnings from transcript — placeholder for future implementation
        # For now, this is a no-op hook endpoint
      end

      def self.exit_on_failure?
        true
      end

      private

      # Load runtime dependencies, run pending DB migrations, and return a
      # Pastel instance for colored output. Private, so Thor does not expose
      # it as a command.
      def boot
        require_relative "../memory"
        require_relative "../db"
        require "pastel"
        DB.migrate!
        Pastel.new
      end

      # Shared rendering for `recall` (with similarity distance) and
      # `search` (without). Prints a dim notice when nothing matched.
      def print_memories(pastel, memories, with_distance:)
        if memories.empty?
          puts pastel.dim("No memories found")
          return
        end

        memories.each do |m|
          if with_distance
            puts "#{pastel.cyan("##{m[:id]}")} #{pastel.bold(m[:subject])} #{pastel.dim("(dist: #{m[:distance]&.round(4)})") if m[:distance]}"
          else
            puts "#{pastel.cyan("##{m[:id]}")} #{pastel.bold(m[:subject])}"
          end
          puts " #{m[:content][0, 200]}"
          puts
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "thor"

module Personality
  class CLI < Thor
    # `psn tts` subcommands: speak text aloud, manage piper voice models,
    # and service the Stop / UserPromptSubmit hook integration points.
    class Tts < Thor
      desc "speak TEXT", "Speak text aloud"
      option :voice, type: :string, aliases: "-v", desc: "Voice model name"
      def speak(text)
        require_relative "../tts"
        require "pastel"

        speak_or_fail(text, options[:voice])
      end

      desc "stop", "Stop currently playing TTS"
      def stop
        require_relative "../tts"
        require "pastel"

        pastel = Pastel.new
        if Personality::TTS.stop_current
          puts pastel.green("TTS stopped")
        else
          puts pastel.dim("No TTS playing")
        end
      end

      desc "mark-natural-stop", "Mark natural agent stop (Stop hook)"
      def mark_natural_stop
        require_relative "../tts"
        Personality::TTS.mark_natural_stop
      end

      desc "interrupt-check", "Check and handle TTS interrupt (UserPromptSubmit hook)"
      def interrupt_check
        require_relative "../tts"
        require "pastel"

        pastel = Pastel.new
        result = Personality::TTS.interrupt_check
        case result[:action]
        when :continue
          puts pastel.dim("Natural stop — TTS continues")
        when :stopped
          if result[:was_playing]
            puts pastel.green("User interrupt — TTS stopped")
          else
            puts pastel.dim("No TTS playing")
          end
        end
      end

      desc "voices", "List installed voice models"
      def voices
        require_relative "../tts"
        require "pastel"
        require "tty-table"

        voices = Personality::TTS.list_voices
        pastel = Pastel.new

        if voices.empty?
          puts pastel.dim("No voices installed")
          puts "\nDownload a voice:"
          puts " psn tts download en_US-lessac-medium"
          return
        end

        table = TTY::Table.new(
          header: %w[Name Size],
          rows: voices.map { |v| [v[:name], "#{v[:size_mb]} MB"] }
        )
        puts table.render(:unicode, padding: [0, 1])
        puts pastel.dim("\nVoices dir: #{Personality::TTS::VOICES_DIR}")
      end

      desc "download VOICE", "Download a piper voice from HuggingFace"
      def download(voice_name)
        require_relative "../tts"
        require "pastel"
        require "tty-spinner"

        pastel = Pastel.new
        spinner = TTY::Spinner.new(" :spinner Downloading #{voice_name}...", format: :dots)
        spinner.auto_spin

        result = Personality::TTS.download_voice(voice_name)

        if result[:error]
          spinner.error(pastel.red("failed"))
          puts " #{pastel.red(result[:error])}"
          exit 1
        elsif result[:exists]
          spinner.success(pastel.yellow("already installed"))
        else
          spinner.success(pastel.green("done (#{result[:size_mb]} MB)"))
        end
      end

      desc "test", "Test a voice with sample text"
      option :voice, type: :string, aliases: "-v", desc: "Voice to test"
      def test
        require_relative "../tts"
        require "pastel"

        speak_or_fail("Hello! This is a test of the text to speech system.", options[:voice])
      end

      desc "current", "Show active voice"
      def current
        require_relative "../tts"
        require "pastel"

        pastel = Pastel.new
        voice = Personality::TTS.active_voice
        puts "#{pastel.bold("Voice:")} #{voice}"
        if Personality::TTS.find_voice(voice)
          puts "#{pastel.green("✓")} Installed"
        else
          puts "#{pastel.yellow("!")} Not installed — run: psn tts download #{voice}"
        end
      end

      def self.exit_on_failure?
        true
      end

      private

      # Shared by `speak` and `test`: speak text synchronously; on failure
      # print the error in red and exit non-zero. Private, so Thor does not
      # expose it as a command.
      def speak_or_fail(text, voice)
        result = Personality::TTS.speak_and_wait(text, voice: voice)
        return unless result[:error]

        puts Pastel.new.red(result[:error])
        exit 1
      end
    end
  end
end
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "thor"
require_relative "cli/hooks"
require_relative "cli/context"
require_relative "cli/cart"
require_relative "cli/memory"
require_relative "cli/tts"
require_relative "cli/index"

module Personality
  # Top-level `psn` command-line interface. Small commands live here
  # directly; each feature area is delegated to a Thor subcommand class
  # required above (Hooks, Context, Cart, Memory, Tts, Index).
  class CLI < Thor
    desc "version", "Show version"
    # Print the gem version string.
    def version
      puts "psn #{Personality::VERSION}"
    end

    desc "info", "Show personality info"
    # Print a one-line description and the current version.
    def info
      puts "Personality - Infrastructure layer for Claude Code"
      puts "Version: #{Personality::VERSION}"
    end

    desc "init", "Initialize personality environment"
    option :yes, type: :boolean, default: false, aliases: "-y",
      desc: "Skip confirmation prompts"
    # Run the environment initializer. `init` is required lazily so ordinary
    # commands don't pay its load cost.
    def init
      require_relative "init"
      Init.new(auto_yes: options[:yes]).run
    end

    # NOTE: bare constant names below resolve to the CLI-nested classes
    # (e.g. Personality::CLI::Context), not the top-level Personality modules.
    desc "hooks SUBCOMMAND", "Claude Code hooks"
    subcommand "hooks", Hooks

    desc "context SUBCOMMAND", "Session context tracking"
    subcommand "context", Context

    desc "cart SUBCOMMAND", "Persona management"
    subcommand "cart", Cart

    desc "memory SUBCOMMAND", "Persistent memory"
    subcommand "memory", Memory

    desc "tts SUBCOMMAND", "Text-to-speech"
    subcommand "tts", Tts

    desc "index SUBCOMMAND", "Code and doc indexing"
    subcommand "index", Index

    # Exit with a non-zero status when a command fails (Thor opt-in).
    def self.exit_on_failure?
      true
    end
  end
end
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "json"
require "fileutils"

module Personality
  # Per-session tracking of which files have been read, persisted as small
  # JSON files under a temp directory so separate hook invocations share
  # state. Session state is keyed by session id (CLAUDE_SESSION_ID or
  # "default").
  module Context
    TRACKING_DIR = File.join(ENV.fetch("TMPDIR", "/tmp"), "psn-context")

    class << self
      # Record that +file_path+ was read in the session. Stores both the raw
      # path and its expanded absolute form so later checks match either.
      def track_read(file_path, session_id: nil)
        return if file_path.nil? || file_path.empty?

        sid = session_id || current_session_id
        state = load(sid)
        seen = state[:files]
        return if seen.include?(file_path)

        seen << file_path
        expanded = File.expand_path(file_path)
        seen << expanded unless expanded == file_path || seen.include?(expanded)

        save(sid, state)
      end

      # True when the file was tracked this session, matching either the
      # path as given or its expanded absolute form.
      def check(file_path, session_id: nil)
        tracked = load(session_id || current_session_id)[:files]
        tracked.include?(file_path) || tracked.include?(File.expand_path(file_path))
      end

      # All paths tracked for the session.
      def list(session_id: nil)
        load(session_id || current_session_id)[:files]
      end

      # Delete the session's tracking file, if present.
      def clear(session_id: nil)
        file = tracking_file(session_id || current_session_id)
        File.delete(file) if File.exist?(file)
      end

      # Read the session state from disk. Returns an empty state when the
      # file is missing or contains invalid JSON.
      def load(session_id)
        file = tracking_file(session_id)
        return {files: []} unless File.exist?(file)

        parsed = JSON.parse(File.read(file))
        {files: parsed.fetch("files", [])}
      rescue JSON::ParserError
        {files: []}
      end

      # Session id from the environment, with a stable fallback.
      def current_session_id
        ENV.fetch("CLAUDE_SESSION_ID", "default")
      end

      private

      # Path of the per-session JSON file; ensures the tracking dir exists.
      def tracking_file(session_id)
        FileUtils.mkdir_p(TRACKING_DIR)
        File.join(TRACKING_DIR, "#{session_id}.json")
      end

      # Persist the session state as JSON.
      def save(session_id, context)
        File.write(tracking_file(session_id), JSON.generate(files: context[:files]))
      end
    end
  end
end
|
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "fileutils"
require "sqlite3"
require "sqlite_vec"

module Personality
  # Shared SQLite access layer: caches one connection per database path,
  # loads the sqlite-vec extension on open, and applies versioned schema
  # migrations tracked in a schema_version table.
  module DB
    # Default on-disk location (XDG-style data directory under $HOME).
    DB_PATH = File.join(Dir.home, ".local", "share", "personality", "main.db")
    # Highest migration version this code knows; bump when adding migrations.
    SCHEMA_VERSION = 2

    class << self
      # Return a cached connection for +path+ (DB_PATH when nil), opening
      # and configuring it on first use.
      def connection(path: nil)
        @connections ||= {}
        db_path = path || DB_PATH
        @connections[db_path] ||= open_connection(db_path)
      end

      # Close every cached connection and empty the cache.
      def reset!
        @connections&.each_value(&:close)
        @connections = {}
      end

      # Apply any migrations newer than the database's recorded version.
      # No-op when the schema is already at SCHEMA_VERSION or beyond.
      def migrate!(path: nil)
        db = connection(path: path)
        current = current_version(db)
        return if current >= SCHEMA_VERSION

        apply_migrations(db, from: current)
      end

      # Highest applied migration version. Returns 0 for a fresh database:
      # either the schema_version table does not exist yet (rescued) or
      # MAX(version) is NULL (the `|| 0` fallback).
      def current_version(db = nil)
        db ||= connection
        row = db.execute("SELECT MAX(version) AS ver FROM schema_version").first
        row&.fetch("ver", 0) || 0
      rescue SQLite3::SQLException
        0
      end

      # Run the block inside a SQL transaction on the (possibly cached)
      # connection for +path+.
      def transaction(path: nil, &block)
        connection(path: path).transaction(&block)
      end

      private

      # Open and configure a connection: hash-shaped rows, WAL journaling,
      # foreign-key enforcement, and the sqlite-vec extension (provides the
      # vec0 virtual tables used below).
      def open_connection(db_path)
        FileUtils.mkdir_p(File.dirname(db_path))
        db = SQLite3::Database.new(db_path)
        db.results_as_hash = true
        db.execute("PRAGMA journal_mode=WAL")
        db.execute("PRAGMA foreign_keys=ON")

        # Extension loading is enabled only long enough to load sqlite-vec.
        db.enable_load_extension(true)
        SqliteVec.load(db)
        db.enable_load_extension(false)

        db
      end

      # Apply each pending migration (version > from) in its own
      # transaction, recording the version in the same transaction so a
      # crash mid-sequence resumes at the right point on the next run.
      def apply_migrations(db, from:)
        migrations.each do |version, sql|
          next if version <= from

          db.transaction do
            sql.each { |stmt| db.execute(stmt) }
            db.execute("INSERT OR REPLACE INTO schema_version (version) VALUES (?)", [version])
          end
        end
      end

      # Ordered map of migration version => array of DDL statements.
      # Statements use IF NOT EXISTS so re-running is harmless.
      def migrations
        {
          1 => [
            "CREATE TABLE IF NOT EXISTS schema_version (
              version INTEGER PRIMARY KEY,
              applied_at TEXT DEFAULT (datetime('now'))
            )"
          ],
          2 => [
            # Carts
            "CREATE TABLE IF NOT EXISTS carts (
              id INTEGER PRIMARY KEY AUTOINCREMENT,
              tag TEXT UNIQUE NOT NULL,
              version TEXT,
              name TEXT,
              type TEXT,
              tagline TEXT,
              source TEXT,
              created_at TEXT DEFAULT (datetime('now')),
              updated_at TEXT DEFAULT (datetime('now'))
            )",

            # Memories
            "CREATE TABLE IF NOT EXISTS memories (
              id INTEGER PRIMARY KEY AUTOINCREMENT,
              cart_id INTEGER NOT NULL REFERENCES carts(id) ON DELETE CASCADE,
              subject TEXT NOT NULL,
              content TEXT NOT NULL,
              metadata TEXT DEFAULT '{}',
              created_at TEXT DEFAULT (datetime('now')),
              updated_at TEXT DEFAULT (datetime('now'))
            )",
            "CREATE INDEX IF NOT EXISTS idx_memories_cart_id ON memories(cart_id)",
            "CREATE INDEX IF NOT EXISTS idx_memories_subject ON memories(subject)",
            "CREATE VIRTUAL TABLE IF NOT EXISTS vec_memories USING vec0(
              memory_id INTEGER PRIMARY KEY,
              embedding float[768]
            )",

            # Code index
            "CREATE TABLE IF NOT EXISTS code_chunks (
              id INTEGER PRIMARY KEY AUTOINCREMENT,
              path TEXT NOT NULL,
              content TEXT NOT NULL,
              language TEXT,
              project TEXT,
              chunk_index INTEGER DEFAULT 0,
              indexed_at TEXT DEFAULT (datetime('now'))
            )",
            "CREATE INDEX IF NOT EXISTS idx_code_chunks_project ON code_chunks(project)",
            "CREATE VIRTUAL TABLE IF NOT EXISTS vec_code USING vec0(
              chunk_id INTEGER PRIMARY KEY,
              embedding float[768]
            )",

            # Doc index
            "CREATE TABLE IF NOT EXISTS doc_chunks (
              id INTEGER PRIMARY KEY AUTOINCREMENT,
              path TEXT NOT NULL,
              content TEXT NOT NULL,
              project TEXT,
              chunk_index INTEGER DEFAULT 0,
              indexed_at TEXT DEFAULT (datetime('now'))
            )",
            "CREATE INDEX IF NOT EXISTS idx_doc_chunks_project ON doc_chunks(project)",
            "CREATE VIRTUAL TABLE IF NOT EXISTS vec_docs USING vec0(
              chunk_id INTEGER PRIMARY KEY,
              embedding float[768]
            )",

            # Drop legacy table if migrating from v1
            "DROP TABLE IF EXISTS embeddings"
          ]
        }
      end
    end
  end
end
|