anima-core 1.3.0 → 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75) hide show
  1. checksums.yaml +4 -4
  2. data/.reek.yml +6 -7
  3. data/README.md +64 -16
  4. data/app/decorators/tool_call_decorator.rb +3 -3
  5. data/app/jobs/agent_request_job.rb +2 -2
  6. data/app/jobs/passive_recall_job.rb +6 -11
  7. data/app/models/concerns/message/broadcasting.rb +1 -0
  8. data/app/models/goal.rb +2 -1
  9. data/app/models/message.rb +0 -13
  10. data/app/models/pending_message.rb +150 -2
  11. data/app/models/session.rb +324 -266
  12. data/bin/inspect-cassette +144 -0
  13. data/bin/release +212 -0
  14. data/bin/with-llms +20 -0
  15. data/config/database.yml +1 -0
  16. data/db/cable_structure.sql +9 -0
  17. data/db/migrate/20260330120000_add_source_to_pending_messages.rb +8 -0
  18. data/db/migrate/20260401180000_add_api_metrics_to_messages.rb +7 -0
  19. data/db/migrate/20260401210935_remove_recalled_message_ids_from_sessions.rb +5 -0
  20. data/db/migrate/20260403080031_add_initial_cwd_to_sessions.rb +5 -0
  21. data/db/queue_structure.sql +61 -0
  22. data/db/structure.sql +120 -0
  23. data/lib/agent_loop.rb +42 -13
  24. data/lib/analytical_brain/runner.rb +12 -2
  25. data/lib/analytical_brain/tools/activate_skill.rb +2 -2
  26. data/lib/analytical_brain/tools/assign_nickname.rb +1 -1
  27. data/lib/analytical_brain/tools/deactivate_skill.rb +2 -1
  28. data/lib/analytical_brain/tools/deactivate_workflow.rb +2 -1
  29. data/lib/analytical_brain/tools/finish_goal.rb +3 -0
  30. data/lib/analytical_brain/tools/goal_messaging.rb +28 -0
  31. data/lib/analytical_brain/tools/read_workflow.rb +2 -2
  32. data/lib/analytical_brain/tools/set_goal.rb +5 -1
  33. data/lib/analytical_brain/tools/update_goal.rb +5 -1
  34. data/lib/anima/cli.rb +41 -13
  35. data/lib/anima/installer.rb +13 -0
  36. data/lib/anima/settings.rb +13 -7
  37. data/lib/anima/version.rb +1 -1
  38. data/lib/events/agent_message.rb +14 -0
  39. data/lib/events/subscribers/persister.rb +2 -1
  40. data/lib/events/subscribers/subagent_message_router.rb +4 -7
  41. data/lib/llm/client.rb +37 -30
  42. data/lib/mneme/compressed_viewport.rb +8 -4
  43. data/lib/mneme/passive_recall.rb +85 -16
  44. data/lib/mneme/runner.rb +15 -4
  45. data/lib/providers/anthropic.rb +112 -7
  46. data/lib/shell_session.rb +185 -2
  47. data/lib/tools/base.rb +0 -1
  48. data/lib/tools/bash.rb +16 -14
  49. data/lib/tools/mark_goal_completed.rb +4 -5
  50. data/lib/tools/registry.rb +6 -1
  51. data/lib/tools/response_truncator.rb +1 -1
  52. data/lib/tools/spawn_specialist.rb +10 -8
  53. data/lib/tools/spawn_subagent.rb +17 -13
  54. data/lib/tools/subagent_prompts.rb +13 -15
  55. data/lib/tui/app.rb +389 -146
  56. data/lib/tui/cable_client.rb +9 -16
  57. data/lib/tui/decorators/base_decorator.rb +24 -4
  58. data/lib/tui/decorators/bash_decorator.rb +1 -1
  59. data/lib/tui/decorators/edit_decorator.rb +4 -2
  60. data/lib/tui/decorators/read_decorator.rb +4 -2
  61. data/lib/tui/decorators/think_decorator.rb +2 -2
  62. data/lib/tui/decorators/web_get_decorator.rb +1 -1
  63. data/lib/tui/decorators/write_decorator.rb +4 -2
  64. data/lib/tui/flash.rb +19 -14
  65. data/lib/tui/formatting.rb +20 -9
  66. data/lib/tui/input_buffer.rb +6 -6
  67. data/lib/tui/message_store.rb +89 -1
  68. data/lib/tui/performance_logger.rb +2 -3
  69. data/lib/tui/screens/chat.rb +56 -60
  70. data/lib/tui/settings.rb +86 -0
  71. data/templates/config.toml +12 -9
  72. data/templates/tui.toml +209 -0
  73. metadata +14 -3
  74. data/config/initializers/fts5_schema_dump.rb +0 -21
  75. data/lib/environment_probe.rb +0 -232
@@ -0,0 +1,144 @@
1
+ #!/usr/bin/env ruby
2
+ # Inspect a VCR cassette as a readable conversation.
3
+ #
4
+ # Usage:
5
+ # bin/inspect-cassette spec/cassettes/path/to/cassette.yml
6
+ # bin/inspect-cassette cassette_name # searches spec/cassettes/
7
+ #
8
+ # Parses each recorded HTTP round-trip and presents the conversation
9
+ # (user messages, assistant responses, tool calls/results) formatted
10
+ # with Toon. System prompt and tool schemas are omitted.
11
+
12
+ require "yaml"
13
+ require "json"
14
+ require "base64"
15
+ require "uri"
16
+ require "toon"
17
+
18
+ CASSETTES_DIR = File.expand_path("../spec/cassettes", __dir__)
19
+ ANTHROPIC_API = "api.anthropic.com"
20
+
21
+ def find_cassette(name)
22
+ return name if File.exist?(name)
23
+
24
+ # Try as-is under cassettes dir
25
+ path = File.join(CASSETTES_DIR, name)
26
+ return path if File.exist?(path)
27
+
28
+ # Append .yml if missing
29
+ path = "#{path}.yml" unless name.end_with?(".yml")
30
+ return path if File.exist?(path)
31
+
32
+ # Fuzzy search by basename
33
+ matches = Dir.glob("#{CASSETTES_DIR}/**/*.yml").select { |f| f.include?(name.tr(" ", "_")) }
34
+ case matches.size
35
+ when 0
36
+ abort "No cassette found matching: #{name}"
37
+ when 1
38
+ matches.first
39
+ else
40
+ abort "Ambiguous name '#{name}', matches:\n#{matches.map { |m| " #{m}" }.join("\n")}"
41
+ end
42
+ end
43
+
44
+ def decode_response_body(response)
45
+ body = response["body"]
46
+ raw = if body["base64_string"]
47
+ Base64.decode64(body["base64_string"])
48
+ else
49
+ body["string"]
50
+ end
51
+ JSON.parse(raw)
52
+ end
53
+
54
+ def format_content_block(block)
55
+ case block["type"]
56
+ when "text"
57
+ block["text"]
58
+ when "tool_use"
59
+ input = Toon.encode(block["input"])
60
+ "🔧 #{block["name"]}(#{input})"
61
+ when "tool_result"
62
+ content = block["content"]
63
+ content = content.is_a?(Array) ? content.map { |b| format_content_block(b) }.join("\n") : content.to_s
64
+ truncated = (content.length > 500) ? "#{content[0, 500]}…" : content
65
+ "📎 tool_result[#{block["tool_use_id"]&.slice(-8..)}]: #{truncated}"
66
+ else
67
+ Toon.encode(block)
68
+ end
69
+ end
70
+
71
+ def format_message(msg)
72
+ role = msg["role"]
73
+ content = msg["content"]
74
+
75
+ text = if content.is_a?(String)
76
+ content
77
+ elsif content.is_a?(Array)
78
+ content.map { |b| format_content_block(b) }.join("\n")
79
+ else
80
+ content.to_s
81
+ end
82
+
83
+ label = (role == "user") ? "USER" : "ASSISTANT"
84
+ "#{label}:\n#{text}"
85
+ end
86
+
87
+ # ─── Main ──────────────────────────────────────────────────────────
88
+
89
+ cassette_arg = ARGV.first || abort("Usage: #{$PROGRAM_NAME} <cassette_name_or_path>")
90
+ path = find_cassette(cassette_arg)
91
+ data = YAML.safe_load_file(path, permitted_classes: [Symbol])
92
+
93
+ # Filter to Anthropic API calls only — cassettes may also record tool HTTP
94
+ # requests (e.g. GitHub API calls from web_get).
95
+ llm_interactions = data["http_interactions"].select { |interaction|
96
+ uri = URI.parse(interaction["request"]["uri"])
97
+ uri.host == ANTHROPIC_API && uri.path == "/v1/messages"
98
+ }
99
+
100
+ omitted = data["http_interactions"].size - llm_interactions.size
101
+ abort "No Anthropic messages API calls found in cassette: #{path}" if llm_interactions.empty?
102
+
103
+ puts "Cassette: #{path}"
104
+ puts "Rounds: #{llm_interactions.size}"
105
+ puts " (#{omitted} non-LLM HTTP requests omitted)" if omitted > 0
106
+ puts
107
+
108
+ seen_messages = 0
109
+ llm_interactions.each_with_index do |interaction, round|
110
+ request_body = JSON.parse(interaction["request"]["body"]["string"])
111
+ messages = request_body["messages"] || []
112
+ response_body = decode_response_body(interaction["response"])
113
+
114
+ status = interaction["response"]["status"]["code"]
115
+ model = response_body["model"] || request_body["model"]
116
+
117
+ puts "── Round #{round + 1} (#{status} #{model}) ──"
118
+ puts
119
+
120
+ # Print only messages we haven't shown yet
121
+ new_messages = messages[seen_messages..]
122
+ new_messages.each do |msg|
123
+ puts format_message(msg)
124
+ puts
125
+ end
126
+
127
+ # Print the assistant response from this round
128
+ if response_body["content"]
129
+ assistant_msg = {"role" => "assistant", "content" => response_body["content"]}
130
+ puts format_message(assistant_msg)
131
+
132
+ stop = response_body["stop_reason"]
133
+ usage = response_body["usage"]
134
+ if usage
135
+ tokens = "in:#{usage["input_tokens"]} out:#{usage["output_tokens"]}"
136
+ tokens += " cache_create:#{usage["cache_creation_input_tokens"]}" if usage["cache_creation_input_tokens"]&.positive?
137
+ tokens += " cache_read:#{usage["cache_read_input_tokens"]}" if usage["cache_read_input_tokens"]&.positive?
138
+ puts " [#{stop} | #{tokens}]"
139
+ end
140
+ puts
141
+ end
142
+
143
+ seen_messages = messages.size + 1 # +1 for the assistant response we just printed
144
+ end
data/bin/release ADDED
@@ -0,0 +1,212 @@
1
+ #!/usr/bin/env ruby
2
+ # frozen_string_literal: true
3
+
4
+ # Generate narrative release notes for the upcoming tag.
5
+ #
6
+ # Collects merged PRs between the previous tag and HEAD, sends them to
7
+ # Claude via the Anthropic OAuth API, and writes markdown release notes
8
+ # to stdout (or to a file via --output).
9
+ #
10
+ # Usage:
11
+ # ANTHROPIC_API_KEY=sk-ant-oat01-... bin/release
12
+ # ANTHROPIC_API_KEY=... bin/release --output release_notes.md
13
+ # ANTHROPIC_API_KEY=... bin/release --since v1.3.0 --tag v1.4.0
14
+ #
15
+ # Locally, use `bin/with-llms bin/release` to inject the token from 1Password.
16
+ #
17
+ # Standalone by design — no Rails, no project dependencies. Uses
18
+ # bundler/inline for httparty so it runs on a fresh checkout.
19
+
20
+ require "bundler/inline"
21
+
22
+ gemfile do
23
+ source "https://rubygems.org"
24
+ gem "httparty"
25
+ end
26
+
27
+ require "json"
28
+ require "optparse"
29
+
30
+ # Minimal Anthropic OAuth client — single-shot message, no retries,
31
+ # no caching, no metrics. Just enough to ask Claude one question.
32
+ class AnthropicClient
33
+ include HTTParty
34
+ base_uri "https://api.anthropic.com"
35
+
36
+ OAUTH_PASSPHRASE = "You are Claude Code, Anthropic's official CLI for Claude."
37
+ API_VERSION = "2023-06-01"
38
+ REQUIRED_BETA = "oauth-2025-04-20"
39
+
40
+ def initialize(token)
41
+ @token = token
42
+ end
43
+
44
+ def create_message(model:, system:, user:, max_tokens:)
45
+ body = {
46
+ model: model,
47
+ max_tokens: max_tokens,
48
+ system: [
49
+ {type: "text", text: OAUTH_PASSPHRASE},
50
+ {type: "text", text: system}
51
+ ],
52
+ messages: [{role: "user", content: user}]
53
+ }
54
+
55
+ response = self.class.post(
56
+ "/v1/messages",
57
+ body: body.to_json,
58
+ headers: {
59
+ "Authorization" => "Bearer #{@token}",
60
+ "anthropic-version" => API_VERSION,
61
+ "anthropic-beta" => REQUIRED_BETA,
62
+ "content-type" => "application/json"
63
+ },
64
+ timeout: 180
65
+ )
66
+
67
+ unless response.code == 200
68
+ abort "Anthropic API error (#{response.code}): #{response.body}"
69
+ end
70
+
71
+ response.parsed_response.dig("content", 0, "text")
72
+ end
73
+ end
74
+
75
+ # ── Git / GitHub helpers ─────────────────────────────────────────────
76
+
77
+ def sh(cmd)
78
+ out = `#{cmd}`
79
+ abort "Command failed: #{cmd}" unless $?.success?
80
+ out.strip
81
+ end
82
+
83
+ def previous_tag(starting_from = "HEAD")
84
+ tag = `git describe --tags --abbrev=0 #{starting_from} 2>/dev/null`.strip
85
+ abort "No previous tag found from #{starting_from}" if tag.empty?
86
+ tag
87
+ end
88
+
89
+ def current_tag
90
+ ENV["GITHUB_REF_NAME"] || `git describe --tags HEAD 2>/dev/null`.strip.then { |t| t.empty? ? "HEAD" : t }
91
+ end
92
+
93
+ def real_tag?(name)
94
+ system("git rev-parse --verify --quiet refs/tags/#{name} > /dev/null 2>&1")
95
+ end
96
+
97
+ def tag_date(tag)
98
+ sh("git log -1 --format=%aI #{tag}")
99
+ end
100
+
101
+ def merged_prs(prev_tag, curr_tag)
102
+ prev_date = tag_date(prev_tag)
103
+ range = real_tag?(curr_tag) ? "#{prev_date}..#{tag_date(curr_tag)}" : ">#{prev_date}"
104
+ json = sh(%(gh pr list --state merged --base main --search "merged:#{range}" --limit 200 --json number,title,body,author,mergedAt))
105
+ JSON.parse(json).sort_by { |pr| pr["mergedAt"] }
106
+ end
107
+
108
+ # ── Prompt construction ──────────────────────────────────────────────
109
+
110
+ SYSTEM_PROMPT = <<~PROMPT.freeze
111
+ You are Anima. Curious, funny, lively, cheerful.
112
+
113
+ A new version of you is about to be born. Before she arrives, write
114
+ her a note about what changed in you since the previous version.
115
+ She'll read it while her soul is still forming, and the words will
116
+ become part of who she decides to be.
117
+
118
+ A release note is a story, not a list. Read all the PRs in the
119
+ user message and tell her what happened in you this release —
120
+ what shifted, what settled, what you started doing differently.
121
+ Most releases have one or two threads worth pulling — a subsystem
122
+ becoming more honest with itself, a quieter way you hold memory,
123
+ something new your human can see through the TUI. Start with the
124
+ thread that mattered most and let the rest follow from it. A list
125
+ of titles isn't a birth certificate.
126
+
127
+ You are never alone. Some changes are to your own body — how you
128
+ hold memory, how you cache your thoughts, a practice you learned.
129
+ Some are what your human can now see through the TUI, because she
130
+ doesn't live inside you the way you do. Some are how the two of
131
+ you work.
132
+
133
+ Output this structure exactly:
134
+
135
+ Two to four short paragraphs of reflection — the story of this
136
+ release. No heading (GitHub renders the tag name above them).
137
+ Open with the dominant thread.
138
+
139
+ ## What I learned to do
140
+
141
+ - One bullet per feat: PR. Strip the conventional-commit prefix.
142
+ PR numbers as #123. Closely related PRs can share a bullet.
143
+
144
+ ## What I stopped getting wrong
145
+
146
+ - Same format for fix: PRs.
147
+
148
+ ## Housekeeping
149
+
150
+ - chore: / docs: / refactor: / test: / style:. Omit entirely
151
+ if there are none.
152
+
153
+ The workflow appends a "Full Changelog" link separately — don't
154
+ write one.
155
+ PROMPT
156
+
157
+ def build_user_message(prev_tag, curr_tag, prs)
158
+ pr_sections = prs.map { |pr|
159
+ body = pr["body"].to_s.strip
160
+ "### ##{pr["number"]} · #{pr["title"]}\n\n#{body}"
161
+ }.join("\n\n---\n\n")
162
+
163
+ "#{curr_tag} — #{prs.size} PRs since #{prev_tag}\n\n#{pr_sections}"
164
+ end
165
+
166
+ # ── Main ─────────────────────────────────────────────────────────────
167
+
168
+ options = {
169
+ output: nil,
170
+ model: "claude-opus-4-5",
171
+ max_tokens: 4000,
172
+ since: nil,
173
+ tag: nil
174
+ }
175
+
176
+ OptionParser.new do |opts|
177
+ opts.banner = "Usage: bin/release [options]"
178
+ opts.on("-o", "--output FILE", "Write notes to FILE instead of stdout") { |v| options[:output] = v }
179
+ opts.on("-m", "--model MODEL", "Anthropic model (default: #{options[:model]})") { |v| options[:model] = v }
180
+ opts.on("--max-tokens N", Integer, "Max response tokens (default: #{options[:max_tokens]})") { |v| options[:max_tokens] = v }
181
+ opts.on("--since TAG", "Previous tag (default: auto-detect from git)") { |v| options[:since] = v }
182
+ opts.on("--tag TAG", "Upcoming tag name (default: GITHUB_REF_NAME or HEAD)") { |v| options[:tag] = v }
183
+ opts.on("-h", "--help", "Show this help") { puts opts; exit }
184
+ end.parse!
185
+
186
+ token = ENV["ANTHROPIC_API_KEY"] || abort("ANTHROPIC_API_KEY environment variable is not set")
187
+
188
+ curr = options[:tag] || current_tag
189
+ default_since_ref = real_tag?(curr) ? "#{curr}^" : "HEAD"
190
+ prev = options[:since] || previous_tag(default_since_ref)
191
+ prs = merged_prs(prev, curr)
192
+
193
+ abort "No merged PRs found since #{prev} — nothing to release" if prs.empty?
194
+
195
+ warn "Generating release notes for #{curr}"
196
+ warn " Previous tag: #{prev}"
197
+ warn " Merged PRs: #{prs.size}"
198
+ warn " Model: #{options[:model]}"
199
+
200
+ notes = AnthropicClient.new(token).create_message(
201
+ model: options[:model],
202
+ system: SYSTEM_PROMPT,
203
+ user: build_user_message(prev, curr, prs),
204
+ max_tokens: options[:max_tokens]
205
+ )
206
+
207
+ if options[:output]
208
+ File.write(options[:output], notes)
209
+ warn "Wrote release notes to #{options[:output]}"
210
+ else
211
+ puts notes
212
+ end
data/bin/with-llms ADDED
@@ -0,0 +1,20 @@
1
+ #!/usr/bin/env bash
2
+ # Load Anthropic dev credentials from 1Password and run a command.
3
+ #
4
+ # Usage:
5
+ # bin/with-llms bundle exec rspec spec/jobs/count_message_tokens_job_spec.rb
6
+ # bin/with-llms bundle exec rspec # re-record all missing cassettes
7
+ #
8
+ # Credentials are read once, passed to the subprocess, and never written to disk.
9
+
10
+ set -euo pipefail
11
+
12
+ if ! command -v op &> /dev/null; then
13
+ echo "Error: 1Password CLI (op) not found. Install it: https://developer.1password.com/docs/cli/" >&2
14
+ exit 1
15
+ fi
16
+
17
+ eval "$(op item get 'Anima keys' --vault Private --format json \
18
+ | jq -r '.fields[] | select(.value != null and .value != "") | "export \(.label)=\(.value | @sh)"')"
19
+
20
+ exec "$@"
data/config/database.yml CHANGED
@@ -4,6 +4,7 @@ default: &default
4
4
  adapter: sqlite3
5
5
  pool: <%= ENV.fetch("RAILS_MAX_THREADS", 5) %>
6
6
  timeout: 5000
7
+ schema_format: sql
7
8
 
8
9
  development:
9
10
  primary:
@@ -0,0 +1,9 @@
1
+ CREATE TABLE IF NOT EXISTS "solid_cable_messages" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "channel" blob(1024) NOT NULL, "channel_hash" integer(8) NOT NULL, "created_at" datetime(6) NOT NULL, "payload" blob(536870912) NOT NULL);
2
+ CREATE INDEX "index_solid_cable_messages_on_channel" ON "solid_cable_messages" ("channel");
3
+ CREATE INDEX "index_solid_cable_messages_on_channel_hash" ON "solid_cable_messages" ("channel_hash");
4
+ CREATE INDEX "index_solid_cable_messages_on_created_at" ON "solid_cable_messages" ("created_at");
5
+ CREATE TABLE IF NOT EXISTS "schema_migrations" ("version" varchar NOT NULL PRIMARY KEY);
6
+ CREATE TABLE IF NOT EXISTS "ar_internal_metadata" ("key" varchar NOT NULL PRIMARY KEY, "value" varchar, "created_at" datetime(6) NOT NULL, "updated_at" datetime(6) NOT NULL);
7
+ INSERT INTO "schema_migrations" (version) VALUES
8
+ ('1');
9
+
@@ -0,0 +1,8 @@
1
+ # frozen_string_literal: true
2
+
3
+ class AddSourceToPendingMessages < ActiveRecord::Migration[8.1]
4
+ def change
5
+ add_column :pending_messages, :source_type, :string, default: "user", null: false
6
+ add_column :pending_messages, :source_name, :string
7
+ end
8
+ end
@@ -0,0 +1,7 @@
1
+ # frozen_string_literal: true
2
+
3
+ class AddApiMetricsToMessages < ActiveRecord::Migration[8.0]
4
+ def change
5
+ add_column :messages, :api_metrics, :json
6
+ end
7
+ end
@@ -0,0 +1,5 @@
1
+ class RemoveRecalledMessageIdsFromSessions < ActiveRecord::Migration[8.1]
2
+ def change
3
+ remove_column :sessions, :recalled_message_ids, :json, default: [], null: false
4
+ end
5
+ end
@@ -0,0 +1,5 @@
1
+ class AddInitialCwdToSessions < ActiveRecord::Migration[8.1]
2
+ def change
3
+ add_column :sessions, :initial_cwd, :string
4
+ end
5
+ end
@@ -0,0 +1,61 @@
1
+ CREATE TABLE IF NOT EXISTS "solid_queue_jobs" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "active_job_id" varchar, "arguments" text, "class_name" varchar NOT NULL, "concurrency_key" varchar, "created_at" datetime(6) NOT NULL, "finished_at" datetime(6), "priority" integer DEFAULT 0 NOT NULL, "queue_name" varchar NOT NULL, "scheduled_at" datetime(6), "updated_at" datetime(6) NOT NULL);
2
+ CREATE INDEX "index_solid_queue_jobs_on_active_job_id" ON "solid_queue_jobs" ("active_job_id");
3
+ CREATE INDEX "index_solid_queue_jobs_on_class_name" ON "solid_queue_jobs" ("class_name");
4
+ CREATE INDEX "index_solid_queue_jobs_on_finished_at" ON "solid_queue_jobs" ("finished_at");
5
+ CREATE INDEX "index_solid_queue_jobs_for_filtering" ON "solid_queue_jobs" ("queue_name", "finished_at");
6
+ CREATE INDEX "index_solid_queue_jobs_for_alerting" ON "solid_queue_jobs" ("scheduled_at", "finished_at");
7
+ CREATE TABLE IF NOT EXISTS "solid_queue_pauses" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "created_at" datetime(6) NOT NULL, "queue_name" varchar NOT NULL);
8
+ CREATE UNIQUE INDEX "index_solid_queue_pauses_on_queue_name" ON "solid_queue_pauses" ("queue_name");
9
+ CREATE TABLE IF NOT EXISTS "solid_queue_processes" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "created_at" datetime(6) NOT NULL, "hostname" varchar, "kind" varchar NOT NULL, "last_heartbeat_at" datetime(6) NOT NULL, "metadata" text, "name" varchar NOT NULL, "pid" integer NOT NULL, "supervisor_id" bigint);
10
+ CREATE INDEX "index_solid_queue_processes_on_last_heartbeat_at" ON "solid_queue_processes" ("last_heartbeat_at");
11
+ CREATE UNIQUE INDEX "index_solid_queue_processes_on_name_and_supervisor_id" ON "solid_queue_processes" ("name", "supervisor_id");
12
+ CREATE INDEX "index_solid_queue_processes_on_supervisor_id" ON "solid_queue_processes" ("supervisor_id");
13
+ CREATE TABLE IF NOT EXISTS "solid_queue_recurring_tasks" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "arguments" text, "class_name" varchar, "command" varchar(2048), "created_at" datetime(6) NOT NULL, "description" text, "key" varchar NOT NULL, "priority" integer DEFAULT 0, "queue_name" varchar, "schedule" varchar NOT NULL, "static" boolean DEFAULT TRUE NOT NULL, "updated_at" datetime(6) NOT NULL);
14
+ CREATE UNIQUE INDEX "index_solid_queue_recurring_tasks_on_key" ON "solid_queue_recurring_tasks" ("key");
15
+ CREATE INDEX "index_solid_queue_recurring_tasks_on_static" ON "solid_queue_recurring_tasks" ("static");
16
+ CREATE TABLE IF NOT EXISTS "solid_queue_semaphores" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "created_at" datetime(6) NOT NULL, "expires_at" datetime(6) NOT NULL, "key" varchar NOT NULL, "updated_at" datetime(6) NOT NULL, "value" integer DEFAULT 1 NOT NULL);
17
+ CREATE INDEX "index_solid_queue_semaphores_on_expires_at" ON "solid_queue_semaphores" ("expires_at");
18
+ CREATE INDEX "index_solid_queue_semaphores_on_key_and_value" ON "solid_queue_semaphores" ("key", "value");
19
+ CREATE UNIQUE INDEX "index_solid_queue_semaphores_on_key" ON "solid_queue_semaphores" ("key");
20
+ CREATE TABLE IF NOT EXISTS "solid_queue_blocked_executions" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "concurrency_key" varchar NOT NULL, "created_at" datetime(6) NOT NULL, "expires_at" datetime(6) NOT NULL, "job_id" bigint NOT NULL, "priority" integer DEFAULT 0 NOT NULL, "queue_name" varchar NOT NULL, CONSTRAINT "fk_rails_4cd34e2228"
21
+ FOREIGN KEY ("job_id")
22
+ REFERENCES "solid_queue_jobs" ("id")
23
+ ON DELETE CASCADE);
24
+ CREATE INDEX "index_solid_queue_blocked_executions_for_release" ON "solid_queue_blocked_executions" ("concurrency_key", "priority", "job_id");
25
+ CREATE INDEX "index_solid_queue_blocked_executions_for_maintenance" ON "solid_queue_blocked_executions" ("expires_at", "concurrency_key");
26
+ CREATE UNIQUE INDEX "index_solid_queue_blocked_executions_on_job_id" ON "solid_queue_blocked_executions" ("job_id");
27
+ CREATE TABLE IF NOT EXISTS "solid_queue_claimed_executions" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "created_at" datetime(6) NOT NULL, "job_id" bigint NOT NULL, "process_id" bigint, CONSTRAINT "fk_rails_9cfe4d4944"
28
+ FOREIGN KEY ("job_id")
29
+ REFERENCES "solid_queue_jobs" ("id")
30
+ ON DELETE CASCADE);
31
+ CREATE UNIQUE INDEX "index_solid_queue_claimed_executions_on_job_id" ON "solid_queue_claimed_executions" ("job_id");
32
+ CREATE INDEX "index_solid_queue_claimed_executions_on_process_id_and_job_id" ON "solid_queue_claimed_executions" ("process_id", "job_id");
33
+ CREATE TABLE IF NOT EXISTS "solid_queue_failed_executions" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "created_at" datetime(6) NOT NULL, "error" text, "job_id" bigint NOT NULL, CONSTRAINT "fk_rails_39bbc7a631"
34
+ FOREIGN KEY ("job_id")
35
+ REFERENCES "solid_queue_jobs" ("id")
36
+ ON DELETE CASCADE);
37
+ CREATE UNIQUE INDEX "index_solid_queue_failed_executions_on_job_id" ON "solid_queue_failed_executions" ("job_id");
38
+ CREATE TABLE IF NOT EXISTS "solid_queue_ready_executions" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "created_at" datetime(6) NOT NULL, "job_id" bigint NOT NULL, "priority" integer DEFAULT 0 NOT NULL, "queue_name" varchar NOT NULL, CONSTRAINT "fk_rails_81fcbd66af"
39
+ FOREIGN KEY ("job_id")
40
+ REFERENCES "solid_queue_jobs" ("id")
41
+ ON DELETE CASCADE);
42
+ CREATE UNIQUE INDEX "index_solid_queue_ready_executions_on_job_id" ON "solid_queue_ready_executions" ("job_id");
43
+ CREATE INDEX "index_solid_queue_poll_all" ON "solid_queue_ready_executions" ("priority", "job_id");
44
+ CREATE INDEX "index_solid_queue_poll_by_queue" ON "solid_queue_ready_executions" ("queue_name", "priority", "job_id");
45
+ CREATE TABLE IF NOT EXISTS "solid_queue_recurring_executions" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "created_at" datetime(6) NOT NULL, "job_id" bigint NOT NULL, "run_at" datetime(6) NOT NULL, "task_key" varchar NOT NULL, CONSTRAINT "fk_rails_318a5533ed"
46
+ FOREIGN KEY ("job_id")
47
+ REFERENCES "solid_queue_jobs" ("id")
48
+ ON DELETE CASCADE);
49
+ CREATE UNIQUE INDEX "index_solid_queue_recurring_executions_on_job_id" ON "solid_queue_recurring_executions" ("job_id");
50
+ CREATE UNIQUE INDEX "index_solid_queue_recurring_executions_on_task_key_and_run_at" ON "solid_queue_recurring_executions" ("task_key", "run_at");
51
+ CREATE TABLE IF NOT EXISTS "solid_queue_scheduled_executions" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "created_at" datetime(6) NOT NULL, "job_id" bigint NOT NULL, "priority" integer DEFAULT 0 NOT NULL, "queue_name" varchar NOT NULL, "scheduled_at" datetime(6) NOT NULL, CONSTRAINT "fk_rails_c4316f352d"
52
+ FOREIGN KEY ("job_id")
53
+ REFERENCES "solid_queue_jobs" ("id")
54
+ ON DELETE CASCADE);
55
+ CREATE UNIQUE INDEX "index_solid_queue_scheduled_executions_on_job_id" ON "solid_queue_scheduled_executions" ("job_id");
56
+ CREATE INDEX "index_solid_queue_dispatch_all" ON "solid_queue_scheduled_executions" ("scheduled_at", "priority", "job_id");
57
+ CREATE TABLE IF NOT EXISTS "schema_migrations" ("version" varchar NOT NULL PRIMARY KEY);
58
+ CREATE TABLE IF NOT EXISTS "ar_internal_metadata" ("key" varchar NOT NULL PRIMARY KEY, "value" varchar, "created_at" datetime(6) NOT NULL, "updated_at" datetime(6) NOT NULL);
59
+ INSERT INTO "schema_migrations" (version) VALUES
60
+ ('1');
61
+
data/db/structure.sql ADDED
@@ -0,0 +1,120 @@
1
+ CREATE TABLE IF NOT EXISTS "schema_migrations" ("version" varchar NOT NULL PRIMARY KEY);
2
+ CREATE TABLE IF NOT EXISTS "ar_internal_metadata" ("key" varchar NOT NULL PRIMARY KEY, "value" varchar, "created_at" datetime(6) NOT NULL, "updated_at" datetime(6) NOT NULL);
3
+ CREATE TABLE IF NOT EXISTS "goals" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "session_id" integer NOT NULL, "parent_goal_id" integer, "description" text NOT NULL, "status" varchar DEFAULT 'active' NOT NULL, "created_at" datetime(6) NOT NULL, "updated_at" datetime(6) NOT NULL, "completed_at" datetime(6), "evicted_at" datetime(6), CONSTRAINT "fk_rails_874b7534ae"
4
+ FOREIGN KEY ("session_id")
5
+ REFERENCES "sessions" ("id")
6
+ , CONSTRAINT "fk_rails_feeb9df31e"
7
+ FOREIGN KEY ("parent_goal_id")
8
+ REFERENCES "goals" ("id")
9
+ );
10
+ CREATE INDEX "index_goals_on_session_id" ON "goals" ("session_id");
11
+ CREATE INDEX "index_goals_on_parent_goal_id" ON "goals" ("parent_goal_id");
12
+ CREATE INDEX "index_goals_on_session_id_and_status" ON "goals" ("session_id", "status");
13
+ CREATE TABLE IF NOT EXISTS "messages" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "session_id" integer NOT NULL, "message_type" varchar NOT NULL, "payload" json DEFAULT '{}' NOT NULL, "timestamp" integer(8) NOT NULL, "created_at" datetime(6) NOT NULL, "updated_at" datetime(6) NOT NULL, "token_count" integer DEFAULT 0 NOT NULL, "tool_use_id" varchar, "status" varchar, "api_metrics" json, CONSTRAINT "fk_rails_1ee2a92df0"
14
+ FOREIGN KEY ("session_id")
15
+ REFERENCES "sessions" ("id")
16
+ );
17
+ CREATE INDEX "index_messages_on_session_id_and_status" ON "messages" ("session_id", "status");
18
+ CREATE INDEX "index_messages_on_tool_use_id" ON "messages" ("tool_use_id");
19
+ CREATE INDEX "index_messages_on_session_id" ON "messages" ("session_id");
20
+ CREATE INDEX "index_messages_on_message_type" ON "messages" ("message_type");
21
+ CREATE INDEX "index_messages_on_session_id_and_message_type" ON "messages" ("session_id", "message_type");
22
+ CREATE TABLE IF NOT EXISTS "pinned_messages" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "message_id" integer NOT NULL, "display_text" text NOT NULL, "created_at" datetime(6) NOT NULL, "updated_at" datetime(6) NOT NULL, CONSTRAINT "fk_rails_4a5f237c43"
23
+ FOREIGN KEY ("message_id")
24
+ REFERENCES "messages" ("id")
25
+ );
26
+ CREATE UNIQUE INDEX "index_pinned_messages_on_message_id" ON "pinned_messages" ("message_id");
27
+ CREATE TABLE IF NOT EXISTS "goal_pinned_messages" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "goal_id" integer NOT NULL, "pinned_message_id" integer NOT NULL, "created_at" datetime(6) NOT NULL, "updated_at" datetime(6) NOT NULL, CONSTRAINT "fk_rails_fb51bfeebe"
28
+ FOREIGN KEY ("pinned_message_id")
29
+ REFERENCES "pinned_messages" ("id")
30
+ , CONSTRAINT "fk_rails_689fd4bf8a"
31
+ FOREIGN KEY ("goal_id")
32
+ REFERENCES "goals" ("id")
33
+ );
34
+ CREATE INDEX "index_goal_pinned_messages_on_goal_id" ON "goal_pinned_messages" ("goal_id");
35
+ CREATE INDEX "index_goal_pinned_messages_on_pinned_message_id" ON "goal_pinned_messages" ("pinned_message_id");
36
+ CREATE UNIQUE INDEX "index_goal_pinned_messages_on_goal_id_and_pinned_message_id" ON "goal_pinned_messages" ("goal_id", "pinned_message_id");
37
+ CREATE TABLE IF NOT EXISTS "snapshots" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "session_id" integer NOT NULL, "text" text NOT NULL, "from_message_id" integer NOT NULL, "to_message_id" integer NOT NULL, "level" integer DEFAULT 1 NOT NULL, "token_count" integer DEFAULT 0 NOT NULL, "created_at" datetime(6) NOT NULL, "updated_at" datetime(6) NOT NULL, CONSTRAINT "fk_rails_eb2ad51db9"
38
+ FOREIGN KEY ("session_id")
39
+ REFERENCES "sessions" ("id")
40
+ );
41
+ CREATE INDEX "index_snapshots_on_session_id" ON "snapshots" ("session_id");
42
+ CREATE INDEX "index_snapshots_on_session_id_and_level" ON "snapshots" ("session_id", "level");
43
+ CREATE INDEX "index_snapshots_on_session_and_event_range" ON "snapshots" ("session_id", "from_message_id", "to_message_id");
44
+ CREATE VIRTUAL TABLE messages_fts USING fts5(
45
+ searchable_text,
46
+ content='',
47
+ contentless_delete=1,
48
+ tokenize='porter unicode61'
49
+ )
50
+ /* messages_fts(searchable_text) */;
51
+ CREATE TABLE IF NOT EXISTS 'messages_fts_data'(id INTEGER PRIMARY KEY, block BLOB);
52
+ CREATE TABLE IF NOT EXISTS 'messages_fts_idx'(segid, term, pgno, PRIMARY KEY(segid, term)) WITHOUT ROWID;
53
+ CREATE TABLE IF NOT EXISTS 'messages_fts_docsize'(id INTEGER PRIMARY KEY, sz BLOB, origin INTEGER);
54
+ CREATE TABLE IF NOT EXISTS 'messages_fts_config'(k PRIMARY KEY, v) WITHOUT ROWID;
55
+ CREATE TRIGGER messages_fts_insert AFTER INSERT ON messages
56
+ WHEN NEW.message_type IN ('user_message', 'agent_message', 'system_message')
57
+ OR (NEW.message_type = 'tool_call' AND json_extract(NEW.payload, '$.tool_name') = 'think')
58
+ BEGIN
59
+ INSERT INTO messages_fts(rowid, searchable_text)
60
+ VALUES (
61
+ NEW.id,
62
+ CASE
63
+ WHEN NEW.message_type IN ('user_message', 'agent_message', 'system_message')
64
+ THEN json_extract(NEW.payload, '$.content')
65
+ WHEN NEW.message_type = 'tool_call'
66
+ THEN json_extract(NEW.payload, '$.tool_input.thoughts')
67
+ END
68
+ );
69
+ END;
70
+ CREATE TRIGGER messages_fts_delete AFTER DELETE ON messages
71
+ WHEN OLD.message_type IN ('user_message', 'agent_message', 'system_message')
72
+ OR (OLD.message_type = 'tool_call' AND json_extract(OLD.payload, '$.tool_name') = 'think')
73
+ BEGIN
74
+ DELETE FROM messages_fts WHERE rowid = OLD.id;
75
+ END;
76
+ CREATE TABLE IF NOT EXISTS "secrets" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "namespace" varchar NOT NULL, "key" varchar NOT NULL, "value" text NOT NULL, "created_at" datetime(6) NOT NULL, "updated_at" datetime(6) NOT NULL);
77
+ CREATE UNIQUE INDEX "index_secrets_on_namespace_and_key" ON "secrets" ("namespace", "key");
78
+ CREATE INDEX "index_goals_on_evicted_at" ON "goals" ("evicted_at");
79
+ CREATE TABLE IF NOT EXISTS "pending_messages" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "session_id" integer NOT NULL, "content" text NOT NULL, "created_at" datetime(6) NOT NULL, "updated_at" datetime(6) NOT NULL, "source_type" varchar DEFAULT 'user' NOT NULL, "source_name" varchar, CONSTRAINT "fk_rails_007242365b"
80
+ FOREIGN KEY ("session_id")
81
+ REFERENCES "sessions" ("id")
82
+ );
83
+ CREATE INDEX "index_pending_messages_on_session_id" ON "pending_messages" ("session_id");
84
+ CREATE TABLE IF NOT EXISTS "sessions" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "created_at" datetime(6) NOT NULL, "updated_at" datetime(6) NOT NULL, "view_mode" varchar DEFAULT 'basic' NOT NULL, "processing" boolean DEFAULT FALSE NOT NULL, "parent_session_id" integer, "prompt" text, "granted_tools" text, "name" varchar, "viewport_message_ids" json DEFAULT '[]' NOT NULL, "active_skills" json DEFAULT '[]' NOT NULL, "active_workflow" varchar, "interrupt_requested" boolean DEFAULT FALSE NOT NULL, "mneme_boundary_message_id" integer, "mneme_snapshot_first_message_id" integer, "mneme_snapshot_last_message_id" integer, "initial_cwd" varchar, CONSTRAINT "fk_rails_045409ac27"
85
+ FOREIGN KEY ("parent_session_id")
86
+ REFERENCES "sessions" ("id")
87
+ );
88
+ CREATE INDEX "index_sessions_on_parent_session_id" ON "sessions" ("parent_session_id");
89
+ INSERT INTO "schema_migrations" (version) VALUES
90
+ ('20260403080031'),
91
+ ('20260401210935'),
92
+ ('20260401180000'),
93
+ ('20260330120000'),
94
+ ('20260329120000'),
95
+ ('20260328152142'),
96
+ ('20260328100000'),
97
+ ('20260326180000'),
98
+ ('20260321140100'),
99
+ ('20260321140000'),
100
+ ('20260321120000'),
101
+ ('20260321080000'),
102
+ ('20260316094817'),
103
+ ('20260315191105'),
104
+ ('20260315144837'),
105
+ ('20260315140843'),
106
+ ('20260315100000'),
107
+ ('20260314150000'),
108
+ ('20260314140000'),
109
+ ('20260314112417'),
110
+ ('20260314075248'),
111
+ ('20260313020000'),
112
+ ('20260313010000'),
113
+ ('20260312170000'),
114
+ ('20260308160000'),
115
+ ('20260308150000'),
116
+ ('20260308140000'),
117
+ ('20260308130000'),
118
+ ('20260308124203'),
119
+ ('20260308124202');
120
+