trak_flow 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.envrc +3 -0
- data/CHANGELOG.md +69 -0
- data/COMMITS.md +196 -0
- data/Gemfile +8 -0
- data/Gemfile.lock +281 -0
- data/README.md +479 -0
- data/Rakefile +16 -0
- data/bin/tf +6 -0
- data/bin/tf_mcp +81 -0
- data/docs/.keep +0 -0
- data/docs/api/database.md +434 -0
- data/docs/api/ruby-library.md +349 -0
- data/docs/api/task-model.md +341 -0
- data/docs/assets/stylesheets/extra.css +53 -0
- data/docs/assets/trak_flow.jpg +0 -0
- data/docs/cli/admin-commands.md +369 -0
- data/docs/cli/dependency-commands.md +321 -0
- data/docs/cli/label-commands.md +222 -0
- data/docs/cli/overview.md +163 -0
- data/docs/cli/plan-commands.md +344 -0
- data/docs/cli/task-commands.md +333 -0
- data/docs/core-concepts/dependencies.md +232 -0
- data/docs/core-concepts/labels.md +217 -0
- data/docs/core-concepts/overview.md +178 -0
- data/docs/core-concepts/plans-workflows.md +264 -0
- data/docs/core-concepts/tasks.md +205 -0
- data/docs/getting-started/configuration.md +120 -0
- data/docs/getting-started/installation.md +79 -0
- data/docs/getting-started/quick-start.md +245 -0
- data/docs/index.md +169 -0
- data/docs/mcp/integration.md +302 -0
- data/docs/mcp/overview.md +206 -0
- data/docs/mcp/resources.md +284 -0
- data/docs/mcp/tools.md +457 -0
- data/examples/basic_usage.rb +365 -0
- data/examples/cli_demo.sh +314 -0
- data/examples/mcp/Gemfile +9 -0
- data/examples/mcp/Gemfile.lock +226 -0
- data/examples/mcp/http_demo.rb +232 -0
- data/examples/mcp/stdio_demo.rb +146 -0
- data/lib/trak_flow/cli/admin_commands.rb +136 -0
- data/lib/trak_flow/cli/config_commands.rb +260 -0
- data/lib/trak_flow/cli/dep_commands.rb +71 -0
- data/lib/trak_flow/cli/label_commands.rb +76 -0
- data/lib/trak_flow/cli/main_commands.rb +386 -0
- data/lib/trak_flow/cli/plan_commands.rb +185 -0
- data/lib/trak_flow/cli/workflow_commands.rb +133 -0
- data/lib/trak_flow/cli.rb +110 -0
- data/lib/trak_flow/config/defaults.yml +114 -0
- data/lib/trak_flow/config/section.rb +74 -0
- data/lib/trak_flow/config.rb +276 -0
- data/lib/trak_flow/graph/dependency_graph.rb +288 -0
- data/lib/trak_flow/id_generator.rb +52 -0
- data/lib/trak_flow/mcp/resources/base_resource.rb +25 -0
- data/lib/trak_flow/mcp/resources/dependency_graph.rb +31 -0
- data/lib/trak_flow/mcp/resources/label_list.rb +21 -0
- data/lib/trak_flow/mcp/resources/plan_by_id.rb +27 -0
- data/lib/trak_flow/mcp/resources/plan_list.rb +21 -0
- data/lib/trak_flow/mcp/resources/task_by_id.rb +31 -0
- data/lib/trak_flow/mcp/resources/task_list.rb +21 -0
- data/lib/trak_flow/mcp/resources/task_next.rb +30 -0
- data/lib/trak_flow/mcp/resources/workflow_by_id.rb +27 -0
- data/lib/trak_flow/mcp/resources/workflow_list.rb +21 -0
- data/lib/trak_flow/mcp/server.rb +140 -0
- data/lib/trak_flow/mcp/tools/base_tool.rb +29 -0
- data/lib/trak_flow/mcp/tools/comment_add.rb +33 -0
- data/lib/trak_flow/mcp/tools/dep_add.rb +34 -0
- data/lib/trak_flow/mcp/tools/dep_remove.rb +25 -0
- data/lib/trak_flow/mcp/tools/label_add.rb +28 -0
- data/lib/trak_flow/mcp/tools/label_remove.rb +25 -0
- data/lib/trak_flow/mcp/tools/plan_add_step.rb +35 -0
- data/lib/trak_flow/mcp/tools/plan_create.rb +33 -0
- data/lib/trak_flow/mcp/tools/plan_run.rb +58 -0
- data/lib/trak_flow/mcp/tools/plan_start.rb +58 -0
- data/lib/trak_flow/mcp/tools/task_block.rb +27 -0
- data/lib/trak_flow/mcp/tools/task_close.rb +26 -0
- data/lib/trak_flow/mcp/tools/task_create.rb +51 -0
- data/lib/trak_flow/mcp/tools/task_defer.rb +27 -0
- data/lib/trak_flow/mcp/tools/task_start.rb +25 -0
- data/lib/trak_flow/mcp/tools/task_update.rb +36 -0
- data/lib/trak_flow/mcp/tools/workflow_discard.rb +28 -0
- data/lib/trak_flow/mcp/tools/workflow_summarize.rb +34 -0
- data/lib/trak_flow/mcp.rb +38 -0
- data/lib/trak_flow/models/comment.rb +71 -0
- data/lib/trak_flow/models/dependency.rb +96 -0
- data/lib/trak_flow/models/label.rb +90 -0
- data/lib/trak_flow/models/task.rb +188 -0
- data/lib/trak_flow/storage/database.rb +638 -0
- data/lib/trak_flow/storage/jsonl.rb +259 -0
- data/lib/trak_flow/time_parser.rb +15 -0
- data/lib/trak_flow/version.rb +5 -0
- data/lib/trak_flow.rb +100 -0
- data/mkdocs.yml +143 -0
- metadata +392 -0
data/lib/trak_flow/cli.rb
@@ -0,0 +1,110 @@
```ruby
# frozen_string_literal: true

module TrakFlow
  class CLI < Thor
    # NOTE: class_option :json is defined in main_commands.rb, NOT here.
    # This prevents Thor from consuming -j before routing to subcommands.

    no_commands do
      def json?
        options[:json]
      end

      def pastel
        @pastel ||= Pastel.new
      end

      def output_json(data)
        puts Oj.dump(data, mode: :compat, indent: 2)
      end

      def output(json_data, &human_block)
        if json?
          output_json(json_data)
        else
          human_block.call
        end
      end

      def with_database
        TrakFlow.ensure_initialized!

        db = Storage::Database.new
        db.connect

        jsonl = Storage::Jsonl.new
        jsonl.import(db) if jsonl.exists?

        yield db

        jsonl.export(db) if db.dirty?
      ensure
        db&.close
      end

      def colorize_status(status)
        case status
        when "open" then pastel.green(status)
        when "in_progress" then pastel.blue(status)
        when "blocked" then pastel.red(status)
        when "deferred" then pastel.yellow(status)
        when "closed" then pastel.dim(status)
        when "tombstone" then pastel.dim.strikethrough(status)
        when "pinned" then pastel.magenta(status)
        else status
        end
      end

      def colorize_priority(priority)
        case priority
        when 0 then pastel.red.bold("P0 (critical)")
        when 1 then pastel.red("P1 (high)")
        when 2 then pastel.yellow("P2 (medium)")
        when 3 then pastel.blue("P3 (low)")
        when 4 then pastel.dim("P4 (backlog)")
        else "P#{priority}"
        end
      end

      def print_tasks_table(tasks)
        table = TTY::Table.new(
          header: %w[ID Priority Status Type Title],
          rows: tasks.map do |task|
            [
              task.id,
              "P#{task.priority}",
              task.status,
              task.type,
              truncate(task.title, 50)
            ]
          end
        )
        puts table.render(:unicode, padding: [0, 1])
      end

      def truncate(str, length)
        return str if str.nil? || str.length <= length

        "#{str[0, length - 3]}..."
      end

      def status_icon(status)
        case status
        when "closed" then "[x]"
        when "in_progress" then "[~]"
        when "blocked" then "[!]"
        else "[ ]"
        end
      end
    end
  end
end

# Load subcommand classes first (main_commands references them)
require_relative "cli/config_commands"
require_relative "cli/dep_commands"
require_relative "cli/label_commands"
require_relative "cli/plan_commands"
require_relative "cli/workflow_commands"
require_relative "cli/admin_commands"
require_relative "cli/main_commands"
```
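For orientation, here is a minimal sketch of how a Thor subcommand might lean on the helpers above. The `list` command, the `--json` flag wiring, and the `db.tasks` query method are illustrative assumptions, not the gem's actual command definitions (those live in the `*_commands.rb` files listed earlier).

```ruby
# Hypothetical sketch only: command name, option wiring, and db.tasks are
# assumed for illustration; the real commands are defined in
# lib/trak_flow/cli/main_commands.rb and friends.
module TrakFlow
  class CLI < Thor
    desc "list", "List tasks"
    def list
      with_database do |db|
        tasks = db.tasks                    # assumed query method on Storage::Database
        output(tasks.map(&:to_h)) do        # JSON payload when --json is set
          print_tasks_table(tasks)          # human-readable table otherwise
        end
      end
    end
  end
end
```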
data/lib/trak_flow/config/defaults.yml
@@ -0,0 +1,114 @@
```yaml
# TrakFlow Bundled Defaults
#
# This file is the SINGLE SOURCE OF TRUTH for TrakFlow configuration schema.
# All attributes must be declared here (even if nil) to be recognized.
# It is bundled with the gem and loaded automatically at lowest priority.
#
# Loading priority (lowest to highest):
#   1. This file (bundled defaults)
#   2. XDG user config (~/.config/trak_flow/trak_flow.yml)
#   3. Project config (./.trak_flow/config.yml)
#   4. Environment variables (TF_*)
#   5. Programmatic (TrakFlow.configure block)
#
# Structure:
#   - defaults: Base values for all environments

# =============================================================================
# Shared Defaults (base for all environments)
# =============================================================================
defaults:
  # ---------------------------------------------------------------------------
  # Output Configuration
  # Access: TrakFlow.config.output.json, TrakFlow.config.output.stealth
  # ---------------------------------------------------------------------------
  output:
    json: false
    stealth: false

  # ---------------------------------------------------------------------------
  # Daemon Configuration
  # Access: TrakFlow.config.daemon.disabled, TrakFlow.config.daemon.auto_start
  # ---------------------------------------------------------------------------
  daemon:
    disabled: false
    auto_start: true
    flush_debounce: 5

  # ---------------------------------------------------------------------------
  # Sync Configuration
  # Access: TrakFlow.config.sync.auto_flush, TrakFlow.config.sync.auto_import, etc.
  # ---------------------------------------------------------------------------
  sync:
    auto_flush: true
    auto_import: true
    push: true

  # ---------------------------------------------------------------------------
  # Create Configuration
  # Access: TrakFlow.config.create.require_description
  # ---------------------------------------------------------------------------
  create:
    require_description: false

  # ---------------------------------------------------------------------------
  # Validation Configuration
  # Access: TrakFlow.config.validation.on_create, TrakFlow.config.validation.on_sync
  # ---------------------------------------------------------------------------
  validation:
    on_create: none
    on_sync: none

  # ---------------------------------------------------------------------------
  # ID Generation Configuration
  # Access: TrakFlow.config.id.max_collision_prob, etc.
  # ---------------------------------------------------------------------------
  id:
    max_collision_prob: 0.25
    min_hash_length: 4
    max_hash_length: 8

  # ---------------------------------------------------------------------------
  # Import Configuration
  # Access: TrakFlow.config.import.orphan_handling, TrakFlow.config.import.error_policy
  # ---------------------------------------------------------------------------
  import:
    orphan_handling: allow
    error_policy: warn

  # ---------------------------------------------------------------------------
  # Export Configuration
  # Access: TrakFlow.config.export.error_policy, etc.
  # ---------------------------------------------------------------------------
  export:
    error_policy: strict
    retry_attempts: 3
    retry_backoff_ms: 100
    skip_encoding_errors: false

  # ---------------------------------------------------------------------------
  # Storage Configuration
  # Access: TrakFlow.config.storage.jsonl_file
  # ---------------------------------------------------------------------------
  storage:
    jsonl_file: tasks.jsonl

  # ---------------------------------------------------------------------------
  # Database Configuration
  # Access: TrakFlow.config.database.path
  # ---------------------------------------------------------------------------
  database:
    path: ~/.config/trak_flow/tf.db

  # ---------------------------------------------------------------------------
  # MCP Server Configuration
  # Access: TrakFlow.config.mcp.port
  # ---------------------------------------------------------------------------
  mcp:
    port: 3333

  # ---------------------------------------------------------------------------
  # Actor Configuration
  # Access: TrakFlow.config.actor
  # ---------------------------------------------------------------------------
  actor: ~
```
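Any of these bundled defaults can be overridden higher in the priority chain described in the file header: a project-level `./.trak_flow/config.yml` only needs to restate the keys it changes, and `TF_*` environment variables with a double underscore map onto the nested sections. The sketch below assumes a hypothetical project config and environment variable purely for illustration; the specific values are arbitrary.

```ruby
# Sketch: reading the merged configuration after overrides.
# Assumes a project config at ./.trak_flow/config.yml containing, e.g.:
#
#   daemon:
#     auto_start: false
#   mcp:
#     port: 4000
#
# and an environment variable such as TF_OUTPUT__JSON=true
# (the double underscore addresses the nested output section).
require 'trak_flow'

TrakFlow.config.daemon.auto_start  # => false  (project config beats defaults.yml)
TrakFlow.config.mcp.port           # => 4000
TrakFlow.config.output.json        # => true   (environment variable beats both)
TrakFlow.config.storage.jsonl_file # => "tasks.jsonl" (untouched, bundled default)
```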
data/lib/trak_flow/config/section.rb
@@ -0,0 +1,74 @@
```ruby
# frozen_string_literal: true

module TrakFlow
  # ConfigSection provides method access to nested configuration hashes
  #
  # @example
  #   section = ConfigSection.new(host: 'localhost', port: 5432)
  #   section.host # => 'localhost'
  #   section.port # => 5432
  #
  class ConfigSection
    def initialize(hash = {})
      @data = {}
      (hash || {}).each do |key, value|
        @data[key.to_sym] = value.is_a?(Hash) ? ConfigSection.new(value) : value
      end
    end

    def method_missing(method, *args, &block)
      key = method.to_s
      if key.end_with?('=')
        @data[key.chomp('=').to_sym] = args.first
      elsif @data.key?(method)
        @data[method]
      else
        nil
      end
    end

    def respond_to_missing?(method, include_private = false)
      key = method.to_s.chomp('=').to_sym
      @data.key?(key) || super
    end

    def to_h
      @data.transform_values do |v|
        v.is_a?(ConfigSection) ? v.to_h : v
      end
    end

    def [](key)
      @data[key.to_sym]
    end

    def []=(key, value)
      @data[key.to_sym] = value
    end

    def merge(other)
      other_hash = other.is_a?(ConfigSection) ? other.to_h : other
      ConfigSection.new(deep_merge(to_h, other_hash || {}))
    end

    def keys
      @data.keys
    end

    def each(&block)
      @data.each(&block)
    end

    private

    def deep_merge(base, overlay)
      base.merge(overlay) do |_key, old_val, new_val|
        if old_val.is_a?(Hash) && new_val.is_a?(Hash)
          deep_merge(old_val, new_val)
        else
          new_val
        end
      end
    end
  end
end
```
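A short usage sketch of `ConfigSection` as defined above (the keys and values are arbitrary illustration data):

```ruby
section = TrakFlow::ConfigSection.new(
  'daemon' => { 'auto_start' => true, 'flush_debounce' => 5 }
)

section.daemon.auto_start         # => true  (nested hashes become sections)
section[:daemon][:flush_debounce] # => 5     (string and symbol keys are normalized)
section.unknown_key               # => nil   (unset keys return nil rather than raising)

section.daemon.flush_debounce = 2 # writer methods go through method_missing
section.to_h                      # => { daemon: { auto_start: true, flush_debounce: 2 } }

# merge deep-merges nested hashes; pass symbol keys in the overlay, since it
# merges against the symbolized output of #to_h
merged = section.merge(daemon: { flush_debounce: 10 })
merged.daemon.flush_debounce      # => 10
merged.daemon.auto_start          # => true
```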
data/lib/trak_flow/config.rb
@@ -0,0 +1,276 @@
```ruby
# frozen_string_literal: true

require 'anyway_config'
require 'yaml'

require_relative 'config/section'

module TrakFlow
  # TrakFlow Configuration using Anyway Config
  #
  # Schema is defined in lib/trak_flow/config/defaults.yml (single source of truth)
  # Configuration uses nested sections for better organization:
  #   - TrakFlow.config.output.json
  #   - TrakFlow.config.daemon.auto_start
  #   - TrakFlow.config.export.error_policy
  #
  # Configuration sources (lowest to highest priority):
  #   1. Bundled defaults: lib/trak_flow/config/defaults.yml (ships with gem)
  #   2. XDG user config: ~/.config/trak_flow/trak_flow.yml
  #   3. Project config: ./.trak_flow/config.yml
  #   4. Environment variables (TF_*)
  #   5. Explicit values passed to configure block
  #
  # @example Configure with environment variables
  #   export TF_OUTPUT__JSON=true
  #   export TF_DAEMON__AUTO_START=false
  #   export TF_ACTOR=robot
  #
  # @example Configure with Ruby block
  #   TrakFlow.configure do |config|
  #     config.output.json = true
  #     config.daemon.auto_start = false
  #   end
  #
  class Config < Anyway::Config
    config_name :trak_flow
    env_prefix :tf

    # ==========================================================================
    # Schema Definition (loaded from defaults.yml - single source of truth)
    # ==========================================================================

    DEFAULTS_PATH = File.expand_path('config/defaults.yml', __dir__).freeze

    begin
      defaults_content = File.read(DEFAULTS_PATH)
      raw_yaml = YAML.safe_load(
        defaults_content,
        permitted_classes: [Symbol],
        symbolize_names: true,
        aliases: true
      ) || {}
      SCHEMA = raw_yaml[:defaults] || {}
    rescue StandardError => e
      raise TrakFlow::ConfigurationError,
            "Could not load schema from #{DEFAULTS_PATH}: #{e.message}"
    end

    # Nested section attributes (defined as hashes, converted to ConfigSection)
    attr_config :output, :daemon, :sync, :create, :validation, :id, :import, :export, :storage, :database, :mcp

    # Top-level scalar attributes
    attr_config :actor

    # ==========================================================================
    # Type Coercion
    # ==========================================================================

    def self.config_section_with_defaults(section_key)
      defaults = SCHEMA[section_key] || {}
      ->(v) {
        return v if v.is_a?(ConfigSection)
        incoming = v || {}
        merged = deep_merge_hashes(defaults.dup, incoming)
        ConfigSection.new(merged)
      }
    end

    def self.deep_merge_hashes(base, overlay)
      base.merge(overlay) do |_key, old_val, new_val|
        if old_val.is_a?(Hash) && new_val.is_a?(Hash)
          deep_merge_hashes(old_val, new_val)
        else
          new_val.nil? ? old_val : new_val
        end
      end
    end

    coerce_types(
      output: config_section_with_defaults(:output),
      daemon: config_section_with_defaults(:daemon),
      sync: config_section_with_defaults(:sync),
      create: config_section_with_defaults(:create),
      validation: config_section_with_defaults(:validation),
      id: config_section_with_defaults(:id),
      import: config_section_with_defaults(:import),
      export: config_section_with_defaults(:export),
      storage: config_section_with_defaults(:storage),
      database: config_section_with_defaults(:database),
      mcp: config_section_with_defaults(:mcp)
    )

    on_load :setup_defaults

    # ==========================================================================
    # Convenience Accessors (for backward compatibility)
    # ==========================================================================

    def json?
      output.json
    end

    def no_daemon?
      daemon.disabled
    end

    def auto_start_daemon?
      daemon.auto_start
    end

    def flush_debounce
      daemon.flush_debounce
    end

    def no_auto_flush?
      !sync.auto_flush
    end

    def no_auto_import?
      !sync.auto_import
    end

    def no_push?
      !sync.push
    end

    def require_description?
      create.require_description
    end

    def validation_on_create
      validation.on_create
    end

    def validation_on_sync
      validation.on_sync
    end

    def max_collision_prob
      id.max_collision_prob
    end

    def min_hash_length
      id.min_hash_length
    end

    def max_hash_length
      id.max_hash_length
    end

    def orphan_handling
      import.orphan_handling
    end

    def import_error_policy
      import.error_policy
    end

    def error_policy
      export.error_policy
    end

    def retry_attempts
      export.retry_attempts
    end

    def retry_backoff_ms
      export.retry_backoff_ms
    end

    def skip_encoding_errors?
      export.skip_encoding_errors
    end

    # ==========================================================================
    # Legacy API Support (for backward compatibility)
    # ==========================================================================

    LEGACY_KEY_MAP = {
      'json' => %i[output json],
      'stealth' => %i[output stealth],
      'no_daemon' => %i[daemon disabled],
      'no_auto_flush' => %i[sync auto_flush],
      'no_auto_import' => %i[sync auto_import],
      'no_push' => %i[sync push],
      'create.require_description' => %i[create require_description],
      'validation.on_create' => %i[validation on_create],
      'validation.on_sync' => %i[validation on_sync],
      'flush_debounce' => %i[daemon flush_debounce],
      'auto_start_daemon' => %i[daemon auto_start],
      'max_collision_prob' => %i[id max_collision_prob],
      'min_hash_length' => %i[id min_hash_length],
      'max_hash_length' => %i[id max_hash_length],
      'import.orphan_handling' => %i[import orphan_handling],
      'import.error_policy' => %i[import error_policy],
      'export.error_policy' => %i[export error_policy],
      'export.retry_attempts' => %i[export retry_attempts],
      'export.retry_backoff_ms' => %i[export retry_backoff_ms],
      'export.skip_encoding_errors' => %i[export skip_encoding_errors],
      'actor' => [:actor]
    }.freeze

    def get(key)
      mapping = LEGACY_KEY_MAP[key.to_s]
      return nil unless mapping

      if mapping.is_a?(Array)
        value = self
        mapping.each { |k| value = value.respond_to?(k) ? value.send(k) : value[k] }
        value
      elsif mapping.is_a?(Proc)
        nil
      else
        send(mapping)
      end
    end

    def set(key, value)
      mapping = LEGACY_KEY_MAP[key.to_s]
      return unless mapping

      if mapping.is_a?(Array)
        if mapping.length == 1
          send("#{mapping[0]}=", value)
        else
          section = send(mapping[0])
          section.send("#{mapping[1]}=", value)
        end
      end
    end

    private

    def setup_defaults
      # Ensure all sections are initialized with defaults even when no config files exist
      # Manually apply coercion since it only fires when values come from config sources
      self.output = self.class.config_section_with_defaults(:output).call(output) unless output.is_a?(ConfigSection)
      self.daemon = self.class.config_section_with_defaults(:daemon).call(daemon) unless daemon.is_a?(ConfigSection)
      self.sync = self.class.config_section_with_defaults(:sync).call(sync) unless sync.is_a?(ConfigSection)
      self.create = self.class.config_section_with_defaults(:create).call(create) unless create.is_a?(ConfigSection)
      self.validation = self.class.config_section_with_defaults(:validation).call(validation) unless validation.is_a?(ConfigSection)
      self.id = self.class.config_section_with_defaults(:id).call(self.id) unless self.id.is_a?(ConfigSection)
      self.import = self.class.config_section_with_defaults(:import).call(self.import) unless self.import.is_a?(ConfigSection)
      self.export = self.class.config_section_with_defaults(:export).call(self.export) unless self.export.is_a?(ConfigSection)
      self.storage = self.class.config_section_with_defaults(:storage).call(self.storage) unless self.storage.is_a?(ConfigSection)
      self.database = self.class.config_section_with_defaults(:database).call(self.database) unless self.database.is_a?(ConfigSection)
      self.mcp = self.class.config_section_with_defaults(:mcp).call(self.mcp) unless self.mcp.is_a?(ConfigSection)
      self.actor ||= ENV.fetch('USER', 'unknown')
    end
  end

  class << self
    def config
      @config ||= Config.new
    end

    def configure
      yield(config) if block_given?
      config
    end

    def reset_config!
      @config = nil
    end
  end
end
```
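Tying the pieces together, a sketch of how this configuration object might be exercised programmatically; the returned values assume a fresh environment with only the bundled defaults plus the overrides shown, so treat them as illustrative.

```ruby
require 'trak_flow'

TrakFlow.configure do |config|
  config.output.json = true      # nested section writer via ConfigSection
  config.set('no_daemon', true)  # legacy flat key, routed through LEGACY_KEY_MAP
end

TrakFlow.config.json?                         # => true (convenience accessor over output.json)
TrakFlow.config.no_daemon?                    # => true (reads daemon.disabled)
TrakFlow.config.get('export.retry_attempts')  # => 3    (bundled default from defaults.yml)

TrakFlow.reset_config!  # drop the memoized instance, e.g. between tests
```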