spurline-test 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/lib/spurline/adapters/base.rb +17 -0
- data/lib/spurline/adapters/claude.rb +208 -0
- data/lib/spurline/adapters/open_ai.rb +213 -0
- data/lib/spurline/adapters/registry.rb +33 -0
- data/lib/spurline/adapters/scheduler/base.rb +15 -0
- data/lib/spurline/adapters/scheduler/sync.rb +15 -0
- data/lib/spurline/adapters/stub_adapter.rb +54 -0
- data/lib/spurline/agent.rb +433 -0
- data/lib/spurline/audit/log.rb +156 -0
- data/lib/spurline/audit/secret_filter.rb +121 -0
- data/lib/spurline/base.rb +130 -0
- data/lib/spurline/cartographer/analyzer.rb +71 -0
- data/lib/spurline/cartographer/analyzers/ci_config.rb +171 -0
- data/lib/spurline/cartographer/analyzers/dotfiles.rb +134 -0
- data/lib/spurline/cartographer/analyzers/entry_points.rb +145 -0
- data/lib/spurline/cartographer/analyzers/file_signatures.rb +55 -0
- data/lib/spurline/cartographer/analyzers/manifests.rb +217 -0
- data/lib/spurline/cartographer/analyzers/security_scan.rb +223 -0
- data/lib/spurline/cartographer/repo_profile.rb +140 -0
- data/lib/spurline/cartographer/runner.rb +88 -0
- data/lib/spurline/cartographer.rb +6 -0
- data/lib/spurline/channels/base.rb +41 -0
- data/lib/spurline/channels/event.rb +136 -0
- data/lib/spurline/channels/github.rb +205 -0
- data/lib/spurline/channels/router.rb +103 -0
- data/lib/spurline/cli/check.rb +88 -0
- data/lib/spurline/cli/checks/adapter_resolution.rb +81 -0
- data/lib/spurline/cli/checks/agent_loadability.rb +41 -0
- data/lib/spurline/cli/checks/base.rb +35 -0
- data/lib/spurline/cli/checks/credentials.rb +43 -0
- data/lib/spurline/cli/checks/permissions.rb +22 -0
- data/lib/spurline/cli/checks/project_structure.rb +48 -0
- data/lib/spurline/cli/checks/session_store.rb +97 -0
- data/lib/spurline/cli/console.rb +73 -0
- data/lib/spurline/cli/credentials.rb +181 -0
- data/lib/spurline/cli/generators/agent.rb +123 -0
- data/lib/spurline/cli/generators/migration.rb +62 -0
- data/lib/spurline/cli/generators/project.rb +331 -0
- data/lib/spurline/cli/generators/tool.rb +98 -0
- data/lib/spurline/cli/router.rb +121 -0
- data/lib/spurline/configuration.rb +23 -0
- data/lib/spurline/dsl/guardrails.rb +108 -0
- data/lib/spurline/dsl/hooks.rb +51 -0
- data/lib/spurline/dsl/memory.rb +39 -0
- data/lib/spurline/dsl/model.rb +23 -0
- data/lib/spurline/dsl/persona.rb +74 -0
- data/lib/spurline/dsl/suspend_until.rb +53 -0
- data/lib/spurline/dsl/tools.rb +176 -0
- data/lib/spurline/errors.rb +109 -0
- data/lib/spurline/lifecycle/deterministic_runner.rb +207 -0
- data/lib/spurline/lifecycle/runner.rb +456 -0
- data/lib/spurline/lifecycle/states.rb +47 -0
- data/lib/spurline/lifecycle/suspension_boundary.rb +82 -0
- data/lib/spurline/memory/context_assembler.rb +100 -0
- data/lib/spurline/memory/embedder/base.rb +17 -0
- data/lib/spurline/memory/embedder/open_ai.rb +70 -0
- data/lib/spurline/memory/episode.rb +56 -0
- data/lib/spurline/memory/episodic_store.rb +147 -0
- data/lib/spurline/memory/long_term/base.rb +22 -0
- data/lib/spurline/memory/long_term/postgres.rb +106 -0
- data/lib/spurline/memory/manager.rb +147 -0
- data/lib/spurline/memory/short_term.rb +57 -0
- data/lib/spurline/orchestration/agent_spawner.rb +151 -0
- data/lib/spurline/orchestration/judge.rb +109 -0
- data/lib/spurline/orchestration/ledger/store/base.rb +28 -0
- data/lib/spurline/orchestration/ledger/store/memory.rb +50 -0
- data/lib/spurline/orchestration/ledger.rb +339 -0
- data/lib/spurline/orchestration/merge_queue.rb +133 -0
- data/lib/spurline/orchestration/permission_intersection.rb +151 -0
- data/lib/spurline/orchestration/task_envelope.rb +201 -0
- data/lib/spurline/persona/base.rb +42 -0
- data/lib/spurline/persona/registry.rb +42 -0
- data/lib/spurline/secrets/resolver.rb +65 -0
- data/lib/spurline/secrets/vault.rb +42 -0
- data/lib/spurline/security/content.rb +76 -0
- data/lib/spurline/security/context_pipeline.rb +58 -0
- data/lib/spurline/security/gates/base.rb +36 -0
- data/lib/spurline/security/gates/operator_config.rb +22 -0
- data/lib/spurline/security/gates/system_prompt.rb +23 -0
- data/lib/spurline/security/gates/tool_result.rb +23 -0
- data/lib/spurline/security/gates/user_input.rb +22 -0
- data/lib/spurline/security/injection_scanner.rb +109 -0
- data/lib/spurline/security/pii_filter.rb +104 -0
- data/lib/spurline/session/resumption.rb +36 -0
- data/lib/spurline/session/serializer.rb +169 -0
- data/lib/spurline/session/session.rb +154 -0
- data/lib/spurline/session/store/base.rb +27 -0
- data/lib/spurline/session/store/memory.rb +45 -0
- data/lib/spurline/session/store/postgres.rb +123 -0
- data/lib/spurline/session/store/sqlite.rb +139 -0
- data/lib/spurline/session/suspension.rb +93 -0
- data/lib/spurline/session/turn.rb +98 -0
- data/lib/spurline/spur.rb +213 -0
- data/lib/spurline/streaming/buffer.rb +77 -0
- data/lib/spurline/streaming/chunk.rb +62 -0
- data/lib/spurline/streaming/stream_enumerator.rb +29 -0
- data/lib/spurline/testing.rb +245 -0
- data/lib/spurline/toolkit.rb +110 -0
- data/lib/spurline/tools/base.rb +209 -0
- data/lib/spurline/tools/idempotency.rb +220 -0
- data/lib/spurline/tools/permissions.rb +44 -0
- data/lib/spurline/tools/registry.rb +43 -0
- data/lib/spurline/tools/runner.rb +255 -0
- data/lib/spurline/tools/scope.rb +309 -0
- data/lib/spurline/tools/toolkit_registry.rb +63 -0
- data/lib/spurline/version.rb +5 -0
- data/lib/spurline.rb +56 -0
- metadata +160 -0
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Spurline
  # Framework internals. Developers never interact with this class directly.
  # Includes all DSL modules and provides registry access.
  #
  # Default adapters are registered here so `use_model :claude_sonnet` works
  # out of the box without manual registration.
  #
  # Registry objects (tools, toolkits, adapters) and the session store are
  # held in class-level instance variables and are deliberately SHARED with
  # subclasses via the `inherited` hook below, so an agent hierarchy sees a
  # single registry instance.
  class Base
    include Spurline::DSL::Model
    include Spurline::DSL::Persona
    include Spurline::DSL::Tools
    include Spurline::DSL::Memory
    include Spurline::DSL::Guardrails
    include Spurline::DSL::Hooks
    include Spurline::DSL::SuspendUntil

    # Default model-to-adapter mapping.
    # Each entry pairs a friendly symbol with the adapter class and (except
    # for :stub) a concrete provider model identifier string.
    DEFAULT_ADAPTERS = {
      claude_sonnet: { adapter: Spurline::Adapters::Claude, model: "claude-sonnet-4-20250514" },
      claude_opus: { adapter: Spurline::Adapters::Claude, model: "claude-opus-4-20250514" },
      claude_haiku: { adapter: Spurline::Adapters::Claude, model: "claude-haiku-4-5-20251001" },
      openai_gpt4o: { adapter: Spurline::Adapters::OpenAI, model: "gpt-4o" },
      openai_gpt4o_mini: { adapter: Spurline::Adapters::OpenAI, model: "gpt-4o-mini" },
      openai_o3_mini: { adapter: Spurline::Adapters::OpenAI, model: "o3-mini" },
      stub: { adapter: Spurline::Adapters::StubAdapter },
    }.freeze

    class << self
      # Declares a fixed tool execution order for deterministic runs.
      #
      # @param tool_names [Array<Symbol, String, Hash>] tool names (coerced
      #   to symbols) or hashes (kept as-is, presumably tool-name-with-options
      #   entries — verify against DeterministicRunner).
      # @raise [Spurline::ConfigurationError] when called with no arguments.
      def deterministic_sequence(*tool_names)
        if tool_names.empty?
          raise Spurline::ConfigurationError,
                "deterministic_sequence requires at least one tool name."
        end

        @deterministic_sequence_config = tool_names.map do |item|
          item.is_a?(Hash) ? item : item.to_sym
        end
      end

      # Resolves the deterministic sequence for this class, falling back to
      # the nearest ancestor that defines one. Returns nil when neither this
      # class nor any ancestor has declared a sequence.
      def deterministic_sequence_config
        own = instance_variable_defined?(:@deterministic_sequence_config) ? @deterministic_sequence_config : nil
        if own
          own
        elsif superclass.respond_to?(:deterministic_sequence_config)
          superclass.deterministic_sequence_config
        else
          nil
        end
      end

      # Lazily builds the tool registry. NOTE: pending registrations queued
      # on Spurline::Spur are flushed into the registry on EVERY access (not
      # just the first), so tools declared after the first call still land.
      def tool_registry
        @tool_registry ||= Spurline::Tools::Registry.new
        Spurline::Spur.flush_pending_registrations!(@tool_registry)
        @tool_registry
      end

      # Lazily builds the toolkit registry, backed by #tool_registry.
      def toolkit_registry
        @toolkit_registry ||= Spurline::Tools::ToolkitRegistry.new(tool_registry: tool_registry)
      end

      # Lazily builds the adapter registry, pre-populated with the defaults,
      # then flushes pending adapter registrations on every access (same
      # late-registration pattern as #tool_registry).
      def adapter_registry
        @adapter_registry ||= begin
          registry = Spurline::Adapters::Registry.new
          register_default_adapters!(registry)
          registry
        end
        Spurline::Spur.flush_pending_adapter_registrations!(@adapter_registry)
        @adapter_registry
      end

      # The session store, resolved from global config on first access.
      def session_store
        @session_store ||= resolve_session_store(Spurline.config.session_store)
      end

      # Replaces the session store; accepts the same symbols / duck-typed
      # objects as the config setting (see #resolve_session_store).
      def session_store=(store)
        @session_store = resolve_session_store(store)
      end

      # Propagates framework state to subclasses at definition time.
      # Registries and the session store are shared BY REFERENCE (one
      # instance across the hierarchy); the deterministic sequence is
      # shallow-copied so a subclass can redeclare it independently.
      def inherited(subclass)
        super
        # Share registries with subclasses
        subclass.instance_variable_set(:@tool_registry, tool_registry)
        subclass.instance_variable_set(:@toolkit_registry, toolkit_registry)
        subclass.instance_variable_set(:@adapter_registry, adapter_registry)
        subclass.instance_variable_set(:@session_store, @session_store)
        if instance_variable_defined?(:@deterministic_sequence_config)
          subclass.instance_variable_set(
            :@deterministic_sequence_config,
            @deterministic_sequence_config&.dup
          )
        end
      end

      private

      # Maps a session-store setting to a concrete store instance.
      # nil/:memory -> in-memory store; :sqlite/:postgres -> the respective
      # backends (postgres requires a configured URL); any other object is
      # accepted as-is if it quacks like a store (save/load/delete/exists?).
      #
      # @raise [Spurline::ConfigurationError] for :postgres without a URL,
      #   or for any unrecognized value.
      def resolve_session_store(store)
        case store
        when nil, :memory
          Spurline::Session::Store::Memory.new
        when :sqlite
          Spurline::Session::Store::SQLite.new(path: Spurline.config.session_store_path)
        when :postgres
          url = Spurline.config.session_store_postgres_url
          unless url && !url.strip.empty?
            raise Spurline::ConfigurationError,
                  "session_store_postgres_url must be set when using :postgres session store. " \
                  "Set it via Spurline.configure { |c| c.session_store_postgres_url = \"postgresql://...\" }."
          end
          Spurline::Session::Store::Postgres.new(url: url)
        else
          return store if store.respond_to?(:save) &&
                          store.respond_to?(:load) &&
                          store.respond_to?(:delete) &&
                          store.respond_to?(:exists?)

          raise Spurline::ConfigurationError,
                "Invalid session_store: #{store.inspect}. " \
                "Use :memory, :sqlite, :postgres, or an object implementing save/load/delete/exists?."
        end
      end

      # Seeds the adapter registry with DEFAULT_ADAPTERS.
      # NOTE(review): only config[:adapter] is registered here — the :model
      # string is not passed to the registry; presumably the model is looked
      # up from DEFAULT_ADAPTERS elsewhere (e.g. by the DSL) — verify.
      def register_default_adapters!(registry)
        DEFAULT_ADAPTERS.each do |name, config|
          registry.register(name, config[:adapter])
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Spurline
  module Cartographer
    # Abstract base for repository analyzers. Each subclass inspects the
    # repository rooted at +repo_path+ and stores its results in +findings+,
    # a hash that is later merged into a RepoProfile.
    class Analyzer
      attr_reader :repo_path, :findings

      # @param repo_path [String] root of the repository to inspect;
      #   expanded to an absolute path immediately.
      def initialize(repo_path:)
        @repo_path = File.expand_path(repo_path)
        @findings = {}
      end

      # Subclasses implement this. Returns a hash merged into RepoProfile.
      def analyze
        raise NotImplementedError, "#{self.class}#analyze must return a findings hash"
      end

      # Per-layer confidence score (0.0-1.0).
      def confidence
        1.0
      end

      private

      # True when the relative path exists inside the repo and is not
      # covered by an exclusion pattern.
      def file_exists?(relative_path)
        !excluded_relative_path?(relative_path) &&
          File.exist?(File.join(repo_path, relative_path))
      end

      # Reads a repo-relative file; nil when excluded, missing, or not a
      # regular file.
      def read_file(relative_path)
        return nil if excluded_relative_path?(relative_path)

        absolute = File.join(repo_path, relative_path)
        File.file?(absolute) ? File.read(absolute) : nil
      end

      # Globs inside the repo, dropping matches that fall under an
      # excluded path.
      def glob(pattern)
        matches = Dir.glob(File.join(repo_path, pattern))
        matches.reject { |absolute| excluded_relative_path?(relative_path(absolute)) }
      end

      # Strips the repository root prefix from an absolute path.
      def relative_path(path)
        path.to_s.sub(%r{\A#{Regexp.escape(repo_path)}/?}, "")
      end

      # Checks the path against the configured exclusion patterns.
      # Patterns containing "/" match as path prefixes; bare tokens match
      # any individual path segment.
      def excluded_relative_path?(relative_path)
        candidate = relative_path.to_s.sub(%r{\A\./}, "").sub(%r{\A/}, "")
        return false if candidate.empty?

        excluded_patterns.any? do |pattern|
          token = pattern.to_s.sub(%r{\A\./}, "").sub(%r{\A/}, "").sub(%r{/$}, "")
          next candidate == token || candidate.start_with?("#{token}/") if token.include?("/")

          candidate.split("/").include?(token)
        end
      end

      # Configured exclusions, best-effort: any failure reading the config
      # means "exclude nothing".
      def excluded_patterns
        Array(Spurline.config.cartographer_exclude_patterns)
      rescue StandardError
        []
      end
    end
  end
end
|
|
@@ -0,0 +1,171 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "yaml"

module Spurline
  module Cartographer
    module Analyzers
      # Detects which CI providers a repository uses (GitHub Actions,
      # CircleCI, GitLab CI, Jenkins) and mines their configuration for
      # representative test / lint / deploy commands.
      class CIConfig < Analyzer
        # Builds the :ci findings hash. The first provider detected (in the
        # fixed scan order below) becomes :provider; all detected providers
        # are listed under :providers. Commands are classified by keyword
        # heuristics and nil entries are compacted away.
        def analyze
          providers = []
          commands = []

          github_workflows = glob(".github/workflows/*.{yml,yaml}")
          unless github_workflows.empty?
            providers << :github_actions
            github_workflows.each do |workflow_path|
              commands.concat(extract_github_commands(workflow_path))
            end
          end

          circle_config_path = File.join(repo_path, ".circleci", "config.yml")
          if File.file?(circle_config_path)
            providers << :circleci
            commands.concat(extract_circleci_commands(circle_config_path))
          end

          gitlab_path = File.join(repo_path, ".gitlab-ci.yml")
          if File.file?(gitlab_path)
            providers << :gitlab_ci
            commands.concat(extract_gitlab_commands(gitlab_path))
          end

          jenkinsfile_path = File.join(repo_path, "Jenkinsfile")
          if File.file?(jenkinsfile_path)
            providers << :jenkins
            commands.concat(extract_jenkins_commands(jenkinsfile_path))
          end

          ci_hash = {}
          ci_hash[:provider] = providers.first if providers.any?
          ci_hash[:providers] = providers if providers.any?
          ci_hash[:test_command] = pick_command(commands) { |cmd| test_command?(cmd) }
          ci_hash[:lint_command] = pick_command(commands) { |cmd| lint_command?(cmd) }
          ci_hash[:deploy_command] = pick_command(commands) { |cmd| deploy_command?(cmd) }
          ci_hash.compact!

          @findings = {
            ci: ci_hash,
            metadata: {
              ci_config: {
                command_count: commands.length,
              },
            },
          }
        end

        # Full confidence when at least one provider was detected; 0.5 when
        # the repository has no recognizable CI configuration.
        def confidence
          providers = findings.dig(:ci, :providers)
          providers && !providers.empty? ? 1.0 : 0.5
        end

        private

        # Collects every `run:` step from a GitHub Actions workflow file.
        # Returns [] for unparseable or unexpectedly-shaped YAML.
        def extract_github_commands(path)
          payload = safe_yaml_load(path)
          return [] unless payload.is_a?(Hash)

          jobs = payload["jobs"]
          return [] unless jobs.is_a?(Hash)

          jobs.values.flat_map do |job|
            next [] unless job.is_a?(Hash)

            steps = job["steps"]
            next [] unless steps.is_a?(Array)

            steps.filter_map do |step|
              next unless step.is_a?(Hash)

              normalize_command(step["run"])
            end
          end
        end

        # Collects `run` step commands from a CircleCI config. Steps may be
        # bare strings (built-ins like "checkout" — skipped), `run: <cmd>`
        # shorthand, or the expanded `run: {command: <cmd>}` form.
        def extract_circleci_commands(path)
          payload = safe_yaml_load(path)
          return [] unless payload.is_a?(Hash)

          jobs = payload["jobs"]
          return [] unless jobs.is_a?(Hash)

          jobs.values.flat_map do |job|
            next [] unless job.is_a?(Hash)

            steps = job["steps"]
            next [] unless steps.is_a?(Array)

            steps.filter_map do |step|
              case step
              when String
                nil
              when Hash
                run = step["run"] || step[:run]
                if run.is_a?(Hash)
                  normalize_command(run["command"] || run[:command])
                else
                  normalize_command(run)
                end
              end
            end
          end
        end

        # Collects every `script:` line from a .gitlab-ci.yml. Top-level
        # non-hash keys (stages, variables as arrays, etc.) are skipped.
        def extract_gitlab_commands(path)
          payload = safe_yaml_load(path)
          return [] unless payload.is_a?(Hash)

          payload.values.flat_map do |job|
            next [] unless job.is_a?(Hash)

            scripts = job["script"] || job[:script]
            case scripts
            when Array
              scripts.filter_map { |script| normalize_command(script) }
            when String
              [normalize_command(scripts)].compact
            else
              []
            end
          end
        end

        # Best-effort scrape of `sh "..."` / `sh '...'` invocations from a
        # Jenkinsfile (Groovy is not parsed; this is a regex scan).
        def extract_jenkins_commands(path)
          content = File.read(path)
          commands = content.scan(/\bsh\s+["']([^"']+)["']/).flatten
          commands.filter_map { |command| normalize_command(command) }
        rescue Errno::ENOENT
          []
        end

        # Defensive YAML parse: returns nil instead of raising for any Psych
        # failure — syntax errors, disallowed scalar classes (e.g. a bare
        # date, which safe_load rejects with Psych::DisallowedClass), bad
        # aliases — or a vanished file.
        def safe_yaml_load(path)
          YAML.safe_load(File.read(path), aliases: true)
        rescue Psych::Exception, Errno::ENOENT
          nil
        end

        # First command matching the given classifier block, or nil.
        def pick_command(commands)
          commands.find { |command| yield(command) }
        end

        # Strips and collapses internal whitespace; nil stays nil.
        def normalize_command(value)
          return nil unless value

          value.to_s.strip.gsub(/\s+/, " ")
        end

        # Heuristic: does this command run a test suite?
        def test_command?(command)
          command.match?(/\b(rspec|minitest|pytest|go test|cargo test|npm test|yarn test|pnpm test|rake test|bundle exec rspec|bundle exec rake spec)\b/i)
        end

        # Heuristic: does this command run a linter/formatter?
        def lint_command?(command)
          command.match?(/\b(rubocop|eslint|prettier|standardrb|lint)\b/i)
        end

        # Heuristic: does this command deploy?
        def deploy_command?(command)
          command.match?(/\b(deploy|kubectl|helm|terraform apply|cap\s)\b/i)
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "json"
require "yaml"

module Spurline
  module Cartographer
    module Analyzers
      # Inspects well-known dotfiles to surface style configuration
      # (RuboCop, ESLint, Prettier, EditorConfig), required environment
      # variables (.env.example) and pinned runtime versions
      # (.nvmrc, .tool-versions).
      class Dotfiles < Analyzer
        RUBOCOP_FILES = [".rubocop.yml"].freeze
        ESLINT_FILES = %w[
          .eslintrc
          .eslintrc.json
          .eslintrc.yml
          .eslintrc.yaml
          .eslintrc.js
        ].freeze
        PRETTIER_FILES = %w[
          .prettierrc
          .prettierrc.json
          .prettierrc.yml
          .prettierrc.yaml
        ].freeze

        # Builds findings with the required env vars plus dotfile metadata
        # (top-level config keys per tool, runtime version pins).
        def analyze
          style_configs = {}
          env_vars = parse_env_example

          rubocop_file = RUBOCOP_FILES.find { |path| file_exists?(path) }
          style_configs[:rubocop] = parse_yaml_keys(rubocop_file) if rubocop_file

          eslint_file = ESLINT_FILES.find { |path| file_exists?(path) }
          style_configs[:eslint] = parse_config_keys(eslint_file) if eslint_file

          prettier_file = PRETTIER_FILES.find { |path| file_exists?(path) }
          style_configs[:prettier] = parse_config_keys(prettier_file) if prettier_file

          style_configs[:editorconfig] = parse_editorconfig_keys if file_exists?(".editorconfig")

          runtime_versions = {}
          nvmrc = read_file(".nvmrc")&.strip
          runtime_versions[:node] = nvmrc if nvmrc && !nvmrc.empty?
          runtime_versions.merge!(parse_tool_versions)

          @findings = {
            environment_vars_required: env_vars,
            metadata: {
              dotfiles: {
                style_configs: style_configs,
                runtime_versions: runtime_versions,
              },
            },
          }
        end

        # 0.9 when at least one style config was found, 0.6 otherwise.
        def confidence
          has_dotfiles = findings.dig(:metadata, :dotfiles, :style_configs)&.any?
          has_dotfiles ? 0.9 : 0.6
        end

        private

        # Extracts the UPPER_SNAKE variable names declared in .env.example,
        # deduplicated and sorted. [] when the file is absent.
        def parse_env_example
          content = read_file(".env.example")
          return [] unless content

          content.each_line.filter_map do |line|
            match = line.match(/^\s*([A-Z][A-Z0-9_]*)\s*=/)
            match&.captures&.first
          end.uniq.sort
        end

        # Collects the property names set in .editorconfig (section headers
        # and comments skipped), deduplicated and sorted.
        def parse_editorconfig_keys
          content = read_file(".editorconfig")
          return [] unless content

          content.each_line.filter_map do |line|
            stripped = line.strip
            next if stripped.empty? || stripped.start_with?("#", ";", "[")

            stripped.split("=").first&.strip
          end.uniq.sort
        end

        # Parses asdf-style .tool-versions into {tool_sym => version}.
        def parse_tool_versions
          content = read_file(".tool-versions")
          return {} unless content

          content.each_line.each_with_object({}) do |line, hash|
            stripped = line.strip
            next if stripped.empty? || stripped.start_with?("#")

            tool, version = stripped.split(/\s+/, 2)
            next unless tool && version

            hash[tool.to_sym] = version.strip
          end
        end

        # Routes a config file to the right key parser by extension.
        # Extensionless rc files (.eslintrc, .prettierrc) are conventionally
        # JSON; .js configs can't be parsed here, so only presence is noted.
        def parse_config_keys(relative_path)
          return [] unless relative_path

          if relative_path.end_with?(".json") || relative_path == ".eslintrc" || relative_path == ".prettierrc"
            parse_json_keys(relative_path)
          elsif relative_path.end_with?(".yml") || relative_path.end_with?(".yaml")
            parse_yaml_keys(relative_path)
          else
            ["config_present"]
          end
        end

        # Top-level keys of a YAML config, sorted. Guards the nil case
        # explicitly (file missing / unreadable) rather than feeding nil to
        # YAML, and rescues all Psych failures — including
        # Psych::DisallowedClass, which safe_load raises for non-whitelisted
        # scalar types — returning [] instead of aborting the analysis.
        def parse_yaml_keys(relative_path)
          return [] unless relative_path

          content = read_file(relative_path)
          return [] unless content

          payload = YAML.safe_load(content, aliases: true)
          return [] unless payload.is_a?(Hash)

          payload.keys.map(&:to_s).sort
        rescue Psych::Exception
          []
        end

        # Top-level keys of a JSON config, sorted. The nil guard prevents
        # the TypeError JSON.parse(nil) would raise when the entry exists
        # but is not a readable regular file.
        def parse_json_keys(relative_path)
          content = read_file(relative_path)
          return [] unless content

          payload = JSON.parse(content)
          return [] unless payload.is_a?(Hash)

          payload.keys.map(&:to_s).sort
        rescue JSON::ParserError
          []
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "json"
require "set"

module Spurline
  module Cartographer
    module Analyzers
      # Discovers how a repository is run: web servers, background workers,
      # consoles, test/lint/deploy commands — mined from bin/ and exe/
      # executables, Procfile, Makefile, package.json scripts and Rakefile.
      class EntryPoints < Analyzer
        # Builds findings[:entry_points], a hash of category => sorted,
        # deduplicated command arrays. Sets are used during collection so a
        # command found by several sources appears once.
        def analyze
          grouped = {
            web: Set.new,
            background: Set.new,
            console: Set.new,
            test: Set.new,
            lint: Set.new,
            deploy: Set.new,
          }

          collect_executables(grouped)
          collect_procfile(grouped)
          collect_makefile(grouped)
          collect_package_scripts(grouped)
          collect_rakefile(grouped)

          @findings = {
            entry_points: grouped.transform_values { |commands| commands.to_a.sort },
          }
        end

        # 0.9 when any entry point was found, 0.5 otherwise. Uses a default
        # empty hash so this is safe to call before #analyze (consistent
        # with the dig-based guards in the sibling analyzers) instead of
        # crashing on findings[:entry_points] being nil.
        def confidence
          commands = findings.fetch(:entry_points, {}).values.flatten
          commands.empty? ? 0.5 : 0.9
        end

        private

        # Classifies each executable under bin/ and exe/ by its file name.
        def collect_executables(grouped)
          (glob("bin/*") + glob("exe/*")).uniq.each do |path|
            next unless File.file?(path)

            command = "./#{relative_path(path)}"
            classify_command(grouped, File.basename(path), command)
          end
        end

        # Maps Procfile process types onto categories (web/worker/console/
        # test); unrecognized types are ignored.
        def collect_procfile(grouped)
          content = read_file("Procfile")
          return unless content

          content.each_line do |line|
            stripped = line.strip
            next if stripped.empty? || stripped.start_with?("#")

            type, command = stripped.split(":", 2)
            next unless type && command

            normalized = command.strip
            case type.strip
            when "web"
              grouped[:web] << normalized
            when "worker", "jobs", "queue"
              grouped[:background] << normalized
            when "console"
              grouped[:console] << normalized
            when "test"
              grouped[:test] << normalized
            end
          end
        end

        # Classifies Makefile targets by name; special (.PHONY-style) and
        # pattern (%) targets are skipped.
        def collect_makefile(grouped)
          content = read_file("Makefile")
          return unless content

          content.each_line do |line|
            match = line.match(/^([A-Za-z0-9_.-]+):(?:\s|$)/)
            next unless match

            target = match[1]
            next if target.start_with?(".") || target.include?("%")

            command = "make #{target}"
            classify_command(grouped, target, command)
          end
        end

        # Classifies package.json "scripts" entries by script name and body.
        # Malformed JSON is tolerated silently (best-effort scan).
        def collect_package_scripts(grouped)
          content = read_file("package.json")
          return unless content

          package = JSON.parse(content)
          scripts = package["scripts"]
          return unless scripts.is_a?(Hash)

          scripts.each do |name, script|
            command = script.to_s.strip
            next if command.empty?

            classify_command(grouped, name, command)
          end
        rescue JSON::ParserError
          nil
        end

        # Adds rake-based commands when a Rakefile is present: a spec task
        # if the Rakefile defines one, plus task listing as a console aid.
        def collect_rakefile(grouped)
          content = read_file("Rakefile")
          return unless content

          grouped[:test] << "bundle exec rake spec" if content.match?(/RSpec::Core::RakeTask|task\s+:spec/)
          grouped[:console] << "bundle exec rake -T"
        end

        # Assigns a command to every category whose name- or body-heuristic
        # matches. A single command may land in multiple categories.
        def classify_command(grouped, name, command)
          token = name.to_s.downcase
          lower_command = command.downcase

          if token.match?(/web|server|start|puma|rails/) || lower_command.match?(/\b(puma|rails server|rackup|npm start|node\s+)/)
            grouped[:web] << command
          end

          if token.match?(/worker|job|queue|sidekiq|resque/) || lower_command.match?(/\b(sidekiq|resque|worker)\b/)
            grouped[:background] << command
          end

          if token.match?(/console|repl|irb|pry/) || lower_command.match?(/\b(rails console|irb|pry)\b/)
            grouped[:console] << command
          end

          if token.match?(/test|spec|rspec|jest|pytest/) || lower_command.match?(/\b(rspec|jest|pytest|minitest|go test|cargo test|npm test|bundle exec rspec)\b/)
            grouped[:test] << command
          end

          if token.match?(/lint|rubocop|eslint|prettier/) || lower_command.match?(/\b(rubocop|eslint|prettier|lint)\b/)
            grouped[:lint] << command
          end

          if token.match?(/deploy|release/) || lower_command.match?(/\b(deploy|kubectl|helm|cap\s)\b/)
            grouped[:deploy] << command
          end
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Spurline
  module Cartographer
    module Analyzers
      # Infers the repository's primary and secondary languages from the
      # presence of well-known sentinel files (Gemfile, package.json,
      # go.mod, ...), and records which toolchain files exist.
      class FileSignatures < Analyzer
        LANGUAGE_SENTINELS = {
          ruby: %w[Gemfile Gemfile.lock .ruby-version Rakefile],
          javascript: %w[package.json .node-version .nvmrc],
          python: %w[pyproject.toml .python-version requirements.txt],
          go: %w[go.mod],
          rust: %w[Cargo.toml],
          java: %w[pom.xml],
        }.freeze

        TOOLCHAIN_SENTINELS = %w[Makefile docker-compose.yml Dockerfile].freeze
        PRIORITY = %i[ruby javascript python go rust java].freeze

        # Ranks detected languages by sentinel count (descending), breaking
        # ties with the fixed PRIORITY order; the top-ranked language is
        # :primary and the rest are :secondary.
        def analyze
          detected = LANGUAGE_SENTINELS.each_with_object({}) do |(lang, sentinels), acc|
            found = sentinels.select { |sentinel| file_exists?(sentinel) }
            acc[lang] = found unless found.empty?
          end

          ranked = detected.keys.sort_by do |lang|
            [-detected[lang].length, PRIORITY.index(lang) || PRIORITY.length]
          end

          primary, *secondary = ranked

          @findings = {
            languages: {
              primary: primary,
              secondary: secondary,
            },
            metadata: {
              file_signatures: {
                detected: detected,
                toolchain: TOOLCHAIN_SENTINELS.select { |sentinel| file_exists?(sentinel) },
              },
            },
          }
        end

        # Full confidence when a primary language was identified; slightly
        # lower when no sentinel matched.
        def confidence
          findings.dig(:languages, :primary) ? 1.0 : 0.85
        end
      end
    end
  end
end
|