aidp 0.28.0 → 0.30.0

@@ -160,11 +160,53 @@ module Aidp
       if configured_models.any?
         # Use first configured model for this provider and tier
         model_name = configured_models.first
-        Aidp.log_debug("thinking_depth_manager", "Selected model from user config",
-          tier: tier,
-          provider: provider,
-          model: model_name)
-        return [provider, model_name, {}]
+
+        # Check if model is deprecated and try to upgrade
+        require_relative "ruby_llm_registry" unless defined?(Aidp::Harness::RubyLLMRegistry)
+        llm_registry = Aidp::Harness::RubyLLMRegistry.new
+
+        if llm_registry.model_deprecated?(model_name, provider)
+          Aidp.log_warn("thinking_depth_manager", "Configured model is deprecated",
+            tier: tier,
+            provider: provider,
+            model: model_name)
+
+          # Try to find replacement
+          replacement = llm_registry.find_replacement_model(model_name, provider: provider)
+          if replacement
+            Aidp.log_info("thinking_depth_manager", "Auto-upgrading to non-deprecated model",
+              tier: tier,
+              provider: provider,
+              old_model: model_name,
+              new_model: replacement)
+            model_name = replacement
+          else
+            # Try next model in config list
+            non_deprecated = configured_models.find { |m| !llm_registry.model_deprecated?(m, provider) }
+            if non_deprecated
+              Aidp.log_info("thinking_depth_manager", "Using alternate configured model",
+                tier: tier,
+                provider: provider,
+                skipped: model_name,
+                selected: non_deprecated)
+              model_name = non_deprecated
+            else
+              Aidp.log_warn("thinking_depth_manager", "All configured models deprecated, falling back to catalog",
+                tier: tier,
+                provider: provider)
+              # Fall through to catalog selection
+              model_name = nil
+            end
+          end
+        end
+
+        if model_name
+          Aidp.log_debug("thinking_depth_manager", "Selected model from user config",
+            tier: tier,
+            provider: provider,
+            model: model_name)
+          return [provider, model_name, {}]
+        end
       end

       # Provider specified but has no models for this tier in config
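
The net effect is a three-step fallback before the method gives up on user configuration: upgrade a deprecated model to the registry's suggested replacement, otherwise take the first non-deprecated entry from the configured list, otherwise clear model_name so selection falls through to the catalog. A minimal standalone sketch of that ordering, assuming only the two registry methods the diff itself calls (model_deprecated? and find_replacement_model); the helper name pick_model is invented for illustration:

# Hypothetical helper illustrating the fallback order; not part of aidp.
def pick_model(configured_models, provider, registry)
  model = configured_models.first
  return model unless registry.model_deprecated?(model, provider)

  # 1. Prefer the registry's designated replacement for the deprecated model
  replacement = registry.find_replacement_model(model, provider: provider)
  return replacement if replacement

  # 2. Otherwise fall back to the first non-deprecated configured model
  fallback = configured_models.find { |m| !registry.model_deprecated?(m, provider) }
  return fallback if fallback

  # 3. Nothing usable in config: nil signals "use catalog selection"
  nil
end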
@@ -0,0 +1,201 @@
+# frozen_string_literal: true
+
+require "json"
+require "fileutils"
+require_relative "../errors"
+require_relative "scanner"
+require_relative "compiler"
+
+module Aidp
+  module Metadata
+    # Manages cached tool directory with automatic invalidation
+    #
+    # Loads compiled tool directory from cache, checks for file changes,
+    # and regenerates cache when needed.
+    #
+    # @example Loading from cache
+    #   cache = Cache.new(
+    #     cache_path: ".aidp/cache/tool_directory.json",
+    #     directories: [".aidp/skills", ".aidp/templates"]
+    #   )
+    #   directory = cache.load
+    class Cache
+      # Default cache TTL (24 hours)
+      DEFAULT_TTL = 86400
+
+      # Initialize cache
+      #
+      # @param cache_path [String] Path to cache file
+      # @param directories [Array<String>] Directories to monitor
+      # @param ttl [Integer] Cache TTL in seconds (default: 24 hours)
+      # @param strict [Boolean] Whether to fail on validation errors
+      def initialize(cache_path:, directories: [], ttl: DEFAULT_TTL, strict: false)
+        @cache_path = cache_path
+        @directories = Array(directories)
+        @ttl = ttl
+        @strict = strict
+        @file_hashes_path = "#{cache_path}.hashes"
+      end
+
+      # Load tool directory from cache or regenerate
+      #
+      # @return [Hash] Tool directory structure
+      def load
+        Aidp.log_debug("metadata", "Loading cache", path: @cache_path)
+
+        if cache_valid?
+          Aidp.log_debug("metadata", "Using cached directory")
+          load_from_cache
+        else
+          Aidp.log_info("metadata", "Cache invalid, regenerating")
+          regenerate
+        end
+      end
+
+      # Regenerate cache from source files
+      #
+      # @return [Hash] Tool directory structure
+      def regenerate
+        Aidp.log_info("metadata", "Regenerating tool directory", directories: @directories)
+
+        # Compile directory
+        compiler = Compiler.new(directories: @directories, strict: @strict)
+        directory = compiler.compile(output_path: @cache_path)
+
+        # Save file hashes for change detection
+        save_file_hashes
+
+        directory
+      end
+
+      # Force reload cache
+      #
+      # @return [Hash] Tool directory structure
+      def reload
+        Aidp.log_info("metadata", "Force reloading cache")
+        regenerate
+      end
+
+      # Check if cache is valid
+      #
+      # @return [Boolean] True if cache exists and is not stale
+      def cache_valid?
+        return false unless File.exist?(@cache_path)
+        return false if cache_expired?
+        return false if files_changed?
+
+        true
+      end
+
+      # Check if cache has expired based on TTL
+      #
+      # @return [Boolean] True if cache is expired
+      def cache_expired?
+        return true unless File.exist?(@cache_path)
+
+        cache_age = Time.now - File.mtime(@cache_path)
+        expired = cache_age > @ttl
+
+        if expired
+          Aidp.log_debug(
+            "metadata",
+            "Cache expired",
+            age_seconds: cache_age.to_i,
+            ttl: @ttl
+          )
+        end
+
+        expired
+      end
+
+      # Check if source files have changed
+      #
+      # @return [Boolean] True if any source files have changed
+      def files_changed?
+        previous_hashes = load_file_hashes
+        current_hashes = compute_current_hashes
+
+        changed = previous_hashes != current_hashes
+
+        if changed
+          Aidp.log_debug(
+            "metadata",
+            "Source files changed",
+            previous_count: previous_hashes.size,
+            current_count: current_hashes.size
+          )
+        end
+
+        changed
+      end
+
+      # Load directory from cache file
+      #
+      # @return [Hash] Cached directory structure
+      # @raise [Aidp::Errors::ConfigurationError] if cache is invalid
+      def load_from_cache
+        content = File.read(@cache_path, encoding: "UTF-8")
+        directory = JSON.parse(content)
+
+        Aidp.log_debug(
+          "metadata",
+          "Loaded from cache",
+          tools: directory["statistics"]["total_tools"],
+          compiled_at: directory["compiled_at"]
+        )
+
+        directory
+      rescue JSON::ParserError => e
+        Aidp.log_error("metadata", "Invalid cache JSON", error: e.message)
+        raise Aidp::Errors::ConfigurationError, "Invalid tool directory cache: #{e.message}"
+      end
+
+      # Compute current file hashes for all source files
+      #
+      # @return [Hash<String, String>] Map of file_path => file_hash
+      def compute_current_hashes
+        hashes = {}
+
+        @directories.each do |dir|
+          next unless Dir.exist?(dir)
+
+          scanner = Scanner.new([dir])
+          md_files = scanner.find_markdown_files(dir)
+
+          md_files.each do |file_path|
+            content = File.read(file_path, encoding: "UTF-8")
+            hashes[file_path] = Parser.compute_file_hash(content)
+          end
+        end
+
+        hashes
+      end
+
+      # Load saved file hashes
+      #
+      # @return [Hash<String, String>] Saved file hashes
+      def load_file_hashes
+        return {} unless File.exist?(@file_hashes_path)
+
+        content = File.read(@file_hashes_path, encoding: "UTF-8")
+        JSON.parse(content)
+      rescue JSON::ParserError
+        Aidp.log_warn("metadata", "Invalid file hashes cache, regenerating")
+        {}
+      end
+
+      # Save current file hashes
+      def save_file_hashes
+        hashes = compute_current_hashes
+
+        # Ensure directory exists
+        dir = File.dirname(@file_hashes_path)
+        FileUtils.mkdir_p(dir) unless Dir.exist?(dir)
+
+        File.write(@file_hashes_path, JSON.pretty_generate(hashes))
+
+        Aidp.log_debug("metadata", "Saved file hashes", count: hashes.size, path: @file_hashes_path)
+      end
+    end
+  end
+end
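
Cache validity is the conjunction of three checks: the cache file exists, it is younger than the TTL, and the SHA256 hashes of the monitored markdown files match the set stored in a `.hashes` sidecar next to the cache file. One detail worth noting: compute_current_hashes calls Parser.compute_file_hash even though this file only requires scanner and compiler, so Parser is presumably loaded through one of those requires. A short usage sketch; the paths and TTL are illustrative, not gem defaults:

# Hypothetical caller of the new Cache class.
cache = Aidp::Metadata::Cache.new(
  cache_path: ".aidp/cache/tool_directory.json",
  directories: [".aidp/skills", ".aidp/templates"],
  ttl: 3600 # one hour instead of the 24-hour DEFAULT_TTL
)

directory = cache.load   # serves the cached JSON, or regenerates if stale/changed
cache.reload             # skips the validity checks and recompiles unconditionally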
@@ -0,0 +1,229 @@
+# frozen_string_literal: true
+
+require "json"
+require_relative "../errors"
+require_relative "scanner"
+require_relative "validator"
+
+module Aidp
+  module Metadata
+    # Compiles tool metadata into a cached directory structure
+    #
+    # Aggregates metadata from all tool files, builds indexes, resolves dependencies,
+    # and generates a cached tool_directory.json for fast lookups.
+    #
+    # @example Compiling the tool directory
+    #   compiler = Compiler.new(directories: [".aidp/skills", ".aidp/templates"])
+    #   compiler.compile(output_path: ".aidp/cache/tool_directory.json")
+    class Compiler
+      # Compiled directory structure
+      attr_reader :tools, :indexes, :dependency_graph
+
+      # Initialize compiler
+      #
+      # @param directories [Array<String>] Directories to scan
+      # @param strict [Boolean] Whether to fail on validation errors
+      def initialize(directories: [], strict: false)
+        @directories = Array(directories)
+        @strict = strict
+        @tools = []
+        @indexes = {}
+        @dependency_graph = {}
+      end
+
+      # Compile tool directory
+      #
+      # @param output_path [String] Path to output JSON file
+      # @return [Hash] Compiled directory structure
+      def compile(output_path:)
+        Aidp.log_info("metadata", "Compiling tool directory", directories: @directories, output: output_path)
+
+        # Scan all directories
+        scanner = Scanner.new(@directories)
+        @tools = scanner.scan_all
+
+        # Validate tools
+        validator = Validator.new(@tools)
+        validation_results = validator.validate_all
+
+        # Handle validation failures
+        handle_validation_results(validation_results)
+
+        # Build indexes and graphs
+        build_indexes
+        build_dependency_graph
+
+        # Create directory structure
+        directory = create_directory_structure
+
+        # Write to file
+        write_directory(directory, output_path)
+
+        Aidp.log_info(
+          "metadata",
+          "Compilation complete",
+          tools: @tools.size,
+          output: output_path
+        )
+
+        directory
+      end
+
+      # Build indexes for fast lookups
+      def build_indexes
+        Aidp.log_debug("metadata", "Building indexes")
+
+        @indexes = {
+          by_id: {},
+          by_type: {},
+          by_tag: {},
+          by_work_unit_type: {}
+        }
+
+        @tools.each do |tool|
+          # Index by ID
+          @indexes[:by_id][tool.id] = tool
+
+          # Index by type
+          @indexes[:by_type][tool.type] ||= []
+          @indexes[:by_type][tool.type] << tool
+
+          # Index by tags
+          tool.applies_to.each do |tag|
+            @indexes[:by_tag][tag] ||= []
+            @indexes[:by_tag][tag] << tool
+          end
+
+          # Index by work unit types
+          tool.work_unit_types.each do |wut|
+            @indexes[:by_work_unit_type][wut] ||= []
+            @indexes[:by_work_unit_type][wut] << tool
+          end
+        end
+
+        Aidp.log_debug(
+          "metadata",
+          "Indexes built",
+          types: @indexes[:by_type].keys,
+          tags: @indexes[:by_tag].keys.size,
+          work_unit_types: @indexes[:by_work_unit_type].keys
+        )
+      end
+
+      # Build dependency graph
+      def build_dependency_graph
+        Aidp.log_debug("metadata", "Building dependency graph")
+
+        @dependency_graph = {}
+
+        @tools.each do |tool|
+          @dependency_graph[tool.id] = {
+            dependencies: tool.dependencies,
+            dependents: []
+          }
+        end
+
+        # Build reverse dependencies (dependents)
+        @tools.each do |tool|
+          tool.dependencies.each do |dep_id|
+            next unless @dependency_graph[dep_id]
+
+            @dependency_graph[dep_id][:dependents] << tool.id
+          end
+        end
+
+        Aidp.log_debug(
+          "metadata",
+          "Dependency graph built",
+          nodes: @dependency_graph.size
+        )
+      end
+
+      # Create directory structure for serialization
+      #
+      # @return [Hash] Directory structure
+      def create_directory_structure
+        {
+          version: "1.0.0",
+          compiled_at: Time.now.iso8601,
+          tools: @tools.map(&:to_h),
+          indexes: {
+            by_type: @indexes[:by_type].transform_values { |tools| tools.map(&:id) },
+            by_tag: @indexes[:by_tag].transform_values { |tools| tools.map(&:id) },
+            by_work_unit_type: @indexes[:by_work_unit_type].transform_values { |tools| tools.map(&:id) }
+          },
+          dependency_graph: @dependency_graph,
+          statistics: {
+            total_tools: @tools.size,
+            by_type: @tools.group_by(&:type).transform_values(&:size),
+            total_tags: @indexes[:by_tag].size,
+            total_work_unit_types: @indexes[:by_work_unit_type].size
+          }
+        }
+      end
+
+      # Write directory to JSON file
+      #
+      # @param directory [Hash] Directory structure
+      # @param output_path [String] Output file path
+      def write_directory(directory, output_path)
+        # Ensure output directory exists
+        output_dir = File.dirname(output_path)
+        FileUtils.mkdir_p(output_dir) unless Dir.exist?(output_dir)
+
+        # Write with pretty formatting
+        File.write(output_path, JSON.pretty_generate(directory))
+
+        Aidp.log_debug("metadata", "Wrote directory", path: output_path, size: File.size(output_path))
+      end
+
+      # Handle validation results
+      #
+      # @param results [Array<ValidationResult>] Validation results
+      # @raise [Aidp::Errors::ValidationError] if strict mode and errors found
+      def handle_validation_results(results)
+        invalid_results = results.reject(&:valid)
+
+        if invalid_results.any?
+          Aidp.log_warn(
+            "metadata",
+            "Validation errors found",
+            count: invalid_results.size
+          )
+
+          invalid_results.each do |result|
+            Aidp.log_error(
+              "metadata",
+              "Tool validation failed",
+              tool_id: result.tool_id,
+              file: result.file_path,
+              errors: result.errors
+            )
+          end
+
+          if @strict
+            raise Aidp::Errors::ValidationError,
+              "#{invalid_results.size} tool(s) failed validation (strict mode enabled)"
+          end
+
+          # Remove invalid tools from compilation
+          invalid_ids = invalid_results.map(&:tool_id)
+          @tools.reject! { |tool| invalid_ids.include?(tool.id) }
+        end
+
+        # Log warnings
+        results.each do |result|
+          next if result.warnings.empty?
+
+          Aidp.log_warn(
+            "metadata",
+            "Tool validation warnings",
+            tool_id: result.tool_id,
+            file: result.file_path,
+            warnings: result.warnings
+          )
+        end
+      end
+    end
+  end
+end
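
In the serialized directory the indexes hold tool IDs rather than full records (see create_directory_structure), so consumers resolve IDs against the tools array. Also note that write_directory uses FileUtils.mkdir_p and create_directory_structure uses Time#iso8601 without this file requiring fileutils or time, so both are presumably loaded elsewhere in the gem. A sketch of reading the compiled output; the cache path is illustrative:

require "json"

# Hypothetical consumer of a compiled tool_directory.json.
directory = JSON.parse(File.read(".aidp/cache/tool_directory.json"))

# Resolve the ID-valued indexes back to full tool records.
tools_by_id = directory["tools"].to_h { |tool| [tool["id"], tool] }
skill_ids = directory.dig("indexes", "by_type", "skill") || []
skills = skill_ids.map { |id| tools_by_id.fetch(id) }

puts "#{directory.dig("statistics", "total_tools")} tools " \
  "compiled at #{directory["compiled_at"]}, #{skills.size} skills"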
@@ -0,0 +1,204 @@
+# frozen_string_literal: true
+
+require "yaml"
+require "digest"
+require_relative "../errors"
+require_relative "tool_metadata"
+
+module Aidp
+  module Metadata
+    # Parses tool metadata from markdown files with YAML frontmatter
+    #
+    # Extracts metadata headers from skill, persona, and template files.
+    # Supports both new metadata format and legacy skill format.
+    #
+    # @example Parsing a file
+    #   metadata = Parser.parse_file("/path/to/tool.md", type: "skill")
+    #
+    # @example Parsing with auto-detection
+    #   metadata = Parser.parse_file("/path/to/SKILL.md")
+    class Parser
+      # Parse metadata from a file
+      #
+      # @param file_path [String] Path to .md file
+      # @param type [String, nil] Tool type ("skill", "persona", "template") or nil to auto-detect
+      # @return [ToolMetadata] Parsed metadata
+      # @raise [Aidp::Errors::ValidationError] if file format is invalid
+      def self.parse_file(file_path, type: nil)
+        Aidp.log_debug("metadata", "Parsing file", file: file_path, type: type)
+
+        unless File.exist?(file_path)
+          raise Aidp::Errors::ValidationError, "File not found: #{file_path}"
+        end
+
+        content = File.read(file_path, encoding: "UTF-8")
+        file_hash = compute_file_hash(content)
+
+        # Auto-detect type from filename or path if not specified
+        type ||= detect_type(file_path)
+
+        parse_string(content, source_path: file_path, file_hash: file_hash, type: type)
+      end
+
+      # Parse metadata from a string
+      #
+      # @param content [String] File content with frontmatter
+      # @param source_path [String] Source file path for reference
+      # @param file_hash [String] SHA256 hash of content
+      # @param type [String] Tool type ("skill", "persona", "template")
+      # @return [ToolMetadata] Parsed metadata
+      # @raise [Aidp::Errors::ValidationError] if format is invalid
+      def self.parse_string(content, source_path:, file_hash:, type:)
+        metadata_hash, markdown = parse_frontmatter(content, source_path: source_path)
+
+        # Map legacy skill fields to new metadata schema
+        normalized = normalize_metadata(metadata_hash, type: type)
+
+        ToolMetadata.new(
+          type: type,
+          id: normalized["id"],
+          title: normalized["title"],
+          summary: normalized["summary"],
+          version: normalized["version"],
+          applies_to: normalized["applies_to"] || [],
+          work_unit_types: normalized["work_unit_types"] || [],
+          priority: normalized["priority"] || ToolMetadata::DEFAULT_PRIORITY,
+          capabilities: normalized["capabilities"] || [],
+          dependencies: normalized["dependencies"] || [],
+          experimental: normalized["experimental"] || false,
+          content: markdown,
+          source_path: source_path,
+          file_hash: file_hash
+        )
+      rescue Aidp::Errors::ValidationError => e
+        Aidp.log_error("metadata", "Metadata validation failed", error: e.message, file: source_path)
+        raise
+      end
+
+      # Compute SHA256 hash of file content
+      #
+      # @param content [String] File content
+      # @return [String] SHA256 hex string
+      def self.compute_file_hash(content)
+        Digest::SHA256.hexdigest(content)
+      end
+
+      # Detect tool type from file path
+      #
+      # @param file_path [String] File path
+      # @return [String] Detected type ("skill", "persona", or "template")
+      def self.detect_type(file_path)
+        case file_path
+        when %r{/skills/}
+          "skill"
+        when %r{/personas/}
+          "persona"
+        when /SKILL\.md$/
+          "skill"
+        else
+          "template"
+        end
+      end
+
+      # Parse YAML frontmatter from content
+      #
+      # @param content [String] File content with frontmatter
+      # @param source_path [String] Source path for error messages
+      # @return [Array(Hash, String)] Tuple of [metadata, markdown_content]
+      # @raise [Aidp::Errors::ValidationError] if frontmatter is invalid
+      def self.parse_frontmatter(content, source_path:)
+        # Ensure content is UTF-8 encoded
+        content = content.encode("UTF-8", invalid: :replace, undef: :replace) unless content.encoding == Encoding::UTF_8
+        lines = content.lines
+
+        unless lines.first&.strip == "---"
+          raise Aidp::Errors::ValidationError,
+            "Invalid format: missing YAML frontmatter in #{source_path}"
+        end
+
+        frontmatter_lines = []
+        body_start_index = nil
+
+        lines[1..].each_with_index do |line, index|
+          if line.strip == "---"
+            body_start_index = index + 2
+            break
+          end
+
+          frontmatter_lines << line
+        end
+
+        unless body_start_index
+          raise Aidp::Errors::ValidationError,
+            "Invalid format: missing closing frontmatter delimiter in #{source_path}"
+        end
+
+        markdown_content = lines[body_start_index..]&.join.to_s.strip
+        frontmatter_yaml = frontmatter_lines.join
+
+        begin
+          metadata = YAML.safe_load(frontmatter_yaml, permitted_classes: [Symbol])
+        rescue Psych::SyntaxError => e
+          raise Aidp::Errors::ValidationError,
+            "Invalid YAML frontmatter in #{source_path}: #{e.message}"
+        end
+
+        unless metadata.is_a?(Hash)
+          raise Aidp::Errors::ValidationError,
+            "YAML frontmatter must be a hash in #{source_path}"
+        end
+
+        [metadata, markdown_content]
+      end
+
+      # Normalize metadata from various formats to unified schema
+      #
+      # Handles both legacy skill format and new metadata format.
+      #
+      # @param metadata [Hash] Raw metadata from frontmatter
+      # @param type [String] Tool type
+      # @return [Hash] Normalized metadata
+      def self.normalize_metadata(metadata, type:)
+        normalized = {}
+
+        # Required fields (map from legacy names)
+        normalized["id"] = metadata["id"]
+        normalized["title"] = metadata["title"] || metadata["name"]
+        normalized["summary"] = metadata["summary"] || metadata["description"]
+        normalized["version"] = metadata["version"]
+
+        # Optional fields (new schema)
+        normalized["applies_to"] = extract_applies_to(metadata)
+        normalized["work_unit_types"] = metadata["work_unit_types"] || []
+        normalized["priority"] = metadata["priority"]&.to_i
+        normalized["capabilities"] = metadata["capabilities"] || []
+        normalized["dependencies"] = metadata["dependencies"] || []
+        normalized["experimental"] = metadata["experimental"] || false
+
+        normalized
+      end
+
+      # Extract applies_to tags from various metadata fields
+      #
+      # Combines keywords, tags, expertise areas, etc. into unified applies_to list
+      #
+      # @param metadata [Hash] Raw metadata
+      # @return [Array<String>] Combined applies_to tags
+      def self.extract_applies_to(metadata)
+        applies_to = []
+
+        # New schema
+        applies_to.concat(metadata["applies_to"] || [])
+
+        # Legacy skill schema
+        applies_to.concat(metadata["keywords"] || [])
+        applies_to.concat(metadata["tags"] || [])
+
+        # Flatten and deduplicate
+        applies_to.flatten.compact.uniq
+      end
+
+      private_class_method :parse_frontmatter, :normalize_metadata, :extract_applies_to
+    end
+  end
+end
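
The expected file shape is a `---`-delimited YAML header followed by markdown, with legacy skill fields (name, description, keywords, tags) normalized onto the new schema (title, summary, applies_to). A minimal round-trip sketch; the file contents are invented, and a SKILL.md filename is used so detect_type picks "skill" without an explicit type:

require "tmpdir"

# Hypothetical skill file written with the legacy field names.
sample = <<~MD
  ---
  id: code-review
  name: Code Review
  description: Reviews diffs for style and correctness
  version: 1.0.0
  keywords: [ruby, review]
  ---
  # Code Review

  Instructions for the agent go here.
MD

Dir.mktmpdir do |dir|
  path = File.join(dir, "SKILL.md")
  File.write(path, sample)

  metadata = Aidp::Metadata::Parser.parse_file(path)
  # normalize_metadata maps name => title and description => summary;
  # extract_applies_to merges keywords/tags into applies_to.
  # Expected: metadata.type == "skill", metadata.applies_to == ["ruby", "review"]
end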