ace-docs 0.31.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.ace-defaults/docs/config.yml +169 -0
- data/.ace-defaults/docs/multi-subject-example.md +130 -0
- data/.ace-defaults/docs/single-subject-example.md +150 -0
- data/.ace-defaults/nav/protocols/guide-sources/ace-docs.yml +10 -0
- data/.ace-defaults/nav/protocols/prompt-sources/ace-docs.yml +34 -0
- data/.ace-defaults/nav/protocols/tmpl-sources/ace-docs.yml +10 -0
- data/.ace-defaults/nav/protocols/wfi-sources/ace-docs.yml +19 -0
- data/CHANGELOG.md +1082 -0
- data/LICENSE +21 -0
- data/README.md +40 -0
- data/Rakefile +14 -0
- data/exe/ace-docs +14 -0
- data/handbook/guides/documentation/ruby.md +16 -0
- data/handbook/guides/documentation/rust.md +35 -0
- data/handbook/guides/documentation/typescript.md +18 -0
- data/handbook/guides/documentation.g.md +437 -0
- data/handbook/guides/documents-embedded-sync.g.md +473 -0
- data/handbook/guides/documents-embedding.g.md +276 -0
- data/handbook/guides/markdown-style.g.md +290 -0
- data/handbook/prompts/ace-change-analyzer.system.md +113 -0
- data/handbook/prompts/ace-change-analyzer.user.md +95 -0
- data/handbook/prompts/document-analysis.md +74 -0
- data/handbook/prompts/document-analysis.system.md +129 -0
- data/handbook/prompts/markdown-style.system.md +113 -0
- data/handbook/skills/as-docs-create-adr/SKILL.md +35 -0
- data/handbook/skills/as-docs-create-api/SKILL.md +35 -0
- data/handbook/skills/as-docs-create-user/SKILL.md +35 -0
- data/handbook/skills/as-docs-maintain-adrs/SKILL.md +35 -0
- data/handbook/skills/as-docs-squash-changelog/SKILL.md +42 -0
- data/handbook/skills/as-docs-update/SKILL.md +36 -0
- data/handbook/skills/as-docs-update-blueprint/SKILL.md +28 -0
- data/handbook/skills/as-docs-update-roadmap/SKILL.md +24 -0
- data/handbook/skills/as-docs-update-tools/SKILL.md +36 -0
- data/handbook/skills/as-docs-update-usage/SKILL.md +26 -0
- data/handbook/templates/code-docs/javascript-jsdoc.template.md +102 -0
- data/handbook/templates/code-docs/ruby-yard.template.md +85 -0
- data/handbook/templates/project-docs/README.template.md +73 -0
- data/handbook/templates/project-docs/architecture.template.md +300 -0
- data/handbook/templates/project-docs/blueprint.template.md +165 -0
- data/handbook/templates/project-docs/context/ownership.yml +160 -0
- data/handbook/templates/project-docs/decisions/adr.template.md +60 -0
- data/handbook/templates/project-docs/prd.template.md +144 -0
- data/handbook/templates/project-docs/roadmap/roadmap.template.md +47 -0
- data/handbook/templates/project-docs/vision.template.md +233 -0
- data/handbook/templates/user-docs/user-guide.template.md +107 -0
- data/handbook/workflow-instructions/docs/create-adr.wf.md +334 -0
- data/handbook/workflow-instructions/docs/create-api.wf.md +448 -0
- data/handbook/workflow-instructions/docs/create-cookbook.wf.md +434 -0
- data/handbook/workflow-instructions/docs/create-user.wf.md +399 -0
- data/handbook/workflow-instructions/docs/maintain-adrs.wf.md +589 -0
- data/handbook/workflow-instructions/docs/squash-changelog.wf.md +246 -0
- data/handbook/workflow-instructions/docs/update-blueprint.wf.md +361 -0
- data/handbook/workflow-instructions/docs/update-context.wf.md +336 -0
- data/handbook/workflow-instructions/docs/update-roadmap.wf.md +421 -0
- data/handbook/workflow-instructions/docs/update-tools.wf.md +307 -0
- data/handbook/workflow-instructions/docs/update-usage.wf.md +710 -0
- data/handbook/workflow-instructions/docs/update.wf.md +418 -0
- data/lib/ace/docs/atoms/diff_filterer.rb +131 -0
- data/lib/ace/docs/atoms/frontmatter_free_matcher.rb +20 -0
- data/lib/ace/docs/atoms/git_date_resolver.rb +16 -0
- data/lib/ace/docs/atoms/readme_metadata_inferrer.rb +60 -0
- data/lib/ace/docs/atoms/terminology_extractor.rb +308 -0
- data/lib/ace/docs/atoms/time_range_calculator.rb +96 -0
- data/lib/ace/docs/atoms/timestamp_parser.rb +106 -0
- data/lib/ace/docs/atoms/type_inferrer.rb +70 -0
- data/lib/ace/docs/cli/commands/analyze.rb +351 -0
- data/lib/ace/docs/cli/commands/analyze_consistency.rb +185 -0
- data/lib/ace/docs/cli/commands/discover.rb +75 -0
- data/lib/ace/docs/cli/commands/scope_options.rb +71 -0
- data/lib/ace/docs/cli/commands/status.rb +241 -0
- data/lib/ace/docs/cli/commands/update.rb +198 -0
- data/lib/ace/docs/cli/commands/validate.rb +225 -0
- data/lib/ace/docs/cli.rb +60 -0
- data/lib/ace/docs/models/analysis_report.rb +120 -0
- data/lib/ace/docs/models/consistency_report.rb +259 -0
- data/lib/ace/docs/models/document.rb +354 -0
- data/lib/ace/docs/molecules/change_detector.rb +389 -0
- data/lib/ace/docs/molecules/document_loader.rb +133 -0
- data/lib/ace/docs/molecules/frontmatter_manager.rb +85 -0
- data/lib/ace/docs/molecules/git_date_resolver.rb +30 -0
- data/lib/ace/docs/organisms/cross_document_analyzer.rb +274 -0
- data/lib/ace/docs/organisms/document_registry.rb +318 -0
- data/lib/ace/docs/organisms/validator.rb +164 -0
- data/lib/ace/docs/prompts/compact_diff_prompt.rb +119 -0
- data/lib/ace/docs/prompts/consistency_prompt.rb +286 -0
- data/lib/ace/docs/prompts/document_analysis_prompt.rb +389 -0
- data/lib/ace/docs/version.rb +7 -0
- data/lib/ace/docs.rb +82 -0
- data/lib/test.rb +4 -0
- metadata +347 -0
|
@@ -0,0 +1,308 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "set"

module Ace
  module Docs
    module Atoms
      # Extracts and analyzes terminology from documents to find potential
      # conflicts: near-identical terms, US/UK spelling variants, and
      # inconsistent capitalization of the same term across documents.
      class TerminologyExtractor
        # Common words to exclude from terminology analysis
        COMMON_WORDS = %w[
          a an and are as at be but by for from has have i in is it of on or
          that the this to was will with you your we our us their them they
          can could should would may might must shall will do does did done
          get got gets getting make makes made making take takes took taken
          use uses used using go goes went gone going come comes came coming
          see sees saw seen seeing know knows knew known knowing think thinks
          thought thinking want wants wanted wanting need needs needed needing
          give gives gave given giving find finds found finding tell tells told
          telling work works worked working call calls called calling try tries
          tried trying ask asks asked asking feel feels felt feeling become
          becomes became becoming leave leaves left leaving put puts putting
          keep keeps kept keeping let lets letting begin begins began beginning
          seem seems seemed seeming help helps helped helping talk talks talked
          talking turn turns turned turning start starts started starting show
          shows showed shown showing hear hears heard hearing play plays played
          playing run runs ran running move moves moved moving like likes liked
          liking live lives lived living believe believes believed believing
          bring brings brought bringing happen happens happened happening write
          writes wrote written writing provide provides provided providing sit
          sits sat sitting stand stands stood standing lose loses lost losing
          pay pays paid paying meet meets met meeting include includes included
          including continue continues continued continuing set sets setting
          learn learns learned learning change changes changed changing lead
          leads led leading understand understands understood understanding
          watch watches watched watching follow follows followed following stop
          stops stopped stopping create creates created creating speak speaks
          spoke spoken speaking read reads reading allow allows allowed allowing
          add adds added adding spend spends spent spending grow grows grew
          grown growing open opens opened opening walk walks walked walking win
          wins won winning offer offers offered offering remember remembers
          remembered remembering love loves loved loving consider considers
          considered considering appear appears appeared appearing buy buys
          bought buying wait waits waited waiting serve serves served serving
          die dies died dying send sends sent sending expect expects expected
          expecting build builds built building stay stays stayed staying fall
          falls fell fallen falling cut cuts cutting reach reaches reached
          reaching kill kills killed killing remain remains remained remaining
          suggest suggests suggested suggesting raise raises raised raising
          pass passes passed passing sell sells sold selling require requires
          required requiring report reports reported reporting decide decides
          decided deciding pull pulls pulled pulling one two three four five
          six seven eight nine ten first second third last next new old good
          bad best worst more most less least very much many few some any all
          no not yes other another each every either neither both such own same
          different various certain several many most few little much enough
          only just still already yet even also too quite rather almost nearly
          always usually often sometimes rarely never again further then once
          now here there where when why how what which who whom whose if unless
          until while although though because since before after during within
          without through across beyond behind below beneath beside between
          above over under around among against along toward towards upon down
          up out off away back forward backward forwards backwards inside
          outside onto into about for from with without by at in on to as of
        ].freeze

        # Known US spelling => UK spelling pairs, hoisted to a frozen
        # constant so the hash is not rebuilt on every are_variants? call.
        SPELLING_VARIANTS = {
          "analyze" => "analyse",
          "organize" => "organise",
          "recognize" => "recognise",
          "realize" => "realise",
          "color" => "colour",
          "behavior" => "behaviour",
          "center" => "centre",
          "fiber" => "fibre",
          "license" => "licence"
        }.freeze

        # Extract key terms from document content with frequency counts.
        #
        # Lines whose stripped text starts with "```" or "---" are skipped
        # (code-fence and front-matter delimiter lines; note that only the
        # fence lines themselves are skipped, not the fenced content).
        #
        # @param content [String] the document content
        # @param doc_path [String, nil] the document path recorded in locations
        # @return [Hash{String => Hash}] lowercased term => data hash with
        #   :count, :locations ([{file:, line:}]), and :variations (Set of
        #   original casings); limited to terms that appear more than once
        #   or with multiple case variations
        def extract_terms(content, doc_path = nil)
          terms = {}

          content.lines.each_with_index do |line, index|
            # Skip code blocks and front matter delimiters
            next if line.strip.start_with?("```", "---")

            # Lowercased words, allowing hyphenated compounds
            words = line.downcase.scan(/\b[a-z]+(?:-[a-z]+)*\b/)

            words.each do |word|
              # Skip very short words and common stop words
              next if word.length < 3 || COMMON_WORDS.include?(word)

              terms[word] ||= {count: 0, locations: [], variations: Set.new}
              terms[word][:count] += 1
              terms[word][:locations] << {file: doc_path, line: index + 1}

              # Record the original casing (first occurrence on the line)
              original = line[/\b#{Regexp.escape(word)}\b/i]
              terms[word][:variations] << original if original
            end
          end

          # Keep only meaningful terms: repeated, or seen with varying case
          terms.select do |_term, data|
            data[:count] > 1 || data[:variations].size > 1
          end
        end

        # Find terminology conflicts across multiple documents.
        #
        # @param documents [Hash{String => String}] path => content
        # @return [Array<Hash>] conflict hashes of :type "terminology"
        #   (similar terms / spelling variants) or "case_inconsistency"
        def find_conflicts(documents)
          all_terms = {}
          conflicts = []

          # Collect per-document term data, keyed term => path => data
          documents.each do |path, content|
            extract_terms(content, path).each do |term, data|
              all_terms[term] ||= {}
              all_terms[term][path] = data
            end
          end

          # Compare every distinct pair of terms once (O(n^2) over terms)
          term_list = all_terms.keys

          term_list.each_with_index do |term1, i|
            term_list[(i + 1)..-1].each do |term2|
              similarity = calculate_similarity(term1, term2)

              # Similar-but-not-identical terms, or known spelling variants,
              # are flagged as potential conflicts.
              if (similarity > 0.7 && similarity < 1.0) || are_variants?(term1, term2)
                conflicts << build_conflict(term1, term2, all_terms)
              end
            end
          end

          # Also flag inconsistent capitalization of the same base term
          find_inconsistent_usage(all_terms, conflicts)

          conflicts.compact
        end

        # Filter out common words from a list of terms.
        #
        # @param terms [Array<String>] list of terms to filter
        # @return [Array<String>] terms that are not common words
        def filter_common_words(terms)
          terms.reject { |term| COMMON_WORDS.include?(term.downcase) }
        end

        private

        # Calculate a similarity score in 0.0..1.0 between two terms.
        # Not a true edit distance: plural/singular forms score 0.95,
        # single-character differences 0.9, otherwise the ratio of shared
        # unique characters to the longer term's length.
        def calculate_similarity(term1, term2)
          return 1.0 if term1 == term2

          # Normalize comparison
          t1 = term1.downcase
          t2 = term2.downcase

          # Plural/singular variants
          return 0.95 if t1 == "#{t2}s" || t2 == "#{t1}s"
          return 0.95 if t1 == "#{t2}es" || t2 == "#{t1}es"
          return 0.95 if t1.end_with?("y") && t2 == t1[0...-1] + "ies"
          return 0.95 if t2.end_with?("y") && t1 == t2[0...-1] + "ies"

          # Single-character edits
          return 0.9 if one_char_diff?(t1, t2)

          # Fallback: shared unique characters over the longer length
          common_chars = (t1.chars & t2.chars).size
          max_length = [t1.length, t2.length].max.to_f
          common_chars / max_length
        end

        # Check if two terms contain a known US/UK spelling pair.
        def are_variants?(term1, term2)
          SPELLING_VARIANTS.any? do |us, uk|
            (term1.include?(us) && term2.include?(uk)) ||
              (term1.include?(uk) && term2.include?(us))
          end
        end

        # Check if terms differ by exactly one substitution, insertion,
        # or deletion.
        def one_char_diff?(term1, term2)
          return false if (term1.length - term2.length).abs > 1

          if term1.length == term2.length
            # Same length: count substituted positions
            diff_count = term1.chars.each_with_index.count { |char, i| char != term2[i] }
            diff_count == 1
          else
            # Length differs by one: try deleting each char of the longer term
            longer = (term1.length > term2.length) ? term1 : term2
            shorter = (term1.length > term2.length) ? term2 : term1

            longer.length.times do |i|
              test = longer[0...i] + longer[(i + 1)..-1]
              return true if test == shorter
            end
            false
          end
        end

        # Build a "terminology" conflict entry for a pair of terms,
        # listing where each appears. Returns nil if either term has no
        # document occurrences.
        def build_conflict(term1, term2, all_terms)
          docs1 = all_terms[term1]&.keys || []
          docs2 = all_terms[term2]&.keys || []

          return nil if docs1.empty? || docs2.empty?

          {
            type: "terminology",
            terms: [term1, term2],
            documents: {
              term1 => docs1.map { |doc|
                {
                  file: doc,
                  count: all_terms[term1][doc][:count]
                }
              },
              term2 => docs2.map { |doc|
                {
                  file: doc,
                  count: all_terms[term2][doc][:count]
                }
              }
            },
            recommendation: suggest_standardization(term1, term2, all_terms)
          }
        end

        # Append "case_inconsistency" conflicts for terms used with
        # significantly different capitalization across documents.
        def find_inconsistent_usage(all_terms, conflicts)
          all_terms.each do |term, docs|
            # Only consider terms that appear in more than one document
            next unless docs.size > 1

            all_variations = docs.values.flat_map { |d| d[:variations].to_a }
            unique_variations = all_variations.uniq

            if unique_variations.size > 1 && significantly_different_cases?(unique_variations)
              conflicts << {
                type: "case_inconsistency",
                term: term,
                variations: unique_variations,
                documents: docs.map { |path, data|
                  {
                    file: path,
                    variations: data[:variations].to_a,
                    count: data[:count]
                  }
                },
                recommendation: "Standardize capitalization of '#{term}' across documents"
              }
            end
          end
        end

        # True when the variations span more than one case pattern
        # (lower / UPPER / Title / mixed).
        def significantly_different_cases?(variations)
          variations.map { |v| categorize_case(v) }.uniq.size > 1
        end

        # Categorize the case pattern of a word.
        def categorize_case(word)
          return :lower if word == word.downcase
          return :upper if word == word.upcase
          return :title if word == word.capitalize
          :mixed
        end

        # Suggest which of two conflicting terms to standardize on,
        # preferring the clearly (>2x) more frequent term, then a crude
        # US/UK spelling heuristic, then the more frequent overall.
        def suggest_standardization(term1, term2, all_terms)
          count1 = all_terms[term1]&.values&.sum { |d| d[:count] } || 0
          count2 = all_terms[term2]&.values&.sum { |d| d[:count] } || 0

          if count1 > count2 * 2
            "Standardize to '#{term1}' (used #{count1} times vs #{count2})"
          elsif count2 > count1 * 2
            "Standardize to '#{term2}' (used #{count2} times vs #{count1})"
          elsif are_variants?(term1, term2)
            # Crude US-spelling detection ("z" or "or" in the term)
            if term1.include?("z") || term1.include?("or")
              "Standardize to '#{term1}' (US spelling)"
            else
              "Standardize to '#{term2}' (UK spelling)"
            end
          else
            "Consider standardizing to '#{(count1 >= count2) ? term1 : term2}'"
          end
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "date"
require "time"

module Ace
  module Docs
    module Atoms
      # Pure functions for time range calculations: git-compatible "since"
      # strings and human-readable relative dates.
      class TimeRangeCalculator
        class << self
          # Calculate a git-compatible "since" string from a date.
          #
          # Strings already in "N days/weeks/months ago" form are returned
          # unchanged; anything else is parsed and re-expressed relative
          # to today.
          #
          # @param date [Date, Time, String] The date to calculate from
          # @return [String] Git-compatible since string (e.g., "2 weeks ago")
          # @raise [ArgumentError] if a string date cannot be parsed
          def calculate_since(date)
            return date if date.is_a?(String) && date.match?(/^\d+\s+(days?|weeks?|months?)\s+ago$/)

            days_ago = (Date.today - parse_date(date)).to_i
            format_days_ago(days_ago)
          end

          # Parse a date from various formats: Date/Time objects, "today",
          # "yesterday", "N days/weeks/months ago", "YYYY-MM-DD", or any
          # other string Date.parse understands.
          #
          # @param date_string [String, Date, Time] Date in various formats
          # @return [Date] Parsed date
          # @raise [ArgumentError] if the string cannot be parsed
          def parse_date(date_string)
            return date_string if date_string.is_a?(Date)
            return date_string.to_date if date_string.is_a?(Time)

            case date_string
            when /^today$/i
              Date.today
            when /^yesterday$/i
              Date.today - 1
            when /^(\d+)\s+days?\s+ago$/i
              Date.today - Regexp.last_match(1).to_i
            when /^(\d+)\s+weeks?\s+ago$/i
              Date.today - (Regexp.last_match(1).to_i * 7)
            when /^(\d+)\s+months?\s+ago$/i
              # Date#<< moves back by calendar months, clamping day-of-month
              Date.today << Regexp.last_match(1).to_i
            else
              # Covers YYYY-MM-DD and any other format Date.parse accepts
              Date.parse(date_string)
            end
          rescue ArgumentError => e
            raise ArgumentError, "Invalid date format: #{date_string}. Error: #{e.message}"
          end

          # Format a date as a human-readable relative string.
          #
          # @param date [Date, Time, String] Date to format
          # @return [String] Human-readable date (e.g., "2 weeks ago", "3 days ago")
          def format_human(date)
            days_ago = (Date.today - parse_date(date)).to_i
            format_days_ago(days_ago)
          end

          private

          # Format a day count into a human-readable "time ago" string.
          # Buckets: exact for 0-1 days, then weeks, months, years.
          def format_days_ago(days)
            case days
            when 0 then "today"
            when 1 then "yesterday"
            when 2..6 then "#{days} days ago"
            when 7..13 then "1 week ago"
            when 14..20 then "2 weeks ago"
            when 21..29 then "3 weeks ago"
            when 30..59 then "1 month ago"
            when 60..89 then "2 months ago"
            when 90..179 then "3 months ago"
            when 180..364 then "6 months ago"
            else
              years = (days / 365.0).round
              # Bug fix: previously always produced "N years ago",
              # yielding the ungrammatical "1 years ago" for ~1 year.
              (years == 1) ? "1 year ago" : "#{years} years ago"
            end
          end
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "date"
require "time"

module Ace
  module Docs
    module Atoms
      # Pure timestamp parsing and validation functions
      # Supports ISO 8601 UTC and date-only formats
      #
      # Timezone Behavior:
      # - ISO 8601 UTC format (YYYY-MM-DDTHH:MM:SSZ) is the recommended format
      # - Date-only format (YYYY-MM-DD) remains timezone-agnostic
      #
      # Return Types:
      # - Date-only strings → Date objects
      # - ISO 8601 UTC strings → Time objects (in UTC)
      # This polymorphic return type preserves the precision of the input format.
      module TimestampParser
        # Regular expression patterns for timestamp validation
        DATE_ONLY_PATTERN = /^\d{4}-\d{2}-\d{2}$/
        ISO8601_UTC_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z$/

        # Parse a timestamp string to a Date or Time object.
        #
        # @param value [String, Date, Time] Timestamp to parse (Date/Time
        #   instances are returned unchanged)
        # @return [Date, Time] Date for date-only input, UTC Time for ISO 8601 input
        # @raise [ArgumentError] If the value is nil, not a supported type, or
        #   does not match an accepted format (messages are wrapped with an
        #   "Invalid timestamp:" prefix by the rescue below)
        def self.parse_timestamp(value)
          raise ArgumentError, "Cannot parse nil timestamp" if value.nil?

          # Return already parsed objects as-is
          return value if value.is_a?(Date) || value.is_a?(Time)

          # Must be a string at this point
          raise ArgumentError, "Timestamp must be a String, Date, or Time" unless value.is_a?(String)

          # Validate format before parsing
          unless validate_format(value)
            raise ArgumentError, "Invalid timestamp format. Use YYYY-MM-DDTHH:MM:SSZ (ISO 8601 UTC) or YYYY-MM-DD"
          end

          # validate_format guarantees the value matches exactly one of the
          # two patterns, so a two-way branch suffices.
          if value.match?(ISO8601_UTC_PATTERN)
            parse_iso8601_utc(value)
          else
            parse_date(value)
          end
        rescue Date::Error, ArgumentError => e
          # Wrap all failures in a uniform, descriptive ArgumentError
          raise ArgumentError, "Invalid timestamp: #{e.message}"
        end

        # Validate timestamp format.
        #
        # @param value [String] Timestamp string to validate
        # @return [Boolean] true if the value is a non-empty String matching
        #   the date-only or ISO 8601 UTC pattern
        def self.validate_format(value)
          # A non-String (including nil) or empty value can never be valid
          return false unless value.is_a?(String) && !value.empty?

          value.match?(DATE_ONLY_PATTERN) || value.match?(ISO8601_UTC_PATTERN)
        end

        # Format a Date or Time object to string.
        #
        # @param time_obj [Date, Time] Object to format
        # @return [String] Formatted timestamp in ISO 8601 UTC format (for Time) or date-only (for Date)
        # @raise [ArgumentError] If object is nil or not a Date/Time
        def self.format_timestamp(time_obj)
          raise ArgumentError, "Cannot format nil timestamp" if time_obj.nil?

          case time_obj
          when Date
            time_obj.strftime("%Y-%m-%d")
          when Time
            time_obj.utc.strftime("%Y-%m-%dT%H:%M:%SZ") # ISO 8601 UTC format
          else
            raise ArgumentError, "Timestamp must be a Date or Time object"
          end
        end

        # Parse date-only string
        # @param date_str [String] Date string in YYYY-MM-DD format
        # @return [Date] Parsed date
        # @raise [ArgumentError] If date is invalid
        def self.parse_date(date_str)
          Date.parse(date_str)
        rescue Date::Error => e
          raise ArgumentError, "Invalid date: #{e.message}"
        end

        # Parse ISO 8601 UTC datetime string
        # @param iso8601_str [String] ISO 8601 datetime string in YYYY-MM-DDTHH:MM:SSZ format
        # @return [Time] Parsed time in UTC
        # @raise [ArgumentError] If datetime is invalid
        def self.parse_iso8601_utc(iso8601_str)
          Time.parse(iso8601_str).utc # Ensure UTC
        rescue ArgumentError => e
          raise ArgumentError, "Invalid ISO 8601 datetime: #{e.message}"
        end

        # Bug fix: the original used a bare `private`, which has no effect on
        # `def self.` singleton methods — the helpers were accidentally public.
        # private_class_method actually hides them from external callers.
        private_class_method :parse_date, :parse_iso8601_utc
      end
    end
  end
end
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Ace
  module Docs
    module Atoms
      # Infers document type from file extension and patterns
      # with configurable priority hierarchy
      class TypeInferrer
        # Map of file extensions to document types
        EXTENSION_MAP = {
          ".wf.md" => "workflow",
          ".g.md" => "guide",
          ".template.md" => "template",
          ".api.md" => "api"
        }.freeze

        # Infer a document type from the file extension.
        #
        # @param path [String] File path
        # @return [String, nil] Document type, or nil when no known
        #   extension matches
        def self.from_extension(path)
          matched = EXTENSION_MAP.find { |ext, _type| path.end_with?(ext) }
          matched&.last
        end

        # Resolve document type using priority hierarchy:
        # 1. Explicit frontmatter doc-type (highest priority)
        # 2. Config pattern type
        # 3. README basename inference
        # 4. File extension inference (lowest priority)
        #
        # @param path [String] File path
        # @param pattern_type [String, nil] Type from config pattern matching
        # @param frontmatter_type [String, nil] Explicit doc-type from frontmatter
        # @return [String, nil] Resolved document type
        def self.resolve(path, pattern_type: nil, frontmatter_type: nil)
          # Frontmatter beats pattern type; both beat name-based inference
          explicit = [frontmatter_type, pattern_type].find { |type| type && !type.empty? }
          return explicit if explicit

          if File.basename(path).casecmp("README.md").zero?
            # README at the repository root is distinguished from nested ones
            root_readme?(path) ? "root_readme" : "readme"
          else
            from_extension(path)
          end
        end

        # True when the path refers to the repository-root README.md, either
        # as a plain relative name (with or without a leading "./") or by
        # resolving to CWD/README.md; any resolution error yields false.
        def self.root_readme?(path)
          path_str = path.to_s
          return true if path_str.sub(%r{\A\./}, "").casecmp("README.md").zero?

          File.expand_path(path_str) == File.join(Dir.pwd, "README.md")
        rescue
          false
        end
        private_class_method :root_readme?
      end
    end
  end
end
|