starscope 1.5.3 → 1.5.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.rubocop.yml +3 -9
- data/.travis.yml +2 -2
- data/CHANGELOG.md +11 -1
- data/bin/starscope +2 -0
- data/lib/starscope/db.rb +266 -264
- data/lib/starscope/exportable.rb +225 -221
- data/lib/starscope/fragment_extractor.rb +18 -16
- data/lib/starscope/langs/erb.rb +34 -32
- data/lib/starscope/langs/golang.rb +176 -174
- data/lib/starscope/langs/javascript.rb +96 -94
- data/lib/starscope/langs/ruby.rb +109 -91
- data/lib/starscope/matcher.rb +1 -2
- data/lib/starscope/output.rb +37 -35
- data/lib/starscope/queryable.rb +31 -29
- data/lib/starscope/version.rb +1 -1
- data/starscope.gemspec +4 -4
- data/test/fixtures/sample_ruby.rb +1 -0
- data/test/unit/exportable_test.rb +17 -4
- data/test/unit/fragment_extractor_test.rb +6 -1
- metadata +40 -40
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: fe59ab1f2e2cf5aaee13db7f085dc708323a3c50
+  data.tar.gz: 2d32eee21d9bc30996097deb13ac9c6fcb2363cb
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 21d7d6d5d3cb0eb6b9f7903d54fdc90e1ac9abc34b1e0c6751e3814a664987bea98756a34c7f0d3b1fa08d78f61de9212b1714bfb38bccc48c3dc899ded71814
+  data.tar.gz: 5da04677144db25e80b40a96db6608f4ac32f1de8e2f7f36904726a7da0b6b1cff67df642df74a43b89419aa0bea9cb6ba731b98f5cd6244eac7917291ca41d5
data/.rubocop.yml
CHANGED
@@ -1,5 +1,6 @@
-
-
+AllCops:
+  Exclude:
+    - 'test/fixtures/sample_ruby.rb'
 
 Metrics/AbcSize:
   Enabled: false
@@ -25,16 +26,9 @@ Metrics/ModuleLength:
 Metrics/PerceivedComplexity:
   Enabled: false
 
-Style/ClassAndModuleChildren:
-  Enabled: false
-
 Style/Documentation:
   Enabled: false
 
-Style/Next:
-  Exclude:
-    - 'test/fixtures/sample_ruby.rb'
-
 Style/SpecialGlobalVars:
   Enabled: false
 
data/.travis.yml
CHANGED
data/CHANGELOG.md
CHANGED
@@ -1,7 +1,17 @@
 Changelog
 =========
 
-v1.5.3 (2016-03-02)
+v1.5.4 (unreleased)
+--------------------
+
+Improvements:
+* When dumping file metadata, don't include the file contents.
+
+Bug Fixes:
+* Fix parsing ruby files with invalidly-encoded literals (#160).
+* Fix exporting ctags files to different output directories (#163).
+
+v1.5.3 (2016-03-02)
 --------------------
 
 Improvements:
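For context on the encoding fix: a Ruby string literal can carry bytes that are not valid in the source file's encoding, and such strings break naive line handling. A minimal illustration, purely hypothetical; the actual line added to test/fixtures/sample_ruby.rb is not shown in this diff:

# A literal whose bytes are not valid UTF-8. Matching a regexp against it
# raises ArgumentError ("invalid byte sequence in UTF-8") in naive code.
str = "\xff"
puts str.encoding          # => UTF-8
puts str.valid_encoding?   # => false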
data/bin/starscope
CHANGED
data/lib/starscope/db.rb
CHANGED
@@ -8,331 +8,333 @@ require 'starscope/fragment_extractor'
The hunk rewrites the entire file body: the 1.5.3 code (largely the same method definitions, without the module/class nesting introduced here) is removed, and lines 8-340 of the new file read:

require 'starscope/queryable'
require 'starscope/output'

module Starscope
  class DB
    include Starscope::Exportable
    include Starscope::Queryable

    DB_FORMAT = 5
    FRAGMENT = :'!fragment'

    # dynamically load all our language extractors
    Dir.glob("#{File.dirname(__FILE__)}/langs/*.rb").each { |path| require path }

    langs = {}
    extractors = []
    Starscope::Lang.constants.each do |lang|
      extractor = Starscope::Lang.const_get(lang)
      extractors << extractor
      langs[lang.to_sym] = extractor.const_get(:VERSION)
    end
    LANGS = langs.freeze
    EXTRACTORS = extractors.freeze

    class NoTableError < StandardError; end
    class UnknownDBFormatError < StandardError; end

    def initialize(output, config = {})
      @output = output
      @meta = { paths: [], files: {}, excludes: [],
                langs: LANGS.dup, version: Starscope::VERSION }
      @tables = {}
      @config = config
    end

    # returns true iff the database was already in the most recent format
    def load(filename)
      @output.extra("Reading database from `#{filename}`... ")
      current_fmt = open_db(filename)
      fixup if current_fmt
      current_fmt
    end

    def save(filename)
      @output.extra("Writing database to `#{filename}`...")

      # regardless of what the old version was, the new version is written by us
      @meta[:version] = Starscope::VERSION

      @meta[:langs].merge!(LANGS)

      File.open(filename, 'w') do |file|
        Zlib::GzipWriter.wrap(file) do |stream|
          stream.puts DB_FORMAT
          stream.puts Oj.dump @meta
          stream.puts Oj.dump @tables
        end
      end
    end

    def add_excludes(paths)
      @output.extra("Excluding files in paths #{paths}...")
      @meta[:paths] -= paths.map { |p| self.class.normalize_glob(p) }
      paths = paths.map { |p| self.class.normalize_fnmatch(p) }
      @meta[:excludes] += paths
      @meta[:excludes].uniq!
      @all_excludes = nil # clear cache

      excluded = @meta[:files].keys.select { |name| matches_exclude?(name, paths) }
      remove_files(excluded)
    end

    def add_paths(paths)
      @output.extra("Adding files in paths #{paths}...")
      @meta[:excludes] -= paths.map { |p| self.class.normalize_fnmatch(p) }
      @all_excludes = nil # clear cache
      paths = paths.map { |p| self.class.normalize_glob(p) }
      @meta[:paths] += paths
      @meta[:paths].uniq!
      files = Dir.glob(paths).select { |f| File.file? f }
      files.delete_if { |f| matches_exclude?(f) }
      return if files.empty?
      @output.new_pbar('Building', files.length)
      add_files(files)
      @output.finish_pbar
    end

    def update
      changes = @meta[:files].keys.group_by { |name| file_changed(name) }
      changes[:modified] ||= []
      changes[:deleted] ||= []

      new_files = (Dir.glob(@meta[:paths]).select { |f| File.file? f }) - @meta[:files].keys
      new_files.delete_if { |f| matches_exclude?(f) }

      if changes[:deleted].empty? && changes[:modified].empty? && new_files.empty?
        @output.normal('No changes detected.')
        return false
      end

      @output.new_pbar('Updating', changes[:modified].length + new_files.length)
      remove_files(changes[:deleted])
      update_files(changes[:modified])
      add_files(new_files)
      @output.finish_pbar

      true
    end

    def line_for_record(rec)
      return rec[:line] if rec[:line]

      file = @meta[:files][rec[:file]]

      return file[:lines][rec[:line_no] - 1] if file[:lines]
    end

    def tables
      @tables.keys
    end

    def records(table)
      raise NoTableError unless @tables[table]

      @tables[table]
    end

    def metadata(key = nil)
      return @meta.keys if key.nil?

      raise NoTableError unless @meta[key]

      @meta[key]
    end

    def drop_all
      @meta[:files] = {}
      @tables = {}
    end

    private

    def open_db(filename)
      File.open(filename, 'r') do |file|
        begin
          Zlib::GzipReader.wrap(file) do |stream|
            parse_db(stream)
          end
        rescue Zlib::GzipFile::Error
          file.rewind
          parse_db(file)
        end
      end
    end

    # returns true iff the database is in the most recent format
    def parse_db(stream)
      case stream.gets.to_i
      when DB_FORMAT
        @meta = Oj.load(stream.gets)
        @tables = Oj.load(stream.gets)
        return true
      when 3..4
        # Old format, so read the directories segment then rebuild
        add_paths(Oj.load(stream.gets))
        return false
      when 0..2
        # Old format (pre-json), so read the directories segment then rebuild
        len = stream.gets.to_i
        add_paths(Marshal.load(stream.read(len)))
        return false
      else
        raise UnknownDBFormatError
      end
    rescue Oj::ParseError
      stream.rewind
      raise unless stream.gets.to_i == DB_FORMAT
      # try reading as formated json, which is much slower, but it is sometimes
      # useful to be able to directly read your db
      objects = []
      Oj.load(stream) { |obj| objects << obj }
      @meta, @tables = objects
      return true
    end

    def fixup
      # misc things that were't worth bumping the format for, but which might not be written by old versions
      @meta[:langs] ||= {}
    end

    def all_excludes
      @all_excludes ||= @meta[:excludes] + (@config[:excludes] || []).map { |x| self.class.normalize_fnmatch(x) }
    end

    def matches_exclude?(file, patterns = all_excludes)
      patterns.map { |p| File.fnmatch(p, file) }.any?
    end

    def add_files(files)
      files.each do |file|
        @output.extra("Adding `#{file}`")
        parse_file(file)
        @output.inc_pbar
      end
    end

    def remove_files(files)
      files.each do |file|
        @output.extra("Removing `#{file}`")
        @meta[:files].delete(file)
      end
      files = files.to_set
      @tables.each do |_, tbl|
        tbl.delete_if { |val| files.include?(val[:file]) }
      end
    end

    def update_files(files)
      remove_files(files)
      add_files(files)
    end

    def parse_file(file)
      @meta[:files][file] = { last_updated: File.mtime(file).to_i }

      self.class.extractors.each do |extractor|
        begin
          next unless extractor.match_file file
        rescue => e
          @output.normal("#{extractor} raised \"#{e}\" while matching #{file}")
          next
        end

        line_cache = File.readlines(file)
        lines = Array.new(line_cache.length)
        @meta[:files][file][:sublangs] = []
        extract_file(extractor, file, line_cache, lines)

        break
      end
    end

    def extract_file(extractor, file, line_cache, lines)
      fragment_cache = {}

      extractor_metadata = extractor.extract(file, File.read(file)) do |tbl, name, args|
        case tbl
        when FRAGMENT
          fragment_cache[name] ||= []
          fragment_cache[name] << args
        else
          @tables[tbl] ||= []
          @tables[tbl] << self.class.normalize_record(file, name, args)

          if args[:line_no]
            line_cache ||= File.readlines(file)
            lines ||= Array.new(line_cache.length)
            lines[args[:line_no] - 1] = line_cache[args[:line_no] - 1].chomp
          end
        end
      end

      fragment_cache.each do |lang, frags|
        extract_file(Starscope::FragmentExtractor.new(lang, frags), file, line_cache, lines)
        @meta[:files][file][:sublangs] << lang
      end

      @meta[:files][file][:lang] = extractor.name.split('::').last.to_sym
      @meta[:files][file][:lines] = lines

      if extractor_metadata.is_a? Hash
        @meta[:files][file] = extractor_metadata.merge!(@meta[:files][file])
      end

    rescue => e
      @output.normal("#{extractor} raised \"#{e}\" while extracting #{file}")
    end

    def file_changed(name)
      file_meta = @meta[:files][name]
      if matches_exclude?(name) || !File.exist?(name) || !File.file?(name)
        :deleted
      elsif (file_meta[:last_updated] < File.mtime(name).to_i) ||
            language_out_of_date(file_meta[:lang]) ||
            (file_meta[:sublangs] || []).any? { |lang| language_out_of_date(lang) }
        :modified
      else
        :unchanged
      end
    end

    def language_out_of_date(lang)
      return false unless lang
      return true unless LANGS[lang]
      (@meta[:langs][lang] || 0) < LANGS[lang]
    end

    class << self
      # File.fnmatch treats a "**" to match files and directories recursively
      def normalize_fnmatch(path)
        if path == '.'
          '**'
        elsif File.directory?(path)
          File.join(path, '**')
        else
          path
        end
      end

      # Dir.glob treats a "**" to only match directories recursively; you need
      # "**/*" to match all files recursively
      def normalize_glob(path)
        if path == '.'
          File.join('**', '*')
        elsif File.directory?(path)
          File.join(path, '**', '*')
        else
          path
        end
      end

      def normalize_record(file, name, args)
        args[:file] = file
        args[:name] = Array(name).map(&:to_sym)
        args
      end

      def extractors # so we can stub it in tests
        EXTRACTORS
      end
    end
  end
end
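For orientation, a minimal sketch of how the class above is driven, assuming the gem is installed and the code runs from a project checkout. QuietOutput is a stand-in for Starscope::Output, whose interface beyond the methods called in db.rb is not part of this diff:

require 'starscope/db'

# No-op progress/reporting object satisfying the calls db.rb makes on @output.
class QuietOutput
  def extra(*); end
  def normal(msg); puts msg; end
  def new_pbar(*); end
  def inc_pbar; end
  def finish_pbar; end
end

db = Starscope::DB.new(QuietOutput.new)
db.add_paths(['lib'])       # glob, parse and index every file under lib/
db.save('.starscope.db')    # gzipped dump: the DB_FORMAT line, then meta, then tables

db2 = Starscope::DB.new(QuietOutput.new)
db2.load('.starscope.db')   # true iff the file was already in format 5
db2.update                  # re-parse added/changed files; false when nothing changed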