markdownr 0.7.1 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/bin/Dockerfile.markdownr +1 -1
- data/bin/markdownr +15 -0
- data/bin/markdownr-servers.yaml +39 -0
- data/lib/markdown_server/app.rb +729 -90
- data/lib/markdown_server/csv_browser/addon_registry.rb +137 -0
- data/lib/markdown_server/csv_browser/config_loader.rb +231 -0
- data/lib/markdown_server/csv_browser/row_context.rb +146 -0
- data/lib/markdown_server/csv_browser/table_reader.rb +259 -0
- data/lib/markdown_server/helpers/admin_helpers.rb +15 -1
- data/lib/markdown_server/plugin.rb +11 -0
- data/lib/markdown_server/version.rb +1 -1
- data/views/browser.erb +4408 -0
- data/views/layout.erb +2 -15
- metadata +35 -1
data/lib/markdown_server/app.rb
CHANGED
|
@@ -11,6 +11,10 @@ require "set"
|
|
|
11
11
|
require "net/http"
|
|
12
12
|
require "base64"
|
|
13
13
|
require_relative "plugin"
|
|
14
|
+
require_relative "csv_browser/config_loader"
|
|
15
|
+
require_relative "csv_browser/table_reader"
|
|
16
|
+
require_relative "csv_browser/addon_registry"
|
|
17
|
+
require_relative "csv_browser/row_context"
|
|
14
18
|
require_relative "helpers/path_helpers"
|
|
15
19
|
require_relative "helpers/formatting_helpers"
|
|
16
20
|
require_relative "helpers/markdown_helpers"
|
|
@@ -48,6 +52,7 @@ module MarkdownServer
|
|
|
48
52
|
set :popup_external_domains, []
|
|
49
53
|
set :dictionary_url, nil
|
|
50
54
|
set :plugin_dirs, []
|
|
55
|
+
set :csv_browser_config, nil
|
|
51
56
|
end
|
|
52
57
|
|
|
53
58
|
def self.load_plugins!
|
|
@@ -78,7 +83,11 @@ module MarkdownServer
|
|
|
78
83
|
end
|
|
79
84
|
|
|
80
85
|
before do
|
|
81
|
-
|
|
86
|
+
if request.path_info.start_with?("/browser", "/csv-browser")
|
|
87
|
+
cache_control :no_cache, :no_store, :must_revalidate
|
|
88
|
+
else
|
|
89
|
+
cache_control :public, max_age: 14400
|
|
90
|
+
end
|
|
82
91
|
|
|
83
92
|
if settings.verbose
|
|
84
93
|
$stdout.puts "#{Time.now.strftime("%Y-%m-%d %H:%M:%S")} #{client_ip} #{request.request_method} #{request.fullpath}"
|
|
@@ -90,6 +99,17 @@ module MarkdownServer
|
|
|
90
99
|
redirect "/browse/"
|
|
91
100
|
end
|
|
92
101
|
|
|
102
|
+
get "/version" do
|
|
103
|
+
content_type :json
|
|
104
|
+
plugins = settings.plugins.map { |p| p.class.plugin_name }
|
|
105
|
+
JSON.dump({ version: MarkdownServer::VERSION, plugins: plugins })
|
|
106
|
+
end
|
|
107
|
+
|
|
108
|
+
post "/ping" do
|
|
109
|
+
content_type :json
|
|
110
|
+
{ ok: true }.to_json
|
|
111
|
+
end
|
|
112
|
+
|
|
93
113
|
get "/setup-info" do
|
|
94
114
|
@title = "Setup Info"
|
|
95
115
|
@client_ip = client_ip
|
|
@@ -125,6 +145,411 @@ module MarkdownServer
|
|
|
125
145
|
redirect "/"
|
|
126
146
|
end
|
|
127
147
|
|
|
148
|
+
get "/browser" do
|
|
149
|
+
@title = dir_title
|
|
150
|
+
@root_title = dir_title
|
|
151
|
+
@start_mode = "directory"
|
|
152
|
+
@csv_databases = csv_databases_json
|
|
153
|
+
erb :browser, layout: false
|
|
154
|
+
end
|
|
155
|
+
|
|
156
|
+
get "/csv-browser" do
|
|
157
|
+
@title = dir_title
|
|
158
|
+
@root_title = dir_title
|
|
159
|
+
@start_mode = "csv"
|
|
160
|
+
@csv_databases = csv_databases_json
|
|
161
|
+
erb :browser, layout: false
|
|
162
|
+
end
|
|
163
|
+
|
|
164
|
+
get "/browser/api/render/?*" do
|
|
165
|
+
content_type :json
|
|
166
|
+
requested = params["splat"].first.to_s.chomp("/")
|
|
167
|
+
|
|
168
|
+
if requested.empty?
|
|
169
|
+
real_path = File.realpath(root_dir)
|
|
170
|
+
else
|
|
171
|
+
real_path = safe_path(requested)
|
|
172
|
+
end
|
|
173
|
+
|
|
174
|
+
if File.directory?(real_path)
|
|
175
|
+
JSON.dump(browser_render_directory(real_path, requested))
|
|
176
|
+
else
|
|
177
|
+
JSON.dump(browser_render_file(real_path, requested, raw: params["raw"] == "1"))
|
|
178
|
+
end
|
|
179
|
+
end
|
|
180
|
+
|
|
181
|
+
get "/browser/api/csv/databases" do
|
|
182
|
+
content_type :json
|
|
183
|
+
csv_databases_json
|
|
184
|
+
end
|
|
185
|
+
|
|
186
|
+
post "/browser/api/csv/reload" do
|
|
187
|
+
content_type :json
|
|
188
|
+
loader = settings.csv_browser_config
|
|
189
|
+
halt 404, '{"error":"not configured"}' unless loader
|
|
190
|
+
loader.reload!
|
|
191
|
+
csv_databases_json
|
|
192
|
+
end
|
|
193
|
+
|
|
194
|
+
get "/browser/api/csv/databases/:db/tables/:table" do
|
|
195
|
+
content_type :json
|
|
196
|
+
loader = settings.csv_browser_config
|
|
197
|
+
halt 404, '{"error":"not configured"}' unless loader
|
|
198
|
+
|
|
199
|
+
db = loader.database(params[:db])
|
|
200
|
+
halt 404, { error: "Database not found" }.to_json unless db
|
|
201
|
+
|
|
202
|
+
table = db.tables.find { |t| t.key == params[:table] }
|
|
203
|
+
halt 404, { error: "Table not found" }.to_json unless table
|
|
204
|
+
|
|
205
|
+
view_key = params[:view] || "all"
|
|
206
|
+
reader = CsvBrowser::TableReader.new(table)
|
|
207
|
+
data = reader.read(view_key)
|
|
208
|
+
lookups = loader.resolve_references(db, table)
|
|
209
|
+
|
|
210
|
+
# Enrich column references with the referenced table's color
|
|
211
|
+
data[:columns].each do |col|
|
|
212
|
+
next unless col[:references]
|
|
213
|
+
ref_table = db.tables.find { |t| t.key == col[:references][:table] }
|
|
214
|
+
col[:references] = col[:references].merge(color: ref_table.color) if ref_table&.color
|
|
215
|
+
end
|
|
216
|
+
|
|
217
|
+
reverse_refs = loader.resolve_reverse_references(db, table)
|
|
218
|
+
|
|
219
|
+
result = {
|
|
220
|
+
database: db.key,
|
|
221
|
+
table: table.key,
|
|
222
|
+
view: view_key,
|
|
223
|
+
views: table.views.map { |v| { key: v.key, title: v.title } },
|
|
224
|
+
columns: data[:columns],
|
|
225
|
+
rows: data[:rows]
|
|
226
|
+
}
|
|
227
|
+
result[:color] = table.color if table.color
|
|
228
|
+
result[:required] = table.required unless table.required.empty?
|
|
229
|
+
result[:references] = lookups unless lookups.empty?
|
|
230
|
+
result[:reverse_references] = reverse_refs unless reverse_refs.empty?
|
|
231
|
+
result.to_json
|
|
232
|
+
end
|
|
233
|
+
|
|
234
|
+
get "/browser/api/csv/databases/:db/search" do
|
|
235
|
+
content_type :json
|
|
236
|
+
loader = settings.csv_browser_config
|
|
237
|
+
halt 404, '{"error":"not configured"}' unless loader
|
|
238
|
+
|
|
239
|
+
db = loader.database(params[:db])
|
|
240
|
+
halt 404, { error: "Database not found" }.to_json unless db
|
|
241
|
+
|
|
242
|
+
query = (params[:q] || "").strip
|
|
243
|
+
halt 400, { error: "No query" }.to_json if query.empty?
|
|
244
|
+
|
|
245
|
+
terms = query.split(/\s+/).map { |t| parse_filter_term(t) }
|
|
246
|
+
|
|
247
|
+
results = db.tables.filter_map do |table|
|
|
248
|
+
next unless File.exist?(table.csv_path)
|
|
249
|
+
|
|
250
|
+
count = 0
|
|
251
|
+
CSV.foreach(table.csv_path, headers: true) do |row|
|
|
252
|
+
values = row.fields.map { |v| v.to_s }
|
|
253
|
+
match = terms.all? do |term|
|
|
254
|
+
values.any? { |v| filter_term_matches?(term, v) }
|
|
255
|
+
end
|
|
256
|
+
count += 1 if match
|
|
257
|
+
end
|
|
258
|
+
next if count == 0
|
|
259
|
+
|
|
260
|
+
entry = { table: table.key, title: table.title, count: count }
|
|
261
|
+
entry[:color] = table.color if table.color
|
|
262
|
+
entry[:views] = table.views.map { |v| { key: v.key, title: v.title } }
|
|
263
|
+
entry
|
|
264
|
+
end
|
|
265
|
+
|
|
266
|
+
{ database: db.key, query: query, results: results }.to_json
|
|
267
|
+
end
|
|
268
|
+
|
|
269
|
+
get "/browser/api/csv/databases/:db/tables/:table/schema" do
|
|
270
|
+
content_type "text/plain"
|
|
271
|
+
loader = settings.csv_browser_config
|
|
272
|
+
halt 404, "not configured" unless loader
|
|
273
|
+
|
|
274
|
+
db = loader.database(params[:db])
|
|
275
|
+
halt 404, "Database not found" unless db
|
|
276
|
+
|
|
277
|
+
table = db.tables.find { |t| t.key == params[:table] }
|
|
278
|
+
halt 404, "Table not found" unless table
|
|
279
|
+
|
|
280
|
+
schema = { "title" => table.title }
|
|
281
|
+
schema["csv"] = File.basename(table.csv_path)
|
|
282
|
+
schema["color"] = table.color if table.color
|
|
283
|
+
|
|
284
|
+
props = {}
|
|
285
|
+
table.columns.each do |col|
|
|
286
|
+
col_def = {}
|
|
287
|
+
col_def["type"] = col.type
|
|
288
|
+
col_def["title"] = col.title if col.title != col.key.capitalize
|
|
289
|
+
col.constraints.each { |k, v| col_def[k] = v } if col.constraints
|
|
290
|
+
if col.references
|
|
291
|
+
col_def["references"] = {
|
|
292
|
+
"table" => col.references[:table],
|
|
293
|
+
"column" => col.references[:column],
|
|
294
|
+
"display" => col.references[:display]
|
|
295
|
+
}
|
|
296
|
+
end
|
|
297
|
+
props[col.key] = col_def
|
|
298
|
+
end
|
|
299
|
+
schema["properties"] = props
|
|
300
|
+
schema["required"] = table.required unless table.required.empty?
|
|
301
|
+
|
|
302
|
+
if table.views.length > 1 || (table.views.length == 1 && table.views.first.key != "all")
|
|
303
|
+
views = {}
|
|
304
|
+
table.views.each do |v|
|
|
305
|
+
view_def = { "title" => v.title }
|
|
306
|
+
view_def["columns"] = v.columns if v.columns
|
|
307
|
+
views[v.key] = view_def
|
|
308
|
+
end
|
|
309
|
+
schema["views"] = views
|
|
310
|
+
end
|
|
311
|
+
|
|
312
|
+
yaml = Psych.dump(schema, indentation: 4).sub(/\A---\n/, "")
|
|
313
|
+
yaml.gsub(/^(\s*)- /, '\1 - ')
|
|
314
|
+
end
|
|
315
|
+
|
|
316
|
+
get "/browser/api/csv/databases/:db/tables/:table/validate" do
|
|
317
|
+
content_type :json
|
|
318
|
+
loader = settings.csv_browser_config
|
|
319
|
+
halt 404, '{"error":"not configured"}' unless loader
|
|
320
|
+
|
|
321
|
+
db = loader.database(params[:db])
|
|
322
|
+
halt 404, { error: "Database not found" }.to_json unless db
|
|
323
|
+
|
|
324
|
+
table = db.tables.find { |t| t.key == params[:table] }
|
|
325
|
+
halt 404, { error: "Table not found" }.to_json unless table
|
|
326
|
+
|
|
327
|
+
reader = CsvBrowser::TableReader.new(table)
|
|
328
|
+
errors = reader.validate_all
|
|
329
|
+
|
|
330
|
+
{ errors: errors }.to_json
|
|
331
|
+
end
|
|
332
|
+
|
|
333
|
+
get "/browser/api/csv/databases/:db/tables/:table/rows/:row/validate" do
|
|
334
|
+
content_type :json
|
|
335
|
+
loader = settings.csv_browser_config
|
|
336
|
+
halt 404, '{"error":"not configured"}' unless loader
|
|
337
|
+
|
|
338
|
+
db = loader.database(params[:db])
|
|
339
|
+
halt 404, { error: "Database not found" }.to_json unless db
|
|
340
|
+
|
|
341
|
+
table = db.tables.find { |t| t.key == params[:table] }
|
|
342
|
+
halt 404, { error: "Table not found" }.to_json unless table
|
|
343
|
+
|
|
344
|
+
row_index = Integer(params[:row]) rescue nil
|
|
345
|
+
halt 400, { error: "Invalid row index" }.to_json unless row_index
|
|
346
|
+
|
|
347
|
+
reader = CsvBrowser::TableReader.new(table)
|
|
348
|
+
result = reader.validate_cells(row_index, {})
|
|
349
|
+
result.to_json
|
|
350
|
+
end
|
|
351
|
+
|
|
352
|
+
put "/browser/api/csv/databases/:db/tables/:table/rows/:row" do
|
|
353
|
+
content_type :json
|
|
354
|
+
unless admin?
|
|
355
|
+
halt 403, { error: "Admin login required to save changes.",
|
|
356
|
+
admin_url: "https://github.com/brianmd/markdown-server#admin-access",
|
|
357
|
+
client_ip: client_ip }.to_json
|
|
358
|
+
end
|
|
359
|
+
loader = settings.csv_browser_config
|
|
360
|
+
halt 404, '{"error":"not configured"}' unless loader
|
|
361
|
+
|
|
362
|
+
db = loader.database(params[:db])
|
|
363
|
+
halt 404, { error: "Database not found" }.to_json unless db
|
|
364
|
+
|
|
365
|
+
table = db.tables.find { |t| t.key == params[:table] }
|
|
366
|
+
halt 404, { error: "Table not found" }.to_json unless table
|
|
367
|
+
|
|
368
|
+
row_index = Integer(params[:row]) rescue nil
|
|
369
|
+
halt 400, { error: "Invalid row index" }.to_json unless row_index
|
|
370
|
+
|
|
371
|
+
body = JSON.parse(request.body.read) rescue {}
|
|
372
|
+
changes = body["changes"] || {}
|
|
373
|
+
halt 400, { error: "No changes provided" }.to_json if changes.empty?
|
|
374
|
+
|
|
375
|
+
reader = CsvBrowser::TableReader.new(table)
|
|
376
|
+
result = reader.update_row(row_index, changes)
|
|
377
|
+
|
|
378
|
+
if result[:valid]
|
|
379
|
+
result.to_json
|
|
380
|
+
else
|
|
381
|
+
status 422
|
|
382
|
+
result.to_json
|
|
383
|
+
end
|
|
384
|
+
end
|
|
385
|
+
|
|
386
|
+
post "/browser/api/csv/databases/:db/tables/:table/rows/:row/duplicate" do
|
|
387
|
+
content_type :json
|
|
388
|
+
unless admin?
|
|
389
|
+
halt 403, { error: "Admin login required to duplicate rows.",
|
|
390
|
+
admin_url: "https://github.com/brianmd/markdown-server#admin-access",
|
|
391
|
+
client_ip: client_ip }.to_json
|
|
392
|
+
end
|
|
393
|
+
loader = settings.csv_browser_config
|
|
394
|
+
halt 404, '{"error":"not configured"}' unless loader
|
|
395
|
+
|
|
396
|
+
db = loader.database(params[:db])
|
|
397
|
+
halt 404, { error: "Database not found" }.to_json unless db
|
|
398
|
+
|
|
399
|
+
table = db.tables.find { |t| t.key == params[:table] }
|
|
400
|
+
halt 404, { error: "Table not found" }.to_json unless table
|
|
401
|
+
|
|
402
|
+
row_index = Integer(params[:row]) rescue nil
|
|
403
|
+
halt 400, { error: "Invalid row index" }.to_json unless row_index
|
|
404
|
+
|
|
405
|
+
reader = CsvBrowser::TableReader.new(table)
|
|
406
|
+
result = reader.duplicate_row(row_index)
|
|
407
|
+
|
|
408
|
+
if result[:duplicated]
|
|
409
|
+
result.to_json
|
|
410
|
+
else
|
|
411
|
+
status 400
|
|
412
|
+
result.to_json
|
|
413
|
+
end
|
|
414
|
+
end
|
|
415
|
+
|
|
416
|
+
delete "/browser/api/csv/databases/:db/tables/:table/rows/:row" do
|
|
417
|
+
content_type :json
|
|
418
|
+
unless admin?
|
|
419
|
+
halt 403, { error: "Admin login required to delete rows.",
|
|
420
|
+
admin_url: "https://github.com/brianmd/markdown-server#admin-access",
|
|
421
|
+
client_ip: client_ip }.to_json
|
|
422
|
+
end
|
|
423
|
+
loader = settings.csv_browser_config
|
|
424
|
+
halt 404, '{"error":"not configured"}' unless loader
|
|
425
|
+
|
|
426
|
+
db = loader.database(params[:db])
|
|
427
|
+
halt 404, { error: "Database not found" }.to_json unless db
|
|
428
|
+
|
|
429
|
+
table = db.tables.find { |t| t.key == params[:table] }
|
|
430
|
+
halt 404, { error: "Table not found" }.to_json unless table
|
|
431
|
+
|
|
432
|
+
row_index = Integer(params[:row]) rescue nil
|
|
433
|
+
halt 400, { error: "Invalid row index" }.to_json unless row_index
|
|
434
|
+
|
|
435
|
+
reader = CsvBrowser::TableReader.new(table)
|
|
436
|
+
result = reader.delete_row(row_index)
|
|
437
|
+
|
|
438
|
+
if result[:deleted]
|
|
439
|
+
result.to_json
|
|
440
|
+
else
|
|
441
|
+
status 400
|
|
442
|
+
result.to_json
|
|
443
|
+
end
|
|
444
|
+
end
|
|
445
|
+
|
|
446
|
+
# List add-on actions available for a row on a given table.
|
|
447
|
+
get "/browser/api/csv/databases/:db/tables/:table/addons" do
|
|
448
|
+
content_type :json
|
|
449
|
+
loader = settings.csv_browser_config
|
|
450
|
+
halt 404, '{"error":"not configured"}' unless loader
|
|
451
|
+
|
|
452
|
+
db = loader.database(params[:db])
|
|
453
|
+
halt 404, { error: "Database not found" }.to_json unless db
|
|
454
|
+
|
|
455
|
+
table = db.tables.find { |t| t.key == params[:table] }
|
|
456
|
+
halt 404, { error: "Table not found" }.to_json unless table
|
|
457
|
+
|
|
458
|
+
row_index = Integer(params[:row]) rescue nil
|
|
459
|
+
halt 400, { error: "Invalid row index" }.to_json unless row_index
|
|
460
|
+
|
|
461
|
+
row_hash = read_row_hash(table, row_index)
|
|
462
|
+
halt 404, { error: "Row not found" }.to_json unless row_hash
|
|
463
|
+
|
|
464
|
+
attachments = CsvBrowser::CsvAddonRegistry.for_table(db, table)
|
|
465
|
+
actions = attachments.flat_map do |att|
|
|
466
|
+
ctx = CsvBrowser::RowContext.new(
|
|
467
|
+
database: db, table: table, row_index: row_index, row: row_hash,
|
|
468
|
+
options: att[:options]
|
|
469
|
+
)
|
|
470
|
+
att[:definition].actions_for(ctx).map do |a|
|
|
471
|
+
{
|
|
472
|
+
addon: att[:definition].name.to_s,
|
|
473
|
+
id: a[:id].to_s,
|
|
474
|
+
label: a[:label] || a[:id].to_s,
|
|
475
|
+
enabled: a[:enabled]
|
|
476
|
+
}.tap { |h| h[:icon] = a[:icon] if a[:icon] }
|
|
477
|
+
end
|
|
478
|
+
end
|
|
479
|
+
{ actions: actions }.to_json
|
|
480
|
+
end
|
|
481
|
+
|
|
482
|
+
# Invoke an add-on action (initial call or prompt continuation).
|
|
483
|
+
post "/browser/api/csv/databases/:db/tables/:table/addons/:addon/:action" do
|
|
484
|
+
content_type :json
|
|
485
|
+
unless admin?
|
|
486
|
+
halt 403, { error: "Admin login required to run add-on actions.",
|
|
487
|
+
admin_url: "https://github.com/brianmd/markdown-server#admin-access",
|
|
488
|
+
client_ip: client_ip }.to_json
|
|
489
|
+
end
|
|
490
|
+
loader = settings.csv_browser_config
|
|
491
|
+
halt 404, '{"error":"not configured"}' unless loader
|
|
492
|
+
|
|
493
|
+
db = loader.database(params[:db])
|
|
494
|
+
halt 404, { error: "Database not found" }.to_json unless db
|
|
495
|
+
|
|
496
|
+
table = db.tables.find { |t| t.key == params[:table] }
|
|
497
|
+
halt 404, { error: "Table not found" }.to_json unless table
|
|
498
|
+
|
|
499
|
+
attachments = CsvBrowser::CsvAddonRegistry.for_table(db, table)
|
|
500
|
+
attachment = attachments.find { |a| a[:definition].name.to_s == params[:addon] }
|
|
501
|
+
halt 404, { error: "Add-on not found" }.to_json unless attachment
|
|
502
|
+
|
|
503
|
+
handler = attachment[:definition].handler_for(params[:action])
|
|
504
|
+
halt 404, { error: "Action not found" }.to_json unless handler
|
|
505
|
+
|
|
506
|
+
body = JSON.parse(request.body.read) rescue {}
|
|
507
|
+
row_index = Integer(body["row_index"]) rescue nil
|
|
508
|
+
halt 400, { error: "Invalid row index" }.to_json unless row_index
|
|
509
|
+
|
|
510
|
+
row_hash = read_row_hash(table, row_index)
|
|
511
|
+
halt 404, { error: "Row not found" }.to_json unless row_hash
|
|
512
|
+
|
|
513
|
+
ctx = CsvBrowser::RowContext.new(
|
|
514
|
+
database: db, table: table, row_index: row_index, row: row_hash,
|
|
515
|
+
options: attachment[:options], input: body["input"], state: body["state"]
|
|
516
|
+
)
|
|
517
|
+
|
|
518
|
+
result = handler.call(ctx)
|
|
519
|
+
result = { kind: "done", reload: true } unless result.is_a?(Hash)
|
|
520
|
+
result.to_json
|
|
521
|
+
end
|
|
522
|
+
|
|
523
|
+
get "/browser/api/csv/unmapped/*" do
|
|
524
|
+
content_type :json
|
|
525
|
+
relative = params["splat"].first.to_s
|
|
526
|
+
real_path = safe_path(relative)
|
|
527
|
+
halt 404, '{"error":"not found"}' unless real_path && File.exist?(real_path) && real_path.end_with?(".csv")
|
|
528
|
+
|
|
529
|
+
# Verify this is actually an unmapped file
|
|
530
|
+
loader = settings.csv_browser_config
|
|
531
|
+
if loader&.find_table_by_csv_path(File.realpath(real_path))
|
|
532
|
+
halt 400, '{"error":"file belongs to a configured database"}'
|
|
533
|
+
end
|
|
534
|
+
|
|
535
|
+
rows = CSV.read(real_path, headers: true)
|
|
536
|
+
headers = rows.headers.compact
|
|
537
|
+
columns = headers.map { |h| { key: h, title: h, type: "string" } }
|
|
538
|
+
data_rows = rows.each_with_index.map do |row, idx|
|
|
539
|
+
[idx] + headers.map { |h| row[h] }
|
|
540
|
+
end
|
|
541
|
+
|
|
542
|
+
{
|
|
543
|
+
database: "_unmapped",
|
|
544
|
+
table: relative,
|
|
545
|
+
view: "all",
|
|
546
|
+
views: [{ key: "all", title: "All" }],
|
|
547
|
+
columns: columns,
|
|
548
|
+
rows: data_rows,
|
|
549
|
+
readonly: true
|
|
550
|
+
}.to_json
|
|
551
|
+
end
|
|
552
|
+
|
|
128
553
|
get "/browse/?*" do
|
|
129
554
|
requested = params["splat"].first.to_s
|
|
130
555
|
requested = requested.chomp("/")
|
|
@@ -149,88 +574,6 @@ module MarkdownServer
|
|
|
149
574
|
end
|
|
150
575
|
end
|
|
151
576
|
|
|
152
|
-
get "/debug/raw-fetch" do
|
|
153
|
-
halt 404, "not available" unless respond_to?(:blueletterbible_html)
|
|
154
|
-
url = params[:url].to_s.strip
|
|
155
|
-
halt 400, "missing ?url=" if url.empty?
|
|
156
|
-
html = fetch_external_page(url)
|
|
157
|
-
halt 502, "fetch failed" unless html
|
|
158
|
-
content_type :text
|
|
159
|
-
# Show processing steps for first verse
|
|
160
|
-
chunk = html.split(/<div\s[^>]*id="bVerse_\d+"[^>]*>/).drop(1).first
|
|
161
|
-
return "no bVerse chunks found" unless chunk
|
|
162
|
-
|
|
163
|
-
cite = chunk[/tablet-order-2[^>]*>[\s\S]{0,400}?<a[^>]*>(.*?)<\/a>/im, 1]
|
|
164
|
-
&.gsub(/<[^>]+>/, "")&.strip || "?"
|
|
165
|
-
raw_html = chunk[/class="EngBibleText[^"]*"[^>]*>([\s\S]*?)<\/div>/im, 1] || "(no EngBibleText found)"
|
|
166
|
-
|
|
167
|
-
lines = ["=== cite: #{cite} ===",
|
|
168
|
-
"=== EngBibleText raw (#{raw_html.length} chars) ===",
|
|
169
|
-
raw_html, ""]
|
|
170
|
-
|
|
171
|
-
# Simulate the processing steps
|
|
172
|
-
rh = raw_html.dup
|
|
173
|
-
rh.gsub!(/<img[^>]*>/, "")
|
|
174
|
-
rh.gsub!(/<a[^>]*class="hide-for-tablet"[^>]*>[\s\S]*?<\/a>/im, "")
|
|
175
|
-
rh.gsub!(/<span[^>]*class="hide-for-tablet"[^>]*>[\s\S]*?<\/span>/im, "")
|
|
176
|
-
|
|
177
|
-
wp_matches = rh.scan(/<span\s[^>]*class="word-phrase"[^>]*>([\s\S]*?)<\/span>/im)
|
|
178
|
-
lines << "=== word-phrase matches (#{wp_matches.length}) ==="
|
|
179
|
-
wp_matches.each_with_index do |(inner), i|
|
|
180
|
-
is_criteria = inner.match?(/<sup[^>]*class="[^"]*strongs criteria[^"]*"/i)
|
|
181
|
-
word = inner.sub(/<sup[\s\S]*/im, "").gsub(/<[^>]+>/, "").gsub(/ /i, " ").strip
|
|
182
|
-
lines << " [#{i}] criteria=#{is_criteria} word=#{word.inspect}"
|
|
183
|
-
end
|
|
184
|
-
|
|
185
|
-
# Now simulate the full processing pipeline
|
|
186
|
-
verse_html = rh.gsub(/<span\s[^>]*class="word-phrase"[^>]*>([\s\S]*?)<\/span>/im) do
|
|
187
|
-
inner = $1
|
|
188
|
-
word = inner.sub(/<sup[\s\S]*/im, "").gsub(/<[^>]+>/, "").gsub(/ /i, " ").strip
|
|
189
|
-
inner.match?(/<sup[^>]*class="[^"]*strongs criteria[^"]*"/i) ? "\x02#{word}\x03" : word
|
|
190
|
-
end
|
|
191
|
-
lines << "\n=== after word-phrase gsub (placeholder check) ==="
|
|
192
|
-
lines << " contains \\x02: #{verse_html.include?("\x02")}"
|
|
193
|
-
lines << " contains \\x03: #{verse_html.include?("\x03")}"
|
|
194
|
-
ph = verse_html[/\x02[^\x03]*\x03/]
|
|
195
|
-
lines << " placeholder found: #{ph.inspect}"
|
|
196
|
-
|
|
197
|
-
verse_html.gsub!(/<sup[^>]*>[\s\S]*?<\/sup>/im, "")
|
|
198
|
-
verse_html.gsub!(/<[^>]+>/, "")
|
|
199
|
-
verse_html.gsub!(/ /i, " ")
|
|
200
|
-
verse_html.gsub!(/&#(\d+);/) { [$1.to_i].pack("U") rescue " " }
|
|
201
|
-
verse_html.gsub!(/&#x([\da-f]+);/i) { [$1.to_i(16)].pack("U") rescue " " }
|
|
202
|
-
verse_html.gsub!(/&/, "&")
|
|
203
|
-
verse_html.gsub!(/</, "<")
|
|
204
|
-
verse_html.gsub!(/>/, ">")
|
|
205
|
-
verse_html.gsub!(/\s+/, " ")
|
|
206
|
-
verse_html.strip!
|
|
207
|
-
|
|
208
|
-
lines << "=== after tag-strip (placeholder check) ==="
|
|
209
|
-
lines << " contains \\x02: #{verse_html.include?("\x02")}"
|
|
210
|
-
ph2 = verse_html[/\x02[^\x03]*\x03/]
|
|
211
|
-
lines << " placeholder found: #{ph2.inspect}"
|
|
212
|
-
lines << " verse_html snippet: #{verse_html[0, 200].inspect}"
|
|
213
|
-
|
|
214
|
-
# Apply the final restore
|
|
215
|
-
restored = verse_html.gsub(/\x02([^\x03]*)\x03/) { "<span class=\"blb-match\">#{$1.strip}</span>" }
|
|
216
|
-
lines << "\n=== after placeholder restore ==="
|
|
217
|
-
lines << " restored snippet: #{restored[0, 300].inspect}"
|
|
218
|
-
|
|
219
|
-
# Now compare with actual blueletterbible_html output
|
|
220
|
-
full_output = blueletterbible_html(html, url)
|
|
221
|
-
conc_match = full_output[/blb-match[^<]*<\/span>/]
|
|
222
|
-
lines << "\n=== blueletterbible_html output (blb-match check) ==="
|
|
223
|
-
lines << " contains blb-match: #{full_output.include?("blb-match")}"
|
|
224
|
-
lines << " blb-match context: #{conc_match.inspect}"
|
|
225
|
-
# Show the concordance section
|
|
226
|
-
conc_start = full_output.index("blb-heading") ? full_output.rindex("<h4", full_output.index("Concordance") || 0) : nil
|
|
227
|
-
if conc_start
|
|
228
|
-
lines << " concordance html (first 500 chars): #{full_output[conc_start, 500].inspect}"
|
|
229
|
-
end
|
|
230
|
-
|
|
231
|
-
lines.join("\n")
|
|
232
|
-
end
|
|
233
|
-
|
|
234
577
|
get "/debug/fetch" do
|
|
235
578
|
halt 404, "not available" unless respond_to?(:blueletterbible_html)
|
|
236
579
|
url = params[:url].to_s.strip
|
|
@@ -400,23 +743,37 @@ module MarkdownServer
|
|
|
400
743
|
|
|
401
744
|
private
|
|
402
745
|
|
|
746
|
+
def read_row_hash(table, row_index)
|
|
747
|
+
return nil unless File.exist?(table.csv_path)
|
|
748
|
+
return nil if row_index < 0
|
|
749
|
+
|
|
750
|
+
idx = 0
|
|
751
|
+
CSV.foreach(table.csv_path, headers: true) do |row|
|
|
752
|
+
return row.to_h if idx == row_index
|
|
753
|
+
idx += 1
|
|
754
|
+
end
|
|
755
|
+
nil
|
|
756
|
+
end
|
|
757
|
+
|
|
403
758
|
def render_directory(real_path, relative_path)
|
|
404
759
|
entries = Dir.entries(real_path).reject do |e|
|
|
405
760
|
e.start_with?(".") || EXCLUDED.include?(e)
|
|
406
761
|
end
|
|
407
762
|
|
|
763
|
+
browse_prefix = relative_path.empty? ? "/browse/" : "/browse/#{relative_path}/"
|
|
764
|
+
|
|
408
765
|
items = entries.map do |name|
|
|
409
766
|
full = File.join(real_path, name)
|
|
410
767
|
stat = File.stat(full) rescue next
|
|
768
|
+
is_dir = stat.directory?
|
|
411
769
|
btime = stat.respond_to?(:birthtime) ? stat.birthtime : stat.mtime rescue stat.mtime
|
|
412
770
|
{
|
|
413
771
|
name: name,
|
|
414
|
-
is_dir:
|
|
415
|
-
size:
|
|
772
|
+
is_dir: is_dir,
|
|
773
|
+
size: is_dir ? nil : stat.size,
|
|
416
774
|
mtime: stat.mtime,
|
|
417
775
|
ctime: btime,
|
|
418
|
-
href:
|
|
419
|
-
encode_path_component(name) + (stat.directory? ? "/" : "")
|
|
776
|
+
href: browse_prefix + encode_path_component(name) + (is_dir ? "/" : "")
|
|
420
777
|
}
|
|
421
778
|
end.compact
|
|
422
779
|
|
|
@@ -436,8 +793,9 @@ module MarkdownServer
|
|
|
436
793
|
effective_order == "desc" ? sorted.reverse : sorted
|
|
437
794
|
end
|
|
438
795
|
|
|
439
|
-
dirs =
|
|
440
|
-
|
|
796
|
+
dirs, files = items.partition { |i| i[:is_dir] }
|
|
797
|
+
dirs = sort_items.call(dirs)
|
|
798
|
+
files = sort_items.call(files)
|
|
441
799
|
|
|
442
800
|
@items = dirs + files
|
|
443
801
|
@path = relative_path
|
|
@@ -446,6 +804,284 @@ module MarkdownServer
|
|
|
446
804
|
erb :directory
|
|
447
805
|
end
|
|
448
806
|
|
|
807
|
+
def parse_comparable_value(s)
|
|
808
|
+
stripped = s.gsub(/[$,]/, "")
|
|
809
|
+
if stripped.match?(/\A-?\d+\.?\d*\z/)
|
|
810
|
+
return { kind: :number, value: stripped.to_f }
|
|
811
|
+
end
|
|
812
|
+
if (m = s.match(%r{\A(\d{1,2})/(\d{1,2})/(\d{4})\z}))
|
|
813
|
+
d = Date.new(m[3].to_i, m[1].to_i, m[2].to_i)
|
|
814
|
+
return { kind: :date, value: d }
|
|
815
|
+
end
|
|
816
|
+
nil
|
|
817
|
+
rescue Date::Error
|
|
818
|
+
nil
|
|
819
|
+
end
|
|
820
|
+
|
|
821
|
+
def parse_filter_term(t)
|
|
822
|
+
if (m = t.match(/\A(>=?|<=?)\s*(.+)\z/))
|
|
823
|
+
val = parse_comparable_value(m[2])
|
|
824
|
+
return { type: :compare, op: m[1], value: val } if val
|
|
825
|
+
end
|
|
826
|
+
re = Regexp.new(t, Regexp::IGNORECASE)
|
|
827
|
+
{ type: :regex, re: re }
|
|
828
|
+
rescue RegexpError
|
|
829
|
+
{ type: :regex, re: Regexp.new(Regexp.escape(t), Regexp::IGNORECASE) }
|
|
830
|
+
end
|
|
831
|
+
|
|
832
|
+
def filter_term_matches?(term, cell_str)
|
|
833
|
+
if term[:type] == :compare
|
|
834
|
+
cell_val = parse_comparable_value(cell_str)
|
|
835
|
+
return false unless cell_val && cell_val[:kind] == term[:value][:kind]
|
|
836
|
+
case term[:op]
|
|
837
|
+
when ">" then cell_val[:value] > term[:value][:value]
|
|
838
|
+
when ">=" then cell_val[:value] >= term[:value][:value]
|
|
839
|
+
when "<" then cell_val[:value] < term[:value][:value]
|
|
840
|
+
when "<=" then cell_val[:value] <= term[:value][:value]
|
|
841
|
+
else false
|
|
842
|
+
end
|
|
843
|
+
else
|
|
844
|
+
term[:re].match?(cell_str)
|
|
845
|
+
end
|
|
846
|
+
end
|
|
847
|
+
|
|
848
|
+
def csv_databases_json
|
|
849
|
+
loader = settings.csv_browser_config
|
|
850
|
+
return "[]" unless loader
|
|
851
|
+
|
|
852
|
+
databases = loader.databases.map do |db|
|
|
853
|
+
yaml_dir = File.dirname(db.yaml_path)
|
|
854
|
+
db_entry = {
|
|
855
|
+
key: db.key,
|
|
856
|
+
title: db.title,
|
|
857
|
+
tables: db.tables.select { |t| File.exist?(t.csv_path) }.map do |t|
|
|
858
|
+
entry = {
|
|
859
|
+
key: t.key,
|
|
860
|
+
title: t.title,
|
|
861
|
+
views: t.views.map { |v| { key: v.key, title: v.title } }
|
|
862
|
+
}
|
|
863
|
+
entry[:color] = t.color if t.color
|
|
864
|
+
if db.group_by_directory
|
|
865
|
+
rel_dir = File.dirname(t.csv_path).delete_prefix("#{yaml_dir}/")
|
|
866
|
+
rel_dir = nil if rel_dir == yaml_dir || rel_dir == "."
|
|
867
|
+
entry[:group] = rel_dir
|
|
868
|
+
end
|
|
869
|
+
entry[:record_count] = [File.foreach(t.csv_path).count - 1, 0].max if db.show_record_counts
|
|
870
|
+
entry
|
|
871
|
+
end
|
|
872
|
+
}
|
|
873
|
+
db_entry[:group_by_directory] = true if db.group_by_directory
|
|
874
|
+
base = File.realpath(root_dir)
|
|
875
|
+
db_entry[:yaml_path] = db.yaml_path.delete_prefix("#{base}/")
|
|
876
|
+
db_entry
|
|
877
|
+
end
|
|
878
|
+
|
|
879
|
+
unmapped = loader.unmapped_csv_files
|
|
880
|
+
unless unmapped.empty?
|
|
881
|
+
databases << {
|
|
882
|
+
key: "_unmapped",
|
|
883
|
+
title: "Unmapped Files",
|
|
884
|
+
virtual: true,
|
|
885
|
+
tables: unmapped.map do |entry|
|
|
886
|
+
dir = File.dirname(entry[:relative])
|
|
887
|
+
{
|
|
888
|
+
key: entry[:relative],
|
|
889
|
+
title: File.basename(entry[:relative], ".csv"),
|
|
890
|
+
group: dir == "." ? nil : dir,
|
|
891
|
+
views: [{ key: "all", title: "All" }],
|
|
892
|
+
color: "#888"
|
|
893
|
+
}
|
|
894
|
+
end
|
|
895
|
+
}
|
|
896
|
+
end
|
|
897
|
+
|
|
898
|
+
databases.to_json
|
|
899
|
+
end
|
|
900
|
+
|
|
901
|
+
def browser_render_directory(real_path, requested)
|
|
902
|
+
entries = Dir.entries(real_path).reject do |e|
|
|
903
|
+
e.start_with?(".") || EXCLUDED.include?(e)
|
|
904
|
+
end
|
|
905
|
+
|
|
906
|
+
browse_prefix = requested.empty? ? "" : "#{requested}/"
|
|
907
|
+
|
|
908
|
+
items = entries.map do |name|
|
|
909
|
+
full = File.join(real_path, name)
|
|
910
|
+
stat = File.stat(full) rescue next
|
|
911
|
+
is_dir = stat.directory?
|
|
912
|
+
btime = stat.respond_to?(:birthtime) ? stat.birthtime : stat.mtime rescue stat.mtime
|
|
913
|
+
item = {
|
|
914
|
+
name: name,
|
|
915
|
+
is_dir: is_dir,
|
|
916
|
+
path: browse_prefix + name,
|
|
917
|
+
icon: icon_for(name, is_dir),
|
|
918
|
+
mtime: format_date(stat.mtime),
|
|
919
|
+
mtime_ts: stat.mtime.to_f,
|
|
920
|
+
ctime: format_date(btime),
|
|
921
|
+
ctime_ts: btime.to_f
|
|
922
|
+
}
|
|
923
|
+
unless is_dir
|
|
924
|
+
item[:size] = format_size(stat.size)
|
|
925
|
+
item[:size_bytes] = stat.size
|
|
926
|
+
end
|
|
927
|
+
item
|
|
928
|
+
end.compact
|
|
929
|
+
|
|
930
|
+
title = requested.empty? ? dir_title : File.basename(requested)
|
|
931
|
+
{ type: "directory", path: requested, title: title, items: items }
|
|
932
|
+
end
|
|
933
|
+
|
|
934
|
+
# Renders a single file for the browser UI, returning a Hash payload whose
# :type drives the client-side view (markdown, code, csv_table,
# csv_database, download, external).
#
# real_path - absolute filesystem path of the file
# requested - browser-relative path as requested by the client
# raw:      - when true, skip the CSV-browser views and render the file
#             as plain content (code/download) instead
#
# Resolution order: plugins get first refusal via #browser_render; then the
# CSV-browser configuration (database YAML files and mapped CSV tables);
# then standalone CSVs; finally built-in handlers selected by extension.
def browser_render_file(real_path, requested, raw: false)
  ext = File.extname(real_path).downcase
  title = File.basename(real_path)
  download_href = "/download/" + requested.split("/").map { |p| encode_path_component(p) }.join("/")

  # Check plugins first — the first plugin returning a truthy payload wins.
  settings.plugins.each do |p|
    result = p.browser_render(requested, real_path, self)
    return result if result
  end

  # Check CSV browser (skip when raw rendering requested)
  loader = settings.csv_browser_config
  if loader && !raw
    if ext == ".yaml" || ext == ".yml"
      db = loader.find_database_by_yaml_path(real_path)
      if db
        # A database definition file: render the database overview page.
        return {
          type: "csv_database",
          title: db.title,
          db_key: db.key,
          tables: db.tables.map do |t|
            entry = {
              key: t.key,
              title: t.title,
              views: t.views.map { |v| { key: v.key, title: v.title } }
            }
            entry[:color] = t.color if t.color
            entry
          end
        }
      end
    end

    result = loader.find_table_by_csv_path(real_path)
    if result
      db, table = result
      # Default to the table's first configured view, or "all" if none.
      view_key = table.views.first&.key || "all"
      reader = CsvBrowser::TableReader.new(table)
      data = reader.read(view_key)
      lookups = loader.resolve_references(db, table)
      # Decorate reference columns with the color of the referenced table
      # so the client can tint cross-table links.
      data[:columns].each do |col|
        next unless col[:references]
        ref_table = db.tables.find { |t| t.key == col[:references][:table] }
        col[:references] = col[:references].merge(color: ref_table.color) if ref_table&.color
      end
      reverse_refs = loader.resolve_reverse_references(db, table)
      resp = {
        type: "csv_table",
        title: "#{table.title} \u2014 #{table.views.first&.title || "All"}",
        db_key: db.key,
        table_key: table.key,
        view_key: view_key,
        views: table.views.map { |v| { key: v.key, title: v.title } },
        columns: data[:columns],
        rows: data[:rows]
      }
      resp[:color] = table.color if table.color
      resp[:references] = lookups unless lookups.empty?
      resp[:reverse_references] = reverse_refs unless reverse_refs.empty?
      return resp
    end
  end

  # Standalone CSV files (not in any database) — render as table
  if ext == ".csv" && !raw
    return render_standalone_csv(real_path, title, requested)
  end

  # Default file rendering
  case ext
  when ".md"
    content = File.read(real_path, encoding: "utf-8")
    meta, body = parse_frontmatter(content)
    @current_wiki_dir = File.dirname(real_path)
    html = render_markdown(body)
    settings.plugins.each { |p| html = p.post_render(html, meta, self) }

    frontmatter_html = ""
    if meta && !meta.empty?
      rows = meta.map { |key, value|
        "<tr><th>#{h(key)}</th><td>#{render_frontmatter_value(value)}</td></tr>"
      }.join
      frontmatter_html = %(<div class="frontmatter"><div class="frontmatter-heading">Frontmatter</div><table class="meta-table">#{rows}</table></div>)
    end

    { type: "markdown", title: title, html: html, frontmatter_html: frontmatter_html }

  when ".json"
    # Renamed from `raw` to avoid shadowing the `raw:` keyword parameter.
    json_src = File.read(real_path, encoding: "utf-8")
    begin
      # Valid JSON is re-presented as YAML for readability.
      data = JSON.parse(json_src)
      yaml_str = YAML.dump(data)
      { type: "code", title: title, language: "yaml", html: syntax_highlight(yaml_str, "yaml") }
    rescue JSON::ParserError
      { type: "code", title: title, language: "json", html: syntax_highlight(json_src, "json") }
    end

  when ".pdf", ".epub"
    # Binary document formats: offer a download link.
    # (Merged the previously duplicated .pdf and .epub branches.)
    size = File.size(real_path) rescue 0
    { type: "download", title: title, href: download_href, size: format_size(size) }

  when ".html"
    { type: "external", title: title, href: "/browse/" + requested.split("/").map { |p| encode_path_component(p) }.join("/") }

  else
    # Best-effort read; any failure (missing, unreadable) falls back to a
    # download link, as do files that are not valid UTF-8 text.
    content = File.read(real_path, encoding: "utf-8") rescue nil
    if content.nil? || content.encoding == Encoding::BINARY || !content.valid_encoding?
      size = File.size(real_path) rescue 0
      { type: "download", title: title, href: download_href, size: format_size(size) }
    else
      lang = case ext
             when ".py" then "python"
             when ".rb" then "ruby"
             when ".csv" then "text"
             when ".sh" then "bash"
             when ".yaml", ".yml" then "yaml"
             when ".erb" then "html"
             when ".css" then "css"
             when ".js" then "javascript"
             else "text"
             end
      { type: "code", title: title, language: lang, html: syntax_highlight(content, lang) }
    end
  end
end
|
|
1064
|
+
|
|
1065
|
+
# Renders a CSV file that belongs to no configured database as a read-only
# table payload for the browser UI.
#
# real_path - absolute filesystem path of the CSV file
# title     - display title (typically the file's basename)
# requested - browser-relative path, echoed back as :standalone_csv
#
# Returns a "csv_table" Hash with a synthetic index column prepended to
# each row, a single "all" view, and :readonly set.
def render_standalone_csv(real_path, title, requested)
  table = CSV.read(real_path, headers: true)
  header_names = table.headers.compact
  columns = header_names.map { |name| { key: name, title: name, type: "string" } }

  data_rows = []
  table.each_with_index do |record, row_index|
    data_rows << ([row_index] + header_names.map { |name| record[name] })
  end

  {
    type: "csv_table",
    title: title,
    standalone_csv: requested,
    view_key: "all",
    views: [{ key: "all", title: "All" }],
    columns: columns,
    rows: data_rows,
    readonly: true
  }
end
|
|
1084
|
+
|
|
449
1085
|
def render_file(real_path, relative_path)
|
|
450
1086
|
ext = File.extname(real_path).downcase
|
|
451
1087
|
@crumbs = breadcrumbs(relative_path)
|
|
@@ -512,7 +1148,10 @@ module MarkdownServer
|
|
|
512
1148
|
when ".csv" then "text"
|
|
513
1149
|
when ".sh" then "bash"
|
|
514
1150
|
when ".yaml", ".yml" then "yaml"
|
|
515
|
-
when ".
|
|
1151
|
+
when ".erb" then "html"
|
|
1152
|
+
# .html and .js are handled by dedicated branches above;
|
|
1153
|
+
# kept here for completeness if those branches are ever removed
|
|
1154
|
+
when ".html" then "html"
|
|
516
1155
|
when ".js" then "javascript"
|
|
517
1156
|
else "text"
|
|
518
1157
|
end
|