markdownr 0.7.2 → 0.8.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/bin/Dockerfile.markdownr +1 -1
- data/bin/markdownr +79 -0
- data/bin/markdownr-servers.yaml +39 -0
- data/bin/start-claude +2 -0
- data/lib/markdown_server/app.rb +953 -107
- data/lib/markdown_server/assets/editor-loader.js +362 -0
- data/lib/markdown_server/csv_browser/addon_registry.rb +137 -0
- data/lib/markdown_server/csv_browser/config_loader.rb +231 -0
- data/lib/markdown_server/csv_browser/row_context.rb +146 -0
- data/lib/markdown_server/csv_browser/table_reader.rb +259 -0
- data/lib/markdown_server/helpers/admin_helpers.rb +25 -1
- data/lib/markdown_server/helpers/formatting_helpers.rb +3 -1
- data/lib/markdown_server/helpers/markdown_helpers.rb +132 -5
- data/lib/markdown_server/helpers/path_helpers.rb +56 -7
- data/lib/markdown_server/helpers/search_helpers.rb +31 -3
- data/lib/markdown_server/permitted_bases.rb +13 -0
- data/lib/markdown_server/plugin.rb +11 -0
- data/lib/markdown_server/plugins/bible_citations/citations.rb +4 -4
- data/lib/markdown_server/unhide.rb +114 -0
- data/lib/markdown_server/version.rb +1 -1
- data/views/browser.erb +5794 -0
- data/views/layout.erb +124 -20
- data/views/popup_assets.erb +52 -26
- metadata +40 -2
|
@@ -0,0 +1,231 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "csv"
require "set"
require "yaml"
require "json_schemer"
|
|
6
|
+
|
|
7
|
+
module MarkdownServer
  module CsvBrowser
    # Loads database definitions from YAML files.
    # Each YAML file defines one database with its tables, schemas, and views.
    # CSV paths are resolved relative to the YAML file's directory.
    class ConfigLoader
      Database = Struct.new(:key, :title, :tables, :yaml_path, :group_by_directory, :show_record_counts, keyword_init: true)
      Table = Struct.new(:key, :title, :csv_path, :columns, :required, :schema, :views, :color, :addons, keyword_init: true)
      Column = Struct.new(:key, :title, :type, :constraints, :references, keyword_init: true)
      View = Struct.new(:key, :title, :columns, keyword_init: true)

      # yaml_paths: database YAML file paths, resolved against root_dir.
      # root_dir:   project root used for path resolution and CSV discovery.
      def initialize(yaml_paths, root_dir)
        @yaml_paths = yaml_paths
        @root_dir = root_dir
        @databases = yaml_paths.filter_map { |path| load_database(path) }
      end

      attr_reader :databases

      # Re-reads .markdownr.yml (when present) to pick up a changed database
      # list, then reloads all database definitions from disk.
      def reload!
        config_path = File.join(@root_dir, ".markdownr.yml")
        if File.exist?(config_path)
          yaml =
            begin
              YAML.safe_load(File.read(config_path), permitted_classes: [Symbol], aliases: true)
            rescue StandardError
              nil # unreadable/invalid config: keep the current path list
            end
          paths = yaml&.dig("csv_databases") || yaml&.dig("plugins", "csv_browser", "databases")
          @yaml_paths = paths if paths.is_a?(Array) && !paths.empty?
        end
        @databases = @yaml_paths.filter_map { |path| load_database(path) }
      end

      # Returns the Database with the given key, or nil.
      def database(key)
        @databases.find { |db| db.key == key }
      end

      # Returns Database if real_path matches a configured YAML database file
      def find_database_by_yaml_path(real_path)
        @databases.find { |db| db.yaml_path == real_path }
      end

      # Builds FK lookup maps for a table's columns that have references.
      # Returns { "column_key" => { id_value => display_value, ... }, ... }
      def resolve_references(database, table)
        lookups = {}
        table.columns.each do |col|
          next unless col.references

          ref_table = database.tables.find { |t| t.key == col.references[:table] }
          next unless ref_table
          next unless File.exist?(ref_table.csv_path)

          map = {}
          CSV.foreach(ref_table.csv_path, headers: true) do |row|
            key_val = row[col.references[:column]]
            display_val = row[col.references[:display]]
            map[key_val] = display_val if key_val && display_val
          end
          lookups[col.key] = map
        end
        lookups
      end

      # Finds other tables in the database that have FK columns pointing at this table.
      # Returns [{ table: "enrollments", label: "Enrollments", column: "class_id", references_column: "id" }, ...]
      def resolve_reverse_references(database, table)
        reverse = []
        database.tables.each do |other_table|
          next if other_table.key == table.key

          other_table.columns.each do |col|
            next unless col.references && col.references[:table] == table.key

            entry = {
              table: other_table.key,
              title: other_table.title,
              column: col.key,
              references_column: col.references[:column]
            }
            entry[:color] = other_table.color if other_table.color
            reverse << entry
          end
        end
        reverse
      end

      # Returns [database, table] if real_path matches a table's CSV file, or nil
      def find_table_by_csv_path(real_path)
        @databases.each do |db|
          db.tables.each do |table|
            return [db, table] if table.csv_path == real_path
          end
        end
        nil
      end

      # Returns CSV file paths under root_dir that aren't claimed by any database table.
      # Each entry is { path: "/abs/path.csv", relative: "sub/dir/file.csv" }.
      def unmapped_csv_files
        referenced = Set.new
        @databases.each do |db|
          db.tables.each { |t| referenced << t.csv_path }
        end

        root = File.realpath(@root_dir)
        Dir.glob(File.join(root, "**", "*.csv")).filter_map do |path|
          real = File.realpath(path)
          next if referenced.include?(real)

          relative = real.delete_prefix("#{root}/")
          { path: real, relative: relative }
        end.sort_by { |e| e[:relative] }
      end

      private

      # Parses one database YAML file into a Database struct.
      # Returns nil (with console feedback) on missing file or parse error.
      def load_database(relative_path)
        path = File.expand_path(relative_path, @root_dir)
        $stdout.write "[csv-browser] Loading #{relative_path}..."
        $stdout.flush

        unless File.exist?(path)
          puts "\n\n  \e[33mnot found: #{path}\e[0m\n\n"
          return nil
        end

        yaml_dir = File.dirname(path)
        config = YAML.safe_load(File.read(path), permitted_classes: [Symbol], aliases: true)
        db_key = File.basename(path, ".*")

        tables = (config["tables"] || {}).filter_map do |table_key, table_def|
          build_table(table_key, table_def, yaml_dir)
        end

        group = config.fetch("group_by_directory", true)
        counts = config.fetch("show_record_counts", true)
        db = Database.new(key: db_key, title: config["title"] || config["label"] || db_key.capitalize,
                          tables: tables, yaml_path: File.realpath(path),
                          group_by_directory: group, show_record_counts: counts)
        puts " ok (#{tables.length} tables)"
        db
      rescue => e
        puts "\n\n  \e[31merror: #{e.message}\e[0m\n\n"
        nil
      end

      # Builds a Table struct from one entry under the YAML "tables" key.
      # Returns nil (table skipped) when the referenced CSV file is missing.
      def build_table(key, definition, yaml_dir)
        properties = definition["properties"] || {}
        required = definition["required"] || []

        columns = properties.map do |col_key, col_def|
          ref = col_def["references"]
          Column.new(
            key: col_key,
            title: col_def["title"] || col_key.capitalize,
            type: col_def["type"] || "string",
            constraints: col_def.except("type", "title", "references"),
            references: ref ? { table: ref["table"], column: ref["column"], display: ref["display"] } : nil
          )
        end

        views = (definition["views"] || { "all" => { "title" => "All" } }).map do |view_key, view_def|
          View.new(
            key: view_key,
            title: view_def["title"] || view_def["label"] || view_key.capitalize,
            columns: view_def["columns"]
          )
        end

        schema = build_json_schema(key, properties, required, definition)

        default_title = key.tr("_", " ").split.map(&:capitalize).join(" ")
        csv_path = File.expand_path(definition["csv"], yaml_dir)

        unless File.exist?(csv_path)
          warn "[csv-browser] Skipping table '#{key}': CSV not found at #{csv_path}"
          return nil
        end

        Table.new(
          key: key,
          title: definition["title"] || default_title,
          csv_path: File.realpath(csv_path),
          columns: columns,
          required: required,
          schema: schema,
          views: views,
          color: definition["color"],
          addons: definition["addons"] || []
        )
      end

      # Translates the YAML column definitions into a compiled JSON Schema
      # (json_schemer), including conditional keywords (if/then/else/allOf).
      def build_json_schema(table_key, properties, required, definition = {})
        json_props = properties.transform_values do |col_def|
          prop = {}
          case col_def["type"]
          when "integer"
            prop["type"] = "integer"
          when "number"
            prop["type"] = "number"
          else
            prop["type"] = "string"
          end
          prop["title"] = col_def["title"] if col_def["title"]
          prop["enum"] = col_def["enum"] if col_def["enum"]
          prop["pattern"] = col_def["pattern"] if col_def["pattern"]
          prop["format"] = col_def["format"] if col_def["format"]
          prop["minimum"] = col_def["minimum"] if col_def["minimum"]
          prop["maximum"] = col_def["maximum"] if col_def["maximum"]
          prop["minLength"] = col_def["minLength"] if col_def["minLength"]
          prop["maxLength"] = col_def["maxLength"] if col_def["maxLength"]
          prop
        end

        schema = {
          "$schema" => "https://json-schema.org/draft/2020-12/schema",
          "title" => table_key,
          "type" => "object",
          "properties" => json_props,
          "required" => required
        }

        schema["if"] = definition["if"] if definition["if"]
        schema["then"] = definition["then"] if definition["then"]
        schema["else"] = definition["else"] if definition["else"]
        schema["allOf"] = definition["allOf"] if definition["allOf"]

        JSONSchemer.schema(schema)
      end
    end
  end
end
|
|
@@ -0,0 +1,146 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "csv"
|
|
4
|
+
|
|
5
|
+
module MarkdownServer
  module CsvBrowser
    # Context object passed to add-on action blocks. Exposes the active
    # row/table/options/input/state and provides table read/write primitives
    # that all flow through TableReader (so validation and CSV writes behave
    # exactly like the row-editor path).
    #
    # A RowContext represents one HTTP invocation. For a multi-round prompt,
    # a fresh RowContext is created per POST; state is what carries data
    # across rounds (round-tripping through the client).
    class RowContext
      attr_reader :database, :table, :row_index, :row, :options, :input, :state

      def initialize(database:, table:, row_index:, row:, options: {}, input: nil, state: nil)
        @database = database
        @table = table
        @row_index = row_index
        @row = row || {}
        @options = options || {}
        @input = symbolize_keys(input)
        @state = StateHash.wrap(state)
      end

      # List rows from any table in this database.
      # Returns an array of plain hashes keyed by column name (strings).
      def read_table(table_name, where: {})
        t = resolve_table(table_name)
        return [] unless t && File.exist?(t.csv_path)

        rows = []
        CSV.foreach(t.csv_path, headers: true) do |row|
          h = row.to_h
          next unless where.empty? || where.all? { |k, v| h[k.to_s].to_s == v.to_s }

          rows << h
        end
        rows
      end

      # Inserts a row via TableReader (schema-validated). `at: nil` appends.
      def insert_row(table_name, values, at: nil)
        reader = TableReader.new(resolve_table!(table_name))
        reader.insert_row(stringify_values(values), at: at)
      end

      # Updates a row via TableReader (schema-validated).
      def update_row(table_name, row_index, changes)
        reader = TableReader.new(resolve_table!(table_name))
        reader.update_row(row_index, stringify_values(changes))
      end

      # Deletes a row via TableReader.
      def delete_row(table_name, row_index)
        reader = TableReader.new(resolve_table!(table_name))
        reader.delete_row(row_index)
      end

      # Return-value helpers — each produces the shape the HTTP layer will
      # serialize back to the client.
      def prompt(title:, fields:, state: {})
        { kind: "prompt", title: title, fields: fields, state: stringify_state(state) }
      end

      def done(reload: true)
        { kind: "done", reload: reload }
      end

      def error(message, fields: nil)
        h = { kind: "error", message: message }
        h[:fields] = fields if fields
        h
      end

      private

      # Accepts either a table key (string/symbol) or a Table struct.
      def resolve_table(table_name)
        return table_name if table_name.is_a?(ConfigLoader::Table)

        key = table_name.to_s
        @database.tables.find { |t| t.key == key }
      end

      def resolve_table!(table_name)
        t = resolve_table(table_name)
        raise ArgumentError, "unknown table: #{table_name.inspect}" unless t

        t
      end

      # CSV cells are strings: stringify every value, preserving nil.
      def stringify_values(values)
        values.each_with_object({}) { |(k, v), h| h[k.to_s] = v.nil? ? nil : v.to_s }
      end

      def stringify_state(state)
        return {} if state.nil?

        state.each_with_object({}) { |(k, v), h| h[k.to_s] = v }
      end

      def symbolize_keys(h)
        return nil if h.nil?
        return h unless h.is_a?(Hash)

        h.each_with_object({}) { |(k, v), out| out[k.to_sym] = v }
      end

      # Hash wrapper that allows symbol OR string key access, so handlers can
      # write `ctx.state[:step]` regardless of JSON round-trip. Values are
      # plain JSON-serializable objects (the round-trip strips Ruby symbols —
      # use strings for step identifiers).
      class StateHash
        def self.wrap(h)
          return new({}) if h.nil?
          return h if h.is_a?(StateHash)

          new(h)
        end

        def initialize(h)
          @h = h.is_a?(Hash) ? h : {}
        end

        # Lookup by key?, not truthiness, so stored false/nil values are
        # returned as-is instead of falling through to other key forms.
        def [](key)
          return @h[key] if @h.key?(key)
          return @h[key.to_s] if @h.key?(key.to_s)

          @h[key.to_sym]
        end

        def []=(key, value)
          @h[key.to_s] = value
        end

        def key?(key)
          @h.key?(key) || @h.key?(key.to_s) || @h.key?(key.to_sym)
        end

        def to_h
          @h.dup
        end

        def empty?
          @h.empty?
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,259 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "csv"
|
|
4
|
+
|
|
5
|
+
module MarkdownServer
  module CsvBrowser
    # Reads CSV files and applies view filtering (column subsets).
    # Coerces values to match schema types (integer, number, string).
    class TableReader
      # table: a ConfigLoader::Table (csv_path, columns, views, schema).
      def initialize(table)
        @table = table
      end

      # Returns { columns: [{key, title, type}], rows: [[idx, val, ...]] }
      # for the given view. Each row's first element is its original file index.
      def read(view_key = nil)
        view = find_view(view_key)
        apply_view(read_csv, view)
      end

      # Validates changed cells against the table schema.
      # changes is a hash of { col_key => new_string_value }.
      # Returns { valid: true } or { valid: false, errors: [...] }.
      def validate_cells(row_index, changes)
        rows = read_csv_raw
        if row_index < 0 || row_index >= rows.length
          return { valid: false, errors: [{ "message" => "Row not found", "fields" => [] }] }
        end

        row = rows[row_index]
        merged = row.to_h.merge(changes)

        # Coerce to schema types for validation (skip blank optional fields
        # so json_schemer doesn't reject nil as non-string)
        coerced = {}
        @table.columns.each do |col|
          val = coerce_for_validation(merged[col.key], col.type)
          coerced[col.key] = val unless val.nil?
        end

        errors = @table.schema.validate(coerced).map { |e| format_error(e) }

        errors.empty? ? { valid: true } : { valid: false, errors: errors }
      end

      # Validates all rows against the table schema.
      # Returns { row_index => [error_strings, ...], ... } for failing rows only.
      def validate_all
        rows = read_csv_raw
        errors_by_row = {}

        rows.each_with_index do |row, idx|
          coerced = {}
          @table.columns.each do |col|
            val = coerce_for_validation(row[col.key], col.type)
            coerced[col.key] = val unless val.nil?
          end

          errors = @table.schema.validate(coerced).map { |e| format_error(e) }

          errors_by_row[idx] = errors unless errors.empty?
        end

        errors_by_row
      end

      # Deletes a row from the CSV file.
      # Returns { deleted: true } or { deleted: false, error: "..." }.
      def delete_row(row_index)
        rows = read_csv_raw
        return { deleted: false, error: "Row not found" } if row_index < 0 || row_index >= rows.length

        rows.delete_at(row_index)
        write_csv(rows)
        { deleted: true }
      end

      # Inserts a new row. `values` is { col_key => string_value }.
      # When `at` is nil the row is appended; otherwise it's inserted at that index.
      # Returns { valid: true, new_index: i } or { valid: false, errors: [...] }.
      def insert_row(values, at: nil)
        string_values = values.each_with_object({}) { |(k, v), h| h[k.to_s] = v.nil? ? nil : v.to_s }

        coerced = {}
        @table.columns.each do |col|
          val = coerce_for_validation(string_values[col.key], col.type)
          coerced[col.key] = val unless val.nil?
        end
        errors = @table.schema.validate(coerced).map { |e| format_error(e) }
        return { valid: false, errors: errors } unless errors.empty?

        rows = read_csv_raw
        headers = @table.columns.map(&:key)
        new_row = CSV::Row.new(headers, headers.map { |h| string_values[h] })

        if at.nil? || at >= rows.length
          rows << new_row
          new_index = rows.length - 1
        else
          at = 0 if at < 0
          rows.insert(at, new_row)
          new_index = at
        end

        write_csv(rows)
        { valid: true, new_index: new_index }
      end

      # Duplicates a row by inserting an identical copy immediately after it.
      # Returns { duplicated: true, new_index: row_index + 1 } or
      # { duplicated: false, error: "..." }.
      def duplicate_row(row_index)
        rows = read_csv_raw
        return { duplicated: false, error: "Row not found" } if row_index < 0 || row_index >= rows.length

        source = rows[row_index]
        copy = CSV::Row.new(source.headers, source.fields)
        rows.insert(row_index + 1, copy)
        write_csv(rows)
        { duplicated: true, new_index: row_index + 1 }
      end

      # Updates a row in the CSV file. changes is { col_key => new_string_value }.
      # Returns { valid: true } or { valid: false, errors: [...] }.
      def update_row(row_index, changes)
        result = validate_cells(row_index, changes)
        return result unless result[:valid]

        rows = read_csv_raw
        row = rows[row_index]
        changes.each { |k, v| row[k] = v }

        write_csv(rows)
        { valid: true }
      end

      private

      # Resolves a view key, falling back to the table's first view.
      def find_view(view_key)
        if view_key
          @table.views.find { |v| v.key == view_key } || @table.views.first
        else
          @table.views.first
        end
      end

      # Returns array of arrays: [[coerced_val, ...], ...] in column order
      def read_csv
        return [] unless File.exist?(@table.csv_path)

        rows = []
        CSV.foreach(@table.csv_path, headers: true) do |row|
          rows << @table.columns.map do |col|
            coerce(row[col.key], col.type)
          end
        end
        rows
      end

      # Returns array of CSV::Row objects (preserves original string values)
      def read_csv_raw
        return [] unless File.exist?(@table.csv_path)

        rows = []
        CSV.foreach(@table.csv_path, headers: true) do |row|
          rows << row
        end
        rows
      end

      # Rewrites the whole CSV file: header row (in configured column order)
      # followed by every row's values for those columns.
      def write_csv(csv_rows)
        headers = @table.columns.map(&:key)
        CSV.open(@table.csv_path, "w") do |csv|
          csv << headers
          csv_rows.each do |row|
            csv << headers.map { |h| row[h] }
          end
        end
      end

      def apply_view(rows, view)
        visible_columns = if view&.columns
                            @table.columns.select { |c| view.columns.include?(c.key) }
                          else
                            @table.columns
                          end

        col_indices = visible_columns.map { |vc| @table.columns.index(vc) }

        # Prepend original row index to each row
        filtered_rows = rows.each_with_index.map do |row, idx|
          [idx] + col_indices.map { |i| row[i] }
        end

        {
          columns: visible_columns.map { |c|
            col = { key: c.key, title: c.title, type: c.type }
            col[:references] = c.references if c.references
            col[:constraints] = c.constraints if c.constraints && !c.constraints.empty?
            col
          },
          rows: filtered_rows
        }
      end

      # Formats a json_schemer error into a hash with message and affected fields.
      # Returns { "message" => String, "fields" => [String] }
      def format_error(error)
        field = error["data_pointer"].sub(%r{^/}, "")
        col = @table.columns.find { |c| c.key == field } unless field.empty?
        label = col ? col.title : field
        fields = field.empty? ? [] : [field]

        case error["type"]
        when "required"
          keys = error.dig("details", "missing_keys") || []
          titles = keys.map { |k| (@table.columns.find { |c| c.key == k }&.title) || k }
          { "message" => "missing required: #{titles.join(", ")}", "fields" => keys }
        when "enum"
          { "message" => "#{label}: not a valid option", "fields" => fields }
        when "pattern"
          { "message" => "#{label}: does not match expected format", "fields" => fields }
        when "integer", "number"
          { "message" => "#{label}: must be a #{error["type"]}", "fields" => fields }
        else
          msg = col ? "#{label}: #{error["type"]}" : error["error"]
          { "message" => msg, "fields" => fields }
        end
      end

      # Coerces a raw CSV cell for display; falls back to the original
      # string when it doesn't parse as the declared numeric type.
      def coerce(value, type)
        return nil if value.nil? || value.strip.empty?

        case type
        when "integer"
          Integer(value, exception: false) || value
        when "number"
          Float(value, exception: false) || value
        else
          value
        end
      end

      # Like #coerce but for schema validation: accepts non-string input
      # and always returns a String for string-typed columns.
      def coerce_for_validation(value, type)
        return nil if value.nil? || value.to_s.strip.empty?

        case type
        when "integer"
          Integer(value, exception: false) || value
        when "number"
          Float(value, exception: false) || value
        else
          value.to_s
        end
      end
    end
  end
end
|
|
@@ -1,3 +1,5 @@
|
|
|
1
|
+
require "ipaddr"
|
|
2
|
+
|
|
1
3
|
module MarkdownServer
|
|
2
4
|
module Helpers
|
|
3
5
|
module AdminHelpers
|
|
@@ -19,13 +21,23 @@ module MarkdownServer
|
|
|
19
21
|
end
|
|
20
22
|
end
|
|
21
23
|
|
|
24
|
+
# True when the whole site should require admin authentication.
# Checks the runtime Sinatra setting first, then the persisted setup
# config. NOTE(review): `settings`/`setup_config` are defined elsewhere
# in this helpers module — behavior of their return values not visible here.
def admin_only_mode?
  return true if settings.admin_only
  setup_config["admin_only"] == true
end
|
|
28
|
+
|
|
29
|
+
# Routes that must remain reachable even in admin-only mode
# (login/logout plus robots.txt for crawlers).
def admin_only_public_route?(path)
  ["/admin/login", "/admin/logout", "/robots.txt"].include?(path)
end
|
|
32
|
+
|
|
22
33
|
def admin?
|
|
34
|
+
return true if settings.admin_all
|
|
23
35
|
return true if session[:admin]
|
|
24
36
|
|
|
25
37
|
adm = setup_config["admin"]
|
|
26
38
|
return false unless adm.is_a?(Hash)
|
|
27
39
|
|
|
28
|
-
return true if adm["ip"]
|
|
40
|
+
return true if ip_allowed?(adm["ip"], client_ip)
|
|
29
41
|
|
|
30
42
|
if adm["user"] && adm["pw"]
|
|
31
43
|
auth = request.env["HTTP_AUTHORIZATION"].to_s
|
|
@@ -37,6 +49,18 @@ module MarkdownServer
|
|
|
37
49
|
|
|
38
50
|
false
|
|
39
51
|
end
|
|
52
|
+
|
|
53
|
+
private
|
|
54
|
+
|
|
55
|
+
# True when `ip` matches the configured allow-list.
# allowed: a single IP/CIDR string or an array of them; ip: client address.
# A malformed allow-list entry is skipped (previously one bad entry aborted
# the whole check, so valid entries after it could never match); a malformed
# client IP yields false.
def ip_allowed?(allowed, ip)
  return false unless allowed && ip

  client =
    begin
      IPAddr.new(ip.to_s.strip)
    rescue IPAddr::InvalidAddressError
      return false
    end

  entries = allowed.is_a?(Array) ? allowed : [allowed]
  entries.any? do |entry|
    IPAddr.new(entry.to_s.strip).include?(client)
  rescue IPAddr::InvalidAddressError
    false # skip unparseable entries instead of failing the whole check
  end
end
|
|
40
64
|
end
|
|
41
65
|
end
|
|
42
66
|
end
|
|
@@ -49,7 +49,9 @@ module MarkdownServer
|
|
|
49
49
|
end
|
|
50
50
|
|
|
51
51
|
def inline_directory_html(dir_path, relative_dir)
|
|
52
|
-
entries = Dir.entries(dir_path).
|
|
52
|
+
entries = Dir.entries(dir_path).select do |e|
|
|
53
|
+
e != "." && e != ".." && entry_admitted?(dir_path, relative_dir, e)
|
|
54
|
+
end
|
|
53
55
|
items = entries.map do |name|
|
|
54
56
|
stat = File.stat(File.join(dir_path, name)) rescue next
|
|
55
57
|
is_dir = stat.directory?
|