markdownr 0.7.1 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/bin/Dockerfile.markdownr +1 -1
- data/bin/markdownr +15 -0
- data/bin/markdownr-servers.yaml +39 -0
- data/lib/markdown_server/app.rb +729 -90
- data/lib/markdown_server/csv_browser/addon_registry.rb +137 -0
- data/lib/markdown_server/csv_browser/config_loader.rb +231 -0
- data/lib/markdown_server/csv_browser/row_context.rb +146 -0
- data/lib/markdown_server/csv_browser/table_reader.rb +259 -0
- data/lib/markdown_server/helpers/admin_helpers.rb +15 -1
- data/lib/markdown_server/plugin.rb +11 -0
- data/lib/markdown_server/version.rb +1 -1
- data/views/browser.erb +4408 -0
- data/views/layout.erb +2 -15
- metadata +35 -1
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
# frozen_string_literal: true

module MarkdownServer
  module CsvBrowser
    # Holds one add-on's declarative definition: its `actions` block and a set of
    # `on :action_id` handler blocks. Created via the DSL inside `register`.
    class AddonDefinition
      attr_reader :name

      def initialize(name)
        # Symbolize once so every later lookup agrees on the key type.
        @name = name.to_sym
        @actions_block = nil
        @handlers = {}
      end

      # DSL: stores the block that computes the action list for a row context.
      def actions(&block)
        @actions_block = block
      end

      # DSL: registers a handler block for one action id.
      def on(action_id, &block)
        @handlers[action_id.to_sym] = block
      end

      # Returns [{ id:, label:, enabled:, ... }, ...] for the given row context.
      # Entries are defensive copies: :id is symbolized and :enabled is coerced
      # to a strict boolean, defaulting to true when the block omitted it.
      def actions_for(ctx)
        return [] unless @actions_block

        result = @actions_block.call(ctx)
        Array(result).map do |entry|
          h = entry.dup
          h[:id] = h[:id].to_sym
          h[:enabled] = h.key?(:enabled) ? !!h[:enabled] : true
          h
        end
      end

      # The handler block registered for action_id, or nil when none exists.
      def handler_for(action_id)
        @handlers[action_id.to_sym]
      end
    end

    # Global registry of CSV add-ons.
    #
    # Lifecycle:
    # 1. A Ruby file calls `MarkdownServer::CsvAddonRegistry.register(:name) { ... }`
    #    at load time. That block is `instance_exec`'d against a fresh
    #    `AddonDefinition`, populating its `actions` and `on` entries.
    # 2. At startup, `load(addons_config, root_dir)` receives the `csv_addons`
    #    mapping from .markdownr.yml and `Kernel.load`s each absolute path; missing
    #    files warn but don't halt startup.
    # 3. `for_table(db, table)` returns the attachments a given table has
    #    declared via its `addons:` list; unknown add-on names warn once and
    #    are dropped.
    class CsvAddonRegistry
      @definitions = {}
      @loaded_config = {}

      class << self
        attr_reader :definitions, :loaded_config

        # Clears registered definitions, loaded config, AND the one-time
        # "unknown add-on" warning memo. Previously @warned_missing survived
        # reset!, so a re-registered world would silently skip warnings that
        # had fired before the reset — surprising in tests.
        def reset!
          @definitions = {}
          @loaded_config = {}
          @warned_missing = {}
        end

        # Builds an AddonDefinition, evaluates the DSL block against it, and
        # stores it under the symbolized name. Returns the definition.
        def register(name, &block)
          defn = AddonDefinition.new(name)
          defn.instance_exec(&block) if block
          @definitions[name.to_sym] = defn
          defn
        end

        # Looks up a definition by name (String or Symbol); nil when absent.
        def [](name)
          @definitions[name.to_sym]
        end

        # Loads add-on files declared in config. `addons_config` maps add-on
        # name => absolute file path; keys are symbolized into @loaded_config.
        # Missing files produce a warning and are skipped.
        def load(addons_config, _root_dir = nil)
          @loaded_config = (addons_config || {}).each_with_object({}) do |(k, v), h|
            h[k.to_sym] = v
          end

          @loaded_config.each do |name, path|
            unless File.exist?(path.to_s)
              $stderr.puts "\n\e[1;33mWarning: csv_addon '#{name}' file not found: #{path}\e[0m\n\n"
              next
            end
            # Use Kernel#load (not require) so repeated calls re-evaluate the
            # add-on file — important after `reset!` in tests and to let users
            # refresh their add-on without restarting (the gate is the
            # absolute path in .markdownr.yml).
            Kernel.load(path.to_s)
          end
        end

        # Returns the table's attachments as an array of
        # { definition: AddonDefinition, options: Hash } hashes.
        # Unknown names (declared on the table but not registered) produce a
        # one-time warning per (table, name) and are skipped.
        def for_table(_database, table)
          attached = Array(table.addons).map { |entry| normalize_attachment(entry) }
          attached.filter_map do |att|
            defn = @definitions[att[:name]]
            unless defn
              key = [table.key, att[:name]]
              @warned_missing ||= {}
              unless @warned_missing[key]
                $stderr.puts "\n\e[1;33mWarning: table '#{table.key}' references unknown csv_addon '#{att[:name]}'\e[0m\n\n"
                @warned_missing[key] = true
              end
              next
            end
            { definition: defn, options: att[:options] }
          end
        end

        private

        # Accepts a String/Symbol name or a Hash ({ id:/name:, options: }) and
        # normalizes it to { name: Symbol, options: Hash }. Anything else maps
        # to :__invalid__, which later triggers the unknown-add-on warning.
        def normalize_attachment(entry)
          case entry
          when String, Symbol
            { name: entry.to_sym, options: {} }
          when Hash
            sym = entry.transform_keys(&:to_sym)
            opts = sym[:options] || {}
            opts = opts.transform_keys(&:to_sym) if opts.is_a?(Hash)
            { name: (sym[:id] || sym[:name]).to_sym, options: opts }
          else
            { name: :__invalid__, options: {} }
          end
        end
      end
    end
  end

  # Top-level alias so add-on files can write:
  #   MarkdownServer::CsvAddonRegistry.register(:name) { ... }
  CsvAddonRegistry = CsvBrowser::CsvAddonRegistry
end
|
|
@@ -0,0 +1,231 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "csv"
require "set"
require "yaml"
require "json_schemer"

module MarkdownServer
  module CsvBrowser
    # Loads database definitions from YAML files.
    # Each YAML file defines one database with its tables, schemas, and views.
    # CSV paths are resolved relative to the YAML file's directory.
    class ConfigLoader
      Database = Struct.new(:key, :title, :tables, :yaml_path, :group_by_directory, :show_record_counts, keyword_init: true)
      Table = Struct.new(:key, :title, :csv_path, :columns, :required, :schema, :views, :color, :addons, keyword_init: true)
      Column = Struct.new(:key, :title, :type, :constraints, :references, keyword_init: true)
      View = Struct.new(:key, :title, :columns, keyword_init: true)

      # yaml_paths: database YAML files (relative paths resolved against root_dir).
      # Databases that fail to load are dropped (load_database returns nil).
      def initialize(yaml_paths, root_dir)
        @yaml_paths = yaml_paths
        @root_dir = root_dir
        @databases = yaml_paths.filter_map { |path| load_database(path) }
      end

      attr_reader :databases

      # Re-reads .markdownr.yml (best effort) to pick up an updated database
      # list, then reloads every database definition from disk.
      def reload!
        config_path = File.join(@root_dir, ".markdownr.yml")
        if File.exist?(config_path)
          # Deliberate best-effort parse: a broken config file keeps the
          # previously known path list instead of raising.
          yaml = YAML.safe_load(File.read(config_path), permitted_classes: [Symbol], aliases: true) rescue nil
          paths = yaml&.dig("csv_databases") || yaml&.dig("plugins", "csv_browser", "databases")
          @yaml_paths = paths if paths.is_a?(Array) && !paths.empty?
        end
        @databases = @yaml_paths.filter_map { |path| load_database(path) }
      end

      # Database by key, or nil.
      def database(key)
        @databases.find { |db| db.key == key }
      end

      # Returns Database if real_path matches a configured YAML database file
      def find_database_by_yaml_path(real_path)
        @databases.find { |db| db.yaml_path == real_path }
      end

      # Builds FK lookup maps for a table's columns that have references.
      # Returns { "column_key" => { id_value => display_value, ... }, ... }
      # Reads each referenced CSV fresh on every call (no caching).
      def resolve_references(database, table)
        lookups = {}
        table.columns.each do |col|
          next unless col.references
          ref_table = database.tables.find { |t| t.key == col.references[:table] }
          next unless ref_table
          next unless File.exist?(ref_table.csv_path)

          map = {}
          CSV.foreach(ref_table.csv_path, headers: true) do |row|
            key_val = row[col.references[:column]]
            display_val = row[col.references[:display]]
            # Rows with a blank key or display cell are simply not mapped.
            map[key_val] = display_val if key_val && display_val
          end
          lookups[col.key] = map
        end
        lookups
      end

      # Finds other tables in the database that have FK columns pointing at this table.
      # Returns [{ table: "enrollments", title: "Enrollments", column: "class_id", references_column: "id" }, ...]
      # (:color is included only when the referencing table declares one).
      def resolve_reverse_references(database, table)
        reverse = []
        database.tables.each do |other_table|
          next if other_table.key == table.key
          other_table.columns.each do |col|
            next unless col.references && col.references[:table] == table.key
            entry = {
              table: other_table.key,
              title: other_table.title,
              column: col.key,
              references_column: col.references[:column]
            }
            entry[:color] = other_table.color if other_table.color
            reverse << entry
          end
        end
        reverse
      end

      # Returns [database, table] if real_path matches a table's CSV file, or nil
      def find_table_by_csv_path(real_path)
        @databases.each do |db|
          db.tables.each do |table|
            return [db, table] if table.csv_path == real_path
          end
        end
        nil
      end

      # Returns CSV file paths under root_dir that aren't claimed by any database table.
      # Each entry is { path: "/abs/path.csv", relative: "sub/dir/file.csv" },
      # sorted by relative path. Requires "set" (added above: Set is only
      # autoloaded on Ruby >= 3.2, so the explicit require keeps older rubies working).
      def unmapped_csv_files
        referenced = Set.new
        @databases.each do |db|
          db.tables.each { |t| referenced << t.csv_path }
        end

        root = File.realpath(@root_dir)
        Dir.glob(File.join(root, "**", "*.csv")).filter_map do |path|
          real = File.realpath(path)
          next if referenced.include?(real)
          relative = real.delete_prefix("#{root}/")
          { path: real, relative: relative }
        end.sort_by { |e| e[:relative] }
      end

      private

      # Parses one database YAML file into a Database. Any error (missing
      # file, bad YAML, schema problems) is reported to stdout and yields nil
      # so the remaining databases still load.
      def load_database(relative_path)
        path = File.expand_path(relative_path, @root_dir)
        $stdout.write "[csv-browser] Loading #{relative_path}..."
        $stdout.flush

        unless File.exist?(path)
          puts "\n\n  \e[33mnot found: #{path}\e[0m\n\n"
          return nil
        end

        yaml_dir = File.dirname(path)
        config = YAML.safe_load(File.read(path), permitted_classes: [Symbol], aliases: true)
        # Database key is the YAML filename without extension.
        db_key = File.basename(path, ".*")

        tables = (config["tables"] || {}).filter_map do |table_key, table_def|
          build_table(table_key, table_def, yaml_dir)
        end

        group = config.fetch("group_by_directory", true)
        counts = config.fetch("show_record_counts", true)
        db = Database.new(key: db_key, title: config["title"] || config["label"] || db_key.capitalize,
                          tables: tables, yaml_path: File.realpath(path),
                          group_by_directory: group, show_record_counts: counts)
        puts " ok (#{tables.length} tables)"
        db
      rescue => e
        puts "\n\n  \e[31merror: #{e.message}\e[0m\n\n"
        nil
      end

      # Builds one Table from its YAML definition. Returns nil (with a
      # warning) when the table's CSV file does not exist.
      def build_table(key, definition, yaml_dir)
        properties = definition["properties"] || {}
        required = definition["required"] || []

        columns = properties.map do |col_key, col_def|
          ref = col_def["references"]
          Column.new(
            key: col_key,
            title: col_def["title"] || col_key.capitalize,
            type: col_def["type"] || "string",
            # Everything that isn't type/title/references is treated as a
            # validation constraint (enum, pattern, minimum, ...).
            constraints: col_def.except("type", "title", "references"),
            references: ref ? { table: ref["table"], column: ref["column"], display: ref["display"] } : nil
          )
        end

        # Tables without explicit views get a single implicit "All" view.
        views = (definition["views"] || { "all" => { "title" => "All" } }).map do |view_key, view_def|
          View.new(
            key: view_key,
            title: view_def["title"] || view_def["label"] || view_key.capitalize,
            columns: view_def["columns"]
          )
        end

        schema = build_json_schema(key, properties, required, definition)

        default_title = key.tr("_", " ").split.map(&:capitalize).join(" ")
        csv_path = File.expand_path(definition["csv"], yaml_dir)

        unless File.exist?(csv_path)
          warn "[csv-browser] Skipping table '#{key}': CSV not found at #{csv_path}"
          return nil
        end

        Table.new(
          key: key,
          title: definition["title"] || default_title,
          csv_path: File.realpath(csv_path),
          columns: columns,
          required: required,
          schema: schema,
          views: views,
          color: definition["color"],
          addons: definition["addons"] || []
        )
      end

      # Translates the table's column properties into a compiled JSON Schema
      # (draft 2020-12) via JSONSchemer. Conditional keywords (if/then/else/
      # allOf) are copied through from the table definition when present.
      def build_json_schema(table_key, properties, required, definition = {})
        json_props = properties.transform_values do |col_def|
          prop = {}
          case col_def["type"]
          when "integer"
            prop["type"] = "integer"
          when "number"
            prop["type"] = "number"
          else
            # Unknown/absent types validate as plain strings.
            prop["type"] = "string"
          end
          prop["title"] = col_def["title"] if col_def["title"]
          prop["enum"] = col_def["enum"] if col_def["enum"]
          prop["pattern"] = col_def["pattern"] if col_def["pattern"]
          prop["format"] = col_def["format"] if col_def["format"]
          prop["minimum"] = col_def["minimum"] if col_def["minimum"]
          prop["maximum"] = col_def["maximum"] if col_def["maximum"]
          prop["minLength"] = col_def["minLength"] if col_def["minLength"]
          prop["maxLength"] = col_def["maxLength"] if col_def["maxLength"]
          prop
        end

        schema = {
          "$schema" => "https://json-schema.org/draft/2020-12/schema",
          "title" => table_key,
          "type" => "object",
          "properties" => json_props,
          "required" => required
        }

        schema["if"] = definition["if"] if definition["if"]
        schema["then"] = definition["then"] if definition["then"]
        schema["else"] = definition["else"] if definition["else"]
        schema["allOf"] = definition["allOf"] if definition["allOf"]

        JSONSchemer.schema(schema)
      end
    end
  end
end
|
|
@@ -0,0 +1,146 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "csv"

module MarkdownServer
  module CsvBrowser
    # Context object passed to add-on action blocks. Exposes the active
    # row/table/options/input/state and provides table read/write primitives
    # that all flow through TableReader (so validation and CSV writes behave
    # exactly like the row-editor path).
    #
    # A RowContext represents one HTTP invocation. For a multi-round prompt,
    # a fresh RowContext is created per POST; state is what carries data
    # across rounds (round-tripping through the client).
    class RowContext
      attr_reader :database, :table, :row_index, :row, :options, :input, :state

      def initialize(database:, table:, row_index:, row:, options: {}, input: nil, state: nil)
        @database = database
        @table = table
        @row_index = row_index
        @row = row || {}
        @options = options || {}
        @input = symbolize_keys(input)
        @state = StateHash.wrap(state)
      end

      # List rows from any table in this database.
      # Returns an array of plain hashes keyed by column name (strings).
      # `where:` matches via string comparison on cell values.
      def read_table(table_name, where: {})
        t = resolve_table(table_name)
        return [] unless t && File.exist?(t.csv_path)

        rows = []
        CSV.foreach(t.csv_path, headers: true) do |row|
          h = row.to_h
          next unless where.empty? || where.all? { |k, v| h[k.to_s].to_s == v.to_s }

          rows << h
        end
        rows
      end

      # Insert a row (values hash, keys/values stringified) via TableReader,
      # optionally at a specific index. Raises ArgumentError for unknown tables.
      def insert_row(table_name, values, at: nil)
        reader = TableReader.new(resolve_table!(table_name))
        reader.insert_row(stringify_values(values), at: at)
      end

      # Apply `changes` to the row at row_index via TableReader.
      def update_row(table_name, row_index, changes)
        reader = TableReader.new(resolve_table!(table_name))
        reader.update_row(row_index, stringify_values(changes))
      end

      # Delete the row at row_index via TableReader.
      def delete_row(table_name, row_index)
        reader = TableReader.new(resolve_table!(table_name))
        reader.delete_row(row_index)
      end

      # Return-value helpers — each produces the shape the HTTP layer will
      # serialize back to the client.
      def prompt(title:, fields:, state: {})
        { kind: "prompt", title: title, fields: fields, state: stringify_state(state) }
      end

      def done(reload: true)
        { kind: "done", reload: reload }
      end

      def error(message, fields: nil)
        h = { kind: "error", message: message }
        h[:fields] = fields if fields
        h
      end

      private

      # Accepts a Table object or a table key (String/Symbol); nil when the
      # key isn't in this database.
      def resolve_table(table_name)
        return table_name if table_name.is_a?(ConfigLoader::Table)

        key = table_name.to_s
        @database.tables.find { |t| t.key == key }
      end

      def resolve_table!(table_name)
        t = resolve_table(table_name)
        raise ArgumentError, "unknown table: #{table_name.inspect}" unless t

        t
      end

      # CSV cells are strings; nil is preserved (blank cell).
      def stringify_values(values)
        values.each_with_object({}) { |(k, v), h| h[k.to_s] = v.nil? ? nil : v.to_s }
      end

      # Normalizes a state payload to string keys for JSON round-tripping.
      # Also accepts a StateHash so handlers can pass `ctx.state` straight
      # back into `prompt(state:)` (previously that raised NoMethodError).
      def stringify_state(state)
        return {} if state.nil?

        state = state.to_h if state.is_a?(StateHash)
        state.each_with_object({}) { |(k, v), h| h[k.to_s] = v }
      end

      def symbolize_keys(h)
        return nil if h.nil?
        return h unless h.is_a?(Hash)

        h.each_with_object({}) { |(k, v), out| out[k.to_sym] = v }
      end

      # Hash wrapper that allows symbol OR string key access, so handlers can
      # write `ctx.state[:step]` regardless of JSON round-trip. Values are
      # plain JSON-serializable objects (the round-trip strips Ruby symbols —
      # use strings for step identifiers).
      class StateHash
        def self.wrap(h)
          return new({}) if h.nil?
          return h if h.is_a?(StateHash)

          new(h)
        end

        def initialize(h)
          @h = h.is_a?(Hash) ? h : {}
        end

        # Key-presence based lookup. The previous `@h[key] || @h[key.to_s] ||
        # @h[key.to_sym]` chain fell through stored false/nil values (a stored
        # `false` came back as nil); `key?` guards preserve them.
        def [](key)
          return @h[key] if @h.key?(key)
          return @h[key.to_s] if @h.key?(key.to_s)

          @h[key.to_sym] if @h.key?(key.to_sym)
        end

        # Writes always land under the string key so state survives the JSON
        # round-trip unchanged.
        def []=(key, value)
          @h[key.to_s] = value
        end

        def key?(key)
          @h.key?(key) || @h.key?(key.to_s) || @h.key?(key.to_sym)
        end

        def to_h
          @h.dup
        end

        def empty?
          @h.empty?
        end
      end
    end
  end
end
|