markdown-run 0.1.8 → 0.1.10
This diff shows the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
- checksums.yaml +4 -4
- data/.tool-versions +1 -0
- data/CHANGELOG.md +11 -0
- data/README.md +51 -2
- data/Rakefile +22 -0
- data/exe/markdown-run +6 -433
- data/lib/code_block_parser.rb +77 -0
- data/lib/code_executor.rb +212 -0
- data/lib/enum_helper.rb +17 -0
- data/lib/execution_decider.rb +225 -0
- data/lib/frontmatter_parser.rb +72 -0
- data/lib/language_configs.rb +53 -8
- data/lib/markdown/run/version.rb +1 -1
- data/lib/markdown_file_writer.rb +25 -0
- data/lib/markdown_processor.rb +342 -0
- data/lib/markdown_run.rb +20 -0
- metadata +49 -11
- data/test_markdown_exec.rb +0 -297
data/lib/code_executor.rb
ADDED
@@ -0,0 +1,212 @@
require "tempfile"
require "open3"
require_relative "language_configs"

class CodeExecutor
  def self.execute(code_content, lang, temp_dir, input_file_path = nil, explain = false)
    new.execute(code_content, lang, temp_dir, input_file_path, explain)
  end

  def execute(code_content, lang, temp_dir, input_file_path = nil, explain = false)
    lang_key = lang.downcase
    lang_config = SUPPORTED_LANGUAGES[lang_key]

    return handle_unsupported_language(lang) unless lang_config

    warn "Executing #{lang_key} code block..."

    result = execute_with_config(code_content, lang_config, temp_dir, lang_key, input_file_path, explain)
    process_execution_result(result, lang_config, lang_key, explain)
  end

  private

  def handle_unsupported_language(lang)
    warn "Unsupported language: #{lang}"
    "ERROR: Unsupported language: #{lang}"
  end

  def execute_with_config(code_content, lang_config, temp_dir, lang_key, input_file_path = nil, explain = false)
    cmd_lambda = lang_config[:command]
    temp_file_suffix = lang_config[:temp_file_suffix]

    if temp_file_suffix
      execute_with_temp_file(code_content, cmd_lambda, temp_file_suffix, temp_dir, lang_key, input_file_path, explain)
    else
      execute_direct_command(code_content, cmd_lambda, explain)
    end
  end

  def execute_with_temp_file(code_content, cmd_lambda, temp_file_suffix, temp_dir, lang_key, input_file_path = nil, explain = false)
    result = nil
    Tempfile.create([lang_key, temp_file_suffix], temp_dir) do |temp_file|
      temp_file.write(code_content)
      temp_file.close
      command_to_run, exec_options = cmd_lambda.call(code_content, temp_file.path, input_file_path, explain)

      # Extract output_path if present (for mermaid)
      output_path = exec_options.delete(:output_path) if exec_options.is_a?(Hash)

      captured_stdout, captured_stderr, captured_status_obj = Open3.capture3(command_to_run, **exec_options)
      result = {
        stdout: captured_stdout,
        stderr: captured_stderr,
        status: captured_status_obj,
        output_path: output_path # For mermaid SVG output
      }
    end
    result
  end

  def execute_direct_command(code_content, cmd_lambda, explain = false)
    command_to_run, exec_options = cmd_lambda.call(code_content, nil, nil, explain)
    captured_stdout, captured_stderr, captured_status_obj = Open3.capture3(command_to_run, **exec_options)
    { stdout: captured_stdout, stderr: captured_stderr, status: captured_status_obj }
  end

  def process_execution_result(result, lang_config, lang_key, explain = false)
    exit_status, result_output, stderr_output = format_captured_output(result, lang_config)

    if exit_status != 0
      result_output = add_error_to_output(exit_status, lang_config, lang_key, result_output, stderr_output)
    elsif lang_config && lang_config[:result_handling] == :mermaid_svg
      result_output = handle_mermaid_svg_result(result, lang_key)
    elsif explain && lang_key == "psql"
      result_output = handle_psql_explain_result(result_output)
    end

    result_output
  end

  def format_captured_output(result, lang_config)
    result_output = result[:stdout]
    stderr_output = result[:stderr]
    exit_status = result[:status].exitstatus

    # JS-specific: Append stderr to result if execution failed and stderr has content
    if lang_config && lang_config[:error_handling] == :js_specific && exit_status != 0 && stderr_has_content?(stderr_output)
      result_output += "\nStderr:\n#{stderr_output.strip}"
    end

    [exit_status, result_output, stderr_output]
  end

  def add_error_to_output(exit_status, lang_config, lang_key, result_output, stderr_output)
    warn "Code execution failed for language '#{lang_key}' with status #{exit_status}."
    warn "Stderr:\n#{stderr_output}" if stderr_has_content?(stderr_output)

    is_js_error_already_formatted = lang_config && lang_config[:error_handling] == :js_specific && result_output.include?("Stderr:")
    unless result_output.downcase.include?("error:") || is_js_error_already_formatted
      error_prefix = "Execution failed (status: #{exit_status})."
      error_prefix += " Stderr: #{stderr_output.strip}" if stderr_has_content?(stderr_output)
      result_output = "#{error_prefix}\n#{result_output}"
    end
    result_output
  end

  def stderr_has_content?(stderr_output)
    stderr_output && !stderr_output.strip.empty?
  end

  def handle_mermaid_svg_result(result, lang_key)
    output_path = result[:output_path]

    unless output_path && File.exist?(output_path)
      warn "Warning: Mermaid SVG file not generated at expected path: #{output_path}"
      return "Error: SVG file not generated"
    end

    # Generate relative path for the SVG file
    # If the SVG is in a subdirectory, include the directory in the path
    output_dir = File.dirname(output_path)
    svg_filename = File.basename(output_path)

    # Check if SVG is in a subdirectory (new behavior) or same directory (fallback)
    parent_dir = File.dirname(output_dir)
    if File.basename(output_dir) != File.basename(parent_dir)
      # SVG is in a subdirectory, use relative path with directory
      relative_path = "#{File.basename(output_dir)}/#{svg_filename}"
    else
      # SVG is in same directory (fallback behavior)
      relative_path = svg_filename
    end

    warn "Generated Mermaid SVG: #{relative_path}"

    # Return markdown image tag instead of typical result content
    ""
  end

  def handle_psql_explain_result(result_output)
    require 'json'
    require 'net/http'
    require 'uri'

    # Try to parse the result as JSON (EXPLAIN output)
    begin
      # Clean up the result output and try to parse as JSON
      json_data = JSON.parse(result_output.strip)

      # Submit plan to Dalibo via POST request
      dalibo_url = submit_plan_to_dalibo(JSON.generate(json_data))

      if dalibo_url
        # Return a special format that the markdown processor can parse
        "DALIBO_LINK:#{dalibo_url}\n#{result_output.strip}"
      else
        # If submission failed, just return the original output
        result_output
      end
    rescue JSON::ParserError
      # If it's not valid JSON, just return the original output
      result_output
    end
  end

  private

  def submit_plan_to_dalibo(plan_json)
    begin
      # Start with HTTPS directly to avoid the HTTP->HTTPS redirect
      uri = URI('https://explain.dalibo.com/new')
      http = Net::HTTP.new(uri.host, uri.port)
      http.use_ssl = true
      http.read_timeout = 10 # 10 seconds timeout

      # Prepare the JSON payload
      payload = {
        'plan' => plan_json,
        'title' => "Query Plan - #{Time.now.strftime('%Y-%m-%d %H:%M:%S')}",
        'query' => ''
      }

      # Create the POST request
      request = Net::HTTP::Post.new(uri)
      request['Content-Type'] = 'application/json'
      request.body = JSON.generate(payload)

      # Send the request and follow redirects to get the final URL
      response = http.request(request)

      # Dalibo returns a redirect to the plan URL
      if response.is_a?(Net::HTTPRedirection)
        location = response['location']
        # Make sure it's a full URL
        if location
          if location.start_with?('/')
            location = "https://explain.dalibo.com#{location}"
          end
          location
        else
          nil
        end
      else
        warn "Failed to submit plan to Dalibo: #{response.code} #{response.message}"
        nil
      end
    rescue => e
      warn "Error submitting plan to Dalibo: #{e.message}"
      nil
    end
  end
end
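For orientation, here is a minimal sketch of how the new class-level entry point might be driven outside the gem. The require path and the bash snippet are assumptions for illustration; only `CodeExecutor.execute` and its signature come from the diff above.

```ruby
require "tmpdir"
require_relative "lib/code_executor" # assumed path relative to the gem root

# Run a bash snippet the way markdown-run would: the code is written to a
# temp file, executed, and the captured stdout (or an error summary) returned.
Dir.mktmpdir do |temp_dir|
  output = CodeExecutor.execute("echo hello", "bash", temp_dir)
  puts output # => "hello"
end
```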
data/lib/enum_helper.rb
ADDED
@@ -0,0 +1,17 @@
module EnumHelper
  private

  def safe_enum_operation(file_enum, operation)
    file_enum.send(operation)
  rescue StopIteration
    nil
  end

  def get_next_line(file_enum)
    safe_enum_operation(file_enum, :next)
  end

  def peek_next_line(file_enum)
    safe_enum_operation(file_enum, :peek)
  end
end
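A small sketch of what the mix-in buys an including class: `next`/`peek` on a line enumerator return `nil` at end of input instead of raising `StopIteration`. The `LineWalker` class below is hypothetical.

```ruby
require_relative "lib/enum_helper" # assumed path relative to the gem root

# Hypothetical consumer: walk a file line by line without rescuing StopIteration.
class LineWalker
  include EnumHelper

  def first_two(path)
    file_enum = File.foreach(path)
    [get_next_line(file_enum), peek_next_line(file_enum)] # [nil, nil] on an empty file
  end
end
```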
data/lib/execution_decider.rb
ADDED
@@ -0,0 +1,225 @@
require_relative "enum_helper"

class ExecutionDecider
  include EnumHelper

  def initialize(current_block_run, current_block_rerun, current_block_lang, current_block_explain = false, current_block_result = true)
    @current_block_run = current_block_run
    @current_block_rerun = current_block_rerun
    @current_block_lang = current_block_lang
    @current_block_explain = current_block_explain
    @current_block_result = current_block_result
  end

  def decide(file_enum, result_block_regex_method)
    return skip_execution_run_false if run_disabled?

    expected_header_regex = result_block_regex_method.call(@current_block_lang)
    peek1 = peek_next_line(file_enum)

    if line_matches_pattern?(peek1, expected_header_regex)
      handle_immediate_result_block(file_enum)
    elsif is_blank_line?(peek1)
      handle_blank_line_scenario(file_enum, expected_header_regex)
    elsif @current_block_explain && is_dalibo_link?(peek1)
      handle_immediate_dalibo_link(file_enum)
    else
      execute_without_existing_result
    end
  end

  private

  def run_disabled?
    !@current_block_run
  end

  def skip_execution_run_false
    { execute: false, lines_to_pass_through: [] }
  end

  def handle_immediate_result_block(file_enum)
    if @current_block_rerun
      execute_with_consumed_result(file_enum)
    else
      skip_with_existing_result(file_enum)
    end
  end

  def handle_blank_line_scenario(file_enum, expected_header_regex)
    consumed_blank_line = file_enum.next

    # Look ahead past multiple blank lines to find actual content
    peek2 = peek_next_line(file_enum)
    additional_blanks = []

    # Consume consecutive blank lines
    while is_blank_line?(peek2)
      additional_blanks << file_enum.next
      peek2 = peek_next_line(file_enum)
    end

    if line_matches_pattern?(peek2, expected_header_regex)
      handle_result_after_blank_lines(file_enum, consumed_blank_line, additional_blanks)
    elsif @current_block_explain && is_dalibo_link?(peek2)
      handle_dalibo_after_blank_lines(file_enum, consumed_blank_line, additional_blanks)
    else
      execute_with_blank_lines(consumed_blank_line, additional_blanks)
    end
  end

  def handle_result_after_blank_line(file_enum, consumed_blank_line)
    if @current_block_rerun
      execute_with_consumed_result_and_blank(file_enum, consumed_blank_line)
    else
      skip_with_blank_and_result(file_enum, consumed_blank_line)
    end
  end

  def handle_result_after_blank_lines(file_enum, consumed_blank_line, additional_blanks)
    if @current_block_rerun
      execute_with_consumed_result_and_blanks(file_enum, consumed_blank_line, additional_blanks)
    else
      skip_with_blanks_and_result(file_enum, consumed_blank_line, additional_blanks)
    end
  end

  def handle_dalibo_after_blank_lines(file_enum, consumed_blank_line, additional_blanks)
    # For explain result=false, always replace existing Dalibo links
    # For explain result=true, follow normal rerun logic
    if should_auto_replace_dalibo_link? || @current_block_rerun
      execute_with_consumed_dalibo_and_blanks(file_enum, consumed_blank_line, additional_blanks)
    else
      skip_with_blanks_and_dalibo(file_enum, consumed_blank_line, additional_blanks)
    end
  end

  def execute_with_consumed_result(file_enum)
    consumed_lines = [file_enum.next]
    { execute: true, consumed_lines: consumed_lines, consume_existing: true }
  end

  def skip_with_existing_result(file_enum)
    { execute: false, lines_to_pass_through: [file_enum.next] }
  end

  def execute_with_consumed_result_and_blank(file_enum, consumed_blank_line)
    consumed_lines = [consumed_blank_line, file_enum.next]
    { execute: true, consumed_lines: consumed_lines, blank_line: consumed_blank_line, consume_existing: true }
  end

  def skip_with_blank_and_result(file_enum, consumed_blank_line)
    { execute: false, lines_to_pass_through: [consumed_blank_line, file_enum.next] }
  end

  def execute_with_blank_line(consumed_blank_line)
    { execute: true, blank_line: consumed_blank_line }
  end

  def execute_with_blank_lines(consumed_blank_line, additional_blanks)
    { execute: true, blank_line: consumed_blank_line, additional_blanks: additional_blanks }
  end

  def execute_with_consumed_result_and_blanks(file_enum, consumed_blank_line, additional_blanks)
    consumed_lines = [consumed_blank_line] + additional_blanks + [file_enum.next]
    { execute: true, consumed_lines: consumed_lines, blank_line: consumed_blank_line, consume_existing: true }
  end

  def skip_with_blanks_and_result(file_enum, consumed_blank_line, additional_blanks)
    lines_to_pass = [consumed_blank_line] + additional_blanks + [file_enum.next]
    { execute: false, lines_to_pass_through: lines_to_pass }
  end

  def execute_with_consumed_dalibo_and_blanks(file_enum, consumed_blank_line, additional_blanks)
    consumed_lines = [consumed_blank_line] + additional_blanks
    consume_dalibo_links(file_enum, consumed_lines)
    { execute: true, consumed_lines: consumed_lines, blank_line: consumed_blank_line, consume_existing_dalibo: true }
  end

  def skip_with_blanks_and_dalibo(file_enum, consumed_blank_line, additional_blanks)
    consumed_lines = [consumed_blank_line] + additional_blanks
    consume_dalibo_links(file_enum, consumed_lines)
    { execute: false, lines_to_pass_through: consumed_lines, dalibo_content: true }
  end

  def execute_without_existing_result
    { execute: true }
  end

  def handle_immediate_dalibo_link(file_enum)
    # For explain result=false, always replace existing Dalibo links
    # For explain result=true, follow normal rerun logic
    if should_auto_replace_dalibo_link? || @current_block_rerun
      execute_with_consumed_dalibo(file_enum)
    else
      skip_with_existing_dalibo(file_enum)
    end
  end

  def handle_dalibo_after_blank_line(file_enum, consumed_blank_line)
    # For explain result=false, always replace existing Dalibo links
    # For explain result=true, follow normal rerun logic
    if should_auto_replace_dalibo_link? || @current_block_rerun
      execute_with_consumed_dalibo_and_blank(file_enum, consumed_blank_line)
    else
      skip_with_blank_and_dalibo(file_enum, consumed_blank_line)
    end
  end

  def execute_with_consumed_dalibo(file_enum)
    consumed_lines = []
    consume_dalibo_links(file_enum, consumed_lines)
    { execute: true, consumed_lines: consumed_lines, consume_existing_dalibo: true }
  end

  def skip_with_existing_dalibo(file_enum)
    consumed_lines = []
    consume_dalibo_links(file_enum, consumed_lines)
    { execute: false, lines_to_pass_through: consumed_lines, dalibo_content: true }
  end

  def execute_with_consumed_dalibo_and_blank(file_enum, consumed_blank_line)
    consumed_lines = [consumed_blank_line]
    consume_dalibo_links(file_enum, consumed_lines)
    { execute: true, consumed_lines: consumed_lines, blank_line: consumed_blank_line, consume_existing_dalibo: true }
  end

  def skip_with_blank_and_dalibo(file_enum, consumed_blank_line)
    consumed_lines = [consumed_blank_line]
    consume_dalibo_links(file_enum, consumed_lines)
    { execute: false, lines_to_pass_through: consumed_lines, dalibo_content: true }
  end

  def consume_dalibo_links(file_enum, consumed_lines)
    # Consume all consecutive Dalibo links and blank lines
    loop do
      next_line = peek_next_line(file_enum)

      if is_blank_line?(next_line) || is_dalibo_link?(next_line)
        consumed_line = file_enum.next
        consumed_lines << consumed_line
      else
        break
      end
    end
  end

  def is_dalibo_link?(line)
    line&.start_with?("**Dalibo Visualization:**")
  end

  def line_matches_pattern?(line, pattern)
    line && line.match?(pattern)
  end

  def is_blank_line?(line)
    line && line.strip == ""
  end

  def should_auto_replace_dalibo_link?
    # Auto-replace Dalibo links when using explain with result=false
    # This makes sense because with result=false, there's only a Dalibo link,
    # so it should be updated on each run
    @current_block_explain && !@current_block_result
  end
end
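To make the return contract concrete, a sketch of one `decide` call. The result-header format and the lines following the code block are invented for illustration; in the gem, the regex lambda is supplied by the markdown processor.

```ruby
require_relative "lib/execution_decider" # assumed path relative to the gem root

# Invented result-header format, purely to exercise the decision logic.
result_block_regex = ->(lang) { /^RESULT \(#{lang}\)$/ }
lines_after_block = ["\n", "RESULT (ruby)\n"].each

# run: true, rerun: false, lang: "ruby" -> an existing result block is kept as-is.
decider = ExecutionDecider.new(true, false, "ruby")
decider.decide(lines_after_block, result_block_regex)
# => { execute: false, lines_to_pass_through: ["\n", "RESULT (ruby)\n"] }
```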
data/lib/frontmatter_parser.rb
ADDED
@@ -0,0 +1,72 @@
require "yaml"
require_relative "enum_helper"

class FrontmatterParser
  include EnumHelper

  def initialize
    @aliases = {}
  end

  attr_reader :aliases

  def parse_frontmatter(file_enum, output_lines)
    first_line = peek_next_line(file_enum)
    return unless first_line&.strip == "---"

    frontmatter_lines = collect_frontmatter_lines(file_enum, output_lines)
    process_frontmatter_content(frontmatter_lines) unless frontmatter_lines.empty?
  end

  def resolve_language(lang)
    @aliases[lang] || lang
  end

  private

  def collect_frontmatter_lines(file_enum, output_lines)
    # Consume the opening ---
    output_lines << file_enum.next
    frontmatter_lines = []

    loop do
      line = get_next_line(file_enum)
      break unless line

      if line.strip == "---"
        output_lines << line
        break
      end

      frontmatter_lines << line
      output_lines << line
    end

    frontmatter_lines
  end

  def process_frontmatter_content(frontmatter_lines)
    begin
      frontmatter = YAML.safe_load(frontmatter_lines.join)
      extract_aliases(frontmatter) if frontmatter.is_a?(Hash)
    rescue YAML::SyntaxError => e
      warn "Warning: Invalid YAML frontmatter: #{e.message}"
    end
  end

  def extract_aliases(frontmatter)
    markdown_run_config = frontmatter["markdown-run"]
    return unless markdown_run_config.is_a?(Hash)

    aliases = markdown_run_config["alias"]
    return unless aliases.is_a?(Array)

    aliases.each do |alias_config|
      next unless alias_config.is_a?(Hash)

      alias_config.each do |alias_name, target_lang|
        @aliases[alias_name.to_s] = target_lang.to_s
      end
    end
  end
end
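The alias format the parser expects can be read straight off `extract_aliases`; here is a short round trip with a made-up document:

```ruby
require_relative "lib/frontmatter_parser" # assumed path relative to the gem root

lines = <<~MD.lines
  ---
  markdown-run:
    alias:
      - sql: psql
  ---
  # Notes
MD

parser = FrontmatterParser.new
output_lines = []
parser.parse_frontmatter(lines.each, output_lines)

parser.aliases                   # => {"sql"=>"psql"}
parser.resolve_language("sql")   # => "psql"
parser.resolve_language("ruby")  # => "ruby"
```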
data/lib/language_configs.rb
CHANGED
@@ -1,5 +1,7 @@
+require 'securerandom'
+
 JS_CONFIG = {
-  command: ->(_code_content, temp_file_path) {
+  command: ->(_code_content, temp_file_path, input_file_path = nil, explain = false) {
     # Check if bun is available
     bun_exists = system("command -v bun > /dev/null 2>&1")
     if bun_exists
@@ -14,22 +16,32 @@ JS_CONFIG = {
 }.freeze

 SQLITE_CONFIG = {
-  command: ->(code_content, temp_file_path) { [ "sqlite3 #{temp_file_path}", { stdin_data: code_content } ] },
+  command: ->(code_content, temp_file_path, input_file_path = nil, explain = false) { [ "sqlite3 #{temp_file_path}", { stdin_data: code_content } ] },
   temp_file_suffix: ".db" # Temp file is the database
 }.freeze

 SUPPORTED_LANGUAGES = {
   "psql" => {
-    command: ->(code_content, _temp_file_path) {
+    command: ->(code_content, _temp_file_path, input_file_path = nil, explain = false) {
       psql_exists = system("command -v psql > /dev/null 2>&1")
       unless psql_exists
         abort "Error: psql command not found. Please install PostgreSQL or ensure psql is in your PATH."
       end
-      [ "psql -A -t -X", { stdin_data: code_content } ]
+
+      # Modify the SQL query if explain option is enabled
+      if explain
+        # Wrap the query with EXPLAIN (ANALYZE, BUFFERS, FORMAT JSON)
+        # Remove any trailing semicolons and whitespace, then add our EXPLAIN wrapper
+        clean_query = code_content.strip.gsub(/;\s*$/, '')
+        explained_query = "EXPLAIN (ANALYZE, BUFFERS, FORMAT JSON) #{clean_query};"
+        [ "psql -A -t -X", { stdin_data: explained_query } ]
+      else
+        [ "psql -A -t -X", { stdin_data: code_content } ]
+      end
     }
   },
   "ruby" => {
-    command: ->(_code_content, temp_file_path) {
+    command: ->(_code_content, temp_file_path, input_file_path = nil, explain = false) {
       xmpfilter_exists = system("command -v xmpfilter > /dev/null 2>&1")
       unless xmpfilter_exists
         abort "Error: xmpfilter command not found. Please install xmpfilter or ensure it is in your PATH."
@@ -45,7 +57,7 @@ SUPPORTED_LANGUAGES = {
   "sqlite" => SQLITE_CONFIG,
   "sqlite3" => SQLITE_CONFIG, # Alias for sqlite
   "bash" => {
-    command: ->(_code_content, temp_file_path) {
+    command: ->(_code_content, temp_file_path, input_file_path = nil, explain = false) {
       bash_exists = system("command -v bash > /dev/null 2>&1")
       unless bash_exists
         abort "Error: bash command not found. Please ensure bash is in your PATH."
@@ -55,7 +67,7 @@ SUPPORTED_LANGUAGES = {
     temp_file_suffix: ".sh"
   },
   "zsh" => {
-    command: ->(_code_content, temp_file_path) {
+    command: ->(_code_content, temp_file_path, input_file_path = nil, explain = false) {
       zsh_exists = system("command -v zsh > /dev/null 2>&1")
       unless zsh_exists
         abort "Error: zsh command not found. Please ensure zsh is in your PATH."
@@ -65,7 +77,7 @@ SUPPORTED_LANGUAGES = {
     temp_file_suffix: ".zsh"
   },
   "sh" => {
-    command: ->(_code_content, temp_file_path) {
+    command: ->(_code_content, temp_file_path, input_file_path = nil, explain = false) {
       sh_exists = system("command -v sh > /dev/null 2>&1")
       unless sh_exists
         abort "Error: sh command not found. Please ensure sh is in your PATH."
@@ -73,6 +85,39 @@ SUPPORTED_LANGUAGES = {
       [ "sh #{temp_file_path}", {} ]
     },
     temp_file_suffix: ".sh"
+  },
+  "mermaid" => {
+    command: ->(code_content, temp_file_path, input_file_path = nil, explain = false) {
+      mmdc_exists = system("command -v mmdc > /dev/null 2>&1")
+      unless mmdc_exists
+        abort "Error: mmdc command not found. Please install @mermaid-js/mermaid-cli: npm install -g @mermaid-js/mermaid-cli"
+      end
+
+      # Generate SVG output file path with directory structure based on markdown file
+      if input_file_path
+        # Extract markdown file basename without extension
+        md_basename = File.basename(input_file_path, ".*")
+
+        # Create directory named after the markdown file
+        output_dir = File.join(File.dirname(input_file_path), md_basename)
+        Dir.mkdir(output_dir) unless Dir.exist?(output_dir)
+
+        # Generate unique filename with markdown basename prefix
+        timestamp = Time.now.strftime("%Y%m%d-%H%M%S")
+        random_suffix = SecureRandom.hex(6)
+        svg_filename = "#{md_basename}-#{timestamp}-#{random_suffix}.svg"
+        output_path = File.join(output_dir, svg_filename)
+      else
+        # Fallback to old behavior if no input file path provided
+        input_dir = File.dirname(temp_file_path)
+        base_name = File.basename(temp_file_path, ".*")
+        output_path = File.join(input_dir, "#{base_name}.svg")
+      end
+
+      [ "mmdc -i #{temp_file_path} -o #{output_path}", { output_path: output_path } ]
+    },
+    temp_file_suffix: ".mmd",
+    result_handling: :mermaid_svg # Special handling for SVG generation
   }
 }.freeze

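Every command lambda now takes `(code_content, temp_file_path, input_file_path, explain)`. A sketch of the new psql contract, assuming `psql` is on your PATH (the lambda aborts otherwise); nothing is executed here beyond that availability check:

```ruby
require_relative "lib/language_configs" # assumed path relative to the gem root

cmd, opts = SUPPORTED_LANGUAGES["psql"][:command].call("SELECT 1;", nil, nil, true)
cmd  # => "psql -A -t -X"
opts # => { stdin_data: "EXPLAIN (ANALYZE, BUFFERS, FORMAT JSON) SELECT 1;" }
```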
data/lib/markdown/run/version.rb
CHANGED
data/lib/markdown_file_writer.rb
ADDED
@@ -0,0 +1,25 @@
require "tempfile"
require "fileutils"

class MarkdownFileWriter
  def self.write_output_to_file(output_lines, input_file_path)
    temp_dir = File.dirname(File.expand_path(input_file_path))

    # Write the modified content back to the input file using atomic operations
    Tempfile.create([ "md_exec_out_", File.extname(input_file_path) ], temp_dir) do |temp_output_file|
      temp_output_file.write(output_lines.join(""))
      temp_output_file.close

      begin
        FileUtils.mv(temp_output_file.path, input_file_path)
      rescue Errno::EACCES, Errno::EXDEV
        warn "Atomic move failed. Falling back to copy and delete."
        FileUtils.cp(temp_output_file.path, input_file_path)
        FileUtils.rm_f(temp_output_file.path)
      end
    end

    warn "Markdown processing complete. Output written to #{input_file_path}"
    true # Indicate success
  end
end
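Finally, a hypothetical round trip through the new writer; the file name is an assumption and the file must already exist and be writable:

```ruby
require_relative "lib/markdown_file_writer" # assumed path relative to the gem root

path = "notes.md" # hypothetical markdown file
lines = File.readlines(path)
lines << "\n" << "Appended by markdown-run.\n"

# Replaces notes.md atomically via a temp file in the same directory,
# falling back to copy-and-delete if the rename is not permitted.
MarkdownFileWriter.write_output_to_file(lines, path)
```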