klue-langcraft 0.1.1 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.rubocop.yml +30 -3
- data/CHANGELOG.md +21 -0
- data/bin/dsl_watcher.rb +10 -0
- data/bin/langcraft.rb +177 -0
- data/docs/dsl-class-diagram.md +97 -0
- data/docs/dsl-examples.md +9 -0
- data/docs/dsl-upgrade-plan.md +266 -0
- data/lib/base_process.rb +41 -0
- data/lib/dsl_folder_watcher.rb +50 -0
- data/lib/dsl_interpreter.rb +112 -0
- data/lib/dsl_process_data.rb +31 -0
- data/lib/klue/langcraft/dsl/interpreter.rb +114 -0
- data/lib/klue/langcraft/dsl/klue_runner.rb +68 -0
- data/lib/klue/langcraft/dsl/process_data_pipeline.rb +65 -0
- data/lib/klue/langcraft/dsl/process_matcher.rb +59 -0
- data/lib/klue/langcraft/dsl/processor_config.rb +35 -0
- data/lib/klue/langcraft/dsl/processors/file_collector_processor.rb +30 -0
- data/lib/klue/langcraft/dsl/processors/full_name_processor.rb +34 -0
- data/lib/klue/langcraft/dsl/processors/processor.rb +43 -0
- data/lib/klue/langcraft/dsl/watcher.rb +88 -0
- data/lib/klue/langcraft/dsl/webhook.rb +57 -0
- data/lib/klue/langcraft/version.rb +1 -1
- data/lib/klue/langcraft.rb +29 -2
- data/lib/process_file_collector.rb +92 -0
- data/package-lock.json +2 -2
- data/package.json +1 -1
- metadata +39 -7
- data/docs/dsl-samples/index.md +0 -4
- /data/lib/klue/langcraft/{-brief.md → tokenizer-old-needs-revisit/-brief.md} +0 -0
- /data/lib/klue/langcraft/{parser.rb → tokenizer-old-needs-revisit/parser.rb} +0 -0
- /data/lib/klue/langcraft/{sample_usage.rb → tokenizer-old-needs-revisit/sample_usage.rb} +0 -0
- /data/lib/klue/langcraft/{tokenizer.rb → tokenizer-old-needs-revisit/tokenizer.rb} +0 -0
data/lib/base_process.rb
ADDED
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+# file: lib/base_process.rb
+
+class BaseProcess
+  attr_reader :key
+
+  def initialize(key)
+    @key = key
+  end
+
+  def deep_match(input, predicate)
+    matches = []
+
+    # If the current input is a Hash, iterate over each key-value pair
+    if input.is_a?(Hash)
+
+      input.each do |key, value|
+
+        # If the value matches the predicate, add it to matches
+        if predicate.call(key, value)
+          matches << value
+        end
+
+        # Continue searching deeper within the value
+        matches.concat(deep_match(value, predicate))
+      end
+
+    # If the input is an Array, iterate over each item
+    elsif input.is_a?(Array)
+
+      input.each do |item|
+
+        # Continue searching within each item of the array
+        matches.concat(deep_match(item, predicate))
+      end
+    end
+
+    matches
+  end
+end
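A minimal usage sketch for deep_match (illustrative; not part of the published files). The method walks an arbitrarily nested Hash/Array structure and collects every value whose key/value pair satisfies the predicate.

process = BaseProcess.new(:file_collector)
input = {
  workflow: {
    file_collector: { root: 'src' },
    steps: [{ file_collector: { root: 'docs' } }]
  }
}

# Collect every value stored under a :file_collector key, at any depth
process.deep_match(input, ->(key, _value) { key == :file_collector })
# => [{ root: 'src' }, { root: 'docs' }]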
data/lib/dsl_folder_watcher.rb
ADDED
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+class DSLFolderWatcher
+  def self.watch(folder_path)
+    puts "Watching: #{folder_path}"
+    listener = Listen.to(folder_path) do |modified, added, _removed|
+      changes = (modified + added).uniq
+
+      # DEBOUNCE CURRENTLY NOT WORKING
+      # debounce_map = {}
+      # debounce_interval = 1 # seconds
+
+      changes.each do |file_path|
+        next unless File.extname(file_path) == '.klue'
+
+        puts file_path
+
+        # debounce_map[file_path] ||= Time.now
+        # next unless Time.now - debounce_map[file_path] >= debounce_interval
+
+        # debounce_map[file_path] = Time.now
+
+        base_name = file_path.gsub(/\.klue$/, '')
+        input_file = "#{base_name}.klue"
+        output_file = "#{base_name}.json"
+
+        interpreter = DSLInterpreter.new
+        if interpreter.process('', input_file, output_file)
+          # Process the JSON data to add 'process-data' details
+          dsl_processor = DSLProcessData.new
+          dsl_processor.process('', output_file, output_file)
+          # SKIP EXTEND FILE FOR NOW AND REWRITE THE OUTPUTFILE
+          # dsl_processor.process('', output_file, extended_output_file)
+
+          # interpreter.send_to_endpoint
+        else
+          puts 'Skipping further processing due to errors in DSL interpretation.'
+        end
+      end
+
+      # Remove old entries from debounce_map to prevent memory bloat
+      # debounce_map.each_key do |key|
+      #   debounce_map.delete(key) if Time.now - debounce_map[key] > debounce_interval * 2
+      # end
+    end
+    listener.start
+    puts "Wait for changes: #{folder_path}"
+    sleep
+  end
+end
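An invocation sketch (illustrative; assumes the listen gem and JSON support are loaded by the caller). watch blocks forever and regenerates <name>.json beside every <name>.klue that is added or modified.

require 'listen'
require 'json'

DSLFolderWatcher.watch('./dsl-samples')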
data/lib/dsl_interpreter.rb
ADDED
@@ -0,0 +1,112 @@
+# frozen_string_literal: true
+
+# ChatReferences:
+# - https://chatgpt.com/c/67064770-d524-8002-8344-3091e895d150
+# - https://chatgpt.com/c/6706289c-9b9c-8002-86e3-f9198c1c608a
+# - https://chatgpt.com/c/670dcd34-5dbc-8002-ad7a-d4df54a6a2e0
+#
+class DSLInterpreter
+  def initialize
+    @data = {}
+  end
+
+  # Capturing top-level DSL methods
+  def method_missing(method_name, *args, &block)
+    key = method_name
+    value = process_args(args, block)
+
+    # Append key-value to the current context of @data
+    if @data[key]
+      @data[key] = [@data[key]] unless @data[key].is_a?(Array)
+      @data[key] << value
+    else
+      @data[key] = value
+    end
+  end
+
+  # A method to handle parameters and nested blocks
+  def process_args(args, block)
+    data = {}
+
+    # Handling positional and named parameters separately
+    positional_args = []
+    named_args = {}
+
+    args.each do |arg|
+      if arg.is_a?(Hash)
+        named_args.merge!(arg)
+      else
+        positional_args << arg
+      end
+    end
+
+    # Assign positional parameters generically
+    positional_args.each_with_index do |arg, index|
+      data[:"param#{index + 1}"] = arg
+    end
+
+    # Merge named parameters directly
+    data.merge!(named_args)
+
+    # Handling a nested block
+    if block
+      interpreter = DSLInterpreter.new
+      interpreter.instance_eval(&block)
+      data.merge!(interpreter.data)
+    end
+
+    data.empty? ? nil : data
+  end
+
+  # To access data after interpreting
+  attr_reader :data
+
+  # Reading file and evaluating as Ruby
+  def process(base_path, input_file, output_file)
+    file_path = File.join(base_path, input_file)
+    content = File.read(file_path)
+
+    # begin
+    instance_eval(content)
+    # rescue SyntaxError => e
+    #   puts "Syntax error in DSL file: #{input_file}"
+    #   puts "Error message: #{e.message}"
+    #   puts "Error occurred at line: #{e.backtrace.first}"
+    #   return false # Indicate that processing failed
+    # rescue StandardError => e
+    #   puts "Error processing DSL file: #{input_file}"
+    #   puts "Error message: #{e.message}"
+    #   puts "Error occurred at: #{e.backtrace.first}"
+    #   return false # Indicate that processing failed
+    # end
+
+    output_path = File.join(base_path, output_file)
+    File.write(output_path, JSON.pretty_generate(to_hash))
+    true # Indicate that processing succeeded
+  end
+
+  # Convert to hash or JSON as required
+  def to_hash
+    @data
+  end
+
+  def to_json(*_args)
+    @data.to_json
+  end
+
+  # Method to send data to an endpoint
+  def send_to_endpoint
+    root_key = @data.keys.first
+    action_type = root_key.to_s
+
+    uri = URI.parse("http://localhost:4567/dsl/#{action_type}")
+    http = Net::HTTP.new(uri.host, uri.port)
+    request = Net::HTTP::Post.new(uri.path, { 'Content-Type' => 'application/json' })
+    payload = { action_type: action_type, data: @data }
+    request.body = payload.to_json
+
+    response = http.request(request)
+    puts "Response: #{response.code} - #{response.message}"
+    puts "Endpoint: #{uri}"
+  end
+end
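An illustrative round trip (the DSL snippet is hypothetical). method_missing captures each top-level call; a nested block is evaluated by a fresh DSLInterpreter whose data is merged into the parent, and positional arguments become param1, param2, and so on.

interpreter = DSLInterpreter.new
interpreter.instance_eval <<~KLUE
  workflow :sample do
    step name: 'collect'
    step name: 'report'
  end
KLUE

interpreter.to_hash
# => { workflow: { param1: :sample, step: [{ name: 'collect' }, { name: 'report' }] } }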
data/lib/dsl_process_data.rb
ADDED
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+class DSLProcessData
+  PROCESSORS = [{ file_collector: ProcessFileCollector }].freeze
+
+  # Method to process the JSON file after initial evaluation
+  def process(base_path, input_file, output_file)
+    json_file_path = File.join(base_path, input_file)
+    data = JSON.parse(File.read(json_file_path))
+
+    # Loop through the processors and execute matching ones
+    PROCESSORS.each do |processor_entry|
+      key, processor_class = processor_entry.first
+      processor = processor_class.new(key)
+
+      next unless processor.match?(data)
+
+      result = processor.execute(data)
+
+      data['process-data'] ||= {}
+
+      result.each do |key, result|
+        data['process-data'][key.to_s] = result unless result.empty?
+      end
+    end
+
+    # Write the updated JSON data to an extended file
+    extended_output_file = File.join(base_path, output_file)
+    File.write(extended_output_file, JSON.pretty_generate(data))
+  end
+end
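ProcessFileCollector itself is defined in data/lib/process_file_collector.rb (added in this release but not shown here). From the loop above, an entry in PROCESSORS only needs to honour a small contract: new(key), match?(data), and execute(data) returning a hash of results. A hypothetical conforming processor, built on the BaseProcess shown earlier:

class KeyMatchProcessor < BaseProcess
  # Match when the key appears anywhere in the JSON-parsed data (string keys)
  def match?(data)
    deep_match(data, ->(k, _v) { k.to_s == key.to_s }).any?
  end

  # Return every node found under the key
  def execute(data)
    { key => deep_match(data, ->(k, _v) { k.to_s == key.to_s }) }
  end
end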
data/lib/klue/langcraft/dsl/interpreter.rb
ADDED
@@ -0,0 +1,114 @@
+# frozen_string_literal: true
+
+# ChatReferences:
+# - https://chatgpt.com/c/6719840b-c72c-8002-bbc2-bbd95fd98d31
+
+module Klue
+  module Langcraft
+    module DSL
+      # Interpreter class for processing and interpreting DSL input
+      #
+      # This class is responsible for handling method calls, processing arguments,
+      # and managing the overall interpretation of the DSL input. It also provides
+      # methods for processing input and output, as well as converting the data
+      # to hash and JSON formats.
+      class Interpreter
+        attr_reader :data
+        attr_accessor :processed
+
+        def initialize
+          klue_reset
+        end
+
+        def process(input: nil, input_file: nil, output_file: nil)
+          klue_reset
+          klue_validate_input_arguments(input, input_file)
+          klue_input_content = klue_input_content(input, input_file)
+
+          @processed = true
+          instance_eval(klue_input_content)
+
+          klue_write_output(output_file) if output_file
+          data
+        end
+
+        def method_missing(method_name, *args, &block)
+          raise "You must call 'process' before using other methods" unless @processed
+
+          key = method_name
+          value = klue_process_args(args, block)
+
+          if @data[key]
+            @data[key] = [@data[key]] unless @data[key].is_a?(Array)
+            @data[key] << value
+          else
+            @data[key] = value
+          end
+        end
+
+        def respond_to_missing?(method_name, include_private = false)
+          @processed || super
+        end
+
+        def klue_process_args(args, block)
+          positional_args = []
+          named_args = {}
+
+          # Handling positional and named parameters separately
+          args.each do |arg|
+            if arg.is_a?(Hash)
+              named_args.merge!(arg)
+            else
+              positional_args << arg
+            end
+          end
+
+          # Assign positional parameters generically
+          data = positional_args.each_with_index.to_h { |arg, index| [:"p#{index + 1}", arg] }
+
+          # Merge named parameters after positional ones
+          data.merge!(named_args)
+
+          # Handling a nested block
+          if block
+            interpreter = Interpreter.new
+            interpreter.instance_variable_set(:@processed, true) # Set @processed to true for nested interpreter
+            interpreter.instance_eval(&block)
+            data.merge!(interpreter.data)
+          end
+
+          data.empty? ? nil : data
+        end
+
+        private
+
+        def klue_reset
+          @data = {}
+          @processed = false
+        end
+
+        def klue_validate_input_arguments(input, input_file)
+          raise ArgumentError, 'Either input or input_file must be provided' unless input || input_file
+          raise ArgumentError, 'Both input and input_file cannot be provided' if input && input_file
+        end
+
+        def klue_input_content(input, input_file)
+          input_file ? File.read(input_file) : input
+        end
+
+        def klue_write_output(output_file)
+          output_path = klue_output_path(output_file)
+          File.write(output_path, JSON.pretty_generate(data))
+        end
+
+        def klue_output_path(output_file)
+          if Pathname.new(output_file).absolute?
+            output_file
+          else
+            File.join(File.dirname(output_file), File.basename(output_file))
+          end
+        end
+      end
+    end
+  end
+end
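A minimal sketch of the namespaced interpreter's API (assumes json and pathname are required by the gem's entry point). Note that positional arguments are now keyed p1, p2, ... rather than param1, param2, ...

interpreter = Klue::Langcraft::DSL::Interpreter.new
interpreter.process(input: "greeting 'hello', lang: 'en'")
# => { greeting: { p1: 'hello', lang: 'en' } }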
data/lib/klue/langcraft/dsl/klue_runner.rb
ADDED
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+module Klue
+  module Langcraft
+    module DSL
+      # KlueRunner handles the processing of DSL input data
+      # It manages the execution of various processors and the output of processed data
+      class KlueRunner
+        attr_reader :interpreter, :pipeline, :webhook
+
+        def initialize
+          @interpreter = Klue::Langcraft::DSL::Interpreter.new
+          @pipeline = Klue::Langcraft::DSL::ProcessDataPipeline.new(Klue::Langcraft::DSL::ProcessMatcher.new)
+          @webhook = Klue::Langcraft::DSL::Webhook.new
+        end
+
+        # Run the KlueRunner with the given input data
+        # @param input [String] The input data to process
+        # @param input_file [String] The input file to process (input data and file are mutually exclusive)
+        # @param basic_output_file [String] The output file to write the processed data, this file is before any processing
+        # @param enhanced_output_file [String] The output file to write the processed data, this file is after processing
+        def run(
+          input: nil,
+          input_file: nil,
+          basic_output_file: nil,
+          enhanced_output_file: nil,
+          webhook_url: nil,
+          log_level: :none
+        )
+          @log_level = log_level
+
+          log_info('Processing input')
+          data = interpreter.process(input: input, input_file: input_file, output_file: basic_output_file)
+          log_detailed('Interpreter output:', data)
+
+          log_info('Executing pipeline - enhance')
+          enhanced_data = pipeline.execute(data)
+          log_detailed('Enhanced output:', enhanced_data)
+
+          if enhanced_output_file
+            log_info("Writing enhanced output to file: #{enhanced_output_file}")
+            @pipeline.write_output(enhanced_data, enhanced_output_file)
+          end
+
+          if webhook_url
+            log_info("Delivering data to webhook: #{webhook_url}")
+            @webhook.deliver(webhook_url, enhanced_data)
+          end
+
+          log_info('Processing complete')
+        end
+
+        private
+
+        def log_info(message)
+          puts message if %i[info detailed].include?(@log_level)
+        end
+
+        def log_detailed(message, data)
+          return unless @log_level == :detailed
+
+          puts message
+          pp data
+        end
+      end
+    end
+  end
+end
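An end-to-end sketch (file names are illustrative).

runner = Klue::Langcraft::DSL::KlueRunner.new
runner.run(
  input_file: 'workflow.klue',
  basic_output_file: 'workflow.json',
  enhanced_output_file: 'workflow.enhanced.json',
  log_level: :info
)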
data/lib/klue/langcraft/dsl/process_data_pipeline.rb
ADDED
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+module Klue
+  module Langcraft
+    module DSL
+      # ProcessDataPipeline class for executing data processing pipelines
+      #
+      # This class is responsible for executing a series of data processing steps
+      # based on the configured processors. It manages the flow of data through
+      # the pipeline and handles the storage of results.
+      class ProcessDataPipeline
+        def initialize(matcher)
+          @matcher = matcher # Use the matcher to find processors
+        end
+
+        # Execute the pipeline of processors on the input data
+        def execute(data)
+          # NOTE: This is the complete data object; each processor gets a cloned version of the specific data it is matched to
+          matched_processors = @matcher.match_processors(data)
+
+          matched_processors.each do |processor|
+            processed_data = processor.build_result
+
+            # Store the processed data into the result structure
+            store_result(data, processor, processed_data)
+          end
+
+          data
+        end
+
+        # Optionally write the output to a file
+        def write_output(data, output_file)
+          File.write(output_file, JSON.pretty_generate(data))
+        end
+
+        private
+
+        # Store the processed result back into the data structure
+        def store_result(data, _processor, processed_data)
+          return unless processed_data
+
+          data['process-data'] ||= {}
+
+          if processed_data[:name].nil? || processed_data[:name].empty?
+            index = calculate_index(data, processed_data[:type])
+            processed_data[:name] = "#{processed_data[:type]}-#{index}"
+          end
+
+          data['process-data'][processed_data[:name]] = processed_data
+        end
+
+        def calculate_index(data, processor_type)
+          # Find all keys in 'process-data' that match the processor type (e.g., file_collector)
+          last_index = data['process-data'].keys
+                                           .select { |k| k.start_with?(processor_type.to_s) } # Keys that start with processor type
+                                           .map { |k| k.split('-').last.to_i } # Extract numeric suffix
+                                           .max
+
+          # If no entries exist, start at 1; otherwise, increment the last index
+          last_index ? last_index + 1 : 1
+        end
+      end
+    end
+  end
+end
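A sketch of the enhancement pass (assumes the gem's entry point has been required so that ProcessorConfigDefault exists and the bundled processors have self-registered). Results named via an 'as' key are stored under that name; unnamed results fall back to the type-index scheme produced by calculate_index (file_collector-1, file_collector-2, ...).

pipeline = Klue::Langcraft::DSL::ProcessDataPipeline.new(
  Klue::Langcraft::DSL::ProcessMatcher.new
)

enhanced = pipeline.execute({ 'file_collector' => { 'as' => 'sources' } })
enhanced['process-data'].keys
# => ["sources"]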
data/lib/klue/langcraft/dsl/process_matcher.rb
ADDED
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+module Klue
+  module Langcraft
+    module DSL
+      # ProcessMatcher class for matching processors to input nodes
+      #
+      # This class is responsible for traversing input nodes and finding
+      # the appropriate processor for each node based on the configured
+      # processor rules.
+      class ProcessMatcher
+        def initialize(processor_config = ProcessorConfigDefault)
+          @processor_config = processor_config
+        end
+
+        def match_processors(nodes)
+          matched_processors = []
+
+          traverse_nodes(nodes) do |key, value|
+            processor_class = find_processor_for(key, value)
+            if processor_class
+              if value.is_a?(Array)
+                # If the value is an array, instantiate a processor for each element
+                value.each do |element|
+                  matched_processors << processor_class.new(element, key)
+                end
+              else
+                matched_processors << processor_class.new(value, key)
+              end
+            end
+          end
+
+          matched_processors
+        end
+
+        private
+
+        def traverse_nodes(node, &block)
+          if node.is_a?(Hash)
+            node.each do |key, value|
+              yield(key, value)
+              traverse_nodes(value, &block)
+            end
+          elsif node.is_a?(Array)
+            node.each_with_index do |item, index|
+              # Provide the index to uniquely identify each element
+              traverse_nodes(item) { |key, value| yield("#{key}[#{index}]", value) }
+            end
+          end
+        end
+
+        # Find the correct processor based on the key using the registered processor config
+        def find_processor_for(key, _value)
+          @processor_config.processor_for(key) # Return the processor class, not an instance
+        end
+      end
+    end
+  end
+end
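A matching sketch (same registration assumption as above). Keys are looked up against the processor config, and an array value yields one processor per element.

matcher = Klue::Langcraft::DSL::ProcessMatcher.new
matcher.match_processors({ 'full_name' => { 'first_name' => 'Ada' } }).map(&:class)
# => [Klue::Langcraft::DSL::Processors::FullNameProcessor]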
data/lib/klue/langcraft/dsl/processor_config.rb
ADDED
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module Klue
+  module Langcraft
+    module DSL
+      # ProcessorConfig class for managing processor configurations
+      #
+      # This class is responsible for registering processors and providing
+      # methods to retrieve processors based on keys or to get all registered
+      # processors.
+      class ProcessorConfig
+        def initialize
+          @processors = {}
+        end
+
+        # Register a processor with its associated keys
+        def register_processor(processor_class)
+          keys = processor_class.keys
+          keys = [keys] unless keys.is_a?(Array)
+          keys.each { |key| @processors[key.to_sym] = processor_class }
+        end
+
+        # Find the processor class by key
+        def processor_for(key)
+          @processors[key.to_sym]
+        end
+
+        # List all registered processors
+        def all_processors
+          @processors
+        end
+      end
+    end
+  end
+end
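A registration sketch against a standalone config.

config = Klue::Langcraft::DSL::ProcessorConfig.new
config.register_processor(Klue::Langcraft::DSL::Processors::FullNameProcessor)

config.processor_for('full_name')
# => Klue::Langcraft::DSL::Processors::FullNameProcessor
config.all_processors.keys
# => [:full_name]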
data/lib/klue/langcraft/dsl/processors/file_collector_processor.rb
ADDED
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module Klue
+  module Langcraft
+    module DSL
+      module Processors
+        # FileCollectorProcessor class for processing file-related data
+        #
+        # This processor is responsible for handling file collection operations
+        # within the DSL. It inherits from the base Processor class and implements
+        # specific logic for file-related processing.
+        class FileCollectorProcessor < Processor
+          def self.keys
+            [:file_collector]
+          end
+
+          # Example of how a subclass can implement its specific data logic
+          def build_result_data
+            {
+              files: ['file1.txt', 'file2.txt']
+            }
+          end
+
+          # Auto-register the processor as soon as the class is loaded
+          ProcessorConfigDefault.register_processor(self)
+        end
+      end
+    end
+  end
+end
data/lib/klue/langcraft/dsl/processors/full_name_processor.rb
ADDED
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module Klue
+  module Langcraft
+    module DSL
+      module Processors
+        # FullNameProcessor class for processing full name data
+        #
+        # This processor is responsible for handling full name processing operations
+        # within the DSL. It inherits from the base Processor class and implements
+        # specific logic for full name-related processing.
+        class FullNameProcessor < Processor
+          def self.keys
+            [:full_name] # FullNameProcessor's specific key(s)
+          end
+
+          # Implementation of logic for building full name data
+          def build_result_data
+            first_name = data['first_name'] || 'John'
+            last_name = data['last_name'] || 'Doe'
+            full_name = "#{first_name} #{last_name}"
+
+            {
+              full_name: full_name
+            }
+          end
+
+          # Auto-register the processor as soon as the class is loaded
+          ProcessorConfigDefault.register_processor(self)
+        end
+      end
+    end
+  end
+end
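Fed a node parsed from JSON (string keys, as the interpreter's output provides), the processor produces its envelope (illustrative sketch).

processor = Klue::Langcraft::DSL::Processors::FullNameProcessor.new(
  { 'first_name' => 'Ada', 'last_name' => 'Lovelace' }, :full_name
)
processor.build_result
# => { name: nil, type: 'full_name', data: { full_name: 'Ada Lovelace' } }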
data/lib/klue/langcraft/dsl/processors/processor.rb
ADDED
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+module Klue
+  module Langcraft
+    module DSL
+      module Processors
+        # Base Processor class for defining common processor behavior
+        #
+        # This abstract class serves as the foundation for all specific processors.
+        # It defines the basic structure and common methods that all processors
+        # should implement or inherit.
+        class Processor
+          attr_reader :data, :key
+
+          # Every processor subclass must accept data and key
+          def initialize(data, key)
+            @data = Marshal.load(Marshal.dump(data)) # Deep clone of the data
+            @key = key
+          end
+
+          # Build an envelope result with type, name, and data
+          def build_result
+            {
+              name: data.is_a?(Hash) ? data['as'] : nil,
+              type: key.to_s,
+              data: build_result_data
+            }
+          end
+
+          # Subclasses should override this method to build the actual data.
+          def build_result_data
+            raise NotImplementedError, 'Subclasses must implement `build_result_data` to generate their specific data'
+          end
+
+          # This will be overridden by subclasses to define keys (or aliases)
+          def self.keys
+            raise NotImplementedError, 'Subclasses must define the `keys` method'
+          end
+        end
+      end
+    end
+  end
+end
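A subclassing sketch (WordCountProcessor is hypothetical) showing the contract the base class enforces: define self.keys, override build_result_data, and self-register with ProcessorConfigDefault.

module Klue
  module Langcraft
    module DSL
      module Processors
        class WordCountProcessor < Processor
          def self.keys
            [:word_count]
          end

          # Count whitespace-separated words in the matched node
          def build_result_data
            { count: data.to_s.split.size }
          end

          ProcessorConfigDefault.register_processor(self)
        end
      end
    end
  end
end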