klue-langcraft 0.1.0 → 0.2.0
- checksums.yaml +4 -4
- data/.rubocop.yml +12 -0
- data/CHANGELOG.md +14 -0
- data/bin/dsl_watcher.rb +10 -0
- data/docs/dsl-class-diagram.md +97 -0
- data/docs/dsl-examples.md +9 -0
- data/docs/dsl-samples/youtube-launch-optimizer-strawberry.klue +1 -1
- data/docs/dsl-upgrade-plan.md +266 -0
- data/lib/base_process.rb +41 -0
- data/lib/dsl_folder_watcher.rb +50 -0
- data/lib/dsl_interpreter.rb +112 -0
- data/lib/dsl_process_data.rb +31 -0
- data/lib/klue/langcraft/dsl/interpreter.rb +114 -0
- data/lib/klue/langcraft/dsl/process_data_pipeline.rb +65 -0
- data/lib/klue/langcraft/dsl/process_matcher.rb +59 -0
- data/lib/klue/langcraft/dsl/processor_config.rb +35 -0
- data/lib/klue/langcraft/dsl/processors/file_collector_processor.rb +30 -0
- data/lib/klue/langcraft/dsl/processors/full_name_processor.rb +34 -0
- data/lib/klue/langcraft/dsl/processors/processor.rb +43 -0
- data/lib/klue/langcraft/{-brief.md → tokenizer-old-needs-revisit/-brief.md} +2 -0
- data/lib/klue/langcraft/version.rb +1 -1
- data/lib/klue/langcraft.rb +26 -2
- data/lib/process_file_collector.rb +92 -0
- data/package-lock.json +2 -2
- data/package.json +1 -1
- metadata +35 -7
- data/docs/dsl-samples/index.md +0 -4
- data/lib/klue/langcraft/{parser.rb → tokenizer-old-needs-revisit/parser.rb} +0 -0
- data/lib/klue/langcraft/{sample_usage.rb → tokenizer-old-needs-revisit/sample_usage.rb} +0 -0
- data/lib/klue/langcraft/{tokenizer.rb → tokenizer-old-needs-revisit/tokenizer.rb} +0 -0
checksums.yaml CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: '07528e25c004b36664752d235737a9f6f521e231eb2b9e900884861e2fbeccad'
+  data.tar.gz: bd96ac7b1247f8f4fe123b4bb7923df50f1067120b1faa6e1121c6936fe400d1
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f270af8583b0ef01f5c290f0d69abe2de5534682169440381532fea9510c0b2b03f7c247c13dcde3fd13dc88b89fdadc3255af804138b6373513ea16a32b31d6
+  data.tar.gz: e23279553bc9f644ebe56ce796dd99cda278c10170142806e2a9ed59839a3a51a541de9b0b75082474e46f130cd6e3e5d037d2c5e9c475585280ba032d84a6d8
data/.rubocop.yml CHANGED

@@ -14,6 +14,7 @@ AllCops:
   Exclude:
     - ".builders/**/*"
     - "spec/samples/**/*"
+    - "lib/*" # OLD DSL INTERPRETER
 
 Metrics/BlockLength:
   Exclude:
@@ -104,3 +105,14 @@ RSpec/SpecFilePathSuffix:
 RSpec/NamedSubject:
   Exclude:
     - "**/spec/**/*"
+
+RSpec/MultipleExpectations:
+  Exclude:
+    - "**/spec/**/*"
+
+RSpec/MultipleMemoizedHelpers:
+  Exclude:
+    - "**/spec/**/*"
+
+RSpec/ExampleLength:
+  Max: 20
data/CHANGELOG.md CHANGED

@@ -1,3 +1,17 @@
+## [0.1.1](https://github.com/appydave/klue-langcraft/compare/v0.1.0...v0.1.1) (2024-09-22)
+
+
+### Bug Fixes
+
+* add chatgpt conversation link ([ae5448d](https://github.com/appydave/klue-langcraft/commit/ae5448d4e23ab1fd673c788e6f30e736d82afb44))
+
+# [0.1.0](https://github.com/appydave/klue-langcraft/compare/v0.0.7...v0.1.0) (2024-09-22)
+
+
+### Features
+
+* add requirements documentation for sample parser ([c0dbe76](https://github.com/appydave/klue-langcraft/commit/c0dbe76f74d6f3d185a7eb37f507bd1e34578773))
+
 ## [0.0.7](https://github.com/appydave/klue-langcraft/compare/v0.0.6...v0.0.7) (2024-09-21)
 
 
data/docs/dsl-class-diagram.md ADDED

@@ -0,0 +1,97 @@

# DSL Class Diagram

```plaintext
+----------------------------+
|    Klue::Langcraft::DSL    |
|----------------------------|
| + Interpreter              |
| + ProcessMatcher           |
| + ProcessorConfig          |
| + ProcessDataPipeline      |
| + Processors               |
+----------------------------+
              |
              v
+-------------------------------------+
|             Interpreter             |
|-------------------------------------|
| - data: Hash                        |
| - processed: Boolean                |
|-------------------------------------|
| + initialize()                      |
| + process(input, output)            |
| + method_missing()                  |
| + process_args(args, block)         |
| + respond_to_missing?()             |
| + to_json()                         |
+-------------------------------------+
              |
              v
+-------------------------------------+
|            ProcessMatcher           |
|-------------------------------------|
| + match_processors(nodes)           |
|-------------------------------------|
| - traverse_nodes(node, &block)      |
| - find_processor_for(key, value)    |
|-------------------------------------|
| - processor_config: ProcessorConfig |
+-------------------------------------+
              |
              v
+-------------------------------------+
|         ProcessDataPipeline         |
|-------------------------------------|
| + execute(data)                     |
| + write_output(data, output_file)   |
|-------------------------------------|
| - store_result(data, processor,     |
|     processed_data)                 |
| - calculate_index(data, processor)  |
|-------------------------------------|
| - matcher: ProcessMatcher           |
+-------------------------------------+
              |
              v
+-------------------------------------+
|           ProcessorConfig           |
|-------------------------------------|
| + register_processor(processor)     |
| + processor_for(key)                |
| + all_processors()                  |
|-------------------------------------|
| - processors: Hash                  |
+-------------------------------------+
              |
              v
+----------------------------+
|         Processor          |
|----------------------------|
| - data: Hash               |
| - key: Symbol              |
|----------------------------|
| + initialize(data, key)    |
| + build_result()           |
| + build_result_data()      |
|----------------------------|
| + keys() (abstract method) |
+----------------------------+
              |
              v
+----------------------------+    +-------------------------+
|   FileCollectorProcessor   |    |    FullNameProcessor    |
|----------------------------|    |-------------------------|
| (inherits Processor)       |    | (inherits Processor)    |
|----------------------------|    |-------------------------|
| + build_result_data()      |    | + build_result_data()   |
| + Auto-register with       |    | + Auto-register with    |
|   ProcessorConfig          |    |   ProcessorConfig       |
+----------------------------+    +-------------------------+
```
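Read top to bottom, the diagram describes a pipeline: the `Interpreter` turns DSL input into a data hash, the `ProcessMatcher` pairs nodes with registered processors, and the `ProcessDataPipeline` executes them and writes the result. A minimal sketch of that wiring, assuming the class names in the diagram (editor's illustration: the file paths are hypothetical, the parsed hash is assumed readable via `data`, and exact signatures may differ):

```ruby
# Sketch of the flow implied by the diagram; not code from the gem itself.
require 'klue/langcraft' # assumed gem entry point

interpreter = Klue::Langcraft::DSL::Interpreter.new
interpreter.process(input: 'sample.klue', output: 'sample.json') # DSL -> data hash

matcher  = Klue::Langcraft::DSL::ProcessMatcher.new              # pairs nodes with processors
pipeline = Klue::Langcraft::DSL::ProcessDataPipeline.new(matcher)
enhanced = pipeline.execute(interpreter.data)                    # processors enrich the data
pipeline.write_output(enhanced, 'sample-enhanced.json')
```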
data/docs/dsl-examples.md CHANGED

@@ -103,3 +103,12 @@ definition :workflow do
   end
 end
 ```
+
+
+You're going to help me process a Ruby-inspired DSL into JSON.
+
+There will be two parts to this conversation. Part one will explore the concept with a simple reflection-based system that works with any Ruby-compatible structure.
+
+Part two, which I will kick off after part one is finished, will use an AST-style approach so that the same concept can be processed in JavaScript or Rust.
+
+To get started, I'm going to give you some sample Ruby DSL and you're going to convert it to an equivalent Ruby hash.
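As a concrete instance of the conversion this note asks for, here is the kind of DSL-to-hash mapping part one produces (editor's illustration; the `param1` naming for positional arguments follows the `DSLInterpreter#process_args` convention shown later in this diff):

```ruby
# Editor's illustration of the requested DSL -> hash conversion.
#
# Input DSL:
#
#   workflow :demo do
#     prompt :title, content: 'Suggest a title.'
#   end
#
# Equivalent Ruby hash (positional args become :param1, :param2, ...):
expected = {
  workflow: {
    param1: :demo,
    prompt: { param1: :title, content: 'Suggest a title.' }
  }
}
```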
data/docs/dsl-samples/youtube-launch-optimizer-strawberry.klue CHANGED

@@ -12,7 +12,7 @@ workflow :youtube_launch_optimizer do
     prompt :thumbnail_idea, content: 'Suggest creative thumbnail ideas for the video.'
   end
 
-  section :title_optimization do
+  section :title_optimization, count: 3, active: true do
     step :generate_title do
       input :video_description
 
data/docs/dsl-upgrade-plan.md ADDED

@@ -0,0 +1,266 @@

# DSL System Expansion Plan

## Overview

The current system processes DSL input using an interpreter, matchers, processors, and pipelines. We need to expand the system by introducing several new features:

1. **Processor Service**: A centralized service that orchestrates all operations, simplifying how the watcher and command-line tools work.
2. **Data Saving Option**: An option to toggle whether the processed JSON data is saved to a file.
3. **Endpoint for Data Delivery**: The capability to send the processed data to external HTTP endpoints, configurable via the command line.

The changes will affect the class structure and introduce new command-line options and configurations. Below is a detailed explanation of the required features and how they will be implemented.

---

## New Components and Changes

### 1. **Processor Service**

- **Purpose**: Introduce a new service (tentatively called `ProcessorService`) to orchestrate the different components. This class will:
  - Call the interpreter to process the DSL input.
  - Run the data through the processing pipeline to apply any necessary transformations.
  - Handle saving the processed data to a file if configured.
  - Send the resulting JSON data to external endpoints if URLs are provided via the command line.

- **Why**: Currently, the watcher or command-line tool is responsible for calling the various components (interpreter, pipeline, etc.). By introducing this service, we centralize the logic and simplify the external interfaces. The watcher will only need to call this service, and any configuration options (like file saving or sending data to an endpoint) will be handled internally by the service.

- **Key Features**:
  - **Options Handling**: The service will take in various options, such as:
    - Whether to save the processed JSON to a file.
    - URLs for sending data (e.g., one for simplified JSON, one for enhanced JSON).
  - **Sequential Processing**: The service will sequentially call:
    1. The interpreter, to process the DSL.
    2. The processing pipeline, to enhance the data.
    3. The file save, if the option is enabled.
    4. The endpoint delivery, if URLs are provided.

### 2. **Data Saving Option**

- **Purpose**: Introduce a command-line option (`--save-to-file`) that enables or disables saving the processed data to a file. By default the data flows through the system; whether it gets written to the filesystem is configurable.

- **Why**: Not all users will want to persist the JSON data to disk. The ability to toggle this behavior makes the system more flexible.

- **Details**:
  - When the `--save-to-file` option is enabled, the processed data is saved to a file.
  - If the option is not provided, the data is not saved to a file, but it still flows through the system for other uses (e.g., sending to endpoints).

### 3. **Data Delivery to Endpoints**

- **Purpose**: Add the ability to send the JSON data (either simplified or enhanced) to external HTTP endpoints, configurable via command-line options (`--endpoint-simplified` and `--endpoint-enhanced`).

- **Why**: The system should be able to integrate with external services by sending the resulting data to a web application. For example, the web app could receive simplified JSON for debugging and enhanced JSON for final usage. This allows seamless data transfer between the DSL processing system and external applications.

- **Details**:
  - The `ProcessorService` will check whether any URLs are provided via the command line.
  - If URLs are provided, it will send the resulting JSON data (using HTTP POST) to those endpoints.
  - **Two endpoints**:
    - One for simplified JSON.
    - One for enhanced JSON.
  - The system should allow sending data to either or both endpoints, depending on the provided options.

### 4. **Command-Line Interface (Expanded)**

- The existing `CommandLineInterface` class should be expanded to handle the new options:
  - `--save-to-file`: Toggles whether the JSON is saved to a file.
  - `--endpoint-simplified`: Specifies the URL for sending simplified JSON.
  - `--endpoint-enhanced`: Specifies the URL for sending enhanced JSON.

- These options should be passed to the `ProcessorService`, which will handle the actual behavior (e.g., saving the file, sending data to the endpoint).

### 5. **Watcher Integration**

- The existing `Watcher` class, which uses the Listen gem to monitor file changes, will now delegate all processing to the `ProcessorService`. When a file change is detected, the watcher should simply call the service with the necessary options.

- This means the watcher doesn't need to know about interpreters, pipelines, or processors; it just hands the file over to the `ProcessorService`, which handles everything sequentially.
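The plan's sample code below covers the service, delivery, and CLI classes but not the watcher itself. A minimal sketch of the delegation described in this section, assuming the Listen gem and the options hash used by `ProcessorService` below (the `:input` option and the `.klue` filtering are editor's assumptions):

```ruby
# Sketch only: a Watcher that hands changed files to ProcessorService.
require 'listen'

module Klue
  module Langcraft
    module DSL
      class Watcher
        def initialize(watch_dirs, options = {})
          @watch_dirs = watch_dirs
          @options = options
        end

        def start
          listener = Listen.to(*@watch_dirs) do |modified, added, _removed|
            (modified + added).uniq.each do |file_path|
              next unless File.extname(file_path) == '.klue'

              # The watcher knows nothing about interpreters, pipelines,
              # or processors; it only calls the orchestrating service.
              ProcessorService.new(@options.merge(input: file_path)).run
            end
          end
          listener.start
          sleep # keep the process alive while listening
        end
      end
    end
  end
end
```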
---

## Revised Class Structure

### New Classes:

1. **ProcessorService**:
   - Central orchestrator for calling the interpreter, processing data, saving the file, and sending data to endpoints.
   - Takes all options from the command line and the watcher.

2. **DataDeliveryService**:
   - Handles sending JSON data to external HTTP endpoints.
   - Accepts a URL and sends the provided data using HTTP POST.

### Expanded Classes:

1. **CommandLineInterface**:
   - Now handles additional command-line options (`--save-to-file`, `--endpoint-simplified`, `--endpoint-enhanced`).
   - Passes these options to the `ProcessorService`.

2. **Watcher**:
   - Instead of directly calling interpreters and pipelines, the watcher now passes detected file changes to the `ProcessorService` for handling.

---

## Command-Line Options

- **New Options**:
  - `--save-to-file`: If provided, the resulting JSON is saved to a file. Otherwise, the data only flows through the system.
  - `--endpoint-simplified`: A URL for sending the simplified JSON to an external endpoint.
  - `--endpoint-enhanced`: A URL for sending the enhanced JSON to an external endpoint.

These options are passed to the `ProcessorService`, which handles the appropriate behavior based on the configuration.

---

## Final Workflow

1. **Command-Line Usage**:
   - Users can pass options such as directories to watch, whether to save files, and URLs for endpoints.

2. **Watcher**:
   - Monitors the specified directories for file changes and passes any detected changes to the `ProcessorService`.

3. **ProcessorService**:
   - Orchestrates the sequence:
     1. Calls the `Interpreter` to process the DSL input.
     2. Runs the `ProcessDataPipeline` to enhance the data.
     3. Saves the resulting data to a file if the `--save-to-file` option is enabled.
     4. Sends the resulting data to the provided URL(s) if the `--endpoint-simplified` or `--endpoint-enhanced` options are specified.

4. **Data Delivery**:
   - The `DataDeliveryService` is responsible for sending the processed data to the external endpoints, handling any HTTP interactions required.


# SAMPLE CODE, NOT TESTED BUT MIGHT BE SUITABLE

```ruby
# file: lib/klue/langcraft/dsl/data_delivery_service.rb

require 'net/http'
require 'uri'
require 'json'

module Klue
  module Langcraft
    module DSL
      class DataDeliveryService
        def initialize(url)
          @url = URI.parse(url)
        end

        # Named `deliver` (rather than `send`) to avoid shadowing Object#send
        def deliver(data)
          http = Net::HTTP.new(@url.host, @url.port)
          request = Net::HTTP::Post.new(@url.request_uri, { 'Content-Type' => 'application/json' })
          request.body = data.to_json
          response = http.request(request)

          puts "Data sent to #{@url}: #{response.code} #{response.message}"
        end
      end
    end
  end
end
```

```ruby
# file: lib/klue/langcraft/dsl/processor_service.rb

require_relative 'interpreter'
require_relative 'process_matcher'
require_relative 'process_data_pipeline'
require_relative 'data_delivery_service'

module Klue
  module Langcraft
    module DSL
      class ProcessorService
        def initialize(options)
          @options = options
          @interpreter = Interpreter.new
          @pipeline = ProcessDataPipeline.new(ProcessMatcher.new)
        end

        def run
          data = call_interpreter
          enhanced_data = run_pipeline(data)
          save_to_file(enhanced_data) if @options[:save_to_file]
          send_to_endpoint(enhanced_data)
        end

        private

        def call_interpreter
          @interpreter.process(input: 'path_to_input_file.dsl') # placeholder path
        end

        def run_pipeline(data)
          @pipeline.execute(data)
        end

        def save_to_file(data)
          File.write('output.json', JSON.pretty_generate(data))
        end

        def send_to_endpoint(data)
          if @options[:endpoint_simplified]
            DataDeliveryService.new(@options[:endpoint_simplified]).deliver(data)
          end

          if @options[:endpoint_enhanced]
            DataDeliveryService.new(@options[:endpoint_enhanced]).deliver(data)
          end
        end
      end
    end
  end
end
```

```ruby
# file: lib/klue/langcraft/dsl/command_line_interface.rb

require_relative 'watcher'
require_relative 'processor_service'

module Klue
  module Langcraft
    module DSL
      class CommandLineInterface
        def initialize
          @watch_dirs = []
          @options = {}
        end

        def start
          parse_arguments
          start_watcher_or_processor_service
        end

        private

        def parse_arguments
          ARGV.each_with_index do |arg, index|
            case arg
            when '--watch'
              @watch_dirs << ARGV[index + 1]
            when '--save-to-file'
              @options[:save_to_file] = true
            when '--endpoint-simplified'
              @options[:endpoint_simplified] = ARGV[index + 1]
            when '--endpoint-enhanced'
              @options[:endpoint_enhanced] = ARGV[index + 1]
            end
          end
        end

        def start_watcher_or_processor_service
          if @watch_dirs.any?
            # Watch mode: delegate each detected change to the ProcessorService
            watcher = Watcher.new(@watch_dirs)
            watcher.start
          else
            # One-shot mode: run the service once with the parsed options
            processor_service = ProcessorService.new(@options)
            processor_service.run
          end
        end
      end
    end
  end
end
```
data/lib/base_process.rb ADDED

@@ -0,0 +1,41 @@

# frozen_string_literal: true

# file: lib/base_process.rb

class BaseProcess
  attr_reader :key

  def initialize(key)
    @key = key
  end

  # Recursively collect every value in a nested Hash/Array structure
  # for which the predicate returns true.
  def deep_match(input, predicate)
    matches = []

    # If the current input is a Hash, iterate over each key-value pair
    if input.is_a?(Hash)
      input.each do |key, value|
        # If the value matches the predicate, add it to matches
        matches << value if predicate.call(key, value)

        # Continue searching deeper within the value
        matches.concat(deep_match(value, predicate))
      end

    # If the input is an Array, iterate over each item
    elsif input.is_a?(Array)
      input.each do |item|
        # Continue searching within each item of the array
        matches.concat(deep_match(item, predicate))
      end
    end

    matches
  end
end
data/lib/dsl_folder_watcher.rb ADDED

@@ -0,0 +1,50 @@

# frozen_string_literal: true

class DSLFolderWatcher
  def self.watch(folder_path)
    puts "Watching: #{folder_path}"
    listener = Listen.to(folder_path) do |modified, added, _removed|
      changes = (modified + added).uniq

      # DEBOUNCE CURRENTLY NOT WORKING
      # debounce_map = {}
      # debounce_interval = 1 # seconds

      changes.each do |file_path|
        next unless File.extname(file_path) == '.klue'

        puts file_path

        # debounce_map[file_path] ||= Time.now
        # next unless Time.now - debounce_map[file_path] >= debounce_interval

        # debounce_map[file_path] = Time.now

        base_name = file_path.gsub(/\.klue$/, '')
        input_file = "#{base_name}.klue"
        output_file = "#{base_name}.json"

        interpreter = DSLInterpreter.new
        if interpreter.process('', input_file, output_file)
          # Process the JSON data to add 'process-data' details
          dsl_processor = DSLProcessData.new
          dsl_processor.process('', output_file, output_file)
          # SKIP EXTEND FILE FOR NOW AND REWRITE THE OUTPUT FILE
          # dsl_processor.process('', output_file, extended_output_file)

          # interpreter.send_to_endpoint
        else
          puts 'Skipping further processing due to errors in DSL interpretation.'
        end
      end

      # Remove old entries from debounce_map to prevent memory bloat
      # debounce_map.each_key do |key|
      #   debounce_map.delete(key) if Time.now - debounce_map[key] > debounce_interval * 2
      # end
    end
    listener.start
    puts "Wait for changes: #{folder_path}"
    sleep
  end
end
data/lib/dsl_interpreter.rb ADDED

@@ -0,0 +1,112 @@

# frozen_string_literal: true

# ChatReferences:
# - https://chatgpt.com/c/67064770-d524-8002-8344-3091e895d150
# - https://chatgpt.com/c/6706289c-9b9c-8002-86e3-f9198c1c608a
# - https://chatgpt.com/c/670dcd34-5dbc-8002-ad7a-d4df54a6a2e0
#
class DSLInterpreter
  def initialize
    @data = {}
  end

  # Capture top-level DSL methods
  def method_missing(method_name, *args, &block)
    key = method_name
    value = process_args(args, block)

    # Append key-value to the current context of @data
    if @data[key]
      @data[key] = [@data[key]] unless @data[key].is_a?(Array)
      @data[key] << value
    else
      @data[key] = value
    end
  end

  # A method to handle parameters and nested blocks
  def process_args(args, block)
    data = {}

    # Handle positional and named parameters separately
    positional_args = []
    named_args = {}

    args.each do |arg|
      if arg.is_a?(Hash)
        named_args.merge!(arg)
      else
        positional_args << arg
      end
    end

    # Assign positional parameters generically
    positional_args.each_with_index do |arg, index|
      data[:"param#{index + 1}"] = arg
    end

    # Merge named parameters directly
    data.merge!(named_args)

    # Handle a nested block
    if block
      interpreter = DSLInterpreter.new
      interpreter.instance_eval(&block)
      data.merge!(interpreter.data)
    end

    data.empty? ? nil : data
  end

  # To access data after interpreting
  attr_reader :data

  # Read the file and evaluate it as Ruby
  def process(base_path, input_file, output_file)
    file_path = File.join(base_path, input_file)
    content = File.read(file_path)

    # begin
    instance_eval(content)
    # rescue SyntaxError => e
    #   puts "Syntax error in DSL file: #{input_file}"
    #   puts "Error message: #{e.message}"
    #   puts "Error occurred at line: #{e.backtrace.first}"
    #   return false # Indicate that processing failed
    # rescue StandardError => e
    #   puts "Error processing DSL file: #{input_file}"
    #   puts "Error message: #{e.message}"
    #   puts "Error occurred at: #{e.backtrace.first}"
    #   return false # Indicate that processing failed
    # end

    output_path = File.join(base_path, output_file)
    File.write(output_path, JSON.pretty_generate(to_hash))
    true # Indicate that processing succeeded
  end

  # Convert to hash or JSON as required
  def to_hash
    @data
  end

  def to_json(*_args)
    @data.to_json
  end

  # Method to send data to an endpoint
  def send_to_endpoint
    root_key = @data.keys.first
    action_type = root_key.to_s

    uri = URI.parse("http://localhost:4567/dsl/#{action_type}")
    http = Net::HTTP.new(uri.host, uri.port)
    request = Net::HTTP::Post.new(uri.path, { 'Content-Type' => 'application/json' })
    payload = { action_type: action_type, data: @data }
    request.body = payload.to_json

    response = http.request(request)
    puts "Response: #{response.code} - #{response.message}"
    puts "Endpoint: #{uri}"
  end
end
data/lib/dsl_process_data.rb ADDED

@@ -0,0 +1,31 @@

# frozen_string_literal: true

class DSLProcessData
  PROCESSORS = [{ file_collector: ProcessFileCollector }].freeze

  # Method to process the JSON file after initial evaluation
  def process(base_path, input_file, output_file)
    json_file_path = File.join(base_path, input_file)
    data = JSON.parse(File.read(json_file_path))

    # Loop through the processors and execute matching ones
    PROCESSORS.each do |processor_entry|
      key, processor_class = processor_entry.first
      processor = processor_class.new(key)

      next unless processor.match?(data)

      result = processor.execute(data)

      data['process-data'] ||= {}

      result.each do |result_key, result_value|
        data['process-data'][result_key.to_s] = result_value unless result_value.empty?
      end
    end

    # Write the updated JSON data to an extended file
    extended_output_file = File.join(base_path, output_file)
    File.write(extended_output_file, JSON.pretty_generate(data))
  end
end
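The processor contract implied here is `match?(data)` plus `execute(data)` returning a hash of results, with `BaseProcess` supplying `deep_match`. A minimal sketch of a conforming processor (editor's illustration; this is not the real `ProcessFileCollector`, which lives in data/lib/process_file_collector.rb):

```ruby
# Illustration only: a minimal processor honoring the implied contract.
# Assumes lib/base_process.rb has been required.
class ExampleKeyCounter < BaseProcess
  # A processor applies when its key appears anywhere in the parsed data.
  def match?(data)
    !deep_match(data, ->(k, _v) { k.to_s == key.to_s }).empty?
  end

  # Returns a hash; DSLProcessData stores each non-empty entry
  # under data['process-data'].
  def execute(data)
    found = deep_match(data, ->(k, _v) { k.to_s == key.to_s })
    { key => { 'count' => found.size } }
  end
end
```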