klue-langcraft 0.1.0 → 0.2.0

data/lib/klue/langcraft/dsl/interpreter.rb ADDED
@@ -0,0 +1,114 @@
+ # frozen_string_literal: true
+
+ # ChatReferences:
+ # - https://chatgpt.com/c/6719840b-c72c-8002-bbc2-bbd95fd98d31
+
+ module Klue
+   module Langcraft
+     module DSL
+       # Interpreter class for processing and interpreting DSL input
+       #
+       # This class is responsible for handling method calls, processing arguments,
+       # and managing the overall interpretation of the DSL input. It also provides
+       # methods for processing input and output, as well as converting the data
+       # to hash and JSON formats.
+       class Interpreter
+         attr_reader :data
+         attr_accessor :processed
+
+         def initialize
+           @data = {}
+           @processed = false
+         end
+
+         def process(input: nil, input_file: nil, output_file: nil)
+           validate_input_arguments(input, input_file)
+           input_content = input_content(input, input_file)
+
+           @processed = true
+           instance_eval(input_content)
+
+           write_output(output_file) if output_file
+           data
+         end
+
+         def method_missing(method_name, *args, &block)
+           raise "You must call 'process' before using other methods" unless @processed
+
+           key = method_name
+           value = process_args(args, block)
+
+           if @data[key]
+             @data[key] = [@data[key]] unless @data[key].is_a?(Array)
+             @data[key] << value
+           else
+             @data[key] = value
+           end
+         end
+
+         def respond_to_missing?(method_name, include_private = false)
+           @processed || super
+         end
+
+         def process_args(args, block)
+           positional_args = []
+           named_args = {}
+
+           # Handling positional and named parameters separately
+           args.each do |arg|
+             if arg.is_a?(Hash)
+               named_args.merge!(arg)
+             else
+               positional_args << arg
+             end
+           end
+
+           # Assign positional parameters generically
+           data = positional_args.each_with_index.to_h { |arg, index| [:"p#{index + 1}", arg] }
+
+           # Merge named parameters after positional ones
+           data.merge!(named_args)
+
+           # Handling a nested block
+           if block
+             interpreter = Interpreter.new
+             interpreter.instance_variable_set(:@processed, true) # Set @processed to true for nested interpreter
+             interpreter.instance_eval(&block)
+             data.merge!(interpreter.data)
+           end
+
+           data.empty? ? nil : data
+         end
+
+         private
+
+         def validate_input_arguments(input, input_file)
+           raise ArgumentError, 'Either input or input_file must be provided' unless input || input_file
+           raise ArgumentError, 'Both input and input_file cannot be provided' if input && input_file
+         end
+
+         def input_content(input, input_file)
+           input_file ? File.read(input_file) : input
+         end
+
+         def write_output(output_file)
+           output_path = get_output_path(output_file)
+           File.write(output_path, JSON.pretty_generate(data))
+         end
+
+         def get_output_path(output_file)
+           if Pathname.new(output_file).absolute?
+             output_file
+           else
+             File.join(File.dirname(output_file), File.basename(output_file))
+           end
+         end
+
+         # Convert to JSON
+         def to_json(*_args)
+           @data.to_json
+         end
+       end
+     end
+   end
+ end
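
A minimal usage sketch of the new interpreter (illustrative, not taken from the gem's docs; the DSL string and the expected hash are made up). Positional arguments are keyed `p1`, `p2`, ..., named arguments merge in as-is, and nested blocks become nested hashes:

```ruby
require 'klue/langcraft'

interpreter = Klue::Langcraft::DSL::Interpreter.new
data = interpreter.process(input: <<~DSL)
  full_name 'John', 'Doe'
  settings do
    theme 'dark'
  end
DSL

# Positional args are keyed :p1, :p2, ...; block contents merge into the parent key.
data
# => { full_name: { p1: 'John', p2: 'Doe' }, settings: { theme: { p1: 'dark' } } }
```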
data/lib/klue/langcraft/dsl/process_data_pipeline.rb ADDED
@@ -0,0 +1,65 @@
+ # frozen_string_literal: true
+
+ module Klue
+   module Langcraft
+     module DSL
+       # ProcessDataPipeline class for executing data processing pipelines
+       #
+       # This class is responsible for executing a series of data processing steps
+       # based on the configured processors. It manages the flow of data through
+       # the pipeline and handles the storage of results.
+       class ProcessDataPipeline
+         def initialize(matcher)
+           @matcher = matcher # Use the matcher to find processors
+         end
+
+         # Execute the pipeline of processors on the input data
+         def execute(data)
+           # NOTE: this is the complete data object; each processor gets a cloned copy of the specific data it matched
+           matched_processors = @matcher.match_processors(data)
+
+           matched_processors.each do |processor|
+             processed_data = processor.build_result
+
+             # Store the processed data into the result structure
+             store_result(data, processor, processed_data)
+           end
+
+           data
+         end
+
+         # Optionally write the output to a file
+         def write_output(data, output_file)
+           File.write(output_file, JSON.pretty_generate(data))
+         end
+
+         private
+
+         # Store the processed result back into the data structure
+         def store_result(data, _processor, processed_data)
+           return unless processed_data
+
+           data['process-data'] ||= {}
+
+           if processed_data[:name].nil? || processed_data[:name].empty?
+             index = calculate_index(data, processed_data[:type])
+             processed_data[:name] = "#{processed_data[:type]}-#{index}"
+           end
+
+           data['process-data'][processed_data[:name]] = processed_data
+         end
+
+         def calculate_index(data, processor_type)
+           # Find all keys in 'process-data' that match the processor type (e.g. file_collector)
+           last_index = data['process-data'].keys
+                                            .select { |k| k.start_with?(processor_type.to_s) } # Keys that start with the processor type
+                                            .map { |k| k.split('-').last.to_i }                # Extract the numeric suffix
+                                            .max
+
+           # If no entries exist, start at 1; otherwise, increment the last index
+           last_index ? last_index + 1 : 1
+         end
+       end
+     end
+   end
+ end
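
A hedged sketch of how the pipeline and matcher fit together. The class names are the ones added in this release; the string-keyed input hash is made up and mirrors the shape the processors read:

```ruby
require 'json'
require 'klue/langcraft'

# Hand-built, string-keyed data standing in for interpreted DSL output
data = { 'full_name' => { 'first_name' => 'Ada', 'last_name' => 'Lovelace' } }

pipeline = Klue::Langcraft::DSL::ProcessDataPipeline.new(
  Klue::Langcraft::DSL::ProcessMatcher.new
)
result = pipeline.execute(data)

# Each processor's envelope is stored under 'process-data'; without an `as`
# name it gets an indexed key such as "full_name-1".
puts JSON.pretty_generate(result['process-data'])
# => { "full_name-1": { "name": "full_name-1", "type": "full_name",
#                       "data": { "full_name": "Ada Lovelace" } } }
```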
data/lib/klue/langcraft/dsl/process_matcher.rb ADDED
@@ -0,0 +1,59 @@
+ # frozen_string_literal: true
+
+ module Klue
+   module Langcraft
+     module DSL
+       # ProcessMatcher class for matching processors to input nodes
+       #
+       # This class is responsible for traversing input nodes and finding
+       # the appropriate processor for each node based on the configured
+       # processor rules.
+       class ProcessMatcher
+         def initialize(processor_config = ProcessorConfigDefault)
+           @processor_config = processor_config
+         end
+
+         def match_processors(nodes)
+           matched_processors = []
+
+           traverse_nodes(nodes) do |key, value|
+             processor_class = find_processor_for(key, value)
+             if processor_class
+               if value.is_a?(Array)
+                 # If the value is an array, instantiate a processor for each element
+                 value.each do |element|
+                   matched_processors << processor_class.new(element, key)
+                 end
+               else
+                 matched_processors << processor_class.new(value, key)
+               end
+             end
+           end
+
+           matched_processors
+         end
+
+         private
+
+         def traverse_nodes(node, &block)
+           if node.is_a?(Hash)
+             node.each do |key, value|
+               yield(key, value)
+               traverse_nodes(value, &block)
+             end
+           elsif node.is_a?(Array)
+             node.each_with_index do |item, index|
+               # Provide the index to uniquely identify each element
+               traverse_nodes(item) { |key, value| yield("#{key}[#{index}]", value) }
+             end
+           end
+         end
+
+         # Find the correct processor based on the key using the registered processor config
+         def find_processor_for(key, _value)
+           @processor_config.processor_for(key) # Return the processor class, not an instance
+         end
+       end
+     end
+   end
+ end
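
A small sketch of the matching rules, assuming the default registrations loaded by the gem (the input hash is hypothetical): the matcher returns one processor instance per matched key, and one per element when the matched value is an array:

```ruby
require 'klue/langcraft'

matcher = Klue::Langcraft::DSL::ProcessMatcher.new # defaults to ProcessorConfigDefault

nodes = {
  'file_collector' => [
    { 'root' => './lib' },
    { 'root' => './spec' }
  ]
}

processors = matcher.match_processors(nodes)
processors.map(&:class).uniq # => [Klue::Langcraft::DSL::Processors::FileCollectorProcessor]
processors.map(&:data)       # => [{ 'root' => './lib' }, { 'root' => './spec' }]
```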
data/lib/klue/langcraft/dsl/processor_config.rb ADDED
@@ -0,0 +1,35 @@
+ # frozen_string_literal: true
+
+ module Klue
+   module Langcraft
+     module DSL
+       # ProcessorConfig class for managing processor configurations
+       #
+       # This class is responsible for registering processors and providing
+       # methods to retrieve processors based on keys or to get all registered
+       # processors.
+       class ProcessorConfig
+         def initialize
+           @processors = {}
+         end
+
+         # Register a processor with its associated keys
+         def register_processor(processor_class)
+           keys = processor_class.keys
+           keys = [keys] unless keys.is_a?(Array)
+           keys.each { |key| @processors[key.to_sym] = processor_class }
+         end
+
+         # Find the processor class by key
+         def processor_for(key)
+           @processors[key.to_sym]
+         end
+
+         # List all registered processors
+         def all_processors
+           @processors
+         end
+       end
+     end
+   end
+ end
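
A sketch of registering a processor against a standalone registry. `EchoProcessor` is illustrative only and not part of the gem:

```ruby
require 'klue/langcraft'

class EchoProcessor < Klue::Langcraft::DSL::Processors::Processor
  def self.keys
    [:echo, :repeat] # either key resolves to this processor
  end

  def build_result_data
    { echoed: data }
  end
end

config = Klue::Langcraft::DSL::ProcessorConfig.new
config.register_processor(EchoProcessor)

config.processor_for(:echo)    # => EchoProcessor
config.processor_for('repeat') # => EchoProcessor (keys are normalised to symbols)
config.all_processors.keys     # => [:echo, :repeat]
```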
data/lib/klue/langcraft/dsl/processors/file_collector_processor.rb ADDED
@@ -0,0 +1,30 @@
+ # frozen_string_literal: true
+
+ module Klue
+   module Langcraft
+     module DSL
+       module Processors
+         # FileCollectorProcessor class for processing file-related data
+         #
+         # This processor is responsible for handling file collection operations
+         # within the DSL. It inherits from the base Processor class and implements
+         # specific logic for file-related processing.
+         class FileCollectorProcessor < Processor
+           def self.keys
+             [:file_collector]
+           end
+
+           # Example of how a subclass can implement its specific data logic
+           def build_result_data
+             {
+               files: ['file1.txt', 'file2.txt']
+             }
+           end
+
+           # Auto-register the processor as soon as the class is loaded
+           ProcessorConfigDefault.register_processor(self)
+         end
+       end
+     end
+   end
+ end
data/lib/klue/langcraft/dsl/processors/full_name_processor.rb ADDED
@@ -0,0 +1,34 @@
+ # frozen_string_literal: true
+
+ module Klue
+   module Langcraft
+     module DSL
+       module Processors
+         # FullNameProcessor class for processing full name data
+         #
+         # This processor is responsible for handling full name processing operations
+         # within the DSL. It inherits from the base Processor class and implements
+         # specific logic for full name-related processing.
+         class FullNameProcessor < Processor
+           def self.keys
+             [:full_name] # FullNameProcessor's specific key(s)
+           end
+
+           # Implementation of logic for building full name data
+           def build_result_data
+             first_name = data['first_name'] || 'John'
+             last_name = data['last_name'] || 'Doe'
+             full_name = "#{first_name} #{last_name}"
+
+             {
+               full_name: full_name
+             }
+           end
+
+           # Auto-register the processor as soon as the class is loaded
+           ProcessorConfigDefault.register_processor(self)
+         end
+       end
+     end
+   end
+ end
data/lib/klue/langcraft/dsl/processors/processor.rb ADDED
@@ -0,0 +1,43 @@
+ # frozen_string_literal: true
+
+ module Klue
+   module Langcraft
+     module DSL
+       module Processors
+         # Base Processor class for defining common processor behavior
+         #
+         # This abstract class serves as the foundation for all specific processors.
+         # It defines the basic structure and common methods that all processors
+         # should implement or inherit.
+         class Processor
+           attr_reader :data, :key
+
+           # Every processor subclass must accept data and key
+           def initialize(data, key)
+             @data = Marshal.load(Marshal.dump(data)) # Deep clone of the data
+             @key = key
+           end
+
+           # Build an envelope result with type, name, and data
+           def build_result
+             {
+               name: data.is_a?(Hash) ? data['as'] : nil,
+               type: key.to_s,
+               data: build_result_data
+             }
+           end
+
+           # Subclasses should override this method to build the actual data.
+           def build_result_data
+             raise NotImplementedError, 'Subclasses must implement `build_result_data` to generate their specific data'
+           end
+
+           # This will be overridden by subclasses to define keys (or aliases)
+           def self.keys
+             raise NotImplementedError, 'Subclasses must define the `keys` method'
+           end
+         end
+       end
+     end
+   end
+ end
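
A sketch of the envelope a subclass produces, using the `FullNameProcessor` added above; the input hash is made up and string-keyed, matching the keys the processors read (`data['as']`, `data['first_name']`, ...):

```ruby
require 'klue/langcraft'

processor = Klue::Langcraft::DSL::Processors::FullNameProcessor.new(
  { 'first_name' => 'Grace', 'last_name' => 'Hopper', 'as' => 'presenter' },
  :full_name
)

processor.build_result
# => { name: 'presenter', type: 'full_name', data: { full_name: 'Grace Hopper' } }
```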
@@ -1,5 +1,7 @@
  # Brief for creating the Parser, Tokenizer and Parsing our first DSL
 
+ [ChatGPT Conversation](https://chatgpt.com/share/66efd141-6644-8002-970d-4ad641c54d00)
+
  # 1. Parsing Libraries in Ruby
 
  Here are three Ruby parsing libraries with their pros and cons:
data/lib/klue/langcraft/version.rb CHANGED
@@ -2,6 +2,6 @@
 
  module Klue
    module Langcraft
-     VERSION = '0.1.0'
+     VERSION = '0.2.0'
    end
  end
data/lib/klue/langcraft.rb CHANGED
@@ -1,6 +1,30 @@
  # frozen_string_literal: true
 
+ require 'json'
+ require 'net/http'
+ require 'uri'
+ require 'listen'
+ require 'pry'
+
+ require 'appydave/tools'
+
  require 'klue/langcraft/version'
+ require 'klue/langcraft/dsl/processor_config'
+
+ ProcessorConfigDefault = Klue::Langcraft::DSL::ProcessorConfig.new
+
+ require 'klue/langcraft/dsl/processors/processor'
+ require 'klue/langcraft/dsl/processors/file_collector_processor'
+ require 'klue/langcraft/dsl/processors/full_name_processor'
+ require 'klue/langcraft/dsl/interpreter'
+ require 'klue/langcraft/dsl/process_matcher'
+ require 'klue/langcraft/dsl/process_data_pipeline'
+
+ require 'base_process'
+ require 'process_file_collector'
+ require 'dsl_interpreter'
+ require 'dsl_folder_watcher'
+ require 'dsl_process_data'
 
  module Klue
    module Langcraft
@@ -12,8 +36,8 @@ module Klue
  end
 
  if ENV.fetch('KLUE_DEBUG', 'false').downcase == 'true'
-   namespace = 'KlueLangcraft::Version'
+   namespace = 'Klue::Langcraft::Version'
    file_path = $LOADED_FEATURES.find { |f| f.include?('klue-langcraft/version') }
-   version = KlueLangcraft::VERSION.ljust(9)
+   version = Klue::Langcraft::VERSION.ljust(9)
    puts "#{namespace.ljust(35)} : #{version.ljust(9)} : #{file_path}"
  end
data/lib/process_file_collector.rb ADDED
@@ -0,0 +1,92 @@
+ # frozen_string_literal: true
+
+ # file: lib/process_file_collector.rb
+
+ class ProcessFileCollector < BaseProcess
+   def initialize(key)
+     super
+     @found = nil
+     # @matcher = ->(key, value) { key.to_s == 'file_collector' && value.is_a?(Hash) }
+     @matcher = lambda do |key, value|
+       if key.to_s == 'file_collector'
+         if value.is_a?(Array)
+           value.any? { |v| v.is_a?(Hash) }
+         else
+           value.is_a?(Hash)
+         end
+       else
+         false
+       end
+     end
+   end
+
+   def match?(input)
+     @found = deep_match(input, @matcher)
+     !@found.empty?
+   end
+
+   def execute(_input)
+     # Iterate over each `file_collector` found and process individually
+     results = {}
+
+     @found.each do |data|
+       next unless data.is_a?(Hash)
+
+       # Extract the `as` key if present
+       as_key = data['as']
+
+       working_directory = File.expand_path(data['root'])
+
+       options = Appydave::Tools::GptContext::Options.new(
+         working_directory: working_directory,
+         include_patterns: extract_patterns(data.dig('files', 'include')),
+         exclude_patterns: extract_patterns(data.dig('files', 'exclude')),
+         format: 'json',
+         line_limit: data['line_length']
+       )
+
+       collector = Appydave::Tools::GptContext::FileCollector.new(options)
+       json = collector.build
+
+       # Structuring the result under `process-data` with `as` as key
+       result_data = {
+         type: 'file_collector',
+         data: {
+           working_directory: working_directory,
+           files: JSON.parse(json)
+         }
+       }
+
+       # If `as` key exists, use it to store under process-data with that identifier
+       if as_key
+         results[as_key] = result_data
+       else
+         # Generate a unique key if no `as` key is defined
+         unique_key = "file_collector_#{results.size + 1}"
+         results[unique_key] = result_data
+       end
+     end
+
+     results
+   rescue SyntaxError, NameError, NoMethodError => e
+     puts "Ruby evaluation error in ProcessFileCollector: #{e.message}"
+     puts "Error occurred at: #{e.backtrace.first}"
+     {}
+   rescue StandardError => e
+     puts "Unexpected error in ProcessFileCollector: #{e.message}"
+     puts e.backtrace.join("\n")
+     {}
+   end
+
+   private
+
+   def extract_patterns(files_data)
+     if files_data.is_a?(Hash)
+       [files_data['param1']]
+     elsif files_data.is_a?(Array)
+       files_data.map { |entry| entry['param1'] }
+     else
+       []
+     end
+   end
+ end
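
A hedged sketch of the input shape `ProcessFileCollector` looks for. `deep_match` comes from `BaseProcess` (not shown in this diff) and the actual file collection is delegated to the appydave-tools gem, so this only illustrates the keys read in `execute`; the paths, patterns, and `as` name are made up:

```ruby
require 'klue/langcraft'

input = {
  'file_collector' => {
    'as' => 'gem_source',
    'root' => './lib',
    'files' => {
      'include' => { 'param1' => '**/*.rb' },
      'exclude' => [{ 'param1' => 'node_modules/**/*' }]
    },
    'line_length' => 200
  }
}

collector = ProcessFileCollector.new(:file_collector)
if collector.match?(input)
  results = collector.execute(input)
  # results['gem_source'] => { type: 'file_collector',
  #                            data: { working_directory: ..., files: [...] } }
end
```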
data/package-lock.json CHANGED
@@ -1,12 +1,12 @@
  {
    "name": "klue-langcraft",
-   "version": "0.1.0",
+   "version": "0.2.0",
    "lockfileVersion": 3,
    "requires": true,
    "packages": {
      "": {
        "name": "klue-langcraft",
-       "version": "0.1.0",
+       "version": "0.2.0",
        "devDependencies": {
          "@klueless-js/semantic-release-rubygem": "github:klueless-js/semantic-release-rubygem",
          "@semantic-release/changelog": "^6.0.3",
data/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "klue-langcraft",
-   "version": "0.1.0",
+   "version": "0.2.0",
    "description": "Domain Specific Language Crafting",
    "scripts": {
      "release": "semantic-release"
metadata CHANGED
@@ -1,15 +1,29 @@
  --- !ruby/object:Gem::Specification
  name: klue-langcraft
  version: !ruby/object:Gem::Version
-   version: 0.1.0
+   version: 0.2.0
  platform: ruby
  authors:
  - David Cruwys
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2024-09-22 00:00:00.000000000 Z
+ date: 2024-10-25 00:00:00.000000000 Z
  dependencies:
+ - !ruby/object:Gem::Dependency
+   name: appydave-tools
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
  - !ruby/object:Gem::Dependency
    name: k_log
    requirement: !ruby/object:Gem::Requirement
@@ -47,27 +61,41 @@ files:
  - README.md
  - Rakefile
  - bin/console
+ - bin/dsl_watcher.rb
  - bin/setup
+ - docs/dsl-class-diagram.md
  - docs/dsl-examples.md
  - docs/dsl-rules.md
- - docs/dsl-samples/index.md
  - docs/dsl-samples/youtube-launch-optimizer-old.klue
  - docs/dsl-samples/youtube-launch-optimizer-strawberry.json
  - docs/dsl-samples/youtube-launch-optimizer-strawberry.klue
  - docs/dsl-samples/youtube-launch-optimizer.defn.klue
  - docs/dsl-samples/youtube-launch-optimizer.json
  - docs/dsl-samples/youtube-launch-optimizer.klue
+ - docs/dsl-upgrade-plan.md
  - docs/project-plan/project-plan.md
  - docs/project-plan/project.drawio
  - docs/project-plan/project_done.svg
  - docs/project-plan/project_in_progress.svg
  - docs/project-plan/project_todo.svg
+ - lib/base_process.rb
+ - lib/dsl_folder_watcher.rb
+ - lib/dsl_interpreter.rb
+ - lib/dsl_process_data.rb
  - lib/klue/langcraft.rb
- - lib/klue/langcraft/-brief.md
- - lib/klue/langcraft/parser.rb
- - lib/klue/langcraft/sample_usage.rb
- - lib/klue/langcraft/tokenizer.rb
+ - lib/klue/langcraft/dsl/interpreter.rb
+ - lib/klue/langcraft/dsl/process_data_pipeline.rb
+ - lib/klue/langcraft/dsl/process_matcher.rb
+ - lib/klue/langcraft/dsl/processor_config.rb
+ - lib/klue/langcraft/dsl/processors/file_collector_processor.rb
+ - lib/klue/langcraft/dsl/processors/full_name_processor.rb
+ - lib/klue/langcraft/dsl/processors/processor.rb
+ - lib/klue/langcraft/tokenizer-old-needs-revisit/-brief.md
+ - lib/klue/langcraft/tokenizer-old-needs-revisit/parser.rb
+ - lib/klue/langcraft/tokenizer-old-needs-revisit/sample_usage.rb
+ - lib/klue/langcraft/tokenizer-old-needs-revisit/tokenizer.rb
  - lib/klue/langcraft/version.rb
+ - lib/process_file_collector.rb
  - package-lock.json
  - package.json
  - sig/klue/langcraft.rbs
data/docs/dsl-samples/index.md DELETED
@@ -1,4 +0,0 @@
- Printspeak DSL
- Project Plan
- Agent As Code
- Make Chapters out of my recordings using a folder and glob pattern geared to my naming convention