klue-langcraft 0.3.0 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 586607de2048a4670a8cc9cd1e06dd9a2d1636aa0c9825698af2b6bac51ea24d
- data.tar.gz: 1973c38bf614abc045c9d8e2759f984791b52da023d10441d2564bf54f99c323
+ metadata.gz: 991b394224dd15a2eccfe3ce1db7b6bca00304d8c6cf75ac8763a34beb6b9d06
+ data.tar.gz: 9a814b4cb28a1eefe7763c7705f652f754c567404748e326c60049e7d9f7f7c1
  SHA512:
- metadata.gz: 9c20cb42da33cc805d4ec372f6101f796b44d127724fd1a611cd2620e4614e225148fc2162ab6f0e8d9bdf2fa1909575f0ed78da93ba057b4af38415bae739bd
- data.tar.gz: 4ebe337655eba091ec8013700734afcea0a67006bc076ac5d94e075d4210fba5ee490e43f4eb92c716e2230d5c61918ce201fecde6b6919a081bfea1a95088c9
+ metadata.gz: e34c11144fefacb28af86e336e48f5bc905fb2a4db2ea75079689fcd21a1ecbc2f5ea8a6ffac8fbc16713a32323007279c3b208655c3864d29861848d84b38d2
+ data.tar.gz: 990a9b599923fc5a7117f0fafe0a20057f63798ebbef06a02de8b9debc3b900b6bc1797a9fcd472301b74d10944077077e25aa333fd001a81b88619fdecd668e
data/CHANGELOG.md CHANGED
@@ -1,3 +1,15 @@
+ # [0.3.0](https://github.com/appydave/klue-langcraft/compare/v0.2.0...v0.3.0) (2024-10-26)
+
+
+ ### Bug Fixes
+
+ * fix cop ([d77187a](https://github.com/appydave/klue-langcraft/commit/d77187ac59088b7ce6ca23630a06fbb007f05fb4))
+
+
+ ### Features
+
+ * update cli for watch and process ([9f0eeb1](https://github.com/appydave/klue-langcraft/commit/9f0eeb1a9409f6734d4ab930ca5c3164b71446eb))
+
  # [0.2.0](https://github.com/appydave/klue-langcraft/compare/v0.1.1...v0.2.0) (2024-10-25)


data/lib/klue/langcraft/version.rb CHANGED
@@ -2,6 +2,6 @@

  module Klue
  module Langcraft
- VERSION = '0.3.0'
+ VERSION = '0.4.0'
  end
  end
data/lib/klue/langcraft.rb CHANGED
@@ -23,12 +23,6 @@ require 'klue/langcraft/dsl/webhook'
  require 'klue/langcraft/dsl/watcher'
  require 'klue/langcraft/dsl/klue_runner'

- require 'base_process'
- require 'process_file_collector'
- require 'dsl_interpreter'
- require 'dsl_folder_watcher'
- require 'dsl_process_data'
-
  module Klue
  module Langcraft
  # raise Klue::Langcraft::Error, 'Sample message'
data/package-lock.json CHANGED
@@ -1,12 +1,12 @@
  {
  "name": "klue-langcraft",
- "version": "0.3.0",
+ "version": "0.4.0",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
  "": {
  "name": "klue-langcraft",
- "version": "0.3.0",
+ "version": "0.4.0",
  "devDependencies": {
  "@klueless-js/semantic-release-rubygem": "github:klueless-js/semantic-release-rubygem",
  "@semantic-release/changelog": "^6.0.3",
data/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "klue-langcraft",
- "version": "0.3.0",
+ "version": "0.4.0",
  "description": "Domain Specific Language Crafting",
  "scripts": {
  "release": "semantic-release"
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: klue-langcraft
  version: !ruby/object:Gem::Version
- version: 0.3.0
+ version: 0.4.0
  platform: ruby
  authors:
  - David Cruwys
@@ -61,7 +61,6 @@ files:
  - README.md
  - Rakefile
  - bin/console
- - bin/dsl_watcher.rb
  - bin/langcraft.rb
  - bin/setup
  - docs/dsl-class-diagram.md
@@ -79,10 +78,6 @@ files:
  - docs/project-plan/project_done.svg
  - docs/project-plan/project_in_progress.svg
  - docs/project-plan/project_todo.svg
- - lib/base_process.rb
- - lib/dsl_folder_watcher.rb
- - lib/dsl_interpreter.rb
- - lib/dsl_process_data.rb
  - lib/klue/langcraft.rb
  - lib/klue/langcraft/dsl/interpreter.rb
  - lib/klue/langcraft/dsl/klue_runner.rb
@@ -99,7 +94,6 @@ files:
  - lib/klue/langcraft/tokenizer-old-needs-revisit/sample_usage.rb
  - lib/klue/langcraft/tokenizer-old-needs-revisit/tokenizer.rb
  - lib/klue/langcraft/version.rb
- - lib/process_file_collector.rb
  - package-lock.json
  - package.json
  - sig/klue/langcraft.rbs
data/bin/dsl_watcher.rb DELETED
@@ -1,10 +0,0 @@
- #!/usr/bin/env ruby
- # frozen_string_literal: true
-
- $LOAD_PATH.unshift File.expand_path('../lib', __dir__)
-
- require 'klue/langcraft'
-
- BASE_PATH = ARGV[0] || '/Users/davidcruwys/dev/appydave/klueless'
-
- DSLFolderWatcher.watch(BASE_PATH)
data/lib/base_process.rb DELETED
@@ -1,41 +0,0 @@
- # frozen_string_literal: true
-
- # file: lib/base_process.rb
-
- class BaseProcess
- attr_reader :key
-
- def initialize(key)
- @key = key
- end
-
- def deep_match(input, predicate)
- matches = []
-
- # If the current input is a Hash, iterate over each key-value pair
- if input.is_a?(Hash)
-
- input.each do |key, value|
-
- # If the value matches the predicate, add it to matches
- if predicate.call(key, value)
- matches << value
- end
-
- # Continue searching deeper within the value
- matches.concat(deep_match(value, predicate))
- end
-
- # If the input is an Array, iterate over each item
- elsif input.is_a?(Array)
-
- input.each do |item|
-
- # Continue searching within each item of the array
- matches.concat(deep_match(item, predicate))
- end
- end
-
- matches
- end
- end
data/lib/dsl_folder_watcher.rb DELETED
@@ -1,50 +0,0 @@
- # frozen_string_literal: true
-
- class DSLFolderWatcher
- def self.watch(folder_path)
- puts "Watching: #{folder_path}"
- listener = Listen.to(folder_path) do |modified, added, _removed|
- changes = (modified + added).uniq
-
- # DEBOUNCE CURRENTLY NOT WORKING
- # debounce_map = {}
- # debounce_interval = 1 # seconds
-
- changes.each do |file_path|
- next unless File.extname(file_path) == '.klue'
-
- puts file_path
-
- # debounce_map[file_path] ||= Time.now
- # next unless Time.now - debounce_map[file_path] >= debounce_interval
-
- # debounce_map[file_path] = Time.now
-
- base_name = file_path.gsub(/\.klue$/, '')
- input_file = "#{base_name}.klue"
- output_file = "#{base_name}.json"
-
- interpreter = DSLInterpreter.new
- if interpreter.process('', input_file, output_file)
- # Process the JSON data to add 'process-data' details
- dsl_processor = DSLProcessData.new
- dsl_processor.process('', output_file, output_file)
- # SKIP EXTEND FILE FOR NOW AND REWRITE THE OUTPUTFILE
- # dsl_processor.process('', output_file, extended_output_file)
-
- # interpreter.send_to_endpoint
- else
- puts 'Skipping further processing due to errors in DSL interpretation.'
- end
- end
-
- # Remove old entries from debounce_map to prevent memory bloat
- # debounce_map.each_key do |key|
- # debounce_map.delete(key) if Time.now - debounce_map[key] > debounce_interval * 2
- # end
- end
- listener.start
- puts "Wait for changes: #{folder_path}"
- sleep
- end
- end
data/lib/dsl_interpreter.rb DELETED
@@ -1,112 +0,0 @@
- # frozen_string_literal: true
-
- # ChatReferences:
- # - https://chatgpt.com/c/67064770-d524-8002-8344-3091e895d150
- # - https://chatgpt.com/c/6706289c-9b9c-8002-86e3-f9198c1c608a
- # - https://chatgpt.com/c/670dcd34-5dbc-8002-ad7a-d4df54a6a2e0
- #
- class DSLInterpreter
- def initialize
- @data = {}
- end
-
- # Capturing top-level DSL methods
- def method_missing(method_name, *args, &block)
- key = method_name
- value = process_args(args, block)
-
- # Append key-value to the current context of @data
- if @data[key]
- @data[key] = [@data[key]] unless @data[key].is_a?(Array)
- @data[key] << value
- else
- @data[key] = value
- end
- end
-
- # A method to handle parameters and nested blocks
- def process_args(args, block)
- data = {}
-
- # Handling positional and named parameters separately
- positional_args = []
- named_args = {}
-
- args.each do |arg|
- if arg.is_a?(Hash)
- named_args.merge!(arg)
- else
- positional_args << arg
- end
- end
-
- # Assign positional parameters generically
- positional_args.each_with_index do |arg, index|
- data[:"param#{index + 1}"] = arg
- end
-
- # Merge named parameters directly
- data.merge!(named_args)
-
- # Handling a nested block
- if block
- interpreter = DSLInterpreter.new
- interpreter.instance_eval(&block)
- data.merge!(interpreter.data)
- end
-
- data.empty? ? nil : data
- end
-
- # To access data after interpreting
- attr_reader :data
-
- # Reading file and evaluating as Ruby
- def process(base_path, input_file, output_file)
- file_path = File.join(base_path, input_file)
- content = File.read(file_path)
-
- # begin
- instance_eval(content)
- # rescue SyntaxError => e
- # puts "Syntax error in DSL file: #{input_file}"
- # puts "Error message: #{e.message}"
- # puts "Error occurred at line: #{e.backtrace.first}"
- # return false # Indicate that processing failed
- # rescue StandardError => e
- # puts "Error processing DSL file: #{input_file}"
- # puts "Error message: #{e.message}"
- # puts "Error occurred at: #{e.backtrace.first}"
- # return false # Indicate that processing failed
- # end
-
- output_path = File.join(base_path, output_file)
- File.write(output_path, JSON.pretty_generate(to_hash))
- true # Indicate that processing succeeded
- end
-
- # Convert to hash or JSON as required
- def to_hash
- @data
- end
-
- def to_json(*_args)
- @data.to_json
- end
-
- # Method to send data to an endpoint
- def send_to_endpoint
- root_key = @data.keys.first
- action_type = root_key.to_s
-
- uri = URI.parse("http://localhost:4567/dsl/#{action_type}")
- http = Net::HTTP.new(uri.host, uri.port)
- request = Net::HTTP::Post.new(uri.path, { 'Content-Type' => 'application/json' })
- payload = { action_type: action_type, data: @data }
- request.body = payload.to_json
-
- response = http.request(request)
- puts "Response: #{response.code} - #{response.message}"
- puts "Endpoint: #{uri}"
- end
- end
data/lib/dsl_process_data.rb DELETED
@@ -1,31 +0,0 @@
- # frozen_string_literal: true
-
- class DSLProcessData
- PROCESSORS = [{ file_collector: ProcessFileCollector }].freeze
-
- # Method to process the JSON file after initial evaluation
- def process(base_path, input_file, output_file)
- json_file_path = File.join(base_path, input_file)
- data = JSON.parse(File.read(json_file_path))
-
- # Loop through the processors and execute matching ones
- PROCESSORS.each do |processor_entry|
- key, processor_class = processor_entry.first
- processor = processor_class.new(key)
-
- next unless processor.match?(data)
-
- result = processor.execute(data)
-
- data['process-data'] ||= {}
-
- result.each do |key, result|
- data['process-data'][key.to_s] = result unless result.empty?
- end
- end
-
- # Write the updated JSON data to an extended file
- extended_output_file = File.join(base_path, output_file)
- File.write(extended_output_file, JSON.pretty_generate(data))
- end
- end
data/lib/process_file_collector.rb DELETED
@@ -1,92 +0,0 @@
- # frozen_string_literal: true
-
- # file: lib/process_file_collector.rb
-
- class ProcessFileCollector < BaseProcess
- def initialize(key)
- super
- @found = nil
- # @matcher = ->(key, value) { key.to_s == 'file_collector' && value.is_a?(Hash) }
- @matcher = lambda do |key, value|
- if key.to_s == 'file_collector'
- if value.is_a?(Array)
- value.any? { |v| v.is_a?(Hash) }
- else
- value.is_a?(Hash)
- end
- else
- false
- end
- end
- end
-
- def match?(input)
- @found = deep_match(input, @matcher)
- !@found.empty?
- end
-
- def execute(_input)
- # Iterate over each `file_collector` found and process individually
- results = {}
-
- @found.each do |data|
- next unless data.is_a?(Hash)
-
- # Extract the `as` key if present
- as_key = data['as']
-
- working_directory = File.expand_path(data['root'])
-
- options = Appydave::Tools::GptContext::Options.new(
- working_directory: working_directory,
- include_patterns: extract_patterns(data.dig('files', 'include')),
- exclude_patterns: extract_patterns(data.dig('files', 'exclude')),
- format: 'json',
- line_limit: data['line_length']
- )
-
- collector = Appydave::Tools::GptContext::FileCollector.new(options)
- json = collector.build
-
- # Structuring the result under `process-data` with `as` as key
- result_data = {
- type: 'file_collector',
- data: {
- working_directory: working_directory,
- files: JSON.parse(json)
- }
- }
-
- # If `as` key exists, use it to store under process-data with that identifier
- if as_key
- results[as_key] = result_data
- else
- # Generate a unique key if no `as` key is defined
- unique_key = "file_collector_#{results.size + 1}"
- results[unique_key] = result_data
- end
- end
-
- results
- rescue SyntaxError, NameError, NoMethodError => e
- puts "Ruby evaluation error in ProcessFileCollector: #{e.message}"
- puts "Error occurred at: #{e.backtrace.first}"
- {}
- rescue StandardError => e
- puts "Unexpected error in ProcessFileCollector: #{e.message}"
- puts e.backtrace.join("\n")
- {}
- end
-
- private
-
- def extract_patterns(files_data)
- if files_data.is_a?(Hash)
- [files_data['param1']]
- elsif files_data.is_a?(Array)
- files_data.map { |entry| entry['param1'] }
- else
- []
- end
- end
- end