spark_engine 1.2.9 → 1.2.10

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: 517d9135194b594223815ccfdd158a389fffcde671e037b191b9462725b060e1
-   data.tar.gz: d629ce93d47645c2e9f65269ccaabf50a849e12f9c39e40248a870735a6bcdbd
+   metadata.gz: 03d7698e4656fa18b65b6f012927deb2492725d8006a9eac0aaec010b62a7905
+   data.tar.gz: e29f1d210de4f88f804a827309fe73765eca1e55ad186ee8efefeaa5b8c1a3db
  SHA512:
-   metadata.gz: 32a7666db7a2c1640762d0996b68e1cbe02be194d123a7e14fea2bf7f5df3664b3c6225cabf2eec92d11f4394b63b719ccf14e2482dd30c18c9e75672809a4b5
-   data.tar.gz: a4c1b56772f399a9bb07939290c0cb954b3de2baca9a34649585d0f7fda39744519543963d72ae9f0854483fe18f74ba92ac70a197defd7b3ebde5acac070823
+   metadata.gz: 73affa9f9892de7d01889afa017cc52e579ee44d7b76ddb1c32ced2e1914cf0ddf4ec852facde87bb0ee03bacef18ab9c8fd3eebc44eb7d6493e506f58f8ca5a
+   data.tar.gz: f0df88619ba26cdbbc3932d372e03cd3f0c82b6da10e49ebddb92309a6f647df745f706557c4072cd613a079bfc91b302197bc3711c60887a80858ee4de63864
data/bin/spark CHANGED
@@ -41,6 +41,35 @@ OptionParser.new do |opts|
    end
  end
 
+ if %w(g generate).include? options[:command]
+   options[:component] = next_arg
+
+   opts.on("-f", "--force", "overwrite existing files") do
+     options[:force] = true
+   end
+
+   opts.on("-t", "--skip-template", "Skip html template for component") do
+     options[:template] = false
+   end
+
+   opts.on("-s", "--skip-sass", "Skip Sass for component") do
+     options[:sass] = false
+   end
+
+   opts.on("-j", "--skip-js", "Skip javascript for component") do
+     options[:js] = false
+   end
+
+   opts.on("-c", "--class ClassName", String, "Component extends a classname (default: Components::Component)") do |val|
+     options[:class] = val
+   end
+
+   opts.on('-D', '--delete', "Remove component files") do
+     options[:delete] = true
+   end
+
+ end
+
  if %w(s server).include? options[:command]
    opts.on("-w", "--watch", "Watch assets") do |val|
      options[:watch] = true
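The block added above wires the component-specific flags into the new generate (g) command. As a rough illustration of what the option parser would collect, a call such as spark generate button --skip-js --class UI::Base (the component name "button" and the class name are hypothetical) would leave the options hash looking roughly like this:

    # Sketch only; the keys mirror the opts.on handlers above.
    options = {
      command:   'generate',
      component: 'button',    # taken from the next CLI argument via next_arg
      js:        false,       # --skip-js
      class:     'UI::Base'   # --class UI::Base
    }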
@@ -21,7 +21,7 @@ Options:
  end
 
  def engine_commands
-   list = %w(help build watch server help gem:build gem:install gem:release)
+   list = %w(help build watch server generate clean gem:build gem:install gem:release)
    begin
      gem 'bump'
      list.concat %w(gem:bump:patch gem:bump:minor gem:bump:major)
@@ -42,6 +42,7 @@ Options:
    when 'build', 'b'; build
    when 'watch', 'w'; watch
    when 'server', 's'; server
+   when 'generate', 'g'; generate
    when 'clean', 'c'; clean
    when 'help', 'h'; help
    when 'gem:build'; gem_build
@@ -54,55 +55,55 @@ Options:
  end
 
  def new
-   "new name [options] # Create a new Spark framework engine"
+   "new name [options] # Create a new Spark framework engine"
  end
 
  def help
-   "help [command] # Show help for a specific command"
+   "help [command] # Show help for a specific command"
+ end
+
+ def generate
+   "generate name [options] # Generate a Component"
  end
 
  def build
-   "build [options] # Build assets"
+   "build [options] # Build assets"
  end
 
  def watch
-   "watch [options] # Build assets when files change"
+   "watch [options] # Build assets when files change"
  end
 
  def server
-   "server [options] # Serve documentation site"
- end
-
- def clean
-   "clean # Remove cache files"
+   "server [options] # Serve documentation site"
  end
 
- def help
-   "help [command] # Show help for a specific command"
+ def gem_build
+   "gem:build # Build assets for production and build gem"
  end
 
- def gem_build
-   "gem:build # Build assets for production and build gem"
+ def clean
+   "clean # Remove cache files"
  end
 
  def gem_install
-   "gem:install # Build assets for production, build, and install gem"
+   "gem:install # Build assets for production, build, and install gem"
  end
 
  def gem_release
-   "gem:release # Build assets for production, build, and release gem to rubygems.org"
+   "gem:release # Build assets for production, build, and release gem to rubygems.org"
  end
 
  def gem_bump_patch
-   "gem:bump:patch # Bump version v0.0.0 -> v0.0.1"
+   "gem:bump:patch # Bump version v0.0.0 -> v0.0.1"
  end
 
  def gem_bump_minor
-   "gem:bump:minor # Bump version v0.0.0 -> v0.1.0"
+   "gem:bump:minor # Bump version v0.0.0 -> v0.1.0"
  end
 
  def gem_bump_major
-   "gem:bump:major # Bump version v0.0.0 -> v1.0.0"
+   "gem:bump:major # Bump version v0.0.0 -> v1.0.0"
  end
  end
  end
@@ -15,6 +15,12 @@ module SparkEngine
  when 'new', 'n'
    require "spark_engine/scaffold"
    Scaffold.new(options)
+ when 'generate', 'g'
+   from_root {
+     require "spark_engine/scaffold"
+     require_rails
+     Scaffold.new(options)
+   }
  when 'build', 'b'
    from_root { dispatch(:build, options) }
  when 'watch', 'w'
@@ -60,6 +66,7 @@ module SparkEngine
    FileUtils.rm_rf('public')
    dispatch(:build)
    system "bundle exec rake build"
+   system "git add Gemfile*"
  end
 
  def gem_install
@@ -0,0 +1,30 @@
+ module SparkEngine
+   module Data
+     extend self
+
+     def read
+       data_files(SparkEngine.plugin.root, Rails.root).each_with_object({}) do |path, data|
+         name = File.basename(path, '.*')
+         data[name] ||= {}
+
+         case File.extname(path)
+         when '.json'
+           data[name].merge!(JSON.parse(File.read(path)))
+         when '.yml'
+           data[name].merge!(YAML.load_file(path))
+         end
+
+         data
+       end
+     end
+
+     def data_files(*roots)
+       files = []
+       [roots].flatten.each do |root|
+         files.concat Dir[File.join(root, 'config', SparkEngine.plugin.name, '**/*.json')]
+         files.concat Dir[File.join(root, 'config', SparkEngine.plugin.name, '**/*.yml')]
+       end
+       files.flatten.compact.uniq
+     end
+   end
+ end
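The new SparkEngine::Data module collects every JSON and YAML file under config/<plugin name>/ in both the engine root and the host app's Rails.root, keys the result by file basename, and lets later files win (app overrides engine). A minimal usage sketch; the plugin name "example" and the file contents are hypothetical:

    # engine:   config/example/colors.yml  -> { "primary" => "#0af" }
    # host app: config/example/colors.yml  -> { "primary" => "#f60" }
    #
    # data_files lists the engine root first and Rails.root second, and
    # data[name].merge! is called per file, so the later file wins.
    SparkEngine::Data.read
    # => { "colors" => { "primary" => "#f60" } }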
@@ -55,6 +55,7 @@ module SparkEngine
  end
 
  def sass_data(key=nil)
+   require 'spark_engine/sass/sass_yaml'
    if key
      SparkEngine.plugin.stylesheets.data[key]
    else
@@ -21,7 +21,7 @@ module SparkEngine
  def #{method_name}(*args, &block)
 
    # Get the current helper object which has all the normal helper methods
-   if self.is_a?(SparkEngine::BlockHelper)
+   if self.is_a?(SparkEngine::BlockHelper)
      top_level_helper = self.helper
      parent_block_helper = self
    else
@@ -34,24 +34,16 @@ module SparkEngine
    #{klass.name}.current_helper = top_level_helper
    #{klass.name}.current_parent_block_helper = parent_block_helper
    renderer = #{klass.name}.new(*args)
+
    # ...then set them anyway on the renderer so that renderer methods can use it
    renderer.send(:helper=, top_level_helper)
    renderer.send(:parent=, parent_block_helper)
 
    body = block ? capture(renderer, &block) : nil
-   processed_body = renderer.display(body)
-   if processed_body
 
-     if ::Rails::VERSION::MAJOR >= 3
-       return processed_body
-     elsif top_level_helper.method(:concat).arity == 2
-       concat processed_body, binding
-     else
-       concat processed_body
-     end
-
+   if processed_body = renderer.display(body)
+     return processed_body
    end
-   renderer
  end
  )
  end
@@ -89,21 +81,16 @@ module SparkEngine
      super
    end
  end
-
+
  def capture(*args)
    # ActiveSupport 3.1 breaks capture method (defines it on all objects)
    # so we have to resort to rewrite it
-   if Rails.version < "3.1"
-     ActionView::Helpers::CaptureHelper.capture(renderer, &block)
-   else
-     value = nil
-     buffer = with_output_buffer { value = yield(*args) }
-     if string = buffer.presence || value and string.is_a?(String)
-       ERB::Util.html_escape string
-     end
+   value = nil
+   buffer = with_output_buffer { value = yield(*args) }
+   if string = buffer.presence || value and string.is_a?(String)
+     ERB::Util.html_escape string
    end
  end
-
  end
 
  end
@@ -1,4 +1,3 @@
- require 'sass'
  begin
    require "autoprefixer-rails"
  rescue
@@ -61,19 +60,48 @@ module SparkEngine
    compress(destination(file))
  end
 
- def build_sass(file)
-   style = SparkEngine.production? ? "compressed" : 'nested'
-   dest = destination(file)
+ def render_sass(file)
+   require "spark_engine/sass/engine.rb"
 
    Sass.logger.log_level = :error if SparkEngine.production?
+   Sass.compile_file(file, style: sass_style)
+ end
 
-   css = prefix_css( Sass.compile_file(file, style: style) )
+ def render_sassc(file)
+   require "spark_engine/sassc/importer"
+   SassC.logger.log_level = :error if SparkEngine.production?
 
-   File.open(dest, 'w') { |io| io.write(css) }
+   source = File.open(file, 'rb') { |f| f.read }
+   options = {
+     importer: SassC::SparkEngine::Importer,
+     load_paths: load_paths,
+     style: sass_style
+   }
+
+   SassC::Engine.new(source, options).render
+ end
+
+ def sass_style
+   SparkEngine.production? ? "compressed" : 'nested'
+ end
+
+ def build_sass(file)
+   css = prefix_css begin
+     render_sassc(file)
+   rescue LoadError => e
+     render_sass(file)
+   end
+
+   dest = destination(file)
 
+   File.open(dest, 'w') { |io| io.write(css) }
    compress(dest)
  end
 
+ def load_paths
+   [SparkEngine.plugin.paths[:stylesheets], SparkEngine.plugin.paths[:components]]
+ end
+
  def prefix_css(css)
    if defined? AutoprefixerRails
      AutoprefixerRails.process(css, autoprefixer_config).css
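build_sass now prefers the libsass-backed render_sassc and only falls back to the pure-Ruby render_sass when requiring spark_engine/sassc/importer raises LoadError (that is, when the sassc gems are not installed); the begin/rescue block is an expression whose value is handed straight to prefix_css. The same fallback idiom in isolation, with placeholder method names that are not part of the gem:

    def compile_with_sassc(file)
      require "sassc"                   # raises LoadError when the gem is absent
      "compiled #{file} with sassc"
    end

    def compile_with_sass(file)
      "compiled #{file} with sass"
    end

    def compile(file)
      compile_with_sassc(file)
    rescue LoadError
      compile_with_sass(file)           # pure-Ruby fallback
    end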
@@ -83,18 +111,11 @@ module SparkEngine
  end
 
  def data
-   if @data
-     @data
-   else
-     data = {}
-
-     Dir[File.join(base, "**/*.yml")].each do |file|
-       key = file.sub(base+"/", '').sub(/^_/,'').sub('.yml','')
-
-       data[key] = SassParser.parse(file)
-     end
+   return @data if @data && SparkEngine.production?
 
-     @data = data if SparkEngine.production?
+   @data = Dir[File.join(base, "**/*.yml")].each_with_object({}) do |file, data|
+     key = File.basename(file, '.*').sub(/^_/,'')
+     data[key] = SassYaml.new(file: file).to_yaml
      data
    end
  end
@@ -19,8 +19,8 @@ module SparkEngine
    @engine.name.sub(/::Engine/,'')
  end
 
+ # Create a new Rails::Engine
  def create_engine(&block)
-   # Create a new Rails::Engine
    @engine = parent_module.const_set('Engine', Class.new(Rails::Engine) do
 
      def spark_plugin_path
@@ -36,6 +36,7 @@ module SparkEngine
 
      require 'spark_engine/middleware'
 
+     # Ensure compiled assets in /public are served
      initializer "#{name}.static_assets" do |app|
        if app.config.public_file_server.enabled
          app.middleware.insert_after ::ActionDispatch::Static, SparkEngine::StaticAssets, "#{root}/public", engine_name: SparkEngine.plugin.name
@@ -43,8 +44,39 @@ module SparkEngine
        end
      end
 
+     initializer "#{name}.view_paths" do |app|
+       # Ensure Components are readable from engine paths
+       ActiveSupport.on_load :action_controller do
+         append_view_path "#{SparkEngine.plugin.paths[:components]}"
+       end
+
+     end
+
+     initializer "#{name}.asset_paths" do |app|
+       app.config.assets.paths << SparkEngine.plugin.paths[:components]
+     end
+
    end)
 
+   # Autoload engine lib and components path
+   @engine.config.autoload_paths.concat [
+     File.join(@engine.spark_plugin_path, "lib"),
+     SparkEngine.plugin.paths[:components]
+   ]
+
+   @engine.config.after_initialize do |app|
+     if defined?(SassC) && defined?(SassC::Rails)
+       # Inject Sass importer for yaml files
+       require "spark_engine/sassc/importer"
+       SassC::Rails::Importer::EXTENSIONS << SassC::SparkEngine::Importer::SassYamlExtension.new
+     elsif defined?(Sass)
+       # Overwrite Sass engine with Yaml support
+       require "spark_engine/sass/engine"
+     end
+   end
+
+   # Takes a block passed an evaluates it in the context of a Rails engine
+   # This allows plugins to modify engines when created.
    @engine.instance_eval(&block) if block_given?
  end
 
@@ -82,7 +114,6 @@ module SparkEngine
    assets(options).each do |asset|
      asset.build
    end
-
  end
 
  def watch(options)
@@ -109,6 +140,7 @@ module SparkEngine
    paths: {
      stylesheets: "app/assets/stylesheets/#{name}",
      javascripts: "app/assets/javascripts/#{name}",
+     components: "app/components/#{name}",
      images: "app/assets/images/#{name}",
      svgs: "app/assets/svgs/#{name}",
    }
@@ -1,7 +1,7 @@
+ require 'sass'
  require 'spark_engine/sass/importer'
 
  # Taken from https://github.com/chriseppstein/sass-css-importer/blob/master/lib/sass/css_importer/monkey_patches.rb
- # TODO: This feels wrong, surely there must be a better way to handle this
 
  class Sass::Engine
    alias initialize_without_yaml_importer initialize
@@ -1,50 +1,6 @@
- require 'sass'
- require 'yaml'
+ require "spark_engine/sass/sass_yaml"
 
  module SparkEngine
-   module SassParser
-     extend self
-
-     # Global vars beginning with underscore will have their children promoted to globals
-     # and will be assigned without the underscore
-     #
-     # For example: _colors: { yellow: '#fco' }
-     # becomes: colors: { yellow: '#fco'}, yellow: '#fco'
-     #
-     #
-     def load_yaml(data)
-       promote_globals YAML.load(data)
-     end
-
-     def read_file(file)
-       IO.read(file)
-     end
-
-     def promote_globals( data )
-       data.keys.select{|k| k.start_with?('_') }.each do |key|
-         data[key.sub(/^_/,'')] = data[key]
-         data = data.delete(key).merge(data)
-       end
-
-       data
-     end
-
-     def expand_vars(file)
-       content = read_file(file)
-       file_data = load_yaml(content)
-
-       content = content.gsub(/\$(?<var>\w+)/) do
-         file_data[$~[:var]].inspect
-       end
-
-       load_yaml content
-     end
-
-     def parse(file)
-       expand_vars file
-     end
-   end
-
    class Importer < Sass::Importers::Filesystem
 
      def watched_file?(uri)
@@ -65,38 +21,17 @@ module SparkEngine
      private
 
      def _find(dir, name, options)
-       return unless yaml? name
-
        full_filename, syntax = Sass::Util.destructure(find_real_file(dir, name, options))
-       return unless full_filename && File.readable?(full_filename)
+       return unless full_filename && yaml?(full_filename) && File.readable?(full_filename)
 
-       yaml = SassParser.parse(full_filename)
-       variables = yaml.map { |key, value| "$#{key}: #{_convert_to_sass(value)};" }.join("\n")
+       variables = SassC::SassYaml.new(file: full_filename).to_sass
 
        Sass::Engine.new(variables, options.merge(
-         :filename => full_filename,
-         :importer => self,
-         :syntax => :scss
+         :filename => full_filename,
+         :importer => self,
+         :syntax => :scss
        ))
      end
-
-     def _convert_to_sass(item)
-       if item.is_a? Array
-         _make_list(item)
-       elsif item.is_a? Hash
-         _make_map(item)
-       else
-         item.to_s
-       end
-     end
-
-     def _make_list(item)
-       '(' + item.map { |i| _convert_to_sass(i) }.join(',') + ')'
-     end
-
-     def _make_map(item)
-       '(' + item.map {|key, value| key.to_s + ':' + _convert_to_sass(value) }.join(',') + ')'
-     end
    end
 
  end
@@ -0,0 +1,90 @@
+ require 'yaml'
+
+ module SassC
+   class SassYaml
+
+     # Global vars beginning with underscore will have their children promoted to globals
+     # and will be assigned without the underscore
+     #
+     # For example: _colors: { yellow: '#fco' }
+     # becomes: colors: { yellow: '#fco'}, yellow: '#fco'
+     #
+     #
+     def initialize(options={})
+       @content = options[:content]
+
+       if options[:file] && File.exist?(options[:file])
+         @content = File.open(options[:file], 'rb') { |f| f.read }
+       end
+
+       @data = promote_keys YAML.load(@content)
+     end
+
+     # Flatten dollar values and promote keys before returning YAML
+     def to_yaml
+       promote_keys YAML.load(convert_dollar_values)
+     end
+
+     # Convert each key to $key and process each value to a
+     # Sass data structure (creating maps, lists, strings)
+     def to_sass
+       @data.map { |key, value|
+         "$#{key}: #{convert_to_sass_value(value)};"
+       }.join("\n")
+     end
+
+     # If underscore keys, copy children to top level vars too
+     # Input:
+     #   _colors:
+     #     yellow: '#fco'
+     # Output:
+     #   colors: { yellow: '#fco' }
+     #   yellow: '#fco'
+     #
+     def promote_keys( data )
+       data.keys.select{|k| k.start_with?('_') }.each do |key|
+         data[key.sub(/^_/,'')] = data[key]
+         data = data.delete(key).merge(data)
+       end
+
+       data
+     end
+
+     # Allow vars to reference other vars in their value with $
+     # Example Input:
+     #   blue: 'blue'
+     #   green: 'green'
+     #   gradient: [$blue, $green]
+     # Output:
+     #   blue: 'blue'
+     #   green: 'green'
+     #   gradient: ['blue', 'green']
+     #
+     def convert_dollar_values
+       @content.gsub(/\$(?<var>\w+)/) {
+         @data[$~[:var]].inspect
+       }
+     end
+
+     # Convert
+     def convert_to_sass_value(item)
+       if item.is_a? Array
+         make_list(item)
+       elsif item.is_a? Hash
+         make_map(item)
+       else
+         item.to_s
+       end
+     end
+
+     # Convert arrays to Sass list syntax
+     def make_list(item)
+       '(' + item.map { |i| convert_to_sass_value(i) }.join(',') + ')'
+     end
+
+     # Convert hashes to Sass map syntax
+     def make_map(item)
+       '(' + item.map {|key, value| key.to_s + ':' + convert_to_sass_value(value) }.join(',') + ')'
+     end
+   end
+ end
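SassC::SassYaml is the piece that turns a YAML file into Sass variables (to_sass) or back into plain data for stylesheets.data (to_yaml). A rough usage sketch based on the code above; the YAML snippet is illustrative and the output is shown approximately:

    yaml = SassC::SassYaml.new(content: "_colors:\n  yellow: '#fc0'\n")
    yaml.to_sass
    # => "$yellow: #fc0;\n$colors: (yellow:#fc0);"
    # The underscored key is promoted, so its children also become
    # top-level variables, and hashes become Sass maps.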
@@ -0,0 +1,37 @@
+ # Extracted from sassc-rails
+ require "sassc-rails"
+ require "spark_engine/sass/sass_yaml"
+
+ module SassC
+   module SparkEngine
+     class Importer < SassC::Rails::Importer
+
+       # Create a new importer to process yaml files
+       class SassYamlExtension < Extension
+         def postfix
+           ".yml"
+         end
+
+         def import_for(full_path, parent_dir, options={})
+           parsed_scss = SassYaml.new(file: full_path).to_sass
+           SassC::Importer::Import.new(full_path, source: parsed_scss)
+         end
+       end
+
+       # Inject importer into Rails
+       def imports(path, parent_path)
+         EXTENSIONS << SassYamlExtension.new
+         super(path, parent_path)
+       end
+
+
+       private
+
+       def record_import_as_dependency(path)
+         # Replace reference to sprockets for ease of use without Rails
+         # environment
+       end
+
+     end
+   end
+ end
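With this extension registered (see the after_initialize block in plugin.rb above), an @import that resolves to a .yml file receives generated $variables instead of raw YAML. A hypothetical way to exercise the extension directly; the path and file contents are made up:

    ext = SassC::SparkEngine::Importer::SassYamlExtension.new
    ext.postfix                                                      # => ".yml"
    import = ext.import_for("app/assets/stylesheets/example/_colors.yml", ".", {})
    import.source                                                    # => "$primary: #0af;" (generated Sass)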
@@ -0,0 +1,11 @@
+ source "https://rubygems.org"
+
+ # Specify your gem's dependencies in <%= @gem %>.gemspec
+ gemspec
+
+ gem 'listen'
+ gem 'bump'
+
+ # Select whichever sass implementation suits your setup
+ # gem 'sass'
+ gem 'sassc-rails'
@@ -19,6 +19,6 @@ Gem::Specification.new do |spec|
    spec.add_dependency "rails", ">= 4"
    spec.add_runtime_dependency "spark_engine"
 
-   spec.add_development_dependency "bundler", "~> 1.12"
-   spec.add_development_dependency "rake", "~> 10.0"
+   spec.add_development_dependency "bundler"
+   spec.add_development_dependency "rake"
  end
@@ -6,7 +6,12 @@ module <%= @gem_module %>
    end
  end
 
- SparkEngine.register(<%= @gem_module %>::Plugin, {
+ plugin_options = {
    gem: '<%= @gem %>'<% unless @engine.nil? %>,
    engine: '<%= @engine %>'<% end %>
- })
+ }
+
+ SparkEngine.register(<%= @gem_module %>::Plugin, plugin_options) do
+   # Customize Engine here.
+   # This block is evaluated when Engine is created.
+ end
@@ -6,14 +6,23 @@ module SparkEngine
    attr_reader :gem, :engine, :namespace, :plugin_module, :path, :gemspec_path
 
    def initialize(options)
-     @cwd = File.expand_path(File.dirname(options[:name]))
-     @gem = underscorize(File.basename(options[:name]))
-     @engine = underscorize(options[:engine] || @gem)
+     require 'erb'
+     @options = options
+
+     return new_component(@options) if @options[:component]
+
+     @cwd = File.expand_path(File.dirname(@options[:name]))
+     @gem = underscorize(File.basename(@options[:name]))
+     @engine = underscorize(@options[:engine] || @gem)
      @namespace = @engine
      @plugin_module = modulize @engine
      @gem_module = modulize @gem
      @gem_temp = ".#{@gem}-temp"
 
+     if Dir.exist?(@options[:name]) && !@options[:force]
+       return puts "Path #{@options[:name]} exists. Use --force to override"
+     end
+
      FileUtils.mkdir_p @cwd
 
      Dir.chdir @cwd do
@@ -53,10 +62,10 @@ module SparkEngine
      scaffold_path = File.expand_path("scaffold/**/*", File.dirname(__FILE__))
 
      Dir.glob(scaffold_path, File::FNM_DOTMATCH).select{|f| File.file? f}.each do |f|
-       write_template f.split(/spark_engine\/scaffold\//)[1]
+       write_template f.split(/spark_engine\/scaffold\//)[1], force: true
      end
 
-     system "bundle add listen bump"
+     system 'bundle'
    end
 
    # Create an Rails plugin engine for documentation site
@@ -109,7 +118,7 @@ module SparkEngine
      end
    end
 
-   def write_template(template, target=nil)
+   def write_template(template, options)
      template_path = File.expand_path("scaffold/#{template}", File.dirname(__FILE__))
 
      # Extract file extension
@@ -122,7 +131,7 @@ module SparkEngine
        'namespace' => @namespace
      }) + ext
 
-     write_file target_path, read_template(template_path)
+     write_file target_path, read_template(template_path), options
    end
 
    def read_template(file_path)
@@ -133,22 +142,28 @@ module SparkEngine
      contents
    end
 
-   def write_file(paths, content='', mode='w')
-     paths = [paths].flatten
-     paths.each do |path|
-       if File.exist?(path)
+   def write_file(path, content='', options={})
+     options[:mode] ||= 'w'
+
+     if File.exist?(path)
+       if options[:force]
          type = 'update'
        else
-         FileUtils.mkdir_p(File.dirname(path))
-         type = 'create'
+         return action_log('skipped', path)
        end
+     else
+       type = 'create'
+     end
 
-       File.open path, mode do |io|
-         io.write(content)
-       end
+     FileUtils.mkdir_p(File.dirname(path))
 
-       action_log(type, path)
+     if content.empty?
+       FileUtils.touch(path)
+     else
+       File.open(path, options[:mode]) { |io| io.write(content) }
      end
+
+     action_log(type, path)
    end
 
    def post_install
@@ -173,14 +188,58 @@ module SparkEngine
      puts dashes + "\n\n"
    end
 
+   def new_component(options={})
+     options = {
+       sass: true,
+       template: true,
+       js: true
+     }.merge(options)
+
+     path = File.join(SparkEngine.plugin.paths[:components], options[:component])
+     name = options[:component].split('/').last
+
+     paths = {
+       base: path,
+       component: path+'_component.rb',
+       template: File.join(path, "_#{name}.html.erb"),
+       sass: File.join(path, "_#{name}.scss"),
+       js: File.join(SparkEngine.plugin.paths[:javascripts], "components", "#{options[:component].sub(name, '_'+name)}.js")
+     }
+
+     if options[:delete]
+       return paths.values.each do |p|
+         action_log('delete', FileUtils.rm_rf(p).first) if File.exist?(p)
+       end
+     end
+
+     # Write component class
+     component_content = %Q{class #{modulize(options[:component])}Component < #{options[:class] || 'Components::Component' }\nend}
+     write_file(paths[:component], component_content, options)
+
+     write_file(paths[:template], '', options) if options[:template]
+     write_file(paths[:sass], '', options) if options[:sass]
+     write_file(paths[:js], '', options) if options[:js]
+   end
+
+
    def action_log(action, path)
-     puts action.rjust(12).colorize(:green).bold + " #{path}"
+     color = case action
+     when 'create', 'update'
+       :green
+     when 'skip'
+       :white
+     when 'delete'
+       :red
+     end
+     puts action.rjust(12).colorize(color).bold + " #{path.sub(Dir.pwd+'/','')}"
    end
 
    def modulize(input)
-     input.split('_').collect { |name|
-       (name =~ /[A-Z]/) ? name : name.capitalize
-     }.join
+     classify = lambda { |name|
+       name = (name =~ /_/) ? name.split('_').map(&classify).join : name
+       (name =~ /[A-Z]/) ? name : name.capitalize
+     }
+     input.split('/').map(&classify).join('::')
    end
 
    def underscorize(input)
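Taken together, new_component derives its file list from the component path and modulize turns that path into a namespaced class name. An illustrative run; the plugin name "example" and the component name are hypothetical, and the paths assume the default layout declared in plugin.rb above:

    modulize('forms/check_box')   # => "Forms::CheckBox"

    # Scaffold.new(component: 'forms/check_box') would write roughly:
    #   app/components/example/forms/check_box_component.rb
    #     class Forms::CheckBoxComponent < Components::Component
    #     end
    #   app/components/example/forms/check_box/_check_box.html.erb
    #   app/components/example/forms/check_box/_check_box.scss
    #   app/assets/javascripts/example/components/forms/_check_box.js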
@@ -1,3 +1,3 @@
  module SparkEngine
-   VERSION = "1.2.9"
+   VERSION = "1.2.10"
  end
data/lib/spark_engine.rb CHANGED
@@ -5,9 +5,7 @@ require "colorize"
  require "spark_engine/version"
  require "spark_engine/plugin"
  require "spark_engine/assets"
- require "spark_engine/sass/engine"
- require "spark_engine/sass/importer"
- require "spark_engine/config_data"
+ require "spark_engine/data"
 
  module SparkEngine
    autoload :BlockHelper, 'spark_engine/helpers/block_helper'
@@ -23,8 +21,12 @@ module SparkEngine
    @plugin
  end
 
- def config_data
-   SparkEngine::ConfigData.read(SparkEngine.plugin.root, Rails.root)
+ def data
+   if production?
+     @data ||= SparkEngine::Data.read
+   else
+     SparkEngine::Data.read
+   end
  end
 
  def register(plugin_module, options={}, &block)
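SparkEngine.data now memoizes only in production, so edits to the data files show up on the next call during development. The same pattern in isolation; expensive_read is a stand-in for SparkEngine::Data.read:

    def data
      if production?
        @data ||= expensive_read   # computed once per process
      else
        expensive_read             # recomputed every call, picking up file changes
      end
    end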
data/spark_engine.gemspec CHANGED
@@ -17,10 +17,9 @@ Gem::Specification.new do |spec|
    spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
    spec.require_paths = ['lib']
 
-   spec.add_runtime_dependency 'sass', '~> 3.4'
    spec.add_runtime_dependency 'esvg', '~> 4.2'
    spec.add_runtime_dependency 'colorize', '~> 0.8'
-   spec.add_runtime_dependency 'bundler', '~> 1.10'
+   spec.add_runtime_dependency 'bundler', '>= 1.10'
    spec.add_runtime_dependency 'autoprefixer-rails', '>= 8.0', '< 10'
 
    spec.add_development_dependency 'rails', '>= 5.0', '< 6'
metadata CHANGED
@@ -1,29 +1,15 @@
  --- !ruby/object:Gem::Specification
  name: spark_engine
  version: !ruby/object:Gem::Version
-   version: 1.2.9
+   version: 1.2.10
  platform: ruby
  authors:
  - Brandon Mathis
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2019-02-04 00:00:00.000000000 Z
+ date: 2019-03-11 00:00:00.000000000 Z
  dependencies:
- - !ruby/object:Gem::Dependency
-   name: sass
-   requirement: !ruby/object:Gem::Requirement
-     requirements:
-     - - "~>"
-       - !ruby/object:Gem::Version
-         version: '3.4'
-   type: :runtime
-   prerelease: false
-   version_requirements: !ruby/object:Gem::Requirement
-     requirements:
-     - - "~>"
-       - !ruby/object:Gem::Version
-         version: '3.4'
  - !ruby/object:Gem::Dependency
    name: esvg
    requirement: !ruby/object:Gem::Requirement
@@ -56,14 +42,14 @@ dependencies:
    name: bundler
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - "~>"
+     - - ">="
        - !ruby/object:Gem::Version
          version: '1.10'
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - "~>"
+     - - ">="
        - !ruby/object:Gem::Version
          version: '1.10'
  - !ruby/object:Gem::Dependency
@@ -168,7 +154,7 @@ files:
  - lib/spark_engine/command.rb
  - lib/spark_engine/command/help.rb
  - lib/spark_engine/command/npm.rb
- - lib/spark_engine/config_data.rb
+ - lib/spark_engine/data.rb
  - lib/spark_engine/helpers/asset_helpers.rb
  - lib/spark_engine/helpers/block_helper.rb
  - lib/spark_engine/helpers/layout_helpers.rb
@@ -180,8 +166,12 @@ files:
  - lib/spark_engine/plugin/assets/svgs.rb
  - lib/spark_engine/sass/engine.rb
  - lib/spark_engine/sass/importer.rb
+ - lib/spark_engine/sass/sass_yaml.rb
+ - lib/spark_engine/sassc/importer.rb
  - lib/spark_engine/scaffold.rb
  - lib/spark_engine/scaffold/gem/.gitignore
+ - lib/spark_engine/scaffold/gem/Gemfile
+ - lib/spark_engine/scaffold/gem/app/assets/components/namespace/.keep
  - lib/spark_engine/scaffold/gem/app/assets/images/namespace/.keep
  - lib/spark_engine/scaffold/gem/app/assets/javascripts/namespace/engine.js
  - lib/spark_engine/scaffold/gem/app/assets/stylesheets/namespace/_index.scss
@@ -221,8 +211,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
    - !ruby/object:Gem::Version
      version: '0'
  requirements: []
- rubyforge_project:
- rubygems_version: 2.7.6
+ rubygems_version: 3.0.3
  signing_key:
  specification_version: 4
  summary: A design system framework for Rails (and humans).
@@ -1,29 +0,0 @@
- module SparkEngine
-   module ConfigData
-     extend self
-     def read(*roots)
-       @data ||= {}
-
-       data_files(*roots).each do |path|
-         name = File.basename(path, '.*')
-         case File.extname(path)
-         when '.json'
-           @data[name] = JSON.parse(File.read(path))
-         when '.yml'
-           @data[name] = YAML.load_file(path)
-         end
-       end
-
-       @data
-     end
-
-     def data_files(*roots)
-       files = []
-       [roots].flatten.each do |root|
-         files.concat Dir[File.join(root, 'config/data/**/*.json')]
-         files.concat Dir[File.join(root, 'config/data/**/*.yml')]
-       end
-       files.flatten.compact.uniq
-     end
-   end
- end