sparkplug 2.0.0

data/.document ADDED
@@ -0,0 +1,5 @@
+ README.rdoc
+ lib/**/*.rb
+ bin/*
+ features/**/*.feature
+ LICENSE
data/.gitignore ADDED
@@ -0,0 +1,7 @@
+ *.sw?
+ .DS_Store
+ coverage
+ rdoc
+ pkg
+ test/data
+ demos/simple/public/sparks/*
data/LICENSE ADDED
@@ -0,0 +1,20 @@
+ Copyright (c) 2009 rick
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.rdoc ADDED
@@ -0,0 +1,35 @@
+ = sparkplug
+
+ Dynamically generates sparkline graphs from a set of numbers. This is done
+ primarily through Handlers and Cachers. Handlers know how to fetch the data,
+ and Cachers know how to cache the generated PNG sparkline for future requests.
+
+   pub_dir   = File.expand_path(File.join(File.dirname(__FILE__), 'public'))
+   data_dir  = File.join(pub_dir, 'temps')
+   cache_dir = File.join(pub_dir, 'sparks')
+
+   use Sparkplug, :prefix  => 'sparks',
+     :handler => Sparkplug::Handlers::CsvData.new(data_dir),
+     :cacher  => Sparkplug::Cachers::Filesystem.new(cache_dir)
+
+ * An incoming request hits your Rack application at "/sparks/foo/stats.csv".
+ * The CSV Handler gets 'foo/stats.csv' and checks for this file in its data
+   directory. It parses the first row of numbers as the set of points to plot.
+ * The Filesystem Cacher checks for a more recent cache. Failing that, it
+   generates the PNG graph and writes it to the cache directory.
+
+ Mix and match your own handlers and cachers with your friends!
+
+ == Demo
+
+ See demos/simple/sparkplug_demo.rb or http://rack-sparklines.heroku.com/
+
+ == Codes
+
+   gem install sparkplug
+
+ http://github.com/technoweenie/sparkplug
+
+ == Copyright
+
+ Copyright (c) 2009 rick. See LICENSE for details.
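The README wires CsvData to the Filesystem cacher; any handler/cacher pair shipped in lib/sparkplug composes the same way. As a minimal config.ru sketch using the in-memory cacher instead (the 'data' directory and the one-hour cache_time are arbitrary choices for illustration, not defaults):

  # config.ru -- a sketch, assuming the classes defined later in this diff
  require 'sparkplug'
  require 'sparkplug/handlers/csv_data'
  require 'sparkplug/cachers/memory'

  use Sparkplug, :prefix  => 'sparks',
    :handler => Sparkplug::Handlers::CsvData.new('data'),
    :cacher  => Sparkplug::Cachers::Memory.new(3600)  # keep PNGs for an hour

  run lambda { |env| [200, {'Content-Type' => 'text/plain'}, ['hi']] }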
data/Rakefile ADDED
@@ -0,0 +1,56 @@
+ require 'rubygems'
+ require 'rake'
+
+ begin
+   require 'jeweler'
+   Jeweler::Tasks.new do |gem|
+     gem.name = "sparkplug"
+     gem.summary = %Q{Rack module that dynamically generates sparkline graphs from a set of numbers.}
+     gem.email = "technoweenie@gmail.com"
+     gem.homepage = "http://github.com/technoweenie/sparkplug"
+     gem.authors = ["rick"]
+     # gem is a Gem::Specification... see http://www.rubygems.org/read/chapter/20 for additional settings
+   end
+
+ rescue LoadError
+   puts "Jeweler (or a dependency) not available. Install it with: sudo gem install jeweler"
+ end
+
+ require 'rake/testtask'
+ Rake::TestTask.new(:test) do |test|
+   test.libs << 'lib' << 'test'
+   test.pattern = 'test/**/*_test.rb'
+   test.verbose = true
+ end
+
+ begin
+   require 'rcov/rcovtask'
+   Rcov::RcovTask.new do |test|
+     test.libs << 'test'
+     test.pattern = 'test/**/*_test.rb'
+     test.verbose = true
+   end
+ rescue LoadError
+   task :rcov do
+     abort "RCov is not available. In order to run rcov, you must: sudo gem install spicycode-rcov"
+   end
+ end
+
+
+ task :default => :test
+
+ require 'rake/rdoctask'
+ Rake::RDocTask.new do |rdoc|
+   if File.exist?('VERSION.yml')
+     config = YAML.load(File.read('VERSION.yml'))
+     version = "#{config[:major]}.#{config[:minor]}.#{config[:patch]}"
+   else
+     version = ""
+   end
+
+   rdoc.rdoc_dir = 'rdoc'
+   rdoc.title = "sparkplug #{version}"
+   rdoc.rdoc_files.include('README*')
+   rdoc.rdoc_files.include('lib/**/*.rb')
+ end
+
data/VERSION ADDED
@@ -0,0 +1 @@
+ 2.0.0
data/demos/simple/public/temps/portland/2007.csv ADDED
@@ -0,0 +1 @@
+ 35.7,47.2,50,42.4,38.8,43.9,46.9,46,46.9,39.6,34.3,28.9,27.5,27.1,28.5,28.5,30.8,34.9,35.6,39.3,38.1,42,43,39.2,37.2,40,39.5,38,37,37.3,40,34.7,36.4,35.1,42
data/demos/simple/sparkplug_demo.rb ADDED
@@ -0,0 +1,29 @@
+ $LOAD_PATH << File.join(File.dirname(__FILE__), '..', '..', 'lib')
+ require 'rubygems'
+ require 'sinatra'
+
+ require 'sparkplug'
+ require 'sparkplug/handlers/csv_data'
+ require 'sparkplug/cachers/filesystem'
+
+ pub_dir = File.expand_path(File.join(File.dirname(__FILE__), 'public'))
+ use Sparkplug, :prefix  => 'sparks',
+   :handler => Sparkplug::Handlers::CsvData.new(File.join(pub_dir, 'temps')),
+   :cacher  => Sparkplug::Cachers::Filesystem.new(File.join(pub_dir, 'sparks'))
+
+ get '/' do
+   @body = $readme
+   erb :readme
+ end
+
+ def simple_format(text)
+   start_tag = "<p>"
+   text = text.to_s.dup
+   text.gsub!(/\r\n?/, "\n")                    # \r\n and \r -> \n
+   text.gsub!(/\n\n+/, "</p>\n\n#{start_tag}")  # 2+ newlines -> paragraph
+   text.gsub!(/([^\n]\n)(?=[^\n])/, '\1<br />') # 1 newline -> br
+   text.insert 0, start_tag
+   text << "</p>"
+ end
+
+ $readme = simple_format(IO.read(File.join(File.dirname(__FILE__), '..', '..', 'README.rdoc')))
data/demos/simple/views/readme.erb ADDED
@@ -0,0 +1,19 @@
+ <html>
+   <head>
+     <meta http-equiv="Content-type" content="text/html; charset=utf-8">
+     <title>Sparkplug - Ruby Rack module for generating sparkline graphs on the fly</title>
+     <style type="text/css" media="screen">
+       h1, div, p {
+         font-family: verdana;
+       }
+     </style>
+   </head>
+   <body>
+     <h1>Sparkplug</h1>
+     <div>
+       <img src="/sparks/portland/2007.csv.png" />
+     </div>
+     <p>(if you can see this, the rack module works!)</p>
+     <%= @body %>
+   </body>
+ </html>
data/lib/spark_pr.rb ADDED
@@ -0,0 +1,231 @@
+ # pure ruby sparklines module, generates PNG or ASCII
+ # contact thomas@fesch.at for questions
+ #
+ # strives to be somewhat compatible with the sparklines lib by
+ # {Dan Nugent}[mailto:nugend@gmail.com] and {Geoffrey Grosenbach}[mailto:boss@topfunky.com]
+ #
+ # png creation based on http://www.whytheluckystiff.net/bumpspark/
+
+ class SparkCanvas
+   require 'zlib'
+
+   attr_accessor :color
+   attr_reader :width, :height
+
+   def initialize(width, height)
+     @canvas = []
+     @height = height
+     @width = width
+     height.times { @canvas << [[0xFF, 0xFF, 0xFF]] * width }
+     @color = [0, 0, 0, 0xFF] # RGBA
+   end
+
+   # alpha blends two colors, using the alpha given by c2
+   def blend(c1, c2)
+     (0..2).map { |i| (c1[i] * (0xFF - c2[3]) + c2[i] * c2[3]) >> 8 }
+   end
+
+   # calculate a new alpha given a 0-0xFF intensity
+   def intensity(c, i)
+     [c[0], c[1], c[2], (c[3] * i) >> 8]
+   end
+
+   # calculate perceptive grayscale value
+   def grayscale(c)
+     (c[0] * 0.3 + c[1] * 0.59 + c[2] * 0.11).to_i
+   end
+
+   def point(x, y, color = nil)
+     return if x < 0 or y < 0 or x > @width - 1 or y > @height - 1
+     @canvas[y][x] = blend(@canvas[y][x], color || @color)
+   end
+
+   def rectangle(x0, y0, x1, y1)
+     x0, y0, x1, y1 = x0.to_i, y0.to_i, x1.to_i, y1.to_i
+     x0, x1 = x1, x0 if x0 > x1
+     y0, y1 = y1, y0 if y0 > y1
+     x0.upto(x1) { |x| y0.upto(y1) { |y| point x, y } }
+   end
+
+   # draw an antialiased line
+   # google for "wu antialiasing"
+   def line(x0, y0, x1, y1)
+     # clean params
+     x0, y0, x1, y1 = x0.to_i, y0.to_i, x1.to_i, y1.to_i
+     y0, y1, x0, x1 = y1, y0, x1, x0 if y0 > y1
+     sx = (dx = x1 - x0) < 0 ? -1 : 1 ; dx *= sx ; dy = y1 - y0
+
+     # special cases
+     x0.step(x1, sx) { |x| point x, y0 } and return if dy.zero?
+     y0.upto(y1) { |y| point x0, y } and return if dx.zero?
+     x0.step(x1, sx) { |x| point x, y0; y0 += 1 } and return if dx == dy
+
+     # main loops
+     point x0, y0
+
+     e_acc = 0
+     if dy > dx
+       e = (dx << 16) / dy
+       y0.upto(y1 - 1) do
+         e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
+         x0 += sx if (e_acc <= e_acc_temp)
+         point x0, (y0 += 1), intensity(@color, (w = 0xFF - (e_acc >> 8)))
+         point x0 + sx, y0, intensity(@color, (0xFF - w))
+       end
+       point x1, y1
+       return
+     end
+
+     e = (dy << 16) / dx
+     x0.step(x1 - sx, sx) do
+       e_acc_temp, e_acc = e_acc, (e_acc + e) & 0xFFFF
+       y0 += 1 if (e_acc <= e_acc_temp)
+       point (x0 += sx), y0, intensity(@color, (w = 0xFF - (e_acc >> 8)))
+       point x0, y0 + 1, intensity(@color, (0xFF - w))
+     end
+     point x1, y1
+   end
+
+   def polyline(arr)
+     (0...arr.size - 1).each { |i| line(arr[i][0], arr[i][1], arr[i + 1][0], arr[i + 1][1]) }
+   end
+
+   def to_png
+     header = [137, 80, 78, 71, 13, 10, 26, 10].pack("C*")
+     raw_data = @canvas.map { |row| [0] + row }.flatten.pack("C*")
+     ihdr_data = [@canvas.first.length, @canvas.length, 8, 2, 0, 0, 0].pack("NNCCCCC")
+
+     header +
+       build_png_chunk("IHDR", ihdr_data) +
+       build_png_chunk("tRNS", ([0xFF] * 6).pack("C6")) +
+       build_png_chunk("IDAT", Zlib::Deflate.deflate(raw_data)) +
+       build_png_chunk("IEND", "")
+   end
+
+   def build_png_chunk(type, data)
+     to_check = type + data
+     [data.length].pack("N") + to_check + [Zlib.crc32(to_check)].pack("N")
+   end
+
+   def to_ascii
+     chr = %w(M O # + ; - .) << ' '
+     @canvas.map { |r| r.map { |pt| chr[grayscale(pt) >> 5] }.to_s << "\n" }.to_s
+   end
+
+ end
+
+ module Spark
+   # normalize arr to contain values between 0..1 inclusive
+   def Spark.normalize( arr, type = :linear )
+     arr.map! { |v| Math.log(v) } if type == :logarithmic
+     adj, fac = arr.min, arr.max - arr.min
+     arr.map do |v|
+       v = (v - adj).quo(fac) rescue 0
+       v = 0 if v.respond_to?(:nan?) && v.nan?
+       v
+     end
+   end
+
+   def Spark.process_options( options )
+     o = options.inject({}) do |o, (key, value)|
+       o[key.to_sym] = value ; o
+     end
+     [:height, :width, :step].each do |k|
+       o[k] = o[k].to_i if o.has_key?(k)
+     end
+     [:has_min, :has_max, :has_last].each do |k|
+       o[k] = (o[k] ? true : false) if o.has_key?(k)
+     end
+     o[:normalize] ||= :linear
+     o[:normalize] = o[:normalize].to_sym
+     o
+   end
+
+   def Spark.smooth( results, options = {} )
+     options = self.process_options(options)
+     o = {
+       :step => 2,
+       :height => 14,
+       :has_min => false,
+       :has_max => false
+     }.merge(options)
+
+     o[:width] ||= (results.size - 1) * o[:step] + 5
+
+     c = SparkCanvas.new(o[:width], o[:height])
+
+     results = Spark.normalize(results, o[:normalize])
+     fac = c.height - 5
+     i = -o[:step]
+     coords = results.map do |r|
+       [(i += o[:step]) + 2, c.height - 3 - r * fac]
+     end
+
+     c.color = [0xB0, 0xB0, 0xB0, 0xFF]
+     c.polyline coords
+
+     if o[:has_min]
+       min_pt = coords[results.index(results.min)]
+       c.color = [0x80, 0x80, 0x00, 0x70]
+       c.rectangle(min_pt[0] - 2, min_pt[1] - 2, min_pt[0] + 2, min_pt[1] + 2)
+     end
+
+     if o[:has_max]
+       max_pt = coords[results.index(results.max)]
+       c.color = [0x00, 0x80, 0x00, 0x70]
+       c.rectangle(max_pt[0] - 2, max_pt[1] - 2, max_pt[0] + 2, max_pt[1] + 2)
+     end
+
+     if o[:has_last]
+       c.color = [0xFF, 0x00, 0x00, 0x70]
+       c.rectangle(coords.last[0] - 2, coords.last[1] - 2, coords.last[0] + 2, coords.last[1] + 2)
+     end
+
+     c
+   end
+
+   def Spark.discrete( results, options = {} )
+     options = self.process_options(options)
+     o = {
+       :height => 14,
+       :upper => 0.5,
+       :has_min => false,
+       :has_max => false
+     }.merge(options)
+
+     o[:width] ||= results.size * 2 - 1
+
+     c = SparkCanvas.new(o[:width], o[:height])
+
+     results = Spark.normalize(results, o[:normalize])
+     fac = c.height - 4
+
+     i = -2
+     results.each do |r|
+       p = c.height - 4 - r * fac
+       c.color = r < o[:upper] ? [0x66, 0x66, 0x66, 0xFF] : [0xFF, 0x00, 0x00, 0xFF]
+       c.line(i += 2, p, i, p + 3)
+     end
+
+     c
+   end
+
+   # convenience method
+   def Spark.plot( results, options = {} )
+     options = self.process_options(options)
+     options[:type] ||= 'smooth'
+     self.send(options[:type], results, options).to_png
+   end
+ end
+
+ if $0 == __FILE__
+   # to test this:
+   # PNG output
+   File.open( 'test.png', 'wb' ) do |png|
+     png << Spark.plot( [47, 43, 24, 47, 16, 28, 38, 57, 50, 76, 42, 20, 98, 34, 53, 1, 55, 74, 63, 38, 31, 98, 89], :has_min => true, :has_max => true, 'has_last' => 'true', 'height' => '40', :step => 10, :normalize => 'logarithmic' )
+   end
+
+   # ASCII output
+   puts Spark.discrete( [47, 43, 24, 47, 16, 28, 38, 57, 50, 76, 42, 1, 98, 34, 53, 97, 55, 74, 63, 38, 31, 98, 89], :has_min => true, :has_max => true, :height => 14, :step => 5 ).to_ascii
+   puts Spark.smooth( [47, 43, 24, 47, 16, 28, 38, 57, 50, 76, 42, 1, 98, 34, 53, 97, 55, 74, 63, 38, 31, 98, 89], :has_min => true, :has_max => true, :height => 14, :step => 4 ).to_ascii
+ end
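A quick worked sketch of the Spark API above: with the default :linear type, normalize rescales each value v to (v - min) / (max - min), so the smallest point lands at 0 and the largest at 1, and plot composes process_options, normalize, and a SparkCanvas into raw PNG bytes. The file name 'tiny.png' is an arbitrary choice:

  require 'spark_pr'

  Spark.normalize([10, 15, 20])  # => values from 0 to 1 (here 0, 0.5, 1)
  png = Spark.plot([1, 2, 3], :height => 14, :has_last => true)
  File.open('tiny.png', 'wb') { |f| f << png }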
data/lib/sparkplug.rb ADDED
@@ -0,0 +1,48 @@
+ require 'spark_pr'
+
+ # Render sparkline graphs dynamically from datapoints in a matching CSV file
+ # (or anything that there is a Handler for).
+ class Sparkplug
+   DEFAULT_SPARK_OPTIONS = {:has_min => true, :has_max => true, :height => 40, :step => 10}
+
+   # Options:
+   #   :spark   - Hash of sparkline options. See spark_pr.rb
+   #   :prefix  - URL prefix for handled requests. Setting it to "/sparks"
+   #              treats requests like "/sparks/stats.csv" as dynamic sparklines.
+   #   :cacher  - Cachers know how to store and stream sparkline PNG data.
+   #   :handler - Handler instances know how to fetch data and pass them
+   #              to the Sparklines library.
+   def initialize(app, options = {})
+     @app, @options = app, options
+     @options[:spark] = DEFAULT_SPARK_OPTIONS.merge(@options[:spark] || {})
+   end
+
+   def call(env)
+     dup._call(env)
+   end
+
+   def _call(env)
+     if env['PATH_INFO'][@options[:prefix]] == @options[:prefix]
+       @data_path = env['PATH_INFO'][@options[:prefix].size + 1..-1]
+       @data_path.sub!(/\.png$/, '')
+       @png_path = @data_path + ".png"
+       @cacher = @options[:cacher].set(@png_path)
+       @handler = @options[:handler].set(@data_path)
+       if !@handler.exists?
+         return @app.call(env)
+       end
+       if !@handler.already_cached?(@cacher)
+         @handler.fetch do |data|
+           @cacher.save(data, @options[:spark])
+         end
+       end
+       @cacher.serve(self)
+     else
+       @app.call(env)
+     end
+   end
+
+   def each
+     @cacher.stream { |part| yield part }
+   end
+ end
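Two Rack idioms in this file deserve a note. call dispatches to a dup of the middleware, so per-request state (@cacher, @handler, the paths) never bleeds between concurrent requests. And serve(self) hands the middleware instance back as the response body: Rack only requires a body to respond to each, so the server ends up pulling PNG chunks straight from the cacher. A sketch of what a server effectively does with the returned triple (inner_app, env, and write are placeholders, not gem API):

  status, headers, body = Sparkplug.new(inner_app, :prefix => 'sparks',
    :handler => handler, :cacher => cacher).call(env)
  body.each { |chunk| write(chunk) }  # each -> @cacher.stream -> PNG bytes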
data/lib/sparkplug/cachers/abstract.rb ADDED
@@ -0,0 +1,57 @@
+ require 'sparkplug'
+ require 'time'
+
+ class Sparkplug
+   module Cachers
+     # Abstract base class for cachers. Cachers know how to store generated
+     # sparkline PNG data and stream it back for future requests.
+     class Abstract
+       attr_accessor :png_path
+
+       def initialize
+         @size, @updated_at = nil
+       end
+
+       # Setting the png_path returns a duplicate of this object that keeps any
+       # custom instance variables (configuration settings, for example).
+       def set(png_path)
+         cacher = dup
+         cacher.png_path = png_path
+         cacher
+       end
+
+       def size
+         raise NotImplementedError
+       end
+
+       def exists?
+         raise NotImplementedError
+       end
+
+       def updated_at
+         raise NotImplementedError
+       end
+
+       def create_sparklines(data, options)
+         Spark.plot(data, options)
+       end
+
+       def serve(app, headers = {})
+         headers = {
+           "Last-Modified"  => updated_at.rfc822,
+           "Content-Type"   => "image/png",
+           "Content-Length" => size.to_s
+         }.update(headers)
+         [200, headers, app]
+       end
+
+       def save(data, options)
+         raise NotImplementedError
+       end
+
+       def stream
+         raise NotImplementedError
+       end
+     end
+   end
+ end
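A third-party cacher only has to fill in the storage details behind this interface, as the Filesystem and Memory cachers below do. As a sketch, a hypothetical HashCacher that keeps PNGs in a shared hash (illustration only, not part of the gem):

  require 'sparkplug/cachers/abstract'

  class HashCacher < Sparkplug::Cachers::Abstract
    def initialize(store = {})
      @store = store  # png_path => [png bytes, mtime]; shared across dups
      super()
    end

    def exists?;    @store.key?(png_path)    end
    def size;       @store[png_path][0].size end
    def updated_at; @store[png_path][1]      end

    def save(data, options)
      @store[png_path] = [create_sparklines(data, options), Time.now.utc]
    end

    def stream
      yield @store[png_path][0]
    end
  end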
data/lib/sparkplug/cachers/filesystem.rb ADDED
@@ -0,0 +1,49 @@
+ require 'sparkplug/cachers/abstract'
+ require 'fileutils'
+
+ class Sparkplug
+   module Cachers
+     # Caches generated sparkline PNGs as files in the given directory.
+     # Requests for "/sparks/stats.csv.png" are cached at "stats.csv.png".
+     class Filesystem < Abstract
+       attr_accessor :directory
+
+       def initialize(directory)
+         @directory = directory
+         super()
+       end
+
+       def png_path=(s)
+         @cache_file = File.join(@directory, s)
+         @png_path = s
+       end
+
+       def size
+         @size ||= File.size(@cache_file)
+       end
+
+       def exists?
+         File.file?(@cache_file)
+       end
+
+       def updated_at
+         @updated_at ||= File.mtime(@cache_file)
+       end
+
+       def save(data, options)
+         FileUtils.mkdir_p(File.dirname(@cache_file))
+         File.open(@cache_file, 'wb') do |png|
+           png << create_sparklines(data, options)
+         end
+       end
+
+       def stream
+         ::File.open(@cache_file, "rb") do |file|
+           while part = file.read(8192)
+             yield part
+           end
+         end
+       end
+     end
+   end
+ end
1
+ require 'sparkplug/cachers/abstract'
2
+ require 'fileutils'
3
+
4
+ class Sparkplug
5
+ module Cachers
6
+ # Reads sparkline data from CSV files. Only the first line of numbers are
7
+ # read. Requests for "/sparks/stats.csv" will pass a data_path of "stats.csv"
8
+ class Memory < Abstract
9
+ attr_accessor :sparklines, :cache_time
10
+
11
+ def initialize(cache_time = 86400)
12
+ @cache_time = cache_time
13
+ super()
14
+ end
15
+
16
+ def size
17
+ @sparklines ? @sparklines.size : 0
18
+ end
19
+
20
+ def exists?
21
+ @sparklines
22
+ end
23
+
24
+ def updated_at
25
+ Time.now.utc
26
+ end
27
+
28
+ def save(data, options)
29
+ @sparklines = create_sparklines(data, options)
30
+ end
31
+
32
+ def stream
33
+ yield @sparklines
34
+ end
35
+
36
+ def serve(app, headers = {})
37
+ headers['Cache-Control'] = "public, max-age=#{@cache_time}"
38
+ super(app, headers)
39
+ end
40
+ end
41
+ end
42
+ end
@@ -0,0 +1,38 @@
1
+ require 'sparkplug'
2
+
3
+ class Sparkplug
4
+ module Handlers
5
+ # Abstract class for retrieving the data and determining whether the cache
6
+ # needs to be refreshed.
7
+ class AbstractData
8
+ attr_accessor :data_path
9
+
10
+ # Setting the data_path returns a duplicate of this object that has any
11
+ # custom instance variables (configuration settings, for example).
12
+ def set(data_path)
13
+ data = dup
14
+ data.data_path = data_path
15
+ data
16
+ end
17
+
18
+ def already_cached?(cacher)
19
+ if cache_time = cacher.exists? && cacher.updated_at
20
+ cache_time > updated_at
21
+ end
22
+ end
23
+
24
+ def exists?
25
+ false
26
+ end
27
+
28
+ def updated_at
29
+ raise NotImplementedError
30
+ end
31
+
32
+ # Yield an array of numbers for sparkline datapoints.
33
+ def fetch
34
+ raise NotImplementedError
35
+ end
36
+ end
37
+ end
38
+ end
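A custom handler only needs to answer exists?, updated_at, and fetch (plus data_path= if it wants to resolve the path). As a sketch, a hypothetical handler that serves constant data and lets the cache stay fresh forever (purely illustrative, not shipped with the gem):

  require 'sparkplug/handlers/abstract_data'

  class ConstantData < Sparkplug::Handlers::AbstractData
    def exists?
      true                       # handle every path under the prefix
    end

    def updated_at
      Time.at(0)                 # data never changes, so any cache wins
    end

    def fetch
      yield [1, 2, 3, 5, 8, 13]  # datapoints for every request
    end
  end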
data/lib/sparkplug/handlers/csv_data.rb ADDED
@@ -0,0 +1,31 @@
+ require 'sparkplug/handlers/abstract_data'
+
+ module Sparkplug::Handlers
+   # Reads sparkline data from CSV files. Only the first line of numbers is
+   # read. Requests for "/sparks/stats.csv" will pass a data_path of "stats.csv"
+   class CsvData < AbstractData
+     attr_accessor :directory
+
+     def initialize(directory)
+       @directory = directory
+     end
+
+     def data_path=(s)
+       @data_path = s ? File.join(@directory, s) : nil
+     end
+
+     def exists?
+       File.exist?(@data_path)
+     end
+
+     def updated_at
+       File.mtime(@data_path)
+     end
+
+     def fetch
+       array_of_nums = IO.read(@data_path).split("\n").first.split(",")
+       array_of_nums.map! { |n| n.to_i }
+       yield array_of_nums
+     end
+   end
+ end
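Note that fetch coerces with to_i, so fractional values like those in the demo's 2007.csv are truncated to integers before plotting; swap in to_f if the precision matters to you. Standalone usage looks like this (a sketch; 'data' stands in for a directory containing stats.csv):

  handler = Sparkplug::Handlers::CsvData.new('data').set('stats.csv')
  handler.fetch { |nums| puts nums.inspect }  # e.g. [35, 47, 50, ...] after to_i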
data/lib/sparkplug/handlers/stubbed_data.rb ADDED
@@ -0,0 +1,36 @@
+ require 'sparkplug/handlers/abstract_data'
+
+ module Sparkplug::Handlers
+   # Allows you to stub sparkline data in a hash. Requests for
+   # "/sparks/stats.csv" will pass a data_path of "stats.csv"
+   class StubbedData < AbstractData
+     # A hash of hashes where the key is the filename. Each key points to
+     # a hash with :updated and :contents keys:
+     #
+     #   StubbedData.new('stats.csv' => {
+     #     :updated  => Time.utc(2009, 10, 1),
+     #     :contents => [1, 2, 3, 4, 5]})
+     attr_accessor :datasets
+
+     def initialize(datasets = {})
+       @datasets = datasets
+     end
+
+     def data_path=(s)
+       @data = @datasets[s]
+       @data_path = s
+     end
+
+     def exists?
+       @data
+     end
+
+     def updated_at
+       @data[:updated]
+     end
+
+     def fetch
+       yield @data[:contents] if @data
+     end
+   end
+ end
data/sparkplug.gemspec ADDED
@@ -0,0 +1,54 @@
+ # -*- encoding: utf-8 -*-
+
+ Gem::Specification.new do |s|
+   s.name = %q{sparkplug}
+   s.version = "2.0.0"
+
+   s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
+   s.authors = ["rick"]
+   s.date = %q{2009-11-01}
+   s.email = %q{technoweenie@gmail.com}
+   s.extra_rdoc_files = [
+     "LICENSE",
+     "README.rdoc"
+   ]
+   s.files = [
+     ".document",
+     ".gitignore",
+     "LICENSE",
+     "README.rdoc",
+     "Rakefile",
+     "VERSION",
+     "demos/simple/public/temps/portland/2007.csv",
+     "demos/simple/sparkplug_demo.rb",
+     "demos/simple/views/readme.erb",
+     "lib/spark_pr.rb",
+     "lib/sparkplug.rb",
+     "lib/sparkplug/cachers/abstract.rb",
+     "lib/sparkplug/cachers/filesystem.rb",
+     "lib/sparkplug/cachers/memory.rb",
+     "lib/sparkplug/handlers/abstract_data.rb",
+     "lib/sparkplug/handlers/csv_data.rb",
+     "lib/sparkplug/handlers/stubbed_data.rb",
+     "sparkplug.gemspec",
+     "test/sparkplug_test.rb"
+   ]
+   s.homepage = %q{http://github.com/technoweenie/sparkplug}
+   s.rdoc_options = ["--charset=UTF-8"]
+   s.require_paths = ["lib"]
+   s.rubygems_version = %q{1.3.4}
+   s.summary = %q{Rack module that dynamically generates sparkline graphs from a set of numbers.}
+   s.test_files = [
+     "test/sparkplug_test.rb"
+   ]
+
+   if s.respond_to? :specification_version then
+     current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
+     s.specification_version = 3
+
+     if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
+     else
+     end
+   else
+   end
+ end
data/test/sparkplug_test.rb ADDED
@@ -0,0 +1,94 @@
+ require 'rubygems'
+ require 'test/unit'
+
+ $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
+ $LOAD_PATH.unshift(File.dirname(__FILE__))
+ require 'rack'
+ require 'rack/test'
+ require 'sparkplug'
+ require 'sparkplug/handlers/stubbed_data'
+ require 'sparkplug/handlers/csv_data'
+ require 'sparkplug/cachers/filesystem'
+ require 'sparkplug/cachers/memory'
+
+ class SparkplugTest < Test::Unit::TestCase
+   include Rack::Test::Methods
+
+   $data_dir = File.join(File.dirname(__FILE__), 'data')
+   $stubbed_data = [47, 43, 24, 47, 16, 28, 38, 57, 50, 76, 42, 20, 98, 34, 53, 1, 55, 74, 63, 38, 31, 98, 89]
+   FileUtils.rm_rf $data_dir
+   FileUtils.mkdir_p $data_dir
+   File.open File.join($data_dir, 'stats.csv'), 'wb' do |csv|
+     csv << $stubbed_data.join(",")
+   end
+   sleep 1 # so that the timestamps don't match in the cache check test below
+
+   def app
+     Sparkplug.new \
+       Proc.new { |env| [200, {"Content-Type" => "text/html"}, "booya"] },
+       :handler => Sparkplug::Handlers::StubbedData.new('stats.csv' => {:updated => Time.utc(2009, 1, 1), :contents => $stubbed_data.dup}),
+       :cacher  => Sparkplug::Cachers::Filesystem.new($data_dir),
+       :prefix  => '/sparks'
+   end
+
+   def setup
+     @stats_png = File.join($data_dir, 'stats.csv.png')
+     FileUtils.rm_rf @stats_png
+   end
+
+   def test_creates_png_from_csv_request
+     assert !File.exist?(@stats_png)
+     get "/sparks/stats.csv.png"
+     assert File.exist?(@stats_png)
+     assert File.size(@stats_png) > 0
+     assert_equal IO.read(@stats_png), last_response.body
+   end
+
+   def test_leaves_recent_cached_png
+     FileUtils.touch(@stats_png)
+     get "/sparks/stats.csv.png"
+     assert_equal '', last_response.body
+     assert_equal 0, File.size(@stats_png)
+   end
+
+   def test_lets_other_requests_fallthrough
+     assert !File.exist?(@stats_png)
+     get "/spark/stats.csv.png"
+     assert_equal 'booya', last_response.body
+     assert !File.exist?(@stats_png)
+   end
+
+   def test_passes_missing_data_requests_through
+     get "/sparks/404.csv.png"
+     assert_equal 'booya', last_response.body
+   end
+ end
+
+ class SparkplugCSVTest < SparkplugTest
+   def app
+     Sparkplug.new \
+       Proc.new { |env| [200, {"Content-Type" => "text/html"}, "booya"] },
+       :handler => Sparkplug::Handlers::CsvData.new($data_dir),
+       :cacher  => Sparkplug::Cachers::Filesystem.new($data_dir),
+       :prefix  => '/sparks'
+   end
+ end
+
+ class SparkplugMemoryTest < SparkplugTest
+   def app
+     Sparkplug.new \
+       Proc.new { |env| [200, {"Content-Type" => "text/html"}, "booya"] },
+       :handler => Sparkplug::Handlers::StubbedData.new('stats.csv' => {:updated => Time.utc(2009, 1, 1), :contents => $stubbed_data.dup}),
+       :cacher  => Sparkplug::Cachers::Memory.new,
+       :prefix  => '/sparks'
+   end
+
+   def test_creates_png_from_csv_request
+     get "/sparks/stats.csv.png"
+     assert_equal 1503, last_response.body.size
+   end
+
+   def test_leaves_recent_cached_png
+     # useless test for memory cacher
+   end
+ end
metadata ADDED
@@ -0,0 +1,74 @@
+ --- !ruby/object:Gem::Specification
+ name: sparkplug
+ version: !ruby/object:Gem::Version
+   version: 2.0.0
+ platform: ruby
+ authors:
+ - rick
+ autorequire:
+ bindir: bin
+ cert_chain: []
+
+ date: 2009-11-01 00:00:00 -07:00
+ default_executable:
+ dependencies: []
+
+ description:
+ email: technoweenie@gmail.com
+ executables: []
+
+ extensions: []
+
+ extra_rdoc_files:
+ - LICENSE
+ - README.rdoc
+ files:
+ - .document
+ - .gitignore
+ - LICENSE
+ - README.rdoc
+ - Rakefile
+ - VERSION
+ - demos/simple/public/temps/portland/2007.csv
+ - demos/simple/sparkplug_demo.rb
+ - demos/simple/views/readme.erb
+ - lib/spark_pr.rb
+ - lib/sparkplug.rb
+ - lib/sparkplug/cachers/abstract.rb
+ - lib/sparkplug/cachers/filesystem.rb
+ - lib/sparkplug/cachers/memory.rb
+ - lib/sparkplug/handlers/abstract_data.rb
+ - lib/sparkplug/handlers/csv_data.rb
+ - lib/sparkplug/handlers/stubbed_data.rb
+ - sparkplug.gemspec
+ - test/sparkplug_test.rb
+ has_rdoc: true
+ homepage: http://github.com/technoweenie/sparkplug
+ licenses: []

+ post_install_message:
+ rdoc_options:
+ - --charset=UTF-8
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: "0"
+   version:
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: "0"
+   version:
+ requirements: []
+
+ rubyforge_project:
+ rubygems_version: 1.3.4
+ signing_key:
+ specification_version: 3
+ summary: Rack module that dynamically generates sparkline graphs from a set of numbers.
+ test_files:
+ - test/sparkplug_test.rb