DatamapperRackStreamer 0.1

Sign up to get free protection for your applications and to get access to all the features.
data/LICENSE ADDED
@@ -0,0 +1,28 @@
1
+ == DatamapperRackStreamer
2
+ BSD Licence
3
+
4
+ Copyright (c) 2009, crealytics GmbH Germany
5
+ All rights reserved.
6
+
7
+ Redistribution and use in source and binary forms, with or without modification,
8
+ are permitted provided that the following conditions are met:
9
+
10
+ * Redistributions of source code must retain the above copyright notice,
11
+ this list of conditions and the following disclaimer.
12
+ * Redistributions in binary form must reproduce the above copyright notice,
13
+ this list of conditions and the following disclaimer in the documentation
14
+ and/or other materials provided with the distribution.
15
+ * Neither the name of the crealytics GmbH nor the names of its contributors
16
+ may be used to endorse or promote products derived from this software without
17
+ specific prior written permission.
18
+
19
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
20
+ ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21
+ WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
22
+ IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
23
+ INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
24
+ BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
26
+ LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
27
+ OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
28
+ OF THE POSSIBILITY OF SUCH DAMAGE.
data/README ADDED
@@ -0,0 +1,7 @@
1
+ == DatamapperRackStreamer
2
+
3
+ This project streams data from a database to a client via a web interface.
4
+ It uses DataMapper for data retrieval and a Sinatra/Rack combo for serving the interface.
5
+ The data is streamed as CSV.
6
+
7
+ For more information see demo project streamer_app and spec.
data/Rakefile ADDED
@@ -0,0 +1,37 @@
require 'rubygems'
require 'rake'
require 'rake/clean'
require 'rake/gempackagetask'
require 'rake/rdoctask'
require 'rake/testtask'

# Gem specification for the DatamapperRackStreamer library.
spec = Gem::Specification.new do |s|
  s.name             = 'DatamapperRackStreamer'
  s.version          = '0.1'
  s.has_rdoc         = true
  s.extra_rdoc_files = ['README', 'LICENSE']
  s.summary          = 'Fetches data via data_mapper and streams it as csv via rack'
  s.description      = s.summary
  s.author           = 'ugroebner'
  s.email            = 'ugroebner@crealytics.de'
  # s.executables = ['your_executable_here']
  s.files            = %w(LICENSE README Rakefile) + Dir.glob("{bin,lib,spec}/**/*")
  s.require_path     = "lib"
  s.bindir           = "bin"
end

# Package the gem plus tar and zip archives under pkg/.
Rake::GemPackageTask.new(spec) do |p|
  p.gem_spec = spec
  p.need_tar = true
  p.need_zip = true
end

# Generate RDoc into doc/rdoc, opening on the README page.
Rake::RDocTask.new do |rdoc|
  files = ['README', 'LICENSE', 'lib/**/*.rb']
  rdoc.rdoc_files.add(files)
  rdoc.main     = "README" # page to start on
  rdoc.title    = "DatamapperRackStreamer Docs"
  rdoc.rdoc_dir = 'doc/rdoc' # rdoc output folder
  rdoc.options << '--line-numbers'
end
@@ -0,0 +1,50 @@
require 'iconv'
require 'stringio'

# Streams records of a DataMapper model as UTF-16LE, tab-separated CSV.
#
# Records are loaded page-wise (+per_page+ ids per query) so that large
# result sets are never held in memory at once. Each yielded chunk is a
# fully encoded string: first the byte order mark, then the header row,
# then one CSV line per record.
class CsvStreamer
  include Enumerable

  # Little-endian UTF-16 byte order mark, emitted before any data.
  BYTE_ARRAY_UTF_BOM = [0xff, 0xfe].collect { |byte| byte.chr }.join
  UTF_16_LE_ICONV = Iconv.new('UTF-16LE', 'UTF-8')
  COLUMN_SEPARATOR = "\t"
  ROW_SEPARATOR = "\n"

  attr_reader :csv_sequence

  # model_class     - DataMapper model used to load the records.
  # ids             - ids of the records to stream, in output order.
  # csv_sequence    - field names per row; defaults to all properties except :id.
  # per_page        - number of records fetched per query.
  # filter_mappings - {field => "|val| expr"} strings turned into per-field filters.
  # block           - optional transformation applied to each loaded record.
  def initialize(model_class, ids, csv_sequence = nil, per_page = 1000, filter_mappings = {}, &block)
    @ids = ids
    @model_class = model_class
    @csv_sequence = csv_sequence || self.class.csv_sequence(@model_class)
    @per_page = per_page
    @filter_mappings = filter_mappings
    @block = block
  end

  # Default column order: every property of the model except :id.
  def self.csv_sequence(model_class)
    model_class.properties.collect { |p| p.name } - [:id]
  end

  # Encodes one row (an Array of values) as a UTF-16LE CSV line.
  def to_csv(array)
    UTF_16_LE_ICONV.iconv(array.join(COLUMN_SEPARATOR) + ROW_SEPARATOR)
  end

  # Yields the BOM, the header row, then one CSV line per record.
  def each
    elements_count = @ids.size
    page_counter = 0
    yield BYTE_ARRAY_UTF_BOM
    yield to_csv(@csv_sequence)
    # '<' (not '<=') so an id count that is an exact multiple of per_page
    # does not trigger a trailing query with an empty id list.
    while (page_counter * @per_page < elements_count) do
      elements = @model_class.all(:id => @ids[page_counter * @per_page, @per_page])
      elements.each do |element|
        element = @block.call(element) unless @block.nil?
        csv = to_csv(@csv_sequence.collect do |field|
          fieldval = element.send(field)
          if @filter_mappings.has_key?(field)
            # SECURITY: eval executes arbitrary code from the mapping string --
            # filter_mappings must never be populated from untrusted input.
            filterblock = eval "Proc.new {#{@filter_mappings[field]}}"
            fieldval = filterblock.call(fieldval)
          end
          fieldval
        end)
        # csv is already a plain string; no StringIO round-trip needed.
        yield csv
      end
      page_counter = page_counter + 1
    end
  end
end
@@ -0,0 +1,25 @@
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
require 'csv_streamer'

describe CsvStreamer do
  describe "with a block" do
    # Decodes the streamer's UTF-16LE chunks back to UTF-8 for assertions.
    UTF_8_ICONV = Iconv.new('UTF-8', 'UTF-16LE')

    before do
      @ids = [1, 4, 5]
      @model_class = Object.new
      # The stubs are evaluated lazily, so they see @ids at call time.
      stub(@model_class).all { @ids.collect { |id| OpenStruct.new(:value => id) } }
      stub(@model_class).properties { [OpenStruct.new(:name => :value)] }
    end

    it "should apply the block on each model element" do
      streamer = CsvStreamer.new(@model_class, @ids) { |elem| OpenStruct.new(:value => elem.value + 1) }
      # Skip the first two chunks (BOM and header), then decode each row.
      elements = streamer.collect { |elem| UTF_8_ICONV.iconv(elem).strip.to_i }[2..-1]
      elements.should == [2, 5, 6]
    end

    it "should apply filtermapping blocks on model elements" do
      filter_mappings = { :value => '|val| val + 1' }
      streamer = CsvStreamer.new(@model_class, @ids, nil, 100, filter_mappings)
      elements = streamer.collect { |elem| UTF_8_ICONV.iconv(elem).strip.to_i }[2..-1]
      elements.should == [2, 5, 6]
    end
  end
end
@@ -0,0 +1,35 @@
$LOAD_PATH << File.dirname(__FILE__) + '/../lib/'
$LOAD_PATH << File.dirname(__FILE__) + '/../demo/'

require "spec" # Satisfies Autotest and anyone else not using the Rake tasks
require 'rr'
require 'datamapper'
require 'ostruct'
require 'pp'
require 'demo/model/shop'
require 'demo/model/product'

# In-memory SQLite repository with debug logging for the demo models.
DataMapper::Logger.new(STDOUT, :debug)
DataMapper.setup(:products, 'sqlite3::memory:')
Shop.auto_migrate!
Product.auto_migrate!

Spec::Runner.configure do |config|
  config.mock_with RR::Adapters::Rspec

  # Open a transaction before each example ...
  config.before(:each) do
    repository(:products) do
      transaction = DataMapper::Transaction.new(repository)
      transaction.begin
      repository.adapter.push_transaction(transaction)
    end
  end

  # ... and roll everything back afterwards, keeping examples isolated.
  config.after(:each) do
    repository(:products) do
      while repository.adapter.current_transaction
        repository.adapter.current_transaction.rollback
        repository.adapter.pop_transaction
      end
    end
  end
end
@@ -0,0 +1,62 @@
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
require 'streamer_app' # <-- your sinatra app (loads Sinatra, providing `set`)
require 'spec'
require 'rack/test'
require 'rr'
require 'iconv'

set :environment, :test

describe StreamerApp do
  include Rack::Test::Methods

  # Rack::Test entry point: the Sinatra application under test.
  def app
    StreamerApp
  end

  before do
    @shops = [Shop.create(:name => 'shop'), Shop.create(:name => 'shop2')]
    @shop = @shops.first
    @products = [1, 2, 5].collect do |num|
      Product.create(:shop => @shop,
                     :id => num,
                     :title => "title #{num}",
                     :description => "description #{num}",
                     :price => num * 100
                    )
    end
    # Build the expected CSV body: header row plus one row per product ...
    csv_sequence = CsvStreamer.csv_sequence(Product)
    rows = ([csv_sequence] + @products.collect do |product|
      csv_sequence.collect do |field|
        product.send(field)
      end
    end)
    @csv = rows.collect { |values| values.join(CsvStreamer::COLUMN_SEPARATOR) }.join(CsvStreamer::ROW_SEPARATOR) + CsvStreamer::ROW_SEPARATOR
    # ... then encode as UTF-16LE and prepend the byte order mark,
    # matching CsvStreamer's on-the-wire format.
    utf_16_le_iconv = Iconv.new('UTF-16LE', 'UTF-8')
    @csv = [0xff, 0xfe].collect { |byte| byte.chr }.join + utf_16_le_iconv.iconv(@csv)
  end

  it "should respond to URLs like /demo/$/products.csv" do
    get '/demo/1/products.csv'
    last_response.should be_ok
  end

  it "should return csv for all products of the shop" do
    get '/demo/1/products.csv'
    last_response.body.should == @csv
  end

  ['/demo', '/demo/'].each do |shop_string|
    it "should list all shops when calling #{shop_string}" do
      get shop_string
      last_response.should be_ok
      @shops.each do |shop|
        last_response.body.should include "<a href='/demo/#{shop.id}/products.csv'>#{shop.name}</a>"
      end
    end
  end
end
metadata ADDED
@@ -0,0 +1,62 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: DatamapperRackStreamer
3
+ version: !ruby/object:Gem::Version
4
+ version: "0.1"
5
+ platform: ruby
6
+ authors:
7
+ - ugroebner
8
+ autorequire:
9
+ bindir: bin
10
+ cert_chain: []
11
+
12
+ date: 2010-01-11 00:00:00 +01:00
13
+ default_executable:
14
+ dependencies: []
15
+
16
+ description: Fetches data via data_mapper and streams it as csv via rack
17
+ email: ugroebner@crealytics.de
18
+ executables: []
19
+
20
+ extensions: []
21
+
22
+ extra_rdoc_files:
23
+ - README
24
+ - LICENSE
25
+ files:
26
+ - LICENSE
27
+ - README
28
+ - Rakefile
29
+ - lib/csv_streamer.rb
30
+ - spec/csv_streamer_spec.rb
31
+ - spec/spec_helper.rb
32
+ - spec/streamer_app_spec.rb
33
+ has_rdoc: true
34
+ homepage:
35
+ licenses: []
36
+
37
+ post_install_message:
38
+ rdoc_options: []
39
+
40
+ require_paths:
41
+ - lib
42
+ required_ruby_version: !ruby/object:Gem::Requirement
43
+ requirements:
44
+ - - ">="
45
+ - !ruby/object:Gem::Version
46
+ version: "0"
47
+ version:
48
+ required_rubygems_version: !ruby/object:Gem::Requirement
49
+ requirements:
50
+ - - ">="
51
+ - !ruby/object:Gem::Version
52
+ version: "0"
53
+ version:
54
+ requirements: []
55
+
56
+ rubyforge_project:
57
+ rubygems_version: 1.3.5
58
+ signing_key:
59
+ specification_version: 3
60
+ summary: Fetches data via data_mapper and streams it as csv via rack
61
+ test_files: []
62
+