taps 0.2.26 → 0.3.0

data/lib/taps/schema.rb CHANGED
@@ -1,93 +1,75 @@
- require 'active_record'
- require 'active_support'
- require 'stringio'
- require 'uri'
+ require 'sequel'
+ require 'sequel/extensions/schema_dumper'
+ require 'sequel/extensions/migration'
 
- require File.dirname(__FILE__) + '/adapter_hacks'
+ require 'json'
 
  module Taps
  module Schema
    extend self
 
-   def create_config(url)
-     uri = URI.parse(url)
-     adapter = uri.scheme
-     adapter = 'postgresql' if adapter == 'postgres'
-     adapter = 'sqlite3' if adapter == 'sqlite'
-     config = {
-       'adapter' => adapter,
-       'database' => uri.path.blank? ? uri.host : uri.path.split('/')[1],
-       'username' => uri.user,
-       'password' => uri.password,
-       'host' => uri.host,
-     }
-     config = sqlite_config(url) if config['adapter'] == 'sqlite3'
-     config
+   def dump(database_url)
+     db = Sequel.connect(database_url)
+     db.dump_schema_migration(:indexes => false)
    end
 
-   def sqlite_config(url)
-     m = %r{(sqlite3?)://(.+)}.match(url)
-     database = m[2]
-     database, q = database.split('?')
-     { 'adapter' => 'sqlite3', 'database' => database }
+   def dump_table(database_url, table)
+     Sequel.connect(database_url) do |db|
+       <<END_MIG
+ Class.new(Sequel::Migration) do
+   def up
+ #{db.dump_table_schema(table, :indexes => false)}
    end
-
-   def connection(database_url)
-     config = create_config(database_url)
-     c = ActiveRecord::Base.establish_connection(config)
-     Taps::AdapterHacks.load(config['adapter'])
-     c
+ end
+ END_MIG
+     end
    end
 
-   def dump(database_url)
-     connection(database_url)
-
-     stream = StringIO.new
-     ActiveRecord::SchemaDumper.ignore_tables = []
-     ActiveRecord::SchemaDumper.dump(ActiveRecord::Base.connection, stream)
-     stream.string
+   def indexes(database_url)
+     db = Sequel.connect(database_url)
+     db.dump_indexes_migration
    end
 
-   def dump_without_indexes(database_url)
-     schema = dump(database_url)
-     schema.split("\n").collect do |line|
-       if line =~ /^\s+add_index/
-         line = "##{line}"
+   def indexes_individual(database_url)
+     idxs = {}
+     Sequel.connect(database_url) do |db|
+       tables = db.tables
+       tables.each do |table|
+         idxs[table] = db.send(:dump_table_indexes, table, :add_index, {}).split("\n")
        end
-       line
-     end.join("\n")
-   end
+     end
 
-   def indexes(database_url)
-     schema = dump(database_url)
-     schema.split("\n").collect do |line|
-       line if line =~ /^\s+add_index/
-     end.uniq.join("\n")
+     idxs.each do |table, indexes|
+       idxs[table] = indexes.map do |idx|
+         <<END_MIG
+ Class.new(Sequel::Migration) do
+   def up
+ #{idx}
+   end
+ end
+ END_MIG
+       end
+     end
+     idxs.to_json
    end
 
    def load(database_url, schema)
-     connection(database_url)
-     eval(schema)
+     Sequel.connect(database_url) do |db|
+       eval(schema).apply(db, :up)
+     end
    end
 
    def load_indexes(database_url, indexes)
-     connection(database_url)
-
-     schema =<<EORUBY
-     ActiveRecord::Schema.define do
-       #{indexes}
-     end
-     EORUBY
-     eval(schema)
+     Sequel.connect(database_url) do |db|
+       eval(indexes).apply(db, :up)
+     end
    end
 
    def reset_db_sequences(database_url)
-     connection(database_url)
-
-     if ActiveRecord::Base.connection.respond_to?(:reset_pk_sequence!)
-       ActiveRecord::Base.connection.tables.each do |table|
-         ActiveRecord::Base.connection.reset_pk_sequence!(table)
-       end
+     db = Sequel.connect(database_url)
+     return unless db.respond_to?(:reset_primary_key_sequence)
+     db.tables.each do |table|
+       db.reset_primary_key_sequence(table)
      end
    end
  end
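
In short, the ActiveRecord-based schema handling is replaced by Sequel's schema_dumper and migration extensions: dump now returns a Sequel migration string, and load/load_indexes eval and apply it against the target connection. As a minimal sketch of driving these helpers directly (the SQLite URLs are placeholders, and the taps lib directory is assumed to be on the load path):

# Sketch only: copy a schema from one database to another via the new helpers.
require 'taps/schema'

source = 'sqlite://source.db'   # placeholder URLs; any Sequel-supported URL works
target = 'sqlite://target.db'

migration = Taps::Schema.dump(source)          # Sequel migration source, indexes excluded
Taps::Schema.load(target, migration)           # eval + apply(:up) on the target

index_migration = Taps::Schema.indexes(source) # indexes are dumped as their own migration
Taps::Schema.load_indexes(target, index_migration)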
data/lib/taps/server.rb CHANGED
@@ -1,17 +1,20 @@
  require 'sinatra/base'
- require File.dirname(__FILE__) + '/config'
- require File.dirname(__FILE__) + '/utils'
- require File.dirname(__FILE__) + '/db_session'
+ require 'taps/config'
+ require 'taps/utils'
+ require 'taps/db_session'
+ require 'taps/data_stream'
 
  module Taps
-   class Server < Sinatra::Default
+   class Server < Sinatra::Base
      use Rack::Auth::Basic do |login, password|
        login == Taps::Config.login && password == Taps::Config.password
      end
 
+     use Rack::Deflater unless ENV['NO_DEFLATE']
+
      error do
        e = request.env['sinatra.error']
-       "Taps Server Error: #{e}"
+       "Taps Server Error: #{e}\n#{e.backtrace}"
      end
 
      before do
@@ -27,38 +30,46 @@ class Server < Sinatra::Default
 
      post '/sessions' do
        key = rand(9999999999).to_s
-       database_url = Taps::Config.database_url || request.body.string
+
+       if ENV['NO_DEFAULT_DATABASE_URL']
+         database_url = request.body.string
+       else
+         database_url = Taps::Config.database_url || request.body.string
+       end
 
        DbSession.create(:key => key, :database_url => database_url, :started_at => Time.now, :last_access => Time.now)
 
        "/sessions/#{key}"
      end
 
-     post '/sessions/:key/tables/:table' do
+     post '/sessions/:key/push/table' do
        session = DbSession.filter(:key => params[:key]).first
        halt 404 unless session
 
-       gzip_data = request.body.read
-       halt 412 unless Taps::Utils.valid_data?(gzip_data, request.env['HTTP_TAPS_CHECKSUM'])
+       json = DataStream.parse_json(params[:json])
 
-       rows = Marshal.load(Taps::Utils.gunzip(gzip_data))
-
-       db = session.connection
-       table = db[params[:table].to_sym]
-       table.import(rows[:header], rows[:data])
+       size = 0
+       session.conn do |db|
+         begin
+           stream = DataStream.factory(db, json[:state])
+           size = stream.fetch_remote_in_server(params)
+         rescue Taps::DataStream::CorruptedData
+           halt 412
+         end
+       end
 
-       "#{rows[:data].size}"
+       # TODO: return the stream's state with the size
+       size.to_s
      end
 
-     post '/sessions/:key/reset_sequences' do
+     post '/sessions/:key/push/reset_sequences' do
        session = DbSession.filter(:key => params[:key]).first
        halt 404 unless session
 
-       schema_app = File.dirname(__FILE__) + '/../../bin/schema'
        Taps::Utils.schema_bin(:reset_db_sequences, session.database_url)
      end
 
-     post '/sessions/:key/schema' do
+     post '/sessions/:key/push/schema' do
        session = DbSession.filter(:key => params[:key]).first
        halt 404 unless session
 
@@ -66,7 +77,7 @@ class Server < Sinatra::Default
        Taps::Utils.load_schema(session.database_url, schema_data)
      end
 
-     post '/sessions/:key/indexes' do
+     post '/sessions/:key/push/indexes' do
        session = DbSession.filter(:key => params[:key]).first
        halt 404 unless session
 
@@ -74,63 +85,78 @@ class Server < Sinatra::Default
        Taps::Utils.load_indexes(session.database_url, index_data)
      end
 
-     get '/sessions/:key/schema' do
+     post '/sessions/:key/pull/schema' do
        session = DbSession.filter(:key => params[:key]).first
        halt 404 unless session
 
-       schema_app = File.dirname(__FILE__) + '/../../bin/schema'
-       Taps::Utils.schema_bin(:dump, session.database_url)
+       Taps::Utils.schema_bin(:dump_table, session.database_url, params[:table_name])
      end
 
-     get '/sessions/:key/indexes' do
+     get '/sessions/:key/pull/indexes' do
        session = DbSession.filter(:key => params[:key]).first
        halt 404 unless session
 
-       schema_app = File.dirname(__FILE__) + '/../../bin/schema'
-       Taps::Utils.schema_bin(:indexes, session.database_url)
+       content_type 'application/json'
+       Taps::Utils.schema_bin(:indexes_individual, session.database_url)
      end
 
-     get '/sessions/:key/tables' do
+     get '/sessions/:key/pull/table_names' do
        session = DbSession.filter(:key => params[:key]).first
        halt 404 unless session
 
-       db = session.connection
-       tables = db.tables
-
-       tables_with_counts = tables.inject({}) do |accum, table|
-         accum[table] = db[table].count
-         accum
+       tables = []
+       session.conn do |db|
+         tables = db.tables
        end
 
-       Marshal.dump(tables_with_counts)
+       content_type 'application/json'
+       tables.to_json
+     end
+
+     post '/sessions/:key/pull/table_count' do
+       session = DbSession.filter(:key => params[:key]).first
+       halt 404 unless session
+
+       count = 0
+       session.conn do |db|
+         count = db[ params[:table].to_sym ].count
+       end
+       count.to_s
      end
 
-     get '/sessions/:key/tables/:table/:chunk' do
+     post '/sessions/:key/pull/table' do
        session = DbSession.filter(:key => params[:key]).first
        halt 404 unless session
 
-       chunk = params[:chunk].to_i
-       chunk = 500 if chunk < 1
-
-       offset = params[:offset].to_i
-       offset = 0 if offset < 0
-
-       db = session.connection
-       table = db[params[:table].to_sym]
-       order = Taps::Utils.order_by(db, params[:table].to_sym)
-       string_columns = Taps::Utils.incorrect_blobs(db, params[:table].to_sym)
-       raw_data = Marshal.dump(Taps::Utils.format_data(table.order(*order).limit(chunk, offset).all, string_columns))
-       gzip_data = Taps::Utils.gzip(raw_data)
-       response['Taps-Checksum'] = Taps::Utils.checksum(gzip_data).to_s
-       response['Content-Type'] = "application/octet-stream"
-       gzip_data
+       encoded_data = nil
+       stream = nil
+
+       session.conn do |db|
+         state = JSON.parse(params[:state]).symbolize_keys
+         stream = Taps::DataStream.factory(db, state)
+         encoded_data = stream.fetch.first
+       end
+
+       checksum = Taps::Utils.checksum(encoded_data).to_s
+       json = { :checksum => checksum, :state => stream.to_hash }.to_json
+
+       content, content_type_value = Taps::Multipart.create do |r|
+         r.attach :name => :encoded_data,
+                  :payload => encoded_data,
+                  :content_type => 'application/octet-stream'
+         r.attach :name => :json,
+                  :payload => json,
+                  :content_type => 'application/json'
+       end
+
+       content_type content_type_value
+       content
      end
 
      delete '/sessions/:key' do
        session = DbSession.filter(:key => params[:key]).first
        halt 404 unless session
 
-       session.disconnect
        session.destroy
 
        "ok"
data/lib/taps/utils.rb CHANGED
@@ -8,7 +8,9 @@ module Utils
    extend self
 
    def windows?
-     RUBY_PLATFORM =~ /mswin32|mingw32/
+     return @windows if defined?(@windows)
+     require 'rbconfig'
+     @windows = !!(::Config::CONFIG['host_os'] =~ /mswin|mingw/)
    end
 
    def bin(cmd)
@@ -24,24 +26,18 @@ module Utils
      Zlib.crc32(data) == crc32.to_i
    end
 
-   def gzip(data)
-     io = StringIO.new
-     gz = Zlib::GzipWriter.new(io)
-     gz.write data
-     gz.close
-     io.string
+   def base64encode(data)
+     [data].pack("m")
    end
 
-   def gunzip(gzip_data)
-     io = StringIO.new(gzip_data)
-     gz = Zlib::GzipReader.new(io)
-     data = gz.read
-     gz.close
-     data
+   def base64decode(data)
+     data.unpack("m").first
    end
 
-   def format_data(data, string_columns)
+   def format_data(data, opts={})
      return {} if data.size == 0
+     string_columns = opts[:string_columns] || []
+
      header = data[0].keys
      only_data = data.collect do |row|
        row = blobs_to_string(row, string_columns)
@@ -54,7 +50,7 @@ module Utils
    # this is not true for other databases so we must check if the field is
    # actually text and manually convert it back to a string
    def incorrect_blobs(db, table)
-     return [] unless db.class.to_s == "Sequel::MySQL::Database"
+     return [] if (db.url =~ /mysql:\/\//).nil?
 
      columns = []
      db.schema(table).each do |data|
@@ -81,7 +77,7 @@ module Utils
        t1 = Time.now
        time_in_db = yield chunksize
        time_in_db = time_in_db.to_f rescue 0
-     rescue Errno::EPIPE, RestClient::RequestFailed
+     rescue Errno::EPIPE, RestClient::RequestFailed, RestClient::RequestTimeout
        retries += 1
        raise if retries > 2
 
@@ -126,21 +122,26 @@ module Utils
    end
 
    def schema_bin(*args)
-     `#{File.dirname(__FILE__)}/../../bin/#{bin('schema')} #{args.join(' ')}`
+     `#{File.dirname(__FILE__)}/../../bin/#{bin('schema')} #{args.map { |a| "'#{a}'" }.join(' ')}`
    end
 
    def primary_key(db, table)
      if db.respond_to?(:primary_key)
        db.primary_key(table)
      else
-       db.schema(table).select { |c| c[1][:primary_key] }.map { |c| c.first }.shift
+       db.schema(table).select { |c| c[1][:primary_key] }.map { |c| c.first.to_sym }
      end
    end
 
+   def single_integer_primary_key(db, table)
+     keys = db.schema(table).select { |c| c[1][:primary_key] and c[1][:type] == :integer }
+     not keys.nil? and keys.size == 1
+   end
+
    def order_by(db, table)
      pkey = primary_key(db, table)
      if pkey
-       [pkey.to_sym]
+       pkey.kind_of?(Array) ? pkey : [pkey.to_sym]
      else
        db[table].columns
      end
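
Utils drops its gzip/gunzip helpers in favor of base64 encoding and changes format_data to take an options hash. A small sketch of a payload round-trip through the new helpers (Marshal is used here purely for illustration, mirroring what the old server routes did; the taps lib directory is assumed to be on the load path):

# Sketch only: encode/decode now goes through base64 instead of gzip.
require 'taps/utils'

payload = Marshal.dump(:header => [:id, :name], :data => [[1, 'hello']])

encoded = Taps::Utils.base64encode(payload)               # [data].pack("m")
decoded = Marshal.load(Taps::Utils.base64decode(encoded)) # data.unpack("m").first

decoded[:data].first  # => [1, "hello"]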
data/spec/base.rb CHANGED
@@ -4,6 +4,8 @@ require 'mocha'
  require 'rack/test'
  require 'tempfile'
 
+ $:.unshift File.dirname(__FILE__) + "/../lib"
+
  class Bacon::Context
    include Mocha::Standalone
    include Rack::Test::Methods
@@ -19,6 +21,6 @@ class Bacon::Context
    end
  end
 
- require File.dirname(__FILE__) + '/../lib/taps/config'
+ require 'taps/config'
  Taps::Config.taps_database_url = "sqlite://#{Tempfile.new('test.db').path}"
  Sequel.connect(Taps::Config.taps_database_url)