sip 0.0.0

Files changed (56)
  1. data/Gemfile +2 -0
  2. data/LICENSE +674 -0
  3. data/README.rdoc +32 -0
  4. data/Rakefile +21 -0
  5. data/bin/sip +83 -0
  6. data/bin/transpart +114 -0
  7. data/docs/classes/Sip.html +169 -0
  8. data/docs/classes/Sip/CmdOpts.html +179 -0
  9. data/docs/classes/Sip/Config.html +362 -0
  10. data/docs/classes/Sip/DBBase.html +368 -0
  11. data/docs/classes/Sip/HadoopException.html +111 -0
  12. data/docs/classes/Sip/Hive.html +295 -0
  13. data/docs/classes/Sip/HiveQueryException.html +111 -0
  14. data/docs/classes/Sip/ImportScriptExecutionError.html +111 -0
  15. data/docs/classes/Sip/MySQLSipper.html +273 -0
  16. data/docs/classes/Sip/NoSuchColumn.html +111 -0
  17. data/docs/classes/Sip/NoSuchTable.html +111 -0
  18. data/docs/classes/Sip/PastFailureException.html +111 -0
  19. data/docs/classes/Sip/Sipper.html +454 -0
  20. data/docs/classes/Sip/UnsupportedDatabaseType.html +111 -0
  21. data/docs/classes/Sip/Utils.html +269 -0
  22. data/docs/classes/Struct.html +146 -0
  23. data/docs/created.rid +1 -0
  24. data/docs/files/README_rdoc.html +174 -0
  25. data/docs/files/lib/sip/cmdopts_rb.html +101 -0
  26. data/docs/files/lib/sip/config_rb.html +108 -0
  27. data/docs/files/lib/sip/databases/dbbase_rb.html +108 -0
  28. data/docs/files/lib/sip/databases/mysql_rb.html +108 -0
  29. data/docs/files/lib/sip/exceptions_rb.html +101 -0
  30. data/docs/files/lib/sip/extensions_rb.html +101 -0
  31. data/docs/files/lib/sip/hive_rb.html +101 -0
  32. data/docs/files/lib/sip/sipper_rb.html +101 -0
  33. data/docs/files/lib/sip/utils_rb.html +110 -0
  34. data/docs/files/lib/sip/version_rb.html +101 -0
  35. data/docs/files/lib/sip_rb.html +117 -0
  36. data/docs/fr_class_index.html +42 -0
  37. data/docs/fr_file_index.html +38 -0
  38. data/docs/fr_method_index.html +72 -0
  39. data/docs/index.html +24 -0
  40. data/docs/rdoc-style.css +208 -0
  41. data/lib/sip.rb +10 -0
  42. data/lib/sip/cmdopts.rb +20 -0
  43. data/lib/sip/config.rb +80 -0
  44. data/lib/sip/databases/dbbase.rb +56 -0
  45. data/lib/sip/databases/mysql.rb +52 -0
  46. data/lib/sip/exceptions.rb +9 -0
  47. data/lib/sip/extensions.rb +5 -0
  48. data/lib/sip/hive.rb +62 -0
  49. data/lib/sip/sipper.rb +118 -0
  50. data/lib/sip/templates/export.sh +73 -0
  51. data/lib/sip/utils.rb +58 -0
  52. data/lib/sip/version.rb +3 -0
  53. data/test/database_interaction_test.rb +7 -0
  54. data/test/hive_test.rb +28 -0
  55. data/test/sipper_test.rb +25 -0
  56. metadata +125 -0
data/lib/sip/exceptions.rb ADDED
@@ -0,0 +1,9 @@
+ module Sip
+   class NoSuchTable < StandardError; end
+   class NoSuchColumn < StandardError; end
+   class ImportScriptExecutionError < StandardError; end
+   class PastFailureException < StandardError; end
+   class HiveQueryException < StandardError; end
+   class HadoopException < StandardError; end
+   class UnsupportedDatabaseType < StandardError; end
+ end
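These exceptions are raised throughout the gem (HiveQueryException in hive.rb, HadoopException and ImportScriptExecutionError in sipper.rb, and so on), so callers can rescue them selectively. A minimal sketch, assuming a configured Sip::Sipper named sipper and a table configuration hash tableconf:

  begin
    sipper.run_scripts
    sipper.run_hive_import(tableconf)
  rescue Sip::ImportScriptExecutionError => e
    warn "an export script exited non-zero: #{e.message}"
  rescue Sip::HiveQueryException => e
    warn "a hive -S invocation failed: #{e.message}"
  end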
data/lib/sip/extensions.rb ADDED
@@ -0,0 +1,5 @@
+ class Struct
+   def get_binding
+     binding
+   end
+ end
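This monkey-patch exposes a Struct instance's private binding so that ERB can evaluate a template in the instance's context, letting each struct member read like a local variable (see Utils.load_template below). A minimal sketch using only the standard library:

  require 'erb'

  klass  = Struct.new(:host, :tmpdir)
  values = klass.new('worker1', '/tmp/sip')
  ERB.new("ssh <%= host %> mkdir -p <%= tmpdir %>").result(values.get_binding)
  # => "ssh worker1 mkdir -p /tmp/sip"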
data/lib/sip/hive.rb ADDED
@@ -0,0 +1,62 @@
+ module Sip
+   module Hive
+     def self.run_hsql_create_table(sipper, db, tableconf, method)
+       if method == :overwrite
+         Hive.run sipper, "DROP TABLE #{tableconf['hive_table_name']}"
+       end
+
+       partition = ""
+       cols = db.hive_columns tableconf['tablename']
+       if tableconf.has_key? 'partition_by'
+         if not cols.map { |k,v| k }.include? tableconf['partition_by']
+           raise NoSuchColumn, "Column to partition by '#{tableconf['partition_by']}' not found in table '#{tableconf['tablename']}'"
+         end
+         partition_type = cols.select { |k,v| k == tableconf['partition_by'] }.map { |k,v| v }.first
+         partition = "PARTITIONED BY (#{tableconf['partition_by']} #{partition_type})"
+       end
+       colstring = cols.select { |k,v| k != tableconf.fetch('partition_by', nil) }.map { |k,v| "#{k} #{v}" }.join(", ")
+       Hive.run sipper, "CREATE TABLE IF NOT EXISTS #{tableconf['hive_table_name']} (#{colstring}) #{partition}"
+     end
+
+     def self.run(sipper, query)
+       cmd = "#{ENV['HIVE_HOME']}/bin/hive -S -e \"#{query}\""
+       sipper.log "Running Hive command: #{cmd}"
+       raise HiveQueryException, "Could not execute hive command #{cmd}" if not system(cmd)
+     end
+
+     def self.run_file(sipper, path)
+       cmd = "#{ENV['HIVE_HOME']}/bin/hive -S -f #{path}"
+       sipper.log "Running Hive command: #{cmd}"
+       raise HiveQueryException, "Could not execute hive command #{cmd}" if not system(cmd)
+     end
+
+     def self.run_import(sipper, tableconf)
+       base_path = "#{sipper.config['hdfs_tmpdir']}/#{tableconf['hive_table_name']}"
+       stmts = Hive.load_data_statements tableconf, base_path
+       path = File.join(sipper.config['tmpdir'], 'scripts', 'hive_import.hql')
+       open(path, 'w') { |f|
+         f.write stmts.join("\n")
+       }
+       Hive.run_file sipper, path
+     end
+
+     def self.load_data_statements(tableconf, base_path)
+       files = Hive.hdfs_ls(base_path).select { |p| p.slice(-5, 5) == '.part' }
+       if tableconf.has_key? 'partition_by'
+         files.map { |path|
+           partval = path.split('/')[-2]
+           "LOAD DATA INPATH '#{path}' INTO TABLE #{tableconf['hive_table_name']} PARTITION (#{tableconf['partition_by']}='#{partval}');"
+         }
+       else
+         ["LOAD DATA INPATH '#{base_path}' INTO TABLE #{tableconf['hive_table_name']};"]
+       end
+     end
+
+     def self.hdfs_ls(location)
+       result = `#{ENV['HADOOP_HOME']}/bin/hadoop dfs -lsr hdfs://#{location}`
+       lines = result.split("\n")
+       lines.shift
+       lines.map { |l| l.split(" ").last }
+     end
+   end
+ end
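To make the partitioned branch of load_data_statements concrete, here is a worked sketch; the table name, partition column, and HDFS path are hypothetical:

  tableconf = { 'hive_table_name' => 'events', 'partition_by' => 'day' }
  # given hdfs_ls returning a .part file at
  #   tmp/events/worker1/2011-03-01/00000.part
  # path.split('/')[-2] extracts the partition value '2011-03-01',
  # so the generated statement is:
  #   LOAD DATA INPATH 'tmp/events/worker1/2011-03-01/00000.part'
  #     INTO TABLE events PARTITION (day='2011-03-01');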
data/lib/sip/sipper.rb ADDED
@@ -0,0 +1,118 @@
+ module Sip
+   class Sipper
+     attr_reader :config
+
+     def initialize(config)
+       @config = config
+       slavefile = File.join(ENV['HADOOP_HOME'], 'conf', 'slaves')
+       log "Reading slaves from file #{slavefile}..."
+       begin
+         open(slavefile, 'r') { |f| @slaves = f.read.split("\n") }
+       rescue
+         raise HadoopException, "Could not read \"#{slavefile}\". Is your HADOOP_HOME environment variable correct?"
+       end
+       Utils::sanity_check(@config)
+     end
+
+     def log(msg)
+       puts "#{Utils::hostname} #{Time.now.strftime '%Y-%m-%d %H:%M:%S'}: #{msg}" if @config[:debug]
+     end
+
+     # return number of scripts created
+     def create_scripts(dbconf, tableconf)
+       @scripts = []
+       db = DBBase.make_interface dbconf['type'], dbconf, self
+
+       # set default columns if necessary
+       tableconf['columns'] = db.columns(tableconf['tablename']).map { |c| c.first } if tableconf['columns'].nil?
+
+       if tableconf['incremental_index'].nil?
+         create_script_without_index(dbconf, tableconf, db)
+       else
+         create_scripts_with_index(dbconf, tableconf, db)
+       end
+
+       @scripts.length
+     end
+
+     # this is the case where there is no primary key index, so the whole
+     # table will need to be imported
+     def create_script_without_index(dbconf, tableconf, db)
+       Hive::run_hsql_create_table self, db, tableconf, :overwrite
+
+       log "Importing all rows from table #{dbconf['dbname']}.#{tableconf['tablename']}"
+       select = db.generate_command tableconf
+       transpart_opts = generate_transpart_options(tableconf, db)
+       @scripts << Utils::write_script(self, @slaves.first, select, dbconf['dbname'], tableconf['tablename'], transpart_opts)
+     end
+
+     def create_scripts_with_index(dbconf, tableconf, db)
+       max = db.get_column_max tableconf['tablename'], tableconf['incremental_index']
+
+       method = (tableconf['method'] == "append" and not @config[:overwrite]) ? :append : :overwrite
+       if method == :append and max == tableconf['incremental_index_value']
+         log "Ignoring #{dbconf['dbname']}.#{tableconf['tablename']} - already up to date"
+       else
+         Hive::run_hsql_create_table self, db, tableconf, method
+         @slaves.each_with_index { |slavename, index|
+           @scripts << create_script(slavename, index, dbconf, tableconf, db, max, method)
+         }
+         db.close
+       end
+
+       # update incremental index value if method in conf is append, regardless of whether or
+       # not this is a forced overwrite
+       tableconf['incremental_index_value'] = max if tableconf['method'] == "append"
+     end
+
+     def create_script(slavename, index, dbconf, tableconf, db, max, method)
+       if method == :append
+         first, last = get_even_split(tableconf['incremental_index_value']+1, max, index, @slaves.length)
+       else
+         first, last = 1, max
+       end
+
+       log "Importing #{first} <= #{tableconf['incremental_index']} <= #{last} from table #{dbconf['dbname']}.#{tableconf['tablename']}"
+       select = db.generate_command tableconf, first, last
+       transpart_opts = generate_transpart_options(tableconf, db)
+       Utils::write_script self, slavename, select, dbconf['dbname'], tableconf['tablename'], transpart_opts
+     end
+
+     def generate_transpart_options(tableconf, db)
+       opts = CmdOpts.new
+       opts['c'] = db.order_column_list(tableconf['tablename'], tableconf['columns']).join(',')
+       opts['p'] = tableconf['partition_by'] if tableconf.has_key? "partition_by"
+       if tableconf.has_key? 'transformations' and tableconf['transformations'].length > 0
+         opts['t'] = tableconf['transformations'].map { |k,v| "#{k}:#{v}" }.join(',')
+       end
+       opts['H'] = tableconf['hive_table_name']
+       opts['o'] = File.join(@config['tmpdir'], 'partitions')
+       opts.set('d') if @config[:debug]
+       opts
+     end
+
+     def get_even_split(min, max, index, count)
+       size = ((max - min + 1).to_f / count.to_f).ceil
+       first = (size * index) + min
+       last = (size * (index+1)) + min - 1
+       [first, [max, last].min]
+     end
+
+     def run_scripts
+       pids = {}
+       @scripts.each { |script|
+         log "Running #{script}..."
+         pid = fork { Kernel.exit system("sh #{script}") }
+         pids[pid] = script
+         sleep 1
+       }
+       Process.waitall.map { |r| r.last }.each { |status|
+         raise ImportScriptExecutionError, "Error running script '#{pids[status.pid]}'" if status.exitstatus != 0
+       }
+     end
+
+     def run_hive_import(tableconf)
+       Hive.run_import self, tableconf
+     end
+   end
+ end
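get_even_split carves the inclusive id range [min, max] into count contiguous chunks of size ceil((max - min + 1) / count), one per slave, clamping the final chunk at max. A worked example with hypothetical values:

  # splitting ids 1..10 across 3 slaves: size = ceil(10 / 3.0) = 4
  sipper.get_even_split(1, 10, 0, 3)   # => [1, 4]
  sipper.get_even_split(1, 10, 1, 3)   # => [5, 8]
  sipper.get_even_split(1, 10, 2, 3)   # => [9, 10]  (12 clamped to max)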
data/lib/sip/templates/export.sh ADDED
@@ -0,0 +1,73 @@
+ #!/bin/bash
+
+ HOST=<%= host %>
+ DEBUG=<%= debug %>
+ TFILE=<%= tfile %>
+ TOPTS="<%= topts %>"
+ TOPTS_ODIR=<%= output_dir %>
+ TMPDIR=<%= tmpdir %>
+ QUERY="<%= query %>"
+ HADOOP_HOME=<%= hadoop_home %>
+ HIVE_HOME=<%= hive_home %>
+ OVERWRITE=<%= overwrite %>
+ HIVE_TABLE_NAME=<%= hive_table_name %>
+ HDFS_TMPDIR=<%= hdfs_tmpdir %>
+
+ HDFS_PATH=hdfs://$HDFS_TMPDIR/$HIVE_TABLE_NAME/$HOST
+
+ function debug {
+   if [ "$DEBUG" == '1' ]; then
+     echo $HOST $(date +'%Y-%m-%d %H:%M:%S'): $1
+   fi
+ }
+
+ function error {
+   echo $HOST $(date +'%Y-%m-%d %H:%M:%S'): ERROR - $1
+   exit 1
+ }
+
+ function check_result {
+   if [ $? != 0 ]; then
+     error "$1"
+   fi
+ }
+
+ function hdfs_run {
+   debug "$2"
+   $HADOOP_HOME/bin/hadoop dfs $1
+   check_result "$2"
+ }
+
+ function remote_exec {
+   debug "$2"
+   ssh $HOST "$1"
+   check_result "$2"
+ }
+
+ $HADOOP_HOME/bin/hadoop dfs -test -e $HDFS_PATH || hdfs_run "-mkdir $HDFS_PATH" "Creating $HDFS_PATH"
+
+ if [ "$OVERWRITE" == '0' ]; then
+   ssh $HOST "test ! -x $TOPTS_ODIR"
+   check_result "The directory $HOST:$TOPTS_ODIR exists, indicating a past failure. Try a full reimport with the -o option."
+   FILE_COUNT=$($HADOOP_HOME/bin/hadoop dfs -ls $HDFS_PATH | wc -l)
+   if [ "$FILE_COUNT" != "0" ]; then
+     error "$HDFS_PATH is not empty, indicating a past failure. Try a full reimport with the -o option."
+   fi
+ else
+   remote_exec "rm -rf $TOPTS_ODIR" "removing directory $TOPTS_ODIR"
+   hdfs_run "-rmr $HDFS_PATH" "Clearing directory $HDFS_PATH"
+ fi
+
+ remote_exec "mkdir -p $TMPDIR" "making directory $HOST:$TMPDIR if it does not exist"
+
+ debug "copying $TFILE to $HOST:$TMPDIR/transpart"
+ scp -q $TFILE $HOST:$TMPDIR/transpart
+ check_result "could not copy $TFILE to $HOST:$TMPDIR/transpart"
+
+ remote_exec "$QUERY | ruby $TMPDIR/transpart $TOPTS" "executing query: $QUERY"
+
+ remote_exec "$HADOOP_HOME/bin/hadoop dfs -moveFromLocal $TOPTS_ODIR $HDFS_PATH" "uploading files in $TOPTS_ODIR to HDFS"
+
+ remote_exec "rm -rf $TOPTS_ODIR" "removing directory $TOPTS_ODIR"
+
+ debug "finished import"
data/lib/sip/utils.rb ADDED
@@ -0,0 +1,58 @@
+ require 'erb'
+ require 'fileutils'
+ require 'socket'
+
+ module Sip
+   module Utils
+     def self.load_template(name, config)
+       path = File.join(File.dirname(__FILE__), "templates", name)
+       temp = ERB.new File.open(path, 'r') { |f| f.read }
+       klass = Struct.new *config.keys.map { |k| k.intern }
+       temp.result klass.new(*config.values).get_binding
+     end
+
+     def self.hostname
+       Socket.gethostname
+     end
+
+     def self.write_script(sipper, host, select, dbname, tablename, transpart_opts)
+       hive_table_name = sipper.config.tconf(dbname, tablename)['hive_table_name']
+       args = {
+         'host' => host,
+         'debug' => (sipper.config[:debug] ? '1' : '0'),
+         'tfile' => sipper.config['tfile'],
+         'topts' => transpart_opts.to_s,
+         'tmpdir' => sipper.config['tmpdir'],
+         'hdfs_tmpdir' => sipper.config['hdfs_tmpdir'],
+         'output_dir' => transpart_opts['o'],
+         'query' => select,
+         'hive_table_name' => hive_table_name,
+         'hadoop_home' => ENV['HADOOP_HOME'],
+         'hive_home' => ENV['HIVE_HOME'],
+         'overwrite' => (sipper.config[:overwrite] ? '1' : '0')
+       }
+       script = Utils::load_template('export.sh', args)
+
+       d = File.join(sipper.config['tmpdir'], 'scripts')
+       FileUtils.mkdir_p d
+       fname = File.join(d, "#{host}-#{tablename}.sh")
+       open(fname, 'w') { |f| f.write(script) }
+       sipper.log "just wrote import script to #{fname}"
+       fname
+     end
+
+     # look for tfile - return nil if not found
+     def self.tfile_path(path=nil)
+       if path.nil?
+         path = (`which transpart`).strip
+         path = "./bin/transpart" if path == ""
+       end
+       path = File.expand_path path
+       test('f', path) ? path : nil
+     end
+
+     def self.sanity_check(config)
+       # eventually, check for table structure differences, etc
+     end
+   end
+ end
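load_template builds an anonymous Struct from the config hash's keys, instantiates it with the values, and renders the named template in that instance's binding (via the Struct#get_binding extension above). A minimal sketch; 'greet.sh' and its contents are hypothetical:

  # given lib/sip/templates/greet.sh containing: echo hello from <%= host %>
  script = Sip::Utils.load_template('greet.sh', 'host' => 'worker1')
  # => "echo hello from worker1"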
data/lib/sip/version.rb ADDED
@@ -0,0 +1,3 @@
+ module Sip
+   VERSION = "0.0.0"
+ end
data/test/database_interaction_test.rb ADDED
@@ -0,0 +1,7 @@
+ require File.join File.dirname(__FILE__), 'helper'
+
+ class DatabaseInteractionTest < Test::Unit::TestCase
+   def test_connection
+     assert_equal 1, 1
+   end
+ end
data/test/hive_test.rb ADDED
@@ -0,0 +1,28 @@
+ require File.join File.dirname(__FILE__), 'helper'
+ require File.join File.dirname(__FILE__), 'base'
+
+ class HiveTest < Test::Unit::TestCase
+   include Base
+
+   def test_create_table
+     Sip::Hive.run_hsql_create_table @sipper, @db, @tableconf, :overwrite
+     #`#{ENV['HIVE_HOME']}/bin/hive -S -e "select
+
+     # Sip::Hive.run @sipper, "create table #{@tableconf['hive_table_name']} (id integer, name string)"
+     # Sip::Hive.run @sipper, "
+
+   end
+
+   def test_run
+     hh = ENV['HIVE_HOME']
+     ENV['HIVE_HOME'] = "/tmp/asdfasdfasdf"
+     assert_raise Sip::HiveQueryException do
+       Sip::Hive.run @sipper, "show tables"
+     end
+     ENV['HIVE_HOME'] = hh
+
+     assert_raises Sip::HiveQueryException do
+       Sip::Hive.run @sipper, "this is not a real query"
+     end
+   end
+ end
data/test/sipper_test.rb ADDED
@@ -0,0 +1,25 @@
+ require File.join File.dirname(__FILE__), 'helper'
+
+ class SipperTest < Test::Unit::TestCase
+   def test_bad_hadoop_env
+     o = ENV['HADOOP_HOME']
+     ENV['HADOOP_HOME'] = "/tmp/blahblahfake"
+     assert_raise Sip::HadoopException do
+       Sip::Sipper.new CONFIG
+     end
+     ENV['HADOOP_HOME'] = o
+   end
+
+   def test_split
+
+   end
+
+   def test_run_scripts
+
+   end
+
+   def test_transpart_opt_generation
+
+   end
+
+ end
metadata ADDED
@@ -0,0 +1,125 @@
+ --- !ruby/object:Gem::Specification
+ name: sip
+ version: !ruby/object:Gem::Version
+   hash: 31
+   prerelease: false
+   segments:
+   - 0
+   - 0
+   - 0
+   version: 0.0.0
+ platform: ruby
+ authors:
+ - Brian Muller
+ autorequire:
+ bindir: bin
+ cert_chain: []
+
+ date: 2011-03-25 00:00:00 -04:00
+ default_executable:
+ dependencies: []
+
+ description: SQL database importer for Hadoop / Hive
+ email:
+ - brian.muller@livingsocial.com
+ executables:
+ - sip
+ - transpart
+ extensions: []
+
+ extra_rdoc_files: []
+
+ files:
+ - lib/sip/cmdopts.rb
+ - lib/sip/config.rb
+ - lib/sip/databases/dbbase.rb
+ - lib/sip/databases/mysql.rb
+ - lib/sip/exceptions.rb
+ - lib/sip/extensions.rb
+ - lib/sip/hive.rb
+ - lib/sip/sipper.rb
+ - lib/sip/templates/export.sh
+ - lib/sip/utils.rb
+ - lib/sip/version.rb
+ - lib/sip.rb
+ - Gemfile
+ - LICENSE
+ - Rakefile
+ - README.rdoc
+ - docs/classes/Sip/CmdOpts.html
+ - docs/classes/Sip/Config.html
+ - docs/classes/Sip/DBBase.html
+ - docs/classes/Sip/HadoopException.html
+ - docs/classes/Sip/Hive.html
+ - docs/classes/Sip/HiveQueryException.html
+ - docs/classes/Sip/ImportScriptExecutionError.html
+ - docs/classes/Sip/MySQLSipper.html
+ - docs/classes/Sip/NoSuchColumn.html
+ - docs/classes/Sip/NoSuchTable.html
+ - docs/classes/Sip/PastFailureException.html
+ - docs/classes/Sip/Sipper.html
+ - docs/classes/Sip/UnsupportedDatabaseType.html
+ - docs/classes/Sip/Utils.html
+ - docs/classes/Sip.html
+ - docs/classes/Struct.html
+ - docs/created.rid
+ - docs/files/lib/sip/cmdopts_rb.html
+ - docs/files/lib/sip/config_rb.html
+ - docs/files/lib/sip/databases/dbbase_rb.html
+ - docs/files/lib/sip/databases/mysql_rb.html
+ - docs/files/lib/sip/exceptions_rb.html
+ - docs/files/lib/sip/extensions_rb.html
+ - docs/files/lib/sip/hive_rb.html
+ - docs/files/lib/sip/sipper_rb.html
+ - docs/files/lib/sip/utils_rb.html
+ - docs/files/lib/sip/version_rb.html
+ - docs/files/lib/sip_rb.html
+ - docs/files/README_rdoc.html
+ - docs/fr_class_index.html
+ - docs/fr_file_index.html
+ - docs/fr_method_index.html
+ - docs/index.html
+ - docs/rdoc-style.css
+ - test/database_interaction_test.rb
+ - test/hive_test.rb
+ - test/sipper_test.rb
+ - bin/sip
+ - bin/transpart
+ has_rdoc: true
+ homepage: https://github.com/livingsocial/sip
+ licenses: []
+
+ post_install_message:
+ rdoc_options: []
+
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       hash: 3
+       segments:
+       - 0
+       version: "0"
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       hash: 3
+       segments:
+       - 0
+       version: "0"
+ requirements: []
+
+ rubyforge_project: sip
+ rubygems_version: 1.3.7
+ signing_key:
+ specification_version: 3
+ summary: SQL database importer for Hadoop / Hive
+ test_files:
+ - test/database_interaction_test.rb
+ - test/hive_test.rb
+ - test/sipper_test.rb