mysql_truck 0.5.4 → 0.5.5
Sign up to get free protection for your applications and to get access to all the features.
- data/lib/mysql_truck/helper.rb +4 -0
- data/lib/mysql_truck/loader.rb +27 -12
- data/lib/mysql_truck/version.rb +1 -1
- metadata +3 -3
data/lib/mysql_truck/helper.rb
CHANGED
data/lib/mysql_truck/loader.rb
CHANGED
@@ -46,12 +46,7 @@ module MysqlTruck
|
|
46
46
|
# Load data
|
47
47
|
puts "\nLoading schema and data by table"
|
48
48
|
puts "--------------------------------"
|
49
|
-
|
50
|
-
import_cmd = "mysqlimport --local --compress #{db_connection_options}"
|
51
|
-
else
|
52
|
-
import_cmd = "mysqlimport #{db_connection_options}"
|
53
|
-
end
|
54
|
-
|
49
|
+
|
55
50
|
files = Dir["#{tmp_path}/*.no_index.sql"]
|
56
51
|
total = files.size
|
57
52
|
count = 0
|
@@ -63,18 +58,23 @@ module MysqlTruck
|
|
63
58
|
|
64
59
|
puts "\nProcessing #{table} (#{count}/#{total})"
|
65
60
|
|
66
|
-
schema_file
|
67
|
-
index_file
|
68
|
-
data_file
|
61
|
+
schema_file = Pathname.new(file)
|
62
|
+
index_file = tmp_path.join("#{table}.indices.sql")
|
63
|
+
data_file = tmp_path.join("#{table}.data.sql")
|
64
|
+
csv_data_file = tmp_path.join("#{table}.csv")
|
69
65
|
|
70
66
|
print " - Loading schema for #{table} ... "
|
71
67
|
execute_sql_file(table, backup_date_str, schema_file)
|
72
68
|
schema_file.delete if schema_file.exist?
|
73
69
|
|
74
70
|
if data_file.exist?
|
75
|
-
print " - Importing #{schema_file.basename(".sql")} ... "
|
71
|
+
print " - Importing #{schema_file.basename(".data.sql")} ... "
|
76
72
|
execute_sql_file(table, backup_date_str, data_file)
|
73
|
+
elsif csv_data_file.exist?
|
74
|
+
print " - Importing #{schema_file.basename(".csv")} ... "
|
75
|
+
import_csv_file(table, backup_date_str, csv_data_file)
|
77
76
|
end
|
77
|
+
|
78
78
|
data_file.delete if data_file.exist?
|
79
79
|
|
80
80
|
if index_file.exist?
|
@@ -97,8 +97,8 @@ module MysqlTruck
|
|
97
97
|
|
98
98
|
def execute_sql_file(table, backup_date_str, file_path)
|
99
99
|
cat_cmd = "cat #{file_path}"
|
100
|
-
sed_cmd = config[:date_suffix
|
101
|
-
sed2_cmd = config[:date_suffix
|
100
|
+
sed_cmd = config[:date_suffix] ? "sed 's/`#{table}`/`#{table}_#{backup_date_str.gsub(/-/, "")}`/g'" : nil
|
101
|
+
sed2_cmd = config[:date_suffix] ? "sed 's/TABLE #{table}/TABLE #{table}_#{backup_date_str.gsub(/-/, "")}/g'" : nil
|
102
102
|
import_cmd = "mysql #{db_connection_options}"
|
103
103
|
|
104
104
|
time = benchmark do
|
@@ -107,6 +107,21 @@ module MysqlTruck
|
|
107
107
|
print "complete (#{formatted_time time}).\n"
|
108
108
|
end
|
109
109
|
|
110
|
+
def import_csv_file(table, backup_date_str, file_path)
|
111
|
+
if config[:date_suffix]
|
112
|
+
old_file_path = file_path
|
113
|
+
file_path = file_path.to_s.gsub(table, "#{table}_#{backup_date_str.gsub(/-/, "")}")
|
114
|
+
# move file
|
115
|
+
`mv #{old_file_path} #{file_path}`
|
116
|
+
end
|
117
|
+
|
118
|
+
time = benchmark do
|
119
|
+
`mysqlimport --local --compress #{csv_options} #{db_connection_options} #{file_path}`
|
120
|
+
end
|
121
|
+
|
122
|
+
print "complete (#{formatted_time time}).\n"
|
123
|
+
end
|
124
|
+
|
110
125
|
def download_file(key)
|
111
126
|
filename = File.basename(key.name)
|
112
127
|
|
data/lib/mysql_truck/version.rb
CHANGED
metadata
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: mysql_truck
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.5.4
|
4
|
+
version: 0.5.5
|
5
5
|
prerelease:
|
6
6
|
platform: ruby
|
7
7
|
authors:
|
@@ -11,7 +11,7 @@ authors:
|
|
11
11
|
autorequire:
|
12
12
|
bindir: bin
|
13
13
|
cert_chain: []
|
14
|
-
date: 2012-
|
14
|
+
date: 2012-12-11 00:00:00.000000000 Z
|
15
15
|
dependencies:
|
16
16
|
- !ruby/object:Gem::Dependency
|
17
17
|
name: right_aws
|
@@ -70,7 +70,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
|
|
70
70
|
version: '0'
|
71
71
|
requirements: []
|
72
72
|
rubyforge_project:
|
73
|
-
rubygems_version: 1.8.
|
73
|
+
rubygems_version: 1.8.23
|
74
74
|
signing_key:
|
75
75
|
specification_version: 3
|
76
76
|
summary: Mysql database backup tool. Dumps/Loads to/from S3.
|