myreplicator 0.1.1 → 1.0.0
This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
- data/app/controllers/myreplicator/exports_controller.rb +7 -3
- data/app/models/myreplicator/export.rb +29 -0
- data/app/views/myreplicator/exports/_form.html.erb +2 -0
- data/app/views/myreplicator/exports/index.html.erb +3 -1
- data/lib/exporter/mysql_exporter.rb +74 -74
- data/lib/loader/loader.rb +9 -12
- data/lib/myreplicator/version.rb +1 -1
- data/lib/transporter/transporter.rb +39 -12
- data/test/dummy/log/development.log +22 -0
- metadata +4 -16
- data/test/dummy/tmp/myreplicator/okl_test_batchy_batches_1354061910.tsv.gz +0 -0
- data/test/dummy/tmp/myreplicator/okl_test_batchy_batches_1354061910.tsv.json +0 -1
- data/test/dummy/tmp/myreplicator/okl_test_batchy_batches_1357260317.tsv.gz +0 -0
- data/test/dummy/tmp/myreplicator/okl_test_batchy_batches_1357260317.tsv.json +0 -1
- data/test/dummy/tmp/myreplicator/okl_test_batchy_batches_1357689827.tsv.gz +0 -0
- data/test/dummy/tmp/myreplicator/okl_test_batchy_batches_1357689827.tsv.json +0 -1
data/app/controllers/myreplicator/exports_controller.rb
CHANGED
@@ -45,14 +45,17 @@ module Myreplicator
       @dbs = get_dbs
       @tables = db_metadata
       @edit = true
-
+
+      Myreplicator::Export.schedule_in_resque # schedule in resque
+
     end
 
     # POST /exports
     # POST /exports.json
     def create
       @export = Export.new(params[:export])
-
+
+      Myreplicator::Export.schedule_in_resque # schedule in resque
 
       @dbs = get_dbs
 
@@ -71,7 +74,8 @@ module Myreplicator
     # PUT /exports/1.json
     def update
       @export = Export.find(params[:id])
-
+      Myreplicator::Export.schedule_in_resque # schedule in resque
+
       @dbs = get_dbs
 
       respond_to do |format|
data/app/models/myreplicator/export.rb
CHANGED
@@ -51,6 +51,21 @@ module Myreplicator
       end
     end
 
+    def export_type?
+      if state == "new"
+        return :new
+      elsif incremental_export?
+        return :incremental
+      end
+    end
+
+    def incremental_export?
+      if export_type == "incremental"
+        return true
+      end
+      return false
+    end
+
     def filename
       @file_name ||= "#{source_schema}_#{table_name}_#{Time.now.to_i}.tsv"
     end
@@ -172,6 +187,20 @@ module Myreplicator
         })
     end
 
+    ##
+    # Throws ExportIgnored if the job is still running
+    # Checks the state of the job using PID and state
+    ##
+    def is_running?
+      return false if state != "exporting"
+      begin
+        Process.getpgid(exporter_pid)
+        raise Exceptions::ExportIgnored.new("Ignored")
+      rescue Errno::ESRCH
+        return false
+      end
+    end
+
     ##
     # Inner Class that connects to the source database
     # Handles connecting to multiple databases
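
The new Export#is_running? guard probes the recorded exporter PID with Process.getpgid, which raises Errno::ESRCH once that process is gone; if the process is still alive it raises ExportIgnored so the run is skipped. A minimal standalone sketch of that liveness check (not the gem's code; ExportIgnored stands in for Myreplicator::Exceptions::ExportIgnored, which is not shown in this diff):

# Sketch only: the PID-based liveness check used by Export#is_running?.
class ExportIgnored < StandardError; end

def exporter_running?(pid)
  Process.getpgid(pid)            # raises Errno::ESRCH if no such process exists
  raise ExportIgnored, "Ignored"  # process is still alive, so skip this run
rescue Errno::ESRCH
  false                           # stale PID: the previous export is dead
end

begin
  exporter_running?(Process.pid)  # current process is alive, so this raises
rescue ExportIgnored
  puts "still exporting, skipping"
end
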
data/app/views/myreplicator/exports/_form.html.erb
CHANGED
@@ -24,6 +24,8 @@ export_type = ["incremental","fulldump"]
   <%= f.select :table_name, [] %>
   <label>Incremental Column</label>
   <%= f.text_field :incremental_column %>
+  <label>Incremental Column Type</label>
+  <%= f.text_field :incremental_column_type %>
   <label>Maximum Incremental Value</label>
   <%= f.text_field :max_incremental_value %>
   <label>S3 Path</label>
data/app/views/myreplicator/exports/index.html.erb
CHANGED
@@ -12,6 +12,7 @@
     <th><%= sortable "destination_schema" %></th>
     <th><%= sortable "table_name" %></th>
     <th><%= sortable "incremental_column" %></th>
+    <th><%= sortable "incremental_column_type" %></th>
     <th><%= sortable "max_incremental_value" %></th>
     <th><%= sortable "export_to", "Export Desitination" %></th>
     <th><%= sortable "export_type" %></th>
@@ -28,6 +29,7 @@
     <td><%= export.destination_schema %></td>
     <td><%= export.table_name %></td>
     <td><%= export.incremental_column %></td>
+    <td><%= export.incremental_column_type %></td>
     <td><%= export.max_incremental_value %></td>
     <td><%= export.export_to %></td>
     <td><%= export.export_type %></td>
@@ -53,4 +55,4 @@ $(function(){
   $("span.status").tipTip();
   $("span.cron").tipTip();
 })
-</script>
+</script>
data/lib/exporter/mysql_exporter.rb
CHANGED
@@ -19,36 +19,21 @@ module Myreplicator
                            :filepath => filepath,
                            :incremental_col => @export_obj.incremental_column) do |metadata|
 
-        metadata.on_failure do |m|
-          update_export(:state => "failed", :export_finished_at => Time.now, :error => metadata.error)
-        end
-
        prepare metadata
 
-
+        case @export_obj.export_type?
+        when :new
+          on_failure_state_trans(metadata, "new") # If failed, go back to new
+          on_export_success(metadata)
          initial_export metadata
-          metadata.on_success do |m|
-            metadata.state = "export_completed"
-            wrapup metadata
-          end
-
-        elsif !is_running?
-          # local max value for incremental export
-
-          max_value = incremental_export(metadata)
-          #max_value = incremental_export_into_outfile(metadata)
 
-
-
-
-          metadata
-          metadata.state = "export_completed"
-          wrapup metadata
-          @export_obj.update_max_val(max_value) # update max value if export was successful
-        end
+        when :incremental
+          on_failure_state_trans(metadata, "failed") # Set state trans on failure
+          on_export_success(metadata)
+          incremental_export metadata
        end
 
-      end
+      end # metadata
    end
 
    ##
@@ -58,21 +43,7 @@ module Myreplicator
      ssh = @export_obj.ssh_to_source
      metadata.ssh = ssh
    end
-
-    ##
-    # Throws ExportIgnored if the job is still running
-    # Checks the state of the job using PID and state
-    ##
-    def is_running?
-      return false if @export_obj.state != "exporting"
-      begin
-        Process.getpgid(@export_obj.exporter_pid)
-        raise Exceptions::ExportIgnored.new("Ignored")
-      rescue Errno::ESRCH
-        return false
-      end
-    end
-
+
    def update_export *args
      options = args.extract_options!
      @export_obj.update_attributes! options
@@ -91,21 +62,29 @@ module Myreplicator
    ##
 
    def initial_export metadata
-      flags = ["create-options", "single-transaction"]
-      cmd = SqlCommands.mysqldump(:db => @export_obj.source_schema,
-                                  :flags => flags,
-                                  :filepath => filepath,
-                                  :table_name => @export_obj.table_name)
-
      metadata.export_type = "initial"
+      max_value = @export_obj.max_value if @export_obj.incremental_export?
+      cmd = initial_mysqldump_cmd
 
-
+      exporting_state_trans # mark exporting
 
      puts "Exporting..."
      result = execute_export(cmd, metadata)
+
      check_result(result, 0)
+
+      @export_obj.update_max_val(max_value) if @export_obj.incremental_export?
    end
 
+    def initial_mysqldump_cmd
+      flags = ["create-options", "single-transaction"]
+      cmd = SqlCommands.mysqldump(:db => @export_obj.source_schema,
+                                  :flags => flags,
+                                  :filepath => filepath,
+                                  :table_name => @export_obj.table_name)
+      return cmd
+    end
+
    ##
    # Exports table incrementally, using the incremental column specified
    # If column is not specified, it will export the entire table
@@ -113,29 +92,35 @@ module Myreplicator
    ##
 
    def incremental_export metadata
-
-
+      unless @export_obj.is_running?
+        max_value = @export_obj.max_value
+        metadata.export_type = "incremental"
+        @export_obj.update_max_val if @export_obj.max_incremental_value.blank?
+
+        cmd = incremental_export_cmd
+        exporting_state_trans # mark exporting
+        puts "Exporting..."
+        result = execute_export(cmd, metadata)
+        check_result(result, 0)
+        metadata.incremental_val = max_value # store max val in metadata
+        @export_obj.update_max_val(max_value) # update max value if export was successful
+      end
+      return false
+    end
 
+    def incremental_export_cmd
      sql = SqlCommands.export_sql(:db => @export_obj.source_schema,
                                   :table => @export_obj.table_name,
                                   :incremental_col => @export_obj.incremental_column,
                                   :incremental_col_type => @export_obj.incremental_column_type,
                                   :incremental_val => @export_obj.max_incremental_value)
-
+
      cmd = SqlCommands.mysql_export(:db => @export_obj.source_schema,
                                     :filepath => filepath,
                                     :sql => sql)
-
-      metadata.export_type = "incremental"
-      update_export(:state => "exporting", :export_started_at => Time.now, :exporter_pid => Process.pid)
-      puts "Exporting..."
-      result = execute_export(cmd, metadata)
-      check_result(result, 0)
-
-      return max_value
+      return cmd
    end
 
-
    ##
    # Exports table incrementally, similar to incremental_export method
    # Dumps file in tmp directory specified in myreplicator.yml
@@ -145,6 +130,8 @@ module Myreplicator
 
    def incremental_export_into_outfile metadata
      max_value = @export_obj.max_value
+      metadata.export_type = "incremental_outfile"
+
      @export_obj.update_max_val if @export_obj.max_incremental_value.blank?
 
      cmd = SqlCommands.mysql_export_outfile(:db => @export_obj.source_schema,
@@ -153,27 +140,14 @@ module Myreplicator
                                             :incremental_col => @export_obj.incremental_column,
                                             :incremental_col_type => @export_obj.incremental_column_type,
                                             :incremental_val => @export_obj.max_incremental_value)
-
-      metadata.export_type = "incremental_outfile"
-      update_export(:state => "exporting", :export_started_at => Time.now, :exporter_pid => Process.pid)
+      exporting_state_trans
      puts "Exporting..."
      result = execute_export(cmd, metadata)
      check_result(result, 0)
-
+      max_value = incremental_export_into_outfile(metadata)
      return max_value
    end
 
-    ##
-    # Completes an export process
-    # Zips files, updates states etc
-    ##
-    def wrapup metadata
-      puts "Zipping..."
-      zipfile(metadata)
-      update_export(:state => "export_completed", :export_finished_at => Time.now)
-      puts "Done.."
-    end
-
    ##
    # Checks the returned resut from SSH CMD
    # Size specifies if there should be any returned results or not
@@ -203,7 +177,9 @@ module Myreplicator
    ##
    def zipfile metadata
      cmd = "cd #{Myreplicator.configs[@export_obj.source_schema]["ssh_tmp_dir"]}; gzip #{@export_obj.filename}"
-
+
+      puts cmd
+
      zip_result = metadata.ssh.exec!(cmd)
 
      unless zip_result.nil?
@@ -214,6 +190,30 @@ module Myreplicator
 
      return zip_result
    end
+
+    def on_failure_state_trans metadata, state
+      metadata.on_failure do |m|
+        update_export(:state => state,
+                      :export_finished_at => Time.now,
+                      :error => metadata.error)
+      end
+    end
+
+    def exporting_state_trans
+      update_export(:state => "exporting",
+                    :export_started_at => Time.now,
+                    :exporter_pid => Process.pid)
+    end
+
+    def on_export_success metadata
+      metadata.on_success do |m|
+        update_export(:state => "export_completed",
+                      :export_finished_at => Time.now,
+                      :error => metadata.error)
+        metadata.state = "export_completed"
+        zipfile(metadata)
+      end
+    end
 
  end
end
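
The exporter now registers its failure and success handling up front through on_failure_state_trans, on_export_success and exporting_state_trans, then dispatches on @export_obj.export_type? instead of branching inline. The ExportMetadata object that receives those on_failure/on_success blocks is not part of this diff; the sketch below only illustrates the register-then-fire callback shape those helpers assume:

# Illustrative sketch, not code from the gem: a minimal metadata object with
# the register-then-fire callback shape used by on_failure_state_trans and
# on_export_success.
class FakeMetadata
  attr_accessor :state, :error

  def on_failure(&block)
    @failure_callback = block   # stored now, fired only if the body raises
  end

  def on_success(&block)
    @success_callback = block   # stored now, fired after the body succeeds
  end

  def run
    yield self
    @success_callback.call(self) if @success_callback
  rescue StandardError => e
    @error = e.message
    @failure_callback.call(self) if @failure_callback
  end
end

m = FakeMetadata.new
m.on_success { |md| md.state = "export_completed" }
m.run { |md| puts "Exporting..." }
puts m.state  # "export_completed"
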
data/lib/loader/loader.rb
CHANGED
@@ -24,41 +24,38 @@ module Myreplicator
    # Kicks off all initial loads first and then all incrementals
    # Looks at metadata files stored locally
    # Note: Initials are loaded sequentially
-    # If there is a
    ##
    def self.load
      initials = []
      incrementals = []
-
+      all_files = Loader.metadata_files
 
-
+      all_files.each do |m|
        if m.export_type == "initial"
          Kernel.p m
          initials << m # Add initial to the list
-
+          all_files.delete(m) # Delete obj from mixed list
 
-
+          all_files.each do |md|
            if m.equals(md) && md.export_type == "incremental"
              initials << md # incremental should happen after the initial load
-
+              all_files.delete(md) # remove from current list of files
            end
          end
        end
      end
 
-      incrementals =
+      incrementals = all_files # Remaining are all incrementals
 
      initial_procs = Loader.initial_loads initials
-      Kernel.p initial_procs
      parallel_load initial_procs
 
      incremental_procs = Loader.incremental_loads incrementals
-      Kernel.p incremental_procs
      parallel_load incremental_procs
    end
 
    def self.parallel_load procs
-      p = Parallelizer.new(:klass => "Myreplicator::
+      p = Parallelizer.new(:klass => "Myreplicator::Loader")
      procs.each do |proc|
        p.queue << {:params => [], :block => proc}
      end
@@ -127,7 +124,9 @@ module Myreplicator
 
      incrementals.each do |metadata|
        group = [metadata]
+        incrementals.delete(metadata)
 
+        # look for same loads
        incrementals.each do |md|
          if metadata.equals(md)
            group << md
@@ -135,10 +134,8 @@ module Myreplicator
          end
        end
 
-        incrementals.delete(metadata)
        groups << group
      end
-
      return groups
    end
 
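
Loader.load now collects everything from Loader.metadata_files, pulls the initial dumps (plus their matching incrementals) out of the list, and treats whatever remains as incrementals; note that it does this by calling all_files.delete inside the same each loops. As an aside, the first step of that split can also be written without mutating the array mid-iteration; this is an alternative sketch, not the gem's code:

require 'ostruct'

# Alternative sketch (not the gem's code): the initial-vs-remaining split done
# with Array#partition instead of deleting elements while iterating. Matching
# incrementals would still need to be moved into `initials` afterwards, as
# Loader.load does.
all_files = [
  OpenStruct.new(:export_type => "initial",     :table => "batchy_batches"),
  OpenStruct.new(:export_type => "incremental", :table => "batchy_batches")
]

initials, incrementals = all_files.partition { |m| m.export_type == "initial" }
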
data/lib/myreplicator/version.rb
CHANGED
(only the version constant changes: 0.1.1 becomes 1.0.0)
data/lib/transporter/transporter.rb
CHANGED
@@ -91,20 +91,42 @@ module Myreplicator
          json_local_path = File.join(tmp_dir,filename)
          puts "Downloading #{json_file}"
          sftp.download!(json_file, json_local_path)
-
-
-
-
-
-
-
-
-
-
+          metadata = Transporter.metadata_obj(json_local_path)
+          dump_file = metadata.export_path
+
+          if metadata.state == "export_completed"
+            Log.run(:job_type => "transporter", :name => "export_file",
+                    :file => dump_file, :export_id => export.id) do |log|
+              puts "Downloading #{dump_file}"
+              sftp.download!(dump_file, File.join(tmp_dir, dump_file.split("/").last))
+              Transporter.remove!(ssh, json_file, dump_file)
+            end
+          elsif Transporter.junk_file?(metadata)
+            Transporter.remove!(ssh, json_file, dump_file)
+          end #if
+
        end
      }
    end
 
+    ##
+    # Returns true if the file should be deleted
+    ##
+    def self.junk_file? metadata
+      case metadata.state
+      when "failed"
+        return true
+      when "ignored"
+        return true
+      end
+      return false
+    end
+
+    def self.remove! ssh, json_file, dump_file
+      ssh.exec!("rm #{json_file}")
+      ssh.exec!("rm #{dump_file}")
+    end
+
    ##
    # Gets all files ready to be exported from server
    ##
@@ -118,11 +140,16 @@ module Myreplicator
      return []
    end
 
+    def self.metadata_obj json_path
+      metadata = ExportMetadata.new(:metadata_path => json_path)
+      return metadata
+    end
+
    ##
    # Reads metadata file for the export path
    ##
-    def self.get_dump_path json_path
-      metadata =
+    def self.get_dump_path json_path, metadata = nil
+      metadata = Transporter.metadata_obj(json_path) if metadata.nil?
      return metadata.export_path
    end
 
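
Transporter.junk_file? flags failed or ignored exports so that remove! can delete both the metadata JSON and the dump file over the already-open SSH session. A small runnable sketch of that classification (OpenStruct stands in for the real ExportMetadata object, which is not part of this diff):

require 'ostruct'

# Sketch of the new junk-file classification: only "failed" and "ignored"
# exports are flagged for cleanup.
def junk_file?(metadata)
  case metadata.state
  when "failed", "ignored" then true
  else false
  end
end

puts junk_file?(OpenStruct.new(:state => "failed"))            # true
puts junk_file?(OpenStruct.new(:state => "export_completed"))  # false
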
data/test/dummy/log/development.log
CHANGED
@@ -9006,3 +9006,25 @@ Connecting to database specified by database.yml
 Connecting to database specified by database.yml
 Connecting to database specified by database.yml
 Connecting to database specified by database.yml
+Connecting to database specified by database.yml
+Connecting to database specified by database.yml
+DEPRECATION WARNING: Database connections will not be closed automatically, please close your
+database connection at the end of the thread by calling `close` on your
+connection. For example: ActiveRecord::Base.connection.close
+. (called from mon_synchronize at /home/sasan/.rvm/rubies/ruby-1.9.3-p125/lib/ruby/1.9.1/monitor.rb:211)
+DEPRECATION WARNING: Database connections will not be closed automatically, please close your
+database connection at the end of the thread by calling `close` on your
+connection. For example: ActiveRecord::Base.connection.close
+. (called from mon_synchronize at /home/sasan/.rvm/rubies/ruby-1.9.3-p125/lib/ruby/1.9.1/monitor.rb:211)
+DEPRECATION WARNING: Database connections will not be closed automatically, please close your
+database connection at the end of the thread by calling `close` on your
+connection. For example: ActiveRecord::Base.connection.close
+. (called from mon_synchronize at /home/sasan/.rvm/rubies/ruby-1.9.3-p125/lib/ruby/1.9.1/monitor.rb:211)
+DEPRECATION WARNING: Database connections will not be closed automatically, please close your
+database connection at the end of the thread by calling `close` on your
+connection. For example: ActiveRecord::Base.connection.close
+. (called from mon_synchronize at /home/sasan/.rvm/rubies/ruby-1.9.3-p125/lib/ruby/1.9.1/monitor.rb:211)
+Connecting to database specified by database.yml
+Connecting to database specified by database.yml
+Connecting to database specified by database.yml
+Connecting to database specified by database.yml
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: myreplicator
 version: !ruby/object:Gem::Version
-  version: 0.1.1
+  version: 1.0.0
   prerelease:
 platform: ruby
 authors:
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2013-01-
+date: 2013-01-14 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rails
@@ -244,12 +244,6 @@ files:
 - test/dummy/app/assets/javascripts/application.js
 - test/dummy/app/assets/stylesheets/application.css
 - test/dummy/Rakefile
-- test/dummy/tmp/myreplicator/okl_test_batchy_batches_1354061910.tsv.gz
-- test/dummy/tmp/myreplicator/okl_test_batchy_batches_1357260317.tsv.json
-- test/dummy/tmp/myreplicator/okl_test_batchy_batches_1354061910.tsv.json
-- test/dummy/tmp/myreplicator/okl_test_batchy_batches_1357260317.tsv.gz
-- test/dummy/tmp/myreplicator/okl_test_batchy_batches_1357689827.tsv.gz
-- test/dummy/tmp/myreplicator/okl_test_batchy_batches_1357689827.tsv.json
 - test/dummy/tmp/cache/assets/DF8/5D0/sprockets%2Fb815ed34d61cfed96222daa3bfd1d84d
 - test/dummy/tmp/cache/assets/D3F/A00/sprockets%2F7a803404e1f60b8d672d763cb9ba8af5
 - test/dummy/tmp/cache/assets/C2E/D00/sprockets%2F667019818351638709494c01bddb5f68
@@ -314,7 +308,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
       version: '0'
       segments:
       - 0
-      hash:
+      hash: -2431171312280889434
 required_rubygems_version: !ruby/object:Gem::Requirement
   none: false
   requirements:
@@ -323,7 +317,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
       version: '0'
       segments:
       - 0
-      hash:
+      hash: -2431171312280889434
 requirements: []
 rubyforge_project:
 rubygems_version: 1.8.23
@@ -376,12 +370,6 @@ test_files:
 - test/dummy/app/assets/javascripts/application.js
 - test/dummy/app/assets/stylesheets/application.css
 - test/dummy/Rakefile
-- test/dummy/tmp/myreplicator/okl_test_batchy_batches_1354061910.tsv.gz
-- test/dummy/tmp/myreplicator/okl_test_batchy_batches_1357260317.tsv.json
-- test/dummy/tmp/myreplicator/okl_test_batchy_batches_1354061910.tsv.json
-- test/dummy/tmp/myreplicator/okl_test_batchy_batches_1357260317.tsv.gz
-- test/dummy/tmp/myreplicator/okl_test_batchy_batches_1357689827.tsv.gz
-- test/dummy/tmp/myreplicator/okl_test_batchy_batches_1357689827.tsv.json
 - test/dummy/tmp/cache/assets/DF8/5D0/sprockets%2Fb815ed34d61cfed96222daa3bfd1d84d
 - test/dummy/tmp/cache/assets/D3F/A00/sprockets%2F7a803404e1f60b8d672d763cb9ba8af5
 - test/dummy/tmp/cache/assets/C2E/D00/sprockets%2F667019818351638709494c01bddb5f68
data/test/dummy/tmp/myreplicator/okl_test_batchy_batches_1354061910.tsv.gz
DELETED
Binary file

data/test/dummy/tmp/myreplicator/okl_test_batchy_batches_1354061910.tsv.json
DELETED
@@ -1 +0,0 @@
-{"export_time":"2012-11-27T16:18:30-08:00","table":"batchy_batches","database":"okl_test","state":"export_completed","incremental_col":null,"incremental_val":null,"export_id":1,"filepath":"/home/ubuntu/myreplicator_tmp/okl_test_batchy_batches_1354061910.tsv","zipped":true,"export_type":"incremental"}

data/test/dummy/tmp/myreplicator/okl_test_batchy_batches_1357260317.tsv.gz
DELETED
Binary file

data/test/dummy/tmp/myreplicator/okl_test_batchy_batches_1357260317.tsv.json
DELETED
@@ -1 +0,0 @@
-{"export_time":"2013-01-03T16:45:17-08:00","table":"batchy_batches","database":"okl_test","state":"export_completed","incremental_col":"started_at","incremental_val":null,"export_id":2,"filepath":"/home/ubuntu/myreplicator_tmp/okl_test_batchy_batches_1357260317.tsv","zipped":true,"export_type":"initial"}

data/test/dummy/tmp/myreplicator/okl_test_batchy_batches_1357689827.tsv.gz
DELETED
Binary file

data/test/dummy/tmp/myreplicator/okl_test_batchy_batches_1357689827.tsv.json
DELETED
@@ -1 +0,0 @@
-{"export_time":"2013-01-08T16:03:47-08:00","table":"batchy_batches","database":"okl_test","state":"export_completed","incremental_col":"updated_at","incremental_val":"2012-11-13T10:25:18Z","export_id":1,"filepath":"/home/ubuntu/myreplicator_tmp/okl_test_batchy_batches_1357689827.tsv","zipped":true,"export_type":"incremental"}