dbtools 0.5.2

Files changed (41)
  1. checksums.yaml +7 -0
  2. data/README.md +333 -0
  3. data/Thorfile +1 -0
  4. data/bin/dbtools +5 -0
  5. data/config/client_secret_dbtools.json +1 -0
  6. data/config/config.yml +1 -0
  7. data/config/database_config.yml +12 -0
  8. data/config/databases.txt +5 -0
  9. data/config/schedule.rb +8 -0
  10. data/dbtools.gemspec +37 -0
  11. data/lib/dbtools.rb +47 -0
  12. data/lib/dbtools/constants.rb +847 -0
  13. data/lib/dbtools/converter/csv2rdf_converter.rb +68 -0
  14. data/lib/dbtools/converter/csv_importer.rb +107 -0
  15. data/lib/dbtools/converter/excel2csv_converter.rb +40 -0
  16. data/lib/dbtools/converter/google_drive2_rdf_converter.rb +97 -0
  17. data/lib/dbtools/database/database_data.rb +146 -0
  18. data/lib/dbtools/database/db_connection.rb +236 -0
  19. data/lib/dbtools/database/mysql_connection.rb +78 -0
  20. data/lib/dbtools/database/postgresql_connection.rb +132 -0
  21. data/lib/dbtools/database/violation.rb +45 -0
  22. data/lib/dbtools/google_drive/google_drive_api.rb +211 -0
  23. data/lib/dbtools/google_drive/google_drive_entity.rb +22 -0
  24. data/lib/dbtools/google_drive/google_drive_file.rb +10 -0
  25. data/lib/dbtools/google_drive/google_drive_folder.rb +9 -0
  26. data/lib/dbtools/plsql_functions/connect_server.sql +30 -0
  27. data/lib/dbtools/plsql_functions/link.sql +17 -0
  28. data/lib/dbtools/plsql_functions/unlink.sql +15 -0
  29. data/lib/dbtools/rdf/rdf_reader.rb +136 -0
  30. data/lib/dbtools/version.rb +3 -0
  31. data/lib/rdf/geophy.rb +27 -0
  32. data/lib/tasks/aws.rb +43 -0
  33. data/lib/tasks/backup.rb +107 -0
  34. data/lib/tasks/check.rb +220 -0
  35. data/lib/tasks/ckan.rb +151 -0
  36. data/lib/tasks/convert.rb +139 -0
  37. data/lib/tasks/dump.rb +110 -0
  38. data/lib/tasks/googledrivetool.rb +252 -0
  39. data/lib/tasks/import.rb +142 -0
  40. data/lib/tasks/postgres.rb +29 -0
  41. metadata +307 -0
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: 836a2bf1d0432d2fd21cc831cecdccf8767336fb
+   data.tar.gz: f4a8477d5824a0ba5f620a199bfc63ade084ea23
+ SHA512:
+   metadata.gz: ba133c70b316391c025118b2b84d4244699bde0d1882483c2afea1696a9ea2dfc7473b52683ca7b4c47bece03961d3b9414833d7706246cbc26af4cc75aedd42
+   data.tar.gz: 2d0838799a72beec6ec0cf683da9af3d49e1f94e5309407f409c1defd271623f75e1530a010cc2e1949788eca4974848027f61079d21d9b9ebc17f8efa14b1f7
@@ -0,0 +1,333 @@
+ # DB tools
+ # Usage
+
+ This section describes how to use this tool. For development, see the Development section.
+
+ ## Installation
+
+ This application is a Ruby gem that can be installed and run as an executable.
+ Clone this repo and install the latest version of the gem by running:
+
+     gem install rdf-reasoner -v 0.4.1
+     gem install specific_install
+     gem specific_install -l ssh://git@git.geophy.com:9922/coredb/coredb-application-dbtools.git
+
+ Depending on how Ruby is installed on your machine, you might have to run these commands using `sudo` (not recommended).
+
+ ## Configuration
+
+ All configuration files are stored in `~/.dbtools/`.
+ The database connections can be configured in `database_config.yml`.
+ Run `dbtools google_drive list` to invoke the Google Drive authorization.
+
+ ## Running the application
+ After installing the gem, you should be able to run this application from the command line:
+
+     $ dbtools
+
+ Using `thor`, you can also run this application without installing the gem. Install `thor` using:
+
+     $ gem install thor
+
+ Then run `thor` from the folder where this source code is located:
+
+     $ thor dbtools
+
+ ## Commands
+
+ This gem allows you to run a few tasks from the command line, using the `dbtools` executable. The same commands are available using `thor`.
+
+ `dbtools check casing [URL]`
+ `dbtools check completeness [URL]`
+ `dbtools check compression [URL]`
+ `dbtools check database_comments [URL]`
+ `dbtools check help [COMMAND]`
+ `dbtools check indexes [URL]`
+ `dbtools check inverse_functional_property [URL]`
+ `dbtools check keywords [URL]`
+ `dbtools check output_indexes [URL]`
+ `dbtools check spelling [URL]`
+ `dbtools check table_comments [URL]`
+ `dbtools ckan help [COMMAND]`
+ `dbtools ckan list_databases`
+ `dbtools ckan load_dataset [dataset]`
+ `dbtools ckan load_rdf_in_desc [target_database, ckan_dataset]`
+ `dbtools ckan load_resource [dataset, resource]`
+ `dbtools dump help [COMMAND]`
+ `dbtools dump schema [URL, PATH]`
+ `dbtools google_drive download [File_id, target_dir(optional)]`
+ `dbtools google_drive export [File_id, format, target_dir(optional)]`
+ `dbtools google_drive help [COMMAND]`
+ `dbtools google_drive list`
+ `dbtools import convert_to_utf8 [file]`
+ `dbtools import create_schema [csv_file, table_name]`
+ `dbtools import csv_in postgres [csv_file, database_name, table_name]`
+ `dbtools import excel [database_name, file]`
+ `dbtools import excel2csv [database_name, file]`
+ `dbtools import help [COMMAND]`
+ `dbtools import mysql_dump [database_name, dump_file]`
+ `dbtools import postgres_dump [database_name, dump_file]`
+ `dbtools postgres help [COMMAND]`
+ `dbtools postgres load_functions [database_url]`
+
+ ### Namespaces
+ All commands are grouped by the type of task they perform.
+
+ #### Check
+ This describes all database checking related tasks. The output of these tasks is printed to STDOUT.
+ All tasks take a URL as argument. This should be the url to the database in the following format: adapter://username:password@host/database.
+ Both MySQL and Postgres are supported.
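+
+ For example, to check one of the databases listed in the shipped `config/databases.txt` (an assumed local setup, credentials are illustrative):
+
+     $ dbtools check indexes postgres://postgres:postgres@localhost/dvdrental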
+
+ **all [URL]**
+
+ This runs the following tasks on the specified database:
+ + indexes
+ + keywords
+ + casing
+ + completeness
+ + compression
+ + inverse_functional_property
+
+ **all_databases**
+
+ This runs the task `all` on all databases it can find. Database credentials must be specified in `~/.dbtools/database_config.yml`.
+
+ **casing [URL]**
+
+ This task outputs all columns that have a column name with uppercase characters in it.
+
+ **completeness [URL]**
+
+ This task outputs the completeness of every column. For every column, it counts the number of empty/null entries and the total number of entries.
+
+ **compression [URL]**
+
+ This task counts, for every column, the number of distinct lowercased entries and the number of distinct entries.
+
+ **database_comments [URL]**
+
+ This task checks whether a database has a comment or not. This does not work on MySQL, because MySQL has no support for database comments.
+
+ **indexes [URL]**
+
+ This task checks whether database columns are missing indexes. It looks for all columns that are named `*_id` and do not have an index.
+
+ **inverse_functional_property [URL]**
+
+ This task checks the inverse functional property of the database. It checks for distinct values and total values.
+
+ **keywords [URL]**
+
+ This task checks for column names with reserved keywords in them.
+
+ **output_indexes [URL]**
+
+ This outputs the command to create the missing index on the column.
+
+ **spelling [URL]**
+
+ This task checks for spelling mistakes in column names.
+
+ **table_comments [URL]**
+
+ This task checks whether a table has a comment or not.
+
+ #### Google Drive
+ This describes all Google Drive related tasks. The tool will prompt for Google Drive credentials if they don't exist yet.
+
+ **download [File_id, target_dest(optional)]**
+
+ `download` will download a file stored on Google Drive.
+ An optional target directory can be given. This will download the file into the directory,
+ using the same name as on Google Drive. If a Google Doc file is given, it will automatically
+ export it to the most commonly used format.
+ If no target directory is given, the file will be streamed to STDOUT.
+
+ Examples:
+
+     $ dbtools google_drive download 0B67ew1eLtcXxeUVmTndialhTRTA /tmp/target_destination/
+     $ dbtools google_drive download 0B67ew1eLtcXxeUVmTndialhTRTA > /tmp/image.jpg
+
+ **export [File_id, format, target_dir(optional)]**
+
+ `export` will download a Google document stored on Google Drive, such as Google Spreadsheets, Google Slides, etc.
+ `download` doesn't work for these files; you need to specify the export format.
+ The supported conversion formats can be found here:
+ https://developers.google.com/drive/v3/web/manage-downloads#downloading_a_file
+
+ An optional target directory can be given. This will download the file into the directory,
+ using the same name as on Google Drive.
+ If no target directory is given, the file will be streamed to STDOUT.
+
+ Examples:
+
+     $ dbtools google_drive export 0B67ew1eLtcXxeUVmTndialhTRTA 'text/plain' /tmp/target_dir/
+     $ dbtools google_drive export 0B67ew1eLtcXxeUVmTndialhTRTA 'application/pdf' > /tmp/test.pdf
+
+ #### Import
+ This describes all import related tasks. The target databases must be specified in `~/.dbtools/database_config.yml`.
+
+ **convert_to_utf8 [file]**
+
+ This task converts a file to UTF-8 encoding. It creates a temporary copy of the file, converting every line to UTF-8.
+ After it is done, it replaces the original file with the UTF-8 encoded copy.
+
+ **create_schema [csv_file, table_name]**
+
+ This will output a table schema for a csv file.
+
+ **csv_in postgres [csv_file, database_name, table_name]**
+
+ This will import a csv file into the Postgres database. You have to specify the target database name and target table name.
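+
+ Example (the file and names are illustrative):
+
+     $ dbtools import csv_in postgres data.csv target_database target_table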
+
+ **excel [database_name, file]**
+
+ This will import an Excel file into the Postgres database. It transforms the Excel file into csv files: every sheet becomes a new csv file, and thus a new table.
+ You only have to specify the target database name. The table name is created from the Excel file name and the sheet name.
+
+ **excel2csv [file, target_dir]**
+
+ This will export an Excel file to csv file(s). Every sheet becomes a new csv file.
+
+ **mysql_dump [database_name, dump_file]**
+
+ This will import a MySQL dump into the MySQL database.
+
+ **postgres_dump [database_name, dump_file]**
+
+ This will import a Postgres dump into the Postgres database.
+
+ #### Postgres
+
+ **load_functions [database_url]**
+
+ This will load all PL/SQL functions in the `lib/dbtools/plsql_functions` folder into the target Postgres database.
+
+ ##### PL/SQL functions
+
+ The following PL/SQL functions are included:
+
+ **connect_server(servername, host, port, dbname, server_user, server_password)**
+
+ Creates a server with name `servername`, connected to `host:port` and database `dbname`. Credentials for the `host:port`
+ must be given in `server_user` and `server_password`. It will also create a user mapping for the current user.
+
+ **link(servername, schema, table)**
+
+ After creating a server, you can simply call this function with the schema name and table name of the foreign server.
+ This will create a foreign table in the current database.
+
+ **unlink(schema, table)**
+
+ This will drop the foreign table.
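+
+ A minimal usage sketch, assuming the functions have been loaded with `load_functions` and `conn` is an established ActiveRecord connection to the target database; the server name, host and credentials below are illustrative:
+
+     conn = ActiveRecord::Base.connection
+     # create the foreign server plus a user mapping for the current user
+     conn.execute("SELECT connect_server('sales_srv', 'db.example.com', '5432', 'sales', 'sales_user', 'secret')")
+     # expose public.orders on the foreign server as a foreign table here
+     conn.execute("SELECT link('sales_srv', 'public', 'orders')")
+     # drop the foreign table again
+     conn.execute("SELECT unlink('public', 'orders')")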
+
+ #### Convert
+
+ **csv2rdf [csv_file, rdf_uri]**
+
+ `csv2rdf csv_file uri` will convert a csv file to an RDF N-Triples file.
+ The URI will be the subject in the resulting RDF file.
+
+ You can optionally specify a third parameter, which will write
+ the output to a file. You can also specify a fourth parameter to compress
+ the file to .gz format.
+
+ Example:
+
+     $ dbtools convert csv2rdf data.csv http://geophy.data output.nt
+
+ Resulting triples will look like:
+
+     <http://geophy.data#ROWNUMBER> <http://geophy.data/COLUMNNAME> VALUE .
+     <http://geophy.data#1> <http://geophy.data/name> "Bob" .
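+
+ A minimal sketch of that row/column-to-triple mapping (illustrative only; the real converter lives in `lib/dbtools/converter/csv2rdf_converter.rb`):
+
+     require 'csv'
+
+     base = 'http://geophy.data'
+     # row number becomes the subject, column name the predicate, cell value the object
+     CSV.foreach('data.csv', headers: true).with_index(1) do |row, i|
+       row.each do |column, value|
+         puts %(<#{base}##{i}> <#{base}/#{column}> "#{value}" .)
+       end
+     end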
+
+ # Development
+
+ For development, install `bundler` to manage the gem dependencies. Clone this repository and run the following to install all dependencies:
+
+     $ bundle install
+
+ ## Components
+
+ #### CSV importer
+ + `lib/dbtools/csv_importer.rb`
+
+ ##### Delimiters
+ The csv importer allows you to prepare a table schema for the csv, so the csv can be loaded into Postgres using the `copy` command. When
+ invoking the class, you can specify the delimiter. If no delimiter is specified, it will attempt to detect the delimiter by counting
+ which delimiter occurs most often in the first 10 lines. It is able to detect the following delimiters: `[',', '|', "\t", ';']`.
+ If all occurrences are zero, it will raise an exception.
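+
+ A minimal sketch of that detection logic (illustrative, not the gem's actual code):
+
+     # Count each candidate delimiter in the first 10 lines and
+     # pick the most frequent one; fail if none occurs at all.
+     CANDIDATES = [',', '|', "\t", ';'].freeze
+
+     def detect_delimiter(file)
+       sample = File.foreach(file).first(10).join
+       delimiter, count = CANDIDATES.map { |d| [d, sample.count(d)] }
+                                    .max_by { |_, c| c }
+       raise 'Could not detect a delimiter' if count.zero?
+       delimiter
+     end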
+
+ ##### SQL Type detection
+ To detect the type of a column, it will read the first 10,000 rows of the csv file. For every column, it will then iterate through every entry and
+ keep track of which type it is. If both integers and floats occur, float takes precedence. If a string occurs, that takes precedence
+ over every other type. The Date type has been disabled, because the Ruby date parser accepts any string that contains a date as a Date. E.g. sentences that
+ contain a date are accepted by the date parser, but are invalid in SQL.
+ Integers become BigInt in SQL, floats become Float, and everything else becomes a varchar.
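+
+ A sketch of that precedence rule (illustrative only):
+
+     # string beats float beats integer; any non-numeric entry
+     # immediately makes the whole column a varchar
+     def sql_type(entries)
+       type = nil
+       entries.each do |value|
+         case value
+         when /\A-?\d+\z/      then type ||= 'bigint'
+         when /\A-?\d*\.\d+\z/ then type = 'float'
+         else return 'varchar'
+         end
+       end
+       type || 'varchar'
+     end
+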
+ Improvements that can be made:
+ + Iterate (per line) through all entries instead of just the first 10,000. Currently it loads them all into memory. If you iterate line-by-line, you can
+ check all entries. You probably won't be able to iterate by column, though.
+ + Support for more datatypes. Currently BigInt is used for the integer type to prevent errors when integers get too large. If you checked all entries,
+ you could keep track of the largest entry encountered and, based on that, specify the type as either SmallInt, Int or BigInt.
+ + Conversion to SQL commands. For each row in the csv, you could generate a SQL insert command. That way, you could run a command to create an entire table.
+
+ #### Excel to csv converter
+ + `lib/dbtools/excel2csv.rb`
+
+ The Excel to csv converter uses the `roo` and `roo-xls` gems to provide Excel reading support. The `roo` gem provides support for modern files, such as `.ods` and `.xlsx`, while the `roo-xls` gem
+ provides support for older Excel files, such as `.xls` or `.xml` in SpreadsheetML format.
+ The output folder has to be specified. The output csv file name(s) are combined from the Excel file name and the sheet name.
+
+ #### Dynamic SQL queries
+ + `lib/dbtools/database.rb`
+ + `lib/dbtools/db_connection.rb`
+ + `lib/dbtools/postgres_connection.rb`
+ + `lib/dbtools/mysql_connection.rb`
+
+ To create the metrics, a query has to be constructed taking into account all the tables, columns, etc. Classes are used to represent this structure and store the data.
+ The results can also be stored in these classes. The Table class allows you to construct the query. The result can be stored in the Column class. Another process should
+ run the query and store the result in the Column classes. The classes contain no logic to connect to and query databases themselves.
+ All constructed queries use quotes around the name identifiers. This is to prevent errors when identifiers use spaces or reserved keywords in their name.
+
+ The Postgres dialect supports quotes for identifiers. MySQL uses backticks for identifiers. In the code, if the MySQL connection is used,
+ it will replace all quotes with backticks.
+ Queries are executed using an ActiveRecord connection. The Postgres adapter, by default, returns a hash. The MySQL adapter needs another option to turn it into a Hash.
+ If you use the `execute_query` wrapper command, it will do this automatically.
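+
+ A sketch of what such a wrapper could look like (hypothetical code, not the gem's actual implementation):
+
+     # Rewrite Postgres-style quoted identifiers for MySQL, then execute
+     # the query so that rows come back as hashes on both adapters.
+     def execute_query(query)
+       query = query.tr('"', '`') if @adapter == :mysql
+       result = @connection.execute(query)
+       @adapter == :mysql ? result.each(as: :hash).to_a : result.to_a
+     end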
+
+ #### Importer
+ + `lib/tasks/import.rb`
+
+ ##### UTF-8 Conversion
+ Since all databases use UTF-8 encoding, a csv file that is to be loaded must be in UTF-8 encoding too. If it is not, the Postgres
+ copy command might fail when it detects an invalid character sequence.
+ That is why a csv file is converted to UTF-8 encoding before it is loaded into the database.
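+
+ A minimal sketch of that conversion (illustrative; the source encoding is assumed here, the real task may determine it differently):
+
+     require 'fileutils'
+
+     # Write a UTF-8 copy line by line, then replace the original file.
+     def convert_to_utf8(path, from = 'ISO-8859-1')
+       tmp = "#{path}.utf8"
+       File.open(tmp, 'w:UTF-8') do |out|
+         File.foreach(path, encoding: from) do |line|
+           out.write(line.encode('UTF-8', invalid: :replace, undef: :replace))
+         end
+       end
+       FileUtils.mv(tmp, path)
+     end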
+
+ ##### Names
+ For loading Excel and csv files, a database name must be specified. This must be a valid database name. For loading a single csv file, the table name must also be specified.
+ Excel files use the file name + sheet name as the table name. For dumps, the database name can be specified, but depending on how the dump was created,
+ it might create its own database.
+
+ ##### Process
+ The entire Excel and csv loading process is as follows (a code sketch follows below):
+ + excel: convert every sheet to a single csv file, then follow the csv process
+ + csv: convert to UTF-8 => create the table schema command => connect to the database using an ActiveRecord connection => run the create database command => run the table schema command => use `copy` to load the csv into the database
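+
+ In code, the csv path could look roughly like this (hypothetical helper names, shown only to make the order of the steps concrete):
+
+     def load_csv(csv_file, database, table)
+       convert_to_utf8(csv_file)                    # re-encode the file in place
+       schema_sql = create_schema(csv_file, table)  # infer a CREATE TABLE statement
+       server = connect('postgres')                 # ActiveRecord connection to the server
+       server.execute(%(CREATE DATABASE "#{database}"))
+       conn = connect(database)
+       conn.execute(schema_sql)
+       pg = conn.raw_connection                     # use COPY for the bulk load
+       pg.copy_data(%(COPY "#{table}" FROM STDIN CSV HEADER)) do
+         File.foreach(csv_file) { |line| pg.put_copy_data(line) }
+       end
+     end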
+
+ #### Google Drive
+ + `lib/dbtools/google_drive_api.rb`
+ + `lib/tasks/googledrivetool.rb`
+
+ The `google_drive_api.rb` class connects this client to Google Drive, with authorization using a read-only scope. The `client_secret_geophy.json` file specifies which
+ Google Drive it should connect to. It will prompt for authorization if it tries to authenticate without credentials available.
+ The credentials are stored in `~/.credentials/dbtools_geophy.yml`.
+
+ ##### Downloading files
+ Downloading files is done using the `get_file` function. This does not work for Google Documents, however; these files have to be fetched using the `export_file` function.
+ When using the `export_file` function, the output format has to be specified. There does not seem to be a way to know whether a file is a Google Document or a
+ regular file by looking at just the metadata. When no download destination is specified, it will return the metadata.
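+
+ With the google-api-client gem the gemspec pins (`~> 0.9`), the two download paths look roughly like this, assuming `drive` is an authorized `Google::Apis::DriveV3::DriveService`:
+
+     # regular files: download the binary content as-is
+     drive.get_file(file_id, download_dest: '/tmp/data.csv')
+     # Google Documents: must be exported to a concrete format instead
+     drive.export_file(file_id, 'text/csv', download_dest: '/tmp/sheet.csv')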
325
+
326
+ #### RDF
327
+ The RDF reader must be used with CKAN with the ckanext-dcat plugin. This allows you to query the RDF graph, and return metadata about the dataset and/or resources.
328
+ This way, it knows where a Google Drive file is located, what the database name should be, etc.
329
+ The CKAN RDF catalog uses pagination, so the RDF reader will load all pages. It can then run SPARQL queries to find datasets.
330
+ To improve this, you might be able to use the CKAN API instead. Parsing the RDF graph seems to be a bit slow, and you have to specify the full url of
331
+ the dataset/resource(unless you use a regex filter).
332
+
333
+
@@ -0,0 +1 @@
+ require 'dbtools'
@@ -0,0 +1,5 @@
+ #!/usr/bin/env ruby
+
+ require 'dbtools'
+
+ Dbtools::Main.start(ARGV)
@@ -0,0 +1 @@
+ {"installed":{"client_id":"494523368434-74cacnqnkqrj6nbhcfqluajqrv9d5uuf.apps.googleusercontent.com","project_id":"sacred-archway-138523","auth_uri":"https://accounts.google.com/o/oauth2/auth","token_uri":"https://accounts.google.com/o/oauth2/token","auth_provider_x509_cert_url":"https://www.googleapis.com/oauth2/v1/certs","client_secret":"d1aJVQiyDhNX4cPn6395ez1a","redirect_uris":["urn:ietf:wg:oauth:2.0:oob","http://localhost"]}}
@@ -0,0 +1 @@
+ defaults:
@@ -0,0 +1,12 @@
+ mysql:
+   adapter: mysql2
+   username: root
+   password: root
+   host: localhost
+
+ postgres:
+   adapter: postgres
+   username: postgres
+   password: postgres
+   host: localhost
+   port: 5432
@@ -0,0 +1,5 @@
+ postgres://postgres:postgres@localhost/dvdrental
+ postgres://postgres:postgres@localhost/ckan_test
+ postgres://postgres:postgres@localhost/PointOfInterest
+ mysql2://root:root@localhost/world
+ mysql2://root:root@localhost/classicmodels
@@ -0,0 +1,8 @@
+ DB_LIST = File.join(Dir.home, '.dbtools/databases.txt')
+
+ # Run `dbtools check all` against every configured database every 4 days.
+ every 4.day, :at => '1:00am' do
+   File.foreach(DB_LIST) do |db|
+     # strip the trailing newline File.foreach yields, so the cron line stays intact
+     command "dbtools check all #{db.strip}"
+   end
+ end
+
@@ -0,0 +1,37 @@
+ # coding: utf-8
+ lib = File.expand_path('../lib', __FILE__)
+ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+ require "dbtools/version"
+
+
+ Gem::Specification.new do |spec|
+   spec.name          = "dbtools"
+   spec.version       = Dbtools::VERSION
+   spec.authors       = ["Hung Nguyen"]
+   spec.email         = ["h.nguyen@geophy.com"]
+
+   spec.summary       = "Tool for database maintenance."
+
+   spec.files         = Dir['Rakefile', '{bin,lib,man,test,spec,tasks,config}/**/*', 'README*', 'LICENSE*', '*.gemspec', 'Thorfile*']
+
+   spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
+   spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
+   spec.require_paths = ["lib"]
+
+   spec.add_dependency 'rdf-reasoner', '~> 0.4.1'
+   spec.add_dependency 'thor', '~> 0.19.1'
+   spec.add_dependency 'activerecord', '~> 4.2'
+   spec.add_dependency 'activesupport', '~> 4.2'
+   spec.add_dependency 'activemodel', '~> 4.2'
+   spec.add_dependency 'google-api-client', '~> 0.9'
+   spec.add_dependency 'linkeddata', '~> 2.0'
+   spec.add_dependency 'whenever'
+   spec.add_dependency 'roo', '~> 2.4'
+   spec.add_dependency 'roo-xls', '~> 1.0'
+   spec.add_dependency 'ru_bee', '~> 0.1'
+   spec.add_dependency 'mysql2', '~> 0.4'
+   spec.add_dependency 'pg', '~> 0.18'
+   spec.add_dependency 'spira', '~> 2.0.0'
+   spec.add_dependency 'aws-sdk', '~> 2.7'
+   spec.add_development_dependency "bundler", "~> 1.12"
+ end
@@ -0,0 +1,47 @@
+ require 'thor'
+ require 'dbtools/version'
+ require_relative 'rdf/geophy'
+ require 'tasks/aws'
+ require 'tasks/check'
+ require 'tasks/dump'
+ require 'tasks/googledrivetool'
+ require 'tasks/ckan'
+ require 'tasks/import'
+ require 'tasks/postgres'
+ require 'tasks/convert'
+ require 'tasks/backup'
+ require 'fileutils'
+
+ module Dbtools
+   class Main < Thor
+     # Alias module GoogleDrive => Google_Drive
+     Dbtools::GoogleDrive = Dbtools::Google_Drive
+
+     register(Check, 'check', 'check [COMMAND]', 'Runs database checking tasks')
+     register(Dump, 'dump', 'dump [COMMAND]', 'Runs schema dumping tasks')
+     register(Google_drive, 'google_drive', 'google_drive [COMMAND]', 'Runs Google Drive related tasks')
+     register(Google_drive, 'googledrive', 'googledrive [COMMAND]', 'Runs Google Drive related tasks')
+     register(Aws, 'aws', 'aws [COMMAND]', 'Runs AWS related tasks')
+     register(Backup, 'backup', 'backup [COMMAND]', 'Runs backup tasks')
+     register(Import, 'import', 'import [COMMAND]', 'Runs database import related tasks')
+     register(Postgres, 'postgres', 'postgres [COMMAND]', 'Runs Postgres related tasks')
+     register(Convert, 'convert', 'convert [COMMAND]', 'Runs converting related tasks')
+
+     def initialize(*args)
+       super
+       init_config
+     end
+
+     desc 'init_config', 'Inits DbTools configuration'
+     def init_config
+       begin
+         FileUtils.mkdir_p(File.join(Dir.home, '.dbtools'))
+         FileUtils.copy(File.join(Dir.pwd, 'config', 'config.yml'), Dbtools::Constants::CONFIG_PATH) unless File.exists?(Dbtools::Constants::CONFIG_PATH)
+         FileUtils.copy(File.join(Dir.pwd, 'config', 'database_config.yml'), Dbtools::Constants::DB_TARGET_CONFIG_PATH) unless File.exists?(Dbtools::Constants::DB_TARGET_CONFIG_PATH)
+       rescue StandardError => e
+         STDERR.puts e.message
+         STDERR.puts "Warning: not all functions might work."
+       end
+     end
+   end
+ end