dbdoc 0.1.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/.gitignore +11 -0
- data/.rspec +3 -0
- data/.rubocop.yml +2 -0
- data/.travis.yml +6 -0
- data/CHANGELOG.md +3 -0
- data/CODE_OF_CONDUCT.md +12 -0
- data/Gemfile +13 -0
- data/Gemfile.lock +85 -0
- data/LICENSE.txt +21 -0
- data/README.md +90 -0
- data/Rakefile +8 -0
- data/bin/dbdoc +6 -0
- data/config/confluence.yml +3 -0
- data/config/default.yml +24 -0
- data/config/gitignore.template +2 -0
- data/config/schema_queries/mysql.sql +12 -0
- data/config/schema_queries/pg.sql +13 -0
- data/config/schema_queries/redshift.sql +13 -0
- data/dbdoc.gemspec +25 -0
- data/doc_files/columns.md.erb +4 -0
- data/doc_files/columns.yml.erb +8 -0
- data/doc_files/table_description.md +1 -0
- data/doc_files/table_example.md +10 -0
- data/lib/confluence/api.rb +142 -0
- data/lib/dbdoc.rb +11 -0
- data/lib/dbdoc/cli.rb +130 -0
- data/lib/dbdoc/config.rb +28 -0
- data/lib/dbdoc/constants.rb +3 -0
- data/lib/dbdoc/manager.rb +240 -0
- data/lib/dbdoc/uploader.rb +225 -0
- data/lib/dbdoc/version.rb +3 -0
- metadata +80 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
|
|
1
|
+
---
|
2
|
+
SHA256:
|
3
|
+
metadata.gz: 35a675eef6484bf4c2c6bb78521c9f322ff057804feec72e34061eb899153352
|
4
|
+
data.tar.gz: 5e80744c319d0e2ea77b321704e317467e871e9d93a53ad77871c655106b8f3b
|
5
|
+
SHA512:
|
6
|
+
metadata.gz: b49a2edb0d2e31280b4bd8ed29ef109da6ff38fd6669df30a260f049ad5ad841bea5425d3c68fd9862f8e74116f4cbcb90e767f69e44aeed2c0910429ba13ac1
|
7
|
+
data.tar.gz: 18b09c76bc60de2ffb1a574a9695d03b1be0ba5c40b3bedbeb2d59eaefbd4b431b4a370ea52cb46defb34a388b9a177e2dd6c4496e7a78bdf1c3d7ed0d607a82
|
data/.gitignore
ADDED
data/.rspec
ADDED
data/.rubocop.yml
ADDED
data/.travis.yml
ADDED
data/CHANGELOG.md
ADDED
data/CODE_OF_CONDUCT.md
ADDED
@@ -0,0 +1,12 @@
|
|
1
|
+
# The DbDoc Community Code of Conduct
|
2
|
+
|
3
|
+
**Note:** DbDoc Code of Conduct is based on the [Ruby's CoC](https://www.ruby-lang.org/en/conduct/) which, in turn, is based on an early proposed draft of [the PostgreSQL CoC](https://www.postgresql.org/about/policies/coc/).
|
4
|
+
|
5
|
+
This document provides a few simple guidelines for a safe, respectful,
|
6
|
+
productive, and collaborative place for any person who is willing to contribute
|
7
|
+
to the DbDoc gem. It applies to all "collaborative spaces", which are Github issues, PRs, commit messages, emails, etc.
|
8
|
+
|
9
|
+
* Participants will be tolerant of opposing views.
|
10
|
+
* Participants must ensure that their language and actions are free of personal attacks and disparaging personal remarks.
|
11
|
+
* When interpreting the words and actions of others, participants should always assume good intentions.
|
12
|
+
* Behaviour which can be reasonably considered harassment will not be tolerated.
|
data/Gemfile
ADDED
data/Gemfile.lock
ADDED
@@ -0,0 +1,85 @@
|
|
1
|
+
PATH
|
2
|
+
remote: .
|
3
|
+
specs:
|
4
|
+
dbdoc (0.1.0)
|
5
|
+
|
6
|
+
GEM
|
7
|
+
remote: https://rubygems.org/
|
8
|
+
specs:
|
9
|
+
ast (2.4.1)
|
10
|
+
byebug (11.1.3)
|
11
|
+
coveralls (0.8.23)
|
12
|
+
json (>= 1.8, < 3)
|
13
|
+
simplecov (~> 0.16.1)
|
14
|
+
term-ansicolor (~> 1.3)
|
15
|
+
thor (>= 0.19.4, < 2.0)
|
16
|
+
tins (~> 1.6)
|
17
|
+
diff-lcs (1.4.4)
|
18
|
+
docile (1.3.2)
|
19
|
+
httparty (0.18.1)
|
20
|
+
mime-types (~> 3.0)
|
21
|
+
multi_xml (>= 0.5.2)
|
22
|
+
json (2.3.1)
|
23
|
+
mime-types (3.3.1)
|
24
|
+
mime-types-data (~> 3.2015)
|
25
|
+
mime-types-data (3.2020.0512)
|
26
|
+
multi_xml (0.6.0)
|
27
|
+
parallel (1.19.2)
|
28
|
+
parser (2.7.1.4)
|
29
|
+
ast (~> 2.4.1)
|
30
|
+
rainbow (3.0.0)
|
31
|
+
rake (12.3.3)
|
32
|
+
regexp_parser (1.7.1)
|
33
|
+
rexml (3.2.4)
|
34
|
+
rspec (3.9.0)
|
35
|
+
rspec-core (~> 3.9.0)
|
36
|
+
rspec-expectations (~> 3.9.0)
|
37
|
+
rspec-mocks (~> 3.9.0)
|
38
|
+
rspec-core (3.9.2)
|
39
|
+
rspec-support (~> 3.9.3)
|
40
|
+
rspec-expectations (3.9.2)
|
41
|
+
diff-lcs (>= 1.2.0, < 2.0)
|
42
|
+
rspec-support (~> 3.9.0)
|
43
|
+
rspec-mocks (3.9.1)
|
44
|
+
diff-lcs (>= 1.2.0, < 2.0)
|
45
|
+
rspec-support (~> 3.9.0)
|
46
|
+
rspec-support (3.9.3)
|
47
|
+
rubocop (0.88.0)
|
48
|
+
parallel (~> 1.10)
|
49
|
+
parser (>= 2.7.1.1)
|
50
|
+
rainbow (>= 2.2.2, < 4.0)
|
51
|
+
regexp_parser (>= 1.7)
|
52
|
+
rexml
|
53
|
+
rubocop-ast (>= 0.1.0, < 1.0)
|
54
|
+
ruby-progressbar (~> 1.7)
|
55
|
+
unicode-display_width (>= 1.4.0, < 2.0)
|
56
|
+
rubocop-ast (0.2.0)
|
57
|
+
parser (>= 2.7.0.1)
|
58
|
+
ruby-progressbar (1.10.1)
|
59
|
+
simplecov (0.16.1)
|
60
|
+
docile (~> 1.1)
|
61
|
+
json (>= 1.8, < 3)
|
62
|
+
simplecov-html (~> 0.10.0)
|
63
|
+
simplecov-html (0.10.2)
|
64
|
+
sync (0.5.0)
|
65
|
+
term-ansicolor (1.7.1)
|
66
|
+
tins (~> 1.0)
|
67
|
+
thor (1.0.1)
|
68
|
+
tins (1.25.0)
|
69
|
+
sync
|
70
|
+
unicode-display_width (1.7.0)
|
71
|
+
|
72
|
+
PLATFORMS
|
73
|
+
ruby
|
74
|
+
|
75
|
+
DEPENDENCIES
|
76
|
+
byebug
|
77
|
+
coveralls
|
78
|
+
dbdoc!
|
79
|
+
httparty
|
80
|
+
rake (~> 12.0)
|
81
|
+
rspec (~> 3.0)
|
82
|
+
rubocop
|
83
|
+
|
84
|
+
BUNDLED WITH
|
85
|
+
2.1.4
|
data/LICENSE.txt
ADDED
@@ -0,0 +1,21 @@
|
|
1
|
+
The MIT License (MIT)
|
2
|
+
|
3
|
+
Copyright (c) 2020 Anatoli Makarevich
|
4
|
+
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
7
|
+
in the Software without restriction, including without limitation the rights
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
10
|
+
furnished to do so, subject to the following conditions:
|
11
|
+
|
12
|
+
The above copyright notice and this permission notice shall be included in
|
13
|
+
all copies or substantial portions of the Software.
|
14
|
+
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
21
|
+
THE SOFTWARE.
|
data/README.md
ADDED
@@ -0,0 +1,90 @@
|
|
1
|
+
[![Build Status](https://travis-ci.org/sqlhabit/dbdoc.svg?branch=master)](https://travis-ci.org/sqlhabit/dbdoc)
|
2
|
+
|
3
|
+
# dbdoc
|
4
|
+
|
5
|
+
Welcome to your new gem! In this directory, you'll find the files you need to be able to package up your Ruby library into a gem. Put your Ruby code in the file `lib/dbdoc`. To experiment with that code, run `bin/console` for an interactive prompt.
|
6
|
+
|
7
|
+
TODO: Delete this and the text above, and describe your gem
|
8
|
+
|
9
|
+
## Installation
|
10
|
+
|
11
|
+
Add this line to your application's Gemfile:
|
12
|
+
|
13
|
+
```ruby
|
14
|
+
gem 'dbdoc'
|
15
|
+
```
|
16
|
+
|
17
|
+
And then execute:
|
18
|
+
|
19
|
+
$ bundle install
|
20
|
+
|
21
|
+
Or install it yourself as:
|
22
|
+
|
23
|
+
$ gem install dbdoc
|
24
|
+
|
25
|
+
## Usage
|
26
|
+
|
27
|
+
```
|
28
|
+
dbdoc help
|
29
|
+
dbdoc init
|
30
|
+
dbdoc query
|
31
|
+
dbdoc plan
|
32
|
+
dbdoc apply
|
33
|
+
dbdoc upload
|
34
|
+
```
|
35
|
+
|
36
|
+
## Development
|
37
|
+
|
38
|
+
After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
|
39
|
+
|
40
|
+
To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org).
|
41
|
+
|
42
|
+
```
|
43
|
+
gem uninstall dbdoc && gem build dbdoc.gemspec && bundle && rake build && rake install && dbdoc help
|
44
|
+
```
|
45
|
+
|
46
|
+
## Contributing
|
47
|
+
|
48
|
+
Bug reports and pull requests are welcome on GitHub at https://github.com/[USERNAME]/dbdoc.
|
49
|
+
|
50
|
+
|
51
|
+
## License
|
52
|
+
|
53
|
+
The gem is available as open source under the terms of the [MIT License](https://opensource.org/licenses/MIT).
|
54
|
+
|
55
|
+
## Schema queries
|
56
|
+
|
57
|
+
### Postgres / Redshift
|
58
|
+
|
59
|
+
```sql
|
60
|
+
SELECT
|
61
|
+
t.table_schema,
|
62
|
+
t.table_name,
|
63
|
+
c.column_name,
|
64
|
+
c.data_type,
|
65
|
+
c.ordinal_position
|
66
|
+
FROM information_schema.tables t
|
67
|
+
LEFT JOIN information_schema.columns c
|
68
|
+
ON t.table_schema = c.table_schema
|
69
|
+
AND t.table_name = c.table_name
|
70
|
+
WHERE
|
71
|
+
t.table_schema NOT IN ('information_schema', 'pg_catalog')
|
72
|
+
ORDER BY 1, 2, 5
|
73
|
+
```
|
74
|
+
|
75
|
+
### MySQL
|
76
|
+
|
77
|
+
```sql
|
78
|
+
SELECT
|
79
|
+
c.table_schema,
|
80
|
+
c.table_name,
|
81
|
+
c.column_name,
|
82
|
+
c.data_type,
|
83
|
+
c.ordinal_position
|
84
|
+
FROM information_schema.columns c
|
85
|
+
LEFT JOIN information_schema.views v
|
86
|
+
ON v.table_schema = c.table_schema
|
87
|
+
AND v.table_name = c.table_name
|
88
|
+
WHERE
|
89
|
+
c.table_schema NOT IN ('sys','information_schema', 'mysql', 'performance_schema')
|
90
|
+
```
|
data/Rakefile
ADDED
data/bin/dbdoc
ADDED
data/config/default.yml
ADDED
@@ -0,0 +1,24 @@
|
|
1
|
+
db:
|
2
|
+
type: pg # Choose one from redshift | pg | mysql
|
3
|
+
name: your_database_name
|
4
|
+
ignorelist:
|
5
|
+
# 1. Ignore a single table:
|
6
|
+
# - public.users
|
7
|
+
|
8
|
+
# 2. Ignore all tables with prefix:
|
9
|
+
# - public.users*
|
10
|
+
|
11
|
+
# 3. Ignore a whole schema:
|
12
|
+
# - public.*
|
13
|
+
|
14
|
+
# 3a. Also ignore a whole schema:
|
15
|
+
# - information_schema
|
16
|
+
|
17
|
+
# 4. Ignore all schemas that start with a prefix:
|
18
|
+
# - pg_*
|
19
|
+
|
20
|
+
# 5. Ignore a column:
|
21
|
+
# - public.users#id
|
22
|
+
|
23
|
+
# 6. Ignore all columns that include a substring:
|
24
|
+
# - adjust.users#*_account_*
|
@@ -0,0 +1,12 @@
|
|
1
|
+
SELECT
|
2
|
+
c.table_schema,
|
3
|
+
c.table_name,
|
4
|
+
c.column_name,
|
5
|
+
c.data_type,
|
6
|
+
c.ordinal_position
|
7
|
+
FROM information_schema.columns c
|
8
|
+
LEFT JOIN information_schema.views v
|
9
|
+
ON v.table_schema = c.table_schema
|
10
|
+
AND v.table_name = c.table_name
|
11
|
+
WHERE
|
12
|
+
c.table_schema NOT IN ('sys','information_schema', 'mysql', 'performance_schema')
|
@@ -0,0 +1,13 @@
|
|
1
|
+
SELECT
|
2
|
+
t.table_schema,
|
3
|
+
t.table_name,
|
4
|
+
c.column_name,
|
5
|
+
c.data_type,
|
6
|
+
c.ordinal_position
|
7
|
+
FROM information_schema.tables t
|
8
|
+
LEFT JOIN information_schema.columns c
|
9
|
+
ON t.table_schema = c.table_schema
|
10
|
+
AND t.table_name = c.table_name
|
11
|
+
WHERE
|
12
|
+
t.table_schema NOT IN ('information_schema', 'pg_catalog')
|
13
|
+
ORDER BY 1, 2, 5
|
@@ -0,0 +1,13 @@
|
|
1
|
+
SELECT
|
2
|
+
t.table_schema,
|
3
|
+
t.table_name,
|
4
|
+
c.column_name,
|
5
|
+
c.data_type,
|
6
|
+
c.ordinal_position
|
7
|
+
FROM information_schema.tables t
|
8
|
+
LEFT JOIN information_schema.columns c
|
9
|
+
ON t.table_schema = c.table_schema
|
10
|
+
AND t.table_name = c.table_name
|
11
|
+
WHERE
|
12
|
+
t.table_schema NOT IN ('information_schema', 'pg_catalog')
|
13
|
+
ORDER BY 1, 2, 5
|
data/dbdoc.gemspec
ADDED
@@ -0,0 +1,25 @@
|
|
1
|
+
require_relative "lib/dbdoc/version"
|
2
|
+
|
3
|
+
Gem::Specification.new do |spec|
|
4
|
+
spec.name = "dbdoc"
|
5
|
+
spec.version = Dbdoc::VERSION
|
6
|
+
spec.authors = ["Anatoli Makarevich"]
|
7
|
+
spec.email = ["makaroni4@gmail.com"]
|
8
|
+
|
9
|
+
spec.summary = "Dbdoc is a tool to keep your database documentation up-to-date and version controlled."
|
10
|
+
spec.description = "Dbdoc is a tool to keep your database documentation up-to-date and version controlled."
|
11
|
+
spec.homepage = "https://github.com/sqlhabit/dbdoc"
|
12
|
+
spec.license = "MIT"
|
13
|
+
spec.required_ruby_version = Gem::Requirement.new(">= 2.3.0")
|
14
|
+
|
15
|
+
spec.metadata["homepage_uri"] = spec.homepage
|
16
|
+
spec.metadata["source_code_uri"] = "https://github.com/sqlhabit/dbdoc"
|
17
|
+
spec.metadata["changelog_uri"] = "https://github.com/sqlhabit/dbdoc/blob/master/CHANGELOG.md"
|
18
|
+
|
19
|
+
spec.files = Dir.chdir(File.expand_path(__dir__)) do
|
20
|
+
`git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
|
21
|
+
end
|
22
|
+
|
23
|
+
spec.executables = ["dbdoc"]
|
24
|
+
spec.require_paths = ["lib"]
|
25
|
+
end
|
@@ -0,0 +1 @@
|
|
1
|
+
Write table description here
|
@@ -0,0 +1,142 @@
|
|
1
|
+
require "httparty"
|
2
|
+
require "json"
|
3
|
+
require "yaml"
|
4
|
+
require "dbdoc/constants"
|
5
|
+
|
6
|
+
module Confluence
|
7
|
+
class Api
|
8
|
+
include HTTParty
|
9
|
+
base_uri "dbdoc.atlassian.net"
|
10
|
+
|
11
|
+
CREDENTIALS_FILE = File.join(Dir.pwd, "confluence.yml")
|
12
|
+
|
13
|
+
def initialize
|
14
|
+
credentials = YAML.load(File.read(CREDENTIALS_FILE))
|
15
|
+
@username = credentials["username"]
|
16
|
+
@token = credentials["token"]
|
17
|
+
@space = credentials["space"]
|
18
|
+
end
|
19
|
+
|
20
|
+
def delete_page(page_id:)
|
21
|
+
HTTParty.delete(
|
22
|
+
"https://dbdoc.atlassian.net/wiki/rest/api/content/#{page_id}", {
|
23
|
+
headers: {
|
24
|
+
"Authorization" => "Basic #{basic_auth}",
|
25
|
+
"Content-Type" => "application/json"
|
26
|
+
}
|
27
|
+
}
|
28
|
+
)
|
29
|
+
end
|
30
|
+
|
31
|
+
def existing_pages
|
32
|
+
response = HTTParty.get(
|
33
|
+
"https://dbdoc.atlassian.net/wiki/rest/api/content/?&spaceKey=#{@space}", {
|
34
|
+
headers: {
|
35
|
+
"Authorization" => "Basic #{basic_auth}",
|
36
|
+
"Content-Type" => "application/json"
|
37
|
+
}
|
38
|
+
}
|
39
|
+
)
|
40
|
+
|
41
|
+
JSON.parse(response.body)
|
42
|
+
end
|
43
|
+
|
44
|
+
def update_page(page_id:, body:, page_title:, version:)
|
45
|
+
payload = {
|
46
|
+
id: page_id,
|
47
|
+
type: "page",
|
48
|
+
title: page_title,
|
49
|
+
space: {
|
50
|
+
key: @space
|
51
|
+
},
|
52
|
+
body: {
|
53
|
+
wiki: {
|
54
|
+
value: body,
|
55
|
+
representation: "wiki"
|
56
|
+
}
|
57
|
+
},
|
58
|
+
version: {
|
59
|
+
number: version
|
60
|
+
}
|
61
|
+
}
|
62
|
+
|
63
|
+
response = HTTParty.put(
|
64
|
+
"https://dbdoc.atlassian.net/wiki/rest/api/content/#{page_id}", {
|
65
|
+
headers: {
|
66
|
+
"Authorization" => "Basic #{basic_auth}",
|
67
|
+
"Content-Type" => "application/json"
|
68
|
+
},
|
69
|
+
body: payload.to_json
|
70
|
+
}
|
71
|
+
)
|
72
|
+
|
73
|
+
if response.code == 200
|
74
|
+
{
|
75
|
+
response: response,
|
76
|
+
page_id: JSON.parse(response.body)["id"]
|
77
|
+
}
|
78
|
+
else
|
79
|
+
puts "--> ERROR UPLOADING #{page_title}: "
|
80
|
+
pp response
|
81
|
+
|
82
|
+
{
|
83
|
+
response: response
|
84
|
+
}
|
85
|
+
end
|
86
|
+
end
|
87
|
+
|
88
|
+
def create_page(parent_page_id: nil, body:, page_title:)
|
89
|
+
payload = {
|
90
|
+
type: "page",
|
91
|
+
title: page_title,
|
92
|
+
space: {
|
93
|
+
key: @space
|
94
|
+
},
|
95
|
+
body: {
|
96
|
+
wiki: {
|
97
|
+
value: body,
|
98
|
+
representation: "wiki"
|
99
|
+
}
|
100
|
+
}
|
101
|
+
}
|
102
|
+
|
103
|
+
if parent_page_id
|
104
|
+
payload.merge!({
|
105
|
+
ancestors: [
|
106
|
+
{ id: parent_page_id }
|
107
|
+
]
|
108
|
+
})
|
109
|
+
end
|
110
|
+
|
111
|
+
response = HTTParty.post(
|
112
|
+
"https://dbdoc.atlassian.net/wiki/rest/api/content/", {
|
113
|
+
headers: {
|
114
|
+
"Authorization" => "Basic #{basic_auth}",
|
115
|
+
"Content-Type" => "application/json"
|
116
|
+
},
|
117
|
+
body: payload.to_json
|
118
|
+
}
|
119
|
+
)
|
120
|
+
|
121
|
+
if response.code == 200
|
122
|
+
{
|
123
|
+
response: response,
|
124
|
+
page_id: JSON.parse(response.body)["id"]
|
125
|
+
}
|
126
|
+
else
|
127
|
+
puts "--> ERROR UPLOADING #{page_title}: "
|
128
|
+
pp response
|
129
|
+
|
130
|
+
{
|
131
|
+
response: response
|
132
|
+
}
|
133
|
+
end
|
134
|
+
end
|
135
|
+
|
136
|
+
private
|
137
|
+
|
138
|
+
def basic_auth
|
139
|
+
Base64.encode64("#{@username}:#{@token}").chomp
|
140
|
+
end
|
141
|
+
end
|
142
|
+
end
|
data/lib/dbdoc.rb
ADDED
data/lib/dbdoc/cli.rb
ADDED
@@ -0,0 +1,130 @@
|
|
1
|
+
require "optparse"
|
2
|
+
|
3
|
+
$LOAD_PATH << File.expand_path(__dir__)
|
4
|
+
|
5
|
+
module Dbdoc
|
6
|
+
class CLI
|
7
|
+
def run(args = [])
|
8
|
+
if args.first == "init"
|
9
|
+
require "fileutils"
|
10
|
+
|
11
|
+
schema_folder = File.join(Dir.pwd, "schema")
|
12
|
+
unless Dir.exists?(schema_folder)
|
13
|
+
Dir.mkdir(schema_folder)
|
14
|
+
end
|
15
|
+
|
16
|
+
doc_folder = File.join(Dir.pwd, "doc")
|
17
|
+
unless Dir.exists?(doc_folder)
|
18
|
+
Dir.mkdir(doc_folder)
|
19
|
+
end
|
20
|
+
|
21
|
+
target_file = File.join(Dir.pwd, "config.yml")
|
22
|
+
config_file = File.join(File.expand_path(__dir__), "../..", "config", "default.yml")
|
23
|
+
|
24
|
+
FileUtils.cp(config_file, target_file) unless File.exists?(target_file)
|
25
|
+
|
26
|
+
target_file = File.join(Dir.pwd, ".gitignore")
|
27
|
+
config_file = File.join(File.expand_path(__dir__), "../..", "config", "gitignore.template")
|
28
|
+
|
29
|
+
FileUtils.cp(config_file, target_file) unless File.exists?(target_file)
|
30
|
+
|
31
|
+
target_file = File.join(Dir.pwd, "confluence.yml")
|
32
|
+
config_file = File.join(File.expand_path(__dir__), "../..", "config", "confluence.yml")
|
33
|
+
|
34
|
+
FileUtils.cp(config_file, target_file) unless File.exists?(target_file)
|
35
|
+
|
36
|
+
0
|
37
|
+
elsif args.first == "query"
|
38
|
+
options = extract_options(args)
|
39
|
+
|
40
|
+
config = Dbdoc::Config.load
|
41
|
+
config.merge!(options)
|
42
|
+
|
43
|
+
db_type = config["db"]["type"]
|
44
|
+
query_file = File.join(File.expand_path(__dir__), "../..", "config", "schema_queries", "#{db_type}.sql")
|
45
|
+
query = File.read(query_file)
|
46
|
+
|
47
|
+
puts query
|
48
|
+
|
49
|
+
0
|
50
|
+
elsif args.first == "plan"
|
51
|
+
options = extract_options(args)
|
52
|
+
|
53
|
+
config = Dbdoc::Config.load
|
54
|
+
config.merge!(options)
|
55
|
+
|
56
|
+
manager = Dbdoc::Manager.new(config: config)
|
57
|
+
manager.plan
|
58
|
+
|
59
|
+
0
|
60
|
+
elsif args.first == "apply"
|
61
|
+
options = extract_options(args)
|
62
|
+
|
63
|
+
config = Dbdoc::Config.load
|
64
|
+
config.merge!(options)
|
65
|
+
|
66
|
+
manager = Dbdoc::Manager.new(config: config)
|
67
|
+
manager.apply
|
68
|
+
|
69
|
+
0
|
70
|
+
elsif args.first == "upload"
|
71
|
+
options = extract_options(args)
|
72
|
+
|
73
|
+
config = Dbdoc::Config.load
|
74
|
+
config.merge!(options)
|
75
|
+
|
76
|
+
uploader = Dbdoc::Uploader.new(config: config)
|
77
|
+
uploader.upload
|
78
|
+
|
79
|
+
0
|
80
|
+
elsif args.first == "clear_confluence_space"
|
81
|
+
options = extract_options(args)
|
82
|
+
|
83
|
+
config = Dbdoc::Config.load
|
84
|
+
config.merge!(options)
|
85
|
+
|
86
|
+
uploader = Dbdoc::Uploader.new(config: config)
|
87
|
+
uploader.clear_confluence_space
|
88
|
+
|
89
|
+
0
|
90
|
+
elsif args.first == "help"
|
91
|
+
puts "--> SOME HELP"
|
92
|
+
|
93
|
+
0
|
94
|
+
end
|
95
|
+
|
96
|
+
0
|
97
|
+
end
|
98
|
+
|
99
|
+
private
|
100
|
+
|
101
|
+
# This method is needed to unindent
|
102
|
+
# ["here document"](https://en.wikibooks.org/wiki/Ruby_Programming/Here_documents)
|
103
|
+
# help description.
|
104
|
+
#
|
105
|
+
def unindent(str)
|
106
|
+
str.gsub(/^#{str.scan(/^[ \t]+(?=\S)/).min}/, "")
|
107
|
+
end
|
108
|
+
|
109
|
+
def extract_options(args)
|
110
|
+
options = {}
|
111
|
+
|
112
|
+
OptionParser.new do |opts|
|
113
|
+
opts.banner = unindent(<<-TEXT)
|
114
|
+
dbdoc help
|
115
|
+
|
116
|
+
1. dbdoc query
|
117
|
+
|
118
|
+
This will print you a query you need to run to export your database schema.
|
119
|
+
TEXT
|
120
|
+
|
121
|
+
opts.on("-v", "--version", "Prints current version of dbdoc") do
|
122
|
+
puts Dbdoc::VERSION
|
123
|
+
exit 0
|
124
|
+
end
|
125
|
+
end.parse!(args)
|
126
|
+
|
127
|
+
options
|
128
|
+
end
|
129
|
+
end
|
130
|
+
end
|
data/lib/dbdoc/config.rb
ADDED
@@ -0,0 +1,28 @@
|
|
1
|
+
require "yaml"
|
2
|
+
require "dbdoc/constants"
|
3
|
+
|
4
|
+
module Dbdoc
|
5
|
+
class Config
|
6
|
+
FILE_NAME = "config.yml".freeze
|
7
|
+
DEFAULT_FILE = File.join(DBDOC_HOME, "config", "default.yml")
|
8
|
+
|
9
|
+
class << self
|
10
|
+
# Be default gem will try to load config file in user's project folder.
|
11
|
+
# Then user's config (or empty object) will be merge with the default config
|
12
|
+
# from gem's folder.
|
13
|
+
#
|
14
|
+
def load
|
15
|
+
user_config = File.exist?(user_file) ? YAML.safe_load(File.read(user_file)) : {}
|
16
|
+
default_config = YAML.safe_load(File.read(DEFAULT_FILE))
|
17
|
+
|
18
|
+
default_config.merge(user_config)
|
19
|
+
end
|
20
|
+
|
21
|
+
private
|
22
|
+
|
23
|
+
def user_file
|
24
|
+
File.join(Dir.pwd, FILE_NAME)
|
25
|
+
end
|
26
|
+
end
|
27
|
+
end
|
28
|
+
end
|
@@ -0,0 +1,240 @@
|
|
1
|
+
require "yaml"
|
2
|
+
require "dbdoc/constants"
|
3
|
+
|
4
|
+
module Dbdoc
|
5
|
+
class Manager
|
6
|
+
def initialize(config: {})
|
7
|
+
@config = Dbdoc::Config.load.merge(config)
|
8
|
+
end
|
9
|
+
|
10
|
+
def plan(verbose: true)
|
11
|
+
puts "--> PLAN"
|
12
|
+
puts
|
13
|
+
puts
|
14
|
+
|
15
|
+
input_schema = read_input_schema.map { |r| r.first(4) }.map { |r| r.join(":") }
|
16
|
+
current_schema = read_documented_schema
|
17
|
+
|
18
|
+
puts "--> New columns:"
|
19
|
+
pp input_schema - current_schema
|
20
|
+
puts
|
21
|
+
puts
|
22
|
+
|
23
|
+
puts "--> Columns to drop:"
|
24
|
+
pp current_schema - input_schema
|
25
|
+
end
|
26
|
+
|
27
|
+
def apply(path: Dir.pwd, verbose: true)
|
28
|
+
puts "--> APPLY"
|
29
|
+
puts
|
30
|
+
puts
|
31
|
+
|
32
|
+
input_schema = read_input_schema.map { |r| r.first(4) }.map { |r| r.join(":") }
|
33
|
+
current_schema = read_documented_schema
|
34
|
+
|
35
|
+
added_columns = input_schema - current_schema
|
36
|
+
dropped_columns = current_schema - input_schema
|
37
|
+
|
38
|
+
doc_folder = File.join(Dir.pwd, "doc")
|
39
|
+
|
40
|
+
## DROP COLUMNS
|
41
|
+
dropped_columns.each do |column|
|
42
|
+
schema_name, table_name, column_name, column_type = column.split(":")
|
43
|
+
|
44
|
+
columns_file = File.join(doc_folder, schema_name, table_name, "columns.yml")
|
45
|
+
next unless File.exists?(columns_file)
|
46
|
+
|
47
|
+
columns = YAML.load(File.read(columns_file))
|
48
|
+
columns.reject! { |c| c[:name] == column_name }
|
49
|
+
columns.each { |c| c[:description].strip! }
|
50
|
+
|
51
|
+
File.open(columns_file, "w") do |f|
|
52
|
+
f.puts(columns.to_yaml)
|
53
|
+
end
|
54
|
+
end
|
55
|
+
|
56
|
+
## DROP EMPTY TABLES
|
57
|
+
Dir.entries(doc_folder).each do |schema_name|
|
58
|
+
next if schema_name == "."
|
59
|
+
next if schema_name == ".."
|
60
|
+
|
61
|
+
schema_folder = File.join(doc_folder, schema_name)
|
62
|
+
next unless File.directory?(File.join(doc_folder, schema_name))
|
63
|
+
|
64
|
+
Dir.entries(schema_folder).each do |table_name|
|
65
|
+
next if table_name == "."
|
66
|
+
next if table_name == ".."
|
67
|
+
|
68
|
+
table_folder = File.join(schema_folder, table_name)
|
69
|
+
next unless File.directory?(table_folder)
|
70
|
+
|
71
|
+
columns_file = File.join(table_folder, "columns.yml")
|
72
|
+
next unless File.exists?(columns_file)
|
73
|
+
|
74
|
+
columns = YAML.load(File.read(columns_file))
|
75
|
+
|
76
|
+
if columns.empty?
|
77
|
+
puts "--> DELETING #{schema_name}.#{table_name}"
|
78
|
+
FileUtils.rm_rf(table_folder)
|
79
|
+
end
|
80
|
+
end
|
81
|
+
end
|
82
|
+
|
83
|
+
## DROP EMPTY SCHEMAS
|
84
|
+
Dir.entries(doc_folder).each do |schema_name|
|
85
|
+
next if schema_name == "."
|
86
|
+
next if schema_name == ".."
|
87
|
+
|
88
|
+
schema_folder = File.join(doc_folder, schema_name)
|
89
|
+
next unless File.directory?(schema_folder)
|
90
|
+
|
91
|
+
FileUtils.rm_rf(schema_folder) if Dir.empty?(schema_folder)
|
92
|
+
end
|
93
|
+
|
94
|
+
create_new_columns(added_columns)
|
95
|
+
end
|
96
|
+
|
97
|
+
private
|
98
|
+
|
99
|
+
def input_schema
|
100
|
+
File.read(File.join(Dir.pwd, "schema", "schema.csv"))
|
101
|
+
end
|
102
|
+
|
103
|
+
def read_input_schema
|
104
|
+
rows = input_schema.split("\n")
|
105
|
+
with_header = rows[0].include?("schema_name")
|
106
|
+
|
107
|
+
rows.shift if with_header
|
108
|
+
|
109
|
+
rows.map! do |r|
|
110
|
+
r.split(",").map(&:strip).map { |c| c.gsub('"', "") }.first(5)
|
111
|
+
end
|
112
|
+
|
113
|
+
config = YAML.load(File.read("config.yml"))
|
114
|
+
|
115
|
+
if @config["ignorelist"]
|
116
|
+
@config["ignorelist"].map { |r| r.split(/[\.\#]/) }.each do |b|
|
117
|
+
schema_pattern, table_pattern, column_pattern = b
|
118
|
+
|
119
|
+
rows.reject! do |row|
|
120
|
+
schema_name, table_name, column_name, _, _ = row
|
121
|
+
|
122
|
+
if column_pattern
|
123
|
+
next unless column_name =~ Regexp.new(column_pattern.gsub("*", ".*"))
|
124
|
+
end
|
125
|
+
|
126
|
+
if table_pattern
|
127
|
+
next unless table_name =~ Regexp.new(table_pattern.gsub("*", ".*"))
|
128
|
+
end
|
129
|
+
|
130
|
+
if schema_pattern
|
131
|
+
next unless schema_name =~ Regexp.new(schema_pattern.gsub("*", ".*"))
|
132
|
+
end
|
133
|
+
|
134
|
+
true
|
135
|
+
end
|
136
|
+
end
|
137
|
+
end
|
138
|
+
|
139
|
+
rows
|
140
|
+
end
|
141
|
+
|
142
|
+
def read_documented_schema
|
143
|
+
doc_folder = File.join(Dir.pwd, "doc")
|
144
|
+
|
145
|
+
return [] if Dir.empty?(doc_folder)
|
146
|
+
|
147
|
+
keys = []
|
148
|
+
Dir.entries(doc_folder).each do |schema_name|
|
149
|
+
next if schema_name == "."
|
150
|
+
next if schema_name == ".."
|
151
|
+
|
152
|
+
schema_folder = File.join(doc_folder, schema_name)
|
153
|
+
next unless File.directory?(schema_folder)
|
154
|
+
|
155
|
+
Dir.entries(schema_folder).each do |table_name|
|
156
|
+
next if table_name == "."
|
157
|
+
next if table_name == ".."
|
158
|
+
|
159
|
+
table_folder = File.join(schema_folder, table_name)
|
160
|
+
next unless File.directory?(table_folder)
|
161
|
+
|
162
|
+
columns_file = File.join(table_folder, "columns.yml")
|
163
|
+
next unless File.exists?(columns_file)
|
164
|
+
|
165
|
+
columns = YAML.load(File.read(columns_file))
|
166
|
+
columns.each do |column|
|
167
|
+
keys.push([
|
168
|
+
schema_name,
|
169
|
+
table_name,
|
170
|
+
column[:name],
|
171
|
+
column[:type]
|
172
|
+
].join(":"))
|
173
|
+
end
|
174
|
+
end
|
175
|
+
end
|
176
|
+
|
177
|
+
keys
|
178
|
+
end
|
179
|
+
|
180
|
+
def create_new_columns(added_columns)
|
181
|
+
doc_folder = File.join(Dir.pwd, "doc")
|
182
|
+
|
183
|
+
added_columns.map! { |r| r.split(":") }
|
184
|
+
new_columns = read_input_schema.select do |row|
|
185
|
+
added_columns.any? { |r| r == row.first(4) }
|
186
|
+
end
|
187
|
+
|
188
|
+
schemas = new_columns.group_by(&:first)
|
189
|
+
|
190
|
+
schemas_and_tables = schemas.each_with_object({}) do |(schema_name, tables), o|
|
191
|
+
tables.map(&:shift)
|
192
|
+
|
193
|
+
o[schema_name] = tables.group_by(&:first)
|
194
|
+
end
|
195
|
+
|
196
|
+
schemas_and_tables.each do |schema_name, tables|
|
197
|
+
schema_folder = File.join(doc_folder, schema_name)
|
198
|
+
|
199
|
+
Dir.mkdir(schema_folder) unless Dir.exist?(schema_folder)
|
200
|
+
|
201
|
+
tables.each do |table_name, columns|
|
202
|
+
# 1. create table folder
|
203
|
+
table_folder = File.join(schema_folder, table_name)
|
204
|
+
|
205
|
+
Dir.mkdir(table_folder) unless Dir.exist?(table_folder)
|
206
|
+
|
207
|
+
# 2. create examples folder with test example
|
208
|
+
table_example_folder = File.join(table_folder, "examples")
|
209
|
+
|
210
|
+
Dir.mkdir(table_example_folder) unless Dir.exist?(table_example_folder)
|
211
|
+
|
212
|
+
# 2a. create example file
|
213
|
+
example_file = File.join(table_example_folder, "1_example.md")
|
214
|
+
example_table_example_file = File.join(DBDOC_HOME, "doc_files", "table_example.md")
|
215
|
+
|
216
|
+
FileUtils.cp(example_table_example_file, example_file)
|
217
|
+
|
218
|
+
# 3. create table description.md
|
219
|
+
table_description_file = File.join(table_folder, "description.md")
|
220
|
+
|
221
|
+
example_table_description_file = File.join(DBDOC_HOME, "doc_files", "table_description.md")
|
222
|
+
FileUtils.cp(example_table_description_file, table_description_file)
|
223
|
+
|
224
|
+
# 4. create table columns.yml
|
225
|
+
columns_yaml = File.join(table_folder, "columns.yml")
|
226
|
+
|
227
|
+
unless File.exists?(columns_yaml)
|
228
|
+
columns_erb_tamplate_file = File.join(DBDOC_HOME, "doc_files", "columns.yml.erb")
|
229
|
+
columns_yaml_template = ERB.new(File.read(columns_erb_tamplate_file), nil, "-")
|
230
|
+
File.open(columns_yaml, "w") do |f|
|
231
|
+
f.puts columns_yaml_template.result_with_hash({
|
232
|
+
columns: columns
|
233
|
+
})
|
234
|
+
end
|
235
|
+
end
|
236
|
+
end
|
237
|
+
end
|
238
|
+
end
|
239
|
+
end
|
240
|
+
end
|
@@ -0,0 +1,225 @@
|
|
1
|
+
require "dbdoc/constants"
|
2
|
+
require_relative "../confluence/api"
|
3
|
+
|
4
|
+
module Dbdoc
|
5
|
+
class Uploader
|
6
|
+
def initialize(config: {})
|
7
|
+
@config = config
|
8
|
+
@confluence_api = Confluence::Api.new
|
9
|
+
@doc_folder = File.join(Dir.pwd, "doc")
|
10
|
+
end
|
11
|
+
|
12
|
+
def upload
|
13
|
+
create_or_updates_pages
|
14
|
+
delete_pages_for_dropped_schemas_or_tables
|
15
|
+
end
|
16
|
+
|
17
|
+
def clear_confluence_space
|
18
|
+
# TODO paginate and fetch all Confluence pages
|
19
|
+
# TODO ask user to Yn if they want to proceed with deletion
|
20
|
+
# TODO iterate over each page_id, unlog it from page_ids.yml and @confluence_api.delete_page(page_id:)
|
21
|
+
end
|
22
|
+
|
23
|
+
private
|
24
|
+
|
25
|
+
def delete_pages_for_dropped_schemas_or_tables
|
26
|
+
uploaded_pages = YAML.load(File.read(page_ids_file))
|
27
|
+
|
28
|
+
uploaded_pages.each do |key, params|
|
29
|
+
next if key == "root"
|
30
|
+
|
31
|
+
if key.start_with?("schema:")
|
32
|
+
schema_name = key.gsub("schema:", "")
|
33
|
+
|
34
|
+
unless Dir.exists?(File.join(@doc_folder, schema_name))
|
35
|
+
page_id = uploaded_pages[key][:page_id]
|
36
|
+
puts "--> delete page #{key}: #{page_id}"
|
37
|
+
@confluence_api.delete_page(page_id: page_id)
|
38
|
+
unlog_page_id(key: key)
|
39
|
+
end
|
40
|
+
elsif key.start_with?("table:")
|
41
|
+
schema_name, table_name = key.gsub("table:", "").split(".")
|
42
|
+
|
43
|
+
unless Dir.exists?(File.join(@doc_folder, schema_name, table_name))
|
44
|
+
page_id = uploaded_pages[key][:page_id]
|
45
|
+
puts "--> delete page #{key}: #{page_id}"
|
46
|
+
@confluence_api.delete_page(page_id: page_id)
|
47
|
+
unlog_page_id(key: key)
|
48
|
+
end
|
49
|
+
end
|
50
|
+
end
|
51
|
+
end
|
52
|
+
|
53
|
+
def create_or_updates_pages
|
54
|
+
root_page_id = create_root_db_page[:page_id]
|
55
|
+
|
56
|
+
log_page_id(key: "root", page_id: root_page_id)
|
57
|
+
|
58
|
+
Dir.entries(@doc_folder).each do |schema_name|
|
59
|
+
next if schema_name == "."
|
60
|
+
next if schema_name == ".."
|
61
|
+
|
62
|
+
next unless File.directory?(File.join(@doc_folder, schema_name))
|
63
|
+
|
64
|
+
upload_schema(
|
65
|
+
schema_name: schema_name,
|
66
|
+
root_page_id: root_page_id
|
67
|
+
)
|
68
|
+
end
|
69
|
+
end
|
70
|
+
|
71
|
+
def page_ids_file
|
72
|
+
file = File.join(Dir.pwd, "page_ids.yml")
|
73
|
+
|
74
|
+
unless File.exists?(file)
|
75
|
+
File.open(file, "w") do |f|
|
76
|
+
f.puts("--- {}")
|
77
|
+
end
|
78
|
+
end
|
79
|
+
|
80
|
+
file
|
81
|
+
end
|
82
|
+
|
83
|
+
def latest_page_id(key:)
|
84
|
+
page_ids = YAML.load(File.read(page_ids_file))
|
85
|
+
page_ids.dig(key, :page_id)
|
86
|
+
end
|
87
|
+
|
88
|
+
def latest_page_version(key:)
|
89
|
+
page_ids = YAML.load(File.read(page_ids_file))
|
90
|
+
page_ids.dig(key, :version)
|
91
|
+
end
|
92
|
+
|
93
|
+
# Records (or bumps the version of) an uploaded Confluence page in the
# page-id log file. A key seen for the first time starts at version 0
# and is announced as a create; subsequent calls announce an update.
# The version counter is incremented on every call and the whole log is
# rewritten to disk.
#
# @param key [String] log key, e.g. "root", "schema:public", "table:public.users"
# @param page_id [Object] Confluence page id to associate with the key
def log_page_id(key:, page_id:)
  page_ids = YAML.load(File.read(page_ids_file))
  entry = (page_ids[key] ||= { page_id: page_id, version: 0 })

  action = entry[:version] == 0 ? "create" : "update"
  puts "--> #{action} page #{key}: #{page_id}"

  entry[:version] += 1

  File.open(page_ids_file, "w") { |f| f.puts(page_ids.to_yaml) }
end
|
112
|
+
|
113
|
+
# Removes a key from the page-id log file (used after the matching
# Confluence page has been deleted) and rewrites the log to disk.
#
# @param key [String] log key to forget
def unlog_page_id(key:)
  page_ids = YAML.load(File.read(page_ids_file))
  page_ids.delete(key)

  File.open(page_ids_file, "w") { |f| f.puts(page_ids.to_yaml) }
end
|
122
|
+
|
123
|
+
# Ensures the root database page exists in Confluence. If a root page
# id is already logged, returns it without touching the API; otherwise
# creates the page titled after the configured database name.
#
# @return [Hash] a hash containing at least :page_id
def create_root_db_page
  existing_id = latest_page_id(key: "root")
  return { page_id: existing_id } if existing_id

  db_name = @config["db"]["name"]

  @confluence_api.create_page(
    page_title: db_name,
    body: "#{db_name} database documentation"
  )
end
|
136
|
+
|
137
|
+
# Uploads one schema folder: creates the schema's Confluence page under
# the root page if it is not already logged, then uploads every table
# directory inside the schema folder.
#
# @param schema_name [String] schema folder name inside the doc folder
# @param root_page_id [Object] Confluence id of the root database page
def upload_schema(schema_name:, root_page_id:)
  schema_folder = File.join(@doc_folder, schema_name)
  schema_key = "schema:#{schema_name}"

  schema_page_id = latest_page_id(key: schema_key)

  if schema_page_id.nil?
    response = @confluence_api.create_page(
      parent_page_id: root_page_id,
      page_title: schema_name,
      body: "#{schema_name} schema documentation"
    )
    schema_page_id = response[:page_id]

    log_page_id(key: schema_key, page_id: schema_page_id)
  end

  Dir.entries(schema_folder).each do |entry|
    next if %w[. ..].include?(entry)
    next unless File.directory?(File.join(schema_folder, entry))

    upload_table(
      schema_name: schema_name,
      table_name: entry,
      schema_page_id: schema_page_id
    )
  end
end
|
164
|
+
|
165
|
+
# Uploads documentation for a single table: renders the page body from
# the table's doc files (description.md, columns.yml, examples/*.md)
# and either updates the existing Confluence page or creates a new one
# under the schema page, logging the resulting page id either way.
#
# @param schema_name [String] schema folder name inside the doc folder
# @param table_name [String] table folder name inside the schema folder
# @param schema_page_id [Object] Confluence id of the parent schema page
def upload_table(schema_name:, table_name:, schema_page_id:)
  table_folder = File.join(@doc_folder, schema_name, table_name)
  page_body = build_table_page_body(table_folder)

  # Tables in the "public" schema are titled with just the table name;
  # every other schema gets a "schema.table" title.
  page_title = schema_name == "public" ? table_name : "#{schema_name}.#{table_name}"

  page_key = "table:#{schema_name}.#{table_name}"
  page_id = latest_page_id(key: page_key)

  if page_id
    @confluence_api.update_page(
      page_id: page_id,
      page_title: page_title,
      body: page_body,
      version: latest_page_version(key: page_key) + 1
    )

    # BUG FIX: this previously logged schema_page_id, silently replacing
    # the table's recorded page id with its parent schema's page id.
    log_page_id(key: page_key, page_id: page_id)
  else
    response = @confluence_api.create_page(
      parent_page_id: schema_page_id,
      page_title: page_title,
      body: page_body
    )

    log_page_id(key: page_key, page_id: response[:page_id])
  end
end

private

# Renders the Confluence wiki-markup body for a table page from the
# description, columns table and example files in +table_folder+.
def build_table_page_body(table_folder)
  table_description = File.read(File.join(table_folder, "description.md"))

  examples_folder = File.join(table_folder, "examples")
  table_examples = Dir[File.join(examples_folder, "*.md")].map { |f| File.read(f) }

  columns_table = render_columns_table(table_folder)

  <<-MARKDOWN
h2. Description

#{table_description}

h2. Columns

#{columns_table}

h2. Examples

#{table_examples.join("\n")}
  MARKDOWN
end

# Renders the columns table from the table's columns.yml using the
# columns.md.erb template shipped with dbdoc.
def render_columns_table(table_folder)
  template_source = File.read(File.join(DBDOC_HOME, "doc_files", "columns.md.erb"))

  # ERB's positional (safe_level, trim_mode) arguments were removed in
  # Ruby 3.1, so ERB.new(src, nil, "-") raises there. Use the trim_mode:
  # keyword when this Ruby supports it (>= 2.6), else the legacy form.
  template =
    if ERB.instance_method(:initialize).parameters.any? { |_kind, name| name == :trim_mode }
      ERB.new(template_source, trim_mode: "-")
    else
      ERB.new(template_source, nil, "-")
    end

  columns = YAML.load(File.read(File.join(table_folder, "columns.yml")))
  template.result_with_hash(columns: columns)
end
|
224
|
+
end
|
225
|
+
end
|
metadata
ADDED
@@ -0,0 +1,80 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
2
|
+
name: dbdoc
|
3
|
+
version: !ruby/object:Gem::Version
|
4
|
+
version: 0.1.0
|
5
|
+
platform: ruby
|
6
|
+
authors:
|
7
|
+
- Anatoli Makarevich
|
8
|
+
autorequire:
|
9
|
+
bindir: bin
|
10
|
+
cert_chain: []
|
11
|
+
date: 2020-07-29 00:00:00.000000000 Z
|
12
|
+
dependencies: []
|
13
|
+
description: Dbdoc is a tool to keep your database documentation up-to-date and version
|
14
|
+
controlled.
|
15
|
+
email:
|
16
|
+
- makaroni4@gmail.com
|
17
|
+
executables:
|
18
|
+
- dbdoc
|
19
|
+
extensions: []
|
20
|
+
extra_rdoc_files: []
|
21
|
+
files:
|
22
|
+
- ".gitignore"
|
23
|
+
- ".rspec"
|
24
|
+
- ".rubocop.yml"
|
25
|
+
- ".travis.yml"
|
26
|
+
- CHANGELOG.md
|
27
|
+
- CODE_OF_CONDUCT.md
|
28
|
+
- Gemfile
|
29
|
+
- Gemfile.lock
|
30
|
+
- LICENSE.txt
|
31
|
+
- README.md
|
32
|
+
- Rakefile
|
33
|
+
- bin/dbdoc
|
34
|
+
- config/confluence.yml
|
35
|
+
- config/default.yml
|
36
|
+
- config/gitignore.template
|
37
|
+
- config/schema_queries/mysql.sql
|
38
|
+
- config/schema_queries/pg.sql
|
39
|
+
- config/schema_queries/redshift.sql
|
40
|
+
- dbdoc.gemspec
|
41
|
+
- doc_files/columns.md.erb
|
42
|
+
- doc_files/columns.yml.erb
|
43
|
+
- doc_files/table_description.md
|
44
|
+
- doc_files/table_example.md
|
45
|
+
- lib/confluence/api.rb
|
46
|
+
- lib/dbdoc.rb
|
47
|
+
- lib/dbdoc/cli.rb
|
48
|
+
- lib/dbdoc/config.rb
|
49
|
+
- lib/dbdoc/constants.rb
|
50
|
+
- lib/dbdoc/manager.rb
|
51
|
+
- lib/dbdoc/uploader.rb
|
52
|
+
- lib/dbdoc/version.rb
|
53
|
+
homepage: https://github.com/sqlhabit/dbdoc
|
54
|
+
licenses:
|
55
|
+
- MIT
|
56
|
+
metadata:
|
57
|
+
homepage_uri: https://github.com/sqlhabit/dbdoc
|
58
|
+
source_code_uri: https://github.com/sqlhabit/dbdoc
|
59
|
+
changelog_uri: https://github.com/sqlhabit/dbdoc/blob/master/CHANGELOG.md
|
60
|
+
post_install_message:
|
61
|
+
rdoc_options: []
|
62
|
+
require_paths:
|
63
|
+
- lib
|
64
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
65
|
+
requirements:
|
66
|
+
- - ">="
|
67
|
+
- !ruby/object:Gem::Version
|
68
|
+
version: 2.3.0
|
69
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
70
|
+
requirements:
|
71
|
+
- - ">="
|
72
|
+
- !ruby/object:Gem::Version
|
73
|
+
version: '0'
|
74
|
+
requirements: []
|
75
|
+
rubygems_version: 3.0.3
|
76
|
+
signing_key:
|
77
|
+
specification_version: 4
|
78
|
+
summary: Dbdoc is a tool to keep your database documentation up-to-date and version
|
79
|
+
controlled.
|
80
|
+
test_files: []
|