dataMetaDom 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.yardopts +1 -0
- data/History.md +6 -0
- data/PostInstall.txt +1 -0
- data/README.md +137 -0
- data/Rakefile +13 -0
- data/bin/dataMetaGvExport.rb +97 -0
- data/bin/dataMetaMySqlDdl.rb +27 -0
- data/bin/dataMetaOracleDdl.rb +26 -0
- data/bin/dataMetaPojo.rb +27 -0
- data/bin/dataMetaReVersion.rb +66 -0
- data/bin/dataMetaSameFullJ.rb +13 -0
- data/bin/dataMetaSameIdJ.rb +12 -0
- data/lib/dataMetaDom/converter.rb +70 -0
- data/lib/dataMetaDom/dataType.rb +112 -0
- data/lib/dataMetaDom/docs.rb +122 -0
- data/lib/dataMetaDom/enum.rb +125 -0
- data/lib/dataMetaDom/field.rb +182 -0
- data/lib/dataMetaDom/help.rb +41 -0
- data/lib/dataMetaDom/model.rb +274 -0
- data/lib/dataMetaDom/mySql.rb +256 -0
- data/lib/dataMetaDom/ora.rb +295 -0
- data/lib/dataMetaDom/pojo.rb +1056 -0
- data/lib/dataMetaDom/python.rb +168 -0
- data/lib/dataMetaDom/recAttr.rb +271 -0
- data/lib/dataMetaDom/record.rb +397 -0
- data/lib/dataMetaDom/ref.rb +127 -0
- data/lib/dataMetaDom/sourceFile.rb +150 -0
- data/lib/dataMetaDom/sources.rb +68 -0
- data/lib/dataMetaDom/util.rb +279 -0
- data/lib/dataMetaDom/ver.rb +244 -0
- data/lib/dataMetaDom.rb +141 -0
- data/test/test_dataMetaDom.rb +130 -0
- data/test/test_helper.rb +6 -0
- data/tmpl/java/migrationEntityEnums.erb +172 -0
- data/tmpl/python/entity.erb +50 -0
- metadata +126 -0
@@ -0,0 +1,274 @@
|
|
1
|
+
$:.unshift(File.dirname(__FILE__)) unless $:.include?(File.dirname(__FILE__)) || $:.include?(File.expand_path(File.dirname(__FILE__)))
|
2
|
+
|
3
|
+
require 'set'
|
4
|
+
require 'dataMetaDom/converter'
|
5
|
+
require 'dataMetaDom/docs'
|
6
|
+
require 'dataMetaDom/field'
|
7
|
+
require 'dataMetaDom/ver'
|
8
|
+
require 'date'
|
9
|
+
|
10
|
+
module DataMetaDom
|
11
|
+
=begin rdoc
|
12
|
+
Metadata Model, including parsing.
|
13
|
+
|
14
|
+
For command line details either check the new method's source or the README.rdoc file, the usage section.
|
15
|
+
=end
|
16
|
+
class Model
|
17
|
+
include DataMetaDom

=begin rdoc
The instance of SourceFile currently being parsed.
=end
attr_reader :currentSource

=begin rdoc
All sources, including the includes.
=end
attr_reader :sources

=begin rdoc
Instances of Enum, Map and BitSet on this model, keyed by the full name
including the namespace if any.
=end
attr_reader :enums

=begin rdoc
Instances of Record on this model, keyed by the full name including the
namespace if any.
=end
attr_reader :records

=begin rdoc
Reverse references keyed by reference target names.
=end
attr_reader :reRefs

=begin rdoc
Documentation for all options is in this section.

* +autoNsVer+ - if set to True, advises a generator to append a +vN+ where +N+ is a version number to the namespace.
  NOTE(review): Model#parse defaults the option key as +autoVerNs+ -- one of the
  two spellings looks like a typo; confirm which one the generators read.
=end
attr_accessor :options

# Version on the model level
attr_accessor :ver
|
50
|
+
|
51
|
+
=begin rdoc
Creates a blank model: no sources, no enums, no records, no version.

No file name is taken here so that a model can also be built
programmatically without parsing a source file.
=end
def initialize() # no file name if want to build model manually
    # References to the entity, hash keyed by entity name.
    # The original used Hash.new(*[]) which is just Hash.new with a nil
    # default -- a plain literal is clearer and behaves identically.
    @reRefs = {}
    @enums = {}; @records = {}
    @ver = nil
end
|
59
|
+
|
60
|
+
=begin rdoc
Resolves references after parsing all the sources to the types that were used
before they were defined. Verifies integrity.

* Raises if the same reference spec occurs more than once, or if no version
  is defined on the model.
* Returns self for chaining.
=end
def resolveVerify
    duplicateGuard = {}
    @records.each_key { |recKey|
        rec = @records[recKey]
        rec.refs.each { |ref|
            ref.resolve self
            preExisting = duplicateGuard[ref.key]
            # Fixed: the message interpolated an undefined variable `r`, which
            # would have raised a NameError instead of the intended report.
            raise "Duplicate reference spec: #{ref}(#{ref.sourceRef}), pre-existing: #{preExisting}(#{preExisting.sourceRef})" if preExisting
            duplicateGuard[ref.key] = ref
            # Index the reverse reference by the target entity's name.
            reKey = ref.toEntity.name
            (@reRefs[reKey] ||= []) << ref
        }
    }
    raise RuntimeError, "No version defined on #{self}" unless ver
    self
end
|
81
|
+
|
82
|
+
# Builds a diagnostics string including the current source info; falls back
# to a '<no source>' placeholder when no source is currently being parsed.
def diagn
    currentLabel = @currentSource ? @currentSource : '<no source>'
    "; Src: #{currentLabel}"
end
|
86
|
+
|
87
|
+
=begin rdoc
Master parse: stores the options, initializes the process queue seeded with
the master file, parses each source in turn and finally runs resolveVerify.

* Parameters
  * +fileName+ - the master source file to start parsing from
  * +options+ - an options hash. NOTE(review): the default key here is
    +autoVerNs+ while the documentation on the +options+ accessor describes
    +autoNsVer+ -- confirm which spelling the generators actually read.

Returns self for chaining.
=end
def parse(fileName, options={autoVerNs: false})
    @options = options
    @sources = Sources.new(fileName)
    # Sources#next hands out the next queued source, falsy when exhausted;
    # included files discovered during parsing are queued onto @sources.
    while (@currentSource=@sources.next)
        @currentSource.parse self
    end
    resolveVerify
    self
end
|
97
|
+
|
98
|
+
=begin rdoc
Adds the given record to the model, keyed by the record's key.
* Parameter
  * +rec+ - instance of a Record
=end
def addRecord(rec); @records[rec.key] = rec end
|
104
|
+
|
105
|
+
=begin rdoc
Adds the given records to the model, one by one via addRecord.
* Parameter
  * +recs+ - an array of instances of a Record
=end
def addRecords(recs); recs.each { |r| addRecord r } end
|
111
|
+
|
112
|
+
=begin rdoc
Adds the given enum to the model, keyed by the enum's name.
* Parameter
  * +newEnum+ - instance of an Enum or a BitSet or a Map
=end
def addEnum(newEnum); @enums[newEnum.name] = newEnum end
|
118
|
+
|
119
|
+
=begin rdoc
Adds the given enums to the model, one by one via addEnum.
* Parameter
  * +enums+ - an array of instances of an Enum or a BitSet or a Map
=end
def addEnums(enums); enums.each { |e| addEnum e } end
|
125
|
+
|
126
|
+
=begin rdoc
Generates DataMeta DOM source for the given Enum, yielding the lines to the
caller's block.

* Parameters
  * +e+ - instance of a Enum or a BitSet or a Map to generate the DataMeta DOM source for
  * +baseName+ - the base name excluding the namespace if any, usually available on the caller's side.

Raises if +e+ is none of the three supported kinds, or (for mappings) if no
converter is registered for the from/to types.
=end
def genSourceEnum(e, baseName)
    yield '' # yield empty line before a type

    case
        when e.kind_of?(Enum)
            if e.docs
                genDocs(e.docs){|line| yield line}
            end
            # Header: keyword and base name, then one value per indented line.
            yield "#{e.sourceKeyWord} #{baseName}"
            #genVer(e) { |line| yield line }
            e.values.each { |v|
                yield "#{SOURCE_INDENT}#{v}"
            }
        when e.kind_of?(BitSet), e.kind_of?(Mappings)
            if e.docs
                genDocs(e.docs){|line| yield line}
            end
            # BitSet omits the from-type in the header; Mappings shows both.
            yield "#{e.sourceKeyWord} #{baseName} #{e.kind_of?(BitSet) ? '' : e.fromT.to_s + ' '}#{e.toT}"
            #genVer(e) { |line| yield line }
            e.keys.each { |k|
                # NOTE(review): this reads e.fromT even for a BitSet, which the
                # header above skips -- presumably BitSet defines fromT too; confirm.
                fromConv = CONVS[e.fromT.type]
                toConv = CONVS[e.toT.type]
                #DataMetaDom::L.debug "k=#{k.inspect}, e=#{e[k].inspect}"
                raise "Invalid convertor for #{e}: (#{fromConv.inspect} => #{toConv.inspect})" unless fromConv && toConv
                # Serialize each key/value pair via the registered converters.
                yield "#{SOURCE_INDENT}#{fromConv.ser.call(k)} => #{toConv.ser.call(e[k])},"
            }
        else
            raise "Enum #{e} - unsupported format"
    end
    yield END_KW
end
|
164
|
+
|
165
|
+
# Renders the source for the docs property of a Documentable, yielding one
# line at a time: a "#{DOC} <target>" header, the doc text, then the END
# keyword, for every documentation target.
def genDocs(docs)
    docs.each_key do |target|
        entry = docs[target]
        yield "#{DOC} #{target}"
        yield entry.text
        yield END_KW
    end
end
|
176
|
+
|
177
|
+
# Renders the source for the version property of the given entity,
# yielding a single "#{VER_KW} <full version>" line.
# Raises when the entity has no version or the version is not a Ver.
def genVer(e)
    version = e.ver
    raise "No version on #{e}" unless version
    raise "Version on #{e} is wrong type: #{version.inspect}" unless version.kind_of?(Ver)
    yield "#{VER_KW} #{version.full}"
end
|
186
|
+
|
187
|
+
=begin rdoc
Generates DataMeta DOM source for the given Record, yielding the lines to
the caller's block.

* Parameters
  * +r+ - instance of a Record to generate the DataMeta DOM source for
  * +namespace+ - the namespace of the record, usually available on the caller's side.
  * +baseName+ - the base name excluding the namespace if any, usually available on the caller's side.
=end
def genSourceRec(r, namespace, baseName)
    yield '' # yield empty line before a type
    if r.docs
        genDocs(r.docs){|line| yield line}
    end

    yield "#{RECORD} #{baseName}"
    #genVer(r) { |line| yield line }
    # One line per field: optional docs first, then the field declaration
    # in one of three shapes -- map, aggregate, or plain.
    r.fields.values.each { |f|
        if f.docs
            genDocs(f.docs) { |line| yield line}
        end
        t = f.dataType
        #puts ">>F: #{f}, ns=#{ns}, base=#{base}, bn=#{baseName}"
        # render names from other namespaces than the current in full
        renderType = qualName(namespace, t.type)
        srcLine = if f.map?
            trgRender = qualName(namespace, f.trgType.type)
            "#{SOURCE_INDENT}#{f.req_spec}#{Field::MAP}{#{renderType}#{t.length_spec}, #{trgRender}#{
                f.trgType.length_spec}} #{f.name}#{f.default_spec}"
        elsif f.aggr?
            "#{SOURCE_INDENT}#{f.req_spec}#{f.aggr}{#{renderType}#{t.length_spec}} #{f.name}#{f.default_spec}"
        else
            "#{SOURCE_INDENT}#{f.req_spec}#{renderType}#{t.length_spec} #{f.name}#{f.default_spec}#{f.matches_spec}"
        end
        yield srcLine
    }

    # Identity, uniques and indexes each render hints in parentheses when present.
    yield "#{SOURCE_INDENT}#{IDENTITY}#{r.identity.hints.empty? ? '' : "(#{r.identity.hints.to_a.join(', ')})"} "\
        "#{r.identity.args.join(', ')}" if r.identity
    if r.uniques
        r.uniques.each_value { |uq|
            yield "#{SOURCE_INDENT}#{UNIQUE}#{uq.hints.empty? ? '' : "(#{uq.hints.to_a.join(', ')})"} #{uq.args.join(', ')}"
        }
    end
    if r.indexes
        r.indexes.each_value { |ix|
            yield "#{SOURCE_INDENT}#{INDEX}#{ix.hints.empty? ? '' : "(#{ix.hints.to_a.join(', ')})"} #{ix.args.join(', ')}"
        }
    end
    # References are emitted as source comments, not parseable declarations.
    if r.refs
        r.refs.each { |ref|
            yield "# #{ref}"
        }
    end
    yield END_KW
end
|
242
|
+
|
243
|
+
=begin rdoc
Generates the source lines for the given model,
yields the lines to the caller's block, use as:

    genSource{|line| ... }

Raises if the model has no version, or if a sorted key maps to neither a
record nor an enum.
=end
def genSource
    yield '# model definition exported into the source code by DataMeta DOM'
    namespace = ''
    # Walk enums and records in one stable, name-sorted pass, emitting a
    # namespace directive whenever the namespace changes.
    (@enums.keys + @records.keys).sort { |a, b| a.to_s <=> b.to_s }.each { |k|
        ns, base = DataMetaDom.splitNameSpace(k.to_s)
        if DataMetaDom.validNs?(ns, base) && ns != namespace
            namespace = ns
            yield "#{NAMESPACE} #{namespace}"
        end

        raise 'No version on the model' unless @ver
        raise "Version on the model is wrong type: #{@ver.inspect}" unless @ver.kind_of?(Ver)
        yield "#{VER_KW} #{@ver.full}"
        case
            when @records[k]
                genSourceRec(@records[k], namespace, base) { |line| yield line }
            when @enums[k]
                genSourceEnum(@enums[k], base) { |line| yield line }
            else
                # Fixed: the message interpolated `e`, which is not defined in
                # this scope and would raise NameError; `k` is the bad key.
                raise "Unsupported entity: #{k.inspect}"
        end
    }
end
|
272
|
+
end
|
273
|
+
|
274
|
+
end
|
@@ -0,0 +1,256 @@
|
|
1
|
+
$:.unshift(File.dirname(__FILE__)) unless $:.include?(File.dirname(__FILE__)) || $:.include?(File.expand_path(File.dirname(__FILE__)))
|
2
|
+
|
3
|
+
require 'set'
|
4
|
+
require 'fileutils'
|
5
|
+
|
6
|
+
module DataMetaDom
|
7
|
+
|
8
|
+
=begin rdoc
|
9
|
+
Definition for generating MySQL 5 artifacts such as schemas, select statements,
|
10
|
+
ORM input files etc etc
|
11
|
+
|
12
|
+
TODO this isn't a bad way, but beter use templating next time such as {ERB}[http://ruby-doc.org/stdlib-1.9.3/libdoc/erb/rdoc/ERB.html].
|
13
|
+
|
14
|
+
For command line details either check the new method's source or the README.rdoc file, the usage section.
|
15
|
+
=end
|
16
|
+
module MySqlLexer
|
17
|
+
|
18
|
+
=begin rdoc
Integer types, keyed by byte length.
=end
INT_TYPES = {2 => 'smallint', 4 => 'int', 8 => 'bigint'}

=begin rdoc
Float types, keyed by byte length.
=end
FLOAT_TYPES = {4 => 'float', 8 => 'double'}

=begin rdoc
Not null (required) wording per MySQL DDL syntax
=end
NOT_NULL=' not null'

=begin rdoc
\Mapping from DataMeta DOM standard types to correspondent MySQL types renderer lambdas.
Each lambda takes the declared length and the required flag and returns the
column type clause.
=end
SQL_TYPES={
    INT => lambda { |len, isReq|
        concreteType = INT_TYPES[len]
        raise "Invalid integer type length #{len} " unless concreteType
        "#{concreteType}#{isReq ? NOT_NULL : ''}"
    },
    DataMetaDom::FLOAT => lambda { |len, isReq|
        concreteType = FLOAT_TYPES[len]
        # Fixed: the error message said "integer" for a float type length.
        raise "Invalid float type length #{len} " unless concreteType
        "#{concreteType}#{isReq ? NOT_NULL : ''}"
    },
    STRING => lambda { |len, isReq| "varchar(#{len})#{isReq ? NOT_NULL : ''}" },
    DATETIME => lambda { |len, isReq| "datetime#{isReq ? NOT_NULL : ''}" },
    BOOL => lambda { |len, isReq| "bool#{isReq ? NOT_NULL : ''}" }
}
|
50
|
+
|
51
|
+
=begin rdoc
Encapsulates 4 parts of DDL related SQL output:
* Creates
* Drops
* Linking aka Coupling aka creating Foreign Keys
* Unlinking aka Uncoupling aka dropping Foreign Keys
=end
class SqlOutput

    # Open output file into create SQL DDL statements (CREATE TABLE)
    attr_reader :create

    # Open output file into drop SQL DDL statements (DROP TABLE)
    attr_reader :drop

    # Open output file into the couple SQL DDL statements, creating foreign keys
    attr_reader :couple

    # Open output file into the uncouple SQL DDL statements, dropping foreign keys
    attr_reader :uncouple

    # Creates an instance into the given target directory in which all 4 parts
    # of the SQL DDL process will be created. The directory must already exist.
    # Writes a "generated, do not edit" header into all 4 files and a
    # check-disabling preamble into the drop and uncouple scripts.
    def initialize(sqlTargetDir)
        @selTargetDir = sqlTargetDir # NOTE(review): looks like a typo of @sqlTargetDir; kept as-is, never read in this class
        @create = File.new("#{sqlTargetDir}/DDL-create.sql", 'wb')
        @drop = File.new("#{sqlTargetDir}/DDL-drop.sql", 'wb')
        @couple = File.new("#{sqlTargetDir}/DDL-couple.sql", 'wb')
        @uncouple = File.new("#{sqlTargetDir}/DDL-uncouple.sql", 'wb')
        @allScriptFiles = [@create, @drop, @couple, @uncouple]
        # The two scripts that drop objects get the check-disabling preamble
        # so they run without FK/unique errors regardless of ordering.
        @dropScripts = [@uncouple, @drop]
        @allScriptFiles.each { |f|
            f.puts %q</* Generated by DataMeta DOM MySQL utility
DO NOT EDIT MANUALLY, update the DataMeta DOM source and regen.
*/
>
        }
        @dropScripts.each { |ds|
            ds.puts %q<
/* Disable all checks for safe dropping without any errors */
SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0;
SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL,ALLOW_INVALID_DATES';

>
        }
    end

    # Safely closes all the output files, first appending the footer that
    # re-enables the checks disabled in the drop scripts' preamble.
    # A failure to close one file is reported to stderr and does not prevent
    # closing the remaining files.
    def close
        @dropScripts.each { |ds|
            ds.puts %q<

/* Re-enable all checks disabled earlier */
SET SQL_MODE=@OLD_SQL_MODE;
SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;
SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS;
>
        }
        @allScriptFiles.each { |f|
            begin
                f.close
            rescue StandardError => x
                # Was `rescue Exception`, which would also swallow signals and
                # SystemExit; StandardError covers I/O errors without that risk.
                $stderr.puts x.message
            end
        }
    end
end
|
130
|
+
|
131
|
+
=begin rdoc
Builds and returns an autoincrement clause if applicable, for the given record and the field.

If the field is the one and only identity on the record *and* if it is an
integral type, returns the auto increment clause, otherwise returns an empty
string.

NOTE(review): this indexes the identity itself (+record.identity[0]+) while
renderRecord reads +record.identity.args+ -- presumably Identity delegates
indexing to its args; confirm against the RecAttr class.
=end
def autoGenClauseIfAny(record, field)
    record.identity && record.identity.length == 1 && field.name == record.identity[0] &&
            field.dataType.type == DataMetaDom::INT ? ' AUTO_INCREMENT' : ''
end
|
141
|
+
|
142
|
+
=begin rdoc
Renders the given field into create statement.
* Parameters:
  * +createStatement+ - the create statement (a String) to append the field definition to.
  * +parser+ - the instance of the Model
  * +record+ - the instance of the Record to which the field belongs
  * +fieldKey+ - the full name of the field to render turned into a symbol.
  * +isFirstField+ - the boolean, true if the field is first in the create
    statement; controls whether a leading ",\n" separator is appended.

Raises ArgumentError for a datatype that is neither a standard SQL type, an
enum on the model, nor a record reference with a singular identity.
=end
def renderField(createStatement, parser, record, fieldKey, isFirstField)
    field = record[fieldKey]
    ty = field.dataType
    # Resolve the column type in order: standard SQL renderer, model enum,
    # or a reference to another record.
    stdRenderer = SQL_TYPES[ty.type]
    typeEnum = parser.enums[ty.type]
    typeRec = parser.records[ty.type]

    typeDef = if stdRenderer
        stdRenderer.call ty.length, field.isRequired
    elsif typeEnum
        # Model enums become a MySQL enum('a','b',...) column.
        "enum('#{typeEnum.values.join("','")}')"
    elsif typeRec
        # A record-typed field becomes a column matching the target record's
        # single identity field, so it can serve as a foreign key.
        raise "Invalid ref to #{typeRec} - it has no singular ID" unless typeRec.identity.length == 1
        idField = typeRec[typeRec.identity[0]]
        idRenderer = SQL_TYPES[idField.dataType.type]
        raise 'Only one-level prim type references only allowed in this version' unless idRenderer
        idRenderer.call idField.dataType.length, field.isRequired
    else
        raise ArgumentError, "Unsupported datatype #{ty}"
    end
    createStatement << ",\n" unless isFirstField
    createStatement << "\t#{field.name} #{typeDef}#{autoGenClauseIfAny(record, field)}"
end
|
174
|
+
|
175
|
+
# Builds and returns the foreign key name for the given entity (Record) name
# and the counting number of these.
# * Parameters:
#   * +bareEntityName+ - the entity name without the namespace
#   * +index+ - an integer, an enumerated counting number, starting from one;
#     the caller increments it for each subsequent FK.
def fkName(bareEntityName, index)
    ['fk', bareEntityName, index].join('_')
end
|
185
|
+
|
186
|
+
=begin rdoc
Render SQL record with for the given model into the given output.
* Parameters
  * +out+ - an instance of SqlOutput
  * +parser+ - an instance of Model
  * +recordKey+ - full name of the record datatype including namespeace if any turned into a symbol.
=end
def renderRecord(out, parser, recordKey)
    record = parser.records[recordKey]
    ns, entityName = DataMetaDom.splitNameSpace record.name
    isFirstField = true
    out.drop.puts "\ndrop table if exists #{entityName};"
    fkNumber = 1 # to generate unique names that fit in 64 characters of identifier max length for MySQL
    # Record-to-record references become FK couple/uncouple statements.
    record.refs.select { |r| r.type == Reference::RECORD }.each { |ref|
        ns, fromEntityBareName = DataMetaDom.splitNameSpace ref.fromEntity.name
        ns, toEntityBareName = DataMetaDom.splitNameSpace ref.toEntity.name
        # NOTE(review): `ref.toFields.name` below is inconsistent with
        # `ref.fromField.name` -- looks like a typo of `ref.toField.name`;
        # confirm against the Reference class before changing.
        out.couple.puts "alter table #{fromEntityBareName} add constraint #{fkName(fromEntityBareName, fkNumber)} "\
            " foreign key (#{ref.fromField.name}) references #{toEntityBareName}(#{ref.toFields.name});"
        out.uncouple.puts "alter table #{fromEntityBareName} drop foreign key #{fkName(fromEntityBareName, fkNumber)};"
        fkNumber += 1
    }
    ids = record.identity ? record.identity.args : []
    createStatement = "create table #{entityName} (\n"
    # Column order: identity fields first (name-sorted), then the remaining
    # fields (name-sorted); the nested arrays are flattened below.
    fieldKeys = [] << ids.map { |i| i.to_s }.sort.map { |i| i.to_sym } \
        << record.fields.keys.select { |k| !ids.include?(k) }.map { |k| k.to_s }.sort.map { |k| k.to_sym }

    fieldKeys.flatten.each { |f|
        renderField(createStatement, parser, record, f, isFirstField)
        isFirstField = false
    }
    if record.identity && record.identity.length > 0
        createStatement << ",\n\tprimary key(#{ids.sort.join(', ')})"
    end
    unless record.uniques.empty?
        uqNumber = 1
        record.uniques.each_value { |uq|
            createStatement << ",\n\tunique uq_#{entityName}_#{uqNumber}(#{uq.args.join(', ')})"
            uqNumber += 1 # to generate unique names that fit in 64 characters of identifier max length for MySQL
        }
    end
    unless record.indexes.empty?
        ixNumber = 1
        record.indexes.each_value { |ix|
            createStatement << ",\n\tindex ix_#{entityName}_#{ixNumber}(#{ix.args.join(', ')})"
            ixNumber += 1 # to generate unique names that fit in 64 characters of identifier max length for MySQL
        }
    end
    createStatement << "\n) Engine=InnoDB;\n\n" # MyISAM, the default engine does not support FKs

    out.create.puts createStatement
end
|
237
|
+
|
238
|
+
# Generate the MySQL DDL from the given Model into the given output directory.
# * Parameters
#   * +parser+ - an instance of a Model
#   * +outDir+ - a String, the directory to generate the DDL into.
#
# The SqlOutput files are always closed, even if rendering a record raises.
def genDdl(parser, outDir)
    sqlOut = SqlOutput.new(outDir)
    begin
        parser.records.each_key do |recordKey|
            renderRecord(sqlOut, parser, recordKey)
        end
    ensure
        sqlOut.close
    end
end
|
254
|
+
|
255
|
+
end
|
256
|
+
end
|