dataMetaDom 1.0.6 → 1.0.7
- checksums.yaml +4 -4
- data/History.md +4 -0
- data/bin/dataMetaGenScalaCases.rb +25 -0
- data/lib/dataMetaDom.rb +1 -1
- data/lib/dataMetaDom/help.rb +6 -1
- data/lib/dataMetaDom/pojo.rb +1 -1
- data/lib/dataMetaDom/scala.rb +287 -0
- data/lib/dataMetaDom/sources.rb +4 -3
- metadata +5 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 4d2947d7b7e267278ef3748b7ec57d59d3db9cff
+  data.tar.gz: d9b8bb9b537bf46cfd9e352bc83a285a7051f814
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5a0b28437e94ef6e5ddca7c1226d689faa909d091f1431c180e4302fc710ffec7ba16a21e88ed448cf09fe2eaa8b9780c67dbd7db216477432f48ab603abae44
+  data.tar.gz: 99e2c46dea64d691d7595dd035003d952d86d037330d79f180a3244efa759b08fae97da41ad901230c1e6c5c41e6f37480246dba98d92e248410eb87393d1bad
data/History.md
CHANGED
@@ -1,6 +1,10 @@
 
 # `dataMetaDom` Release history:
 
+## `1.0.7` - `2017-04-17 Mon` by [`mub`](https://github.com/mub)
+* New feature
+    * Scala Case Classes generation
+
 ## `1.0.6` - `2017-04-03 Mon` by [`mub`](https://github.com/mub)
 * Bug fix:
     * Pojo Generator had autoversion parameter misspelled.
data/bin/dataMetaGenScalaCases.rb
ADDED
@@ -0,0 +1,25 @@
+#!/usr/bin/env ruby
+# this script generates Java POJOs and the SQL DDL for the DataMeta DOM model
+# Sample:
+# mkdir ../../../../../target/pojo
+# dataMetaPojo.rb ../../../dataMeta/showCase.dmDom ../../../../../target/pojo
+
+%w(dataMetaDom dataMetaDom/scala dataMetaDom/help).each(&method(:require))
+
+include DataMetaDom, DataMetaDom::ScalaLexer
+
+@source, @target = ARGV
+DataMetaDom::helpScalaGen __FILE__ unless @source && @target
+DataMetaDom::helpScalaGen(__FILE__, "DataMeta DOM source #{@source} is not a file") unless File.file?(@source)
+DataMetaDom::helpScalaGen(__FILE__, "Case Classes destination directory #{@target} is not a dir") unless File.directory?(@target)
+
+@parser = Model.new
+begin
+  @parser.parse(@source, options={autoVerNs: true})
+  genCaseClasses(@parser, @target)
+  puts "Scala Case Classes written to #{@target}. Done."
+rescue Exception => e
+  $stderr.puts "ERROR #{e.message}; #{@parser.diagn}"
+  $stderr.puts e.backtrace.inspect
+  exit 1
+end
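Note (not part of the gem diff): the script above parses a DataMeta DOM source and hands the model to genCaseClasses, provided by the new data/lib/dataMetaDom/scala.rb module shown further below, which writes a single Model.scala under the namespace path plus the scadm subpackage. As a rough sketch only: for a hypothetical model in namespace org.sample with one enum and one record, the emitted file would look approximately like the following; the package, the Color and Item names and all the fields are invented for illustration.

    package org.sample.scadm

    import java.time.ZonedDateTime
    import scala.math.BigDecimal
    import scala.collection.immutable.Set
    import scala.collection.immutable.List
    import scala.collection.Seq

    /**
    * This content is generated by DataMeta, do not edit manually!
    */

    object Color extends Enumeration {
        type Color = Value
        val `Red`, `Green`, `Blue` = Value
    }

    case class Item (
        `id`: Long,
        `name`: String,
        `updated`: ZonedDateTime,
        `color`: Color.Value,
        `tags`: List[String]
    )

Field names come out back-quoted, enum-typed fields get a .Value suffix, and the scadm subpackage (SCALA_SUBPACKAGE) keeps the generated package from shadowing Scala's own scala package.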
data/lib/dataMetaDom.rb
CHANGED
@@ -25,7 +25,7 @@ For command line details either check the new method's source or the README.rdoc
 module DataMetaDom
 
 # Current version
-VERSION = '1.0.6'
+VERSION = '1.0.7'
 
 =begin rdoc
 Quick and dirty turning a Windows path into a path of the platform on which this script is running.
data/lib/dataMetaDom/help.rb
CHANGED
@@ -28,6 +28,11 @@ def helpPojoGen(file, errorText=nil)
   help(file, 'POJO generator', '<DataMeta DOM source> <target directory>', errorText)
 end
 
+# Shortcut to help for the Pojo Generator.
+def helpScalaGen(file, errorText=nil)
+  help(file, 'Scala generator', '<DataMeta DOM source> <target directory>', errorText)
+end
+
 # Shortcut to help for the MySQL DDL Generator.
 def helpMySqlDdl(file, errorText=nil)
   help(file, 'MySQL DDL generator', '<DataMeta DOM source> <target directory>', errorText)
@@ -38,5 +43,5 @@ def helpOracleDdl(file, errorText=nil)
   help(file, 'Oracle DDL generator', '<DataMeta DOM source> <target directory>', errorText)
 end
 
-module_function :help, :helpPojoGen, :helpMySqlDdl
+module_function :help, :helpPojoGen, :helpMySqlDdl, :helpScalaGen
 end
data/lib/dataMetaDom/pojo.rb
CHANGED
@@ -347,7 +347,7 @@ import org.ebay.datameta.dom.VerificationException;
 import org.ebay.datameta.util.jdk.SemanticVersion;
 import static org.ebay.datameta.dom.CannedRegexUtil.getCannedRegEx;
 
-#{PojoLexer.classJavaDoc
+#{PojoLexer.classJavaDoc(record.docs)}public class #{baseName} implements Verifiable {
 
 ENTITY_CLASS_HEADER
     if record.ver
data/lib/dataMetaDom/scala.rb
ADDED
@@ -0,0 +1,287 @@
+$:.unshift(File.dirname(__FILE__)) unless $:.include?(File.dirname(__FILE__)) || $:.include?(File.expand_path(File.dirname(__FILE__)))
+
+require 'dataMetaDom/help'
+require 'dataMetaDom/field'
+require 'dataMetaDom/util'
+
+module DataMetaDom
+
+=begin rdoc
+Definition for generating Scala artifacts such as case classes and everything related that depends on Scala distro only
+witout any other dependencies.
+
+For command line details either check the new method's source or the README.rdoc file, the usage section.
+=end
+module ScalaLexer
+    include DataMetaDom
+
+=begin rdoc
+Scala Data Meta export subpackage, to distinguish between other platforms. Can not make it just ".scala" because if
+someone imports all the data model classes with underscore, they may pick up the "scala" subpackage too.
+
+In which case, they will have trouble importing anything from the Scala core by "scala.*",
+that violates the principle of "Least Astonishment", they may dink around till they find out that
+they will have to use the _root_ package to access the Scala core's "scala", not the exported DataMeta "scala".
+=end
+    SCALA_SUBPACKAGE = 'scadm'
+
+=begin rdoc
+Renderer for the String type.
+=end
+    TEXTUAL_TYPER = lambda{|t| 'String'}
+=begin rdoc
+Maps DataMeta DOM datatypes to the matching Scala classes, for those that need to be imported.
+The Scala source generator will import these if they are used in the class.
+=end
+    SCALA_IMPORTS = {
+        DATETIME => 'java.time.ZonedDateTime',
+        NUMERIC => 'scala.math.BigDecimal'
+    }
+
+=begin rdoc
+DataMeta DOM aggregated field type spec mapped to matching Scala Case class:
+=end
+    AGGR_CLASSES = {
+        Field::SET => 'scala.collection.Seq', # for Case classes, if the identity is different that full set of fields, Set makes no sense
+        # which is a majority of the cases. Wait for full implementation and switch to scala.collection.mutable.Set
+        Field::LIST => 'scala.collection.immutable.List',
+        Field::DEQUE => 'scala.collection.Seq',
+    }
+
+=begin rdoc
+A map from DataMeta DOM standard types to the lambdas that render correspondent Scala types per Scala syntax.
+
+We used to render the primitives for the required types but the Verifiable interface made it impractical.
+=end
+    SCALA_TYPES = {
+        DataMetaDom::INT => lambda{ |t|
+            len = t.length
+            case
+                when len <= 4; 'Int'
+                when len <=8; 'Long'
+                else; raise "Invalid integer length #{len}"
+            end
+        },
+        STRING => TEXTUAL_TYPER,
+        DATETIME => lambda{|t| 'ZonedDateTime'},
+        BOOL => lambda{|t| 'Boolean'}, # req ? 'boolean' : 'Boolean'},
+        CHAR => TEXTUAL_TYPER,
+        FLOAT => lambda{|t|
+            len = t.length
+            case
+                when len <= 4; 'Float' # req ? 'float' : 'Float'
+                when len <=8; 'Double' #req ? 'double' : 'Double'
+                else; raise "Invalid float length #{len}"
+            end
+        },
+        RAW => lambda{|t| 'Array[Byte]'},
+        URL => lambda{|t| URL_CLASS},
+        NUMERIC => lambda{|t| 'BigDecimal'}
+    }
+
+=begin rdoc
+Maximum size of a Mapping (Map), rather aribtrary choice, not backed by any big idea.
+=end
+    MAX_MAPPING_SIZE = 10000
+
+    class << self
+=begin rdoc
+Figures out type adjusted for aggregates and maps.
+=end
+        def aggrType(aggr, trg, rawType, scalaPackage)
+            if aggr
+                k = rawType.to_sym
+                subType = rawType # PRIMS_TO_WRAP.has_key?(k) ? PRIMS_TO_WRAP[k] :
+                "#{aggr}[#{subType}]"
+            elsif trg
+                k = rawType.to_sym
+                srcType = rawType
+                typeRenderer = SCALA_TYPES[trg.type]
+                rawTrg = typeRenderer ? typeRenderer.call(trg) : self.condenseType(self.scalaNs(trg.type), scalaPackage)
+                k = rawTrg.to_sym
+                trgType = rawTrg
+                "Map[#{srcType}, #{trgType}]"
+            else
+                rawType
+            end
+        end
+
+    end
+
+    def self.condenseType(fullType, ref_namespace)
+        ns, base = DataMetaDom.splitNameSpace(fullType)
+        ns = self.scalaNs(ns)
+        # noinspection RubyNestedTernaryOperatorsInspection
+        DataMetaDom.validNs?(ns, base) ? ( ns == ref_namespace ? base : fullType) : fullType
+    end
+
+    # Unaggregated Scala type
+    def self.unaggrScalaType(dt, scalaPackage)
+        typeRenderer = SCALA_TYPES[dt.type]
+        typeRenderer ? typeRenderer.call(dt) : self.condenseType(self.scalaNs(dt.type), scalaPackage)
+    end
+
+    # aggregated Scala type
+    def self.aggrScalaType(f, scalaPackage)
+        rawType = self.unaggrScalaType(f.dataType, scalaPackage)
+        aggr = f.aggr? ? DataMetaDom.splitNameSpace(AGGR_CLASSES[f.aggr])[1] : nil
+        ScalaLexer.aggrType(aggr, f.trgType, rawType, scalaPackage)
+    end
+=begin rdoc
+Given the property +docs+ of Documentable, return the SCALA_DOC_TARGET if it is present,
+PLAIN_DOC_TARGET otherwise. Returns empty string if the argument is nil.
+=end
+    def scalaDocs(docs)
+        return '' unless docs
+        case
+            when docs[PLAIN_DOC_TARGET]
+                docs[PLAIN_DOC_TARGET].text
+            else
+                ''
+        end
+    end
+=begin rdoc
+Scala Class ScalaDoc text with the Wiki reference.
+=end
+    def classScalaDoc(docs)
+        return <<CLASS_SCALADOC
+/**
+#{ScalaLexer.scalaDocs(docs)}
+*/
+CLASS_SCALADOC
+    end
+
+=begin rdoc
+Scala Enum class-level ScalaDoc text with the Wiki reference.
+=end
+    def enumScalaDoc(docs)
+        return <<ENUM_SCALADOC
+/**
+#{ScalaLexer.scalaDocs(docs)}
+*/
+ENUM_SCALADOC
+    end
+
+=begin rdoc
+For the given DataMeta DOM data type and the isRequired flag, builds and returns the matching Scala data type declaration.
+For standard types, uses the SCALA_TYPES map
+=end
+    def getScalaType(dmDomType)
+        typeRenderer = SCALA_TYPES[dmDomType.type]
+        typeRenderer ? typeRenderer.call(dmDomType) : dmDomType.type
+    end
+
+=begin rdoc
+Generates Scala source code, the Scala class for a DataMeta DOM Record
+
+Parameters:
+* +model+ - the source model to export from
+* +out+ - open output file to write the result to.
+* +record+ - instance of DataMetaDom::Record to export
+* +scalaPackage+ - Scala package to export to
+* +baseName+ - the name of the class to generate.
+=end
+    def self.genEntity(model, out, record, scalaPackage)
+        baseName = record.baseName
+        fields = record.fields
+        out.puts <<ENTITY_CLASS_HEADER
+
+#{record.docs.empty? ? '' : ScalaLexer.classScalaDoc(record.docs)}case class #{baseName} (
+#{fields.keys.map { |k|
+    f = fields[k]
+    typeDef = self.aggrScalaType(f, scalaPackage)
+    " `#{f.name}`: #{typeDef}#{model.enums.keys.member?(f.dataType.type) ? '.Value' : ''}"
+}.join(",\n ")
+}
+)
+ENTITY_CLASS_HEADER
+    end
+
+=begin rdoc
+Generates Scala source code for the worded enum, DataMeta DOM keyword "<tt>enum</tt>".
+=end
+    def self.genEnumWorded(out, enum)
+        values = enum.keys.map{|k| enum[k]} # sort by ordinals to preserve the order
+        _, base, _ = assertNamespace(enum.name)
+        out.puts %<
+
+#{enum.docs.empty? ? '' : enumScalaDoc(enum.docs)}object #{base} extends Enumeration {
+    type #{base} = Value
+    val #{values.map{|v| "`#{v}`"}.join(', ')} = Value
+}
+>
+    end
+
+    # Distinguish JVM classes by the platform, unless it's Java
+    def self.scalaNs(ns)
+        "#{ns}.#{SCALA_SUBPACKAGE}"
+    end
+
+=begin rdoc
+Extracts 3 pieces of information from the given full name:
+* The namespace if any, i.e. Scala package, empty string if none
+* The base name for the type, without the namespace
+* Scala package's relative path, the dots replaced by the file separator.
+
+Returns an array of these pieces of info in this exact order as described here.
+=end
+    def assertNamespace(name)
+        ns, base = DataMetaDom.splitNameSpace(name)
+        scalaPackage = DataMetaDom.validNs?(ns, base) ? ns : ''
+        packagePath = scalaPackage.empty? ? '' : scalaPackage.gsub('.', File::SEPARATOR)
+
+        [scalaPackage, base, packagePath]
+    end
+=begin rdoc
+Generates scala sources for the model, the POJOs.
+* Parameters
+    * +parser+ - instance of Model
+    * +outRoot+ - output directory
+=end
+    def genCaseClasses(model, outRoot)
+        firstRec = model.records.values.first
+        raise ArgumentError, "No records defined in the model #{model.sources.masterPath}" unless firstRec
+
+        scalaPackage, base, packagePath = assertNamespace(firstRec.name)
+        scalaPackage = self.scalaNs(scalaPackage)
+        destDir = File.join(outRoot, packagePath, SCALA_SUBPACKAGE) # keep this in sync with scalaNs
+        FileUtils.mkdir_p destDir
+        out = File.open(File.join(destDir, 'Model.scala'), 'wb')
+        begin
+            out.puts %<package #{scalaPackage}
+
+import java.time.ZonedDateTime
+import scala.math.BigDecimal
+import scala.collection.immutable.Set
+import scala.collection.immutable.List
+import scala.collection.Seq
+
+/**
+* This content is generated by DataMeta, do not edit manually!
+*/
+>
+
+            (model.enums.values + model.records.values).each {|e|
+                case
+                    when e.kind_of?(DataMetaDom::Record)
+                        self.genEntity model, out, e, scalaPackage
+                    when e.kind_of?(DataMetaDom::Mappings)
+                        raise ArgumentError, "For name #{e.name}: Mappings can not be generated to a case class"
+                    when e.kind_of?(DataMetaDom::Enum)
+                        self.genEnumWorded out, e
+                    when e.kind_of?(DataMetaDom::BitSet)
+                        raise ArgumentError, "For name #{e.name}: BitsSets can not be generated to a case class"
+                    else
+                        raise "Unsupported Entity: #{e.inspect}"
+                end
+            }
+        ensure
+            out.close
+        end
+    end
+
+    module_function :genCaseClasses
+
+end
+end
+
data/lib/dataMetaDom/sources.rb
CHANGED
@@ -11,17 +11,18 @@ For command line details either check the new method's source or the README.rdoc
 =end
 class Sources
 
+    attr_reader :masterPath
 =begin rdoc
 Start parsing from the master file, collect all the files that are included.
 =end
     def initialize(masterFile)
-        masterPath = File.dirname(masterFile)
+        @masterPath = File.dirname(masterFile)
         @todo = {}; @done = {}
         libSpec = ENV[DATAMETA_LIB]
         @paths = libSpec ? libSpec.split(File::PATH_SEPARATOR).map { |e| uniPath(e) } : []
-        @paths.unshift(masterPath).flatten! if masterPath
+        @paths.unshift(@masterPath).flatten! if @masterPath
        @paths.unshift '.' # start looking in the current directory and then in the rest of the path
-        src = SourceFile.new(masterPath, File.basename(masterFile))
+        src = SourceFile.new(@masterPath, File.basename(masterFile))
         @todo[src.key] = src
     end
 
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: dataMetaDom
 version: !ruby/object:Gem::Version
-  version: 1.0.6
+  version: 1.0.7
 platform: ruby
 authors:
 - Michael Bergens
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-04-
+date: 2017-04-17 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: dataMetaXtra
@@ -60,6 +60,7 @@ executables:
 - dataMetaGvExport.rb
 - dataMetaOracleDdl.rb
 - dataMetaReVersion.rb
+- dataMetaGenScalaCases.rb
 extensions: []
 extra_rdoc_files: []
 files:
@@ -68,6 +69,7 @@ files:
 - PostInstall.txt
 - README.md
 - Rakefile
+- bin/dataMetaGenScalaCases.rb
 - bin/dataMetaGvExport.rb
 - bin/dataMetaMySqlDdl.rb
 - bin/dataMetaOracleDdl.rb
@@ -90,6 +92,7 @@ files:
 - lib/dataMetaDom/recAttr.rb
 - lib/dataMetaDom/record.rb
 - lib/dataMetaDom/ref.rb
+- lib/dataMetaDom/scala.rb
 - lib/dataMetaDom/sourceFile.rb
 - lib/dataMetaDom/sources.rb
 - lib/dataMetaDom/util.rb