cgialib 0.0.1 → 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/Manifest.txt +7 -2
- data/Rakefile +4 -3
- data/examples/ut/mytest1.c +22 -0
- data/features/language_parser.feature +21 -0
- data/features/steps/language_parser.rb +84 -0
- data/lib/cgialib/lp/JavaDoc.rb +220 -0
- data/lib/cgialib/lp/JavaLanguageScanner.rb +660 -0
- data/lib/cgialib/lp/SQLLanguageScanner.rb +688 -0
- data/lib/cgialib/lp/SQLTokenizer.rb +403 -0
- data/lib/cgialib/lp.rb +5 -0
- data/lib/cgialib/template/ut/c.rb +10 -3
- data/lib/cgialib.rb +1 -1
- data/website/index.html +4 -21
- data/website/index.txt +2 -27
- data/website/template.html.erb +1 -1
- metadata +29 -4
- data/features/development.feature +0 -13
- data/features/steps/common.rb +0 -174
@@ -0,0 +1,688 @@
+# File: SQLLanguageScanner.rb
+# Author: Jack Herrington
+# Purpose: The SQLLanguageScanner object specialized to look for SQL language elements
+# Date: 12/21/02
+
+#require "Tokenizer"
+#require "Language"
+
+module LanguageParser
+  # class : SQLField
+  #
+  # Represents a field in a table.
+
+  class SQLField
+
+    # initialize()
+    #
+    # The constructor
+
+    def initialize()
+
+      @name = ""
+      @type = ""
+      @not_null = false
+      @unique = false
+      @primary_key = false
+      @comment = ""
+
+    end
+
+    attr_accessor :name         # The name of the field
+    attr_accessor :type         # The type of the field
+    attr_accessor :unique       # True if the field is unique
+    attr_accessor :not_null     # True if the field is non-null
+    attr_accessor :primary_key  # True if the field is the primary key
+    attr_accessor :comment      # Any comment associated with the field
+
+    # to_s()
+    #
+    # Pretty prints the field as text
+
+    def to_s()
+
+      attributes = []
+      attributes.push( "not null" ) if ( @not_null )
+      attributes.push( "unique" ) if ( @unique )
+      attributes.push( "primary key" ) if ( @primary_key )
+
+      "#{@name} - #{@type} - #{attributes.join(',')}"
+
+    end
+
+  end
+
+  # class : SQLTable
+  #
+  # Represents an SQL table definition
+
+  class SQLTable
+
+    # initialize()
+    #
+    # Constructor
+
+    def initialize()
+
+      @name = ""
+      @fields = []
+      @field_hash = {}
+      @comment = ""
+
+    end
+
+    attr_accessor :name      # The name of the table
+    attr_reader :fields      # The fields of the table
+    attr_accessor :comment   # The comment associated with the table
+
+    # add_field( field )
+    #
+    # field - The field object
+    #
+    # Adds a field to the table
+
+    def add_field( field )
+
+      @fields.push( field )
+      @field_hash[ field.name.to_s.downcase ] = field
+
+    end
+
+    # get_field( name )
+    #
+    # name - The name of the field
+    #
+    # Fetchs a field object based on it's name
+
+    def get_field( name )
+      @field_hash[ name.downcase ]
+    end
+
+  end
+
+  # class : SQLLanguageScanner
+  #
+  # This is the SQLLanguageScanner which is an object specialized to read the important language
+  # elements of an SQL file.
+
+  class SQLLanguageScanner < LanguageScanner
+
+    # initialize()
+    #
+    # Constructs the SQL language scanner class
+
+    def initialize()
+
+      @tables = []
+      @table_hash = {}
+      @tableClass = SQLTable
+      @fieldClass = SQLField
+
+    end
+
+    attr_reader :tables        # The array of tables
+    attr_accessor :tableClass  # The class to use to build Table objects
+    attr_accessor :fieldClass  # The class to use to build Field objects
+
+    # to_s()
+    #
+    # Pretty printer for this object
+
+    def to_s()
+
+      text = ""
+
+      tables.each { |table|
+
+        text += "#{table.name}:\n"
+
+        table.fields.each { |field|
+          text += " #{field}\n"
+        }
+
+        text += "Comment:\n#{table.comment.strip}\n"
+        text += "\n"
+
+      }
+
+      text
+
+    end
+
+    # parse( tokens )
+    #
+    # tokens - An array of tokens built by a Tokenizer
+    #
+    # This method reads the stream of tokens built by a Tokenizer
+    # and fills the @prototypes array with the prototypes
+    # that are found.
+
+    def parse( tokens )
+
+      # This is the code fragment leading up to the interior
+      # of the function
+
+      codefrag = TokenStream.new()
+
+      has_create = false
+
+      building_table = false
+
+      # Look through each token
+
+      comment = ""
+
+      tokens.each_index { |index|
+
+        tok = tokens[ index ]
+
+        if tok.to_s =~ /^create$/i
+
+          comment = tokens.get_comments( index )
+
+          has_create = true
+
+        elsif tok.to_s =~ /^table$/i && has_create
+
+          building_table = true
+          has_create = false
+
+        elsif tok.to_s == ";" && building_table
+
+          parse_table( codefrag, comment )
+
+          codefrag = TokenStream.new()
+
+          building_table = false
+          has_create = false
+          comment = ""
+
+        elsif has_create && tok.is_a?( CodeToken )
+
+          has_create = false
+
+        elsif building_table
+
+          codefrag.push( tok )
+
+        end
+
+      }
+
+    end
+
+    protected
+
+    # parse_table( codefrag, comment )
+    #
+    # codefrag - The table tokens
+    # comment - The associated comment
+    #
+    # Parses table tokens into a table object
+
+    def parse_table( codefrag, comment )
+
+      codefrag.strip!
+
+      table_name = codefrag[ 0 ].to_s
+
+      start_table( table_name, comment )
+
+      field_def = TokenStream.new()
+
+      in_parens = 0
+
+      codefrag.each { |tok|
+
+        if ( tok.to_s == ")" )
+
+          if ( field_def.length > 0 && in_parens == 1 )
+
+            parse_field( table_name, field_def )
+
+            field_def = TokenStream.new()
+
+          end
+
+          in_parens -= 1
+
+        end
+
+        if ( tok.to_s == "," )
+
+          parse_field( table_name, field_def ) if ( field_def.length > 0 )
+          field_def = TokenStream.new()
+
+        elsif ( in_parens > 0 )
+
+          field_def.push( tok ) unless ( tok.is_a?( CommentToken ) )
+
+        end
+
+        if ( tok.to_s == "(" )
+
+          in_parens += 1
+
+        end
+
+      }
+
+    end
+
+    # The field_attributes class constant defines series of patterns that
+    # trigger specific modifications to field objects. For examples the 'not
+    # null' keywords set the not_null variable in the field object if they are
+    # found.
+
+    @@field_attributes = [
+      { :strings => [ "not", "null" ], :found => lambda { |field| field.not_null = true } },
+      { :strings => [ "unique" ], :found => lambda { |field| field.unique = true } }
+    ]
+
+    # get_field_attributes()
+    #
+    # Returns the the array of field_attributes. This method could be
+    # overridden if there we extra field attributes for a specific SQL syntax.
+
+    def get_field_attributes()
+
+      @@field_attributes
+
+    end
+
+    # parse_field( table_name, codefrag )
+    #
+    # table_name - The table being parsed
+    # codefrag - The field definition
+    #
+    # Parses a field definition associated with a table
+
+    def parse_field( table_name, codefrag )
+
+      # Get just the code tokens from the fragment
+
+      codefrag.strip!
+      code_stream = codefrag.code_only
+
+      # Get the field name
+
+      field = build_field()
+      field.name = code_stream.shift
+
+      # Build the field type
+
+      type = code_stream.shift.to_s
+
+      inParen = false
+
+      while( code_stream.length > 0 )
+
+        if code_stream.first.to_s == "[" || code_stream.first.to_s == "("
+
+          inParen = true
+          type += code_stream.shift.to_s
+
+        elsif code_stream.first.to_s == "]" || code_stream.first.to_s == ")"
+
+          inParen = false
+          type += code_stream.shift.to_s
+
+        elsif ( inParen )
+
+          type += code_stream.shift.to_s
+
+        else
+
+          break
+
+        end
+
+      end
+
+      field.type = type
+
+      # Look for special field attributes (e.g. not null, unique)
+
+      begin
+
+        found = false
+
+        get_field_attributes.each { |field_attr|
+
+          strings = field_attr[ :strings ]
+
+          found_lambda = field_attr[ :found ]
+
+          if ( code_stream.length >= strings.length )
+
+            found = true
+
+            strings.each_index { |index|
+
+              found = false unless ( code_stream[ index ].to_s.downcase == strings[ index ].to_s.downcase )
+
+            }
+
+          end
+
+          if found
+
+            strings.each_index { code_stream.shift }
+
+            found_lambda.call( field )
+
+            break
+
+          end
+
+        }
+
+      end while ( found )
+
+      add_field( table_name, field )
+
+      field
+
+    end
+
+    # start_table( table_name, comment )
+    #
+    # table_name - The name of the table
+    # comment - The associated comment
+    #
+    # Builds a new table and gets it ready for fields
+
+    def start_table( table_name, comment )
+
+      unless @table_hash[ table_name ]
+
+        table = build_table
+        table.name = table_name
+        table.comment = comment
+
+        @table_hash[ table_name ] = table
+
+        @tables.push( table )
+
+      end
+
+    end
+
+    # add_field( table_name, field )
+    #
+    # table_name - The name of the table
+    # field - The field object
+    #
+    # Adds a field to the specified table
+
+    def add_field( table_name, field )
+      @table_hash[ table_name ].add_field( field )
+    end
+
+    # build_table()
+    #
+    # Builds a new table
+
+    def build_table()
+      @tableClass.new()
+    end
+
+    # build_field()
+    #
+    # Builds a new field
+
+    def build_field()
+      @fieldClass.new()
+    end
+
+  end
+
+  # class : PostgreSQLScanner
+  #
+  # An SQLLanguageScanner specialized to read PostgreSQL.
+
+  class PostgreSQLScanner < SQLLanguageScanner
+
+    # initialize()
+    #
+    # Constuctor
+
+    def initialize()
+
+      # Create the prototype array
+
+      @prototypes = []
+
+      # Set the prototype class to build to the default
+      # prototype class
+
+      @prototypeClass = Prototype
+
+      super()
+
+    end
+
+    attr_reader :prototypes         # The array of prototypes found
+    attr_accessor :prototypeClass   # The prototype class to build
+
+    # to_s()
+    #
+    # Pretty prints the result
+
+    def to_s()
+
+      text = "Prototypes:\n"
+
+      @prototypes.each { |proto|
+        text += " #{proto}\n"
+      }
+
+      text += "\nPrototypes:\n"
+
+      @prototypes.each { |proto|
+        text += " #{proto}\n"
+      }
+
+      text += "\n"
+      text
+
+    end
+
+    # parse( tokens )
+    #
+    # tokens - Tokens returned from SQLTokenizer
+    #
+    # An override of the parser to add parsing of stored procedure prototypes
+
+    def parse( tokens )
+
+      super( tokens )
+
+      # This is the code fragment leading up to the interior
+      # of the function
+
+      codefrag = TokenStream.new()
+
+      has_create = false
+
+      building_function = false
+
+      waiting_for_language = false
+
+      # Look through each token
+
+      comment = ""
+
+      tokens.each_index { |index|
+
+        tok = tokens[ index ]
+
+        if tok.to_s =~ /^create$/i
+
+          comment = tokens.get_comments( index )
+
+          has_create = true
+
+        elsif waiting_for_language
+
+          waiting_for_language = false if tok.to_s =~ /^language$/
+
+        elsif tok.to_s =~ /^function$/i && has_create
+
+          building_function = true
+
+        elsif tok.to_s =~ /^declare$/i && building_function
+
+          parse_function( codefrag, comment )
+
+          codefrag = TokenStream.new()
+
+          building_function = false
+          has_create = false
+          waiting_for_language = true
+          comment = ""
+
+        elsif building_function
+
+          codefrag.push( tok )
+
+        end
+
+      }
+
+    end
+
+    protected
+
+    # parse_field( table_name, codefrag )
+    #
+    # table_name - The table name
+    # codefrag - The tokens of the field definition
+    #
+    # Overrides field parsing to handle PostgreSQL specific syntax
+
+    def parse_field( table_name, codefrag )
+
+      # Dump any leading or trailing whitespace tokens
+
+      codefrag.strip!
+
+      # Look for the constraint keyword. If you find it look for the primary
+      # key identifier
+
+      if ( codefrag[0].to_s =~ /^constraint$/i )
+
+        id_field = nil
+
+        codefrag.find_pattern(
+          [ "primary", "key", "(", lambda { |value| id_field = value }, ")" ]
+        )
+
+        # If we found it then id_field will be set to the name of the id field
+
+        if ( id_field )
+
+          field = @table_hash[ table_name ].get_field( id_field )
+          field.primary_key = true
+
+        end
+
+      else
+
+        # If this is not a constraint then let the base class handle the field
+        # parsing
+
+        super( table_name, codefrag )
+
+      end
+
+    end
+
+    # parse_argument( proto, arg )
+    #
+    # proto - The prototype object
+    # arg - The argument
+    #
+    # Adds an argument name to the prototype
+
+    def parse_argument( proto, arg )
+
+      proto.add_argument( arg[0].to_s() )
+
+    end
+
+    # parse_function( codefrag, comment )
+    #
+    # codefrag - The tokens of the function
+    # comment - The preceding comment
+    #
+    # Parses a stored procedure prototype
+
+    def parse_function( codefrag, comment )
+
+      # Create the prototype object
+
+      proto = build_prototype()
+
+      # Get just the code tokens
+
+      code = codefrag.code_only
+
+      # Get the method name
+
+      proto.method_name = code.shift.to_s()
+      proto.add_comment( comment.strip )
+
+      # Build token sets of the arguments and then pass them on
+      # to parse_argument
+
+      in_parens = false
+
+      arg = TokenStream.new()
+
+      code.each { |tok|
+
+        in_parens = false if ( tok.to_s == ")" )
+
+        if ( in_parens )
+
+          if ( tok.to_s == "," )
+
+            parse_argument( proto, arg ) if ( arg.length > 0 )
+            arg = TokenStream.new()
+
+          else
+
+            arg.push( tok )
+
+          end
+
+        end
+
+        in_parens = true if ( tok.to_s == "(" )
+
+      }
+
+      parse_argument( proto, arg ) if ( arg.length > 0 )
+
+      # Get the return type
+
+      index = code.find( "returns" )
+      proto.method_type = code[ index + 1 ].to_s()
+
+      # Add the prototype
+
+      @prototypes.push( proto )
+
+    end
+
+    # build_prototype()
+    #
+    # Builds and returns a prototype object
+
+    def build_prototype()
+      @prototypeClass.new()
+    end
+
+  end
+end