trix51db 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/lib/trix51db.rb +1865 -0
- metadata +46 -0
data/lib/trix51db.rb
ADDED
@@ -0,0 +1,1865 @@
# :title:Trix51 Database Engine
# Trix51 is a simple file based database based on GDBM
# and Marshal, which provides a powerful query syntax,
# helper classes, one to one and one to many relationships,
# calculated fields, indexes and constraints. It is
# designed to replace simpler file based databases and
# provide more flexibility.
#
# * +Author:+ - April Ayres-Griffiths (aag6581@gmail.com)
# * +Copyright:+ - Copyright (c) 2012 April Ayres-Griffiths
# * +License:+ - Distributed under the Apache License

require 'gdbm'
require 'pp'
require 'time'
require 'logger'

BND = binding #:nodoc:

# This class handles top level management of the database engine
# and contains various constants and information about active
# databases. It also provides a logger based interface to the
# internals of the other database objects.
class Trix51
  # Record prefix
  RECORD_PREFIX = '@R@:'
  # Metadata Prefix - For table structure records
  META_PREFIX = '@T@:'
  # Sequence Prefix - For column sequences for autoincrement fields.
  SEQ_PREFIX = '@S@:'
  # Used to join key values in a Record Key
  KEY_JOIN = ':'

  @@databases = []
  @@class_to_database = {}
  @@class_to_tablename = {}
  @@table_to_database = {}
  @@defer_classref = false
  @@logger = Logger.new('trix51.log')
  @@logger.level = Logger::WARN

  # Register a database connection with the engine.
  def Trix51.add_connection( database )
    @@databases.push( database )
  end

  # Register a helper class name against its database and table.
  def Trix51.add_helper( classname, database, table )
    @@class_to_database[ classname ] = database
    @@class_to_tablename[ classname ] = table
    @@table_to_database[ table.tablename.to_sym ] = database
    self.create_helper_code( classname )
  end

  # Return a list of active databases managed by the engine
  def Trix51.connections
    return @@databases
  end

  # Map a class name (String) to a Trix51::Table reference.
  def Trix51.class_to_table( cn )
    return @@class_to_tablename[ cn ]
  end

  # Map a class name (String) to a Trix51::Database reference.
  def Trix51.class_to_database( cn )
    return @@class_to_database[ cn ]
  end

  # Map a tablename to its database.
  def Trix51.table_to_database( tbl )
    return @@table_to_database[ tbl ]
  end

  # Log a debug message to the log.
  def Trix51.debug( msg )
    @@logger.debug( msg )
  end

  # Log an info level message to the log.
  def Trix51.info( msg )
    @@logger.info( msg )
  end

  # Log a warning message to the log.
  def Trix51.warn( msg )
    @@logger.warn( msg )
  end

  # Log an error to the log.
  def Trix51.error( msg )
    @@logger.error( msg )
  end

  # Log a fatal message to the log.
  def Trix51.fatal( msg )
    @@logger.fatal( msg )
  end

  # Generate a helper class for a table.
  def Trix51.create_helper_code( classname )
    return if (classname == 'Trix51::Tuple') or (@@defer_classref == true)
    code = <<TEMPLATE
class #{classname} < Trix51::Tuple

  def #{classname}._table
    return Trix51.class_to_table( '#{classname}' )
  end

  def #{classname}._db
    return Trix51.class_to_database( '#{classname}' )
  end

  def #{classname}.find_or_create( *args )
    return self._table.find_or_create( *args )
  end

  def #{classname}.select( *args )
    return self._table.select( *args )
  end

  def #{classname}.delete( *args )
    return self._table.delete( *args )
  end

  def #{classname}.update( *args )
    return self._table.update( *args )
  end

  def #{classname}.first( *args )
    return self._table.first( *args )
  end

  def #{classname}.all
    return self._table.all
  end

  def #{classname}.dump
    self._table.dump
  end

end
TEMPLATE
    #puts code

    eval( code, BND )
  end

  # Sets the defer_classref flag to true or false
  def Trix51.defer_classref=( toggle )
    @@defer_classref = toggle
  end

  # Returns the value of the defer_classref flag
  def Trix51.defer_classref
    return @@defer_classref
  end

  END {
    Trix51.connections.each do |db|
      db.close
    end
  }

end
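
# Illustrative sketch (not part of the original source): the class-level
# helpers above can be called directly. These calls are examples only.
#
#   Trix51.defer_classref = true   # skip generation of per-table helper classes
#   Trix51.warn( 'running without generated helper classes' )
#   Trix51.connections.each { |db| db.close }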

# Class for managing a single Trix51 based database, allowing access to tables.
class Trix51::Database < Trix51

  # Construct a Trix51::Database object.
  #
  # ==== Arguments
  # * +path:+ - Path to the database file.
  # * +database:+ - Name of the database file.
  #
  # ==== Example
  #   require 'trix51database'
  #
  #   db = Trix51::Database.new( path: './data', database: 'animals.t51' )
  #
  def initialize( *args )

    @meta_prefix = Trix51::META_PREFIX
    @tables = {}
    @args = {
      :path => './trix',
      :database => 'database.db'
    }

    args.each { |key|
      key.each_pair { |k,v|
        @args[k] = v
      }
    }

    if not Dir.exist?( @args[:path] ) then
      Dir.mkdir( @args[:path] )
    end

    # now create / open the dbm file
    begin
      @dbm = GDBM.new( "#{@args[:path]}/#{@args[:database]}" )
    rescue => e
      raise Trix51::DatabaseError, "Unable to open #{@args[:path]}/#{@args[:database]}: #{e.to_s}"
    end

    Trix51.add_connection( self )

    list = self.tables
    list.each do |tag|
      tagref = self.get_table( tag )
      Trix51.add_helper( tagref.classname, self, tagref )
    end

  end

  # Close the database, persisting any indexes to disk.
  def close
    return if @dbm.closed?
    @tables.each_value do |tblref|
      tblref.indices_save( @args[:path] )
    end
    @dbm.close
  end

  # Returns a reference object for a given table.
  # Raises an exception if the table does not exist.
  # ==== Arguments
  # * +tablename:+ - The name of the table (symbol).
  # ==== Example
  #   animal = db.get_table( :animal )
  # Note: you can also use the alias db.animal to access it.
  #
  def get_table( tablename )

    # do we have a cached object?
    if @tables[tablename].nil? then
      mref = @dbm[ Trix51::META_PREFIX+tablename.to_s ]
      raise Trix51::NotFoundError, "Table #{tablename.to_s} does not exist." if mref.nil?
      @tables[tablename] = Trix51::Table.new( tablename.to_s, @dbm )
      @tables[tablename].indices_load( @args[:path] )
    end

    return @tables[tablename]

  end

  # Create a new table.
  #
  # ==== Arguments
  # * +tablename:+ - The name of the table (symbol).
  # * +args:+ - A hash specifying the structure of the table.
  # * +classname:+ - The name of a helper class.
  #
  # ==== Example
  #   require 'trix51database'
  #
  #   animal = db.create_table(
  #     :animal,
  #     {
  #       id: { datatype: 'integer', key: true, autoincrement: true },
  #       species_id: { datatype: 'integer', required: true, indexed: true },
  #       species: { datatype: 'record', source: :species_id, table: :species, dest: :id },
  #       name: { datatype: 'string', required: true, unique: true },
  #       created_date: { datatype: 'datetime', default: '#{Time.new.to_s}' },
  #       number_of_legs: { datatype: 'integer', default: 4 },
  #       number_of_eyes: { datatype: 'integer', default: 2 },
  #       legs_and_eyes: { datatype: 'calculated', calculation: 'number_of_legs + number_of_eyes' }
  #     },
  #     'Animal'
  #   )
  #
  # ==== Field types
  # Valid field types are 'string', 'integer', 'datetime', 'float', 'boolean', 'record', 'resultset' and 'calculated'
  #
  # Fields with a +calculation:+ clause are evaluated based on other fields in the table.
  def create_table( tablename, args, classname = 'Trix51::Tuple' )

    mref = @dbm[ Trix51::META_PREFIX+tablename.to_s ]

    if not mref.nil? then
      raise Trix51::TableExistsError, "Attempt to create a table that already exists"
    end

    # create it
    @dbm[ Trix51::META_PREFIX+tablename.to_s ] = Marshal.dump(
      tablename: tablename,
      structure: args,
      classname: classname
    )
    @tables[tablename] = Trix51::Table.new( tablename, @dbm )

    Trix51.add_helper( @tables[tablename].classname, self, @tables[tablename] )

    return @tables[tablename]

  end

  # Provides a list of the tables in the database.
  def tables
    list = []
    @dbm.each_key do |k|
      if k.match( Trix51::META_PREFIX )
        list.push( k.gsub( Trix51::META_PREFIX, '' ).to_sym )
      end
    end
    return list
  end

  def method_missing( methodid ) #:nodoc:
    if self.tables.include?( methodid ) then
      return self.get_table( methodid )
    end
    raise Trix51::NotFoundError, "Table #{methodid.to_s} does not exist"
  end

  # Return true or false if a given table exists.
  def has_table?( table )
    return (self.tables.include?( table ))
  end

  # Database accessor
  attr_accessor :dbm

end
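
# Usage sketch (illustrative only, adapted from the RDoc examples above; the
# column set here is an assumption for the example):
#
#   db = Trix51::Database.new( path: './data', database: 'animals.t51' )
#   unless db.has_table?( :animal )
#     db.create_table( :animal, {
#       id:   { datatype: 'integer', key: true, autoincrement: true },
#       name: { datatype: 'string', required: true, unique: true }
#     }, 'Animal' )
#   end
#   animal = db.get_table( :animal )   # or simply db.animal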

# Exception class
class Trix51::DatabaseError < StandardError
end

# Exception class
class Trix51::NotFoundError < Trix51::DatabaseError
end

# Exception class
class Trix51::TableExistsError < Trix51::DatabaseError
end

# Exception class
class Trix51::ConstraintError < Trix51::DatabaseError
end

# Exception class
class Trix51::TypeError < Trix51::DatabaseError
end

# Exception class
class Trix51::ExistsError < Trix51::DatabaseError
end

# Represents a single record from the database.
class Trix51::Tuple < Trix51

  # Create a new tuple object
  #
  # Arguments:
  # table:: A Trix51::Table object
  # urid:: A unique record identifier
  # anonhash:: An optional anonymous hash containing the data for the tuple.
  def initialize( table, urid, anonhash=nil )
    @table = table
    @data = Marshal.load( @table.dbref[urid] ) if anonhash.nil?
    @data = anonhash if not anonhash.nil?
    @newdata = {}
    @urid = urid
  end

  # Returns true or false if the record has been updated.
  def has_updates?
    return (@newdata.keys.length > 0)
  end

  def method_missing( methodId, *args ) #:nodoc:

    name = methodId.to_s

    basename = name.gsub( '=', '' ).to_sym

    if not @table.structure[basename].nil? then
      #return @data[name]
      if name.match( '=' ) then
        @newdata[basename] = args.shift
        #puts "setting value of #{basename} to #{@newdata[basename]}.."
      else
        # plain value fields are answered from pending updates first, then stored data
        return @newdata[basename] || @data[basename] if ['string', 'integer', 'datetime', 'float', 'boolean'].include?( self.table.structure[basename][:datatype] )

        # derived fields
        dt = self.table.structure[basename][:datatype]

        if dt == 'record' then
          # handle one to one
          #pp self.table.structure[basename][:table]
          dbref = Trix51.table_to_database( self.table.structure[basename][:table] )
          #pp dbref
          tblref = dbref.get_table( self.table.structure[basename][:table] )
          srcref = self.table.structure[basename][:source]
          dstref = self.table.structure[basename][:dest]
          return tblref.first( dstref => @data[srcref] )
        end

        if dt == 'resultset' then
          dbref = Trix51.table_to_database( self.table.structure[basename][:table] )
          tblref = dbref.get_table( self.table.structure[basename][:table] )
          srcref = self.table.structure[basename][:source]
          dstref = self.table.structure[basename][:dest]
          #puts "#{tblref.tablename}.#{dstref} = #{@data[srcref]}"
          h = { dstref => @data[srcref].to_s }
          return tblref.select_hash( h )
        end

        if dt == 'calculated' then
          c = self.table.structure[basename][:calculation]
          return eval( c ).to_s
        end

      end
    else
      raise Trix51::NotFoundError, "No such field #{methodId}"
    end

  end

  # post any record updates, update any indexes and keys.
  def update

    ref = @data.merge( @newdata ) { |key, oldval, newval|
      (not newval.nil?) ? newval : oldval
    }

    # ref now represents the new record
    newurid = self.table.get_hash_key( ref )

    @newdata.each_pair do |fieldname,fieldvalue|
      raise Trix51::ConstraintError, "Update would violate unique constraint on field #{fieldname}" unless self.table.index_chk_unique_update( fieldname, fieldvalue, @urid )
    end

    if newurid == @urid then
      self.table.index_update( @urid, newurid, @newdata )
      # simple update
      self.table.dbref[ @urid ] = Marshal.dump( ref )
      @data = ref
      @newdata = {}

      #puts "Key is unchanged"

      return true
    end

    # if we are here the key has changed
    # make sure the new one does not exist
    if not self.table.dbref[ newurid ].nil? then
      @newdata = {}
      raise Trix51::ConstraintError, "Update to record would violate unique constraints"
    end

    # if we are here, re-key the record: store it under the new key and
    # remove the copy held under the old key
    self.table.index_update( @urid, newurid, @newdata )
    #puts "Re-keying the record due to a change in unique key"
    self.table.dbref.delete( @urid )
    self.table.dbref[ newurid ] = Marshal.dump( ref )
    @data = ref
    @newdata = {}
    @urid = newurid

    return true

  end

  # Accessors for the tuple class.
  attr_accessor :table, :data, :urid

end
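
# Usage sketch (illustrative only): field accessors on a tuple are resolved via
# method_missing, and changes are only persisted once #update is called. The
# +animal+ table is assumed from the examples above.
#
#   rec = animal.first( name: 'cow' )
#   rec.number_of_legs = 3
#   rec.update if rec.has_updates?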

# This class represents a set of records which can either be a table, query
# result, or the result of a group expression.
class Trix51::TupleSet

  # Creates a new TupleSet object, the core class for tables / query results.
  def initialize( tablename, dataref, options )
    @tablename = tablename
    @dbref = dataref
    @options = {}
    @indices = {}
    @sort_order = nil
    @options = options
    @updateable = false
    @meta = nil
    if self.table? then
      #pp "table"
      @updateable = true
      @meta = self.meta
    else
      @meta = {}
      #pp "query or view"
      @meta[:tablename] = '__query'
      self.clone_structure( @options[:tableref] )
      @meta[:classname] = 'Trix51::Tuple'
    end

    #pp @meta

    @classname = @meta[:classname] || 'Trix51::Tuple'

    #pp @classname

    if @classname != 'Trix51::Tuple' then
      eval(
        "
        class #{@classname} < Trix51::Tuple

        end
        ", BND
      )
    end

  end

  def build_temp_idx( field ) #:nodoc:

    if (self.indexed?( field )) and (not self.indices[field].nil?) and (field != :random) then
      return @indices[field].clone
    end

    res = {}
    self.dbref.each_pair { |k, v|
      next unless self.table_record?( k )
      record = {}
      if v.class.name == 'String' then
        record = Marshal.load( v )
      else
        record = v.data
      end
      rv = record[field] if field != :random
      rv = rand() if field == :random
      rlist = res[rv] || []
      rlist.push( k )
      res[rv] = rlist
    }
    return res
  end

  def build_sorted_keys( field, dir='asc' ) #:nodoc:
    # sort records by list of fields
    idx = self.build_temp_idx( field )
    sk = []
    idx.keys.sort.each do |fieldvalue|
      keyref = idx[fieldvalue]
      keyref.each do |key|
        sk.push( key )
      end
    end
    return sk if dir == 'asc'
    return sk.reverse if dir == 'desc'
  end

  def sorted_keys #:nodoc:
    if (not @options[:sorted].nil?) and (@options[:sorted] == true) then
      #return self.build_sorted_keys( @options['sortfield'].to_s, @options['sortorder'] )
      if @sort_order.nil? then
        @sort_order = self.build_sorted_keys( @options[:sortfield], @options[:sortorder] )
      end
      return @sort_order
    end
    return @dbref.keys
  end

  def record_keys #:nodoc:
    list = []
    @dbref.each_key do |k|
      list.push( k ) if self.table_record?(k)
    end
    return list
  end

  # Returns the number of records in the table or result set.
  def size
    c = 0
    @dbref.each_key do |k|
      c = c + 1 if k.match( self.record_prefix )
    end
    return c
  end

  def clone_structure( tupleset ) #:nodoc:
    @meta[:structure] = tupleset.structure.clone
  end

  def structure #:nodoc:
    return @meta[:structure]
  end

  # Returns the default for the specified field.
  def default( fieldname )

    unless self.exists?( fieldname ) then
      raise Trix51::NotFoundError, "field not found: #{fieldname} in table #{@meta[:tablename]}"
    end

    ref = self.structure[fieldname]

    #pp ref

    if (ref[:autoincrement] == true) and (ref[:datatype] == 'integer') then
      return self.find_or_create_seq( fieldname )
    end

    if (not ref[:default].nil?) then
      d = ref[:default]
      if m = d.to_s.match( /^\#\{(.+)\}$/ ) then
        return eval(m[1])
      end
      return d
    end

    return nil

  end

  # Returns true / false if the table contains field
  def exists?( fieldname )
    return (not self.structure[fieldname].nil?)
  end

  def find_or_create_seq( fieldname ) #:nodoc:

    seq_name = self.seq_prefix( fieldname )
    #puts "Sequence is called: #{seq_name}"

    if @dbref[seq_name].nil? then
      @dbref[seq_name] = '2'
      return 1
    else
      v = @dbref[seq_name].to_i
      @dbref[seq_name] = (v+1).to_s
      return v
    end

  end
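
  # Illustrative note (not in the original source): defaults are resolved by
  # #default above. An autoincrement integer column takes the next value from a
  # per-column sequence, while a default wrapped in '#{...}' is eval'd at insert
  # time, e.g. the column specs from the Trix51::Database#create_table example:
  #
  #   id:           { datatype: 'integer', key: true, autoincrement: true }
  #   created_date: { datatype: 'datetime', default: '#{Time.new.to_s}' }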

  # insert a record into the database using a hash
  # ==== Example
  #   daisy = animal.insert_hash( {
  #     name: 'cow',
  #     species_id: 2
  #   } )
  def insert_hash( hashdata ) #:nodoc:

    ref = self.empty_rec.merge( hashdata ) { |key, oldval, newval|
      (not newval.nil?) ? newval : oldval
    }

    #pp ref

    flt = {}

    ref.each_key { |key|
      flt[key] = ref[key] if self.exists?( key )
    }

    #pp flt

    self.check_required_fields( flt )

    urid = self.get_hash_key( flt )

    #puts "Record key: #{urid}"

    raise Trix51::ConstraintError, "unique constraint has been violated (#{urid})" if @dbref[urid]

    @dbref[urid] = Marshal.dump( flt )

    #puts "INSERT "+@dbref[urid]

    self.index_add( urid, flt )

    return self.init_record( urid )
  end

  def comparison( value, operand, target ) #:nodoc:

    #puts "[#{value}] #{operand.to_s} [#{target}]"

    if operand.to_s == 'is' then
      return (value.to_s == target.to_s)
    elsif operand.to_s == 'not' then
      return (value.to_s != target.to_s)
    elsif operand.to_s == 'like' then
      target = '^' + target.gsub( /\%/, '.*' ) + '$'
      return (value.to_s.match( target ))
    elsif operand.to_s == 'unlike' then
      target = '^' + target.gsub( /\%/, '.*' ) + '$'
      return (not value.to_s.match( target ))
    elsif operand.to_s == 'under' then
      return (value.to_f < target.to_f)
    elsif operand.to_s == 'over' then
      return (value.to_f > target.to_f)
    elsif operand.to_s == 'before' then
      return (value.to_s < target.to_s)
    elsif operand.to_s == 'after' then
      return (value.to_s > target.to_s)
    elsif operand.to_s == 'between' then
      raise Trix51::TypeError, "BETWEEN clause requires two values" unless (target.class.name == 'Array') and (target.length > 1)
      s = target.sort
      lower_bound = s.shift
      upper_bound = s.pop
      return ( ( value.to_s >= lower_bound.to_s ) and ( value.to_s <= upper_bound.to_s ) )
    elsif operand.to_s == 'in' then
      raise Trix51::TypeError, "IN clause requires an array" unless (target.class.name == 'Array') and (target.length >= 1)
      return (target.include?( value ))
    end

  end

  def eval_in_context( hashdata, expression ) #:nodoc:
    inits = []
    hashdata.each_pair do |k,v|
      next if v.nil?
      str = ''
      str = "#{k} = #{v}" if v.class.name != 'String'
      str = "#{k} = \"#{v}\"" if v.class.name == 'String'
      inits.push( str )
    end
    inits.push( expression )
    str = inits.join("\n")
    return eval( str )
  end

  def hash_compare( h1, h2 ) #:nodoc:

    # Comparators: like, not_like, not, equals, lt, gt
    #

    h1.keys.each { |key|
      #pp key
      #pp self.structure[key]
      if self.structure[key][:datatype] == 'calculated' then
        h2[key] = self.eval_in_context( h2, self.structure[key][:calculation] )
      end
      v1 = h1[key]
      if v1.class.name != 'Hash' then
        return false if not comparison( h2[key], 'is', h1[key] )
      else
        v1.each_pair do |operand, target|
          return false if not comparison( h2[key], operand, target )
        end
      end
    }
    return true
  end

  # delete records matching the specified hash
  # ==== Example
  #   num_deleted = animal.delete_hash( {
  #     name: 'cow'
  #   } )
  def delete_hash( hashdata ) #:nodoc:

    matches = self.select_hash( hashdata )

    matches.each_record do |record|
      urid = record.urid
      @dbref.delete( urid )
      self.index_remove( urid )
    end

    return matches.length

  end

  # Updates any records matching the values in hash
  # ==== Example
  #   animal.update_hash( { name: 'cow' } ) do |record|
  #     record.name = 'sheep' # baaa?
  #   end
  def update_hash( hashdata ) #:nodoc:

    matches = self.select_hash( hashdata )

    matches.each { |record|
      yield record
      record.update if record.has_updates?
    }

    return matches.length
  end

  # Updates records matching the query.
  # ==== Example
  #   animal.update( number_of_eyes: 2, number_of_legs: 4 ) do |record|
  #     record.number_of_eyes = 8
  #   end
  def update( *args )

    hashdata = args_to_hash( *args )

    matches = self.select_hash( hashdata )

    matches.each { |record|
      yield record
      record.update if record.has_updates?
    }

    return matches.length
  end

  # Deletes records matching the query.
  # ==== Example
  #   count = animal.delete( species_id: 2 )
  def delete( *args )
    return self.delete_hash( args_to_hash(*args) )
  end

  # Returns a Trix51::ResultSet containing records matching the query.
  # ==== Example
  #   records = animal.select(
  #     name: { like: 'mon%' },
  #     created_date: { between: [ '2012-04-11', '2012-12-31' ] },
  #     number_of_eyes: 2,
  #     number_of_legs: { in: [2,4] }
  #   )
  #
  # ==== Query operands
  # * +is:+ - true if value matches
  # * +in:+ - true if value is in specified list
  # * +not:+ - true if value does not match
  # * +under:+ - true if value is less than
  # * +over:+ - true if value is greater than
  # * +like:+ - true if value matches pattern ( '%' acts as wildcard )
  # * +unlike:+ - true if value does not match pattern ( '%' acts as wildcard )
  # * +between:+ - true if value is between specified values
  def select( *args )
    return self.select_hash( args_to_hash(*args) )
  end

  # Alias for select()
  def having( *args )
    return self.select_hash( args_to_hash(*args) )
  end

  # Returns the first record matching the query (Trix51::Tuple)
  def first( *args )
    res = self.select_hash( args_to_hash(*args) )
    if res.count > 0 then
      res.each_record do |x|
        return x
      end
      return nil
    end
  end

  # Returns all the records.
  def all
    return self.select_hash( {} )
  end

  # Deletes all records in the table.
  def empty
    return self.delete_hash( {} )
  end

  # Inserts a record into the database using named arguments.
  def insert( *args )
    return self.insert_hash( args_to_hash(*args) )
  end
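
  # Usage sketch (illustrative only, assuming the +animal+ table from the
  # create_table example):
  #
  #   monkey = animal.insert( name: 'monkey', species_id: 1, number_of_legs: 2 )
  #   puts monkey.name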

  def args_to_hash( *args ) #:nodoc:
    h = {}
    args.each { |rec|
      rec.each_pair { |k,v|
        h[k] = v
      }
    }
    #pp h
    return h
  end

  def tuple_by_key( *args ) #:nodoc:
    keys = self.key_fields
    h = {}
    keys.each { |key|
      h[key] = args.shift
    }
    #pp h
    urid = self.get_hash_key( h )

    s = dbref[urid]

    return Marshal.load( s ) if self.table? and not s.nil?
    return s if not s.nil?

    return nil
  end

  def init_record( urid ) #:nodoc:

    #pp self.table?
    #pp @classname

    unless self.table? then
      return @dbref[ urid ]
    end

    r = nil
    if (@classname == 'Trix51::Tuple') or (Trix51.defer_classref == true) then
      r = Trix51::Tuple.new( self, urid )
    else
      r = eval( "#{@classname}.new( self, urid )" )
    end
    return r
  end

  def check_required_fields( hashdata ) #:nodoc:

    # make sure all required fields are populated
    self.structure.each_key { |key|
      next if (not self.structure[key][:key] == true) and (not self.structure[key][:required] == true)
      raise Trix51::ConstraintError, "required or key field #{self.tablename}.#{key} is nil" if hashdata[key].nil?
      raise Trix51::ConstraintError, "unique constraint violated for field #{self.tablename}.#{key} [#{hashdata[key]}]" if not self.index_chk_unique( key, hashdata[key] )
    }

  end

  # Returns the key fields for the table
  def key_fields
    res = []
    self.structure.keys.sort.each { |key|
      #puts key
      res.push(key) if self.structure[key][:key] == true
    }
    return res
  end

  def get_hash_key( hashdata ) #:nodoc:
    keys = self.key_fields

    v = []
    keys.each { |k|
      v.push( hashdata[k] )
    }

    return self.record_prefix + ':' + v.join( Trix51::KEY_JOIN )
  end

  def empty_rec #:nodoc:
    ref = {}
    self.structure.each_key { |key|
      ref[key] = self.default( key )
    }
    return ref
  end

  def method_missing( meth, *args ) #:nodoc:
    name = meth.to_s

    if m = name.match( /^select_by_(.+)/ ) then

      fields = m[1].split('_and_')
      h = {}
      while fields.length > 0 do
        n = fields.shift
        v = args.shift
        h[n.to_sym] = v
      end

      return self.select_hash( h )

    end

    if m = name.match( /^delete_by_(.+)/ ) then

      fields = m[1].split('_and_')
      h = {}
      while fields.length > 0 do
        n = fields.shift
        v = args.shift
        h[n.to_sym] = v
      end

      return self.delete_hash( h )

    end

    if m = name.match( /^update_by_(.+)/ ) then

      fields = m[1].split('_and_')
      h = {}
      while fields.length > 0 do
        n = fields.shift
        v = args.shift
        h[n.to_sym] = v
      end

      matches = self.select_hash( h )

      matches.each { |record|
        yield record
        record.update if record.has_updates?
      }

      return matches.length

    end

  end
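
  # Illustrative sketch (not in the original source): the method_missing above
  # also provides dynamic finders built from field names joined by '_and_',
  # e.g. for the assumed +animal+ table:
  #
  #   animal.select_by_name( 'cow' )
  #   animal.select_by_species_id_and_number_of_legs( 2, 4 )
  #   animal.delete_by_name( 'monkey' )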

  # Find (select) or create a record with the named arguments.
  def find_or_create( *args )

    res = self.first( *args )

    if res.nil? then
      return self.insert_hash( args_to_hash(*args) )
    end

    return res

  end
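
  # Usage sketch (illustrative only): find_or_create returns the first matching
  # record, or inserts one from the same arguments when nothing matches.
  #
  #   cow = animal.find_or_create( name: 'cow', species_id: 2 )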

  # returns true or false if a table field is indexed.
  def indexed?( field )
    return false unless self.exists?( field )
    return ( (self.key?(field)) or (self.unique?(field)) or ( (not self.structure[field][:indexed].nil?) and (self.structure[field][:indexed] == true) ) )
  end

  # returns true or false if a table field must be unique
  def unique?( field )
    return false unless self.exists?( field )
    return ( (not self.structure[field][:unique].nil?) and (self.structure[field][:unique] == true) )
  end

  # returns true or false if a table field forms part of the record key
  def key?( field )
    return false unless self.exists?( field )
    return ( (not self.structure[field][:key].nil?) and (self.structure[field][:key] == true) )
  end

  # Rebuilds the index for the given field.
  def index_build( field )
    if not self.indexed?(field) then
      raise Trix51::NotFoundError, "field #{field} is not an indexed field"
    end
    #puts "Building index for field #{field}"
    res = {}
    self.dbref.each_pair { |k, v|
      next unless self.table_record?( k )
      record = Marshal.load( v )
      rv = record[field]
      rlist = res[rv] || []
      rlist.push( k )
      res[rv] = rlist
    }
    @indices[field] = res
  end

  def index_add( urid, record ) #:nodoc:
    record.keys.each { |fieldname|
      if self.indexed?(fieldname) then
        # check index exists
        if self.indices[fieldname].nil? then
          self.index_build( fieldname )
        else
          # partial add
          res = self.indices[fieldname]
          rv = record[fieldname]
          rlist = res[rv] || []
          rlist.push( urid )
          res[rv] = rlist
        end
      end
    }
  end

  def index_update( oldurid, newurid, record ) #:nodoc:
    record.keys.each { |fieldname|
      if self.indexed?(fieldname) then
        # check index exists
        if self.indices[fieldname].nil? then
          self.index_build( fieldname )
        else
          # partial update
          self.index_remove( oldurid )
          self.index_add( newurid, record )
        end
      end
    }
  end

  def index_remove( urid ) #:nodoc:
    self.indices.each_pair { |fieldname, index|
      index.each_value { |rlist|
        rlist.delete( urid )
      }
    }
  end

  def index_chk_unique( fieldname, fieldvalue ) #:nodoc:

    #puts fieldname + '=' + fieldvalue.to_s

    if self.indexed?( fieldname ) == false then
      return true
    end

    if self.unique?( fieldname ) == false then
      return true
    end

    if fieldvalue.nil? then
      return false
    end

    if self.indices[fieldname].nil? then
      self.index_build( fieldname )
    end

    if self.indices[fieldname][fieldvalue].nil? then
      return true
    end

    if self.indices[fieldname][fieldvalue].length == 0 then
      return true
    end

    return false
  end

  def index_chk_unique_update( fieldname, fieldvalue, urid ) #:nodoc:

    #puts fieldname + '=' + fieldvalue.to_s

    if self.indexed?( fieldname ) == false then
      return true
    end

    if self.unique?( fieldname ) == false then
      return true
    end

    if fieldvalue.nil? then
      return false
    end

    if self.indices[fieldname].nil? then
      self.index_build( fieldname )
    end

    if self.indices[fieldname][fieldvalue].nil? then
      return true
    end

    if self.indices[fieldname][fieldvalue].length == 0 then
      return true
    end

    if (self.indices[fieldname][fieldvalue].length == 1) and (self.indices[fieldname][fieldvalue][0] == urid) then
      return true
    end

    return false
  end

  # Add a column to the table.
  def column_add( name, spec )

    if not @meta[:structure][name].nil? then
      raise Trix51::ExistsError, "Column #{name} already exists"
    end

    # add in and reparse
    @meta[:structure][name] = spec
    self.meta = @meta
    @meta = self.meta

    #pp @meta

    need_rebuild = false

    # now assign default
    @dbref.each_pair do |k,v|
      next unless self.table_record?( k )

      record = Marshal.load( v )
      record[name] = self.default( name )
      v = Marshal.dump( record )
      newk = self.get_hash_key( record )
      if newk != k then
        @dbref.delete(k)
        need_rebuild = true
      end

      @dbref[ newk ] = v
    end

    @indices.clear if need_rebuild

  end

  # Remove a column from the table.
  def column_remove( name )

    if @meta[:structure][name].nil? then
      raise Trix51::NotFoundError, "Column #{name} does not exist"
    end

    # remove and reparse
    @meta[:structure].delete(name)
    self.meta = @meta
    @meta = self.meta

    need_rebuild = false

    # now drop the field from stored records
    @dbref.each_pair do |k,v|
      next unless self.table_record?( k )

      record = Marshal.load( v )
      record.delete( name ) if record.has_key?( name )
      v = Marshal.dump( record )
      newk = self.get_hash_key( record )
      if newk != k then
        @dbref.delete(k)
        need_rebuild = true
      end

      @dbref[ newk ] = v
    end

    @indices.clear if need_rebuild
  end

  # Update the specification for a column.
  def column_update( name, spec )

    if @meta[:structure][name].nil? then
      raise Trix51::NotFoundError, "Column #{name} does not exist"
    end

    oldspec = @meta[:structure][name]
    newspec = Marshal.load( Marshal.dump( spec ) )

    uval = {}
    ukey = {}

    @dbref.each_pair do |k,v|

      next unless self.table_record?( k )

      record = Marshal.load( v )
      value = record[name]

      # test field uniqueness
      if newspec[:unique] then
        raise Trix51::ConstraintError, "Field definition change would create a unique key violation condition" if not uval[value].nil?
        uval[value] = 1
      end

      # type check
      nrecord = record.clone
      if newspec[:datatype] != oldspec[:datatype] then
        begin
          nrecord[name] = self.convert_type( value, newspec[:datatype] ).to_s
          #pp nrecord
          newurid = self.get_hash_key( nrecord )
          if not ukey[newurid].nil? then
            raise Trix51::ConstraintError, "Field definition change would violate uniqueness of keys"
          end
          ukey[newurid] = 1
        rescue
          raise Trix51::TypeError, "Type conversion from #{oldspec[:datatype]} to #{newspec[:datatype]} would fail"
        end
      end

    end

    # update will be okay
    @meta[:structure][name] = newspec
    self.meta = @meta
    @meta = self.meta

    @indices.delete(name) if not @indices[name].nil?

    @dbref.each_pair do |oldurid,v|
      next unless oldurid.match( self.record_prefix )
      old_record = Marshal.load( v )
      new_record = old_record.clone
      if oldspec[:datatype] != newspec[:datatype] then
        new_record[name] = self.convert_type( old_record[name], newspec[:datatype] )
      end
      newurid = self.get_hash_key( new_record )
      if newurid != oldurid then
        @dbref.delete(oldurid)
        @dbref[newurid] = Marshal.dump( new_record )
      else
        @dbref[oldurid] = Marshal.dump( new_record )
        #self.index_update( oldurid, newurid, new_record[name] )
      end
    end

    self.index_build( name ) if self.indexed?(name)

  end
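
  # Usage sketch (illustrative only, assuming the +animal+ table): columns can
  # be added, redefined or dropped on a live table; keys and indexes are
  # rebuilt as needed.
  #
  #   animal.column_add( :habitat, { datatype: 'string', default: 'unknown' } )
  #   animal.column_update( :habitat, { datatype: 'string', indexed: true } )
  #   animal.column_remove( :habitat )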

  # Converts a value to the specified type.
  def convert_type( value, typedef )
    newvalue = nil
    if typedef == 'string' then
      return value.to_s
    end
    if typedef == 'integer' then
      return value.to_i
    end
    if typedef == 'float' then
      return value.to_f
    end
    if typedef == 'datetime' then
      return Time.parse(value.to_s).to_s
    end
  end

  # Returns true if the given object is a table, false otherwise
  def table?
    return (@dbref.class.name == 'GDBM')
  end

  # Iterate over records in the table or resultset. Any
  # updates are automatically posted back to the database.
  def each

    self.sorted_keys.each do |k|
      next unless k.match( Trix51::RECORD_PREFIX )
      r = init_record(k)
      yield r
      r.update if r.has_updates?
    end

  end

  # Iterates through each key, value for the table.
  def each_pair

    self.sorted_keys.each do |k|
      next unless self.table_record?(k)
      yield k, init_record(k)
    end

  end

  # Iterates through each key in the dbm store.
  def each_key
    self.sorted_keys.each do |k|
      yield k if self.table_record?(k)
    end
  end

  # Iterates through each Trix51::Tuple in the table or result set.
  def each_value
    self.sorted_keys.each do |k|
      next unless k.match( self.record_prefix )
      r = init_record(k)
      yield r
      r.update if r.has_updates?
    end
  end

  # Iterates through each Trix51::Tuple in the table or result set.
  def each_record
    self.sorted_keys.each do |k|
      next unless k.match( self.record_prefix )
      r = init_record(k)
      yield r
      r.update if r.has_updates?
    end
  end
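
  # Usage sketch (illustrative only): assignments made inside the iteration
  # blocks above are posted back automatically via Tuple#update.
  #
  #   animal.each_record do |rec|
  #     rec.number_of_eyes = 2 if rec.number_of_eyes.nil?
  #   end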

  def push( record ) #:nodoc:
    #puts "pushing record with urid = #{record.urid}"
    @dbref[ record.urid ] = record
  end

  # Returns the number of records in the table or result set.
  def count
    return self.size
  end

  # Returns the number of records in the table or result set.
  def length
    return self.size
  end

  def record_prefix #:nodoc:
    return Trix51::RECORD_PREFIX+self.tablename.to_s
  end

  def table_record?( key ) #:nodoc:
    return (key.match(self.record_prefix))
  end

  def meta_record?( key ) #:nodoc:
    return (key.match(Trix51::META_PREFIX))
  end

  def seq_record?( key ) #:nodoc:
    return (key.match(Trix51::SEQ_PREFIX))
  end

  def meta_prefix #:nodoc:
    return Trix51::META_PREFIX+self.tablename.to_s
  end

  def seq_prefix( fieldname ) #:nodoc:
    return Trix51::SEQ_PREFIX+self.tablename.to_s+'_'+fieldname.to_s+'_seq'
  end

  def meta #:nodoc:
    if @meta.nil? then
      @meta = Marshal.load( @dbref[self.meta_prefix] )
    end
    return @meta
  end

  def meta=(meta) #:nodoc:
    @dbref[self.meta_prefix] = Marshal.dump( meta )
  end

  # Produces a neatly formatted dump of the records (all fields)
  def dump
    fields = self.structure.keys
    self.dump_filtered( *fields )
  end

  # Produces a neatly formatted dump of the records (selected fields)
  def dump_filtered( *fieldnames )

    widths = {}
    #fieldnames = self.structure.keys.sort if fieldnames.length == 0
    lines = []

    tmp = []
    fieldnames.each do |fn|
      tmp.push( fn ) if self.exists?( fn )
    end
    fieldnames = tmp

    fieldnames.each do |field|
      widths[field] = field.length+2

      str = ''
      (field.length+1).times do |n|
        str = str + '-'
      end

      lines.push( str )
    end

    @dbref.each_pair do |k, v|
      #pp v
      next unless self.table_record?(k)
      record = nil
      if self.table? then
        record = Marshal.load( v )
      else
        record = v.data
      end
      fieldnames.each do |field|
        fv = record[field] || 'nil'
        widths[field] = fv.to_s.length+2 if fv.to_s.length+2 > widths[field]
      end
    end

    # create formatstr
    formatstr = ''
    fieldnames.each do |field|
      formatstr = formatstr + " %-#{widths[field]}s|"
    end

    puts sprintf "#{formatstr}", *fieldnames
    puts sprintf "#{formatstr}", *lines

    self.each_record do |v|
      record = v.data
      values = []
      fieldnames.each do |field|
        values.push( record[field] || 'nil' )
      end
      puts sprintf "#{formatstr}", *values
    end

    #pp @dbref

  end

  # Returns a random record from the table or result set.
  def random

    keys = []
    @dbref.each_key do |k|
      keys.push( k ) if self.table_record?(k)
    end

    return nil if keys.count == 0

    urid = keys[ rand( keys.count ) ]

    return init_record( urid )

  end

  # Sorts records by the specified field and ordering, returning a new result set.
  # The special symbol :random returns records in a randomized ordering.
  # ==== Example
  #   animal.select( number_of_legs: 4 ).sort( :name ).dump_filtered( :name )
  #
  #    name      |
  #    --------- |
  #    aardvark  |
  #    bullock   |
  #    cow       |
  def sort( field, dir='asc' )

    res = Trix51::ResultSet.new( self.tablename, {}, { tableref: self, sorted: true, sortfield: field, sortorder: dir } )

    self.each_key do |k|
      res.push( init_record( k ) )
    end

    return res

  end

  def group_key( hashdata, keylist ) #:nodoc:
    keycode = []
    keylist.each do |key|
      if not hashdata[key].nil? then
        keycode.push( hashdata[key].to_s )
      else
        keycode.push( 'nil' )
      end
    end
    return Trix51::RECORD_PREFIX+'aggregate_'+self.tablename+':'+keycode.join(':')
  end

  # Groups records by the array of fields specified in the *by:* clause, and returns
  # the aggregated results for the fields specified in the *calculate:* clause.
  #
  # ==== Group calculations
  # * +sum:+ - Sum of the field values
  # * +max:+ - Max of the field values
  # * +min:+ - Min of the field values
  # * +avg:+ - Average of the field values
  # * +count:+ - Count of the field values
  #
  # ==== Example
  #   animal.group( by: [ :species_id ], calculate: { number_of_eyes: 'sum' } ).dump
  #
  #    species_id  |sum_number_of_eyes  |
  #    ----------- |------------------- |
  #    1           | 28                 |
  #    2           | 40                 |
  #
  def group( *args )
    params = args.shift
    #pp params
    keylist = params[:by]
    fields = params[:calculate]

    collected_data = {}
    structure = {}
    agg_records = {}

    keylist.each do |key|
      structure[key] = self.structure[key]
      structure[key][:key] = true
    end

    fields.each_pair do |field,function|
      new_field = function.to_s + '_' + field.to_s
      structure[new_field.to_sym] = { :datatype => 'string' }
    end

    self.each_record do |record|

      nurid = self.group_key( record.data, keylist )

      agg_records[nurid] = {}
      keylist.each do |key|
        agg_records[nurid][key] = record.data[key]
      end

      # for this record, aggregate
      fields.each_key do |field|
        collected_data[nurid] = {} if collected_data[nurid].nil?
        collected_data[nurid][field] = [] if collected_data[nurid][field].nil?
        values = collected_data[nurid][field]
        if self.structure[field][:datatype] == 'calculated' then
          values.push( self.eval_in_context( record.data, self.structure[field][:calculation] ) )
        else
          values.push( record.data[field] )
        end
        collected_data[nurid][field] = values
      end

      # now collapse the data into hashes
      collected_data.each_key do |nurid|
        # by key, process each function

        fields.each_pair do |field,function|
          new_field = (function.to_s + '_' + field.to_s).to_sym
          values = collected_data[nurid][field]

          if function == 'sum' then
            agg_records[nurid][new_field] = values.reduce(:+)
          elsif function == 'avg' then
            agg_records[nurid][new_field] = values.reduce(:+) / values.count
          elsif function == 'min' then
            agg_records[nurid][new_field] = values.min
          elsif function == 'max' then
            agg_records[nurid][new_field] = values.max
          elsif function == 'count' then
            agg_records[nurid][new_field] = values.count
          else
            raise Trix51::NotFoundError, "unknown group function #{function}"
          end

        end

      end

    end

    res = Trix51::ResultSet.new( 'aggregate_'+self.tablename, {}, { tableref: self, structure: structure } )
    meta_init = res.meta
    meta_init[:structure] = structure
    res.meta = meta_init

    agg_records.each_pair do |k,v|
      res.dbref[k] = Trix51::Tuple.new( self, k, v )
    end

    return res

  end

  # Returns the table record at index.
  def [](index)
    keys = []
    self.sorted_keys.each do |k|
      keys.push( k ) if self.table_record?(k)
    end

    return nil if (keys.count == 0) or (keys.count <= index)

    urid = keys[ index ]

    return init_record( urid )
  end

  # Returns a new result set containing a list of the unique values.
  def distinct( *keylist )

    structure = {}
    keylist.each do |key|
      structure[key] = self.structure[key]
      structure[key][:key] = true
    end

    agg_records = {}

    self.each_record do |record|

      nurid = self.group_key( record.data, keylist )
      next unless agg_records[nurid].nil?

      agg_records[nurid] = {}
      keylist.each do |key|
        agg_records[nurid][key] = record.data[key]
      end

    end

    res = Trix51::ResultSet.new( 'aggregate_'+self.tablename, {}, { tableref: self, structure: structure } )
    meta_init = res.meta
    meta_init[:structure] = structure
    res.meta = meta_init

    agg_records.each_pair do |k,v|
      res.dbref[k] = Trix51::Tuple.new( self, k, v )
    end

    return res
  end
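
  # Usage sketch (illustrative only, assuming the +animal+ table):
  #
  #   animal.distinct( :species_id ).dump_filtered( :species_id )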

  # Returns a new result set containing only the first +count+ records.
  # ==== Example
  #   animal.select( number_of_legs: 4 ).sort( :name ).limit(2).dump_filtered( :name )
  #
  #    name      |
  #    --------- |
  #    aardvark  |
  #    bullock   |
  def limit( count )
    res = Trix51::ResultSet.new( self.tablename, {}, { tableref: self } )
    done = 0
    self.each_record do |record|
      res.dbref[ record.urid ] = record if done < count
      done = done + 1
    end
    return res
  end

  def hint_matches_by_index( taglist, field, operand, target ) #:nodoc:
    newlist = []

    # match the criteria against each key in the index, collecting any matches if they are in the original
    # taglist

    self.index_build(field) if self.indices[field].nil?

    self.indices[field].each_pair do |fieldvalue, urid_list|
      if comparison( fieldvalue, operand, target ) then
        matches = urid_list & taglist
        newlist.push( *matches ) if matches.length > 0
      end
    end

    return newlist
  end

  def hint_matches_by_recordscan( taglist, field, operand, target ) #:nodoc:

    newlist = []

    taglist.each do |urid|
      v = @dbref[urid]
      record = nil
      if v.class.name == 'String' then
        record = Marshal.load( v )
      else
        record = v.data
      end
      if self.structure[field][:datatype] == 'calculated' then
        record[field] = self.eval_in_context( record, self.structure[field][:calculation] )
      end
      newlist.push( urid ) if comparison( record[field], operand, target )
    end

    return newlist

  end

  def hint_criteria_to_operand_target( criteria ) #:nodoc:
    if criteria.class.name != 'Hash' then
      return [ 'is', criteria ]
    else
      operand = ''
      target = ''
      criteria.each_pair do |k, v|
        operand = k.to_s
        target = v
      end
      return [ operand, target ]
    end
  end

  # Return records based on the specified query.
  def select_hash( criteria ) #:nodoc:
    #criteria = self.args_to_hash( *args )
    taglist = self.record_keys

    criteria.each_pair do |field, value|
      break if taglist.length == 0

      if self.indexed?(field) then
        #puts "#{field} will use index"
        taglist = self.hint_matches_by_index( taglist, field, *hint_criteria_to_operand_target(value) )
      else
        #puts "#{field} will use record level scan (less efficient)"
        taglist = self.hint_matches_by_recordscan( taglist, field, *hint_criteria_to_operand_target(value) )
      end

    end

    res = Trix51::ResultSet.new( self.tablename, {}, { tableref: self } )
    taglist.each do |key|
      res.push( self.init_record( key ) )
    end

    return res

  end

  # Creates an index on the specified field.
  def create_index( fieldname )
    raise Trix51::NotFoundError, "Table '#{self.tablename}' does not have a field '#{fieldname}'" unless self.exists?(fieldname)
    raise Trix51::ExistsError, "Table '#{self.tablename}' already has an index on field '#{fieldname}'" if self.indexed?(fieldname)

    @meta = self.meta
    self.structure[fieldname][:indexed] = true
    self.meta = @meta

    self.index_build( fieldname )
  end
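
  # Usage sketch (illustrative only): adding an index after table creation,
  # assuming the +animal+ table from the earlier example.
  #
  #   animal.create_index( :number_of_legs )
  #   animal.select( number_of_legs: 4 )   # now answered from the index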

  def indices_save( path ) #:nodoc:
    filename = path+'/'+self.tablename.to_s+'.idx'
    #puts "*** Save index: #{filename}"
    File.open( filename, 'w' ) do |file|
      Marshal.dump( self.indices, file )
    end
  end

  def indices_load( path ) #:nodoc:
    filename = path+'/'+self.tablename.to_s+'.idx'

    if File.exists?( filename ) then
      #puts "*** Load index: #{filename}"
      File.open( filename, 'r' ) do |file|
        self.indices = Marshal.load( file )
      end
    else
      #puts "*** Generating index for #{self.tablename}"
      self.structure.keys.each { |field|
        self.index_build( field ) if self.indexed?(field)
      }
    end
    #pp self.indices
  end

  # Key accessors for the tupleset class.
  attr_accessor :dbref, :indices, :tablename, :classname
end

# Class representing a table
class Trix51::Table < Trix51::TupleSet

  # Create an instance of the table class
  def initialize( tablename, dbref )
    super( tablename, dbref, {} )
  end

end

# Class representing a set of results
class Trix51::ResultSet < Trix51::TupleSet

  # Create an instance of the resultset class used in query results.
  def initialize( tablename, dbref, options )
    super( tablename, dbref, options )
  end

end