hayabusa 0.0.25 → 0.0.30
- checksums.yaml +5 -5
- data/README.md +4 -4
- data/Rakefile +0 -14
- data/bin/hayabusa_fcgi.fcgi +1 -0
- data/lib/hayabusa.rb +119 -117
- data/lib/hayabusa_database.rb +84 -84
- data/lib/hayabusa_datarow.rb +873 -0
- data/lib/hayabusa_http_session_response.rb +30 -29
- data/lib/hayabusa_objects.rb +1455 -0
- data/lib/hayabusa_revision.rb +347 -0
- data/lib/models/log.rb +27 -27
- data/lib/models/log_access.rb +20 -20
- data/lib/models/log_data.rb +6 -6
- data/lib/models/log_data_link.rb +2 -2
- data/lib/models/log_data_value.rb +5 -5
- data/lib/models/log_link.rb +12 -12
- data/lib/models/session.rb +6 -6
- data/pages/config_cgi.rb +3 -3
- data/pages/config_fcgi.rb +3 -3
- metadata +85 -26
- data/.document +0 -5
- data/.rspec +0 -1
- data/Gemfile +0 -24
- data/Gemfile.lock +0 -101
- data/VERSION +0 -1
- data/bin/hayabusa_fcgi.fcgi +0 -42
- data/conf/apache2_cgi_rhtml_conf.conf +0 -10
- data/conf/apache2_fcgi_rhtml_conf.conf +0 -22
- data/conf/apache2_hayabusa_conf.conf +0 -15
- data/hayabusa.gemspec +0 -168
- data/spec/fcgi_multiple_processes_spec.rb +0 -104
- data/spec/hayabusa_spec.rb +0 -423
- data/spec/spec_helper.rb +0 -12
- data/spec/test_upload.xlsx +0 -0
data/lib/hayabusa_revision.rb ADDED
@@ -0,0 +1,347 @@
+#This class takes a database-schema from a hash and runs it against the database. It then checks that the database matches the given schema.
+#
+#===Examples
+# db = Knj::Db.new(:type => "sqlite3", :path => "test_db.sqlite3")
+# schema = {
+#   "tables" => {
+#     "User" => {
+#       columns: [
+#         {"name" => "id", "type" => "int", "autoincr" => true, "primarykey" => true},
+#         {"name" => "name", "type" => "varchar"},
+#         {"name" => "lastname", "type" => "varchar"}
+#       ],
+#       "indexes" => [
+#         "name",
+#         {"name" => "lastname", columns: ["lastname"]}
+#       ],
+#       "on_create_after" => proc{|d|
+#         d["db"].insert("User", {"name" => "John", "lastname" => "Doe"})
+#       }
+#     }
+#   }
+# }
+#
+# rev = Knj::Db::Revision.new
+# rev.init_db("db" => db, "schema" => schema)
+class Hayabusa::Revision
+  def initialize(args = {})
+    @args = args
+  end
+
+  #This initializes a database-structure and content based on a schema-hash.
+  #===Examples
+  # dbrev = Knj::Db::Revision.new
+  # dbrev.init_db("db" => db_obj, "schema" => schema_hash)
+  def init_db(args)
+    schema = args["schema"]
+    db = args["db"]
+
+    #Check for normal bugs and raise apropiate error.
+    raise "'schema' argument was not a Hash: '#{schema.class.name}'." if !schema.is_a?(Hash)
+    raise "':return_keys' is not 'symbols' - Knjdbrevision will not work without it." if db.opts[:return_keys] != "symbols"
+    raise "No tables given." if !schema.has_key?("tables")
+
+    #Cache tables to avoid constant reloading.
+    if !args.key?("tables_cache") or args["tables_cache"]
+      print "Caching tables-list.\n" if args["debug"]
+      tables = db.tables.list
+    else
+      print "Skipping tables-cache.\n" if args["debug"]
+    end
+
+    schema["tables"].each do |table_name, table_data|
+      begin
+        begin
+          table_obj = db.tables[table_name]
+
+          #Cache indexes- and column-objects to avoid constant reloading.
+          cols = table_obj.columns
+          indexes = table_obj.indexes
+
+          if table_data[:columns]
+            first_col = true
+            table_data[:columns].each do |col_data|
+              begin
+                col_obj = table_obj.column(col_data["name"])
+                col_str = "#{table_name}.#{col_obj.name}"
+                type = col_data["type"].to_s
+                dochange = false
+
+                if !first_col and !col_data["after"]
+                  #Try to find out the previous column - if so we can set "after" which makes the column being created in the right order as defined.
+                  if !col_data.has_key?("after")
+                    prev_no = table_data[:columns].index(col_data)
+                    if prev_no != nil and prev_no != 0
+                      prev_no = prev_no - 1
+                      prev_col_data = table_data[:columns][prev_no]
+                      col_data["after"] = prev_col_data["name"]
+                    end
+                  end
+
+                  actual_after = nil
+                  set_next = false
+                  cols.each do |col_name, col_iter|
+                    if col_iter.name == col_obj.name
+                      break
+                    else
+                      actual_after = col_iter.name
+                    end
+                  end
+
+                  if actual_after != col_data["after"]
+                    print "Changing '#{col_str}' after from '#{actual_after}' to '#{col_data["after"]}'.\n" if args["debug"]
+                    dochange = true
+                  end
+                end
+
+                #BUGFIX: When using SQLite3 the primary-column or a autoincr-column may never change type from int... This will break it!
+                if db.opts[:type] == "sqlite3" and col_obj.type.to_s == "int" and (col_data["primarykey"] or col_data["autoincr"]) and db.int_types.index(col_data["type"].to_s)
+                  type = "int"
+                end
+
+                if type and col_obj.type.to_s != type
+                  print "Type mismatch on #{col_str}: #{col_data["type"]}, #{col_obj.type}\n" if args["debug"]
+                  dochange = true
+                end
+
+                if col_data.has_key?("primarykey") and col_obj.primarykey? != col_data["primarykey"]
+                  print "Primary-key mismatch for #{col_str}: #{col_data["primarykey"]}, #{col_obj.primarykey?}\n" if args["debug"]
+                  dochange = true
+                end
+
+                if col_data.has_key?("autoincr") and col_obj.autoincr? != col_data["autoincr"]
+                  print "Auto-increment mismatch for #{col_str}: #{col_data["autoincr"]}, #{col_obj.autoincr?}\n" if args["debug"]
+                  dochange = true
+                end
+
+                if col_data.has_key?("maxlength") and col_obj.maxlength.to_s != col_data["maxlength"].to_s
+                  print "Maxlength mismatch on #{col_str}: #{col_data["maxlength"]}, #{col_obj.maxlength}\n" if args["debug"]
+                  dochange = true
+                end
+
+                if col_data.has_key?("null") and col_obj.null?.to_s != col_data["null"].to_s
+                  print "Null mismatch on #{col_str}: #{col_data["null"]}, #{col_obj.null?}\n" if args["debug"]
+                  dochange = true
+                end
+
+                if col_data.has_key?("default") and col_obj.default.to_s != col_data["default"].to_s
+                  print "Default mismatch on #{col_str}: #{col_data["default"]}, #{col_obj.default}\n" if args["debug"]
+                  dochange = true
+                end
+
+                if col_data.has_key?("comment") and col_obj.respond_to?(:comment) and col_obj.comment.to_s != col_data["comment"].to_s
+                  print "Comment mismatch on #{col_str}: #{col_data["comment"]}, #{col_obj.comment}\n" if args["debug"]
+                  dochange = true
+                end
+
+                if col_data.is_a?(Hash) and col_data["on_before_alter"]
+                  callback_data = col_data["on_before_alter"].call("db" => db, "table" => table_obj, "col" => col_obj, "col_data" => col_data)
+                  if callback_data and callback_data["action"]
+                    if callback_data["action"] == "retry"
+                      raise Knj::Errors::Retry
+                    end
+                  end
+                end
+
+                if dochange
+                  col_obj.change(col_data)
+
+                  #Change has been made - update cache.
+                  cols = table_obj.columns
+                end
+
+                first_col = false
+              rescue Errno::ENOENT => e
+                print "Column not found: #{table_obj.name}.#{col_data["name"]}.\n" if args["debug"]
+
+                if col_data.has_key?("renames")
+                  raise "'renames' was not an array for column '#{table_obj.name}.#{col_data["name"]}'." if !col_data["renames"].is_a?(Array)
+
+                  rename_found = false
+                  col_data["renames"].each do |col_name|
+                    begin
+                      col_rename = table_obj.column(col_name)
+                    rescue Errno::ENOENT => e
+                      next
+                    end
+
+                    print "Rename #{table_obj.name}.#{col_name} to #{table_obj.name}.#{col_data["name"]}\n" if args["debug"]
+                    if col_data.is_a?(Hash) and col_data["on_before_rename"]
+                      col_data["on_before_rename"].call("db" => db, "table" => table_obj, "col" => col_rename, "col_data" => col_data)
+                    end
+
+                    col_rename.change(col_data)
+
+                    #Change has been made - update cache.
+                    cols = table_obj.columns
+
+                    if col_data.is_a?(Hash) and col_data["on_after_rename"]
+                      col_data["on_after_rename"].call("db" => db, "table" => table_obj, "col" => col_rename, "col_data" => col_data)
+                    end
+
+                    rename_found = true
+                    break
+                  end
+
+                  retry if rename_found
+                end
+
+                oncreated = col_data[:on_created]
+                col_data.delete(:on_created) if col_data[:oncreated]
+                col_obj = table_obj.create_columns([col_data])
+
+                #Change has been made - update cache.
+                cols = table_obj.columns
+
+                oncreated.call("db" => db, "table" => table_obj) if oncreated
+              end
+            end
+          end
+
+          if table_data["columns_remove"]
+            table_data["columns_remove"].each do |column_name, column_data|
+              begin
+                col_obj = table_obj.column(column_name)
+              rescue Errno::ENOENT => e
+                next
+              end
+
+              column_data["callback"].call if column_data.is_a?(Hash) and column_data["callback"]
+              col_obj.drop
+            end
+          end
+
+          if table_data[:indexes]
+            table_data[:indexes].each do |index_data|
+              if index_data.is_a?(String)
+                index_data = {name: index_data, columns: [index_data]}
+              end
+
+              begin
+                index_obj = table_obj.index(index_data[:name])
+
+                rewrite_index = false
+                rewrite_index = true if index_data.key?(:unique) and index_data[:unique] != index_obj.unique?
+
+                if rewrite_index
+                  index_obj.drop
+                  table_obj.create_indexes([index_data])
+                end
+              rescue Errno::ENOENT => e
+                table_obj.create_indexes([index_data])
+              end
+            end
+          end
+
+          if table_data["indexes_remove"]
+            table_data["indexes_remove"].each do |index_name, index_data|
+              begin
+                index_obj = table_obj.index(index_name)
+              rescue Errno::ENOENT => e
+                next
+              end
+
+              if index_data.is_a?(Hash) and index_data["callback"]
+                index_data["callback"].call if index_data["callback"]
+              end
+
+              index_obj.drop
+            end
+          end
+
+          rows_init("db" => db, "table" => table_obj, "rows" => table_data["rows"]) if table_data and table_data["rows"]
+        rescue Baza::Errors::TableNotFound => e
+          if table_data.key?("renames")
+            table_data["renames"].each do |table_name_rename|
+              begin
+                table_rename = db.tables[table_name_rename.to_sym]
+                table_rename.rename(table_name)
+                raise Knj::Errors::Retry
+              rescue Errno::ENOENT
+                next
+              end
+            end
+          end
+
+          if !table_data.key?(:columns)
+            print "Notice: Skipping creation of '#{table_name}' because no columns were given in hash.\n"
+            next
+          end
+
+          if table_data["on_create"]
+            table_data["on_create"].call("db" => db, "table_name" => table_name, "table_data" => table_data)
+          end
+
+          table_data[:columns].each do |column|
+            column.delete(:comment) if column.key?(:comment)
+            column.delete(:on_created) if column.key?(:on_created)
+          end
+
+          db.tables.create(table_name, table_data)
+          table_obj = db.tables[table_name.to_sym]
+
+          if table_data["on_create_after"]
+            table_data["on_create_after"].call("db" => db, "table_name" => table_name, "table_data" => table_data)
+          end
+
+          rows_init("db" => db, "table" => table_obj, "rows" => table_data["rows"]) if table_data["rows"]
+        end
+      rescue Knj::Errors::Retry
+        retry
+      end
+    end
+
+    if schema["tables_remove"]
+      schema["tables_remove"].each do |table_name, table_data|
+        begin
+          table_obj = db.tables[table_name.to_sym]
+          table_data["callback"].call("db" => db, "table" => table_obj) if table_data.is_a?(Hash) and table_data["callback"]
+          table_obj.drop
+        rescue Errno::ENOENT => e
+          next
+        end
+      end
+    end
+
+
+    #Free cache.
+    tables.clear if tables
+    tables = nil
+  end
+
+  private
+
+  #This method checks if certain rows are present in a table based on a hash.
+  def rows_init(args)
+    db = args["db"]
+    table = args["table"]
+
+    raise "No db given." if !db
+    raise "No table given." if !table
+
+    args["rows"].each do |row_data|
+      if row_data["find_by"]
+        find_by = row_data["find_by"]
+      elsif row_data["data"]
+        find_by = row_data["data"]
+      else
+        raise "Could not figure out the find-by."
+      end
+
+      rows_found = 0
+      args["db"].select(table.name, find_by) do |d_rows|
+        rows_found += 1
+
+        if Knj::ArrayExt.hash_diff?(Knj::ArrayExt.hash_sym(row_data["data"]), Knj::ArrayExt.hash_sym(d_rows), {"h2_to_h1" => false})
+          print "Data was not right - updating row: #{JSON.generate(row_data["data"])}\n" if args["debug"]
+          args["db"].update(table.name, row_data["data"], d_rows)
+        end
+      end
+
+      if rows_found == 0
+        print "Inserting row: #{JSON.generate(row_data["data"])}\n" if args["debug"]
+        table.insert(row_data["data"])
+      end
+    end
+  end
+end
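For orientation, here is a minimal usage sketch of the class added above, built only from its header comment and the argument checks inside init_db. Note that the header comment still refers to the old Knj::Db::Revision name while the class itself is now Hayabusa::Revision. The Knj::Db connection call, the :return_keys option and the schema contents below mirror that comment and those checks; they are illustrative assumptions, not part of this diff.

# Usage sketch, assuming a Knj::Db-compatible connection object as shown in
# the header comment. init_db refuses to run unless db.opts[:return_keys]
# equals "symbols" and the schema hash contains a "tables" key.
db = Knj::Db.new(:type => "sqlite3", :path => "test_db.sqlite3", :return_keys => "symbols")

schema = {
  "tables" => {
    "User" => {
      # init_db reads :columns and :indexes as symbol keys but expects
      # string keys inside each column hash.
      columns: [
        {"name" => "id", "type" => "int", "autoincr" => true, "primarykey" => true},
        {"name" => "name", "type" => "varchar"}
      ],
      indexes: ["name"]
    }
  }
}

Hayabusa::Revision.new.init_db("db" => db, "schema" => schema, "debug" => true)

Whether a missing table actually gets created depends on the connection object raising Baza::Errors::TableNotFound, which is the exception the rescue in init_db listens for.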
data/lib/models/log.rb CHANGED
@@ -1,15 +1,15 @@
-class Hayabusa::Models::Log < Knj::Datarow
+class Hayabusa::Models::Log < Hayabusa::Datarow
  has_many [
    {:class => :Log_link, :col => :log_id, :method => :links, :depends => true, :autodelete => true}
  ]
-
+
  def self.list(d, &block)
    sql = "SELECT #{table}.* FROM #{table}"
-
+
    if d.args["object_lookup"]
      data_val = d.ob.get_by(:Log_data_value, {"value" => d.args["object_lookup"].class.name})
      return [] if !data_val #if this data-value cannot be found, nothing has been logged for the object. So just return empty array here and skip the rest.
-
+
      sql << "
        LEFT JOIN Log_link ON
          Log_link.log_id = #{table}.id AND
@@ -17,14 +17,14 @@ class Hayabusa::Models::Log < Knj::Datarow
          Log_link.object_id = '#{d.db.esc(d.args["object_lookup"].id)}'
      "
    end
-
+
    q_args = nil
    return_sql = false
    ret = self.list_helper(d)
-
+
    sql << ret[:sql_joins]
    sql << " WHERE 1=1"
-
+
    d.args.each do |key, val|
      case key
        when "object_lookup"
@@ -44,87 +44,87 @@ class Hayabusa::Models::Log < Knj::Datarow
          raise "Invalid key: #{key}."
      end
    end
-
+
    sql << ret[:sql_where]
    sql << ret[:sql_order]
    sql << ret[:sql_limit]
-
+
    return sql if return_sql
-
+
    return d.ob.list_bysql(:Log, sql, q_args, &block)
  end
-
+
  def self.add(d)
    d.data[:date_saved] = Time.now if !d.data.key?(:date_saved)
  end
-
+
  def text
    return ob.get(:Log_data_value, self[:text_value_id])[:value]
  end
-
+
  def comment
    return "" if self[:comment_data_id].to_i == 0
    log_data = ob.get(:Log_data_value, self[:comment_data_id])
    return "" if !log_data
    return log_data[:value]
  end
-
+
  def tag
    return "" if self[:tag_data_id].to_i == 0
    log_data = ob.get(:Log_data_value, self[:tag_data_id])
    return "" if !log_data
    return log_data[:value]
  end
-
+
  def get
    ob.args[:hayabusa].log_data_hash(self[:get_keys_data_id], self[:get_values_data_id])
  end
-
+
  def post
    ob.args[:hayabusa].log_data_hash(self[:post_keys_data_id], self[:post_values_data_id])
  end
-
+
  def cookie
    ob.args[:hayabusa].log_data_hash(self[:cookie_keys_data_id], self[:cookie_values_data_id])
  end
-
+
  def meta
    ob.args[:hayabusa].log_data_hash(self[:meta_keys_data_id], self[:meta_values_data_id])
  end
-
+
  def session
    ob.args[:hayabusa].log_data_hash(self[:session_keys_data_id], self[:session_values_data_id])
  end
-
+
  def ip
    meta_d = self.meta
-
+
    return meta_d[:HTTP_X_FORWARDED_FOR].split(/\s*,\s*/).first if meta_d.has_key?(:HTTP_X_FORWARDED_FOR)
    return meta_d[:REMOTE_ADDR] if meta_d.has_key?(:REMOTE_ADDR)
    return "[no ip logged]"
  end
-
+
  def first_line
    lines = self.text.to_s.split("\n").first.to_s
  end
-
+
  def objects_html(ob_use)
    html = ""
    first = true
-
+
    self.links.each do |link|
      obj = link.object(ob_use)
-
+
      html << ", " if !first
      first = false if first
-
+
      if obj.respond_to?(:html)
        html << obj.html
      else
        html << "#{obj.class.name}{#{obj.id}}"
      end
    end
-
+
    return html
  end
end
data/lib/models/log_access.rb CHANGED
@@ -1,88 +1,88 @@
-class Hayabusa::Models::Log_access <
+class Hayabusa::Models::Log_access < Hayabusa::Datarow
  def get
    return data_hash("get")
  end
-
+
  def post
    return data_hash("post")
  end
-
+
  def meta
    return data_hash("meta")
  end
-
+
  def cookie
    return data_hash("cookie")
  end
-
+
  def ips
    return data_array(self[:ip_data_id])
  end
-
+
  def data_array(data_id)
    sql = "
      SELECT
        value_value.value AS value
-
+
      FROM
        Log_data_link AS value_links,
        Log_data_value AS value_value
-
+
      WHERE
        value_links.data_id = '#{data_id}' AND
        value_value.id = value_links.value_id
-
+
      ORDER BY
        key_links.no
    "
-
+
    arr = []
    q_array = db.query(sql)
    while d_array = q_array.fetch
      arr << d_array[:value]
    end
-
+
    return arr
  end
-
+
  def data_hash(type)
    col_keys_id = "#{type}_keys_data_id".to_sym
    col_values_id = "#{type}_values_data_id".to_sym
-
+
    keys_id = self[col_keys_id]
    values_id = self[col_values_id]
-
+
    keys_data_obj = ob.get(:Log_data, keys_id)
    values_data_obj = ob.get(:Log_data, values_id)
-
+
    sql = "
      SELECT
        key_value.value AS `key`,
        value_value.value AS value
-
+
      FROM
        Log_data_link AS key_links,
        Log_data_link AS value_links,
        Log_data_value AS key_value,
        Log_data_value AS value_value
-
+
      WHERE
        key_links.data_id = '#{keys_id}' AND
        value_links.data_id = '#{values_id}' AND
        key_links.no = value_links.no AND
        key_value.id = key_links.value_id AND
        value_value.id = value_links.value_id
-
+
      ORDER BY
        key_links.no
    "
-
+
    hash = {}
    q_hash = db.query(sql)
    while d_hash = q_hash.fetch
      hash[d_hash[:key].to_s] = d_hash[:value]
    end
-
+
    return hash
  end
end
data/lib/models/log_data.rb CHANGED
@@ -1,26 +1,26 @@
-class Hayabusa::Models::Log_data <
+class Hayabusa::Models::Log_data < Hayabusa::Datarow
  def self.force(d, id_hash)
    data_obj = d.ob.get_by(:Log_data, {"id_hash" => id_hash})
-
+
    if !data_obj
      data_obj = d.ob.add(:Log_data, {"id_hash" => id_hash})
    end
-
+
    return data_obj
  end
-
+
  def self.force_id(d, id_hash)
    data = d.db.query("SELECT * FROM Log_data WHERE id_hash = '#{d.db.esc(id_hash)}' LIMIT 1").fetch
    return data[:id].to_i if data
    return d.db.insert(:Log_data, {:id_hash => id_hash}, {:return_id => true}).to_i
  end
-
+
  def self.by_id_hash(d, id_hash)
    data = d.db.query("SELECT * FROM Log_data WHERE id_hash = '#{d.db.esc(id_hash)}' LIMIT 1").fetch
    return data[:id].to_i if data
    return false
  end
-
+
  def links(args = {})
    return ob.list(:Log_data_link, {"data" => self}.merge(args))
  end
data/lib/models/log_data_link.rb CHANGED
@@ -1,3 +1,3 @@
-class Hayabusa::Models::Log_data_link <
-
+class Hayabusa::Models::Log_data_link < Hayabusa::Datarow
+
end
data/lib/models/log_data_value.rb CHANGED
@@ -1,21 +1,21 @@
-class Hayabusa::Models::Log_data_value <
+class Hayabusa::Models::Log_data_value < Hayabusa::Datarow
  def self.force(d, value)
    value_obj = d.ob.get_by(:Log_data_value, {
      "value" => value.to_s
    })
-
+
    if !value_obj
      value_obj = d.ob.add(:Log_data_value, {"value" => value})
    end
-
+
    return value_obj
  end
-
+
  def self.force_id(d, value)
    d.db.select(:Log_data_value, {"value" => value}) do |d_val|
      return d_val[:id].to_i if d_val[:value].to_s == value.to_s #MySQL doesnt take upper/lower-case into consideration because value is a text-column... lame! - knj
    end
-
+
    return d.db.insert(:Log_data_value, {:value => value}, {:return_id => true}).to_i
  end
end
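The model diffs above all make the same one-line change: every model now inherits from the bundled Hayabusa::Datarow (shipped in this release as data/lib/hayabusa_datarow.rb) rather than the previous external superclass. Below is a minimal sketch of a model written against that base class, using only the constructs visible in these diffs (has_many, self.add, the self[:column] accessor); the Note class, its note_id link column and its title column are hypothetical and only illustrate the pattern.

# Hypothetical model following the pattern of data/lib/models/log.rb above.
# The Hayabusa::Datarow base class and the d.data / self[...] helpers are the
# ones used by the existing models; all names here are illustrative only.
class Hayabusa::Models::Note < Hayabusa::Datarow
  has_many [
    {:class => :Log_link, :col => :note_id, :method => :links, :depends => true, :autodelete => true}
  ]

  #Fill in a default save-date, mirroring Hayabusa::Models::Log.add.
  def self.add(d)
    d.data[:date_saved] = Time.now if !d.data.key?(:date_saved)
  end

  #Read a column through the datarow hash-style accessor.
  def title
    return self[:title].to_s
  end
end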