norikra 0.0.13-java → 0.0.14-java
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/lib/norikra/engine.rb +101 -80
- data/lib/norikra/field.rb +204 -0
- data/lib/norikra/fieldset.rb +167 -0
- data/lib/norikra/logger.rb +37 -22
- data/lib/norikra/query.rb +152 -14
- data/lib/norikra/query/ast.rb +65 -22
- data/lib/norikra/rpc/handler.rb +10 -3
- data/lib/norikra/server.rb +8 -2
- data/lib/norikra/target.rb +24 -0
- data/lib/norikra/typedef.rb +117 -234
- data/lib/norikra/typedef_manager.rb +22 -16
- data/lib/norikra/version.rb +1 -1
- data/norikra.gemspec +1 -1
- data/script/spec_server_pry +31 -0
- data/spec/field_spec.rb +131 -2
- data/spec/fieldset_spec.rb +92 -23
- data/spec/query_spec.rb +204 -29
- data/spec/spec_helper.rb +39 -3
- data/spec/target_spec.rb +18 -0
- data/spec/typedef_manager_spec.rb +35 -12
- data/spec/typedef_spec.rb +235 -24
- metadata +6 -4
@@ -0,0 +1,167 @@
+require 'digest'
+require 'norikra/field'
+
+module Norikra
+  class FieldSet
+    attr_accessor :summary, :fields
+    attr_accessor :target, :level
+
+    # fieldset doesn't have container fields
+    def initialize(fields, default_optional=nil, rebounds=0)
+      @fields = {}
+      # fields.keys are raw key for container access chains
+      fields.keys.each do |key|
+        data = fields[key]
+        if data.is_a?(Norikra::Field)
+          @fields[data.name] = data
+        elsif data.is_a?(Hash)
+          type = data[:type].to_s
+          optional = data.has_key?(:optional) ? data[:optional] : default_optional
+          @fields[key.to_s] = Field.new(key.to_s, type, optional)
+        elsif data.is_a?(String) || data.is_a?(Symbol)
+          @fields[key.to_s] = Field.new(key.to_s, data.to_s, default_optional)
+        else
+          raise ArgumentError, "FieldSet.new argument class unknown: #{fields.class}"
+        end
+      end
+      self.update_summary
+
+      @target = nil
+      @level = nil
+      @rebounds = rebounds
+      @event_type_name = nil
+    end
+
+    def dup
+      fields = Hash[@fields.map{|key,field| [key, {:type => field.type, :optional => field.optional}]}]
+      self.class.new(fields, nil, @rebounds)
+    end
+
+    def self.leaves(container)
+      # returns list of [ [key-chain-items-flatten-list, value] ]
+      dig = Proc.new do |obj|
+        if obj.is_a?(Array)
+          ary = []
+          obj.each_with_index do |v,i|
+            if v.is_a?(Hash) || v.is_a?(Array)
+              ary += dig.call(v).map{|chain| [i] + chain}
+            else
+              ary.push([i, v])
+            end
+          end
+          ary
+        else # Hash
+          obj.map {|k,v|
+            if v.is_a?(Hash) || v.is_a?(Array)
+              dig.call(v).map{|chain| [k] + chain}
+            else
+              [[k, v]]
+            end
+          }.reduce(:+)
+        end
+      end
+      dig.call(container)
+    end
+
+    def self.field_names_key(data, fieldset=nil, strict=false, additional_fields=[])
+      if !fieldset && strict
+        raise RuntimeError, "strict(true) cannot be specified with fieldset=nil"
+      end
+
+      unless fieldset
+        return data.keys.sort.join(',')
+      end
+
+      keys = []
+      optionals = []
+
+      fieldset.fields.each do |key,field|
+        if field.optional?
+          optionals.push(field.name)
+        else
+          keys.push(field.name)
+        end
+      end
+      optionals += additional_fields
+
+      Norikra::FieldSet.leaves(data).each do |chain|
+        value = chain.pop
+        key = Norikra::Field.regulate_key_chain(chain).join('.')
+        unless keys.include?(key)
+          if optionals.include?(key) || (!strict && chain.size == 1)
+            keys.push(key)
+          end
+        end
+      end
+
+      keys.sort.join(',')
+    end
+
+    def field_names_key
+      self.class.field_names_key(@fields)
+    end
+
+    def update_summary
+      @summary = @fields.keys.sort.map{|k| @fields[k].escaped_name + ':' + @fields[k].type}.join(',')
+      self
+    end
+
+    def update(fields, optional_flag)
+      fields.each do |field|
+        @fields[field.name] = field.dup(optional_flag)
+      end
+      self.update_summary
+    end
+
+    #TODO: have a bug?
+    def ==(other)
+      return false if self.class != other.class
+      self.summary == other.summary
+    end
+
+    def definition
+      d = {}
+      @fields.each do |key, field|
+        d[field.escaped_name] = field.type
+      end
+      d
+    end
+
+    def subset?(other) # self is subset of other (or not)
+      (self.fields.keys - other.fields.keys).size == 0
+    end
+
+    def event_type_name
+      @event_type_name.dup
+    end
+
+    def bind(target, level, update_type_name=false)
+      @target = target
+      @level = level
+      prefix = case level
+               when :base then 'b_'
+               when :query then 'q_'
+               when :data then 'e_' # event
+               else
+                 raise ArgumentError, "unknown fieldset bind level: #{level}, for target #{target}"
+               end
+      @rebounds += 1 if update_type_name
+
+      @event_type_name = prefix + Digest::MD5.hexdigest([target, level.to_s, @rebounds.to_s, @summary].join("\t"))
+      self
+    end
+
+    def rebind(update_type_name)
+      self.dup.bind(@target, @level, update_type_name)
+    end
+
+    def format(data)
+      # all keys of data should be already known at #format (before #format, do #refer)
+      ret = {}
+      @fields.each do |key,field|
+        ret[field.escaped_name] = field.format(field.value(data))
+      end
+      ret
+    end
+  end
+end
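
Read as a whole, a FieldSet is a typed collection of Field objects plus a deterministic way to name the corresponding Esper event type. A rough usage sketch follows (Ruby on JRuby, with the gem installed); the 'string'/'int' type names and the values in the comments are assumptions drawn from the code above, not verified output.

require 'norikra/fieldset'

# Rough usage sketch of the FieldSet class added above.
# Exact type normalization lives in norikra/field.rb, which is not shown in this section.
set = Norikra::FieldSet.new({'name' => 'string', 'age' => 'int'})

set.field_names_key    # field names, sorted and comma-joined => "age,name"
set.definition         # {escaped field name => type} for every field
set.summary            # sorted "name:type" pairs joined with ',' (the digest source)

# FieldSet.leaves flattens nested containers into [key, key, ..., value] chains:
Norikra::FieldSet.leaves({'name' => 'norikra', 'req' => {'path' => '/', 'tags' => ['a', 'b']}})
# => [["name", "norikra"], ["req", "path", "/"], ["req", "tags", 0, "a"], ["req", "tags", 1, "b"]]

# #bind attaches the set to a target at a level (:base / :query / :data) and derives the
# Esper event type name: a level prefix plus an MD5 digest of target/level/rebounds/summary.
bound = set.bind('access_log', :data)
bound.event_type_name  # => "e_" followed by 32 hex characters, stable for identical inputs

Because the digest input includes the @rebounds counter, rebinding with update_type_name=true yields a fresh event type name for the same target, level, and field summary.
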
data/lib/norikra/logger.rb CHANGED
@@ -41,6 +41,8 @@ module Norikra
     @@levelnum = nil
     @@devmode = false

+    @@test_flag = false
+
     @@mon = Monitor.new

     def self.init(level, logdir, opts, devmode=false)
@@ -49,6 +51,11 @@ module Norikra
       # else => RollingFileAppender (output: directory path)
       # http://logging.apache.org/log4j/1.2/apidocs/org/apache/log4j/RollingFileAppender.html

+      if level.upcase == 'TEST' # with opts[:logger] as DummyLogger instance
+        level = LOG_LEVEL_DEFAULT
+        @@test_flag = true
+      end
+
       @@level = level.upcase
       raise ArgumentError, "unknown log level: #{@@level}" unless LOG_LEVELS.include?(@@level)
       @@levelnum = LOG_LEVELS.index(@@level)
@@ -67,30 +74,38 @@ module Norikra
       p.setProperty('log4j.appender.builtin.layout', 'org.apache.log4j.PatternLayout')
       p.setProperty('log4j.appender.builtin.layout.ConversionPattern', LOG_LOG4J_BUILTIN_FORMAT)

-      [17 removed lines; their content is not captured in this extract]
+      unless @@test_flag
+        if logdir.nil?
+          p.setProperty('log4j.appender.default', 'org.apache.log4j.ConsoleAppender')
+          p.setProperty('log4j.appender.builtin', 'org.apache.log4j.ConsoleAppender')
+        else
+          # DailyRollingFileAppender ?
+          # http://logging.apache.org/log4j/1.2/apidocs/org/apache/log4j/DailyRollingFileAppender.html
+          norikra_log = File.join(logdir, 'norikra.log')
+          p.setProperty('log4j.appender.default', 'org.apache.log4j.RollingFileAppender')
+          p.setProperty('log4j.appender.default.File', norikra_log)
+          p.setProperty('log4j.appender.default.MaxFileSize', opts[:filesize] || LOGFILE_DEFAULT_MAX_SIZE)
+          p.setProperty('log4j.appender.default.MaxBackupIndex', opts[:backups].to_s || LOGFILE_DEFAULT_MAX_BACKUP_INDEX.to_s)
+
+          builtin_log = File.join(logdir, 'builtin.log')
+          p.setProperty('log4j.appender.builtin', 'org.apache.log4j.RollingFileAppender')
+          p.setProperty('log4j.appender.builtin.File', builtin_log)
+          p.setProperty('log4j.appender.builtin.MaxFileSize', opts[:filesize] || LOGFILE_DEFAULT_MAX_SIZE)
+          p.setProperty('log4j.appender.builtin.MaxBackupIndex', opts[:backups].to_s || LOGFILE_DEFAULT_MAX_BACKUP_INDEX.to_s)
+        end
+        p.setProperty('log4j.rootLogger', "#{@@level},default")
+        org.apache.log4j.PropertyConfigurator.configure(p)
+
+        @@logger = Logger.new('norikra.log')
+
+      else # for test(rspec)
+        p.setProperty('log4j.appender.default', 'org.apache.log4j.varia.NullAppender')
+        p.setProperty('log4j.appender.builtin', 'org.apache.log4j.varia.NullAppender')
+        p.setProperty('log4j.rootLogger', "#{@@level},default")
+        org.apache.log4j.PropertyConfigurator.configure(p)
+        @@logger = opts[:logger]
       end

-      p.setProperty('log4j.rootLogger', "#{@@level},default")
-      org.apache.log4j.PropertyConfigurator.configure(p)
-
-      @@logger = Logger.new('norikra.log')
-
       @@devmode = devmode
     end

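
The practical effect of the new 'TEST' level is that log4j output goes to NullAppender and Norikra's own logger becomes whatever object is passed in opts[:logger], so specs can capture log calls. A rough sketch of that wiring in Ruby; DummyLogger here is a hypothetical stand-in, and Norikra::Log as the receiver of init is assumed from the gem's layout (the receiver is not visible in this hunk).

require 'norikra/logger'

# Hypothetical stand-in for the DummyLogger mentioned in the comment above:
# it simply records every call so a spec can assert on what was logged.
class DummyLogger
  attr_reader :logs
  def initialize
    @logs = []
  end
  %w(trace debug info warn error fatal).each do |level|
    define_method(level) do |message, *args|
      @logs.push([level, message, args])
    end
  end
end

dummy = DummyLogger.new
# Receiver assumed to be Norikra::Log, where self.init above is defined:
Norikra::Log.init('TEST', nil, {:logger => dummy})
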
data/lib/norikra/query.rb CHANGED
@@ -6,6 +6,7 @@ require 'esper/lib/cglib-nodep-2.2.jar'

 require 'norikra/error'
 require 'norikra/query/ast'
+require 'norikra/field'

 module Norikra
   class Query
@@ -19,6 +20,7 @@ module Norikra
       @fieldsets = {} # { target => fieldset }
       @ast = nil
       @targets = nil
+      @aliases = nil
       @subqueries = nil
       @fields = nil
     end
@@ -27,16 +29,6 @@ module Norikra
       self.class.new(:name => @name, :group => @group, :expression => @expression.dup)
     end

-    def dup_with_stream_name(actual_name)
-      first_target = self.targets.first
-      query = self.dup
-      query.expression = self.expression.gsub(/(\s[Ff][Rr][Oo][Mm]\s+)#{first_target}(\.|\s)/, '\1' + actual_name + '\2')
-      if query.targets.first != actual_name
-        raise RuntimeError, 'failed to replace query target into stream name:' + self.expression
-      end
-      query
-    end
-
     def to_hash
       {'name' => @name, 'group' => @group, 'expression' => @expression, 'targets' => self.targets}
     end
@@ -47,6 +39,12 @@ module Norikra
       @targets
     end

+    def aliases
+      return @aliases if @aliases
+      @aliases = (self.ast.listup(:stream).map(&:alias) + self.subqueries.map(&:aliases).flatten).sort.uniq
+      @aliases
+    end
+
     def subqueries
       return @subqueries if @subqueries
       @subqueries = self.ast.listup(:subquery).map{|n| Norikra::SubQuery.new(n)}
@@ -82,7 +80,8 @@ module Norikra
         field_bag.push(subquery.explore(fields.keys, alias_map))
       end

-      [1 removed line; content not captured in this extract]
+      known_targets_aliases = fields.keys + alias_map.keys
+      self.ast.fields(default_target, known_targets_aliases).each do |field_def|
         f = field_def[:f]
         all.push(f)

@@ -145,6 +144,68 @@ module Norikra
       raise Norikra::QueryError, e.message
     end

+    def self.rewrite_query(statement_model, mapping)
+      rewrite_event_type_name(statement_model, mapping)
+      rewrite_event_field_name(statement_model, mapping)
+    end
+
+    def self.rewrite_event_field_name(statement_model, mapping)
+      # mapping: {target_name => query_event_type_name}
+      # mapping is for target name rewriting of fully qualified field name access
+
+
+      # model.getFromClause.getStreams[0].getViews[0].getParameters[0].getPropertyName
+
+      # model.getSelectClause.getSelectList[0].getExpression.getPropertyName
+      # model.getSelectClause.getSelectList[0].getExpression.getChildren[0].getPropertyName #=> 'field.key1.$0'
+
+      # model.getWhereClause.getChildren[1].getChildren[0].getPropertyName #=> 'field.key1.$1'
+      # model.getWhereClause.getChildren[2].getChildren[0].getChain[0].getName #=> 'opts.num.$0' from opts.num.$0.length()
+
+      query = Norikra::Query.new(:expression => statement_model.toEPL)
+      targets = query.targets
+      fqfs_prefixes = targets + query.aliases
+
+      default_target = (targets.size == 1 ? targets.first : nil)
+
+      rewrite_name = lambda {|node,getter,setter|
+        name = node.send(getter)
+        if name && name.index('.')
+          prefix = nil
+          body = nil
+          first_part = name.split('.').first
+          if fqfs_prefixes.include?(first_part) or mapping.has_key?(first_part) # fully qualified field specification
+            prefix = first_part
+            if mapping[prefix]
+              prefix = mapping[prefix]
+            end
+            body = name.split('.')[1..-1].join('.')
+          elsif default_target # default target field (outside of join context)
+            body = name
+          else
+            raise Norikra::QueryError, "target cannot be determined for field '#{name}'"
+          end
+          encoded = (prefix ? "#{prefix}." : "") + Norikra::Field.escape_name(body)
+          node.send(setter, encoded)
+        end
+      }
+
+      rewriter = lambda {|node|
+        if node.respond_to?(:getPropertyName)
+          rewrite_name.call(node, :getPropertyName, :setPropertyName)
+        elsif node.respond_to?(:getChain)
+          node.getChain.each do |chain|
+            rewrite_name.call(chain, :getName, :setName)
+          end
+        end
+      }
+      recaller = lambda {|node|
+        Norikra::Query.rewrite_event_field_name(node.getModel, mapping)
+      }
+
+      traverse_fields(rewriter, recaller, statement_model)
+    end
+
     def self.rewrite_event_type_name(statement_model, mapping)
       # mapping: {target_name => query_event_type_name}

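
Both rewrite passes, the field name rewriting above and the event type rewriting below, operate on an Esper SODA statement model rather than on the EPL text (which is what the removed dup_with_stream_name did with a regexp). A rough Ruby sketch of the intended call pattern; the mapping value is a hypothetical event type name of the kind FieldSet#bind produces, and the administrator calls in the comments assume Esper's usual compileEPL/create API.

require 'norikra/query'

# Parsing alone works from the expression; targets and aliases come from the EPL AST.
query = Norikra::Query.new(:expression => 'SELECT path, count(*) FROM access_log.win:time_batch(60 sec) GROUP BY path')
query.targets   # expected: ["access_log"]
query.aliases   # aliases declared via "FROM target AS alias", if any

# mapping: {target_name => query_event_type_name}; the value here is hypothetical,
# in practice it is the event type name of the FieldSet bound for this query.
mapping = {'access_log' => 'q_0123456789abcdef0123456789abcdef'}

# The statement model comes from Esper's SODA API, e.g. (assumed call pattern):
#   model = ep_administrator.compileEPL(query.expression)
#   Norikra::Query.rewrite_query(model, mapping)   # rewrites type names and field names in place
#   ep_administrator.create(model)
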
@@ -166,19 +227,96 @@ module Norikra
         stream.getFilter.setEventTypeName(mapping[target_name])
       end

+      rewriter = lambda {|node|
+        # nothing for query expression clauses
+      }
+      recaller = lambda {|node|
+        Norikra::Query.rewrite_event_type_name(node.getModel, mapping)
+      }
+      traverse_fields(rewriter, recaller, statement_model)
+    end
+
+    # model.methods.select{|m| m.to_s.start_with?('get')}
+    # :getContextName,
+    # :getCreateContext,
+    # :getCreateDataFlow,
+    # :getCreateExpression,
+    # :getCreateIndex,
+    # :getCreateSchema,
+    # :getCreateVariable,
+    # :getCreateWindow,
+    # :getExpressionDeclarations,
+    # :getFireAndForgetClause,
+    # :getForClause,
+    # (*) :getFromClause,
+    # :getGroupByClause,
+    # :getHavingClause,
+    # :getInsertInto,
+    # :getMatchRecognizeClause,
+    # :getOnExpr,
+    # :getOrderByClause,
+    # :getOutputLimitClause,
+    # :getRowLimitClause,
+    # :getScriptExpressions,
+    # (*) :getSelectClause,
+    # :getTreeObjectName,
+    # :getUpdateClause,
+    # (*) :getWhereClause,
+
+    def self.traverse_fields(rewriter, recaller, statement_model)
+      #NOTICE: SQLStream is not supported yet.
+      #TODO: other clauses with fields, especially: OrderBy, Having, GroupBy, For
+
       dig = lambda {|node|
+        rewriter.call(node)
+
         if node.is_a?(Java::ComEspertechEsperClientSoda::SubqueryExpression)
-          [2 removed lines; content not captured in this extract]
+          recaller.call(node)
+        end
+        if node.respond_to?(:getFilter)
+          dig.call(node.getFilter)
+        end
+        if node.respond_to?(:getChildren)
           node.getChildren.each do |c|
             dig.call(c)
           end
         end
+        if node.respond_to?(:getParameters)
+          node.getParameters.each do |p|
+            dig.call(p)
+          end
+        end
+        if node.respond_to?(:getChain)
+          node.getChain.each do |c|
+            dig.call(c)
+          end
+        end
       }

+      statement_model.getFromClause.getStreams.each do |stream|
+        if stream.respond_to?(:getExpression) # PatternStream < ProjectedStream
+          dig.call(stream.getExpression)
+        end
+        if stream.respond_to?(:getFilter) # Filter < ProjectedStream
+          dig.call(stream.getFilter.getFilter) #=> Expression
+        end
+        if stream.respond_to?(:getParameterExpressions) # MethodInvocationStream
+          dig.call(stream.getParameterExpressions)
+        end
+        if stream.respond_to?(:getViews) # ProjectedStream
+          stream.getViews.each do |view|
+            view.getParameters.each do |parameter|
+              dig.call(parameter)
+            end
+          end
+        end
+      end
+
       if statement_model.getSelectClause
         statement_model.getSelectClause.getSelectList.each do |item|
-          [1 removed line; content not captured in this extract]
+          if item.respond_to?(:getExpression)
+            dig.call(item.getExpression)
+          end
         end
       end
