hsdq 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,269 @@
+
+ module Hsdq
+   # This module processes the incoming messages and, depending on the type of the message,
+   # calls one of the five handlers (ack, request, callback, feedback or error).
+   module Receiver
+
+
+     # **Placeholder for a received request. You must override hsdq_request in your HsdqXxx class**<br>
+     # After this method has run, there will be no further processing.
+     #
+     # @param [Hash] message The message to process
+     # @param [Hash] context The request that was originally sent.
+     # @return [String] placeholder string message; your override determines the actual return value.
+     def hsdq_request(message, context); placeholder; end
+     # **Placeholder for a received ack. You must override hsdq_ack in your HsdqXxx class**
+     # @see #hsdq_request
+     def hsdq_ack(message, context); placeholder; end
+     # **Placeholder for a received callback. You must override hsdq_callback in your HsdqXxx class**
+     # @see #hsdq_request
+     def hsdq_callback(message, context); placeholder; end
+     # **Placeholder for a received feedback. You must override hsdq_feedback in your HsdqXxx class**
+     # @see #hsdq_request
+     def hsdq_feedback(message, context); placeholder; end
+     # **Placeholder for a received error. You must override hsdq_error in your HsdqXxx class**
+     # @see #hsdq_request
+     def hsdq_error(message, context); placeholder; end
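
For illustration only, not part of the package source above: a subscriber class would override these placeholders roughly as sketched below (the class name, the exact includes, and the handler bodies are assumptions).

  class HsdqWorker
    include Hsdq::Receiver
    include Hsdq::Sender   # assumed, so the worker can reply

    # called with the stored request (burst) and its context
    def hsdq_request(message, context)
      puts "processing task #{message[:task]} from #{message[:sender]}"
    end

    # called when a callback answers one of this worker's own requests
    def hsdq_callback(message, context)
      puts "request #{context[:uid]} answered with #{message[:data]}"
    end
  end
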
+
+     # Send the ACK and start the processing for the message just received.<br>
+     # The processing is either executed synchronously or in a new thread, based on the configuration.
+     #
+     # @param [String] raw_spark the ephemeral spark just popped off the queue, in JSON format
+     # @param [Hash] options HsdqXxx class configuration options
+     def hsdq_ignit(raw_spark, options)
+       spark = h_spark raw_spark
+       send_ack spark
+       if valid_spark? spark, options
+         if hsdq_opts[:threaded]
+           # :nocov:
+           hsdq_start_thread -> { sparkle spark, options }
+           # :nocov:
+         else
+           sparkle spark, options
+         end
+       end
+     end
+
+     # blpop returns an array [list_name, data]
+     def get_spark(raw_spark)
+       raw_spark.kind_of?(Array) ? raw_spark.last : raw_spark
+     end
+
+     # Return the spark (ephemeral part of the message) pulled from the message list
+     # @param [String, Array] raw_spark JSON string, or array as returned by blpop
+     # @return [Hash] spark ready to be used by the system
+     def h_spark(raw_spark)
+       JSON.parse get_spark(raw_spark), {symbolize_names: true}
+     end
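
A quick illustration (values invented): h_spark accepts either the raw JSON string or the two-element array returned by BLPOP and yields a symbolized hash.

  h_spark ["my_channel", '{"type":"request","uid":"abc-123","task":"resize"}']
  # => { type: "request", uid: "abc-123", task: "resize" }
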
+
+     # Entry point for the task to process; this is what is executed in the threads when a message is pulled.
+     # - Pull the burst (line with the request or response) from the hash<br>
+     # - Pull the context related to a response if it exists
+     # - Set values for the next hop context in case of a request.
+     # - Call one of the 5 methods (request, ack, callback, feedback, error) in your HsdqXxx class (or the placeholder)
+     #   based on the message type
+     # @param [Hash] spark
+     # @param [Hash] options hsdq class options from setup
+     def sparkle(spark, options)
+       puts spark.inspect
+       burst, ctx_burst = get_burst spark, options
+       context ctx_burst
+
+       case spark[:type].to_sym
+       when :ack
+         hsdq_ack burst, context
+       when :callback
+         hsdq_callback burst, context
+       when :feedback
+         hsdq_feedback burst, context
+       when :error
+         hsdq_error burst, context
+       when :request
+         set_context spark
+
+         hsdq_request burst, context
+       end
+     end
+
+     # Save context data into the thread_store for future use
+     # @param [Hash] spark
+     def set_context(spark)
+       # store in thread_store for later use
+       sent_to spark[:sender]
+       previous_sender spark[:sender]
+       context_params({ reply_to: spark[:previous_sender], spark_uid: spark[:spark_uid]})
+     end
+
+     # Manage pulling:
+     # - the burst (persistent part of the message) associated with the spark, from the matching Redis hash
+     # - the context data, if needed
+     # @param [Hash] spark
+     # @param [Hash] _options for the app (currently unused)
+     def get_burst(spark, _options={})
+       # get the context parameters
+       context_h = spark[:context]
+
+       burst_p = -> { cx_data.hget hsdq_key(spark), burst_key(spark) }
+       if response?(spark) && context_h
+         # save previous_sender in thread_store for later reply
+         sent_to context_h[:previous_sender]
+         # set the proc for the Redis multi to pull the initial request
+         burst_context_p = -> { cx_data.hget hsdq_key(spark), "request_#{context_h[:spark_uid]}" }
+         # exec the redis multi
+         burst_j, burst_context_j = pull_burst(burst_p, burst_context_p)
+       else
+         burst_j, burst_context_j = pull_burst_only burst_p
+       end
+
+       burst = burst_j ? (JSON.parse burst_j, {symbolize_names: true}) : {}
+       burst_context = burst_context_j ? (JSON.parse burst_context_j, {symbolize_names: true}) : {}
+
+       [burst, burst_context]
+     end
+
+     # Execute a multi transaction to get the burst and the context from Redis in a single call
+     # @param [Proc] burst_p query to pull the burst from Redis
+     # @param [Proc] burst_context_p query to pull the context from Redis
+     # @return [Array] [burst, context]
+     def pull_burst(burst_p, burst_context_p)
+       cx_data.multi do
+         burst_p.call
+         burst_context_p.call
+       end
+     end
+
+     # If there is no context, this method is used instead of pull_burst
+     # @see #pull_burst
+     # @param [Proc] burst_p query to pull the burst from Redis
+     def pull_burst_only(burst_p)
+       [burst_p.call, nil]
+     end
+
+     # Spark validation; calls valid_type?. If invalid:
+     # - an error is sent back to the sender
+     # - false is returned to the processing to stop the action.
+     # @param [Hash] spark
+     # @param [Hash] options Application options
+     # @return [Boolean] true in case of a valid spark
+     # @return [Boolean] false if the spark is invalid (the error message is sent back to the sender)
+     def valid_spark?(spark, options)
+       begin
+         raise ArgumentError.new("Illegal type #{spark[:type]}") unless valid_type? spark[:type]
+         'request' == spark[:type] ? check_whitelist(spark, options) : true
+       rescue => e
+         reject_spark spark, e
+         false
+       end
+     end
+
+     # Call whitelisted? to verify that the topic and task are legit.
+     # @param [Hash] spark
+     # @param [Hash] options
+     # @return [Boolean] true if the whitelist validation is successful
+     # @return [Boolean] false if the validation fails (the error message is sent back to the sender)
+     def check_whitelist(spark, options)
+       begin
+         whitelisted?(spark, options) ? true : (raise ArgumentError.new("Illegal argument in topic or task"))
+       rescue => e
+         reject_spark spark, e
+         false
+       end
+     end
+
+     # validate the topic and the task
+     def whitelisted?(spark, options)
+       valid_topic?(spark, options) && valid_task?(spark, options)
+     end
+
+     # Send an error message back to the sender
+     # @param [Hash] spark The rejected spark
+     # @param [ArgumentError] e the error raised during validation
+     # @return [Hash] the error message
+     def reject_spark(spark, e)
+       error = {
+         sent_to: spark[:sender],
+         uid: spark[:uid],
+         sender: channel,
+         params: spark,
+         data: e.message
+       }
+       puts "sending error message: #{error}"
+       hsdq_send_error error
+       error
+     end
+
+     # Send the ack back to the sender in case of a request
+     def send_ack(spark)
+       return unless ['request', :request].include? spark[:type]
+       ack_msg = spark.merge sent_to: spark[:sender], sender: channel
+       hsdq_send_ack ack_msg
+     end
+
+     # Array of the internal authorized message types
+     def hsdq_authorized_types
+       [:request, :ack, :feedback, :callback, :error]
+     end
+
+     # Cached value of the tasks authorized to be processed
+     # @param [Array] tasks Additional tasks to the ones set in the configuration file
+     # @return [Array] the authorized tasks
+     def hsdq_authorized_tasks(*tasks)
+       if tasks.any?
+         @hsdq_authorized_tasks = [tasks].flatten
+       else
+         @hsdq_authorized_tasks ||= [hsdq_opts[:tasks]].flatten
+       end
+     end
+
+     # Cached value of the topics authorized to be processed
+     def hsdq_authorized_topics(*topics)
+       if topics.any?
+         @hsdq_authorized_topics = [topics].flatten
+       else
+         @hsdq_authorized_topics ||= [hsdq_opts[:topics]].flatten
+       end
+     end
+
+     # @param [Array] tasks REPLACE the tasks set in the configuration file, if any
+     # @return [Array] the authorized tasks
+     def hsdq_set_authorized_tasks(*tasks)
+       @hsdq_authorized_tasks = tasks.flatten
+     end
+
+     # @param [Array] topics REPLACE the topics set in the configuration file, if any
+     # @return [Array] the authorized topics
+     def hsdq_set_authorized_topics(*topics)
+       @hsdq_authorized_topics = topics.flatten
+     end
+
+     # @param [Array] tasks Additional tasks to the ones set in the configuration file
+     # @return [Array] the authorized tasks
+     def hsdq_add_authorized_tasks(*tasks)
+       @hsdq_authorized_tasks = [hsdq_authorized_tasks, tasks].flatten
+     end
+
+     # @param [Array] topics Additional topics to the ones set in the configuration file
+     # @return [Array] the authorized topics
+     def hsdq_add_authorized_topics(*topics)
+       @hsdq_authorized_topics = [hsdq_authorized_topics, topics].flatten
+     end
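
For example (class and values invented), a worker can restrict what it will accept by seeding these cached lists, typically once at startup:

  class HsdqWorker
    include Hsdq::Receiver

    def configure_whitelist
      hsdq_set_authorized_tasks :resize, :crop   # replaces any tasks coming from the config file
      hsdq_add_authorized_topics :images         # appends to the configured topics
    end
  end
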
+
+     # test the task against the list of authorized tasks
+     #
+     def valid_task?(spark, _options)
+       return true if spark[:task].nil? || hsdq_authorized_tasks.empty?
+       hsdq_authorized_tasks.include?(spark[:task].to_sym)
+     end
+
+     # test the topic against the list of authorized topics
+     def valid_topic?(spark, _options)
+       return true if spark[:topic].nil? || hsdq_authorized_topics.empty?
+       hsdq_authorized_topics.include?(spark[:topic].to_sym)
+     end
+
+     # @return [Boolean] true if the message received is a response
+     def response?(spark)
+       [:callback, :feedback, :error].include? spark[:type].to_sym
+     end
+
+   end
+ end
@@ -0,0 +1,124 @@
+
+ module Hsdq
+   # This module holds the methods for the sender (emitter) side of your hsdq class.
+   #
+   # It provides proxy methods that set the correct type on the messages you send.
+   module Sender
+     include Connectors
+
+     # To be used by your application to send a request from your hsdq class. This is a proxy for hsdq_send that sends request messages.
+     #
+     # @param [Hash] message The request you want to send
+     # @return [Hash] your original message with the additional system parameters
+     # @return [Boolean] false if the message's validation failed
+     def hsdq_send_request(message)
+       hsdq_send(message.merge(type: :request))
+     end
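
For illustration (channel name, topic, task and payload invented), a client class mixing in Hsdq::Sender could issue a request like this; sender, uid, spark_uid and tstamp are filled in by prepare_message below.

  class HsdqClient
    include Hsdq::Sender

    def request_resize(file)
      hsdq_send_request(
        sent_to: 'image_worker',          # the receiver's channel (Redis list) name
        topic:   :images,
        task:    :resize,
        data:    { file: file, width: 800 }
      )
    end
  end
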
+
+     # An ack message is the acknowledgement that a message has been received by the receiver (subscriber).
+     #
+     # The ack is sent automatically by the system. This method is to send additional ack messages if needed (very seldom used).
+     #
+     # @param [Hash] message for the acknowledgement you want to send (sent automatically by the system)
+     # @return [Hash] your original message with the additional system parameters
+     # @return [Boolean] false if the message's validation failed
+     def hsdq_send_ack(message)
+       hsdq_send(message.merge(type: :ack))
+     end
+
+     # Callback messages are the final step of a successful response.
+     #
+     # @param [Hash] message for the callback you want to send
+     # @return [Hash] your original message with the additional system parameters
+     # @return [Boolean] false if the message's validation failed
+     def hsdq_send_callback(message)
+       hsdq_send(message.merge(type: :callback))
+     end
+
+     # Feedback messages are intermediate messages used to update the sender on the progress of its request.
+     #
+     # @param [Hash] message for the feedback you want to send
+     # @return [Hash] your original message with the additional system parameters
+     # @return [Boolean] false if the message's validation failed
+     def hsdq_send_feedback(message)
+       hsdq_send(message.merge(type: :feedback))
+     end
+
+     # Error messages are used to return an error to the sender in case of an error during processing.
+     #
+     # Error messages are also sent automatically by the system in case of a validation error at message reception.
+     #
+     # @param [Hash] message for the error message you want to send
+     # @return [Hash] your original message with the additional system parameters
+     # @return [Boolean] false if the message's validation failed
+     def hsdq_send_error(message)
+       hsdq_send(message.merge(type: :error))
+     end
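
A sketch of how a request handler might use these proxies (handler body invented); sent_to and uid are not passed explicitly here because prepare_message falls back to the thread-store values saved when the request was received.

  def hsdq_request(message, context)
    hsdq_send_feedback data: 'started'
    result = process(message[:data])      # hypothetical application work
    hsdq_send_callback data: result
  rescue => e
    hsdq_send_error data: e.message
  end
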
+
+     # Generic method to send any type of message. It is preferred to use the send proxy methods that set
+     # the correct type for the message the application has to send: hsdq_send_request, hsdq_send_callback, etc.
+     # The message type must be provided.
+     #
+     # @param [Hash] message to send
+     # @return [Hash] original message with the additional system parameters if the message is sent
+     # @return [Boolean] false if the message's validation failed
+     def hsdq_send(message)
+       message = prepare_message message
+       if valid_keys?(message) && valid_type?(message[:type])
+         spark = build_spark(message)
+         send_message message, spark
+       else
+         false
+       end
+     end
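
If hsdq_send is called directly rather than through a proxy, the type has to be supplied explicitly (values invented):

  hsdq_send type: :feedback, sent_to: 'requester_channel', data: { progress: 42 }
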
+
+     # Send the message using a Redis multi in order to do everything within a single transaction
+     # @param [Hash] message to send, will be stored as an entry in the message hash
+     # @param [Hash] spark the ephemeral part of the message, pushed to a list
+     # @return [String] "OK"
+     def send_message(message, spark)
+       # precompute values to avoid further processing inside the Redis multi command
+       channel_name = message[:sent_to]
+       h_key = hsdq_key message
+       burst_j = message.to_json
+       spark_j = spark.to_json
+       bkey = burst_key(message)
+
+       cx_data.multi do
+         cx_data.hset h_key, bkey, burst_j
+         cx_data.expire h_key, 259200 # 3 days; todo: set by options
+         cx_data.rpush channel_name, spark_j
+       end
+     end
+
+     # Complete the message with the key values needed by the system
+     # @param [Hash] message original message
+     # @return [Hash] message with the additional system data
+     def prepare_message(message)
+       message[:sender] = channel
+       message[:uid] ||= current_uid || SecureRandom.uuid
+       message[:spark_uid] = SecureRandom.uuid
+       message[:tstamp] = Time.now.utc
+       message[:context] = context_params
+       message[:previous_sender] = previous_sender
+       message[:sent_to] ||= sent_to
+       message
+     end
+
+     # Generate the spark from the message. The spark is ephemeral, so everything in the spark is also included in the message hash.
+     # @param [Hash] message to be sent
+     # @return [Hash] spark, the tiny part pushed to the list
+     def build_spark(message)
+       keys = [:sender, :uid, :spark_uid, :tstamp, :context, :previous_sender, :type, :topic, :task]
+       spark = keys.inject({}) { |memo, param| memo.merge(param => message[param]) }
+       spark
+     end
+
+     # Validate that the minimum necessary keys are present in the message before sending it.
+     # @param [Hash] message The full message to be sent (including the system data)
+     def valid_keys?(message)
+       [:sender, :sent_to, :type, :uid] - message.keys == []
+     end
+
+   end
+ end
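
To illustrate what ends up where (field values invented): build_spark keeps only the nine keys listed above, so the spark pushed to the list carries routing metadata, while the full message, including any payload such as :data, is stored as the burst in the Redis hash.

  message = prepare_message(sent_to: 'image_worker', type: :request, task: :resize,
                            topic: :images, data: { file: 'cat.png' })
  build_spark(message)
  # => the nine listed keys only; :data stays in the stored burst
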
@@ -0,0 +1,59 @@
+ module Hsdq
+   module Session
+
+     # Store one or several key/values (string or JSON) in the session hash of the session layer.
+     # Creates the session hash if it does not exist.
+     # @param [String] session_id used by session_key to create the namespaced key based on session_id
+     # @param [Hash, Array] key_values a hash or a flat list of key/values
+     def hsdq_session_set(session_id, *key_values)
+       key_values = key_values[0].to_a if 1 == key_values.flatten.size && key_values[0].is_a?(Hash)
+       hkey = session_key(session_id)
+
+       cx_session.multi do
+         cx_session.hmset hkey, *key_values.flatten
+         cx_session.expire hkey, 259200 # 3 days; todo: set by options
+       end
+     end
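
For example (session id and keys invented), both forms store fields in the same session hash:

  hsdq_session_set 'user_42', cart_id: 'c-9', step: 'payment'   # hash form
  hsdq_session_set 'user_42', :step, 'confirmation'             # flat key/value form
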
+
+     # Retrieve the session hash from the session layer and return the data (see below)
+     # @param [String] session_id used to build the unique namespaced key to retrieve the session hash
+     # @param [Array] keys either a list of keys, or nothing
+     # @return [Array] the values, in the order of the keys passed
+     # @return [Hash] when no keys are passed, a hash of all the data stored
+     def hsdq_session(session_id, *keys)
+       if keys.any?
+         # get only the provided keys
+         cx_session.hmget session_key(session_id), *keys
+       else
+         # get all keys, return a hash
+         cx_session.hgetall session_key(session_id)
+       end
+     end
32
+
33
+ # delete all keys from the session
34
+ def hsdq_session_del(session_id, *keys)
35
+ cx_session.hdel session_key(session_id), *keys.flatten
36
+ end
37
+
38
+ # delete the whole session hash
39
+ def hsdq_session_destroy(session_id)
40
+ cx_session.del session_key(session_id)
41
+ end
42
+
43
+ # reset the expiration time for the session
44
+ def hsdq_session_expire(session_id, in_seconds)
45
+ cx_session.expire session_key(session_id), in_seconds
46
+ end
+
+     # return the time remaining before the session expires
+     def hsdq_session_expire_in(session_id)
+       cx_session.ttl session_key(session_id)
+     end
+
+     # check if a key exists in the session hash
+     def hsdq_session_key?(session_id, key)
+       cx_session.hexists session_key(session_id), key
+     end
+
+   end
+ end
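
A read-side sketch to go with the example above (values invented); note that Redis returns string keys and values:

  hsdq_session 'user_42'                    # => { "cart_id" => "c-9", "step" => "confirmation" }
  hsdq_session 'user_42', :step             # => ["confirmation"]
  hsdq_session_key? 'user_42', :cart_id     # => true
  hsdq_session_expire 'user_42', 3600       # keep the session alive for another hour
  hsdq_session_expire_in 'user_42'          # => seconds remaining
  hsdq_session_destroy 'user_42'            # drop the whole session hash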