fluent-plugin-azure-queue 0.0.1.pre → 0.0.2.pre
- checksums.yaml +4 -4
- data/Gemfile +1 -1
- data/Gemfile.lock +10 -4
- data/README.md +77 -10
- data/VERSION +1 -1
- data/fluent-plugin-azure-queue.gemspec +2 -0
- data/lib/fluent/plugin/in_azure_event_hub_capture.rb +121 -0
- data/lib/fluent/plugin/in_azure_queue.rb +24 -33
- data/test/test_in_azure_event_hub_capture.rb +103 -0
- data/test/test_in_azure_queue.rb +4 -7
- metadata +34 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 9b5f7a60d891724ad0d4ac6e48558010990ffa61
+  data.tar.gz: e8981830e82a093f33f84ec7280b6bdad1dd06f9
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d0b20d70a0f4b5ac513ecd770d3ea6f66c8fbbc20dae47806f39735a1185fdb2f47f6ffff331f4a6e6802f2f340eeff2ad8cfe6d3fdabdd79b91ba619b2cb05a
+  data.tar.gz: e9b764484e9efee4a4025d0fa5c38765deefee2a8a222209351c208aa12e9b7add59d170ffb11f92c2a3673996028e96004859bb1fd779de31e52e9ff84ba122
data/Gemfile
CHANGED
data/Gemfile.lock
CHANGED
@@ -1,14 +1,18 @@
 PATH
   remote: .
   specs:
-    fluent-plugin-azure-queue (0.0.1.pre
+    fluent-plugin-azure-queue (0.0.1.pre)
+      avro (>= 1.8)
       azure-storage (>= 0.12.3.preview, < 0.13)
+      concurrent-ruby
       fluentd (>= 0.12.2, < 0.14)
       nokogiri
 
 GEM
-  remote:
+  remote: https://rubygems.org/
   specs:
+    avro (1.8.2)
+      multi_json
     azure-core (0.1.10)
       faraday (~> 0.9)
       faraday_middleware (~> 0.10)
@@ -16,8 +20,9 @@ GEM
       azure-core (~> 0.1)
       faraday (~> 0.9)
       faraday_middleware (~> 0.10)
+    concurrent-ruby (1.0.5)
     cool.io (1.5.1)
-    faraday (0.13.
+    faraday (0.13.0)
       multipart-post (>= 1.2, < 3)
     faraday_middleware (0.12.2)
       faraday (>= 0.7.4, < 1.0)
@@ -36,8 +41,9 @@ GEM
     json (2.1.0)
     mini_portile2 (2.1.0)
     msgpack (1.1.0)
+    multi_json (1.12.1)
     multipart-post (2.0.0)
-    nokogiri (1.
+    nokogiri (1.6.8.1)
      mini_portile2 (~> 2.1.0)
     power_assert (1.0.2)
     rake (12.0.0)
data/README.md
CHANGED
@@ -1,10 +1,14 @@
-# A [Fluentd](http://github.com/fluent/fluentd) plugin to read from azure queues
+# A [Fluentd](http://github.com/fluent/fluentd) plugin to read from azure queues and event hubs
+The azure queue input plugin performs at about 30 messages/second in my tests. If you need more throughput from event hubs,
+I suggest using the event hub capture plugin.
 
 ## Dependencies
 
 fluentd v.12
 
-## Input
+## azure_queue Input Plugin
+
+### Input: Configuration
 
 <source>
 @type azure_queue
@@ -14,8 +18,7 @@ fluentd v.12
 storage_access_key my_storage_access_key
 queue_name my_storage_queue
 fetch_interval 5
-
-lease_time 30
+lease_duration 30
 </source>
 
 **tag (required)**
@@ -38,14 +41,78 @@ The storage queue name
 
 The the record key to put the message data into. Default 'message'
 
-**
+**lease_duration**
+
+The time to lease the messages for. Default 300
+
+**max_fetch_threads**
+
+The maximum number of threads to fetch and delete queue messages with. Default 30
+
+## Integration with Azure Event Hub
+
+You can use an azure function to forward messages from event hubs to storage queues for easy ingestion by this gem. This is not recommended for high volumes, but should serve as a stop gap until a complete azure event hub gem is created.
+
+```c#
+using System;
+using Microsoft.WindowsAzure.Storage.Queue;
+
+public static void Run(string[] hubMessages, ICollector<string> outputQueue, TraceWriter log)
+{
+    foreach (string message in hubMessages)
+    {
+        int bytes = message.Length * sizeof(Char);
+        if (bytes < 64000)
+        {
+            outputQueue.Add(message);
+        }
+        else
+        {
+            log.Warning($"Message is larger than 64k with {bytes} bytes. Dropping message");
+        }
+    }
+}
+```
+## azure_event_hub_capture Input Plugin
+This plugin is designed to work with blobs stored to a container via [Azure Event Hubs Capture](https://docs.microsoft.com/en-us/azure/event-hubs/event-hubs-capture-overview)
+
+### Input: Configuration
+
+<source>
+@type azure_event_hub_capture
+
+tag event_hub_input
+storage_account_name my_storage_account
+storage_access_key my_storage_access_key
+container_name my_capture_container
+fetch_interval 30
+lease_duration 30
+</source>
+
+**tag (required)**
 
-
+The tag for the input
 
-**
+**storage_account_name (required)**
+
+The storage account name
+
+**storage_access_key (required)**
+
+The storage account access key
+
+**container_name (required)**
+
+The capture container name
+
+**message_key**
+
+The the record key to put the message data into. Default 'message'
+
+**fetch_interval**
 
-The
+The time in seconds to sleep between fetching the blob list. Default 30
 
-**
+**lease_duration**
 
-The time to lease the messages for. Default
+The time to lease the messages for. Default 60
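A producer-side note on the azure_queue section above: the plugin Base64-decodes message_text before emitting it, so anything written to the queue should be Base64-encoded. Below is a minimal Ruby sketch of pushing a test message, assuming the azure-storage gem the plugin already depends on and the placeholder account, key, and queue names from the README example.

```ruby
require 'azure/storage'
require 'base64'
require 'json'

# Same credentials the <source> block would use (placeholders from the README).
queue_client = Azure::Storage::Client.create(
  storage_account_name: 'my_storage_account',
  storage_access_key: 'my_storage_access_key').queue_client

# in_azure_queue calls Base64.decode64 on message_text, so encode the payload here.
payload = Base64.strict_encode64({ 'key' => 'value' }.to_json)
queue_client.create_message('my_storage_queue', payload)
```

Storage queue messages are capped at 64 KB, which is why the Azure Function example above drops larger events.
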
data/VERSION
CHANGED
@@ -1 +1 @@
-0.0.1.pre
+0.0.2.pre
data/fluent-plugin-azure-queue.gemspec
CHANGED
@@ -18,6 +18,8 @@ Gem::Specification.new do |gem|
   gem.add_dependency "fluentd", [">= 0.12.2", "< 0.14"]
   gem.add_dependency "azure-storage", [">= 0.12.3.preview", "< 0.13"]
   gem.add_dependency "nokogiri"
+  gem.add_dependency "concurrent-ruby"
+  gem.add_dependency "avro", ">= 1.8"
   gem.add_development_dependency "rake", ">= 0.9.2"
   gem.add_development_dependency "test-unit", ">= 3.0.8"
   gem.add_development_dependency "flexmock", ">= 1.3.3"
data/lib/fluent/plugin/in_azure_event_hub_capture.rb
ADDED
@@ -0,0 +1,121 @@
+require 'fluent/input'
+require 'azure/storage'
+require "avro"
+
+module Fluent
+  class AzureEventHubCaptureInput < Input
+    Fluent::Plugin.register_input('azure_event_hub_capture', self)
+
+    desc 'Tag of the output events.'
+    config_param :tag, :string
+    desc 'The azure storage account name'
+    config_param :storage_account_name, :string
+    desc 'The azure storage account access key'
+    config_param :storage_access_key, :string
+    desc 'The container name'
+    config_param :container_name, :string
+    desc 'The the record key to put the message data into'
+    config_param :message_key, :string, default: 'message'
+    desc 'The time in seconds to sleep between fetching the blob list'
+    config_param :fetch_interval, :integer, default: 30
+    desc 'The the lease duration on the blob in seconds'
+    config_param :lease_duration, :integer, default: 60
+
+    def configure(conf)
+      super
+    end
+
+    def start
+      super
+      if @lease_duration > 60 || @lease_duration < 15
+        raise Fluent::ConfigError, "fluent-plugin-azure-queue: 'lease_duration' parameter must be between 15 and 60: #{@lease_duration}"
+      end
+      @blob_client = Azure::Storage::Client.create(
+        :storage_account_name => @storage_account_name,
+        :storage_access_key => @storage_access_key).blob_client
+      @running = true
+
+      @thread = Thread.new(&method(:run))
+    end
+
+    def shutdown
+      log.debug("Begin in azure blob shutdown")
+      @running = false
+      @thread.join
+      log.debug("Finish in azure blob shutdown")
+      super
+    end
+
+    private
+
+    def run
+      log.debug("Begin running in azure blob")
+      @next_fetch_time = Time.now
+      while @running
+        if Time.now > @next_fetch_time
+          @next_fetch_time = Time.now + @fetch_interval
+          begin
+            blobs = @blob_client.list_blobs(@container_name)
+            blobs = blobs.select { |b| b.properties[:lease_status] == "unlocked" }
+            log.trace("Found #{blobs.count} unlocked blobs", container_name: @container_name)
+            # Blobs come back with oldest first
+            blobs.each do |blob|
+              ingest_blob(blob)
+            end
+          rescue => e
+            log.warn(error: e)
+            log.warn_backtrace(e.backtrace)
+          end
+        else
+          sleep(@next_fetch_time - Time.now)
+        end
+      end
+    end
+
+    def ingest_blob(blob)
+      begin
+        lease_id = @blob_client.acquire_blob_lease(@container_name, blob.name, duration: @lease_duration)
+        log.trace("Blob Leased", blob_name: blob.name)
+        blob, blob_contents = @blob_client.get_blob(@container_name, blob.name)
+        emit_blob_messages(blob_contents)
+        log.trace("Done Ingest blob", blob_name: blob.name)
+        begin
+          delete_blob(blob, lease_id)
+          log.debug("Blob deleted", blob_name: blob.name)
+        rescue Exception => e
+          log.warn("Records emmitted but blob not deleted", container_name: @container_name, blob_name: blob.name, error: e)
+          log.warn_backtrace(e.backtrace)
+        end
+      rescue Azure::Core::Http::HTTPError => e
+        if e.status_code == 409
+          log.info("Blob already leased", blob_name: blob.name)
+        else
+          log.warn("Error occurred while ingesting blob", error: e)
+          log.warn_backtrace(e.backtrace)
+        end
+      rescue Exception => e
+        log.warn("Error occurred while ingesting blob", error: e)
+        log.warn_backtrace(e.backtrace)
+      end
+    end
+
+    def emit_blob_messages(blob_contents)
+      buffer = StringIO.new(blob_contents)
+      reader = Avro::DataFile::Reader.new(buffer, Avro::IO::DatumReader.new)
+      event_stream = MultiEventStream.new
+      reader.each do |record|
+        time = Time.strptime(record["EnqueuedTimeUtc"], "%m/%d/%Y %r").to_i
+        value = { @message_key => record["Body"] }
+        event_stream.add(time, value)
+      end
+      router.emit_stream(@tag, event_stream)
+    end
+
+    def delete_blob(blob, lease_id)
+      # Hack because 'delete_blob' doesn't support lease_id yet
+      Azure::Storage::Service::StorageService.register_request_callback { |headers| headers["x-ms-lease-id"] = lease_id }
+      @blob_client.delete_blob(@container_name, blob.name)
+      Azure::Storage::Service::StorageService.register_request_callback { |headers| headers }
+    end
+  end
+end
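For reference on the new plugin's Avro handling: each capture blob is an Avro container file whose records expose EnqueuedTimeUtc and Body, which emit_blob_messages maps to the event time and the message_key field. Here is a small offline sketch of the same decode path, assuming the avro gem from the gemspec and a hypothetical locally downloaded capture file named capture.avro.

```ruby
require 'avro'
require 'time'

# Decode an Event Hubs Capture file the way emit_blob_messages does,
# but from a local file instead of a freshly leased blob.
File.open('capture.avro', 'rb') do |file|
  reader = Avro::DataFile::Reader.new(file, Avro::IO::DatumReader.new)
  reader.each do |record|
    # Same fields the plugin reads for the event time and payload.
    time = Time.strptime(record['EnqueuedTimeUtc'], '%m/%d/%Y %r')
    puts "#{time.iso8601} #{record['Body']}"
  end
end
```
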
data/lib/fluent/plugin/in_azure_queue.rb
CHANGED
@@ -1,6 +1,6 @@
 require 'fluent/input'
 require 'azure/storage'
-require '
+require 'concurrent'
 
 module Fluent
   class AzureQueueInput < Input
@@ -16,12 +16,10 @@ module Fluent
     config_param :queue_name, :string
     desc 'The the record key to put the message data into'
     config_param :message_key, :string, default: 'message'
-    desc 'The time in seconds to sleep between fetching messages'
-    config_param :fetch_interval, :integer, default: 5
-    desc 'The number of messages to fetch during each request'
-    config_param :batch_size, :integer, default: 10
     desc 'The the lease time on the messages in seconds'
-    config_param :lease_time, :integer, default:
+    config_param :lease_time, :integer, default: 300
+    desc 'The maximum number of threads to fetch messages'
+    config_param :max_fetch_threads, :integer, default: 30
 
     def configure(conf)
       super
@@ -29,15 +27,17 @@ module Fluent
 
     def start
       super
-      if @batch_size > 32 || @batch_size < 1
-        raise Fluent::ConfigError, "fluent-plugin-azure-queue: 'batch_size' parameter must be between 1 and 32: #{@batch_size}"
-      end
       @queue_client = Azure::Storage::Client.create(
         :storage_account_name => @storage_account_name,
         :storage_access_key => @storage_access_key).queue_client
       log.debug("Succeeded to creating azure queue client")
      @running = true
 
+      @delete_pool = Concurrent::ThreadPoolExecutor.new(
+        min_threads: 1,
+        max_threads: @max_fetch_threads,
+        max_queue: @max_fetch_threads,
+      )
       @thread = Thread.new(&method(:run))
     end
 
@@ -55,36 +55,27 @@ module Fluent
       log.debug("Begin running azure queue")
       @next_fetch_time = Time.now
       while @running
-
+        delete_futures = []
         begin
-          if
-
-
-
-
-
-
-
-
-            @queue_client.delete_message(@queue_name, message.id, message.pop_receipt)
-            log.trace("Deleted azure queue message", queue_name: @queue_name, id: message.id)
-            deleted = true
-          end
+          if ((@delete_pool.queue_length) < @max_fetch_threads)
+            Concurrent::Future.execute(executor: @delete_pool) do
+              start = Time.now
+              message = @queue_client.list_messages(@queue_name, @lease_time, { number_of_messages: 1 })[0]
+              log.trace("Recieved 1 messages from azure queue", queue_name: @queue_name, id: message.id, time: Time.now - start)
+              router.emit(@tag, Fluent::Engine.now, { @message_key => Base64.decode64(message.message_text)})
+              start = Time.now
+              @queue_client.delete_message(@queue_name, message.id, message.pop_receipt)
+              log.trace("Deleted azure queue message", queue_name: @queue_name, id: message.id, time: Time.now - start)
+            end
          else
-
+            log.trace("Not fetching more messages, already have #{@delete_pool.queue_length} messages to be deleted")
+            sleep 0.5
          end
        rescue => e
-
-
-          log.warn_backtrace(e.backtrace)
-        else
-          log.warn("Message emmitted but not deleted from azure queue", queue_name: @queue_name, error: e)
-          log.warn_backtrace(e.backtrace)
-        end
+          log.warn(error: e)
+          log.warn_backtrace(e.backtrace)
        end
      end
-
    end
  end
 end
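The rewritten run loop above swaps the old batch fetch for a bounded Concurrent::ThreadPoolExecutor: each Concurrent::Future fetches one message, emits it, and deletes it, while the queue_length check plus the pool's max_queue keep in-flight work at or below max_fetch_threads, backing off with a 0.5 s sleep when the backlog is full. A stripped-down sketch of that pattern with concurrent-ruby, using a stand-in task instead of the Azure queue client:

```ruby
require 'concurrent'

max_fetch_threads = 30

# Bounded pool: at most max_fetch_threads workers and max_fetch_threads queued tasks,
# mirroring the @delete_pool the plugin builds in #start.
pool = Concurrent::ThreadPoolExecutor.new(
  min_threads: 1,
  max_threads: max_fetch_threads,
  max_queue: max_fetch_threads)

100.times do |i|
  if pool.queue_length < max_fetch_threads
    # Stand-in for fetch -> emit -> delete of one queue message.
    Concurrent::Future.execute(executor: pool) do
      sleep 0.1
      puts "processed message #{i}"
    end
  else
    # Back off instead of queueing unbounded work, as the plugin does.
    sleep 0.5
  end
end

pool.shutdown
pool.wait_for_termination
```
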
data/test/test_in_azure_event_hub_capture.rb
ADDED
@@ -0,0 +1,103 @@
+require 'fluent/test'
+require 'fluent/plugin/in_azure_event_hub_capture'
+require 'flexmock/test_unit'
+require 'fluent/input'
+
+class AzureEventHubCaptureInputTest < Test::Unit::TestCase
+  def setup
+    Fluent::Test.setup
+    if Fluent.const_defined?(:EventTime)
+      stub(Fluent::EventTime).now { @time }
+    end
+  end
+
+  CONFIG = %[
+    tag test_tag
+    storage_account_name test_storage_account_name
+    storage_access_key test_storage_access_key
+    container_name test_container_name
+    fetch_interval 1
+  ]
+
+  def create_driver(conf = CONFIG)
+    d = Fluent::Test::InputTestDriver.new(Fluent::AzureEventHubCaptureInput)
+    d.configure(conf)
+    d
+  end
+
+  Struct.new("Blob", :name, :properties)
+
+  def test_configure
+    d = create_driver
+    assert_equal 'test_tag', d.instance.tag
+    assert_equal 'test_storage_account_name', d.instance.storage_account_name
+    assert_equal 'test_storage_access_key', d.instance.storage_access_key
+    assert_equal 'test_container_name', d.instance.container_name
+    assert_equal 1, d.instance.fetch_interval
+  end
+
+  def setup_mocks(driver)
+    blob_client = flexmock("blob_client")
+    client = flexmock("client", :blob_client => blob_client)
+    flexmock(Azure::Storage::Client, :create => client)
+    blob_client
+  end
+
+  def test_no_blobs
+    d = create_driver
+    blob_client = setup_mocks(d)
+    blob_client.should_receive(:list_blobs).with(d.instance.container_name).and_return([]).once
+    flexmock(d.instance).should_receive(:ingest_blob).never()
+    d.run do
+      sleep 1
+    end
+  end
+
+  def test_two_blobs
+    d = create_driver
+    blobs = [Struct::Blob.new("test1", lease_status: "unlocked"), Struct::Blob.new("test2", lease_status: "unlocked")]
+    blob_client = setup_mocks(d)
+    blob_client.should_receive(:list_blobs).with(d.instance.container_name).and_return(blobs).once
+    plugin = flexmock(d.instance)
+    plugin.should_receive(:ingest_blob).with(blobs[0]).once()
+    plugin.should_receive(:ingest_blob).with(blobs[1]).once()
+    d.run do
+      sleep 1
+    end
+  end
+
+  def test_ingest_blob
+    d = create_driver
+    blob = Struct::Blob.new("test1", lease_status: "unlocked")
+    blob_client = setup_mocks(d)
+    plugin = flexmock(d.instance)
+    lease_id = "123"
+    blob_client.should_receive(:acquire_blob_lease).with(d.instance.container_name, blob.name, duration: d.instance.lease_duration).and_return(lease_id).once
+    updated_blob = Struct::Blob.new("test1", lease_status: "locked")
+    blob_contents = flexmock("blob_contents")
+    blob_client.should_receive(:get_blob).with(d.instance.container_name, blob.name).and_return([updated_blob, blob_contents]).once
+    plugin.should_receive(:emit_blob_messages).with(blob_contents).once
+    plugin.should_receive(:delete_blob).with(updated_blob, lease_id).once
+    d.run do
+      plugin.send(:ingest_blob, blob)
+    end
+  end
+
+  def test_emit_blob_messages
+    d = create_driver
+    setup_mocks(d)
+    test_payload = flexmock("test_payload")
+    buffer = flexmock("buffer")
+    flexmock(StringIO).should_receive(:new).and_return(buffer)
+    time = 1504030204
+    time_string = Time.at(time).strftime("%m/%d/%Y %r")
+    original_payload = {"key" => "value"}.to_json
+    records = [ {"EnqueuedTimeUtc" => time_string, "Body" => original_payload } ]
+    flexmock(Avro::DataFile::Reader).should_receive(:new).with(buffer, Avro::IO::DatumReader).and_return(records)
+    d.run do
+      d.instance.send(:emit_blob_messages, test_payload)
+    end
+    assert_equal(1, d.emits.size)
+    d.expect_emit(d.instance.tag, time, { "message" => original_payload })
+  end
+end
data/test/test_in_azure_queue.rb
CHANGED
@@ -40,25 +40,22 @@ class AzureQueueInputTest < Test::Unit::TestCase
     queue_client.should_receive(:list_messages).with(
       driver.instance.queue_name,
       driver.instance.lease_time,
-      { number_of_messages:
+      { number_of_messages: 1}).and_return(messages).once
     client = flexmock("client", :queue_client => queue_client)
     flexmock(Azure::Storage::Client, :create => client)
     queue_client
   end
 
-  def
+  def test_one_message
     d = create_driver
-    messages = [ Struct::QueueMessage.new(1, 99, Base64.encode64("test line"))
-                 Struct::QueueMessage.new(2, 100, Base64.encode64("test line2"))]
+    messages = [ Struct::QueueMessage.new(1, 99, Base64.encode64("test line"))]
     queue_client = setup_mocks(d, messages)
     queue_client.should_receive(:delete_message).with(d.instance.queue_name, messages[0].id,messages[0].pop_receipt).once
-    queue_client.should_receive(:delete_message).with(d.instance.queue_name, messages[1].id,messages[1].pop_receipt).once
     d.run do
       sleep 1
     end
-    assert_equal(
+    assert_equal(1, d.emits.size)
     d.expect_emit(d.instance.tag, @time, { "message" => "test line" })
-    d.expect_emit(d.instance.tag, @time, { "message" => "test line2" })
   end
 
   def test_no_messages
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-azure-queue
 version: !ruby/object:Gem::Version
-  version: 0.0.1.pre
+  version: 0.0.2.pre
 platform: ruby
 authors:
 - Scott Bonebrake
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-08-
+date: 2017-08-29 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd
@@ -64,6 +64,34 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
+- !ruby/object:Gem::Dependency
+  name: concurrent-ruby
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  name: avro
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '1.8'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '1.8'
 - !ruby/object:Gem::Dependency
   name: rake
   requirement: !ruby/object:Gem::Requirement
@@ -121,7 +149,9 @@ files:
 - Rakefile
 - VERSION
 - fluent-plugin-azure-queue.gemspec
+- lib/fluent/plugin/in_azure_event_hub_capture.rb
 - lib/fluent/plugin/in_azure_queue.rb
+- test/test_in_azure_event_hub_capture.rb
 - test/test_in_azure_queue.rb
 homepage: https://github.com/sbonebrake/fluent-plugin-azure-queue
 licenses:
@@ -143,9 +173,10 @@ required_rubygems_version: !ruby/object:Gem::Requirement
       version: 1.3.1
 requirements: []
 rubyforge_project:
-rubygems_version: 2.
+rubygems_version: 2.0.14.1
 signing_key:
 specification_version: 4
 summary: Fluent input plugin for azure queue input
 test_files:
+- test/test_in_azure_event_hub_capture.rb
 - test/test_in_azure_queue.rb