logstash-input-google_pubsub_compressed_batches 1.2.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of logstash-input-google_pubsub_compressed_batches might be problematic. Click here for more details.

Files changed (52) hide show
  1. checksums.yaml +7 -0
  2. data/CHANGELOG.md +37 -0
  3. data/CONTRIBUTING.md +9 -0
  4. data/CONTRIBUTORS +23 -0
  5. data/Gemfile +11 -0
  6. data/LICENSE +202 -0
  7. data/NOTICE.TXT +13 -0
  8. data/README.md +200 -0
  9. data/docs/index.asciidoc +274 -0
  10. data/lib/logstash-input-google_pubsub_jars.rb +41 -0
  11. data/lib/logstash/inputs/google_pubsub.rb +306 -0
  12. data/logstash-input-google_pubsub_compressed_batches.gemspec +34 -0
  13. data/spec/inputs/google_pubsub_spec.rb +37 -0
  14. data/vendor/jar-dependencies/com/fasterxml/jackson/core/jackson-core/2.1.3/jackson-core-2.1.3.jar +0 -0
  15. data/vendor/jar-dependencies/com/google/api/api-common/1.6.0/api-common-1.6.0.jar +0 -0
  16. data/vendor/jar-dependencies/com/google/api/gax-grpc/1.29.0/gax-grpc-1.29.0.jar +0 -0
  17. data/vendor/jar-dependencies/com/google/api/gax/1.29.0/gax-1.29.0.jar +0 -0
  18. data/vendor/jar-dependencies/com/google/api/grpc/grpc-google-cloud-pubsub-v1/1.19.1/grpc-google-cloud-pubsub-v1-1.19.1.jar +0 -0
  19. data/vendor/jar-dependencies/com/google/api/grpc/proto-google-cloud-pubsub-v1/1.19.1/proto-google-cloud-pubsub-v1-1.19.1.jar +0 -0
  20. data/vendor/jar-dependencies/com/google/api/grpc/proto-google-common-protos/1.12.0/proto-google-common-protos-1.12.0.jar +0 -0
  21. data/vendor/jar-dependencies/com/google/api/grpc/proto-google-iam-v1/0.12.0/proto-google-iam-v1-0.12.0.jar +0 -0
  22. data/vendor/jar-dependencies/com/google/auth/google-auth-library-credentials/0.9.1/google-auth-library-credentials-0.9.1.jar +0 -0
  23. data/vendor/jar-dependencies/com/google/auth/google-auth-library-oauth2-http/0.9.1/google-auth-library-oauth2-http-0.9.1.jar +0 -0
  24. data/vendor/jar-dependencies/com/google/auto/value/auto-value/1.4/auto-value-1.4.jar +0 -0
  25. data/vendor/jar-dependencies/com/google/cloud/google-cloud-core-grpc/1.37.1/google-cloud-core-grpc-1.37.1.jar +0 -0
  26. data/vendor/jar-dependencies/com/google/cloud/google-cloud-core/1.37.1/google-cloud-core-1.37.1.jar +0 -0
  27. data/vendor/jar-dependencies/com/google/cloud/google-cloud-pubsub/1.37.1/google-cloud-pubsub-1.37.1.jar +0 -0
  28. data/vendor/jar-dependencies/com/google/code/findbugs/jsr305/3.0.2/jsr305-3.0.2.jar +0 -0
  29. data/vendor/jar-dependencies/com/google/code/gson/gson/2.7/gson-2.7.jar +0 -0
  30. data/vendor/jar-dependencies/com/google/errorprone/error_prone_annotations/2.2.0/error_prone_annotations-2.2.0.jar +0 -0
  31. data/vendor/jar-dependencies/com/google/guava/guava/20.0/guava-20.0.jar +0 -0
  32. data/vendor/jar-dependencies/com/google/http-client/google-http-client-jackson2/1.19.0/google-http-client-jackson2-1.19.0.jar +0 -0
  33. data/vendor/jar-dependencies/com/google/http-client/google-http-client/1.23.0/google-http-client-1.23.0.jar +0 -0
  34. data/vendor/jar-dependencies/com/google/protobuf/protobuf-java-util/3.6.0/protobuf-java-util-3.6.0.jar +0 -0
  35. data/vendor/jar-dependencies/com/google/protobuf/protobuf-java/3.6.0/protobuf-java-3.6.0.jar +0 -0
  36. data/vendor/jar-dependencies/com/google/protobuf/protobuf-lite/3.0.1/protobuf-lite-3.0.1.jar +0 -0
  37. data/vendor/jar-dependencies/commons-codec/commons-codec/1.3/commons-codec-1.3.jar +0 -0
  38. data/vendor/jar-dependencies/commons-logging/commons-logging/1.1.1/commons-logging-1.1.1.jar +0 -0
  39. data/vendor/jar-dependencies/io/grpc/grpc-auth/1.13.1/grpc-auth-1.13.1.jar +0 -0
  40. data/vendor/jar-dependencies/io/grpc/grpc-context/1.13.1/grpc-context-1.13.1.jar +0 -0
  41. data/vendor/jar-dependencies/io/grpc/grpc-core/1.13.1/grpc-core-1.13.1.jar +0 -0
  42. data/vendor/jar-dependencies/io/grpc/grpc-netty-shaded/1.13.1/grpc-netty-shaded-1.13.1.jar +0 -0
  43. data/vendor/jar-dependencies/io/grpc/grpc-protobuf-lite/1.13.1/grpc-protobuf-lite-1.13.1.jar +0 -0
  44. data/vendor/jar-dependencies/io/grpc/grpc-protobuf/1.13.1/grpc-protobuf-1.13.1.jar +0 -0
  45. data/vendor/jar-dependencies/io/grpc/grpc-stub/1.13.1/grpc-stub-1.13.1.jar +0 -0
  46. data/vendor/jar-dependencies/io/opencensus/opencensus-api/0.12.3/opencensus-api-0.12.3.jar +0 -0
  47. data/vendor/jar-dependencies/io/opencensus/opencensus-contrib-grpc-metrics/0.12.3/opencensus-contrib-grpc-metrics-0.12.3.jar +0 -0
  48. data/vendor/jar-dependencies/joda-time/joda-time/2.9.2/joda-time-2.9.2.jar +0 -0
  49. data/vendor/jar-dependencies/org/apache/httpcomponents/httpclient/4.0.1/httpclient-4.0.1.jar +0 -0
  50. data/vendor/jar-dependencies/org/apache/httpcomponents/httpcore/4.0.1/httpcore-4.0.1.jar +0 -0
  51. data/vendor/jar-dependencies/org/threeten/threetenbp/1.3.3/threetenbp-1.3.3.jar +0 -0
  52. metadata +185 -0
@@ -0,0 +1,274 @@
1
+ :plugin: google_pubsub
2
+ :type: input
3
+ :default_codec: plain
4
+
5
+ ///////////////////////////////////////////
6
+ START - GENERATED VARIABLES, DO NOT EDIT!
7
+ ///////////////////////////////////////////
8
+ :version: %VERSION%
9
+ :release_date: %RELEASE_DATE%
10
+ :changelog_url: %CHANGELOG_URL%
11
+ :include_path: ../../../../logstash/docs/include
12
+ ///////////////////////////////////////////
13
+ END - GENERATED VARIABLES, DO NOT EDIT!
14
+ ///////////////////////////////////////////
15
+
16
+ [id="plugins-{type}s-{plugin}"]
17
+
18
+ === Google_pubsub input plugin
19
+
20
+ include::{include_path}/plugin_header.asciidoc[]
21
+
22
+ ==== Description
23
+
24
+ Author: Eric Johnson <erjohnso@google.com>
25
+ Date: 2016-06-01
26
+
27
+ Copyright 2016 Google Inc.
28
+
29
+ Licensed under the Apache License, Version 2.0 (the "License");
30
+ you may not use this file except in compliance with the License.
31
+ You may obtain a copy of the License at
32
+
33
+ http://www.apache.org/licenses/LICENSE-2.0
34
+
35
+ Unless required by applicable law or agreed to in writing, software
36
+ distributed under the License is distributed on an "AS IS" BASIS,
37
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
38
+ See the License for the specific language governing permissions and
39
+ limitations under the License.
40
+ Google deps
41
+ This is a https://github.com/elastic/logstash[Logstash] input plugin for
42
+ https://cloud.google.com/pubsub/[Google Pub/Sub]. The plugin can subscribe
43
+ to a topic and ingest messages.
44
+
45
+ The main motivation behind the development of this plugin was to ingest
46
+ https://cloud.google.com/logging/[Stackdriver Logging] messages via the
47
+ https://cloud.google.com/logging/docs/export/using_exported_logs[Exported Logs]
48
+ feature of Stackdriver Logging.
49
+
50
+ ==== Prerequisites
51
+
52
+ You must first create a Google Cloud Platform project and enable the
53
+ Google Pub/Sub API. If you intend to use the plugin to ingest Stackdriver Logging
54
+ messages, you must also enable the Stackdriver Logging API and configure log
55
+ exporting to Pub/Sub. There is plentiful information on
56
+ https://cloud.google.com/ to get started:
57
+
58
+ - Google Cloud Platform Projects and https://cloud.google.com/docs/overview/[Overview]
59
+ - Google Cloud Pub/Sub https://cloud.google.com/pubsub/[documentation]
60
+ - Stackdriver Logging https://cloud.google.com/logging/[documentation]
61
+
62
+ ==== Cloud Pub/Sub
63
+
64
+ Currently, this module requires you to create a `topic` manually and specify
65
+ it in the logstash config file. You must also specify a `subscription`, but
66
+ the plugin will attempt to create the pull-based `subscription` on its own.
67
+
68
+ All messages received from Pub/Sub will be converted to a logstash `event`
69
+ and added to the processing pipeline queue. All Pub/Sub messages will be
70
+ `acknowledged` and removed from the Pub/Sub `topic` (please see more about
71
+ https://cloud.google.com/pubsub/overview#concepts[Pub/Sub concepts]).
72
+
73
+ It is generally assumed that incoming messages will be in JSON and added to
74
+ the logstash `event` as-is. However, if a plain text message is received, the
75
+ plugin will return the raw text as `raw_message` in the logstash `event`.
76
+
77
+ ==== Authentication
78
+
79
+ You have two options for authentication depending on where you run Logstash.
80
+
81
+ 1. If you are running Logstash outside of Google Cloud Platform, then you will
82
+ need to create a Google Cloud Platform Service Account and specify the full
83
+ path to the JSON private key file in your config. You must assign sufficient
84
+ roles to the Service Account to create a subscription and to pull messages
85
+ from the subscription. Learn more about GCP Service Accounts and IAM roles
86
+ here:
87
+
88
+ - Google Cloud Platform IAM https://cloud.google.com/iam/[overview]
89
+ - Creating Service Accounts https://cloud.google.com/iam/docs/creating-managing-service-accounts[overview]
90
+ - Granting Roles https://cloud.google.com/iam/docs/granting-roles-to-service-accounts[overview]
91
+
92
+ 2. If you are running Logstash on a Google Compute Engine instance, you may opt
93
+ to use Application Default Credentials. In this case, you will not need to
94
+ specify a JSON private key file in your config.
95
+
96
+ ==== Stackdriver Logging (optional)
97
+
98
+ If you intend to use the logstash plugin for Stackdriver Logging message
99
+ ingestion, you must first manually set up the Export option to Cloud Pub/Sub and
100
+ then manually create the `topic`. Please see the more detailed instructions at,
101
+ https://cloud.google.com/logging/docs/export/using_exported_logs[Exported Logs]
102
+ and ensure that the https://cloud.google.com/logging/docs/export/configure_export#manual-access-pubsub[necessary permissions]
103
+ have also been manually configured.
104
+
105
+ Logging messages from Stackdriver Logging exported to Pub/Sub are received as
106
+ JSON and converted to a logstash `event` as-is in
107
+ https://cloud.google.com/logging/docs/export/using_exported_logs#log_entries_in_google_pubsub_topics[this format].
108
+
109
+ ==== Sample Configuration
110
+
111
+ Below is a copy of the included `example.conf-tmpl` file that shows a basic
112
+ configuration for this plugin.
113
+
114
+ [source,ruby]
115
+ ----------------------------------
116
+ input {
117
+ google_pubsub {
118
+ # Your GCP project id (name)
119
+ project_id => "my-project-1234"
120
+
121
+ # The topic name below is currently hard-coded in the plugin. You
122
+ # must first create this topic by hand and ensure you are exporting
123
+ # logging to this pubsub topic.
124
+ topic => "logstash-input-dev"
125
+
126
+ # The subscription name is customizable. The plugin will attempt to
127
+ # create the subscription (but use the hard-coded topic name above).
128
+ subscription => "logstash-sub"
129
+
130
+ # If you are running logstash within GCE, it will use
131
+ # Application Default Credentials and use GCE's metadata
132
+ # service to fetch tokens. However, if you are running logstash
133
+ # outside of GCE, you will need to specify the service account's
134
+ # JSON key file below.
135
+ #json_key_file => "/home/erjohnso/pkey.json"
136
+
137
+ # Should the plugin attempt to create the subscription on startup?
138
+ # This is not recommended for security reasons but may be useful in
139
+ # some cases.
140
+ #create_subscription => false
141
+ }
142
+ }
143
+ output { stdout { codec => rubydebug } }
144
+ ----------------------------------
145
+
146
+ ==== Metadata and Attributes
147
+
148
+ The original Pub/Sub message is preserved in the special Logstash
149
+ `[@metadata][pubsub_message]` field so you can fetch:
150
+
151
+ * Message attributes
152
+ * The original base64 data
153
+ * Pub/Sub message ID for de-duplication
154
+ * Publish time
155
+
156
+ You MUST extract any fields you want in a filter prior to the data being sent
157
+ to an output because Logstash deletes `@metadata` fields otherwise.
158
+
159
+ See the PubsubMessage
160
+ https://cloud.google.com/pubsub/docs/reference/rest/v1/PubsubMessage[documentation]
161
+ for a full description of the fields.
162
+
163
+ Example to get the message ID:
164
+
165
+ [source,ruby]
166
+ ----------------------------------
167
+ input {google_pubsub {...}}
168
+
169
+ filter {
170
+ mutate {
171
+ add_field => { "messageId" => "%{[@metadata][pubsub_message][messageId]}" }
172
+ }
173
+ }
174
+
175
+ output {...}
176
+ ----------------------------------
177
+
178
+
179
+ [id="plugins-{type}s-{plugin}-options"]
180
+ ==== Google_pubsub Input Configuration Options
181
+
182
+ This plugin supports the following configuration options plus the <<plugins-{type}s-{plugin}-common-options>> described later.
183
+
184
+ [cols="<,<,<",options="header",]
185
+ |=======================================================================
186
+ |Setting |Input type|Required
187
+ | <<plugins-{type}s-{plugin}-json_key_file>> |a valid filesystem path|No
188
+ | <<plugins-{type}s-{plugin}-max_messages>> |<<number,number>>|Yes
189
+ | <<plugins-{type}s-{plugin}-project_id>> |<<string,string>>|Yes
190
+ | <<plugins-{type}s-{plugin}-subscription>> |<<string,string>>|Yes
191
+ | <<plugins-{type}s-{plugin}-topic>> |<<string,string>>|Yes
192
+ | <<plugins-{type}s-{plugin}-include_metadata>> |<<boolean,boolean>>|No
193
+ | <<plugins-{type}s-{plugin}-create_subscription>> |<<boolean,boolean>>|No
194
+ |=======================================================================
195
+
196
+ Also see <<plugins-{type}s-{plugin}-common-options>> for a list of options supported by all
197
+ input plugins.
198
+
199
+ &nbsp;
200
+
201
+ [id="plugins-{type}s-{plugin}-json_key_file"]
202
+ ===== `json_key_file`
203
+
204
+ * Value type is <<path,path>>
205
+ * There is no default value for this setting.
206
+
207
+ If logstash is running within Google Compute Engine, the plugin will use
208
+ GCE's Application Default Credentials. Outside of GCE, you will need to
209
+ specify a Service Account JSON key file.
210
+
211
+ [id="plugins-{type}s-{plugin}-max_messages"]
212
+ ===== `max_messages`
213
+
214
+ * This is a required setting.
215
+ * Value type is <<number,number>>
216
+ * Default value is `5`
217
+
218
+ The maximum number of messages returned per request.
219
+ The Pub/Sub system may return fewer than the number specified.
220
+
221
+ [id="plugins-{type}s-{plugin}-project_id"]
222
+ ===== `project_id`
223
+
224
+ * This is a required setting.
225
+ * Value type is <<string,string>>
226
+ * There is no default value for this setting.
227
+
228
+ Google Cloud Project ID (name, not number).
229
+
230
+ [id="plugins-{type}s-{plugin}-subscription"]
231
+ ===== `subscription`
232
+
233
+ * This is a required setting.
234
+ * Value type is <<string,string>>
235
+ * There is no default value for this setting.
236
+
237
+
238
+
239
+ [id="plugins-{type}s-{plugin}-topic"]
240
+ ===== `topic`
241
+
242
+ * This is a required setting.
243
+ * Value type is <<string,string>>
244
+ * There is no default value for this setting.
245
+
246
+ Google Cloud Pub/Sub Topic and Subscription.
247
+ Note that the topic must be created manually with Cloud Logging
248
+ pre-configured export to PubSub configured to use the defined topic.
249
+ The subscription will be created automatically by the plugin.
250
+
251
+ [id="plugins-{type}s-{plugin}-include_metadata"]
252
+ ===== `include_metadata`
253
+
254
+ * Value type is <<boolean,boolean>>
255
+ * Default value is `false`.
256
+
257
+ If set true, will include the full message data in the `[@metadata][pubsub_message]` field.
258
+
259
+ [id="plugins-{type}s-{plugin}-create_subscription"]
260
+ ===== `create_subscription`
261
+
262
+ added[1.2.0]
263
+
264
+ * Value type is <<boolean,boolean>>
265
+ * Default value is `false`.
266
+
267
+ If true, the plugin will try to create the subscription before pulling messages.
268
+ Note: this requires additional permissions to be granted to the client and is _not_
269
+ recommended for most use-cases.
270
+
271
+ [id="plugins-{type}s-{plugin}-common-options"]
272
+ include::{include_path}/{type}.asciidoc[]
273
+
274
+ :default_codec!:
@@ -0,0 +1,41 @@
1
+ # AUTOGENERATED BY THE GRADLE SCRIPT. DO NOT EDIT.
2
+
3
+ require 'jar_dependencies'
4
+ require_jar('com.google.cloud', 'google-cloud-pubsub', '1.37.1')
5
+ require_jar('com.fasterxml.jackson.core', 'jackson-core', '2.1.3')
6
+ require_jar('com.google.api', 'api-common', '1.6.0')
7
+ require_jar('com.google.api', 'gax', '1.29.0')
8
+ require_jar('com.google.api', 'gax-grpc', '1.29.0')
9
+ require_jar('com.google.api.grpc', 'grpc-google-cloud-pubsub-v1', '1.19.1')
10
+ require_jar('com.google.api.grpc', 'proto-google-cloud-pubsub-v1', '1.19.1')
11
+ require_jar('com.google.api.grpc', 'proto-google-common-protos', '1.12.0')
12
+ require_jar('com.google.api.grpc', 'proto-google-iam-v1', '0.12.0')
13
+ require_jar('com.google.auth', 'google-auth-library-credentials', '0.9.1')
14
+ require_jar('com.google.auth', 'google-auth-library-oauth2-http', '0.9.1')
15
+ require_jar('com.google.auto.value', 'auto-value', '1.4')
16
+ require_jar('com.google.cloud', 'google-cloud-core', '1.37.1')
17
+ require_jar('com.google.cloud', 'google-cloud-core-grpc', '1.37.1')
18
+ require_jar('com.google.code.findbugs', 'jsr305', '3.0.2')
19
+ require_jar('com.google.code.gson', 'gson', '2.7')
20
+ require_jar('com.google.errorprone', 'error_prone_annotations', '2.2.0')
21
+ require_jar('com.google.guava', 'guava', '20.0')
22
+ require_jar('com.google.http-client', 'google-http-client', '1.23.0')
23
+ require_jar('com.google.http-client', 'google-http-client-jackson2', '1.19.0')
24
+ require_jar('com.google.protobuf', 'protobuf-java', '3.6.0')
25
+ require_jar('com.google.protobuf', 'protobuf-java-util', '3.6.0')
26
+ require_jar('com.google.protobuf', 'protobuf-lite', '3.0.1')
27
+ require_jar('commons-codec', 'commons-codec', '1.3')
28
+ require_jar('commons-logging', 'commons-logging', '1.1.1')
29
+ require_jar('io.grpc', 'grpc-auth', '1.13.1')
30
+ require_jar('io.grpc', 'grpc-context', '1.13.1')
31
+ require_jar('io.grpc', 'grpc-core', '1.13.1')
32
+ require_jar('io.grpc', 'grpc-netty-shaded', '1.13.1')
33
+ require_jar('io.grpc', 'grpc-protobuf', '1.13.1')
34
+ require_jar('io.grpc', 'grpc-protobuf-lite', '1.13.1')
35
+ require_jar('io.grpc', 'grpc-stub', '1.13.1')
36
+ require_jar('io.opencensus', 'opencensus-api', '0.12.3')
37
+ require_jar('io.opencensus', 'opencensus-contrib-grpc-metrics', '0.12.3')
38
+ require_jar('joda-time', 'joda-time', '2.9.2')
39
+ require_jar('org.apache.httpcomponents', 'httpclient', '4.0.1')
40
+ require_jar('org.apache.httpcomponents', 'httpcore', '4.0.1')
41
+ require_jar('org.threeten', 'threetenbp', '1.3.3')
@@ -0,0 +1,306 @@
1
+ # encoding: utf-8
2
+
3
+ # Author: Eric Johnson <erjohnso@google.com>
4
+ # Date: 2016-06-01
5
+ #
6
+ # Copyright 2016 Google Inc.
7
+ #
8
+ # Licensed under the Apache License, Version 2.0 (the "License");
9
+ # you may not use this file except in compliance with the License.
10
+ # You may obtain a copy of the License at
11
+ #
12
+ # http://www.apache.org/licenses/LICENSE-2.0
13
+ #
14
+ # Unless required by applicable law or agreed to in writing, software
15
+ # distributed under the License is distributed on an "AS IS" BASIS,
16
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17
+ # See the License for the specific language governing permissions and
18
+ # limitations under the License.
19
+ require "logstash/inputs/base"
20
+ require "logstash/namespace"
21
+
22
+ require 'java'
23
+ require 'logstash-input-google_pubsub_jars.rb'
24
+
25
+ # This is a https://github.com/elastic/logstash[Logstash] input plugin for
26
+ # https://cloud.google.com/pubsub/[Google Pub/Sub]. The plugin can subscribe
27
+ # to a topic and ingest messages.
28
+ #
29
+ # The main motivation behind the development of this plugin was to ingest
30
+ # https://cloud.google.com/logging/[Stackdriver Logging] messages via the
31
+ # https://cloud.google.com/logging/docs/export/using_exported_logs[Exported Logs]
32
+ # feature of Stackdriver Logging.
33
+ #
34
+ # ==== Prerequisites
35
+ #
36
+ # You must first create a Google Cloud Platform project and enable the
37
+ # Google Pub/Sub API. If you intend to use the plugin to ingest Stackdriver Logging
38
+ # messages, you must also enable the Stackdriver Logging API and configure log
39
+ # exporting to Pub/Sub. There is plentiful information on
40
+ # https://cloud.google.com/ to get started:
41
+ #
42
+ # - Google Cloud Platform Projects and https://cloud.google.com/docs/overview/[Overview]
43
+ # - Google Cloud Pub/Sub https://cloud.google.com/pubsub/[documentation]
44
+ # - Stackdriver Logging https://cloud.google.com/logging/[documentation]
45
+ #
46
+ # ==== Cloud Pub/Sub
47
+ #
48
+ # Currently, this module requires you to create a `topic` manually and specify
49
+ # it in the logstash config file. You must also specify a `subscription`, but
50
+ # the plugin will attempt to create the pull-based `subscription` on its own.
51
+ #
52
+ # All messages received from Pub/Sub will be converted to a logstash `event`
53
+ # and added to the processing pipeline queue. All Pub/Sub messages will be
54
+ # `acknowledged` and removed from the Pub/Sub `topic` (please see more about
55
+ # https://cloud.google.com/pubsub/overview#concepts[Pub/Sub concepts]).
56
+ #
57
+ # It is generally assumed that incoming messages will be in JSON and added to
58
+ # the logstash `event` as-is. However, if a plain text message is received, the
59
+ # plugin will return the raw text as `raw_message` in the logstash `event`.
60
+ #
61
+ # ==== Authentication
62
+ #
63
+ # You have two options for authentication depending on where you run Logstash.
64
+ #
65
+ # 1. If you are running Logstash outside of Google Cloud Platform, then you will
66
+ # need to create a Google Cloud Platform Service Account and specify the full
67
+ # path to the JSON private key file in your config. You must assign sufficient
68
+ # roles to the Service Account to create a subscription and to pull messages
69
+ # from the subscription. Learn more about GCP Service Accounts and IAM roles
70
+ # here:
71
+ #
72
+ # - Google Cloud Platform IAM https://cloud.google.com/iam/[overview]
73
+ # - Creating Service Accounts https://cloud.google.com/iam/docs/creating-managing-service-accounts[overview]
74
+ # - Granting Roles https://cloud.google.com/iam/docs/granting-roles-to-service-accounts[overview]
75
+ #
76
+ # 1. If you are running Logstash on a Google Compute Engine instance, you may opt
77
+ # to use Application Default Credentials. In this case, you will not need to
78
+ # specify a JSON private key file in your config.
79
+ #
80
+ # ==== Stackdriver Logging (optional)
81
+ #
82
+ # If you intend to use the logstash plugin for Stackdriver Logging message
83
+ # ingestion, you must first manually set up the Export option to Cloud Pub/Sub and
84
+ # then manually create the `topic`. Please see the more detailed instructions at,
85
+ # https://cloud.google.com/logging/docs/export/using_exported_logs[Exported Logs]
86
+ # and ensure that the https://cloud.google.com/logging/docs/export/configure_export#manual-access-pubsub[necessary permissions]
87
+ # have also been manually configured.
88
+ #
89
+ # Logging messages from Stackdriver Logging exported to Pub/Sub are received as
90
+ # JSON and converted to a logstash `event` as-is in
91
+ # https://cloud.google.com/logging/docs/export/using_exported_logs#log_entries_in_google_pubsub_topics[this format].
92
+ #
93
+ # ==== Sample Configuration
94
+ #
95
+ # Below is a copy of the included `example.conf-tmpl` file that shows a basic
96
+ # configuration for this plugin.
97
+ #
98
+ # [source,ruby]
99
+ # ----------------------------------
100
+ # input {
101
+ # google_pubsub {
102
+ # # Your GCP project id (name)
103
+ # project_id => "my-project-1234"
104
+ #
105
+ # # The topic name below is currently hard-coded in the plugin. You
106
+ # # must first create this topic by hand and ensure you are exporting
107
+ # # logging to this pubsub topic.
108
+ # topic => "logstash-input-dev"
109
+ #
110
+ # # The subscription name is customizable. The plugin will attempt to
111
+ # # create the subscription (but use the hard-coded topic name above).
112
+ # subscription => "logstash-sub"
113
+ #
114
+ # # If you are running logstash within GCE, it will use
115
+ # # Application Default Credentials and use GCE's metadata
116
+ # # service to fetch tokens. However, if you are running logstash
117
+ # # outside of GCE, you will need to specify the service account's
118
+ # # JSON key file below.
119
+ # #json_key_file => "/home/erjohnso/pkey.json"
120
+ # }
121
+ # }
122
+ # output { stdout { codec => rubydebug } }
123
+ # ----------------------------------
124
+ #
125
+ # ==== Metadata and Attributes
126
+ #
127
+ # The original Pub/Sub message is preserved in the special Logstash
128
+ # `[@metadata][pubsub_message]` field so you can fetch:
129
+ #
130
+ # * Message attributes
131
+ # * The original base64 data
132
+ # * Pub/Sub message ID for de-duplication
133
+ # * Publish time
134
+ #
135
+ # You MUST extract any fields you want in a filter prior to the data being sent
136
+ # to an output because Logstash deletes `@metadata` fields otherwise.
137
+ #
138
+ # See the PubsubMessage
139
+ # https://cloud.google.com/pubsub/docs/reference/rest/v1/PubsubMessage[documentation]
140
+ # for a full description of the fields.
141
+ #
142
+ # Example to get the message ID:
143
+ #
144
+ # [source,ruby]
145
+ # ----------------------------------
146
+ # input {google_pubsub {...}}
147
+ #
148
+ # filter {
149
+ # mutate {
150
+ # add_field => { "messageId" => "%{[@metadata][pubsub_message][messageId]}" }
151
+ # }
152
+ # }
153
+ #
154
+ # output {...}
155
+ # ----------------------------------
156
+ #
157
+
158
+ class LogStash::Inputs::GooglePubSub < LogStash::Inputs::Base
159
+ class MessageReceiver
160
+ include com.google.cloud.pubsub.v1.MessageReceiver
161
+
162
+ def initialize(&blk)
163
+ @block = blk
164
+ end
165
+
166
+ def receiveMessage(message, consumer)
167
+ @block.call(message)
168
+ consumer.ack()
169
+ end
170
+ end
171
+
172
+ java_import 'com.google.api.core.ApiService$Listener'
173
+ class SubscriberListener < Listener
174
+ def initialize(&blk)
175
+ @block = blk
176
+ end
177
+
178
+ def failed(from, failure)
179
+ @block.call(from, failure)
180
+ end
181
+ end
182
+
183
+ include_package 'com.google.api.gax.batching'
184
+ include_package 'com.google.api.gax.core'
185
+ include_package 'com.google.auth.oauth2'
186
+ include_package 'com.google.common.util.concurrent'
187
+ include_package 'com.google.cloud.pubsub.v1'
188
+ include_package 'com.google.pubsub.v1'
189
+ include_package 'com.google.protobuf.util'
190
+ config_name "google_pubsub"
191
+
192
+ # Google Cloud Project ID (name, not number)
193
+ config :project_id, :validate => :string, :required => true
194
+
195
+ # Google Cloud Pub/Sub Topic and Subscription.
196
+ # Note that the topic must be created manually with Cloud Logging
197
+ # pre-configured export to PubSub configured to use the defined topic.
198
+ # The subscription will be created automatically by the plugin.
199
+ config :topic, :validate => :string, :required => true
200
+ config :subscription, :validate => :string, :required => true
201
+ config :max_messages, :validate => :number, :required => true, :default => 5
202
+
203
+ # If logstash is running within Google Compute Engine, the plugin will use
204
+ # GCE's Application Default Credentials. Outside of GCE, you will need to
205
+ # specify a Service Account JSON key file.
206
+ config :json_key_file, :validate => :path, :required => false
207
+
208
+ # If set true, will include the full message data in the `[@metadata][pubsub_message]` field.
209
+ config :include_metadata, :validate => :boolean, :required => false, :default => false
210
+
211
+ # If true, the plugin will try to create the subscription before publishing.
212
+ # Note: this requires additional permissions to be granted to the client and is _not_
213
+ # recommended for most use-cases.
214
+ config :create_subscription, :validate => :boolean, :required => false, :default => false
215
+
216
+ # If undefined, Logstash will complain, even if codec is unused.
217
+ default :codec, "plain"
218
+
219
+ COMPRESSION_ALGORITHM_ZLIB = "zlib"
220
+ BATCHED_RECORD_SEPARATOR = 30.chr
221
+
222
+ public
223
+ def register
224
+ @logger.debug("Registering Google PubSub Input: project_id=#{@project_id}, topic=#{@topic}, subscription=#{@subscription}")
225
+ @subscription_id = "projects/#{@project_id}/subscriptions/#{@subscription}"
226
+
227
+ if @json_key_file
228
+ @credentialsProvider = FixedCredentialsProvider.create(
229
+ ServiceAccountCredentials.fromStream(java.io.FileInputStream.new(@json_key_file))
230
+ )
231
+ end
232
+ @topic_name = ProjectTopicName.of(@project_id, @topic)
233
+ @subscription_name = ProjectSubscriptionName.of(@project_id, @subscription)
234
+ end
235
+
236
+ def stop
237
+ @subscriber.stopAsync().awaitTerminated() if @subscriber != nil
238
+ end
239
+
240
+ def run(queue)
241
+ # Attempt to create the subscription
242
+ if @create_subscription
243
+ @logger.debug("Creating subscription #{@subscription_id}")
244
+ subscriptionAdminClient = SubscriptionAdminClient.create
245
+ begin
246
+ subscriptionAdminClient.createSubscription(@subscription_name, @topic_name, PushConfig.getDefaultInstance(), 0)
247
+ rescue
248
+ @logger.info("Subscription already exists")
249
+ end
250
+ end
251
+
252
+ @logger.debug("Pulling messages from sub '#{@subscription_id}'")
253
+ handler = MessageReceiver.new do |message|
254
+ # handle incoming message, then ack/nack the received message
255
+ data = message.getData().toStringUtf8()
256
+ metadata = extract_metadata(message)
257
+ algorithm = metadata["compression_algorithm"]
258
+
259
+ case algorithm
260
+ when nil
261
+ @codec.decode(data) do |event|
262
+ event.set("host", event.get("host") || @host)
263
+ event.set("[@metadata][pubsub_message]", metadata) if @include_metadata
264
+ decorate(event)
265
+ queue << event
266
+ end
267
+ when COMPRESSION_ALGORITHM_ZLIB
268
+ lines = Zlib::Inflate.inflate(data).split(BATCHED_RECORD_SEPARATOR)
269
+ lines.each do |line|
270
+ event = LogStash::Event.new(line)
271
+ event.set("host", event.get("host") || @host)
272
+ event.set("[@metadata][pubsub_message]", metadata) if @include_metadata
273
+ decorate(event)
274
+ queue << event
275
+ end
276
+ end
277
+ end
278
+ listener = SubscriberListener.new do |from, failure|
279
+ @logger.error("#{failure}")
280
+ raise failure
281
+ end
282
+ flowControlSettings = FlowControlSettings.newBuilder().setMaxOutstandingElementCount(@max_messages).build()
283
+ executorProvider = InstantiatingExecutorProvider.newBuilder().setExecutorThreadCount(1).build()
284
+ subscriberBuilder = Subscriber.newBuilder(@subscription_name, handler)
285
+ .setFlowControlSettings(flowControlSettings)
286
+ .setExecutorProvider(executorProvider)
287
+ .setParallelPullCount(1)
288
+
289
+ if @credentialsProvider
290
+ subscriberBuilder.setCredentialsProvider(@credentialsProvider)
291
+ end
292
+ @subscriber = subscriberBuilder.build()
293
+ @subscriber.addListener(listener, MoreExecutors.directExecutor())
294
+ @subscriber.startAsync()
295
+ @subscriber.awaitTerminated()
296
+ end
297
+
298
+ def extract_metadata(java_message)
299
+ {
300
+ data: java_message.getData().toStringUtf8(),
301
+ attributes: java_message.getAttributesMap(),
302
+ messageId: java_message.getMessageId(),
303
+ publishTime: Timestamps.toString(java_message.getPublishTime())
304
+ }
305
+ end
306
+ end