logstash-lib 1.3.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/.gitignore +24 -0
- data/.tailor +8 -0
- data/.travis.yml +12 -0
- data/CHANGELOG +1185 -0
- data/CONTRIBUTING.md +61 -0
- data/CONTRIBUTORS +79 -0
- data/LICENSE +14 -0
- data/Makefile +460 -0
- data/README.md +120 -0
- data/STYLE.md +96 -0
- data/bin/logstash +37 -0
- data/bin/logstash-test +4 -0
- data/bin/logstash-web +4 -0
- data/bin/logstash.lib.sh +78 -0
- data/bot/check_pull_changelog.rb +89 -0
- data/docs/configuration.md +260 -0
- data/docs/docgen.rb +242 -0
- data/docs/extending/example-add-a-new-filter.md +121 -0
- data/docs/extending/index.md +91 -0
- data/docs/flags.md +43 -0
- data/docs/generate_index.rb +28 -0
- data/docs/index.html.erb +56 -0
- data/docs/learn.md +46 -0
- data/docs/life-of-an-event.md +109 -0
- data/docs/logging-tool-comparisons.md +60 -0
- data/docs/plugin-doc.html.erb +91 -0
- data/docs/plugin-milestones.md +41 -0
- data/docs/plugin-synopsis.html.erb +24 -0
- data/docs/release-engineering.md +46 -0
- data/docs/release-test-results.md +14 -0
- data/docs/repositories.md +35 -0
- data/docs/tutorials/10-minute-walkthrough/apache-elasticsearch.conf +35 -0
- data/docs/tutorials/10-minute-walkthrough/apache-parse.conf +33 -0
- data/docs/tutorials/10-minute-walkthrough/apache_log.1 +1 -0
- data/docs/tutorials/10-minute-walkthrough/apache_log.2.bz2 +0 -0
- data/docs/tutorials/10-minute-walkthrough/hello-search.conf +25 -0
- data/docs/tutorials/10-minute-walkthrough/hello.conf +16 -0
- data/docs/tutorials/10-minute-walkthrough/index.md +124 -0
- data/docs/tutorials/10-minute-walkthrough/step-5-output.txt +17 -0
- data/docs/tutorials/getting-started-centralized-overview-diagram.png +0 -0
- data/docs/tutorials/getting-started-centralized-overview-diagram.xml +1 -0
- data/docs/tutorials/getting-started-centralized.md +217 -0
- data/docs/tutorials/getting-started-simple.md +200 -0
- data/docs/tutorials/just-enough-rabbitmq-for-logstash.md +201 -0
- data/docs/tutorials/media/frontend-response-codes.png +0 -0
- data/docs/tutorials/metrics-from-logs.md +84 -0
- data/docs/tutorials/zeromq.md +118 -0
- data/extract_services.rb +29 -0
- data/gembag.rb +64 -0
- data/lib/logstash-event.rb +2 -0
- data/lib/logstash.rb +4 -0
- data/lib/logstash/JRUBY-6970-openssl.rb +22 -0
- data/lib/logstash/JRUBY-6970.rb +102 -0
- data/lib/logstash/agent.rb +305 -0
- data/lib/logstash/certs/cacert.pem +3895 -0
- data/lib/logstash/codecs/base.rb +49 -0
- data/lib/logstash/codecs/compress_spooler.rb +50 -0
- data/lib/logstash/codecs/dots.rb +18 -0
- data/lib/logstash/codecs/edn.rb +28 -0
- data/lib/logstash/codecs/edn_lines.rb +36 -0
- data/lib/logstash/codecs/fluent.rb +55 -0
- data/lib/logstash/codecs/graphite.rb +114 -0
- data/lib/logstash/codecs/json.rb +41 -0
- data/lib/logstash/codecs/json_lines.rb +52 -0
- data/lib/logstash/codecs/json_spooler.rb +22 -0
- data/lib/logstash/codecs/line.rb +58 -0
- data/lib/logstash/codecs/msgpack.rb +43 -0
- data/lib/logstash/codecs/multiline.rb +189 -0
- data/lib/logstash/codecs/netflow.rb +342 -0
- data/lib/logstash/codecs/netflow/util.rb +212 -0
- data/lib/logstash/codecs/noop.rb +19 -0
- data/lib/logstash/codecs/oldlogstashjson.rb +56 -0
- data/lib/logstash/codecs/plain.rb +48 -0
- data/lib/logstash/codecs/rubydebug.rb +22 -0
- data/lib/logstash/codecs/spool.rb +38 -0
- data/lib/logstash/config/Makefile +4 -0
- data/lib/logstash/config/config_ast.rb +380 -0
- data/lib/logstash/config/file.rb +39 -0
- data/lib/logstash/config/grammar.rb +3504 -0
- data/lib/logstash/config/grammar.treetop +241 -0
- data/lib/logstash/config/mixin.rb +464 -0
- data/lib/logstash/config/registry.rb +13 -0
- data/lib/logstash/config/test.conf +18 -0
- data/lib/logstash/errors.rb +10 -0
- data/lib/logstash/event.rb +262 -0
- data/lib/logstash/filters/advisor.rb +178 -0
- data/lib/logstash/filters/alter.rb +173 -0
- data/lib/logstash/filters/anonymize.rb +93 -0
- data/lib/logstash/filters/base.rb +190 -0
- data/lib/logstash/filters/checksum.rb +50 -0
- data/lib/logstash/filters/cidr.rb +76 -0
- data/lib/logstash/filters/cipher.rb +145 -0
- data/lib/logstash/filters/clone.rb +35 -0
- data/lib/logstash/filters/collate.rb +114 -0
- data/lib/logstash/filters/csv.rb +94 -0
- data/lib/logstash/filters/date.rb +244 -0
- data/lib/logstash/filters/dns.rb +201 -0
- data/lib/logstash/filters/drop.rb +32 -0
- data/lib/logstash/filters/elapsed.rb +256 -0
- data/lib/logstash/filters/elasticsearch.rb +73 -0
- data/lib/logstash/filters/environment.rb +27 -0
- data/lib/logstash/filters/extractnumbers.rb +84 -0
- data/lib/logstash/filters/gelfify.rb +52 -0
- data/lib/logstash/filters/geoip.rb +145 -0
- data/lib/logstash/filters/grep.rb +153 -0
- data/lib/logstash/filters/grok.rb +425 -0
- data/lib/logstash/filters/grokdiscovery.rb +75 -0
- data/lib/logstash/filters/i18n.rb +51 -0
- data/lib/logstash/filters/json.rb +90 -0
- data/lib/logstash/filters/json_encode.rb +52 -0
- data/lib/logstash/filters/kv.rb +232 -0
- data/lib/logstash/filters/metaevent.rb +68 -0
- data/lib/logstash/filters/metrics.rb +237 -0
- data/lib/logstash/filters/multiline.rb +241 -0
- data/lib/logstash/filters/mutate.rb +399 -0
- data/lib/logstash/filters/noop.rb +21 -0
- data/lib/logstash/filters/prune.rb +149 -0
- data/lib/logstash/filters/punct.rb +32 -0
- data/lib/logstash/filters/railsparallelrequest.rb +86 -0
- data/lib/logstash/filters/range.rb +142 -0
- data/lib/logstash/filters/ruby.rb +42 -0
- data/lib/logstash/filters/sleep.rb +111 -0
- data/lib/logstash/filters/split.rb +64 -0
- data/lib/logstash/filters/sumnumbers.rb +73 -0
- data/lib/logstash/filters/syslog_pri.rb +107 -0
- data/lib/logstash/filters/translate.rb +121 -0
- data/lib/logstash/filters/unique.rb +29 -0
- data/lib/logstash/filters/urldecode.rb +57 -0
- data/lib/logstash/filters/useragent.rb +112 -0
- data/lib/logstash/filters/uuid.rb +58 -0
- data/lib/logstash/filters/xml.rb +139 -0
- data/lib/logstash/filters/zeromq.rb +123 -0
- data/lib/logstash/filterworker.rb +122 -0
- data/lib/logstash/inputs/base.rb +125 -0
- data/lib/logstash/inputs/collectd.rb +306 -0
- data/lib/logstash/inputs/drupal_dblog.rb +323 -0
- data/lib/logstash/inputs/drupal_dblog/jdbcconnection.rb +66 -0
- data/lib/logstash/inputs/elasticsearch.rb +140 -0
- data/lib/logstash/inputs/eventlog.rb +129 -0
- data/lib/logstash/inputs/eventlog/racob_fix.rb +44 -0
- data/lib/logstash/inputs/exec.rb +69 -0
- data/lib/logstash/inputs/file.rb +146 -0
- data/lib/logstash/inputs/ganglia.rb +127 -0
- data/lib/logstash/inputs/ganglia/gmondpacket.rb +146 -0
- data/lib/logstash/inputs/ganglia/xdr.rb +327 -0
- data/lib/logstash/inputs/gelf.rb +138 -0
- data/lib/logstash/inputs/gemfire.rb +222 -0
- data/lib/logstash/inputs/generator.rb +97 -0
- data/lib/logstash/inputs/graphite.rb +41 -0
- data/lib/logstash/inputs/heroku.rb +51 -0
- data/lib/logstash/inputs/imap.rb +136 -0
- data/lib/logstash/inputs/irc.rb +84 -0
- data/lib/logstash/inputs/log4j.rb +136 -0
- data/lib/logstash/inputs/lumberjack.rb +53 -0
- data/lib/logstash/inputs/pipe.rb +57 -0
- data/lib/logstash/inputs/rabbitmq.rb +126 -0
- data/lib/logstash/inputs/rabbitmq/bunny.rb +118 -0
- data/lib/logstash/inputs/rabbitmq/hot_bunnies.rb +1 -0
- data/lib/logstash/inputs/rabbitmq/march_hare.rb +129 -0
- data/lib/logstash/inputs/redis.rb +263 -0
- data/lib/logstash/inputs/relp.rb +106 -0
- data/lib/logstash/inputs/s3.rb +279 -0
- data/lib/logstash/inputs/snmptrap.rb +87 -0
- data/lib/logstash/inputs/sqlite.rb +185 -0
- data/lib/logstash/inputs/sqs.rb +172 -0
- data/lib/logstash/inputs/stdin.rb +46 -0
- data/lib/logstash/inputs/stomp.rb +84 -0
- data/lib/logstash/inputs/syslog.rb +237 -0
- data/lib/logstash/inputs/tcp.rb +231 -0
- data/lib/logstash/inputs/threadable.rb +18 -0
- data/lib/logstash/inputs/twitter.rb +82 -0
- data/lib/logstash/inputs/udp.rb +81 -0
- data/lib/logstash/inputs/unix.rb +163 -0
- data/lib/logstash/inputs/varnishlog.rb +48 -0
- data/lib/logstash/inputs/websocket.rb +50 -0
- data/lib/logstash/inputs/wmi.rb +72 -0
- data/lib/logstash/inputs/xmpp.rb +81 -0
- data/lib/logstash/inputs/zenoss.rb +143 -0
- data/lib/logstash/inputs/zeromq.rb +165 -0
- data/lib/logstash/kibana.rb +113 -0
- data/lib/logstash/loadlibs.rb +9 -0
- data/lib/logstash/logging.rb +89 -0
- data/lib/logstash/monkeypatches-for-bugs.rb +2 -0
- data/lib/logstash/monkeypatches-for-debugging.rb +47 -0
- data/lib/logstash/monkeypatches-for-performance.rb +66 -0
- data/lib/logstash/multiqueue.rb +53 -0
- data/lib/logstash/namespace.rb +16 -0
- data/lib/logstash/outputs/base.rb +120 -0
- data/lib/logstash/outputs/boundary.rb +116 -0
- data/lib/logstash/outputs/circonus.rb +78 -0
- data/lib/logstash/outputs/cloudwatch.rb +351 -0
- data/lib/logstash/outputs/csv.rb +55 -0
- data/lib/logstash/outputs/datadog.rb +93 -0
- data/lib/logstash/outputs/datadog_metrics.rb +123 -0
- data/lib/logstash/outputs/elasticsearch.rb +332 -0
- data/lib/logstash/outputs/elasticsearch/elasticsearch-template.json +44 -0
- data/lib/logstash/outputs/elasticsearch_http.rb +256 -0
- data/lib/logstash/outputs/elasticsearch_river.rb +214 -0
- data/lib/logstash/outputs/email.rb +299 -0
- data/lib/logstash/outputs/exec.rb +40 -0
- data/lib/logstash/outputs/file.rb +180 -0
- data/lib/logstash/outputs/ganglia.rb +75 -0
- data/lib/logstash/outputs/gelf.rb +208 -0
- data/lib/logstash/outputs/gemfire.rb +103 -0
- data/lib/logstash/outputs/google_bigquery.rb +570 -0
- data/lib/logstash/outputs/google_cloud_storage.rb +431 -0
- data/lib/logstash/outputs/graphite.rb +143 -0
- data/lib/logstash/outputs/graphtastic.rb +185 -0
- data/lib/logstash/outputs/hipchat.rb +80 -0
- data/lib/logstash/outputs/http.rb +142 -0
- data/lib/logstash/outputs/irc.rb +80 -0
- data/lib/logstash/outputs/jira.rb +109 -0
- data/lib/logstash/outputs/juggernaut.rb +105 -0
- data/lib/logstash/outputs/librato.rb +146 -0
- data/lib/logstash/outputs/loggly.rb +93 -0
- data/lib/logstash/outputs/lumberjack.rb +51 -0
- data/lib/logstash/outputs/metriccatcher.rb +103 -0
- data/lib/logstash/outputs/mongodb.rb +81 -0
- data/lib/logstash/outputs/nagios.rb +119 -0
- data/lib/logstash/outputs/nagios_nsca.rb +123 -0
- data/lib/logstash/outputs/null.rb +18 -0
- data/lib/logstash/outputs/opentsdb.rb +101 -0
- data/lib/logstash/outputs/pagerduty.rb +79 -0
- data/lib/logstash/outputs/pipe.rb +132 -0
- data/lib/logstash/outputs/rabbitmq.rb +96 -0
- data/lib/logstash/outputs/rabbitmq/bunny.rb +135 -0
- data/lib/logstash/outputs/rabbitmq/hot_bunnies.rb +1 -0
- data/lib/logstash/outputs/rabbitmq/march_hare.rb +143 -0
- data/lib/logstash/outputs/redis.rb +245 -0
- data/lib/logstash/outputs/riak.rb +152 -0
- data/lib/logstash/outputs/riemann.rb +109 -0
- data/lib/logstash/outputs/s3.rb +356 -0
- data/lib/logstash/outputs/sns.rb +124 -0
- data/lib/logstash/outputs/solr_http.rb +78 -0
- data/lib/logstash/outputs/sqs.rb +141 -0
- data/lib/logstash/outputs/statsd.rb +116 -0
- data/lib/logstash/outputs/stdout.rb +53 -0
- data/lib/logstash/outputs/stomp.rb +67 -0
- data/lib/logstash/outputs/syslog.rb +145 -0
- data/lib/logstash/outputs/tcp.rb +145 -0
- data/lib/logstash/outputs/udp.rb +38 -0
- data/lib/logstash/outputs/websocket.rb +46 -0
- data/lib/logstash/outputs/websocket/app.rb +29 -0
- data/lib/logstash/outputs/websocket/pubsub.rb +45 -0
- data/lib/logstash/outputs/xmpp.rb +78 -0
- data/lib/logstash/outputs/zabbix.rb +108 -0
- data/lib/logstash/outputs/zeromq.rb +125 -0
- data/lib/logstash/pipeline.rb +286 -0
- data/lib/logstash/plugin.rb +150 -0
- data/lib/logstash/plugin_mixins/aws_config.rb +93 -0
- data/lib/logstash/program.rb +15 -0
- data/lib/logstash/runner.rb +238 -0
- data/lib/logstash/sized_queue.rb +8 -0
- data/lib/logstash/test.rb +183 -0
- data/lib/logstash/threadwatchdog.rb +37 -0
- data/lib/logstash/time_addon.rb +33 -0
- data/lib/logstash/util.rb +106 -0
- data/lib/logstash/util/buftok.rb +139 -0
- data/lib/logstash/util/charset.rb +39 -0
- data/lib/logstash/util/fieldreference.rb +50 -0
- data/lib/logstash/util/password.rb +25 -0
- data/lib/logstash/util/prctl.rb +11 -0
- data/lib/logstash/util/relp.rb +326 -0
- data/lib/logstash/util/require-helper.rb +18 -0
- data/lib/logstash/util/socket_peer.rb +7 -0
- data/lib/logstash/util/zenoss.rb +566 -0
- data/lib/logstash/util/zeromq.rb +47 -0
- data/lib/logstash/version.rb +6 -0
- data/locales/en.yml +170 -0
- data/logstash-event.gemspec +29 -0
- data/logstash.gemspec +128 -0
- data/patterns/firewalls +60 -0
- data/patterns/grok-patterns +91 -0
- data/patterns/haproxy +37 -0
- data/patterns/java +3 -0
- data/patterns/linux-syslog +14 -0
- data/patterns/mcollective +1 -0
- data/patterns/mcollective-patterns +4 -0
- data/patterns/nagios +108 -0
- data/patterns/postgresql +3 -0
- data/patterns/redis +3 -0
- data/patterns/ruby +2 -0
- data/pkg/build.sh +135 -0
- data/pkg/centos/after-install.sh +1 -0
- data/pkg/centos/before-install.sh +10 -0
- data/pkg/centos/before-remove.sh +11 -0
- data/pkg/centos/sysconfig +15 -0
- data/pkg/debian/after-install.sh +5 -0
- data/pkg/debian/before-install.sh +13 -0
- data/pkg/debian/before-remove.sh +13 -0
- data/pkg/debian/build.sh +34 -0
- data/pkg/debian/debian/README +6 -0
- data/pkg/debian/debian/changelog +17 -0
- data/pkg/debian/debian/compat +1 -0
- data/pkg/debian/debian/control +16 -0
- data/pkg/debian/debian/copyright +27 -0
- data/pkg/debian/debian/dirs +19 -0
- data/pkg/debian/debian/docs +0 -0
- data/pkg/debian/debian/logstash.default +39 -0
- data/pkg/debian/debian/logstash.init +201 -0
- data/pkg/debian/debian/logstash.install +1 -0
- data/pkg/debian/debian/logstash.logrotate +9 -0
- data/pkg/debian/debian/logstash.postinst +68 -0
- data/pkg/debian/debian/logstash.postrm +23 -0
- data/pkg/debian/debian/manpage.1.ex +59 -0
- data/pkg/debian/debian/preinst.ex +37 -0
- data/pkg/debian/debian/prerm.ex +40 -0
- data/pkg/debian/debian/release.conf +5 -0
- data/pkg/debian/debian/rules +80 -0
- data/pkg/debian/debian/watch.ex +22 -0
- data/pkg/logrotate.conf +8 -0
- data/pkg/logstash-web.default +41 -0
- data/pkg/logstash-web.sysv.debian +201 -0
- data/pkg/logstash-web.upstart.ubuntu +18 -0
- data/pkg/logstash.default +45 -0
- data/pkg/logstash.sysv.debian +202 -0
- data/pkg/logstash.sysv.redhat +158 -0
- data/pkg/logstash.upstart.ubuntu +20 -0
- data/pkg/rpm/SOURCES/logstash.conf +26 -0
- data/pkg/rpm/SOURCES/logstash.init +80 -0
- data/pkg/rpm/SOURCES/logstash.logrotate +8 -0
- data/pkg/rpm/SOURCES/logstash.sysconfig +3 -0
- data/pkg/rpm/SOURCES/logstash.wrapper +105 -0
- data/pkg/rpm/SPECS/logstash.spec +180 -0
- data/pkg/rpm/readme.md +4 -0
- data/pkg/ubuntu/after-install.sh +7 -0
- data/pkg/ubuntu/before-install.sh +12 -0
- data/pkg/ubuntu/before-remove.sh +13 -0
- data/pull_release_note.rb +25 -0
- data/require-analyze.rb +22 -0
- data/spec/README.md +14 -0
- data/spec/codecs/edn.rb +40 -0
- data/spec/codecs/edn_lines.rb +53 -0
- data/spec/codecs/graphite.rb +96 -0
- data/spec/codecs/json.rb +57 -0
- data/spec/codecs/json_lines.rb +51 -0
- data/spec/codecs/json_spooler.rb +43 -0
- data/spec/codecs/msgpack.rb +39 -0
- data/spec/codecs/multiline.rb +60 -0
- data/spec/codecs/oldlogstashjson.rb +55 -0
- data/spec/codecs/plain.rb +35 -0
- data/spec/codecs/spool.rb +35 -0
- data/spec/conditionals/test.rb +323 -0
- data/spec/config.rb +31 -0
- data/spec/event.rb +165 -0
- data/spec/examples/fail2ban.rb +28 -0
- data/spec/examples/graphite-input.rb +41 -0
- data/spec/examples/mysql-slow-query.rb +70 -0
- data/spec/examples/parse-apache-logs.rb +66 -0
- data/spec/examples/parse-haproxy-logs.rb +115 -0
- data/spec/examples/syslog.rb +48 -0
- data/spec/filters/alter.rb +96 -0
- data/spec/filters/anonymize.rb +189 -0
- data/spec/filters/checksum.rb +41 -0
- data/spec/filters/clone.rb +67 -0
- data/spec/filters/collate.rb +122 -0
- data/spec/filters/csv.rb +174 -0
- data/spec/filters/date.rb +285 -0
- data/spec/filters/date_performance.rb +31 -0
- data/spec/filters/dns.rb +159 -0
- data/spec/filters/drop.rb +19 -0
- data/spec/filters/elapsed.rb +294 -0
- data/spec/filters/environment.rb +43 -0
- data/spec/filters/geoip.rb +62 -0
- data/spec/filters/grep.rb +342 -0
- data/spec/filters/grok.rb +473 -0
- data/spec/filters/grok/timeout2.rb +56 -0
- data/spec/filters/grok/timeouts.rb +39 -0
- data/spec/filters/i18n.rb +25 -0
- data/spec/filters/json.rb +72 -0
- data/spec/filters/json_encode.rb +37 -0
- data/spec/filters/kv.rb +403 -0
- data/spec/filters/metrics.rb +212 -0
- data/spec/filters/multiline.rb +119 -0
- data/spec/filters/mutate.rb +180 -0
- data/spec/filters/noop.rb +221 -0
- data/spec/filters/prune.rb +441 -0
- data/spec/filters/punct.rb +18 -0
- data/spec/filters/railsparallelrequest.rb +112 -0
- data/spec/filters/range.rb +169 -0
- data/spec/filters/split.rb +58 -0
- data/spec/filters/translate.rb +70 -0
- data/spec/filters/unique.rb +25 -0
- data/spec/filters/useragent.rb +42 -0
- data/spec/filters/xml.rb +157 -0
- data/spec/inputs/file.rb +107 -0
- data/spec/inputs/gelf.rb +52 -0
- data/spec/inputs/generator.rb +30 -0
- data/spec/inputs/imap.rb +60 -0
- data/spec/inputs/redis.rb +63 -0
- data/spec/inputs/relp.rb +70 -0
- data/spec/inputs/tcp.rb +101 -0
- data/spec/jar.rb +21 -0
- data/spec/outputs/csv.rb +266 -0
- data/spec/outputs/elasticsearch.rb +161 -0
- data/spec/outputs/elasticsearch_http.rb +240 -0
- data/spec/outputs/email.rb +173 -0
- data/spec/outputs/file.rb +82 -0
- data/spec/outputs/graphite.rb +236 -0
- data/spec/outputs/redis.rb +127 -0
- data/spec/speed.rb +20 -0
- data/spec/sqlite-test.rb +81 -0
- data/spec/support/LOGSTASH-733.rb +21 -0
- data/spec/support/LOGSTASH-820.rb +25 -0
- data/spec/support/akamai-grok.rb +26 -0
- data/spec/support/date-http.rb +17 -0
- data/spec/support/postwait1.rb +26 -0
- data/spec/support/pull375.rb +21 -0
- data/spec/test_utils.rb +125 -0
- data/spec/util/fieldeval_spec.rb +44 -0
- data/test/jenkins/config.xml.erb +74 -0
- data/test/jenkins/create-jobs.rb +23 -0
- data/test/jenkins/generatorjob.config.xml +66 -0
- data/tools/Gemfile +14 -0
- data/tools/Gemfile.jruby-1.9.lock +322 -0
- data/tools/Gemfile.rbx-2.1.lock +516 -0
- data/tools/Gemfile.ruby-1.9.1.lock +310 -0
- data/tools/Gemfile.ruby-2.0.0.lock +310 -0
- metadata +629 -0
|
@@ -0,0 +1,263 @@
|
|
|
1
|
+
# encoding: utf-8
|
|
2
|
+
require "logstash/inputs/base"
|
|
3
|
+
require "logstash/inputs/threadable"
|
|
4
|
+
require "logstash/namespace"
|
|
5
|
+
|
|
6
|
+
# Read events from a redis. Supports both redis channels and also redis lists
|
|
7
|
+
# (using BLPOP)
|
|
8
|
+
#
|
|
9
|
+
# For more information about redis, see <http://redis.io/>
|
|
10
|
+
#
|
|
11
|
+
# ## `batch_count` note
|
|
12
|
+
#
|
|
13
|
+
# If you use the 'batch_count' setting, you *must* use a redis version 2.6.0 or
|
|
14
|
+
# newer. Anything older does not support the operations used by batching.
|
|
15
|
+
class LogStash::Inputs::Redis < LogStash::Inputs::Threadable
|
|
16
|
+
config_name "redis"
|
|
17
|
+
milestone 2
|
|
18
|
+
|
|
19
|
+
default :codec, "json"
|
|
20
|
+
|
|
21
|
+
# Name is used for logging in case there are multiple instances.
|
|
22
|
+
# This feature has no real function and will be removed in future versions.
|
|
23
|
+
config :name, :validate => :string, :default => "default", :deprecated => true
|
|
24
|
+
|
|
25
|
+
# The hostname of your redis server.
|
|
26
|
+
config :host, :validate => :string, :default => "127.0.0.1"
|
|
27
|
+
|
|
28
|
+
# The port to connect on.
|
|
29
|
+
config :port, :validate => :number, :default => 6379
|
|
30
|
+
|
|
31
|
+
# The redis database number.
|
|
32
|
+
config :db, :validate => :number, :default => 0
|
|
33
|
+
|
|
34
|
+
# Initial connection timeout in seconds.
|
|
35
|
+
config :timeout, :validate => :number, :default => 5
|
|
36
|
+
|
|
37
|
+
# Password to authenticate with. There is no authentication by default.
|
|
38
|
+
config :password, :validate => :password
|
|
39
|
+
|
|
40
|
+
# The name of the redis queue (we'll use BLPOP against this).
|
|
41
|
+
# TODO: remove soon.
|
|
42
|
+
config :queue, :validate => :string, :deprecated => true
|
|
43
|
+
|
|
44
|
+
# The name of a redis list or channel.
|
|
45
|
+
# TODO: change required to true
|
|
46
|
+
config :key, :validate => :string, :required => false
|
|
47
|
+
|
|
48
|
+
# Either list or channel. If redis\_type is list, then we will BLPOP the
|
|
49
|
+
# key. If redis\_type is channel, then we will SUBSCRIBE to the key.
|
|
50
|
+
# If redis\_type is pattern_channel, then we will PSUBSCRIBE to the key.
|
|
51
|
+
# TODO: change required to true
|
|
52
|
+
config :data_type, :validate => [ "list", "channel", "pattern_channel" ], :required => false
|
|
53
|
+
|
|
54
|
+
# How many events to return from redis using EVAL
|
|
55
|
+
config :batch_count, :validate => :number, :default => 1
|
|
56
|
+
|
|
57
|
+
public
|
|
58
|
+
def register
|
|
59
|
+
require 'redis'
|
|
60
|
+
@redis = nil
|
|
61
|
+
@redis_url = "redis://#{@password}@#{@host}:#{@port}/#{@db}"
|
|
62
|
+
|
|
63
|
+
# TODO remove after setting key and data_type to true
|
|
64
|
+
if @queue
|
|
65
|
+
if @key or @data_type
|
|
66
|
+
raise RuntimeError.new(
|
|
67
|
+
"Cannot specify queue parameter and key or data_type"
|
|
68
|
+
)
|
|
69
|
+
end
|
|
70
|
+
@key = @queue
|
|
71
|
+
@data_type = 'list'
|
|
72
|
+
end
|
|
73
|
+
|
|
74
|
+
if not @key or not @data_type
|
|
75
|
+
raise RuntimeError.new(
|
|
76
|
+
"Must define queue, or key and data_type parameters"
|
|
77
|
+
)
|
|
78
|
+
end
|
|
79
|
+
# end TODO
|
|
80
|
+
|
|
81
|
+
@logger.info("Registering redis", :identity => identity)
|
|
82
|
+
end # def register
|
|
83
|
+
|
|
84
|
+
# A string used to identify a redis instance in log messages
|
|
85
|
+
# TODO(sissel): Use instance variables for this once the @name config
|
|
86
|
+
# option is removed.
|
|
87
|
+
private
|
|
88
|
+
def identity
|
|
89
|
+
@name || "#{@redis_url} #{@data_type}:#{@key}"
|
|
90
|
+
end
|
|
91
|
+
|
|
92
|
+
private
|
|
93
|
+
def connect
|
|
94
|
+
redis = Redis.new(
|
|
95
|
+
:host => @host,
|
|
96
|
+
:port => @port,
|
|
97
|
+
:timeout => @timeout,
|
|
98
|
+
:db => @db,
|
|
99
|
+
:password => @password.nil? ? nil : @password.value
|
|
100
|
+
)
|
|
101
|
+
load_batch_script(redis) if @data_type == 'list' && (@batch_count > 1)
|
|
102
|
+
return redis
|
|
103
|
+
end # def connect
|
|
104
|
+
|
|
105
|
+
private
|
|
106
|
+
def load_batch_script(redis)
|
|
107
|
+
#A redis lua EVAL script to fetch a count of keys
|
|
108
|
+
#in case count is bigger than current items in queue whole queue will be returned without extra nil values
|
|
109
|
+
redis_script = <<EOF
|
|
110
|
+
local i = tonumber(ARGV[1])
|
|
111
|
+
local res = {}
|
|
112
|
+
local length = redis.call('llen',KEYS[1])
|
|
113
|
+
if length < i then i = length end
|
|
114
|
+
while (i > 0) do
|
|
115
|
+
local item = redis.call("lpop", KEYS[1])
|
|
116
|
+
if (not item) then
|
|
117
|
+
break
|
|
118
|
+
end
|
|
119
|
+
table.insert(res, item)
|
|
120
|
+
i = i-1
|
|
121
|
+
end
|
|
122
|
+
return res
|
|
123
|
+
EOF
|
|
124
|
+
@redis_script_sha = redis.script(:load, redis_script)
|
|
125
|
+
end
|
|
126
|
+
|
|
127
|
+
private
|
|
128
|
+
def queue_event(msg, output_queue)
|
|
129
|
+
begin
|
|
130
|
+
@codec.decode(msg) do |event|
|
|
131
|
+
decorate(event)
|
|
132
|
+
output_queue << event
|
|
133
|
+
end
|
|
134
|
+
rescue => e # parse or event creation error
|
|
135
|
+
@logger.error("Failed to create event", :message => msg, :exception => e,
|
|
136
|
+
:backtrace => e.backtrace);
|
|
137
|
+
end
|
|
138
|
+
end
|
|
139
|
+
|
|
140
|
+
private
|
|
141
|
+
def list_listener(redis, output_queue)
|
|
142
|
+
|
|
143
|
+
# blpop returns the 'key' read from as well as the item result
|
|
144
|
+
# we only care about the result (2nd item in the list).
|
|
145
|
+
item = redis.blpop(@key, 0)[1]
|
|
146
|
+
|
|
147
|
+
# blpop failed or .. something?
|
|
148
|
+
# TODO(sissel): handle the error
|
|
149
|
+
return if item.nil?
|
|
150
|
+
queue_event(item, output_queue)
|
|
151
|
+
|
|
152
|
+
# If @batch_count is 1, there's no need to continue.
|
|
153
|
+
return if @batch_count == 1
|
|
154
|
+
|
|
155
|
+
begin
|
|
156
|
+
redis.evalsha(@redis_script_sha, [@key], [@batch_count-1]).each do |item|
|
|
157
|
+
queue_event(item, output_queue)
|
|
158
|
+
end
|
|
159
|
+
|
|
160
|
+
# Below is a commented-out implementation of 'batch fetch'
|
|
161
|
+
# using pipelined LPOP calls. This in practice has been observed to
|
|
162
|
+
# perform exactly the same in terms of event throughput as
|
|
163
|
+
# the evalsha method. Given that the EVALSHA implementation uses
|
|
164
|
+
# one call to redis instead of N (where N == @batch_count) calls,
|
|
165
|
+
# I decided to go with the 'evalsha' method of fetching N items
|
|
166
|
+
# from redis in bulk.
|
|
167
|
+
#redis.pipelined do
|
|
168
|
+
#error, item = redis.lpop(@key)
|
|
169
|
+
#(@batch_count-1).times { redis.lpop(@key) }
|
|
170
|
+
#end.each do |item|
|
|
171
|
+
#queue_event(item, output_queue) if item
|
|
172
|
+
#end
|
|
173
|
+
# --- End commented out implementation of 'batch fetch'
|
|
174
|
+
rescue Redis::CommandError => e
|
|
175
|
+
if e.to_s =~ /NOSCRIPT/ then
|
|
176
|
+
@logger.warn("Redis may have been restarted, reloading redis batch EVAL script", :exception => e);
|
|
177
|
+
load_batch_script(redis)
|
|
178
|
+
retry
|
|
179
|
+
else
|
|
180
|
+
raise e
|
|
181
|
+
end
|
|
182
|
+
end
|
|
183
|
+
end
|
|
184
|
+
|
|
185
|
+
private
|
|
186
|
+
def channel_listener(redis, output_queue)
|
|
187
|
+
redis.subscribe @key do |on|
|
|
188
|
+
on.subscribe do |channel, count|
|
|
189
|
+
@logger.info("Subscribed", :channel => channel, :count => count)
|
|
190
|
+
end
|
|
191
|
+
|
|
192
|
+
on.message do |channel, message|
|
|
193
|
+
queue_event message, output_queue
|
|
194
|
+
end
|
|
195
|
+
|
|
196
|
+
on.unsubscribe do |channel, count|
|
|
197
|
+
@logger.info("Unsubscribed", :channel => channel, :count => count)
|
|
198
|
+
end
|
|
199
|
+
end
|
|
200
|
+
end
|
|
201
|
+
|
|
202
|
+
private
|
|
203
|
+
def pattern_channel_listener(redis, output_queue)
|
|
204
|
+
redis.psubscribe @key do |on|
|
|
205
|
+
on.psubscribe do |channel, count|
|
|
206
|
+
@logger.info("Subscribed", :channel => channel, :count => count)
|
|
207
|
+
end
|
|
208
|
+
|
|
209
|
+
on.pmessage do |ch, event, message|
|
|
210
|
+
queue_event message, output_queue
|
|
211
|
+
end
|
|
212
|
+
|
|
213
|
+
on.punsubscribe do |channel, count|
|
|
214
|
+
@logger.info("Unsubscribed", :channel => channel, :count => count)
|
|
215
|
+
end
|
|
216
|
+
end
|
|
217
|
+
end
|
|
218
|
+
|
|
219
|
+
# Since both listeners have the same basic loop, we've abstracted the outer
|
|
220
|
+
# loop.
|
|
221
|
+
private
|
|
222
|
+
def listener_loop(listener, output_queue)
|
|
223
|
+
while !finished?
|
|
224
|
+
begin
|
|
225
|
+
@redis ||= connect
|
|
226
|
+
self.send listener, @redis, output_queue
|
|
227
|
+
rescue Redis::CannotConnectError => e
|
|
228
|
+
@logger.warn("Redis connection problem", :exception => e)
|
|
229
|
+
sleep 1
|
|
230
|
+
@redis = connect
|
|
231
|
+
rescue => e # redis error
|
|
232
|
+
@logger.warn("Failed to get event from redis", :name => @name,
|
|
233
|
+
:exception => e, :backtrace => e.backtrace)
|
|
234
|
+
raise e
|
|
235
|
+
end
|
|
236
|
+
end # while !finished?
|
|
237
|
+
end # listener_loop
|
|
238
|
+
|
|
239
|
+
public
|
|
240
|
+
def run(output_queue)
|
|
241
|
+
if @data_type == 'list'
|
|
242
|
+
listener_loop :list_listener, output_queue
|
|
243
|
+
elsif @data_type == 'channel'
|
|
244
|
+
listener_loop :channel_listener, output_queue
|
|
245
|
+
else
|
|
246
|
+
listener_loop :pattern_channel_listener, output_queue
|
|
247
|
+
end
|
|
248
|
+
end # def run
|
|
249
|
+
|
|
250
|
+
public
|
|
251
|
+
def teardown
|
|
252
|
+
if @data_type == 'channel' and @redis
|
|
253
|
+
@redis.unsubscribe
|
|
254
|
+
@redis.quit
|
|
255
|
+
@redis = nil
|
|
256
|
+
end
|
|
257
|
+
if @data_type == 'pattern_channel' and @redis
|
|
258
|
+
@redis.punsubscribe
|
|
259
|
+
@redis.quit
|
|
260
|
+
@redis = nil
|
|
261
|
+
end
|
|
262
|
+
end
|
|
263
|
+
end # class LogStash::Inputs::Redis
|
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
# encoding: utf-8
|
|
2
|
+
require "logstash/inputs/base"
|
|
3
|
+
require "logstash/namespace"
|
|
4
|
+
require "logstash/util/relp"
|
|
5
|
+
require "logstash/util/socket_peer"
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
# Read RELP events over a TCP socket.
|
|
9
|
+
#
|
|
10
|
+
# For more information about RELP, see
|
|
11
|
+
# <http://www.rsyslog.com/doc/imrelp.html>
|
|
12
|
+
#
|
|
13
|
+
# This protocol implements application-level acknowledgements to help protect
|
|
14
|
+
# against message loss.
|
|
15
|
+
#
|
|
16
|
+
# Message acks only function as far as messages being put into the queue for
|
|
17
|
+
# filters; anything lost after that point will not be retransmitted
|
|
18
|
+
class LogStash::Inputs::Relp < LogStash::Inputs::Base
|
|
19
|
+
class Interrupted < StandardError; end
|
|
20
|
+
|
|
21
|
+
config_name "relp"
|
|
22
|
+
milestone 1
|
|
23
|
+
|
|
24
|
+
default :codec, "plain"
|
|
25
|
+
|
|
26
|
+
# The address to listen on.
|
|
27
|
+
config :host, :validate => :string, :default => "0.0.0.0"
|
|
28
|
+
|
|
29
|
+
# The port to listen on.
|
|
30
|
+
config :port, :validate => :number, :required => true
|
|
31
|
+
|
|
32
|
+
def initialize(*args)
|
|
33
|
+
super(*args)
|
|
34
|
+
end # def initialize
|
|
35
|
+
|
|
36
|
+
public
|
|
37
|
+
def register
|
|
38
|
+
@logger.info("Starting relp input listener", :address => "#{@host}:#{@port}")
|
|
39
|
+
@relp_server = RelpServer.new(@host, @port,['syslog'])
|
|
40
|
+
end # def register
|
|
41
|
+
|
|
42
|
+
private
|
|
43
|
+
def relp_stream(relpserver,socket,output_queue,client_address)
|
|
44
|
+
loop do
|
|
45
|
+
frame = relpserver.syslog_read(socket)
|
|
46
|
+
@codec.decode(frame["message"]) do |event|
|
|
47
|
+
decorate(event)
|
|
48
|
+
event["host"] = client_address
|
|
49
|
+
output_queue << event
|
|
50
|
+
end
|
|
51
|
+
|
|
52
|
+
#To get this far, the message must have made it into the queue for
|
|
53
|
+
#filtering. I don't think it's possible to wait for output before ack
|
|
54
|
+
#without fundamentally breaking the plugin architecture
|
|
55
|
+
relpserver.ack(socket, frame['txnr'])
|
|
56
|
+
end
|
|
57
|
+
end
|
|
58
|
+
|
|
59
|
+
public
# Accept RELP client connections until teardown; each accepted connection is
# handled on its own thread while this thread loops back into accept().
def run(output_queue)
  # Remember this thread so teardown() can interrupt a blocking accept below.
  @thread = Thread.current
  loop do
    begin
      # Start a new thread for each connection.
      Thread.start(@relp_server.accept) do |client|
        rs = client[0]
        socket = client[1]
        # monkeypatch a 'peer' method onto the socket.
        socket.instance_eval { class << self; include ::LogStash::Util::SocketPeer end }
        peer = socket.peer
        @logger.debug("Relp Connection to #{peer} created")
        begin
          relp_stream(rs,socket, output_queue, peer)
        rescue Relp::ConnectionClosed => e
          # Normal client disconnect; nothing further to do for this thread.
          @logger.debug("Relp Connection to #{peer} Closed")
        rescue Relp::RelpError => e
          @logger.warn('Relp error: '+e.class.to_s+' '+e.message)
          #TODO: Still not happy with this, are they all warn level?
          #Will this catch everything I want it to?
          #Relp spec says to close connection on error, ensure this is the case
        end
      end # Thread.start
    rescue Relp::InvalidCommand,Relp::InappropriateCommand => e
      @logger.warn('Relp client trying to open connection with something other than open:'+e.message)
    rescue Relp::InsufficientCommands
      @logger.warn('Relp client incapable of syslog')
    rescue IOError, Interrupted
      # Interrupted is raised into this thread by teardown(); the @interrupted
      # flag distinguishes an intended shutdown from a genuine IOError.
      if @interrupted
        # Intended shutdown, get out of the loop
        @relp_server.shutdown
        break
      else
        # Else it was a genuine IOError caused by something else, so propagate it up..
        raise
      end
    end
  end # loop
end # def run
|
|
99
|
+
|
|
100
|
+
# Request shutdown of run(): set the flag first (run() checks it inside its
# rescue), then interrupt the thread — typically blocked in accept — by
# raising Interrupted into it. The assignment must precede the raise.
def teardown
  @interrupted = true
  @thread.raise(Interrupted.new)
end
|
|
104
|
+
end # class LogStash::Inputs::Relp
|
|
105
|
+
|
|
106
|
+
#TODO: structured error logging
|
|
@@ -0,0 +1,279 @@
|
|
|
1
|
+
# encoding: utf-8
require "logstash/inputs/base"
require "logstash/namespace"

require "fileutils"
require "pathname"
require "time"
require "tmpdir"
require "zlib"
|
|
7
|
+
|
|
8
|
+
# Stream events from files from a S3 bucket.
#
# Each line from each file generates an event.
# Files ending in '.gz' are handled as gzip'ed files.
class LogStash::Inputs::S3 < LogStash::Inputs::Base
  config_name "s3"
  milestone 1

  # TODO(sissel): refactor to use 'line' codec (requires removing both gzip
  # support and readline usage). Support gzip through a gzip codec! ;)
  default :codec, "plain"

  # The credentials of the AWS account used to access the bucket.
  # Credentials can be specified:
  # - As an ["id","secret"] array
  # - As a path to a file containing AWS_ACCESS_KEY_ID=... and AWS_SECRET_ACCESS_KEY=...
  # - In the environment (variables AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY)
  config :credentials, :validate => :array, :default => nil

  # The name of the S3 bucket.
  config :bucket, :validate => :string, :required => true

  # The AWS region for your bucket. Deprecated; use region_endpoint instead.
  config :region, :validate => ["us-east-1", "us-west-1", "us-west-2",
                                "eu-west-1", "ap-southeast-1", "ap-southeast-2",
                                "ap-northeast-1", "sa-east-1", "us-gov-west-1"],
                  :deprecated => "'region' has been deprecated in favor of 'region_endpoint'"

  # The AWS region endpoint for your bucket (preferred over the deprecated 'region').
  config :region_endpoint, :validate => ["us-east-1", "us-west-1", "us-west-2",
                                "eu-west-1", "ap-southeast-1", "ap-southeast-2",
                                "ap-northeast-1", "sa-east-1", "us-gov-west-1"], :default => "us-east-1"

  # If specified, the prefix the filenames in the bucket must match (not a regexp)
  config :prefix, :validate => :string, :default => nil

  # Where to write the since database (keeps track of the date
  # the last handled file was added to S3). The default will write
  # sincedb files to some path matching "$HOME/.sincedb*"
  config :sincedb_path, :validate => :string, :default => nil

  # Name of a S3 bucket to backup processed files to.
  config :backup_to_bucket, :validate => :string, :default => nil

  # Path of a local directory to backup processed files to.
  config :backup_to_dir, :validate => :string, :default => nil

  # Whether to delete processed files from the original bucket.
  config :delete, :validate => :boolean, :default => false

  # Interval (in seconds) to wait before checking the file list again
  # after a run is finished.
  config :interval, :validate => :number, :default => 60
|
|
61
|
+
|
|
62
|
+
public
# Validate configuration, resolve AWS credentials (explicit pair, credentials
# file, or environment), compute a default sincedb path, and open the source
# bucket plus any backup bucket/directory.
#
# Raises ArgumentError on missing/invalid credentials, missing bucket name,
# or when no sincedb_path can be derived (no HOME).
def register
  require "digest/md5"
  require "aws-sdk"

  # 'region' is deprecated in favor of 'region_endpoint'. Guard against nil:
  # 'region' has no default, so it is nil unless explicitly configured, and
  # calling .empty? on nil would raise NoMethodError.
  @region_endpoint = @region if @region && !@region.empty?

  @logger.info("Registering s3 input", :bucket => @bucket, :region_endpoint => @region_endpoint)

  if @credentials.nil?
    @access_key_id = ENV['AWS_ACCESS_KEY_ID']
    @secret_access_key = ENV['AWS_SECRET_ACCESS_KEY']
  elsif @credentials.is_a? Array
    if @credentials.length == 1
      # Single element: path to an AWS credentials file.
      load_aws_credentials_file(@credentials[0])
    elsif @credentials.length == 2
      @access_key_id, @secret_access_key = @credentials
    else
      raise ArgumentError, 'Credentials must be of the form "/path/to/file" or ["id", "secret"]'
    end
  end
  if @access_key_id.nil? or @secret_access_key.nil?
    raise ArgumentError, 'Missing AWS credentials'
  end

  if @bucket.nil?
    raise ArgumentError, 'Missing AWS bucket'
  end

  if @sincedb_path.nil?
    if ENV['HOME'].nil?
      raise ArgumentError, 'No HOME or sincedb_path set'
    end
    # Hash bucket+prefix so distinct inputs get distinct sincedb files.
    @sincedb_path = File.join(ENV["HOME"], ".sincedb_" + Digest::MD5.hexdigest("#{@bucket}+#{@prefix}"))
  end

  s3 = AWS::S3.new(
    :access_key_id => @access_key_id,
    :secret_access_key => @secret_access_key,
    :region => @region_endpoint
  )

  @s3bucket = s3.buckets[@bucket]

  unless @backup_to_bucket.nil?
    @backup_bucket = s3.buckets[@backup_to_bucket]
    s3.buckets.create(@backup_to_bucket) unless @backup_bucket.exists?
  end

  unless @backup_to_dir.nil?
    # File.exist? — the File.exists? alias is deprecated (removed in Ruby 3.2).
    Dir.mkdir(@backup_to_dir, 0700) unless File.exist?(@backup_to_dir)
  end

end # def register

private
# Read AWS_ACCESS_KEY_ID=... and AWS_SECRET_ACCESS_KEY=... assignments from
# the credentials file at 'path'; lines starting with '#' are comments and
# lines without '=' are ignored.
def load_aws_credentials_file(path)
  File.open(path) do |f|
    f.each do |line|
      next if /^\#/.match(line)
      next unless /\s*=\s*/.match(line)
      param, value = line.split('=', 2)
      case param.chomp.strip
      when 'AWS_ACCESS_KEY_ID'
        @access_key_id = value.chomp.strip
      when 'AWS_SECRET_ACCESS_KEY'
        @secret_access_key = value.chomp.strip
      end
    end
  end
end
|
|
133
|
+
|
|
134
|
+
public
# Poll the bucket forever: process any new objects, then sleep @interval
# seconds before the next pass. Runs until the plugin is torn down.
# (The original had an unreachable 'finished' call after the infinite loop;
# it has been removed as dead code.)
def run(queue)
  loop do
    process_new(queue)
    sleep(@interval)
  end
end # def run
|
|
142
|
+
|
|
143
|
+
private
# Process every object added to the bucket after 'since' (defaulting to the
# timestamp persisted in the sincedb), oldest first, advancing the sincedb
# after each object so progress survives a restart.
def process_new(queue, since=nil)
  since ||= sincedb_read()

  list_new(since).each do |key|
    @logger.debug("S3 input processing", :bucket => @bucket, :key => key)
    modified_at = @s3bucket.objects[key].last_modified
    process_log(queue, key)
    sincedb_write(modified_at)
  end

end # def process_new
|
|
159
|
+
|
|
160
|
+
private
# Return the keys of all bucket objects (under @prefix) whose last-modified
# time is after 'since' (default: epoch zero, i.e. everything), ordered
# oldest-first so processing and the sincedb advance chronologically.
def list_new(since=nil)
  since ||= Time.new(0)

  objects = {}
  @s3bucket.objects.with_prefix(@prefix).each do |log|
    objects[log.key] = log.last_modified if log.last_modified > since
  end

  # sort_by the recorded timestamps (replaces the previous pointless
  # 'return sorted_objects = ...' local assignment and comparator block).
  objects.keys.sort_by { |key| objects[key] }
end # def list_new
|
|
177
|
+
|
|
178
|
+
private
# Download the S3 object 'key' into a temporary directory, run it through
# process_local_log, then optionally back it up (to @backup_bucket and/or
# @backup_to_dir) and delete the original when @delete is set.
# The temp directory is always removed, even if processing raises.
def process_log(queue, key)

  object = @s3bucket.objects[key]
  tmp = Dir.mktmpdir("logstash-")
  begin
    filename = File.join(tmp, File.basename(key))
    # Stream the object to disk chunk by chunk to bound memory use.
    File.open(filename, 'wb') do |s3file|
      object.read do |chunk|
        s3file.write(chunk)
      end
    end
    process_local_log(queue, filename)
    unless @backup_to_bucket.nil?
      backup_object = @backup_bucket.objects[key]
      backup_object.write(Pathname.new(filename))
    end
    unless @backup_to_dir.nil?
      FileUtils.cp(filename, @backup_to_dir)
    end
    if @delete
      object.delete()
    end
  ensure
    # Cleanup previously sat after the begin/end block, so any exception
    # leaked the temp directory; 'ensure' guarantees removal.
    FileUtils.remove_entry_secure(tmp, true)
  end

end # def process_log
|
|
205
|
+
|
|
206
|
+
private
# Feed a downloaded log file, line by line, through process_line. Files whose
# name ends in '.gz' are read through a gzip decompressor. The metadata hash
# (CloudFront #Version/#Fields headers) is threaded through every call.
def process_local_log(queue, filename)

  metadata = {
    :version => nil,
    :format => nil,
  }
  File.open(filename) do |file|
    reader = filename.end_with?('.gz') ? Zlib::GzipReader.new(file) : file
    reader.each_line do |line|
      metadata = process_line(queue, metadata, line)
    end
  end

end # def process_local_log
|
|
227
|
+
|
|
228
|
+
private
# Interpret one log line. CloudFront header lines ("#Version: ..." and
# "#Fields: ...") update the metadata hash instead of producing events;
# any other line is decoded by the codec, decorated, annotated with the
# current header metadata, and pushed onto the queue.
# Returns the (possibly updated) metadata hash.
def process_line(queue, metadata, line)

  # Anchor at start-of-(stripped-)string: the previous unanchored regexes
  # would misparse a data line that merely contained '#Version:'/'#Fields:'
  # somewhere in its middle as a header line.
  if (match = /\A#Version: (.+)/.match(line.strip))
    metadata[:version] = match[1]
  elsif (match = /\A#Fields: (.+)/.match(line.strip))
    metadata[:format] = match[1]
  else
    @codec.decode(line) do |event|
      decorate(event)
      unless metadata[:version].nil?
        event["cloudfront_version"] = metadata[:version]
      end
      unless metadata[:format].nil?
        event["cloudfront_fields"] = metadata[:format]
      end
      queue << event
    end
  end
  return metadata

end # def process_line
|
|
256
|
+
|
|
257
|
+
private
# Read the persisted "last processed" timestamp from @sincedb_path.
# Returns epoch-zero (Time.new(0)) when no sincedb file exists yet,
# meaning "process everything".
def sincedb_read()

  # File.exist? — the File.exists? alias is deprecated and was removed
  # in Ruby 3.2, where the original would raise NoMethodError.
  if File.exist?(@sincedb_path)
    since = Time.parse(File.read(@sincedb_path).chomp.strip)
  else
    since = Time.new(0)
  end
  return since

end # def sincedb_read
|
|
268
|
+
|
|
269
|
+
private
# Persist 'since' (default: the current time) to @sincedb_path as the new
# high-water mark for processed objects.
def sincedb_write(since=nil)

  since ||= Time.now()
  File.open(@sincedb_path, 'w') { |file| file.write(since.to_s) }

end # def sincedb_write
|
|
278
|
+
|
|
279
|
+
end # class LogStash::Inputs::S3
|