logstash-lib 1.3.2
- data/.gitignore +24 -0
- data/.tailor +8 -0
- data/.travis.yml +12 -0
- data/CHANGELOG +1185 -0
- data/CONTRIBUTING.md +61 -0
- data/CONTRIBUTORS +79 -0
- data/LICENSE +14 -0
- data/Makefile +460 -0
- data/README.md +120 -0
- data/STYLE.md +96 -0
- data/bin/logstash +37 -0
- data/bin/logstash-test +4 -0
- data/bin/logstash-web +4 -0
- data/bin/logstash.lib.sh +78 -0
- data/bot/check_pull_changelog.rb +89 -0
- data/docs/configuration.md +260 -0
- data/docs/docgen.rb +242 -0
- data/docs/extending/example-add-a-new-filter.md +121 -0
- data/docs/extending/index.md +91 -0
- data/docs/flags.md +43 -0
- data/docs/generate_index.rb +28 -0
- data/docs/index.html.erb +56 -0
- data/docs/learn.md +46 -0
- data/docs/life-of-an-event.md +109 -0
- data/docs/logging-tool-comparisons.md +60 -0
- data/docs/plugin-doc.html.erb +91 -0
- data/docs/plugin-milestones.md +41 -0
- data/docs/plugin-synopsis.html.erb +24 -0
- data/docs/release-engineering.md +46 -0
- data/docs/release-test-results.md +14 -0
- data/docs/repositories.md +35 -0
- data/docs/tutorials/10-minute-walkthrough/apache-elasticsearch.conf +35 -0
- data/docs/tutorials/10-minute-walkthrough/apache-parse.conf +33 -0
- data/docs/tutorials/10-minute-walkthrough/apache_log.1 +1 -0
- data/docs/tutorials/10-minute-walkthrough/apache_log.2.bz2 +0 -0
- data/docs/tutorials/10-minute-walkthrough/hello-search.conf +25 -0
- data/docs/tutorials/10-minute-walkthrough/hello.conf +16 -0
- data/docs/tutorials/10-minute-walkthrough/index.md +124 -0
- data/docs/tutorials/10-minute-walkthrough/step-5-output.txt +17 -0
- data/docs/tutorials/getting-started-centralized-overview-diagram.png +0 -0
- data/docs/tutorials/getting-started-centralized-overview-diagram.xml +1 -0
- data/docs/tutorials/getting-started-centralized.md +217 -0
- data/docs/tutorials/getting-started-simple.md +200 -0
- data/docs/tutorials/just-enough-rabbitmq-for-logstash.md +201 -0
- data/docs/tutorials/media/frontend-response-codes.png +0 -0
- data/docs/tutorials/metrics-from-logs.md +84 -0
- data/docs/tutorials/zeromq.md +118 -0
- data/extract_services.rb +29 -0
- data/gembag.rb +64 -0
- data/lib/logstash-event.rb +2 -0
- data/lib/logstash.rb +4 -0
- data/lib/logstash/JRUBY-6970-openssl.rb +22 -0
- data/lib/logstash/JRUBY-6970.rb +102 -0
- data/lib/logstash/agent.rb +305 -0
- data/lib/logstash/certs/cacert.pem +3895 -0
- data/lib/logstash/codecs/base.rb +49 -0
- data/lib/logstash/codecs/compress_spooler.rb +50 -0
- data/lib/logstash/codecs/dots.rb +18 -0
- data/lib/logstash/codecs/edn.rb +28 -0
- data/lib/logstash/codecs/edn_lines.rb +36 -0
- data/lib/logstash/codecs/fluent.rb +55 -0
- data/lib/logstash/codecs/graphite.rb +114 -0
- data/lib/logstash/codecs/json.rb +41 -0
- data/lib/logstash/codecs/json_lines.rb +52 -0
- data/lib/logstash/codecs/json_spooler.rb +22 -0
- data/lib/logstash/codecs/line.rb +58 -0
- data/lib/logstash/codecs/msgpack.rb +43 -0
- data/lib/logstash/codecs/multiline.rb +189 -0
- data/lib/logstash/codecs/netflow.rb +342 -0
- data/lib/logstash/codecs/netflow/util.rb +212 -0
- data/lib/logstash/codecs/noop.rb +19 -0
- data/lib/logstash/codecs/oldlogstashjson.rb +56 -0
- data/lib/logstash/codecs/plain.rb +48 -0
- data/lib/logstash/codecs/rubydebug.rb +22 -0
- data/lib/logstash/codecs/spool.rb +38 -0
- data/lib/logstash/config/Makefile +4 -0
- data/lib/logstash/config/config_ast.rb +380 -0
- data/lib/logstash/config/file.rb +39 -0
- data/lib/logstash/config/grammar.rb +3504 -0
- data/lib/logstash/config/grammar.treetop +241 -0
- data/lib/logstash/config/mixin.rb +464 -0
- data/lib/logstash/config/registry.rb +13 -0
- data/lib/logstash/config/test.conf +18 -0
- data/lib/logstash/errors.rb +10 -0
- data/lib/logstash/event.rb +262 -0
- data/lib/logstash/filters/advisor.rb +178 -0
- data/lib/logstash/filters/alter.rb +173 -0
- data/lib/logstash/filters/anonymize.rb +93 -0
- data/lib/logstash/filters/base.rb +190 -0
- data/lib/logstash/filters/checksum.rb +50 -0
- data/lib/logstash/filters/cidr.rb +76 -0
- data/lib/logstash/filters/cipher.rb +145 -0
- data/lib/logstash/filters/clone.rb +35 -0
- data/lib/logstash/filters/collate.rb +114 -0
- data/lib/logstash/filters/csv.rb +94 -0
- data/lib/logstash/filters/date.rb +244 -0
- data/lib/logstash/filters/dns.rb +201 -0
- data/lib/logstash/filters/drop.rb +32 -0
- data/lib/logstash/filters/elapsed.rb +256 -0
- data/lib/logstash/filters/elasticsearch.rb +73 -0
- data/lib/logstash/filters/environment.rb +27 -0
- data/lib/logstash/filters/extractnumbers.rb +84 -0
- data/lib/logstash/filters/gelfify.rb +52 -0
- data/lib/logstash/filters/geoip.rb +145 -0
- data/lib/logstash/filters/grep.rb +153 -0
- data/lib/logstash/filters/grok.rb +425 -0
- data/lib/logstash/filters/grokdiscovery.rb +75 -0
- data/lib/logstash/filters/i18n.rb +51 -0
- data/lib/logstash/filters/json.rb +90 -0
- data/lib/logstash/filters/json_encode.rb +52 -0
- data/lib/logstash/filters/kv.rb +232 -0
- data/lib/logstash/filters/metaevent.rb +68 -0
- data/lib/logstash/filters/metrics.rb +237 -0
- data/lib/logstash/filters/multiline.rb +241 -0
- data/lib/logstash/filters/mutate.rb +399 -0
- data/lib/logstash/filters/noop.rb +21 -0
- data/lib/logstash/filters/prune.rb +149 -0
- data/lib/logstash/filters/punct.rb +32 -0
- data/lib/logstash/filters/railsparallelrequest.rb +86 -0
- data/lib/logstash/filters/range.rb +142 -0
- data/lib/logstash/filters/ruby.rb +42 -0
- data/lib/logstash/filters/sleep.rb +111 -0
- data/lib/logstash/filters/split.rb +64 -0
- data/lib/logstash/filters/sumnumbers.rb +73 -0
- data/lib/logstash/filters/syslog_pri.rb +107 -0
- data/lib/logstash/filters/translate.rb +121 -0
- data/lib/logstash/filters/unique.rb +29 -0
- data/lib/logstash/filters/urldecode.rb +57 -0
- data/lib/logstash/filters/useragent.rb +112 -0
- data/lib/logstash/filters/uuid.rb +58 -0
- data/lib/logstash/filters/xml.rb +139 -0
- data/lib/logstash/filters/zeromq.rb +123 -0
- data/lib/logstash/filterworker.rb +122 -0
- data/lib/logstash/inputs/base.rb +125 -0
- data/lib/logstash/inputs/collectd.rb +306 -0
- data/lib/logstash/inputs/drupal_dblog.rb +323 -0
- data/lib/logstash/inputs/drupal_dblog/jdbcconnection.rb +66 -0
- data/lib/logstash/inputs/elasticsearch.rb +140 -0
- data/lib/logstash/inputs/eventlog.rb +129 -0
- data/lib/logstash/inputs/eventlog/racob_fix.rb +44 -0
- data/lib/logstash/inputs/exec.rb +69 -0
- data/lib/logstash/inputs/file.rb +146 -0
- data/lib/logstash/inputs/ganglia.rb +127 -0
- data/lib/logstash/inputs/ganglia/gmondpacket.rb +146 -0
- data/lib/logstash/inputs/ganglia/xdr.rb +327 -0
- data/lib/logstash/inputs/gelf.rb +138 -0
- data/lib/logstash/inputs/gemfire.rb +222 -0
- data/lib/logstash/inputs/generator.rb +97 -0
- data/lib/logstash/inputs/graphite.rb +41 -0
- data/lib/logstash/inputs/heroku.rb +51 -0
- data/lib/logstash/inputs/imap.rb +136 -0
- data/lib/logstash/inputs/irc.rb +84 -0
- data/lib/logstash/inputs/log4j.rb +136 -0
- data/lib/logstash/inputs/lumberjack.rb +53 -0
- data/lib/logstash/inputs/pipe.rb +57 -0
- data/lib/logstash/inputs/rabbitmq.rb +126 -0
- data/lib/logstash/inputs/rabbitmq/bunny.rb +118 -0
- data/lib/logstash/inputs/rabbitmq/hot_bunnies.rb +1 -0
- data/lib/logstash/inputs/rabbitmq/march_hare.rb +129 -0
- data/lib/logstash/inputs/redis.rb +263 -0
- data/lib/logstash/inputs/relp.rb +106 -0
- data/lib/logstash/inputs/s3.rb +279 -0
- data/lib/logstash/inputs/snmptrap.rb +87 -0
- data/lib/logstash/inputs/sqlite.rb +185 -0
- data/lib/logstash/inputs/sqs.rb +172 -0
- data/lib/logstash/inputs/stdin.rb +46 -0
- data/lib/logstash/inputs/stomp.rb +84 -0
- data/lib/logstash/inputs/syslog.rb +237 -0
- data/lib/logstash/inputs/tcp.rb +231 -0
- data/lib/logstash/inputs/threadable.rb +18 -0
- data/lib/logstash/inputs/twitter.rb +82 -0
- data/lib/logstash/inputs/udp.rb +81 -0
- data/lib/logstash/inputs/unix.rb +163 -0
- data/lib/logstash/inputs/varnishlog.rb +48 -0
- data/lib/logstash/inputs/websocket.rb +50 -0
- data/lib/logstash/inputs/wmi.rb +72 -0
- data/lib/logstash/inputs/xmpp.rb +81 -0
- data/lib/logstash/inputs/zenoss.rb +143 -0
- data/lib/logstash/inputs/zeromq.rb +165 -0
- data/lib/logstash/kibana.rb +113 -0
- data/lib/logstash/loadlibs.rb +9 -0
- data/lib/logstash/logging.rb +89 -0
- data/lib/logstash/monkeypatches-for-bugs.rb +2 -0
- data/lib/logstash/monkeypatches-for-debugging.rb +47 -0
- data/lib/logstash/monkeypatches-for-performance.rb +66 -0
- data/lib/logstash/multiqueue.rb +53 -0
- data/lib/logstash/namespace.rb +16 -0
- data/lib/logstash/outputs/base.rb +120 -0
- data/lib/logstash/outputs/boundary.rb +116 -0
- data/lib/logstash/outputs/circonus.rb +78 -0
- data/lib/logstash/outputs/cloudwatch.rb +351 -0
- data/lib/logstash/outputs/csv.rb +55 -0
- data/lib/logstash/outputs/datadog.rb +93 -0
- data/lib/logstash/outputs/datadog_metrics.rb +123 -0
- data/lib/logstash/outputs/elasticsearch.rb +332 -0
- data/lib/logstash/outputs/elasticsearch/elasticsearch-template.json +44 -0
- data/lib/logstash/outputs/elasticsearch_http.rb +256 -0
- data/lib/logstash/outputs/elasticsearch_river.rb +214 -0
- data/lib/logstash/outputs/email.rb +299 -0
- data/lib/logstash/outputs/exec.rb +40 -0
- data/lib/logstash/outputs/file.rb +180 -0
- data/lib/logstash/outputs/ganglia.rb +75 -0
- data/lib/logstash/outputs/gelf.rb +208 -0
- data/lib/logstash/outputs/gemfire.rb +103 -0
- data/lib/logstash/outputs/google_bigquery.rb +570 -0
- data/lib/logstash/outputs/google_cloud_storage.rb +431 -0
- data/lib/logstash/outputs/graphite.rb +143 -0
- data/lib/logstash/outputs/graphtastic.rb +185 -0
- data/lib/logstash/outputs/hipchat.rb +80 -0
- data/lib/logstash/outputs/http.rb +142 -0
- data/lib/logstash/outputs/irc.rb +80 -0
- data/lib/logstash/outputs/jira.rb +109 -0
- data/lib/logstash/outputs/juggernaut.rb +105 -0
- data/lib/logstash/outputs/librato.rb +146 -0
- data/lib/logstash/outputs/loggly.rb +93 -0
- data/lib/logstash/outputs/lumberjack.rb +51 -0
- data/lib/logstash/outputs/metriccatcher.rb +103 -0
- data/lib/logstash/outputs/mongodb.rb +81 -0
- data/lib/logstash/outputs/nagios.rb +119 -0
- data/lib/logstash/outputs/nagios_nsca.rb +123 -0
- data/lib/logstash/outputs/null.rb +18 -0
- data/lib/logstash/outputs/opentsdb.rb +101 -0
- data/lib/logstash/outputs/pagerduty.rb +79 -0
- data/lib/logstash/outputs/pipe.rb +132 -0
- data/lib/logstash/outputs/rabbitmq.rb +96 -0
- data/lib/logstash/outputs/rabbitmq/bunny.rb +135 -0
- data/lib/logstash/outputs/rabbitmq/hot_bunnies.rb +1 -0
- data/lib/logstash/outputs/rabbitmq/march_hare.rb +143 -0
- data/lib/logstash/outputs/redis.rb +245 -0
- data/lib/logstash/outputs/riak.rb +152 -0
- data/lib/logstash/outputs/riemann.rb +109 -0
- data/lib/logstash/outputs/s3.rb +356 -0
- data/lib/logstash/outputs/sns.rb +124 -0
- data/lib/logstash/outputs/solr_http.rb +78 -0
- data/lib/logstash/outputs/sqs.rb +141 -0
- data/lib/logstash/outputs/statsd.rb +116 -0
- data/lib/logstash/outputs/stdout.rb +53 -0
- data/lib/logstash/outputs/stomp.rb +67 -0
- data/lib/logstash/outputs/syslog.rb +145 -0
- data/lib/logstash/outputs/tcp.rb +145 -0
- data/lib/logstash/outputs/udp.rb +38 -0
- data/lib/logstash/outputs/websocket.rb +46 -0
- data/lib/logstash/outputs/websocket/app.rb +29 -0
- data/lib/logstash/outputs/websocket/pubsub.rb +45 -0
- data/lib/logstash/outputs/xmpp.rb +78 -0
- data/lib/logstash/outputs/zabbix.rb +108 -0
- data/lib/logstash/outputs/zeromq.rb +125 -0
- data/lib/logstash/pipeline.rb +286 -0
- data/lib/logstash/plugin.rb +150 -0
- data/lib/logstash/plugin_mixins/aws_config.rb +93 -0
- data/lib/logstash/program.rb +15 -0
- data/lib/logstash/runner.rb +238 -0
- data/lib/logstash/sized_queue.rb +8 -0
- data/lib/logstash/test.rb +183 -0
- data/lib/logstash/threadwatchdog.rb +37 -0
- data/lib/logstash/time_addon.rb +33 -0
- data/lib/logstash/util.rb +106 -0
- data/lib/logstash/util/buftok.rb +139 -0
- data/lib/logstash/util/charset.rb +39 -0
- data/lib/logstash/util/fieldreference.rb +50 -0
- data/lib/logstash/util/password.rb +25 -0
- data/lib/logstash/util/prctl.rb +11 -0
- data/lib/logstash/util/relp.rb +326 -0
- data/lib/logstash/util/require-helper.rb +18 -0
- data/lib/logstash/util/socket_peer.rb +7 -0
- data/lib/logstash/util/zenoss.rb +566 -0
- data/lib/logstash/util/zeromq.rb +47 -0
- data/lib/logstash/version.rb +6 -0
- data/locales/en.yml +170 -0
- data/logstash-event.gemspec +29 -0
- data/logstash.gemspec +128 -0
- data/patterns/firewalls +60 -0
- data/patterns/grok-patterns +91 -0
- data/patterns/haproxy +37 -0
- data/patterns/java +3 -0
- data/patterns/linux-syslog +14 -0
- data/patterns/mcollective +1 -0
- data/patterns/mcollective-patterns +4 -0
- data/patterns/nagios +108 -0
- data/patterns/postgresql +3 -0
- data/patterns/redis +3 -0
- data/patterns/ruby +2 -0
- data/pkg/build.sh +135 -0
- data/pkg/centos/after-install.sh +1 -0
- data/pkg/centos/before-install.sh +10 -0
- data/pkg/centos/before-remove.sh +11 -0
- data/pkg/centos/sysconfig +15 -0
- data/pkg/debian/after-install.sh +5 -0
- data/pkg/debian/before-install.sh +13 -0
- data/pkg/debian/before-remove.sh +13 -0
- data/pkg/debian/build.sh +34 -0
- data/pkg/debian/debian/README +6 -0
- data/pkg/debian/debian/changelog +17 -0
- data/pkg/debian/debian/compat +1 -0
- data/pkg/debian/debian/control +16 -0
- data/pkg/debian/debian/copyright +27 -0
- data/pkg/debian/debian/dirs +19 -0
- data/pkg/debian/debian/docs +0 -0
- data/pkg/debian/debian/logstash.default +39 -0
- data/pkg/debian/debian/logstash.init +201 -0
- data/pkg/debian/debian/logstash.install +1 -0
- data/pkg/debian/debian/logstash.logrotate +9 -0
- data/pkg/debian/debian/logstash.postinst +68 -0
- data/pkg/debian/debian/logstash.postrm +23 -0
- data/pkg/debian/debian/manpage.1.ex +59 -0
- data/pkg/debian/debian/preinst.ex +37 -0
- data/pkg/debian/debian/prerm.ex +40 -0
- data/pkg/debian/debian/release.conf +5 -0
- data/pkg/debian/debian/rules +80 -0
- data/pkg/debian/debian/watch.ex +22 -0
- data/pkg/logrotate.conf +8 -0
- data/pkg/logstash-web.default +41 -0
- data/pkg/logstash-web.sysv.debian +201 -0
- data/pkg/logstash-web.upstart.ubuntu +18 -0
- data/pkg/logstash.default +45 -0
- data/pkg/logstash.sysv.debian +202 -0
- data/pkg/logstash.sysv.redhat +158 -0
- data/pkg/logstash.upstart.ubuntu +20 -0
- data/pkg/rpm/SOURCES/logstash.conf +26 -0
- data/pkg/rpm/SOURCES/logstash.init +80 -0
- data/pkg/rpm/SOURCES/logstash.logrotate +8 -0
- data/pkg/rpm/SOURCES/logstash.sysconfig +3 -0
- data/pkg/rpm/SOURCES/logstash.wrapper +105 -0
- data/pkg/rpm/SPECS/logstash.spec +180 -0
- data/pkg/rpm/readme.md +4 -0
- data/pkg/ubuntu/after-install.sh +7 -0
- data/pkg/ubuntu/before-install.sh +12 -0
- data/pkg/ubuntu/before-remove.sh +13 -0
- data/pull_release_note.rb +25 -0
- data/require-analyze.rb +22 -0
- data/spec/README.md +14 -0
- data/spec/codecs/edn.rb +40 -0
- data/spec/codecs/edn_lines.rb +53 -0
- data/spec/codecs/graphite.rb +96 -0
- data/spec/codecs/json.rb +57 -0
- data/spec/codecs/json_lines.rb +51 -0
- data/spec/codecs/json_spooler.rb +43 -0
- data/spec/codecs/msgpack.rb +39 -0
- data/spec/codecs/multiline.rb +60 -0
- data/spec/codecs/oldlogstashjson.rb +55 -0
- data/spec/codecs/plain.rb +35 -0
- data/spec/codecs/spool.rb +35 -0
- data/spec/conditionals/test.rb +323 -0
- data/spec/config.rb +31 -0
- data/spec/event.rb +165 -0
- data/spec/examples/fail2ban.rb +28 -0
- data/spec/examples/graphite-input.rb +41 -0
- data/spec/examples/mysql-slow-query.rb +70 -0
- data/spec/examples/parse-apache-logs.rb +66 -0
- data/spec/examples/parse-haproxy-logs.rb +115 -0
- data/spec/examples/syslog.rb +48 -0
- data/spec/filters/alter.rb +96 -0
- data/spec/filters/anonymize.rb +189 -0
- data/spec/filters/checksum.rb +41 -0
- data/spec/filters/clone.rb +67 -0
- data/spec/filters/collate.rb +122 -0
- data/spec/filters/csv.rb +174 -0
- data/spec/filters/date.rb +285 -0
- data/spec/filters/date_performance.rb +31 -0
- data/spec/filters/dns.rb +159 -0
- data/spec/filters/drop.rb +19 -0
- data/spec/filters/elapsed.rb +294 -0
- data/spec/filters/environment.rb +43 -0
- data/spec/filters/geoip.rb +62 -0
- data/spec/filters/grep.rb +342 -0
- data/spec/filters/grok.rb +473 -0
- data/spec/filters/grok/timeout2.rb +56 -0
- data/spec/filters/grok/timeouts.rb +39 -0
- data/spec/filters/i18n.rb +25 -0
- data/spec/filters/json.rb +72 -0
- data/spec/filters/json_encode.rb +37 -0
- data/spec/filters/kv.rb +403 -0
- data/spec/filters/metrics.rb +212 -0
- data/spec/filters/multiline.rb +119 -0
- data/spec/filters/mutate.rb +180 -0
- data/spec/filters/noop.rb +221 -0
- data/spec/filters/prune.rb +441 -0
- data/spec/filters/punct.rb +18 -0
- data/spec/filters/railsparallelrequest.rb +112 -0
- data/spec/filters/range.rb +169 -0
- data/spec/filters/split.rb +58 -0
- data/spec/filters/translate.rb +70 -0
- data/spec/filters/unique.rb +25 -0
- data/spec/filters/useragent.rb +42 -0
- data/spec/filters/xml.rb +157 -0
- data/spec/inputs/file.rb +107 -0
- data/spec/inputs/gelf.rb +52 -0
- data/spec/inputs/generator.rb +30 -0
- data/spec/inputs/imap.rb +60 -0
- data/spec/inputs/redis.rb +63 -0
- data/spec/inputs/relp.rb +70 -0
- data/spec/inputs/tcp.rb +101 -0
- data/spec/jar.rb +21 -0
- data/spec/outputs/csv.rb +266 -0
- data/spec/outputs/elasticsearch.rb +161 -0
- data/spec/outputs/elasticsearch_http.rb +240 -0
- data/spec/outputs/email.rb +173 -0
- data/spec/outputs/file.rb +82 -0
- data/spec/outputs/graphite.rb +236 -0
- data/spec/outputs/redis.rb +127 -0
- data/spec/speed.rb +20 -0
- data/spec/sqlite-test.rb +81 -0
- data/spec/support/LOGSTASH-733.rb +21 -0
- data/spec/support/LOGSTASH-820.rb +25 -0
- data/spec/support/akamai-grok.rb +26 -0
- data/spec/support/date-http.rb +17 -0
- data/spec/support/postwait1.rb +26 -0
- data/spec/support/pull375.rb +21 -0
- data/spec/test_utils.rb +125 -0
- data/spec/util/fieldeval_spec.rb +44 -0
- data/test/jenkins/config.xml.erb +74 -0
- data/test/jenkins/create-jobs.rb +23 -0
- data/test/jenkins/generatorjob.config.xml +66 -0
- data/tools/Gemfile +14 -0
- data/tools/Gemfile.jruby-1.9.lock +322 -0
- data/tools/Gemfile.rbx-2.1.lock +516 -0
- data/tools/Gemfile.ruby-1.9.1.lock +310 -0
- data/tools/Gemfile.ruby-2.0.0.lock +310 -0
- metadata +629 -0
data/lib/logstash/outputs/elasticsearch/elasticsearch-template.json
@@ -0,0 +1,44 @@
+{
+  "template" : "logstash-*",
+  "settings" : {
+    "index.refresh_interval" : "5s",
+    "analysis" : {
+      "analyzer" : {
+        "default" : {
+          "type" : "standard",
+          "stopwords" : "_none_"
+        }
+      }
+    }
+  },
+  "mappings" : {
+    "_default_" : {
+      "_all" : {"enabled" : true},
+      "dynamic_templates" : [ {
+        "string_fields" : {
+          "match" : "*",
+          "match_mapping_type" : "string",
+          "mapping" : {
+            "type" : "multi_field",
+            "fields" : {
+              "{name}" : {"type": "string", "index" : "analyzed", "omit_norms" : true },
+              "raw" : {"type": "string", "index" : "not_analyzed", "ignore_above" : 256}
+            }
+          }
+        }
+      } ],
+      "properties" : {
+        "@version": { "type": "string", "index": "not_analyzed" },
+        "geoip" : {
+          "type" : "object",
+          "dynamic": true,
+          "path": "full",
+          "properties" : {
+            "location" : { "type" : "geo_point" }
+          }
+        }
+      }
+    }
+  }
+}
+
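This is the mapping template that the `elasticsearch_http` output (next diff) installs automatically when `manage_template` is enabled: every string field gets an analyzed `{name}` field plus a `not_analyzed` `raw` sub-field, `@version` stays unanalyzed, and `geoip.location` is mapped as a `geo_point`. For reference only, a hedged sketch of installing the same template by hand through Elasticsearch's template API; the host, port, and template name here are assumptions based on the plugin defaults, not part of this diff:

    # Hypothetical manual install of the bundled template; normally the plugin
    # does this for you when manage_template => true.
    curl -XPUT 'http://localhost:9200/_template/logstash' \
         --data-binary @lib/logstash/outputs/elasticsearch/elasticsearch-template.json

    # Inspect whatever template is currently installed under that name.
    curl -XGET 'http://localhost:9200/_template/logstash?pretty'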
data/lib/logstash/outputs/elasticsearch_http.rb
@@ -0,0 +1,256 @@
+# encoding: utf-8
+require "logstash/namespace"
+require "logstash/outputs/base"
+require "stud/buffer"
+
+# This output lets you store logs in elasticsearch.
+#
+# This plugin uses the HTTP/REST interface to ElasticSearch, which usually
+# lets you use any version of elasticsearch server. It is known to work
+# with elasticsearch %ELASTICSEARCH_VERSION%
+#
+# You can learn more about elasticsearch at <http://elasticsearch.org>
+class LogStash::Outputs::ElasticSearchHTTP < LogStash::Outputs::Base
+  include Stud::Buffer
+
+  config_name "elasticsearch_http"
+  milestone 2
+
+  # The index to write events to. This can be dynamic using the %{foo} syntax.
+  # The default value will partition your indices by day so you can more easily
+  # delete old data or only search specific date ranges.
+  config :index, :validate => :string, :default => "logstash-%{+YYYY.MM.dd}"
+
+  # The index type to write events to. Generally you should try to write only
+  # similar events to the same 'type'. String expansion '%{foo}' works here.
+  config :index_type, :validate => :string
+
+  # Starting in Logstash 1.3 (unless you set option "manage_template" to false)
+  # a default mapping template for Elasticsearch will be applied, if you do not
+  # already have one set to match the index pattern defined (default of
+  # "logstash-%{+YYYY.MM.dd}"), minus any variables. For example, in this case
+  # the template will be applied to all indices starting with logstash-*
+  #
+  # If you have dynamic templating (e.g. creating indices based on field names)
+  # then you should set "manage_template" to false and use the REST API to upload
+  # your templates manually.
+  config :manage_template, :validate => :boolean, :default => true
+
+  # This configuration option defines how the template is named inside Elasticsearch.
+  # Note that if you have used the template management features and subsequently
+  # change this, you will need to prune the old template manually, e.g.
+  # curl -XDELETE <http://localhost:9200/_template/OLD_template_name?pretty>
+  # where OLD_template_name is whatever the former setting was.
+  config :template_name, :validate => :string, :default => "logstash"
+
+  # You can set the path to your own template here, if you so desire.
+  # If not, the included template will be used.
+  config :template, :validate => :path
+
+  # Overwrite the current template with whatever is configured
+  # in the template and template_name directives.
+  config :template_overwrite, :validate => :boolean, :default => false
+
+  # The hostname or ip address to reach your elasticsearch server.
+  config :host, :validate => :string, :required => true
+
+  # The port for ElasticSearch HTTP interface to use.
+  config :port, :validate => :number, :default => 9200
+
+  # The HTTP Basic Auth username used to access your elasticsearch server.
+  config :user, :validate => :string, :default => nil
+
+  # The HTTP Basic Auth password used to access your elasticsearch server.
+  config :password, :validate => :password, :default => nil
+
+  # This plugin uses the bulk index api for improved indexing performance.
+  # To make efficient bulk api calls, we will buffer a certain number of
+  # events before flushing that out to elasticsearch. This setting
+  # controls how many events will be buffered before sending a batch
+  # of events.
+  config :flush_size, :validate => :number, :default => 100
+
+  # The amount of time since last flush before a flush is forced.
+  #
+  # This setting helps ensure slow event rates don't get stuck in logstash.
+  # For example, if your `flush_size` is 100, and you have received 10 events,
+  # and it has been more than `idle_flush_time` seconds since the last flush,
+  # logstash will flush those 10 events automatically.
+  #
+  # This helps keep both fast and slow log streams moving along in
+  # near-real-time.
+  config :idle_flush_time, :validate => :number, :default => 1
+
+  # The document ID for the index. Useful for overwriting existing entries in
+  # elasticsearch with the same ID.
+  config :document_id, :validate => :string, :default => nil
+
+  # Set the type of elasticsearch replication to use. If async,
+  # the index request to elasticsearch will return after the primary
+  # shards have been written. If sync (default), index requests
+  # will wait until the primary and the replica shards have been
+  # written.
+  config :replication, :validate => ['async', 'sync'], :default => 'sync'
+
+  public
+  def register
+    require "ftw" # gem ftw
+    @agent = FTW::Agent.new
+    @queue = []
+
+    auth = @user && @password ? "#{@user}:#{@password.value}@" : ""
+    @bulk_url = "http://#{auth}#{@host}:#{@port}/_bulk?replication=#{@replication}"
+    if @manage_template
+      @logger.info("Automatic template management enabled", :manage_template => @manage_template.to_s)
+      template_search_url = "http://#{auth}#{@host}:#{@port}/_template/*"
+      @template_url = "http://#{auth}#{@host}:#{@port}/_template/#{@template_name}"
+      if @template_overwrite
+        @logger.info("Template overwrite enabled. Deleting existing template.", :template_overwrite => @template_overwrite.to_s)
+        response = @agent.get!(@template_url)
+        template_action('delete') if response.status == 200 #=> Purge the old template if it exists
+      end
+      @logger.debug("Template Search URL:", :template_search_url => template_search_url)
+      has_template = false
+      template_idx_name = @index.sub(/%{[^}]+}/,'*')
+      alt_template_idx_name = @index.sub(/-%{[^}]+}/,'*')
+      # Get the template data
+      response = @agent.get!(template_search_url)
+      json = ""
+      if response.status == 404 #=> This condition can occur when no template has ever been appended
+        @logger.info("No template found in Elasticsearch...")
+        get_template_json
+        template_action('put')
+      elsif response.status == 200
+        begin
+          response.read_body { |c| json << c }
+          results = JSON.parse(json)
+        rescue Exception => e
+          @logger.error("Error parsing JSON", :json => json, :results => results.to_s, :error => e.to_s)
+          raise "Exception in parsing JSON", e
+        end
+        if !results.any? { |k,v| v["template"] == template_idx_name || v["template"] == alt_template_idx_name }
+          @logger.debug("No template found in Elasticsearch", :has_template => has_template, :name => template_idx_name, :alt => alt_template_idx_name)
+          get_template_json
+          template_action('put')
+        end
+      else #=> Some other status code?
+        @logger.error("Could not check for existing template. Check status code.", :status => response.status.to_s)
+      end # end if response.status == 200
+    end # end if @manage_template
+    buffer_initialize(
+      :max_items => @flush_size,
+      :max_interval => @idle_flush_time,
+      :logger => @logger
+    )
+  end # def register
+
+  public
+  def template_action(command)
+    begin
+      if command == 'delete'
+        response = @agent.delete!(@template_url)
+        response.discard_body
+      elsif command == 'put'
+        response = @agent.put!(@template_url, :body => @template_json)
+        response.discard_body
+      end
+    rescue EOFError
+      @logger.warn("EOF while attempting request or reading response header from elasticsearch",
+                   :host => @host, :port => @port)
+      return # abort this action
+    end
+    if response.status != 200
+      @logger.error("Error acting on elasticsearch mapping template",
+                    :response => response, :action => command,
+                    :request_url => @template_url)
+      return
+    end
+    @logger.info("Successfully deleted template", :template_url => @template_url) if command == 'delete'
+    @logger.info("Successfully applied template", :template_url => @template_url) if command == 'put'
+  end # def template_action
+
+
+  public
+  def get_template_json
+    if @template.nil?
+      if __FILE__ =~ /^(jar:)?file:\/.+!.+/
+        begin
+          # Running from a jar; the bundled elasticsearch-template.json is at the jar root.
+          jar_path = [__FILE__.split("!").first, "/elasticsearch-template.json"].join("!")
+          @template = jar_path
+        rescue => ex
+          raise "Failed to cache, due to: #{ex}\n#{ex.backtrace}"
+        end
+      else
+        if File.exists?("elasticsearch-template.json")
+          @template = "elasticsearch-template.json"
+        elsif File.exists?("lib/logstash/outputs/elasticsearch/elasticsearch-template.json")
+          @template = "lib/logstash/outputs/elasticsearch/elasticsearch-template.json"
+        else
+          raise "You must specify 'template => ...' in your elasticsearch_http output"
+        end
+      end
+    end
+    @template_json = IO.read(@template).gsub(/\n/,'')
+    @logger.info("Using mapping template", :template => @template_json)
+  end # def get_template_json
+
+  public
+  def receive(event)
+    return unless output?(event)
+    buffer_receive([event, index, type])
+  end # def receive
+
+  def flush(events, teardown=false)
+    # Avoid creating a new string for newline every time
+    newline = "\n".freeze
+
+    body = events.collect do |event, index, type|
+      index = event.sprintf(@index)
+
+      # Set the 'type' value for the index.
+      if @index_type.nil?
+        type = event["type"] || "logs"
+      else
+        type = event.sprintf(@index_type)
+      end
+      header = { "index" => { "_index" => index, "_type" => type } }
+      header["index"]["_id"] = event.sprintf(@document_id) if !@document_id.nil?
+
+      [ header.to_json, newline, event.to_json, newline ]
+    end.flatten
+    post(body.join(""))
+  end # def flush
+
+  def post(body)
+    begin
+      response = @agent.post!(@bulk_url, :body => body)
+    rescue EOFError
+      @logger.warn("EOF while writing request or reading response header from elasticsearch",
+                   :host => @host, :port => @port)
+      return # abort this flush
+    end
+
+    # Consume the body for error checking
+    # This will also free up the connection for reuse.
+    body = ""
+    begin
+      response.read_body { |chunk| body += chunk }
+    rescue EOFError
+      @logger.warn("EOF while reading response body from elasticsearch",
+                   :host => @host, :port => @port)
+      return # abort this flush
+    end
+
+    if response.status != 200
+      @logger.error("Error writing (bulk) to elasticsearch",
+                    :response => response, :response_body => body,
+                    :request_body => @queue.join("\n"))
+      return
+    end
+  end # def post
+
+  def teardown
+    buffer_flush(:final => true)
+  end # def teardown
+end # class LogStash::Outputs::ElasticSearchHTTP
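Only `host` is required; every other option in this plugin has a default. A minimal sketch of the corresponding pipeline output block, assuming a local Elasticsearch node (the host value is an assumption; the other values shown are simply the defaults documented in the comments above):

    output {
      elasticsearch_http {
        host => "localhost"                   # required: your Elasticsearch server
        port => 9200                          # default HTTP port
        index => "logstash-%{+YYYY.MM.dd}"    # default: one index per day
        flush_size => 100                     # events buffered per bulk request
        idle_flush_time => 1                  # seconds before a partial batch is flushed
      }
    }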
data/lib/logstash/outputs/elasticsearch_river.rb
@@ -0,0 +1,214 @@
+# encoding: utf-8
+require "logstash/namespace"
+require "logstash/outputs/base"
+require "json"
+require "uri"
+require "net/http"
+
+# This output lets you store logs in elasticsearch. It's similar to the
+# 'elasticsearch' output but improves performance by using a queue server,
+# rabbitmq, to send data to elasticsearch.
+#
+# Upon startup, this output will automatically contact an elasticsearch cluster
+# and configure it to read from the queue to which we write.
+#
+# You can learn more about elasticsearch at <http://elasticsearch.org>
+# More about the elasticsearch rabbitmq river plugin: <https://github.com/elasticsearch/elasticsearch-river-rabbitmq/blob/master/README.md>
+
+class LogStash::Outputs::ElasticSearchRiver < LogStash::Outputs::Base
+
+  config_name "elasticsearch_river"
+  milestone 2
+
+  config :debug, :validate => :boolean, :default => false
+
+  # The index to write events to. This can be dynamic using the %{foo} syntax.
+  # The default value will partition your indices by day so you can more easily
+  # delete old data or only search specific date ranges.
+  config :index, :validate => :string, :default => "logstash-%{+YYYY.MM.dd}"
+
+  # The index type to write events to. Generally you should try to write only
+  # similar events to the same 'type'. String expansion '%{foo}' works here.
+  config :index_type, :validate => :string, :default => "%{type}"
+
+  # The name/address of an ElasticSearch host to use for river creation
+  config :es_host, :validate => :string, :required => true
+
+  # ElasticSearch API port
+  config :es_port, :validate => :number, :default => 9200
+
+  # ElasticSearch river configuration: bulk fetch size
+  config :es_bulk_size, :validate => :number, :default => 1000
+
+  # ElasticSearch river configuration: bulk timeout in milliseconds
+  config :es_bulk_timeout_ms, :validate => :number, :default => 100
+
+  # ElasticSearch river configuration: is ordered?
+  config :es_ordered, :validate => :boolean, :default => false
+
+  # Hostname of RabbitMQ server
+  config :rabbitmq_host, :validate => :string, :required => true
+
+  # Port of RabbitMQ server
+  config :rabbitmq_port, :validate => :number, :default => 5672
+
+  # RabbitMQ user
+  config :user, :validate => :string, :default => "guest"
+
+  # RabbitMQ password
+  config :password, :validate => :string, :default => "guest"
+
+  # RabbitMQ vhost
+  config :vhost, :validate => :string, :default => "/"
+
+  # RabbitMQ queue name
+  config :queue, :validate => :string, :default => "elasticsearch"
+
+  # RabbitMQ exchange name
+  config :exchange, :validate => :string, :default => "elasticsearch"
+
+  # The exchange type (fanout, topic, direct)
+  config :exchange_type, :validate => [ "fanout", "direct", "topic"],
+         :default => "direct"
+
+  # RabbitMQ routing key
+  config :key, :validate => :string, :default => "elasticsearch"
+
+  # RabbitMQ durability setting. Also used for ElasticSearch setting
+  config :durable, :validate => :boolean, :default => true
+
+  # RabbitMQ persistence setting
+  config :persistent, :validate => :boolean, :default => true
+
+  # The document ID for the index. Useful for overwriting existing entries in
+  # elasticsearch with the same ID.
+  config :document_id, :validate => :string, :default => nil
+
+  public
+  def register
+
+    # TODO(sissel): find a better way of declaring where the elasticsearch
+    # libraries are
+    # TODO(sissel): can skip this step if we're running from a jar.
+    jarpath = File.join(File.dirname(__FILE__), "../../../vendor/**/*.jar")
+    Dir[jarpath].each do |jar|
+      require jar
+    end
+    prepare_river
+  end
+
+  protected
+  def prepare_river
+    require "logstash/outputs/rabbitmq"
+
+    # Configure the message plugin
+    params = {
+      "host" => [@rabbitmq_host],
+      "port" => [@rabbitmq_port],
+      "user" => [@user],
+      "password" => [@password],
+      "exchange_type" => [@exchange_type],
+      "exchange" => [@exchange],
+      "key" => [@key],
+      "vhost" => [@vhost],
+      "durable" => [@durable.to_s],
+      "persistent" => [@persistent.to_s],
+      "debug" => [@debug.to_s],
+    }.reject {|k,v| v.first.nil?}
+    @mq = LogStash::Outputs::RabbitMQ.new(params)
+    @mq.register
+
+    # Set up the river
+    begin
+      auth = "#{@user}:#{@password}"
+
+      # Name the river by our hostname
+      require "socket"
+      hostname = Socket.gethostname
+
+      # Replace spaces with hyphens and remove all non-alpha non-dash non-underscore characters
+      river_name = "#{hostname} #{@queue}".gsub(' ', '-').gsub(/[^\w-]/, '')
+
+      api_path = "/_river/logstash-#{river_name}/_meta"
+      @status_path = "/_river/logstash-#{river_name}/_status"
+
+      river_config = {"type" => "rabbitmq",
+                      "rabbitmq" => {
+                        "host" => @rabbitmq_host=="localhost" ? hostname : @rabbitmq_host,
+                        "port" => @rabbitmq_port,
+                        "user" => @user,
+                        "pass" => @password,
+                        "vhost" => @vhost,
+                        "queue" => @queue,
+                        "exchange" => @exchange,
+                        "routing_key" => @key,
+                        "exchange_type" => @exchange_type,
+                        "exchange_durable" => @durable.to_s,
+                        "queue_durable" => @durable.to_s
+                      },
+                      "index" => {"bulk_size" => @es_bulk_size,
+                                  "bulk_timeout" => "#{@es_bulk_timeout_ms}ms",
+                                  "ordered" => @es_ordered
+                                 },
+                     }
+      @logger.info("ElasticSearch using river", :config => river_config)
+      Net::HTTP.start(@es_host, @es_port) do |http|
+        req = Net::HTTP::Put.new(api_path)
+        req.body = river_config.to_json
+        response = http.request(req)
+        response.value() # raise an exception if error
+        @logger.info("River created: #{response.body}")
+      end
+    rescue Exception => e
+      # TODO(petef): should we just throw an exception here, so the
+      # agent tries to restart us and we in turn retry the river
+      # registration?
+      @logger.warn("Couldn't set up river. You'll have to set it up manually (or restart)", :exception => e)
+    end
+
+    check_river_status
+  end # def prepare_river
+
+  private
+  def check_river_status
+    tries = 0
+    success = false
+    reason = nil
+    begin
+      while !success && tries <= 3 do
+        tries += 1
+        Net::HTTP.start(@es_host, @es_port) do |http|
+          req = Net::HTTP::Get.new(@status_path)
+          response = http.request(req)
+          response.value
+          status = JSON.parse(response.body)
+          @logger.debug("Checking ES river status", :status => status)
+          if status["_source"]["error"]
+            reason = "ES river status: #{status["_source"]["error"]}"
+          else
+            success = true
+          end
+        end
+        sleep(2)
+      end
+    rescue Exception => e
+      raise "river is not running, checking status failed: #{$!}"
+    end
+
+    raise "river is not running: #{reason}" unless success
+  end # def check_river_status
+
+  public
+  def receive(event)
+    return unless output?(event)
+    # River events have a format of
+    # "action\ndata\n"
+    # where 'action' is index or delete, data is the data to index.
+    header = { "index" => { "_index" => event.sprintf(@index), "_type" => event.sprintf(@index_type) } }
+    if !@document_id.nil?
+      header["index"]["_id"] = event.sprintf(@document_id)
+    end
+
+    @mq.publish_serialized(header.to_json + "\n" + event.to_json + "\n")
+  end # def receive
+end # LogStash::Outputs::ElasticSearchRiver
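The river output needs both an Elasticsearch node (where it registers the river) and a RabbitMQ broker (which the river consumes from). A minimal sketch of a pipeline using it, assuming both services run locally; the two host values are assumptions, and the remaining values shown are the defaults documented in the comments above:

    output {
      elasticsearch_river {
        es_host => "localhost"          # required: node used to register the river
        rabbitmq_host => "localhost"    # required: broker the river reads from
        queue => "elasticsearch"        # default queue, exchange, and routing key
        exchange => "elasticsearch"
        key => "elasticsearch"
        es_bulk_size => 1000            # river-side bulk fetch size
      }
    }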