logstash-lib 1.3.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/.gitignore +24 -0
- data/.tailor +8 -0
- data/.travis.yml +12 -0
- data/CHANGELOG +1185 -0
- data/CONTRIBUTING.md +61 -0
- data/CONTRIBUTORS +79 -0
- data/LICENSE +14 -0
- data/Makefile +460 -0
- data/README.md +120 -0
- data/STYLE.md +96 -0
- data/bin/logstash +37 -0
- data/bin/logstash-test +4 -0
- data/bin/logstash-web +4 -0
- data/bin/logstash.lib.sh +78 -0
- data/bot/check_pull_changelog.rb +89 -0
- data/docs/configuration.md +260 -0
- data/docs/docgen.rb +242 -0
- data/docs/extending/example-add-a-new-filter.md +121 -0
- data/docs/extending/index.md +91 -0
- data/docs/flags.md +43 -0
- data/docs/generate_index.rb +28 -0
- data/docs/index.html.erb +56 -0
- data/docs/learn.md +46 -0
- data/docs/life-of-an-event.md +109 -0
- data/docs/logging-tool-comparisons.md +60 -0
- data/docs/plugin-doc.html.erb +91 -0
- data/docs/plugin-milestones.md +41 -0
- data/docs/plugin-synopsis.html.erb +24 -0
- data/docs/release-engineering.md +46 -0
- data/docs/release-test-results.md +14 -0
- data/docs/repositories.md +35 -0
- data/docs/tutorials/10-minute-walkthrough/apache-elasticsearch.conf +35 -0
- data/docs/tutorials/10-minute-walkthrough/apache-parse.conf +33 -0
- data/docs/tutorials/10-minute-walkthrough/apache_log.1 +1 -0
- data/docs/tutorials/10-minute-walkthrough/apache_log.2.bz2 +0 -0
- data/docs/tutorials/10-minute-walkthrough/hello-search.conf +25 -0
- data/docs/tutorials/10-minute-walkthrough/hello.conf +16 -0
- data/docs/tutorials/10-minute-walkthrough/index.md +124 -0
- data/docs/tutorials/10-minute-walkthrough/step-5-output.txt +17 -0
- data/docs/tutorials/getting-started-centralized-overview-diagram.png +0 -0
- data/docs/tutorials/getting-started-centralized-overview-diagram.xml +1 -0
- data/docs/tutorials/getting-started-centralized.md +217 -0
- data/docs/tutorials/getting-started-simple.md +200 -0
- data/docs/tutorials/just-enough-rabbitmq-for-logstash.md +201 -0
- data/docs/tutorials/media/frontend-response-codes.png +0 -0
- data/docs/tutorials/metrics-from-logs.md +84 -0
- data/docs/tutorials/zeromq.md +118 -0
- data/extract_services.rb +29 -0
- data/gembag.rb +64 -0
- data/lib/logstash-event.rb +2 -0
- data/lib/logstash.rb +4 -0
- data/lib/logstash/JRUBY-6970-openssl.rb +22 -0
- data/lib/logstash/JRUBY-6970.rb +102 -0
- data/lib/logstash/agent.rb +305 -0
- data/lib/logstash/certs/cacert.pem +3895 -0
- data/lib/logstash/codecs/base.rb +49 -0
- data/lib/logstash/codecs/compress_spooler.rb +50 -0
- data/lib/logstash/codecs/dots.rb +18 -0
- data/lib/logstash/codecs/edn.rb +28 -0
- data/lib/logstash/codecs/edn_lines.rb +36 -0
- data/lib/logstash/codecs/fluent.rb +55 -0
- data/lib/logstash/codecs/graphite.rb +114 -0
- data/lib/logstash/codecs/json.rb +41 -0
- data/lib/logstash/codecs/json_lines.rb +52 -0
- data/lib/logstash/codecs/json_spooler.rb +22 -0
- data/lib/logstash/codecs/line.rb +58 -0
- data/lib/logstash/codecs/msgpack.rb +43 -0
- data/lib/logstash/codecs/multiline.rb +189 -0
- data/lib/logstash/codecs/netflow.rb +342 -0
- data/lib/logstash/codecs/netflow/util.rb +212 -0
- data/lib/logstash/codecs/noop.rb +19 -0
- data/lib/logstash/codecs/oldlogstashjson.rb +56 -0
- data/lib/logstash/codecs/plain.rb +48 -0
- data/lib/logstash/codecs/rubydebug.rb +22 -0
- data/lib/logstash/codecs/spool.rb +38 -0
- data/lib/logstash/config/Makefile +4 -0
- data/lib/logstash/config/config_ast.rb +380 -0
- data/lib/logstash/config/file.rb +39 -0
- data/lib/logstash/config/grammar.rb +3504 -0
- data/lib/logstash/config/grammar.treetop +241 -0
- data/lib/logstash/config/mixin.rb +464 -0
- data/lib/logstash/config/registry.rb +13 -0
- data/lib/logstash/config/test.conf +18 -0
- data/lib/logstash/errors.rb +10 -0
- data/lib/logstash/event.rb +262 -0
- data/lib/logstash/filters/advisor.rb +178 -0
- data/lib/logstash/filters/alter.rb +173 -0
- data/lib/logstash/filters/anonymize.rb +93 -0
- data/lib/logstash/filters/base.rb +190 -0
- data/lib/logstash/filters/checksum.rb +50 -0
- data/lib/logstash/filters/cidr.rb +76 -0
- data/lib/logstash/filters/cipher.rb +145 -0
- data/lib/logstash/filters/clone.rb +35 -0
- data/lib/logstash/filters/collate.rb +114 -0
- data/lib/logstash/filters/csv.rb +94 -0
- data/lib/logstash/filters/date.rb +244 -0
- data/lib/logstash/filters/dns.rb +201 -0
- data/lib/logstash/filters/drop.rb +32 -0
- data/lib/logstash/filters/elapsed.rb +256 -0
- data/lib/logstash/filters/elasticsearch.rb +73 -0
- data/lib/logstash/filters/environment.rb +27 -0
- data/lib/logstash/filters/extractnumbers.rb +84 -0
- data/lib/logstash/filters/gelfify.rb +52 -0
- data/lib/logstash/filters/geoip.rb +145 -0
- data/lib/logstash/filters/grep.rb +153 -0
- data/lib/logstash/filters/grok.rb +425 -0
- data/lib/logstash/filters/grokdiscovery.rb +75 -0
- data/lib/logstash/filters/i18n.rb +51 -0
- data/lib/logstash/filters/json.rb +90 -0
- data/lib/logstash/filters/json_encode.rb +52 -0
- data/lib/logstash/filters/kv.rb +232 -0
- data/lib/logstash/filters/metaevent.rb +68 -0
- data/lib/logstash/filters/metrics.rb +237 -0
- data/lib/logstash/filters/multiline.rb +241 -0
- data/lib/logstash/filters/mutate.rb +399 -0
- data/lib/logstash/filters/noop.rb +21 -0
- data/lib/logstash/filters/prune.rb +149 -0
- data/lib/logstash/filters/punct.rb +32 -0
- data/lib/logstash/filters/railsparallelrequest.rb +86 -0
- data/lib/logstash/filters/range.rb +142 -0
- data/lib/logstash/filters/ruby.rb +42 -0
- data/lib/logstash/filters/sleep.rb +111 -0
- data/lib/logstash/filters/split.rb +64 -0
- data/lib/logstash/filters/sumnumbers.rb +73 -0
- data/lib/logstash/filters/syslog_pri.rb +107 -0
- data/lib/logstash/filters/translate.rb +121 -0
- data/lib/logstash/filters/unique.rb +29 -0
- data/lib/logstash/filters/urldecode.rb +57 -0
- data/lib/logstash/filters/useragent.rb +112 -0
- data/lib/logstash/filters/uuid.rb +58 -0
- data/lib/logstash/filters/xml.rb +139 -0
- data/lib/logstash/filters/zeromq.rb +123 -0
- data/lib/logstash/filterworker.rb +122 -0
- data/lib/logstash/inputs/base.rb +125 -0
- data/lib/logstash/inputs/collectd.rb +306 -0
- data/lib/logstash/inputs/drupal_dblog.rb +323 -0
- data/lib/logstash/inputs/drupal_dblog/jdbcconnection.rb +66 -0
- data/lib/logstash/inputs/elasticsearch.rb +140 -0
- data/lib/logstash/inputs/eventlog.rb +129 -0
- data/lib/logstash/inputs/eventlog/racob_fix.rb +44 -0
- data/lib/logstash/inputs/exec.rb +69 -0
- data/lib/logstash/inputs/file.rb +146 -0
- data/lib/logstash/inputs/ganglia.rb +127 -0
- data/lib/logstash/inputs/ganglia/gmondpacket.rb +146 -0
- data/lib/logstash/inputs/ganglia/xdr.rb +327 -0
- data/lib/logstash/inputs/gelf.rb +138 -0
- data/lib/logstash/inputs/gemfire.rb +222 -0
- data/lib/logstash/inputs/generator.rb +97 -0
- data/lib/logstash/inputs/graphite.rb +41 -0
- data/lib/logstash/inputs/heroku.rb +51 -0
- data/lib/logstash/inputs/imap.rb +136 -0
- data/lib/logstash/inputs/irc.rb +84 -0
- data/lib/logstash/inputs/log4j.rb +136 -0
- data/lib/logstash/inputs/lumberjack.rb +53 -0
- data/lib/logstash/inputs/pipe.rb +57 -0
- data/lib/logstash/inputs/rabbitmq.rb +126 -0
- data/lib/logstash/inputs/rabbitmq/bunny.rb +118 -0
- data/lib/logstash/inputs/rabbitmq/hot_bunnies.rb +1 -0
- data/lib/logstash/inputs/rabbitmq/march_hare.rb +129 -0
- data/lib/logstash/inputs/redis.rb +263 -0
- data/lib/logstash/inputs/relp.rb +106 -0
- data/lib/logstash/inputs/s3.rb +279 -0
- data/lib/logstash/inputs/snmptrap.rb +87 -0
- data/lib/logstash/inputs/sqlite.rb +185 -0
- data/lib/logstash/inputs/sqs.rb +172 -0
- data/lib/logstash/inputs/stdin.rb +46 -0
- data/lib/logstash/inputs/stomp.rb +84 -0
- data/lib/logstash/inputs/syslog.rb +237 -0
- data/lib/logstash/inputs/tcp.rb +231 -0
- data/lib/logstash/inputs/threadable.rb +18 -0
- data/lib/logstash/inputs/twitter.rb +82 -0
- data/lib/logstash/inputs/udp.rb +81 -0
- data/lib/logstash/inputs/unix.rb +163 -0
- data/lib/logstash/inputs/varnishlog.rb +48 -0
- data/lib/logstash/inputs/websocket.rb +50 -0
- data/lib/logstash/inputs/wmi.rb +72 -0
- data/lib/logstash/inputs/xmpp.rb +81 -0
- data/lib/logstash/inputs/zenoss.rb +143 -0
- data/lib/logstash/inputs/zeromq.rb +165 -0
- data/lib/logstash/kibana.rb +113 -0
- data/lib/logstash/loadlibs.rb +9 -0
- data/lib/logstash/logging.rb +89 -0
- data/lib/logstash/monkeypatches-for-bugs.rb +2 -0
- data/lib/logstash/monkeypatches-for-debugging.rb +47 -0
- data/lib/logstash/monkeypatches-for-performance.rb +66 -0
- data/lib/logstash/multiqueue.rb +53 -0
- data/lib/logstash/namespace.rb +16 -0
- data/lib/logstash/outputs/base.rb +120 -0
- data/lib/logstash/outputs/boundary.rb +116 -0
- data/lib/logstash/outputs/circonus.rb +78 -0
- data/lib/logstash/outputs/cloudwatch.rb +351 -0
- data/lib/logstash/outputs/csv.rb +55 -0
- data/lib/logstash/outputs/datadog.rb +93 -0
- data/lib/logstash/outputs/datadog_metrics.rb +123 -0
- data/lib/logstash/outputs/elasticsearch.rb +332 -0
- data/lib/logstash/outputs/elasticsearch/elasticsearch-template.json +44 -0
- data/lib/logstash/outputs/elasticsearch_http.rb +256 -0
- data/lib/logstash/outputs/elasticsearch_river.rb +214 -0
- data/lib/logstash/outputs/email.rb +299 -0
- data/lib/logstash/outputs/exec.rb +40 -0
- data/lib/logstash/outputs/file.rb +180 -0
- data/lib/logstash/outputs/ganglia.rb +75 -0
- data/lib/logstash/outputs/gelf.rb +208 -0
- data/lib/logstash/outputs/gemfire.rb +103 -0
- data/lib/logstash/outputs/google_bigquery.rb +570 -0
- data/lib/logstash/outputs/google_cloud_storage.rb +431 -0
- data/lib/logstash/outputs/graphite.rb +143 -0
- data/lib/logstash/outputs/graphtastic.rb +185 -0
- data/lib/logstash/outputs/hipchat.rb +80 -0
- data/lib/logstash/outputs/http.rb +142 -0
- data/lib/logstash/outputs/irc.rb +80 -0
- data/lib/logstash/outputs/jira.rb +109 -0
- data/lib/logstash/outputs/juggernaut.rb +105 -0
- data/lib/logstash/outputs/librato.rb +146 -0
- data/lib/logstash/outputs/loggly.rb +93 -0
- data/lib/logstash/outputs/lumberjack.rb +51 -0
- data/lib/logstash/outputs/metriccatcher.rb +103 -0
- data/lib/logstash/outputs/mongodb.rb +81 -0
- data/lib/logstash/outputs/nagios.rb +119 -0
- data/lib/logstash/outputs/nagios_nsca.rb +123 -0
- data/lib/logstash/outputs/null.rb +18 -0
- data/lib/logstash/outputs/opentsdb.rb +101 -0
- data/lib/logstash/outputs/pagerduty.rb +79 -0
- data/lib/logstash/outputs/pipe.rb +132 -0
- data/lib/logstash/outputs/rabbitmq.rb +96 -0
- data/lib/logstash/outputs/rabbitmq/bunny.rb +135 -0
- data/lib/logstash/outputs/rabbitmq/hot_bunnies.rb +1 -0
- data/lib/logstash/outputs/rabbitmq/march_hare.rb +143 -0
- data/lib/logstash/outputs/redis.rb +245 -0
- data/lib/logstash/outputs/riak.rb +152 -0
- data/lib/logstash/outputs/riemann.rb +109 -0
- data/lib/logstash/outputs/s3.rb +356 -0
- data/lib/logstash/outputs/sns.rb +124 -0
- data/lib/logstash/outputs/solr_http.rb +78 -0
- data/lib/logstash/outputs/sqs.rb +141 -0
- data/lib/logstash/outputs/statsd.rb +116 -0
- data/lib/logstash/outputs/stdout.rb +53 -0
- data/lib/logstash/outputs/stomp.rb +67 -0
- data/lib/logstash/outputs/syslog.rb +145 -0
- data/lib/logstash/outputs/tcp.rb +145 -0
- data/lib/logstash/outputs/udp.rb +38 -0
- data/lib/logstash/outputs/websocket.rb +46 -0
- data/lib/logstash/outputs/websocket/app.rb +29 -0
- data/lib/logstash/outputs/websocket/pubsub.rb +45 -0
- data/lib/logstash/outputs/xmpp.rb +78 -0
- data/lib/logstash/outputs/zabbix.rb +108 -0
- data/lib/logstash/outputs/zeromq.rb +125 -0
- data/lib/logstash/pipeline.rb +286 -0
- data/lib/logstash/plugin.rb +150 -0
- data/lib/logstash/plugin_mixins/aws_config.rb +93 -0
- data/lib/logstash/program.rb +15 -0
- data/lib/logstash/runner.rb +238 -0
- data/lib/logstash/sized_queue.rb +8 -0
- data/lib/logstash/test.rb +183 -0
- data/lib/logstash/threadwatchdog.rb +37 -0
- data/lib/logstash/time_addon.rb +33 -0
- data/lib/logstash/util.rb +106 -0
- data/lib/logstash/util/buftok.rb +139 -0
- data/lib/logstash/util/charset.rb +39 -0
- data/lib/logstash/util/fieldreference.rb +50 -0
- data/lib/logstash/util/password.rb +25 -0
- data/lib/logstash/util/prctl.rb +11 -0
- data/lib/logstash/util/relp.rb +326 -0
- data/lib/logstash/util/require-helper.rb +18 -0
- data/lib/logstash/util/socket_peer.rb +7 -0
- data/lib/logstash/util/zenoss.rb +566 -0
- data/lib/logstash/util/zeromq.rb +47 -0
- data/lib/logstash/version.rb +6 -0
- data/locales/en.yml +170 -0
- data/logstash-event.gemspec +29 -0
- data/logstash.gemspec +128 -0
- data/patterns/firewalls +60 -0
- data/patterns/grok-patterns +91 -0
- data/patterns/haproxy +37 -0
- data/patterns/java +3 -0
- data/patterns/linux-syslog +14 -0
- data/patterns/mcollective +1 -0
- data/patterns/mcollective-patterns +4 -0
- data/patterns/nagios +108 -0
- data/patterns/postgresql +3 -0
- data/patterns/redis +3 -0
- data/patterns/ruby +2 -0
- data/pkg/build.sh +135 -0
- data/pkg/centos/after-install.sh +1 -0
- data/pkg/centos/before-install.sh +10 -0
- data/pkg/centos/before-remove.sh +11 -0
- data/pkg/centos/sysconfig +15 -0
- data/pkg/debian/after-install.sh +5 -0
- data/pkg/debian/before-install.sh +13 -0
- data/pkg/debian/before-remove.sh +13 -0
- data/pkg/debian/build.sh +34 -0
- data/pkg/debian/debian/README +6 -0
- data/pkg/debian/debian/changelog +17 -0
- data/pkg/debian/debian/compat +1 -0
- data/pkg/debian/debian/control +16 -0
- data/pkg/debian/debian/copyright +27 -0
- data/pkg/debian/debian/dirs +19 -0
- data/pkg/debian/debian/docs +0 -0
- data/pkg/debian/debian/logstash.default +39 -0
- data/pkg/debian/debian/logstash.init +201 -0
- data/pkg/debian/debian/logstash.install +1 -0
- data/pkg/debian/debian/logstash.logrotate +9 -0
- data/pkg/debian/debian/logstash.postinst +68 -0
- data/pkg/debian/debian/logstash.postrm +23 -0
- data/pkg/debian/debian/manpage.1.ex +59 -0
- data/pkg/debian/debian/preinst.ex +37 -0
- data/pkg/debian/debian/prerm.ex +40 -0
- data/pkg/debian/debian/release.conf +5 -0
- data/pkg/debian/debian/rules +80 -0
- data/pkg/debian/debian/watch.ex +22 -0
- data/pkg/logrotate.conf +8 -0
- data/pkg/logstash-web.default +41 -0
- data/pkg/logstash-web.sysv.debian +201 -0
- data/pkg/logstash-web.upstart.ubuntu +18 -0
- data/pkg/logstash.default +45 -0
- data/pkg/logstash.sysv.debian +202 -0
- data/pkg/logstash.sysv.redhat +158 -0
- data/pkg/logstash.upstart.ubuntu +20 -0
- data/pkg/rpm/SOURCES/logstash.conf +26 -0
- data/pkg/rpm/SOURCES/logstash.init +80 -0
- data/pkg/rpm/SOURCES/logstash.logrotate +8 -0
- data/pkg/rpm/SOURCES/logstash.sysconfig +3 -0
- data/pkg/rpm/SOURCES/logstash.wrapper +105 -0
- data/pkg/rpm/SPECS/logstash.spec +180 -0
- data/pkg/rpm/readme.md +4 -0
- data/pkg/ubuntu/after-install.sh +7 -0
- data/pkg/ubuntu/before-install.sh +12 -0
- data/pkg/ubuntu/before-remove.sh +13 -0
- data/pull_release_note.rb +25 -0
- data/require-analyze.rb +22 -0
- data/spec/README.md +14 -0
- data/spec/codecs/edn.rb +40 -0
- data/spec/codecs/edn_lines.rb +53 -0
- data/spec/codecs/graphite.rb +96 -0
- data/spec/codecs/json.rb +57 -0
- data/spec/codecs/json_lines.rb +51 -0
- data/spec/codecs/json_spooler.rb +43 -0
- data/spec/codecs/msgpack.rb +39 -0
- data/spec/codecs/multiline.rb +60 -0
- data/spec/codecs/oldlogstashjson.rb +55 -0
- data/spec/codecs/plain.rb +35 -0
- data/spec/codecs/spool.rb +35 -0
- data/spec/conditionals/test.rb +323 -0
- data/spec/config.rb +31 -0
- data/spec/event.rb +165 -0
- data/spec/examples/fail2ban.rb +28 -0
- data/spec/examples/graphite-input.rb +41 -0
- data/spec/examples/mysql-slow-query.rb +70 -0
- data/spec/examples/parse-apache-logs.rb +66 -0
- data/spec/examples/parse-haproxy-logs.rb +115 -0
- data/spec/examples/syslog.rb +48 -0
- data/spec/filters/alter.rb +96 -0
- data/spec/filters/anonymize.rb +189 -0
- data/spec/filters/checksum.rb +41 -0
- data/spec/filters/clone.rb +67 -0
- data/spec/filters/collate.rb +122 -0
- data/spec/filters/csv.rb +174 -0
- data/spec/filters/date.rb +285 -0
- data/spec/filters/date_performance.rb +31 -0
- data/spec/filters/dns.rb +159 -0
- data/spec/filters/drop.rb +19 -0
- data/spec/filters/elapsed.rb +294 -0
- data/spec/filters/environment.rb +43 -0
- data/spec/filters/geoip.rb +62 -0
- data/spec/filters/grep.rb +342 -0
- data/spec/filters/grok.rb +473 -0
- data/spec/filters/grok/timeout2.rb +56 -0
- data/spec/filters/grok/timeouts.rb +39 -0
- data/spec/filters/i18n.rb +25 -0
- data/spec/filters/json.rb +72 -0
- data/spec/filters/json_encode.rb +37 -0
- data/spec/filters/kv.rb +403 -0
- data/spec/filters/metrics.rb +212 -0
- data/spec/filters/multiline.rb +119 -0
- data/spec/filters/mutate.rb +180 -0
- data/spec/filters/noop.rb +221 -0
- data/spec/filters/prune.rb +441 -0
- data/spec/filters/punct.rb +18 -0
- data/spec/filters/railsparallelrequest.rb +112 -0
- data/spec/filters/range.rb +169 -0
- data/spec/filters/split.rb +58 -0
- data/spec/filters/translate.rb +70 -0
- data/spec/filters/unique.rb +25 -0
- data/spec/filters/useragent.rb +42 -0
- data/spec/filters/xml.rb +157 -0
- data/spec/inputs/file.rb +107 -0
- data/spec/inputs/gelf.rb +52 -0
- data/spec/inputs/generator.rb +30 -0
- data/spec/inputs/imap.rb +60 -0
- data/spec/inputs/redis.rb +63 -0
- data/spec/inputs/relp.rb +70 -0
- data/spec/inputs/tcp.rb +101 -0
- data/spec/jar.rb +21 -0
- data/spec/outputs/csv.rb +266 -0
- data/spec/outputs/elasticsearch.rb +161 -0
- data/spec/outputs/elasticsearch_http.rb +240 -0
- data/spec/outputs/email.rb +173 -0
- data/spec/outputs/file.rb +82 -0
- data/spec/outputs/graphite.rb +236 -0
- data/spec/outputs/redis.rb +127 -0
- data/spec/speed.rb +20 -0
- data/spec/sqlite-test.rb +81 -0
- data/spec/support/LOGSTASH-733.rb +21 -0
- data/spec/support/LOGSTASH-820.rb +25 -0
- data/spec/support/akamai-grok.rb +26 -0
- data/spec/support/date-http.rb +17 -0
- data/spec/support/postwait1.rb +26 -0
- data/spec/support/pull375.rb +21 -0
- data/spec/test_utils.rb +125 -0
- data/spec/util/fieldeval_spec.rb +44 -0
- data/test/jenkins/config.xml.erb +74 -0
- data/test/jenkins/create-jobs.rb +23 -0
- data/test/jenkins/generatorjob.config.xml +66 -0
- data/tools/Gemfile +14 -0
- data/tools/Gemfile.jruby-1.9.lock +322 -0
- data/tools/Gemfile.rbx-2.1.lock +516 -0
- data/tools/Gemfile.ruby-1.9.1.lock +310 -0
- data/tools/Gemfile.ruby-2.0.0.lock +310 -0
- metadata +629 -0
|
require "csv"
require "logstash/namespace"
require "logstash/outputs/file"

# CSV output.
#
# Write events to disk in CSV or other delimited format.
# Based on the file output; many config values are shared.
# Uses the Ruby csv library internally.
class LogStash::Outputs::CSV < LogStash::Outputs::File

  config_name "csv"
  milestone 1

  # The field names from the event that should be written to the CSV file.
  # Fields are written to the CSV in the same order as the array.
  # If a field does not exist on the event, an empty string will be written.
  # Supports field reference syntax eg: `fields => ["field1", "[nested][field]"]`.
  config :fields, :validate => :array, :required => true

  # Options for CSV output. This is passed directly to the Ruby stdlib to_csv function.
  # Full documentation is available here: [http://ruby-doc.org/stdlib-2.0.0/libdoc/csv/rdoc/index.html].
  # A typical use case would be to use alternative column or row separators eg:
  # `csv_options => {"col_sep" => "\t" "row_sep" => "\r\n"}` gives tab separated
  # data with windows line endings.
  config :csv_options, :validate => :hash, :required => false, :default => Hash.new

  public
  def register
    super
    # Symbolize the option keys once up front so the hash can be handed
    # straight to Array#to_csv, which expects symbol keys.
    @csv_options = @csv_options.each_with_object({}) do |(key, value), opts|
      opts[key.to_sym] = value
    end
  end

  public
  def receive(event)
    return unless output?(event)

    # Resolve any %{field} references in the configured path, then reuse the
    # file-output machinery (open / flush / close_stale_files) from the parent.
    file = open(event.sprintf(@path))
    row = @fields.map { |field| get_value(field, event) }
    file.write(row.to_csv(@csv_options))

    flush(file)
    close_stale_files
  end # def receive

  private
  def get_value(name, event)
    value = event[name]
    # Nested structures are serialized as JSON so they fit in a single CSV cell.
    value.is_a?(Hash) ? value.to_json : value
  end
end # class LogStash::Outputs::CSV
# encoding: utf-8
require "logstash/outputs/base"
require "logstash/namespace"

# This output lets you send events (for now; soon metrics) to
# DataDogHQ based on Logstash events.
#
# Note that since Logstash maintains no state,
# these will be one-shot events.
class LogStash::Outputs::Datadog < LogStash::Outputs::Base

  config_name "datadog"
  milestone 1

  # Your DatadogHQ API key
  config :api_key, :validate => :string, :required => true

  # Title
  config :title, :validate => :string, :default => "Logstash event for %{host}"

  # Text
  config :text, :validate => :string, :default => "%{message}"

  # Date Happened
  config :date_happened, :validate => :string

  # Source type name
  config :source_type_name, :validate => ["nagios", "hudson", "jenkins", "user", "my apps", "feed", "chef", "puppet", "git", "bitbucket", "fabric", "capistrano"], :default => "my apps"

  # Alert type
  config :alert_type, :validate => ["info", "error", "warning", "success"]

  # Priority
  config :priority, :validate => ["normal", "low"]

  # Tags
  # Set any custom tags for this event
  # Default are the Logstash tags if any
  config :dd_tags, :validate => :array

  public
  def register
    require "net/https"
    require "uri"
    @url = "https://app.datadoghq.com/api/v1/events"
    @uri = URI.parse(@url)
    @client = Net::HTTP.new(@uri.host, @uri.port)
    @client.use_ssl = true
    # NOTE(review): VERIFY_NONE disables TLS certificate validation; kept for
    # backward compatibility, but VERIFY_PEER should be considered.
    @client.verify_mode = OpenSSL::SSL::VERIFY_NONE
    @logger.debug("Client", :client => @client.inspect)
  end # def register

  public
  # Build one Datadog event from the Logstash event and POST it to the
  # Datadog events API. Failures are logged (best-effort), never raised
  # to the pipeline.
  def receive(event)
    return unless output?(event)

    dd_event = Hash.new
    dd_event['title'] = event.sprintf(@title)
    dd_event['text'] = event.sprintf(@text)
    dd_event['source_type_name'] = @source_type_name
    dd_event['alert_type'] = @alert_type if @alert_type
    dd_event['priority'] = @priority if @priority

    if @date_happened
      dd_event['date_happened'] = event.sprintf(@date_happened)
    else
      # Fall back to the event's own timestamp (epoch seconds).
      dd_event['date_happened'] = event["@timestamp"].to_i
    end

    if @dd_tags
      tagz = @dd_tags.collect {|x| event.sprintf(x) }
    else
      tagz = event["tags"]
    end
    dd_event['tags'] = tagz if tagz

    @logger.debug("DataDog event", :dd_event => dd_event)

    request = Net::HTTP::Post.new("#{@uri.path}?api_key=#{@api_key}")

    begin
      request.body = dd_event.to_json
      request.add_field("Content-Type", 'application/json')
      response = @client.request(request)
      @logger.info("DD convo", :request => request.inspect, :response => response.inspect)
      raise unless response.code == '200'
    rescue StandardError => e
      # BUGFIX: was `rescue Exception`, which also swallows SignalException,
      # SystemExit and NoMemoryError. StandardError keeps the intended
      # best-effort behavior without trapping process-control exceptions.
      @logger.warn("Unhandled exception", :request => request.inspect, :response => response.inspect, :exception => e.inspect)
    end
  end # def receive
end
# encoding: utf-8
require "logstash/outputs/base"
require "logstash/namespace"
require "stud/buffer"

# This output lets you send metrics to
# DataDogHQ based on Logstash events.

# Default queue_size and timeframe are low in order to provide near realtime alerting.
# If you do not use Datadog for alerting, consider raising these thresholds.

class LogStash::Outputs::DatadogMetrics < LogStash::Outputs::Base

  include Stud::Buffer

  config_name "datadog_metrics"
  milestone 1

  # Your DatadogHQ API key. https://app.datadoghq.com/account/settings#api
  config :api_key, :validate => :string, :required => true

  # The name of the time series.
  config :metric_name, :validate => :string, :default => "%{metric_name}"

  # The value.
  config :metric_value, :default => "%{metric_value}"

  # The type of the metric.
  config :metric_type, :validate => ["gauge", "counter", "%{metric_type}"], :default => "%{metric_type}"

  # The name of the host that produced the metric.
  config :host, :validate => :string, :default => "%{host}"

  # The name of the device that produced the metric.
  config :device, :validate => :string, :default => "%{metric_device}"

  # Set any custom tags for this event,
  # default are the Logstash tags if any.
  config :dd_tags, :validate => :array

  # How many events to queue before flushing to Datadog
  # prior to schedule set in @timeframe
  config :queue_size, :validate => :number, :default => 10

  # How often (in seconds) to flush queued events to Datadog
  config :timeframe, :validate => :number, :default => 10

  public
  def register
    require 'time'
    require "net/https"
    require "uri"

    @url = "https://app.datadoghq.com/api/v1/series"
    @uri = URI.parse(@url)
    @client = Net::HTTP.new(@uri.host, @uri.port)
    @client.use_ssl = true
    # NOTE(review): VERIFY_NONE disables TLS certificate validation; kept for
    # backward compatibility, but VERIFY_PEER should be considered.
    @client.verify_mode = OpenSSL::SSL::VERIFY_NONE
    @logger.debug("Client", :client => @client.inspect)
    buffer_initialize(
      :max_items => @queue_size,
      :max_interval => @timeframe,
      :logger => @logger
    )
  end # def register

  public
  # Convert the Logstash event into a Datadog series point and queue it;
  # Stud::Buffer calls #flush when the queue or timeframe threshold is hit.
  def receive(event)
    return unless output?(event)
    return unless @metric_name && @metric_value && @metric_type
    # Only "gauge" and "counter" are valid after %{...} expansion.
    return unless ["gauge", "counter"].include? event.sprintf(@metric_type)

    dd_metrics = Hash.new
    dd_metrics['metric'] = event.sprintf(@metric_name)
    dd_metrics['points'] = [[to_epoch(event.timestamp), event.sprintf(@metric_value).to_f]]
    dd_metrics['type'] = event.sprintf(@metric_type)
    dd_metrics['host'] = event.sprintf(@host)
    dd_metrics['device'] = event.sprintf(@device)

    if @dd_tags
      tagz = @dd_tags.collect {|x| event.sprintf(x) }
    else
      tagz = event["tags"]
    end
    dd_metrics['tags'] = tagz if tagz

    @logger.info("Queueing event", :event => dd_metrics)
    buffer_receive(dd_metrics)
  end # def receive

  public
  # Send all buffered series points to the Datadog series API in one request.
  # Called by Stud::Buffer; failures are logged, never raised.
  def flush(events, final=false)
    dd_series = Hash.new
    dd_series['series'] = []

    events.each do |event|
      begin
        dd_series['series'] << event
      rescue StandardError => e
        # BUGFIX: the original bare `rescue` did not bind the exception
        # (`rescue` without `=> e`) yet referenced `e` in the handler,
        # guaranteeing a NameError whenever this path was taken.
        @logger.warn("Error adding event to series!", :exception => e)
        next
      end
    end

    request = Net::HTTP::Post.new("#{@uri.path}?api_key=#{@api_key}")

    begin
      request.body = dd_series.to_json
      request.add_field("Content-Type", 'application/json')
      response = @client.request(request)
      @logger.info("DD convo", :request => request.inspect, :response => response.inspect)
      raise unless response.code == '202'
    rescue StandardError => e
      # BUGFIX: was `rescue Exception`, which also traps SignalException and
      # SystemExit; StandardError preserves the best-effort logging intent.
      @logger.warn("Unhandled exception", :request => request.inspect, :response => response.inspect, :exception => e.inspect)
    end
  end # def flush

  private
  # Accept either a Time or a parseable time string; return epoch seconds.
  def to_epoch(t)
    return t.is_a?(Time) ? t.to_i : Time.parse(t).to_i
  end # def to_epoch

end # class LogStash::Outputs::DatadogMetrics
@@ -0,0 +1,332 @@
|
|
|
1
|
+
# encoding: utf-8
|
|
2
|
+
require "logstash/namespace"
|
|
3
|
+
require "logstash/outputs/base"
|
|
4
|
+
require "stud/buffer"
|
|
5
|
+
|
|
6
|
+
# This output lets you store logs in elasticsearch and is the most recommended
|
|
7
|
+
# output for logstash. If you plan on using the logstash web interface, you'll
|
|
8
|
+
# need to use this output.
|
|
9
|
+
#
|
|
10
|
+
# *VERSION NOTE*: Your elasticsearch cluster must be running elasticsearch
|
|
11
|
+
# %ELASTICSEARCH_VERSION%. If you use any other version of elasticsearch,
|
|
12
|
+
# you should consider using the [elasticsearch_http](elasticsearch_http)
|
|
13
|
+
# output instead.
|
|
14
|
+
#
|
|
15
|
+
# If you want to set other elasticsearch options that are not exposed directly
|
|
16
|
+
# as config options, there are two options:
|
|
17
|
+
#
|
|
18
|
+
# * create an elasticsearch.yml file in the $PWD of the logstash process
|
|
19
|
+
# * pass in es.* java properties (java -Des.node.foo= or ruby -J-Des.node.foo=)
|
|
20
|
+
#
|
|
21
|
+
# This plugin will join your elasticsearch cluster, so it will show up in
|
|
22
|
+
# elasticsearch's cluster health status.
|
|
23
|
+
#
|
|
24
|
+
# You can learn more about elasticsearch at <http://elasticsearch.org>
|
|
25
|
+
#
|
|
26
|
+
# ## Operational Notes
|
|
27
|
+
#
|
|
28
|
+
# Template management is a new feature and requires at least version
|
|
29
|
+
# Elasticsearch 0.90.5+
|
|
30
|
+
#
|
|
31
|
+
# If you are still using a version older than this, please upgrade for
|
|
32
|
+
# more benefits than just template management.
|
|
33
|
+
#
|
|
34
|
+
# Your firewalls will need to permit port 9300 in *both* directions (from
|
|
35
|
+
# logstash to elasticsearch, and elasticsearch to logstash)
|
|
36
|
+
class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
  # Stud::Buffer supplies buffer_initialize/buffer_receive and invokes our
  # flush(events, teardown) callback when the batch or idle limits are hit.
  include Stud::Buffer

  config_name "elasticsearch"
  milestone 3

  # The index to write events to. This can be dynamic using the %{foo} syntax.
  # The default value will partition your indices by day so you can more easily
  # delete old data or only search specific date ranges.
  # Indexes may not contain uppercase characters.
  config :index, :validate => :string, :default => "logstash-%{+YYYY.MM.dd}"

  # The index type to write events to. Generally you should try to write only
  # similar events to the same 'type'. String expansion '%{foo}' works here.
  config :index_type, :validate => :string

  # Starting in Logstash 1.3 (unless you set option "manage_template" to false)
  # a default mapping template for Elasticsearch will be applied, if you do not
  # already have one set to match the index pattern defined (default of
  # "logstash-%{+YYYY.MM.dd}"), minus any variables. For example, in this case
  # the template will be applied to all indices starting with logstash-*
  #
  # If you have dynamic templating (e.g. creating indices based on field names)
  # then you should set "manage_template" to false and use the REST API to upload
  # your templates manually.
  config :manage_template, :validate => :boolean, :default => true

  # This configuration option defines how the template is named inside Elasticsearch
  # Note that if you have used the template management features and subsequently
  # change this you will need to prune the old template manually, e.g.
  # curl -XDELETE <http://localhost:9200/_template/OLD_template_name?pretty>
  # where OLD_template_name is whatever the former setting was.
  config :template_name, :validate => :string, :default => "logstash"

  # You can set the path to your own template here, if you so desire.
  # If not the included template will be used.
  config :template, :validate => :path

  # Overwrite the current template with whatever is configured
  # in the template and template_name directives.
  config :template_overwrite, :validate => :boolean, :default => false

  # The document ID for the index. Useful for overwriting existing entries in
  # elasticsearch with the same ID.
  config :document_id, :validate => :string, :default => nil

  # The name of your cluster if you set it on the ElasticSearch side. Useful
  # for discovery.
  config :cluster, :validate => :string

  # The name/address of the host to use for ElasticSearch unicast discovery
  # This is only required if the normal multicast/cluster discovery stuff won't
  # work in your environment.
  config :host, :validate => :string

  # The port for ElasticSearch transport to use. This is *not* the ElasticSearch
  # REST API port (normally 9200).
  config :port, :validate => :string, :default => "9300-9305"

  # The name/address of the host to bind to for ElasticSearch clustering
  config :bind_host, :validate => :string

  # This is only valid for the 'node' protocol.
  #
  # The port for the node to listen on.
  config :bind_port, :validate => :number

  # Run the elasticsearch server embedded in this process.
  # This option is useful if you want to run a single logstash process that
  # handles log processing and indexing; it saves you from needing to run
  # a separate elasticsearch process.
  config :embedded, :validate => :boolean, :default => false

  # If you are running the embedded elasticsearch server, you can set the http
  # port it listens on here; it is not common to need this setting changed from
  # default.
  config :embedded_http_port, :validate => :string, :default => "9200-9300"

  # This setting no longer does anything. It exists to keep config validation
  # from failing. It will be removed in future versions.
  config :max_inflight_requests, :validate => :number, :default => 50, :deprecated => true

  # The node name ES will use when joining a cluster.
  #
  # By default, this is generated internally by the ES client.
  config :node_name, :validate => :string

  # This plugin uses the bulk index api for improved indexing performance.
  # To make efficient bulk api calls, we will buffer a certain number of
  # events before flushing that out to elasticsearch. This setting
  # controls how many events will be buffered before sending a batch
  # of events.
  config :flush_size, :validate => :number, :default => 100

  # The amount of time since last flush before a flush is forced.
  #
  # This setting helps ensure slow event rates don't get stuck in logstash.
  # For example, if your `flush_size` is 100, and you have received 10 events,
  # and it has been more than `idle_flush_time` seconds since the last flush,
  # logstash will flush those 10 events automatically.
  #
  # This helps keep both fast and slow log streams moving along in
  # near-real-time.
  config :idle_flush_time, :validate => :number, :default => 1

  # Choose the protocol used to talk to elasticsearch.
  #
  # The 'node' protocol will connect to the cluster as a normal elasticsearch
  # node (but will not store data). This allows you to use things like
  # multicast discovery.
  #
  # The 'transport' protocol will connect to the host you specify and will
  # not show up as a 'node' in the elasticsearch cluster. This is useful
  # in situations where you cannot permit connections outbound from the
  # elasticsearch cluster to this logstash server.
  config :protocol, :validate => [ "node", "transport" ], :default => "node"

  # Load the ES client jars, optionally start an embedded ES node, connect a
  # client, reconcile the index template, and initialize the event buffer.
  public
  def register
    # TODO(sissel): find a better way of declaring where the elasticsearch
    # libraries are
    # TODO(sissel): can skip this step if we're running from a jar.
    jarpath = File.join(File.dirname(__FILE__), "../../../vendor/jar/elasticsearch*/lib/*.jar")
    Dir[jarpath].each do |jar|
      require jar
    end

    # setup log4j properties for elasticsearch
    LogStash::Logger.setup_log4j(@logger)

    if @embedded
      # Default @host with embedded to localhost. This should help avoid
      # newbies tripping on ubuntu and other distros that have a default
      # firewall that blocks multicast.
      @host ||= "localhost"

      # Start elasticsearch local.
      start_local_elasticsearch
    end
    require "jruby-elasticsearch"

    @logger.info("New ElasticSearch output", :cluster => @cluster,
                 :host => @host, :port => @port, :embedded => @embedded)
    options = {
      :cluster => @cluster,
      :host => @host,
      :port => @port,
      :bind_host => @bind_host,
      :node_name => @node_name,
    }

    # :node or :transport protocols
    options[:type] = @protocol.to_sym

    options[:bind_port] = @bind_port unless @bind_port.nil?

    # TransportClient requires a number for a port; a range string such as
    # "9300-9305" collapses to its first number under to_i.
    options[:port] = options[:port].to_i if options[:type] == :transport

    @client = ElasticSearch::Client.new(options)

    # Check to see if we *can* get the template; reach into the wrapper for
    # the underlying Java client object.
    java_client = @client.instance_eval{@client}
    begin
      check_template = ElasticSearch::GetIndexTemplatesRequest.new(java_client, @template_name)
      result = check_template.execute #=> Run previously...
    rescue Exception => e
      # NOTE(review): rescuing Exception (not StandardError) kept on purpose —
      # under JRuby, failures from the Java client may not subclass
      # StandardError. Confirm before narrowing.
      @logger.error("Unable to check template. Automatic template management disabled.", :error => e.to_s)
      @manage_template = false
    end

    if @manage_template
      @logger.info("Automatic template management enabled", :manage_template => @manage_template.to_s)
      if @template_overwrite
        @logger.info("Template overwrite enabled. Deleting template if it exists.", :template_overwrite => @template_overwrite.to_s)
        if !result.getIndexTemplates.isEmpty
          delete_template = ElasticSearch::DeleteIndexTemplateRequest.new(java_client, @template_name)
          result = delete_template.execute
          if result.isAcknowledged
            @logger.info("Successfully deleted template", :template_name => @template_name)
          else
            @logger.error("Failed to delete template", :template_name => @template_name)
          end
        end
      end # end if @template_overwrite
      has_template = false
      @logger.debug("Fetching all templates...")
      gettemplates = ElasticSearch::GetIndexTemplatesRequest.new(java_client, "*")
      result = gettemplates.execute
      # Results of this come as a (Java) list, so we need to iterate through it
      if !result.getIndexTemplates.isEmpty
        template_metadata_list = result.getIndexTemplates
        templates = {}
        # Index the list directly instead of the old manual `i = 0 / i += 1`
        # counter.
        template_metadata_list.size.times do |i|
          template_data = template_metadata_list.get(i)
          templates[template_data.name] = template_data.template
        end
        # Turn the configured index pattern into the wildcard form a template
        # would use, e.g. "logstash-%{+YYYY.MM.dd}" => "logstash-*".
        template_idx_name = @index.sub(/%{[^}]+}/,'*')
        alt_template_idx_name = @index.sub(/-%{[^}]+}/,'*')
        if !templates.any? { |k,v| v == template_idx_name || v == alt_template_idx_name }
          @logger.debug("No logstash template found in Elasticsearch", :has_template => has_template, :name => template_idx_name, :alt => alt_template_idx_name)
        else
          has_template = true
          @logger.info("Found existing Logstash template match.", :has_template => has_template, :name => template_idx_name, :alt => alt_template_idx_name, :templates => templates.to_s)
        end
      end
      if !has_template #=> No template found, we're going to add one
        get_template_json
        put_template = ElasticSearch::PutIndexTemplateRequest.new(java_client, @template_name, @template_json)
        result = put_template.execute
        if result.isAcknowledged
          @logger.info("Successfully inserted template", :template_name => @template_name)
        else
          @logger.error("Failed to insert template", :template_name => @template_name)
        end
      end
    end # if @manage_templates

    buffer_initialize(
      :max_items => @flush_size,
      :max_interval => @idle_flush_time,
      :logger => @logger
    )
  end # def register

  # Resolve the template file path (when @template is unset) and load its
  # contents into @template_json, with newlines stripped.
  # Raises if no template file can be found and none was configured.
  public
  def get_template_json
    if @template.nil?
      if __FILE__ =~ /^(jar:)?file:\/.+!.+/
        begin
          # Running from a jar; assume the template file is at the jar root.
          jar_path = [__FILE__.split("!").first, "/elasticsearch-template.json"].join("!")
          @template = jar_path
        rescue => ex
          raise "Failed to cache, due to: #{ex}\n#{ex.backtrace}"
        end
      else
        # File.exist? instead of the deprecated File.exists? alias.
        if File.exist?("elasticsearch-template.json")
          @template = "elasticsearch-template.json"
        elsif File.exist?("lib/logstash/outputs/elasticsearch/elasticsearch-template.json")
          @template = "lib/logstash/outputs/elasticsearch/elasticsearch-template.json"
        else
          # BUG FIX: this message previously named the 'elasticsearch_http'
          # output; this is the 'elasticsearch' output.
          raise "You must specify 'template => ...' in your elasticsearch output"
        end
      end
    end
    @template_json = IO.read(@template).gsub(/\n/,'')
    @logger.info("Using mapping template", :template => @template_json)
  end # def get_template

  # Boot an in-process (embedded) elasticsearch node using the configured
  # cluster/node names and http port range.
  protected
  def start_local_elasticsearch
    @logger.info("Starting embedded ElasticSearch local node.")
    builder = org.elasticsearch.node.NodeBuilder.nodeBuilder
    # Disable 'local only' - LOGSTASH-277
    #builder.local(true)
    builder.settings.put("cluster.name", @cluster) if !@cluster.nil?
    builder.settings.put("node.name", @node_name) if !@node_name.nil?
    builder.settings.put("http.port", @embedded_http_port)

    @embedded_elasticsearch = builder.node
    @embedded_elasticsearch.start
  end # def start_local_elasticsearch

  # Queue an event for the next bulk flush.
  public
  def receive(event)
    return unless output?(event)
    # BUG FIX: this used to pass bare `index` and `type`, which are not
    # defined in this scope (config options only populate @index/@index_type
    # instance variables) and would raise NameError. flush() computes the
    # real index name and type per event, so nil placeholders simply keep the
    # [event, index, type] tuple shape it destructures.
    buffer_receive([event, nil, nil])
  end # def receive

  # Stud::Buffer callback: send the buffered events as one bulk request.
  # `teardown` is part of the Stud::Buffer contract but unused here.
  def flush(events, teardown=false)
    request = @client.bulk
    events.each do |event, index, type|
      index = event.sprintf(@index)

      # Set the 'type' value for the index.
      if @index_type.nil?
        type = event["type"] || "logs"
      else
        type = event.sprintf(@index_type)
      end
      if @document_id
        request.index(index, type, event.sprintf(@document_id), event.to_json)
      else
        request.index(index, type, nil, event.to_json)
      end
    end

    request.execute!
    # TODO(sissel): Handle errors. Since bulk requests could mostly succeed
    # (aka partially fail), we need to figure out what documents need to be
    # retried.
  end # def flush

end # class LogStash::Outputs::Elasticsearch
|