logstash-lib 1.3.2
- data/.gitignore +24 -0
- data/.tailor +8 -0
- data/.travis.yml +12 -0
- data/CHANGELOG +1185 -0
- data/CONTRIBUTING.md +61 -0
- data/CONTRIBUTORS +79 -0
- data/LICENSE +14 -0
- data/Makefile +460 -0
- data/README.md +120 -0
- data/STYLE.md +96 -0
- data/bin/logstash +37 -0
- data/bin/logstash-test +4 -0
- data/bin/logstash-web +4 -0
- data/bin/logstash.lib.sh +78 -0
- data/bot/check_pull_changelog.rb +89 -0
- data/docs/configuration.md +260 -0
- data/docs/docgen.rb +242 -0
- data/docs/extending/example-add-a-new-filter.md +121 -0
- data/docs/extending/index.md +91 -0
- data/docs/flags.md +43 -0
- data/docs/generate_index.rb +28 -0
- data/docs/index.html.erb +56 -0
- data/docs/learn.md +46 -0
- data/docs/life-of-an-event.md +109 -0
- data/docs/logging-tool-comparisons.md +60 -0
- data/docs/plugin-doc.html.erb +91 -0
- data/docs/plugin-milestones.md +41 -0
- data/docs/plugin-synopsis.html.erb +24 -0
- data/docs/release-engineering.md +46 -0
- data/docs/release-test-results.md +14 -0
- data/docs/repositories.md +35 -0
- data/docs/tutorials/10-minute-walkthrough/apache-elasticsearch.conf +35 -0
- data/docs/tutorials/10-minute-walkthrough/apache-parse.conf +33 -0
- data/docs/tutorials/10-minute-walkthrough/apache_log.1 +1 -0
- data/docs/tutorials/10-minute-walkthrough/apache_log.2.bz2 +0 -0
- data/docs/tutorials/10-minute-walkthrough/hello-search.conf +25 -0
- data/docs/tutorials/10-minute-walkthrough/hello.conf +16 -0
- data/docs/tutorials/10-minute-walkthrough/index.md +124 -0
- data/docs/tutorials/10-minute-walkthrough/step-5-output.txt +17 -0
- data/docs/tutorials/getting-started-centralized-overview-diagram.png +0 -0
- data/docs/tutorials/getting-started-centralized-overview-diagram.xml +1 -0
- data/docs/tutorials/getting-started-centralized.md +217 -0
- data/docs/tutorials/getting-started-simple.md +200 -0
- data/docs/tutorials/just-enough-rabbitmq-for-logstash.md +201 -0
- data/docs/tutorials/media/frontend-response-codes.png +0 -0
- data/docs/tutorials/metrics-from-logs.md +84 -0
- data/docs/tutorials/zeromq.md +118 -0
- data/extract_services.rb +29 -0
- data/gembag.rb +64 -0
- data/lib/logstash-event.rb +2 -0
- data/lib/logstash.rb +4 -0
- data/lib/logstash/JRUBY-6970-openssl.rb +22 -0
- data/lib/logstash/JRUBY-6970.rb +102 -0
- data/lib/logstash/agent.rb +305 -0
- data/lib/logstash/certs/cacert.pem +3895 -0
- data/lib/logstash/codecs/base.rb +49 -0
- data/lib/logstash/codecs/compress_spooler.rb +50 -0
- data/lib/logstash/codecs/dots.rb +18 -0
- data/lib/logstash/codecs/edn.rb +28 -0
- data/lib/logstash/codecs/edn_lines.rb +36 -0
- data/lib/logstash/codecs/fluent.rb +55 -0
- data/lib/logstash/codecs/graphite.rb +114 -0
- data/lib/logstash/codecs/json.rb +41 -0
- data/lib/logstash/codecs/json_lines.rb +52 -0
- data/lib/logstash/codecs/json_spooler.rb +22 -0
- data/lib/logstash/codecs/line.rb +58 -0
- data/lib/logstash/codecs/msgpack.rb +43 -0
- data/lib/logstash/codecs/multiline.rb +189 -0
- data/lib/logstash/codecs/netflow.rb +342 -0
- data/lib/logstash/codecs/netflow/util.rb +212 -0
- data/lib/logstash/codecs/noop.rb +19 -0
- data/lib/logstash/codecs/oldlogstashjson.rb +56 -0
- data/lib/logstash/codecs/plain.rb +48 -0
- data/lib/logstash/codecs/rubydebug.rb +22 -0
- data/lib/logstash/codecs/spool.rb +38 -0
- data/lib/logstash/config/Makefile +4 -0
- data/lib/logstash/config/config_ast.rb +380 -0
- data/lib/logstash/config/file.rb +39 -0
- data/lib/logstash/config/grammar.rb +3504 -0
- data/lib/logstash/config/grammar.treetop +241 -0
- data/lib/logstash/config/mixin.rb +464 -0
- data/lib/logstash/config/registry.rb +13 -0
- data/lib/logstash/config/test.conf +18 -0
- data/lib/logstash/errors.rb +10 -0
- data/lib/logstash/event.rb +262 -0
- data/lib/logstash/filters/advisor.rb +178 -0
- data/lib/logstash/filters/alter.rb +173 -0
- data/lib/logstash/filters/anonymize.rb +93 -0
- data/lib/logstash/filters/base.rb +190 -0
- data/lib/logstash/filters/checksum.rb +50 -0
- data/lib/logstash/filters/cidr.rb +76 -0
- data/lib/logstash/filters/cipher.rb +145 -0
- data/lib/logstash/filters/clone.rb +35 -0
- data/lib/logstash/filters/collate.rb +114 -0
- data/lib/logstash/filters/csv.rb +94 -0
- data/lib/logstash/filters/date.rb +244 -0
- data/lib/logstash/filters/dns.rb +201 -0
- data/lib/logstash/filters/drop.rb +32 -0
- data/lib/logstash/filters/elapsed.rb +256 -0
- data/lib/logstash/filters/elasticsearch.rb +73 -0
- data/lib/logstash/filters/environment.rb +27 -0
- data/lib/logstash/filters/extractnumbers.rb +84 -0
- data/lib/logstash/filters/gelfify.rb +52 -0
- data/lib/logstash/filters/geoip.rb +145 -0
- data/lib/logstash/filters/grep.rb +153 -0
- data/lib/logstash/filters/grok.rb +425 -0
- data/lib/logstash/filters/grokdiscovery.rb +75 -0
- data/lib/logstash/filters/i18n.rb +51 -0
- data/lib/logstash/filters/json.rb +90 -0
- data/lib/logstash/filters/json_encode.rb +52 -0
- data/lib/logstash/filters/kv.rb +232 -0
- data/lib/logstash/filters/metaevent.rb +68 -0
- data/lib/logstash/filters/metrics.rb +237 -0
- data/lib/logstash/filters/multiline.rb +241 -0
- data/lib/logstash/filters/mutate.rb +399 -0
- data/lib/logstash/filters/noop.rb +21 -0
- data/lib/logstash/filters/prune.rb +149 -0
- data/lib/logstash/filters/punct.rb +32 -0
- data/lib/logstash/filters/railsparallelrequest.rb +86 -0
- data/lib/logstash/filters/range.rb +142 -0
- data/lib/logstash/filters/ruby.rb +42 -0
- data/lib/logstash/filters/sleep.rb +111 -0
- data/lib/logstash/filters/split.rb +64 -0
- data/lib/logstash/filters/sumnumbers.rb +73 -0
- data/lib/logstash/filters/syslog_pri.rb +107 -0
- data/lib/logstash/filters/translate.rb +121 -0
- data/lib/logstash/filters/unique.rb +29 -0
- data/lib/logstash/filters/urldecode.rb +57 -0
- data/lib/logstash/filters/useragent.rb +112 -0
- data/lib/logstash/filters/uuid.rb +58 -0
- data/lib/logstash/filters/xml.rb +139 -0
- data/lib/logstash/filters/zeromq.rb +123 -0
- data/lib/logstash/filterworker.rb +122 -0
- data/lib/logstash/inputs/base.rb +125 -0
- data/lib/logstash/inputs/collectd.rb +306 -0
- data/lib/logstash/inputs/drupal_dblog.rb +323 -0
- data/lib/logstash/inputs/drupal_dblog/jdbcconnection.rb +66 -0
- data/lib/logstash/inputs/elasticsearch.rb +140 -0
- data/lib/logstash/inputs/eventlog.rb +129 -0
- data/lib/logstash/inputs/eventlog/racob_fix.rb +44 -0
- data/lib/logstash/inputs/exec.rb +69 -0
- data/lib/logstash/inputs/file.rb +146 -0
- data/lib/logstash/inputs/ganglia.rb +127 -0
- data/lib/logstash/inputs/ganglia/gmondpacket.rb +146 -0
- data/lib/logstash/inputs/ganglia/xdr.rb +327 -0
- data/lib/logstash/inputs/gelf.rb +138 -0
- data/lib/logstash/inputs/gemfire.rb +222 -0
- data/lib/logstash/inputs/generator.rb +97 -0
- data/lib/logstash/inputs/graphite.rb +41 -0
- data/lib/logstash/inputs/heroku.rb +51 -0
- data/lib/logstash/inputs/imap.rb +136 -0
- data/lib/logstash/inputs/irc.rb +84 -0
- data/lib/logstash/inputs/log4j.rb +136 -0
- data/lib/logstash/inputs/lumberjack.rb +53 -0
- data/lib/logstash/inputs/pipe.rb +57 -0
- data/lib/logstash/inputs/rabbitmq.rb +126 -0
- data/lib/logstash/inputs/rabbitmq/bunny.rb +118 -0
- data/lib/logstash/inputs/rabbitmq/hot_bunnies.rb +1 -0
- data/lib/logstash/inputs/rabbitmq/march_hare.rb +129 -0
- data/lib/logstash/inputs/redis.rb +263 -0
- data/lib/logstash/inputs/relp.rb +106 -0
- data/lib/logstash/inputs/s3.rb +279 -0
- data/lib/logstash/inputs/snmptrap.rb +87 -0
- data/lib/logstash/inputs/sqlite.rb +185 -0
- data/lib/logstash/inputs/sqs.rb +172 -0
- data/lib/logstash/inputs/stdin.rb +46 -0
- data/lib/logstash/inputs/stomp.rb +84 -0
- data/lib/logstash/inputs/syslog.rb +237 -0
- data/lib/logstash/inputs/tcp.rb +231 -0
- data/lib/logstash/inputs/threadable.rb +18 -0
- data/lib/logstash/inputs/twitter.rb +82 -0
- data/lib/logstash/inputs/udp.rb +81 -0
- data/lib/logstash/inputs/unix.rb +163 -0
- data/lib/logstash/inputs/varnishlog.rb +48 -0
- data/lib/logstash/inputs/websocket.rb +50 -0
- data/lib/logstash/inputs/wmi.rb +72 -0
- data/lib/logstash/inputs/xmpp.rb +81 -0
- data/lib/logstash/inputs/zenoss.rb +143 -0
- data/lib/logstash/inputs/zeromq.rb +165 -0
- data/lib/logstash/kibana.rb +113 -0
- data/lib/logstash/loadlibs.rb +9 -0
- data/lib/logstash/logging.rb +89 -0
- data/lib/logstash/monkeypatches-for-bugs.rb +2 -0
- data/lib/logstash/monkeypatches-for-debugging.rb +47 -0
- data/lib/logstash/monkeypatches-for-performance.rb +66 -0
- data/lib/logstash/multiqueue.rb +53 -0
- data/lib/logstash/namespace.rb +16 -0
- data/lib/logstash/outputs/base.rb +120 -0
- data/lib/logstash/outputs/boundary.rb +116 -0
- data/lib/logstash/outputs/circonus.rb +78 -0
- data/lib/logstash/outputs/cloudwatch.rb +351 -0
- data/lib/logstash/outputs/csv.rb +55 -0
- data/lib/logstash/outputs/datadog.rb +93 -0
- data/lib/logstash/outputs/datadog_metrics.rb +123 -0
- data/lib/logstash/outputs/elasticsearch.rb +332 -0
- data/lib/logstash/outputs/elasticsearch/elasticsearch-template.json +44 -0
- data/lib/logstash/outputs/elasticsearch_http.rb +256 -0
- data/lib/logstash/outputs/elasticsearch_river.rb +214 -0
- data/lib/logstash/outputs/email.rb +299 -0
- data/lib/logstash/outputs/exec.rb +40 -0
- data/lib/logstash/outputs/file.rb +180 -0
- data/lib/logstash/outputs/ganglia.rb +75 -0
- data/lib/logstash/outputs/gelf.rb +208 -0
- data/lib/logstash/outputs/gemfire.rb +103 -0
- data/lib/logstash/outputs/google_bigquery.rb +570 -0
- data/lib/logstash/outputs/google_cloud_storage.rb +431 -0
- data/lib/logstash/outputs/graphite.rb +143 -0
- data/lib/logstash/outputs/graphtastic.rb +185 -0
- data/lib/logstash/outputs/hipchat.rb +80 -0
- data/lib/logstash/outputs/http.rb +142 -0
- data/lib/logstash/outputs/irc.rb +80 -0
- data/lib/logstash/outputs/jira.rb +109 -0
- data/lib/logstash/outputs/juggernaut.rb +105 -0
- data/lib/logstash/outputs/librato.rb +146 -0
- data/lib/logstash/outputs/loggly.rb +93 -0
- data/lib/logstash/outputs/lumberjack.rb +51 -0
- data/lib/logstash/outputs/metriccatcher.rb +103 -0
- data/lib/logstash/outputs/mongodb.rb +81 -0
- data/lib/logstash/outputs/nagios.rb +119 -0
- data/lib/logstash/outputs/nagios_nsca.rb +123 -0
- data/lib/logstash/outputs/null.rb +18 -0
- data/lib/logstash/outputs/opentsdb.rb +101 -0
- data/lib/logstash/outputs/pagerduty.rb +79 -0
- data/lib/logstash/outputs/pipe.rb +132 -0
- data/lib/logstash/outputs/rabbitmq.rb +96 -0
- data/lib/logstash/outputs/rabbitmq/bunny.rb +135 -0
- data/lib/logstash/outputs/rabbitmq/hot_bunnies.rb +1 -0
- data/lib/logstash/outputs/rabbitmq/march_hare.rb +143 -0
- data/lib/logstash/outputs/redis.rb +245 -0
- data/lib/logstash/outputs/riak.rb +152 -0
- data/lib/logstash/outputs/riemann.rb +109 -0
- data/lib/logstash/outputs/s3.rb +356 -0
- data/lib/logstash/outputs/sns.rb +124 -0
- data/lib/logstash/outputs/solr_http.rb +78 -0
- data/lib/logstash/outputs/sqs.rb +141 -0
- data/lib/logstash/outputs/statsd.rb +116 -0
- data/lib/logstash/outputs/stdout.rb +53 -0
- data/lib/logstash/outputs/stomp.rb +67 -0
- data/lib/logstash/outputs/syslog.rb +145 -0
- data/lib/logstash/outputs/tcp.rb +145 -0
- data/lib/logstash/outputs/udp.rb +38 -0
- data/lib/logstash/outputs/websocket.rb +46 -0
- data/lib/logstash/outputs/websocket/app.rb +29 -0
- data/lib/logstash/outputs/websocket/pubsub.rb +45 -0
- data/lib/logstash/outputs/xmpp.rb +78 -0
- data/lib/logstash/outputs/zabbix.rb +108 -0
- data/lib/logstash/outputs/zeromq.rb +125 -0
- data/lib/logstash/pipeline.rb +286 -0
- data/lib/logstash/plugin.rb +150 -0
- data/lib/logstash/plugin_mixins/aws_config.rb +93 -0
- data/lib/logstash/program.rb +15 -0
- data/lib/logstash/runner.rb +238 -0
- data/lib/logstash/sized_queue.rb +8 -0
- data/lib/logstash/test.rb +183 -0
- data/lib/logstash/threadwatchdog.rb +37 -0
- data/lib/logstash/time_addon.rb +33 -0
- data/lib/logstash/util.rb +106 -0
- data/lib/logstash/util/buftok.rb +139 -0
- data/lib/logstash/util/charset.rb +39 -0
- data/lib/logstash/util/fieldreference.rb +50 -0
- data/lib/logstash/util/password.rb +25 -0
- data/lib/logstash/util/prctl.rb +11 -0
- data/lib/logstash/util/relp.rb +326 -0
- data/lib/logstash/util/require-helper.rb +18 -0
- data/lib/logstash/util/socket_peer.rb +7 -0
- data/lib/logstash/util/zenoss.rb +566 -0
- data/lib/logstash/util/zeromq.rb +47 -0
- data/lib/logstash/version.rb +6 -0
- data/locales/en.yml +170 -0
- data/logstash-event.gemspec +29 -0
- data/logstash.gemspec +128 -0
- data/patterns/firewalls +60 -0
- data/patterns/grok-patterns +91 -0
- data/patterns/haproxy +37 -0
- data/patterns/java +3 -0
- data/patterns/linux-syslog +14 -0
- data/patterns/mcollective +1 -0
- data/patterns/mcollective-patterns +4 -0
- data/patterns/nagios +108 -0
- data/patterns/postgresql +3 -0
- data/patterns/redis +3 -0
- data/patterns/ruby +2 -0
- data/pkg/build.sh +135 -0
- data/pkg/centos/after-install.sh +1 -0
- data/pkg/centos/before-install.sh +10 -0
- data/pkg/centos/before-remove.sh +11 -0
- data/pkg/centos/sysconfig +15 -0
- data/pkg/debian/after-install.sh +5 -0
- data/pkg/debian/before-install.sh +13 -0
- data/pkg/debian/before-remove.sh +13 -0
- data/pkg/debian/build.sh +34 -0
- data/pkg/debian/debian/README +6 -0
- data/pkg/debian/debian/changelog +17 -0
- data/pkg/debian/debian/compat +1 -0
- data/pkg/debian/debian/control +16 -0
- data/pkg/debian/debian/copyright +27 -0
- data/pkg/debian/debian/dirs +19 -0
- data/pkg/debian/debian/docs +0 -0
- data/pkg/debian/debian/logstash.default +39 -0
- data/pkg/debian/debian/logstash.init +201 -0
- data/pkg/debian/debian/logstash.install +1 -0
- data/pkg/debian/debian/logstash.logrotate +9 -0
- data/pkg/debian/debian/logstash.postinst +68 -0
- data/pkg/debian/debian/logstash.postrm +23 -0
- data/pkg/debian/debian/manpage.1.ex +59 -0
- data/pkg/debian/debian/preinst.ex +37 -0
- data/pkg/debian/debian/prerm.ex +40 -0
- data/pkg/debian/debian/release.conf +5 -0
- data/pkg/debian/debian/rules +80 -0
- data/pkg/debian/debian/watch.ex +22 -0
- data/pkg/logrotate.conf +8 -0
- data/pkg/logstash-web.default +41 -0
- data/pkg/logstash-web.sysv.debian +201 -0
- data/pkg/logstash-web.upstart.ubuntu +18 -0
- data/pkg/logstash.default +45 -0
- data/pkg/logstash.sysv.debian +202 -0
- data/pkg/logstash.sysv.redhat +158 -0
- data/pkg/logstash.upstart.ubuntu +20 -0
- data/pkg/rpm/SOURCES/logstash.conf +26 -0
- data/pkg/rpm/SOURCES/logstash.init +80 -0
- data/pkg/rpm/SOURCES/logstash.logrotate +8 -0
- data/pkg/rpm/SOURCES/logstash.sysconfig +3 -0
- data/pkg/rpm/SOURCES/logstash.wrapper +105 -0
- data/pkg/rpm/SPECS/logstash.spec +180 -0
- data/pkg/rpm/readme.md +4 -0
- data/pkg/ubuntu/after-install.sh +7 -0
- data/pkg/ubuntu/before-install.sh +12 -0
- data/pkg/ubuntu/before-remove.sh +13 -0
- data/pull_release_note.rb +25 -0
- data/require-analyze.rb +22 -0
- data/spec/README.md +14 -0
- data/spec/codecs/edn.rb +40 -0
- data/spec/codecs/edn_lines.rb +53 -0
- data/spec/codecs/graphite.rb +96 -0
- data/spec/codecs/json.rb +57 -0
- data/spec/codecs/json_lines.rb +51 -0
- data/spec/codecs/json_spooler.rb +43 -0
- data/spec/codecs/msgpack.rb +39 -0
- data/spec/codecs/multiline.rb +60 -0
- data/spec/codecs/oldlogstashjson.rb +55 -0
- data/spec/codecs/plain.rb +35 -0
- data/spec/codecs/spool.rb +35 -0
- data/spec/conditionals/test.rb +323 -0
- data/spec/config.rb +31 -0
- data/spec/event.rb +165 -0
- data/spec/examples/fail2ban.rb +28 -0
- data/spec/examples/graphite-input.rb +41 -0
- data/spec/examples/mysql-slow-query.rb +70 -0
- data/spec/examples/parse-apache-logs.rb +66 -0
- data/spec/examples/parse-haproxy-logs.rb +115 -0
- data/spec/examples/syslog.rb +48 -0
- data/spec/filters/alter.rb +96 -0
- data/spec/filters/anonymize.rb +189 -0
- data/spec/filters/checksum.rb +41 -0
- data/spec/filters/clone.rb +67 -0
- data/spec/filters/collate.rb +122 -0
- data/spec/filters/csv.rb +174 -0
- data/spec/filters/date.rb +285 -0
- data/spec/filters/date_performance.rb +31 -0
- data/spec/filters/dns.rb +159 -0
- data/spec/filters/drop.rb +19 -0
- data/spec/filters/elapsed.rb +294 -0
- data/spec/filters/environment.rb +43 -0
- data/spec/filters/geoip.rb +62 -0
- data/spec/filters/grep.rb +342 -0
- data/spec/filters/grok.rb +473 -0
- data/spec/filters/grok/timeout2.rb +56 -0
- data/spec/filters/grok/timeouts.rb +39 -0
- data/spec/filters/i18n.rb +25 -0
- data/spec/filters/json.rb +72 -0
- data/spec/filters/json_encode.rb +37 -0
- data/spec/filters/kv.rb +403 -0
- data/spec/filters/metrics.rb +212 -0
- data/spec/filters/multiline.rb +119 -0
- data/spec/filters/mutate.rb +180 -0
- data/spec/filters/noop.rb +221 -0
- data/spec/filters/prune.rb +441 -0
- data/spec/filters/punct.rb +18 -0
- data/spec/filters/railsparallelrequest.rb +112 -0
- data/spec/filters/range.rb +169 -0
- data/spec/filters/split.rb +58 -0
- data/spec/filters/translate.rb +70 -0
- data/spec/filters/unique.rb +25 -0
- data/spec/filters/useragent.rb +42 -0
- data/spec/filters/xml.rb +157 -0
- data/spec/inputs/file.rb +107 -0
- data/spec/inputs/gelf.rb +52 -0
- data/spec/inputs/generator.rb +30 -0
- data/spec/inputs/imap.rb +60 -0
- data/spec/inputs/redis.rb +63 -0
- data/spec/inputs/relp.rb +70 -0
- data/spec/inputs/tcp.rb +101 -0
- data/spec/jar.rb +21 -0
- data/spec/outputs/csv.rb +266 -0
- data/spec/outputs/elasticsearch.rb +161 -0
- data/spec/outputs/elasticsearch_http.rb +240 -0
- data/spec/outputs/email.rb +173 -0
- data/spec/outputs/file.rb +82 -0
- data/spec/outputs/graphite.rb +236 -0
- data/spec/outputs/redis.rb +127 -0
- data/spec/speed.rb +20 -0
- data/spec/sqlite-test.rb +81 -0
- data/spec/support/LOGSTASH-733.rb +21 -0
- data/spec/support/LOGSTASH-820.rb +25 -0
- data/spec/support/akamai-grok.rb +26 -0
- data/spec/support/date-http.rb +17 -0
- data/spec/support/postwait1.rb +26 -0
- data/spec/support/pull375.rb +21 -0
- data/spec/test_utils.rb +125 -0
- data/spec/util/fieldeval_spec.rb +44 -0
- data/test/jenkins/config.xml.erb +74 -0
- data/test/jenkins/create-jobs.rb +23 -0
- data/test/jenkins/generatorjob.config.xml +66 -0
- data/tools/Gemfile +14 -0
- data/tools/Gemfile.jruby-1.9.lock +322 -0
- data/tools/Gemfile.rbx-2.1.lock +516 -0
- data/tools/Gemfile.ruby-1.9.1.lock +310 -0
- data/tools/Gemfile.ruby-2.0.0.lock +310 -0
- metadata +629 -0
data/lib/logstash/outputs/riemann.rb
@@ -0,0 +1,109 @@
# encoding: utf-8
require "logstash/outputs/base"
require "logstash/namespace"

# Riemann is a network event stream processing system.
#
# While Riemann is very similar conceptually to Logstash, it has
# much more in terms of being a monitoring system replacement.
#
# Riemann is used in Logstash much like statsd or other metric-related
# outputs.
#
# You can learn about Riemann here:
#
# * <http://riemann.io/>
# You can see the author talk about it here:
# * <http://vimeo.com/38377415>
#
class LogStash::Outputs::Riemann < LogStash::Outputs::Base
  config_name "riemann"
  milestone 1

  # The address of the Riemann server.
  config :host, :validate => :string, :default => "localhost"

  # The port to connect to on your Riemann server.
  config :port, :validate => :number, :default => 5555

  # The protocol to use:
  # UDP is non-blocking,
  # TCP is blocking.
  #
  # Logstash's default output behaviour
  # is to never lose events,
  # so we use tcp as the default here.
  config :protocol, :validate => ["tcp", "udp"], :default => "tcp"

  # The name of the sender.
  # This sets the `host` value
  # in the Riemann event.
  config :sender, :validate => :string, :default => "%{host}"

  # A Hash to set Riemann event fields
  # (<http://riemann.io/concepts.html>).
  #
  # The following event fields are supported:
  # `description`, `state`, `metric`, `ttl`, `service`
  #
  # Tags found on the Logstash event will automatically be added to the
  # Riemann event.
  #
  # Any other field set here will be passed to Riemann as an event attribute.
  #
  # Example:
  #
  #     riemann {
  #         riemann_event => {
  #             "metric" => "%{metric}"
  #             "service" => "%{service}"
  #         }
  #     }
  #
  # `metric` and `ttl` values will be coerced to a floating point value.
  # Values which cannot be coerced will be zero (0.0).
  #
  # `description`, by default, will be set to the event message
  # but can be overridden here.
  config :riemann_event, :validate => :hash

  #
  # Enable debugging output?
  config :debug, :validate => :boolean, :default => false

  public
  def register
    require 'riemann/client'
    @client = Riemann::Client.new(:host => @host, :port => @port)
  end # def register

  public
  def receive(event)
    return unless output?(event)

    # Let's build us an event, shall we?
    r_event = Hash.new
    r_event[:host] = event.sprintf(@sender)
    # riemann doesn't handle floats so we reduce the precision here
    r_event[:time] = event["@timestamp"].to_i
    r_event[:description] = event["message"]
    if @riemann_event
      @riemann_event.each do |key, val|
        if ["ttl","metric"].include?(key)
          r_event[key.to_sym] = event.sprintf(val).to_f
        else
          r_event[key.to_sym] = event.sprintf(val)
        end
      end
    end
    r_event[:tags] = event["tags"] if event["tags"].is_a?(Array)
    @logger.debug("Riemann event: ", :riemann_event => r_event)
    begin
      proto_client = @client.instance_variable_get("@#{@protocol}")
      @logger.debug("Riemann client proto: #{proto_client.to_s}")
      proto_client << r_event
    rescue Exception => e
      @logger.debug("Unhandled exception", :error => e)
    end
  end # def receive
end # class LogStash::Outputs::Riemann
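The `riemann_event` comment above notes that `metric` and `ttl` values are coerced to floats, with non-coercible values becoming 0.0. Below is a minimal standalone Ruby sketch of that coercion branch; the sample hash is made up for illustration, and in the plugin the values come from `event.sprintf`.

    # Illustration only: mirrors the ttl/metric branch of receive() above.
    # Assume event.sprintf has already substituted the %{...} field references.
    substituted = { "metric" => "0.75", "ttl" => "300", "state" => "ok" }

    r_event = {}
    substituted.each do |key, val|
      if ["ttl", "metric"].include?(key)
        r_event[key.to_sym] = val.to_f   # "0.75" -> 0.75; a non-numeric string -> 0.0
      else
        r_event[key.to_sym] = val
      end
    end

    p r_event   # => {:metric=>0.75, :ttl=>300.0, :state=>"ok"}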
data/lib/logstash/outputs/s3.rb
@@ -0,0 +1,356 @@
# encoding: utf-8
require "logstash/outputs/base"
require "logstash/namespace"

# TODO integrate aws_config in the future
#require "logstash/plugin_mixins/aws_config"

# INFORMATION:
#
# This plugin stores Logstash events in Amazon Simple Storage Service (Amazon S3).
# To use it you need AWS credentials and an S3 bucket.
# Make sure you have permission to write files to the S3 bucket, and run Logstash with enough privileges to establish the connection.
#
# The S3 plugin lets you do something fairly involved, so let's explain :)
#
# The S3 output creates temporary files under "/opt/logstash/S3_temp/". If you want, you can change the path at the start of the register method.
# These files have special names, for example:
#
# ls.s3.ip-10-228-27-95.2013-04-18T10.00.tag_hello.part0.txt
#
# ls.s3 : indicates the logstash s3 plugin
#
# "ip-10-228-27-95" : the machine's IP address, useful if several Logstash instances write to the same bucket.
# "2013-04-18T10.00" : the time slice, based on the time_file setting.
# "tag_hello" : the event's tag, so events with the same tag are collected together.
# "part0" : if size_file is set, more parts are generated whenever file.size > size_file.
# When a file is full it is pushed to the bucket and deleted from the temporary directory.
# If a file is empty it is not pushed, only deleted.
#
# This plugin has a mechanism to restore previous temporary files if something crashes.
#
##[Note] :
#
## If you specify both size_file and time_file, a file is created for each tag (if specified); whenever time_file fires or
## a file's size > size_file, the files are pushed to the S3 bucket and deleted from local disk.
#
## If you specify only time_file (no size_file), only one file is created per tag (if specified).
## Whenever time_file fires, the files are pushed to the S3 bucket and deleted from local disk.
#
## If you specify only size_file (no time_file), files are created per tag (if specified),
## and whenever their size > size_file they are pushed to the S3 bucket and deleted from local disk.
#
## If you specify neither size_file nor time_file you get a curious mode: only one file is created per tag (if specified).
## That file then stays in the temporary directory and is not pushed to the bucket until Logstash is restarted.
#
# INFORMATION ABOUT CLASS:
#
# I tried to comment the class as best I could.
# There is much to improve; if you want some points to develop, here is a list:
#
# TODO Integrate aws_config in the future
# TODO Find a method to push all remaining files when Logstash closes the session.
# TODO Integrate @field in the file path
# TODO Permanent connection or on demand? For now on demand, but that isn't a good implementation.
#      Use a loop or a thread to retry the connection before hitting a timeout and signalling an error.
# TODO If you have bug reports or helpful advice, contact me; remember that this code is as much yours as mine,
#      so work on it if you want :)
#
# USAGE:
#
# This is an example Logstash config:
#
# output {
#    s3 {
#      access_key_id => "crazy_key"             (required)
#      secret_access_key => "monkey_access_key" (required)
#      endpoint_region => "eu-west-1"           (required)
#      bucket => "boss_please_open_your_bucket" (required)
#      size_file => 2048                        (optional)
#      time_file => 5                           (optional)
#      format => "plain"                        (optional)
#      canned_acl => "private"                  (optional. Options are "private", "public_read", "public_read_write", "authenticated_read". Defaults to "private")
#    }
# }
#
# Let's analyze this:
#
# access_key_id => "crazy_key"
# Amazon will give you this key to use their service if you buy it or try it. (Not very open source anyway.)
#
# secret_access_key => "monkey_access_key"
# Amazon will give you the secret_access_key to use their service if you buy it or try it. (Not very open source anyway.)
#
# endpoint_region => "eu-west-1"
# When you make a contract with Amazon, you should know in which region the services you use are hosted.
#
# bucket => "boss_please_open_your_bucket"
# Make sure you know the bucket name and have permission to write to it.
#
# size_file => 2048
# The size, in KB, a file in the temporary directory can reach before it is pushed to the bucket.
# Useful if you have a small server with little disk space and don't want to fill it with unnecessary temporary log files.
#
# time_file => 5
# The time, in minutes, before the files are pushed to the bucket. Useful if you want to push the files at a fixed interval.
#
# format => "plain"
# The format of the events you want to store in the files.
#
# canned_acl => "private"
# The S3 canned ACL to use when putting the file. Defaults to "private".
#
# LET'S ROCK AND ROLL ON THE CODE!
#
class LogStash::Outputs::S3 < LogStash::Outputs::Base
  #TODO integrate aws_config in the future
  # include LogStash::PluginMixins::AwsConfig

  config_name "s3"
  milestone 1

  # Aws access_key.
  config :access_key_id, :validate => :string

  # Aws secret_access_key
  config :secret_access_key, :validate => :string

  # S3 bucket
  config :bucket, :validate => :string

  # Aws endpoint_region
  config :endpoint_region, :validate => ["us-east-1", "us-west-1", "us-west-2",
                                          "eu-west-1", "ap-southeast-1", "ap-southeast-2",
                                          "ap-northeast-1", "sa-east-1", "us-gov-west-1"], :default => "us-east-1"

  # Set the file size in KB: when a file grows beyond size_file, the events are split across two or more files on the bucket.
  # If you have tags, a separate size-limited file is generated for every tag.
  ##NOTE: defining a file size is the better choice, because a local temporary file is generated on disk and then put into the bucket.
  config :size_file, :validate => :number, :default => 0

  # Set the time, in minutes, after which the current sub_time_section of the bucket is closed.
  # If you also define size_file, you get a number of part files within each time section and tag.
  # 0 means the listener stays open forever; beware of setting both time_file 0 and size_file 0, because the file is never pushed to the bucket;
  # for now the only thing this plugin can do in that case is push the file when Logstash restarts.
  config :time_file, :validate => :number, :default => 0

  # The event format you want to store in files. Defaults to plain text.
  config :format, :validate => [ "json", "plain", "nil" ], :default => "plain"

  ## IMPORTANT: if you run multiple s3 output instances, specify "restore => true" on one of them and "restore => false" on the others.
  ## This is a hack to avoid destroying new files while restoring the initial ones.
  ## If you do not specify "restore => true" and Logstash crashes or is restarted, the leftover files are not sent to the bucket,
  ## for example if you have a single instance.
  config :restore, :validate => :boolean, :default => false

  # Aws canned ACL
  config :canned_acl, :validate => ["private", "public_read", "public_read_write", "authenticated_read"],
         :default => "private"

  # Set up the AWS configuration and establish the connection.
  def aws_s3_config

    @endpoint_region == 'us-east-1' ? @endpoint_region = 's3.amazonaws.com' : @endpoint_region = 's3-'+@endpoint_region+'.amazonaws.com'

    @logger.info("Registering s3 output", :bucket => @bucket, :endpoint_region => @endpoint_region)

    AWS.config(
      :access_key_id => @access_key_id,
      :secret_access_key => @secret_access_key,
      :s3_endpoint => @endpoint_region
    )
    @s3 = AWS::S3.new

  end

  # Manage the sleep/wake cycle of the timer thread.
  def time_alert(interval)

    Thread.new do
      loop do
        start_time = Time.now
        yield
        elapsed = Time.now - start_time
        sleep([interval - elapsed, 0].max)
      end
    end

  end

  # Write a file to the bucket. Accepts the file and its basename.
  def write_on_bucket (file_data, file_basename)

    # if the connection with s3 was lost; bad control-flow implementation.
    if ( @s3 == nil)
      aws_s3_config
    end

    # find and use the bucket
    bucket = @s3.buckets[@bucket]

    @logger.debug "S3: ready to write "+file_basename+" in bucket "+@bucket+", Fire in the hole!"

    # prepare to write the file
    object = bucket.objects[file_basename]
    object.write(:file => file_data, :acl => @canned_acl)

    @logger.debug "S3: has written "+file_basename+" in bucket "+@bucket + " with canned ACL \"" + @canned_acl + "\""

  end

  # Build a new path used to name the file.
  def getFinalPath

    @pass_time = Time.now
    return @temp_directory+"ls.s3."+Socket.gethostname+"."+(@pass_time).strftime("%Y-%m-%dT%H.%M")

  end

  # Restore files left over from a previous Logstash crash, or prepare the files to send to the bucket.
  # Takes two parameters: flag and name. Flag indicates whether we are restoring; name is the file (glob) name.
  def upFile(flag, name)

    Dir[@temp_directory+name].each do |file|
      name_file = File.basename(file)

      if (flag == true)
        @logger.warn "S3: have found temporary file: "+name_file+", something has crashed before... Prepare for upload in bucket!"
      end

      if (!File.zero?(file))
        write_on_bucket(file, name_file)

        if (flag == true)
          @logger.debug "S3: file: "+name_file+" restored on bucket "+@bucket
        else
          @logger.debug "S3: file: "+name_file+" was put on bucket "+@bucket
        end
      end

      File.delete (file)

    end
  end

  # Create a new empty temporary file for use. The flag indicates a new time_file subsection.
  def newFile (flag)

    if (flag == true)
      @current_final_path = getFinalPath
      @sizeCounter = 0
    end

    if (@tags.size != 0)
      @tempFile = File.new(@current_final_path+".tag_"+@tag_path+"part"+@sizeCounter.to_s+".txt", "w")
    else
      @tempFile = File.new(@current_final_path+".part"+@sizeCounter.to_s+".txt", "w")
    end

  end

  public
  def register
    require "aws-sdk"
    @temp_directory = "/opt/logstash/S3_temp/"

    if (@tags.size != 0)
      @tag_path = ""
      for i in (0..@tags.size-1)
        @tag_path += @tags[i].to_s+"."
      end
    end

    if !(File.directory? @temp_directory)
      @logger.debug "S3: Directory "+@temp_directory+" doesn't exist, let's make it!"
      Dir.mkdir(@temp_directory)
    else
      @logger.debug "S3: Directory "+@temp_directory+" exist, nothing to do"
    end

    if (@restore == true )
      @logger.debug "S3: is attempting to verify previous crashes..."

      upFile(true, "*.txt")
    end

    newFile(true)

    if (time_file != 0)
      first_time = true
      @thread = time_alert(@time_file*60) do
        if (first_time == false)
          @logger.debug "S3: time_file triggered, let's bucket the file if dosen't empty and create new file "
          upFile(false, File.basename(@tempFile))
          newFile(true)
        else
          first_time = false
        end
      end
    end

  end

  public
  def receive(event)
    return unless output?(event)

    # Prepare the format of the event
    if (@format == "plain")
      message = self.class.format_message(event)
    elsif (@format == "json")
      message = event.to_json
    else
      message = event.to_s
    end

    if(time_file !=0)
      @logger.debug "S3: trigger files after "+((@pass_time+60*time_file)-Time.now).to_s
    end

    # if a size limit was specified
    if(size_file !=0)

      if (@tempFile.size < @size_file )

        @logger.debug "S3: File have size: "+@tempFile.size.to_s+" and size_file is: "+ @size_file.to_s
        @logger.debug "S3: put event into: "+File.basename(@tempFile)

        # Put the event in the file, now!
        File.open(@tempFile, 'a') do |file|
          file.puts message
          file.write "\n"
        end

      else

        @logger.debug "S3: file: "+File.basename(@tempFile)+" is too large, let's bucket it and create new file"
        upFile(false, File.basename(@tempFile))
        @sizeCounter += 1
        newFile(false)

      end

    # otherwise we put everything in one file
    else

      @logger.debug "S3: put event into "+File.basename(@tempFile)
      File.open(@tempFile, 'a') do |file|
        file.puts message
        file.write "\n"
      end
    end

  end

  def self.format_message(event)
    message = "Date: #{event["@timestamp"]}\n"
    message << "Source: #{event["source"]}\n"
    message << "Tags: #{event["tags"].join(', ')}\n"
    message << "Fields: #{event.to_hash.inspect}\n"
    message << "Message: #{event["message"]}"
  end

end

# Enjoy it, by Bistic:)
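The header comments above describe the temporary-file naming scheme that getFinalPath and newFile implement. Below is a minimal standalone Ruby sketch that rebuilds the same name outside the plugin; the tag list and part counter are example values, not plugin defaults.

    # Illustration only: reproduces the name built by getFinalPath and newFile above.
    require "socket"

    temp_directory = "/opt/logstash/S3_temp/"         # hard-coded in register
    tags           = ["hello"]                        # example tags
    tag_path       = tags.map { |t| "#{t}." }.join    # => "hello."
    part           = 0                                # bumped each time size_file is exceeded

    final_path = temp_directory + "ls.s3." + Socket.gethostname + "." +
                 Time.now.strftime("%Y-%m-%dT%H.%M")
    file_name  = final_path + ".tag_" + tag_path + "part" + part.to_s + ".txt"

    puts file_name
    # e.g. /opt/logstash/S3_temp/ls.s3.ip-10-228-27-95.2013-04-18T10.00.tag_hello.part0.txt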