logstash-lib 1.3.2
- data/.gitignore +24 -0
- data/.tailor +8 -0
- data/.travis.yml +12 -0
- data/CHANGELOG +1185 -0
- data/CONTRIBUTING.md +61 -0
- data/CONTRIBUTORS +79 -0
- data/LICENSE +14 -0
- data/Makefile +460 -0
- data/README.md +120 -0
- data/STYLE.md +96 -0
- data/bin/logstash +37 -0
- data/bin/logstash-test +4 -0
- data/bin/logstash-web +4 -0
- data/bin/logstash.lib.sh +78 -0
- data/bot/check_pull_changelog.rb +89 -0
- data/docs/configuration.md +260 -0
- data/docs/docgen.rb +242 -0
- data/docs/extending/example-add-a-new-filter.md +121 -0
- data/docs/extending/index.md +91 -0
- data/docs/flags.md +43 -0
- data/docs/generate_index.rb +28 -0
- data/docs/index.html.erb +56 -0
- data/docs/learn.md +46 -0
- data/docs/life-of-an-event.md +109 -0
- data/docs/logging-tool-comparisons.md +60 -0
- data/docs/plugin-doc.html.erb +91 -0
- data/docs/plugin-milestones.md +41 -0
- data/docs/plugin-synopsis.html.erb +24 -0
- data/docs/release-engineering.md +46 -0
- data/docs/release-test-results.md +14 -0
- data/docs/repositories.md +35 -0
- data/docs/tutorials/10-minute-walkthrough/apache-elasticsearch.conf +35 -0
- data/docs/tutorials/10-minute-walkthrough/apache-parse.conf +33 -0
- data/docs/tutorials/10-minute-walkthrough/apache_log.1 +1 -0
- data/docs/tutorials/10-minute-walkthrough/apache_log.2.bz2 +0 -0
- data/docs/tutorials/10-minute-walkthrough/hello-search.conf +25 -0
- data/docs/tutorials/10-minute-walkthrough/hello.conf +16 -0
- data/docs/tutorials/10-minute-walkthrough/index.md +124 -0
- data/docs/tutorials/10-minute-walkthrough/step-5-output.txt +17 -0
- data/docs/tutorials/getting-started-centralized-overview-diagram.png +0 -0
- data/docs/tutorials/getting-started-centralized-overview-diagram.xml +1 -0
- data/docs/tutorials/getting-started-centralized.md +217 -0
- data/docs/tutorials/getting-started-simple.md +200 -0
- data/docs/tutorials/just-enough-rabbitmq-for-logstash.md +201 -0
- data/docs/tutorials/media/frontend-response-codes.png +0 -0
- data/docs/tutorials/metrics-from-logs.md +84 -0
- data/docs/tutorials/zeromq.md +118 -0
- data/extract_services.rb +29 -0
- data/gembag.rb +64 -0
- data/lib/logstash-event.rb +2 -0
- data/lib/logstash.rb +4 -0
- data/lib/logstash/JRUBY-6970-openssl.rb +22 -0
- data/lib/logstash/JRUBY-6970.rb +102 -0
- data/lib/logstash/agent.rb +305 -0
- data/lib/logstash/certs/cacert.pem +3895 -0
- data/lib/logstash/codecs/base.rb +49 -0
- data/lib/logstash/codecs/compress_spooler.rb +50 -0
- data/lib/logstash/codecs/dots.rb +18 -0
- data/lib/logstash/codecs/edn.rb +28 -0
- data/lib/logstash/codecs/edn_lines.rb +36 -0
- data/lib/logstash/codecs/fluent.rb +55 -0
- data/lib/logstash/codecs/graphite.rb +114 -0
- data/lib/logstash/codecs/json.rb +41 -0
- data/lib/logstash/codecs/json_lines.rb +52 -0
- data/lib/logstash/codecs/json_spooler.rb +22 -0
- data/lib/logstash/codecs/line.rb +58 -0
- data/lib/logstash/codecs/msgpack.rb +43 -0
- data/lib/logstash/codecs/multiline.rb +189 -0
- data/lib/logstash/codecs/netflow.rb +342 -0
- data/lib/logstash/codecs/netflow/util.rb +212 -0
- data/lib/logstash/codecs/noop.rb +19 -0
- data/lib/logstash/codecs/oldlogstashjson.rb +56 -0
- data/lib/logstash/codecs/plain.rb +48 -0
- data/lib/logstash/codecs/rubydebug.rb +22 -0
- data/lib/logstash/codecs/spool.rb +38 -0
- data/lib/logstash/config/Makefile +4 -0
- data/lib/logstash/config/config_ast.rb +380 -0
- data/lib/logstash/config/file.rb +39 -0
- data/lib/logstash/config/grammar.rb +3504 -0
- data/lib/logstash/config/grammar.treetop +241 -0
- data/lib/logstash/config/mixin.rb +464 -0
- data/lib/logstash/config/registry.rb +13 -0
- data/lib/logstash/config/test.conf +18 -0
- data/lib/logstash/errors.rb +10 -0
- data/lib/logstash/event.rb +262 -0
- data/lib/logstash/filters/advisor.rb +178 -0
- data/lib/logstash/filters/alter.rb +173 -0
- data/lib/logstash/filters/anonymize.rb +93 -0
- data/lib/logstash/filters/base.rb +190 -0
- data/lib/logstash/filters/checksum.rb +50 -0
- data/lib/logstash/filters/cidr.rb +76 -0
- data/lib/logstash/filters/cipher.rb +145 -0
- data/lib/logstash/filters/clone.rb +35 -0
- data/lib/logstash/filters/collate.rb +114 -0
- data/lib/logstash/filters/csv.rb +94 -0
- data/lib/logstash/filters/date.rb +244 -0
- data/lib/logstash/filters/dns.rb +201 -0
- data/lib/logstash/filters/drop.rb +32 -0
- data/lib/logstash/filters/elapsed.rb +256 -0
- data/lib/logstash/filters/elasticsearch.rb +73 -0
- data/lib/logstash/filters/environment.rb +27 -0
- data/lib/logstash/filters/extractnumbers.rb +84 -0
- data/lib/logstash/filters/gelfify.rb +52 -0
- data/lib/logstash/filters/geoip.rb +145 -0
- data/lib/logstash/filters/grep.rb +153 -0
- data/lib/logstash/filters/grok.rb +425 -0
- data/lib/logstash/filters/grokdiscovery.rb +75 -0
- data/lib/logstash/filters/i18n.rb +51 -0
- data/lib/logstash/filters/json.rb +90 -0
- data/lib/logstash/filters/json_encode.rb +52 -0
- data/lib/logstash/filters/kv.rb +232 -0
- data/lib/logstash/filters/metaevent.rb +68 -0
- data/lib/logstash/filters/metrics.rb +237 -0
- data/lib/logstash/filters/multiline.rb +241 -0
- data/lib/logstash/filters/mutate.rb +399 -0
- data/lib/logstash/filters/noop.rb +21 -0
- data/lib/logstash/filters/prune.rb +149 -0
- data/lib/logstash/filters/punct.rb +32 -0
- data/lib/logstash/filters/railsparallelrequest.rb +86 -0
- data/lib/logstash/filters/range.rb +142 -0
- data/lib/logstash/filters/ruby.rb +42 -0
- data/lib/logstash/filters/sleep.rb +111 -0
- data/lib/logstash/filters/split.rb +64 -0
- data/lib/logstash/filters/sumnumbers.rb +73 -0
- data/lib/logstash/filters/syslog_pri.rb +107 -0
- data/lib/logstash/filters/translate.rb +121 -0
- data/lib/logstash/filters/unique.rb +29 -0
- data/lib/logstash/filters/urldecode.rb +57 -0
- data/lib/logstash/filters/useragent.rb +112 -0
- data/lib/logstash/filters/uuid.rb +58 -0
- data/lib/logstash/filters/xml.rb +139 -0
- data/lib/logstash/filters/zeromq.rb +123 -0
- data/lib/logstash/filterworker.rb +122 -0
- data/lib/logstash/inputs/base.rb +125 -0
- data/lib/logstash/inputs/collectd.rb +306 -0
- data/lib/logstash/inputs/drupal_dblog.rb +323 -0
- data/lib/logstash/inputs/drupal_dblog/jdbcconnection.rb +66 -0
- data/lib/logstash/inputs/elasticsearch.rb +140 -0
- data/lib/logstash/inputs/eventlog.rb +129 -0
- data/lib/logstash/inputs/eventlog/racob_fix.rb +44 -0
- data/lib/logstash/inputs/exec.rb +69 -0
- data/lib/logstash/inputs/file.rb +146 -0
- data/lib/logstash/inputs/ganglia.rb +127 -0
- data/lib/logstash/inputs/ganglia/gmondpacket.rb +146 -0
- data/lib/logstash/inputs/ganglia/xdr.rb +327 -0
- data/lib/logstash/inputs/gelf.rb +138 -0
- data/lib/logstash/inputs/gemfire.rb +222 -0
- data/lib/logstash/inputs/generator.rb +97 -0
- data/lib/logstash/inputs/graphite.rb +41 -0
- data/lib/logstash/inputs/heroku.rb +51 -0
- data/lib/logstash/inputs/imap.rb +136 -0
- data/lib/logstash/inputs/irc.rb +84 -0
- data/lib/logstash/inputs/log4j.rb +136 -0
- data/lib/logstash/inputs/lumberjack.rb +53 -0
- data/lib/logstash/inputs/pipe.rb +57 -0
- data/lib/logstash/inputs/rabbitmq.rb +126 -0
- data/lib/logstash/inputs/rabbitmq/bunny.rb +118 -0
- data/lib/logstash/inputs/rabbitmq/hot_bunnies.rb +1 -0
- data/lib/logstash/inputs/rabbitmq/march_hare.rb +129 -0
- data/lib/logstash/inputs/redis.rb +263 -0
- data/lib/logstash/inputs/relp.rb +106 -0
- data/lib/logstash/inputs/s3.rb +279 -0
- data/lib/logstash/inputs/snmptrap.rb +87 -0
- data/lib/logstash/inputs/sqlite.rb +185 -0
- data/lib/logstash/inputs/sqs.rb +172 -0
- data/lib/logstash/inputs/stdin.rb +46 -0
- data/lib/logstash/inputs/stomp.rb +84 -0
- data/lib/logstash/inputs/syslog.rb +237 -0
- data/lib/logstash/inputs/tcp.rb +231 -0
- data/lib/logstash/inputs/threadable.rb +18 -0
- data/lib/logstash/inputs/twitter.rb +82 -0
- data/lib/logstash/inputs/udp.rb +81 -0
- data/lib/logstash/inputs/unix.rb +163 -0
- data/lib/logstash/inputs/varnishlog.rb +48 -0
- data/lib/logstash/inputs/websocket.rb +50 -0
- data/lib/logstash/inputs/wmi.rb +72 -0
- data/lib/logstash/inputs/xmpp.rb +81 -0
- data/lib/logstash/inputs/zenoss.rb +143 -0
- data/lib/logstash/inputs/zeromq.rb +165 -0
- data/lib/logstash/kibana.rb +113 -0
- data/lib/logstash/loadlibs.rb +9 -0
- data/lib/logstash/logging.rb +89 -0
- data/lib/logstash/monkeypatches-for-bugs.rb +2 -0
- data/lib/logstash/monkeypatches-for-debugging.rb +47 -0
- data/lib/logstash/monkeypatches-for-performance.rb +66 -0
- data/lib/logstash/multiqueue.rb +53 -0
- data/lib/logstash/namespace.rb +16 -0
- data/lib/logstash/outputs/base.rb +120 -0
- data/lib/logstash/outputs/boundary.rb +116 -0
- data/lib/logstash/outputs/circonus.rb +78 -0
- data/lib/logstash/outputs/cloudwatch.rb +351 -0
- data/lib/logstash/outputs/csv.rb +55 -0
- data/lib/logstash/outputs/datadog.rb +93 -0
- data/lib/logstash/outputs/datadog_metrics.rb +123 -0
- data/lib/logstash/outputs/elasticsearch.rb +332 -0
- data/lib/logstash/outputs/elasticsearch/elasticsearch-template.json +44 -0
- data/lib/logstash/outputs/elasticsearch_http.rb +256 -0
- data/lib/logstash/outputs/elasticsearch_river.rb +214 -0
- data/lib/logstash/outputs/email.rb +299 -0
- data/lib/logstash/outputs/exec.rb +40 -0
- data/lib/logstash/outputs/file.rb +180 -0
- data/lib/logstash/outputs/ganglia.rb +75 -0
- data/lib/logstash/outputs/gelf.rb +208 -0
- data/lib/logstash/outputs/gemfire.rb +103 -0
- data/lib/logstash/outputs/google_bigquery.rb +570 -0
- data/lib/logstash/outputs/google_cloud_storage.rb +431 -0
- data/lib/logstash/outputs/graphite.rb +143 -0
- data/lib/logstash/outputs/graphtastic.rb +185 -0
- data/lib/logstash/outputs/hipchat.rb +80 -0
- data/lib/logstash/outputs/http.rb +142 -0
- data/lib/logstash/outputs/irc.rb +80 -0
- data/lib/logstash/outputs/jira.rb +109 -0
- data/lib/logstash/outputs/juggernaut.rb +105 -0
- data/lib/logstash/outputs/librato.rb +146 -0
- data/lib/logstash/outputs/loggly.rb +93 -0
- data/lib/logstash/outputs/lumberjack.rb +51 -0
- data/lib/logstash/outputs/metriccatcher.rb +103 -0
- data/lib/logstash/outputs/mongodb.rb +81 -0
- data/lib/logstash/outputs/nagios.rb +119 -0
- data/lib/logstash/outputs/nagios_nsca.rb +123 -0
- data/lib/logstash/outputs/null.rb +18 -0
- data/lib/logstash/outputs/opentsdb.rb +101 -0
- data/lib/logstash/outputs/pagerduty.rb +79 -0
- data/lib/logstash/outputs/pipe.rb +132 -0
- data/lib/logstash/outputs/rabbitmq.rb +96 -0
- data/lib/logstash/outputs/rabbitmq/bunny.rb +135 -0
- data/lib/logstash/outputs/rabbitmq/hot_bunnies.rb +1 -0
- data/lib/logstash/outputs/rabbitmq/march_hare.rb +143 -0
- data/lib/logstash/outputs/redis.rb +245 -0
- data/lib/logstash/outputs/riak.rb +152 -0
- data/lib/logstash/outputs/riemann.rb +109 -0
- data/lib/logstash/outputs/s3.rb +356 -0
- data/lib/logstash/outputs/sns.rb +124 -0
- data/lib/logstash/outputs/solr_http.rb +78 -0
- data/lib/logstash/outputs/sqs.rb +141 -0
- data/lib/logstash/outputs/statsd.rb +116 -0
- data/lib/logstash/outputs/stdout.rb +53 -0
- data/lib/logstash/outputs/stomp.rb +67 -0
- data/lib/logstash/outputs/syslog.rb +145 -0
- data/lib/logstash/outputs/tcp.rb +145 -0
- data/lib/logstash/outputs/udp.rb +38 -0
- data/lib/logstash/outputs/websocket.rb +46 -0
- data/lib/logstash/outputs/websocket/app.rb +29 -0
- data/lib/logstash/outputs/websocket/pubsub.rb +45 -0
- data/lib/logstash/outputs/xmpp.rb +78 -0
- data/lib/logstash/outputs/zabbix.rb +108 -0
- data/lib/logstash/outputs/zeromq.rb +125 -0
- data/lib/logstash/pipeline.rb +286 -0
- data/lib/logstash/plugin.rb +150 -0
- data/lib/logstash/plugin_mixins/aws_config.rb +93 -0
- data/lib/logstash/program.rb +15 -0
- data/lib/logstash/runner.rb +238 -0
- data/lib/logstash/sized_queue.rb +8 -0
- data/lib/logstash/test.rb +183 -0
- data/lib/logstash/threadwatchdog.rb +37 -0
- data/lib/logstash/time_addon.rb +33 -0
- data/lib/logstash/util.rb +106 -0
- data/lib/logstash/util/buftok.rb +139 -0
- data/lib/logstash/util/charset.rb +39 -0
- data/lib/logstash/util/fieldreference.rb +50 -0
- data/lib/logstash/util/password.rb +25 -0
- data/lib/logstash/util/prctl.rb +11 -0
- data/lib/logstash/util/relp.rb +326 -0
- data/lib/logstash/util/require-helper.rb +18 -0
- data/lib/logstash/util/socket_peer.rb +7 -0
- data/lib/logstash/util/zenoss.rb +566 -0
- data/lib/logstash/util/zeromq.rb +47 -0
- data/lib/logstash/version.rb +6 -0
- data/locales/en.yml +170 -0
- data/logstash-event.gemspec +29 -0
- data/logstash.gemspec +128 -0
- data/patterns/firewalls +60 -0
- data/patterns/grok-patterns +91 -0
- data/patterns/haproxy +37 -0
- data/patterns/java +3 -0
- data/patterns/linux-syslog +14 -0
- data/patterns/mcollective +1 -0
- data/patterns/mcollective-patterns +4 -0
- data/patterns/nagios +108 -0
- data/patterns/postgresql +3 -0
- data/patterns/redis +3 -0
- data/patterns/ruby +2 -0
- data/pkg/build.sh +135 -0
- data/pkg/centos/after-install.sh +1 -0
- data/pkg/centos/before-install.sh +10 -0
- data/pkg/centos/before-remove.sh +11 -0
- data/pkg/centos/sysconfig +15 -0
- data/pkg/debian/after-install.sh +5 -0
- data/pkg/debian/before-install.sh +13 -0
- data/pkg/debian/before-remove.sh +13 -0
- data/pkg/debian/build.sh +34 -0
- data/pkg/debian/debian/README +6 -0
- data/pkg/debian/debian/changelog +17 -0
- data/pkg/debian/debian/compat +1 -0
- data/pkg/debian/debian/control +16 -0
- data/pkg/debian/debian/copyright +27 -0
- data/pkg/debian/debian/dirs +19 -0
- data/pkg/debian/debian/docs +0 -0
- data/pkg/debian/debian/logstash.default +39 -0
- data/pkg/debian/debian/logstash.init +201 -0
- data/pkg/debian/debian/logstash.install +1 -0
- data/pkg/debian/debian/logstash.logrotate +9 -0
- data/pkg/debian/debian/logstash.postinst +68 -0
- data/pkg/debian/debian/logstash.postrm +23 -0
- data/pkg/debian/debian/manpage.1.ex +59 -0
- data/pkg/debian/debian/preinst.ex +37 -0
- data/pkg/debian/debian/prerm.ex +40 -0
- data/pkg/debian/debian/release.conf +5 -0
- data/pkg/debian/debian/rules +80 -0
- data/pkg/debian/debian/watch.ex +22 -0
- data/pkg/logrotate.conf +8 -0
- data/pkg/logstash-web.default +41 -0
- data/pkg/logstash-web.sysv.debian +201 -0
- data/pkg/logstash-web.upstart.ubuntu +18 -0
- data/pkg/logstash.default +45 -0
- data/pkg/logstash.sysv.debian +202 -0
- data/pkg/logstash.sysv.redhat +158 -0
- data/pkg/logstash.upstart.ubuntu +20 -0
- data/pkg/rpm/SOURCES/logstash.conf +26 -0
- data/pkg/rpm/SOURCES/logstash.init +80 -0
- data/pkg/rpm/SOURCES/logstash.logrotate +8 -0
- data/pkg/rpm/SOURCES/logstash.sysconfig +3 -0
- data/pkg/rpm/SOURCES/logstash.wrapper +105 -0
- data/pkg/rpm/SPECS/logstash.spec +180 -0
- data/pkg/rpm/readme.md +4 -0
- data/pkg/ubuntu/after-install.sh +7 -0
- data/pkg/ubuntu/before-install.sh +12 -0
- data/pkg/ubuntu/before-remove.sh +13 -0
- data/pull_release_note.rb +25 -0
- data/require-analyze.rb +22 -0
- data/spec/README.md +14 -0
- data/spec/codecs/edn.rb +40 -0
- data/spec/codecs/edn_lines.rb +53 -0
- data/spec/codecs/graphite.rb +96 -0
- data/spec/codecs/json.rb +57 -0
- data/spec/codecs/json_lines.rb +51 -0
- data/spec/codecs/json_spooler.rb +43 -0
- data/spec/codecs/msgpack.rb +39 -0
- data/spec/codecs/multiline.rb +60 -0
- data/spec/codecs/oldlogstashjson.rb +55 -0
- data/spec/codecs/plain.rb +35 -0
- data/spec/codecs/spool.rb +35 -0
- data/spec/conditionals/test.rb +323 -0
- data/spec/config.rb +31 -0
- data/spec/event.rb +165 -0
- data/spec/examples/fail2ban.rb +28 -0
- data/spec/examples/graphite-input.rb +41 -0
- data/spec/examples/mysql-slow-query.rb +70 -0
- data/spec/examples/parse-apache-logs.rb +66 -0
- data/spec/examples/parse-haproxy-logs.rb +115 -0
- data/spec/examples/syslog.rb +48 -0
- data/spec/filters/alter.rb +96 -0
- data/spec/filters/anonymize.rb +189 -0
- data/spec/filters/checksum.rb +41 -0
- data/spec/filters/clone.rb +67 -0
- data/spec/filters/collate.rb +122 -0
- data/spec/filters/csv.rb +174 -0
- data/spec/filters/date.rb +285 -0
- data/spec/filters/date_performance.rb +31 -0
- data/spec/filters/dns.rb +159 -0
- data/spec/filters/drop.rb +19 -0
- data/spec/filters/elapsed.rb +294 -0
- data/spec/filters/environment.rb +43 -0
- data/spec/filters/geoip.rb +62 -0
- data/spec/filters/grep.rb +342 -0
- data/spec/filters/grok.rb +473 -0
- data/spec/filters/grok/timeout2.rb +56 -0
- data/spec/filters/grok/timeouts.rb +39 -0
- data/spec/filters/i18n.rb +25 -0
- data/spec/filters/json.rb +72 -0
- data/spec/filters/json_encode.rb +37 -0
- data/spec/filters/kv.rb +403 -0
- data/spec/filters/metrics.rb +212 -0
- data/spec/filters/multiline.rb +119 -0
- data/spec/filters/mutate.rb +180 -0
- data/spec/filters/noop.rb +221 -0
- data/spec/filters/prune.rb +441 -0
- data/spec/filters/punct.rb +18 -0
- data/spec/filters/railsparallelrequest.rb +112 -0
- data/spec/filters/range.rb +169 -0
- data/spec/filters/split.rb +58 -0
- data/spec/filters/translate.rb +70 -0
- data/spec/filters/unique.rb +25 -0
- data/spec/filters/useragent.rb +42 -0
- data/spec/filters/xml.rb +157 -0
- data/spec/inputs/file.rb +107 -0
- data/spec/inputs/gelf.rb +52 -0
- data/spec/inputs/generator.rb +30 -0
- data/spec/inputs/imap.rb +60 -0
- data/spec/inputs/redis.rb +63 -0
- data/spec/inputs/relp.rb +70 -0
- data/spec/inputs/tcp.rb +101 -0
- data/spec/jar.rb +21 -0
- data/spec/outputs/csv.rb +266 -0
- data/spec/outputs/elasticsearch.rb +161 -0
- data/spec/outputs/elasticsearch_http.rb +240 -0
- data/spec/outputs/email.rb +173 -0
- data/spec/outputs/file.rb +82 -0
- data/spec/outputs/graphite.rb +236 -0
- data/spec/outputs/redis.rb +127 -0
- data/spec/speed.rb +20 -0
- data/spec/sqlite-test.rb +81 -0
- data/spec/support/LOGSTASH-733.rb +21 -0
- data/spec/support/LOGSTASH-820.rb +25 -0
- data/spec/support/akamai-grok.rb +26 -0
- data/spec/support/date-http.rb +17 -0
- data/spec/support/postwait1.rb +26 -0
- data/spec/support/pull375.rb +21 -0
- data/spec/test_utils.rb +125 -0
- data/spec/util/fieldeval_spec.rb +44 -0
- data/test/jenkins/config.xml.erb +74 -0
- data/test/jenkins/create-jobs.rb +23 -0
- data/test/jenkins/generatorjob.config.xml +66 -0
- data/tools/Gemfile +14 -0
- data/tools/Gemfile.jruby-1.9.lock +322 -0
- data/tools/Gemfile.rbx-2.1.lock +516 -0
- data/tools/Gemfile.ruby-1.9.1.lock +310 -0
- data/tools/Gemfile.ruby-2.0.0.lock +310 -0
- metadata +629 -0
data/lib/logstash/outputs/boundary.rb
@@ -0,0 +1,116 @@
# encoding: utf-8
require "logstash/outputs/base"
require "logstash/namespace"

class LogStash::Outputs::Boundary < LogStash::Outputs::Base
  # This output lets you send annotations to
  # Boundary based on Logstash events
  #
  # Note that since Logstash maintains no state
  # these will be one-shot events
  #
  # By default the start and stop time will be
  # the event timestamp
  #

  config_name "boundary"
  milestone 1

  # Your Boundary API key
  config :api_key, :validate => :string, :required => true

  # Your Boundary Org ID
  config :org_id, :validate => :string, :required => true

  # Start time
  # Override the start time
  # Note that Boundary requires this to be seconds since epoch
  # If overriding, it is your responsibility to type this correctly
  # By default this is set to `event["@timestamp"].to_i`
  config :start_time, :validate => :string

  # End time
  # Override the stop time
  # Note that Boundary requires this to be seconds since epoch
  # If overriding, it is your responsibility to type this correctly
  # By default this is set to `event["@timestamp"].to_i`
  config :end_time, :validate => :string

  # Type
  config :btype, :validate => :string

  # Sub-Type
  config :bsubtype, :validate => :string

  # Tags
  # Set any custom tags for this event
  # Default are the Logstash tags if any
  config :btags, :validate => :array

  # Auto
  # If set to true, logstash will try to pull boundary fields out
  # of the event. Any field explicitly set by config options will
  # override these.
  # ['type', 'subtype', 'creation_time', 'end_time', 'links', 'tags', 'loc']
  config :auto, :validate => :boolean, :default => false

  public
  def register
    require "net/https"
    require "uri"
    @url = "https://api.boundary.com/#{@org_id}/annotations"
    @uri = URI.parse(@url)
    @client = Net::HTTP.new(@uri.host, @uri.port)
    @client.use_ssl = true
    # Boundary cert doesn't verify
    @client.verify_mode = OpenSSL::SSL::VERIFY_NONE
  end # def register

  public
  def receive(event)
    return unless output?(event)

    boundary_event = Hash.new
    boundary_keys = ['type', 'subtype', 'creation_time', 'end_time', 'links', 'tags', 'loc']

    boundary_event['start_time'] = event.sprintf(@start_time) if @start_time
    boundary_event['end_time'] = event.sprintf(@end_time) if @end_time
    boundary_event['type'] = event.sprintf(@btype) if @btype
    boundary_event['subtype'] = event.sprintf(@bsubtype) if @bsubtype
    boundary_event['tags'] = @btags.collect { |x| event.sprintf(x) } if @btags

    if @auto
      boundary_fields = event['@fields'].select { |k| boundary_keys.member? k }
      boundary_event = boundary_fields.merge boundary_event
    end

    boundary_event = {
      'type' => event.sprintf("%{message}"),
      'subtype' => event.sprintf("%{type}"),
      'start_time' => event["@timestamp"].to_i,
      'end_time' => event["@timestamp"].to_i,
      'links' => [],
      'tags' => event["tags"],
    }.merge boundary_event

    request = Net::HTTP::Post.new(@uri.path)
    request.basic_auth(@api_key, '')

    @logger.debug("Boundary event", :boundary_event => boundary_event)

    begin
      request.body = boundary_event.to_json
      request.add_field("Content-Type", 'application/json')
      response = @client.request(request)
      @logger.warn("Boundary convo", :request => request.inspect, :response => response.inspect)
      raise unless response.code == '201'
    rescue Exception => e
      @logger.warn(
        "Unhandled exception",
        :request => request.inspect,
        :response => response.inspect,
        :exception => e.inspect
      )
    end
  end # def receive
end
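For context, a minimal pipeline sketch using the options this plugin defines above (api_key, org_id, btype, bsubtype, btags); the key, org ID, and all values below are placeholders invented for illustration, not settings shipped with this release:

    output {
      boundary {
        api_key  => "YOUR_BOUNDARY_API_KEY"    # placeholder credential
        org_id   => "YOUR_BOUNDARY_ORG_ID"     # placeholder org ID
        btype    => "deploy"                   # hypothetical annotation type
        bsubtype => "%{type}"                  # passed through event.sprintf
        btags    => ["logstash", "%{host}"]    # optional custom tags
      }
    }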
data/lib/logstash/outputs/circonus.rb
@@ -0,0 +1,78 @@
# encoding: utf-8
require "logstash/outputs/base"
require "logstash/namespace"

class LogStash::Outputs::Circonus < LogStash::Outputs::Base
  # This output lets you send annotations to
  # Circonus based on Logstash events
  #

  config_name "circonus"
  milestone 1

  # Your Circonus API Token
  config :api_token, :validate => :string, :required => true

  # Your Circonus App name
  # This will be passed through `event.sprintf`
  # so variables are allowed here:
  #
  # Example:
  # `app_name => "%{myappname}"`
  config :app_name, :validate => :string, :required => true

  # Annotations
  # Registers an annotation with Circonus
  # The only required fields are `title` and `description`.
  # `start` and `stop` will be set to `event["@timestamp"]`
  # You can add any other optional annotation values as well.
  # All values will be passed through `event.sprintf`
  #
  # Example:
  # ["title":"Logstash event", "description":"Logstash event for %{host}"]
  # or
  # ["title":"Logstash event", "description":"Logstash event for %{host}", "parent_id", "1"]
  config :annotation, :validate => :hash, :required => true, :default => {}

  public
  def register
    require "net/https"
    require "uri"
    @url = "https://circonus.com/api/json/"
    @uri = URI.parse(@url)
    @client = Net::HTTP.new(@uri.host, @uri.port)
    @client.use_ssl = true
    @client.verify_mode = OpenSSL::SSL::VERIFY_NONE
  end # def register

  public
  def receive(event)
    # TODO (lusis)
    # batch and flush
    return unless output?(event)

    annotation_event = Hash[*@annotation.collect { |k, v| [event.sprintf(k), event.sprintf(v)] }.flatten]
    @logger.warn("Annotation event", :data => annotation_event)

    annotation_array = []
    annotation_path = "#{@uri.path}annotation"
    @logger.warn("Annotation path", :data => annotation_path)
    request = Net::HTTP::Post.new(annotation_path)
    annotation_event['start'] = event["@timestamp"].to_i unless annotation_event['start']
    annotation_event['stop'] = event["@timestamp"].to_i unless annotation_event['stop']
    @logger.warn("Annotation event", :data => annotation_event)
    annotation_array << annotation_event
    begin
      request.set_form_data(:annotations => annotation_array.to_json)
      @logger.warn(annotation_event)
      request.add_field("X-Circonus-Auth-Token", "#{@api_token}")
      request.add_field("X-Circonus-App-Name", "#{event.sprintf(@app_name)}")
      response = @client.request(request)
      @logger.warn("Circonus convo", :request => request.inspect, :response => response.inspect)
      raise unless response.code == '200'
    rescue Exception => e
      @logger.warn("Unhandled exception", :request => request.inspect, :response => response.inspect, :exception => e.inspect)
    end
  end # def receive
end
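As with the boundary output, a config sketch for this plugin; only api_token, app_name, and annotation are options defined above, the token is a placeholder, the title/description values echo the plugin's own example, and the flattened key/value list form mirrors the add_field-style hash examples used elsewhere in this gem:

    output {
      circonus {
        api_token  => "YOUR_CIRCONUS_API_TOKEN"    # placeholder credential
        app_name   => "logstash"                   # passed through event.sprintf
        annotation => [ "title", "Logstash event",
                        "description", "Logstash event for %{host}" ]
      }
    }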
data/lib/logstash/outputs/cloudwatch.rb
@@ -0,0 +1,351 @@
# encoding: utf-8
require "logstash/outputs/base"
require "logstash/namespace"
require "logstash/plugin_mixins/aws_config"

# This output lets you aggregate and send metric data to AWS CloudWatch
#
# #### Summary:
# This plugin is intended to be used on a logstash indexer agent (but that
# is not the only way, see below.) In the intended scenario, one cloudwatch
# output plugin is configured, on the logstash indexer node, with just AWS API
# credentials, and possibly a region and/or a namespace. The output looks
# for fields present in events, and when it finds them, it uses them to
# calculate aggregate statistics. If the `metricname` option is set in this
# output, then any events which pass through it will be aggregated & sent to
# CloudWatch, but that is not recommended. The intended use is to NOT set the
# metricname option here, and instead to add a `CW_metricname` field (and other
# fields) to only the events you want sent to CloudWatch.
#
# When events pass through this output they are queued for background
# aggregation and sending, which happens every minute by default. The
# queue has a maximum size, and when it is full aggregated statistics will be
# sent to CloudWatch ahead of schedule. Whenever this happens a warning
# message is written to logstash's log. If you see this you should increase
# the `queue_size` configuration option to avoid the extra API calls. The queue
# is emptied every time we send data to CloudWatch.
#
# Note: when logstash is stopped the queue is destroyed before it can be processed.
# This is a known limitation of logstash and will hopefully be addressed in a
# future version.
#
# #### Details:
# There are two ways to configure this plugin, and they can be used in
# combination: event fields & per-output defaults
#
# Event Field configuration...
# You add fields to your events in inputs & filters and this output reads
# those fields to aggregate events. The names of the fields read are
# configurable via the `field_*` options.
#
# Per-output defaults...
# You set universal defaults in this output plugin's configuration, and
# if an event does not have a field for that option then the default is
# used.
#
# Notice, the event fields take precedence over the per-output defaults.
#
# At a minimum events must have a "metric name" to be sent to CloudWatch.
# This can be achieved either by providing a default here OR by adding a
# `CW_metricname` field. By default, if no other configuration is provided
# besides a metric name, then events will be counted (Unit: Count, Value: 1)
# by their metric name (either a default or from their `CW_metricname` field)
#
# Other fields which can be added to events to modify the behavior of this
# plugin are `CW_namespace`, `CW_unit`, `CW_value`, and
# `CW_dimensions`. All of these field names are configurable in
# this output. You can also set per-output defaults for any of them.
# See below for details.
#
# Read more about [AWS CloudWatch](http://aws.amazon.com/cloudwatch/),
# and the specifics of the API endpoint this output uses,
# [PutMetricData](http://docs.amazonwebservices.com/AmazonCloudWatch/latest/APIReference/API_PutMetricData.html)
class LogStash::Outputs::CloudWatch < LogStash::Outputs::Base
  include LogStash::PluginMixins::AwsConfig

  config_name "cloudwatch"
  milestone 1

  # Constants
  # aggregate_key members
  DIMENSIONS = "dimensions"
  TIMESTAMP = "timestamp"
  METRIC = "metric"
  COUNT = "count"
  UNIT = "unit"
  SUM = "sum"
  MIN = "min"
  MAX = "max"
  # Units
  COUNT_UNIT = "Count"
  NONE = "None"

  # How often to send data to CloudWatch
  # This does not affect the event timestamps, events will always have their
  # actual timestamp (to-the-minute) sent to CloudWatch.
  #
  # We only call the API if there is data to send.
  #
  # See the Rufus Scheduler docs for an [explanation of allowed values](https://github.com/jmettraux/rufus-scheduler#the-time-strings-understood-by-rufus-scheduler)
  config :timeframe, :validate => :string, :default => "1m"

  # How many events to queue before forcing a call to the CloudWatch API ahead of `timeframe` schedule
  # Set this to the number of events-per-timeframe you will be sending to CloudWatch to avoid extra API calls
  config :queue_size, :validate => :number, :default => 10000

  # The default namespace to use for events which do not have a `CW_namespace` field
  config :namespace, :validate => :string, :default => "Logstash"

  # The name of the field used to set a different namespace per event
  # Note: Only one namespace can be sent to CloudWatch per API call
  # so setting different namespaces will increase the number of API calls
  # and those cost money.
  config :field_namespace, :validate => :string, :default => "CW_namespace"

  # The default metric name to use for events which do not have a `CW_metricname` field.
  # Beware: If this is provided then all events which pass through this output will be aggregated and
  # sent to CloudWatch, so use this carefully. Furthermore, when providing this option, you
  # will probably want to also restrict events from passing through this output using event
  # type, tag, and field matching
  config :metricname, :validate => :string

  # The name of the field used to set the metric name on an event
  # The author of this plugin recommends adding this field to events in inputs &
  # filters rather than using the per-output default setting so that one output
  # plugin on your logstash indexer can serve all events (which of course had
  # fields set on your logstash shippers.)
  config :field_metricname, :validate => :string, :default => "CW_metricname"

  VALID_UNITS = ["Seconds", "Microseconds", "Milliseconds", "Bytes",
                 "Kilobytes", "Megabytes", "Gigabytes", "Terabytes",
                 "Bits", "Kilobits", "Megabits", "Gigabits", "Terabits",
                 "Percent", COUNT_UNIT, "Bytes/Second", "Kilobytes/Second",
                 "Megabytes/Second", "Gigabytes/Second", "Terabytes/Second",
                 "Bits/Second", "Kilobits/Second", "Megabits/Second",
                 "Gigabits/Second", "Terabits/Second", "Count/Second", NONE]

  # The default unit to use for events which do not have a `CW_unit` field
  # If you set this option you should probably set the "value" option along with it
  config :unit, :validate => VALID_UNITS, :default => COUNT_UNIT

  # The name of the field used to set the unit on an event metric
  config :field_unit, :validate => :string, :default => "CW_unit"

  # The default value to use for events which do not have a `CW_value` field
  # If provided, this must be a string which can be converted to a float, for example...
  # "1", "2.34", ".5", and "0.67"
  # If you set this option you should probably set the `unit` option along with it
  config :value, :validate => :string, :default => "1"

  # The name of the field used to set the value (float) on an event metric
  config :field_value, :validate => :string, :default => "CW_value"

  # The default dimensions [ name, value, ... ] to use for events which do not have a `CW_dimensions` field
  config :dimensions, :validate => :hash

  # The name of the field used to set the dimensions on an event metric
  # The field named here, if present in an event, must have an array of
  # one or more key & value pairs, for example...
  # add_field => [ "CW_dimensions", "Environment", "CW_dimensions", "prod" ]
  # or, equivalently...
  # add_field => [ "CW_dimensions", "Environment" ]
  # add_field => [ "CW_dimensions", "prod" ]
  config :field_dimensions, :validate => :string, :default => "CW_dimensions"

  public
  def aws_service_endpoint(region)
    return {
      :cloud_watch_endpoint => "monitoring.#{region}.amazonaws.com"
    }
  end

  public
  def register
    require "thread"
    require "rufus/scheduler"
    require "aws"

    @cw = AWS::CloudWatch.new(aws_options_hash)

    @event_queue = SizedQueue.new(@queue_size)
    @scheduler = Rufus::Scheduler.start_new
    @job = @scheduler.every @timeframe do
      @logger.info("Scheduler Activated")
      publish(aggregate({}))
    end
  end # def register

  public
  def receive(event)
    return unless output?(event)

    if event == LogStash::SHUTDOWN
      @job.trigger()
      @job.unschedule()
      @logger.info("CloudWatch aggregator thread shutdown.")
      finished
      return
    end

    return unless (event[@field_metricname] || @metricname)

    if (@event_queue.length >= @event_queue.max)
      @job.trigger
      @logger.warn("Posted to AWS CloudWatch ahead of schedule. If you see this often, consider increasing the cloudwatch queue_size option.")
    end

    @logger.info("Queueing event", :event => event)
    @event_queue << event
  end # def receive

  private
  def publish(aggregates)
    aggregates.each do |namespace, data|
      @logger.info("Namespace, data: ", :namespace => namespace, :data => data)
      metric_data = []
      data.each do |aggregate_key, stats|
        new_data = {
          :metric_name => aggregate_key[METRIC],
          :timestamp => aggregate_key[TIMESTAMP],
          :unit => aggregate_key[UNIT],
          :statistic_values => {
            :sample_count => stats[COUNT],
            :sum => stats[SUM],
            :minimum => stats[MIN],
            :maximum => stats[MAX],
          }
        }
        dims = aggregate_key[DIMENSIONS]
        if (dims.is_a?(Array) && dims.length > 0 && (dims.length % 2) == 0)
          new_data[:dimensions] = Array.new
          i = 0
          while (i < dims.length)
            new_data[:dimensions] << {:name => dims[i], :value => dims[i+1]}
            i += 2
          end
        end
        metric_data << new_data
      end # data.each

      begin
        @cw.put_metric_data(
          :namespace => namespace,
          :metric_data => metric_data
        )
        @logger.info("Sent data to AWS CloudWatch OK", :namespace => namespace, :metric_data => metric_data)
      rescue Exception => e
        @logger.warn("Failed to send to AWS CloudWatch", :exception => e, :namespace => namespace, :metric_data => metric_data)
        break
      end
    end # aggregates.each
    return aggregates
  end # def publish

  private
  def aggregate(aggregates)
    @logger.info("QUEUE SIZE ", :queuesize => @event_queue.size)
    while !@event_queue.empty? do
      begin
        count(aggregates, @event_queue.pop(true))
      rescue Exception => e
        @logger.warn("Exception! Breaking count loop", :exception => e)
        break
      end
    end
    return aggregates
  end # def aggregate

  private
  def count(aggregates, event)
    # If the event doesn't declare a namespace, use the default
    fnamespace = field(event, @field_namespace)
    namespace = (fnamespace ? fnamespace : event.sprintf(@namespace))

    funit = field(event, @field_unit)
    unit = (funit ? funit : event.sprintf(@unit))

    fvalue = field(event, @field_value)
    value = (fvalue ? fvalue : event.sprintf(@value))

    # We may get to this point with valid Units but missing value. Send zeros.
    val = (!value) ? 0.0 : value.to_f

    # Event provides exactly one (but not both) of value or unit
    if ( (fvalue == nil) ^ (funit == nil) )
      @logger.warn("Likely config error: event has one of #{@field_value} or #{@field_unit} fields but not both.", :event => event)
    end

    # If Unit is still not set or is invalid warn about misconfiguration & use NONE
    if (!VALID_UNITS.include?(unit))
      unit = NONE
      @logger.warn("Likely config error: invalid or missing Units (#{unit.to_s}), using '#{NONE}' instead", :event => event)
    end

    if (!aggregates[namespace])
      aggregates[namespace] = {}
    end

    dims = event[@field_dimensions]
    if (dims) # event provides dimensions
      # validate the structure
      if (!dims.is_a?(Array) || dims.length == 0 || (dims.length % 2) != 0)
        @logger.warn("Likely config error: CloudWatch dimensions field (#{dims.to_s}) found which is not a positive- & even-length array. Ignoring it.", :event => event)
        dims = nil
      end
      # Best case, we get here and exit the conditional because dims...
      # - is an array
      # - with positive length
      # - and an even number of elements
    elsif (@dimensions.is_a?(Hash)) # event did not provide dimensions, but the output has been configured with a default
      dims = @dimensions.flatten.map{|d| event.sprintf(d)} # into the kind of array described just above
    else
      dims = nil
    end

    fmetric = field(event, @field_metricname)
    aggregate_key = {
      METRIC => (fmetric ? fmetric : event.sprintf(@metricname)),
      DIMENSIONS => dims,
      UNIT => unit,
      TIMESTAMP => event.sprintf("%{+YYYY-MM-dd'T'HH:mm:00Z}")
    }

    if (!aggregates[namespace][aggregate_key])
      aggregates[namespace][aggregate_key] = {}
    end

    if (!aggregates[namespace][aggregate_key][MAX] || val > aggregates[namespace][aggregate_key][MAX])
      aggregates[namespace][aggregate_key][MAX] = val
    end

    if (!aggregates[namespace][aggregate_key][MIN] || val < aggregates[namespace][aggregate_key][MIN])
      aggregates[namespace][aggregate_key][MIN] = val
    end

    if (!aggregates[namespace][aggregate_key][COUNT])
      aggregates[namespace][aggregate_key][COUNT] = 1
    else
      aggregates[namespace][aggregate_key][COUNT] += 1
    end

    if (!aggregates[namespace][aggregate_key][SUM])
      aggregates[namespace][aggregate_key][SUM] = val
    else
      aggregates[namespace][aggregate_key][SUM] += val
    end
  end # def count

  private
  def field(event, fieldname)
    if !event[fieldname]
      return nil
    else
      if event[fieldname].is_a?(Array)
        return event[fieldname][0]
      else
        return event[fieldname]
      end
    end
  end # def field

end # class LogStash::Outputs::CloudWatch
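To illustrate the intended "event field" usage described in the comments above, a hedged config sketch follows: the conditional, match pattern, and metric name are invented for illustration, the CW_* field names and the Count/1 values are the plugin defaults documented above, and the credential/region options contributed by the aws_config mixin are omitted rather than guessed at:

    filter {
      # Decorate only the events that should become CloudWatch datapoints.
      if [message] =~ /Login failed/ {     # hypothetical selection criteria
        mutate {
          add_field => [ "CW_metricname", "FailedLogins",
                         "CW_unit", "Count",
                         "CW_value", "1",
                         "CW_dimensions", "Environment",
                         "CW_dimensions", "prod" ]
        }
      }
    }

    output {
      cloudwatch {
        namespace  => "Logstash"    # default namespace, shown for clarity
        timeframe  => "1m"          # flush aggregates every minute
        queue_size => 10000         # force an early flush if this many events queue up
        # AWS credentials and region come from the shared aws_config mixin options,
        # which are not repeated here.
      }
    }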