logstash-lib 1.3.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/.gitignore +24 -0
- data/.tailor +8 -0
- data/.travis.yml +12 -0
- data/CHANGELOG +1185 -0
- data/CONTRIBUTING.md +61 -0
- data/CONTRIBUTORS +79 -0
- data/LICENSE +14 -0
- data/Makefile +460 -0
- data/README.md +120 -0
- data/STYLE.md +96 -0
- data/bin/logstash +37 -0
- data/bin/logstash-test +4 -0
- data/bin/logstash-web +4 -0
- data/bin/logstash.lib.sh +78 -0
- data/bot/check_pull_changelog.rb +89 -0
- data/docs/configuration.md +260 -0
- data/docs/docgen.rb +242 -0
- data/docs/extending/example-add-a-new-filter.md +121 -0
- data/docs/extending/index.md +91 -0
- data/docs/flags.md +43 -0
- data/docs/generate_index.rb +28 -0
- data/docs/index.html.erb +56 -0
- data/docs/learn.md +46 -0
- data/docs/life-of-an-event.md +109 -0
- data/docs/logging-tool-comparisons.md +60 -0
- data/docs/plugin-doc.html.erb +91 -0
- data/docs/plugin-milestones.md +41 -0
- data/docs/plugin-synopsis.html.erb +24 -0
- data/docs/release-engineering.md +46 -0
- data/docs/release-test-results.md +14 -0
- data/docs/repositories.md +35 -0
- data/docs/tutorials/10-minute-walkthrough/apache-elasticsearch.conf +35 -0
- data/docs/tutorials/10-minute-walkthrough/apache-parse.conf +33 -0
- data/docs/tutorials/10-minute-walkthrough/apache_log.1 +1 -0
- data/docs/tutorials/10-minute-walkthrough/apache_log.2.bz2 +0 -0
- data/docs/tutorials/10-minute-walkthrough/hello-search.conf +25 -0
- data/docs/tutorials/10-minute-walkthrough/hello.conf +16 -0
- data/docs/tutorials/10-minute-walkthrough/index.md +124 -0
- data/docs/tutorials/10-minute-walkthrough/step-5-output.txt +17 -0
- data/docs/tutorials/getting-started-centralized-overview-diagram.png +0 -0
- data/docs/tutorials/getting-started-centralized-overview-diagram.xml +1 -0
- data/docs/tutorials/getting-started-centralized.md +217 -0
- data/docs/tutorials/getting-started-simple.md +200 -0
- data/docs/tutorials/just-enough-rabbitmq-for-logstash.md +201 -0
- data/docs/tutorials/media/frontend-response-codes.png +0 -0
- data/docs/tutorials/metrics-from-logs.md +84 -0
- data/docs/tutorials/zeromq.md +118 -0
- data/extract_services.rb +29 -0
- data/gembag.rb +64 -0
- data/lib/logstash-event.rb +2 -0
- data/lib/logstash.rb +4 -0
- data/lib/logstash/JRUBY-6970-openssl.rb +22 -0
- data/lib/logstash/JRUBY-6970.rb +102 -0
- data/lib/logstash/agent.rb +305 -0
- data/lib/logstash/certs/cacert.pem +3895 -0
- data/lib/logstash/codecs/base.rb +49 -0
- data/lib/logstash/codecs/compress_spooler.rb +50 -0
- data/lib/logstash/codecs/dots.rb +18 -0
- data/lib/logstash/codecs/edn.rb +28 -0
- data/lib/logstash/codecs/edn_lines.rb +36 -0
- data/lib/logstash/codecs/fluent.rb +55 -0
- data/lib/logstash/codecs/graphite.rb +114 -0
- data/lib/logstash/codecs/json.rb +41 -0
- data/lib/logstash/codecs/json_lines.rb +52 -0
- data/lib/logstash/codecs/json_spooler.rb +22 -0
- data/lib/logstash/codecs/line.rb +58 -0
- data/lib/logstash/codecs/msgpack.rb +43 -0
- data/lib/logstash/codecs/multiline.rb +189 -0
- data/lib/logstash/codecs/netflow.rb +342 -0
- data/lib/logstash/codecs/netflow/util.rb +212 -0
- data/lib/logstash/codecs/noop.rb +19 -0
- data/lib/logstash/codecs/oldlogstashjson.rb +56 -0
- data/lib/logstash/codecs/plain.rb +48 -0
- data/lib/logstash/codecs/rubydebug.rb +22 -0
- data/lib/logstash/codecs/spool.rb +38 -0
- data/lib/logstash/config/Makefile +4 -0
- data/lib/logstash/config/config_ast.rb +380 -0
- data/lib/logstash/config/file.rb +39 -0
- data/lib/logstash/config/grammar.rb +3504 -0
- data/lib/logstash/config/grammar.treetop +241 -0
- data/lib/logstash/config/mixin.rb +464 -0
- data/lib/logstash/config/registry.rb +13 -0
- data/lib/logstash/config/test.conf +18 -0
- data/lib/logstash/errors.rb +10 -0
- data/lib/logstash/event.rb +262 -0
- data/lib/logstash/filters/advisor.rb +178 -0
- data/lib/logstash/filters/alter.rb +173 -0
- data/lib/logstash/filters/anonymize.rb +93 -0
- data/lib/logstash/filters/base.rb +190 -0
- data/lib/logstash/filters/checksum.rb +50 -0
- data/lib/logstash/filters/cidr.rb +76 -0
- data/lib/logstash/filters/cipher.rb +145 -0
- data/lib/logstash/filters/clone.rb +35 -0
- data/lib/logstash/filters/collate.rb +114 -0
- data/lib/logstash/filters/csv.rb +94 -0
- data/lib/logstash/filters/date.rb +244 -0
- data/lib/logstash/filters/dns.rb +201 -0
- data/lib/logstash/filters/drop.rb +32 -0
- data/lib/logstash/filters/elapsed.rb +256 -0
- data/lib/logstash/filters/elasticsearch.rb +73 -0
- data/lib/logstash/filters/environment.rb +27 -0
- data/lib/logstash/filters/extractnumbers.rb +84 -0
- data/lib/logstash/filters/gelfify.rb +52 -0
- data/lib/logstash/filters/geoip.rb +145 -0
- data/lib/logstash/filters/grep.rb +153 -0
- data/lib/logstash/filters/grok.rb +425 -0
- data/lib/logstash/filters/grokdiscovery.rb +75 -0
- data/lib/logstash/filters/i18n.rb +51 -0
- data/lib/logstash/filters/json.rb +90 -0
- data/lib/logstash/filters/json_encode.rb +52 -0
- data/lib/logstash/filters/kv.rb +232 -0
- data/lib/logstash/filters/metaevent.rb +68 -0
- data/lib/logstash/filters/metrics.rb +237 -0
- data/lib/logstash/filters/multiline.rb +241 -0
- data/lib/logstash/filters/mutate.rb +399 -0
- data/lib/logstash/filters/noop.rb +21 -0
- data/lib/logstash/filters/prune.rb +149 -0
- data/lib/logstash/filters/punct.rb +32 -0
- data/lib/logstash/filters/railsparallelrequest.rb +86 -0
- data/lib/logstash/filters/range.rb +142 -0
- data/lib/logstash/filters/ruby.rb +42 -0
- data/lib/logstash/filters/sleep.rb +111 -0
- data/lib/logstash/filters/split.rb +64 -0
- data/lib/logstash/filters/sumnumbers.rb +73 -0
- data/lib/logstash/filters/syslog_pri.rb +107 -0
- data/lib/logstash/filters/translate.rb +121 -0
- data/lib/logstash/filters/unique.rb +29 -0
- data/lib/logstash/filters/urldecode.rb +57 -0
- data/lib/logstash/filters/useragent.rb +112 -0
- data/lib/logstash/filters/uuid.rb +58 -0
- data/lib/logstash/filters/xml.rb +139 -0
- data/lib/logstash/filters/zeromq.rb +123 -0
- data/lib/logstash/filterworker.rb +122 -0
- data/lib/logstash/inputs/base.rb +125 -0
- data/lib/logstash/inputs/collectd.rb +306 -0
- data/lib/logstash/inputs/drupal_dblog.rb +323 -0
- data/lib/logstash/inputs/drupal_dblog/jdbcconnection.rb +66 -0
- data/lib/logstash/inputs/elasticsearch.rb +140 -0
- data/lib/logstash/inputs/eventlog.rb +129 -0
- data/lib/logstash/inputs/eventlog/racob_fix.rb +44 -0
- data/lib/logstash/inputs/exec.rb +69 -0
- data/lib/logstash/inputs/file.rb +146 -0
- data/lib/logstash/inputs/ganglia.rb +127 -0
- data/lib/logstash/inputs/ganglia/gmondpacket.rb +146 -0
- data/lib/logstash/inputs/ganglia/xdr.rb +327 -0
- data/lib/logstash/inputs/gelf.rb +138 -0
- data/lib/logstash/inputs/gemfire.rb +222 -0
- data/lib/logstash/inputs/generator.rb +97 -0
- data/lib/logstash/inputs/graphite.rb +41 -0
- data/lib/logstash/inputs/heroku.rb +51 -0
- data/lib/logstash/inputs/imap.rb +136 -0
- data/lib/logstash/inputs/irc.rb +84 -0
- data/lib/logstash/inputs/log4j.rb +136 -0
- data/lib/logstash/inputs/lumberjack.rb +53 -0
- data/lib/logstash/inputs/pipe.rb +57 -0
- data/lib/logstash/inputs/rabbitmq.rb +126 -0
- data/lib/logstash/inputs/rabbitmq/bunny.rb +118 -0
- data/lib/logstash/inputs/rabbitmq/hot_bunnies.rb +1 -0
- data/lib/logstash/inputs/rabbitmq/march_hare.rb +129 -0
- data/lib/logstash/inputs/redis.rb +263 -0
- data/lib/logstash/inputs/relp.rb +106 -0
- data/lib/logstash/inputs/s3.rb +279 -0
- data/lib/logstash/inputs/snmptrap.rb +87 -0
- data/lib/logstash/inputs/sqlite.rb +185 -0
- data/lib/logstash/inputs/sqs.rb +172 -0
- data/lib/logstash/inputs/stdin.rb +46 -0
- data/lib/logstash/inputs/stomp.rb +84 -0
- data/lib/logstash/inputs/syslog.rb +237 -0
- data/lib/logstash/inputs/tcp.rb +231 -0
- data/lib/logstash/inputs/threadable.rb +18 -0
- data/lib/logstash/inputs/twitter.rb +82 -0
- data/lib/logstash/inputs/udp.rb +81 -0
- data/lib/logstash/inputs/unix.rb +163 -0
- data/lib/logstash/inputs/varnishlog.rb +48 -0
- data/lib/logstash/inputs/websocket.rb +50 -0
- data/lib/logstash/inputs/wmi.rb +72 -0
- data/lib/logstash/inputs/xmpp.rb +81 -0
- data/lib/logstash/inputs/zenoss.rb +143 -0
- data/lib/logstash/inputs/zeromq.rb +165 -0
- data/lib/logstash/kibana.rb +113 -0
- data/lib/logstash/loadlibs.rb +9 -0
- data/lib/logstash/logging.rb +89 -0
- data/lib/logstash/monkeypatches-for-bugs.rb +2 -0
- data/lib/logstash/monkeypatches-for-debugging.rb +47 -0
- data/lib/logstash/monkeypatches-for-performance.rb +66 -0
- data/lib/logstash/multiqueue.rb +53 -0
- data/lib/logstash/namespace.rb +16 -0
- data/lib/logstash/outputs/base.rb +120 -0
- data/lib/logstash/outputs/boundary.rb +116 -0
- data/lib/logstash/outputs/circonus.rb +78 -0
- data/lib/logstash/outputs/cloudwatch.rb +351 -0
- data/lib/logstash/outputs/csv.rb +55 -0
- data/lib/logstash/outputs/datadog.rb +93 -0
- data/lib/logstash/outputs/datadog_metrics.rb +123 -0
- data/lib/logstash/outputs/elasticsearch.rb +332 -0
- data/lib/logstash/outputs/elasticsearch/elasticsearch-template.json +44 -0
- data/lib/logstash/outputs/elasticsearch_http.rb +256 -0
- data/lib/logstash/outputs/elasticsearch_river.rb +214 -0
- data/lib/logstash/outputs/email.rb +299 -0
- data/lib/logstash/outputs/exec.rb +40 -0
- data/lib/logstash/outputs/file.rb +180 -0
- data/lib/logstash/outputs/ganglia.rb +75 -0
- data/lib/logstash/outputs/gelf.rb +208 -0
- data/lib/logstash/outputs/gemfire.rb +103 -0
- data/lib/logstash/outputs/google_bigquery.rb +570 -0
- data/lib/logstash/outputs/google_cloud_storage.rb +431 -0
- data/lib/logstash/outputs/graphite.rb +143 -0
- data/lib/logstash/outputs/graphtastic.rb +185 -0
- data/lib/logstash/outputs/hipchat.rb +80 -0
- data/lib/logstash/outputs/http.rb +142 -0
- data/lib/logstash/outputs/irc.rb +80 -0
- data/lib/logstash/outputs/jira.rb +109 -0
- data/lib/logstash/outputs/juggernaut.rb +105 -0
- data/lib/logstash/outputs/librato.rb +146 -0
- data/lib/logstash/outputs/loggly.rb +93 -0
- data/lib/logstash/outputs/lumberjack.rb +51 -0
- data/lib/logstash/outputs/metriccatcher.rb +103 -0
- data/lib/logstash/outputs/mongodb.rb +81 -0
- data/lib/logstash/outputs/nagios.rb +119 -0
- data/lib/logstash/outputs/nagios_nsca.rb +123 -0
- data/lib/logstash/outputs/null.rb +18 -0
- data/lib/logstash/outputs/opentsdb.rb +101 -0
- data/lib/logstash/outputs/pagerduty.rb +79 -0
- data/lib/logstash/outputs/pipe.rb +132 -0
- data/lib/logstash/outputs/rabbitmq.rb +96 -0
- data/lib/logstash/outputs/rabbitmq/bunny.rb +135 -0
- data/lib/logstash/outputs/rabbitmq/hot_bunnies.rb +1 -0
- data/lib/logstash/outputs/rabbitmq/march_hare.rb +143 -0
- data/lib/logstash/outputs/redis.rb +245 -0
- data/lib/logstash/outputs/riak.rb +152 -0
- data/lib/logstash/outputs/riemann.rb +109 -0
- data/lib/logstash/outputs/s3.rb +356 -0
- data/lib/logstash/outputs/sns.rb +124 -0
- data/lib/logstash/outputs/solr_http.rb +78 -0
- data/lib/logstash/outputs/sqs.rb +141 -0
- data/lib/logstash/outputs/statsd.rb +116 -0
- data/lib/logstash/outputs/stdout.rb +53 -0
- data/lib/logstash/outputs/stomp.rb +67 -0
- data/lib/logstash/outputs/syslog.rb +145 -0
- data/lib/logstash/outputs/tcp.rb +145 -0
- data/lib/logstash/outputs/udp.rb +38 -0
- data/lib/logstash/outputs/websocket.rb +46 -0
- data/lib/logstash/outputs/websocket/app.rb +29 -0
- data/lib/logstash/outputs/websocket/pubsub.rb +45 -0
- data/lib/logstash/outputs/xmpp.rb +78 -0
- data/lib/logstash/outputs/zabbix.rb +108 -0
- data/lib/logstash/outputs/zeromq.rb +125 -0
- data/lib/logstash/pipeline.rb +286 -0
- data/lib/logstash/plugin.rb +150 -0
- data/lib/logstash/plugin_mixins/aws_config.rb +93 -0
- data/lib/logstash/program.rb +15 -0
- data/lib/logstash/runner.rb +238 -0
- data/lib/logstash/sized_queue.rb +8 -0
- data/lib/logstash/test.rb +183 -0
- data/lib/logstash/threadwatchdog.rb +37 -0
- data/lib/logstash/time_addon.rb +33 -0
- data/lib/logstash/util.rb +106 -0
- data/lib/logstash/util/buftok.rb +139 -0
- data/lib/logstash/util/charset.rb +39 -0
- data/lib/logstash/util/fieldreference.rb +50 -0
- data/lib/logstash/util/password.rb +25 -0
- data/lib/logstash/util/prctl.rb +11 -0
- data/lib/logstash/util/relp.rb +326 -0
- data/lib/logstash/util/require-helper.rb +18 -0
- data/lib/logstash/util/socket_peer.rb +7 -0
- data/lib/logstash/util/zenoss.rb +566 -0
- data/lib/logstash/util/zeromq.rb +47 -0
- data/lib/logstash/version.rb +6 -0
- data/locales/en.yml +170 -0
- data/logstash-event.gemspec +29 -0
- data/logstash.gemspec +128 -0
- data/patterns/firewalls +60 -0
- data/patterns/grok-patterns +91 -0
- data/patterns/haproxy +37 -0
- data/patterns/java +3 -0
- data/patterns/linux-syslog +14 -0
- data/patterns/mcollective +1 -0
- data/patterns/mcollective-patterns +4 -0
- data/patterns/nagios +108 -0
- data/patterns/postgresql +3 -0
- data/patterns/redis +3 -0
- data/patterns/ruby +2 -0
- data/pkg/build.sh +135 -0
- data/pkg/centos/after-install.sh +1 -0
- data/pkg/centos/before-install.sh +10 -0
- data/pkg/centos/before-remove.sh +11 -0
- data/pkg/centos/sysconfig +15 -0
- data/pkg/debian/after-install.sh +5 -0
- data/pkg/debian/before-install.sh +13 -0
- data/pkg/debian/before-remove.sh +13 -0
- data/pkg/debian/build.sh +34 -0
- data/pkg/debian/debian/README +6 -0
- data/pkg/debian/debian/changelog +17 -0
- data/pkg/debian/debian/compat +1 -0
- data/pkg/debian/debian/control +16 -0
- data/pkg/debian/debian/copyright +27 -0
- data/pkg/debian/debian/dirs +19 -0
- data/pkg/debian/debian/docs +0 -0
- data/pkg/debian/debian/logstash.default +39 -0
- data/pkg/debian/debian/logstash.init +201 -0
- data/pkg/debian/debian/logstash.install +1 -0
- data/pkg/debian/debian/logstash.logrotate +9 -0
- data/pkg/debian/debian/logstash.postinst +68 -0
- data/pkg/debian/debian/logstash.postrm +23 -0
- data/pkg/debian/debian/manpage.1.ex +59 -0
- data/pkg/debian/debian/preinst.ex +37 -0
- data/pkg/debian/debian/prerm.ex +40 -0
- data/pkg/debian/debian/release.conf +5 -0
- data/pkg/debian/debian/rules +80 -0
- data/pkg/debian/debian/watch.ex +22 -0
- data/pkg/logrotate.conf +8 -0
- data/pkg/logstash-web.default +41 -0
- data/pkg/logstash-web.sysv.debian +201 -0
- data/pkg/logstash-web.upstart.ubuntu +18 -0
- data/pkg/logstash.default +45 -0
- data/pkg/logstash.sysv.debian +202 -0
- data/pkg/logstash.sysv.redhat +158 -0
- data/pkg/logstash.upstart.ubuntu +20 -0
- data/pkg/rpm/SOURCES/logstash.conf +26 -0
- data/pkg/rpm/SOURCES/logstash.init +80 -0
- data/pkg/rpm/SOURCES/logstash.logrotate +8 -0
- data/pkg/rpm/SOURCES/logstash.sysconfig +3 -0
- data/pkg/rpm/SOURCES/logstash.wrapper +105 -0
- data/pkg/rpm/SPECS/logstash.spec +180 -0
- data/pkg/rpm/readme.md +4 -0
- data/pkg/ubuntu/after-install.sh +7 -0
- data/pkg/ubuntu/before-install.sh +12 -0
- data/pkg/ubuntu/before-remove.sh +13 -0
- data/pull_release_note.rb +25 -0
- data/require-analyze.rb +22 -0
- data/spec/README.md +14 -0
- data/spec/codecs/edn.rb +40 -0
- data/spec/codecs/edn_lines.rb +53 -0
- data/spec/codecs/graphite.rb +96 -0
- data/spec/codecs/json.rb +57 -0
- data/spec/codecs/json_lines.rb +51 -0
- data/spec/codecs/json_spooler.rb +43 -0
- data/spec/codecs/msgpack.rb +39 -0
- data/spec/codecs/multiline.rb +60 -0
- data/spec/codecs/oldlogstashjson.rb +55 -0
- data/spec/codecs/plain.rb +35 -0
- data/spec/codecs/spool.rb +35 -0
- data/spec/conditionals/test.rb +323 -0
- data/spec/config.rb +31 -0
- data/spec/event.rb +165 -0
- data/spec/examples/fail2ban.rb +28 -0
- data/spec/examples/graphite-input.rb +41 -0
- data/spec/examples/mysql-slow-query.rb +70 -0
- data/spec/examples/parse-apache-logs.rb +66 -0
- data/spec/examples/parse-haproxy-logs.rb +115 -0
- data/spec/examples/syslog.rb +48 -0
- data/spec/filters/alter.rb +96 -0
- data/spec/filters/anonymize.rb +189 -0
- data/spec/filters/checksum.rb +41 -0
- data/spec/filters/clone.rb +67 -0
- data/spec/filters/collate.rb +122 -0
- data/spec/filters/csv.rb +174 -0
- data/spec/filters/date.rb +285 -0
- data/spec/filters/date_performance.rb +31 -0
- data/spec/filters/dns.rb +159 -0
- data/spec/filters/drop.rb +19 -0
- data/spec/filters/elapsed.rb +294 -0
- data/spec/filters/environment.rb +43 -0
- data/spec/filters/geoip.rb +62 -0
- data/spec/filters/grep.rb +342 -0
- data/spec/filters/grok.rb +473 -0
- data/spec/filters/grok/timeout2.rb +56 -0
- data/spec/filters/grok/timeouts.rb +39 -0
- data/spec/filters/i18n.rb +25 -0
- data/spec/filters/json.rb +72 -0
- data/spec/filters/json_encode.rb +37 -0
- data/spec/filters/kv.rb +403 -0
- data/spec/filters/metrics.rb +212 -0
- data/spec/filters/multiline.rb +119 -0
- data/spec/filters/mutate.rb +180 -0
- data/spec/filters/noop.rb +221 -0
- data/spec/filters/prune.rb +441 -0
- data/spec/filters/punct.rb +18 -0
- data/spec/filters/railsparallelrequest.rb +112 -0
- data/spec/filters/range.rb +169 -0
- data/spec/filters/split.rb +58 -0
- data/spec/filters/translate.rb +70 -0
- data/spec/filters/unique.rb +25 -0
- data/spec/filters/useragent.rb +42 -0
- data/spec/filters/xml.rb +157 -0
- data/spec/inputs/file.rb +107 -0
- data/spec/inputs/gelf.rb +52 -0
- data/spec/inputs/generator.rb +30 -0
- data/spec/inputs/imap.rb +60 -0
- data/spec/inputs/redis.rb +63 -0
- data/spec/inputs/relp.rb +70 -0
- data/spec/inputs/tcp.rb +101 -0
- data/spec/jar.rb +21 -0
- data/spec/outputs/csv.rb +266 -0
- data/spec/outputs/elasticsearch.rb +161 -0
- data/spec/outputs/elasticsearch_http.rb +240 -0
- data/spec/outputs/email.rb +173 -0
- data/spec/outputs/file.rb +82 -0
- data/spec/outputs/graphite.rb +236 -0
- data/spec/outputs/redis.rb +127 -0
- data/spec/speed.rb +20 -0
- data/spec/sqlite-test.rb +81 -0
- data/spec/support/LOGSTASH-733.rb +21 -0
- data/spec/support/LOGSTASH-820.rb +25 -0
- data/spec/support/akamai-grok.rb +26 -0
- data/spec/support/date-http.rb +17 -0
- data/spec/support/postwait1.rb +26 -0
- data/spec/support/pull375.rb +21 -0
- data/spec/test_utils.rb +125 -0
- data/spec/util/fieldeval_spec.rb +44 -0
- data/test/jenkins/config.xml.erb +74 -0
- data/test/jenkins/create-jobs.rb +23 -0
- data/test/jenkins/generatorjob.config.xml +66 -0
- data/tools/Gemfile +14 -0
- data/tools/Gemfile.jruby-1.9.lock +322 -0
- data/tools/Gemfile.rbx-2.1.lock +516 -0
- data/tools/Gemfile.ruby-1.9.1.lock +310 -0
- data/tools/Gemfile.ruby-2.0.0.lock +310 -0
- metadata +629 -0
|
@@ -0,0 +1,306 @@
|
|
|
1
|
+
# encoding: utf-8
|
|
2
|
+
require "date"
|
|
3
|
+
require "logstash/inputs/base"
|
|
4
|
+
require "logstash/namespace"
|
|
5
|
+
require "socket"
|
|
6
|
+
require "tempfile"
|
|
7
|
+
require "time"
|
|
8
|
+
|
|
9
|
+
# Read events from the collectd binary protocol over the network via UDP.
|
|
10
|
+
# See https://collectd.org/wiki/index.php/Binary_protocol
|
|
11
|
+
#
|
|
12
|
+
# Configuration in your Logstash configuration file can be as simple as:
|
|
13
|
+
# input {
|
|
14
|
+
# collectd {}
|
|
15
|
+
# }
|
|
16
|
+
#
|
|
17
|
+
# A sample collectd.conf to send to Logstash might be:
|
|
18
|
+
#
|
|
19
|
+
# Hostname "host.example.com"
|
|
20
|
+
# LoadPlugin interface
|
|
21
|
+
# LoadPlugin load
|
|
22
|
+
# LoadPlugin memory
|
|
23
|
+
# LoadPlugin network
|
|
24
|
+
# <Plugin interface>
|
|
25
|
+
# Interface "eth0"
|
|
26
|
+
# IgnoreSelected false
|
|
27
|
+
# </Plugin>
|
|
28
|
+
# <Plugin network>
|
|
29
|
+
# <Server "10.0.0.1" "25826">
|
|
30
|
+
# </Server>
|
|
31
|
+
# </Plugin>
|
|
32
|
+
#
|
|
33
|
+
# Be sure to replace "10.0.0.1" with the IP of your Logstash instance.
|
|
34
|
+
#
|
|
35
|
+
|
|
36
|
+
#
|
|
37
|
+
class LogStash::Inputs::Collectd < LogStash::Inputs::Base
  config_name "collectd"
  milestone 1

  # File path(s) to collectd types.db to use.
  # The last matching pattern wins if you have identical pattern names in multiple files.
  # If no types.db is provided the included types.db will be used (currently 5.4.0).
  config :typesdb, :validate => :array

  # The address to listen on. Defaults to all available addresses.
  config :host, :validate => :string, :default => "0.0.0.0"

  # The port to listen on. Defaults to the collectd expected port of 25826.
  config :port, :validate => :number, :default => 25826

  # Prune interval records. Defaults to true.
  # When enabled, records whose final decoded part was type 7 or 9 are
  # not emitted as events (see collectd_listener).
  config :prune_intervals, :validate => :boolean, :default => true

  # Buffer size. 1452 is the collectd default for v5+
  config :buffer_size, :validate => :number, :default => 1452
|
|
57
|
+
|
|
58
|
+
public
|
|
59
|
+
def initialize(params)
|
|
60
|
+
super
|
|
61
|
+
BasicSocket.do_not_reverse_lookup = true
|
|
62
|
+
@idbyte = 0
|
|
63
|
+
@length = 0
|
|
64
|
+
@prev_typenum = 0
|
|
65
|
+
@header = []; @body = []
|
|
66
|
+
@timestamp = Time.now().utc
|
|
67
|
+
@collectd = {}
|
|
68
|
+
@types = {}
|
|
69
|
+
end # def initialize
|
|
70
|
+
|
|
71
|
+
  public
  # Locate a types.db when none was configured. Search order: inside the
  # jar (when running from a jar, detected via the jar-style __FILE__
  # path), then the working directory, then the bundled vendor copy.
  # Raises if no candidate is found.
  def register
    @udp = nil
    if @typesdb.nil?
      if __FILE__ =~ /^file:\/.+!.+/
        begin
          # Running from a jar, assume types.db is at the root.
          jar_path = [__FILE__.split("!").first, "/types.db"].join("!")
          @typesdb = [jar_path]
        rescue => ex
          raise "Failed to cache, due to: #{ex}\n#{ex.backtrace}"
        end
      else
        if File.exists?("types.db")
          @typesdb = ["types.db"]
        elsif File.exists?("vendor/collectd/types.db")
          @typesdb = ["vendor/collectd/types.db"]
        else
          raise "You must specify 'typesdb => ...' in your collectd input"
        end
      end
    end
    # NOTE(review): this logs "internal" even when @typesdb was supplied
    # by the user — confirm whether the wording is intentional.
    @logger.info("Using internal types.db", :typesdb => @typesdb.to_s)
  end # def register
|
|
95
|
+
|
|
96
|
+
public
|
|
97
|
+
def run(output_queue)
|
|
98
|
+
begin
|
|
99
|
+
# get types
|
|
100
|
+
get_types(@typesdb)
|
|
101
|
+
# collectd server
|
|
102
|
+
collectd_listener(output_queue)
|
|
103
|
+
rescue LogStash::ShutdownSignal
|
|
104
|
+
# do nothing, shutdown was requested.
|
|
105
|
+
rescue => e
|
|
106
|
+
@logger.warn("Collectd listener died", :exception => e, :backtrace => e.backtrace)
|
|
107
|
+
sleep(5)
|
|
108
|
+
retry
|
|
109
|
+
end # begin
|
|
110
|
+
end # def run
|
|
111
|
+
|
|
112
|
+
public
|
|
113
|
+
def get_types(paths)
|
|
114
|
+
# Get the typesdb
|
|
115
|
+
paths.each do |path|
|
|
116
|
+
@logger.info("Getting Collectd typesdb info", :typesdb => path.to_s)
|
|
117
|
+
File.open(path, 'r').each_line do |line|
|
|
118
|
+
typename, *line = line.strip.split
|
|
119
|
+
next if typename.nil? || if typename[0,1] != '#' # Don't process commented or blank lines
|
|
120
|
+
v = line.collect { |l| l.strip.split(":")[0] }
|
|
121
|
+
@types[typename] = v
|
|
122
|
+
end
|
|
123
|
+
end
|
|
124
|
+
end
|
|
125
|
+
@logger.debug("Collectd Types", :types => @types.to_s)
|
|
126
|
+
end # def get_types
|
|
127
|
+
|
|
128
|
+
public
|
|
129
|
+
def type_map(id)
|
|
130
|
+
case id
|
|
131
|
+
when 0; return "host"
|
|
132
|
+
when 1,8; return "@timestamp"
|
|
133
|
+
when 2; return "plugin"
|
|
134
|
+
when 3; return "plugin_instance"
|
|
135
|
+
when 4; return "collectd_type"
|
|
136
|
+
when 5; return "type_instance"
|
|
137
|
+
when 6; return "values"
|
|
138
|
+
when 9; return "interval"
|
|
139
|
+
when 100; return "message"
|
|
140
|
+
when 101; return "severity"
|
|
141
|
+
end
|
|
142
|
+
end # def type_map
|
|
143
|
+
|
|
144
|
+
public
|
|
145
|
+
def vt_map(id)
|
|
146
|
+
case id
|
|
147
|
+
when 0; return "COUNTER"
|
|
148
|
+
when 1; return "GAUGE"
|
|
149
|
+
when 2; return "DERIVE"
|
|
150
|
+
when 3; return "ABSOLUTE"
|
|
151
|
+
else; return 'UNKNOWN'
|
|
152
|
+
end
|
|
153
|
+
end
|
|
154
|
+
|
|
155
|
+
  public
  # Decode the payload of one collectd part (given its type id and the
  # body as an array of byte values) into a Ruby value, and update the
  # sticky per-record state ivars (@cdhost, @plugin, @timestamp, ...) as
  # a side effect. NOTE: mutates `body` destructively for types 6, 7 and
  # 101 (via slice!).
  def get_values(id, body)
    retval = ''
    case id
      when 0,2,3,4,5,100 #=> String types
        retval = body.pack("C*")
        retval = retval[0..-2]  # drop the trailing NUL terminator
      when 1 # Time
        # Time here, in bit-shifted format.  Parse bytes into UTC.
        byte1, byte2 = body.pack("C*").unpack("NN")
        retval = Time.at(( ((byte1 << 32) + byte2))).utc
      when 7,101 #=> Numeric types (big-endian IEEE-754 double)
        retval = body.slice!(0..7).pack("C*").unpack("E")[0]
      when 8 # Time, Hi-Res
        # Time here, in bit-shifted format.  Parse bytes into UTC.
        # High-resolution values carry 2^-30 second units, hence the scale.
        byte1, byte2 = body.pack("C*").unpack("NN")
        retval = Time.at(( ((byte1 << 32) + byte2) * (2**-30) )).utc
      when 9 # Interval, Hi-Res
        byte1, byte2 = body.pack("C*").unpack("NN")
        retval = (((byte1 << 32) + byte2) * (2**-30)).to_i
      when 6 # Values
        # Layout: 2-byte value count, then one type byte per value,
        # then one 8-byte datum per value (so 9 bytes per value remain).
        val_bytes = body.slice!(0..1)
        val_count = val_bytes.pack("C*").unpack("n")
        if body.length % 9 == 0 # Should be 9 fields
          count = 0
          retval = []
          types = body.slice!(0..((body.length/9)-1))
          while body.length > 0
            vtype = vt_map(types[count]) # NOTE(review): computed but unused
            case types[count]
              when 0, 3; v = body.slice!(0..7).pack("C*").unpack("Q>")[0] # COUNTER/ABSOLUTE: unsigned 64-bit BE
              when 1; v = body.slice!(0..7).pack("C*").unpack("E")[0]     # GAUGE: little-endian double
              when 2; v = body.slice!(0..7).pack("C*").unpack("q>")[0]    # DERIVE: signed 64-bit BE
              else; v = 0 # unknown data-source type
            end
            retval << v
            count += 1
          end
        else
          @logger.error("Incorrect number of data fields for collectd record", :body => body.to_s)
        end
    end
    # Populate some state variables based on their type...
    case id
      when 2
        if @plugin != retval # Zero-out @plugin_instance when @plugin changes
          @plugin_instance = ''
          @collectd.delete('plugin_instance')
        end
        @plugin = retval
      when 0; @cdhost = retval
      when 3; @plugin_instance = retval
      when 4; @cdtype = retval
      when 5; @type_instance = retval
      when 1,8; @timestamp = retval
    end
    return retval
  end # def get_values
|
|
213
|
+
|
|
214
|
+
private
|
|
215
|
+
def generate_event(data, output_queue)
|
|
216
|
+
# Prune these *specific* keys if they exist and are empty.
|
|
217
|
+
# This is better than looping over all keys every time.
|
|
218
|
+
data.delete('type_instance') if data['type_instance'] == ""
|
|
219
|
+
data.delete('plugin_instance') if data['plugin_instance'] == ""
|
|
220
|
+
# As crazy as it sounds, this is where we actually send our events to the queue!
|
|
221
|
+
event = LogStash::Event.new
|
|
222
|
+
data.each {|k, v| event[k] = data[k]}
|
|
223
|
+
decorate(event)
|
|
224
|
+
output_queue << event
|
|
225
|
+
end # def generate_event
|
|
226
|
+
|
|
227
|
+
private
|
|
228
|
+
def collectd_listener(output_queue)
|
|
229
|
+
|
|
230
|
+
@logger.info("Starting Collectd listener", :address => "#{@host}:#{@port}")
|
|
231
|
+
|
|
232
|
+
if @udp && ! @udp.closed?
|
|
233
|
+
@udp.close
|
|
234
|
+
end
|
|
235
|
+
|
|
236
|
+
@udp = UDPSocket.new(Socket::AF_INET)
|
|
237
|
+
@udp.bind(@host, @port)
|
|
238
|
+
|
|
239
|
+
loop do
|
|
240
|
+
payload, client = @udp.recvfrom(@buffer_size)
|
|
241
|
+
payload.each_byte do |byte|
|
|
242
|
+
# According to the documentation for the binary protocol
|
|
243
|
+
# it takes 4 bytes to define the header:
|
|
244
|
+
# The first 2 bytes are the type number,
|
|
245
|
+
# the second 2 bytes are the length of the message.
|
|
246
|
+
# So, until we have looped 4 times (@idbyte is our counter)
|
|
247
|
+
# append the byte to the @header
|
|
248
|
+
if @idbyte < 4
|
|
249
|
+
@header << byte
|
|
250
|
+
# Now that we have looped exactly 4 times...
|
|
251
|
+
elsif @idbyte == 4
|
|
252
|
+
@typenum = (@header[0] << 1) + @header[1] # @typenum gets the first 2 bytes
|
|
253
|
+
@length = (@header[2] << 1) + @header[3] # @length gets the second 2 bytes
|
|
254
|
+
@body << byte # @body begins with the current byte
|
|
255
|
+
# And if we've looped more than 4, up until the length of the message (now defined)
|
|
256
|
+
elsif @idbyte > 4 && @idbyte < @length
|
|
257
|
+
@body << byte # append the current byte to @body
|
|
258
|
+
end
|
|
259
|
+
# So long as we have @length and we've reached it, it's time to parse
|
|
260
|
+
if @length > 0 && @idbyte == @length-1
|
|
261
|
+
field = type_map(@typenum) # Get the field name based on type
|
|
262
|
+
if @typenum < @prev_typenum # We've started over, generate an event
|
|
263
|
+
if @prune_intervals
|
|
264
|
+
generate_event(@collectd, output_queue) unless @prev_typenum == 7 or @prev_typenum == 9
|
|
265
|
+
else
|
|
266
|
+
generate_event(@collectd, output_queue)
|
|
267
|
+
end
|
|
268
|
+
@collectd.clear # Empty @collectd
|
|
269
|
+
@collectd['host'] = @cdhost # Reset these from state
|
|
270
|
+
@collectd['collectd_type'] = @cdtype
|
|
271
|
+
@collectd['plugin'] = @plugin
|
|
272
|
+
@collectd['plugin_instance'] = @plugin_instance
|
|
273
|
+
@collectd['@timestamp'] = @timestamp
|
|
274
|
+
end
|
|
275
|
+
# Here is where we actually fill @collectd
|
|
276
|
+
values = get_values(@typenum, @body)
|
|
277
|
+
if values.kind_of?(Array)
|
|
278
|
+
if values.length > 1 # Only do this iteration on multi-value arrays
|
|
279
|
+
values.each_with_index {|value, x| @collectd[@types[@collectd['collectd_type']][x]] = values[x]}
|
|
280
|
+
else # Otherwise it's a single value
|
|
281
|
+
@collectd['value'] = values[0] # So name it 'value' accordingly
|
|
282
|
+
end
|
|
283
|
+
elsif field != nil # Not an array, make sure it's non-empty
|
|
284
|
+
@collectd[field] = values # Append values to @collectd under key field
|
|
285
|
+
end
|
|
286
|
+
@prev_typenum = @typenum
|
|
287
|
+
# All bytes in the collectd event have now been processed. Reset counters, header & body.
|
|
288
|
+
@idbyte = 0; @length = 0; @header.clear; @body.clear;
|
|
289
|
+
else # Increment the byte positional counter
|
|
290
|
+
@idbyte += 1
|
|
291
|
+
end # End of if @length > 0 && @idbyte == @length-1
|
|
292
|
+
end # End of payload.each_byte do |byte| loop
|
|
293
|
+
end # End of loop do, payload, client = @udp.recvfrom(@buffer_size)
|
|
294
|
+
ensure
|
|
295
|
+
if @udp
|
|
296
|
+
@udp.close_read rescue nil
|
|
297
|
+
@udp.close_write rescue nil
|
|
298
|
+
end
|
|
299
|
+
end # def collectd_listener
|
|
300
|
+
|
|
301
|
+
public
|
|
302
|
+
def teardown
|
|
303
|
+
@udp.close if @udp && !@udp.closed?
|
|
304
|
+
end
|
|
305
|
+
|
|
306
|
+
end # class LogStash::Inputs::Collectd
|
|
@@ -0,0 +1,323 @@
|
|
|
1
|
+
# encoding: utf-8
|
|
2
|
+
require "date"
|
|
3
|
+
require "logstash/inputs/base"
|
|
4
|
+
require "logstash/namespace"
|
|
5
|
+
|
|
6
|
+
# Retrieve watchdog log events from a Drupal installation with DBLog enabled.
|
|
7
|
+
# The events are pulled out directly from the database.
|
|
8
|
+
# The original events are not deleted, and on every consecutive run only new
|
|
9
|
+
# events are pulled.
|
|
10
|
+
#
|
|
11
|
+
# The last watchdog event id that was processed is stored in the Drupal
|
|
12
|
+
# variable table with the name "logstash_last_wid". Delete this variable or
|
|
13
|
+
# set it to 0 if you want to re-import all events.
|
|
14
|
+
#
|
|
15
|
+
# More info on DBLog: http://drupal.org/documentation/modules/dblog
|
|
16
|
+
#
|
|
17
|
+
class LogStash::Inputs::DrupalDblog < LogStash::Inputs::Base
  config_name "drupal_dblog"
  milestone 1

  default :codec, "plain"

  # Specify all drupal databases that you wish to import from.
  # This can be as many as you wish.
  # The format is a hash, with a unique site name as the key, and a database
  # url as the value.
  #
  # Example:
  # [
  #   "site1", "mysql://user1:password@host1.com/databasename",
  #   "other_site", "mysql://user2:password@otherhost.com/databasename",
  #   ...
  # ]
  config :databases, :validate => :hash

  # By default, the event only contains the current user id as a field.
  # If you wish to add the username as an additional field, set this to true.
  config :add_usernames, :validate => :boolean, :default => false

  # Time between checks in minutes.
  config :interval, :validate => :number, :default => 10

  # The amount of log messages that should be fetched with each query.
  # Bulk fetching is done to prevent querying huge data sets when lots of
  # messages are in the database.
  config :bulksize, :validate => :number, :default => 5000

  # Label this input with a type.
  # Types are used mainly for filter activation.
  #
  #
  # If you create an input with type "foobar", then only filters
  # which also have type "foobar" will act on them.
  #
  # The type is also stored as part of the event itself, so you
  # can also use the type to search for in the web interface.
  config :type, :validate => :string, :default => 'watchdog'

  public
  # Load the PHP-serialization helper and the platform-appropriate MySQL
  # driver (JDBC wrapper on JRuby, mysql2 otherwise).
  def register
    require "php_serialize"

    if RUBY_PLATFORM == 'java'
      require "logstash/inputs/drupal_dblog/jdbcconnection"
    else
      require "mysql2"
    end
  end # def register

  public
  # Validate the configured database URIs and normalize them into a hash of
  # connection-parameter hashes keyed by site name. Exits the process when
  # any URI is malformed or uses a scheme other than mysql.
  def config_init(params)
    super

    dbs = {}
    valid = true

    @databases.each do |name, rawUri|
      uri = URI(rawUri)

      dbs[name] = {
        "site" => name,
        "scheme" => uri.scheme,
        "host" => uri.host,
        "user" => uri.user,
        "password" => uri.password,
        "database" => uri.path.sub('/', ''),
        "port" => uri.port.to_i
      }

      # A usable URI needs at least a scheme, host, user, password and a
      # non-empty database name in the path component.
      if not (
        uri.scheme and not uri.scheme.empty?\
        and uri.host and not uri.host.empty?\
        and uri.user and not uri.user.empty?\
        and uri.password\
        and uri.path and not uri.path.sub('/', '').empty?
      )
        @logger.error("Drupal DBLog: Invalid database URI for #{name} : #{rawUri}")
        valid = false
      end
      if not uri.scheme == 'mysql'
        @logger.error("Drupal DBLog: Only mysql databases are supported.")
        valid = false
      end
    end

    if not valid
      @logger.error("Config validation failed.")
      # NOTE(review): hard process exit preserved from the original plugin
      # behavior; a ConfigurationError would be gentler but changes semantics.
      exit 1
    end

    @databases = dbs
  end #def config_init

  public
  # Main input loop: on every interval, poll each configured database for
  # new watchdog rows and push the resulting events onto output_queue.
  def run(output_queue)
    @logger.info("Initializing drupal_dblog")

    loop do
      @logger.debug("Drupal DBLog: Starting to fetch new watchdog entries")
      start = Time.now.to_i

      @databases.each do |name, db|
        @logger.debug("Drupal DBLog: Checking database #{name}")
        check_database(output_queue, db)
        @logger.info("Drupal DBLog: Retrieved all new watchdog messages from #{name}")
      end

      timeTaken = Time.now.to_i - start
      @logger.info("Drupal DBLog: Fetched all new watchdog entries in #{timeTaken} seconds")

      # If fetching of all databases took less time than the interval,
      # sleep a bit.
      sleepTime = @interval * 60 - timeTaken
      if sleepTime > 0
        @logger.debug("Drupal DBLog: Sleeping for #{sleepTime} seconds")
        sleep(sleepTime)
      end
    end # loop
  end # def run

  private
  # Open a database connection for the given normalized db hash and store it
  # in @client. Only mysql is supported; defaults the port to 3306.
  def initialize_client(db)
    if db["scheme"] == 'mysql'

      if not db["port"] > 0
        db["port"] = 3306
      end

      if RUBY_PLATFORM == 'java'
        @client = LogStash::DrupalDblogJavaMysqlConnection.new(
          db["host"],
          db["user"],
          db["password"],
          db["database"],
          db["port"]
        )
      else
        @client = Mysql2::Client.new(
          :host => db["host"],
          :port => db["port"],
          :username => db["user"],
          :password => db["password"],
          :database => db["database"]
        )
      end
    end
  end # def initialize_client

  private
  # Connect to one site's database, fetch all watchdog rows newer than the
  # stored "logstash_last_wid" bookmark in @bulksize batches, emit an event
  # per row, and advance the bookmark. The connection is always closed.
  def check_database(output_queue, db)

    begin
      # connect to the MySQL server
      initialize_client(db)
    rescue StandardError => e
      # Was `rescue Exception`, which also swallowed SystemExit/signals.
      @logger.error("Could not connect to database: " + e.message)
      return
    end #begin

    begin
      @sitename = db["site"]

      @usermap = @add_usernames ? get_usermap : nil

      # Retrieve last pulled watchdog entry id
      initialLastWid = get_last_wid
      lastWid = nil

      if initialLastWid == false
        # No bookmark yet: start from 0 and create the variable row.
        lastWid = 0
        set_last_wid(0, true)
      else
        lastWid = initialLastWid
      end

      # Fetch new entries, and create the event
      loop do
        results = get_db_rows(lastWid)
        if results.length < 1
          break
        end

        @logger.debug("Fetched " + results.length.to_s + " database rows")

        results.each do |row|
          event = build_event(row)
          if event
            decorate(event)
            output_queue << event
            lastWid = row['wid'].to_s
          end
        end

        set_last_wid(lastWid, false)
      end
    rescue StandardError => e
      @logger.error("Error while fetching messages: ", :error => e.message)
    ensure
      # Close connection. Originally this ran outside of any ensure, so an
      # unexpected raise leaked the connection.
      @client.close if @client
    end # begin
  end # def check_database

  # Fetch up to @bulksize watchdog rows with wid greater than lastWid,
  # ordered ascending so the bookmark can advance monotonically.
  def get_db_rows(lastWid)
    # Both interpolated values are coerced to integers so the concatenated
    # SQL cannot be used for injection.
    query = 'SELECT * from watchdog WHERE wid > ' + lastWid.to_i.to_s + " ORDER BY wid asc LIMIT " + @bulksize.to_i.to_s
    return @client.query(query)
  end # def get_db_rows

  private
  # Resolve the Drupal "site_name" variable for the current database when no
  # explicit site name is set. NOTE(review): not called from within this
  # class as visible here; kept for compatibility.
  def update_sitename
    if @sitename == ""
      result = @client.query('SELECT value FROM variable WHERE name="site_name"')
      if result.first()
        @sitename = PHP.unserialize(result.first()['value'])
      end
    end
  end # def update_sitename

  private
  # Read the "logstash_last_wid" bookmark from the Drupal variable table.
  # Returns the stored wid as a string, or false when the variable row does
  # not exist yet.
  def get_last_wid
    result = @client.query('SELECT value FROM variable WHERE name="logstash_last_wid"')
    lastWid = false

    if result.count() > 0
      # The value is PHP-serialized as e.g. "i:42;" — strip the wrapper and
      # fall back to "0" if what remains is not a plain integer.
      tmp = result.first()["value"].gsub("i:", "").gsub(";", "")
      lastWid = tmp.to_i.to_s == tmp ? tmp : "0"
    end

    return lastWid
  end # def get_last_wid

  private
  # Persist the bookmark. wid is PHP-serialized as an integer, which also
  # guarantees the interpolated SQL value is numeric. insert selects between
  # creating the variable row and updating it.
  def set_last_wid(wid, insert)
    wid = PHP.serialize(wid.to_i)

    # Update last import wid variable
    if insert
      # Does not exist yet, so insert
      @client.query('INSERT INTO variable (name, value) VALUES("logstash_last_wid", "' + wid + '")')
    else
      @client.query('UPDATE variable SET value="' + wid + '" WHERE name="logstash_last_wid"')
    end
  end # def set_last_wid

  private
  # Build a uid => username map from the users table; uid 0 (anonymous) is
  # mapped to "guest".
  def get_usermap
    map = {}

    @client.query("SELECT uid, name FROM users").each do |row|
      map[row["uid"]] = row["name"]
    end

    map[0] = "guest"
    return map
  end # def get_usermap

  private
  # Convert one watchdog row into a LogStash::Event: substitute the
  # PHP-serialized variables into the message (or attach them as
  # "variable_*" fields), optionally resolve the username, and merge the
  # remaining columns into the event hash.
  def build_event(row)
    # Convert unix timestamp
    timestamp = Time.at(row["timestamp"]).to_datetime.iso8601

    msg = row["message"]
    vars = {}

    # Unserialize the variables, and construct the message
    # ('N;' is PHP's serialized null, i.e. no variables).
    if row['variables'] != 'N;'
      vars = PHP.unserialize(row["variables"])

      if vars.is_a?(Hash)
        vars.each_pair do |k, v|
          if msg.scan(k).length() > 0
            msg = msg.gsub(k.to_s, v.to_s)
          else
            # If not inside the message, add var as an additional field
            row["variable_" + k] = v
          end
        end
      end
    end

    row.delete("message")
    row.delete("variables")
    row.delete("timestamp")

    row["severity"] = row["severity"].to_i

    if @add_usernames and @usermap.has_key?(row["uid"])
      row["user"] = @usermap[row["uid"]]
    end

    entry = {
      "@timestamp" => timestamp,
      "tags" => [],
      "type" => "watchdog",
      "site" => @sitename,
      "message" => msg
    }.merge(row)

    return LogStash::Event.new(entry)
  end # def build_event

end # class LogStash::Inputs::DrupalDblog
|