fluent-plugin-postgresql-csvlog 0.7.3 → 0.8.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: f0693e1f78345ee808b7ad9b58c76c6ab97863c6f3e5beb12207bf3bffae5761
- data.tar.gz: 0ad321b2d4c797c110c690afd60018eb2cc3d37b6bf2f735ea475285bf0022b5
+ metadata.gz: d9f81f14bc67d4554142164c18775964dacf12feaa57ddc5d6a64b67944a9357
+ data.tar.gz: deb17cb65f08f9d94fc53720d47cafe67022ae6999c6765023bf6d87e39d0463
  SHA512:
- metadata.gz: 582c67cf06d3a6c94378408e76712c130d91fdb7ae239deb272dbcaf97f7ba509d243dc45e3f525ecdb3da82271b0f94f1f2183a59b7fb09e054c66844fb0c63
- data.tar.gz: 88ba2dc6cc7490a08389ca7d471f78f4069b5218e604d11064a83d76e2abc4216110f1012f0840e71b7682d5223fc3298c8cb8f3c29c3e6b9a400881bda24ef0
+ metadata.gz: 287762c56603630b1603ba523859e4114d3f25a2d9c39db6661201f0703f44eb4f361b78acb93b6897119c4e7e511725925f6567a3b938d9735d853a42467112
+ data.tar.gz: 8676d896c51ae02b943435b4fa0c607e340cc2572fae04f28d989aaa77eec5ec2558b2fc1f4f3bff6c0b97850c689f966a331a2ff5c459c2402a5b9b562b4bf1
data/.gitlab-ci.yml CHANGED
@@ -1,16 +1,32 @@
- image: "ruby:2.7"
+ image: "ruby:${RUBY_VERSION}"
+
+ variables:
+ RUBY_VERSION: "3.2"
+
+ stages:
+ - test
+ - deploy
+
+ include:
+ - component: gitlab.com/gitlab-org/components/gem-release/gem-release@~latest
+ inputs:
+ smoke_test_script: "ruby -r 'fluent/plugin/filter_postgresql_slowlog'"
+ file_pattern_to_trigger_release: "fluent-plugin-postgresql-csvlog.gemspec"

  test:
+ stage: test
  before_script:
  - bundle config set path vendor
  - bundle install --jobs $(nproc)
  script:
  - bundle exec rake test
  cache:
+ key: ruby-${RUBY_VERSION}
  paths:
  - vendor/ruby

  .iteration_test:
+ stage: test
  services:
  - name: postgres:$POSTGRES_SERVER_VERSION
  alias: postgres
@@ -27,25 +43,22 @@ test:
  paths:
  - vendor/ruby

- # integration tests for postgres 12
- itest_pg12:
+ # integration tests for PostgreSQL
+ itest:
  extends: .iteration_test
- variables:
- POSTGRES_SERVER_VERSION: 12
-
- # integration tests for postgres 13
- itest_pg13:
- extends: .iteration_test
- variables:
- POSTGRES_SERVER_VERSION: 13
+ parallel:
+ matrix:
+ - POSTGRES_SERVER_VERSION: ["12", "13", "14", "15", "16"]

  end_to_end_verification_test:
- image: docker:19.03.12
+ stage: test
+ image: docker:20.10.16
  services:
- - docker:19.03.12-dind
+ - docker:20.10.16-dind
  tags:
  - gitlab-org-docker
  variables:
+ DOCKER_HOST: tcp://docker:2375
  DOCKER_TLS_CERTDIR: ""
  before_script:
  - apk add --no-cache docker-compose
data/docker-compose.yml CHANGED
@@ -2,7 +2,7 @@
  version: "3.3"
  services:
  postgres:
- image: postgres:13
+ image: postgres:14
  restart: "no"
  environment:
  - POSTGRES_USER=testuser
data/fluent-plugin-postgresql-csvlog.gemspec CHANGED
@@ -2,7 +2,7 @@ $:.push File.expand_path('lib', __dir__)

  Gem::Specification.new do |s|
  s.name = 'fluent-plugin-postgresql-csvlog'
- s.version = '0.7.3'
+ s.version = '0.8.2'
  s.authors = ['stanhu']
  s.email = ['stanhu@gmail.com']
  s.homepage = 'https://gitlab.com/gitlab-org/fluent-plugins/fluent-plugin-postgresql-csvlog'
@@ -28,11 +28,11 @@ module Fluent::Plugin
  wait_event_type,
  wait_event,
  xact_start,
- extract(epoch from clock_timestamp() - xact_start) xact_age_s,
+ CAST(extract(epoch from clock_timestamp() - xact_start) AS double precision) xact_age_s,
  query_start,
- extract(epoch from clock_timestamp() - query_start) query_age_s,
+ CAST(extract(epoch from clock_timestamp() - query_start) AS double precision) query_age_s,
  state_change,
- extract(epoch from clock_timestamp() - state_change) state_age_s,
+ CAST(extract(epoch from clock_timestamp() - state_change) AS double precision) state_age_s,
  state,
  query
  FROM pg_catalog.pg_stat_activity
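
Background on the CAST change above: on PostgreSQL 14 and later, extract(epoch from ...) returns numeric rather than double precision, and the Ruby pg gem's result type mapping typically decodes numeric values into BigDecimal objects, which are awkward to serialize in fluentd records. The snippet below is a minimal sketch of that behavior, assuming the pg gem with PG::BasicTypeMapForResults and a reachable PostgreSQL 14+ server; the connection parameters are placeholders, not taken from the plugin.

require 'pg'
require 'bigdecimal'

# Hypothetical connection parameters -- adjust for your environment.
conn = PG.connect(host: 'localhost', dbname: 'postgres')
conn.type_map_for_results = PG::BasicTypeMapForResults.new(conn)

row = conn.exec(<<~SQL).first
  SELECT extract(epoch from clock_timestamp() - now()) AS uncast_age_s,
         CAST(extract(epoch from clock_timestamp() - now()) AS double precision) AS cast_age_s
SQL

# Under the assumptions above, the uncast value decodes to a BigDecimal,
# while the CAST form decodes to a plain Float -- which is what the
# Float/BigDecimal assertions added to the integration test further down check.
puts row['uncast_age_s'].class
puts row['cast_age_s'].class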
@@ -6,8 +6,8 @@ module Fluent::Plugin
  # MarginaliaExtractor provides the parse_marginalia_into_record
  # utility method, useful for extracting marginalia into fluentd records
  module MarginaliaExtractor
- MARGINALIA_PREPENDED_REGEXP = %r{^(?<comment>/\*.*\*/)(?<sql>.*)}m.freeze
- MARGINALIA_APPENDED_REGEXP = %r{(?<sql>.*)(?<comment>/\*.*\*/)\s*;?\s*$}m.freeze
+ MARGINALIA_PREPENDED_REGEXP = %r{^(?<comment>/\*.*?\*/)\s*(?<sql>.*)}m
+ MARGINALIA_APPENDED_REGEXP = %r{(?<sql>.*)(?<comment>/\*.*\*/)\s*;?\s*$}m

  # Injects marginalia into a fluentd record
  def parse_marginalia_into_record(record, key, strip_comment)
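
Note on the regexp change above: the prepended pattern's comment group is now non-greedy (.*? instead of .*) and tolerates whitespace before the SQL. With the old greedy form, a query whose body itself contains the characters */ (for example a shell command such as cp -r ./*/*main*.py inside a string literal, as in the new test cases below) lets the comment capture run to the last */ and swallow most of the statement. A minimal standalone sketch of the difference, using a simplified query rather than the plugin's API:

# Old (greedy) and new (non-greedy) prepended-marginalia patterns from the diff above.
OLD_PREPENDED = %r{^(?<comment>/\*.*\*/)(?<sql>.*)}m
NEW_PREPENDED = %r{^(?<comment>/\*.*?\*/)\s*(?<sql>.*)}m

# Simplified stand-in for a query whose body contains "*/" inside a string literal.
sql = %(/*application:sidekiq,endpoint_id:PostReceive*/ SELECT 1 WHERE note = 'cp -r ./*/*main*.py')

puts OLD_PREPENDED.match(sql)[:comment]
# greedy: runs to the LAST "*/", capturing "/*application:...*/ SELECT 1 WHERE note = 'cp -r ./*/"
puts NEW_PREPENDED.match(sql)[:comment]
# non-greedy: stops at the first "*/", so only "/*application:sidekiq,endpoint_id:PostReceive*/"
# is treated as the comment and the SQL is left intact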
@@ -110,6 +110,11 @@ class PgStatActivityInputIntegrationTest < Test::Unit::TestCase
  assert_false record['query_length'].nil?
  assert_false record['query'].nil?
  assert_false record['fingerprint'].nil?
+
+ assert_true record['xact_age_s'].class == Float
+ assert_true record['query_age_s'].class == Float
+ assert_true record['state_age_s'].class == Float
+ assert_empty record.values.select { |val| val.class == BigDecimal }
  end
  end
  end
@@ -48,6 +48,19 @@ class Marginalia < Test::Unit::TestCase
  test_parse(sql, {}, 'sql', true, expected)
  end

+ test 'marginalia appended with other comments in SQL' do
+ sql = %(INSERT INTO "p_ci_builds_metadata" ("project_id", "config_options", "config_variables", "has_exposed_artifacts", "build_id", "partition_id") VALUES (40163635, '{"image":{"name":"$AWS_AMAZONLINUX_IMAGE"},"artifacts":{"paths":["${ASSET_NAME}/out"]},"before_script":["yum install -y python3 python3-pip zip findutils","cd ${ASSET_NAME}"],"script":["mkdir -p venv","cd venv","python3 -m venv pyspark_venvsource","source pyspark_venvsource/bin/activate","pip3 install -r ../scripts/requirements.txt","pip3 install venv-pack","venv-pack -f -o pyspark_venv.tar.gz","cd ..; mkdir -p out/lib","cd src/step/; cp -r --parents ./*/*main*.py ../../out"],"retry":{"max":2}}', '[{"key":"ASSET_NAME","value":"data-pipeline"}]', FALSE, 6269937106, 101) RETURNING "id" /*application:sidekiq,correlation_id:01HQNQAHYZF4Q294P59R7RP71H,jid:047ecd6920dc27ccd49b0aef,endpoint_id:PostReceive,db_config_name:ci*/)
+ expected = {
+ 'application' => 'sidekiq',
+ 'correlation_id' => '01HQNQAHYZF4Q294P59R7RP71H',
+ 'jid' => '047ecd6920dc27ccd49b0aef',
+ 'db_config_name' => 'ci',
+ 'endpoint_id' => 'PostReceive',
+ 'sql' => %(INSERT INTO "p_ci_builds_metadata" ("project_id", "config_options", "config_variables", "has_exposed_artifacts", "build_id", "partition_id") VALUES (40163635, '{"image":{"name":"$AWS_AMAZONLINUX_IMAGE"},"artifacts":{"paths":["${ASSET_NAME}/out"]},"before_script":["yum install -y python3 python3-pip zip findutils","cd ${ASSET_NAME}"],"script":["mkdir -p venv","cd venv","python3 -m venv pyspark_venvsource","source pyspark_venvsource/bin/activate","pip3 install -r ../scripts/requirements.txt","pip3 install venv-pack","venv-pack -f -o pyspark_venv.tar.gz","cd ..; mkdir -p out/lib","cd src/step/; cp -r --parents ./*/*main*.py ../../out"],"retry":{"max":2}}', '[{"key":"ASSET_NAME","value":"data-pipeline"}]', FALSE, 6269937106, 101) RETURNING "id")
+ }
+ test_parse(sql, {}, 'sql', true, expected)
+ end
+

  test 'normal comment prepended' do
  sql = '/* this is just a comment */ SELECT COUNT(*) FROM "projects"'
@@ -87,6 +100,19 @@ class Marginalia < Test::Unit::TestCase
  test_parse(sql, {}, 'sql', true, expected)
  end

+ test 'marginalia prepended with other comments in SQL' do
+ sql = %(/*application:sidekiq,correlation_id:01HQNQAHYZF4Q294P59R7RP71H,jid:047ecd6920dc27ccd49b0aef,endpoint_id:PostReceive,db_config_name:ci*/ INSERT INTO "p_ci_builds_metadata" ("project_id", "config_options", "config_variables", "has_exposed_artifacts", "build_id", "partition_id") VALUES (40163635, '{"image":{"name":"$AWS_AMAZONLINUX_IMAGE"},"artifacts":{"paths":["${ASSET_NAME}/out"]},"before_script":["yum install -y python3 python3-pip zip findutils","cd ${ASSET_NAME}"],"script":["mkdir -p venv","cd venv","python3 -m venv pyspark_venvsource","source pyspark_venvsource/bin/activate","pip3 install -r ../scripts/requirements.txt","pip3 install venv-pack","venv-pack -f -o pyspark_venv.tar.gz","cd ..; mkdir -p out/lib","cd src/step/; cp -r --parents ./*/*main*.py ../../out"],"retry":{"max":2}}', '[{"key":"ASSET_NAME","value":"data-pipeline"}]', FALSE, 6269937106, 101) RETURNING "id")
+ expected = {
+ 'application' => 'sidekiq',
+ 'correlation_id' => '01HQNQAHYZF4Q294P59R7RP71H',
+ 'jid' => '047ecd6920dc27ccd49b0aef',
+ 'db_config_name' => 'ci',
+ 'endpoint_id' => 'PostReceive',
+ 'sql' => %(INSERT INTO "p_ci_builds_metadata" ("project_id", "config_options", "config_variables", "has_exposed_artifacts", "build_id", "partition_id") VALUES (40163635, '{"image":{"name":"$AWS_AMAZONLINUX_IMAGE"},"artifacts":{"paths":["${ASSET_NAME}/out"]},"before_script":["yum install -y python3 python3-pip zip findutils","cd ${ASSET_NAME}"],"script":["mkdir -p venv","cd venv","python3 -m venv pyspark_venvsource","source pyspark_venvsource/bin/activate","pip3 install -r ../scripts/requirements.txt","pip3 install venv-pack","venv-pack -f -o pyspark_venv.tar.gz","cd ..; mkdir -p out/lib","cd src/step/; cp -r --parents ./*/*main*.py ../../out"],"retry":{"max":2}}', '[{"key":"ASSET_NAME","value":"data-pipeline"}]', FALSE, 6269937106, 101) RETURNING "id")
+ }
+ test_parse(sql, {}, 'sql', true, expected)
+ end
+

  test 'marginalia prepended for web, comment_strip disabled' do
  expected = {
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: fluent-plugin-postgresql-csvlog
  version: !ruby/object:Gem::Version
- version: 0.7.3
+ version: 0.8.2
  platform: ruby
  authors:
  - stanhu
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2023-04-11 00:00:00.000000000 Z
+ date: 2024-02-27 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: fluentd
@@ -140,7 +140,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.4.10
+ rubygems_version: 3.5.6
  signing_key:
  specification_version: 4
  summary: fluentd plugins to work with PostgreSQL CSV logs