timescaledb 0.2.1 → 0.2.2

Files changed (42)
  1. checksums.yaml +4 -4
  2. data/README.md +41 -9
  3. data/bin/console +1 -1
  4. data/bin/tsdb +2 -2
  5. data/docs/command_line.md +178 -0
  6. data/docs/img/lttb_example.png +0 -0
  7. data/docs/img/lttb_sql_vs_ruby.gif +0 -0
  8. data/docs/img/lttb_zoom.gif +0 -0
  9. data/docs/index.md +61 -0
  10. data/docs/migrations.md +69 -0
  11. data/docs/models.md +78 -0
  12. data/docs/toolkit.md +394 -0
  13. data/docs/toolkit_lttb_tutorial.md +557 -0
  14. data/docs/toolkit_lttb_zoom.md +357 -0
  15. data/docs/videos.md +16 -0
  16. data/examples/all_in_one/all_in_one.rb +39 -5
  17. data/examples/all_in_one/benchmark_comparison.rb +108 -0
  18. data/examples/all_in_one/caggs.rb +93 -0
  19. data/examples/all_in_one/query_data.rb +78 -0
  20. data/examples/toolkit-demo/compare_volatility.rb +64 -0
  21. data/examples/toolkit-demo/lttb/README.md +15 -0
  22. data/examples/toolkit-demo/lttb/lttb.rb +92 -0
  23. data/examples/toolkit-demo/lttb/lttb_sinatra.rb +139 -0
  24. data/examples/toolkit-demo/lttb/lttb_test.rb +21 -0
  25. data/examples/toolkit-demo/lttb/views/index.erb +27 -0
  26. data/examples/toolkit-demo/lttb-zoom/README.md +13 -0
  27. data/examples/toolkit-demo/lttb-zoom/lttb_zoomable.rb +90 -0
  28. data/examples/toolkit-demo/lttb-zoom/views/index.erb +33 -0
  29. data/lib/timescaledb/acts_as_time_vector.rb +18 -0
  30. data/lib/timescaledb/dimensions.rb +1 -0
  31. data/lib/timescaledb/hypertable.rb +5 -1
  32. data/lib/timescaledb/migration_helpers.rb +11 -0
  33. data/lib/timescaledb/stats_report.rb +1 -1
  34. data/lib/timescaledb/toolkit/helpers.rb +20 -0
  35. data/lib/timescaledb/toolkit/time_vector.rb +66 -0
  36. data/lib/timescaledb/toolkit.rb +3 -0
  37. data/lib/timescaledb/version.rb +1 -1
  38. data/lib/timescaledb.rb +1 -0
  39. data/mkdocs.yml +33 -0
  40. metadata +30 -4
  41. data/examples/all_in_one/Gemfile +0 -11
  42. data/examples/all_in_one/Gemfile.lock +0 -51
data/examples/all_in_one/query_data.rb
@@ -0,0 +1,78 @@
+ require 'bundler/inline' # require only what you need
+
+ gemfile(true) do
+   gem 'timescaledb', path: '../..'
+   gem 'pry'
+   gem 'faker'
+ end
+
+ require 'pp'
+ # ruby query_data.rb postgres://user:pass@host:port/db_name
+ ActiveRecord::Base.establish_connection(ARGV.last)
+
+ # Simple example
+ class Event < ActiveRecord::Base
+   self.primary_key = nil
+   acts_as_hypertable
+
+   # If you want to override the automatic assignment of the `created_at` time series column
+   def self.timestamp_attributes_for_create_in_model
+     []
+   end
+   def self.timestamp_attributes_for_update_in_model
+     []
+   end
+ end
+
+ # Setup Hypertable as in a migration
+ ActiveRecord::Base.connection.instance_exec do
+   ActiveRecord::Base.logger = Logger.new(STDOUT)
+
+   drop_table(Event.table_name) if Event.table_exists?
+
+   hypertable_options = {
+     time_column: 'created_at',
+     chunk_time_interval: '7 day',
+     compress_segmentby: 'identifier',
+     compression_interval: '7 days'
+   }
+
+   create_table(:events, id: false, hypertable: hypertable_options) do |t|
+     t.string :identifier, null: false
+     t.jsonb :payload
+     t.timestamps
+   end
+ end
+
+ def generate_fake_data(total: 100_000)
+   time = 1.month.ago
+   total.times.flat_map do
+     identifier = %w[sign_up login click scroll logout view]
+     time = time + rand(60).seconds
+     {
+       created_at: time,
+       updated_at: time,
+       identifier: identifier.sample,
+       payload: {
+         "name" => Faker::Name.name,
+         "email" => Faker::Internet.email
+       }
+     }
+   end
+ end
+
+
+ batch = generate_fake_data total: 10_000
+ ActiveRecord::Base.logger = nil
+ Event.insert_all(batch, returning: false)
+ ActiveRecord::Base.logger = Logger.new(STDOUT)
+
+ pp Event.previous_month.count
+ pp Event.previous_week.count
+ pp Event.previous_month.group('identifier').count
+ pp Event.previous_week.group('identifier').count
+
+ pp Event
+   .previous_month
+   .select("time_bucket('1 day', created_at) as time, identifier, count(*)")
+   .group("1,2").map(&:attributes)
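The last query above groups by ordinal positions 1 and 2, i.e. by the bucketed time and the identifier. A rough sketch of the SQL it builds with the column names above (the WHERE clause added by the previous_month scope is omitted; confirm with to_sql):

    puts Event
      .previous_month
      .select("time_bucket('1 day', created_at) as time, identifier, count(*)")
      .group("1,2").to_sql
    # => SELECT time_bucket('1 day', created_at) as time, identifier, count(*)
    #    FROM "events" WHERE ... GROUP BY 1,2   -- approximate output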
data/examples/toolkit-demo/compare_volatility.rb
@@ -0,0 +1,64 @@
+ require 'bundler/setup'
+ require 'timescaledb'
+
+
+ # Compare volatility processing in Ruby vs SQL.
+ class Measurement < ActiveRecord::Base
+   acts_as_hypertable time_column: "ts"
+   acts_as_time_vector segment_by: "device_id", value_column: "val"
+
+   scope :volatility_sql, -> do
+     select("device_id, timevector(#{time_column}, #{value_column}) -> sort() -> delta() -> abs() -> sum() as volatility")
+       .group("device_id")
+   end
+
+   scope :volatility_ruby, -> {
+     volatility = Hash.new(0)
+     previous = Hash.new
+     find_all do |measurement|
+       device_id = measurement.device_id
+       if previous[device_id]
+         delta = (measurement.val - previous[device_id]).abs
+         volatility[device_id] += delta
+       end
+       previous[device_id] = measurement.val
+     end
+     volatility
+   }
+ end
+
+ ActiveRecord::Base.establish_connection ENV["PG_URI"]
+ ActiveRecord::Base.connection.add_toolkit_to_search_path!
+
+
+ ActiveRecord::Base.connection.instance_exec do
+   ActiveRecord::Base.logger = Logger.new(STDOUT)
+
+   unless Measurement.table_exists?
+     hypertable_options = {
+       time_column: 'ts',
+       chunk_time_interval: '1 day',
+     }
+     create_table :measurements, hypertable: hypertable_options, id: false do |t|
+       t.integer :device_id
+       t.decimal :val
+       t.timestamp :ts
+     end
+   end
+ end
+
+ if Measurement.count.zero?
+   ActiveRecord::Base.connection.execute(<<~SQL)
+     INSERT INTO measurements (ts, device_id, val)
+     SELECT ts, device_id, random()*80
+     FROM generate_series(TIMESTAMP '2022-01-01 00:00:00',
+                          TIMESTAMP '2022-02-01 00:00:00',
+                          INTERVAL '5 minutes') AS g1(ts),
+          generate_series(0, 5) AS g2(device_id);
+   SQL
+ end
+
+ Benchmark.bm do |x|
+   x.report("ruby") { Measurement.volatility_ruby }
+   x.report("sql") { Measurement.volatility_sql.map(&:attributes) }
+ end
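For reference, a rough sketch of the SQL that the volatility_sql scope builds with the column names declared above (an approximation; run to_sql to confirm the exact string):

    puts Measurement.volatility_sql.to_sql
    # => SELECT device_id, timevector(ts, val) -> sort() -> delta() -> abs() -> sum() as volatility
    #    FROM "measurements" GROUP BY device_id   -- approximate output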
data/examples/toolkit-demo/lttb/README.md
@@ -0,0 +1,15 @@
+ # LTTB examples
+
+ This folder contains a few ideas to explore and learn more about the LTTB algorithm.
+
+ The [./lttb.rb](./lttb.rb) file is the Ruby implementation of LTTB, and the related
+ [./lttb_test.rb](./lttb_test.rb) file verifies the same example used by the
+ Timescale Toolkit [implementation](https://github.com/timescale/timescaledb-toolkit/blob/6ee2ea1e8ff64bab10b90bdf4cd4b0f7ed763934/extension/src/lttb.rs#L512-L530).
+
+ The [./lttb_sinatra.rb](./lttb_sinatra.rb) file is a small web server that compares
+ the SQL and Ruby implementations. It also uses the [./views](./views) folder, which
+ contains the view rendering part.
+
+ You can learn more by reading the [LTTB tutorial](https://jonatas.github.io/timescaledb/toolkit_lttb_tutorial/).
+
+
data/examples/toolkit-demo/lttb/lttb.rb
@@ -0,0 +1,92 @@
+ module Triangle
+   module_function
+   def area(a, b, c)
+     (ax, ay), (bx, by), (cx, cy) = a, b, c
+     (
+       (ax - cx).to_f * (by - ay) -
+       (ax - bx).to_f * (cy - ay)
+     ).abs * 0.5
+   end
+ end
+ class Lttb
+   class << self
+     def avg(array)
+       array.sum.to_f / array.size
+     end
+
+     def downsample(data, threshold)
+       new(data, threshold).downsample
+     end
+   end
+
+   attr_reader :data, :threshold
+   def initialize(data, threshold)
+     fail 'data is not an array' unless data.is_a? Array
+     fail "threshold should be >= 2. It's #{threshold}." if threshold < 2
+     @data = data
+     @threshold = threshold
+   end
+
+   def downsample
+     case @data.first.first
+     when Time, DateTime, Date
+       transformed_dates = true
+       dates_to_numbers()
+     end
+     process.tap do |downsampled|
+       numbers_to_dates(downsampled) if transformed_dates
+     end
+   end
+   private
+
+   def process
+     return data if threshold >= data.size || threshold == 0
+
+     sampled = [data.first, data.last] # Keep first and last point. append in the middle.
+     point_index = 0
+
+     (threshold - 2).times do |i|
+       step = [((i + 1.0) * bucket_size).to_i, data.size].min
+       next_point = (i * bucket_size).to_i + 1
+
+       break if next_point > data.size - 2
+
+       points = data[step, slice]
+       avg_x = Lttb.avg(points.map(&:first)).to_i
+       avg_y = Lttb.avg(points.map(&:last))
+
+       max_area = -1.0
+
+       (next_point...(step + 1)).each do |idx|
+         area = Triangle.area(data[point_index], data[idx], [avg_x, avg_y])
+
+         if area > max_area
+           max_area = area
+           next_point = idx
+         end
+       end
+
+       sampled.insert(-2, data[next_point])
+       point_index = next_point
+     end
+
+     sampled
+   end
+
+   def bucket_size
+     @bucket_size ||= ((data.size - 2.0) / (threshold - 2.0))
+   end
+
+   def slice
+     @slice ||= bucket_size.to_i
+   end
+
+   def dates_to_numbers
+     @start_date = data[0][0].dup
+     data.each { |d| d[0] = d[0] - @start_date }
+   end
+
+   def numbers_to_dates(downsampled)
+     downsampled.each { |d| d[0] = @start_date + d[0] }
+   end
+ end
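A tiny usage sketch for the class above, with made-up numeric points; as the process method shows, the first and last points are always kept:

    points = [[0, 10], [1, 12], [2, 8], [3, 25], [4, 11], [5, 14]]
    Lttb.downsample(points, 4)
    # => [[0, 10], [2, 8], [3, 25], [5, 14]]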
data/examples/toolkit-demo/lttb/lttb_sinatra.rb
@@ -0,0 +1,139 @@
+ # ruby lttb_sinatra.rb postgres://user:pass@host:port/db_name
+ require 'bundler/inline' # require only what you need
+
+ gemfile(true) do
+   gem 'timescaledb', path: '../../..'
+   gem 'pry'
+   gem 'sinatra', require: false
+   gem 'sinatra-reloader', require: false
+   gem 'sinatra-cross_origin', require: false
+   gem 'chartkick'
+ end
+
+ require 'timescaledb/toolkit'
+ require 'sinatra'
+ require 'sinatra/json'
+ require 'sinatra/cross_origin'
+ require 'chartkick'
+ require_relative 'lttb'
+
+ PG_URI = ARGV.last
+
+ VALID_SIZES = %i[small med big]
+ def download_weather_dataset size: :small
+   unless VALID_SIZES.include?(size)
+     fail "Invalid size: #{size}. Valid sizes are #{VALID_SIZES}"
+   end
+   url = "https://timescaledata.blob.core.windows.net/datasets/weather_#{size}.tar.gz"
+   puts "fetching #{size} weather dataset..."
+   system "wget \"#{url}\""
+   puts "done!"
+ end
+
+ def setup size: :small
+   file = "weather_#{size}.tar.gz"
+   download_weather_dataset(size: size) unless File.exists? file
+   puts "extracting #{file}"
+   system "tar -xvzf #{file} "
+   puts "creating data structures"
+   system "psql #{PG_URI} < weather.sql"
+   system %|psql #{PG_URI} -c "\\COPY locations FROM weather_#{size}_locations.csv CSV"|
+   system %|psql #{PG_URI} -c "\\COPY conditions FROM weather_#{size}_conditions.csv CSV"|
+ end
+
+ ActiveRecord::Base.establish_connection(PG_URI)
+ class Location < ActiveRecord::Base
+   self.primary_key = "device_id"
+
+   has_many :conditions, foreign_key: "device_id"
+ end
+
+ class Condition < ActiveRecord::Base
+   acts_as_hypertable time_column: "time"
+   acts_as_time_vector value_column: "temperature", segment_by: "device_id"
+
+   belongs_to :location, foreign_key: "device_id"
+ end
+
+ # Setup Hypertable as in a migration
+ ActiveRecord::Base.connection.instance_exec do
+   ActiveRecord::Base.logger = Logger.new(STDOUT)
+
+   unless Condition.table_exists?
+     setup size: :big
+   end
+ end
+
+ require 'sinatra/reloader'
+ require 'sinatra/contrib'
+ register Sinatra::Reloader
+ register Sinatra::Contrib
+ include Chartkick::Helper
+
+ set :bind, '0.0.0.0'
+ set :port, 9999
+
+ def conditions
+   device_ids = (1..9).map { |i| "weather-pro-00000#{i}" }
+   Condition
+     .where(device_id: device_ids.first)
+     .order('time')
+ end
+
+ def threshold
+   params[:threshold]&.to_i || 50
+ end
+
+ configure do
+   enable :cross_origin
+ end
+ before do
+   response.headers['Access-Control-Allow-Origin'] = '*'
+ end
+
+ # routes...
+ options "*" do
+   response.headers["Allow"] = "GET, PUT, POST, DELETE, OPTIONS"
+   response.headers["Access-Control-Allow-Headers"] = "Authorization,
+     Content-Type, Accept, X-User-Email, X-Auth-Token"
+   response.headers["Access-Control-Allow-Origin"] = "*"
+   200
+ end
+
+ get '/' do
+   headers 'Access-Control-Allow-Origin' => 'https://cdn.jsdelivr.net/'
+
+   erb :index
+ end
+
+ get '/lttb_ruby' do
+   payload = conditions
+     .pluck(:device_id, :time, :temperature)
+     .group_by(&:first)
+     .map do |device_id, data|
+       data.each(&:shift)
+       {
+         name: device_id,
+         data: Lttb.downsample(data, threshold)
+       }
+     end
+   json payload
+ end
+
+ get "/lttb_sql" do
+   downsampled = conditions
+     .lttb(threshold: threshold)
+     .map do |device_id, data|
+       {
+         name: device_id,
+         data: data.sort_by(&:first)
+       }
+     end
+   json downsampled
+ end
+
+
+ get '/all_data' do
+   data = conditions.pluck(:time, :temperature)
+   json [ { name: "All data", data: data } ]
+ end
data/examples/toolkit-demo/lttb/lttb_test.rb
@@ -0,0 +1,21 @@
+ require_relative 'lttb'
+ require 'pp'
+ require 'date'
+
+ data = [
+   ['2020-1-1', 10],
+   ['2020-1-2', 21],
+   ['2020-1-3', 19],
+   ['2020-1-4', 32],
+   ['2020-1-5', 12],
+   ['2020-1-6', 14],
+   ['2020-1-7', 18],
+   ['2020-1-8', 29],
+   ['2020-1-9', 23],
+   ['2020-1-10', 27],
+   ['2020-1-11', 14]]
+ data.each do |e|
+   e[0] = Time.mktime(*e[0].split('-'))
+ end
+
+ pp Lttb.downsample(data, 5)
data/examples/toolkit-demo/lttb/views/index.erb
@@ -0,0 +1,27 @@
+ <script src="https://cdn.jsdelivr.net/npm/jquery@3.6.1/dist/jquery.min.js"></script>
+ <script src="https://cdn.jsdelivr.net/npm/hammerjs@2.0.8"></script>
+ <script src="https://cdn.jsdelivr.net/npm/moment@2.29.4/moment.min.js"></script>
+ <script src="https://cdn.jsdelivr.net/npm/highcharts@10.2.1/highcharts.min.js"></script>
+ <script src="https://cdn.jsdelivr.net/npm/chartjs-adapter-moment@1.0.0/dist/chartjs-adapter-moment.min.js"></script>
+ <script src="https://cdn.jsdelivr.net/npm/chartkick@4.2.0/dist/chartkick.min.js"></script>
+ <script src="https://cdn.jsdelivr.net/npm/chartjs-plugin-zoom@1.2.1/dist/chartjs-plugin-zoom.min.js"></script>
+ <h3>Downsampling <%= conditions.count %> records to
+   <select value="<%= threshold %>" onchange="location.href=`/?threshold=${this.value}`">
+     <option><%= threshold %></option>
+     <option value="50">50</option>
+     <option value="100">100</option>
+     <option value="500">500</option>
+     <option value="1000">1000</option>
+     <option value="5000">5000</option>
+   </select> points.
+ </h3>
+
+ <h3>SQL</h3>
+ <%= line_chart("/lttb_sql?threshold=#{threshold}",
+     loading: "downsampled data from SQL") %>
+ <h3>Ruby</h3>
+ <%= line_chart("/lttb_ruby?threshold=#{threshold}",
+     library: {chart: {zoomType: 'x'}},
+     points: true, loading: "downsampled data from Ruby") %>
+ <!--%= line_chart("/all_data", loading: "Loading all data") %-->
+
data/examples/toolkit-demo/lttb-zoom/README.md
@@ -0,0 +1,13 @@
+ # LTTB examples
+
+ This folder contains an example that explores dynamic reloading of downsampled data.
+
+ It keeps the same number of points and refreshes the data at a higher resolution
+ as you keep zooming in.
+ The [./lttb_zoomable.rb](./lttb_zoomable.rb) file is a small web server that serves
+ the zoomable, downsampled data. It also uses the [./views](./views) folder, which
+ contains the view rendering and JavaScript part.
+
+ You can learn more by reading the [LTTB Zoom tutorial](https://jonatas.github.io/timescaledb/toolkit_lttb_zoom/).
+
+
data/examples/toolkit-demo/lttb-zoom/lttb_zoomable.rb
@@ -0,0 +1,90 @@
+ # ruby lttb_zoomable.rb postgres://user:pass@host:port/db_name
+ require 'bundler/inline' # require only what you need
+
+ gemfile(true) do
+   gem 'timescaledb', path: '../../..'
+   gem 'pry'
+   gem 'sinatra', require: false
+   gem 'sinatra-reloader'
+   gem 'sinatra-cross_origin'
+ end
+
+ require 'timescaledb/toolkit'
+ require 'sinatra'
+ require 'sinatra/json'
+ require 'sinatra/contrib'
+
+ register Sinatra::Reloader
+ register Sinatra::Contrib
+
+ PG_URI = ARGV.last
+
+ VALID_SIZES = %i[small med big]
+ def download_weather_dataset size: :small
+   unless VALID_SIZES.include?(size)
+     fail "Invalid size: #{size}. Valid sizes are #{VALID_SIZES}"
+   end
+   url = "https://timescaledata.blob.core.windows.net/datasets/weather_#{size}.tar.gz"
+   puts "fetching #{size} weather dataset..."
+   system "wget \"#{url}\""
+   puts "done!"
+ end
+
+ def setup size: :small
+   file = "weather_#{size}.tar.gz"
+   download_weather_dataset(size: size) unless File.exists? file
+   puts "extracting #{file}"
+   system "tar -xvzf #{file} "
+   puts "creating data structures"
+   system "psql #{PG_URI} < weather.sql"
+   system %|psql #{PG_URI} -c "\\COPY locations FROM weather_#{size}_locations.csv CSV"|
+   system %|psql #{PG_URI} -c "\\COPY conditions FROM weather_#{size}_conditions.csv CSV"|
+ end
+
+ ActiveRecord::Base.establish_connection(PG_URI)
+
+ class Condition < ActiveRecord::Base
+   acts_as_hypertable time_column: "time"
+   acts_as_time_vector value_column: "temperature", segment_by: "device_id"
+ end
+
+ # Setup Hypertable as in a migration
+ ActiveRecord::Base.connection.instance_exec do
+   ActiveRecord::Base.logger = Logger.new(STDOUT)
+
+   if !Condition.table_exists? || Condition.count.zero?
+
+     setup size: :big
+   end
+ end
+
+
+ def filter_by_request_params
+   filter = {device_id: "weather-pro-000001"}
+   if params[:filter] && params[:filter] != "null"
+     from, to = params[:filter].split(",").map(&Time.method(:parse))
+     filter[:time] = from..to
+   end
+   filter
+ end
+
+ def conditions
+   Condition.where(filter_by_request_params).order('time')
+ end
+
+ def threshold
+   params[:threshold]&.to_i || 50
+ end
+
+ configure do
+   enable :cross_origin
+ end
+
+ get '/' do
+   erb :index
+ end
+
+ get "/lttb_sql" do
+   downsampled = conditions.lttb(threshold: threshold, segment_by: nil)
+   json downsampled
+ end
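The zoom round trip relies on the filter query parameter carrying two comma-separated timestamps, which filter_by_request_params above parses with Time.parse. A hypothetical request sketch:

    # Issued by the Plotly relayout handler in views/index.erb (timestamps are made up):
    #   GET /lttb_sql?threshold=50&filter=2016-11-16 21:18:53,2016-11-17 01:00:00
    # which the server roughly turns into:
    #   Condition.where(device_id: "weather-pro-000001", time: from..to)
    #            .order('time').lttb(threshold: 50, segment_by: nil)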
data/examples/toolkit-demo/lttb-zoom/views/index.erb
@@ -0,0 +1,33 @@
+ <head>
+   <script src="https://cdn.jsdelivr.net/npm/jquery@3.6.1/dist/jquery.min.js"></script>
+   <script src="https://cdn.plot.ly/plotly-latest.min.js"></script>
+ </head>
+
+ <h3>Downsampling <%= conditions.count %> records to
+   <select value="<%= threshold %>" onchange="location.href=`/?threshold=${this.value}`">
+     <option><%= threshold %></option>
+     <option value="50">50</option>
+     <option value="100">100</option>
+     <option value="500">500</option>
+     <option value="1000">1000</option>
+     <option value="5000">5000</option>
+   </select> points.
+ </h3>
+ <div id='container'></div>
+ <script>
+   let chart = document.getElementById('container');
+   function fetch(filter) {
+     $.ajax({
+       url: `/lttb_sql?threshold=<%= threshold %>&filter=${filter}`,
+       success: function(result) {
+         let x = result.map((e) => e[0]);
+         let y = result.map((e) => parseFloat(e[1]));
+         Plotly.newPlot(chart, [{x, y, "mode": "markers", "type": "scatter"}]);
+         chart.on('plotly_relayout',
+           function(eventdata) {
+             fetch([eventdata['xaxis.range[0]'], eventdata['xaxis.range[1]']]);
+           });
+       }});
+   }
+   fetch(null);
+ </script>
data/lib/timescaledb/acts_as_time_vector.rb
@@ -0,0 +1,18 @@
+ module Timescaledb
+   module ActsAsTimeVector
+     def acts_as_time_vector(options = {})
+       return if acts_as_time_vector?
+
+       include Timescaledb::Toolkit::TimeVector
+
+       class_attribute :time_vector_options, instance_writer: false
+       define_default_scopes
+       self.time_vector_options = options
+     end
+
+     def acts_as_time_vector?
+       included_modules.include?(Timescaledb::ActsAsTimeVector)
+     end
+   end
+ end
+ ActiveRecord::Base.extend Timescaledb::ActsAsTimeVector
data/lib/timescaledb/dimensions.rb
@@ -1,6 +1,7 @@
  module Timescaledb
    class Dimension < ActiveRecord::Base
      self.table_name = "timescaledb_information.dimensions"
+     attribute :time_interval, :interval
    end
    Dimensions = Dimension
  end
data/lib/timescaledb/hypertable.rb
@@ -10,7 +10,7 @@ module Timescaledb
      foreign_key: "hypertable_name",
      class_name: "Timescaledb::CompressionSetting"

-   has_one :dimensions,
+   has_many :dimensions,
      foreign_key: "hypertable_name",
      class_name: "Timescaledb::Dimension"

@@ -18,6 +18,10 @@ module Timescaledb
      foreign_key: "hypertable_name",
      class_name: "Timescaledb::ContinuousAggregate"

+   def main_dimension
+     dimensions.find_by dimension_number: 1
+   end
+
    def chunks_detailed_size
      struct_from "SELECT * from chunks_detailed_size('#{self.hypertable_name}')"
    end
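A minimal usage sketch for the new main_dimension helper (the hypertable name is hypothetical; time_interval is the attribute added to Timescaledb::Dimension above):

    hypertable = Timescaledb::Hypertable.find_by(hypertable_name: "events") # hypothetical name
    hypertable.main_dimension                 # first (time) dimension of the hypertable
    hypertable.main_dimension&.time_interval  # chunk interval, cast via the new :interval attribute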
data/lib/timescaledb/migration_helpers.rb
@@ -82,6 +82,17 @@ module Timescaledb

      create_continuous_aggregate_policy(table_name, options[:refresh_policies] || {})
    end
+
+
+   # Drop a continuous aggregate.
+   #
+   # It basically runs DROP MATERIALIZED VIEW for the given view name.
+   #
+   # @param view_name [String, Symbol] The name of the continuous aggregate view.
+   def drop_continuous_aggregates view_name
+     execute "DROP MATERIALIZED VIEW #{view_name}"
+   end
+
    alias_method :create_continuous_aggregates, :create_continuous_aggregate

    def create_continuous_aggregate_policy(table_name, **options)
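A minimal migration sketch using the new helper (the view name is hypothetical):

    class DropDailyEvents < ActiveRecord::Migration[7.0]
      def up
        drop_continuous_aggregates :daily_events # hypothetical continuous aggregate view
      end
    end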
data/lib/timescaledb/stats_report.rb
@@ -19,7 +19,7 @@ module Timescaledb
    end

    def compression_resume(scope)
-     sum = -> (method) { (scope.map(&method).inject(:+) || 0).to_formatted_s(:human_size)}
+     sum = -> (method) { (scope.map(&method).inject(:+) || 0).to_s(:human_size)}
      {
        uncompressed: sum[:before_total_bytes],
        compressed: sum[:after_total_bytes]
data/lib/timescaledb/toolkit/helpers.rb
@@ -0,0 +1,20 @@
+ require 'active_record/connection_adapters/postgresql_adapter'
+
+ # Useful methods to run TimescaleDB with Toolkit functions in your Ruby app.
+ module Timescaledb
+   # Helper methods to set up queries that use the toolkit.
+   module Toolkit
+     module Helpers
+
+       # Includes toolkit_experimental in the search path to make it easy to
+       # access all the functions.
+       def add_toolkit_to_search_path!
+         return if schema_search_path.include?("toolkit_experimental")
+
+         self.schema_search_path = "#{schema_search_path}, toolkit_experimental"
+       end
+     end
+   end
+ end
+
+ ActiveRecord::ConnectionAdapters::PostgreSQLAdapter.include(Timescaledb::Toolkit::Helpers)
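A minimal usage sketch, mirroring the compare_volatility.rb example earlier in this release (PG_URI is just an example environment variable):

    ActiveRecord::Base.establish_connection(ENV["PG_URI"])
    ActiveRecord::Base.connection.add_toolkit_to_search_path!
    # Toolkit functions such as timevector() can now be referenced without the
    # toolkit_experimental schema prefix.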