active-record-profiler 0.1
- checksums.yaml +7 -0
- data/.gitignore +3 -0
- data/CHANGELOG.md +5 -0
- data/Gemfile +4 -0
- data/LICENSE.txt +21 -0
- data/README.md +150 -0
- data/Rakefile +14 -0
- data/active-record-profiler.gemspec +30 -0
- data/lib/active-record-profiler.rb +13 -0
- data/lib/active-record-profiler/collector.rb +276 -0
- data/lib/active-record-profiler/log_subscriber.rb +30 -0
- data/lib/active-record-profiler/logger.rb +67 -0
- data/lib/active-record-profiler/profiler_view_helper.rb +200 -0
- data/lib/active-record-profiler/railtie.rb +18 -0
- data/lib/active-record-profiler/tasks.rake +41 -0
- data/lib/active-record-profiler/version.rb +3 -0
- data/test/active_record_profiler_test.rb +29 -0
- data/test/database.yml +4 -0
- data/test/dummy/README.rdoc +28 -0
- data/test/dummy/Rakefile +6 -0
- data/test/dummy/app/assets/images/.keep +0 -0
- data/test/dummy/app/assets/javascripts/application.js +13 -0
- data/test/dummy/app/assets/stylesheets/application.css +15 -0
- data/test/dummy/app/controllers/application_controller.rb +5 -0
- data/test/dummy/app/controllers/concerns/.keep +0 -0
- data/test/dummy/app/controllers/profiler_controller.rb +5 -0
- data/test/dummy/app/helpers/application_helper.rb +2 -0
- data/test/dummy/app/mailers/.keep +0 -0
- data/test/dummy/app/models/.keep +0 -0
- data/test/dummy/app/models/concerns/.keep +0 -0
- data/test/dummy/app/models/widget.rb +2 -0
- data/test/dummy/app/views/layouts/application.html.erb +14 -0
- data/test/dummy/app/views/profiler/index.html.erb +4 -0
- data/test/dummy/bin/bundle +3 -0
- data/test/dummy/bin/rails +4 -0
- data/test/dummy/bin/rake +4 -0
- data/test/dummy/bin/setup +29 -0
- data/test/dummy/config.ru +4 -0
- data/test/dummy/config/application.rb +25 -0
- data/test/dummy/config/boot.rb +5 -0
- data/test/dummy/config/database.yml +25 -0
- data/test/dummy/config/environment.rb +5 -0
- data/test/dummy/config/environments/development.rb +41 -0
- data/test/dummy/config/environments/production.rb +79 -0
- data/test/dummy/config/environments/test.rb +42 -0
- data/test/dummy/config/initializers/active_record_profiler.rb +3 -0
- data/test/dummy/config/initializers/assets.rb +11 -0
- data/test/dummy/config/initializers/backtrace_silencers.rb +7 -0
- data/test/dummy/config/initializers/cookies_serializer.rb +3 -0
- data/test/dummy/config/initializers/filter_parameter_logging.rb +4 -0
- data/test/dummy/config/initializers/inflections.rb +16 -0
- data/test/dummy/config/initializers/mime_types.rb +4 -0
- data/test/dummy/config/initializers/session_store.rb +3 -0
- data/test/dummy/config/initializers/wrap_parameters.rb +14 -0
- data/test/dummy/config/locales/en.yml +23 -0
- data/test/dummy/config/routes.rb +56 -0
- data/test/dummy/config/secrets.yml +22 -0
- data/test/dummy/db/migrate/20150109175941_create_widgets.rb +8 -0
- data/test/dummy/db/schema.rb +21 -0
- data/test/dummy/lib/assets/.keep +0 -0
- data/test/dummy/public/404.html +67 -0
- data/test/dummy/public/422.html +67 -0
- data/test/dummy/public/500.html +66 -0
- data/test/dummy/public/favicon.ico +0 -0
- data/test/dummy/test/fixtures/widgets.yml +9 -0
- data/test/dummy/test/models/widget_test.rb +7 -0
- data/test/schema.rb +7 -0
- data/test/test_helper.rb +24 -0
- metadata +237 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA1:
+  metadata.gz: 29a3517f08a6a21ab80d888178ae5371f60f9f05
+  data.tar.gz: 752933b5e705af4c7698496246f2356df6007dbb
+SHA512:
+  metadata.gz: e23f7711b5a0936b5d55e14027f3befd06bf7f6d999d3a4e8b8efcc636d0b41e78cd69cef3f4d11d3c7ba9a673ad50bf2e829a1ea58172e5580ff8a8d09ce74a
+  data.tar.gz: 3099240d90f3497e8f2f95597e5489c4b487d58e0fc234f29190d9ad4ca3daefe125ffecf05a73b8ac9d8588b1b247eb8c69091eb2fd6683a10c9ce2ad869e13
data/.gitignore
ADDED
data/CHANGELOG.md
ADDED
data/Gemfile
ADDED
data/LICENSE.txt
ADDED
@@ -0,0 +1,21 @@
+Copyright (c) 2010 Gist, Inc.
+Copyright (c) 2015 Benjamin Turner
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.md
ADDED
@@ -0,0 +1,150 @@
+ActiveRecordProfiler
+====================
+
+ActiveRecordProfiler monkey-patches
+ActiveRecord::ConnectionAdapters::AbstractAdapter both to improve the
+standard SQL logging and to provide profiler-like tracking of SQL
+statements generated by application code.
+
+Each SQL log entry generated by ActiveRecord will have appended the
+filename, line number, and function name of the nearest calling
+application code. The information is determined by walking up the call
+stack until a filename within the /app/ directory is found. If no such
+filename can be found, the SQL will be logged with a location of
+'Non-application code'.
+
+Additionally, the profiler will keep track of the total time spent by all
+SQL calls coming from each calling location, as well as the number of times
+that location accessed the database. Certain SQL calls not under the
+direct control of the application are not counted in these statistics,
+such as "SHOW FIELDS", "SET NAMES", "BEGIN", and "COMMIT" statements, which
+tend to skew the timing statistics and provide less useful information
+about slow SQL queries.
+
+Periodically, the profiler will dump its statistics out to a file and
+restart all of its counters/timers. The output file is named for the time
+and PID from which it was written, so that multiple processes can safely
+write their output simultaneously.
+
+Installation
+============
+Add it to your Gemfile, run `bundle install`, and then add a new initializer, `config/initializers/active_record_profiler.rb`:
+
+    ActiveRecord::Base.logger =
+      ActiveRecordProfiler::Logger.new(ActiveRecord::Base.logger)
+    ActiveRecordProfiler::LogSubscriber.attach_to :active_record unless Rails.env.test?
+
+
+The first line adds call-site information to ActiveRecord logging, and the second line enables profiling (except in the test environment, where it would mess up your profiling data).
+
+Configuration
+=============
+Control the (approximate) frequency of statistics flushes (default: `1.hour`):
+
+    ActiveRecordProfiler::Collector.stats_flush_period = 1.hour
+
+Note that only flushed data is available for use in the rake reports (described below). If you are running a multithreaded or multiprocess server (which covers most common Rails server types), your data will be incomplete until all those threads/processes/servers have flushed their data. This limitation exists primarily to avoid the overhead of coordinating/locking while serving your application's web requests.
+
+Directory where profile data is recorded (default: `Rails.root.join('log', 'profiler_data')`):
+
+    ActiveRecordProfiler::Collector.profile_dir = Rails.root.join('log', 'profiler_data')
+
+Any SQL statements matching this pattern will not be tracked by the
+profiler output, though they will still appear in the enhanced SQL logging
+(default: `/^(SHOW FIELDS |SET SQL_AUTO_IS_NULL|SET NAMES |EXPLAIN |BEGIN|COMMIT|PRAGMA )/`):
+
+    ActiveRecordProfiler::Collector.sql_ignore_pattern = /^SET /x
+
+
+Reports
+=======
+To see a top-100 list of the SQL statements your application is spending its
+time in, run the following rake task:
+
+    rake profiler:aggregate max_lines=100 show_sql=true
+
+This will return a list of the SQL which is taking the most time in your
+application, in this format:
+
+    <file path>:<line number>:in <method name>: <total duration>, <call count>, <max single call duration>
+
+This will aggregate all of the profiler data you have accumulated; in order
+to limit the timeframe of the data, use the `prefix` option to specify a
+partial date/time:
+
+    rake profiler:aggregate max_lines=100 show_sql=true prefix=2010-06-20-10 # data from June 20 during the 10am hour (roughly)
+
+Each thread running the profiler flushes its stats periodically, and there
+is a rake task to combine multiple profiler data files together in order to
+keep the number of data files down to a manageable number. A good way to
+manage the data files on a server is to set up a cron task to run the
+following command once per hour or once per day:
+
+    rake profiler:aggregate compact=<'hour' or 'date'> RAILS_ENV=qa
+
+Compacting by hour will result in a single file for each hour any process
+dumped its stats. Compacting by day will result in a single file for each
+day. When using the `prefix` option to generate a profiler report, you
+cannot specify an hour if you have compacted your data by date instead of
+hour (the prefix matching operates on the file names, which will not have
+hours if they have been compacted by date).
+
+You can clear out all profiler data using the following command:
+
+    rake profiler:clear_data
+
+If you want programmatic access to the profiler data, check out the source
+code for the rake tasks in `lib/active-record-profiler/tasks.rake`.
+
+
+HTML Reports
+============
+
+The profiler includes some view helpers to make it easy for your application
+to generate a sortable HTML table of profiler information. The core helper
+method generates a table based on an ActiveRecordProfiler::Collector object.
+In its simplest form, it can be called from a view like this:
+
+    <div id="profiler">
+      <%= profiler_report(params) %>
+    </div>
+
+The full set of parameters available looks like this:
+
+    profiler_report(page_parameters, options = {})
+
+parameters:
+  page_parameters: generally the request parameters, used to build the report-sorting links (HashWithIndifferentAccess)
+  options: hash containing optional settings for the report; supported keys:
+
+    :date : year, year-month, year-month-day, or year-month-day-hour used to filter the profiler data; defaults to today (String)
+    :sort : ActiveRecordProfiler::(DURATION|COUNT|LONGEST|AVG_DURATION) specifying which field to sort the report by; defaults to DURATION (Constant/Integer)
+    :max_rows : maximum number of table rows to output; in other words, report on the top max_rows SQL statements; defaults to 100 (Integer)
+    :collector : object representing the profile data to use in building the report; defaults to an empty collector using the configured profile data directory (ActiveRecordProfiler::Collector)
+    :table : CSS class applied to the report <table> element; defaults to nil
+    :header_row : CSS class applied to the report's header row; defaults to nil
+    :row : CSS class applied to the report's data rows; defaults to nil
+    :link_locations : true/false value indicating whether to build TextMate links to the source code whence a given piece of SQL came; defaults to false
+
+An easy way to support filtering of report data by month/date/hour is to
+use a view like this:
+
+    <%= profiler_date_filter_form(params) %>
+    <%= profiler_report(params) %>
+
+And if you use TextMate, then you may want to throw in some extra goodies
+to generate links to the actual source code files and lines where the SQL
+was triggered (note: the current JavaScript requires jQuery):
+
+    <%= profiler_date_filter_form(params) %>
+    <%= profiler_report_local_path_form %>
+    <%= profile_report_local_path_javascript %>
+    <%= profiler_report(params, {:link_locations => true}) %>
+
+
+Miscellaneous
+=============
+
+Copyright (c) 2010 Gist, Inc.
+Copyright (c) 2015 Benjamin Turner
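
Taken together, the installation and configuration snippets in the README above can live in a single initializer. A minimal sketch, using only the settings documented there (the flush period and ignore-pattern values below are illustrative, not the gem's defaults):

    # config/initializers/active_record_profiler.rb -- sketch assembled from the README above
    ActiveRecord::Base.logger =
      ActiveRecordProfiler::Logger.new(ActiveRecord::Base.logger)
    ActiveRecordProfiler::LogSubscriber.attach_to :active_record unless Rails.env.test?

    # Collector settings documented in the Configuration section (values are examples)
    ActiveRecordProfiler::Collector.stats_flush_period = 30.minutes
    ActiveRecordProfiler::Collector.profile_dir        = Rails.root.join('log', 'profiler_data')
    ActiveRecordProfiler::Collector.sql_ignore_pattern = /^(SHOW FIELDS |SET NAMES |EXPLAIN |BEGIN|COMMIT|PRAGMA )/i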
data/Rakefile
ADDED
@@ -0,0 +1,14 @@
+require "bundler"
+require 'rake/testtask'
+
+Rake::TestTask.new do |t|
+  t.libs << 'lib'
+  t.libs << 'test'
+  t.pattern = 'test/**/*_test.rb'
+  t.verbose = false
+end
+
+desc "Run tests"
+task :default => :test
+
+Bundler::GemHelper.install_tasks
data/active-record-profiler.gemspec
ADDED
@@ -0,0 +1,30 @@
+# coding: utf-8
+lib = File.expand_path('../lib', __FILE__)
+$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+require 'active-record-profiler/version'
+
+Gem::Specification.new do |spec|
+  spec.name = "active-record-profiler"
+  spec.version = ActiveRecordProfiler::VERSION
+  spec.authors = ["Ben Turner"]
+  spec.email = ["codewrangler@outofcoffee.com"]
+  spec.summary = %q{Enhances ActiveRecord logging and profiles queries}
+  spec.description = <<-EOF
+    See where each database call is coming from in your code, and get query
+    profiling to see which queries are taking up the most time in the database.
+  EOF
+  spec.homepage = "https://github.com/kindjar/active_record_profiler"
+  spec.license = "MIT"
+
+  spec.files = `git ls-files`.split($/)
+  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
+  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
+  spec.require_paths = ["lib"]
+
+  spec.add_dependency "rails", "~> 4.0"
+
+  spec.add_development_dependency "bundler", "~> 1.3"
+  spec.add_development_dependency "rake", "~> 10.3"
+  spec.add_development_dependency "minitest", "~> 5.1"
+  spec.add_development_dependency "sqlite3", "~> 1.3"
+end
data/lib/active-record-profiler.rb
ADDED
@@ -0,0 +1,13 @@
+require 'active-record-profiler/version'
+require 'active-record-profiler/collector'
+require 'active-record-profiler/log_subscriber'
+require 'active-record-profiler/logger'
+require 'active-record-profiler/profiler_view_helper'
+
+require 'active-record-profiler/railtie' if defined?(Rails)
+
+module ActiveRecordProfiler
+  require 'fileutils'
+  require 'json'
+end
+
data/lib/active-record-profiler/collector.rb
ADDED
@@ -0,0 +1,276 @@
+module ActiveRecordProfiler
+  class Collector
+    DURATION = 0
+    COUNT = 1
+    LONGEST = 2
+    LONGEST_SQL = 3
+    LOCATION = -1
+    AVG_DURATION = -2
+
+    DATE_FORMAT = '%Y-%m-%d'
+    HOUR_FORMAT = '-%H'
+    DATETIME_FORMAT = DATE_FORMAT + HOUR_FORMAT + '-%M'
+    AGGREGATE_QUIET_PERIOD = 1.minutes
+
+    CSV_DURATION = 0
+    CSV_COUNT = 1
+    CSV_AVG = 2
+    CSV_LONGEST = 3
+    CSV_LOCATION = 4
+    CSV_LONGEST_SQL = 5
+
+    NON_APP_CODE_DESCRIPTION = 'Non-application code'
+
+    cattr_accessor :profiler_enabled
+    self.profiler_enabled = true
+
+    # You can control the (approximate) frequency of statistics flushes by setting
+    # ActiveRecordProfiler::Collector.stats_flush_period = 1.hour
+    cattr_accessor :stats_flush_period
+    self.stats_flush_period = 1.hour
+
+    # Directory where profile data is recorded
+    cattr_accessor :profile_dir
+
+    # Any SQL statements matching this pattern will not be tracked by the profiler output
+    # (though it will still appear in the enhanced SQL logging).
+    cattr_accessor :sql_ignore_pattern
+    self.sql_ignore_pattern = /^(SHOW FIELDS |SET SQL_AUTO_IS_NULL|SET NAMES |EXPLAIN |BEGIN|COMMIT|PRAGMA )/i
+
+    cattr_accessor :app_path_pattern
+    self.app_path_pattern = Regexp.new(Regexp.quote("/app/"))
+
+    cattr_accessor :trim_root_path
+
+    cattr_accessor :storage_backend
+    self.storage_backend = :json
+
+    cattr_accessor :profile_self
+    self.profile_self = false
+
+    attr_accessor :last_stats_flush
+    attr_accessor :query_sites
+    attr_accessor :profile_data_directory
+
+    def self.instance
+      Thread.current[:active_record_profiler_collector] ||= Collector.new
+    end
+
+    def self.profile_self?
+      self.profile_self
+    end
+
+    def self.clear_data
+      dir = Dir.new(profile_dir)
+      prof_files = dir.entries.select{ |filename| /.prof$/.match(filename) }.map{ |filename| File.join(dir.path, filename) }
+      FileUtils.rm(prof_files) if prof_files.size > 0
+    end
+
+    def initialize
+      @query_sites = {}
+      @last_stats_flush = nil
+      @profile_data_directory = self.class.profile_dir
+    end
+
+    def call_location_name(caller_array = nil)
+      find_app_call_location(caller_array) || NON_APP_CODE_DESCRIPTION
+    end
+
+    def record_caller_info(location, seconds, sql)
+      return if sql_ignore_pattern.match(sql)
+
+      update_counts(location, seconds, 1, sql)
+    end
+
+    def record_self_info(seconds, name)
+      record_caller_info(trim_location(caller.first), seconds, name)
+    end
+
+    def should_flush_stats?
+      self.last_stats_flush ||= Time.now
+      return(Time.now > self.last_stats_flush + stats_flush_period)
+    end
+
+    def flush_query_sites_statistics
+      pid = $$
+      thread_id = Thread.current.object_id
+      flush_time = Time.now
+      site_count = self.query_sites.keys.size
+      Rails.logger.info("Flushing ActiveRecordProfiler statistics for PID #{pid} at #{flush_time} (#{site_count} sites).")
+
+      if (site_count > 0)
+        FileUtils.makedirs(self.profile_data_directory)
+
+        filename = File.join(self.profile_data_directory, "#{flush_time.strftime(DATETIME_FORMAT)}.#{pid}-#{thread_id}.prof")
+        write_file(filename)
+
+        # Nuke each value to make sure it can be reclaimed by Ruby
+        self.query_sites.keys.each{ |k| self.query_sites[k] = nil }
+      end
+      self.query_sites = {}
+      self.last_stats_flush = flush_time
+    end
+
+    def aggregate(options = {})
+      prefix = options[:prefix]
+      compact = options[:compact]
+      raise "Cannot compact without a prefix!" if compact && prefix.nil?
+      return self.query_sites unless File.exists?(self.profile_data_directory)
+
+      dir = Dir.new(self.profile_data_directory)
+      now = Time.now
+      raw_files_processed = []
+      date_regexp = Regexp.new(prefix) if prefix
+
+      dir.each do |filename|
+        next unless /.prof$/.match(filename)
+        next if date_regexp && ! date_regexp.match(filename)
+        # Parse the datetime out of the filename and convert it to localtime
+        begin
+          file_time = DateTime.strptime(filename, DATETIME_FORMAT)
+          file_time = Time.local(file_time.year, file_time.month, file_time.day, file_time.hour, file_time.min)
+        rescue Exception => e
+          if e.to_s != 'invalid date'
+            raise e
+          end
+        end
+
+        if (file_time.nil? || ((file_time + AGGREGATE_QUIET_PERIOD) < now))
+          begin
+            update_from_file(File.join(dir.path, filename))
+
+            raw_files_processed << filename if file_time # any files that are already aggregated don't count
+          rescue Exception => e
+            RAILS_DEFAULT_LOGGER.warn "Unable to read file #{filename}: #{e.message}"
+          end
+        else
+          Rails.logger.info "Skipping file #{filename} because it is too new and may still be open for writing."
+        end
+      end
+
+      if compact && raw_files_processed.size > 0
+        write_file(File.join(dir.path, "#{prefix}.prof"))
+
+        raw_files_processed.each do |filename|
+          FileUtils.rm(File.join(dir.path, filename))
+        end
+      end
+
+      return self.query_sites
+    end
+
+    def save_aggregated(date = nil)
+      aggregate(:date => date, :compact => true)
+    end
+
+    def sorted_locations(sort_field = nil, max_locations = nil)
+      sort_field ||= DURATION
+      case sort_field
+      when LOCATION
+        sorted = self.query_sites.keys.sort
+      when AVG_DURATION
+        sorted = self.query_sites.keys.sort_by{ |k| (self.query_sites[k][DURATION] / self.query_sites[k][COUNT]) }.reverse
+      when DURATION, COUNT, LONGEST
+        sorted = self.query_sites.keys.sort{ |a,b| self.query_sites[b][sort_field] <=> self.query_sites[a][sort_field] }
+      else
+        raise "Invalid sort field: #{sort_field}"
+      end
+      if max_locations && max_locations > 0
+        sorted.first(max_locations)
+      else
+        sorted
+      end
+    end
+
+    protected
+
+    def find_app_call_location(call_stack)
+      call_stack = caller
+      while frame = call_stack.shift
+        if app_path_pattern.match(frame)
+          return trim_location(frame)
+        end
+      end
+      return nil
+    end
+
+    def trim_location(loc)
+      loc.sub(trim_root_path, '')
+    end
+
+    def update_counts(location, seconds, count, sql, longest = nil)
+      longest ||= seconds
+      self.query_sites[location] ||= [0.0,0,0,'']
+      self.query_sites[location][DURATION] += seconds
+      self.query_sites[location][COUNT] += count
+      if (longest > self.query_sites[location][LONGEST])
+        self.query_sites[location][LONGEST] = longest
+        self.query_sites[location][LONGEST_SQL] = sql.to_s
+      end
+    end
+
+    def detect_file_type(filename)
+      type = nil
+      File.open(filename, "r") do |io|
+        first_line = io.readline
+        if first_line.match(/^\/\* JSON \*\//)
+          type = :json
+        end
+      end
+      return type
+    end
+
+    def write_file(filename)
+      case storage_backend
+      when :json
+        write_json_file(filename)
+      else
+        raise "Invalid storage_backend: #{storage_backend}"
+      end
+    end
+
+    def write_json_file(filename)
+      File.open(filename, "w") do |file|
+        file.puts "/* JSON */"
+        file.puts "/* Fields: Duration, Count, Avg. Duration, Max. Duration, Location, Max. Duration SQL */"
+        file.puts "["
+
+        first = true
+        self.query_sites.each_pair do |location, info|
+          if first
+            first = false
+          else
+            file.puts "\n, "
+          end
+          row = [info[DURATION], info[COUNT], (info[DURATION]/info[COUNT]), info[LONGEST], location, info[LONGEST_SQL]]
+          file.print JSON.generate(row)
+        end
+        file.puts "\n]"
+      end
+    end
+
+    def update_from_file(filename)
+      read_file(filename) do |row|
+        update_counts(
+          row[CSV_LOCATION], row[CSV_DURATION].to_f, row[CSV_COUNT].to_i, row[CSV_LONGEST_SQL], row[CSV_LONGEST].to_f
+        )
+      end
+    end
+
+    def read_file(filename)
+      file_type = detect_file_type filename
+      case file_type
+      when :json
+        read_json_file(filename) { |row| yield row }
+      else
+        raise "Unknown profiler data file type for file '#{filename}: #{file_type}"
+      end
+    end
+
+    def read_json_file(filename)
+      JSON.load(File.open(filename, "r")).each do |row|
+        yield row
+      end
+    end
+  end
+end
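
The README notes that programmatic access to the profiler data goes through the same Collector API used by the rake tasks. A minimal sketch built only on the public methods defined in collector.rb above; it assumes `profile_dir` has been configured and that at least one stats flush has already happened, and the report formatting is illustrative:

    # Fold matching .prof files into query_sites, then print the 20 slowest call sites.
    collector = ActiveRecordProfiler::Collector.new
    collector.aggregate(:prefix => '2010-06-20')
    top = collector.sorted_locations(ActiveRecordProfiler::Collector::DURATION, 20)
    top.each do |location|
      duration, count, longest, _sql = collector.query_sites[location]
      puts format("%s: %.3fs total, %d calls, %.3fs max", location, duration, count, longest)
    end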