apache_log_report 1.1.5 → 1.1.9
- checksums.yaml +4 -4
- data/Gemfile.lock +3 -1
- data/README.org +2 -30
- data/Rakefile +13 -0
- data/apache_log_report.gemspec +12 -0
- data/exe/apache_log_report +1 -0
- data/lib/apache_log_report/data_cruncher.rb +50 -7
- data/lib/apache_log_report/ip_locator.rb +42 -0
- data/lib/apache_log_report/options_parser.rb +2 -2
- data/lib/apache_log_report/templates/template.html.erb +95 -20
- data/lib/apache_log_report/templates/template.org.erb +5 -1
- data/lib/apache_log_report/version.rb +1 -1
- data/lib/apache_log_report.rb +1 -0
- metadata +50 -4
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ffde49bcae6f56e31b5ef7cccd88d5391127ee330264c188132f9ccbec94f421
+  data.tar.gz: 04c799c68ab676b176314b7ebe68cd3514faa48f853b9793e76d505d71342ae9
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 1b5010ca3fa37d6dceb8ab48d0fc5f256713db19f63366602f7200219c9ec51656160e2e50901709179fac67633f5703eb137432c5cd289bc080f14dc675aebf
+  data.tar.gz: 377a4a516fe144f6167c83850c0037cb9d0c216090ad197b2ca22b94d6ab156d1c696ceb31ab5c0be6f06227e7ef62dfd8a23667b26a9919e56eff431c9c977c
data/Gemfile.lock
CHANGED
@@ -1,9 +1,10 @@
 PATH
   remote: .
   specs:
-    apache_log_report (1.1.
+    apache_log_report (1.1.8)
       apache_log-parser
       browser
+      ipaddr
       sqlite3
       terminal-table
 
@@ -12,6 +13,7 @@ GEM
   specs:
     apache_log-parser (3.1.2)
     browser (5.3.1)
+    ipaddr (1.2.3)
     rake (12.3.3)
     sqlite3 (1.4.2)
     terminal-table (3.0.2)
data/README.org
CHANGED
@@ -2,34 +2,6 @@
 #+AUTHOR: Adolfo Villafiorita
 #+STARTUP: showall
 
-
+This Gem has been superseded by [[https://rubygems.org/gems/log_sense][Log Sense]], which produces better
+outputs and parses also rails logs.
 
-* Installation
-
-* Usage
-
-* Change Log
-
-See the [[file:CHANGELOG.org][CHANGELOG]] file.
-
-* Todo
-
-** TODO Graphs in HTML output
-** TODO Countries
-
-* Compatibility
-
-
-* Author and Contributors
-
-[[http://ict4g.net/adolfo][Adolfo Villafiorita]].
-
-* Known Bugs
-
-Some known bugs and an unknown number of unknown bugs.
-
-(See the open issues for the known bugs.)
-
-* License
-
-Distributed under the terms of the [[http://opensource.org/licenses/MIT][MIT License]].
data/Rakefile
CHANGED
@@ -1,2 +1,15 @@
 require "bundler/gem_tasks"
 task :default => :spec
+
+require 'rake/testtask'
+Rake::TestTask.new do |t|
+  t.libs << 'test'
+end
+
+require_relative './lib/apache_log_report/ip_locator.rb'
+
+desc "Convert Geolocation DB to sqlite"
+task :dbip_to_sqlite3, [:filename] do |tasks, args|
+  filename = args[:filename]
+  ApacheLogReport::IpLocator::dbip_to_sqlite filename
+end
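Usage sketch for the new task (the CSV filename below is only an example; use whichever DB-IP country-lite dump you downloaded):

# From the command line (quote the brackets so the shell does not expand them):
#   rake "dbip_to_sqlite3[dbip-country-lite-2021-12.csv]"
# Plain-Ruby equivalent of what the task body does:
require_relative 'lib/apache_log_report/ip_locator'
ApacheLogReport::IpLocator::dbip_to_sqlite 'dbip-country-lite-2021-12.csv'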
data/apache_log_report.gemspec
CHANGED
@@ -29,6 +29,18 @@ Gem::Specification.new do |spec|
 
   spec.add_dependency "apache_log-parser"
   spec.add_dependency "browser"
+  spec.add_dependency "ipaddr"
+  spec.add_dependency "iso_country_codes"
   spec.add_dependency "sqlite3"
   spec.add_dependency "terminal-table"
+
+  spec.add_development_dependency "minitest"
+
+  spec.post_install_message = <<-MESSAGE
+    ! The 'apache_log_report' gem has been deprecated and has been replaced 'log_sense'.
+    ! See: https://rubygems.org/gems/log_sense
+    ! And: https://www.ict4g.net/gitea/adolfo/log_sense
+  MESSAGE
+
+
 end
data/exe/apache_log_report
CHANGED
data/lib/apache_log_report/data_cruncher.rb
CHANGED
@@ -1,17 +1,39 @@
 module ApacheLogReport
   module DataCruncher
-
     #
     # take a sqlite3 database and analyze data
     #
+    # @ variables are automatically put in the returned data
+    #
 
     def self.crunch db, options = {}
-
-
+      first_day_s = db.execute "SELECT datetime from LogLine order by datetime limit 1"
+      last_day_s = db.execute "SELECT datetime from LogLine order by datetime desc limit 1"
+
+      # make first and last day into dates or nil
+      @first_day = first_day_s.empty? ? nil : Date.parse(first_day_s[0][0])
+      @last_day = last_day_s.empty? ? nil : Date.parse(last_day_s[0][0])
+
+      @total_days = 0
+      if @first_day and @last_day
+        @total_days = (@last_day - @first_day).to_i
+      end
+
       @log_size = db.execute "SELECT count(datetime) from LogLine"
       @crawlers_size = db.execute "SELECT count(datetime) from LogLine where bot == 1"
      @selfpolls_size = db.execute "SELECT count(datetime) from LogLine where ip == '::1'"
 
+      @first_day_requested = options[:from_date]
+      @last_day_requested = options[:to_date]
+
+      @first_day_in_analysis = date_intersect options[:from_date], @first_day, :max
+      @last_day_in_analysis = date_intersect options[:to_date], @last_day, :min
+
+      @total_days_in_analysis = 0
+      if @first_day_in_analysis and @last_day_in_analysis
+        @total_days_in_analysis = (@last_day_in_analysis - @first_day_in_analysis).to_i
+      end
+
       #
       # generate the where clause corresponding to the command line options to filter data
       #
@@ -54,7 +76,6 @@ module ApacheLogReport
       @total_hits = db.execute "SELECT count(datetime) from LogLine where #{filter}"
       @total_unique_visitors = db.execute "SELECT count(distinct(unique_visitor)) from LogLine where #{filter}"
       @total_size = db.execute "SELECT #{human_readable_size} from LogLine where #{filter}"
-      @total_days = (Date.parse(@last_day[0][0]) - Date.parse(@first_day[0][0])).to_i
 
       @daily_distribution = db.execute "SELECT date(datetime), #{human_readable_day}, count(datetime), count(distinct(unique_visitor)), #{human_readable_size} from LogLine where #{filter} group by date(datetime)"
       @time_distribution = db.execute "SELECT strftime('%H', datetime), count(datetime), count(distinct(unique_visitor)), #{human_readable_size} from LogLine where #{filter} group by strftime('%H', datetime)"
@@ -80,19 +101,41 @@ module ApacheLogReport
 
       @browsers = db.execute "SELECT browser, count(browser), count(distinct(unique_visitor)), #{human_readable_size} from LogLine where #{filter} group by browser order by count(browser) desc"
       @platforms = db.execute "SELECT platform, count(platform), count(distinct(unique_visitor)), #{human_readable_size} from LogLine where #{filter} group by platform order by count(platform) desc"
-      @ips = db.execute "SELECT ip, count(ip), count(distinct(unique_visitor)), #{human_readable_size} from LogLine where #{filter} group by ip order by count(ip) desc limit #{options[:limit]}"
       @referers = db.execute "SELECT referer, count(referer), count(distinct(unique_visitor)), #{human_readable_size} from LogLine where #{filter} group by referer order by count(referer) desc limit #{options[:limit]}"
 
-      @
+      @ips = db.execute "SELECT ip, count(ip), count(distinct(unique_visitor)), #{human_readable_size} from LogLine where #{filter} group by ip order by count(ip) desc limit #{options[:limit]}"
 
+      @streaks = db.execute "SELECT ip, substr(datetime, 1, 10), path from LogLine order by ip, datetime"
       data = {}
+
       self.instance_variables.each do |variable|
         var_as_symbol = variable.to_s[1..-1].to_sym
         data[var_as_symbol] = eval(variable.to_s)
       end
       data
     end
-  end
 
+    # add country code to data[:ips]
+    def self.geolocate data
+      @location_db = IpLocator::load_db
+      data[:ips].each do |ip|
+        country_code = IpLocator::locate_ip ip[0], @location_db
+        ip << country_code
+      end
+      data
+    end
+
+    private
+
+    def self.date_intersect date1, date2, method
+      if date1 and date2
+        [date1, date2].send(method)
+      elsif date1
+        date1
+      else
+        date2
+      end
+    end
+  end
 end
 
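The new period bookkeeping can be summarized with a small standalone sketch (the dates are invented for illustration): date_intersect picks the later of the logged and requested start dates and the earlier of the logged and requested end dates, so the analyzed period is the overlap of what is in the log and what was asked for on the command line.

require 'date'

# Same logic as the date_intersect helper added above.
def date_intersect date1, date2, method
  if date1 and date2
    [date1, date2].send(method)   # :max for the start date, :min for the end date
  elsif date1
    date1
  else
    date2
  end
end

first_day = Date.parse("2021-11-01")   # first entry found in the log
last_day  = Date.parse("2022-01-10")   # last entry found in the log
from_date = Date.parse("2021-12-01")   # --begin on the command line
to_date   = nil                        # no --end given

first_in_analysis = date_intersect(from_date, first_day, :max)  # => 2021-12-01
last_in_analysis  = date_intersect(to_date, last_day, :min)     # => 2022-01-10
(last_in_analysis - first_in_analysis).to_i                     # => 40 days in analysis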
data/lib/apache_log_report/ip_locator.rb
ADDED
@@ -0,0 +1,42 @@
+require 'csv'
+require 'sqlite3'
+require 'ipaddr'
+require 'iso_country_codes'
+
+module ApacheLogReport
+  module IpLocator
+    DB_FILE = "ip_locations/dbip-country-lite.sqlite3"
+
+    def self.dbip_to_sqlite db_location
+      db = SQLite3::Database.new ":memory:"
+      db.execute "CREATE TABLE ip_location (
+                    from_ip_n INTEGER,
+                    from_ip TEXT,
+                    to_ip TEXT,
+                    country_code TEXT
+                  )"
+
+      ins = db.prepare "INSERT INTO ip_location(from_ip_n, from_ip, to_ip, country_code) values (?, ?, ?, ?)"
+      CSV.foreach(db_location) do |row|
+        ip = IPAddr.new row[0]
+        ins.execute(ip.to_i, row[0], row[1], row[2])
+      end
+
+      # persist to file
+      ddb = SQLite3::Database.new(DB_FILE)
+      b = SQLite3::Backup.new(ddb, 'main', db, 'main')
+      b.step(-1) #=> DONE
+      b.finish
+    end
+
+    def self.load_db
+      SQLite3::Database.new DB_FILE
+    end
+
+    def self.locate_ip ip, db
+      ip_n = IPAddr.new(ip).to_i
+      res = db.execute "SELECT * FROM ip_location where from_ip_n <= #{ip_n} order by from_ip_n desc limit 1"
+      IsoCountryCodes.find(res[0][3]).name
+    end
+  end
+end
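A minimal usage sketch of the new module, assuming ip_locations/dbip-country-lite.sqlite3 has already been generated with the Rakefile task above; the IP address and the resulting country name are just examples.

require_relative 'lib/apache_log_report/ip_locator'

db = ApacheLogReport::IpLocator::load_db
# locate_ip finds the row with the largest from_ip_n not exceeding the numeric
# IP and expands its ISO country code to a full name.
ApacheLogReport::IpLocator::locate_ip "8.8.8.8", db   # => e.g. "United States of America"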
data/lib/apache_log_report/options_parser.rb
CHANGED
@@ -18,11 +18,11 @@ module ApacheLogReport
         args[:limit] = n
       end
 
-      opts.on("-bDATE", "--begin=DATE",
+      opts.on("-bDATE", "--begin=DATE", Date, "Consider entries after or on DATE") do |n|
         args[:from_date] = n
       end
 
-      opts.on("-eDATE", "--end=DATE",
+      opts.on("-eDATE", "--end=DATE", Date, "Consider entries before or on DATE") do |n|
         args[:to_date] = n
       end
 
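Passing Date as the acceptable-type argument makes OptionParser coerce the values, so :from_date and :to_date reach the data cruncher as Date objects rather than strings. A self-contained sketch (the argument values are invented):

require 'optparse'
require 'optparse/date'   # registers the Date conversion used below

args = {}
OptionParser.new do |opts|
  opts.on("-bDATE", "--begin=DATE", Date, "Consider entries after or on DATE") do |n|
    args[:from_date] = n
  end
  opts.on("-eDATE", "--end=DATE", Date, "Consider entries before or on DATE") do |n|
    args[:to_date] = n
  end
end.parse!(%w[--begin=2021-12-01 --end=2021-12-31])

args   # => { from_date: #<Date 2021-12-01>, to_date: #<Date 2021-12-31> }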
data/lib/apache_log_report/templates/template.html.erb
CHANGED
@@ -31,7 +31,11 @@
        "Browsers",
        "Platforms",
        "Referers",
-       "IPs"
+       "IPs",
+       "Geolocation",
+       "Streaks",
+       "Command Invocation",
+       "Performance"
       ].each do |item| %>
   <li class="nav-item">
     <a href="#<%= item.downcase.gsub(' ', '-') %>"><%= item %></a>
@@ -40,6 +44,14 @@
     </ul>
   </li>
 </ul>
+<p>
+  Generated by<br />
+  <a href="https://www.ict4g.net/gitea/adolfo/apache_log_report">
+    Apache Log Report
+  </a> <br />
+  on <%= DateTime.now.strftime("%Y-%m-%d %H:%M") %>.<br />
+  The lean log analyzer.
+</p>
 </nav>
 
 <section>
@@ -50,6 +62,22 @@
   <h2 id="summary">Summary</h2>
 
   <table class="table summary">
+    <tr>
+      <th>Input file</th>
+      <td><b><%= (data[:log_file] || "stdin") %></b></td>
+    </tr>
+    <tr>
+      <th class="period">Period Analyzed</th>
+      <td class="period">
+        <%= data[:first_day_in_analysis] %>
+        --
+        <%= data[:last_day_in_analysis] %>
+      </td>
+    </tr>
+    <tr>
+      <th class="days">Days </th>
+      <td class="days"><%= data[:total_days_in_analysis] %></td>
+    </tr>
     <tr>
       <th class="hits">Hits</th>
       <td class="hits"><%= data[:total_hits][0][0] %></td>
@@ -62,18 +90,6 @@
       <th class="tx">Tx</th>
       <td class="tx"><%= data[:total_size][0][0] %></td>
     </tr>
-    <tr>
-      <th class="period">Period</th>
-      <td class="period">
-        <%= data[:first_day][0][0] %>
-        --
-        <%= data[:last_day][0][0] %>
-      </td>
-    </tr>
-    <tr>
-      <th class="days">Days </th>
-      <td class="days"><%= data[:total_days] %></td>
-    </tr>
   </table>
 </article>
 <article class="column col-6">
@@ -85,6 +101,14 @@
       <th>Input file</th>
       <td><b><%= (data[:log_file] || "stdin") %></b></td>
     </tr>
+    <tr>
+      <th>Period in Log</th>
+      <td><%= data[:first_day] %> -- <%= data[:last_day] %></td>
+    </tr>
+    <tr>
+      <th>Total days</th>
+      <td><%= data[:total_days] %></td>
+    </tr>
     <tr>
       <th>Log size</th>
       <td><%= data[:log_size][0][0] %></td>
@@ -120,7 +144,7 @@
     { title: "Browsers", header: ["Browser", "Hits", "Visitors", "Size"], rows: data[:browsers] },
     { title: "Platforms", header: ["Platform", "Hits", "Visitors", "Size"], rows: data[:platforms] },
     { title: "Referers", header: ["Referers", "Hits", "Visitors", "Size"], rows: data[:referers], col: "col-12" },
-    { title: "IPs", header: ["IPs", "Hits", "Visitors", "Size"], rows: data[:ips] },
+    { title: "IPs", header: ["IPs", "Hits", "Visitors", "Size", "Country"], rows: data[:ips] },
     { },
   ]
 %>
@@ -139,6 +163,31 @@
   <% end %>
 </div>
 
+<article>
+  <h2 id="geolocation">Geolocation</h2>
+  <table class="table">
+    <thead>
+      <tr>
+        <th>Country Code</th>
+        <th>Total Hits</th>
+        <th>Total Visitors</th>
+        <th>IPs</th>
+      </tr>
+    </thead>
+    <tbody>
+      <%# IP, Hits, Visitors Size, Country%>
+      <% data[:ips].group_by { |x| x[4] }.each do |k, v| %>
+        <tr>
+          <td><%= k %></td>
+          <td><%= v.map { |x| x[1] }.inject(&:+) %></td>
+          <td><%= v.map { |x| x[2] }.inject(&:+) %></td>
+          <td><%= v.map { |x| x[0] }.join(", ") %></td>
+        </tr>
+      <% end %>
+    </tbody>
+  </table>
+</article>
+
 <article>
   <h2 id="streaks">Streaks</h2>
 
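The geolocation table is built entirely in the template: each row of data[:ips] now carries the country name appended by DataCruncher.geolocate as its fifth element, so the template groups rows by country and sums hits and visitors per group. A stripped-down sketch of the same aggregation outside ERB, with invented rows:

# Each row: [ip, hits, visitors, size, country] -- the shape produced by geolocate.
ips = [
  ["203.0.113.7",  10, 2, "1.0 MB", "Italy"],
  ["203.0.113.9",   4, 1, "0.2 MB", "Italy"],
  ["198.51.100.2",  7, 3, "0.5 MB", "France"],
]

ips.group_by { |x| x[4] }.each do |country, rows|
  hits     = rows.map { |x| x[1] }.inject(&:+)
  visitors = rows.map { |x| x[2] }.inject(&:+)
  puts "#{country}: #{hits} hits, #{visitors} visitors, IPs: #{rows.map { |x| x[0] }.join(', ')}"
end
# Italy: 14 hits, 3 visitors, IPs: 203.0.113.7, 203.0.113.9
# France: 7 hits, 3 visitors, IPs: 198.51.100.2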
@@ -146,7 +195,16 @@
     <thead>
       <tr>
         <th>IP</th>
-        <th>
+        <th>
+          <div class="columns">
+            <div class="col-2 column">
+              Day
+            </div>
+            <div class="col-10 column">
+              Resources
+            </div>
+          </div>
+        </th>
       </tr>
     </thead>
     <tbody>
@@ -154,11 +212,28 @@
       <tr>
         <td class="ip"><%= ip %></td>
         <td class="streaks">
-
-
-
-
+          <div class="columns">
+            <% date_urls.group_by { |x| x[1] }.each do |date, urls| %>
+              <div class="col-2 column">
+                <%= date %>
+              </div>
+              <div class="col-10 column">
+                <span class="res-title">HTML:</span>
+                <ul>
+                  <% urls.map { |x| x[2] }.select { |x| x.match /.*\.html?/ }.each do |url| %>
+                    <li><%= url %></li>
+                  <% end %>
+                </ul>
+
+                <span class="res-title">Other Resources:</span>
+                <ul>
+                  <% urls.map { |x| x[2] }.sort.select { |x| not x.match /.*\.html?/ }.each do |url| %>
+                    <li><%= url %></li>
+                  <% end %>
+                </ul>
+              </div>
             <% end %>
+          </div>
         </td>
       </tr>
     <% end %>
@@ -175,7 +250,7 @@
     <tbody>
       <tr>
         <th>CLI Command</th>
-        <td><
+        <td><code><%= data[:command] %></code></td>
       </tr>
       <tr>
         <th>Input file</th>
data/lib/apache_log_report/templates/template.org.erb
CHANGED
@@ -10,7 +10,11 @@
 | Hits | <%= "%10d" % data[:total_hits][0][0] %> |
 | Unique Visitors | <%= "%10d" % data[:total_unique_visitors][0][0] %> |
 | Tx | <%= "%10s" % data[:total_size][0][0] %> |
-|
+| Logged Period | <%= data[:first_day] %> -- <%= data[:last_day] %> |
+| Days | <%= "%10d" % data[:total_days] %> |
+| Period Requested | <%= data[:first_day_requested] %> -- <%= data[:last_day_requested] %> |
+| Period Analyzed | <%= data[:first_day_in_analysis] %> -- <%= data[:last_day_in_analysis] %> |
+| Days in Analysis | <%= data[:total_days_in_analysis] %> |
 
 * Daily Distribution
 
data/lib/apache_log_report.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: apache_log_report
 version: !ruby/object:Gem::Version
-  version: 1.1.
+  version: 1.1.9
 platform: ruby
 authors:
 - Adolfo Villafiorita
 autorequire:
 bindir: exe
 cert_chain: []
-date:
+date: 2022-01-12 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: apache_log-parser
@@ -38,6 +38,34 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
+- !ruby/object:Gem::Dependency
+  name: ipaddr
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  name: iso_country_codes
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: sqlite3
   requirement: !ruby/object:Gem::Requirement
@@ -66,6 +94,20 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
+- !ruby/object:Gem::Dependency
+  name: minitest
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 description: Generate requests reports in HTML, OrgMode, and SQLite format from an
   Apache log file.
 email:
@@ -90,6 +132,7 @@ files:
 - lib/apache_log_report/apache_log_report.rb
 - lib/apache_log_report/data_cruncher.rb
 - lib/apache_log_report/emitter.rb
+- lib/apache_log_report/ip_locator.rb
 - lib/apache_log_report/log_parser.rb
 - lib/apache_log_report/options_parser.rb
 - lib/apache_log_report/templates/_output_table.html.erb
@@ -104,7 +147,10 @@ metadata:
   homepage_uri: https://www.ict4g.net/gitea/adolfo/apache_log_report
   source_code_uri: https://www.ict4g.net/gitea/adolfo/apache_log_report
   changelog_uri: https://www.ict4g.net/gitea/adolfo/apache_log_report/CHANGELOG.org
-post_install_message:
+post_install_message: |2
+  ! The 'apache_log_report' gem has been deprecated and has been replaced 'log_sense'.
+  ! See: https://rubygems.org/gems/log_sense
+  ! And: https://www.ict4g.net/gitea/adolfo/log_sense
 rdoc_options: []
 require_paths:
 - lib
@@ -119,7 +165,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.
+rubygems_version: 3.0.3.1
 signing_key:
 specification_version: 4
 summary: Generate analytics from an Apache log file.