searchlink 2.3.73 → 2.3.76
- checksums.yaml +4 -4
- data/lib/searchlink/config.rb +23 -23
- data/lib/searchlink/curl/html.rb +38 -38
- data/lib/searchlink/curl/json.rb +19 -17
- data/lib/searchlink/curl.rb +2 -2
- data/lib/searchlink/exceptions.rb +2 -2
- data/lib/searchlink/help.rb +13 -13
- data/lib/searchlink/output.rb +21 -21
- data/lib/searchlink/parse.rb +113 -108
- data/lib/searchlink/plist.rb +11 -11
- data/lib/searchlink/script_plugin.rb +10 -10
- data/lib/searchlink/search.rb +6 -6
- data/lib/searchlink/searches/amazon.rb +4 -4
- data/lib/searchlink/searches/applemusic.rb +28 -28
- data/lib/searchlink/searches/bitly.rb +11 -11
- data/lib/searchlink/searches/definition.rb +7 -7
- data/lib/searchlink/searches/duckduckgo.rb +31 -27
- data/lib/searchlink/searches/github.rb +48 -48
- data/lib/searchlink/searches/google.rb +16 -16
- data/lib/searchlink/searches/helpers/chromium.rb +46 -46
- data/lib/searchlink/searches/helpers/firefox.rb +20 -20
- data/lib/searchlink/searches/helpers/safari.rb +14 -14
- data/lib/searchlink/searches/history.rb +78 -78
- data/lib/searchlink/searches/hook.rb +5 -5
- data/lib/searchlink/searches/itunes.rb +37 -37
- data/lib/searchlink/searches/lastfm.rb +13 -13
- data/lib/searchlink/searches/linkding.rb +14 -14
- data/lib/searchlink/searches/lyrics.rb +11 -11
- data/lib/searchlink/searches/pinboard.rb +35 -35
- data/lib/searchlink/searches/social.rb +45 -56
- data/lib/searchlink/searches/software.rb +4 -4
- data/lib/searchlink/searches/spelling.rb +10 -10
- data/lib/searchlink/searches/spotlight.rb +4 -4
- data/lib/searchlink/searches/stackoverflow.rb +5 -5
- data/lib/searchlink/searches/tmdb.rb +17 -17
- data/lib/searchlink/searches/twitter.rb +8 -8
- data/lib/searchlink/searches/wikipedia.rb +4 -4
- data/lib/searchlink/searches/youtube.rb +7 -7
- data/lib/searchlink/searches.rb +16 -16
- data/lib/searchlink/semver.rb +4 -4
- data/lib/searchlink/string.rb +55 -55
- data/lib/searchlink/url.rb +30 -32
- data/lib/searchlink/util.rb +3 -3
- data/lib/searchlink/version.rb +19 -21
- data/lib/searchlink/which.rb +5 -5
- data/lib/searchlink.rb +31 -31
- metadata +31 -18
- data/lib/tokens.rb +0 -3
data/lib/searchlink/searches/github.rb

@@ -6,12 +6,12 @@ module SL
     class << self
       def settings
         {
-          trigger:
+          trigger: "(?:giste?|ghu?)",
           searches: [
-            [
-            [
-            [
-            [
+            ["gh", "GitHub User/Repo Link"],
+            ["ghu", "GitHub User Search"],
+            ["gist", "Gist Search"],
+            ["giste", "Gist Embed"]
           ]
         }
       end
@@ -28,23 +28,23 @@ module SL

         return SL.ddg("site:github.com #{search_terms}", link_text) unless url

-        link_text = title if link_text ==
+        link_text = title if link_text == "" || link_text == search_terms

         [url, title, link_text]
       end

       def github_search_curl(endpoint, query)
         headers = {
-
-
+          "Accept" => "application/vnd.github+json",
+          "X-GitHub-Api-Version" => "2022-11-28"
         }
-        headers[
+        headers["Authorization"] = "Bearer #{Secrets::GH_AUTH_TOKEN}" if defined? Secrets::GH_AUTH_TOKEN

         url = "https://api.github.com/search/#{endpoint}?q=#{query.url_encode}&per_page=1&page=1&order=desc"
         res = Curl::Json.new(url, headers: headers)

-        if res.json.key?(
-          res.json[
+        if res.json.key?("total_count") && res.json["total_count"].positive?
+          res.json["items"][0]
         else
           false
         end
@@ -52,17 +52,17 @@ module SL

       def user_gists(user, search_terms, page = 1)
         headers = {
-
-
+          "Accept" => "application/vnd.github+json",
+          "X-GitHub-Api-Version" => "2022-11-28"
         }
-        headers[
+        headers["Authorization"] = "Bearer #{Secrets::GH_AUTH_TOKEN}" if defined? Secrets::GH_AUTH_TOKEN

         url = "https://api.github.com/users/#{user}/gists?per_page=100&page=#{page}"

         res = Curl::Json.new(url, headers: headers).json

-        if res.is_a?(Hash) && res[
-          SL.notify(
+        if res.is_a?(Hash) && res["status"].to_i == 401
+          SL.notify("Error", "Bad GitHub credentials")
           return nil
         end

@@ -70,7 +70,7 @@ module SL
         best = filter_gists(res, search_terms) if res

         if !best && res.count == 100
-          SL.notify(
+          SL.notify("Paging", "Getting page #{page + 1} of #{user} gists")
           best = user_gists(user, search_terms, page + 1)
         end

@@ -96,7 +96,7 @@ module SL

           [url, title, link_text]
         else
-          SL.notify(
+          SL.notify("Searching GitHub", "Repo not found, performing search")
           search_github(search_terms, link_text)
         end
       end
@@ -104,16 +104,16 @@ module SL
       def github_user(search_terms, link_text)
         if search_terms.split(/ /).count > 1
           query = %(#{search_terms} in:name)
-          res = github_search_curl(
+          res = github_search_curl("users", query)
         else
           query = %(user:#{search_terms})
-          res = github_search_curl(
-          res ||= github_search_curl(
+          res = github_search_curl("users", query)
+          res ||= github_search_curl("users", search_terms)
         end

         if res
-          url = res[
-          title = res[
+          url = res["html_url"]
+          title = res["login"]

           [url, title, link_text]
         else
@@ -126,23 +126,23 @@ module SL
           [%r{(\S+)/(\S+)}, 'user:\1 \2'],
           [/\bu\w*:(\w+)/, 'user:\1'],
           [/\bl\w*:(\w+)/, 'language:\1'],
-          [/\bin?:r\w*/,
-          [/\bin?:t\w*/,
-          [/\bin?:d\w*/,
-          [/\bin?:(t(itle)?|n(ame)?)/,
-          [/\br:/,
+          [/\bin?:r\w*/, "in:readme"],
+          [/\bin?:t\w*/, "in:topics"],
+          [/\bin?:d\w*/, "in:description"],
+          [/\bin?:(t(itle)?|n(ame)?)/, "in:name"],
+          [/\br:/, "repo:"]
         ]

         replacements.each { |r| search_terms.gsub!(r[0], r[1]) }

-        search_terms +=
+        search_terms += " in:title" unless search_terms =~ /(in|user|repo):/

-        res = github_search_curl(
+        res = github_search_curl("repositories", search_terms)

         return false unless res

-        url = res[
-        title = res[
+        url = res["html_url"]
+        title = res["description"] || res["full_name"]
         [url, title, link_text]
       end

@@ -158,9 +158,9 @@ module SL
         score = 0
         gists.map! do |g|
           {
-            url: g[
-            description: g[
-            files: g[
+            url: g["html_url"],
+            description: g["description"],
+            files: g["files"].map { |file, info| { filename: file, raw: info["raw_url"] } }
           }
         end
         matches = []
@@ -169,7 +169,7 @@ module SL
           g[:files].each do |f|
             next unless f[:filename]

-            score = f[:filename].matches_score(search_terms.gsub(/[^a-z0-9]/,
+            score = f[:filename].matches_score(search_terms.gsub(/[^a-z0-9]/, " "))

             if score > 5
               url = "#{g[:url]}#file-#{f[:filename].gsub(/\./, '-')}"
@@ -178,7 +178,7 @@ module SL
             end
           end

-          score = g[:description].nil? ? 0 : g[:description].matches_score(search_terms.gsub(/[^a-z0-9]/,
+          score = g[:description].nil? ? 0 : g[:description].matches_score(search_terms.gsub(/[^a-z0-9]/, " "))
           matches << { url: g[:url], title: g[:files][0][:filename], score: score } if score > 5
         end

@@ -194,29 +194,29 @@ module SL
        when %r{^(?<id>[a-z0-9]{32}|[0-9]{6,10})(?:[#/](?<file>(?:file-)?.*?))?$}
          m = Regexp.last_match
          res = Curl::Html.new("https://gist.github.com/#{m['id']}", headers_only: true)
-          url = res.headers[
+          url = res.headers["location"]
          title = SL::URL.title(url)

-          url = "#{url}##{m['file']}" if m[
+          url = "#{url}##{m['file']}" if m["file"]
        # If a user an id (an o) are given, convert to a link
        when %r{^(?<u>\w+)/(?<id>[a-z0-9]{32}|[0-9]{6,10})(?:[#/](?<file>(?:file-)?.*?))?$}
          m = Regexp.last_match
          url = "https://gist.github.com/#{m['u']}/#{m['id']}"
          title = SL::URL.title(url)

-          url = "#{url}##{m['file']}" if m[
+          url = "#{url}##{m['file']}" if m["file"]
        # if a full gist URL is given, simply clean it up
        when %r{(?<url>https://gist.github.com/(?:(?<user>\w+)/)?(?<id>[a-z0-9]{32}|[0-9]{6,10}))(?:[#/](?<file>(?:file-)?.*?))?$}
          m = Regexp.last_match
-          url = m[
+          url = m["url"]
          title = SL::URL.title(url)

-          url = "#{url}##{m['file']}" if m[
+          url = "#{url}##{m['file']}" if m["file"]
        # Otherwise do a search of gist.github.com for the keywords
        else
          if terms.split(/ +/).count > 1
            parts = terms.split(/ +/)
-            gist = search_user_gists(parts[0], parts[1..-1].join(
+            gist = search_user_gists(parts[0], parts[1..-1].join(" "))

            if gist
              url = gist[:url]
@@ -232,18 +232,18 @@ module SL
        # Assuming we retrieved a full gist URL
        if url =~ %r{https://gist.github.com/(?:(?<user>[^/]+)/)?(?<id>[a-z0-9]+?)(?:[#/](?<file>(?:file-)?.*?))?$}
          m = Regexp.last_match
-          user = m[
-          id = m[
+          user = m["user"]
+          id = m["id"]

          # If we're trying to create an embed, convert elements to a JS embed script
          if type =~ /e$/
-            url = if m[
+            url = if m["file"]
                    "https://gist.github.com/#{user}/#{id}.js?file=#{m['file'].fix_gist_file}"
                  else
                    "https://gist.github.com/#{user}/#{id}.js"
                  end

-            [
+            ["embed", %(<script src="#{url}"></script>), link_text]
          else
            [url, title, link_text]
          end
@@ -253,6 +253,6 @@ module SL
      end
    end

-    SL::Searches.register
+    SL::Searches.register "github", :search, self
  end
end
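For orientation, the request that `github_search_curl` assembles above can be reproduced outside the gem with the standard library alone. This is a rough sketch, not SearchLink code: it swaps the internal `Curl::Json` wrapper for `Net::HTTP`, and the `GH_TOKEN` environment variable is a hypothetical stand-in for `Secrets::GH_AUTH_TOKEN`.

```ruby
# Standalone sketch of the GitHub search request built in github_search_curl.
# GH_TOKEN is an assumed environment variable, not part of the gem.
require "net/http"
require "json"
require "erb"

def github_search(endpoint, query)
  uri = URI("https://api.github.com/search/#{endpoint}?q=#{ERB::Util.url_encode(query)}&per_page=1&page=1&order=desc")
  req = Net::HTTP::Get.new(uri)
  req["Accept"] = "application/vnd.github+json"
  req["X-GitHub-Api-Version"] = "2022-11-28"
  req["Authorization"] = "Bearer #{ENV['GH_TOKEN']}" if ENV["GH_TOKEN"]

  res = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(req) }
  json = JSON.parse(res.body)

  # Mirror the total_count check in the diff: return the top hit or nil
  json["total_count"].to_i.positive? ? json["items"][0] : nil
end

# top = github_search("repositories", "searchlink in:name")
# puts top["html_url"] if top
```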
data/lib/searchlink/searches/google.rb

@@ -8,18 +8,18 @@ module SL

      def settings
        {
-          trigger:
+          trigger: "(g(oo)?g(le?)?|img)",
          searches: [
-            [
-            [
+            ["gg", "Google Search"],
+            ["img", "First image from result"]
          ]
        }
      end

      def api_key?
-        return false unless SL.config.key?(
+        return false unless SL.config.key?("google_api_key") && SL.config["google_api_key"]

-        key = SL.config[
+        key = SL.config["google_api_key"]
        return false if key =~ /^(x{4,})?$/i

        @api_key = key
@@ -31,39 +31,39 @@ module SL
        image = search_type =~ /img$/ ? true : false

        unless api_key?
-          SL.add_error(
+          SL.add_error("api key", "Missing Google API Key")
          return false
        end

        url = "https://customsearch.googleapis.com/customsearch/v1?cx=338419ee5ac894523&q=#{ERB::Util.url_encode(search_terms)}&num=1&key=#{@api_key}"
        json = Curl::Json.new(url).json

-        if json[
-          SL.notify(
+        if json["error"] && json["error"]["code"].to_i == 429
+          SL.notify("api limit", "Google API limit reached, defaulting to DuckDuckGo")
          return SL.ddg(terms, link_text, google: false, image: image)
        end

-        unless json[
-          SL.notify(
+        unless json["queries"]["request"][0]["totalResults"].to_i.positive?
+          SL.notify("no results", "Google returned no results, defaulting to DuckDuckGo")
          return SL.ddg(terms, link_text, google: false, image: image)
        end

-        result = json[
+        result = json["items"][0]
        return false if result.nil?

-        output_url = result[
-        output_title = result[
-        output_title.remove_seo!(output_url) if SL.config[
+        output_url = result["link"]
+        output_title = result["title"]
+        output_title.remove_seo!(output_url) if SL.config["remove_seo"]

        output_url = SL.first_image if search_type =~ /img$/

        [output_url, output_title, link_text]
      rescue StandardError
-        SL.notify(
+        SL.notify("Google error", "Error fetching Google results, switching to DuckDuckGo")
        SL.ddg(search_terms, link_text, google: false, image: image)
      end
    end

-    SL::Searches.register
+    SL::Searches.register "google", :search, self
  end
end
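The Custom Search call and its fallback conditions handled in this file look roughly like the following when sketched with the standard library. The `cx` value is the one visible in the diff; `GOOGLE_API_KEY` here is a hypothetical stand-in for `SL.config["google_api_key"]`, and the DuckDuckGo fallback is only indicated, not implemented.

```ruby
# Sketch of the Google Custom Search request and the 429/no-results checks above.
require "net/http"
require "json"
require "erb"

def google_top_result(terms, api_key)
  uri = URI("https://customsearch.googleapis.com/customsearch/v1?cx=338419ee5ac894523&q=#{ERB::Util.url_encode(terms)}&num=1&key=#{api_key}")
  json = JSON.parse(Net::HTTP.get(uri))

  # 429 means the daily quota is exhausted; SearchLink falls back to DuckDuckGo here
  return :rate_limited if json["error"] && json["error"]["code"].to_i == 429
  return nil unless json.dig("queries", "request", 0, "totalResults").to_i.positive?

  item = json["items"][0]
  [item["link"], item["title"]]
end

# case google_top_result("searchlink markdown", ENV["GOOGLE_API_KEY"])
# when :rate_limited then puts "fall back to SL.ddg"
# when nil           then puts "no results"
# end
```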
data/lib/searchlink/searches/helpers/chromium.rb

@@ -15,9 +15,9 @@ module SL
      ##
      def search_arc_history(term)
        # Google history
-        history_file = File.expand_path(
+        history_file = File.expand_path("~/Library/Application Support/Arc/User Data/Default/History")
        if File.exist?(history_file)
-          SL.notify(
+          SL.notify("Searching Arc History", term)
          search_chromium_history(history_file, term)
        else
          false
@@ -31,8 +31,8 @@ module SL
      ## @return [Array] Single bookmark, [url, title, date]
      ##
      def search_brave_history(term)
-        base = File.expand_path(
-        profiles = Dir.glob(
+        base = File.expand_path("~/Library/Application Support/BraveSoftware/Brave-Browser/")
+        profiles = Dir.glob("**/History", base: base)
        profiles.delete_if { |p| p =~ /^Snapshots/ }
        profiles.map! { |f| File.join(base, f) }

@@ -59,8 +59,8 @@ module SL
      ## @return [Array] Single bookmark, [url, title, date]
      ##
      def search_edge_history(term)
-        base = File.expand_path(
-        profiles = Dir.glob(
+        base = File.expand_path("~/Library/Application Support/Microsoft Edge/")
+        profiles = Dir.glob("**/History", base: base)
        profiles.delete_if { |p| p =~ /^Snapshots/ }
        profiles.map! { |f| File.join(base, f) }

@@ -88,8 +88,8 @@ module SL
      ##
      def search_chrome_history(term)
        # Google history
-        base = File.expand_path(
-        profiles = Dir.glob(
+        base = File.expand_path("~/Library/Application Support/Google/Chrome/")
+        profiles = Dir.glob("**/History", base: base)
        profiles.delete_if { |p| p =~ /^Snapshots/ }
        profiles.map! { |f| File.join(base, f) }

@@ -131,10 +131,10 @@ module SL
        # If search terms start with ''term, only search for exact string matches
        if term =~ /^ *'/
          exact_match = true
-          term.gsub!(/(^ *'+|'+ *$)/,
+          term.gsub!(/(^ *'+|'+ *$)/, "")
        elsif term =~ /%22(.*?)%22/
          match_phrases = term.scan(/%22(\S.*?\S)%22/)
-          term.gsub!(/%22(\S.*?\S)%22/,
+          term.gsub!(/%22(\S.*?\S)%22/, "")
        end

        terms = []
@@ -154,7 +154,7 @@ module SL
          end)
        end

-        query = terms.join(
+        query = terms.join(" AND ")
        most_recent = `sqlite3 -json '#{tmpfile}' "select title, url,
          datetime(last_visit_time / 1000000 + (strftime('%s', '1601-01-01')), 'unixepoch') as datum
          from urls where #{query} order by datum desc limit 1 COLLATE NOCASE;"`.strip
@@ -163,8 +163,8 @@ module SL

        bm = JSON.parse(most_recent)[0]

-        date = Time.parse(bm[
-        [bm[
+        date = Time.parse(bm["datum"])
+        [bm["url"], bm["title"], date]
      end

      ##
@@ -175,10 +175,10 @@ module SL
      ## @return [Array] single bookmark [url, title, date]
      ##
      def search_arc_bookmarks(term)
-        bookmarks_file = File.expand_path(
+        bookmarks_file = File.expand_path("~/Library/Application Support/Arc/StorableSidebar.json")

        if File.exist?(bookmarks_file)
-          SL.notify(
+          SL.notify("Searching Arc Bookmarks", term)
          return search_arc_json(bookmarks_file, term)
        end

@@ -193,8 +193,8 @@ module SL
      ## @return [Array] single bookmark [url, title, date]
      ##
      def search_brave_bookmarks(term)
-        base = File.expand_path(
-        profiles = Dir.glob(
+        base = File.expand_path("~/Library/Application Support/BraveSoftware/Brave-Browser/")
+        profiles = Dir.glob("**/Bookmarks", base: base)
        profiles.delete_if { |p| p =~ /^Snapshots/ }
        profiles.map! { |f| File.join(base, f) }

@@ -222,8 +222,8 @@ module SL
      ## @return [Array] single bookmark [url, title, date]
      ##
      def search_edge_bookmarks(term)
-        base = File.expand_path(
-        profiles = Dir.glob(
+        base = File.expand_path("~/Library/Application Support/Microsoft Edge")
+        profiles = Dir.glob("**/Bookmarks", base: base)
        profiles.delete_if { |p| p =~ /^Snapshots/ }
        profiles.map! { |f| File.join(base, f) }

@@ -250,21 +250,21 @@ module SL
      ## @return [Array] single bookmark [url, title, date]
      ##
      def search_chrome_bookmarks(term)
-        base = File.expand_path(
-        profiles = Dir.glob(
+        base = File.expand_path("~/Library/Application Support/Google/Chrome/")
+        profiles = Dir.glob("**/Bookmarks", base: base)
        profiles.delete_if { |p| p =~ /^Snapshots/ }
        profiles.map! { |f| File.join(base, f) }

        res = false

        profiles.each do |bookmarks|
-
-          profile = bookmarks.match(%r{Chrome/([^/]+)/})[1]
+          next unless File.exist?(bookmarks)

-
-
-
-
+          profile = bookmarks.match(%r{Chrome/([^/]+)/})[1]
+
+          SL.notify("Searching Chrome Bookmarks for profile #{profile}", term)
+          res = search_chromium_bookmarks(bookmarks, term)
+          break if res
        end

        res
@@ -287,27 +287,27 @@ module SL
        # If search terms start with ''term, only search for exact string matches
        if term =~ /^ *'/
          exact_match = true
-          term.gsub!(/(^ *'+|'+ *$)/,
+          term.gsub!(/(^ *'+|'+ *$)/, "")
        elsif term =~ /%22(.*?)%22/
          match_phrases = term.scan(/%22(\S.*?\S)%22/)
-          term.gsub!(/%22(\S.*?\S)%22/,
+          term.gsub!(/%22(\S.*?\S)%22/, "")
        end

        if arc_bookmarks
          bookmarks = []
-          arc_bookmarks[
+          arc_bookmarks["sidebarSyncState"]["items"].each do |mark|
            next if mark.is_a?(String)

-            next unless mark[
+            next unless mark["value"]["childrenIds"].empty?

-            next unless mark[
+            next unless mark["value"]["data"]["tab"]

            url = {
-              url: mark[
-              saved_title: mark[
-              title: mark[
-              created: mark[
-              active: mark[
+              url: mark["value"]["data"]["tab"]["savedURL"],
+              saved_title: mark["value"]["data"]["tab"]["savedTitle"],
+              title: mark["value"]["title"],
+              created: mark["value"]["createdAt"].to_datetime,
+              active: mark["value"]["data"]["tab"]["timeLastActiveAt"]&.to_datetime
            }

            score = score_mark(url, term)
@@ -370,14 +370,14 @@ module SL
        # If search terms start with ''term, only search for exact string matches
        if term =~ /^ *'/
          exact_match = true
-          term.gsub!(/(^ *'+|'+ *$)/,
+          term.gsub!(/(^ *'+|'+ *$)/, "")
        elsif term =~ /%22(.*?)%22/
          match_phrases = term.scan(/%22(\S.*?\S)%22/)
-          term.gsub!(/%22(\S.*?\S)%22/,
+          term.gsub!(/%22(\S.*?\S)%22/, "")
        end

        if chrome_bookmarks
-          roots = chrome_bookmarks[
+          roots = chrome_bookmarks["roots"]

          urls = extract_chrome_bookmarks(roots, [], term)

@@ -417,15 +417,15 @@ module SL
      ##
      ## @return [Array] array of bookmarks
      ##
-      def extract_chrome_bookmarks(json, urls = [], term =
+      def extract_chrome_bookmarks(json, urls = [], term = "")
        if json.instance_of?(Array)
          json.each { |item| urls = extract_chrome_bookmarks(item, urls, term) }
        elsif json.instance_of?(Hash)
-          if json.key?
-            urls = extract_chrome_bookmarks(json[
-          elsif json[
-            date = Time.at(json[
-            url = { url: json[
+          if json.key? "children"
+            urls = extract_chrome_bookmarks(json["children"], urls, term)
+          elsif json["type"] == "url"
+            date = Time.at(json["date_added"].to_i / 1_000_000 + Time.new(1601, 0o1, 0o1).strftime("%s").to_i)
+            url = { url: json["url"], title: json["name"], date: date }
            score = score_mark(url, term)

            if score > 7
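All of the Chromium-family history lookups above share one pattern: copy the browser's locked `History` SQLite database to a temp file, then shell out to `sqlite3 -json`, converting Chrome's last-visit timestamps (microseconds since 1601-01-01) along the way. A minimal sketch of that pattern, with the exact-match and `%22`-phrase handling from the helper omitted:

```ruby
# Copy-then-query sketch of the Chromium history lookup used by these helpers.
require "fileutils"
require "json"

def chromium_most_recent(history_file, term)
  tmpfile = "#{history_file}.tmp"
  FileUtils.cp(history_file, tmpfile)

  where = term.split(/ +/).map do |t|
    "(url LIKE '%#{t.downcase}%' OR title LIKE '%#{t.downcase}%')"
  end.join(" AND ")

  rows = `sqlite3 -json '#{tmpfile}' "select title, url,
    datetime(last_visit_time / 1000000 + (strftime('%s', '1601-01-01')), 'unixepoch') as datum
    from urls where #{where} order by datum desc limit 1;"`.strip

  FileUtils.rm_f(tmpfile)
  rows.empty? ? nil : JSON.parse(rows)[0]
end

# bm = chromium_most_recent(File.expand_path("~/Library/Application Support/Google/Chrome/Default/History"), "ruby docs")
# puts "#{bm['title']}: #{bm['url']}" if bm
```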
data/lib/searchlink/searches/helpers/firefox.rb

@@ -5,12 +5,12 @@ module SL
    class << self
      def search_firefox_history(term)
        # Firefox history
-        base = File.expand_path(
+        base = File.expand_path("~/Library/Application Support/Firefox/Profiles")
        Dir.chdir(base)
-        profile = Dir.glob(
+        profile = Dir.glob("*default-release")
        return false unless profile

-        src = File.join(base, profile[0],
+        src = File.join(base, profile[0], "places.sqlite")

        exact_match = false
        match_phrases = []
@@ -19,14 +19,14 @@ module SL
        case term
        when /^ *'/
          exact_match = true
-          term.gsub!(/(^ *'+|'+ *$)/,
+          term.gsub!(/(^ *'+|'+ *$)/, "")
        when /%22(.*?)%22/
          match_phrases = term.scan(/%22(\S.*?\S)%22/)
-          term.gsub!(/%22(\S.*?\S)%22/,
+          term.gsub!(/%22(\S.*?\S)%22/, "")
        end

        if File.exist?(src)
-          SL.notify(
+          SL.notify("Searching Firefox History", term)
          tmpfile = "#{src}.tmp"
          FileUtils.cp(src, tmpfile)

@@ -45,7 +45,7 @@ module SL
            "(moz_places.url LIKE '%#{t[0].strip.downcase}%' OR moz_places.title LIKE '%#{t[0].strip.downcase}%')"
          end)
        end
-          query = terms.join(
+          query = terms.join(" AND ")
          most_recent = `sqlite3 -json '#{tmpfile}' "select moz_places.title, moz_places.url,
            datetime(moz_historyvisits.visit_date/1000000, 'unixepoch', 'localtime') as datum
            from moz_places, moz_historyvisits where moz_places.id = moz_historyvisits.place_id
@@ -57,9 +57,9 @@ module SL
          marks = JSON.parse(most_recent)

          marks.map! do |bm|
-            date = Time.parse(bm[
-            score = score_mark({ url: bm[
-            { url: bm[
+            date = Time.parse(bm["datum"])
+            score = score_mark({ url: bm["url"], title: bm["title"] }, term)
+            { url: bm["url"], title: bm["title"], date: date, score: score }
          end

          m = marks.max_by { |m| [m[:url].length * -1, m[:score]] }
@@ -72,12 +72,12 @@ module SL

      def search_firefox_bookmarks(term)
        # Firefox history
-        base = File.expand_path(
+        base = File.expand_path("~/Library/Application Support/Firefox/Profiles")
        Dir.chdir(base)
-        profile = Dir.glob(
+        profile = Dir.glob("*default-release")
        return false unless profile

-        src = File.join(base, profile[0],
+        src = File.join(base, profile[0], "places.sqlite")

        exact_match = false
        match_phrases = []
@@ -85,14 +85,14 @@ module SL
        # If search terms start with ''term, only search for exact string matches
        if term =~ /^ *'/
          exact_match = true
-          term.gsub!(/(^ *'+|'+ *$)/,
+          term.gsub!(/(^ *'+|'+ *$)/, "")
        elsif term =~ /%22(.*?)%22/
          match_phrases = term.scan(/%22(\S.*?\S)%22/)
-          term.gsub!(/%22(\S.*?\S)%22/,
+          term.gsub!(/%22(\S.*?\S)%22/, "")
        end

        if File.exist?(src)
-          SL.notify(
+          SL.notify("Searching Firefox Bookmarks", term)
          tmpfile = "#{src}.tmp"
          FileUtils.cp(src, tmpfile)

@@ -112,7 +112,7 @@ module SL
          end)
        end

-          query = terms.join(
+          query = terms.join(" AND ")

          most_recent = `sqlite3 -json '#{tmpfile}' "select h.url, b.title,
            datetime(b.dateAdded/1000000, 'unixepoch', 'localtime') as datum
@@ -124,9 +124,9 @@ module SL

          bm = JSON.parse(most_recent)[0]

-          date = Time.parse(bm[
-          score = score_mark({ url: bm[
-          [bm[
+          date = Time.parse(bm["datum"])
+          score = score_mark({ url: bm["url"], title: bm["title"] }, term)
+          [bm["url"], bm["title"], date, score]
        else
          false
        end