bqm 1.4.0 → 1.5.1
- checksums.yaml +4 -4
- data/DOC.md +4 -0
- data/bin/bqm +40 -21
- data/data/query-sets.json +2 -1
- metadata +3 -2
checksums.yaml
CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 851357946e6ed09933678c74958585dcb137314743d02de03b9f5a56cec6750d
+  data.tar.gz: 05a321f96dcfd330d81869eed88f7ad3b42103bb32b31858a47860b617c376da
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 025c0a65e18b7a7d520f2ad9137dc76200653619b258f8558eee15ebfabc38222da3681d2aff5add7c5ed6e3454ed78c0f18ac6631e69fc2dda30b4a1ca9bbbc
+  data.tar.gz: 5e999b124e978527d8c99bbf551803346ff827965d2d3c4d442c235ab879e08cd517821bcc8c486099c5331abf36543df13d6ebd06b27755bb6563a7412efb63
data/DOC.md
ADDED

@@ -0,0 +1,4 @@
+## Options
+
+- `-i`, `--local-sets`: if several items are provided, they must be comma (`,`) separated. Items can be either a BloodHound custom query file, a bqm query sets file (similar to `query-sets.json`), or a folder. In the case of a folder, bqm will try to parse all JSON files inside it, so the folder should not contain other types of JSON files.
+- `--ignore-default`: ignore the default `query-sets.json`. This is useful for **offline** usage or when you don't want the default data sets.
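The comma-separated behaviour of `-i`/`--local-sets` comes from OptionParser's `Array` acceptor, as used in `bin/bqm` below. Here is a minimal standalone Ruby sketch (illustrative only, not the gem's code; the variable names are made up) of how one argument becomes several items:

```ruby
require 'optparse'

# Minimal sketch: OptionParser's Array acceptor splits the option argument on
# commas, so "-i /tmp/a.json,/home/user/folder" yields two separate items.
options = { 'local-sets': [] }
OptionParser.new do |parser|
  parser.on('-i', '--local-sets FILE,DIRECTORY,...', Array) do |items|
    options[:'local-sets'] += items
  end
end.parse!(%w[-i /tmp/a.json,/home/user/folder])

p options[:'local-sets'] # => ["/tmp/a.json", "/home/user/folder"]
```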
data/bin/bqm
CHANGED

@@ -15,30 +15,43 @@ def find_dataset
 end
 
 # Merge remote sets defined in data/query-sets.json
-def merge_remote(source)
+def merge_remote(source, verbose: false)
   sets = get_datasets(source)
   queries = []
   sets.each do |s|
     customqueries = Net::HTTP.get(URI(s))
     data = JSON.parse(customqueries)
     queries += data['queries']
+    puts " [*] File merged: #{s}" if verbose
+  rescue JSON::ParserError
+    # Handle the JSON parsing error
+    puts " [!] JSON parsing error for #{s}"
   end
   queries
 end
 
 # Merge local sets provided by the user
-def merge_local(sources)
+def merge_local(sources, verbose: false)
   queries = []
   sources.each do |source|
+    # If it's a file, parse it
     if File.file?(source) && File.readable?(source)
-
-
-
-
+      data = json_load(source)
+      if data['queries']
+        queries += data['queries']
+      elsif data['sets']
+        queries += merge_remote(source, verbose: verbose)
+      else
+        raise KeyError, "The file #{source} is neither a Bloodhound custom query file nor a bqm query sets file"
      end
-
+      puts " [*] File merged: #{source}" if verbose
+    # If it's a folder, retrieve all JSON files and assume they are query files,
+    # then call the normal file parsing recursively
+    elsif File.directory?(source) && File.readable?(source)
+      qfiles = Dir.glob('*.json', base: source).map { |f| File.absolute_path(f, source) }
+      queries += merge_local(qfiles, verbose: verbose)
     else
-      raise IOError, "The dataset file #{source} does not exist or is unreadable."
+      raise IOError, "The dataset file/directory #{source} does not exist or is unreadable."
    end
   end
   queries
@@ -78,13 +91,18 @@ def pretty_link(lst)
 end
 
 def get_datasets(source)
+  src = json_load(source)
+  src['sets']
+end
+
+def json_load(file)
   # ruby 3.0+
   begin
-    src = JSON.load_file(source)
+    src = JSON.load_file(file)
   rescue NoMethodError # ruby 2.7 retro-compatibility
-    src = JSON.parse(File.read(source))
+    src = JSON.parse(File.read(file))
   end
-  src['sets']
+  src
 end
 
 if __FILE__ == $PROGRAM_NAME
@@ -92,19 +110,21 @@ if __FILE__ == $PROGRAM_NAME
 
   require 'optparse'
   options = {
-
+    'local-sets': []
   }
   OptionParser.new do |parser|
     parser.banner = 'Usage: bqm [options]'
 
     parser.on('-o', '--output-path PATH', 'Path where to store the query file')
     parser.on('-l', '--list', 'List available datasets')
-    parser.on('-i', '--local-sets FILE,...', Array, 'Local custom queries files') do |f|
+    parser.on('-i', '--local-sets FILE,DIRECTORY,...', Array, 'Local custom queries files/directories') do |f|
       options[:'local-sets'] += f
     end
+    parser.on('--ignore-default', 'Ignore the default query-sets.json')
+    parser.on('-v', '--verbose', 'Display the name of the merged files/sets')
     parser.separator ''
    parser.separator 'Example: bqm -o ~/.config/bloodhound/customqueries.json'
-    parser.separator 'Example: bqm -o /tmp/customqueries.json -i /tmp/a.json,/
+    parser.separator 'Example: bqm -o /tmp/customqueries.json -i /tmp/a.json,/home/user/folder'
   end.parse!(into: options)
 
   out = options[:'output-path']
@@ -120,20 +140,19 @@ if __FILE__ == $PROGRAM_NAME
   if File.file?(out) && File.readable?(out)
     puts "[+] The output path #{out} already exists"
     puts '[?] Do you want to overwrite it? [y/n]'
-    if
+    if $stdin.gets.chomp == 'y'
       puts '[?] What to do with the existing queries? (merge / discard) [m/d]'
-      flags[:merge_actual] = true if
+      flags[:merge_actual] = true if $stdin.gets.chomp == 'm'
     else
       exit
     end
   end
   puts '[+] Fetching and merging datasets'
-  data =
+  data = []
+  data = merge_remote(source, verbose: options[:verbose]) unless options[:'ignore-default']
   local_set = options[:'local-sets']
-  if local_set
-
-  end
-  if flags[:'merge_actual']
+  data += merge_local(local_set, verbose: options[:verbose]) if local_set
+  if flags[:merge_actual]
     puts '[+] Merging your existing queries'
     data += JSON.parse(File.read(out))['queries']
   end
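For readers who want to try the two new building blocks in isolation, here is a minimal Ruby sketch (an assumption-laden illustration, not the gem's exact code: the helper `json_files` is a made-up name, while `json_load` mirrors the helper added in the diff above):

```ruby
require 'json'

# List a folder's JSON files as absolute paths; Dir.glob with base: returns
# names relative to the folder, hence the File.absolute_path mapping.
def json_files(dir)
  Dir.glob('*.json', base: dir).map { |f| File.absolute_path(f, dir) }
end

# Load a JSON file with JSON.load_file (Ruby 3.0+), falling back to
# JSON.parse(File.read(...)) on Ruby 2.7 where load_file raises NoMethodError.
def json_load(file)
  JSON.load_file(file)
rescue NoMethodError # ruby 2.7 retro-compatibility
  JSON.parse(File.read(file))
end

# Usage: json_files('/home/user/folder').map { |f| json_load(f)['queries'] }
```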
data/data/query-sets.json
CHANGED

@@ -12,6 +12,7 @@
     "https://raw.githubusercontent.com/egypt/customqueries/master/customqueries.json",
     "https://raw.githubusercontent.com/trustedsec/CrackHound/main/customqueries.json",
     "https://raw.githubusercontent.com/aress31/bloodhound-utils/main/customqueries.json",
-    "https://raw.githubusercontent.com/ThePorgs/Exegol-images/main/sources/bloodhound/customqueries.json"
+    "https://raw.githubusercontent.com/ThePorgs/Exegol-images/main/sources/assets/bloodhound/customqueries.json"
   ]
 }
+
metadata
CHANGED

@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: bqm
 version: !ruby/object:Gem::Version
-  version: 1.4.0
+  version: 1.5.1
 platform: ruby
 authors:
 - Alexandre ZANNI
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-06
+date: 2023-09-06 00:00:00.000000000 Z
 dependencies: []
 description: Deduplicate custom BloodHound queries from different datasets and merge
   them in one customqueries.json file.
@@ -18,6 +18,7 @@ executables:
 extensions: []
 extra_rdoc_files: []
 files:
+- DOC.md
 - LICENSE
 - bin/bqm
 - data/query-sets.json