vaultkit 1.0.0 → 1.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/vkit/cli/base_cli.rb +110 -0
- data/lib/vkit/cli/commands/approval_watch_command.rb +129 -0
- data/lib/vkit/cli/commands/grant_revoke_command.rb +39 -0
- data/lib/vkit/cli/commands/init_command.rb +82 -0
- data/lib/vkit/cli/commands/policy_pack_add_command.rb +38 -0
- data/lib/vkit/cli/commands/policy_pack_info_command.rb +47 -0
- data/lib/vkit/cli/commands/policy_pack_list_command.rb +47 -0
- data/lib/vkit/cli/commands/policy_pack_remove_command.rb +36 -0
- data/lib/vkit/cli/commands/policy_pack_upgrade_command.rb +81 -0
- data/lib/vkit/cli/commands/policy_revoke_command.rb +40 -0
- data/lib/vkit/cli/commands/reset_command.rb +20 -0
- data/lib/vkit/cli/policy_pack/manager.rb +416 -0
- data/lib/vkit/policy/bundle_compiler.rb +18 -0
- data/lib/vkit/policy/packs/ai_safety/metadata.yaml +10 -0
- data/lib/vkit/policy/packs/ai_safety/policies/01_deny_agent_prod_without_clearance.yaml +14 -0
- data/lib/vkit/policy/packs/ai_safety/policies/02_require_approval_for_sensitive_in_prod.yaml +16 -0
- data/lib/vkit/policy/packs/ai_safety/policies/03_mask_sensitive_by_default_for_agents.yaml +15 -0
- data/lib/vkit/policy/packs/financial_compliance/metadata.yaml +10 -0
- data/lib/vkit/policy/packs/financial_compliance/policies/01_require_approval_for_financial_prod.yaml +16 -0
- data/lib/vkit/policy/packs/financial_compliance/policies/02_mask_payment_tokens.yaml +14 -0
- data/lib/vkit/policy/packs/financial_compliance/policies/03_deny_non_admin_access_pci_in_prod.yaml +15 -0
- data/lib/vkit/policy/packs/financial_compliance/policies/04_short_ttl_for_financial_grants.yaml +14 -0
- data/lib/vkit/policy/packs/starter/metadata.yaml +9 -0
- data/lib/vkit/policy/packs/starter/policies/01_deny_sensitive_without_clearance.yaml +15 -0
- data/lib/vkit/policy/packs/starter/policies/02_mask_pii_by_default.yaml +13 -0
- data/lib/vkit/policy/packs/starter/policies/03_require_approval_high_sensitivity.yaml +17 -0
- data/lib/vkit/policy/packs/starter/policies/04_block_cross_region.yaml +14 -0
- data/lib/vkit/policy/packs/starter/policies/05_limit_production_access.yaml +16 -0
- data/lib/vkit/policy/packs/starter/policies/06_default_ttl.yaml +10 -0
- data/lib/vkit/policy/schema/policy_bundle.schema.json +16 -1
- data/lib/vkit/version.rb +1 -1
- metadata +29 -2
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
module Vkit
  module CLI
    module Commands
      # Interactive command that clears all locally stored credentials after
      # an explicit "y" confirmation on stdin. Any other answer (including
      # just pressing Enter) cancels.
      class ResetCommand < BaseCommand
        def call
          print "⚠️ This will clear all stored credentials. Continue? (y/N): "
          # BUGFIX: $stdin.gets returns nil at EOF (e.g. piped or closed
          # stdin); calling .chomp on nil raised NoMethodError. Coerce nil
          # to "" so EOF safely counts as "no".
          response = $stdin.gets.to_s.chomp

          unless response.downcase == 'y'
            puts "Cancelled"
            return
          end

          # credential_store is provided by BaseCommand — presumably the
          # local keystore; confirm clear! semantics against base_cli.rb.
          credential_store.clear!
          puts "🧹 All credentials cleared"
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,416 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "yaml"
require "fileutils"
require "time"
require "digest"

module Vkit
  module CLI
    module PolicyPack
      # Installs, removes, and upgrades policy packs shipped inside the gem
      # into a project's local policies directory. Install state is tracked
      # in <project_root>/.vkit/packs.yaml.
      class Manager
        # Packs shipped with CLI (in the gem).
        # BUGFIX: was `PACKS_DIR = PACKS_DIR = File.expand_path(...)` — a
        # duplicated assignment typo.
        PACKS_DIR = File.expand_path("../../policy/packs", __dir__)

        # Project-local state
        STATE_DIR_NAME = ".vkit"
        TRACKING_FILE_NAME = "packs.yaml"

        DEFAULT_POLICIES_DIR = File.join("config", "policies")

        class Error < StandardError; end
        class PackNotFound < Error; end
        class PackAlreadyInstalled < Error; end
        class PackNotInstalled < Error; end
        class DependencyMissing < Error; end
        class UnsafeOverwrite < Error; end
        class InvalidPack < Error; end

        # @param project_root [String] directory holding the .vkit state dir
        # @param policies_dir [String] destination for rendered policy files;
        #   relative paths resolve against project_root
        def initialize(project_root: Dir.pwd, policies_dir: DEFAULT_POLICIES_DIR)
          @project_root = File.expand_path(project_root)
          @policies_dir = File.expand_path(policies_dir, @project_root)
        end

        # Names of packs shipped with the gem, sorted alphabetically.
        def available_packs
          return [] unless Dir.exist?(PACKS_DIR)

          Dir.children(PACKS_DIR)
             .select { |name| File.directory?(File.join(PACKS_DIR, name)) }
             .sort
        end

        # @return [Hash] pack name => install record from the tracking file
        def installed_packs
          state["installed_packs"] || {}
        end

        def installed?(pack_name)
          installed_packs.key?(pack_name)
        end

        # Parsed and validated metadata of a shipped pack.
        # @raise [PackNotFound, InvalidPack]
        def pack_metadata(pack_name)
          meta, _policies = read_pack!(pack_name)
          meta
        end

        # Install pack policies into project's policies_dir.
        #
        # Options:
        # - force: overwrite pack-installed files if they exist (refuses to overwrite non-pack files)
        # - dry_run: compute actions but do not write
        #
        # @return [Integer] number of policy files installed
        def install!(pack_name, force: false, dry_run: false)
          meta, policies = read_pack!(pack_name)

          raise PackAlreadyInstalled, "Pack '#{pack_name}' is already installed" if installed?(pack_name)

          ensure_dependencies!(meta)

          FileUtils.mkdir_p(@policies_dir) unless dry_run

          validate_pack_policies!(pack_name, meta, policies)

          install_record = {
            "name" => pack_name,
            "version" => meta["version"],
            "layer" => meta["layer"],
            "installed_at" => Time.now.utc.iso8601,
            "pack_checksum" => pack_checksum(pack_name),
            "files" => []
          }

          policies.each_with_index do |policy, idx|
            policy_id = policy.fetch("id")

            # Namespaced, ordered filename: "<pack>__NN__<slug>.yaml"
            filename = format("%<pack>s__%<idx>02d__%<id>s.%<ext>s",
                              pack: pack_name,
                              idx: (idx + 1),
                              id: safe_slug(policy_id),
                              ext: "yaml")

            dest = File.join(@policies_dir, filename)

            # Overwrite rules:
            # - if dest exists and force=false -> refuse
            # - if dest exists and force=true -> allow only when the filename
            #   carries this pack's namespace prefix (never clobber user files)
            if File.exist?(dest)
              raise UnsafeOverwrite, "Refusing to overwrite existing file: #{dest}" unless force

              unless File.basename(dest).start_with?("#{pack_name}__")
                raise UnsafeOverwrite, "Refusing to overwrite non-pack file (missing namespace): #{dest}"
              end
            end

            content = render_policy_file(pack_name, meta, policy)
            File.write(dest, content) unless dry_run

            install_record["files"] << {
              "path" => relative_to_root(dest),
              "policy_id" => policy_id
            }
          end

          write_state_add!(pack_name, install_record) unless dry_run

          install_record["files"].length
        end

        # Remove pack-installed policy files.
        #
        # Options:
        # - force: tolerate already-missing tracked files (never deletes non-tracked files)
        #
        # @return [Integer] number of files actually deleted
        def remove!(pack_name, force: false)
          raise PackNotInstalled, "Pack '#{pack_name}' is not installed" unless installed?(pack_name)

          files = Array(installed_packs[pack_name]["files"])
          removed = 0

          files.each do |f|
            abs = File.expand_path(f.fetch("path"), @project_root)

            if File.exist?(abs)
              FileUtils.rm_f(abs)
              removed += 1
            else
              raise Error, "Expected pack file missing: #{abs} (use --force to ignore)" unless force
            end
          end

          write_state_remove!(pack_name)

          removed
        end

        # One status row per shipped pack: name, layer, shipped/installed
        # versions, and whether installed version drifted from shipped.
        def list_status
          installed = installed_packs

          available_packs.map do |name|
            shipped_meta = safe_pack_metadata(name)
            entry = installed[name]

            shipped_version = shipped_meta["version"]
            installed_version = entry && entry["version"]

            {
              "name" => name,
              "layer" => shipped_meta["layer"],
              "shipped_version" => shipped_version,
              "installed" => !entry.nil?,
              "installed_version" => installed_version,
              "drift" => entry && shipped_version && installed_version && shipped_version != installed_version
            }
          end
        end

        # Re-install a pack when the shipped version differs from the
        # installed one.
        #
        # @return [Symbol, Hash] :up_to_date when versions match, otherwise
        #   { old_version:, new_version:, policies: }
        def upgrade!(pack_name, force: false, dry_run: false)
          raise PackNotInstalled, "Pack '#{pack_name}' is not installed" unless installed?(pack_name)

          installed_version = installed_packs[pack_name]["version"]

          shipped_meta = pack_metadata(pack_name)
          shipped_version = shipped_meta["version"]

          return :up_to_date if installed_version == shipped_version

          if dry_run
            # BUGFIX: the old code skipped remove! in dry-run mode but still
            # called install!, which always raised PackAlreadyInstalled.
            # Report the would-be policy count without touching anything.
            _meta, policies = read_pack!(pack_name)
            count = policies.length
          else
            remove!(pack_name, force: force)
            count = install!(pack_name, force: force)
          end

          {
            old_version: installed_version,
            new_version: shipped_version,
            policies: count
          }
        end

        # Upgrade every installed pack, lowest layer first.
        # @return [Array<[String, Object]>] pack name with upgrade! result
        def upgrade_all!(force: false, dry_run: false)
          # installed packs ordered by layer (low -> high)
          packs = installed_packs.keys.sort_by { |name| safe_pack_metadata(name)["layer"].to_i }

          packs.each_with_object([]) do |pack, results|
            results << [pack, upgrade!(pack, force: force, dry_run: dry_run)]
          end
        end

        # Install a pack after recursively installing its dependencies,
        # detecting cycles via the `visited` set.
        #
        # NOTE(review): with dry_run: true, dependency installs do not write
        # state, so ensure_dependencies! inside install! may still report
        # them missing — verify the intended dry-run contract with callers.
        def install_with_deps!(pack_name, force: false, dry_run: false, visited: {})
          return 0 if installed?(pack_name)

          raise InvalidPack, "Circular dependency detected at '#{pack_name}'" if visited[pack_name]

          visited[pack_name] = true

          meta = pack_metadata(pack_name)
          deps = Array(meta["dependencies"])

          # Install dependencies first
          deps.each do |dep|
            install_with_deps!(dep, force: force, dry_run: dry_run, visited: visited)
          end

          # Enforce layering monotonicity: a pack may only depend on packs
          # at the same or a lower layer.
          my_layer = meta["layer"].to_i
          deps.each do |dep|
            dep_layer = pack_metadata(dep)["layer"].to_i
            if dep_layer > my_layer
              raise InvalidPack, "Invalid layering: '#{pack_name}' layer #{my_layer} depends on '#{dep}' layer #{dep_layer}"
            end
          end

          install!(pack_name, force: force, dry_run: dry_run)
        ensure
          visited.delete(pack_name)
        end

        private

        # Pack reading & validation.
        # @return [Array(Hash, Array<Hash>)] metadata hash and parsed policies
        def read_pack!(pack_name)
          root = pack_root(pack_name)
          raise PackNotFound, "Pack '#{pack_name}' not found" unless Dir.exist?(root)

          metadata_path = File.join(root, "metadata.yaml")
          raise InvalidPack, "Missing metadata.yaml in pack '#{pack_name}'" unless File.exist?(metadata_path)

          meta_doc = YAML.safe_load(File.read(metadata_path), aliases: true)
          # BUGFIX: validate the shape BEFORE reading keys. The old code read
          # meta[k] first and only checked meta.is_a?(Hash) afterwards, so a
          # malformed (or empty -> nil) metadata.yaml raised NoMethodError
          # instead of InvalidPack.
          meta = meta_doc.is_a?(Hash) ? meta_doc["__pack_meta"] : nil
          raise InvalidPack, "Invalid __pack_meta in '#{pack_name}'" unless meta.is_a?(Hash)

          required_keys = %w[name version layer]
          missing = required_keys.reject { |k| meta[k].to_s.strip != "" }

          unless missing.empty?
            raise InvalidPack, "Pack '#{pack_name}' missing required metadata keys: #{missing.join(", ")}"
          end

          unless Array(meta["dependencies"]).all? { |d| d.is_a?(String) }
            raise InvalidPack, "dependencies must be an array of strings"
          end

          policies_dir = File.join(root, "policies")
          raise InvalidPack, "Missing policies/ directory in pack '#{pack_name}'" unless Dir.exist?(policies_dir)

          policy_files = Dir.glob(File.join(policies_dir, "*.yaml")).sort
          raise InvalidPack, "No policy files found in '#{pack_name}'" if policy_files.empty?

          policies = policy_files.map do |file|
            data = YAML.safe_load(File.read(file), aliases: true)

            raise InvalidPack, "Policy file #{file} must be a Hash" unless data.is_a?(Hash)

            # Schema-level validation lives in the shared validator.
            Vkit::Policy::PolicyValidator.validate!(data, file: File.basename(file))

            data
          end

          policies.each do |p|
            raise InvalidPack, "Policy missing id in '#{pack_name}'" unless p["id"]
          end

          [meta, policies]
        end

        # Best-effort metadata lookup for listing; never raises.
        def safe_pack_metadata(pack_name)
          pack_metadata(pack_name)
        rescue StandardError
          {}
        end

        # Enforce the pack's optional priority_band {min, max} on every
        # policy that declares a priority.
        def validate_pack_policies!(pack_name, meta, policies)
          band = meta["priority_band"] || {}
          min = band["min"]
          max = band["max"]

          return if min.nil? && max.nil?

          policies.each do |p|
            pr = p["priority"]
            next if pr.nil?

            if min && pr < min
              raise InvalidPack, "Pack '#{pack_name}' policy '#{p["id"]}' priority #{pr} below band min #{min}"
            end
            if max && pr > max
              raise InvalidPack, "Pack '#{pack_name}' policy '#{p["id"]}' priority #{pr} above band max #{max}"
            end
          end
        end

        # All declared dependencies must already be installed.
        def ensure_dependencies!(meta)
          deps = Array(meta["dependencies"])
          return if deps.empty?

          missing = deps.reject { |d| installed?(d) }
          return if missing.empty?

          raise DependencyMissing, "Pack requires dependencies not installed: #{missing.join(", ")}"
        end

        def pack_root(pack_name)
          File.join(PACKS_DIR, pack_name)
        end

        # SHA-256 over metadata.yaml plus all policy files, in sorted order,
        # so the checksum is stable across filesystems.
        def pack_checksum(pack_name)
          root = pack_root(pack_name)

          files = [
            File.join(root, "metadata.yaml"),
            *Dir.glob(File.join(root, "policies", "*.yaml")).sort
          ]

          digest = Digest::SHA256.new
          files.each do |file|
            digest.update(File.read(file))
          end

          digest.hexdigest
        end

        # State file helpers
        def state_dir
          File.join(@project_root, STATE_DIR_NAME)
        end

        def state_path
          File.join(state_dir, TRACKING_FILE_NAME)
        end

        # Parsed tracking state; falls back to an empty default when the file
        # is absent, empty, or unreadable (deliberate best-effort).
        def state
          return default_state unless File.exist?(state_path)

          YAML.safe_load(File.read(state_path), permitted_classes: [], permitted_symbols: [], aliases: true) || default_state
        rescue StandardError
          default_state
        end

        def default_state
          { "format_version" => "v1", "installed_packs" => {} }
        end

        def write_state_add!(pack_name, entry)
          s = state
          s["installed_packs"] ||= {}
          s["installed_packs"][pack_name] = entry

          FileUtils.mkdir_p(state_dir)
          File.write(state_path, s.to_yaml)
        end

        def write_state_remove!(pack_name)
          s = state
          s["installed_packs"] ||= {}
          s["installed_packs"].delete(pack_name)

          FileUtils.mkdir_p(state_dir)
          File.write(state_path, s.to_yaml)
        end

        # Render a policy document with a provenance header comment.
        def render_policy_file(pack_name, meta, policy)
          header = <<~HEADER
            # VaultKit Policy Pack: #{pack_name}
            # Pack Version: #{meta["version"]}
            # Layer: #{meta["layer"]}
            # Installed At: #{Time.now.utc.iso8601}
            #
            # Generated by VaultKit CLI. You may edit this file.
            # To revert to upstream defaults: remove & re-add the pack.
            #
          HEADER

          body = policy.to_yaml
          body = body.sub(/\A---\s*\n/, "") # remove leading doc marker

          header + body
        end

        # Filesystem-safe, length-capped slug for use in filenames.
        def safe_slug(value)
          value.to_s
               .strip
               .downcase
               .gsub(/[^a-z0-9]+/, "_")
               .gsub(/\A_+|_+\z/, "")
               .slice(0, 80)
        end

        def relative_to_root(abs_path)
          abs_path.sub(@project_root + File::SEPARATOR, "")
        end
      end
    end
  end
end
|
|
@@ -27,6 +27,7 @@ module Vkit
|
|
|
27
27
|
"signing" => nil
|
|
28
28
|
}
|
|
29
29
|
|
|
30
|
+
bundle["bundle"]["installed_packs"] = load_installed_packs
|
|
30
31
|
canonical = canonical_json(bundle)
|
|
31
32
|
bundle["bundle"]["checksum"] = Digest::SHA256.hexdigest(canonical)
|
|
32
33
|
|
|
@@ -153,6 +154,23 @@ module Vkit
|
|
|
153
154
|
value
|
|
154
155
|
end
|
|
155
156
|
end
|
|
157
|
+
|
|
158
|
+
def self.load_installed_packs
|
|
159
|
+
tracking_path = File.join(".vkit", "packs.yaml")
|
|
160
|
+
return [] unless File.exist?(tracking_path)
|
|
161
|
+
|
|
162
|
+
data = YAML.safe_load(File.read(tracking_path), permitted_classes: [], permitted_symbols: [], aliases: true)
|
|
163
|
+
packs = data["installed_packs"] || {}
|
|
164
|
+
|
|
165
|
+
packs.map do |name, meta|
|
|
166
|
+
{
|
|
167
|
+
"name" => name,
|
|
168
|
+
"version" => meta["version"]
|
|
169
|
+
}
|
|
170
|
+
end
|
|
171
|
+
rescue
|
|
172
|
+
[]
|
|
173
|
+
end
|
|
156
174
|
end
|
|
157
175
|
end
|
|
158
176
|
end
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
# ai_safety pack, policy 01: deny automated agents in production.
# NOTE(review): the description mentions a requester_clearance exemption,
# but no clearance condition appears below — as written this matches every
# agent request in production. Presumably a higher-priority policy grants
# cleared agents access; confirm against the engine's layering rules.
id: ai_deny_agent_prod_without_clearance
description: "Deny agent access in production unless requester_clearance is high/admin."
priority: 1100

# Empty match: applies to all fields; scoping comes from context below.
match: {}

context:
  environment: production
  requester_role: agent

action:
  deny: true
  reason: "Automation/agents in production require elevated clearance."
  ttl: "1h"
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
# ai_safety pack, policy 02: sensitive-category fields in production go
# through a security approval gate rather than being denied outright.
id: ai_require_approval_sensitive_prod
description: "Require approval when accessing sensitive fields in production."
priority: 1200

match:
  fields:
    contains: ["pii", "financial", "phi", "secrets"]

context:
  environment: production

action:
  require_approval: true
  approver_role: security
  reason: "Sensitive data access in production requires security review."
  ttl: "1h"
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
# ai_safety pack, policy 03: agents get masked values for sensitive fields
# regardless of environment (no environment key in context below).
id: ai_mask_sensitive_for_agents
description: "Mask sensitive fields by default for agents."
priority: 1300

match:
  fields:
    contains: ["pii", "financial", "phi", "secrets"]

context:
  requester_role: agent

action:
  mask: true
  reason: "Agents should not receive raw sensitive fields by default."
  ttl: "4h"
|
data/lib/vkit/policy/packs/financial_compliance/policies/01_require_approval_for_financial_prod.yaml
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
# financial_compliance pack, policy 01: financial-category access in
# production requires finance-manager approval.
id: fin_require_approval_financial_prod
description: "Require approval for financial-category fields in production."
priority: 2100

match:
  fields:
    category: financial

context:
  environment: production

action:
  require_approval: true
  approver_role: finance_manager
  reason: "Financial data access in production requires finance approval."
  ttl: "1h"
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
# financial_compliance pack, policy 02: payment/card fields are always
# masked — empty context means no environment or role restriction.
id: fin_mask_payment_tokens
description: "Mask payment tokens / card-related fields everywhere."
priority: 2200

match:
  fields:
    contains: ["pci", "card", "payment_token"]

context: {}

action:
  mask: true
  reason: "Payment tokens/card-related data must never be exposed in raw form."
  ttl: "24h"
|
data/lib/vkit/policy/packs/financial_compliance/policies/03_deny_non_admin_access_pci_in_prod.yaml
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
# financial_compliance pack, policy 03: deny PCI-tagged data in production.
# NOTE(review): the description says "unless admin", but no role/clearance
# exemption is expressed below — presumably the admin carve-out comes from
# another policy or from engine defaults; verify before relying on it.
id: fin_deny_non_admin_pci_prod
description: "Deny PCI-tagged data access in production unless admin."
priority: 2300

match:
  fields:
    contains: ["pci"]

context:
  environment: production

action:
  deny: true
  reason: "PCI-tagged data in production is restricted to admins."
  ttl: "1h"
|
data/lib/vkit/policy/packs/financial_compliance/policies/04_short_ttl_for_financial_grants.yaml
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
# financial_compliance pack, policy 04: allow financial-category access but
# cap the grant at 30 minutes (note: this is the only fin policy with
# allow: true — its effect depends on how the engine orders priorities).
id: fin_short_ttl_financial
description: "Short TTL for financial access grants to reduce risk window."
priority: 2400

match:
  fields:
    category: financial

context: {}

action:
  allow: true
  reason: "Financial grants must expire quickly."
  ttl: "30m"
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
# starter pack, policy 01: low-clearance requesters cannot read
# high-sensitivity fields. Note the starter pack places `priority` after
# `context` (unlike the ai/fin packs) and omits a ttl.
id: deny_sensitive_without_clearance
description: "Deny access to highly sensitive data without sufficient clearance."

match:
  fields:
    sensitivity: high

context:
  requester_clearance: low

priority: 90

action:
  deny: true
  reason: "High sensitivity data requires elevated clearance."
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
# starter pack, policy 03: high-sensitivity fields in production require a
# data-owner sign-off. Priority 85 sits below the starter deny policies
# (90, 95) — presumably denials take precedence; confirm engine ordering.
id: require_approval_high_sensitivity
description: "Require approval before accessing high sensitivity data."

match:
  fields:
    sensitivity: high

context:
  environment: production

priority: 85

action:
  require_approval: true
  approver_role: data_owner
  reason: "High sensitivity data requires manual review in production."
  ttl: "1h"
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
# starter pack, policy 04: data residency guard.
# NOTE(review): despite the generic description, only the US-requester →
# EU-dataset direction is encoded below; other region pairs are not
# covered by this policy. Confirm whether that is intentional.
id: block_cross_region
description: "Block cross-region access to prevent data residency violations."

match: {}

context:
  requester_region: US
  dataset_region: EU

priority: 95

action:
  deny: true
  reason: "Cross-region data access is not permitted."