qrpm 0.0.3 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.ruby-version +1 -1
- data/NOTES +7 -0
- data/TODO +30 -0
- data/doc/pg.yml +37 -0
- data/doc/qrpm2.rb +69 -0
- data/doc/qrpm2.yml +55 -0
- data/example/configure +0 -0
- data/example/make +0 -0
- data/example/my_package_name-1.2.3-4.x86_64.rpm +0 -0
- data/example/my_package_name.spec +49 -0
- data/example.yml +84 -0
- data/exe/qrpm +134 -42
- data/lib/qrpm/compiler.rb +238 -0
- data/lib/qrpm/fragment.rb +177 -0
- data/lib/qrpm/lexer.rb +41 -0
- data/lib/qrpm/node.rb +372 -31
- data/lib/qrpm/qrpm.rb +182 -53
- data/lib/qrpm/rpm.rb +107 -60
- data/lib/qrpm/template.erb +28 -29
- data/lib/qrpm/template.yml +15 -12
- data/lib/qrpm/utils.rb +10 -0
- data/lib/qrpm/version.rb +1 -1
- data/lib/qrpm.rb +107 -1
- data/qrpm.gemspec +3 -3
- metadata +51 -10
- data/lib/qrpm/parser.rb +0 -183
data/lib/qrpm/compiler.rb
ADDED
@@ -0,0 +1,238 @@
require 'open3'

module Qrpm
  # The main result data are #defs and #deps. #keys and #values are the parsed
  # result of the keys and values of #deps and is used to interpolate strings
  # when values of dependent variables are known
  #
  # #defs is also partitioned into QRPM variables and directories
  class Compiler
    # Root node
    attr_reader :ast # TODO: Rename AST

    # Variable definitions. Map from path to Node
    attr_reader :defs

    # Map from path to list of variables it depends on. Paths with no
    # dependencies have an empty list as value
    attr_reader :deps

    # Dictionary. The dictionary object are compiled into the AST before it is
    # evaluated so the dictionary elements can be refered to with the usual
    # $var notation. #dict is automatically augmented with the default system
    # directory definitions unless :system_dirs is false
    attr_reader :dict

    # Defaults. Map from key to source expression
    attr_reader :defaults

    # If :srcdir is true, a default $srcdir variable is prefixed to all local
    # paths. This is the default. +srcdir:false+ is only used when testing
    def initialize(dict, system_dirs: true, defaults: true, srcdir: true)
      constrain dict, Hash
      @ast = nil
      @defs = {}
      @deps = {}
      @dict = system_dirs ? INSTALL_DIRS.merge(dict) : dict
      @use_system_dirs = system_dirs
      @use_defaults = defaults
      @use_srcdir = srcdir
    end

    # Parse the YAML source to an AST of Node objects and assign it to #ast.
    # Returns the AST
    def parse(yaml)
      # Root node
      @ast = RootNode.new

      # Compile standard directories. Also enter them into @defs so that #analyze
      # can access them by name before @defs is built by #collect_variables
      STANDARD_DIRS.each { |name| @defs[name] = StandardDirNode.new(@ast, name) } if @use_system_dirs

      # Parse yaml node
      yaml.each { |key, value|
        builtin_array = FIELDS[key]&.include?(ArrayNode) || false
        case [key.to_s, value, builtin_array]
        in [/[\/\$]/, _, _]; parse_directory_node(@ast, key, value)
        in [/^#{PATH_RE}$/, String, true]; parse_node(@ast, key, [value])
        in [/^#{PATH_RE}$/, Array, false]; parse_directory_node(@ast, key, value)
        in [/^#{PATH_RE}$/, _, _]; parse_node(@ast, key, value)
        else
          error "Illegal key: #{key.inspect}"
        end
      }

      # Compile and add dictionary to the AST. This allows command line
      # assignments to override spec file values
      dict.each { |k,v| [k, ValueNode.new(ast, k.to_s, Fragment::Fragment.parse(v))] }

      # Add defaults
      DEFAULTS.each { |k,v|
        next if k == "srcdir" # Special handling of $srcdir below
        parse_node(@ast, k, v) if !@ast.key?(k)
      } if @use_defaults

      # Only add a default $srcdir node when :srcdir is true
      parse_node(@ast, "srcdir", DEFAULTS["srcdir"]) if @use_srcdir && !@ast.key?("srcdir")

      @ast
    end

    # Analyze and decorate the AST tree. Returns the AST
    #
    # The various +check_*+ arguments are only used while testing to allow
    # illegal but short expressions by suppressing specified checks in
    # #analyze. The default is to apply all checks
    def analyze(
        check_undefined: true,
        check_mandatory: true,
        check_field_types: true,
        check_directory_types: true)

      # Set package/system directory of standard directories depending on the
      # number of files in the directory
      dirs = @ast.values.select { |n| n.is_a? DirectoryNode }
      freq = dirs.map { |d| (d.key_variables & STANDARD_DIRS) * dirs.size }.flatten.tally
      freq.each { |name, count|
        node = @defs[name]
        if count == 1
          node.setsys
        else
          node.setpck
        end
      }

      # Collect definitions and dependencies
      ast.values.each { |node| collect_variables(node) }

      # Detect undefined variables and references to hashes or arrays
      if check_undefined
        deps.each { |path, path_deps|
          path_deps.each { |dep|
            if !defs.key?(dep)
              error "Undefined variable '#{dep}' in definition of '#{path}'"
            elsif !defs[dep].is_a?(ValueNode)
              error "Can't reference non-variable '#{dep}' in definition of '#{path}'"
            end
          }
        }
      end

      # Check for mandatory variables
      if check_mandatory
        missing = MANDATORY_FIELDS.select { |f| @defs[f].to_s.empty? }
        missing.empty? or error "Missing mandatory fields '#{missing.join("', '")}'"
      end

      # Check types of built-in variables
      if check_field_types
        FIELDS.each { |f,ts|
          ast.key?(f) or next
          ts.any? { |t| ast[f].class <= t } or error "Illegal type of field '#{f}'"
        }
      end

      @ast
    end

    # Compile YAML into a Qrpm object
    def compile(yaml)
      parse(yaml)
      analyze
      Qrpm.new(defs, deps)
    end

    def dump
      puts "Defs"
      indent { defs.each { |k,v| puts "#{k}: #{v.signature}" if v.interpolated? }}
      puts "Deps"
      indent { deps.each { |k,v| puts "#{k}: #{v.join(", ")}" if !v.empty? } }
    end

  private
    # Shorthand
    def error(msg)
      raise CompileError, msg, caller
    end

    def parse_file_node(parent, hash)
      hash = { "file" => hash } if hash.is_a?(String)

      # Check for unknown keys
      unknown_keys = hash.keys - FILE_KEYS
      unknown_keys.empty? or
          error "Illegal file attribute(s): #{unknown_keys.join(", ")}"

      # Check that exactly one of "file", "symlink", or "reflink" is defined
      (hash.keys & %w(file symlink reflink)).size == 1 or
          error "Exactly one of 'file', 'symlink', or 'reflink' should be defined"

      # Check that perm is not used together with symlink or reflink
      (hash.keys & %w(symlink reflink)).empty? || !hash.key?("perm") or
          error "Can't use 'perm' together with 'symlink' or 'reflink'"

      # Normalize perm (YAML parses a literal 0644 as the integer 420!)
      hash["perm"] = sprintf "%04o", hash["perm"] if hash["perm"].is_a?(Integer)

      # Update file with srcdir
      hash["file"] &&= "$srcdir/#{hash["file"]}" if @use_srcdir

      # Create file node and add members
      FileNode.make(parent, hash)
    end

    def parse_directory_node(parent, key, value)
      constrain parent, RootNode
      constrain key, String, Symbol
      constrain value, String, Array, Hash
      dir = DirectoryNode.new(parent, Fragment::Fragment.parse(key.to_s))
      case value
      when String, Hash; parse_file_node(dir, value)
      when Array; value.each { |elem| parse_file_node(dir, elem) }
      end
      dir
    end

    def parse_node(parent, key, value)
      constrain parent, HashNode, ArrayNode
      constrain key, String, Symbol
      constrain value, String, Integer, Float, Hash, Array, nil
      key = key.to_s
      case value
      when String, Integer, Float
        ValueNode.new(parent, key, Fragment::Fragment.parse(value))
      when Hash
        node = HashNode.new(parent, key)
        value.each { |key, value| parse_node(node, key, value) }
        node
      when Array
        node = ArrayNode.new(parent, key)
        value.each.with_index { |value, idx| parse_node(node, idx.to_s, value) }
        node
      when nil
        ValueNode.new(parent, key, nil)
      end
    end

    # Collect variables from keys and values and add them to @defs and @deps
    #
    # Values and array have dependencies on their own while HashNode does not
    # and just forwards to its members
    def collect_variables(node)
      case node
      when StandardDirNode
        # (standard dirs are already added to @defs)
        @deps[node.path] = node.variables
      when ValueNode
        @defs[node.path] = node
        @deps[node.path] = node.variables
      when ArrayNode
        @defs[node.path] = node
        @deps[node.path] = node.traverse.map(&:variables).flatten.uniq
      when HashNode
        node.values.map { |n| collect_variables(n) }.flatten
      end
    end
  end
end
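The new Compiler class is the entry point for turning a QRPM spec into a Qrpm object: #parse builds the AST, #analyze collects and checks variable definitions, and #compile wraps both. Below is a minimal, hypothetical usage sketch; the spec content and the "srcdir" assignment are invented for illustration, and Compiler otherwise relies on constants such as FIELDS, DEFAULTS, and INSTALL_DIRS defined elsewhere in the gem.

    # Hypothetical sketch, assuming the qrpm gem is loaded
    require 'yaml'

    spec = YAML.load(<<~EOS)
      name: my_package_name
      version: 1.2.3
      $bindir: exe/qrpm
    EOS

    compiler = Qrpm::Compiler.new({ "srcdir" => "." })  # dict of command-line assignments
    qrpm = compiler.compile(spec)                        # parse(spec); analyze; Qrpm.new(defs, deps)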
data/lib/qrpm/fragment.rb
ADDED
@@ -0,0 +1,177 @@

# TODO: Create (and use) a Fragment.parse method

module Qrpm
  module Fragment
    # A part of a key or value in the QRPM configuration file
    class Fragment
      # Source string of fragment. The top-level expression fragment has
      # the whole string as its source
      attr_reader :source

      # List of embedded fragments
      attr_reader :fragments

      def initialize(source, fragments = [])
        @source = source
        @fragments = Array(fragments)
      end

      # Return true if this is a NilFragment. False except for NilFragment objects
      def is_nil?() false end

      # List of variables in the fragment (incl. child fragments)
      def variables() fragments.map(&:variables).flatten.uniq end

      # Interpolates fragment using dict and returns the result as a String
      def interpolate(dict) source end

      # Emit source
      def to_s = source

      # String representation of fragment. Used for debug and test
      def signature()
        r = "#{self.class.to_s.sub(/^.*::/, "")}(#{identifier})"
        r += "{" + fragments.map { |f| f.signature }.join(", ") + "}" if !fragments.empty?
        r
      end

      # Parse a JSON value into an Expression source
      def Fragment.parse(value)
        constrain value, String, Integer, Float, true, false, nil
        node = value.nil? ? NilFragment.new : parse_string(value.to_s)
        Expression.new(value.to_s, node)
      end

    protected
      # Used by #signature to display an identifier for a node
      def identifier() source.inspect.gsub('"', "'") end


    private
      # Parse string and return an array of Fragment sources. The string is
      # scanned for $NAME, ${NAME}, $(COMMAND), and ${{NAME}} inside $(COMMAND)
      # interpolations
      #
      # The string is parsed into Fragments to be able to interpolate it
      # without re-parsing
      #
      # Variable names needs to be '{}'-quoted if they're followed by a letter
      # or digit but not '.'. This makes it possible to refer to nested
      # variables without quotes. Eg '/home/$pck.home/dir' will be parsed as
      # '/home/${pck.home}/dir'
      def Fragment.parse_string(string)
        res = []
        string.scan(/(.*?)(\\*)(\$#{PATH_RE}|\$\{#{PATH_RE}\}|\$\(.+\)|$)/)[0..-2].each {
            |prefix, backslashes, expr|
          expr.delete_suffix(".")
          text = prefix + '\\' * (backslashes.size / 2)
          if expr != ""
            if backslashes.size % 2 == 0
              if expr[1] == "(" # $()
                var = CommandFragment.new(expr)
                var.fragments.concat parse_shell_string(var.command)
              else # $NAME or ${NAME}
                var = VariableFragment.new(expr)
              end
            else
              text += expr
            end
          end
          res << TextFragment.new(text) if text != ""
          res << var if var
        }
        res
      end

      # Parse shell command string and return an array of Node sources
      def Fragment.parse_shell_string(string)
        res = []
        string.scan(/(.*?)(\\*)(\$\{\{#{PATH_RE}\}\}|$)/).each { |prefix, backslashes, expr|
          text = prefix + backslashes
          if expr != ""
            if backslashes.size % 2 == 0
              var = CommandVariableFragment.new(expr)
            else
              text += expr
            end
          end
          res << TextFragment.new(text) if text != ""
          res << var if var
        }
        res
      end
    end

    # Text without variables
    class TextFragment < Fragment
    end

    # Nil value
    class NilFragment < Fragment
      def is_nil?() true end
      def initialize() super(nil) end
    end

    # $NAME or ${NAME}
    class VariableFragment < Fragment
      # Name of the variable excluding '$' and '${}'
      attr_reader :name

      def initialize(source)
        super
        @name = source.sub(/^\W+(.*?)\W*$/, '\1')
      end

      def variables() [name] end
      def interpolate(dict) dict[name] end

    protected
      def identifier() name end
    end

    # ${{NAME}} in $(COMMAND) interpolations
    class CommandVariableFragment < VariableFragment
      def interpolate(dict) dict[name] end # FIXME: Proper shell escape
    end

    # Common code for CommandFragment and Expression
    class FragmentContainer < Fragment
      def interpolate(dict) fragments.map { |f| f.interpolate(dict) }.flatten.join end
    end

    # $(COMMAND)
    class CommandFragment < FragmentContainer
      # Command line excl. '$()'
      attr_reader :command

      def initialize(string)
        super(string)
        @command = string[2..-2]
      end

      def interpolate(dict)
        cmd = "set -eo pipefail; #{super}"
        stdout, stderr, status = Open3.capture3(cmd)
        status == 0 or raise Error.new "Failed expanding '$(#{cmd})'\n#{stderr}"
        stdout.chomp
      end
    end

    # A key or value as a list of Fragments
    class Expression < FragmentContainer
      def initialize(...)
        super
        @seen = {}
      end

      def interpolate(...)
        r = super
        !@seen.key?(r.inspect) or raise CompileError.new "Duplicate interpolation: #{r.inspect}"
        @seen[r.inspect] = true
        r
      end
    end
  end
end
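Fragment.parse splits a scalar into TextFragment, VariableFragment, and CommandFragment pieces wrapped in an Expression, so a value can be interpolated repeatedly against different dictionaries without re-parsing. A hypothetical illustration of the API above; the path and dictionary values are invented.

    # Hypothetical sketch, assuming the qrpm gem is loaded
    expr = Qrpm::Fragment::Fragment.parse("/home/$user/bin/$(basename ${{tool}})")
    expr.variables
        # => ["user", "tool"]   (collected recursively, incl. the $() fragment)
    expr.interpolate({ "user" => "alice", "tool" => "/usr/bin/qrpm" })
        # => "/home/alice/bin/qrpm"   (the $(...) part is expanded through the shell)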
data/lib/qrpm/lexer.rb
ADDED
@@ -0,0 +1,41 @@

module Qrpm
  class Lexer
    attr_reader :reldirs
    attr_reader :absdirs

    def initialize(reldirs, absdirs)
      @reldirs = reldirs.map { |p| File.expand_path(p) }
      @absdirs = absdirs.map { |p| File.expand_path(p) }
    end

    def self.load_yaml(file) # Is actually a kind of lexer
      text = IO.read(file).sub(/^__END__.*/m, "")
      YAML.load(text)
    end

    def lex(file)
      yaml = {}
      source = IO.read(file).sub(/^__END__.*/m, "")
      YAML.load(source).each { |k,v|
        if k == "include"
          includes = v.is_a?(String) ? [v] : v
          includes.each { |f| Lexer.load_yaml(search f).each { |k,v| yaml[k] = v } }
        else
          yaml[k] = v
        end
      }
      yaml
    end

    def search(file)
      case file
      when /^\.\.\/(.*)/; reldirs.map { |d| "#{d}/../#$1" }.find { |f| File.exist? f }
      when /^\.\/(.*)/; reldirs.map { |d| "#{d}/#$1" }.find { |f| File.exist? f }
      when /^\//; File.exist?(file) && file
      else
        absdirs.map { |d| "#{d}/#{file}" }.find { |f| File.exist? f }
      end or raise Error, "Can't find #{file}"
    end
  end
end
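Lexer#lex loads a QRPM YAML file, drops everything after a `__END__` marker, and splices in the contents of files named under an `include` key, resolving `./` and `../` includes against the reldirs and bare names against the absdirs given to the constructor. A hypothetical illustration; the file names and contents are invented.

    # Hypothetical sketch, assuming the qrpm gem is loaded and these files exist:
    #   ./common.yml   containing   license: GPL-2.0
    #   ./qrpm.yml     containing   include: ./common.yml
    #                               name: my_package_name
    lexer = Qrpm::Lexer.new(["."], [])    # reldirs, absdirs
    lexer.lex("qrpm.yml")                 # => {"license"=>"GPL-2.0", "name"=>"my_package_name"}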