pk3make 1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pk3make/__init__.py +0 -0
- pk3make/__main__.py +297 -0
- pk3make/modules/doomglob.py +34 -0
- pk3make/modules/doompic.py +354 -0
- pk3make/modules/pk3makefile.py +65 -0
- pk3make/modules/pk3zip.py +84 -0
- pk3make-1.1.dist-info/METADATA +138 -0
- pk3make-1.1.dist-info/RECORD +11 -0
- pk3make-1.1.dist-info/WHEEL +4 -0
- pk3make-1.1.dist-info/entry_points.txt +2 -0
- pk3make-1.1.dist-info/licenses/LICENSE +157 -0
pk3make/__init__.py
ADDED
File without changes
|
pk3make/__main__.py
ADDED
@@ -0,0 +1,297 @@
|
|
1
|
+
#!/bin/env python3
|
2
|
+
|
3
|
+
|
4
|
+
def clean(workdir="build"):
    """Delete the build working directory; a missing directory is not an error."""
    import shutil

    print(f"# Removing workdir '{workdir}'")
    try:
        shutil.rmtree(workdir)
    except FileNotFoundError:
        # Already clean - nothing to do.
        pass
|
12
|
+
|
13
|
+
def prepare(workdir="build"):
    """Create the working directory tree (no-op when it already exists)."""
    import os

    print(f"# Creating WORKDIR '{workdir}'")
    os.makedirs(workdir, exist_ok=True)
|
17
|
+
|
18
|
+
def cr_build_lump(lock, lumpdef, context):
    """Worker routine: convert one source lump and write it into the workdir.

    Runs inside the ThreadPoolExecutor started by build().

    Args:
        lock: shared lock serialising palette-cache population and file writes.
        lumpdef: (LUMPNAME, TYPE, PARAMS) tuple; TYPE selects the converter.
        context: dict with "srcfile", "destfile", "opts" (makefile options)
            and "pdict" (shared palette cache managed by get_palette()).
    """
    import shutil,os,re
    from modules import doompic

    # Stays None for lump types that emit nothing (e.g. "udmf"), which
    # suppresses the write at the bottom.
    bytedump = None

    print(f'## Building {lumpdef[1]} "{context["srcfile"]}"...')

    match lumpdef[1]:
        case "graphic":
            # Graphics are converted against the makefile's default palette.
            pal = get_palette(lock, context["opts"]["palette"], context["opts"], context["pdict"])
            print(f'Converting Picture "{context["srcfile"]}"...')
            # lumpdef[2] carries the offset spec ("x y", "center", "sprite", ...)
            bytedump = doompic.Picture(context['srcfile'], pal, offset=lumpdef[2]).tobytes()
        case "flat" | "fade":
            pal = get_palette(lock, context["opts"]["palette"], context["opts"], context["pdict"])
            print(f'Converting Flat "{context["srcfile"]}"...')
            bytedump = doompic.Flat(context['srcfile'], pal).tobytes()
        case "udmf":
            print(f'UDMF lumps conversion is currently not supported.')
        case "palette":
            print(f'Loading palette "{context["srcfile"]}"')
            # For palette lumps the lump name itself names the palette.
            pal = get_palette(lock, lumpdef[0], context["opts"], context["pdict"])
            print(f'Dumping palette "{context["srcfile"]}"')
            bytedump = pal.tobytes()
        case "tinttab" | "colormap" as paltype:
            # PARAMS format: "<base palette name> [<blend factor 0.0-1.0>]"
            palparams = re.match(r"\s*([\w]+)\s*([0-9]\.[0-9]f?)?", lumpdef[2])
            pal = get_palette(lock, palparams.group(1), context["opts"], context["pdict"])
            print(f'Generating {paltype} "{context["destfile"]}" with {palparams.group(1,2)}')
            match paltype:
                case "tinttab":
                    palweight = float(palparams.group(2))
                    bytedump = pal.tinttab_tobytes(palweight)
                case "colormap":
                    bytedump = pal.colormap_tobytes()
        case "raw":
            # Raw lumps are copied through byte-for-byte.
            with open(context['srcfile'], mode='rb') as s:
                bytedump = s.read()

    if bytedump != None:
        print(f'Writing {lumpdef[1]} "{context["destfile"]}"')
        os.makedirs(os.path.dirname(context["destfile"]), exist_ok=True)
        # Lock held during the write so parallel workers never interleave
        # writes to the same destination file.
        with lock:
            with open(context["destfile"], "wb") as ofile:
                ofile.write(bytedump)
|
62
|
+
|
63
|
+
def get_palette(lock, lumpname, opts, pdict):
    """Return the cached doompic.Palette for `lumpname`, loading it on first use.

    Args:
        lock: shared lock guarding the palette cache.
        lumpname: name of the palette lump to look up in opts["srcdir"].
        opts: makefile options dict (only "srcdir" is read here).
        pdict: shared cache dict mapping lump name -> doompic.Palette.

    Raises:
        doomglob.DuplicateLumpError: if more than one file matches `lumpname`.
        FileNotFoundError: if no file matches `lumpname`.
    """
    from modules import doompic, doomglob
    import os

    # BUGFIX: the original used bare lock.acquire()/lock.release(); any
    # exception raised in between (duplicate or missing palette) left the
    # lock permanently held and deadlocked the remaining workers.  The
    # context manager releases it on every exit path.
    with lock:
        if not lumpname in pdict:
            p_glob = doomglob.find_lump(opts["srcdir"], lumpname)
            if len(p_glob) > 1:
                globlist = [f[1] for f in p_glob]
                raise doomglob.DuplicateLumpError(f"Color palette {lumpname} is not unique.\n{globlist}")
            elif len(p_glob) < 1:
                raise FileNotFoundError(f"Color palette {lumpname} not found.")

            print(f'Caching Palette "{lumpname}"')
            pdict[lumpname] = doompic.Palette(os.path.join(opts["srcdir"], p_glob[0][1]))
        return pdict[lumpname]
|
83
|
+
|
84
|
+
|
85
|
+
def build(makefile):
    """Compile every lumpdef in `makefile` from srcdir into workdir.

    Conversion of individual lumps is fanned out to cr_build_lump() via a
    ThreadPoolExecutor; a Manager-backed lock and dict serialise the shared
    palette cache across workers.
    """
    from modules import doompic, doomglob
    from natsort import natsorted, ns
    import shutil, os, re
    import asyncio, concurrent.futures, multiprocessing

    opts = makefile.get_options()

    print(f'# Building {opts["srcdir"]} => {opts["workdir"]}')

    if opts["palette"] == None:
        print("WARNING: Default color palette is not defined. Compiling graphics will lead to errors.")

    # Manager-backed primitives so the lock/dict can be shared with workers.
    ppx_man = multiprocessing.Manager()
    ppx_lock = ppx_man.Lock()
    palettes = ppx_man.dict()
    ppx_futures = []

    with concurrent.futures.ThreadPoolExecutor() as ppx:

        for lumpdef in makefile.get_lumpdefs():
            match lumpdef[1]:
                case "colormap" | "tinttab" as ltype: # Hardcoded exceptions, eww
                    # Generated lumps have no source file - fabricate a glob entry.
                    lumpglob = doomglob.fake_lump(lumpdef[0])
                case _:
                    lumpglob = doomglob.find_lump(opts["srcdir"], lumpdef[0])

            # natsorted keeps frame sequences (e.g. sprites) in natural order.
            for lump in natsorted(lumpglob, alg=ns.PATH):
                # NOTE(review): lump_dcheck is computed but never used - presumably
                # a leftover duplicate check; confirm before removing.
                lump_dcheck = doomglob.find_lump(opts["srcdir"], lump[0])

                srcfile = opts["srcdir"] + '/' + lump[1]
                destfile = opts["workdir"] + '/' + lump[2]

                # "preserve_filename" in PARAMS keeps the on-disk filename
                # instead of the 8-char Doom lump name.
                params = re.match(r"\s*([\w]+)\s*", lumpdef[2] or '')
                if params != None and "preserve_filename" in params.groups():
                    destfile = opts["workdir"] +'/'+ lump[1]

                # Out-Of-Date check
                if lumpdef[1] in ["colormap", "tinttab"]:
                    # Generated lumps are out of date relative to their base palette.
                    palbase_name = re.match(r"\s*([\w]+)\s*", lumpdef[2]).group(1)
                    ood_target = doomglob.find_lump(opts["srcdir"],palbase_name)
                    srcfile = opts["srcdir"] + '/' + ood_target[0][1]

                # Skip lumps whose output is newer than their source (make-style).
                if os.path.exists(destfile) and os.path.getmtime(srcfile) < os.path.getmtime(destfile):
                    continue
                # Per-file lumpdef: the globbed lump name replaces the pattern.
                fake_lumpdef = (lump[0],lumpdef[1],lumpdef[2])
                ppx_context = {
                    "srcfile" : srcfile,
                    "destfile" : destfile,
                    "opts" : opts,
                    "pdict": palettes,
                }
                ppx_futures.append( ppx.submit(cr_build_lump, ppx_lock, fake_lumpdef, ppx_context ) )
                #cr_build_lump(ppx_lock, fake_lumpdef, ppx_context ) # For testing single-threadedly

    # Did anything actually work?  f.result() re-raises any worker exception.
    for f in ppx_futures:
        result = f.result()
    return
|
146
|
+
|
147
|
+
def pack(makefile):
    """Assemble the built workdir into the destination PK3 (deterministic zip).

    The archive is built in memory and committed to opts["destfile"] in one
    write at the end; its MD5 is printed for reproducibility checks.

    NOTE(review): reads the module-global `args` (set in the __main__ block)
    for the --verbose flag - calling pack() without that global set raises
    NameError; confirm whether verbosity should become a parameter.

    Raises:
        FileNotFoundError: if the makefile defines no destfile.
    """
    from modules import pk3zip, doomglob
    from natsort import natsorted, ns
    import io, os, hashlib, pathlib, re

    opts = makefile.get_options()
    if opts["destfile"] == None:
        raise FileNotFoundError("destfile is not defined")

    print("# Packing")

    # Keep PK3 file in memory to avoid Windows' file access locks
    pk3buf = io.BytesIO()

    for lumpdef in makefile.get_lumpdefs():

        if args.verbose:
            print(f'# Packing lumpdef {lumpdef}')

        match lumpdef[1]:
            case "marker":
                # Markers are empty lumps (e.g. S_START/S_END separators).
                if args.verbose:
                    print(f"## Adding marker {lumpdef[0]}")
                with pk3zip.PK3File(pk3buf, "a") as pk3:
                    pk3.writestr(lumpdef[0], "")
            case _:
                params = re.match(r"\s*([\w]+)\s*", lumpdef[2] or '')
                # Search by directory + 8-char Doom lump name by default...
                searchname = os.path.dirname(lumpdef[0])+'/'+pathlib.Path(lumpdef[0]).stem[:8]
                # ...unless the lumpdef asked to keep the full filename.
                if params != None and "preserve_filename" in params.groups():
                    searchname = lumpdef[0]

                with pk3zip.PK3File(pk3buf, "a") as pk3:

                    wf_glob = doomglob.find_lump(opts["workdir"], searchname)
                    wf_glob = natsorted(wf_glob, alg=ns.PATH)

                    #print(f'\nGLOB: {wf_glob}\n')
                    #print(f'NAMELIST: {pk3.namelist()}\n')

                    # Drop entries already present in the archive (glob
                    # patterns of different lumpdefs may overlap).
                    wf_unique = [x for x in wf_glob if x[2].lstrip('/').rstrip('/') not in pk3.namelist() ]
                    if params != None and "preserve_filename" in params.groups():
                        wf_unique = [x for x in wf_glob if x[1].lstrip('/').rstrip('/') not in pk3.namelist() ]

                    #print(f'\nUNIQUE GLOB: {wf_unique}\n')

                    for lump,srcfile,arcpath in wf_unique:
                        wf_path = opts["workdir"] + '/' + srcfile

                        if params != None and "preserve_filename" in params.groups():
                            wf_path = opts["workdir"]+'/'+srcfile
                            # Preserve the original basename inside the archive.
                            arcpath = os.path.dirname(arcpath)+'/'+os.path.basename(srcfile)

                        if args.verbose:
                            print(f'## Packing lump {arcpath}')

                        pk3.write(wf_path, arcpath)

    # Commit in-memory PK3 file to disk

    if not os.path.isdir(os.path.dirname(opts["destfile"])):
        print(f'## Creating directory {os.path.dirname(opts["destfile"])}')
        os.mkdir(os.path.dirname(opts["destfile"]))

    # Recreate rather than overwrite so stale archive contents cannot survive.
    if os.path.isfile(opts["destfile"]):
        print(f'## Deleting {opts["destfile"]} for recreation')
        os.remove(opts["destfile"])

    with open(opts["destfile"], "wb") as f:
        print(f'## Writing {opts["destfile"]}')
        f.write(pk3buf.getvalue())

    md5hash = hashlib.md5(pk3buf.getvalue())
    print(f'\nMD5 Hash of {opts["destfile"]}: {md5hash.hexdigest()}')

    return
|
225
|
+
|
226
|
+
def main():
    """Entry point: dispatch the requested build step(s) on the PK3Makefile.

    NOTE(review): relies on the module-global `args` produced by argparse in
    the __main__ block (verb, makefile, target, verbose) - main() is not
    callable without it.
    """
    from modules import pk3makefile

    # Step switches
    step_prepare = False
    step_build = False
    step_pack = False

    # No verb (or "all") runs the full prepare -> build -> pack pipeline.
    match args.verb:
        case "prepare":
            step_prepare = True
        case "build":
            step_build = True
        case "pack":
            step_pack = True
        case None | "all":
            step_prepare = True
            step_build = True
            step_pack = True

    if args.verb == "clean":
        clean()

    # Default makefile path; overridable by the positional argument.
    pk3mf_name = "./PK3Makefile"
    if args.makefile != None:
        pk3mf_name = args.makefile
    pk3mf = pk3makefile.PK3Makefile(pk3mf_name)

    print(f"MAKEOPTS: = {pk3mf.get_options()}")

    # TODO: Add resolve for missing dependencies
    if step_prepare:
        prepare(pk3mf.get_options("workdir"))
    if step_build:
        # `pk3make build TARGET` restricts the build to matching lumpdefs.
        if args.verb == "build" and args.target != None:
            pk3mf = pk3mf.filter_lumpdefs(args.target)
        build(pk3mf)
    if step_pack:
        pack(pk3mf)

    return
|
267
|
+
|
268
|
+
if __name__ == "__main__":
    import argparse
    import pathlib

    # Shell argument API
    # NOTE(review): `verbs` is not wired into argparse (subparsers are added
    # explicitly below, and 'prepare'/'all' have no subparser) - presumably
    # documentation/leftover; confirm.
    verbs = [
        'clean', # Delete workdir
        'prepare', # Make workdir tree etc.
        'build', # Convert formats & copy files to workdir according to METAINFO
        'pack', # Pack existing workdir into pk3. (May be used for music packs?)
        'all', # Do everything
    ]
    ap_main = argparse.ArgumentParser(
        prog='pk3make',
        description='PK3Make - Make for (Weissblatt) PK3s',
        epilog='Type `pk3make --help` for more info.')
    # dest='verb' makes the chosen subcommand available as args.verb.
    ap_sub = ap_main.add_subparsers(title='Build steps', dest='verb', metavar="")

    ap_clean = ap_sub.add_parser('clean', help='Delete the build directory')
    ap_build = ap_sub.add_parser('build', help='Compile assets into the build directory')
    ap_pack = ap_sub.add_parser('pack', help='Assemble a PK3 file from the build directory')

    ap_main.add_argument('-v', '--verbose' , action='store_true', help='Verbose log output')
    # Optional build target restricting which lumpdefs get compiled.
    ap_build.add_argument('target', nargs='?', help='Target LUMPDEF')

    ap_main.add_argument('makefile', nargs='?', const='./PK3Makefile', help='PK3Makefile to reference')

    # `args` is read as a module-global by main() and pack().
    args = ap_main.parse_args()

    main()
|
@@ -0,0 +1,34 @@
|
|
1
|
+
class DuplicateLumpError(Exception):
    """Raised when a lump that must be unique matches more than one source file."""
    pass
|
4
|
+
|
5
|
+
|
6
|
+
def find_lump(srcdir, lumpname):
|
7
|
+
import os, glob
|
8
|
+
import pathlib
|
9
|
+
out = list()
|
10
|
+
|
11
|
+
if srcdir == None:
|
12
|
+
raise FileNotFoundError(f'doomglob.find_lump(): No srcdir given')
|
13
|
+
if lumpname == None:
|
14
|
+
raise FileNotFoundError(f'doomglob.find_lump(): No lumpname given')
|
15
|
+
|
16
|
+
#for path in glob.glob(searchstr, root_dir=srcdir):
|
17
|
+
for path in glob.iglob('**/'+lumpname+'*', root_dir=srcdir, recursive=True):
|
18
|
+
posixpath = pathlib.Path(path).as_posix()
|
19
|
+
doomname = pathlib.Path(path).stem[:8]
|
20
|
+
arcpath = (os.path.dirname(posixpath)+'/'+doomname).lstrip('/').rstrip('/')
|
21
|
+
if pathlib.Path(srcdir.rstrip('/')+'/'+posixpath).is_file(): # Filter out directories
|
22
|
+
out.append( (doomname, posixpath, arcpath) )
|
23
|
+
|
24
|
+
# Deduplicate out
|
25
|
+
out = [x for n,x in enumerate(out) if x not in out[:n] ]
|
26
|
+
|
27
|
+
return out # List of tuples (LUMPNAME, PATH, ARCPATH)
|
28
|
+
|
29
|
+
def fake_lump(lumpname):
    """Fabricate a one-entry glob result for lumps that are generated
    instead of read from disk (COLORMAPs, TINTTABs, ...).

    Returns the same (LUMPNAME, PATH, ARCPATH) tuple shape as find_lump().
    """
    from pathlib import Path

    short = Path(lumpname).stem[:8]
    return [(short, lumpname, '/' + short.lstrip('/'))]
|
@@ -0,0 +1,354 @@
|
|
1
|
+
class Palette:
    """A 256-color Doom palette built from a source image.

    The image is resampled to 16x16 (= 256 colors) and each cell becomes one
    palette entry.  A reverse RGB->index lookup table accelerates
    rgb2index() for exact matches.
    """

    def __init__(self, filename):
        import os
        from modules.doomglob import find_lump, DuplicateLumpError
        from PIL import Image

        # Palette entries in index order: dicts with "id", "r", "g", "b".
        self.colors = []
        self.color_lookup = {} # Color LUT to speed up rgb2index (before estimate: 25:16,32)

        # Colormath-based code commented out for future reference
        # Euclidean distance is 50x faster
        """
        from colormath2.color_objects import sRGBColor, LabColor
        from colormath2.color_conversions import convert_color
        from colormath2.color_diff import delta_e_cie2000
        """

        with Image.open(filename).convert("RGB") as img:
            # When it don't fit we make it fit: force a 16x16 = 256-entry grid.
            rez_i = img.resize( (16,16), Image.Resampling.NEAREST)

            # Get pixels into self.colors
            width, height = rez_i.size # should be (16,16)
            for y in range(height):
                for x in range(width):
                    pixel = rez_i.getpixel((x,y))
                    #self.colors.append(pixel) # Tuple (R,G,B)

                    # Precalc color conversions to speed up rgb2index
                    # (colormath variant abandoned - see comment above)
                    #px_srgb = sRGBColor(pixel[0], pixel[1], pixel[2], is_upscaled=True) # COLORMATH STUB
                    #px_cielab = convert_color(px_srgb, LabColor) # COLORMATH STUB
                    color_o = {
                        "id": y*height+x,
                        #"rgb": px_srgb, # COLORMATH STUB
                        "r": pixel[0],
                        "g": pixel[1],
                        "b": pixel[2],
                        #"cielab": px_cielab, # COLORMATH STUB
                    }

                    # Pack RGB into a single 24-bit int for the lookup key.
                    rgbcolor = (pixel[0] << 16) | (pixel[1] << 8) | (pixel[2])
                    self.colors.append(color_o) # Tuple (R,G,B)
                    self.color_lookup[rgbcolor] = color_o["id"]

    def rgb2index(self, color: tuple):
        """Return the palette index closest to `color` (an RGB/RGBA tuple).

        Exact matches resolve via the O(1) lookup table; otherwise a linear
        scan picks the entry with the smallest Euclidean RGB distance.
        """
        # Colormath-based code commented out for future reference
        # Euclidean distance is 50x faster
        """
        from colormath2.color_objects import sRGBColor, LabColor
        from colormath2.color_conversions import convert_color
        from colormath2.color_diff import delta_e_cie2000
        """
        # Hot path O(1): Color matches exactly (most common if you know what you're doing)
        rgbcolor = (int(color[0]) << 16) | (int(color[1]) << 8) | int(color[2])
        if rgbcolor in self.color_lookup.keys():
            #print(f"Converting {color} => #{rgbcolor:X}")
            return self.colors[self.color_lookup[rgbcolor]]["id"]

        # Cold path: Linear search for the closest color
        #color_lab = convert_color(sRGBColor(color[0], color[1], color[2], is_upscaled=True), LabColor)
        min_delta_e = float('inf')
        min_idx = -1
        for icolor in self.colors:
            """
            #print(f"ICOLOR {index}: {icolor}")
            #icolor_lab = convert_color(sRGBColor(icolor[0], icolor[1], icolor[2], is_upscaled=True), LabColor)
            delta_e = delta_e_cie2000(color_lab, icolor["cielab"])
            """
            # Simple euclidean distance
            delta_e = ( \
                (color[0]-icolor['r'])**2 + \
                (color[1]-icolor['g'])**2 + \
                (color[2]-icolor['b'])**2 \
            )**(1/2)
            if delta_e < min_delta_e:
                min_delta_e = delta_e
                min_idx = icolor["id"]
                if delta_e == 0: # Exact match, no need to continue
                    break
        #print(f"Found color {min_idx}:{self.colors[min_idx]} for image color {color}")
        return min_idx

    def generate_colormap(self):
        # NOTE(review): stub - does nothing; colormap generation lives in
        # colormap_tobytes().
        return

    def tobytes(self):
        """Serialize as a Doom PLAYPAL lump: 14 pages of 256 RGB triplets.

        Page 0 is the palette itself; pages 1-3 approximate the whiteout
        (pickup flash) palettes, page 4 the radiation-suit palette, page 5 an
        inverted palette; remaining pages fall back to a grayscale ramp.
        """
        # Convert self.colors to Doom Palette
        # Return as IOBytes for saving
        exbytes = bytearray()
        for page in range(14):
            for i,pcolor in enumerate(self.colors):
                # Default unused palette: Grayscale
                r = 255-i
                g = 255-i
                b = 255-i

                if page == 0: # Regular palette
                    r = pcolor["r"]
                    g = pcolor["g"]
                    b = pcolor["b"]
                elif 0 < page < 4: # Whiteout palettes => 75% white tint
                    r = pcolor["r"] + (255 - pcolor["r"]) * 0.75
                    g = pcolor["g"] + (255 - pcolor["g"]) * 0.75
                    b = pcolor["b"] + (255 - pcolor["b"]) * 0.75
                elif page == 4: # Nuke palette => 75% white tint + g,b = 113
                    r = pcolor["r"] + (255 - pcolor["r"]) * 0.75
                    g = 113
                    b = 113
                elif page == 5: # Inverted palette at 75% brightness
                    r = (255 - pcolor["r"]) * 0.75
                    g = (255 - pcolor["g"]) * 0.75
                    b = (255 - pcolor["b"]) * 0.75
                # Add color idx.
                # NOTE: the int() cast is janky but hopefully works
                exbytes.append(int(r))
                exbytes.append(int(g))
                exbytes.append(int(b))
        return bytes(exbytes)

    def colormap_tobytes(self):
        """Serialize a Doom COLORMAP: 32 brightness levels x 256 indices.

        Each output byte is the palette index nearest to the source color
        scaled by (1 - level/32).
        """
        """
        from colormath.color_objects import HSVColor, sRGBColor
        from colormath.color_conversions import convert_color
        """

        out = bytearray()
        # Y/X coordinate loop because loop order matters
        for c,v in [(c,v) for v in range(32) for c in range(256)]:

            """
            input_hsv = convert_color( sRGBColor( \
                self.colors[c][0], \
                self.colors[c][1], \
                self.colors[c][2] \
                , HSVColor)
            output_hsv = ( input_hsv[0], input_hsv[1], input_hsv[2] * (1-v/32) )
            output_rgb = sRGBColor()

            out += self.rgb2index(output_rgb.value_tuple())
            """

            # Simple RGB squash for now
            # TODO: Add HSV/LAB-conversion after testing
            brightness = ( \
                self.colors[c]["r"] * (1-(v/32)), \
                self.colors[c]["g"] * (1-(v/32)), \
                self.colors[c]["b"] * (1-(v/32)) \
            )
            out += self.rgb2index(brightness).to_bytes(1)
        return out

    def tinttab_tobytes(self, factor:float):
        """Serialize a 256x256 TINTTAB blend table.

        Entry (x, y) is the palette index nearest to the blend of color x and
        color y, weighted by `factor` (0.0 = all x, 1.0 = all y).

        Raises:
            RuntimeError: if `factor` is not a float in [0, 1].
        """
        if type(factor) != float or not (0 <= factor <= 1):
            raise RuntimeError(f"Invalid TINTTAB factor {factor}")

        out = bytearray()
        for x,y in [(x,y) for x in range(256) for y in range(256)]:
            tintcolor = ( \
                self.colors[x]["r"] * (1-factor) + self.colors[y]["r"] * factor, \
                self.colors[x]["g"] * (1-factor) + self.colors[y]["g"] * factor, \
                self.colors[x]["b"] * (1-factor) + self.colors[y]["b"] * factor \
            )
            out += self.rgb2index(tintcolor).to_bytes(1)
        return out
|
165
|
+
|
166
|
+
|
167
|
+
class Flat():
    """A Doom 'flat': a raw row-major dump of palette indices, one byte per pixel."""

    def __init__(self, pngfile: str, palette: Palette):
        from PIL import Image

        self.pixelbuf = bytearray()
        with Image.open(pngfile).convert("RGBA") as img:
            # Record the source dimensions for get_size().
            self.width, self.height = img.size
            # Non-square images are accepted deliberately: FADE lumps need them.
            for row in range(self.height):
                for col in range(self.width):
                    rgba = img.getpixel((col, row))
                    # A flat is nothing but the paletted pixels in reading order.
                    self.pixelbuf += palette.rgb2index(rgba).to_bytes(1, "little")

    def get_size(self):
        """Return the (width, height) of the source image."""
        return (self.width, self.height)

    def tobytes(self):
        """Return the flat as an immutable bytes object."""
        return bytes(self.pixelbuf)
|
193
|
+
|
194
|
+
class Picture():
    """A Doom 'picture' lump: column-oriented paletted image with transparency.

    Pixels are stored as palette indices with -1 marking transparency; the
    tobytes() encoder packs runs of opaque pixels into 'posts' per column,
    including tall-patch (>254px) support.
    """

    def __init__(self, pngfile: str, palette: Palette, **kwargs):
        from PIL import Image

        self.palette = palette # Prolly unused but can't hurt

        # Row-major list of palette indices; -1 = transparent pixel.
        self.pixelbuf = []
        with Image.open(pngfile).convert("RGBA") as img:

            # Get pixels into self.pixelbuf
            self.width, self.height = img.size # should be
            for y in range(self.height):
                for x in range(self.width):
                    pixel = img.getpixel((x,y))
                    # Save picture as indexed image (-1 = transparent)
                    if pixel[3] == 0:
                        self.pixelbuf.append( -1 )
                    else:
                        self.pixelbuf.append( palette.rgb2index(pixel) )

        # Optional keyword: offset spec string ("x y", "center", "sprite", ...)
        if "offset" in kwargs:
            new_offset = self.set_offset(kwargs["offset"])

    def set_offset(self, offset: str):
        """Parse an offset spec and store it as (self.offsetX, self.offsetY).

        Accepted forms: None/blank (-> 0 0), "X Y" integer pair, "center",
        or "sprite" (horizontally centered, 4px above the bottom).

        Returns the resulting (offsetX, offsetY) tuple; raises Exception on
        an unrecognized spec.
        """
        import re
        if offset == None or re.match(r"^\s*$", offset):
            self.offsetX = 0
            self.offsetY = 0
            return (self.offsetX, self.offsetY)

        # Explicit "X Y" integer pair?
        tokens = re.match(r"\s*(-?[0-9]+)\s+(-?[0-9]+)\s*", offset)
        if tokens:
            self.offsetX = int(tokens.group(1))
            self.offsetY = int(tokens.group(2))
            return (self.offsetX, self.offsetY)

        # Otherwise a single keyword.
        tokens = re.match(r"\s*([^\s]+)\s*", offset)
        if not tokens:
            raise Exception(f'Offset "{offset}" not supported')

        match tokens.group(1):
            case "": # No offset given - default to "0 0"
                self.offsetX = 0
                self.offsetY = 0
            case "center":
                self.offsetX = int(self.width/2)
                self.offsetY = int(self.height/2)
            case "sprite":
                self.offsetX = int(self.width/2)
                self.offsetY = int(self.height-4)
            case _:
                raise Exception(f'Offset "{offset}" not supported')
        return (self.offsetX, self.offsetY)

    def tobytes(self):
        """Encode the image as a Doom picture lump and return it as bytes."""
        # === Generate picture lump ===
        #
        # [HEADER]
        # uint16_t LE width
        # uint16_t LE height
        # uint16_t LE offsetX
        # uint16_t LE offsetY
        # uint32_t[width] LE toc
        # -----------------------------
        # COLUMNS are arrays of POSTS separated by 0xFF
        # [POSTS]
        # uint8_t LE topdelta
        # uint8_t LE length
        # uint8_t LE padding
        # uint8_t* LE pixels
        # uint8_t LE padding

        columns = bytearray()
        # --- Create Header ---
        # NOTE: All integers in a Picture header are LE uint16_t
        out = bytearray( \
            self.width.to_bytes(2, byteorder='little') + \
            self.height.to_bytes(2, byteorder='little') + \
            self.offsetX.to_bytes(2, byteorder='little', signed=True) + \
            self.offsetY.to_bytes(2, byteorder='little', signed=True) \
        )

        # Iterate Column-wise. Yes, Doom picture are column-oriented
        toc = bytearray() # Table of Columns
        t_fseek = len(out) + 4 * self.width # whXY + column TOC
        for x in range(self.width):
            t_cdata = bytearray() # Column data
            t_pdata = bytearray() # Setup/Reset Post data
            t_insidepost = False
            # Post offset markers
            t_topdelta = -1
            t_topoffset = -1
            t_olddelta = -1
            t_postheight = 0
            dbg_postcounter = 0
            for y in range(self.height):

                ## Tall patch support ##
                # Columns taller than 254px need relative topdeltas beyond
                # this boundary, signalled by a fake post with topdelta 0xFE.

                if y == 254: # Tall patch border
                    if t_insidepost:
                        # Abort post now, restart as usual
                        t_cdata.extend(t_postheight.to_bytes(1, byteorder="little")) # Post length
                        t_cdata.extend(b'\x00') # Unused padding
                        t_cdata.extend(t_pdata) # Post data
                        t_cdata.extend(b'\x00') # Unused padding
                        t_pdata = bytearray() # Reset post data

                    # Insert Fake post
                    t_cdata.extend(b'\xfe\x00\x00\x00')
                    t_topdelta = y # Flush topdelta
                    t_postheight = 0
                    t_insidepost = False

                ## Actual algorithm ##
                # State machine over the column's pixels: open a post on the
                # first opaque pixel, close it on transparency or when the
                # post reaches its 254-pixel maximum.

                current_pixel = self.pixelbuf[y*self.width+x]
                if (current_pixel == -1 or t_postheight == 254) and t_insidepost: # Post END
                    t_cdata.extend(t_postheight.to_bytes(1, byteorder="little")) # Post length
                    t_cdata.extend(b'\x00') # Unused padding
                    t_cdata.extend(t_pdata) # Post data
                    t_cdata.extend(b'\x00') # Unused padding
                    t_pdata = bytearray() # Reset post data
                    t_insidepost = False
                if current_pixel != -1 and not t_insidepost: # Post START

                    # Tall patch tracking
                    t_olddelta = t_topdelta
                    t_topdelta = y
                    # Past the tall-patch border, topdelta is relative to the
                    # previous post instead of absolute.
                    t_topoffset = y if y < 254 else t_topdelta - t_olddelta

                    # Start new post
                    t_postheight = 1
                    t_cdata.extend((t_topoffset&0xFF).to_bytes(1, byteorder="little"))
                    t_pdata.extend(current_pixel.to_bytes(1, byteorder="little"))
                    t_insidepost = True
                elif current_pixel != -1 and t_insidepost:
                    t_pdata.extend(current_pixel.to_bytes(1, byteorder="little"))
                    t_postheight = t_postheight + 1

            if t_insidepost: # Finish last post if End Of Column
                t_cdata.extend(t_postheight.to_bytes(1, byteorder="little")) # Post length
                t_cdata.extend(b'\x00') # Unused padding
                t_cdata.extend(t_pdata) # Post data
                t_cdata.extend(b'\x00') # Unused padding
            t_cdata.extend(b'\xff') # Column Terminator

            columns.extend(t_cdata) # Save partitioned column whole

            # Add TOC column offset
            toc.extend(t_fseek.to_bytes(4, byteorder='little'))
            t_fseek = t_fseek+len(t_cdata)

        out.extend(toc) # Finish off header
        out.extend(columns) # Write column data block

        return bytes(out)
|
@@ -0,0 +1,65 @@
|
|
1
|
+
class PK3MakeConfigurationError(Exception):
    """Raised when a PK3Makefile contains an invalid or missing build option."""
    pass
|
4
|
+
|
5
|
+
class PK3MakeDependencyError(Exception):
    """Raised when a build step's dependencies cannot be satisfied."""
    pass
|
8
|
+
|
9
|
+
class PK3Makefile():
    """Parsed PK3Makefile: build options ("?key: value" lines) plus lumpdefs."""

    def __init__(self, filename):
        """Parse `filename` line by line into self.options and self.lumps."""
        import re

        # Recognized build options; anything else on a "?..." line is ignored.
        self.options = {
            "srcdir": None,
            "workdir": None,
            "destfile": None,
            "palette": None,
        }

        self.lumps = []

        # List of tuples ( LUMPNAME, TYPE, OFFSET )
        # OFFSET may either be an integer tuple or a string

        with open(filename) as file:
            for line in file:
                re_buildopt = r"^\?([^\s]*): ([^\s]*)"
                re_lumpdef = r"^([^\s]+)\s*([^\s]+)(?:\s*(.+))?"

                workline = re.sub(r"#.*","", line) # Clean out comments
                tokens = re.match(re_buildopt, workline)
                if tokens: # Is it a Buildopt?
                    match tokens.group(1):
                        case "srcdir" | "workdir" | "destfile" | "palette" as cmd:
                            # Trailing slash stripped so paths concatenate cleanly.
                            self.options[cmd] = tokens.group(2).rstrip('/')
                tokens = re.match(re_lumpdef, workline)
                if tokens: # Is it a Lumpdef?
                    match tokens.group(2):
                        case "flat" | "fade" | "graphic" | "raw" | "colormap"| "tinttab" | "palette" | "marker" as cmd:
                            self.lumps.append( tokens.group(1,2,3) )
                        case "udmf":
                            print(f'Lump type "udmf" is not supported yet. Ignored')
                        case _ as lumptype:
                            print(f'Invalid lumptype "{lumptype}". Ignored')

    def get_options(self, option=None):
        """Return the whole options dict, or one option's value when named."""
        if option == None:
            return self.options
        else:
            return self.options[option]

    def get_lumpdefs(self):
        """Return the list of (LUMPNAME, TYPE, PARAMS) lumpdef tuples."""
        return self.lumps

    def filter_lumpdefs(self, pattern):
        """Keep only lumpdefs whose name matches the shell glob `pattern`.

        Mutates self.lumps in place and returns self for chaining.
        """
        import re,fnmatch

        glob_re = re.compile(fnmatch.translate(pattern))

        self.lumps = [x for x in self.lumps if glob_re.match(x[0])]

        return self
|
@@ -0,0 +1,84 @@
|
|
1
|
+
import os, io, re
|
2
|
+
import zipfile, pathlib
|
3
|
+
#from binaryornot.check import is_binary
|
4
|
+
|
5
|
+
class PK3File(zipfile.ZipFile):
|
6
|
+
"""This class is basically a deterministic ZIP file.
|
7
|
+
Four attributes need to be controlled:
|
8
|
+
1. Order of files follows programmatic order
|
9
|
+
2. Timestamp is set to 1980-01-01 00:00:00
|
10
|
+
3. All files are set to permissions (d)rw-rw-rw-
|
11
|
+
4. Create system is set to 03/Unix
|
12
|
+
"""
|
13
|
+
|
14
|
+
### Inherited/overwritten ZipFile functions ###
|
15
|
+
def mkdir(self, zinfo_or_directory, mode=511):
|
16
|
+
# Force metadata
|
17
|
+
if isinstance(zinfo_or_arcname, zipfile.ZipInfo):
|
18
|
+
zinfo = zinfo_or_directory
|
19
|
+
else:
|
20
|
+
zinfo = zipfile.ZipInfo(filename=(zinfo_or_directory.rstrip('/')+'/'))
|
21
|
+
|
22
|
+
# Force deterministic metadata
|
23
|
+
zinfo.date_time = (1980, 1, 1, 0, 0, 0)
|
24
|
+
zinfo.create_system = 3
|
25
|
+
zinfo.external_attr = (0o40744 << 16) | 0x10 # Octal encoding for drwxr--r--
|
26
|
+
|
27
|
+
# Mode is overwritten to achieve determinism
|
28
|
+
zipfile.ZipFile.mkdir(self, zinfo, 511)
|
29
|
+
|
30
|
+
def write(self, filename, arcname, compress_type=None, compresslevel=None):
    """Add the file *filename* to the archive under *arcname*.

    Known plain-text lump names (SOCs, Lua scripts, TEXTURES, S_SKIN,
    etc.) are read as UTF-8 and have CRLF line breaks normalized to LF so
    archive contents stay deterministic across platforms; every other
    file is copied through as raw bytes. The entry is ultimately written
    via ``self.writestr()``.
    """
    # Basename of the source file; lump-type matching ignores the path.
    nodename = pathlib.Path(filename).name
    zinfo = zipfile.ZipInfo.from_file(filename, arcname)

    # Force deterministic metadata.
    # NOTE(review): this ZipInfo is not forwarded to writestr() below,
    # which rebuilds its own entry from *arcname* — kept for parity with
    # the original control flow; confirm whether it can be dropped.
    zinfo.create_system = 3
    zinfo.date_time = (1980, 1, 1, 0, 0, 0)
    zinfo.external_attr = 0o0744 << 16  # Octal encoding for -rwxr--r--
    if zinfo.is_dir():
        zinfo.external_attr = (0o40744 << 16) | 0x10  # Octal encoding for drwxr--r--

    # Lump names treated as plain text. Raw strings: '\.' inside a plain
    # str literal is an invalid escape sequence (SyntaxWarning on the
    # Python >= 3.12 this package requires).
    p = re.compile(r'(SOC_.*)|'
                   r'(.*\.soc)|'
                   r'(TEXTURES)|'
                   r'(ANIMDEFS)|'
                   r'(MUSICDEF)|'
                   r'(L_.*)|'
                   r'(lua_.*)|'
                   r'(.*\.lua)|'
                   r'(.*\.txt)|'
                   r'(S_SKIN)|'
                   r'(SPRTINFO)')
    if p.match(nodename):
        # Force LF line breaks for text files
        with open(filename, mode='r', encoding='utf-8') as f:
            raw_file = f.read().replace('\r\n', '\n')
    else:
        with open(filename, mode='rb') as f:
            raw_file = f.read()

    self.writestr(arcname, raw_file, compress_type, compresslevel)

    return
|
70
|
+
|
71
|
+
def writestr(self, zinfo_or_arcname, data, compress_type=None, compresslevel=None):
|
72
|
+
if isinstance(zinfo_or_arcname, zipfile.ZipInfo):
|
73
|
+
zinfo = zinfo_or_arcname
|
74
|
+
else:
|
75
|
+
zinfo = zipfile.ZipInfo(filename=zinfo_or_arcname)
|
76
|
+
|
77
|
+
# Force deterministic metadata
|
78
|
+
zinfo.filename = zinfo.filename.lstrip('/')
|
79
|
+
zinfo.date_time = (1980, 1, 1, 0, 0, 0)
|
80
|
+
zinfo.create_system = 3
|
81
|
+
zinfo.external_attr = 0o0744 << 16 # Octal encoding for -rwxr--r--
|
82
|
+
|
83
|
+
# Force LF line breaks to guarantee determinism
|
84
|
+
zipfile.ZipFile.writestr(self, zinfo, data, compress_type, compresslevel)
|
@@ -0,0 +1,138 @@
|
|
1
|
+
Metadata-Version: 2.4
|
2
|
+
Name: pk3make
|
3
|
+
Version: 1.1
|
4
|
+
Summary: Build system for Weissblatt PK3 files
|
5
|
+
Project-URL: Homepage, https://github.com/liquidunderground/pk3make
|
6
|
+
Project-URL: GitHub, https://github.com/liquidunderground/pk3make
|
7
|
+
Project-URL: Issues, https://github.com/liquidunderground/pk3make/issues
|
8
|
+
Author-email: Zibon Badi <zibonbadi@gmail.com>
|
9
|
+
License-Expression: LGPL-3.0-or-later
|
10
|
+
License-File: LICENSE
|
11
|
+
Classifier: Environment :: Console
|
12
|
+
Classifier: License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)
|
13
|
+
Classifier: Operating System :: OS Independent
|
14
|
+
Classifier: Programming Language :: Python :: 3
|
15
|
+
Classifier: Topic :: Software Development :: Build Tools
|
16
|
+
Requires-Python: >=3.12
|
17
|
+
Requires-Dist: colormath2
|
18
|
+
Requires-Dist: natsort
|
19
|
+
Requires-Dist: pillow
|
20
|
+
Description-Content-Type: text/markdown
|
21
|
+
|
22
|
+
# PK3Make
|
23
|
+
|
24
|
+
"Make" for Weissblatt PK3 files
|
25
|
+
|
26
|
+
## Installation
|
27
|
+
|
28
|
+
1. Set up a [virtual environment](https://docs.python.org/3/library/venv.html)
|
29
|
+
2. Install dependencies `pip install -r requirements.txt`
|
30
|
+
|
31
|
+
## How to use
|
32
|
+
|
33
|
+
PK3Make supplies multiple subcommands. To get an overview type:
|
34
|
+
|
35
|
+
./pk3make.py --help
|
36
|
+
|
37
|
+
To fully build your project akin to a makefile, simply type:
|
38
|
+
|
39
|
+
./pk3make.py make ./PK3Makefile # Default PK3Makefile
|
40
|
+
|
41
|
+
### Notes, Tips and Caveats
|
42
|
+
|
43
|
+
All text files are assumed to be UTF-8 encoded. PK3Make will automatically attempt to convert CRLF to LF newlines but using LF newlines is recommended.
|
44
|
+
|
45
|
+
PK3Make will not find hidden files. To avoid `DuplicateLumpError`s, place
|
46
|
+
your workfiles in a hidden directory, for example `.local/`.
|
47
|
+
|
48
|
+
Should your project contain custom palettes, place its corresponding
|
49
|
+
LUMPDEF before any `graphic`, `flat` or `fade`. That way, PK3Make can
|
50
|
+
cache your palettes and speed up build times by reducing thread idle.
|
51
|
+
|
52
|
+
|
53
|
+
## But why?
|
54
|
+
|
55
|
+
To put it bluntly: No other tools suited Weissblatt.
|
56
|
+
|
57
|
+
Although the PK3 specification for Weissblatt's engine is based on
|
58
|
+
[ZDoom PK3](https://zdoom.org/wiki/Using_ZIPs_as_WAD_replacement),
|
59
|
+
its directory namespaces are very different. This made Doom's usual
|
60
|
+
autobuild toolkit [DoomTools](https://mtrop.github.io/DoomTools/) a
|
61
|
+
poor fit for development. Due to the size of the Weissblatt project, manual
|
62
|
+
assembly using SLADE was also out of the question.
|
63
|
+
|
64
|
+
I chose Python as the basis for PK3Make because it is platform-independent,
|
65
|
+
easy-to-read and ubiquitous and although some Doom/Weissblatt-specific
|
66
|
+
modules needed to be written from scratch for PK3Make, Python's vast
|
67
|
+
standard library and otherwise mature PyPI repository helped shoulder some
|
68
|
+
of the heavy lifting for things such as image processing.
|
69
|
+
|
70
|
+
# PK3Makefile reference
|
71
|
+
|
72
|
+
PK3Make uses its own build definition language called `PK3Makefile`, inspired by the `METAINFO` spec from
|
73
|
+
[Matt Tropiano's dImgConv](https://mtrop.github.io/DoomTools/dimgconv.html).
|
74
|
+
|
75
|
+
`PK3Makefile`s are processed line-by-line with everything following `#`
|
76
|
+
being treated as a comment. Otherwise it is split into *Build Options*,
|
77
|
+
which define PK3Make's general behavior and *LUMPDEFS*, which define what
|
78
|
+
files to put into your PK3 and how to build them.
|
79
|
+
|
80
|
+
## Build options
|
81
|
+
|
82
|
+
Build options are specified per-line and follow the pattern
|
83
|
+
|
84
|
+
?<OPTION>: <PARAM>
|
85
|
+
|
86
|
+
PK3Make supports the following options:
|
87
|
+
|
88
|
+
`?srcdir: <DIR>` specifies the directory to pull its base assets from.
|
89
|
+
PK3Make will attempt to find all defined lumps within this folder and
|
90
|
+
mirror its path within `?workdir` after compilation.
|
91
|
+
|
92
|
+
`?workdir: <DIR>` specifies the temporary working directory. PK3Make will
|
93
|
+
check the timestamps between this and `?srcdir` and rebuild/copy any
|
94
|
+
outdated files into `?workdir` during the compilation process.
|
95
|
+
|
96
|
+
`?palette:` defines the main color palette, by `LUMPNAME` (`PLAYPAL` by default)
|
97
|
+
|
98
|
+
`?destfile:` describes a filepath to the destination PK3. This is where
|
99
|
+
`?workdir` will get copied to during packing.
|
100
|
+
|
101
|
+
|
102
|
+
## Lump definitions
|
103
|
+
|
104
|
+
Lump definitions follow the following pattern:
|
105
|
+
|
106
|
+
<LUMPNAME> <TYPE> <OFFSET>
|
107
|
+
|
108
|
+
`LUMPNAME` describes the filename as used in-engine. Just like the engine,
|
109
|
+
it is matched against the first eight characters of the basename in a
|
110
|
+
case-insensitive manner. [Globbing] such as `D_*.mid` is allowed, in which
|
111
|
+
case `TYPE` and `OFFSET` are applied to all matching lumps. `LUMPNAME`s
|
112
|
+
starting with a "/" are treated as explicit file paths and match against
|
113
|
+
the full file path, starting at the source directory.
|
114
|
+
|
115
|
+
[Globbing]: <https://en.wikipedia.org/wiki/Glob_(programming)>
|
116
|
+
|
117
|
+
`TYPE` determines how the file is treated during compilation. It can be one
|
118
|
+
of the following:
|
119
|
+
|
120
|
+
- `colormap`: File is a Colormap. OFFSET specifies the lump name for the palette from which it is generated
|
121
|
+
- `fade`|`flat`: File is an image and should be converted to a flat. Only PNG images are supported.
|
122
|
+
- `graphic`: File is an image and should be converted to a Doom Picture using `OFFSET` (see below) as a picture offset. If missing, the offset is assumed to be `0 0`.
|
123
|
+
- `marker`: File does not exist and is a 0-byte marker. Explicit path definition required.
|
124
|
+
- `palette`: File is a graphic and should be converted to a color palette. Only PNG images supported.
|
125
|
+
- `raw`: Copy the file over as-is. When `preserve_filename` is given in the offset, the original filename will be preserved.
|
126
|
+
- `tinttab`: File is a TINTTAB. OFFSET is defined as `<PALETTE> <WEIGHT>`. Upon generation, `PALETTE` orthogonally maps each color index against one another, `WEIGHT` specifies a bias towards horizontal/vertical colors between 0 and 1.
|
127
|
+
- `udmf`: (Not supported yet.) File is a UDMF TEXTMAP. PK3Make will generate a directory named LUMPNAME featuring:
|
128
|
+
- `<LUMPNAME>`: Marker
|
129
|
+
- `TEXTMAP`: Original TEXTMAP file (renamed)
|
130
|
+
- `ZNODES`: UDMF BSP tree generated by PK3Make
|
131
|
+
- `ENDMAP`: Marker
|
132
|
+
|
133
|
+
`OFFSET` defines the offset of doom pictures. For convenience, these can be either:
|
134
|
+
|
135
|
+
- `<x> <y>`: Explicit X/Y-coordinates
|
136
|
+
- `center`: Sets the offset to the center of the image
|
137
|
+
- `sprite`: Sets the offset to `width/2 (height-4)`. This is a very common
|
138
|
+
offset for sprites placed in the game world.
|
@@ -0,0 +1,11 @@
|
|
1
|
+
pk3make/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
2
|
+
pk3make/__main__.py,sha256=Ls3uIkNCE95ZY2HeR8DtWNRkSTKDjvAJ3uqSVQa4t-k,10859
|
3
|
+
pk3make/modules/doomglob.py,sha256=tXRiFwScIP1KGmSJI-bfAeKbXzI-KGB5oc8LG0Yg7DE,1278
|
4
|
+
pk3make/modules/doompic.py,sha256=KOZURZPrUcgL0yHhyloiCyKYl-6Y7LLnk-v9xQpuAmU,14208
|
5
|
+
pk3make/modules/pk3makefile.py,sha256=YaNx0NoVwEZq1t1m1RTd3i5uRm3l-DikWsVp-HVdnfE,2199
|
6
|
+
pk3make/modules/pk3zip.py,sha256=ZdaEa1wh4gTrWZrkE4Orf7IS_PyMidgII4yRK215Bpg,3209
|
7
|
+
pk3make-1.1.dist-info/METADATA,sha256=0XIdDneGJ282VyHoOvWeISxTdg1uB8tmVfYO-z7jgKc,6000
|
8
|
+
pk3make-1.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
9
|
+
pk3make-1.1.dist-info/entry_points.txt,sha256=I5d-GktPSJQuR7bdqW8rWo2Ta5usyFCVX3UlDmh-Duc,45
|
10
|
+
pk3make-1.1.dist-info/licenses/LICENSE,sha256=zfXyi_cMnW2CRuY9eAQyCiSvFLG3n8OpQEnsj7x2Eyk,7626
|
11
|
+
pk3make-1.1.dist-info/RECORD,,
|
@@ -0,0 +1,157 @@
|
|
1
|
+
# GNU LESSER GENERAL PUBLIC LICENSE
|
2
|
+
|
3
|
+
Version 3, 29 June 2007
|
4
|
+
|
5
|
+
Copyright (C) 2007 Free Software Foundation, Inc.
|
6
|
+
<https://fsf.org/>
|
7
|
+
|
8
|
+
Everyone is permitted to copy and distribute verbatim copies of this
|
9
|
+
license document, but changing it is not allowed.
|
10
|
+
|
11
|
+
This version of the GNU Lesser General Public License incorporates the
|
12
|
+
terms and conditions of version 3 of the GNU General Public License,
|
13
|
+
supplemented by the additional permissions listed below.
|
14
|
+
|
15
|
+
## 0. Additional Definitions.
|
16
|
+
|
17
|
+
As used herein, "this License" refers to version 3 of the GNU Lesser
|
18
|
+
General Public License, and the "GNU GPL" refers to version 3 of the
|
19
|
+
GNU General Public License.
|
20
|
+
|
21
|
+
"The Library" refers to a covered work governed by this License, other
|
22
|
+
than an Application or a Combined Work as defined below.
|
23
|
+
|
24
|
+
An "Application" is any work that makes use of an interface provided
|
25
|
+
by the Library, but which is not otherwise based on the Library.
|
26
|
+
Defining a subclass of a class defined by the Library is deemed a mode
|
27
|
+
of using an interface provided by the Library.
|
28
|
+
|
29
|
+
A "Combined Work" is a work produced by combining or linking an
|
30
|
+
Application with the Library. The particular version of the Library
|
31
|
+
with which the Combined Work was made is also called the "Linked
|
32
|
+
Version".
|
33
|
+
|
34
|
+
The "Minimal Corresponding Source" for a Combined Work means the
|
35
|
+
Corresponding Source for the Combined Work, excluding any source code
|
36
|
+
for portions of the Combined Work that, considered in isolation, are
|
37
|
+
based on the Application, and not on the Linked Version.
|
38
|
+
|
39
|
+
The "Corresponding Application Code" for a Combined Work means the
|
40
|
+
object code and/or source code for the Application, including any data
|
41
|
+
and utility programs needed for reproducing the Combined Work from the
|
42
|
+
Application, but excluding the System Libraries of the Combined Work.
|
43
|
+
|
44
|
+
## 1. Exception to Section 3 of the GNU GPL.
|
45
|
+
|
46
|
+
You may convey a covered work under sections 3 and 4 of this License
|
47
|
+
without being bound by section 3 of the GNU GPL.
|
48
|
+
|
49
|
+
## 2. Conveying Modified Versions.
|
50
|
+
|
51
|
+
If you modify a copy of the Library, and, in your modifications, a
|
52
|
+
facility refers to a function or data to be supplied by an Application
|
53
|
+
that uses the facility (other than as an argument passed when the
|
54
|
+
facility is invoked), then you may convey a copy of the modified
|
55
|
+
version:
|
56
|
+
|
57
|
+
- a) under this License, provided that you make a good faith effort
|
58
|
+
to ensure that, in the event an Application does not supply the
|
59
|
+
function or data, the facility still operates, and performs
|
60
|
+
whatever part of its purpose remains meaningful, or
|
61
|
+
- b) under the GNU GPL, with none of the additional permissions of
|
62
|
+
this License applicable to that copy.
|
63
|
+
|
64
|
+
## 3. Object Code Incorporating Material from Library Header Files.
|
65
|
+
|
66
|
+
The object code form of an Application may incorporate material from a
|
67
|
+
header file that is part of the Library. You may convey such object
|
68
|
+
code under terms of your choice, provided that, if the incorporated
|
69
|
+
material is not limited to numerical parameters, data structure
|
70
|
+
layouts and accessors, or small macros, inline functions and templates
|
71
|
+
(ten or fewer lines in length), you do both of the following:
|
72
|
+
|
73
|
+
- a) Give prominent notice with each copy of the object code that
|
74
|
+
the Library is used in it and that the Library and its use are
|
75
|
+
covered by this License.
|
76
|
+
- b) Accompany the object code with a copy of the GNU GPL and this
|
77
|
+
license document.
|
78
|
+
|
79
|
+
## 4. Combined Works.
|
80
|
+
|
81
|
+
You may convey a Combined Work under terms of your choice that, taken
|
82
|
+
together, effectively do not restrict modification of the portions of
|
83
|
+
the Library contained in the Combined Work and reverse engineering for
|
84
|
+
debugging such modifications, if you also do each of the following:
|
85
|
+
|
86
|
+
- a) Give prominent notice with each copy of the Combined Work that
|
87
|
+
the Library is used in it and that the Library and its use are
|
88
|
+
covered by this License.
|
89
|
+
- b) Accompany the Combined Work with a copy of the GNU GPL and this
|
90
|
+
license document.
|
91
|
+
- c) For a Combined Work that displays copyright notices during
|
92
|
+
execution, include the copyright notice for the Library among
|
93
|
+
these notices, as well as a reference directing the user to the
|
94
|
+
copies of the GNU GPL and this license document.
|
95
|
+
- d) Do one of the following:
|
96
|
+
- 0) Convey the Minimal Corresponding Source under the terms of
|
97
|
+
this License, and the Corresponding Application Code in a form
|
98
|
+
suitable for, and under terms that permit, the user to
|
99
|
+
recombine or relink the Application with a modified version of
|
100
|
+
the Linked Version to produce a modified Combined Work, in the
|
101
|
+
manner specified by section 6 of the GNU GPL for conveying
|
102
|
+
Corresponding Source.
|
103
|
+
- 1) Use a suitable shared library mechanism for linking with
|
104
|
+
the Library. A suitable mechanism is one that (a) uses at run
|
105
|
+
time a copy of the Library already present on the user's
|
106
|
+
computer system, and (b) will operate properly with a modified
|
107
|
+
version of the Library that is interface-compatible with the
|
108
|
+
Linked Version.
|
109
|
+
- e) Provide Installation Information, but only if you would
|
110
|
+
otherwise be required to provide such information under section 6
|
111
|
+
of the GNU GPL, and only to the extent that such information is
|
112
|
+
necessary to install and execute a modified version of the
|
113
|
+
Combined Work produced by recombining or relinking the Application
|
114
|
+
with a modified version of the Linked Version. (If you use option
|
115
|
+
4d0, the Installation Information must accompany the Minimal
|
116
|
+
Corresponding Source and Corresponding Application Code. If you
|
117
|
+
use option 4d1, you must provide the Installation Information in
|
118
|
+
the manner specified by section 6 of the GNU GPL for conveying
|
119
|
+
Corresponding Source.)
|
120
|
+
|
121
|
+
## 5. Combined Libraries.
|
122
|
+
|
123
|
+
You may place library facilities that are a work based on the Library
|
124
|
+
side by side in a single library together with other library
|
125
|
+
facilities that are not Applications and are not covered by this
|
126
|
+
License, and convey such a combined library under terms of your
|
127
|
+
choice, if you do both of the following:
|
128
|
+
|
129
|
+
- a) Accompany the combined library with a copy of the same work
|
130
|
+
based on the Library, uncombined with any other library
|
131
|
+
facilities, conveyed under the terms of this License.
|
132
|
+
- b) Give prominent notice with the combined library that part of it
|
133
|
+
is a work based on the Library, and explaining where to find the
|
134
|
+
accompanying uncombined form of the same work.
|
135
|
+
|
136
|
+
## 6. Revised Versions of the GNU Lesser General Public License.
|
137
|
+
|
138
|
+
The Free Software Foundation may publish revised and/or new versions
|
139
|
+
of the GNU Lesser General Public License from time to time. Such new
|
140
|
+
versions will be similar in spirit to the present version, but may
|
141
|
+
differ in detail to address new problems or concerns.
|
142
|
+
|
143
|
+
Each version is given a distinguishing version number. If the Library
|
144
|
+
as you received it specifies that a certain numbered version of the
|
145
|
+
GNU Lesser General Public License "or any later version" applies to
|
146
|
+
it, you have the option of following the terms and conditions either
|
147
|
+
of that published version or of any later version published by the
|
148
|
+
Free Software Foundation. If the Library as you received it does not
|
149
|
+
specify a version number of the GNU Lesser General Public License, you
|
150
|
+
may choose any version of the GNU Lesser General Public License ever
|
151
|
+
published by the Free Software Foundation.
|
152
|
+
|
153
|
+
If the Library as you received it specifies that a proxy can decide
|
154
|
+
whether future versions of the GNU Lesser General Public License shall
|
155
|
+
apply, that proxy's public statement of acceptance of any version is
|
156
|
+
permanent authorization for you to choose that version for the
|
157
|
+
Library.
|