PyCatFile 0.27.4__py3-none-any.whl → 0.28.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pycatfile-0.27.4.data → pycatfile-0.28.0.data}/scripts/catfile.py +5 -5
- pycatfile-0.28.0.data/scripts/catfile_py3.py +509 -0
- {pycatfile-0.27.4.dist-info → pycatfile-0.28.0.dist-info}/METADATA +4 -4
- pycatfile-0.28.0.dist-info/RECORD +10 -0
- {pycatfile-0.27.4.dist-info → pycatfile-0.28.0.dist-info}/WHEEL +1 -1
- pycatfile-0.28.0.dist-info/top_level.txt +2 -0
- pycatfile.py +194 -47
- pycatfile_py3.py +17554 -0
- pycatfile-0.27.4.dist-info/RECORD +0 -8
- pycatfile-0.27.4.dist-info/top_level.txt +0 -1
- {pycatfile-0.27.4.dist-info → pycatfile-0.28.0.dist-info}/licenses/LICENSE +0 -0
- {pycatfile-0.27.4.dist-info → pycatfile-0.28.0.dist-info}/zip-safe +0 -0
pycatfile.py
CHANGED
@@ -10,11 +10,11 @@
     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
     Revised BSD License for more details.
 
-    Copyright 2018-
-    Copyright 2018-
-    Copyright 2018-
+    Copyright 2018-2026 Cool Dude 2k - http://idb.berlios.de/
+    Copyright 2018-2026 Game Maker 2k - http://intdb.sourceforge.net/
+    Copyright 2018-2026 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
 
-    $FileInfo: pycatfile.py - Last Update:
+    $FileInfo: pycatfile.py - Last Update: 2/3/2026 Ver. 0.28.0 RC 1 - Author: cooldude2k $
 '''
 
 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -661,12 +661,12 @@ __project__ = __program_name__
 __program_alt_name__ = __program_name__
 __project_url__ = "https://github.com/GameMaker2k/PyCatFile"
 __project_release_url__ = __project_url__+"/releases/latest"
-__version_info__ = (0,
-__version_date_info__ = (
+__version_info__ = (0, 28, 0, "RC 1", 1)
+__version_date_info__ = (2026, 2, 3, "RC 1", 1)
 __version_date__ = str(__version_date_info__[0]) + "." + str(
     __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
 __revision__ = __version_info__[3]
-__revision_id__ = "$Id:
+__revision_id__ = "$Id: 620c61932370f8b836e70c8dbe572587740d66e2 $"
 if(__version_info__[4] is not None):
     __version_date_plusrc__ = __version_date__ + \
         "-" + str(__version_date_info__[4])
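For reference, the unchanged context lines derive the dotted date string from the new tuple, so the 0.28.0 values yield 2026.02.03; a minimal, self-contained check in plain Python:

    __version_date_info__ = (2026, 2, 3, "RC 1", 1)
    __version_date__ = str(__version_date_info__[0]) + "." + str(
        __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
    print(__version_date__)  # -> 2026.02.03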
@@ -859,15 +859,6 @@ try:
     compressionsupport.append("lz4")
 except ImportError:
     pass
-'''
-try:
-    import lzo
-    compressionsupport.append("lzo")
-    compressionsupport.append("lzop")
-except ImportError:
-    lzo = None
-    pass
-'''
 try:
     try:
         import compression.zstd as zstd
@@ -931,13 +922,6 @@ if('lzo' in compressionsupport):
     compressionlistalt.append('lzo')
     outextlist.append('lzo')
     outextlistwd.append('.lzo')
-'''
-if('lzop' in compressionsupport):
-    compressionlist.append('lzop')
-    compressionlistalt.append('lzop')
-    outextlist.append('lzop')
-    outextlistwd.append('.lzop')
-'''
 if('lzma' in compressionsupport):
     compressionlist.append('lzma')
     compressionlistalt.append('lzma')
@@ -6190,6 +6174,27 @@ def AppendNullBytes(indata=None, delimiter=__file_format_dict__['format_delimite
 def _hex_lower(n):
     return format(int(n), 'x').lower()
 
+def system_and_major():
+    info = platform.uname()
+
+    # Python 3: info is a namedtuple with .system / .release
+    # Python 2: info is a plain tuple (system, node, release, version, machine, processor)
+    try:
+        system = info.system
+        release = info.release
+    except AttributeError:
+        # Fallback for Python 2
+        system = info[0]
+        release = info[2]
+
+    # Find the first run of digits in the release string
+    m = re.search(r'\d+', release)
+    if m:
+        major = m.group(0)  # e.g. '11' or '6'
+        return u"%s%s" % (system, major)  # unicode-safe in Py2
+    else:
+        return system
+
 def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, saltkey=None):
     """
     Build and write the archive file header.
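The new system_and_major() helper joins the platform name from platform.uname() with the first digit run of the release string. A minimal usage sketch, assuming the helper is reachable at module level like the other pycatfile functions (output depends on the host):

    # Hypothetical usage of the helper added in 0.28.0; output varies by host.
    import pycatfile

    print(pycatfile.system_and_major())
    # e.g. 'Linux6' on a Linux 6.x kernel or 'Windows11' on Windows 11;
    # if the release string has no digits, only the bare system name is returned.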
@@ -6268,7 +6273,7 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
     else:
         fctime = format(int(to_ns(time.time())), 'x').lower()
     # Serialize the first group
-    fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding,
+    fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding, system_and_major(), py_implementation, __program_name__+str(__version_info__[0]), fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
     # Append tmpoutlist
     fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
     # Append extradata items if any
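The widened AppendNullBytes() call means the first header group now also records the host platform, the Python implementation, and the writing program with its major version. Purely for illustration, the serialized list has this shape; the concrete values below are hypothetical and the comments simply name the variables passed in the call above:

    # Hypothetical values; field names follow the arguments in the call above.
    ["1a",            # tmpoutlenhex
     "18f3c2a1b4e",   # fctime (hex timestamp from to_ns(time.time())), written twice
     "18f3c2a1b4e",
     "UTF-8",         # fencoding
     "Linux6",        # system_and_major()
     "cpython",       # py_implementation (value depends on the interpreter)
     "PyCatFile0",    # __program_name__ + str(__version_info__[0])
     "2",             # fnumfiles_hex
     "+1"]            # "+" + str(len(formatspecs['format_delimiter']))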
@@ -6856,7 +6861,9 @@ def AppendFilesWithContentFromTarFileToList(infile, extradata=[], jsondata={}, c
     inodetofile = {}
     filetoinode = {}
     inodetoforminode = {}
-    if(
+    if(isinstance(infile, (list, tuple, ))):
+        infile = infile[0]
+    if(infile == "-"):
         infile = MkTempFile()
         shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
         infile.seek(0, 0)
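The same normalization, unwrapping a list or tuple argument to its first element before the usual "-"/stdin handling, recurs below in the zip, rar and 7z readers and in the *FileListFiles helpers. A standalone sketch of the pattern (the function name is hypothetical, for illustration only):

    # Hypothetical illustration of the new argument normalization.
    def _first_infile(infile):
        if isinstance(infile, (list, tuple)):
            infile = infile[0]  # only the first entry is used
        return infile

    print(_first_infile(["archive.tar"]))  # -> 'archive.tar'
    print(_first_infile("archive.tar"))    # -> 'archive.tar'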
@@ -7082,7 +7089,9 @@ def AppendFilesWithContentFromZipFileToList(infile, extradata=[], jsondata={}, c
     inodetofile = {}
     filetoinode = {}
     inodetoforminode = {}
-    if(
+    if(isinstance(infile, (list, tuple, ))):
+        infile = infile[0]
+    if(infile == "-"):
         infile = MkTempFile()
         shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
         infile.seek(0, 0)
@@ -7245,6 +7254,9 @@ def AppendFilesWithContentFromZipFileToList(infile, extradata=[], jsondata={}, c
             fcontents.write(zipfp.read(member.filename))
         typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
+        if(typechecktest is not False):
+            typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
+            fcontents.seek(0, 0)
         fcencoding = GetFileEncoding(fcontents, 0, False)[0]
         if(typechecktest is False and not compresswholefile):
             fcontents.seek(0, 2)
@@ -7328,6 +7340,8 @@ else:
         inodetofile = {}
         filetoinode = {}
         inodetoforminode = {}
+        if(isinstance(infile, (list, tuple, ))):
+            infile = infile[0]
         if(not os.path.exists(infile) or not os.path.isfile(infile)):
             return False
         if(not rarfile.is_rarfile(infile) and not rarfile.is_rarfile_sfx(infile)):
@@ -7478,6 +7492,9 @@ else:
             fcontents.write(rarfp.read(member.filename))
         typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
+        if(typechecktest is not False):
+            typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
+            fcontents.seek(0, 0)
         fcencoding = GetFileEncoding(fcontents, 0, False)[0]
         if(typechecktest is False and not compresswholefile):
             fcontents.seek(0, 2)
@@ -7606,6 +7623,8 @@ else:
         inodetofile = {}
         filetoinode = {}
         inodetoforminode = {}
+        if(isinstance(infile, (list, tuple, ))):
+            infile = infile[0]
         if(not os.path.exists(infile) or not os.path.isfile(infile)):
             return False
         szpfp = py7zr.SevenZipFile(infile, mode="r")
@@ -7712,6 +7731,9 @@ else:
         fcontents.seek(0, 0)
         typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
+        if(typechecktest is not False):
+            typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
+            fcontents.seek(0, 0)
         fcencoding = GetFileEncoding(fcontents, 0, False)[0]
         try:
             file_content[member.filename].close()
@@ -8013,6 +8035,126 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
         fp.close()
     return True
 
+def AppendReadInFileWithContentToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False):
+    return ReadInFileWithContentToList(infile, "auto", 0, 0, 0, False, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
+
+def AppendReadInMultipleFileWithContentToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False):
+    return ReadInMultipleFileWithContentToList(infile, fmttype, 0, 0, 0, False, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
+
+def AppendReadInMultipleFilesWithContentToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False):
+    return ReadInMultipleFilesWithContentToList(infile, fmttype, 0, 0, 0, False, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
+
+def AppendReadInFileWithContent(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, verbose=False):
+    if(not hasattr(fp, "write")):
+        return False
+    GetDirList = AppendReadInFileWithContentToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, insaltkey, verbose)
+    numfiles = int(len(GetDirList))
+    fnumfiles = format(numfiles, 'x').lower()
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, outsaltkey)
+    try:
+        fp.flush()
+        if(hasattr(os, "sync")):
+            os.fsync(fp.fileno())
+    except (io.UnsupportedOperation, AttributeError, OSError):
+        pass
+    for curfname in GetDirList:
+        tmpoutlist = curfname['fheaders']
+        AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, outsaltkey)
+    try:
+        fp.flush()
+        if(hasattr(os, "sync")):
+            os.fsync(fp.fileno())
+    except (io.UnsupportedOperation, AttributeError, OSError):
+        pass
+    return fp
+
+def AppendReadInFileWithContentToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, verbose=False, returnfp=False):
+    if(IsNestedDict(formatspecs) and fmttype=="auto" and
+       (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
+        get_in_ext = os.path.splitext(outfile)
+        tmpfmt = GetKeyByFormatExtension(get_in_ext[1], formatspecs=__file_format_multi_dict__)
+        if(tmpfmt is None and get_in_ext[1]!=""):
+            get_in_ext = os.path.splitext(get_in_ext[0])
+            tmpfmt = GetKeyByFormatExtension(get_in_ext[0], formatspecs=__file_format_multi_dict__)
+        if(tmpfmt is None):
+            fmttype = __file_format_default__
+            formatspecs = formatspecs[fmttype]
+        else:
+            fmttype = tmpfmt
+            formatspecs = formatspecs[tmpfmt]
+    elif(IsNestedDict(formatspecs) and fmttype in formatspecs):
+        formatspecs = formatspecs[fmttype]
+    elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
+        fmttype = __file_format_default__
+        formatspecs = formatspecs[fmttype]
+    if(outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write")):
+        outfile = RemoveWindowsPath(outfile)
+        if(os.path.exists(outfile)):
+            try:
+                os.unlink(outfile)
+            except OSError:
+                pass
+    if(outfile == "-" or outfile is None):
+        verbose = False
+        fp = MkTempFile()
+    elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
+        fp = outfile
+    elif(re.findall(__upload_proto_support__, outfile)):
+        fp = MkTempFile()
+    else:
+        fbasename = os.path.splitext(outfile)[0]
+        fextname = os.path.splitext(outfile)[1]
+        if(not compresswholefile and fextname in outextlistwd):
+            compresswholefile = True
+        try:
+            fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
+        except PermissionError:
+            return False
+    AppendReadInFileWithContent(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, insaltkey, outsaltkey, verbose)
+    if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
+        fp = CompressOpenFileAlt(
+            fp, compression, compressionlevel, compressionuselist, formatspecs)
+        try:
+            fp.flush()
+            if(hasattr(os, "sync")):
+                os.fsync(fp.fileno())
+        except (io.UnsupportedOperation, AttributeError, OSError):
+            pass
+    if(outfile == "-"):
+        fp.seek(0, 0)
+        shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
+    elif(outfile is None):
+        fp.seek(0, 0)
+        outvar = fp.read()
+        fp.close()
+        return outvar
+    elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
+        fp = CompressOpenFileAlt(
+            fp, compression, compressionlevel, compressionuselist, formatspecs)
+        fp.seek(0, 0)
+        upload_file_to_internet_file(fp, outfile)
+    if(returnfp):
+        fp.seek(0, 0)
+        return fp
+    else:
+        fp.close()
+        return True
+
+def AppendReadInFileWithContentToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, verbose=False, returnfp=False):
+    if not isinstance(infiles, list):
+        infiles = [infiles]
+    returnout = False
+    for infileslist in infiles:
+        returnout = AppendReadInFileWithContentToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, insaltkey, outsaltkey, verbose, True)
+        if(not returnout):
+            break
+        else:
+            outfile = returnout
+    if(not returnfp and returnout):
+        returnout.close()
+        return True
+    return returnout
+
 def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
     if(IsNestedDict(formatspecs) and fmttype=="auto" and
        (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
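The block above adds read-and-append helpers that re-read existing archives and write their entries into a new output, with the stacked variant looping several inputs into one output stream. A minimal usage sketch, assuming two existing archives (the file names are hypothetical):

    # Hypothetical example: combine the contents of two existing archives
    # into one new archive, picking the output format from its extension.
    import pycatfile

    pycatfile.AppendReadInFileWithContentToStackedOutFile(
        ["first.cat", "second.cat"],  # input archives; a single path also works
        "combined.cat",               # output archive
        fmttype="auto",
        compression="auto")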
@@ -9004,8 +9146,6 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, file
         compresscheck = "zstd"
     elif(fextname == ".lz4"):
         compresscheck = "lz4"
-    elif(fextname == ".lzo" or fextname == ".lzop"):
-        compresscheck = "lzo"
     elif(fextname == ".lzma"):
         compresscheck = "lzma"
     elif(fextname == ".xz"):
@@ -9297,8 +9437,6 @@ def UncompressString(infile, formatspecs=__file_format_multi_dict__, filestart=0
         fileuz = decompressor.decompress(infile)
     elif(compresscheck == "lz4" and compresscheck in compressionsupport):
         fileuz = lz4.frame.decompress(infile)
-    elif((compresscheck == "lzo" or compresscheck == "lzop") and compresscheck in compressionsupport):
-        fileuz = lzo.decompress(infile)
     elif((compresscheck == "lzma" or compresscheck == "xz") and compresscheck in compressionsupport):
         fileuz = lzma.decompress(infile)
     elif(compresscheck == "zlib" and compresscheck in compressionsupport):
@@ -9349,8 +9487,6 @@ def UncompressBytes(infile, formatspecs=__file_format_multi_dict__, filestart=0)
         fileuz = decompressor.decompress(infile)
     elif(compresscheck == "lz4" and compresscheck in compressionsupport):
         fileuz = lz4.frame.decompress(infile)
-    elif((compresscheck == "lzo" or compresscheck == "lzop") and compresscheck in compressionsupport):
-        fileuz = lzo.decompress(infile)
     elif((compresscheck == "lzma" or compresscheck == "xz") and compresscheck in compressionsupport):
         fileuz = lzma.decompress(infile)
     elif(compresscheck == "zlib" and compresscheck in compressionsupport):
@@ -9922,9 +10058,6 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None,
     elif compression == "lz4" and "lz4" in compressionsupport:
         bytesfp = MkTempFile()
         bytesfp.write(lz4.frame.compress(fp.read(), compression_level=_lvl(compressionlevel)))
-    elif (compression in ("lzo", "lzop")) and "lzop" in compressionsupport:
-        bytesfp = MkTempFile()
-        bytesfp.write(lzo.compress(fp.read(), _lvl(compressionlevel)))
     elif compression == "zstd" and "zstandard" in compressionsupport:
         bytesfp = MkTempFile()
         level = _lvl(compressionlevel)
@@ -10241,7 +10374,7 @@ def CatFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_form
         if(not fp):
             return False
         fp.seek(filestart, 0)
-    elif(
+    elif(infile == "-"):
         fp = MkTempFile()
         shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
         fp.seek(filestart, 0)
@@ -10299,10 +10432,8 @@ def CatFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_form
         compresscheck = "bzip2"
     elif(fextname == ".zst"):
         compresscheck = "zstd"
-    elif(fextname == ".lz4"
+    elif(fextname == ".lz4"):
         compresscheck = "lz4"
-    elif(fextname == ".lzo" or fextname == ".lzop"):
-        compresscheck = "lzo"
     elif(fextname == ".lzma"):
         compresscheck = "lzma"
     elif(fextname == ".xz"):
@@ -11025,6 +11156,10 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
         typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
 
+        if(typechecktest is not False):
+            typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
+            fcontents.seek(0, 0)
+
         # get fcencoding once here
         fcencoding = GetFileEncoding(fcontents, 0, False)[0]
 
@@ -11618,7 +11753,9 @@ def CatFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipcheck
 
 
 def TarFileListFiles(infile, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
-    if(
+    if(isinstance(infile, (list, tuple, ))):
+        infile = infile[0]
+    if(infile == "-"):
         infile = MkTempFile()
         shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
         infile.seek(0, 0)
@@ -11738,7 +11875,9 @@ def TarFileListFile(infile, formatspecs=__file_format_multi_dict__, verbose=Fals
 
 
 def ZipFileListFiles(infile, verbose=False, returnfp=False):
-    if(
+    if(isinstance(infile, (list, tuple, ))):
+        infile = infile[0]
+    if(infile == "-"):
         infile = MkTempFile()
         shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
         infile.seek(0, 0)
@@ -11895,9 +12034,10 @@ def ZipFileListFile(infile, verbose=False, returnfp=False):
 if(not rarfile_support):
     def RarFileListFiles(infile, verbose=False, returnfp=False):
         return False
-
-if(rarfile_support):
+else:
     def RarFileListFiles(infile, verbose=False, returnfp=False):
+        if(isinstance(infile, (list, tuple, ))):
+            infile = infile[0]
         if(not os.path.exists(infile) or not os.path.isfile(infile)):
             return False
         if(not rarfile.is_rarfile(infile) and not rarfile.is_rarfile_sfx(infile)):
@@ -12027,9 +12167,10 @@ def RarFileListFile(infile, verbose=False, returnfp=False):
 if(not py7zr_support):
     def SevenZipFileListFiles(infile, verbose=False, returnfp=False):
         return False
-
-if(py7zr_support):
+else:
     def SevenZipFileListFiles(infile, verbose=False, returnfp=False):
+        if(isinstance(infile, (list, tuple, ))):
+            infile = infile[0]
         if(not os.path.exists(infile) or not os.path.isfile(infile)):
             return False
         lcfi = 0
@@ -12390,8 +12531,14 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
     if headers is None:
         headers = {}
     urlparts = urlparse(url)
-
-
+    if(urlparts.username is not None):
+        username = unquote(urlparts.username)
+    else:
+        username = None
+    if(urlparts.password is not None):
+        password = unquote(urlparts.password)
+    else:
+        password = None
 
     # Rebuild URL without username and password
     netloc = urlparts.hostname or ''