PyArchiveFile 0.25.0__tar.gz → 0.25.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pyarchivefile-0.25.0 → pyarchivefile-0.25.2}/PKG-INFO +1 -1
- {pyarchivefile-0.25.0 → pyarchivefile-0.25.2}/PyArchiveFile.egg-info/PKG-INFO +1 -1
- {pyarchivefile-0.25.0 → pyarchivefile-0.25.2}/archivefile.py +11 -11
- {pyarchivefile-0.25.0 → pyarchivefile-0.25.2}/pyarchivefile.py +299 -101
- {pyarchivefile-0.25.0 → pyarchivefile-0.25.2}/pyproject.toml +1 -1
- {pyarchivefile-0.25.0 → pyarchivefile-0.25.2}/LICENSE +0 -0
- {pyarchivefile-0.25.0 → pyarchivefile-0.25.2}/PyArchiveFile.egg-info/SOURCES.txt +0 -0
- {pyarchivefile-0.25.0 → pyarchivefile-0.25.2}/PyArchiveFile.egg-info/dependency_links.txt +0 -0
- {pyarchivefile-0.25.0 → pyarchivefile-0.25.2}/PyArchiveFile.egg-info/top_level.txt +0 -0
- {pyarchivefile-0.25.0 → pyarchivefile-0.25.2}/PyArchiveFile.egg-info/zip-safe +0 -0
- {pyarchivefile-0.25.0 → pyarchivefile-0.25.2}/README.md +0 -0
- {pyarchivefile-0.25.0 → pyarchivefile-0.25.2}/archiveneofile.py +0 -0
- {pyarchivefile-0.25.0 → pyarchivefile-0.25.2}/neoarchivefile.py +0 -0
- {pyarchivefile-0.25.0 → pyarchivefile-0.25.2}/setup.cfg +0 -0
- {pyarchivefile-0.25.0 → pyarchivefile-0.25.2}/setup.py +0 -0
{pyarchivefile-0.25.0 → pyarchivefile-0.25.2}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: PyArchiveFile
-Version: 0.25.0
+Version: 0.25.2
 Summary: A tar like file format name archivefile.
 Home-page: https://github.com/GameMaker2k/PyArchiveFile
 Download-URL: https://github.com/GameMaker2k/PyArchiveFile/archive/master.tar.gz
{pyarchivefile-0.25.0 → pyarchivefile-0.25.2}/PyArchiveFile.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: PyArchiveFile
-Version: 0.25.0
+Version: 0.25.2
 Summary: A tar like file format name archivefile.
 Home-page: https://github.com/GameMaker2k/PyArchiveFile
 Download-URL: https://github.com/GameMaker2k/PyArchiveFile/archive/master.tar.gz
{pyarchivefile-0.25.0 → pyarchivefile-0.25.2}/archivefile.py
@@ -14,7 +14,7 @@
 Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
 Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski

-    $FileInfo: archivefile.py - Last Update: 11/
+    $FileInfo: archivefile.py - Last Update: 11/6/2025 Ver. 0.25.2 RC 1 - Author: cooldude2k $
 '''

 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -153,38 +153,38 @@ if active_action:
             checkcompressfile = pyarchivefile.CheckCompressionSubType(
                 input_file, fnamedict, 0, True)
             if((pyarchivefile.IsNestedDict(fnamedict) and checkcompressfile in fnamedict) or (pyarchivefile.IsSingleDict(fnamedict) and checkcompressfile==fnamedict['format_magic'])):
-                tmpout = pyarchivefile.RePackArchiveFile(input_file, getargs.output, getargs.compression, getargs.wholefile, getargs.level, pyarchivefile.compressionlistalt, False, 0, 0, 0, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], getargs.skipchecksum, [], {}, fnamedict, getargs.verbose, False)
+                tmpout = pyarchivefile.RePackArchiveFile(input_file, getargs.output, getargs.compression, getargs.wholefile, getargs.level, pyarchivefile.compressionlistalt, False, 0, 0, 0, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], getargs.skipchecksum, [], {}, fnamedict, getargs.verbose, False)
             else:
                 tmpout = pyarchivefile.PackArchiveFileFromInFile(
-                    input_file, getargs.output, __file_format_default__, getargs.compression, getargs.wholefile, getargs.level, pyarchivefile.compressionlistalt, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], [], {}, fnamedict, getargs.verbose, False)
+                    input_file, getargs.output, __file_format_default__, getargs.compression, getargs.wholefile, getargs.level, pyarchivefile.compressionlistalt, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], [], {}, fnamedict, getargs.verbose, False)
             if(not tmpout):
                 sys.exit(1)
         else:
-            pyarchivefile.PackArchiveFile(getargs.input, getargs.output, getargs.text, __file_format_default__, getargs.compression, getargs.wholefile, getargs.level, pyarchivefile.compressionlistalt, False, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], [], {}, fnamedict, getargs.verbose, False)
+            pyarchivefile.PackArchiveFile(getargs.input, getargs.output, getargs.text, __file_format_default__, getargs.compression, getargs.wholefile, getargs.level, pyarchivefile.compressionlistalt, False, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], [], {}, fnamedict, getargs.verbose, False)
     elif active_action == 'repack':
         if getargs.convert:
             checkcompressfile = pyarchivefile.CheckCompressionSubType(
                 input_file, fnamedict, 0, True)
             if((pyarchivefile.IsNestedDict(fnamedict) and checkcompressfile in fnamedict) or (pyarchivefile.IsSingleDict(fnamedict) and checkcompressfile==fnamedict['format_magic'])):
                 pyarchivefile.RePackArchiveFile(input_file, getargs.output, getargs.compression, getargs.wholefile, getargs.level, pyarchivefile.compressionlistalt,
-                    False, 0, 0, 0, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], getargs.skipchecksum, [], {}, fnamedict, getargs.verbose, False)
+                    False, 0, 0, 0, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], getargs.skipchecksum, [], {}, fnamedict, getargs.verbose, False)
             else:
-                pyarchivefile.PackArchiveFileFromInFile(input_file, getargs.output, __file_format_default__, getargs.compression, getargs.wholefile, getargs.level, pyarchivefile.compressionlistalt, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], [], {}, fnamedict, getargs.verbose, False)
+                pyarchivefile.PackArchiveFileFromInFile(input_file, getargs.output, __file_format_default__, getargs.compression, getargs.wholefile, getargs.level, pyarchivefile.compressionlistalt, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], [], {}, fnamedict, getargs.verbose, False)
             if(not tmpout):
                 sys.exit(1)
         else:
             pyarchivefile.RePackArchiveFile(input_file, getargs.output, getargs.compression, getargs.wholefile, getargs.level, pyarchivefile.compressionlistalt,
-                False, getargs.filestart, 0, 0, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], getargs.skipchecksum, [], {}, fnamedict, getargs.verbose, False)
+                False, getargs.filestart, 0, 0, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], getargs.skipchecksum, [], {}, fnamedict, getargs.verbose, False)
     elif active_action == 'extract':
         if getargs.convert:
             checkcompressfile = pyarchivefile.CheckCompressionSubType(
                 input_file, fnamedict, 0, True)
             tempout = BytesIO()
             if((pyarchivefile.IsNestedDict(fnamedict) and checkcompressfile in fnamedict) or (pyarchivefile.IsSingleDict(fnamedict) and checkcompressfile==fnamedict['format_magic'])):
-                tmpout = pyarchivefile.RePackArchiveFile(input_file, tempout, getargs.compression, getargs.wholefile, getargs.level, pyarchivefile.compressionlistalt, False, 0, 0, 0, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], getargs.skipchecksum, [], {}, fnamedict, False, False)
+                tmpout = pyarchivefile.RePackArchiveFile(input_file, tempout, getargs.compression, getargs.wholefile, getargs.level, pyarchivefile.compressionlistalt, False, 0, 0, 0, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], getargs.skipchecksum, [], {}, fnamedict, False, False)
             else:
                 tmpout = pyarchivefile.PackArchiveFileFromInFile(
-                    input_file, tempout, __file_format_default__, getargs.compression, getargs.wholefile, getargs.level, pyarchivefile.compressionlistalt, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], [], {}, fnamedict, False, False)
+                    input_file, tempout, __file_format_default__, getargs.compression, getargs.wholefile, getargs.level, pyarchivefile.compressionlistalt, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], [], {}, fnamedict, False, False)
             if(not tmpout):
                 sys.exit(1)
             input_file = tempout
@@ -208,10 +208,10 @@ if active_action:
                 input_file, fnamedict, 0, True)
             tempout = BytesIO()
             if((pyarchivefile.IsNestedDict(fnamedict) and checkcompressfile in fnamedict) or (pyarchivefile.IsSingleDict(fnamedict) and checkcompressfile==fnamedict['format_magic'])):
-                tmpout = pyarchivefile.RePackArchiveFile(input_file, tempout, getargs.compression, getargs.wholefile, getargs.level, pyarchivefile.compressionlistalt, False, 0, 0, 0, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], getargs.skipchecksum, [], {}, fnamedict, False, False, False)
+                tmpout = pyarchivefile.RePackArchiveFile(input_file, tempout, getargs.compression, getargs.wholefile, getargs.level, pyarchivefile.compressionlistalt, False, 0, 0, 0, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], getargs.skipchecksum, [], {}, fnamedict, False, False, False)
             else:
                 tmpout = pyarchivefile.PackArchiveFileFromInFile(
-                    input_file, tempout, __file_format_default__, getargs.compression, getargs.wholefile, getargs.level, pyarchivefile.compressionlistalt, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], [], {}, fnamedict, False, False)
+                    input_file, tempout, __file_format_default__, getargs.compression, getargs.wholefile, getargs.level, pyarchivefile.compressionlistalt, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], [], {}, fnamedict, False, False)
             input_file = tempout
             if(not tmpout):
                 sys.exit(1)
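Note on the repeated change in archivefile.py above: every CLI call site now passes a five-entry checksum list where 0.25.0 passed four. A minimal sketch of the assumed slot layout, inferred from how AppendFileHeader and AppendFileHeaderWithContent slice the list further down in this diff; the slot comments are illustrative, not names used by the package:

    # Hypothetical illustration of the 0.25.2 five-slot checksum convention:
    # index 0: archive header checksum        (consumed by AppendFileHeader)
    # index 1: archive header JSON checksum   (consumed by AppendFileHeader)
    # index 2: file header checksum           (sliced out for AppendFileHeaderWithContent)
    # index 3: file JSON checksum             (sliced out for AppendFileHeaderWithContent)
    # index 4: file content checksum          (sliced out for AppendFileHeaderWithContent)
    checksums = [getargs.checksum] * 5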
{pyarchivefile-0.25.0 → pyarchivefile-0.25.2}/pyarchivefile.py
@@ -14,7 +14,7 @@
 Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
 Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski

-    $FileInfo: pyarchivefile.py - Last Update: 11/
+    $FileInfo: pyarchivefile.py - Last Update: 11/6/2025 Ver. 0.25.2 RC 1 - Author: cooldude2k $
 '''

 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -642,12 +642,12 @@ __project__ = __program_name__
 __program_alt_name__ = __program_name__
 __project_url__ = "https://github.com/GameMaker2k/PyArchiveFile"
 __project_release_url__ = __project_url__+"/releases/latest"
-__version_info__ = (0, 25,
-__version_date_info__ = (2025, 11,
+__version_info__ = (0, 25, 2, "RC 1", 1)
+__version_date_info__ = (2025, 11, 6, "RC 1", 1)
 __version_date__ = str(__version_date_info__[0]) + "." + str(
     __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
 __revision__ = __version_info__[3]
-__revision_id__ = "$Id:
+__revision_id__ = "$Id: ac98f2d8fa689bbcf7939f892030aad675a7d5fe $"
 if(__version_info__[4] is not None):
     __version_date_plusrc__ = __version_date__ + \
         "-" + str(__version_date_info__[4])
@@ -4729,7 +4729,30 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
             VerbosePrintOut("'" + fprechecksum + "' != " +
                             "'" + newfcs + "'")
             return False
-    fnumfiles = int(inheader[
+    fnumfiles = int(inheader[6], 16)
+    outfseeknextfile = inheaderdata[7]
+    fjsonsize = int(inheaderdata[10], 16)
+    fjsonchecksumtype = inheader[11]
+    fjsonchecksum = inheader[12]
+    fp.read(fjsonsize)
+    # Next seek directive
+    if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile.replace("+", ""))
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall(r"^\-([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall(r"^([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 0)
+    else:
+        return False
     countnum = 0
     flist = []
     while(countnum < fnumfiles):
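The block added above reads the header's JSON-region size, skips over it, and then follows the header's seek-next-file directive: a "+N" value seeks N bytes forward from the current position, "-N" seeks backward, and a bare "N" seeks to an absolute offset. A condensed sketch of that dispatch, assuming a readable, seekable fp; follow_seek_directive is a hypothetical helper, not a function in the package:

    import re

    def follow_seek_directive(fp, directive):
        # "+N": relative seek forward; "-N": relative seek backward;
        # "N": absolute seek; anything else is treated as a corrupt header.
        if re.match(r"^\+[0-9]+$", directive):
            fp.seek(int(directive), 1)   # int("+5") == 5
        elif re.match(r"^-[0-9]+$", directive):
            fp.seek(int(directive), 1)   # int("-5") == -5
        elif re.match(r"^[0-9]+$", directive):
            fp.seek(int(directive), 0)
        else:
            return False
        return fp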
@@ -4768,10 +4791,10 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
     else:
         inheader = ReadFileHeaderDataWoSize(
             fp, formatspecs['format_delimiter'])
-    fnumextrafieldsize = int(inheader[
-    fnumextrafields = int(inheader[
+    fnumextrafieldsize = int(inheader[13], 16)
+    fnumextrafields = int(inheader[14], 16)
     fextrafieldslist = []
-    extrastart =
+    extrastart = 15
     extraend = extrastart + fnumextrafields
     while(extrastart < extraend):
         fextrafieldslist.append(inheader[extrastart])
@@ -4791,7 +4814,105 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
     fhencoding = inheader[2]
     fostype = inheader[3]
     fpythontype = inheader[4]
-
+    fprojectname = inheader[4]
+    fnumfiles = int(inheader[6], 16)
+    fseeknextfile = inheader[7]
+    fjsontype = inheader[8]
+    fjsonlen = int(inheader[9], 16)
+    fjsonsize = int(inheader[10], 16)
+    fjsonchecksumtype = inheader[11]
+    fjsonchecksum = inheader[12]
+    fjsoncontent = {}
+    fjstart = fp.tell()
+    if(fjsontype=="json"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if(fjsonsize > 0):
+            try:
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
+            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                try:
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = json.loads(fprejsoncontent)
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if (fjsonsize > 0):
+            try:
+                # try base64 → utf-8 → YAML
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+            except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+                try:
+                    # fall back to treating the bytes as plain text YAML
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                except (UnicodeDecodeError, yaml.YAMLError):
+                    # final fallback: empty
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(not testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        fprejsoncontent = ""
+        fjsonrawcontent = fprejsoncontent
+    elif(fjsontype=="list"):
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        flisttmp = MkTempFile()
+        flisttmp.write(fprejsoncontent.encode())
+        flisttmp.seek(0)
+        fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
+        flisttmp.close()
+        fjsonrawcontent = fjsoncontent
+        if(fjsonlen==1):
+            try:
+                fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
+                fjsonlen = len(fjsoncontent)
+            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                try:
+                    fjsonrawcontent = fjsoncontent[0]
+                    fjsoncontent = json.loads(fjsoncontent[0])
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    pass
+    fjend = fp.tell()
+    if(re.findall("^\\+([0-9]+)", fseeknextfile)):
+        fseeknextasnum = int(fseeknextfile.replace("+", ""))
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall("^\\-([0-9]+)", fseeknextfile)):
+        fseeknextasnum = int(fseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall("^([0-9]+)", fseeknextfile)):
+        fseeknextasnum = int(fseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 0)
+    else:
+        return False
+    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
+    if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+        VerbosePrintOut("File JSON Data Checksum Error with file " +
+                        fname + " at offset " + str(fheaderstart))
+        VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
+        return False
     fprechecksumtype = inheader[-2]
     fprechecksum = inheader[-1]
     headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
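The JSON branch added above stores header metadata either base64-wrapped or as plain text, so the reader tries base64-then-JSON first and falls back to plain JSON, then to an empty dict. A self-contained sketch of that fallback chain; decode_header_json is a hypothetical helper, since the package inlines this logic:

    import base64
    import binascii
    import json

    def decode_header_json(raw):
        # raw is the UTF-8 text read from the header's JSON region.
        if not raw:
            return {}
        try:
            # Preferred form: base64-encoded UTF-8 JSON.
            return json.loads(base64.b64decode(raw.encode("UTF-8")).decode("UTF-8"))
        except (binascii.Error, UnicodeDecodeError, ValueError):
            try:
                # Fallback: the region already holds plain JSON text.
                return json.loads(raw)
            except ValueError:
                # Final fallback mirrors the diff: give up and return empty.
                return {}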
@@ -4804,7 +4925,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
         return False
     formversions = re.search('(.*?)(\\d+)', formstring).groups()
     fcompresstype = ""
-    outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
+    outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fprojectname': fprojectname, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fseeknextfile': fseeknextfile, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'fjsonchecksumtype': fjsonchecksumtype, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fjsonchecksum': fjsonchecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
     if (seekstart < 0) or (seekstart > fnumfiles):
         seekstart = 0
     if (seekend == 0) or (seekend > fnumfiles) or (seekend < seekstart):
@@ -4924,10 +5045,10 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
     else:
         inheader = ReadFileHeaderDataWoSize(
             fp, formatspecs['format_delimiter'])
-    fnumextrafieldsize = int(inheader[
-    fnumextrafields = int(inheader[
+    fnumextrafieldsize = int(inheader[13], 16)
+    fnumextrafields = int(inheader[14], 16)
     fextrafieldslist = []
-    extrastart =
+    extrastart = 15
     extraend = extrastart + fnumextrafields
     while(extrastart < extraend):
         fextrafieldslist.append(inheader[extrastart])
@@ -4944,10 +5065,40 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
-
-
-
-
+    fnumfiles = int(inheader[6], 16)
+    fseeknextfile = inheaderdata[7]
+    fjsontype = int(inheader[8], 16)
+    fjsonlen = int(inheader[9], 16)
+    fjsonsize = int(inheader[10], 16)
+    fjsonchecksumtype = inheader[11]
+    fjsonchecksum = inheader[12]
+    fjsoncontent = {}
+    fjstart = fp.tell()
+    fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+    fjend = fp.tell()
+    if(re.findall("^\\+([0-9]+)", fseeknextfile)):
+        fseeknextasnum = int(fseeknextfile.replace("+", ""))
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall("^\\-([0-9]+)", fseeknextfile)):
+        fseeknextasnum = int(fseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall("^([0-9]+)", fseeknextfile)):
+        fseeknextasnum = int(fseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 0)
+    else:
+        return False
+    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
+    if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+        VerbosePrintOut("File JSON Data Checksum Error with file " +
+                        fname + " at offset " + str(fheaderstart))
+        VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
+        return False
     fprechecksumtype = inheader[-2]
     fprechecksum = inheader[-1]
     headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
@@ -5383,12 +5534,7 @@ def AppendNullBytes(indata=None, delimiter=__file_format_dict__['format_delimite
 def _hex_lower(n):
     return format(int(n), 'x').lower()

-def AppendFileHeader(fp,
-                     numfiles,
-                     fencoding,
-                     extradata=None,
-                     checksumtype="md5",
-                     formatspecs=__file_format_dict__):
+def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__):
     """
     Build and write the archive file header.
     Returns the same file-like 'fp' on success, or False on failure.
@@ -5436,24 +5582,44 @@ def AppendFileHeader(
     # 4) core header fields before checksum:
     #    tmpoutlenhex, fencoding, platform.system(), fnumfiles
     fnumfiles_hex = _hex_lower(numfiles)
-
+    fjsontype = "json"
+    if(len(jsondata) > 0):
+        try:
+            fjsoncontent = json.dumps(jsondata, separators=(',', ':')).encode("UTF-8")
+        except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+            fjsoncontent = "".encode("UTF-8")
+    else:
+        fjsoncontent = "".encode("UTF-8")
+    fjsonsize = format(len(fjsoncontent), 'x').lower()
+    fjsonlen = format(len(jsondata), 'x').lower()
+    tmpoutlist = []
+    tmpoutlist.append(fjsontype)
+    tmpoutlist.append(fjsonlen)
+    tmpoutlist.append(fjsonsize)
+    if(len(jsondata) > 0):
+        tmpoutlist.append(checksumtype[1])
+        tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[1], True, formatspecs))
+    else:
+        tmpoutlist.append("none")
+        tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs))
     # Preserve your original "tmpoutlen" computation exactly
-    tmpoutlist
-
+    tmpoutlist.append(extrasizelen)
+    tmpoutlist.append(extrafields)
+    tmpoutlen = 8 + len(tmpoutlist) + len(xlist)
     tmpoutlenhex = _hex_lower(tmpoutlen)

     # Serialize the first group
-    fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), py_implementation, fnumfiles_hex], delimiter)
+    fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), py_implementation, __program_name__, fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
     # Append tmpoutlist
     fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
     # Append extradata items if any
     if xlist:
         fnumfilesa += AppendNullBytes(xlist, delimiter)
     # Append checksum type
-    fnumfilesa += AppendNullByte(checksumtype, delimiter)
+    fnumfilesa += AppendNullByte(checksumtype[0], delimiter)

     # 5) inner checksum over fnumfilesa
-    outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype, True, formatspecs)
+    outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs)
     tmpfileoutstr = fnumfilesa + AppendNullByte(outfileheadercshex, delimiter)

     # 6) size of (tmpfileoutstr) excluding one delimiter, per your original math
@@ -5466,7 +5632,7 @@ def AppendFileHeader(
         + fnumfilesa
     )

-    outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype, True, formatspecs)
+    outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs)
     fnumfilesa += AppendNullByte(outfileheadercshex, delimiter)

     # 8) final total size field (again per your original logic)
@@ -5474,10 +5640,11 @@ def AppendFileHeader(
     formheaersizestr = AppendNullByte(formheaersize, delimiter)  # computed but not appended in original
     # Note: you computed 'formheaersizestr' but didn’t append it afterward in the original either.
     # Keeping that behavior for compatibility.
-
+    nullstrecd = formatspecs['format_delimiter'].encode('UTF-8')
+    outfileout = fnumfilesa + fjsoncontent + nullstrecd
     # 9) write and try to sync
     try:
-        fp.write(
+        fp.write(outfileout)
     except (OSError, io.UnsupportedOperation):
         return False

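Taken together, the AppendFileHeader hunks change the signature from a single checksumtype string to a two-element list (slot 0 for the header itself, slot 1 for the embedded JSON) and write the serialized jsondata plus a trailing delimiter after the header fields. A hedged usage sketch under the new 0.25.2 signature; the jsondata payload here is invented for illustration:

    # 0.25.0 form:
    #   AppendFileHeader(fp, 0, "UTF-8", [], "md5", formatspecs)
    # 0.25.2 form: extradata list, jsondata dict, per-part checksum types.
    AppendFileHeader(fp, 0, "UTF-8", [], {"creator": "example"},
                     ["md5", "md5"], formatspecs)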
@@ -5498,21 +5665,21 @@ def AppendFileHeader(
     return fp


-def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype="md5", formatspecs=__file_format_multi_dict__):
+def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__):
     if(IsNestedDict(formatspecs) and fmttype in formatspecs):
         formatspecs = formatspecs[fmttype]
     elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
         fmttype = __file_format_default__
         formatspecs = formatspecs[fmttype]
-    AppendFileHeader(fp, 0, "UTF-8", [], checksumtype, formatspecs)
+    AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs)
     return fp


-def MakeEmptyArchiveFilePointer(fp, fmttype=__file_format_default__, checksumtype="md5", formatspecs=__file_format_multi_dict__):
+def MakeEmptyArchiveFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__):
     return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)


-def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="md5", formatspecs=__file_format_multi_dict__, returnfp=False):
+def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, returnfp=False):
     if(IsNestedDict(formatspecs) and fmttype=="auto" and
        (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
         get_in_ext = os.path.splitext(outfile)
@@ -5553,7 +5720,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
         fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
     except PermissionError:
         return False
-    AppendFileHeader(fp, 0, "UTF-8", [], checksumtype, formatspecs)
+    AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs)
     if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
         fp = CompressOpenFileAlt(
             fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -5584,7 +5751,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
     return True


-def MakeEmptyArchiveFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="md5", formatspecs=__file_format_dict__, returnfp=False):
+def MakeEmptyArchiveFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, returnfp=False):
     return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, returnfp)

@@ -5671,7 +5838,7 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
         pass
     return fp

-def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
     if(not hasattr(fp, "write")):
         return False
     advancedlist = formatspecs['use_advanced_list']
@@ -5720,7 +5887,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
     inodetoforminode = {}
     numfiles = int(len(GetDirList))
     fnumfiles = format(numfiles, 'x').lower()
-    AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
     try:
         fp.flush()
         if(hasattr(os, "sync")):
@@ -5977,7 +6144,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
         tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
                       fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
         AppendFileHeaderWithContent(
-            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
@@ -5986,7 +6153,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
         pass
     return fp

-def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
     if(not hasattr(fp, "write")):
         return False
     if(verbose):
@@ -6055,7 +6222,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
     except FileNotFoundError:
         return False
     numfiles = int(len(tarfp.getmembers()))
-    AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
     try:
         fp.flush()
         if(hasattr(os, "sync")):
@@ -6199,7 +6366,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
                       fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
         AppendFileHeaderWithContent(
-            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
@@ -6209,7 +6376,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         fcontents.close()
     return fp

-def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
     if(not hasattr(fp, "write")):
         return False
     if(verbose):
@@ -6248,7 +6415,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
     if(ziptest):
         VerbosePrintOut("Bad file found!")
     numfiles = int(len(zipfp.infolist()))
-    AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
     try:
         fp.flush()
         if(hasattr(os, "sync")):
@@ -6413,7 +6580,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
         tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
                       fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
         AppendFileHeaderWithContent(
-            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
@@ -6424,11 +6591,10 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
     return fp

 if(not rarfile_support):
-    def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+    def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
         return False
-
-
-    def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+else:
+    def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
         if(not hasattr(fp, "write")):
             return False
         if(verbose):
@@ -6449,7 +6615,7 @@ if(rarfile_support):
         if(rartest):
             VerbosePrintOut("Bad file found!")
         numfiles = int(len(rarfp.infolist()))
-        AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
@@ -6647,7 +6813,7 @@ if(rarfile_support):
         tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
                       fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
         AppendFileHeaderWithContent(
-            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
@@ -6658,11 +6824,10 @@ if(rarfile_support):
     return fp

 if(not py7zr_support):
-    def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+    def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
         return False
-
-
-    def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+else:
+    def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
         if(not hasattr(fp, "write")):
             return False
         if(verbose):
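The rarfile and py7zr hunks above and below also fix a structural quirk: 0.25.0 appears to have placed both the stub and the real definition under the same if(not ...) guard, the second shadowing the first, which 0.25.2 untangles into an explicit if/else so the stub survives only when the optional dependency is missing. The shape of the change, reduced to a toy example; the try/except import is the usual way such a support flag gets set, and the real functions take the full parameter lists shown in the diff:

    try:
        import rarfile  # optional dependency
        rarfile_support = True
    except ImportError:
        rarfile_support = False

    if(not rarfile_support):
        def AppendFilesWithContentFromRarFile(*args, **kwargs):
            # Stub: rarfile is unavailable, so signal failure.
            return False
    else:
        def AppendFilesWithContentFromRarFile(*args, **kwargs):
            # Real implementation would go here.
            return True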
@@ -6685,7 +6850,7 @@ if(py7zr_support):
         if(sztestalt):
             VerbosePrintOut("Bad file found!")
         numfiles = int(len(szpfp.list()))
-        AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
@@ -6821,7 +6986,7 @@ if(py7zr_support):
         tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
                       fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
         AppendFileHeaderWithContent(
-            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
@@ -6831,7 +6996,7 @@ if(py7zr_support):
         fcontents.close()
     return fp

-def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
     if(not hasattr(fp, "write")):
         return False
     if(verbose):
@@ -6847,7 +7012,7 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
     inodetoforminode = {}
     numfiles = int(len(GetDirList))
     fnumfiles = format(numfiles, 'x').lower()
-    AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+    AppendFileHeader(fp, numfiles, "UTF-8", [], [checksumtype[0], checksumtype[1]], formatspecs)
     for curfname in GetDirList:
         ftype = format(curfname[0], 'x').lower()
         fencoding = curfname[1]
@@ -6889,7 +7054,7 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
                       fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, fdev_minor, fdev_major, fseeknextfile]
         fcontents.seek(0, 0)
         AppendFileHeaderWithContent(
-            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
     return fp

|
|
|
@@ -6898,7 +7063,7 @@ def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], json
|
|
|
6898
7063
|
return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink, checksumtype, formatspecs, verbose)
|
|
6899
7064
|
|
|
6900
7065
|
|
|
6901
|
-
def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7066
|
+
def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
6902
7067
|
if(IsNestedDict(formatspecs) and fmttype=="auto" and
|
|
6903
7068
|
(outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
|
|
6904
7069
|
get_in_ext = os.path.splitext(outfile)
|
|
@@ -6972,7 +7137,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
         fp.close()
     return True

-def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     if not isinstance(infiles, list):
         infiles = [infiles]
     returnout = False
@@ -6987,7 +7152,7 @@ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=Fals
         return True
     return returnout

-def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     if(IsNestedDict(formatspecs) and fmttype=="auto" and
        (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
         get_in_ext = os.path.splitext(outfile)
@@ -7059,7 +7224,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
         fp.close()
     return True

-def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     if(IsNestedDict(formatspecs) and fmttype=="auto" and
        (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
         get_in_ext = os.path.splitext(outfile)
@@ -7132,7 +7297,7 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
         fp.close()
     return True

-def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     if not isinstance(infiles, list):
         infiles = [infiles]
     returnout = False
@@ -7147,7 +7312,7 @@ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype=
         return True
     return returnout

-def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     if(IsNestedDict(formatspecs) and fmttype=="auto" and
        (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
         get_in_ext = os.path.splitext(outfile)
@@ -7220,7 +7385,7 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
         fp.close()
     return True

-def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     if not isinstance(infiles, list):
         infiles = [infiles]
     returnout = False
@@ -7236,11 +7401,10 @@ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype=
     return returnout

 if(not rarfile_support):
-    def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
         return False
-
-
-    def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+else:
+    def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
         if(IsNestedDict(formatspecs) and fmttype=="auto" and
            (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
             get_in_ext = os.path.splitext(outfile)
@@ -7313,7 +7477,7 @@ if(rarfile_support):
         fp.close()
     return True

-def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     if not isinstance(infiles, list):
         infiles = [infiles]
     returnout = False
@@ -7329,11 +7493,10 @@ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype=
     return returnout
 
 if(not py7zr_support):
-    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
         return False
-
-
-    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+else:
+    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
         if(IsNestedDict(formatspecs) and fmttype=="auto" and
                 (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
             get_in_ext = os.path.splitext(outfile)
@@ -7406,7 +7569,7 @@ if(py7zr_support):
         fp.close()
         return True
 
-    def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
         if not isinstance(infiles, list):
             infiles = [infiles]
         returnout = False
@@ -7421,7 +7584,7 @@ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmt
             return True
     return returnout
 
-def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
     return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
 
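`AppendInFileWithContentToOutFile` stays a two-step wrapper: parse the existing archive into an in-memory list, then append that list to the output; only the five-entry checksum default changes here. A usage sketch with hypothetical file names:

import pyarchivefile

# Re-pack an existing archive into a new output file; the checksum
# types default to ["md5"] * 5 per the updated signature.
pyarchivefile.AppendInFileWithContentToOutFile("old.cat", "new.cat", verbose=True)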
@@ -9164,43 +9327,41 @@ def CheckSumSupport(checkfor, guaranteed=True):
     return False
 
 
-def PackArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def PackArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
 
-def PackStackedArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def PackStackedArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
 
-def PackArchiveFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def PackArchiveFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     return PackArchiveFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, returnfp)
 
 
-def PackArchiveFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def PackArchiveFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     return AppendFilesWithContentFromTarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
 
 
-def PackArchiveFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def PackArchiveFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     return AppendFilesWithContentFromZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
 
 
 if(not rarfile_support):
-    def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+    def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
         return False
-
-
-    def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+else:
+    def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
         return AppendFilesWithContentFromRarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
 
 
 if(not py7zr_support):
-    def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+    def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
         return False
-
-
-    def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+else:
+    def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
         return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
 
 
-def PackArchiveFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def PackArchiveFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     checkcompressfile = CheckCompressionSubType(infile, formatspecs, 0, True)
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
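All of the `Pack*` entry points above are one-line forwards to their `Append*` counterparts, now carrying the five-entry checksum list (including `PackArchiveFileFromDirList`, whose old default had only three entries). A brief usage sketch with hypothetical paths:

import pyarchivefile

# Pack loose files, or convert tar/zip archives, via the wrappers.
pyarchivefile.PackArchiveFile(["README.md", "docs"], "out.cat", verbose=True)
pyarchivefile.PackArchiveFileFromTarFile("backup.tar", "backup.cat")
pyarchivefile.PackArchiveFileFromZipFile("site.zip", "site.cat")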
@@ -9422,19 +9583,56 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
     else:
         inheader = ReadFileHeaderDataWoSize(fp, formatspecs['format_delimiter'])
 
-    fnumextrafieldsize = int(inheader[
-    fnumextrafields = int(inheader[
-    extrastart =
+    fnumextrafieldsize = int(inheader[13], 16)
+    fnumextrafields = int(inheader[14], 16)
+    extrastart = 15
     extraend = extrastart + fnumextrafields
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
-
-    fostype = inheader[3]
-    fpythontype = inheader[4]
-    fnumfiles = int(inheader[5], 16)
+    fnumfiles = int(inheader[6], 16)
     fprechecksumtype = inheader[-2]
     fprechecksum = inheader[-1]
+    outfseeknextfile = inheader[7]
+    fjsonsize = int(inheader[10], 16)
+    fjsonchecksumtype = inheader[11]
+    fjsonchecksum = inheader[12]
+    fprejsoncontent = fp.read(fjsonsize)
+    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
+    if(fjsonsize > 0):
+        if(hmac.compare_digest(jsonfcs, fjsonchecksum)):
+            if(verbose):
+                VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(outfjstart))
+                VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
+        else:
+            valid_archive = False
+            invalid_archive = True
+            if(verbose):
+                VerbosePrintOut("File JSON Data Checksum Error at offset " + str(outfjstart))
+                VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
+    if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+        VerbosePrintOut("File JSON Data Checksum Error with file " +
+                        fname + " at offset " + str(fheaderstart))
+        VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
+        return False
+    # Next seek directive
+    if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile.replace("+", ""))
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall(r"^\-([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall(r"^([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 0)
+    else:
+        return False
 
     il = 0
     headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
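This validate hunk re-indexes the header fields (extra-field data now starts at index 13, the file count moves from index 5 to 6) and adds two behaviors: a constant-time checksum check of the JSON metadata block, and a seek directive that is either relative ("+N"/"-N") or absolute ("N"). A self-contained sketch of both ideas, with a hypothetical get_checksum() standing in for the library's GetFileChecksum():

import hashlib
import hmac
import io
import re

def get_checksum(data):
    # Stand-in for GetFileChecksum(); real code supports many algorithms.
    return hashlib.md5(data).hexdigest()

def check_json_block(fp, size, stored_checksum):
    content = fp.read(size)
    # hmac.compare_digest avoids timing side channels when comparing digests.
    return hmac.compare_digest(get_checksum(content), stored_checksum)

def seek_next(fp, directive):
    # "+N": skip N bytes forward; "-N": seek N bytes back; "N": absolute offset.
    if re.match(r"^\+[0-9]+$", directive):
        fp.seek(int(directive.lstrip("+")), 1)
    elif re.match(r"^\-[0-9]+$", directive):
        fp.seek(int(directive), 1)
    elif re.match(r"^[0-9]+$", directive):
        fp.seek(int(directive), 0)
    else:
        raise ValueError("bad seek directive: " + directive)

buf = io.BytesIO(b'{"key": "value"}')
print(check_json_block(buf, 16, get_checksum(b'{"key": "value"}')))  # True
seek_next(buf, "0")  # rewind to the start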
File without changes (10 files)