PyFoxFile 0.25.0__tar.gz → 0.25.2__tar.gz
This diff shows the changes between publicly released versions of the package as they appear in the supported public registries. It is provided for informational purposes only.
- {pyfoxfile-0.25.0 → pyfoxfile-0.25.2}/PKG-INFO +1 -1
- {pyfoxfile-0.25.0 → pyfoxfile-0.25.2}/PyFoxFile.egg-info/PKG-INFO +1 -1
- {pyfoxfile-0.25.0 → pyfoxfile-0.25.2}/foxfile.py +11 -11
- {pyfoxfile-0.25.0 → pyfoxfile-0.25.2}/pyfoxfile.py +301 -103
- {pyfoxfile-0.25.0 → pyfoxfile-0.25.2}/pyproject.toml +1 -1
- {pyfoxfile-0.25.0 → pyfoxfile-0.25.2}/LICENSE +0 -0
- {pyfoxfile-0.25.0 → pyfoxfile-0.25.2}/PyFoxFile.egg-info/SOURCES.txt +0 -0
- {pyfoxfile-0.25.0 → pyfoxfile-0.25.2}/PyFoxFile.egg-info/dependency_links.txt +0 -0
- {pyfoxfile-0.25.0 → pyfoxfile-0.25.2}/PyFoxFile.egg-info/top_level.txt +0 -0
- {pyfoxfile-0.25.0 → pyfoxfile-0.25.2}/PyFoxFile.egg-info/zip-safe +0 -0
- {pyfoxfile-0.25.0 → pyfoxfile-0.25.2}/README.md +0 -0
- {pyfoxfile-0.25.0 → pyfoxfile-0.25.2}/foxneofile.py +0 -0
- {pyfoxfile-0.25.0 → pyfoxfile-0.25.2}/neofoxfile.py +0 -0
- {pyfoxfile-0.25.0 → pyfoxfile-0.25.2}/setup.cfg +0 -0
- {pyfoxfile-0.25.0 → pyfoxfile-0.25.2}/setup.py +0 -0
--- pyfoxfile-0.25.0/foxfile.py
+++ pyfoxfile-0.25.2/foxfile.py
@@ -14,7 +14,7 @@
 Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
 Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski

-$FileInfo: foxfile.py - Last Update: 11/
+$FileInfo: foxfile.py - Last Update: 11/6/2025 Ver. 0.25.2 RC 1 - Author: cooldude2k $
 '''

 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -153,38 +153,38 @@ if active_action:
 checkcompressfile = pyfoxfile.CheckCompressionSubType(
 input_file, fnamedict, True)
 if((pyfoxfile.IsNestedDict(fnamedict) and checkcompressfile in fnamedict) or (pyfoxfile.IsSingleDict(fnamedict) and checkcompressfile==fnamedict['format_magic'])):
-tmpout = pyfoxfile.RePackFoxFile(input_file, getargs.output, getargs.compression, getargs.wholefile, getargs.level, pyfoxfile.compressionlistalt, False, getargs.filestart, 0, 0, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], getargs.skipchecksum, [], {}, fnamedict, getargs.verbose, False)
+tmpout = pyfoxfile.RePackFoxFile(input_file, getargs.output, getargs.compression, getargs.wholefile, getargs.level, pyfoxfile.compressionlistalt, False, getargs.filestart, 0, 0, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], getargs.skipchecksum, [], {}, fnamedict, getargs.verbose, False)
 else:
 tmpout = pyfoxfile.PackFoxFileFromInFile(
-input_file, getargs.output, __file_format_default__, getargs.compression, getargs.wholefile, getargs.level, pyfoxfile.compressionlistalt, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], [], {}, fnamedict, getargs.verbose, False)
+input_file, getargs.output, __file_format_default__, getargs.compression, getargs.wholefile, getargs.level, pyfoxfile.compressionlistalt, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], [], {}, fnamedict, getargs.verbose, False)
 if(not tmpout):
 sys.exit(1)
 else:
-pyfoxfile.PackFoxFile(getargs.input, getargs.output, getargs.text, __file_format_default__, getargs.compression, getargs.wholefile, getargs.level, pyfoxfile.compressionlistalt, False, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], [], {}, fnamedict, getargs.verbose, False)
+pyfoxfile.PackFoxFile(getargs.input, getargs.output, getargs.text, __file_format_default__, getargs.compression, getargs.wholefile, getargs.level, pyfoxfile.compressionlistalt, False, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], [], {}, fnamedict, getargs.verbose, False)
 elif active_action == 'repack':
 if getargs.convert:
 checkcompressfile = pyfoxfile.CheckCompressionSubType(
 input_file, fnamedict, True)
 if((pyfoxfile.IsNestedDict(fnamedict) and checkcompressfile in fnamedict) or (pyfoxfile.IsSingleDict(fnamedict) and checkcompressfile==fnamedict['format_magic'])):
 pyfoxfile.RePackFoxFile(input_file, getargs.output, getargs.compression, getargs.wholefile, getargs.level, pyfoxfile.compressionlistalt,
-False, getargs.filestart, 0, 0, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], getargs.skipchecksum, [], {}, fnamedict, getargs.verbose, False)
+False, getargs.filestart, 0, 0, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], getargs.skipchecksum, [], {}, fnamedict, getargs.verbose, False)
 else:
-pyfoxfile.PackFoxFileFromInFile(input_file, getargs.output, __file_format_default__, getargs.compression, getargs.wholefile, getargs.level, pyfoxfile.compressionlistalt, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], [], {}, fnamedict, getargs.verbose, False)
+pyfoxfile.PackFoxFileFromInFile(input_file, getargs.output, __file_format_default__, getargs.compression, getargs.wholefile, getargs.level, pyfoxfile.compressionlistalt, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], [], {}, fnamedict, getargs.verbose, False)
 if(not tmpout):
 sys.exit(1)
 else:
 pyfoxfile.RePackFoxFile(input_file, getargs.output, getargs.compression, getargs.wholefile, getargs.level, pyfoxfile.compressionlistalt,
-False, getargs.filestart, 0, 0, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], getargs.skipchecksum, [], {}, fnamedict, getargs.verbose, False)
+False, getargs.filestart, 0, 0, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], getargs.skipchecksum, [], {}, fnamedict, getargs.verbose, False)
 elif active_action == 'extract':
 if getargs.convert:
 checkcompressfile = pyfoxfile.CheckCompressionSubType(
 input_file, fnamedict, True)
 tempout = BytesIO()
 if((pyfoxfile.IsNestedDict(fnamedict) and checkcompressfile in fnamedict) or (pyfoxfile.IsSingleDict(fnamedict) and checkcompressfile==fnamedict['format_magic'])):
-tmpout = pyfoxfile.RePackFoxFile(input_file, tempout, getargs.compression, getargs.wholefile, getargs.level, pyfoxfile.compressionlistalt, False, getargs.filestart, 0, 0, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], getargs.skipchecksum, [], {}, fnamedict, False, False)
+tmpout = pyfoxfile.RePackFoxFile(input_file, tempout, getargs.compression, getargs.wholefile, getargs.level, pyfoxfile.compressionlistalt, False, getargs.filestart, 0, 0, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], getargs.skipchecksum, [], {}, fnamedict, False, False)
 else:
 tmpout = pyfoxfile.PackFoxFileFromInFile(
-input_file, tempout, __file_format_default__, getargs.compression, getargs.wholefile, getargs.level, pyfoxfile.compressionlistalt, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], [], {}, fnamedict, False, False)
+input_file, tempout, __file_format_default__, getargs.compression, getargs.wholefile, getargs.level, pyfoxfile.compressionlistalt, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], [], {}, fnamedict, False, False)
 if(not tmpout):
 sys.exit(1)
 input_file = tempout

@@ -208,10 +208,10 @@ if active_action:
 input_file, fnamedict, True)
 tempout = BytesIO()
 if((pyfoxfile.IsNestedDict(fnamedict) and checkcompressfile in fnamedict) or (pyfoxfile.IsSingleDict(fnamedict) and checkcompressfile==fnamedict['format_magic'])):
-tmpout = pyfoxfile.RePackFoxFile(input_file, tempout, getargs.compression, getargs.wholefile, getargs.level, pyfoxfile.compressionlistalt, False, getargs.filestart, 0, 0, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], getargs.skipchecksum, [], {}, fnamedict, False, False, False)
+tmpout = pyfoxfile.RePackFoxFile(input_file, tempout, getargs.compression, getargs.wholefile, getargs.level, pyfoxfile.compressionlistalt, False, getargs.filestart, 0, 0, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], getargs.skipchecksum, [], {}, fnamedict, False, False, False)
 else:
 tmpout = pyfoxfile.PackFoxFileFromInFile(
-input_file, tempout, __file_format_default__, getargs.compression, getargs.wholefile, getargs.level, pyfoxfile.compressionlistalt, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], [], {}, fnamedict, False, False)
+input_file, tempout, __file_format_default__, getargs.compression, getargs.wholefile, getargs.level, pyfoxfile.compressionlistalt, [getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum, getargs.checksum], [], {}, fnamedict, False, False)
 input_file = tempout
 if(not tmpout):
 sys.exit(1)
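Both foxfile.py hunks make the same mechanical change: every checksum-type list passed to PackFoxFile, PackFoxFileFromInFile, and RePackFoxFile grows from four entries to five. Judging from the pyfoxfile.py hunks below, the extra slot carries the algorithm for the new JSON metadata block; a hedged sketch of the apparent slot layout (variable names are illustrative, not part of the API):

    checksums = [getargs.checksum] * 5   # one algorithm name per checksummed region
    archive_cs = checksums[0:2]          # slots 0-1: archive header and its JSON block
    perfile_cs = checksums[2:5]          # slots 2-4: per-file header, JSON, and content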
--- pyfoxfile-0.25.0/pyfoxfile.py
+++ pyfoxfile-0.25.2/pyfoxfile.py
@@ -14,7 +14,7 @@
 Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
 Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski

-$FileInfo: pyfoxfile.py - Last Update: 11/
+$FileInfo: pyfoxfile.py - Last Update: 11/6/2025 Ver. 0.25.2 RC 1 - Author: cooldude2k $
 '''

 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes

@@ -650,12 +650,12 @@ __project__ = __program_name__
 __program_alt_name__ = __program_name__
 __project_url__ = "https://github.com/GameMaker2k/PyFoxFile"
 __project_release_url__ = __project_url__+"/releases/latest"
-__version_info__ = (0, 25,
-__version_date_info__ = (2025, 11,
+__version_info__ = (0, 25, 2, "RC 1", 1)
+__version_date_info__ = (2025, 11, 6, "RC 1", 1)
 __version_date__ = str(__version_date_info__[0]) + "." + str(
 __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
 __revision__ = __version_info__[3]
-__revision_id__ = "$Id:
+__revision_id__ = "$Id: 30b50b3fe5848bbe7a8ffa021b798be5dd67425e $"
 if(__version_info__[4] is not None):
 __version_date_plusrc__ = __version_date__ + \
 "-" + str(__version_date_info__[4])

@@ -4737,7 +4737,30 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
 VerbosePrintOut("'" + fprechecksum + "' != " +
 "'" + newfcs + "'")
 return False
-fnumfiles = int(inheader[
+fnumfiles = int(inheader[6], 16)
+outfseeknextfile = inheaderdata[7]
+fjsonsize = int(inheaderdata[10], 16)
+fjsonchecksumtype = inheader[11]
+fjsonchecksum = inheader[12]
+fp.read(fjsonsize)
+# Next seek directive
+if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
+fseeknextasnum = int(outfseeknextfile.replace("+", ""))
+if(abs(fseeknextasnum) == 0):
+pass
+fp.seek(fseeknextasnum, 1)
+elif(re.findall(r"^\-([0-9]+)", outfseeknextfile)):
+fseeknextasnum = int(outfseeknextfile)
+if(abs(fseeknextasnum) == 0):
+pass
+fp.seek(fseeknextasnum, 1)
+elif(re.findall(r"^([0-9]+)", outfseeknextfile)):
+fseeknextasnum = int(outfseeknextfile)
+if(abs(fseeknextasnum) == 0):
+pass
+fp.seek(fseeknextasnum, 0)
+else:
+return False
 countnum = 0
 flist = []
 while(countnum < fnumfiles):
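The added block reads the new JSON metadata fields out of the archive header, skips past the JSON payload, and then honors the header's seek directive. The directive grammar used above: a leading "+" is a relative skip forward, a leading "-" a relative move back, and a bare number an absolute offset. The same convention as a standalone sketch (the helper name is illustrative):

    import re

    def follow_seek_directive(fp, directive):
        # "+N": relative skip forward; "-N": relative move back; "N": absolute offset
        if re.match(r"^\+[0-9]+$", directive):
            fp.seek(int(directive.replace("+", "")), 1)
        elif re.match(r"^-[0-9]+$", directive):
            fp.seek(int(directive), 1)          # int("-N") is negative
        elif re.match(r"^[0-9]+$", directive):
            fp.seek(int(directive), 0)
        else:
            return False                        # malformed directive
        return True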
@@ -4776,10 +4799,10 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
 else:
 inheader = ReadFileHeaderDataWoSize(
 fp, formatspecs['format_delimiter'])
-fnumextrafieldsize = int(inheader[
-fnumextrafields = int(inheader[
+fnumextrafieldsize = int(inheader[13], 16)
+fnumextrafields = int(inheader[14], 16)
 fextrafieldslist = []
-extrastart =
+extrastart = 15
 extraend = extrastart + fnumextrafields
 while(extrastart < extraend):
 fextrafieldslist.append(inheader[extrastart])

@@ -4799,7 +4822,105 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
 fhencoding = inheader[2]
 fostype = inheader[3]
 fpythontype = inheader[4]
-
+fprojectname = inheader[4]
+fnumfiles = int(inheader[6], 16)
+fseeknextfile = inheader[7]
+fjsontype = inheader[8]
+fjsonlen = int(inheader[9], 16)
+fjsonsize = int(inheader[10], 16)
+fjsonchecksumtype = inheader[11]
+fjsonchecksum = inheader[12]
+fjsoncontent = {}
+fjstart = fp.tell()
+if(fjsontype=="json"):
+fjsoncontent = {}
+fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+if(fjsonsize > 0):
+try:
+fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
+except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+try:
+fjsonrawcontent = fprejsoncontent
+fjsoncontent = json.loads(fprejsoncontent)
+except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+fprejsoncontent = ""
+fjsonrawcontent = fprejsoncontent
+fjsoncontent = {}
+else:
+fprejsoncontent = ""
+fjsonrawcontent = fprejsoncontent
+fjsoncontent = {}
+elif(testyaml and fjsontype == "yaml"):
+fjsoncontent = {}
+fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+if (fjsonsize > 0):
+try:
+# try base64 → utf-8 → YAML
+fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+try:
+# fall back to treating the bytes as plain text YAML
+fjsonrawcontent = fprejsoncontent
+fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+except (UnicodeDecodeError, yaml.YAMLError):
+# final fallback: empty
+fprejsoncontent = ""
+fjsonrawcontent = fprejsoncontent
+fjsoncontent = {}
+else:
+fprejsoncontent = ""
+fjsonrawcontent = fprejsoncontent
+fjsoncontent = {}
+elif(not testyaml and fjsontype == "yaml"):
+fjsoncontent = {}
+fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+fprejsoncontent = ""
+fjsonrawcontent = fprejsoncontent
+elif(fjsontype=="list"):
+fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+flisttmp = MkTempFile()
+flisttmp.write(fprejsoncontent.encode())
+flisttmp.seek(0)
+fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
+flisttmp.close()
+fjsonrawcontent = fjsoncontent
+if(fjsonlen==1):
+try:
+fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
+fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
+fjsonlen = len(fjsoncontent)
+except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+try:
+fjsonrawcontent = fjsoncontent[0]
+fjsoncontent = json.loads(fjsoncontent[0])
+except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+pass
+fjend = fp.tell()
+if(re.findall("^\\+([0-9]+)", fseeknextfile)):
+fseeknextasnum = int(fseeknextfile.replace("+", ""))
+if(abs(fseeknextasnum) == 0):
+pass
+fp.seek(fseeknextasnum, 1)
+elif(re.findall("^\\-([0-9]+)", fseeknextfile)):
+fseeknextasnum = int(fseeknextfile)
+if(abs(fseeknextasnum) == 0):
+pass
+fp.seek(fseeknextasnum, 1)
+elif(re.findall("^([0-9]+)", fseeknextfile)):
+fseeknextasnum = int(fseeknextfile)
+if(abs(fseeknextasnum) == 0):
+pass
+fp.seek(fseeknextasnum, 0)
+else:
+return False
+jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
+if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+VerbosePrintOut("File JSON Data Checksum Error with file " +
+fname + " at offset " + str(fheaderstart))
+VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
+return False
 fprechecksumtype = inheader[-2]
 fprechecksum = inheader[-1]
 headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
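The JSON branch above attempts base64-wrapped JSON first and falls back to plain JSON before degrading to an empty dict; the YAML and list branches follow the same ladder. A hedged sketch of that decode order in isolation (the function name is illustrative):

    import base64
    import binascii
    import json

    def decode_json_block(raw):
        # raw: the UTF-8 text read from the header's JSON region
        try:
            return json.loads(base64.b64decode(raw.encode("UTF-8")).decode("UTF-8"))
        except (binascii.Error, json.JSONDecodeError, UnicodeDecodeError):
            try:
                return json.loads(raw)
            except (json.JSONDecodeError, UnicodeDecodeError):
                return {}  # unreadable metadata degrades to an empty dict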
@@ -4812,7 +4933,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
 return False
 formversions = re.search('(.*?)(\\d+)', formstring).groups()
 fcompresstype = ""
-outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
+outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fprojectname': fprojectname, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fseeknextfile': fseeknextfile, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'fjsonchecksumtype': fjsonchecksumtype, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fjsonchecksum': fjsonchecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
 if (seekstart < 0) or (seekstart > fnumfiles):
 seekstart = 0
 if (seekend == 0) or (seekend > fnumfiles) or (seekend < seekstart):

@@ -4932,10 +5053,10 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 else:
 inheader = ReadFileHeaderDataWoSize(
 fp, formatspecs['format_delimiter'])
-fnumextrafieldsize = int(inheader[
-fnumextrafields = int(inheader[
+fnumextrafieldsize = int(inheader[13], 16)
+fnumextrafields = int(inheader[14], 16)
 fextrafieldslist = []
-extrastart =
+extrastart = 15
 extraend = extrastart + fnumextrafields
 while(extrastart < extraend):
 fextrafieldslist.append(inheader[extrastart])

@@ -4952,10 +5073,40 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 formversion = re.findall("([\\d]+)", formstring)
 fheadsize = int(inheader[0], 16)
 fnumfields = int(inheader[1], 16)
-
-
-
-
+fnumfiles = int(inheader[6], 16)
+fseeknextfile = inheaderdata[7]
+fjsontype = int(inheader[8], 16)
+fjsonlen = int(inheader[9], 16)
+fjsonsize = int(inheader[10], 16)
+fjsonchecksumtype = inheader[11]
+fjsonchecksum = inheader[12]
+fjsoncontent = {}
+fjstart = fp.tell()
+fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+fjend = fp.tell()
+if(re.findall("^\\+([0-9]+)", fseeknextfile)):
+fseeknextasnum = int(fseeknextfile.replace("+", ""))
+if(abs(fseeknextasnum) == 0):
+pass
+fp.seek(fseeknextasnum, 1)
+elif(re.findall("^\\-([0-9]+)", fseeknextfile)):
+fseeknextasnum = int(fseeknextfile)
+if(abs(fseeknextasnum) == 0):
+pass
+fp.seek(fseeknextasnum, 1)
+elif(re.findall("^([0-9]+)", fseeknextfile)):
+fseeknextasnum = int(fseeknextfile)
+if(abs(fseeknextasnum) == 0):
+pass
+fp.seek(fseeknextasnum, 0)
+else:
+return False
+jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
+if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+VerbosePrintOut("File JSON Data Checksum Error with file " +
+fname + " at offset " + str(fheaderstart))
+VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
+return False
 fprechecksumtype = inheader[-2]
 fprechecksum = inheader[-1]
 headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)

@@ -5009,7 +5160,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 prenewfcs = GetHeaderChecksum(
 preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
 prefcs = preheaderdata[-2]
-if(prefcs
+if(not hmac.compare_digest(prefcs, prenewfcs) and not skipchecksum):
 VerbosePrintOut("File Header Checksum Error with file " +
 prefname + " at offset " + str(prefhstart))
 VerbosePrintOut("'" + prefcs + "' != " +

@@ -5030,7 +5181,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 prefcontents, preheaderdata[-3].lower(), False, formatspecs)
 prefccs = preheaderdata[-1]
 pyhascontents = True
-if(not hmac.compare_digest(
+if(not hmac.compare_digest(prefccs, prenewfccs) and not skipchecksum):
 VerbosePrintOut("File Content Checksum Error with file " +
 prefname + " at offset " + str(prefcontentstart))
 VerbosePrintOut("'" + prefccs +
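Both verification sites switch from a plain comparison to hmac.compare_digest, which compares in constant time so the position of the first mismatching byte does not leak through timing. Minimal usage:

    import hmac

    # Constant-time comparison; True only when both digest strings match exactly.
    ok = hmac.compare_digest("d41d8cd98f00b204e9800998ecf8427e",
                             "d41d8cd98f00b204e9800998ecf8427e")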
@@ -5391,12 +5542,7 @@ def AppendNullBytes(indata=None, delimiter=__file_format_dict__['format_delimite
 def _hex_lower(n):
 return format(int(n), 'x').lower()

-def AppendFileHeader(fp,
-numfiles,
-fencoding,
-extradata=None,
-checksumtype="md5",
-formatspecs=__file_format_dict__):
+def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__):
 """
 Build and write the archive file header.
 Returns the same file-like 'fp' on success, or False on failure.

@@ -5444,24 +5590,44 @@ def AppendFileHeader(
 # 4) core header fields before checksum:
 # tmpoutlenhex, fencoding, platform.system(), fnumfiles
 fnumfiles_hex = _hex_lower(numfiles)
-
+fjsontype = "json"
+if(len(jsondata) > 0):
+try:
+fjsoncontent = json.dumps(jsondata, separators=(',', ':')).encode("UTF-8")
+except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+fjsoncontent = "".encode("UTF-8")
+else:
+fjsoncontent = "".encode("UTF-8")
+fjsonsize = format(len(fjsoncontent), 'x').lower()
+fjsonlen = format(len(jsondata), 'x').lower()
+tmpoutlist = []
+tmpoutlist.append(fjsontype)
+tmpoutlist.append(fjsonlen)
+tmpoutlist.append(fjsonsize)
+if(len(jsondata) > 0):
+tmpoutlist.append(checksumtype[1])
+tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[1], True, formatspecs))
+else:
+tmpoutlist.append("none")
+tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs))
 # Preserve your original "tmpoutlen" computation exactly
-tmpoutlist
-
+tmpoutlist.append(extrasizelen)
+tmpoutlist.append(extrafields)
+tmpoutlen = 8 + len(tmpoutlist) + len(xlist)
 tmpoutlenhex = _hex_lower(tmpoutlen)

 # Serialize the first group
-fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), py_implementation, fnumfiles_hex], delimiter)
+fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), py_implementation, __program_name__, fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
 # Append tmpoutlist
 fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
 # Append extradata items if any
 if xlist:
 fnumfilesa += AppendNullBytes(xlist, delimiter)
 # Append checksum type
-fnumfilesa += AppendNullByte(checksumtype, delimiter)
+fnumfilesa += AppendNullByte(checksumtype[0], delimiter)

 # 5) inner checksum over fnumfilesa
-outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype, True, formatspecs)
+outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs)
 tmpfileoutstr = fnumfilesa + AppendNullByte(outfileheadercshex, delimiter)

 # 6) size of (tmpfileoutstr) excluding one delimiter, per your original math
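With these changes the archive header now carries the program name, a seek directive, and a JSON metadata block (type, length, size, checksum type, checksum) ahead of the extra-data fields. A hedged sketch of a call against the new signature (the jsondata payload is hypothetical):

    from io import BytesIO
    from pyfoxfile import AppendFileHeader  # as changed in this diff

    fp = BytesIO()
    AppendFileHeader(fp, 0, "UTF-8", [],
                     {"creator": "example"},  # hypothetical jsondata payload
                     ["md5", "md5"])          # slot 0: header checksum, slot 1: JSON checksum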
@@ -5474,7 +5640,7 @@ def AppendFileHeader(
 + fnumfilesa
 )

-outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype, True, formatspecs)
+outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs)
 fnumfilesa += AppendNullByte(outfileheadercshex, delimiter)

 # 8) final total size field (again per your original logic)

@@ -5482,10 +5648,11 @@ def AppendFileHeader(
 formheaersizestr = AppendNullByte(formheaersize, delimiter) # computed but not appended in original
 # Note: you computed 'formheaersizestr' but didn't append it afterward in the original either.
 # Keeping that behavior for compatibility.
-
+nullstrecd = formatspecs['format_delimiter'].encode('UTF-8')
+outfileout = fnumfilesa + fjsoncontent + nullstrecd
 # 9) write and try to sync
 try:
-fp.write(
+fp.write(outfileout)
 except (OSError, io.UnsupportedOperation):
 return False

@@ -5506,21 +5673,21 @@ def AppendFileHeader(
 return fp


-def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype="md5", formatspecs=__file_format_multi_dict__):
+def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__):
 if(IsNestedDict(formatspecs) and fmttype in formatspecs):
 formatspecs = formatspecs[fmttype]
 elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
 fmttype = __file_format_default__
 formatspecs = formatspecs[fmttype]
-AppendFileHeader(fp, 0, "UTF-8", [], checksumtype, formatspecs)
+AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs)
 return fp


-def MakeEmptyFoxFilePointer(fp, fmttype=__file_format_default__, checksumtype="md5", formatspecs=__file_format_multi_dict__):
+def MakeEmptyFoxFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__):
 return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)


-def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="md5", formatspecs=__file_format_multi_dict__, returnfp=False):
+def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, returnfp=False):
 if(IsNestedDict(formatspecs) and fmttype=="auto" and
 (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
 get_in_ext = os.path.splitext(outfile)

@@ -5561,7 +5728,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
 fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
 except PermissionError:
 return False
-AppendFileHeader(fp, 0, "UTF-8", [], checksumtype, formatspecs)
+AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs)
 if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
 fp = CompressOpenFileAlt(
 fp, compression, compressionlevel, compressionuselist, formatspecs)

@@ -5592,7 +5759,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
 return True


-def MakeEmptyFoxFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="md5", formatspecs=__file_format_dict__, returnfp=False):
+def MakeEmptyFoxFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, returnfp=False):
 return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, returnfp)

@@ -5679,7 +5846,7 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
 pass
 return fp

-def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
 if(not hasattr(fp, "write")):
 return False
 advancedlist = formatspecs['use_advanced_list']

@@ -5728,7 +5895,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
 inodetoforminode = {}
 numfiles = int(len(GetDirList))
 fnumfiles = format(numfiles, 'x').lower()
-AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
 try:
 fp.flush()
 if(hasattr(os, "sync")):

@@ -5985,7 +6152,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
 tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
 fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
 AppendFileHeaderWithContent(
-fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
 try:
 fp.flush()
 if(hasattr(os, "sync")):

@@ -5994,7 +6161,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
 pass
 return fp

-def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
 if(not hasattr(fp, "write")):
 return False
 if(verbose):

@@ -6063,7 +6230,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
 except FileNotFoundError:
 return False
 numfiles = int(len(tarfp.getmembers()))
-AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
 try:
 fp.flush()
 if(hasattr(os, "sync")):

@@ -6207,7 +6374,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
 tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
 fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
 AppendFileHeaderWithContent(
-fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
 try:
 fp.flush()
 if(hasattr(os, "sync")):

@@ -6217,7 +6384,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
 fcontents.close()
 return fp

-def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
 if(not hasattr(fp, "write")):
 return False
 if(verbose):

@@ -6256,7 +6423,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
 if(ziptest):
 VerbosePrintOut("Bad file found!")
 numfiles = int(len(zipfp.infolist()))
-AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
 try:
 fp.flush()
 if(hasattr(os, "sync")):

@@ -6421,7 +6588,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
 tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
 fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
 AppendFileHeaderWithContent(
-fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
 try:
 fp.flush()
 if(hasattr(os, "sync")):

@@ -6432,11 +6599,10 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
 return fp

 if(not rarfile_support):
-def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
 return False
-
-
-def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+else:
+def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
 if(not hasattr(fp, "write")):
 return False
 if(verbose):
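Here 0.25.2 also collapses the paired import guards (an `if(not rarfile_support):` stub and a separate `if(rarfile_support):` real definition) into a single if/else; behavior should be unchanged. The pattern reduced to a sketch:

    try:
        import rarfile  # optional dependency
        rarfile_support = True
    except ImportError:
        rarfile_support = False

    if(not rarfile_support):
        def AppendFilesWithContentFromRarFile(*args, **kwargs):
            return False  # stub: RAR input unsupported without the module
    else:
        def AppendFilesWithContentFromRarFile(*args, **kwargs):
            ...  # real implementation, as in the hunk above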
@@ -6457,7 +6623,7 @@ if(rarfile_support):
 if(rartest):
 VerbosePrintOut("Bad file found!")
 numfiles = int(len(rarfp.infolist()))
-AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
 try:
 fp.flush()
 if(hasattr(os, "sync")):

@@ -6655,7 +6821,7 @@ if(rarfile_support):
 tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
 fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
 AppendFileHeaderWithContent(
-fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
 try:
 fp.flush()
 if(hasattr(os, "sync")):

@@ -6666,11 +6832,10 @@ if(rarfile_support):
 return fp

 if(not py7zr_support):
-def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
 return False
-
-
-def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+else:
+def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
 if(not hasattr(fp, "write")):
 return False
 if(verbose):

@@ -6693,7 +6858,7 @@ if(py7zr_support):
 if(sztestalt):
 VerbosePrintOut("Bad file found!")
 numfiles = int(len(szpfp.list()))
-AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
 try:
 fp.flush()
 if(hasattr(os, "sync")):

@@ -6829,7 +6994,7 @@ if(py7zr_support):
 tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
 fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
 AppendFileHeaderWithContent(
-fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
 try:
 fp.flush()
 if(hasattr(os, "sync")):

@@ -6839,7 +7004,7 @@ if(py7zr_support):
 fcontents.close()
 return fp

-def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
+def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
 if(not hasattr(fp, "write")):
 return False
 if(verbose):

@@ -6855,7 +7020,7 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
 inodetoforminode = {}
 numfiles = int(len(GetDirList))
 fnumfiles = format(numfiles, 'x').lower()
-AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+AppendFileHeader(fp, numfiles, "UTF-8", [], [checksumtype[0], checksumtype[1]], formatspecs)
 for curfname in GetDirList:
 ftype = format(curfname[0], 'x').lower()
 fencoding = curfname[1]
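One detail worth flagging in this hunk: unlike the other updated call sites, this AppendFileHeader call does not appear to pass the `{}` jsondata argument before the checksum list, so the list would bind to the jsondata parameter; compare:

    # Other call sites in this diff:
    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
    # This call site (no {} before the checksum list):
    AppendFileHeader(fp, numfiles, "UTF-8", [], [checksumtype[0], checksumtype[1]], formatspecs)

This may be an oversight in the released code rather than an intentional difference.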
@@ -6897,7 +7062,7 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
|
|
|
6897
7062
|
fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, fdev_minor, fdev_major, fseeknextfile]
|
|
6898
7063
|
fcontents.seek(0, 0)
|
|
6899
7064
|
AppendFileHeaderWithContent(
|
|
6900
|
-
fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
|
|
7065
|
+
fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
|
|
6901
7066
|
return fp
|
|
6902
7067
|
|
|
6903
7068
|
|
|
@@ -6906,7 +7071,7 @@ def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], json
|
|
|
6906
7071
|
return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink, checksumtype, formatspecs, verbose)
|
|
6907
7072
|
|
|
6908
7073
|
|
|
6909
|
-
def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7074
|
+
def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
6910
7075
|
if(IsNestedDict(formatspecs) and fmttype=="auto" and
|
|
6911
7076
|
(outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
|
|
6912
7077
|
get_in_ext = os.path.splitext(outfile)
|
|
@@ -6980,7 +7145,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
|
|
|
6980
7145
|
fp.close()
|
|
6981
7146
|
return True
|
|
6982
7147
|
|
|
6983
|
-
def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7148
|
+
def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
6984
7149
|
if not isinstance(infiles, list):
|
|
6985
7150
|
infiles = [infiles]
|
|
6986
7151
|
returnout = False
|
|
@@ -6995,7 +7160,7 @@ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=Fals
|
|
|
6995
7160
|
return True
|
|
6996
7161
|
return returnout
|
|
6997
7162
|
|
|
6998
|
-
def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
|
|
7163
|
+
def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
|
|
6999
7164
|
if(IsNestedDict(formatspecs) and fmttype=="auto" and
|
|
7000
7165
|
(outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
|
|
7001
7166
|
get_in_ext = os.path.splitext(outfile)
|
|
@@ -7067,7 +7232,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
|
|
|
7067
7232
|
fp.close()
|
|
7068
7233
|
return True
|
|
7069
7234
|
|
|
7070
|
-
def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7235
|
+
def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7071
7236
|
if(IsNestedDict(formatspecs) and fmttype=="auto" and
|
|
7072
7237
|
(outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
|
|
7073
7238
|
get_in_ext = os.path.splitext(outfile)
|
|
@@ -7140,7 +7305,7 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
|
|
|
7140
7305
|
fp.close()
|
|
7141
7306
|
return True
|
|
7142
7307
|
|
|
7143
|
-
def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7308
|
+
def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7144
7309
|
if not isinstance(infiles, list):
|
|
7145
7310
|
infiles = [infiles]
|
|
7146
7311
|
returnout = False
|
|
@@ -7155,7 +7320,7 @@ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype=
|
|
|
7155
7320
|
return True
|
|
7156
7321
|
return returnout
|
|
7157
7322
|
|
|
7158
|
-
def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7323
|
+
def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7159
7324
|
if(IsNestedDict(formatspecs) and fmttype=="auto" and
|
|
7160
7325
|
(outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
|
|
7161
7326
|
get_in_ext = os.path.splitext(outfile)
|
|
@@ -7228,7 +7393,7 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
|
|
|
7228
7393
|
fp.close()
|
|
7229
7394
|
return True
|
|
7230
7395
|
|
|
7231
|
-
def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7396
|
+
def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7232
7397
|
if not isinstance(infiles, list):
|
|
7233
7398
|
infiles = [infiles]
|
|
7234
7399
|
returnout = False
|
|
@@ -7244,11 +7409,10 @@ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype=
|
|
|
7244
7409
|
return returnout
|
|
7245
7410
|
|
|
7246
7411
|
if(not rarfile_support):
|
|
7247
|
-
def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7412
|
+
def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7248
7413
|
return False
|
|
7249
|
-
|
|
7250
|
-
|
|
7251
|
-
def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7414
|
+
else:
|
|
7415
|
+
def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7252
7416
|
if(IsNestedDict(formatspecs) and fmttype=="auto" and
|
|
7253
7417
|
(outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
|
|
7254
7418
|
get_in_ext = os.path.splitext(outfile)
|
|
@@ -7321,7 +7485,7 @@ if(rarfile_support):
         fp.close()
         return True

-    def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
         if not isinstance(infiles, list):
             infiles = [infiles]
         returnout = False
@@ -7337,11 +7501,10 @@ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype=
     return returnout

 if(not py7zr_support):
-    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
         return False
-
-
-def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+else:
+    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
         if(IsNestedDict(formatspecs) and fmttype=="auto" and
                 (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
             get_in_ext = os.path.splitext(outfile)
@@ -7414,7 +7577,7 @@ if(py7zr_support):
         fp.close()
         return True

-    def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
         if not isinstance(infiles, list):
             infiles = [infiles]
         returnout = False
@@ -7429,7 +7592,7 @@ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmt
             return True
     return returnout

-def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
     return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)

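AppendInFileWithContentToOutFile is the repack path: it reads an existing archive into an in-memory list with ReadInFileWithContentToList and feeds that list to AppendListsWithContentToOutFile, so the checksumtype list applies to the rewritten copy rather than to the source archive. A sketch of the equivalent two-step call (argument values taken from the body above; the file names are placeholders, and passing checksumtype by keyword assumes AppendListsWithContentToOutFile accepts it that way):

    import pyfoxfile

    # Step 1: parse the existing archive into a content list.
    inlist = pyfoxfile.ReadInFileWithContentToList(
        "old.fox", "auto", 0, 0, False, False, True, False,
        pyfoxfile.__file_format_dict__)

    # Step 2: write the list back out, checksummed with the new
    # five-entry default.
    pyfoxfile.AppendListsWithContentToOutFile(
        inlist, "new.fox",
        checksumtype=["md5", "md5", "md5", "md5", "md5"])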
@@ -9172,43 +9335,41 @@ def CheckSumSupport(checkfor, guaranteed=True):
     return False


-def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)

-def PackStackedFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def PackStackedFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)

-def PackFoxFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def PackFoxFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     return PackFoxFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, returnfp)


-def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     return AppendFilesWithContentFromTarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)


-def PackFoxFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def PackFoxFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     return AppendFilesWithContentFromZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)


 if(not rarfile_support):
-    def PackFoxFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+    def PackFoxFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
         return False
-
-
-def PackFoxFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+else:
+    def PackFoxFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
         return AppendFilesWithContentFromRarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)


 if(not py7zr_support):
-    def PackFoxFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+    def PackFoxFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
         return False
-
-
-def PackFoxFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+else:
+    def PackFoxFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
         return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)


-def PackFoxFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def PackFoxFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     checkcompressfile = CheckCompressionSubType(infile, formatspecs, 0, True)
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
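Every Pack* helper in this hunk is a thin forwarding wrapper: the bodies shown above do nothing except delegate to the matching Append* function (or, for PackFoxFileFromDirList, to PackFoxFile). Note also that this hunk normalizes the checksumtype defaults, which previously varied between three and four entries across these wrappers, to a uniform five. Under that reading, and assuming the 0.25.2 defaults, the following two calls should behave identically (illustrative sketch, not taken from the package's documentation):

    import pyfoxfile

    five_md5 = ["md5", "md5", "md5", "md5", "md5"]  # widened default in 0.25.2

    # The wrapper...
    pyfoxfile.PackFoxFileFromTarFile("backup.tar", "backup.fox",
                                     checksumtype=five_md5)

    # ...simply forwards to the underlying Append* function, per the
    # one-line body shown in this hunk.
    pyfoxfile.AppendFilesWithContentFromTarFileToOutFile(
        "backup.tar", "backup.fox", checksumtype=five_md5)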
@@ -9430,19 +9591,56 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
     else:
         inheader = ReadFileHeaderDataWoSize(fp, formatspecs['format_delimiter'])

-    fnumextrafieldsize = int(inheader[
-    fnumextrafields = int(inheader[
-    extrastart =
+    fnumextrafieldsize = int(inheader[13], 16)
+    fnumextrafields = int(inheader[14], 16)
+    extrastart = 15
     extraend = extrastart + fnumextrafields
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
-
-    fostype = inheader[3]
-    fpythontype = inheader[4]
-    fnumfiles = int(inheader[5], 16)
+    fnumfiles = int(inheader[6], 16)
     fprechecksumtype = inheader[-2]
     fprechecksum = inheader[-1]
+    outfseeknextfile = inheader[7]
+    fjsonsize = int(inheader[10], 16)
+    fjsonchecksumtype = inheader[11]
+    fjsonchecksum = inheader[12]
+    fprejsoncontent = fp.read(fjsonsize)
+    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
+    if(fjsonsize > 0):
+        if(hmac.compare_digest(jsonfcs, fjsonchecksum)):
+            if(verbose):
+                VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(outfjstart))
+                VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
+        else:
+            valid_archive = False
+            invalid_archive = True
+            if(verbose):
+                VerbosePrintOut("File JSON Data Checksum Error at offset " + str(outfjstart))
+                VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
+    if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+        VerbosePrintOut("File JSON Data Checksum Error with file " +
+                        fname + " at offset " + str(fheaderstart))
+        VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
+        return False
+    # Next seek directive
+    if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile.replace("+", ""))
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall(r"^\-([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall(r"^([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 0)
+    else:
+        return False

     il = 0
     headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
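Two behaviours are new in this part of FoxFileValidate: the header's JSON metadata block is now read and its checksum verified with hmac.compare_digest, and the header's "seek next file" directive is honoured, where a +N or -N prefix means a seek relative to the current position and a bare number means an absolute offset. A standalone sketch of that directive dispatch, as a hypothetical helper (not a pyfoxfile API), follows:

    import os
    import re

    def seek_next_file(fp, directive):
        # '+N' / '-N': relative seek from the current position;
        # bare 'N': absolute seek; anything else is rejected,
        # mirroring the validator code in the hunk above.
        if re.match(r"^\+[0-9]+$", directive):
            fp.seek(int(directive.lstrip("+")), os.SEEK_CUR)
        elif re.match(r"^\-[0-9]+$", directive):
            fp.seek(int(directive), os.SEEK_CUR)
        elif re.match(r"^[0-9]+$", directive):
            fp.seek(int(directive), os.SEEK_SET)
        else:
            return False
        return True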