PyFoxFile 0.27.4__py3-none-any.whl → 0.28.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pyfoxfile.py CHANGED
@@ -10,11 +10,11 @@
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  Revised BSD License for more details.

- Copyright 2018-2024 Cool Dude 2k - http://idb.berlios.de/
- Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
- Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
+ Copyright 2018-2026 Cool Dude 2k - http://idb.berlios.de/
+ Copyright 2018-2026 Game Maker 2k - http://intdb.sourceforge.net/
+ Copyright 2018-2026 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski

- $FileInfo: pyfoxfile.py - Last Update: 11/16/2025 Ver. 0.27.4 RC 1 - Author: cooldude2k $
+ $FileInfo: pyfoxfile.py - Last Update: 2/3/2026 Ver. 0.28.0 RC 1 - Author: cooldude2k $
  '''

  from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -662,12 +662,12 @@ __project__ = __program_name__
  __program_alt_name__ = __program_name__
  __project_url__ = "https://github.com/GameMaker2k/PyFoxFile"
  __project_release_url__ = __project_url__+"/releases/latest"
- __version_info__ = (0, 27, 4, "RC 1", 1)
- __version_date_info__ = (2025, 11, 16, "RC 1", 1)
+ __version_info__ = (0, 28, 0, "RC 1", 1)
+ __version_date_info__ = (2026, 2, 3, "RC 1", 1)
  __version_date__ = str(__version_date_info__[0]) + "." + str(
  __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
  __revision__ = __version_info__[3]
- __revision_id__ = "$Id: 91a0d1eaee5a60c54db95564f2b9f9d799a1ccf5 $"
+ __revision_id__ = "$Id: 5ff415165b31708720a44482ad20a845538eacf4 $"
  if(__version_info__[4] is not None):
  __version_date_plusrc__ = __version_date__ + \
  "-" + str(__version_date_info__[4])
@@ -860,15 +860,6 @@ try:
  compressionsupport.append("lz4")
  except ImportError:
  pass
- '''
- try:
- import lzo
- compressionsupport.append("lzo")
- compressionsupport.append("lzop")
- except ImportError:
- lzo = None
- pass
- '''
  try:
  try:
  import compression.zstd as zstd
@@ -932,13 +923,6 @@ if('lzo' in compressionsupport):
  compressionlistalt.append('lzo')
  outextlist.append('lzo')
  outextlistwd.append('.lzo')
- '''
- if('lzop' in compressionsupport):
- compressionlist.append('lzop')
- compressionlistalt.append('lzop')
- outextlist.append('lzop')
- outextlistwd.append('.lzop')
- '''
  if('lzma' in compressionsupport):
  compressionlist.append('lzma')
  compressionlistalt.append('lzma')
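
Both removals tie back to the module's optional-dependency probing: a codec is only registered when its import succeeds, and the commented-out lzo/lzop probe plus its registration block are now dropped entirely. A minimal sketch of that probing pattern, illustrative only (the real module registers many more codecs):

```python
# Illustrative sketch of the try/except ImportError probing used above.
compressionsupport = []

try:
    import lz4.frame  # optional backend; registered only if importable
    compressionsupport.append("lz4")
except ImportError:
    pass

try:
    import bz2  # stdlib backend, normally always present
    compressionsupport.append("bzip2")
except ImportError:
    pass

print(compressionsupport)  # e.g. ['lz4', 'bzip2'] or just ['bzip2']
```
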
@@ -6191,6 +6175,27 @@ def AppendNullBytes(indata=None, delimiter=__file_format_dict__['format_delimite
  def _hex_lower(n):
  return format(int(n), 'x').lower()

+ def system_and_major():
+ info = platform.uname()
+
+ # Python 3: info is a namedtuple with .system / .release
+ # Python 2: info is a plain tuple (system, node, release, version, machine, processor)
+ try:
+ system = info.system
+ release = info.release
+ except AttributeError:
+ # Fallback for Python 2
+ system = info[0]
+ release = info[2]
+
+ # Find the first run of digits in the release string
+ m = re.search(r'\d+', release)
+ if m:
+ major = m.group(0) # e.g. '11' or '6'
+ return u"%s%s" % (system, major) # unicode-safe in Py2
+ else:
+ return system
+
  def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, saltkey=None):
  """
  Build and write the archive file header.
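
The new system_and_major() helper folds the OS name and the leading digit run of the release into one token (for example "Linux6" on a 6.x kernel, "Windows11", or "Darwin24"); the header written below then records that token instead of the bare platform.system() value. A standalone restatement of the same logic, for experimentation outside the module:

```python
import platform
import re

def system_and_major():
    # Same idea as the helper added above: OS name plus the first digit run
    # of the release string, e.g. "Linux6", "Windows11", or "Darwin24".
    info = platform.uname()
    try:
        system, release = info.system, info.release
    except AttributeError:      # very old Pythons expose a plain tuple
        system, release = info[0], info[2]
    m = re.search(r'\d+', release)
    return u"%s%s" % (system, m.group(0)) if m else system

print(system_and_major())
```
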
@@ -6269,7 +6274,7 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
  else:
  fctime = format(int(to_ns(time.time())), 'x').lower()
  # Serialize the first group
- fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding, platform.system(), py_implementation, __program_name__+str(__version_info__[0]), fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
+ fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding, system_and_major(), py_implementation, __program_name__+str(__version_info__[0]), fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
  # Append tmpoutlist
  fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
  # Append extradata items if any
@@ -6857,7 +6862,9 @@ def AppendFilesWithContentFromTarFileToList(infile, extradata=[], jsondata={}, c
  inodetofile = {}
  filetoinode = {}
  inodetoforminode = {}
- if(not isinstance(infile, (list, tuple, )) and infile == "-"):
+ if(isinstance(infile, (list, tuple, ))):
+ infile = infile[0]
+ if(infile == "-"):
  infile = MkTempFile()
  shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
  infile.seek(0, 0)
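
Rather than treating a list or tuple argument as something that can never equal "-", the readers now take the first element and then run the usual checks on it; the same change recurs in the zip, rar, 7z, and listing paths below. The normalization can be expressed as a tiny helper (hypothetical name, not part of the module):

```python
def first_infile(infile):
    # Hypothetical helper mirroring the inline normalization added above:
    # a list/tuple argument collapses to its first entry before any checks.
    if isinstance(infile, (list, tuple)):
        infile = infile[0]
    return infile

print(first_infile(["archive.tar", "extra.tar"]))  # 'archive.tar'
print(first_infile("-"))                           # '-' (stdin marker)
```
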
@@ -7083,7 +7090,9 @@ def AppendFilesWithContentFromZipFileToList(infile, extradata=[], jsondata={}, c
  inodetofile = {}
  filetoinode = {}
  inodetoforminode = {}
- if(not isinstance(infile, (list, tuple, )) and infile == "-"):
+ if(isinstance(infile, (list, tuple, ))):
+ infile = infile[0]
+ if(infile == "-"):
  infile = MkTempFile()
  shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
  infile.seek(0, 0)
@@ -7246,6 +7255,9 @@ def AppendFilesWithContentFromZipFileToList(infile, extradata=[], jsondata={}, c
  fcontents.write(zipfp.read(member.filename))
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
+ if(typechecktest is not False):
+ typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
+ fcontents.seek(0, 0)
  fcencoding = GetFileEncoding(fcontents, 0, False)[0]
  if(typechecktest is False and not compresswholefile):
  fcontents.seek(0, 2)
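
The same two-step probe recurs in the rar, 7z, and repack paths below: CheckCompressionType runs first, and only when it reports a match is the member reclassified by GetBinaryFileType before deciding whether to recompress it. A runnable stand-in with stub detectors, just to show the control flow (the real functions come from this module and inspect many more signatures):

```python
import io

def check_compression_type(fp):
    # Stub: report a gzip member by its magic bytes, False otherwise.
    return "gzip" if fp.getvalue().startswith(b"\x1f\x8b") else False

def get_binary_file_type(fp):
    # Stub: refine the classification on a second pass over the same bytes.
    return "gzip-stream"

fcontents = io.BytesIO(b"\x1f\x8b\x08...payload...")
typechecktest = check_compression_type(fcontents)
fcontents.seek(0, 0)
if typechecktest is not False:
    typechecktest = get_binary_file_type(fcontents)
    fcontents.seek(0, 0)
print(typechecktest)  # 'gzip-stream'
```
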
@@ -7329,6 +7341,8 @@ else:
  inodetofile = {}
  filetoinode = {}
  inodetoforminode = {}
+ if(isinstance(infile, (list, tuple, ))):
+ infile = infile[0]
  if(not os.path.exists(infile) or not os.path.isfile(infile)):
  return False
  if(not rarfile.is_rarfile(infile) and not rarfile.is_rarfile_sfx(infile)):
@@ -7479,6 +7493,9 @@ else:
  fcontents.write(rarfp.read(member.filename))
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
+ if(typechecktest is not False):
+ typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
+ fcontents.seek(0, 0)
  fcencoding = GetFileEncoding(fcontents, 0, False)[0]
  if(typechecktest is False and not compresswholefile):
  fcontents.seek(0, 2)
@@ -7607,6 +7624,8 @@ else:
  inodetofile = {}
  filetoinode = {}
  inodetoforminode = {}
+ if(isinstance(infile, (list, tuple, ))):
+ infile = infile[0]
  if(not os.path.exists(infile) or not os.path.isfile(infile)):
  return False
  szpfp = py7zr.SevenZipFile(infile, mode="r")
@@ -7713,6 +7732,9 @@ else:
  fcontents.seek(0, 0)
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
+ if(typechecktest is not False):
+ typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
+ fcontents.seek(0, 0)
  fcencoding = GetFileEncoding(fcontents, 0, False)[0]
  try:
  file_content[member.filename].close()
@@ -8014,6 +8036,126 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
  fp.close()
  return True

+ def AppendReadInFileWithContentToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False):
+ return ReadInFileWithContentToList(infile, "auto", 0, 0, 0, False, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
+
+ def AppendReadInMultipleFileWithContentToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False):
+ return ReadInMultipleFileWithContentToList(infile, fmttype, 0, 0, 0, False, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
+
+ def AppendReadInMultipleFilesWithContentToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False):
+ return ReadInMultipleFilesWithContentToList(infile, fmttype, 0, 0, 0, False, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
+
+ def AppendReadInFileWithContent(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, verbose=False):
+ if(not hasattr(fp, "write")):
+ return False
+ GetDirList = AppendReadInFileWithContentToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, insaltkey, verbose)
+ numfiles = int(len(GetDirList))
+ fnumfiles = format(numfiles, 'x').lower()
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, outsaltkey)
+ try:
+ fp.flush()
+ if(hasattr(os, "sync")):
+ os.fsync(fp.fileno())
+ except (io.UnsupportedOperation, AttributeError, OSError):
+ pass
+ for curfname in GetDirList:
+ tmpoutlist = curfname['fheaders']
+ AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, outsaltkey)
+ try:
+ fp.flush()
+ if(hasattr(os, "sync")):
+ os.fsync(fp.fileno())
+ except (io.UnsupportedOperation, AttributeError, OSError):
+ pass
+ return fp
+
+ def AppendReadInFileWithContentToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, verbose=False, returnfp=False):
+ if(IsNestedDict(formatspecs) and fmttype=="auto" and
+ (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
+ get_in_ext = os.path.splitext(outfile)
+ tmpfmt = GetKeyByFormatExtension(get_in_ext[1], formatspecs=__file_format_multi_dict__)
+ if(tmpfmt is None and get_in_ext[1]!=""):
+ get_in_ext = os.path.splitext(get_in_ext[0])
+ tmpfmt = GetKeyByFormatExtension(get_in_ext[0], formatspecs=__file_format_multi_dict__)
+ if(tmpfmt is None):
+ fmttype = __file_format_default__
+ formatspecs = formatspecs[fmttype]
+ else:
+ fmttype = tmpfmt
+ formatspecs = formatspecs[tmpfmt]
+ elif(IsNestedDict(formatspecs) and fmttype in formatspecs):
+ formatspecs = formatspecs[fmttype]
+ elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
+ fmttype = __file_format_default__
+ formatspecs = formatspecs[fmttype]
+ if(outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write")):
+ outfile = RemoveWindowsPath(outfile)
+ if(os.path.exists(outfile)):
+ try:
+ os.unlink(outfile)
+ except OSError:
+ pass
+ if(outfile == "-" or outfile is None):
+ verbose = False
+ fp = MkTempFile()
+ elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
+ fp = outfile
+ elif(re.findall(__upload_proto_support__, outfile)):
+ fp = MkTempFile()
+ else:
+ fbasename = os.path.splitext(outfile)[0]
+ fextname = os.path.splitext(outfile)[1]
+ if(not compresswholefile and fextname in outextlistwd):
+ compresswholefile = True
+ try:
+ fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
+ except PermissionError:
+ return False
+ AppendReadInFileWithContent(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, insaltkey, outsaltkey, verbose)
+ if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
+ fp = CompressOpenFileAlt(
+ fp, compression, compressionlevel, compressionuselist, formatspecs)
+ try:
+ fp.flush()
+ if(hasattr(os, "sync")):
+ os.fsync(fp.fileno())
+ except (io.UnsupportedOperation, AttributeError, OSError):
+ pass
+ if(outfile == "-"):
+ fp.seek(0, 0)
+ shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
+ elif(outfile is None):
+ fp.seek(0, 0)
+ outvar = fp.read()
+ fp.close()
+ return outvar
+ elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
+ fp = CompressOpenFileAlt(
+ fp, compression, compressionlevel, compressionuselist, formatspecs)
+ fp.seek(0, 0)
+ upload_file_to_internet_file(fp, outfile)
+ if(returnfp):
+ fp.seek(0, 0)
+ return fp
+ else:
+ fp.close()
+ return True
+
+ def AppendReadInFileWithContentToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, verbose=False, returnfp=False):
+ if not isinstance(infiles, list):
+ infiles = [infiles]
+ returnout = False
+ for infileslist in infiles:
+ returnout = AppendReadInFileWithContentToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, insaltkey, outsaltkey, verbose, True)
+ if(not returnout):
+ break
+ else:
+ outfile = returnout
+ if(not returnfp and returnout):
+ returnout.close()
+ return True
+ return returnout
+
  def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
@@ -9005,8 +9147,6 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, file
  compresscheck = "zstd"
  elif(fextname == ".lz4"):
  compresscheck = "lz4"
- elif(fextname == ".lzo" or fextname == ".lzop"):
- compresscheck = "lzo"
  elif(fextname == ".lzma"):
  compresscheck = "lzma"
  elif(fextname == ".xz"):
@@ -9298,8 +9438,6 @@ def UncompressString(infile, formatspecs=__file_format_multi_dict__, filestart=0
  fileuz = decompressor.decompress(infile)
  elif(compresscheck == "lz4" and compresscheck in compressionsupport):
  fileuz = lz4.frame.decompress(infile)
- elif((compresscheck == "lzo" or compresscheck == "lzop") and compresscheck in compressionsupport):
- fileuz = lzo.decompress(infile)
  elif((compresscheck == "lzma" or compresscheck == "xz") and compresscheck in compressionsupport):
  fileuz = lzma.decompress(infile)
  elif(compresscheck == "zlib" and compresscheck in compressionsupport):
@@ -9350,8 +9488,6 @@ def UncompressBytes(infile, formatspecs=__file_format_multi_dict__, filestart=0)
  fileuz = decompressor.decompress(infile)
  elif(compresscheck == "lz4" and compresscheck in compressionsupport):
  fileuz = lz4.frame.decompress(infile)
- elif((compresscheck == "lzo" or compresscheck == "lzop") and compresscheck in compressionsupport):
- fileuz = lzo.decompress(infile)
  elif((compresscheck == "lzma" or compresscheck == "xz") and compresscheck in compressionsupport):
  fileuz = lzma.decompress(infile)
  elif(compresscheck == "zlib" and compresscheck in compressionsupport):
@@ -9923,9 +10059,6 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None,
  elif compression == "lz4" and "lz4" in compressionsupport:
  bytesfp = MkTempFile()
  bytesfp.write(lz4.frame.compress(fp.read(), compression_level=_lvl(compressionlevel)))
- elif (compression in ("lzo", "lzop")) and "lzop" in compressionsupport:
- bytesfp = MkTempFile()
- bytesfp.write(lzo.compress(fp.read(), _lvl(compressionlevel)))
  elif compression == "zstd" and "zstandard" in compressionsupport:
  bytesfp = MkTempFile()
  level = _lvl(compressionlevel)
@@ -10242,7 +10375,7 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_form
  if(not fp):
  return False
  fp.seek(filestart, 0)
- elif(not isinstance(infile, (list, tuple, )) and infile == "-"):
+ elif(infile == "-"):
  fp = MkTempFile()
  shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
  fp.seek(filestart, 0)
@@ -10300,10 +10433,8 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_form
  compresscheck = "bzip2"
  elif(fextname == ".zst"):
  compresscheck = "zstd"
- elif(fextname == ".lz4" or fextname == ".clz4"):
+ elif(fextname == ".lz4"):
  compresscheck = "lz4"
- elif(fextname == ".lzo" or fextname == ".lzop"):
- compresscheck = "lzo"
  elif(fextname == ".lzma"):
  compresscheck = "lzma"
  elif(fextname == ".xz"):
@@ -11026,6 +11157,10 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)

+ if(typechecktest is not False):
+ typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
+ fcontents.seek(0, 0)
+
  # get fcencoding once here
  fcencoding = GetFileEncoding(fcontents, 0, False)[0]

@@ -11619,7 +11754,9 @@ def FoxFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipcheck


  def TarFileListFiles(infile, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
- if(not isinstance(infile, (list, tuple, )) and infile == "-"):
+ if(isinstance(infile, (list, tuple, ))):
+ infile = infile[0]
+ if(infile == "-"):
  infile = MkTempFile()
  shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
  infile.seek(0, 0)
@@ -11739,7 +11876,9 @@ def TarFileListFile(infile, formatspecs=__file_format_multi_dict__, verbose=Fals


  def ZipFileListFiles(infile, verbose=False, returnfp=False):
- if(not isinstance(infile, (list, tuple, )) and infile == "-"):
+ if(isinstance(infile, (list, tuple, ))):
+ infile = infile[0]
+ if(infile == "-"):
  infile = MkTempFile()
  shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
  infile.seek(0, 0)
@@ -11896,9 +12035,10 @@ def ZipFileListFile(infile, verbose=False, returnfp=False):
  if(not rarfile_support):
  def RarFileListFiles(infile, verbose=False, returnfp=False):
  return False
-
- if(rarfile_support):
+ else:
  def RarFileListFiles(infile, verbose=False, returnfp=False):
+ if(isinstance(infile, (list, tuple, ))):
+ infile = infile[0]
  if(not os.path.exists(infile) or not os.path.isfile(infile)):
  return False
  if(not rarfile.is_rarfile(infile) and not rarfile.is_rarfile_sfx(infile)):
@@ -12028,9 +12168,10 @@ def RarFileListFile(infile, verbose=False, returnfp=False):
  if(not py7zr_support):
  def SevenZipFileListFiles(infile, verbose=False, returnfp=False):
  return False
-
- if(py7zr_support):
+ else:
  def SevenZipFileListFiles(infile, verbose=False, returnfp=False):
+ if(isinstance(infile, (list, tuple, ))):
+ infile = infile[0]
  if(not os.path.exists(infile) or not os.path.isfile(infile)):
  return False
  lcfi = 0
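
Both listing helpers now use a single if/else around the two alternative definitions instead of two separate if blocks, so exactly one definition wins at import time. The shape of that pattern, isolated with py7zr as the example (sketch names are lowercase to mark them as illustrative, not the module's own):

```python
# Isolated sketch of the conditional-definition pattern used above.
try:
    import py7zr
    py7zr_support = True
except ImportError:
    py7zr_support = False

if not py7zr_support:
    def seven_zip_list_names(infile):
        return False                      # stub: feature unavailable
else:
    def seven_zip_list_names(infile):
        with py7zr.SevenZipFile(infile, mode="r") as szpfp:
            return szpfp.getnames()       # real listing when py7zr is present

# seven_zip_list_names("example.7z")  -> False, or the archive's member names
```
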
@@ -12391,8 +12532,14 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
  if headers is None:
  headers = {}
  urlparts = urlparse(url)
- username = unquote(urlparts.username)
- password = unquote(urlparts.password)
+ if(urlparts.username is not None):
+ username = unquote(urlparts.username)
+ else:
+ username = None
+ if(urlparts.password is not None):
+ password = unquote(urlparts.password)
+ else:
+ password = None

  # Rebuild URL without username and password
  netloc = urlparts.hostname or ''
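
The guard matters because urlparse() reports username and password as None when the URL carries no credentials, and passing None to unquote() fails (a TypeError on Python 3); a minimal reproduction of the fixed behaviour:

```python
try:
    from urllib.parse import urlparse, unquote   # Python 3
except ImportError:                               # Python 2 fallback
    from urlparse import urlparse
    from urllib import unquote

urlparts = urlparse("https://example.com/path/file.whl")  # no user:pass in URL
username = unquote(urlparts.username) if urlparts.username is not None else None
password = unquote(urlparts.password) if urlparts.password is not None else None
print(username, password)  # None None, instead of raising inside unquote()
```
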