PyNeoFile 0.27.4__py3-none-any.whl → 0.28.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pyneofile.py CHANGED
@@ -10,11 +10,11 @@
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  Revised BSD License for more details.

- Copyright 2018-2024 Cool Dude 2k - http://idb.berlios.de/
- Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
- Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
+ Copyright 2018-2026 Cool Dude 2k - http://idb.berlios.de/
+ Copyright 2018-2026 Game Maker 2k - http://intdb.sourceforge.net/
+ Copyright 2018-2026 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski

- $FileInfo: pyneofile.py - Last Update: 11/16/2025 Ver. 0.27.4 RC 1 - Author: cooldude2k $
+ $FileInfo: pyneofile.py - Last Update: 2/3/2026 Ver. 0.28.0 RC 1 - Author: cooldude2k $

  '''

  from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -655,12 +655,12 @@ __project__ = __program_name__
  __program_alt_name__ = __program_name__
  __project_url__ = "https://github.com/GameMaker2k/PyNeoFile"
  __project_release_url__ = __project_url__+"/releases/latest"
- __version_info__ = (0, 27, 4, "RC 1", 1)
- __version_date_info__ = (2025, 11, 16, "RC 1", 1)
+ __version_info__ = (0, 28, 0, "RC 1", 1)
+ __version_date_info__ = (2026, 2, 3, "RC 1", 1)
  __version_date__ = str(__version_date_info__[0]) + "." + str(
      __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
  __revision__ = __version_info__[3]
- __revision_id__ = "$Id: 388df107a9d329c4c1077177ad6b2f559d64e799 $"
+ __revision_id__ = "$Id: c00061c18257e7fd91e274ad71aa6cb40010ef33 $"
  if(__version_info__[4] is not None):
      __version_date_plusrc__ = __version_date__ + \
          "-" + str(__version_date_info__[4])
@@ -853,15 +853,6 @@ try:
      compressionsupport.append("lz4")
  except ImportError:
      pass
- '''
- try:
-     import lzo
-     compressionsupport.append("lzo")
-     compressionsupport.append("lzop")
- except ImportError:
-     lzo = None
-     pass
- '''
  try:
      try:
          import compression.zstd as zstd
@@ -925,13 +916,6 @@ if('lzo' in compressionsupport):
      compressionlistalt.append('lzo')
      outextlist.append('lzo')
      outextlistwd.append('.lzo')
- '''
- if('lzop' in compressionsupport):
-     compressionlist.append('lzop')
-     compressionlistalt.append('lzop')
-     outextlist.append('lzop')
-     outextlistwd.append('.lzop')
- '''
  if('lzma' in compressionsupport):
      compressionlist.append('lzma')
      compressionlistalt.append('lzma')
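
Note (not part of the diff): with the commented-out python-lzo block and the 'lzop' registration gone, "lzo"/"lzop" are no longer added to compressionsupport or compressionlist in this release. A minimal sketch of guarding a compression request against an unregistered codec, assuming the module imports as pyneofile and that compressionsupport remains the public module-level list shown above:

    # Hedged sketch: fall back when a codec is not registered in this build.
    # Assumes "import pyneofile" and the module-level compressionsupport list above.
    import pyneofile

    requested = "lzo"
    compression = requested if requested in pyneofile.compressionsupport else "auto"
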
@@ -6184,6 +6168,27 @@ def AppendNullBytes(indata=None, delimiter=__file_format_dict__['format_delimite
  def _hex_lower(n):
      return format(int(n), 'x').lower()

+ def system_and_major():
+     info = platform.uname()
+
+     # Python 3: info is a namedtuple with .system / .release
+     # Python 2: info is a plain tuple (system, node, release, version, machine, processor)
+     try:
+         system = info.system
+         release = info.release
+     except AttributeError:
+         # Fallback for Python 2
+         system = info[0]
+         release = info[2]
+
+     # Find the first run of digits in the release string
+     m = re.search(r'\d+', release)
+     if m:
+         major = m.group(0)  # e.g. '11' or '6'
+         return u"%s%s" % (system, major)  # unicode-safe in Py2
+     else:
+         return system
+
  def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, saltkey=None):
      """
      Build and write the archive file header.
@@ -6262,7 +6267,7 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
      else:
          fctime = format(int(to_ns(time.time())), 'x').lower()
      # Serialize the first group
-     fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding, platform.system(), py_implementation, __program_name__+str(__version_info__[0]), fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
+     fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding, system_and_major(), py_implementation, __program_name__+str(__version_info__[0]), fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
      # Append tmpoutlist
      fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
      # Append extradata items if any
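
Note (not part of the diff): the header field that previously carried platform.system() now carries the OS name plus the first numeric run of its release string, e.g. "Linux6", "Windows11", or "Darwin24". A standalone sketch of the same logic for illustration, written Python-3-only (the shipped helper above also handles the Python 2 plain-tuple form of platform.uname()):

    # Illustration only; mirrors the logic of the system_and_major() added above.
    import platform
    import re

    def system_and_major():
        info = platform.uname()              # e.g. system="Linux", release="6.8.0-41-generic"
        m = re.search(r'\d+', info.release)  # first run of digits in the release
        return u"%s%s" % (info.system, m.group(0)) if m else info.system

    print(system_and_major())                # e.g. "Linux6"
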
@@ -6850,7 +6855,9 @@ def AppendFilesWithContentFromTarFileToList(infile, extradata=[], jsondata={}, c
      inodetofile = {}
      filetoinode = {}
      inodetoforminode = {}
-     if(not isinstance(infile, (list, tuple, )) and infile == "-"):
+     if(isinstance(infile, (list, tuple, ))):
+         infile = infile[0]
+     if(infile == "-"):
          infile = MkTempFile()
          shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
          infile.seek(0, 0)
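
Note (not part of the diff): in 0.27.4 a list or tuple argument skipped the stdin check and was then used as-is; in 0.28.0 the first element is unwrapped before the "-" test, and the same pattern is applied to the zip, rar, and 7z readers and the *FileListFiles helpers further down. A hedged sketch of the calling difference, with a hypothetical archive name:

    # Hedged sketch of the new argument handling ("backup.tar" is hypothetical).
    import pyneofile

    entries = pyneofile.AppendFilesWithContentFromTarFileToList("backup.tar")
    entries = pyneofile.AppendFilesWithContentFromTarFileToList(["backup.tar"])  # 0.28.0: now equivalent
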
@@ -7076,7 +7083,9 @@ def AppendFilesWithContentFromZipFileToList(infile, extradata=[], jsondata={}, c
      inodetofile = {}
      filetoinode = {}
      inodetoforminode = {}
-     if(not isinstance(infile, (list, tuple, )) and infile == "-"):
+     if(isinstance(infile, (list, tuple, ))):
+         infile = infile[0]
+     if(infile == "-"):
          infile = MkTempFile()
          shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
          infile.seek(0, 0)
@@ -7239,6 +7248,9 @@ def AppendFilesWithContentFromZipFileToList(infile, extradata=[], jsondata={}, c
          fcontents.write(zipfp.read(member.filename))
          typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
          fcontents.seek(0, 0)
+         if(typechecktest is not False):
+             typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
+             fcontents.seek(0, 0)
          fcencoding = GetFileEncoding(fcontents, 0, False)[0]
          if(typechecktest is False and not compresswholefile):
              fcontents.seek(0, 2)
@@ -7322,6 +7334,8 @@ else:
      inodetofile = {}
      filetoinode = {}
      inodetoforminode = {}
+     if(isinstance(infile, (list, tuple, ))):
+         infile = infile[0]
      if(not os.path.exists(infile) or not os.path.isfile(infile)):
          return False
      if(not rarfile.is_rarfile(infile) and not rarfile.is_rarfile_sfx(infile)):
@@ -7472,6 +7486,9 @@ else:
          fcontents.write(rarfp.read(member.filename))
          typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
          fcontents.seek(0, 0)
+         if(typechecktest is not False):
+             typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
+             fcontents.seek(0, 0)
          fcencoding = GetFileEncoding(fcontents, 0, False)[0]
          if(typechecktest is False and not compresswholefile):
              fcontents.seek(0, 2)
@@ -7600,6 +7617,8 @@ else:
      inodetofile = {}
      filetoinode = {}
      inodetoforminode = {}
+     if(isinstance(infile, (list, tuple, ))):
+         infile = infile[0]
      if(not os.path.exists(infile) or not os.path.isfile(infile)):
          return False
      szpfp = py7zr.SevenZipFile(infile, mode="r")
@@ -7706,6 +7725,9 @@ else:
          fcontents.seek(0, 0)
          typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
          fcontents.seek(0, 0)
+         if(typechecktest is not False):
+             typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
+             fcontents.seek(0, 0)
          fcencoding = GetFileEncoding(fcontents, 0, False)[0]
          try:
              file_content[member.filename].close()
@@ -8007,6 +8029,126 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
          fp.close()
          return True

+ def AppendReadInFileWithContentToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False):
+     return ReadInFileWithContentToList(infile, "auto", 0, 0, 0, False, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
+
+ def AppendReadInMultipleFileWithContentToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False):
+     return ReadInMultipleFileWithContentToList(infile, fmttype, 0, 0, 0, False, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
+
+ def AppendReadInMultipleFilesWithContentToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False):
+     return ReadInMultipleFilesWithContentToList(infile, fmttype, 0, 0, 0, False, contentasfile, uncompress, skipchecksum, formatspecs, saltkey, seektoend)
+
+ def AppendReadInFileWithContent(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, verbose=False):
+     if(not hasattr(fp, "write")):
+         return False
+     GetDirList = AppendReadInFileWithContentToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, insaltkey, verbose)
+     numfiles = int(len(GetDirList))
+     fnumfiles = format(numfiles, 'x').lower()
+     AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, outsaltkey)
+     try:
+         fp.flush()
+         if(hasattr(os, "sync")):
+             os.fsync(fp.fileno())
+     except (io.UnsupportedOperation, AttributeError, OSError):
+         pass
+     for curfname in GetDirList:
+         tmpoutlist = curfname['fheaders']
+         AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, outsaltkey)
+         try:
+             fp.flush()
+             if(hasattr(os, "sync")):
+                 os.fsync(fp.fileno())
+         except (io.UnsupportedOperation, AttributeError, OSError):
+             pass
+     return fp
+
+ def AppendReadInFileWithContentToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, verbose=False, returnfp=False):
+     if(IsNestedDict(formatspecs) and fmttype=="auto" and
+         (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
+         get_in_ext = os.path.splitext(outfile)
+         tmpfmt = GetKeyByFormatExtension(get_in_ext[1], formatspecs=__file_format_multi_dict__)
+         if(tmpfmt is None and get_in_ext[1]!=""):
+             get_in_ext = os.path.splitext(get_in_ext[0])
+             tmpfmt = GetKeyByFormatExtension(get_in_ext[0], formatspecs=__file_format_multi_dict__)
+         if(tmpfmt is None):
+             fmttype = __file_format_default__
+             formatspecs = formatspecs[fmttype]
+         else:
+             fmttype = tmpfmt
+             formatspecs = formatspecs[tmpfmt]
+     elif(IsNestedDict(formatspecs) and fmttype in formatspecs):
+         formatspecs = formatspecs[fmttype]
+     elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
+         fmttype = __file_format_default__
+         formatspecs = formatspecs[fmttype]
+     if(outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write")):
+         outfile = RemoveWindowsPath(outfile)
+         if(os.path.exists(outfile)):
+             try:
+                 os.unlink(outfile)
+             except OSError:
+                 pass
+     if(outfile == "-" or outfile is None):
+         verbose = False
+         fp = MkTempFile()
+     elif(hasattr(outfile, "read") or hasattr(outfile, "write")):
+         fp = outfile
+     elif(re.findall(__upload_proto_support__, outfile)):
+         fp = MkTempFile()
+     else:
+         fbasename = os.path.splitext(outfile)[0]
+         fextname = os.path.splitext(outfile)[1]
+         if(not compresswholefile and fextname in outextlistwd):
+             compresswholefile = True
+         try:
+             fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
+         except PermissionError:
+             return False
+     AppendReadInFileWithContent(infiles, fp, extradata, jsondata, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, insaltkey, outsaltkey, verbose)
+     if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
+         fp = CompressOpenFileAlt(
+             fp, compression, compressionlevel, compressionuselist, formatspecs)
+         try:
+             fp.flush()
+             if(hasattr(os, "sync")):
+                 os.fsync(fp.fileno())
+         except (io.UnsupportedOperation, AttributeError, OSError):
+             pass
+     if(outfile == "-"):
+         fp.seek(0, 0)
+         shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
+     elif(outfile is None):
+         fp.seek(0, 0)
+         outvar = fp.read()
+         fp.close()
+         return outvar
+     elif((not hasattr(outfile, "read") and not hasattr(outfile, "write")) and re.findall(__upload_proto_support__, outfile)):
+         fp = CompressOpenFileAlt(
+             fp, compression, compressionlevel, compressionuselist, formatspecs)
+         fp.seek(0, 0)
+         upload_file_to_internet_file(fp, outfile)
+     if(returnfp):
+         fp.seek(0, 0)
+         return fp
+     else:
+         fp.close()
+         return True
+
+ def AppendReadInFileWithContentToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, insaltkey=None, outsaltkey=None, verbose=False, returnfp=False):
+     if not isinstance(infiles, list):
+         infiles = [infiles]
+     returnout = False
+     for infileslist in infiles:
+         returnout = AppendReadInFileWithContentToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, insaltkey, outsaltkey, verbose, True)
+         if(not returnout):
+             break
+         else:
+             outfile = returnout
+     if(not returnfp and returnout):
+         returnout.close()
+         return True
+     return returnout
+
  def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, saltkey=None, verbose=False, returnfp=False):
      if(IsNestedDict(formatspecs) and fmttype=="auto" and
          (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
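
Note (not part of the diff): the new AppendReadIn* functions re-read an existing archive and append its entries to a fresh output, mirroring the AppendFilesWithContentFrom*ToOutFile helpers around them. As shipped, the *ToList wrappers forward names (uncompress, skipchecksum, seektoend, fmttype) that are not in their own parameter lists, so the sketch below shows the intended call pattern rather than guaranteed behavior; file names are hypothetical:

    # Hedged usage sketch of the new entry points (hypothetical file names).
    import pyneofile

    # Re-pack the entries of an existing archive into a new output file.
    pyneofile.AppendReadInFileWithContentToOutFile("old_archive.neo", "repacked.neo")

    # Stacked variant: each input in the list is appended to the same output in turn.
    pyneofile.AppendReadInFileWithContentToStackedOutFile(
        ["part1.neo", "part2.neo"], "combined.neo")
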
@@ -8998,8 +9140,6 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, file
          compresscheck = "zstd"
      elif(fextname == ".lz4"):
          compresscheck = "lz4"
-     elif(fextname == ".lzo" or fextname == ".lzop"):
-         compresscheck = "lzo"
      elif(fextname == ".lzma"):
          compresscheck = "lzma"
      elif(fextname == ".xz"):
@@ -9291,8 +9431,6 @@ def UncompressString(infile, formatspecs=__file_format_multi_dict__, filestart=0
          fileuz = decompressor.decompress(infile)
      elif(compresscheck == "lz4" and compresscheck in compressionsupport):
          fileuz = lz4.frame.decompress(infile)
-     elif((compresscheck == "lzo" or compresscheck == "lzop") and compresscheck in compressionsupport):
-         fileuz = lzo.decompress(infile)
      elif((compresscheck == "lzma" or compresscheck == "xz") and compresscheck in compressionsupport):
          fileuz = lzma.decompress(infile)
      elif(compresscheck == "zlib" and compresscheck in compressionsupport):
@@ -9343,8 +9481,6 @@ def UncompressBytes(infile, formatspecs=__file_format_multi_dict__, filestart=0)
          fileuz = decompressor.decompress(infile)
      elif(compresscheck == "lz4" and compresscheck in compressionsupport):
          fileuz = lz4.frame.decompress(infile)
-     elif((compresscheck == "lzo" or compresscheck == "lzop") and compresscheck in compressionsupport):
-         fileuz = lzo.decompress(infile)
      elif((compresscheck == "lzma" or compresscheck == "xz") and compresscheck in compressionsupport):
          fileuz = lzma.decompress(infile)
      elif(compresscheck == "zlib" and compresscheck in compressionsupport):
@@ -9916,9 +10052,6 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None,
      elif compression == "lz4" and "lz4" in compressionsupport:
          bytesfp = MkTempFile()
          bytesfp.write(lz4.frame.compress(fp.read(), compression_level=_lvl(compressionlevel)))
-     elif (compression in ("lzo", "lzop")) and "lzop" in compressionsupport:
-         bytesfp = MkTempFile()
-         bytesfp.write(lzo.compress(fp.read(), _lvl(compressionlevel)))
      elif compression == "zstd" and "zstandard" in compressionsupport:
          bytesfp = MkTempFile()
          level = _lvl(compressionlevel)
@@ -10235,7 +10368,7 @@ def NeoFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_form
          if(not fp):
              return False
          fp.seek(filestart, 0)
-     elif(not isinstance(infile, (list, tuple, )) and infile == "-"):
+     elif(infile == "-"):
          fp = MkTempFile()
          shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
          fp.seek(filestart, 0)
@@ -10293,10 +10426,8 @@ def NeoFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_form
          compresscheck = "bzip2"
      elif(fextname == ".zst"):
          compresscheck = "zstd"
-     elif(fextname == ".lz4" or fextname == ".clz4"):
+     elif(fextname == ".lz4"):
          compresscheck = "lz4"
-     elif(fextname == ".lzo" or fextname == ".lzop"):
-         compresscheck = "lzo"
      elif(fextname == ".lzma"):
          compresscheck = "lzma"
      elif(fextname == ".xz"):
@@ -11019,6 +11150,10 @@ def RePackNeoFile(infile, outfile, fmttype="auto", compression="auto", compressw
          typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
          fcontents.seek(0, 0)

+         if(typechecktest is not False):
+             typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
+             fcontents.seek(0, 0)
+
          # get fcencoding once here
          fcencoding = GetFileEncoding(fcontents, 0, False)[0]

@@ -11612,7 +11747,9 @@ def NeoFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipcheck


  def TarFileListFiles(infile, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
-     if(not isinstance(infile, (list, tuple, )) and infile == "-"):
+     if(isinstance(infile, (list, tuple, ))):
+         infile = infile[0]
+     if(infile == "-"):
          infile = MkTempFile()
          shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
          infile.seek(0, 0)
@@ -11732,7 +11869,9 @@ def TarFileListFile(infile, formatspecs=__file_format_multi_dict__, verbose=Fals


  def ZipFileListFiles(infile, verbose=False, returnfp=False):
-     if(not isinstance(infile, (list, tuple, )) and infile == "-"):
+     if(isinstance(infile, (list, tuple, ))):
+         infile = infile[0]
+     if(infile == "-"):
          infile = MkTempFile()
          shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
          infile.seek(0, 0)
@@ -11889,9 +12028,10 @@ def ZipFileListFile(infile, verbose=False, returnfp=False):
  if(not rarfile_support):
      def RarFileListFiles(infile, verbose=False, returnfp=False):
          return False
-
- if(rarfile_support):
+ else:
      def RarFileListFiles(infile, verbose=False, returnfp=False):
+         if(isinstance(infile, (list, tuple, ))):
+             infile = infile[0]
          if(not os.path.exists(infile) or not os.path.isfile(infile)):
              return False
          if(not rarfile.is_rarfile(infile) and not rarfile.is_rarfile_sfx(infile)):
@@ -12021,9 +12161,10 @@ def RarFileListFile(infile, verbose=False, returnfp=False):
  if(not py7zr_support):
      def SevenZipFileListFiles(infile, verbose=False, returnfp=False):
          return False
-
- if(py7zr_support):
+ else:
      def SevenZipFileListFiles(infile, verbose=False, returnfp=False):
+         if(isinstance(infile, (list, tuple, ))):
+             infile = infile[0]
          if(not os.path.exists(infile) or not os.path.isfile(infile)):
              return False
          lcfi = 0
@@ -12384,8 +12525,14 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
      if headers is None:
          headers = {}
      urlparts = urlparse(url)
-     username = unquote(urlparts.username)
-     password = unquote(urlparts.password)
+     if(urlparts.username is not None):
+         username = unquote(urlparts.username)
+     else:
+         username = None
+     if(urlparts.password is not None):
+         password = unquote(urlparts.password)
+     else:
+         password = None

      # Rebuild URL without username and password
      netloc = urlparts.hostname or ''
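
Note (not part of the diff): urlparse() only exposes username/password when the URL embeds credentials; on a plain URL both attributes are None, and the old unconditional unquote() call raised TypeError. A minimal illustration with hypothetical URLs:

    # Why the None checks are needed (hypothetical URLs).
    from urllib.parse import urlparse, unquote

    with_creds = urlparse("https://user%40example.com:p%40ss@host.example/path")
    print(unquote(with_creds.username), unquote(with_creds.password))  # user@example.com p@ss

    no_creds = urlparse("https://host.example/path")
    print(no_creds.username, no_creds.password)  # None None
    # unquote(None) raises TypeError, which the new guards avoid.
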