PyArchiveFile 0.24.0-py3-none-any.whl → 0.24.4-py3-none-any.whl

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
pyarchivefile.py CHANGED
@@ -14,7 +14,7 @@
14
14
  Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
15
15
  Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
16
16
 
17
- $FileInfo: pyarchivefile.py - Last Update: 10/29/2025 Ver. 0.24.0 RC 1 - Author: cooldude2k $
17
+ $FileInfo: pyarchivefile.py - Last Update: 11/3/2025 Ver. 0.24.4 RC 1 - Author: cooldude2k $
18
18
  '''
19
19
 
20
20
  from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -33,10 +33,10 @@ import socket
33
33
  import struct
34
34
  import hashlib
35
35
  import inspect
36
- import datetime
37
36
  import logging
38
37
  import zipfile
39
38
  import binascii
39
+ import datetime
40
40
  import platform
41
41
  from io import StringIO, BytesIO
42
42
  from collections import namedtuple
@@ -413,7 +413,12 @@ def is_only_nonprintable(var):
413
413
  __file_format_multi_dict__ = {}
414
414
  __file_format_default__ = "ArchiveFile"
415
415
  __include_defaults__ = True
416
- __use_inmemfile__ = False
416
+ __use_inmemfile__ = True
417
+ __use_spoolfile__ = False
418
+ __use_spooldir__ = tempfile.gettempdir()
419
+ BYTES_PER_MiB = 1024 * 1024
420
+ DEFAULT_SPOOL_MAX = 8 * BYTES_PER_MiB
421
+ __spoolfile_size__ = DEFAULT_SPOOL_MAX
417
422
  __program_name__ = "Py"+__file_format_default__
418
423
  __use_env_file__ = True
419
424
  __use_ini_file__ = True
@@ -450,6 +455,8 @@ if __use_ini_file__ and os.path.exists(__config_file__):
450
455
  __program_name__ = decode_unicode_escape(config.get('config', 'proname'))
451
456
  __include_defaults__ = config.getboolean('config', 'includedef')
452
457
  __use_inmemfile__ = config.getboolean('config', 'inmemfile')
458
+ __use_spoolfile__ = config.getboolean('config', 'usespoolfile')
459
+ __spoolfile_size__ = config.getint('config', 'spoolfilesize')
453
460
  # Loop through all sections
454
461
  for section in config.sections():
455
462
  if section == "config":
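
For reference, a hypothetical [config] section matching the keys the INI loader above reads (section and key names are taken from the config.get/getboolean/getint calls; the values and the surrounding format sections are illustrative only):

# Sketch: build and read back a [config] section with the new spool keys.
try:
    import configparser                      # Python 3
except ImportError:
    import ConfigParser as configparser      # Python 2 fallback
import io

SAMPLE_INI = u"[config]\nproname = PyArchiveFile\nincludedef = true\ninmemfile = true\nusespoolfile = false\nspoolfilesize = 8388608\n"

config = configparser.ConfigParser()
try:
    config.read_string(SAMPLE_INI)           # Python 3
except AttributeError:
    config.readfp(io.StringIO(SAMPLE_INI))   # Python 2
assert config.getboolean('config', 'usespoolfile') is False
assert config.getint('config', 'spoolfilesize') == 8 * 1024 * 1024
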
@@ -543,6 +550,8 @@ elif __use_json_file__ and os.path.exists(__config_file__):
543
550
  __program_name__ = decode_unicode_escape(_get(cfg_config, 'proname', ''))
544
551
  __include_defaults__ = _to_bool(_get(cfg_config, 'includedef', False))
545
552
  __use_inmemfile__ = _to_bool(_get(cfg_config, 'inmemfile', False))
553
+ __use_spoolfile__ = _to_bool(_get(cfg_config, 'usespoolfile', False))
554
+ __spoolfile_size__ = _to_int(_get(cfg_config, 'spoolfilesize', DEFAULT_SPOOL_MAX))
546
555
 
547
556
  # --- iterate format sections (everything except "config") ---
548
557
  required_keys = [
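
The JSON loader goes through the module's _get/_to_bool/_to_int helpers; a hypothetical "config" block carrying the same keys could look like this (values illustrative, helper functions not redefined here):

import json

sample = json.loads('{"config": {"proname": "PyArchiveFile", "includedef": true, '
                    '"inmemfile": true, "usespoolfile": false, "spoolfilesize": 8388608}}')
cfg_config = sample.get("config", {})
assert cfg_config.get("usespoolfile") is False
assert cfg_config.get("spoolfilesize") == 8 * 1024 * 1024   # matches DEFAULT_SPOOL_MAX
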
@@ -622,13 +631,15 @@ __use_alt_inode__ = __file_format_multi_dict__[__file_format_default__]['use_alt
622
631
  __file_format_extension__ = __file_format_multi_dict__[__file_format_default__]['format_extension']
623
632
  __file_format_dict__ = __file_format_multi_dict__[__file_format_default__]
624
633
  __project__ = __program_name__
634
+ __program_alt_name__ = __program_name__
625
635
  __project_url__ = "https://github.com/GameMaker2k/PyArchiveFile"
626
- __version_info__ = (0, 24, 0, "RC 1", 1)
627
- __version_date_info__ = (2025, 10, 29, "RC 1", 1)
636
+ __project_release_url__ = __project_url__+"/releases/latest"
637
+ __version_info__ = (0, 24, 4, "RC 1", 1)
638
+ __version_date_info__ = (2025, 11, 3, "RC 1", 1)
628
639
  __version_date__ = str(__version_date_info__[0]) + "." + str(
629
640
  __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
630
641
  __revision__ = __version_info__[3]
631
- __revision_id__ = "$Id: f8c5b802307ba5682e5f973da4d45916123047b8 $"
642
+ __revision_id__ = "$Id: 5df8f8c1e18245b16b5406cf5e7f29055afd405d $"
632
643
  if(__version_info__[4] is not None):
633
644
  __version_date_plusrc__ = __version_date__ + \
634
645
  "-" + str(__version_date_info__[4])
@@ -640,6 +651,78 @@ if(__version_info__[3] is not None):
640
651
  if(__version_info__[3] is None):
641
652
  __version__ = str(__version_info__[0]) + "." + str(__version_info__[1]) + "." + str(__version_info__[2])
642
653
 
654
+ # From: https://stackoverflow.com/a/28568003
655
+ # By Phaxmohdem
656
+
657
+
658
+ def versiontuple(v):
659
+ filled = []
660
+ for point in v.split("."):
661
+ filled.append(point.zfill(8))
662
+ return tuple(filled)
663
+
664
+
665
+ def version_check(myvercheck, newvercheck):
666
+ vercheck = 0
667
+ try:
668
+ from packaging import version
669
+ vercheck = 1
670
+ except ImportError:
671
+ try:
672
+ from distutils.version import LooseVersion, StrictVersion
673
+ vercheck = 2
674
+ except ImportError:
675
+ try:
676
+ from pkg_resources import parse_version
677
+ vercheck = 3
678
+ except ImportError:
679
+ return 5
680
+ # print(myvercheck, newvercheck)
681
+ if (vercheck == 1):
682
+ if (version.parse(myvercheck) == version.parse(newvercheck)):
683
+ return 0
684
+ elif (version.parse(myvercheck) < version.parse(newvercheck)):
685
+ return 1
686
+ elif (version.parse(myvercheck) > version.parse(newvercheck)):
687
+ return 2
688
+ else:
689
+ return 3
690
+ elif (vercheck == 2):
691
+ if (StrictVersion(myvercheck) == StrictVersion(newvercheck)):
692
+ return 0
693
+ elif (StrictVersion(myvercheck) < StrictVersion(newvercheck)):
694
+ return 1
695
+ elif (StrictVersion(myvercheck) > StrictVersion(newvercheck)):
696
+ return 2
697
+ else:
698
+ return 3
699
+ elif (vercheck == 3):
700
+ if (parse_version(myvercheck) == parse_version(newvercheck)):
701
+ return 0
702
+ elif (parse_version(myvercheck) < parse_version(newvercheck)):
703
+ return 1
704
+ elif (parse_version(myvercheck) > parse_version(newvercheck)):
705
+ return 2
706
+ else:
707
+ return 3
708
+ else:
709
+ if (versiontuple(myvercheck) == versiontuple(newvercheck)):
710
+ return 0
711
+ elif (versiontuple(myvercheck) < versiontuple(newvercheck)):
712
+ return 1
713
+ elif (versiontuple(myvercheck) > versiontuple(newvercheck)):
714
+ return 2
715
+ else:
716
+ return 3
717
+ return 4
718
+
719
+
720
+ def check_version_number(myversion=__version__, proname=__program_alt_name__, newverurl=__project_release_url__):
721
+ prevercheck = download_from_url(newverurl, geturls_headers, geturls_cj)
722
+ newvercheck = re.findall(proname + " ([0-9\\.]+)<\\/a\\>", prevercheck['Content'].decode("UTF-8"))[0]
723
+ myvercheck = re.findall("([0-9\\.]+)", myversion)[0]
724
+ return version_check(myvercheck, newvercheck)
725
+
643
726
  # ===== Module-level type code table & helpers (reuse anywhere) =====
644
727
 
645
728
  FT = {
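
A quick usage sketch of the version helpers added above (assuming pyarchivefile is importable): version_check returns 0 when the versions match, 1 when a newer version exists, 2 when the local version is newer, and 5 if none of packaging, distutils, or pkg_resources can be imported. check_version_number additionally needs network access and the module's download_from_url/geturls_* globals, so it is not exercised here.

from pyarchivefile import version_check

# Comparing the two wheel versions in this diff:
assert version_check("0.24.0", "0.24.4") == 1   # an update is available
assert version_check("0.24.4", "0.24.4") == 0   # up to date
assert version_check("0.24.4", "0.24.0") == 2   # local copy is newer
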
@@ -740,6 +823,7 @@ try:
740
823
  compressionsupport.append("lz4")
741
824
  except ImportError:
742
825
  pass
826
+ '''
743
827
  try:
744
828
  import lzo
745
829
  compressionsupport.append("lzo")
@@ -747,6 +831,7 @@ try:
747
831
  except ImportError:
748
832
  lzo = None
749
833
  pass
834
+ '''
750
835
  try:
751
836
  import zstandard
752
837
  compressionsupport.append("zst")
@@ -806,11 +891,13 @@ if('lzo' in compressionsupport):
806
891
  compressionlistalt.append('lzo')
807
892
  outextlist.append('lzo')
808
893
  outextlistwd.append('.lzo')
894
+ '''
809
895
  if('lzop' in compressionsupport):
810
896
  compressionlist.append('lzop')
811
897
  compressionlistalt.append('lzop')
812
898
  outextlist.append('lzop')
813
899
  outextlistwd.append('.lzop')
900
+ '''
814
901
  if('lzma' in compressionsupport):
815
902
  compressionlist.append('lzma')
816
903
  compressionlistalt.append('lzma')
@@ -1956,7 +2043,7 @@ def _normalize_initial_data(data, isbytes, encoding, errors=None):
1956
2043
 
1957
2044
 
1958
2045
  def MkTempFile(data=None,
1959
- inmem=True,
2046
+ inmem=__use_inmemfile__,
1960
2047
  isbytes=True,
1961
2048
  prefix="",
1962
2049
  delete=True,
@@ -1964,9 +2051,9 @@ def MkTempFile(data=None,
1964
2051
  newline=None, # text mode only; in-memory objects ignore newline semantics
1965
2052
  dir=None,
1966
2053
  suffix="",
1967
- use_spool=False,
1968
- spool_max=8 * 1024 * 1024,
1969
- spool_dir=None):
2054
+ use_spool=__use_spoolfile__,
2055
+ spool_max=__spoolfile_size__,
2056
+ spool_dir=__use_spooldir__):
1970
2057
  """
1971
2058
  Return a file-like handle with consistent behavior on Py2.7 and Py3.x.
1972
2059
 
@@ -2409,7 +2496,7 @@ class ZlibFile(object):
2409
2496
 
2410
2497
  def __init__(self, file_path=None, fileobj=None, mode='rb', level=6, wbits=15,
2411
2498
  encoding=None, errors=None, newline=None,
2412
- tolerant_read=False, scan_bytes=(64 << 10), spool_threshold=(8 << 20)):
2499
+ tolerant_read=False, scan_bytes=(64 << 10), spool_threshold=__spoolfile_size__):
2413
2500
 
2414
2501
  if file_path is None and fileobj is None:
2415
2502
  raise ValueError("Either file_path or fileobj must be provided")
@@ -2896,7 +2983,7 @@ class GzipFile(object):
2896
2983
 
2897
2984
  def __init__(self, file_path=None, fileobj=None, mode='rb',
2898
2985
  level=6, encoding=None, errors=None, newline=None,
2899
- tolerant_read=False, scan_bytes=(64 << 10), spool_threshold=(8 << 20)):
2986
+ tolerant_read=False, scan_bytes=(64 << 10), spool_threshold=__spoolfile_size__):
2900
2987
 
2901
2988
  if file_path is None and fileobj is None:
2902
2989
  raise ValueError("Either file_path or fileobj must be provided")
@@ -3320,519 +3407,6 @@ def gzip_decompress_bytes_all_members(blob):
3320
3407
  """
3321
3408
  return _gzip_decompress_multimember(bytes(blob))
3322
3409
 
3323
-
3324
- # ---------- Simple LZO container (NOT real .lzop) ----------
3325
- # File layout (concatenated members allowed):
3326
- # [MAGIC 8B] [FLAGS 1B] [ULEN 8B] [CRC32 4B] [CCHUNK...] | repeat...
3327
- # where:
3328
- # MAGIC = b'\x89LZO\x0D\x0A\x1A\n'
3329
- # FLAGS = bit0: 1 => member has ULEN+CRC, 0 => no header (legacy)
3330
- # ULEN = uncompressed length (u64 BE)
3331
- # CRC32 = CRC32 of uncompressed data (u32 BE)
3332
- # CCHUNK = one or more compressed chunks:
3333
- # [u32BE chunk_size][chunk_data] ... then a zero-size u32 terminator
3334
-
3335
-
3336
- class LzopFile(object):
3337
- MAGIC = b'\x89LZO\x0D\x0A\x1A\n'
3338
- FLAG_HAS_UHDR = 0x01
3339
- RAW_CHUNK = 256 * 1024 # 256 KiB per raw (pre-compress) chunk
3340
-
3341
- def __init__(self, file_path=None, fileobj=None, mode='rb',
3342
- level=9, encoding=None, errors=None, newline=None,
3343
- write_header=True,
3344
- tolerant_read=False, scan_bytes=(64 << 10),
3345
- spool_threshold=(8 << 20)):
3346
- """
3347
- Custom LZO file (NOT the lzop(1) format).
3348
- - streaming write/read, supports concatenated members
3349
- - optional per-member header (uncompressed length + CRC32)
3350
- - spooled reads to limit RAM, strict text mode with newline control
3351
- - tolerant_read: scan forward (up to scan_bytes) to first MAGIC
3352
-
3353
- :param write_header: if True, include ULEN+CRC32 per member
3354
- :param tolerant_read: skip leading junk up to scan_bytes to find MAGIC
3355
- :param scan_bytes: max bytes to scan when tolerant_read=True
3356
- :param spool_threshold: SpooledTemporaryFile RAM threshold before spill
3357
- """
3358
- if lzo is None:
3359
- raise ImportError("python-lzo is required for LzopFile")
3360
-
3361
- if file_path is None and fileobj is None:
3362
- raise ValueError("Either file_path or fileobj must be provided")
3363
- if file_path is not None and fileobj is not None:
3364
- raise ValueError("Only one of file_path or fileobj should be provided")
3365
-
3366
- if 'b' not in mode and 't' not in mode:
3367
- mode += 'b'
3368
- if 'x' in mode and PY2:
3369
- raise ValueError("Exclusive creation mode 'x' is not supported on Python 2")
3370
-
3371
- self.file_path = file_path
3372
- self.file = fileobj
3373
- self.mode = mode
3374
- self.level = int(level) # effective: 1 or 9 (clamped)
3375
- self.encoding = encoding
3376
- self.errors = errors
3377
- self.newline = newline
3378
- self._text_mode = ('t' in mode)
3379
-
3380
- self._write_header = bool(write_header)
3381
-
3382
- # Config (read path)
3383
- self.tolerant_read = bool(tolerant_read)
3384
- self.scan_bytes = int(scan_bytes)
3385
- self.spool_threshold = int(spool_threshold)
3386
-
3387
- # Write state
3388
- self._crc = 0
3389
- self._ulen = 0
3390
- self._open_member = False
3391
- self._member_header_pos = None # position *after* ULEN+CRC placeholders
3392
-
3393
- # Read state
3394
- self._spool = None
3395
- self._text_reader = None
3396
- self._position = 0
3397
- self.closed = False
3398
-
3399
- internal_mode = mode.replace('t', 'b')
3400
- if self.file is None:
3401
- if 'x' in internal_mode and os.path.exists(file_path):
3402
- raise IOError("File exists: '{}'".format(file_path))
3403
- self.file = open(file_path, internal_mode)
3404
- else:
3405
- if 'r' in internal_mode and not hasattr(self.file, 'read'):
3406
- raise ValueError("fileobj must support read() in read mode")
3407
- if any(ch in internal_mode for ch in ('w', 'a', 'x')) and not hasattr(self.file, 'write'):
3408
- raise ValueError("fileobj must support write() in write/append mode")
3409
-
3410
- self._fp = self.file
3411
- if any(ch in internal_mode for ch in ('w', 'a', 'x')):
3412
- # Start a new member at EOF for append
3413
- if 'a' in internal_mode:
3414
- try:
3415
- self.file.seek(0, os.SEEK_END)
3416
- except Exception:
3417
- pass
3418
- # Defer writing header until first write so empty files don’t get empty members
3419
- elif 'r' in internal_mode:
3420
- self._load_all_members_spooled()
3421
- else:
3422
- raise ValueError("Unsupported mode: {}".format(mode))
3423
-
3424
- # ---------- helpers ----------
3425
- @property
3426
- def name(self):
3427
- return self.file_path
3428
-
3429
- def readable(self):
3430
- return 'r' in self.mode
3431
-
3432
- def writable(self):
3433
- return any(ch in self.mode for ch in ('w', 'a', 'x'))
3434
-
3435
- def seekable(self):
3436
- return True if self._spool is not None else bool(getattr(self.file, 'seek', None))
3437
-
3438
- def _normalize_newlines_for_write(self, s):
3439
- nl = self.newline if self.newline is not None else "\n"
3440
- return s.replace("\r\n", "\n").replace("\r", "\n").replace("\n", nl)
3441
-
3442
- def _reader(self):
3443
- return self._text_reader if self._text_mode else self._spool
3444
-
3445
- # ---------- Write path ----------
3446
- def _ensure_member_header(self):
3447
- if self._open_member:
3448
- return
3449
- flags = self.FLAG_HAS_UHDR if self._write_header else 0
3450
- self.file.write(self.MAGIC)
3451
- self.file.write(struct.pack(">B", flags))
3452
- if self._write_header:
3453
- # placeholders for ULEN+CRC; we’ll backfill on finalize
3454
- self.file.write(struct.pack(">Q", 0))
3455
- self.file.write(struct.pack(">I", 0))
3456
- # position *after* ULEN+CRC placeholders (or after FLAGS if no header)
3457
- self._member_header_pos = self.file.tell()
3458
- self._open_member = True
3459
- # reset member stats
3460
- self._crc = 0
3461
- self._ulen = 0
3462
-
3463
- def write(self, data):
3464
- if 'r' in self.mode:
3465
- raise IOError("File not open for writing")
3466
-
3467
- if self._text_mode:
3468
- enc = self.encoding or 'UTF-8'
3469
- errs = self.errors or 'strict'
3470
- if not isinstance(data, text_type):
3471
- raise TypeError("write() expects text (unicode/str) in text mode")
3472
- data = self._normalize_newlines_for_write(data).encode(enc, errs)
3473
- else:
3474
- if not isinstance(data, binary_types):
3475
- raise TypeError("write() expects bytes-like in binary mode")
3476
-
3477
- # Normalize Py3 memoryview / Py2 bytearray
3478
- if (not PY2) and isinstance(data, memoryview):
3479
- data = data.tobytes()
3480
- elif PY2 and isinstance(data, bytearray):
3481
- data = bytes(data)
3482
-
3483
- if not data:
3484
- return 0
3485
-
3486
- # Begin member and write header on first write
3487
- self._ensure_member_header()
3488
-
3489
- # Update integrity stats
3490
- self._crc = _crc32u(data, self._crc)
3491
- self._ulen += len(data)
3492
-
3493
- # Stream in RAW_CHUNK-sized pieces. Each piece becomes one compressed chunk record.
3494
- mv = memoryview(data)
3495
- # clamp level to {1, 9}
3496
- lvl = 9 if self.level >= 9 else 1
3497
- for off in range(0, len(data), self.RAW_CHUNK):
3498
- raw = mv[off:off + self.RAW_CHUNK].tobytes()
3499
- c = lzo.compress(raw, lvl)
3500
- self.file.write(struct.pack(">I", len(c)))
3501
- self.file.write(c)
3502
-
3503
- return len(data)
3504
-
3505
- def _flush_member_only(self):
3506
- """Finalize the current member: write terminator and backfill header."""
3507
- if not self._open_member:
3508
- return
3509
- # write zero-length chunk terminator
3510
- self.file.write(struct.pack(">I", 0))
3511
- if self._write_header:
3512
- # ULEN is at (_member_header_pos - 12), CRC at (_member_header_pos - 4)
3513
- ulen_pos = self._member_header_pos - 12
3514
- crc_pos = self._member_header_pos - 4
3515
- cur = self.file.tell()
3516
- # backfill ULEN
3517
- self.file.seek(ulen_pos, os.SEEK_SET)
3518
- self.file.write(struct.pack(">Q", self._ulen))
3519
- # backfill CRC32
3520
- self.file.seek(crc_pos, os.SEEK_SET)
3521
- self.file.write(struct.pack(">I", self._crc))
3522
- # restore position
3523
- self.file.seek(cur, os.SEEK_SET)
3524
- # reset for potential new member
3525
- self._open_member = False
3526
- self._crc = 0
3527
- self._ulen = 0
3528
- self._member_header_pos = None
3529
-
3530
- def flush(self):
3531
- if self.closed:
3532
- return
3533
- # finalize any open member
3534
- if any(ch in self.mode for ch in ('w', 'a', 'x')) and self._open_member:
3535
- self._flush_member_only()
3536
- if hasattr(self.file, 'flush'):
3537
- self.file.flush()
3538
-
3539
- def close(self):
3540
- if self.closed:
3541
- return
3542
- try:
3543
- # Ensure a clean member terminator & header backfill if needed
3544
- if any(ch in self.mode for ch in ('w', 'a', 'x')) and self._open_member:
3545
- self._flush_member_only()
3546
- if hasattr(self.file, 'flush'):
3547
- try:
3548
- self.file.flush()
3549
- except Exception:
3550
- pass
3551
- finally:
3552
- if self.file_path and self.file is not None:
3553
- try:
3554
- self.file.close()
3555
- except Exception:
3556
- pass
3557
- # tear down read handles
3558
- try:
3559
- if self._text_reader is not None:
3560
- self._text_reader.detach()
3561
- except Exception:
3562
- pass
3563
- try:
3564
- if self._spool is not None:
3565
- self._spool.close()
3566
- except Exception:
3567
- pass
3568
- self.closed = True
3569
-
3570
- # ---------- Read path (spooled, multi-member, tolerant scan) ----------
3571
- def _load_all_members_spooled(self):
3572
- # Seek to start if possible
3573
- try:
3574
- self.file.seek(0)
3575
- except Exception:
3576
- pass
3577
-
3578
- self._spool = tempfile.SpooledTemporaryFile(max_size=self.spool_threshold)
3579
-
3580
- def read_exact(n, abs_off_ref):
3581
- """Read exactly n bytes, updating abs_off_ref[0]."""
3582
- b = b""
3583
- while len(b) < n:
3584
- part = self.file.read(n - len(b))
3585
- if not part:
3586
- break
3587
- b += part
3588
- abs_off_ref[0] += len(part)
3589
- return b
3590
-
3591
- CHUNK = 1 << 20
3592
- abs_off = [0] # track absolute file offset
3593
- scanned = 0
3594
-
3595
- while True:
3596
- # Locate MAGIC (support tolerant scan across chunk boundaries)
3597
- head = read_exact(len(self.MAGIC), abs_off)
3598
- if not head:
3599
- break # EOF
3600
- if head != self.MAGIC:
3601
- # Tolerant scan: slide-by-one until found or limit exceeded
3602
- buf = head
3603
- while True:
3604
- if self.tolerant_read and scanned < self.scan_bytes:
3605
- nxt = read_exact(1, abs_off)
3606
- if not nxt:
3607
- # EOF without finding magic
3608
- raise ValueError("Invalid LZO container: magic not found before EOF")
3609
- buf = buf[1:] + nxt
3610
- scanned += 1
3611
- if buf == self.MAGIC:
3612
- break
3613
- continue
3614
- raise ValueError("Invalid LZO container magic near offset {}".format(abs_off[0] - len(buf)))
3615
- # found MAGIC; proceed
3616
-
3617
- # FLAGS
3618
- f_b = read_exact(1, abs_off)
3619
- if len(f_b) != 1:
3620
- raise ValueError("Truncated header (flags) at offset {}".format(abs_off[0]))
3621
- flags = ord(f_b) if PY2 else f_b[0]
3622
-
3623
- # Optional ULEN/CRC
3624
- ulen = None
3625
- expect_crc = None
3626
- if flags & self.FLAG_HAS_UHDR:
3627
- ulen_b = read_exact(8, abs_off)
3628
- crc_b = read_exact(4, abs_off)
3629
- if len(ulen_b) != 8 or len(crc_b) != 4:
3630
- raise ValueError("Truncated ULEN/CRC header at offset {}".format(abs_off[0]))
3631
- ulen = struct.unpack(">Q", ulen_b)[0]
3632
- expect_crc = struct.unpack(">I", crc_b)[0]
3633
-
3634
- # Chunk loop
3635
- m_crc = 0
3636
- m_len = 0
3637
- while True:
3638
- sz_b = read_exact(4, abs_off)
3639
- if len(sz_b) != 4:
3640
- raise ValueError("Truncated chunk size at offset {}".format(abs_off[0]))
3641
- csz = struct.unpack(">I", sz_b)[0]
3642
- if csz == 0:
3643
- break # end of member
3644
- cdata = read_exact(csz, abs_off)
3645
- if len(cdata) != csz:
3646
- raise ValueError("Truncated chunk payload at offset {}".format(abs_off[0]))
3647
- try:
3648
- raw = lzo.decompress(cdata)
3649
- except Exception as e:
3650
- raise ValueError("LZO decompression error at offset {}: {}".format(abs_off[0], e))
3651
- self._spool.write(raw)
3652
- m_len += len(raw)
3653
- m_crc = _crc32u(raw, m_crc)
3654
-
3655
- # Validate member integrity if header present
3656
- if ulen is not None and m_len != ulen:
3657
- raise ValueError("Member length mismatch ({} != {})".format(m_len, ulen))
3658
- if expect_crc is not None and m_crc != expect_crc:
3659
- raise ValueError("Member CRC32 mismatch (got 0x{:08x}, want 0x{:08x})"
3660
- .format(m_crc, expect_crc))
3661
-
3662
- # Prepare read handles
3663
- try:
3664
- self._spool.seek(0)
3665
- except Exception:
3666
- pass
3667
-
3668
- if self._text_mode:
3669
- enc = self.encoding or 'UTF-8'
3670
- errs = self.errors or 'strict'
3671
- # newline=None => universal newline translation; exact string if provided
3672
- self._text_reader = io.TextIOWrapper(self._spool, encoding=enc, errors=errs, newline=self.newline)
3673
- try:
3674
- self._text_reader.seek(0)
3675
- except Exception:
3676
- pass
3677
-
3678
- self._position = 0
3679
-
3680
- # ---------- Buffered read API (delegates to spool/text wrapper) ----------
3681
- def read(self, size=-1):
3682
- if self.closed:
3683
- raise ValueError("I/O operation on closed file")
3684
- if 'r' not in self.mode:
3685
- raise IOError("File not open for reading")
3686
- r = self._reader()
3687
- if r is None:
3688
- raise IOError("Reader not initialized")
3689
- out = r.read() if (size is None or size < 0) else r.read(int(size))
3690
- try:
3691
- self._position = r.tell()
3692
- except Exception:
3693
- pass
3694
- return out
3695
-
3696
- def readline(self, size=-1):
3697
- if self.closed:
3698
- raise ValueError("I/O operation on closed file")
3699
- if 'r' not in self.mode:
3700
- raise IOError("File not open for reading")
3701
- r = self._reader()
3702
- if r is None:
3703
- raise IOError("Reader not initialized")
3704
- out = r.readline() if (size is None or size < 0) else r.readline(int(size))
3705
- try:
3706
- self._position = r.tell()
3707
- except Exception:
3708
- pass
3709
- if not self._text_mode and out is None:
3710
- return b""
3711
- if self._text_mode and out is None:
3712
- return text_type("")
3713
- return out
3714
-
3715
- def __iter__(self):
3716
- return self
3717
-
3718
- def __next__(self):
3719
- line = self.readline()
3720
- if (self._text_mode and line == "") or (not self._text_mode and line == b""):
3721
- raise StopIteration
3722
- return line
3723
-
3724
- if PY2:
3725
- next = __next__
3726
-
3727
- def seek(self, offset, whence=0):
3728
- if self.closed:
3729
- raise ValueError("I/O operation on closed file")
3730
- if 'r' not in self.mode:
3731
- raise IOError("File not open for reading")
3732
- r = self._reader()
3733
- if r is None:
3734
- raise IOError("Reader not initialized")
3735
- newpos = r.seek(int(offset), int(whence))
3736
- self._position = newpos
3737
- return newpos
3738
-
3739
- def tell(self):
3740
- if self._reader() is not None:
3741
- try:
3742
- self._position = self._reader().tell()
3743
- except Exception:
3744
- pass
3745
- return self._position
3746
-
3747
- # ---------- Misc ----------
3748
- def fileno(self):
3749
- if hasattr(self.file, 'fileno'):
3750
- return self.file.fileno()
3751
- raise OSError("Underlying file object does not support fileno()")
3752
-
3753
- def isatty(self):
3754
- return bool(getattr(self.file, 'isatty', lambda: False)())
3755
-
3756
- def truncate(self, size=None):
3757
- # Prevent corruption of compressed streams
3758
- raise OSError("truncate() is not supported for compressed streams")
3759
-
3760
- # ---------- Convenience constructors ----------
3761
- @classmethod
3762
- def open(cls, path, mode='rb', **kw):
3763
- """
3764
- Mirror built-in open() but for LzopFile.
3765
- Example:
3766
- with LzopFile.open("data.lzo", "rt", encoding="utf-8") as f:
3767
- print(f.readline())
3768
- """
3769
- return cls(file_path=path, mode=mode, **kw)
3770
-
3771
- @classmethod
3772
- def from_fileobj(cls, fileobj, mode='rb', **kw):
3773
- """
3774
- Wrap an existing file-like object (caller retains ownership).
3775
- """
3776
- return cls(fileobj=fileobj, mode=mode, **kw)
3777
-
3778
- @classmethod
3779
- def from_bytes(cls, data, mode='rb', **kw):
3780
- """
3781
- Read from an in-memory bytes buffer.
3782
- Example:
3783
- f = LzopFile.from_bytes(blob, mode='rt', encoding='utf-8', tolerant_read=True)
3784
- text = f.read()
3785
- """
3786
- if not isinstance(data, (bytes, bytearray, memoryview)):
3787
- raise TypeError("from_bytes() expects a bytes-like object")
3788
- bio = io.BytesIO(bytes(data) if not isinstance(data, bytes) else data)
3789
- return cls(fileobj=bio, mode=mode, **kw)
3790
-
3791
- # compatibility aliases for unwrapping utilities
3792
- @property
3793
- def fileobj(self):
3794
- return self.file
3795
-
3796
- @property
3797
- def myfileobj(self):
3798
- return self.file
3799
-
3800
- # ---------- Top-level helpers ----------
3801
- def lzop_compress_bytes(payload, level=9, text=False, **kw):
3802
- """
3803
- Compress 'payload' into a single LZO member (our custom container) and return bytes.
3804
- - text=True: 'payload' is text; encoding/newline/errors handled via LzopFile('wt')
3805
- - text=False: 'payload' is bytes-like; written via LzopFile('wb')
3806
- Kwargs forwarded: write_header (default True), newline/encoding/errors, etc.
3807
- """
3808
- bio = io.BytesIO()
3809
- mode = 'wt' if text else 'wb'
3810
- f = LzopFile(fileobj=bio, mode=mode, level=level, **kw)
3811
- try:
3812
- f.write(payload)
3813
- f.flush() # finalize member (writes terminator + backfills header)
3814
- finally:
3815
- f.close()
3816
- return bio.getvalue()
3817
-
3818
-
3819
- def lzop_decompress_bytes(blob, mode='rb', tolerant_read=False, scan_bytes=(64 << 10),
3820
- spool_threshold=(8 << 20), **kw):
3821
- """
3822
- Decompress bytes produced by this custom container.
3823
- - mode='rb' -> returns bytes; mode='rt' -> returns text (set encoding/errors/newline in kw)
3824
- - tolerant_read/scan_bytes/spool_threshold forwarded to LzopFile
3825
- """
3826
- if not isinstance(blob, (bytes, bytearray, memoryview)):
3827
- raise TypeError("lzop_decompress_bytes() expects a bytes-like object")
3828
- f = LzopFile.from_bytes(blob, mode=mode, tolerant_read=tolerant_read,
3829
- scan_bytes=scan_bytes, spool_threshold=spool_threshold, **kw)
3830
- try:
3831
- return f.read()
3832
- finally:
3833
- f.close()
3834
-
3835
-
3836
3410
  def TarFileCheck(infile):
3837
3411
  try:
3838
3412
  if is_tarfile(infile):
@@ -4074,19 +3648,62 @@ def crc64_iso(msg, initial_value=0xFFFFFFFFFFFFFFFF):
4074
3648
  initial_value & 0xFFFFFFFFFFFFFFFF,
4075
3649
  0xFFFFFFFFFFFFFFFF, True, True)
4076
3650
 
4077
- # =========================
4078
- # Incremental CRC context
4079
- # =========================
4080
- CRCSpec = namedtuple("CRCSpec", "width poly init xorout refin refout")
3651
+ # =========================
3652
+ # Incremental CRC context
3653
+ # =========================
3654
+ CRCSpec = namedtuple("CRCSpec", "width poly init xorout refin refout")
3655
+
3656
+ _CRC_SPECS = {
3657
+ "crc16_ansi": CRCSpec(16, 0x8005, 0xFFFF, 0x0000, True, True),
3658
+ "crc16_ccitt": CRCSpec(16, 0x1021, 0xFFFF, 0x0000, False, False),
3659
+ "crc16_x25": CRCSpec(16, 0x1021, 0xFFFF, 0xFFFF, True, True),
3660
+ "crc16_kermit":CRCSpec(16, 0x1021, 0x0000, 0x0000, True, True),
3661
+ "crc64_ecma": CRCSpec(64, 0x42F0E1EBA9EA3693, 0x0000000000000000, 0x0000000000000000, False, False),
3662
+ "crc64_iso": CRCSpec(64, 0x000000000000001B, 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF, True, True),
3663
+ }
3664
+
3665
+ # --- helpers --------------------------------------------------------------
3666
+
3667
+ try:
3668
+ # Python 2 may not have algorithms_available
3669
+ _ALGORITHMS_AVAILABLE = set(hashlib.algorithms_available)
3670
+ except AttributeError:
3671
+ _ALGORITHMS_AVAILABLE = set(getattr(hashlib, "algorithms", []))
3672
+
3673
+
3674
+ def _coerce_bytes(data):
3675
+ """Return `data` as a bytes object (Py2 / Py3)."""
3676
+ if isinstance(data, memoryview):
3677
+ # Py3 has .tobytes(), Py2 falls back to bytes()
3678
+ try:
3679
+ return data.tobytes()
3680
+ except AttributeError:
3681
+ return bytes(data)
3682
+
3683
+ if isinstance(data, bytearray):
3684
+ return bytes(data)
3685
+
3686
+ if not isinstance(data, bytes):
3687
+ # E.g. list of ints, unicode, etc.
3688
+ return bytes(bytearray(data))
3689
+
3690
+ return data
3691
+
3692
+
3693
+ def _bytes_to_int(b):
3694
+ """Big-endian bytes -> int, Py2/3 safe."""
3695
+ if not isinstance(b, (bytes, bytearray)):
3696
+ b = _coerce_bytes(b)
4081
3697
 
4082
- _CRC_SPECS = {
4083
- "crc16_ansi": CRCSpec(16, 0x8005, 0xFFFF, 0x0000, True, True),
4084
- "crc16_ccitt": CRCSpec(16, 0x1021, 0xFFFF, 0x0000, False, False),
4085
- "crc16_x25": CRCSpec(16, 0x1021, 0xFFFF, 0xFFFF, True, True),
4086
- "crc16_kermit":CRCSpec(16, 0x1021, 0x0000, 0x0000, True, True),
4087
- "crc64_ecma": CRCSpec(64, 0x42F0E1EBA9EA3693, 0x0000000000000000, 0x0000000000000000, False, False),
4088
- "crc64_iso": CRCSpec(64, 0x000000000000001B, 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF, True, True),
4089
- }
3698
+ value = 0
3699
+ for ch in b:
3700
+ if not isinstance(ch, int): # Py2: ch is a 1-char string
3701
+ ch = ord(ch)
3702
+ value = (value << 8) | ch
3703
+ return value
3704
+
3705
+
3706
+ # --- your existing CRCContext (unchanged) ---------------------------------
4090
3707
 
4091
3708
  class CRCContext(object):
4092
3709
  __slots__ = ("spec", "table", "mask", "shift", "crc")
@@ -4132,6 +3749,82 @@ class CRCContext(object):
4132
3749
  width_hex = (self.spec.width + 3) // 4
4133
3750
  return format(self.digest_int(), "0{}x".format(width_hex)).lower()
4134
3751
 
3752
+
3753
+ # --- hashlib-backed implementation ---------------------------------------
3754
+
3755
+ class _HashlibCRCWrapper(object):
3756
+ """
3757
+ Wrap a hashlib object to present the same interface as CRCContext
3758
+ (update, digest_int, hexdigest).
3759
+
3760
+ Assumes the hashlib algorithm already implements the exact CRC
3761
+ specification (refin/refout/xorout/etc.).
3762
+ """
3763
+ __slots__ = ("_h", "spec", "mask", "width_hex")
3764
+
3765
+ def __init__(self, algo_name, spec):
3766
+ self._h = hashlib.new(algo_name)
3767
+ self.spec = spec
3768
+ self.mask = (1 << spec.width) - 1
3769
+ self.width_hex = (spec.width + 3) // 4
3770
+
3771
+ def update(self, data):
3772
+ self._h.update(_coerce_bytes(data))
3773
+ return self
3774
+
3775
+ def digest_int(self):
3776
+ # Convert final digest bytes to an integer and mask to width
3777
+ value = _bytes_to_int(self._h.digest())
3778
+ return value & self.mask
3779
+
3780
+ def hexdigest(self):
3781
+ h = self._h.hexdigest().lower()
3782
+ # Normalize to the same number of hex digits as CRCContext
3783
+ if len(h) < self.width_hex:
3784
+ h = ("0" * (self.width_hex - len(h))) + h
3785
+ elif len(h) > self.width_hex:
3786
+ h = h[-self.width_hex:]
3787
+ return h
3788
+
3789
+
3790
+ # --- public class: choose hashlib or fallback -----------------------------
3791
+
3792
+ class CRC(object):
3793
+ """
3794
+ CRC wrapper that uses hashlib if available, otherwise falls back to
3795
+ the pure-Python CRCContext.
3796
+
3797
+ spec.hashlib_name (preferred) or spec.name is used as the hashlib
3798
+ algorithm name, e.g. 'crc32', 'crc32c', etc.
3799
+ """
3800
+
3801
+ __slots__ = ("spec", "_impl")
3802
+
3803
+ def __init__(self, spec):
3804
+ self.spec = spec
3805
+
3806
+ algo_name = getattr(spec, "hashlib_name", None) or getattr(spec, "name", None)
3807
+ impl = None
3808
+
3809
+ if algo_name and algo_name in _ALGORITHMS_AVAILABLE:
3810
+ # Use hashlib-backed implementation
3811
+ impl = _HashlibCRCWrapper(algo_name, spec)
3812
+ else:
3813
+ # Fallback to your pure-Python implementation
3814
+ impl = CRCContext(spec)
3815
+
3816
+ self._impl = impl
3817
+
3818
+ def update(self, data):
3819
+ self._impl.update(data)
3820
+ return self
3821
+
3822
+ def digest_int(self):
3823
+ return self._impl.digest_int()
3824
+
3825
+ def hexdigest(self):
3826
+ return self._impl.hexdigest()
3827
+
4135
3828
  def crc_context_from_name(name_norm):
4136
3829
  spec = _CRC_SPECS.get(name_norm)
4137
3830
  if spec is None:
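
For reference, a self-contained bit-by-bit CRC over the CRCSpec fields used above (width/poly/init/xorout/refin/refout). This is an illustrative sketch, not the module's table-driven CRCContext or the hashlib-backed wrapper; the "crc16_ccitt" spec corresponds to CRC-16/CCITT-FALSE, whose standard check value over b"123456789" is 0x29B1:

def crc_bitwise(data, width, poly, init, xorout, refin, refout):
    """Straightforward (slow) CRC over bytes, following the CRCSpec fields."""
    mask = (1 << width) - 1
    crc = init & mask
    for byte in bytearray(data):
        if refin:
            byte = int('{:08b}'.format(byte)[::-1], 2)          # reflect each input byte
        crc ^= byte << (width - 8)
        for _ in range(8):
            crc = ((crc << 1) ^ poly) if (crc & (1 << (width - 1))) else (crc << 1)
            crc &= mask
    if refout:
        crc = int(('{:0' + str(width) + 'b}').format(crc)[::-1], 2)  # reflect the result
    return crc ^ xorout

# CRC-16/CCITT-FALSE (the "crc16_ccitt" spec above): check value 0x29B1.
assert crc_bitwise(b"123456789", 16, 0x1021, 0xFFFF, 0x0000, False, False) == 0x29B1
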
@@ -5225,7 +4918,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
5225
4918
  cfcontents.close()
5226
4919
  fcontents.seek(0, 0)
5227
4920
  fccs = GetFileChecksum(
5228
- fcontents.read(), HeaderOut[-3].lower(), False, formatspecs)
4921
+ fcontents, HeaderOut[-3].lower(), False, formatspecs)
5229
4922
  fcontentend = fp.tell()
5230
4923
  if(re.findall("^\\+([0-9]+)", fseeknextfile)):
5231
4924
  fseeknextasnum = int(fseeknextfile.replace("+", ""))
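
The call above now passes the open fcontents handle instead of fcontents.read(), so the checksum can be computed without materializing the whole member in memory (assuming GetFileChecksum accepts file-like objects; its body is not part of this diff). The chunked pattern looks roughly like this:

import hashlib
import io

def checksum_fileobj(fp, algo="sha256", chunk_size=1 << 20):
    """Hash a file-like object in chunks instead of reading it all at once."""
    h = hashlib.new(algo)
    while True:
        chunk = fp.read(chunk_size)
        if not chunk:
            break
        h.update(chunk)
    return h.hexdigest()

assert checksum_fileobj(io.BytesIO(b"abc")) == hashlib.sha256(b"abc").hexdigest()
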
@@ -6463,8 +6156,7 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
6463
6156
  pass
6464
6157
  return fp
6465
6158
 
6466
-
6467
- def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
6159
+ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
6468
6160
  if(not hasattr(fp, "write")):
6469
6161
  return False
6470
6162
  advancedlist = formatspecs['use_advanced_list']
@@ -6514,6 +6206,16 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
6514
6206
  numfiles = int(len(GetDirList))
6515
6207
  fnumfiles = format(numfiles, 'x').lower()
6516
6208
  AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
6209
+ try:
6210
+ fp.flush()
6211
+ if(hasattr(os, "sync")):
6212
+ os.fsync(fp.fileno())
6213
+ except io.UnsupportedOperation:
6214
+ pass
6215
+ except AttributeError:
6216
+ pass
6217
+ except OSError:
6218
+ pass
6517
6219
  FullSizeFilesAlt = 0
6518
6220
  for curfname in GetDirList:
6519
6221
  fencoding = "UTF-8"
@@ -6765,6 +6467,16 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, filevalues=[], ext
6765
6467
  fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
6766
6468
  AppendFileHeaderWithContent(
6767
6469
  fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
6470
+ try:
6471
+ fp.flush()
6472
+ if(hasattr(os, "sync")):
6473
+ os.fsync(fp.fileno())
6474
+ except io.UnsupportedOperation:
6475
+ pass
6476
+ except AttributeError:
6477
+ pass
6478
+ except OSError:
6479
+ pass
6768
6480
  return fp
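
The same flush-then-fsync block is inserted after every AppendFileHeader / AppendFileHeaderWithContent call in this release. A hypothetical helper capturing that pattern (here gated on os.fsync being present, whereas the diff checks hasattr(os, "sync")):

import io
import os

def _flush_and_sync(fp):
    """Best-effort flush of fp, then fsync if the platform and object support it."""
    try:
        fp.flush()
        if hasattr(os, "fsync"):
            os.fsync(fp.fileno())
    except (io.UnsupportedOperation, AttributeError, OSError):
        pass   # in-memory buffers, sockets, etc. simply skip the sync
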
6769
6481
 
6770
6482
  def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
@@ -6773,8 +6485,6 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
6773
6485
  if(verbose):
6774
6486
  logging.basicConfig(format="%(message)s",
6775
6487
  stream=sys.stdout, level=logging.DEBUG)
6776
- formver = formatspecs['format_ver']
6777
- fileheaderver = str(int(formver.replace(".", "")))
6778
6488
  curinode = 0
6779
6489
  curfid = 0
6780
6490
  inodelist = []
@@ -6842,6 +6552,16 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
6842
6552
  return False
6843
6553
  numfiles = int(len(tarfp.getmembers()))
6844
6554
  AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
6555
+ try:
6556
+ fp.flush()
6557
+ if(hasattr(os, "sync")):
6558
+ os.fsync(fp.fileno())
6559
+ except io.UnsupportedOperation:
6560
+ pass
6561
+ except AttributeError:
6562
+ pass
6563
+ except OSError:
6564
+ pass
6845
6565
  for member in sorted(tarfp.getmembers(), key=lambda x: x.name):
6846
6566
  fencoding = "UTF-8"
6847
6567
  if(re.findall("^[.|/]", member.name)):
@@ -6980,6 +6700,16 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
6980
6700
  fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
6981
6701
  AppendFileHeaderWithContent(
6982
6702
  fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
6703
+ try:
6704
+ fp.flush()
6705
+ if(hasattr(os, "sync")):
6706
+ os.fsync(fp.fileno())
6707
+ except io.UnsupportedOperation:
6708
+ pass
6709
+ except AttributeError:
6710
+ pass
6711
+ except OSError:
6712
+ pass
6983
6713
  fcontents.close()
6984
6714
  return fp
6985
6715
 
@@ -6989,8 +6719,6 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
6989
6719
  if(verbose):
6990
6720
  logging.basicConfig(format="%(message)s",
6991
6721
  stream=sys.stdout, level=logging.DEBUG)
6992
- formver = formatspecs['format_ver']
6993
- fileheaderver = str(int(formver.replace(".", "")))
6994
6722
  curinode = 0
6995
6723
  curfid = 0
6996
6724
  inodelist = []
@@ -7028,6 +6756,16 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
7028
6756
  VerbosePrintOut("Bad file found!")
7029
6757
  numfiles = int(len(zipfp.infolist()))
7030
6758
  AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
6759
+ try:
6760
+ fp.flush()
6761
+ if(hasattr(os, "sync")):
6762
+ os.fsync(fp.fileno())
6763
+ except io.UnsupportedOperation:
6764
+ pass
6765
+ except AttributeError:
6766
+ pass
6767
+ except OSError:
6768
+ pass
7031
6769
  for member in sorted(zipfp.infolist(), key=lambda x: x.filename):
7032
6770
  fencoding = "UTF-8"
7033
6771
  if(re.findall("^[.|/]", member.filename)):
@@ -7038,9 +6776,9 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
7038
6776
  if(verbose):
7039
6777
  VerbosePrintOut(fname)
7040
6778
  if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
7041
- fpremode = int(stat.S_IFDIR + 511)
6779
+ fpremode = int(stat.S_IFDIR | 0x1ff)
7042
6780
  else:
7043
- fpremode = int(stat.S_IFREG + 438)
6781
+ fpremode = int(stat.S_IFREG | 0x1b6)
7044
6782
  flinkcount = 0
7045
6783
  ftype = 0
7046
6784
  if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
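
The mode constants switch from addition to bitwise OR. Numerically nothing changes, because 0x1b6 == 438 == 0o666 and 0x1ff == 511 == 0o777 occupy only the permission bits, which are zero in stat.S_IFREG / stat.S_IFDIR; OR simply makes the intent explicit and cannot double-count bits. A quick check:

import stat

assert 0x1b6 == 438 == 0o666 and 0x1ff == 511 == 0o777
assert (stat.S_IFREG | 0o666) == (stat.S_IFREG + 438)
assert (stat.S_IFDIR | 0o777) == (stat.S_IFDIR + 511)
assert stat.S_IMODE(stat.S_IFREG | 0o666) == 0o666
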
@@ -7069,37 +6807,42 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
7069
6807
  fbtime = format(
7070
6808
  int(time.mktime(member.date_time + (0, 0, -1))), 'x').lower()
7071
6809
  if(zipinfo.create_system == 0 or zipinfo.create_system == 10):
7072
- fwinattributes = format(int(zipinfo.external_attr), 'x').lower()
6810
+ fwinattributes = format(int(zipinfo.external_attr & 0xFFFF), 'x').lower()
7073
6811
  if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
7074
- fmode = format(int(stat.S_IFDIR + 511), 'x').lower()
7075
- fchmode = stat.S_IMODE(int(stat.S_IFDIR + 511))
7076
- ftypemod = stat.S_IFMT(int(stat.S_IFDIR + 511))
6812
+ fmode = format(int(stat.S_IFDIR | 0x1ff), 'x').lower()
6813
+ fchmode = stat.S_IMODE(int(stat.S_IFDIR | 0x1ff))
6814
+ ftypemod = stat.S_IFMT(int(stat.S_IFDIR | 0x1ff))
7077
6815
  else:
7078
- fmode = format(int(stat.S_IFREG + 438), 'x').lower()
7079
- fchmode = stat.S_IMODE(int(stat.S_IFREG + 438))
7080
- ftypemod = stat.S_IFMT(int(stat.S_IFREG + 438))
6816
+ fmode = format(int(stat.S_IFREG | 0x1b6), 'x').lower()
6817
+ fchmode = stat.S_IMODE(int(stat.S_IFREG | 0x1b6))
6818
+ ftypemod = stat.S_IFMT(int(stat.S_IFREG | 0x1b6))
7081
6819
  elif(zipinfo.create_system == 3):
7082
- fwinattributes = format(int(0), 'x').lower()
7083
- try:
7084
- fmode = format(int(zipinfo.external_attr), 'x').lower()
7085
- prefmode = int(zipinfo.external_attr)
7086
- fchmode = stat.S_IMODE(prefmode)
7087
- ftypemod = stat.S_IFMT(prefmode)
7088
- except OverflowError:
7089
- fmode = format(int(zipinfo.external_attr >> 16), 'x').lower()
7090
- prefmode = int(zipinfo.external_attr >> 16)
7091
- fchmode = stat.S_IMODE(prefmode)
7092
- ftypemod = stat.S_IFMT(prefmode)
7093
- else:
7094
- fwinattributes = format(int(0), 'x').lower()
6820
+ fwinattributes = format(int(zipinfo.external_attr & 0xFFFF), 'x').lower()
6821
+ fmode = format(int((zipinfo.external_attr >> 16) & 0xFFFF), 'x').lower()
6822
+ prefmode = int((zipinfo.external_attr >> 16) & 0xFFFF)
6823
+ if (prefmode == 0):
6824
+ if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
6825
+ fmode = format(int(stat.S_IFDIR | 0x1ff), 'x').lower()
6826
+ prefmode = int(stat.S_IFDIR | 0x1ff)
6827
+ fchmode = stat.S_IMODE(prefmode)
6828
+ ftypemod = stat.S_IFMT(prefmode)
6829
+ else:
6830
+ fmode = format(int(stat.S_IFREG | 0x1b6), 'x').lower()
6831
+ prefmode = int(stat.S_IFREG | 0x1b6)
6832
+ fchmode = stat.S_IMODE(prefmode)
6833
+ ftypemod = stat.S_IFMT(prefmode)
6834
+ fchmode = stat.S_IMODE(prefmode)
6835
+ ftypemod = stat.S_IFMT(prefmode)
6836
+ else:
6837
+ fwinattributes = format(int(zipinfo.external_attr & 0xFFFF), 'x').lower()
7095
6838
  if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
7096
- fmode = format(int(stat.S_IFDIR + 511), 'x').lower()
7097
- prefmode = int(stat.S_IFDIR + 511)
6839
+ fmode = format(int(stat.S_IFDIR | 0x1ff), 'x').lower()
6840
+ prefmode = int(stat.S_IFDIR | 0x1ff)
7098
6841
  fchmode = stat.S_IMODE(prefmode)
7099
6842
  ftypemod = stat.S_IFMT(prefmode)
7100
6843
  else:
7101
- fmode = format(int(stat.S_IFREG + 438), 'x').lower()
7102
- prefmode = int(stat.S_IFREG + 438)
6844
+ fmode = format(int(stat.S_IFREG | 0x1b6), 'x').lower()
6845
+ prefmode = int(stat.S_IFREG | 0x1b6)
7103
6846
  fchmode = stat.S_IMODE(prefmode)
7104
6847
  ftypemod = stat.S_IFMT(prefmode)
7105
6848
  fcompression = ""
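
ZipInfo.external_attr packs the Unix st_mode in its upper 16 bits (for archives written on Unix, create_system == 3) and DOS/Windows attribute flags in the low bits, which is what the new masking and shifting above unpacks. A small sketch of building and splitting such a value (the 0o644 mode and archive bit are illustrative):

import stat

unix_mode = stat.S_IFREG | 0o644            # regular file, rw-r--r--
dos_attrs = 0x20                            # FILE_ATTRIBUTE_ARCHIVE
external_attr = (unix_mode << 16) | dos_attrs

assert (external_attr >> 16) & 0xFFFF == unix_mode
assert external_attr & 0xFFFF == dos_attrs
assert stat.S_IFMT((external_attr >> 16) & 0xFFFF) == stat.S_IFREG
assert stat.S_IMODE((external_attr >> 16) & 0xFFFF) == 0o644
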
@@ -7190,6 +6933,16 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
7190
6933
  fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
7191
6934
  AppendFileHeaderWithContent(
7192
6935
  fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
6936
+ try:
6937
+ fp.flush()
6938
+ if(hasattr(os, "sync")):
6939
+ os.fsync(fp.fileno())
6940
+ except io.UnsupportedOperation:
6941
+ pass
6942
+ except AttributeError:
6943
+ pass
6944
+ except OSError:
6945
+ pass
7193
6946
  fcontents.close()
7194
6947
  return fp
7195
6948
 
@@ -7204,8 +6957,6 @@ if(rarfile_support):
7204
6957
  if(verbose):
7205
6958
  logging.basicConfig(format="%(message)s",
7206
6959
  stream=sys.stdout, level=logging.DEBUG)
7207
- formver = formatspecs['format_ver']
7208
- fileheaderver = str(int(formver.replace(".", "")))
7209
6960
  curinode = 0
7210
6961
  curfid = 0
7211
6962
  inodelist = []
@@ -7222,6 +6973,16 @@ if(rarfile_support):
7222
6973
  VerbosePrintOut("Bad file found!")
7223
6974
  numfiles = int(len(rarfp.infolist()))
7224
6975
  AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
6976
+ try:
6977
+ fp.flush()
6978
+ if(hasattr(os, "sync")):
6979
+ os.fsync(fp.fileno())
6980
+ except io.UnsupportedOperation:
6981
+ pass
6982
+ except AttributeError:
6983
+ pass
6984
+ except OSError:
6985
+ pass
7225
6986
  try:
7226
6987
  fp.flush()
7227
6988
  if(hasattr(os, "sync")):
@@ -7263,11 +7024,11 @@ if(rarfile_support):
7263
7024
  if(is_unix and member.external_attr != 0):
7264
7025
  fpremode = int(member.external_attr)
7265
7026
  elif(member.is_file()):
7266
- fpremode = int(stat.S_IFREG + 438)
7027
+ fpremode = int(stat.S_IFREG | 0x1b6)
7267
7028
  elif(member.is_symlink()):
7268
- fpremode = int(stat.S_IFLNK + 438)
7029
+ fpremode = int(stat.S_IFLNK | 0x1b6)
7269
7030
  elif(member.is_dir()):
7270
- fpremode = int(stat.S_IFDIR + 511)
7031
+ fpremode = int(stat.S_IFDIR | 0x1ff)
7271
7032
  if(is_windows and member.external_attr != 0):
7272
7033
  fwinattributes = format(int(member.external_attr), 'x').lower()
7273
7034
  else:
@@ -7320,23 +7081,23 @@ if(rarfile_support):
7320
7081
  ftypemod = format(
7321
7082
  int(stat.S_IFMT(member.external_attr)), 'x').lower()
7322
7083
  elif(member.is_file()):
7323
- fmode = format(int(stat.S_IFREG + 438), 'x').lower()
7084
+ fmode = format(int(stat.S_IFREG | 0x1b6), 'x').lower()
7324
7085
  fchmode = format(
7325
- int(stat.S_IMODE(int(stat.S_IFREG + 438))), 'x').lower()
7086
+ int(stat.S_IMODE(int(stat.S_IFREG | 0x1b6))), 'x').lower()
7326
7087
  ftypemod = format(
7327
- int(stat.S_IFMT(int(stat.S_IFREG + 438))), 'x').lower()
7088
+ int(stat.S_IFMT(int(stat.S_IFREG | 0x1b6))), 'x').lower()
7328
7089
  elif(member.is_symlink()):
7329
- fmode = format(int(stat.S_IFLNK + 438), 'x').lower()
7090
+ fmode = format(int(stat.S_IFLNK | 0x1b6), 'x').lower()
7330
7091
  fchmode = format(
7331
- int(stat.S_IMODE(int(stat.S_IFREG + 438))), 'x').lower()
7092
+ int(stat.S_IMODE(int(stat.S_IFREG | 0x1b6))), 'x').lower()
7332
7093
  ftypemod = format(
7333
- int(stat.S_IFMT(int(stat.S_IFREG + 438))), 'x').lower()
7094
+ int(stat.S_IFMT(int(stat.S_IFREG | 0x1b6))), 'x').lower()
7334
7095
  elif(member.is_dir()):
7335
- fmode = format(int(stat.S_IFDIR + 511), 'x').lower()
7096
+ fmode = format(int(stat.S_IFDIR | 0x1ff), 'x').lower()
7336
7097
  fchmode = format(
7337
- int(stat.S_IMODE(int(stat.S_IFDIR + 511))), 'x').lower()
7098
+ int(stat.S_IMODE(int(stat.S_IFDIR | 0x1ff))), 'x').lower()
7338
7099
  ftypemod = format(
7339
- int(stat.S_IFMT(int(stat.S_IFDIR + 511))), 'x').lower()
7100
+ int(stat.S_IFMT(int(stat.S_IFDIR | 0x1ff))), 'x').lower()
7340
7101
  try:
7341
7102
  fuid = format(int(os.getuid()), 'x').lower()
7342
7103
  except AttributeError:
@@ -7426,15 +7187,25 @@ if(rarfile_support):
7426
7187
  fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
7427
7188
  AppendFileHeaderWithContent(
7428
7189
  fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
7190
+ try:
7191
+ fp.flush()
7192
+ if(hasattr(os, "sync")):
7193
+ os.fsync(fp.fileno())
7194
+ except io.UnsupportedOperation:
7195
+ pass
7196
+ except AttributeError:
7197
+ pass
7198
+ except OSError:
7199
+ pass
7429
7200
  fcontents.close()
7430
7201
  return fp
7431
7202
 
7432
7203
  if(not py7zr_support):
7433
- def AppendFilesWithContentFromSevenZip(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
7204
+ def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
7434
7205
  return False
7435
7206
 
7436
7207
  if(py7zr_support):
7437
- def AppendFilesWithContentFromSevenZip(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
7208
+ def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
7438
7209
  if(not hasattr(fp, "write")):
7439
7210
  return False
7440
7211
  if(verbose):
@@ -7458,6 +7229,16 @@ if(py7zr_support):
7458
7229
  VerbosePrintOut("Bad file found!")
7459
7230
  numfiles = int(len(szpfp.list()))
7460
7231
  AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
7232
+ try:
7233
+ fp.flush()
7234
+ if(hasattr(os, "sync")):
7235
+ os.fsync(fp.fileno())
7236
+ except io.UnsupportedOperation:
7237
+ pass
7238
+ except AttributeError:
7239
+ pass
7240
+ except OSError:
7241
+ pass
7461
7242
  for member in sorted(szpfp.list(), key=lambda x: x.filename):
7462
7243
  fencoding = "UTF-8"
7463
7244
  if(re.findall("^[.|/]", member.filename)):
@@ -7467,9 +7248,9 @@ if(py7zr_support):
7467
7248
  if(verbose):
7468
7249
  VerbosePrintOut(fname)
7469
7250
  if(not member.is_directory):
7470
- fpremode = int(stat.S_IFREG + 438)
7251
+ fpremode = int(stat.S_IFREG | 0x1b6)
7471
7252
  elif(member.is_directory):
7472
- fpremode = int(stat.S_IFDIR + 511)
7253
+ fpremode = int(stat.S_IFDIR | 0x1ff)
7473
7254
  fwinattributes = format(int(0), 'x').lower()
7474
7255
  fcompression = ""
7475
7256
  fcsize = format(int(0), 'x').lower()
@@ -7493,17 +7274,17 @@ if(py7zr_support):
7493
7274
  fctime = format(int(member.creationtime.timestamp()), 'x').lower()
7494
7275
  fbtime = format(int(member.creationtime.timestamp()), 'x').lower()
7495
7276
  if(member.is_directory):
7496
- fmode = format(int(stat.S_IFDIR + 511), 'x').lower()
7277
+ fmode = format(int(stat.S_IFDIR | 0x1ff), 'x').lower()
7497
7278
  fchmode = format(
7498
- int(stat.S_IMODE(int(stat.S_IFDIR + 511))), 'x').lower()
7279
+ int(stat.S_IMODE(int(stat.S_IFDIR | 0x1ff))), 'x').lower()
7499
7280
  ftypemod = format(
7500
- int(stat.S_IFMT(int(stat.S_IFDIR + 511))), 'x').lower()
7281
+ int(stat.S_IFMT(int(stat.S_IFDIR | 0x1ff))), 'x').lower()
7501
7282
  else:
7502
- fmode = format(int(stat.S_IFREG + 438), 'x').lower()
7283
+ fmode = format(int(stat.S_IFREG | 0x1b6), 'x').lower()
7503
7284
  fchmode = format(
7504
- int(stat.S_IMODE(int(stat.S_IFREG + 438))), 'x').lower()
7285
+ int(stat.S_IMODE(int(stat.S_IFREG | 0x1b6))), 'x').lower()
7505
7286
  ftypemod = format(
7506
- int(stat.S_IFMT(int(stat.S_IFREG + 438))), 'x').lower()
7287
+ int(stat.S_IFMT(int(stat.S_IFREG | 0x1b6))), 'x').lower()
7507
7288
  try:
7508
7289
  fuid = format(int(os.getuid()), 'x').lower()
7509
7290
  except AttributeError:
@@ -7596,10 +7377,20 @@ if(py7zr_support):
7596
7377
  fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
7597
7378
  AppendFileHeaderWithContent(
7598
7379
  fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
7380
+ try:
7381
+ fp.flush()
7382
+ if(hasattr(os, "sync")):
7383
+ os.fsync(fp.fileno())
7384
+ except io.UnsupportedOperation:
7385
+ pass
7386
+ except AttributeError:
7387
+ pass
7388
+ except OSError:
7389
+ pass
7599
7390
  fcontents.close()
7600
7391
  return fp
7601
7392
 
7602
- def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, filevalues=[], extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
7393
+ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
7603
7394
  if(not hasattr(fp, "write")):
7604
7395
  return False
7605
7396
  if(verbose):
@@ -7661,12 +7452,12 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, filevalues=[], extr
7661
7452
  return fp
7662
7453
 
7663
7454
 
7664
- def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, filevalues=[], extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
7455
+ def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
7665
7456
  inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
7666
- return AppendListsWithContent(inlist, fp, dirlistfromtxt, filevalues, extradata, jsondata, followlink, checksumtype, formatspecs, verbose)
7457
+ return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink, checksumtype, formatspecs, verbose)
7667
7458
 
7668
7459
 
7669
- def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, filevalues=[], extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7460
+ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7670
7461
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
7671
7462
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
7672
7463
  get_in_ext = os.path.splitext(outfile)
@@ -7710,7 +7501,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
7710
7501
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
7711
7502
  except PermissionError:
7712
7503
  return False
7713
- AppendFilesWithContent(infiles, fp, dirlistfromtxt, filevalues, extradata, jsondata, compression,
7504
+ AppendFilesWithContent(infiles, fp, dirlistfromtxt, extradata, jsondata, compression,
7714
7505
  compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, formatspecs, verbose)
7715
7506
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
7716
7507
  fp = CompressOpenFileAlt(
@@ -7747,12 +7538,12 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
7747
7538
  fp.close()
7748
7539
  return True
7749
7540
 
7750
- def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, filevalues=[], extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7541
+ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7751
7542
  if not isinstance(infiles, list):
7752
7543
  infiles = [infiles]
7753
7544
  returnout = False
7754
7545
  for infileslist in infiles:
7755
- returnout = AppendFilesWithContentToOutFile(infileslist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, filevalues, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, True)
7546
+ returnout = AppendFilesWithContentToOutFile(infileslist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, True)
7756
7547
  if(not returnout):
7757
7548
  break
7758
7549
  else:
@@ -7762,7 +7553,7 @@ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=Fals
7762
7553
  return True
7763
7554
  return returnout
7764
7555
 
7765
- def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, filevalues=[], extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
7556
+ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
7766
7557
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
7767
7558
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
7768
7559
  get_in_ext = os.path.splitext(outfile)
@@ -7803,7 +7594,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
7803
7594
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
7804
7595
  except PermissionError:
7805
7596
  return False
7806
- AppendListsWithContent(inlist, fp, dirlistfromtxt, filevalues, extradata, jsondata, compression,
7597
+ AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, compression,
7807
7598
  compresswholefile, compressionlevel, followlink, checksumtype, formatspecs, verbose)
7808
7599
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
7809
7600
  fp = CompressOpenFileAlt(
@@ -7921,6 +7712,21 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
7921
7712
  fp.close()
7922
7713
  return True
7923
7714
 
7715
+ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7716
+ if not isinstance(infiles, list):
7717
+ infiles = [infiles]
7718
+ returnout = False
7719
+ for infileslist in infiles:
7720
+ returnout = AppendFilesWithContentFromTarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
7721
+ if(not returnout):
7722
+ break
7723
+ else:
7724
+ outfile = returnout
7725
+ if(not returnfp and returnout):
7726
+ returnout.close()
7727
+ return True
7728
+ return returnout
7729
+
7924
7730
  def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7925
7731
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
7926
7732
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
@@ -8001,6 +7807,21 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
8001
7807
  fp.close()
8002
7808
  return True
8003
7809
 
7810
+ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7811
+ if not isinstance(infiles, list):
7812
+ infiles = [infiles]
7813
+ returnout = False
7814
+ for infileslist in infiles:
7815
+ returnout = AppendFilesWithContentFromZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
7816
+ if(not returnout):
7817
+ break
7818
+ else:
7819
+ outfile = returnout
7820
+ if(not returnfp and returnout):
7821
+ returnout.close()
7822
+ return True
7823
+ return returnout
7824
+
8004
7825
  if(not rarfile_support):
8005
7826
  def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
8006
7827
  return False
@@ -8086,12 +7907,27 @@ if(rarfile_support):
8086
7907
  fp.close()
8087
7908
  return True
8088
7909
 
7910
+ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7911
+ if not isinstance(infiles, list):
7912
+ infiles = [infiles]
7913
+ returnout = False
7914
+ for infileslist in infiles:
7915
+ returnout = AppendFilesWithContentFromRarFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
7916
+ if(not returnout):
7917
+ break
7918
+ else:
7919
+ outfile = returnout
7920
+ if(not returnfp and returnout):
7921
+ returnout.close()
7922
+ return True
7923
+ return returnout
7924
+
8089
7925
  if(not py7zr_support):
8090
- def AppendFilesWithContentFromSevenZipToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7926
+ def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
8091
7927
  return False
8092
7928
 
8093
7929
  if(py7zr_support):
8094
- def AppendFilesWithContentFromSevenZipToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
7930
+ def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
8095
7931
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
8096
7932
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
8097
7933
  get_in_ext = os.path.splitext(outfile)
@@ -8133,7 +7969,7 @@ if(py7zr_support):
8133
7969
  fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
8134
7970
  except PermissionError:
8135
7971
  return False
8136
- AppendFilesWithContentFromSevenZip(infiles, fp, extradata, jsondata, compression,
7972
+ AppendFilesWithContentFromSevenZipFile(infiles, fp, extradata, jsondata, compression,
8137
7973
  compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, verbose)
8138
7974
  if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
8139
7975
  fp = CompressOpenFileAlt(
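
Both the stub and the real implementation are renamed from `AppendFilesWithContentFromSevenZipToOutFile` to `AppendFilesWithContentFromSevenZipFileToOutFile` (and the inner call from `AppendFilesWithContentFromSevenZip` to `AppendFilesWithContentFromSevenZipFile`), matching the tar/zip/rar naming. Code written against 0.24.0 can bridge the rename with an alias; the shim below is an assumption for illustration, not something the package provides:

    # Compatibility alias for call sites that still use the 0.24.0 name.
    import pyarchivefile

    if not hasattr(pyarchivefile, "AppendFilesWithContentFromSevenZipToOutFile"):
        pyarchivefile.AppendFilesWithContentFromSevenZipToOutFile = (
            pyarchivefile.AppendFilesWithContentFromSevenZipFileToOutFile
        )
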
@@ -8171,9 +8007,24 @@ if(py7zr_support):
8171
8007
  fp.close()
8172
8008
  return True
8173
8009
 
8174
- def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, filevalues=[], extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
8010
+ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
8011
+ if not isinstance(infiles, list):
8012
+ infiles = [infiles]
8013
+ returnout = False
8014
+ for infileslist in infiles:
8015
+ returnout = AppendFilesWithContentFromSevenZipFileToOutFile(infileslist, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, True)
8016
+ if(not returnout):
8017
+ break
8018
+ else:
8019
+ outfile = returnout
8020
+ if(not returnfp and returnout):
8021
+ returnout.close()
8022
+ return True
8023
+ return returnout
8024
+
8025
+ def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
8175
8026
  inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
8176
- return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, filevalues, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
8027
+ return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
8177
8028
 
8178
8029
 
8179
8030
  def PrintPermissionString(fchmode, ftype):
@@ -8865,8 +8716,6 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, file
8865
8716
  fp = pyzstd.zstdfile.ZstdFile(infile, mode="rb")
8866
8717
  else:
8867
8718
  return False
8868
- elif((compresscheck == "lzo" or compresscheck == "lzop") and compresscheck in compressionsupport):
8869
- fp = LzopFile(infile, mode="rb")
8870
8719
  elif((compresscheck == "lzma" or compresscheck == "xz") and compresscheck in compressionsupport):
8871
8720
  fp = lzma.open(infile, "rb")
8872
8721
  elif(compresscheck == "zlib" and compresscheck in compressionsupport):
@@ -8989,8 +8838,6 @@ def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__, filestart=0,
8989
8838
  return False
8990
8839
  elif kind == "lz4" and "lz4" in compressionsupport:
8991
8840
  wrapped = lz4.frame.LZ4FrameFile(src, mode="rb")
8992
- elif kind in ("lzo","lzop") and (("lzo" in compressionsupport) or ("lzop" in compressionsupport)):
8993
- wrapped = LzopFile(fileobj=src, mode="rb")
8994
8841
  elif kind == "zlib" and "zlib" in compressionsupport:
8995
8842
  wrapped = ZlibFile(fileobj=src, mode="rb")
8996
8843
  else:
@@ -9062,8 +8909,6 @@ def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb",
9062
8909
  return False
9063
8910
  elif (compresscheck == "lz4" and "lz4" in compressionsupport):
9064
8911
  fp = lz4.frame.open(infile, mode)
9065
- elif ((compresscheck == "lzo" or compresscheck == "lzop") and "lzop" in compressionsupport):
9066
- fp = LzopFile(infile, mode=mode)
9067
8912
  elif ((compresscheck == "lzma" or compresscheck == "xz") and "xz" in compressionsupport):
9068
8913
  fp = lzma.open(infile, mode)
9069
8914
  elif (compresscheck == "zlib" and "zlib" in compressionsupport):
@@ -9631,7 +9476,7 @@ def fast_copy(infp, outfp, bufsize=1 << 20):
9631
9476
  outfp.write(data)
9632
9477
 
9633
9478
 
9634
- def copy_file_to_mmap_dest(src_path, outfp, chunk_size=8 << 20):
9479
+ def copy_file_to_mmap_dest(src_path, outfp, chunk_size=__spoolfile_size__):
9635
9480
  """
9636
9481
  Copy a disk file into an mmap-backed destination (FileLikeAdapter).
9637
9482
  Falls back to buffered copy if the source cannot be mmapped.
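
`copy_file_to_mmap_dest` now defaults its `chunk_size` to the configurable `__spoolfile_size__` instead of the hard-coded `8 << 20` (8 MiB), so tuning the spool-file threshold also tunes the mmap copy granularity. Callers that want the old behaviour regardless of configuration can pass the literal explicitly; a sketch, assuming the helper stays reachable at module level:

    # Pin the copy granularity to the pre-0.24.4 value of 8 MiB.
    import pyarchivefile

    EIGHT_MIB = 8 << 20  # the literal this hunk replaces

    def copy_with_fixed_chunks(src_path, outfp):
        # Illustrative wrapper, not part of the package.
        return pyarchivefile.copy_file_to_mmap_dest(src_path, outfp, chunk_size=EIGHT_MIB)
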
@@ -9857,9 +9702,6 @@ def CompressOpenFile(outfile, compressionenable=True, compressionlevel=None,
9857
9702
  elif (fextname == ".lz4" and "lz4" in compressionsupport):
9858
9703
  outfp = FileLikeAdapter(lz4.frame.open(outfile, mode, compression_level=compressionlevel), mode="wb")
9859
9704
 
9860
- elif (fextname == ".lzo" and "lzop" in compressionsupport):
9861
- outfp = FileLikeAdapter(LzopFile(outfile, mode=mode, level=compressionlevel), mode="wb")
9862
-
9863
9705
  elif (fextname == ".lzma" and "lzma" in compressionsupport):
9864
9706
  try:
9865
9707
  outfp = FileLikeAdapter(
@@ -9949,10 +9791,10 @@ def CheckSumSupportAlt(checkfor, guaranteed=True):
9949
9791
 
9950
9792
 
9951
9793
  def PackArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
9952
- return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, [], extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
9794
+ return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
9953
9795
 
9954
9796
  def PackStackedArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
9955
- return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, [], extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
9797
+ return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
9956
9798
 
9957
9799
  def PackArchiveFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9958
9800
  return PackArchiveFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, returnfp)
@@ -9981,7 +9823,7 @@ if(not py7zr_support):
9981
9823
 
9982
9824
  if(py7zr_support):
9983
9825
  def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9984
- return AppendFilesWithContentFromSevenZipToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
9826
+ return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
9985
9827
 
9986
9828
 
9987
9829
  def PackArchiveFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
@@ -10089,14 +9931,6 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
10089
9931
  checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
10090
9932
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
10091
9933
  formatspecs = formatspecs[checkcompressfile]
10092
- if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
10093
- return TarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
10094
- elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
10095
- return ZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
10096
- elif(rarfile_support and checkcompressfile == "rarfile" and (rarfile.is_rarfile(infile) or rarfile.is_rarfile_sfx(infile))):
10097
- return RarFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
10098
- elif(py7zr_support and checkcompressfile == "7zipfile" and py7zr.is_7zfile(infile)):
10099
- return SevenZipFileToArray(infile, 0, 0, listonly, contentasfile, skipchecksum, formatspecs, seektoend, returnfp)
10100
9934
  elif(IsSingleDict(formatspecs) and checkcompressfile != formatspecs['format_magic']):
10101
9935
  return False
10102
9936
  elif(IsNestedDict(formatspecs) and checkcompressfile not in formatspecs):
@@ -10458,9 +10292,10 @@ def StackedArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=
10458
10292
  while True:
10459
10293
  if outstartfile >= outfsize: # stop once we have read past the end of the file
10460
10294
  break
10461
- is_valid_file = ArchiveFileValidate(infile, fmttype, filestart, formatspecs, seektoend, verbose, True)
10295
+ is_valid_file = ArchiveFileValidate(infile, fmttype, outstartfile, formatspecs, seektoend, verbose, True)
10462
10296
  if is_valid_file is False: # stop when function signals False
10463
10297
  outretval.append(is_valid_file)
10298
+ break
10464
10299
  else:
10465
10300
  outretval.append(True)
10466
10301
  infile = is_valid_file
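
Two fixes in one hunk: each pass now hands `ArchiveFileValidate` the current offset `outstartfile` rather than the original `filestart`, so successive stacked members are actually advanced through, and a failed member now breaks out of the loop instead of retrying the same position. (The older duplicate definition of `StackedArchiveFileValidate`, which still had the bug, is deleted in a later hunk.) Validating a stacked archive end to end might look like this, with an illustrative path and the module-level API assumed importable:

    # Validate every member stacked inside one archive file.
    import pyarchivefile

    results = pyarchivefile.StackedArchiveFileValidate("stacked.arc")
    # One boolean per stacked member; a trailing False marks where validation stopped.
    print(all(results), results)
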
@@ -11466,35 +11301,6 @@ def MultipleArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=
11466
11301
  return outretval
11467
11302
 
11468
11303
 
11469
- def StackedArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, returnfp=False):
11470
- outretval = []
11471
- outstartfile = filestart
11472
- outfsize = float('inf')
11473
- while True:
11474
- if outstartfile >= outfsize: # stop when function signals False
11475
- break
11476
- is_valid_file = ArchiveFileValidate(infile, fmttype, filestart, formatspecs, seektoend, verbose, True)
11477
- if is_valid_file is False: # stop when function signals False
11478
- outretval.append(is_valid_file)
11479
- else:
11480
- outretval.append(True)
11481
- infile = is_valid_file
11482
- outstartfile = infile.tell()
11483
- try:
11484
- infile.seek(0, 2)
11485
- except OSError:
11486
- SeekToEndOfFile(infile)
11487
- except ValueError:
11488
- SeekToEndOfFile(infile)
11489
- outfsize = infile.tell()
11490
- infile.seek(outstartfile, 0)
11491
- if(returnfp):
11492
- return infile
11493
- else:
11494
- infile.close()
11495
- return outretval
11496
-
11497
-
11498
11304
  def StackedArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
11499
11305
  outretval = []
11500
11306
  outstartfile = filestart
@@ -11705,11 +11511,11 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
11705
11511
  if(zipinfo.create_system == 0 or zipinfo.create_system == 10):
11706
11512
  fwinattributes = int(zipinfo.external_attr)
11707
11513
  if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
11708
- fmode = int(stat.S_IFDIR + 511)
11709
- fchmode = int(stat.S_IMODE(int(stat.S_IFDIR + 511)))
11710
- ftypemod = int(stat.S_IFMT(int(stat.S_IFDIR + 511)))
11514
+ fmode = int(stat.S_IFDIR | 0x1ff)
11515
+ fchmode = int(stat.S_IMODE(int(stat.S_IFDIR | 0x1ff)))
11516
+ ftypemod = int(stat.S_IFMT(int(stat.S_IFDIR | 0x1ff)))
11711
11517
  else:
11712
- fmode = int(stat.S_IFREG + 438)
11518
+ fmode = int(stat.S_IFREG | 0x1b6)
11713
11519
  fchmode = int(stat.S_IMODE(fmode))
11714
11520
  ftypemod = int(stat.S_IFMT(fmode))
11715
11521
  elif(zipinfo.create_system == 3):
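
Here and in the rar/7z listing code below, the mode constants switch from integer addition to bitwise OR: `511` is `0o777` (`0x1ff`) and `438` is `0o666` (`0x1b6`), and OR-ing them onto `S_IFDIR`/`S_IFREG` states the intent directly; the addition only worked because the permission bits never overlap the file-type bits. A quick check of the equivalence:

    # The old additive constants and the new OR'd ones produce identical modes.
    import stat

    assert stat.S_IFDIR | 0x1ff == stat.S_IFDIR + 511 == stat.S_IFDIR | 0o777
    assert stat.S_IFREG | 0x1b6 == stat.S_IFREG + 438 == stat.S_IFREG | 0o666
    print(oct(stat.S_IMODE(stat.S_IFDIR | 0o777)))  # 0o777
    print(oct(stat.S_IMODE(stat.S_IFREG | 0o666)))  # 0o666
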
@@ -11725,11 +11531,11 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
11725
11531
  else:
11726
11532
  fwinattributes = int(0)
11727
11533
  if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
11728
- fmode = int(stat.S_IFDIR + 511)
11729
- fchmode = int(stat.S_IMODE(int(stat.S_IFDIR + 511)))
11730
- ftypemod = int(stat.S_IFMT(int(stat.S_IFDIR + 511)))
11534
+ fmode = int(stat.S_IFDIR | 0x1ff)
11535
+ fchmode = int(stat.S_IMODE(int(stat.S_IFDIR | 0x1ff)))
11536
+ ftypemod = int(stat.S_IFMT(int(stat.S_IFDIR | 0x1ff)))
11731
11537
  else:
11732
- fmode = int(stat.S_IFREG + 438)
11538
+ fmode = int(stat.S_IFREG | 0x1b6)
11733
11539
  fchmode = int(stat.S_IMODE(fmode))
11734
11540
  ftypemod = int(stat.S_IFMT(fmode))
11735
11541
  returnval.update({lcfi: member.filename})
@@ -11840,11 +11646,11 @@ if(rarfile_support):
11840
11646
  if(is_unix and member.external_attr != 0):
11841
11647
  fpremode = int(member.external_attr)
11842
11648
  elif(member.is_file()):
11843
- fpremode = int(stat.S_IFREG + 438)
11649
+ fpremode = int(stat.S_IFREG | 0x1b6)
11844
11650
  elif(member.is_symlink()):
11845
- fpremode = int(stat.S_IFLNK + 438)
11651
+ fpremode = int(stat.S_IFLNK | 0x1b6)
11846
11652
  elif(member.is_dir()):
11847
- fpremode = int(stat.S_IFDIR + 511)
11653
+ fpremode = int(stat.S_IFDIR | 0x1ff)
11848
11654
  if(is_windows and member.external_attr != 0):
11849
11655
  fwinattributes = int(member.external_attr)
11850
11656
  else:
@@ -11854,17 +11660,17 @@ if(rarfile_support):
11854
11660
  fchmode = int(stat.S_IMODE(member.external_attr))
11855
11661
  ftypemod = int(stat.S_IFMT(member.external_attr))
11856
11662
  elif(member.is_file()):
11857
- fmode = int(stat.S_IFREG + 438)
11858
- fchmode = int(stat.S_IMODE(int(stat.S_IFREG + 438)))
11859
- ftypemod = int(stat.S_IFMT(int(stat.S_IFREG + 438)))
11663
+ fmode = int(stat.S_IFREG | 0x1b6)
11664
+ fchmode = int(stat.S_IMODE(int(stat.S_IFREG | 0x1b6)))
11665
+ ftypemod = int(stat.S_IFMT(int(stat.S_IFREG | 0x1b6)))
11860
11666
  elif(member.is_symlink()):
11861
- fmode = int(stat.S_IFLNK + 438)
11862
- fchmode = int(stat.S_IMODE(int(stat.S_IFLNK + 438)))
11863
- ftypemod = int(stat.S_IFMT(int(stat.S_IFLNK + 438)))
11667
+ fmode = int(stat.S_IFLNK | 0x1b6)
11668
+ fchmode = int(stat.S_IMODE(int(stat.S_IFLNK | 0x1b6)))
11669
+ ftypemod = int(stat.S_IFMT(int(stat.S_IFLNK | 0x1b6)))
11864
11670
  elif(member.is_dir()):
11865
- fmode = int(stat.S_IFDIR + 511)
11866
- fchmode = int(stat.S_IMODE(int(stat.S_IFDIR + 511)))
11867
- ftypemod = int(stat.S_IFMT(int(stat.S_IFDIR + 511)))
11671
+ fmode = int(stat.S_IFDIR | 0x1ff)
11672
+ fchmode = int(stat.S_IMODE(int(stat.S_IFDIR | 0x1ff)))
11673
+ ftypemod = int(stat.S_IFMT(int(stat.S_IFDIR | 0x1ff)))
11868
11674
  returnval.update({lcfi: member.filename})
11869
11675
  if(not verbose):
11870
11676
  VerbosePrintOut(member.filename)
@@ -11960,18 +11766,18 @@ if(py7zr_support):
11960
11766
  else:
11961
11767
  fname = "./"+member.filename
11962
11768
  if(not member.is_directory):
11963
- fpremode = int(stat.S_IFREG + 438)
11769
+ fpremode = int(stat.S_IFREG | 0x1b6)
11964
11770
  elif(member.is_directory):
11965
- fpremode = int(stat.S_IFDIR + 511)
11771
+ fpremode = int(stat.S_IFDIR | 0x1ff)
11966
11772
  fwinattributes = int(0)
11967
11773
  if(member.is_directory):
11968
- fmode = int(stat.S_IFDIR + 511)
11969
- fchmode = int(stat.S_IMODE(int(stat.S_IFDIR + 511)))
11970
- ftypemod = int(stat.S_IFMT(int(stat.S_IFDIR + 511)))
11774
+ fmode = int(stat.S_IFDIR | 0x1ff)
11775
+ fchmode = int(stat.S_IMODE(int(stat.S_IFDIR | 0x1ff)))
11776
+ ftypemod = int(stat.S_IFMT(int(stat.S_IFDIR | 0x1ff)))
11971
11777
  else:
11972
- fmode = int(stat.S_IFLNK + 438)
11973
- fchmode = int(stat.S_IMODE(int(stat.S_IFLNK + 438)))
11974
- ftypemod = int(stat.S_IFMT(int(stat.S_IFLNK + 438)))
11778
+ fmode = int(stat.S_IFLNK | 0x1b6)
11779
+ fchmode = int(stat.S_IMODE(int(stat.S_IFLNK | 0x1b6)))
11780
+ ftypemod = int(stat.S_IFMT(int(stat.S_IFLNK | 0x1b6)))
11975
11781
  returnval.update({lcfi: member.filename})
11976
11782
  if(not verbose):
11977
11783
  VerbosePrintOut(member.filename)