ansible-core 2.13.8__py3-none-any.whl → 2.13.9rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (43)
  1. ansible/cli/doc.py +10 -4
  2. ansible/galaxy/api.py +29 -10
  3. ansible/galaxy/collection/__init__.py +3 -0
  4. ansible/galaxy/collection/concrete_artifact_manager.py +34 -17
  5. ansible/galaxy/dependency_resolution/dataclasses.py +11 -1
  6. ansible/galaxy/dependency_resolution/providers.py +0 -1
  7. ansible/module_utils/ansible_release.py +1 -1
  8. ansible/module_utils/api.py +14 -1
  9. ansible/module_utils/csharp/Ansible.Basic.cs +265 -7
  10. ansible/plugins/lookup/password.py +79 -39
  11. ansible/release.py +1 -1
  12. ansible/utils/encrypt.py +9 -6
  13. {ansible_core-2.13.8.dist-info → ansible_core-2.13.9rc1.dist-info}/METADATA +1 -1
  14. {ansible_core-2.13.8.dist-info → ansible_core-2.13.9rc1.dist-info}/RECORD +43 -43
  15. {ansible_core-2.13.8.dist-info → ansible_core-2.13.9rc1.dist-info}/WHEEL +1 -1
  16. ansible_test/_internal/ci/__init__.py +2 -2
  17. ansible_test/_internal/ci/azp.py +12 -8
  18. ansible_test/_internal/ci/local.py +2 -2
  19. ansible_test/_internal/classification/__init__.py +51 -43
  20. ansible_test/_internal/cli/argparsing/argcompletion.py +20 -5
  21. ansible_test/_internal/cli/commands/sanity.py +0 -15
  22. ansible_test/_internal/commands/coverage/combine.py +3 -1
  23. ansible_test/_internal/commands/integration/__init__.py +6 -2
  24. ansible_test/_internal/commands/integration/cloud/__init__.py +3 -1
  25. ansible_test/_internal/commands/sanity/__init__.py +7 -0
  26. ansible_test/_internal/commands/sanity/pylint.py +11 -0
  27. ansible_test/_internal/commands/sanity/validate_modules.py +66 -5
  28. ansible_test/_internal/config.py +6 -12
  29. ansible_test/_internal/core_ci.py +8 -1
  30. ansible_test/_internal/data.py +17 -8
  31. ansible_test/_internal/delegation.py +1 -2
  32. ansible_test/_internal/metadata.py +4 -0
  33. ansible_test/_internal/payload.py +75 -6
  34. ansible_test/_internal/python_requirements.py +15 -0
  35. ansible_test/_internal/target.py +3 -7
  36. ansible_test/_internal/test.py +1 -1
  37. ansible_test/_internal/util.py +17 -0
  38. ansible_test/_util/controller/sanity/mypy/ansible-test.ini +3 -0
  39. ansible_test/_util/controller/sanity/validate-modules/validate_modules/main.py +92 -126
  40. {ansible_core-2.13.8.data → ansible_core-2.13.9rc1.data}/scripts/ansible-test +0 -0
  41. {ansible_core-2.13.8.dist-info → ansible_core-2.13.9rc1.dist-info}/COPYING +0 -0
  42. {ansible_core-2.13.8.dist-info → ansible_core-2.13.9rc1.dist-info}/entry_points.txt +0 -0
  43. {ansible_core-2.13.8.dist-info → ansible_core-2.13.9rc1.dist-info}/top_level.txt +0 -0
ansible/cli/doc.py CHANGED
@@ -1298,10 +1298,16 @@ class DocCLI(CLI, RoleMixin):
  if 'module' in item:
  text.append(textwrap.fill(DocCLI.tty_ify('Module %s' % item['module']),
  limit - 6, initial_indent=opt_indent[:-2] + "* ", subsequent_indent=opt_indent))
- description = item.get('description', 'The official documentation on the %s module.' % item['module'])
- text.append(textwrap.fill(DocCLI.tty_ify(description), limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent + ' '))
- text.append(textwrap.fill(DocCLI.tty_ify(get_versioned_doclink('modules/%s_module.html' % item['module'])),
- limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent))
+ description = item.get('description')
+ if description is None and item['module'].startswith('ansible.builtin.'):
+ description = 'The official documentation on the %s module.' % item['module']
+ if description is not None:
+ text.append(textwrap.fill(DocCLI.tty_ify(description),
+ limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent + ' '))
+ if item['module'].startswith('ansible.builtin.'):
+ relative_url = 'collections/%s_module.html' % item['module'].replace('.', '/', 2)
+ text.append(textwrap.fill(DocCLI.tty_ify(get_versioned_doclink(relative_url)),
+ limit - 6, initial_indent=opt_indent + ' ', subsequent_indent=opt_indent))
  elif 'name' in item and 'link' in item and 'description' in item:
  text.append(textwrap.fill(DocCLI.tty_ify(item['name']),
  limit - 6, initial_indent=opt_indent[:-2] + "* ", subsequent_indent=opt_indent))
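As an aside on the rewritten doc-link logic: a builtin FQCN is turned into a collections/ path by replacing its first two dots, and non-builtin modules no longer receive a guessed URL or a boilerplate description. A minimal standalone sketch of the string transformation (illustrative only, bypassing the real DocCLI/get_versioned_doclink helpers):

    # Hypothetical helper mirroring the new URL construction for builtin modules.
    def builtin_doclink(fqcn):
        if not fqcn.startswith('ansible.builtin.'):
            return None  # non-builtin modules no longer get a guessed URL
        # 'ansible.builtin.copy' -> 'ansible/builtin/copy' -> collections/... page
        return 'collections/%s_module.html' % fqcn.replace('.', '/', 2)

    print(builtin_doclink('ansible.builtin.copy'))
    # collections/ansible/builtin/copy_module.html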
ansible/galaxy/api.py CHANGED
@@ -11,12 +11,15 @@ import functools
  import hashlib
  import json
  import os
+ import socket
  import stat
  import tarfile
  import time
  import threading
 
- from urllib.error import HTTPError
+ from http import HTTPStatus
+ from http.client import BadStatusLine, IncompleteRead
+ from urllib.error import HTTPError, URLError
  from urllib.parse import quote as urlquote, urlencode, urlparse, parse_qs, urljoin
 
  from ansible import constants as C
@@ -34,10 +37,11 @@ from ansible.utils.path import makedirs_safe
  display = Display()
  _CACHE_LOCK = threading.Lock()
  COLLECTION_PAGE_SIZE = 100
- RETRY_HTTP_ERROR_CODES = [ # TODO: Allow user-configuration
- 429, # Too Many Requests
+ RETRY_HTTP_ERROR_CODES = { # TODO: Allow user-configuration
+ HTTPStatus.TOO_MANY_REQUESTS,
  520, # Galaxy rate limit error code (Cloudflare unknown error)
- ]
+ HTTPStatus.BAD_GATEWAY, # Common error from galaxy that may represent any number of transient backend issues
+ }
 
 
  def cache_lock(func):
@@ -48,11 +52,24 @@ def cache_lock(func):
  return wrapped
 
 
- def is_rate_limit_exception(exception):
+ def should_retry_error(exception):
  # Note: cloud.redhat.com masks rate limit errors with 403 (Forbidden) error codes.
  # Since 403 could reflect the actual problem (such as an expired token), we should
  # not retry by default.
- return isinstance(exception, GalaxyError) and exception.http_code in RETRY_HTTP_ERROR_CODES
+ if isinstance(exception, GalaxyError) and exception.http_code in RETRY_HTTP_ERROR_CODES:
+ return True
+
+ if isinstance(exception, AnsibleError) and (orig_exc := getattr(exception, 'orig_exc', None)):
+ # URLError is often a proxy for an underlying error, handle wrapped exceptions
+ if isinstance(orig_exc, URLError):
+ orig_exc = orig_exc.reason
+
+ # Handle common URL related errors such as TimeoutError, and BadStatusLine
+ # Note: socket.timeout is only required for Py3.9
+ if isinstance(orig_exc, (TimeoutError, BadStatusLine, IncompleteRead, socket.timeout)):
+ return True
+
+ return False
 
 
  def g_connect(versions):
@@ -326,7 +343,7 @@ class GalaxyAPI:
 
  @retry_with_delays_and_condition(
  backoff_iterator=generate_jittered_backoff(retries=6, delay_base=2, delay_threshold=40),
- should_retry_error=is_rate_limit_exception
+ should_retry_error=should_retry_error
  )
  def _call_galaxy(self, url, args=None, headers=None, method=None, auth_required=False, error_context_msg=None,
  cache=False, cache_key=None):
@@ -384,7 +401,10 @@
  except HTTPError as e:
  raise GalaxyError(e, error_context_msg)
  except Exception as e:
- raise AnsibleError("Unknown error when attempting to call Galaxy at '%s': %s" % (url, to_native(e)))
+ raise AnsibleError(
+ "Unknown error when attempting to call Galaxy at '%s': %s" % (url, to_native(e)),
+ orig_exc=e
+ )
 
  resp_data = to_text(resp.read(), errors='surrogate_or_strict')
  try:
@@ -898,8 +918,7 @@
  try:
  signatures = data["signatures"]
  except KeyError:
- # Noisy since this is used by the dep resolver, so require more verbosity than Galaxy calls
- display.vvvvvv(f"Server {self.api_server} has not signed {namespace}.{name}:{version}")
+ display.vvvv(f"Server {self.api_server} has not signed {namespace}.{name}:{version}")
  return []
  else:
  return [signature_info["signature"] for signature_info in signatures]
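The retry changes above pair a jittered backoff schedule with a predicate that looks at either the Galaxy HTTP status or a low-level exception preserved on the wrapping error. A simplified, standalone sketch of that pattern (illustrative names only, not the actual ansible.module_utils.api implementation):

    import random
    import time
    from http.client import BadStatusLine, IncompleteRead

    RETRYABLE_HTTP_CODES = {429, 502, 520}

    def should_retry(exc):
        # Retry on the transient Galaxy HTTP codes listed above...
        if getattr(exc, 'http_code', None) in RETRYABLE_HTTP_CODES:
            return True
        # ...or when the wrapper preserved a low-level network error as orig_exc.
        orig = getattr(exc, 'orig_exc', None)
        return isinstance(orig, (TimeoutError, BadStatusLine, IncompleteRead))

    def retry_with_backoff(func, retries=6, delay_base=2, delay_threshold=40):
        for attempt in range(retries):
            try:
                return func()
            except Exception as exc:
                if attempt == retries - 1 or not should_retry(exc):
                    raise
                # jittered, capped exponential backoff, same spirit as
                # generate_jittered_backoff(retries=6, delay_base=2, delay_threshold=40)
                time.sleep(random.randint(0, min(delay_threshold, delay_base * 2 ** attempt)))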
ansible/galaxy/collection/__init__.py CHANGED
@@ -741,6 +741,9 @@ def install_collections(
  "Skipping signature verification."
  )
 
+ if concrete_coll_pin.type == 'galaxy':
+ concrete_coll_pin = concrete_coll_pin.with_signatures_repopulated()
+
  try:
  install(concrete_coll_pin, output_path, artifacts_manager)
  except AnsibleError as err:
ansible/galaxy/collection/concrete_artifact_manager.py CHANGED
@@ -27,9 +27,12 @@ if t.TYPE_CHECKING:
 
  from ansible.errors import AnsibleError
  from ansible.galaxy import get_collections_galaxy_meta_info
+ from ansible.galaxy.api import should_retry_error
  from ansible.galaxy.dependency_resolution.dataclasses import _GALAXY_YAML
  from ansible.galaxy.user_agent import user_agent
  from ansible.module_utils._text import to_bytes, to_native, to_text
+ from ansible.module_utils.api import retry_with_delays_and_condition
+ from ansible.module_utils.api import generate_jittered_backoff
  from ansible.module_utils.common.process import get_bin_path
  from ansible.module_utils.common._collections_compat import MutableMapping
  from ansible.module_utils.common.yaml import yaml_load
@@ -159,17 +162,24 @@ class ConcreteArtifactsManager:
  token=token,
  ) # type: bytes
  except URLError as err:
- raise_from(
- AnsibleError(
- 'Failed to download collection tar '
- "from '{coll_src!s}': {download_err!s}".
- format(
- coll_src=to_native(collection.src),
- download_err=to_native(err),
- ),
+ raise AnsibleError(
+ 'Failed to download collection tar '
+ "from '{coll_src!s}': {download_err!s}".
+ format(
+ coll_src=to_native(collection.src),
+ download_err=to_native(err),
  ),
- err,
- )
+ ) from err
+ except Exception as err:
+ raise AnsibleError(
+ 'Failed to download collection tar '
+ "from '{coll_src!s}' due to the following unforeseen error: "
+ '{download_err!s}'.
+ format(
+ coll_src=to_native(collection.src),
+ download_err=to_native(err),
+ ),
+ ) from err
  else:
  display.vvv(
  "Collection '{coll!s}' obtained from "
@@ -456,6 +466,10 @@ def _extract_collection_from_git(repo_url, coll_ver, b_path):
 
 
  # FIXME: use random subdirs while preserving the file names
+ @retry_with_delays_and_condition(
+ backoff_iterator=generate_jittered_backoff(retries=6, delay_base=2, delay_threshold=40),
+ should_retry_error=should_retry_error
+ )
  def _download_file(url, b_path, expected_hash, validate_certs, token=None, timeout=60):
  # type: (str, bytes, t.Optional[str], bool, GalaxyToken, int) -> bytes
  # ^ NOTE: used in download and verify_collections ^
@@ -474,13 +488,16 @@ def _download_file(url, b_path, expected_hash, validate_certs, token=None, timeo
  display.display("Downloading %s to %s" % (url, to_text(b_tarball_dir)))
  # NOTE: Galaxy redirects downloads to S3 which rejects the request
  # NOTE: if an Authorization header is attached so don't redirect it
- resp = open_url(
- to_native(url, errors='surrogate_or_strict'),
- validate_certs=validate_certs,
- headers=None if token is None else token.headers(),
- unredirected_headers=['Authorization'], http_agent=user_agent(),
- timeout=timeout
- )
+ try:
+ resp = open_url(
+ to_native(url, errors='surrogate_or_strict'),
+ validate_certs=validate_certs,
+ headers=None if token is None else token.headers(),
+ unredirected_headers=['Authorization'], http_agent=user_agent(),
+ timeout=timeout
+ )
+ except Exception as err:
+ raise AnsibleError(to_native(err), orig_exc=err)
 
  with open(b_file_path, 'wb') as download_file: # type: t.BinaryIO
  actual_hash = _consume_file(resp, write_to=download_file)
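What makes the new except-blocks useful is that they keep the low-level error reachable on the wrapper (via orig_exc and exception chaining), which is what the retry predicate above inspects. A minimal sketch of that wrapping pattern (WrappedError and opener are illustrative stand-ins, not real ansible APIs):

    class WrappedError(Exception):
        # Stand-in for AnsibleError's orig_exc handling.
        def __init__(self, msg, orig_exc=None):
            super().__init__(msg)
            self.orig_exc = orig_exc

    def download(url, opener):
        try:
            return opener(url)
        except Exception as err:
            # The wrapper carries orig_exc, so a predicate like should_retry_error()
            # can unwrap it and decide whether the failure looks transient.
            raise WrappedError(str(err), orig_exc=err) from err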
ansible/galaxy/dependency_resolution/dataclasses.py CHANGED
@@ -27,7 +27,7 @@ if t.TYPE_CHECKING:
  )
 
 
- from ansible.errors import AnsibleError
+ from ansible.errors import AnsibleError, AnsibleAssertionError
  from ansible.galaxy.api import GalaxyAPI
  from ansible.module_utils._text import to_bytes, to_native, to_text
  from ansible.module_utils.common.arg_spec import ArgumentSpecValidator
@@ -571,3 +571,13 @@ class Candidate(
 
  def __init__(self, *args, **kwargs):
  super(Candidate, self).__init__()
+
+ def with_signatures_repopulated(self): # type: (Candidate) -> Candidate
+ """Populate a new Candidate instance with Galaxy signatures.
+ :raises AnsibleAssertionError: If the supplied candidate is not sourced from a Galaxy-like index.
+ """
+ if self.type != 'galaxy':
+ raise AnsibleAssertionError(f"Invalid collection type for {self!r}: unable to get signatures from a galaxy server.")
+
+ signatures = self.src.get_collection_signatures(self.namespace, self.name, self.ver)
+ return self.__class__(self.fqcn, self.ver, self.src, self.type, frozenset([*self.signatures, *signatures]))
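Since Candidate is rebuilt positionally from (fqcn, ver, src, type, signatures), repopulating signatures amounts to constructing a replacement instance whose signature set is the union of the existing and the freshly fetched ones. A rough sketch of that immutable-update idea (a simplified stand-in, not the real Candidate class):

    from typing import FrozenSet, NamedTuple

    class Candidate(NamedTuple):
        # Simplified stand-in using the five fields visible in the constructor call above.
        fqcn: str
        ver: str
        src: object
        type: str
        signatures: FrozenSet[str]

        def with_signatures_repopulated(self, extra_signatures):
            # Return a new immutable instance; the original is left untouched.
            return self._replace(signatures=frozenset([*self.signatures, *extra_signatures]))

    c = Candidate('ns.coll', '1.0.0', None, 'galaxy', frozenset({'sig-a'}))
    print(sorted(c.with_signatures_repopulated({'sig-b'}).signatures))
    # ['sig-a', 'sig-b']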
ansible/galaxy/dependency_resolution/providers.py CHANGED
@@ -392,7 +392,6 @@ class CollectionDependencyProviderBase(AbstractProvider):
 
  if not unsatisfied:
  if self._include_signatures:
- signatures = src_server.get_collection_signatures(first_req.namespace, first_req.name, version)
  for extra_source in extra_signature_sources:
  signatures.append(get_signature_from_source(extra_source))
  latest_matches.append(
ansible/module_utils/ansible_release.py CHANGED
@@ -19,6 +19,6 @@
  from __future__ import (absolute_import, division, print_function)
  __metaclass__ = type
 
- __version__ = '2.13.8'
+ __version__ = '2.13.9rc1'
  __author__ = 'Ansible, Inc.'
  __codename__ = "Nobody's Fault but Mine"
ansible/module_utils/api.py CHANGED
@@ -26,11 +26,15 @@ The 'api' module provides the following common argument specs:
  from __future__ import (absolute_import, division, print_function)
  __metaclass__ = type
 
+ import copy
  import functools
+ import itertools
  import random
  import sys
  import time
 
+ import ansible.module_utils.compat.typing as t
+
 
  def rate_limit_argument_spec(spec=None):
  """Creates an argument spec for working with rate limiting"""
@@ -141,6 +145,15 @@ def retry_with_delays_and_condition(backoff_iterator, should_retry_error=None):
  :param backoff_iterator: An iterable of delays in seconds.
  :param should_retry_error: A callable that takes an exception of the decorated function and decides whether to retry or not (returns a bool).
  """
+ def _emit_isolated_iterator_copies(original_iterator): # type: (t.Iterable[t.Any]) -> t.Generator
+ # Ref: https://stackoverflow.com/a/30232619/595220
+ _copiable_iterator, _first_iterator_copy = itertools.tee(original_iterator)
+ yield _first_iterator_copy
+ while True:
+ yield copy.copy(_copiable_iterator)
+ backoff_iterator_generator = _emit_isolated_iterator_copies(backoff_iterator)
+ del backoff_iterator # prevent accidental use elsewhere
+
 
  if should_retry_error is None:
  should_retry_error = retry_never
@@ -152,7 +165,7 @@ def retry_with_delays_and_condition(backoff_iterator, should_retry_error=None):
  """
  call_retryable_function = functools.partial(function, *args, **kwargs)
 
- for delay in backoff_iterator:
+ for delay in next(backoff_iterator_generator):
  try:
  return call_retryable_function()
  except Exception as e:
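The tee/copy helper exists because a generator passed as backoff_iterator would be exhausted after the first decorated call, leaving later calls with no delays and therefore no retries; each call now pulls a fresh, independent copy of the delay sequence. A standalone sketch of the same idea (simplified names; the real helper is the nested _emit_isolated_iterator_copies above):

    import copy
    import itertools

    def isolated_copies(original_iterator):
        # Each next() on this generator yields an independent iterator over the
        # same delay values, even when original_iterator is a one-shot generator
        # (itertools.tee objects support copy.copy()).
        master, first = itertools.tee(original_iterator)
        yield first
        while True:
            yield copy.copy(master)

    def backoff(retries=3):
        for i in range(retries):
            yield 2 ** i  # one-shot generator: exhausted after a single pass

    copies = isolated_copies(backoff())
    print(list(next(copies)))  # [1, 2, 4]
    print(list(next(copies)))  # [1, 2, 4] again -- each retry loop gets its own copy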
ansible/module_utils/csharp/Ansible.Basic.cs CHANGED
@@ -1,6 +1,8 @@
+ using Microsoft.Win32.SafeHandles;
  using System;
  using System.Collections;
  using System.Collections.Generic;
+ using System.ComponentModel;
  using System.Diagnostics;
  using System.IO;
  using System.Linq;
@@ -176,7 +178,8 @@ namespace Ansible.Basic
  }
 
  string dateTime = DateTime.Now.ToFileTime().ToString();
- string dirName = String.Format("ansible-moduletmp-{0}-{1}", dateTime, new Random().Next(0, int.MaxValue));
+ string dirName = String.Format("ansible-moduletmp-{0}-{1}-{2}", dateTime, System.Diagnostics.Process.GetCurrentProcess().Id,
+ new Random().Next(0, int.MaxValue));
  string newTmpdir = Path.Combine(baseDir, dirName);
  #if CORECLR
  DirectoryInfo tmpdirInfo = Directory.CreateDirectory(newTmpdir);
@@ -309,8 +312,8 @@ namespace Ansible.Basic
 
  public void ExitJson()
  {
- WriteLine(GetFormattedResults(Result));
  CleanupFiles(null, null);
+ WriteLine(GetFormattedResults(Result));
  Exit(0);
  }
 
@@ -337,8 +340,8 @@ namespace Ansible.Basic
  Result["exception"] = exception.ToString();
  }
 
- WriteLine(GetFormattedResults(Result));
  CleanupFiles(null, null);
+ WriteLine(GetFormattedResults(Result));
  Exit(1);
  }
 
@@ -1444,10 +1447,22 @@ namespace Ansible.Basic
  {
  foreach (string path in cleanupFiles)
  {
- if (File.Exists(path))
- File.Delete(path);
- else if (Directory.Exists(path))
- Directory.Delete(path, true);
+ try
+ {
+ #if WINDOWS
+ FileCleaner.Delete(path);
+ #else
+ if (File.Exists(path))
+ File.Delete(path);
+ else if (Directory.Exists(path))
+ Directory.Delete(path, true);
+ #endif
+ }
+ catch (Exception e)
+ {
+ Warn(string.Format("Failure cleaning temp path '{0}': {1} {2}",
+ path, e.GetType().Name, e.Message));
+ }
  }
  cleanupFiles = new List<string>();
  }
@@ -1486,4 +1501,247 @@ namespace Ansible.Basic
  Console.WriteLine(line);
  }
  }
+
+ #if WINDOWS
+ // Windows is tricky as AVs and other software might still
+ // have an open handle to files causing a failure. Use a
+ // custom deletion mechanism to remove the files/dirs.
+ // https://github.com/ansible/ansible/pull/80247
+ internal static class FileCleaner
+ {
+ private const int FileDispositionInformation = 13;
+ private const int FileDispositionInformationEx = 64;
+
+ private const int ERROR_INVALID_PARAMETER = 0x00000057;
+ private const int ERROR_DIR_NOT_EMPTY = 0x00000091;
+
+ private static bool? _supportsPosixDelete = null;
+
+ [Flags()]
+ public enum DispositionFlags : uint
+ {
+ FILE_DISPOSITION_DO_NOT_DELETE = 0x00000000,
+ FILE_DISPOSITION_DELETE = 0x00000001,
+ FILE_DISPOSITION_POSIX_SEMANTICS = 0x00000002,
+ FILE_DISPOSITION_FORCE_IMAGE_SECTION_CHECK = 0x00000004,
+ FILE_DISPOSITION_ON_CLOSE = 0x00000008,
+ FILE_DISPOSITION_IGNORE_READONLY_ATTRIBUTE = 0x00000010,
+ }
+
+ [Flags()]
+ public enum FileFlags : uint
+ {
+ FILE_FLAG_OPEN_NO_RECALL = 0x00100000,
+ FILE_FLAG_OPEN_REPARSE_POINT = 0x00200000,
+ FILE_FLAG_SESSION_AWARE = 0x00800000,
+ FILE_FLAG_POSIX_SEMANTICS = 0x01000000,
+ FILE_FLAG_BACKUP_SEMANTICS = 0x02000000,
+ FILE_FLAG_DELETE_ON_CLOSE = 0x04000000,
+ FILE_FLAG_SEQUENTIAL_SCAN = 0x08000000,
+ FILE_FLAG_RANDOM_ACCESS = 0x10000000,
+ FILE_FLAG_NO_BUFFERING = 0x20000000,
+ FILE_FLAG_OVERLAPPED = 0x40000000,
+ FILE_FLAG_WRITE_THROUGH = 0x80000000,
+ }
+
+ [DllImport("Kernel32.dll", CharSet = CharSet.Unicode, SetLastError = true)]
+ private static extern SafeFileHandle CreateFileW(
+ [MarshalAs(UnmanagedType.LPWStr)] string lpFileName,
+ FileSystemRights dwDesiredAccess,
+ FileShare dwShareMode,
+ IntPtr lpSecurityAttributes,
+ FileMode dwCreationDisposition,
+ uint dwFlagsAndAttributes,
+ IntPtr hTemplateFile);
+
+ private static SafeFileHandle CreateFile(string path, FileSystemRights access, FileShare share, FileMode mode,
+ FileAttributes attributes, FileFlags flags)
+ {
+ uint flagsAndAttributes = (uint)attributes | (uint)flags;
+ SafeFileHandle handle = CreateFileW(path, access, share, IntPtr.Zero, mode, flagsAndAttributes,
+ IntPtr.Zero);
+ if (handle.IsInvalid)
+ {
+ int errCode = Marshal.GetLastWin32Error();
+ string msg = string.Format("CreateFileW({0}) failed 0x{1:X8}: {2}",
+ path, errCode, new Win32Exception(errCode).Message);
+ throw new Win32Exception(errCode, msg);
+ }
+
+ return handle;
+ }
+
+ [DllImport("Ntdll.dll")]
+ private static extern int NtSetInformationFile(
+ SafeFileHandle FileHandle,
+ out IntPtr IoStatusBlock,
+ ref int FileInformation,
+ int Length,
+ int FileInformationClass);
+
+ [DllImport("Ntdll.dll")]
+ private static extern int RtlNtStatusToDosError(
+ int Status);
+
+ public static void Delete(string path)
+ {
+ if (File.Exists(path))
+ {
+ DeleteEntry(path, FileAttributes.ReadOnly);
+ }
+ else if (Directory.Exists(path))
+ {
+ Queue<DirectoryInfo> dirQueue = new Queue<DirectoryInfo>();
+ dirQueue.Enqueue(new DirectoryInfo(path));
+ bool nonEmptyDirs = false;
+ HashSet<string> processedDirs = new HashSet<string>();
+
+ while (dirQueue.Count > 0)
+ {
+ DirectoryInfo currentDir = dirQueue.Dequeue();
+
+ bool deleteDir = true;
+ if (processedDirs.Add(currentDir.FullName))
+ {
+ foreach (FileSystemInfo entry in currentDir.EnumerateFileSystemInfos())
+ {
+ // Tries to delete each entry. Failures are ignored
+ // as they will be picked up when the dir is
+ // deleted and not empty.
+ if (entry is DirectoryInfo)
+ {
+ if ((entry.Attributes & FileAttributes.ReparsePoint) != 0)
+ {
+ // If it's a reparse point, just delete it directly.
+ DeleteEntry(entry.FullName, entry.Attributes, ignoreFailure: true);
+ }
+ else
+ {
+ // Add the dir to the queue to delete and it will be processed next round.
+ dirQueue.Enqueue((DirectoryInfo)entry);
+ deleteDir = false;
+ }
+ }
+ else
+ {
+ DeleteEntry(entry.FullName, entry.Attributes, ignoreFailure: true);
+ }
+ }
+ }
+
+ if (deleteDir)
+ {
+ try
+ {
+ DeleteEntry(currentDir.FullName, FileAttributes.Directory);
+ }
+ catch (Win32Exception e)
+ {
+ if (e.NativeErrorCode == ERROR_DIR_NOT_EMPTY)
+ {
+ nonEmptyDirs = true;
+ }
+ else
+ {
+ throw;
+ }
+ }
+ }
+ else
+ {
+ dirQueue.Enqueue(currentDir);
+ }
+ }
+
+ if (nonEmptyDirs)
+ {
+ throw new IOException("Directory contains files still open by other processes");
+ }
+ }
+ }
+
+ private static void DeleteEntry(string path, FileAttributes attr, bool ignoreFailure = false)
+ {
+ try
+ {
+ if ((attr & FileAttributes.ReadOnly) != 0)
+ {
+ // Windows does not allow files set with ReadOnly to be
+ // deleted. Pre-emptively unset the attribute.
+ // FILE_DISPOSITION_IGNORE_READONLY_ATTRIBUTE is quite new,
+ // look at using that flag with POSIX delete once Server 2019
+ // is the baseline.
+ File.SetAttributes(path, FileAttributes.Normal);
+ }
+
+ // REPARSE - Only touch the symlink itself and not the target
+ // BACKUP - Needed for dir handles, bypasses access checks for admins
+ // DELETE_ON_CLOSE is not used as it interferes with the POSIX delete
+ FileFlags flags = FileFlags.FILE_FLAG_OPEN_REPARSE_POINT |
+ FileFlags.FILE_FLAG_BACKUP_SEMANTICS;
+
+ using (SafeFileHandle fileHandle = CreateFile(path, FileSystemRights.Delete,
+ FileShare.ReadWrite | FileShare.Delete, FileMode.Open, FileAttributes.Normal, flags))
+ {
+ if (_supportsPosixDelete == null || _supportsPosixDelete == true)
+ {
+ // A POSIX delete will delete the filesystem entry even if
+ // it's still opened by another process so favour that if
+ // available.
+ DispositionFlags deleteFlags = DispositionFlags.FILE_DISPOSITION_DELETE |
+ DispositionFlags.FILE_DISPOSITION_POSIX_SEMANTICS;
+
+ SetInformationFile(fileHandle, FileDispositionInformationEx, (int)deleteFlags);
+ if (_supportsPosixDelete == true)
+ {
+ return;
+ }
+ }
+
+ // FileDispositionInformation takes in a struct with only a BOOLEAN value.
+ // Using an int will also do the same thing to set that flag to true.
+ SetInformationFile(fileHandle, FileDispositionInformation, Int32.MaxValue);
+ }
+ }
+ catch
+ {
+ if (!ignoreFailure)
+ {
+ throw;
+ }
+ }
+ }
+
+ private static void SetInformationFile(SafeFileHandle handle, int infoClass, int value)
+ {
+ IntPtr ioStatusBlock = IntPtr.Zero;
+
+ int ntStatus = NtSetInformationFile(handle, out ioStatusBlock, ref value,
+ Marshal.SizeOf(typeof(int)), infoClass);
+
+ if (ntStatus != 0)
+ {
+ int errCode = RtlNtStatusToDosError(ntStatus);
+
+ // The POSIX delete was added in Server 2016 (Win 10 14393/Redstone 1)
+ // Mark this flag so we don't try again.
+ if (infoClass == FileDispositionInformationEx && _supportsPosixDelete == null &&
+ errCode == ERROR_INVALID_PARAMETER)
+ {
+ _supportsPosixDelete = false;
+ return;
+ }
+
+ string msg = string.Format("NtSetInformationFile() failed 0x{0:X8}: {1}",
+ errCode, new Win32Exception(errCode).Message);
+ throw new Win32Exception(errCode, msg);
+ }
+
+ if (infoClass == FileDispositionInformationEx)
+ {
+ _supportsPosixDelete = true;
+ }
+ }
+ }
+ #endif
  }
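Beyond the Win32 plumbing, FileCleaner follows a capability-caching strategy: attempt the POSIX-semantics delete, remember via the tri-state _supportsPosixDelete whether the platform supports it, and fall back to the legacy disposition otherwise. In rough Python terms (purely illustrative names and error type, not the actual Windows calls):

    _supports_new_api = None  # None = unknown, then True/False after the first probe

    def delete(path, posix_delete, legacy_delete):
        global _supports_new_api
        if _supports_new_api in (None, True):
            try:
                posix_delete(path)          # e.g. FILE_DISPOSITION_POSIX_SEMANTICS
                _supports_new_api = True
                return
            except NotImplementedError:     # stand-in for ERROR_INVALID_PARAMETER
                _supports_new_api = False   # cache the result; don't probe again
        legacy_delete(path)                 # classic delete-on-last-close path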