atomicshop 3.3.2__py3-none-any.whl → 3.3.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of atomicshop might be problematic.

atomicshop/__init__.py CHANGED
@@ -1,4 +1,4 @@
  """Atomic Basic functions and classes to make developer life easier"""
 
  __author__ = "Den Kras"
- __version__ = '3.3.2'
+ __version__ = '3.3.4'
atomicshop/mitm/mitm_main.py CHANGED
@@ -402,7 +402,7 @@ def mitm_server(config_file_path: str, script_version: str):
      # We set the virtual IPs in the network adapter here, so the server multiprocessing processes can listen on them.
      setting_result: int = _add_virtual_ips_set_default_dns_gateway(system_logger)
      if setting_result != 0:
-         print_api.print_api("Failed to set the default DNS gateway.", error_type=True, color="red",
+         print_api.print_api("Failed to set the default DNS gateway OR Virtual IPs.", error_type=True, color="red",
                              logger=system_logger)
          # Wait for the message to be printed and saved to file.
          time.sleep(1)
@@ -618,8 +618,21 @@ def _create_tcp_server_process(
          # network_logger_queue_listener.stop()
          sys.exit(1)
 
-     socket_wrapper_instance.start_listening_socket(
-         callable_function=thread_worker_main, callable_args=(config_static,))
+     try:
+         socket_wrapper_instance.start_listening_socket(
+             callable_function=thread_worker_main, callable_args=(config_static,))
+     except OSError as e:
+         if e.winerror == 10022:  # Invalid argument error on Windows.
+             message = (
+                 str(f"{e}\n"
+                     f"Check that the IP address and port are correct: {socket_wrapper_kwargs['ip_address']}:{socket_wrapper_kwargs['port']}\n"))
+             print_api.print_api(message, error_type=True, color="red", logger=system_logger, logger_method='critical')
+             # Wait for the message to be printed and saved to file.
+             time.sleep(1)
+             # network_logger_queue_listener.stop()
+             sys.exit(1)
+         else:
+             raise e
 
      # Notify that the TCP server is ready.
      is_tcp_process_ready.set()
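
For context on the new except branch: error code 10022 is Winsock's WSAEINVAL, and `OSError.winerror` is only populated on Windows builds of Python, so this check assumes a Windows host (consistent with the WMI-based networking code elsewhere in the package). A minimal, self-contained sketch of the same pattern; the helper name and the portable `getattr` guard are illustrative, not part of the package:

    import socket

    def bind_or_explain(ip_address: str, port: int) -> socket.socket:
        """Bind a TCP socket; turn WinError 10022 (WSAEINVAL) into a readable hint."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            sock.bind((ip_address, port))
        except OSError as e:
            # 'winerror' exists only on Windows; getattr keeps the sketch portable.
            if getattr(e, 'winerror', None) == 10022:
                raise RuntimeError(
                    f"Check that the IP address and port are correct: {ip_address}:{port}") from e
            raise
        return sock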
@@ -710,7 +723,7 @@ def _add_virtual_ips_set_default_dns_gateway(system_logger: logging.Logger) -> i
          networks.add_virtual_ips_to_default_adapter_by_current_setting(
              virtual_ipv4s_to_add=IPS_TO_ASSIGN, virtual_ipv4_masks_to_add=MASKS_TO_ASSIGN,
              dns_gateways=dns_gateway_server_list)
-     except PermissionError as e:
+     except (PermissionError, TimeoutError) as e:
          print_api.print_api(e, error_type=True, color="red", logger=system_logger)
          # Wait for the message to be printed and saved to file.
          time.sleep(1)
atomicshop/networks.py CHANGED
@@ -2,6 +2,7 @@ import socket
  import time
  from typing import Union
  import os
+ import psutil
 
  from icmplib import ping
  from icmplib.models import Host
@@ -10,6 +11,7 @@ from win32com.client import CDispatch
  from .wrappers.pywin32w.wmis import win32networkadapter, win32_networkadapterconfiguration, wmi_helpers, msft_netipaddress
  from .wrappers.ctyping import setup_device
  from .wrappers.winregw import winreg_network
+ from .wrappers.psutilw import psutil_networks
 
 
  MICROSOFT_LOOPBACK_DEVICE_NAME: str = 'Microsoft KM-TEST Loopback Adapter'
@@ -42,7 +44,7 @@ def get_default_internet_ipv4_by_connect(target: str = "8.8.8.8") -> str:
          return s.getsockname()[0]  # local address of that route
 
 
- def get_default_internet_interface_name() -> str:
+ def get_hostname() -> str:
      """
      Get the default network interface name that is being used for internet.
      :return: string, default network interface name.
@@ -51,6 +53,61 @@ def get_default_internet_interface_name() -> str:
      return socket.gethostname()
 
 
+ def get_default_internet_interface_name() -> str | None:
+     """
+     Get the default network interface name that is being used for internet.
+     :return: string, default network interface name.
+     """
+
+     interface_dict: dict = psutil_networks.get_default_connection_name()
+     if not interface_dict:
+         result = None
+     else:
+         # Get the first interface name from the dictionary.
+         result = next(iter(interface_dict.keys()), None)
+
+     return result
+
+
+ def get_interface_ips(
+         interface_name: str = None,
+         ipv4: bool = True,
+         ipv6: bool = True,
+         localhost: bool = True,
+         default_interface: bool = False
+ ):
+     if not ipv4 and not ipv6:
+         raise ValueError("At least one of ipv4 or ipv6 must be True.")
+     if default_interface and interface_name:
+         raise ValueError("You can't specify both default_interface and interface_name.")
+
+     if default_interface:
+         # Get the default interface name.
+         interface_name = get_default_internet_interface_name()
+
+     physical_ip_types: list[str] = []
+     if ipv4:
+         physical_ip_types.append("AF_INET")  # IPv4
+     if ipv6:
+         physical_ip_types.append("AF_INET6")  # IPv6
+
+     interfaces: dict = psutil.net_if_addrs()
+
+     ips = []
+     for name, addresses in interfaces.items():
+         if interface_name and interface_name != name:
+             continue
+
+         for address in addresses:
+             if address.family.name in physical_ip_types:
+                 if not localhost and (address.address.startswith("127.") or address.address.startswith("::1")):
+                     # Skip localhost addresses if 'localhost' is False.
+                     continue
+
+                 ips.append(address.address)
+     return ips
+
+
  def get_microsoft_loopback_device_network_configuration(
          wmi_instance: CDispatch = None,
          timeout: int = 1,
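
A short usage sketch of the two helpers added above, assuming psutil is installed and the import path `atomicshop.networks` (as in this wheel):

    from atomicshop import networks

    # Name of the interface currently routing to the internet, or None.
    default_name = networks.get_default_internet_interface_name()

    # All non-loopback IPv4 addresses of that default interface.
    ipv4s = networks.get_interface_ips(
        ipv4=True, ipv6=False, localhost=False, default_interface=True)
    print(default_name, ipv4s)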
@@ -351,6 +408,8 @@ def add_virtual_ips_to_default_adapter_by_current_setting(
          availability_wait_seconds: int = 15,
          simulate_only: bool = False,
          locator: CDispatch = None,
+         wait_until_applied: bool = True,
+         wait_until_applied_seconds: int = 15
  ) -> tuple[list[str], list[str], list[str], list[str]]:
      """
      Add virtual IP addresses to the default network adapter.
@@ -381,6 +440,14 @@ def add_virtual_ips_to_default_adapter_by_current_setting(
      :param simulate_only: bool, if True, the function will only prepare the ip addresses and return them without changing anything.
      :param locator: CDispatch, WMI locator object. If not specified, it will be created.
 
+     :param wait_until_applied: bool, if True, the function will wait until the IP addresses are applied.
+         By default, while the WMI command executes, there is no indication of whether the addresses have finished applying.
+         If you have 15+ addresses, it can take a while to apply them.
+     :param wait_until_applied_seconds: int, seconds to wait for the IP addresses to be applied.
+         This is different from 'availability_wait_seconds', which is the time to wait for the adapter to be available
+         after setting the IP addresses; this is the time to wait for the IP addresses themselves to be
+         applied after setting them. If the IP addresses are not applied within this time, a TimeoutError is raised.
+
      :return: tuple of lists, (current_ipv4s, current_ipv4_masks, ips_to_assign, masks_to_assign)
      """
@@ -462,4 +529,14 @@ def add_virtual_ips_to_default_adapter_by_current_setting(
          # print("[!] No new IPs to assign.")
          pass
 
+     if wait_until_applied:
+         # Wait until the IP addresses are applied.
+         for _ in range(wait_until_applied_seconds):
+             current_ips = get_interface_ips(ipv4=True, ipv6=False, localhost=False, default_interface=True)
+             if set(current_ips) == set(ips):
+                 break
+             time.sleep(1)
+         else:
+             raise TimeoutError("Timeout while waiting for the IP addresses to be applied.")
+
      return current_ipv4s, current_ipv4_masks, ips_to_assign, masks_to_assign
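
The wait loop above uses Python's for/else: the else clause runs only if the loop completes without a break, which is what converts "the IPs never matched within the budget" into a TimeoutError. A standalone sketch of that polling pattern (the helper is illustrative, not from the package):

    import time
    from typing import Callable

    def wait_until(predicate: Callable[[], bool], attempts: int = 15, interval: float = 1.0) -> None:
        """Poll 'predicate' once per interval; raise TimeoutError if it never becomes true."""
        for _ in range(attempts):
            if predicate():
                break
            time.sleep(interval)
        else:
            raise TimeoutError("Timeout while waiting for the condition to be met.")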
atomicshop/wrappers/dockerw/install_docker.py CHANGED
@@ -1,6 +1,10 @@
  import sys
+ import os
  import subprocess
  import getpass
+ import tempfile
+ import textwrap
+ from pathlib import Path
 
  from ... import process, filesystem
  from ...permissions import permissions, ubuntu_permissions
@@ -8,6 +12,11 @@ from ...print_api import print_api
  from .. import ubuntu_terminal
 
 
+ PREPARATION_OUTPUT_DIR: str = str(Path(__file__).parent / "offline-bundle")
+ PREPARATION_OUTPUT_ZIP: str = f"{PREPARATION_OUTPUT_DIR}.zip"
+ GET_DOCKER_URL: str = "https://get.docker.com"
+
+
  def is_docker_installed():
      """
      The function will check if docker is installed.
@@ -109,6 +118,7 @@ def install_docker_ubuntu(
      # The script will also install docker-compose and docker-buildx.
      # process.execute_script('curl -fsSL https://get.docker.com -o get-docker.sh && sh get-docker.sh', shell=True)
      process.execute_script('curl -fsSL https://get.docker.com | sh', shell=True)
+     # subprocess.run("curl -fsSL https://get.docker.com | sh", shell=True, check=True)
      # process.execute_script('curl -fsSL https://get.docker.com -o get-docker.sh', shell=True)
      # process.execute_script('sh get-docker.sh', shell=True)
      # filesystem.remove_file('get-docker.sh')
@@ -210,3 +220,230 @@ def install_docker_ubuntu(
      print_api('Docker installation failed.', color='red')
      print_api('Please check the logs above for more information.', color='red')
      return 1
+
+
+ def prepare_offline_installation_bundle():
+     # The Bash script in a single triple-quoted string - this makes it easier to copy-paste if you need to run it directly.
+     bash_script = textwrap.dedent(r"""#!/usr/bin/env bash
+     #
+     # Build an offline-install bundle for Docker Engine on Ubuntu 24.04 LTS.
+     # The package list is auto-discovered from `get.docker.com --dry-run`.
+     #
+     # sudo ./prepare_docker_offline.sh [/path/to/output_dir]
+     #
+     set -Eeuo pipefail
+
+     ################################################################################
+     # CLI PARAMETERS
+     #   $1 → OUTDIR          (where to build the bundle)
+     #   $2 → GET_DOCKER_URL  (defaults to https://get.docker.com)
+     #   $3 → OUTPUT_ZIP      (defaults to "$OUTDIR.zip")
+     ################################################################################
+     OUTDIR="${1:-"$PWD/offline-bundle"}"
+     GET_DOCKER_URL="${2:-https://get.docker.com}"
+     OUTPUT_ZIP="${3:-$OUTDIR.zip}"
+
+     die() { echo "ERROR: $*" >&2; exit 1; }
+     need_root() { [[ $EUID -eq 0 ]] || die "Run as root (use sudo)"; }
+     need_cmd() {
+         local cmd=$1
+         local pkg=${2:-$1}   # default package == command
+         if ! command -v "$cmd" &>/dev/null; then
+             echo "[*] $cmd not found – installing $pkg ..."
+             apt-get update -qq
+             DEBIAN_FRONTEND=noninteractive \
+                 apt-get install -y --no-install-recommends "$pkg" || \
+                 die "Unable to install required package: $pkg"
+         fi
+     }
+
+     need_root
+     need_cmd curl
+
+     echo "[*] Discovering package list via get.docker.com --dry-run ..."
+     DRY_LOG=$(curl -fsSL "$GET_DOCKER_URL" | bash -s -- --dry-run)
+
+     echo "[*] Determining package list via --dry-run ..."
+     PKGS=$(printf '%s\n' "$DRY_LOG" | sed -n 's/.* install \(.*\) >\/dev\/null.*/\1/p')
+
+     if ! grep -q '\S' <<< "$PKGS"; then
+         echo "No packages detected in dry-run output – aborting." >&2
+         exit 1
+     fi
+
+     echo "[*] Install Docker before preparing the offline bundle."
+     curl -fsSL "$GET_DOCKER_URL" | sh
+
+     mkdir -p "$OUTDIR"/packages
+     echo "[*] Output directory: $OUTDIR"
+
+     echo "Packages to install:"
+     echo "$PKGS"
+
+     echo "[*] Downloading packages and all dependencies …"
+     apt-get update -qq
+     apt-get clean
+     mkdir -p /var/cache/apt/archives/partial
+     apt-get -y --download-only --reinstall install $PKGS
+     cp -v /var/cache/apt/archives/*.deb "$OUTDIR/packages/"
+     echo "[*] $(ls "$OUTDIR/packages" | wc -l) .deb files written to packages/"
+
+     echo "[*] Building local Packages.gz index …"
+     pushd "$OUTDIR/packages" >/dev/null
+     for deb in *.deb; do
+         dpkg-deb -f "$deb" Package
+     done | awk '{printf "%s\tmisc\toptional\n",$1}' > override
+     apt-ftparchive packages . override | tee Packages | gzip -9c > Packages.gz
+     popd >/dev/null
+
+     echo ">> Checking for Docker ..."
+     command -v docker >/dev/null 2>&1 || { echo "Docker not found."; exit 1; }
+
+     # Pack the final bundle.
+     echo "[*] Creating a zip archive ..."
+     parent_dir=$(dirname "$OUTDIR")
+     base_name=$(basename "$OUTDIR")
+
+     # Use a subshell to cd into the parent directory and zip the contents, so the zip file will not contain the full path.
+     (
+         cd "$parent_dir"
+         zip -rq "$OUTPUT_ZIP" "$base_name"
+     )
+
+     rm -rf "$OUTDIR"
+     echo "Docker offline bundle created at $OUTPUT_ZIP"
+     echo
+     echo "Copy the zip file and the offline installation python script to the target machine and execute."
+     """)
+
+     # Write it to a secure temporary file.
+     with tempfile.NamedTemporaryFile('w', delete=False, suffix='.sh') as f:
+         f.write(bash_script)
+         temp_path = f.name
+     os.chmod(temp_path, 0o755)  # make it executable
+
+     cmd = [
+         "sudo", "bash", temp_path,
+         PREPARATION_OUTPUT_DIR,
+         GET_DOCKER_URL,
+         PREPARATION_OUTPUT_ZIP,
+     ]
+
+     # Run it and stream output live.
+     try:
+         subprocess.run(cmd, check=True)
+     finally:
+         # Clean up the temp file unless you want to inspect it.
+         os.remove(temp_path)
+
+
+ def install_offline_installation_bundle():
+     bash_script = textwrap.dedent(r"""#!/usr/bin/env bash
+     # Offline installer for the Docker bundle produced by prepare_docker_offline.sh
+     set -euo pipefail
+
+     die() { echo "ERROR: $*" >&2; exit 1; }
+     need_root() { [[ $EUID -eq 0 ]] || die "Run as root (use sudo)"; }
+
+     need_root
+
+     # ------------------------------------------------------------------------------
+     # Paths
+     # ------------------------------------------------------------------------------
+     BUNDLE_ZIP="${1:-"$PWD/offline-bundle.zip"}"
+
+     BUNDLE_DIR="${BUNDLE_ZIP%.zip}"   # remove .zip suffix
+     REPO_DIR="$BUNDLE_DIR/packages"   # contains *.deb + Packages
+     OFFLINE_LIST="/etc/apt/sources.list.d/docker-offline.list"
+
+     # Extract the zip archive if it exists.
+     if [[ -f "$BUNDLE_ZIP" ]]; then
+         echo "[*] Extracting offline bundle from $BUNDLE_ZIP ..."
+         mkdir -p "$BUNDLE_DIR"
+         unzip -q "$BUNDLE_ZIP" -d "."
+     else
+         die "Bundle zip file '$BUNDLE_ZIP' not found. Provide a valid path."
+     fi
+
+     TEMP_PARTS="$(mktemp -d)"   # empty dir ⇒ no extra lists
+
+     # ------------------------------------------------------------------------------
+     # Helper to clean up even if the script aborts
+     # ------------------------------------------------------------------------------
+     cleanup() {
+         sudo rm -f "$OFFLINE_LIST"
+         sudo rm -rf "$TEMP_PARTS"
+     }
+     trap cleanup EXIT
+
+     # ------------------------------------------------------------------------------
+     # 1. Add the local repository (trusted) as the *only* source we will use
+     # ------------------------------------------------------------------------------
+     echo "[*] Adding temporary APT source for the offline bundle …"
+     echo "deb [trusted=yes] file:$REPO_DIR ./" | sudo tee "$OFFLINE_LIST" >/dev/null
+
+     # Ensure the plain index exists (APT always understands the uncompressed form).
+     if [[ ! -f "$REPO_DIR/Packages" && -f "$REPO_DIR/Packages.gz" ]]; then
+         gunzip -c "$REPO_DIR/Packages.gz" > "$REPO_DIR/Packages"
+     fi
+
+     # ------------------------------------------------------------------------------
+     # 2. Update metadata – but ONLY from our offline list
+     # ------------------------------------------------------------------------------
+     echo "[*] Updating APT metadata – offline only …"
+     sudo apt-get -o Dir::Etc::sourcelist="$OFFLINE_LIST" \
+                  -o Dir::Etc::sourceparts="$TEMP_PARTS" \
+                  -o APT::Get::List-Cleanup="0" \
+                  update -qq
+
+     # ------------------------------------------------------------------------------
+     # 3. Figure out which packages are inside the bundle
+     # ------------------------------------------------------------------------------
+     PKGS=$(awk '/^Package: /{print $2}' "$REPO_DIR/Packages")
+
+     echo "[*] Installing:"
+     printf '  • %s\n' $PKGS
+
+     # ------------------------------------------------------------------------------
+     # 4. Install them, again restricting APT to the offline repo only
+     # ------------------------------------------------------------------------------
+     sudo apt-get -y \
+         -o Dir::Etc::sourcelist="$OFFLINE_LIST" \
+         -o Dir::Etc::sourceparts="$TEMP_PARTS" \
+         install $PKGS
+
+     echo "[✓] Docker installed completely offline!"
+
+     usage() {
+         echo "Usage: $0 <image-archive.tar.gz>"
+         exit 1
+     }
+
+     echo ">> Checking for Docker ..."
+     command -v docker >/dev/null 2>&1 || {
+         echo "Docker is not installed; install Docker and try again."
+         exit 1
+     }
+
+     echo "Removing extracted files..."
+     rm -rf "$BUNDLE_DIR"
+     """)
+
+     # Write it to a secure temporary file.
+     with tempfile.NamedTemporaryFile('w', delete=False, suffix='.sh') as f:
+         f.write(bash_script)
+         temp_path = f.name
+     os.chmod(temp_path, 0o755)  # make it executable
+
+     cmd = [
+         "sudo", "bash", temp_path,
+         PREPARATION_OUTPUT_ZIP,   # $1 BUNDLE_ZIP
+     ]
+
+     # Run it and stream output live.
+     try:
+         subprocess.run(cmd, check=True)
+     finally:
+         # Clean up the temp file unless you want to inspect it.
+         os.remove(temp_path)
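
Taken together, the intended flow appears to be: run the preparation step on an Ubuntu machine with internet access, copy the resulting zip (and this module) to the offline target, then run the installer. A hedged usage sketch; both functions shell out to `sudo bash`, so they assume Ubuntu with sudo available, and the zip is expected at the module-relative path set by PREPARATION_OUTPUT_ZIP:

    from atomicshop.wrappers.dockerw import install_docker

    # Online machine: builds offline-bundle.zip next to the module.
    install_docker.prepare_offline_installation_bundle()

    # Offline target (zip copied to the same relative location):
    install_docker.install_offline_installation_bundle()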
atomicshop/wrappers/githubw.py CHANGED
@@ -78,7 +78,7 @@ class GitHubWrapper:
      Usage to download the latest release where the file name is 'test_file.zip':
          git_wrapper = GitHubWrapper(user_name='user_name', repo_name='repo_name')
          git_wrapper.download_and_extract_latest_release(
-             target_directory='target_directory', string_pattern='test_*.zip')
+             target_directory='target_directory', asset_pattern='test_*.zip')
      ================================================================================================================
      Usage to get the latest release json:
          git_wrapper = GitHubWrapper(user_name='user_name', repo_name='repo_name')
@@ -116,6 +116,10 @@ class GitHubWrapper:
          self.commits_url: str = str()
          self.contents_url: str = str()
 
+         self.releases_url: str = str()
+         self.releases_per_page: int = 100
+         self.releases_starting_page: int = 1
+
          if self.user_name and self.repo_name and not self.repo_url:
              self.build_links_from_user_and_repo()
 
@@ -143,6 +147,7 @@ class GitHubWrapper:
          self.api_url = f'https://api.{self.domain}/repos/{self.user_name}/{self.repo_name}'
 
          self.latest_release_json_url: str = f'{self.api_url}/releases/latest'
+         self.releases_url: str = f'{self.api_url}/releases'
          self.commits_url: str = f'{self.api_url}/commits'
          self.contents_url: str = f'{self.api_url}/contents'
          self.branch_download_link = f'{self.api_url}/{self.branch_type_directory}/{self.branch}'
@@ -272,21 +277,133 @@ class GitHubWrapper:
 
          download_directory(self.path, current_target_directory, headers)
 
+     def get_releases_json(
+             self,
+             asset_pattern: str = None,
+             latest: bool = False,
+             per_page: int = None,
+             starting_page: int = None,
+             all_assets: bool = False,
+     ):
+         """
+         This function will get the releases json.
+         :param asset_pattern: str, the string pattern to search in the asset names of releases. Wildcards can be used.
+             If there is a match, the release will be added to the result list.
+         :param latest: bool, if True, will get only the latest release.
+             If 'asset_pattern' is provided, 'latest' will find the latest release matching the pattern.
+             Of course, if you want to search across all releases, you must set 'all_assets' to True.
+         :param per_page: int, the number of releases per page. Default is 100.
+         :param starting_page: int, the starting page number. Default is 1.
+         :param all_assets: bool, if True, will get all releases matching the pattern across all pages,
+             OR all releases if no pattern is provided.
+         :return:
+         """
+
+         # If 'latest' is True and no 'asset_pattern' is provided, we only need to get 1 release from page 1.
+         # No need to get more assets than the first one.
+         if latest and not asset_pattern:
+             per_page = 1
+             starting_page = 1
+             all_assets = False
+         # In all other cases, get the releases according to the provided parameters or defaults.
+         else:
+             if not per_page:
+                 per_page = self.releases_per_page
+
+             if not starting_page:
+                 starting_page = self.releases_starting_page
+
+         headers: dict = self._get_headers()
+
+         params: dict = {
+             'per_page': per_page,
+             'page': starting_page
+         }
+
+         all_releases = []
+         while True:
+             response = requests.get(self.releases_url, headers=headers, params=params)
+             releases = response.json()
+             # If no releases were found on the current page, there will be none on the next either; break the loop.
+             if not releases:
+                 break
+
+             # If 'asset_pattern' is provided, filter releases to only those that have matching assets.
+             if asset_pattern:
+                 for release in releases:
+                     assets = release.get('assets', [])
+                     matching_assets = [asset for asset in assets if fnmatch.fnmatch(asset.get('name', ''), asset_pattern)]
+                     if matching_assets:
+                         all_releases.append(release)
+
+                         if latest:
+                             return all_releases
+             else:
+                 all_releases.extend(releases)
+
+             if not all_assets:
+                 break
+
+             params['page'] += 1
+
+         return all_releases
+
+     def get_latest_release_json(
+             self,
+             asset_pattern: str = None
+     ) -> dict:
+         """
+         This function will get the latest release json.
+         :param asset_pattern: str, the string pattern to search in the asset names of releases. Wildcards can be used.
+             If there is a match, the release will be added to the result list.
+         :return: dict, the latest release json.
+         """
+
+         if asset_pattern:
+             releases = self.get_releases_json(
+                 asset_pattern=asset_pattern,
+                 latest=True,
+                 all_assets=True
+             )
+         else:
+             releases = self.get_releases_json(latest=True)
+
+         if not releases:
+             return {}
+         else:
+             return releases[0]
+
+     def get_latest_release_version(
+             self,
+             asset_pattern: str = None
+     ) -> str:
+         """
+         This function will get the latest release version number.
+
+         :param asset_pattern: str, the string pattern to search in the asset names of releases. Wildcards can be used.
+             If there is a match, the release will be added to the result list.
+         :return: str, the latest release version number.
+         """
+
+         latest_release_json: dict = self.get_latest_release_json(asset_pattern=asset_pattern)
+         latest_release_version: str = latest_release_json['tag_name']
+         return latest_release_version
+
      def get_latest_release_url(
              self,
-             string_pattern: str,
-             exclude_string: str = None,
+             asset_pattern: str,
+             exclude_pattern: str = None,
              **kwargs):
          """
          This function will return the latest release url.
-         :param string_pattern: str, the string pattern to search in the latest release. Wildcards can be used.
-         :param exclude_string: str, the string to exclude from the search. No wildcards can be used.
+         :param asset_pattern: str, the string pattern to search in the latest release. Wildcards can be used.
+         :param exclude_pattern: str, the string to exclude from the search. No wildcards can be used.
          :param kwargs: dict, the print arguments for the 'print_api' function.
          :return: str, the latest release url.
          """
 
          # Get the 'assets' key of the latest release json.
-         github_latest_releases_list = self.get_the_latest_release_json()['assets']
+         github_latest_releases_list = self.get_latest_release_json()['assets']
 
          # Get only download urls of the latest releases.
          download_urls: list = list()
@@ -294,13 +411,13 @@ class GitHubWrapper:
              download_urls.append(single_dict['browser_download_url'])
 
          # Exclude urls against 'exclude_string'.
-         if exclude_string:
+         if exclude_pattern:
              for download_url in download_urls:
-                 if exclude_string in download_url:
+                 if exclude_pattern in download_url:
                      download_urls.remove(download_url)
 
-         # Find urls against 'string_pattern'.
-         found_urls: list = fnmatch.filter(download_urls, string_pattern)
+         # Find urls against 'asset_pattern'.
+         found_urls: list = fnmatch.filter(download_urls, asset_pattern)
 
          # If more than 1 url answer the criteria, we can't download it. The user must be more specific in his input
          # strings.
@@ -317,15 +434,15 @@ class GitHubWrapper:
      def download_latest_release(
              self,
              target_directory: str,
-             string_pattern: str,
+             asset_pattern: str,
              exclude_string: str = None,
              **kwargs):
          """
          This function will download the latest release from the GitHub repository.
          :param target_directory: str, the target directory to download the file.
-         :param string_pattern: str, the string pattern to search in the latest release. Wildcards can be used.
+         :param asset_pattern: str, the string pattern to search in the latest release. Wildcards can be used.
          :param exclude_string: str, the string to exclude from the search. No wildcards can be used.
-             The 'excluded_string' will be filtered before the 'string_pattern' entries.
+             The 'excluded_string' will be filtered before the 'asset_pattern' entries.
          :param kwargs: dict, the print arguments for the 'print_api' function.
          :return:
          """
@@ -333,7 +450,7 @@ class GitHubWrapper:
          headers: dict = self._get_headers()
 
          # Get the latest release url.
-         found_url = self.get_latest_release_url(string_pattern=string_pattern, exclude_string=exclude_string, **kwargs)
+         found_url = self.get_latest_release_url(asset_pattern=asset_pattern, exclude_string=exclude_string, **kwargs)
 
          downloaded_file_path = web.download(
              file_url=found_url, target_directory=target_directory, headers=headers, **kwargs)
@@ -342,7 +459,7 @@ class GitHubWrapper:
      def download_and_extract_latest_release(
              self,
              target_directory: str,
-             string_pattern: str,
+             asset_pattern: str,
              exclude_string: str = None,
              archive_remove_first_directory: bool = False,
              **kwargs):
@@ -350,7 +467,7 @@ class GitHubWrapper:
          This function will download the latest release from the GitHub repository, extract the file and remove the file,
          leaving only the extracted folder.
          :param target_directory: str, the target directory to download and extract the file.
-         :param string_pattern: str, the string pattern to search in the latest release. Wildcards can be used.
+         :param asset_pattern: str, the string pattern to search in the latest release. Wildcards can be used.
          :param exclude_string: str, the string to exclude from the search. No wildcards can be used.
          :param archive_remove_first_directory: bool, sets if archive extract function will extract the archive
              without first directory in the archive. Check reference in the
@@ -362,7 +479,7 @@ class GitHubWrapper:
          headers: dict = self._get_headers()
 
          # Get the latest release url.
-         found_url = self.get_latest_release_url(string_pattern=string_pattern, exclude_string=exclude_string, **kwargs)
+         found_url = self.get_latest_release_url(asset_pattern=asset_pattern, exclude_string=exclude_string, **kwargs)
 
          web.download_and_extract_file(
              file_url=found_url,
@@ -371,24 +488,6 @@ class GitHubWrapper:
              headers=headers,
              **kwargs)
 
-     def get_the_latest_release_json(self):
-         """
-         This function will get the latest releases json.
-         :return:
-         """
-
-         headers: dict = self._get_headers()
-
-         response = requests.get(self.latest_release_json_url, headers=headers)
-         return response.json()
-
-     def get_the_latest_release_version_number(self):
-         """
-         This function will get the latest release version number.
-         :return:
-         """
-         return self.get_the_latest_release_json()['tag_name']
-
      def get_latest_commit(self) -> dict:
          """
          This function retrieves the latest commit on the specified branch.
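
A usage sketch of the reworked release API, following the style of the class docstring above (user/repo names are placeholders, and the import path is an assumption based on this wheel's layout):

    from atomicshop.wrappers.githubw import GitHubWrapper

    git_wrapper = GitHubWrapper(user_name='user_name', repo_name='repo_name')

    # Latest release that ships an asset matching the wildcard pattern.
    release: dict = git_wrapper.get_latest_release_json(asset_pattern='test_*.zip')

    # Its tag name, via the renamed helper (replaces get_the_latest_release_version_number).
    version: str = git_wrapper.get_latest_release_version(asset_pattern='test_*.zip')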
atomicshop/wrappers/pycharmw/ubuntu.py CHANGED
@@ -11,11 +11,9 @@ def parse_args():
      :return: Parsed arguments.
      """
      parser = argparse.ArgumentParser(description='Install PyCharm Community Edition.')
-     parser.add_argument('-ic', '--install_community', action='store_true', required=True,
-                         help='Install PyCharm Community Edition with snapd.')
-     parser.add_argument('--enable_sudo_execution', action='store_true',
-                         help='There is a problem when trying to run snapd installed Pycharm as sudo, need to enable '
-                              'this.')
+     parser.add_argument(
+         '--enable_sudo_execution', action='store_true',
+         help='There is a problem when trying to run snapd installed Pycharm as sudo, need to enable this.')
 
      return parser.parse_args()
 
@@ -30,10 +28,9 @@ def install_main():
 
      args = parse_args()
 
-     if args.install_community:
-         process.execute_script('sudo snap install pycharm-community --classic', shell=True)
+     process.execute_script('sudo snap install pycharm-professional --classic', shell=True)
 
      if args.enable_sudo_execution:
          process.execute_script('xhost +SI:localuser:root', shell=True)
-         print_api('Run the following command to start PyCharm as root: [sudo snap run pycharm-community]', color='blue')
+         print_api('Run the following command to start PyCharm as root: [sudo snap run pycharm-professional]', color='blue')
      return 0
atomicshop/wrappers/socketw/sni.py CHANGED
@@ -260,21 +260,22 @@ class SNIHandler:
          # If DNS server is enabled we'll get the domain from dns server.
          if self.domain_from_dns_server:
              self.sni_received_parameters.destination_name = self.domain_from_dns_server
-             message = (
-                 f"SNI Passed: False\n"
-                 f"SNI Handler: No SNI was passed, using domain from DNS Server: {self.domain_from_dns_server}")
+             print_api("SNI Passed: False", color="yellow", **(print_kwargs or {}))
+
+             message = f"SNI Handler: No SNI was passed, using domain from DNS Server: {self.domain_from_dns_server}"
              print_api(message, color="yellow", **(print_kwargs or {}))
          # If DNS server is disabled, the domain from dns server will be empty.
          else:
+             print_api("SNI Passed: False", color="yellow", **(print_kwargs or {}))
+
              message = (
-                 f"SNI Passed: False\n"
                  f"SNI Handler: No SNI was passed, No domain passed from DNS Server. Service name will be 'None'.")
              print_api(message, color="yellow", **(print_kwargs or {}))
 
          # Setting "server_hostname" as a domain.
          self.sni_received_parameters.ssl_socket.server_hostname = self.sni_received_parameters.destination_name
+         print_api("SNI Passed: True", **(print_kwargs or {}))
          message = (
-             f"SNI Passed: True\n"
              f"SNI Handler: port {self.sni_received_parameters.ssl_socket.getsockname()[1]}: "
              f"Incoming connection for [{self.sni_received_parameters.ssl_socket.server_hostname}]")
          print_api(message, **(print_kwargs or {}))
atomicshop-3.3.4.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: atomicshop
- Version: 3.3.2
+ Version: 3.3.4
  Summary: Atomic functions and classes to make developer life easier
  Author: Denis Kras
  License-Expression: MIT
atomicshop-3.3.4.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
- atomicshop/__init__.py,sha256=8Ad5ESRJJbXiqt8GJU6Ro7MSdYdMqA0sQPjFqY8RsUU,122
+ atomicshop/__init__.py,sha256=-H5JgHG0iCCH8pC2RNRvzeaxBh2DD3LjRloFS8OdaVQ,122
  atomicshop/_basics_temp.py,sha256=6cu2dd6r2dLrd1BRNcVDKTHlsHs_26Gpw8QS6v32lQ0,3699
  atomicshop/_create_pdf_demo.py,sha256=Yi-PGZuMg0RKvQmLqVeLIZYadqEZwUm-4A9JxBl_vYA,3713
  atomicshop/_patch_import.py,sha256=ENp55sKVJ0e6-4lBvZnpz9PQCt3Otbur7F6aXDlyje4,6334
@@ -23,7 +23,7 @@ atomicshop/http_parse.py,sha256=1Tna9YbOM0rE3t6i_M-klBlwd1KNSA9skA_BqKGXDFc,1186
  atomicshop/inspect_wrapper.py,sha256=sGRVQhrJovNygHTydqJj0hxES-aB2Eg9KbIk3G31apw,11429
  atomicshop/ip_addresses.py,sha256=penRFeJ1-LDVTko4Q0EwK4JiN5cU-KzCBR2VXg9qbUY,1238
  atomicshop/keyboard_press.py,sha256=1W5kRtOB75fulVx-uF2yarBhW0_IzdI1k73AnvXstk0,452
- atomicshop/networks.py,sha256=xOU_lDf6Rct178W7EB80-AFMgu9Nnh6l7GgjA9z9Jtg,19010
+ atomicshop/networks.py,sha256=dTh6T9vAnjYOEPdfpL6W6wN091HwRbVLeNscXWZX_DI,22151
  atomicshop/on_exit.py,sha256=9XlOnzoAG8zlI8wBF4AB8hyrC6Q1b84gkhqpAhhdN9g,6977
  atomicshop/pbtkmultifile_argparse.py,sha256=aEk8nhvoQVu-xyfZosK3ma17CwIgOjzO1erXXdjwtS4,4574
  atomicshop/print_api.py,sha256=SJNQIMqSLlYaPtjHnALySAI-jQYuYHOCGgfP7oe96fU,10957
@@ -140,7 +140,7 @@ atomicshop/mitm/connection_thread_worker.py,sha256=50RP7De2t0WlUk4Ywmv6B63Gwvtvo
  atomicshop/mitm/import_config.py,sha256=J3ZLF28AsKu1h76iRV_sCM52g0oh4dwDGddZl_XcJsU,18351
  atomicshop/mitm/initialize_engines.py,sha256=qzz5jzh_lKC03bI1w5ebngVXo1K-RV3poAyW-nObyqo,11042
  atomicshop/mitm/message.py,sha256=CDhhm4BTuZE7oNZCjvIZ4BuPOW4MuIzQLOg91hJaxDI,3065
- atomicshop/mitm/mitm_main.py,sha256=lHk66kai66W44y4Zu7z4eWRA2sZOwvuGHRzNjPHy31k,38291
+ atomicshop/mitm/mitm_main.py,sha256=ucmdCr2p1F4c18DE8qJG3uG3FkkVLWdMEIWevN5BNKE,38967
  atomicshop/mitm/recs_files.py,sha256=tv8XFhYZMkBv4DauvpiAdPgvSo0Bcm1CghnmwO7dx8M,5018
  atomicshop/mitm/shared_functions.py,sha256=0lzeyINd44sVEfFbahJxQmz6KAMWbYrW5ou3UYfItvw,1777
  atomicshop/mitm/statistic_analyzer.py,sha256=5_sAYGX2Xunzo_pS2W5WijNCwr_BlGJbbOO462y_wN4,27533
@@ -197,7 +197,7 @@ atomicshop/wrappers/astw.py,sha256=VkYfkfyc_PJLIOxByT6L7B8uUmKY6-I8XGZl4t_z828,4
  atomicshop/wrappers/configparserw.py,sha256=JwDTPjZoSrv44YKwIRcjyUnpN-FjgXVfMqMK_tJuSgU,22800
  atomicshop/wrappers/cryptographyw.py,sha256=QEUpDn8vUvMg3ADz6-4oC2kbDNC_woDlw7C0zU7qFVM,14233
  atomicshop/wrappers/ffmpegw.py,sha256=wcq0ZnAe0yajBOuTKZCCaKI7CDBjkq7FAgdW5IsKcVE,6031
- atomicshop/wrappers/githubw.py,sha256=DrFF_oN-rulPQV1iKgVzZadCjuYuCC5eKAjZp_3YD0g,23476
+ atomicshop/wrappers/githubw.py,sha256=bds_8fgyFyHXKwty6-SBS3H3Ueta2IMM5UQFpiFmgHQ,27554
  atomicshop/wrappers/msiw.py,sha256=GQLqud72nfex3kvO1bJSruNriCYTYX1_G1gSf1MPkIA,6118
  atomicshop/wrappers/netshw.py,sha256=8WE_576XiiHykwFuE-VkCx5CydMpFlztX4frlEteCtI,6350
  atomicshop/wrappers/numpyw.py,sha256=sBV4gSKyr23kXTalqAb1oqttzE_2XxBooCui66jbAqc,1025
@@ -225,7 +225,7 @@ atomicshop/wrappers/ctyping/msi_windows_installer/extract_msi_main.py,sha256=AEk
  atomicshop/wrappers/ctyping/msi_windows_installer/tables.py,sha256=tHsu0YfBgzuIk9L-PyqLgU_IzyVbCfy8L1EqelNnvWk,17674
  atomicshop/wrappers/dockerw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  atomicshop/wrappers/dockerw/dockerw.py,sha256=GgPSvXxJj15kZ-LPiaHLl8aekof53sSP_U-vUMUe7_8,10639
- atomicshop/wrappers/dockerw/install_docker.py,sha256=CeDlxWuOn_bRUhHEnvpgVGGYZgP7B-Q9qNMkDfFiV2E,10073
+ atomicshop/wrappers/dockerw/install_docker.py,sha256=9fjbx3GtpnNA4d4YU2ziPynqANXxo-x-Sq90SUSQEPg,18448
  atomicshop/wrappers/elasticsearchw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  atomicshop/wrappers/elasticsearchw/config_basic.py,sha256=fDujtrjEjbWiYh_WQ3OcYp_8mXhXPYeKLy4wSPL5qM0,1177
  atomicshop/wrappers/elasticsearchw/elastic_infra.py,sha256=at0sD-SFtmEvfGyIU_YBEKoU-MNeVtDQSNscPm0JWLc,10368
@@ -300,7 +300,7 @@ atomicshop/wrappers/psutilw/psutil_networks.py,sha256=79FplDAj45ofBCudlft77O3lfi
  atomicshop/wrappers/psutilw/psutilw.py,sha256=q3EwgprqyrR4zLCjl4l5DHFOQoukEvQMIPjNB504oQ0,21262
  atomicshop/wrappers/psycopgw/psycopgw.py,sha256=XJvVf0oAUjCHkrYfKeFuGCpfn0Oxj3u4SbKMKA1508E,7118
  atomicshop/wrappers/pycharmw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- atomicshop/wrappers/pycharmw/ubuntu.py,sha256=m9MpgqvIYygulhPxo9g2zlGGXrihBpiY3GNLNyT-B7U,1290
+ atomicshop/wrappers/pycharmw/ubuntu.py,sha256=vOvGWvTbzvTja9tRrJW2yJb0_r1EV11RrENGHckzvVE,1051
  atomicshop/wrappers/pycharmw/win.py,sha256=hNP-d95z1zhcCpYqhHE5HZVYxaAlt8JJCNXh65jZsHc,2757
  atomicshop/wrappers/pywin32w/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  atomicshop/wrappers/pywin32w/cert_store.py,sha256=dV1XyoKTFKZ-HCIVqU2Nd6CTZ8HANqjAXv26rsNzO6s,4365
@@ -328,7 +328,7 @@ atomicshop/wrappers/socketw/exception_wrapper.py,sha256=qW_1CKyPgGlsIt7_jusKkMV4
  atomicshop/wrappers/socketw/get_process.py,sha256=aJC-_qFUv3NgWCSUzDI72E4z8_-VTZE9NVZ0CwUoNlM,5698
  atomicshop/wrappers/socketw/receiver.py,sha256=9B3MvcDqr4C3x2fsnjG5SQognd1wRqsBgikxZa0wXG8,8243
  atomicshop/wrappers/socketw/sender.py,sha256=aX_K8l_rHjd5AWb8bi5mt8-YTkMYVRDB6DnPqK_XDUE,4754
- atomicshop/wrappers/socketw/sni.py,sha256=PT50BFOHN5X0ZIyQRHA_Rl4z_lUbkv0em7Chl0viUlw,17819
+ atomicshop/wrappers/socketw/sni.py,sha256=YlKavbExcPFfHFLYAJ3i3W6QorY7o4mbQp39g-DnDKA,17911
  atomicshop/wrappers/socketw/socket_client.py,sha256=McBd3DeCy787oDGCEMUEP2awWy3vdkPqr9w-aFh2fBM,22502
  atomicshop/wrappers/socketw/socket_server_tester.py,sha256=Qobmh4XV8ZxLUaw-eW4ESKAbeSLecCKn2OWFzMhadk0,6420
  atomicshop/wrappers/socketw/socket_wrapper.py,sha256=u_v0pjMMrgsdtI0iPPddiLe2wXnBoqTgNM9Y3zjGD4U,41013
@@ -337,9 +337,9 @@ atomicshop/wrappers/socketw/statistics_csv.py,sha256=_gA8bMX6Sw_UCXKi2y9wNAwlqif
  atomicshop/wrappers/winregw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  atomicshop/wrappers/winregw/winreg_installed_software.py,sha256=Qzmyktvob1qp6Tjk2DjLfAqr_yXV0sgWzdMW_9kwNjY,2345
  atomicshop/wrappers/winregw/winreg_network.py,sha256=ih0BVNwByLvf9F_Lac4EdmDYYJA3PzMvmG0PieDZrsE,9905
- atomicshop-3.3.2.dist-info/licenses/LICENSE.txt,sha256=lLU7EYycfYcK2NR_1gfnhnRC8b8ccOTElACYplgZN88,1094
- atomicshop-3.3.2.dist-info/METADATA,sha256=0dnZPwzfNiqMA9qqJbgCwPT4OgNUkV1HWL9ZZhZ5YXY,9288
- atomicshop-3.3.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- atomicshop-3.3.2.dist-info/entry_points.txt,sha256=SJEgEP0KoFtfxuGwe5tOzKfXkjR9Dv6YYug33KNYxyY,69
- atomicshop-3.3.2.dist-info/top_level.txt,sha256=EgKJB-7xcrAPeqTRF2laD_Np2gNGYkJkd4OyXqpJphA,11
- atomicshop-3.3.2.dist-info/RECORD,,
+ atomicshop-3.3.4.dist-info/licenses/LICENSE.txt,sha256=lLU7EYycfYcK2NR_1gfnhnRC8b8ccOTElACYplgZN88,1094
+ atomicshop-3.3.4.dist-info/METADATA,sha256=AA-i4jzN5U9DaSNf-Hv_TuHfoMUeOR9-53KcyzR2xik,9288
+ atomicshop-3.3.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ atomicshop-3.3.4.dist-info/entry_points.txt,sha256=SJEgEP0KoFtfxuGwe5tOzKfXkjR9Dv6YYug33KNYxyY,69
+ atomicshop-3.3.4.dist-info/top_level.txt,sha256=EgKJB-7xcrAPeqTRF2laD_Np2gNGYkJkd4OyXqpJphA,11
+ atomicshop-3.3.4.dist-info/RECORD,,