atomicshop 3.3.8-py3-none-any.whl → 3.10.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of atomicshop might be problematic.

Files changed (120)
  1. atomicshop/__init__.py +1 -1
  2. atomicshop/a_mains/get_local_tcp_ports.py +85 -0
  3. atomicshop/a_mains/install_ca_certificate.py +172 -0
  4. atomicshop/a_mains/process_from_port.py +119 -0
  5. atomicshop/a_mains/set_default_dns_gateway.py +90 -0
  6. atomicshop/basics/strings.py +1 -1
  7. atomicshop/certificates.py +2 -2
  8. atomicshop/dns.py +26 -28
  9. atomicshop/etws/traces/trace_tcp.py +1 -2
  10. atomicshop/mitm/centered_settings.py +133 -0
  11. atomicshop/mitm/config_static.py +22 -44
  12. atomicshop/mitm/connection_thread_worker.py +383 -165
  13. atomicshop/mitm/engines/__parent/recorder___parent.py +1 -1
  14. atomicshop/mitm/engines/__parent/requester___parent.py +1 -1
  15. atomicshop/mitm/engines/__parent/responder___parent.py +15 -2
  16. atomicshop/mitm/engines/create_module_template.py +1 -2
  17. atomicshop/mitm/import_config.py +91 -89
  18. atomicshop/mitm/initialize_engines.py +1 -2
  19. atomicshop/mitm/message.py +5 -4
  20. atomicshop/mitm/mitm_main.py +238 -122
  21. atomicshop/mitm/recs_files.py +61 -5
  22. atomicshop/mitm/ssh_tester.py +82 -0
  23. atomicshop/mitm/statistic_analyzer.py +33 -12
  24. atomicshop/mitm/statistic_analyzer_helper/moving_average_helper.py +104 -31
  25. atomicshop/networks.py +160 -92
  26. atomicshop/package_mains_processor.py +84 -0
  27. atomicshop/permissions/ubuntu_permissions.py +47 -0
  28. atomicshop/print_api.py +3 -5
  29. atomicshop/process.py +11 -4
  30. atomicshop/python_functions.py +23 -108
  31. atomicshop/speech_recognize.py +8 -0
  32. atomicshop/ssh_remote.py +140 -164
  33. atomicshop/web.py +63 -22
  34. atomicshop/web_apis/google_llm.py +22 -14
  35. atomicshop/wrappers/ctyping/msi_windows_installer/cabs.py +2 -1
  36. atomicshop/wrappers/ctyping/msi_windows_installer/extract_msi_main.py +2 -1
  37. atomicshop/wrappers/dockerw/dockerw.py +2 -2
  38. atomicshop/wrappers/elasticsearchw/config_basic.py +0 -12
  39. atomicshop/wrappers/elasticsearchw/elastic_infra.py +0 -190
  40. atomicshop/wrappers/factw/install/pre_install_and_install_before_restart.py +5 -5
  41. atomicshop/wrappers/githubw.py +180 -68
  42. atomicshop/wrappers/loggingw/consts.py +1 -1
  43. atomicshop/wrappers/loggingw/handlers.py +1 -1
  44. atomicshop/wrappers/loggingw/loggingw.py +20 -4
  45. atomicshop/wrappers/loggingw/reading.py +18 -0
  46. atomicshop/wrappers/mongodbw/mongo_infra.py +0 -38
  47. atomicshop/wrappers/netshw.py +124 -3
  48. atomicshop/wrappers/playwrightw/scenarios.py +1 -1
  49. atomicshop/wrappers/powershell_networking.py +80 -0
  50. atomicshop/wrappers/psutilw/psutil_networks.py +9 -0
  51. atomicshop/wrappers/pywin32w/win_event_log/fetch.py +174 -0
  52. atomicshop/wrappers/pywin32w/win_event_log/subscribes/process_create.py +3 -105
  53. atomicshop/wrappers/pywin32w/win_event_log/subscribes/process_terminate.py +3 -57
  54. atomicshop/wrappers/pywin32w/wmis/win32_networkadapterconfiguration.py +12 -27
  55. atomicshop/wrappers/pywin32w/wmis/win32networkadapter.py +15 -9
  56. atomicshop/wrappers/socketw/certificator.py +19 -9
  57. atomicshop/wrappers/socketw/creator.py +101 -14
  58. atomicshop/wrappers/socketw/dns_server.py +17 -5
  59. atomicshop/wrappers/socketw/exception_wrapper.py +21 -16
  60. atomicshop/wrappers/socketw/process_getter.py +86 -0
  61. atomicshop/wrappers/socketw/receiver.py +29 -9
  62. atomicshop/wrappers/socketw/sender.py +10 -9
  63. atomicshop/wrappers/socketw/sni.py +31 -10
  64. atomicshop/wrappers/socketw/{base.py → socket_base.py} +33 -1
  65. atomicshop/wrappers/socketw/socket_client.py +11 -10
  66. atomicshop/wrappers/socketw/socket_wrapper.py +125 -32
  67. atomicshop/wrappers/socketw/ssl_base.py +6 -2
  68. atomicshop/wrappers/ubuntu_terminal.py +21 -18
  69. atomicshop/wrappers/win_auditw.py +189 -0
  70. {atomicshop-3.3.8.dist-info → atomicshop-3.10.0.dist-info}/METADATA +25 -30
  71. {atomicshop-3.3.8.dist-info → atomicshop-3.10.0.dist-info}/RECORD +83 -109
  72. atomicshop/_basics_temp.py +0 -101
  73. atomicshop/a_installs/ubuntu/docker_rootless.py +0 -11
  74. atomicshop/a_installs/ubuntu/docker_sudo.py +0 -11
  75. atomicshop/a_installs/ubuntu/elastic_search_and_kibana.py +0 -10
  76. atomicshop/a_installs/ubuntu/mongodb.py +0 -12
  77. atomicshop/a_installs/win/fibratus.py +0 -9
  78. atomicshop/a_installs/win/mongodb.py +0 -9
  79. atomicshop/a_installs/win/wsl_ubuntu_lts.py +0 -10
  80. atomicshop/addons/a_setup_scripts/install_psycopg2_ubuntu.sh +0 -3
  81. atomicshop/addons/package_setup/CreateWheel.cmd +0 -7
  82. atomicshop/addons/package_setup/Setup in Edit mode.cmd +0 -6
  83. atomicshop/addons/package_setup/Setup.cmd +0 -7
  84. atomicshop/archiver/__init__.py +0 -0
  85. atomicshop/archiver/_search_in_zip.py +0 -189
  86. atomicshop/archiver/search_in_archive.py +0 -284
  87. atomicshop/archiver/sevenz_app_w.py +0 -86
  88. atomicshop/archiver/sevenzs.py +0 -73
  89. atomicshop/archiver/shutils.py +0 -34
  90. atomicshop/archiver/zips.py +0 -353
  91. atomicshop/file_types.py +0 -24
  92. atomicshop/pbtkmultifile_argparse.py +0 -88
  93. atomicshop/script_as_string_processor.py +0 -42
  94. atomicshop/ssh_scripts/process_from_ipv4.py +0 -37
  95. atomicshop/ssh_scripts/process_from_port.py +0 -27
  96. atomicshop/wrappers/_process_wrapper_curl.py +0 -27
  97. atomicshop/wrappers/_process_wrapper_tar.py +0 -21
  98. atomicshop/wrappers/dockerw/install_docker.py +0 -449
  99. atomicshop/wrappers/elasticsearchw/install_elastic.py +0 -233
  100. atomicshop/wrappers/ffmpegw.py +0 -125
  101. atomicshop/wrappers/fibratusw/__init__.py +0 -0
  102. atomicshop/wrappers/fibratusw/install.py +0 -80
  103. atomicshop/wrappers/mongodbw/install_mongodb_ubuntu.py +0 -100
  104. atomicshop/wrappers/mongodbw/install_mongodb_win.py +0 -244
  105. atomicshop/wrappers/process_wrapper_pbtk.py +0 -16
  106. atomicshop/wrappers/socketw/get_process.py +0 -123
  107. atomicshop/wrappers/wslw.py +0 -192
  108. atomicshop-3.3.8.dist-info/entry_points.txt +0 -2
  109. /atomicshop/{addons → a_mains/addons}/PlayWrightCodegen.cmd +0 -0
  110. /atomicshop/{addons → a_mains/addons}/ScriptExecution.cmd +0 -0
  111. /atomicshop/{addons → a_mains/addons}/inits/init_to_import_all_modules.py +0 -0
  112. /atomicshop/{addons → a_mains/addons}/process_list/ReadMe.txt +0 -0
  113. /atomicshop/{addons → a_mains/addons}/process_list/compile.cmd +0 -0
  114. /atomicshop/{addons → a_mains/addons}/process_list/compiled/Win10x64/process_list.dll +0 -0
  115. /atomicshop/{addons → a_mains/addons}/process_list/compiled/Win10x64/process_list.exp +0 -0
  116. /atomicshop/{addons → a_mains/addons}/process_list/compiled/Win10x64/process_list.lib +0 -0
  117. /atomicshop/{addons → a_mains/addons}/process_list/process_list.cpp +0 -0
  118. {atomicshop-3.3.8.dist-info → atomicshop-3.10.0.dist-info}/WHEEL +0 -0
  119. {atomicshop-3.3.8.dist-info → atomicshop-3.10.0.dist-info}/licenses/LICENSE.txt +0 -0
  120. {atomicshop-3.3.8.dist-info → atomicshop-3.10.0.dist-info}/top_level.txt +0 -0
atomicshop/web.py CHANGED
@@ -1,10 +1,14 @@
 import os
 import urllib.request
+import urllib.error
 import ssl
+from typing import Any
+import http.client
+
 # noinspection PyPackageRequirements
 import certifi
+from dkarchiver.arch_wrappers import zips
 
-from .archiver import zips
 from .urls import url_parser
 from .file_io import file_io
 from .wrappers.playwrightw import scenarios
@@ -14,12 +18,12 @@ from . import filesystem, print_api
 # https://www.useragents.me/
 # https://user-agents.net/
 USER_AGENTS = {
-    'Windows_Chrome_Latest':
-        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36',
-    'Chrome_111.0.0_Windows_10-11_x64':
+    'Chrome 111.0.0 Windows_10/11 x64':
         'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36',
-    'Chrome 132.0.0, Windows 10/11':
-        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36'
+    'Chrome 132.0.0 Windows 10/11 x64':
+        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36',
+    'Chrome 142.0.0 Windows 10/11 x64':
+        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/142.0.0.0 Safari/537.36'
 }
 
 
@@ -73,7 +77,7 @@ def get_page_bytes(
         raise ValueError('ERROR: [user_agent] specified and [chrome_user_agent] usage is [True]. Choose one.')
 
     if chrome_user_agent:
-        user_agent = USER_AGENTS['Chrome_111.0.0_Windows_10-11_x64']
+        user_agent = USER_AGENTS['Chrome 142.0.0 Windows 10/11 x64']
 
     if user_agent:
         # Create a 'Request' object with the URL and user agent.
@@ -99,7 +103,7 @@ def get_page_content(
         playwright_pdf_format: str = 'A4',
         playwright_html_txt_convert_to_bytes: bool = True,
         print_kwargs: dict = None
-) -> any:
+) -> Any:
     """
     Function returns the page content from the given URL.
 
@@ -157,18 +161,25 @@ def download(
         target_directory: str = None,
         file_name: str = None,
         headers: dict = None,
-        use_certifi_ca_repository: bool = False,
+        overwrite: bool = False,
+        # use_certifi_ca_repository: bool = False,
         **kwargs
-) -> str:
+) -> str | None:
     """
     The function receives url and target filesystem directory to download the file.
 
+    Note: Install 'pip-system-certs' package if you want to use system's CA store for SSL context
+        in an environment where 'certifi' package is installed.
+
     :param file_url: full URL to download the file.
     :param target_directory: The directory on the filesystem to save the file to.
         If not specified, temporary directory will be used.
     :param file_name: string, file name (example: file.zip) that you want the downloaded file to be saved as.
         If not specified, the default filename from 'file_url' will be used.
     :param headers: dictionary, HTTP headers to use when downloading the file.
+    :param overwrite: boolean, if True, the file will be overwritten if it already exists.
+        If False, the file will not be overwritten and the function will return None if the file already exists.
+        Default is False.
     :param use_certifi_ca_repository: boolean, if True, the certifi CA store will be used for SSL context
         instead of the system's default CA store.
     :return: string, full file path of downloaded file. If download failed, 'None' will be returned.
@@ -196,29 +207,59 @@ def download(
     # Build full path to file.
     file_path: str = f'{target_directory}{os.sep}{file_name}'
 
+    if os.path.exists(file_path):
+        if overwrite:
+            print_api.print_api(f'File already exists: {file_path}. Overwriting...', **kwargs)
+        else:
+            print_api.print_api(f'File already exists: {file_path}. Skipping download.', **kwargs)
+            return file_path
+
     print_api.print_api(f'Downloading: {file_url}', **kwargs)
     print_api.print_api(f'To: {file_path}', **kwargs)
 
     # Open the URL for data gathering with SSL context.
-    if not use_certifi_ca_repository:
-        # Create a default SSL context using the system's CA store.
-        ssl_context = ssl.create_default_context()
-    else:
-        # Create a default SSL context using the certifi CA store.
-        # This is useful for environments where the system's CA store is not available or not trusted.
-        # 'certifi.where()' returns the path to the certifi CA bundle.
-        ssl_context = ssl.create_default_context(cafile=certifi.where())
+    # if not use_certifi_ca_repository:
+    #     # Create a default SSL context using the system's CA store.
+    #     ssl_context = ssl.create_default_context()
+    # else:
+
+    # Create a default SSL context using the certifi CA store.
+    # This is useful for environments where the system's CA store is not available or not trusted.
+    # 'certifi.where()' returns the path to the certifi CA bundle.
+    ssl_context: ssl.SSLContext = ssl.create_default_context(cafile=certifi.where())
 
     # In order to use 'urllib.request', it is not enough to 'import urllib', you need to 'import urllib.request'.
     # Build a Request object with headers if provided.
     req = urllib.request.Request(file_url, headers=headers or {})
-    file_to_download = urllib.request.urlopen(req, context=ssl_context)
+
+    def do_urlopen(ssl_context_internal: ssl.SSLContext) -> http.client.HTTPResponse | None:
+        try:
+            response: http.client.HTTPResponse = urllib.request.urlopen(req, context=ssl_context_internal)
+            return response
+        except urllib.error.URLError as e:
+            if getattr(e, 'reason', None) and isinstance(e.reason, ssl.SSLCertVerificationError):
+                if getattr(e.reason, 'reason', None) and e.reason.reason == 'CERTIFICATE_VERIFY_FAILED':
+                    if getattr(e.reason, 'verify_message', None) and e.reason.verify_message == 'unable to get local issuer certificate':
+                        return None
+
+            raise e
+
+    # Try to open the URL with the created SSL context with certifi.
+    file_to_download = do_urlopen(ssl_context_internal=ssl_context)
+    if not file_to_download:
+        # If failed, try to open the URL with the system's default SSL context.
+        ssl_context = ssl.create_default_context()
+        file_to_download = do_urlopen(ssl_context_internal=ssl_context)
+        if not file_to_download:
+            print_api.print_api(
+                'ERROR: URL open failed with both certifi and system\'s default SSL context.', error_type=True, **kwargs)
+            return None
 
     # Check status of url.
     if not is_status_ok(status_code=file_to_download.status, **kwargs):
         return None
 
-    file_size_bytes_int: int = None
+    file_size_bytes_int: int | None = None
     # Get file size. For some reason doesn't show for GitHub branch downloads.
     if file_to_download.headers['Content-Length']:
         file_size_bytes_int = int(file_to_download.headers['Content-Length'])
@@ -273,7 +314,7 @@ def download_and_extract_file(
         Default is empty. If it is empty, then the filename will be extracted from 'file_url'.
     :param target_directory: string, target directory where to save the file.
     :param archive_remove_first_directory: boolean, sets if archive extract function will extract the archive without
-        first directory in the archive. Check reference in the 'extract_archive_with_zipfile' function.
+        first directory in the archive. Check reference in the 'dkarchiver.arch_wrappers.zips.extract_archive_with_zipfile' function.
     :param headers: dictionary, HTTP headers to use when downloading the file.
     :return:
     """
@@ -285,7 +326,7 @@ def download_and_extract_file(
     # Extract the archive and remove the first directory.
     zips.extract_archive_with_zipfile(
         archive_path=f'{file_path}', extract_directory=target_directory,
-        remove_first_directory=archive_remove_first_directory, **kwargs)
+        remove_first_directory=archive_remove_first_directory)
 
     # Remove the archive file.
     filesystem.remove_file(file_path=f'{file_path}', **kwargs)
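The download() rework above adds an overwrite flag and retries with the system CA store when the certifi bundle cannot verify the server certificate. A minimal usage sketch of the new behavior, assuming the module is imported as atomicshop.web and using a placeholder URL and directory:

# Hypothetical usage based on the download() signature shown in the diff above.
from atomicshop import web

file_path = web.download(
    file_url='https://example.com/archive.zip',   # placeholder URL
    target_directory='/tmp/downloads',            # placeholder directory
    overwrite=False,                              # keep an existing file and return its path instead of re-downloading
)

# None means the HTTP status check failed, or urlopen failed with both SSL
# contexts (the certifi bundle first, then the system's default CA store).
if file_path is None:
    print('Download failed.')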
atomicshop/web_apis/google_llm.py CHANGED
@@ -1,7 +1,7 @@
-import os
 from typing import Literal
 
-import google.generativeai as genai
+from google import genai
+from google.genai.types import GenerateContentConfig
 
 from . import google_custom_search
 from ..wrappers.playwrightw import scenarios
@@ -11,6 +11,14 @@ from .. import urls
 class GoogleCustomSearchError(Exception):
     pass
 
+class GoogleLLMNoContentError(Exception):
+    pass
+
+
+"""
+Rate Limits and Quotas: https://ai.google.dev/gemini-api/docs/rate-limits
+"""
+
 
 class GoogleLLM:
     def __init__(
@@ -26,13 +34,10 @@ class GoogleLLM:
         :param search_engine_id: str, the search engine ID for the Google Custom Search API.
         """
 
-        self.genai = genai
+        self.client = genai.Client(api_key=llm_api_key)
         self.search_api_key: str = search_api_key
         self.search_engine_id: str = search_engine_id
 
-        os.environ["API_KEY"] = llm_api_key
-        genai.configure(api_key=os.environ["API_KEY"])
-
     def get_current_models(
             self,
             full_info: bool = False
@@ -43,7 +48,7 @@
         :param full_info: bool, if True, returns the full information about the models, otherwise only the names for API usage.
         """
         result_list: list = []
-        for model in self.genai.list_models():
+        for model in self.client.models.list():
             if full_info:
                 result_list.append(model)
             else:
@@ -68,7 +73,7 @@
             temperature: float = 0,
             # max_output_tokens: int = 4096,
             # model_name: str = 'gemini-2.0-flash-thinking-exp-01-21'
-            model_name: str = 'models/gemini-2.5-pro-preview-03-25'
+            model_name: str = 'gemini-2.5-pro'
     ) -> str:
         """
         Function to get the answer to a question by searching Google Custom Console API and processing the content using Gemini API.
@@ -112,6 +117,9 @@
             urls=links[:number_of_top_links], number_of_characters_per_link=number_of_characters_per_link,
             text_fetch_method=text_fetch_method)
 
+        if not contents:
+            raise GoogleLLMNoContentError("No content was fetched from the provided URL(s).")
+
         combined_content = ""
         for content in contents:
             combined_content += f'{content}\n\n\n\n================================================================'
@@ -126,12 +134,12 @@
         gemini_response = self.ask_gemini(final_question, temperature, model_name)
         return gemini_response
 
-    @staticmethod
     def ask_gemini(
+            self,
             question: str,
             temperature: float,
             # max_output_tokens: int,
-            model_name: str = 'gemini-2.0-flash-thinking-exp-01-21'
+            model_name: str = 'gemini-2.5-pro'
     ) -> str:
         """
         Function to ask the Gemini API a question and get the response.
@@ -161,8 +169,8 @@
             # "max_output_tokens": max_output_tokens,
         }
 
-        # model = genai.GenerativeModel('gemini-1.5-pro-latest',
-        # noinspection PyTypeChecker
-        model = genai.GenerativeModel(model_name, generation_config=model_config)
-        response = model.generate_content(question)
+        response = self.client.models.generate_content(
+            model=model_name,
+            contents=question,
+            config=GenerateContentConfig(**model_config))
         return response.text
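The GoogleLLM class now uses the google-genai client (genai.Client) instead of the legacy google.generativeai module. A minimal standalone sketch of the same call pattern, with a placeholder API key and prompt:

# Sketch of the google-genai client pattern adopted above; key and prompt are placeholders.
from google import genai
from google.genai.types import GenerateContentConfig

client = genai.Client(api_key='YOUR_API_KEY')

# Rough equivalent of GoogleLLM.get_current_models(full_info=True): list available models.
models = list(client.models.list())

# Rough equivalent of GoogleLLM.ask_gemini(): one generate_content call with a config object.
response = client.models.generate_content(
    model='gemini-2.5-pro',
    contents='Summarize the role of the User-Agent header.',
    config=GenerateContentConfig(temperature=0),
)
print(response.text)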
atomicshop/wrappers/ctyping/msi_windows_installer/cabs.py CHANGED
@@ -1,8 +1,9 @@
 import os
 from pathlib import Path
 
+from dkarchiver.arch_wrappers import sevenz_app_w
+
 from . import tables
-from ....archiver import sevenz_app_w
 
 
 def resolve_directory_path(directory_info, directory_key):
atomicshop/wrappers/ctyping/msi_windows_installer/extract_msi_main.py CHANGED
@@ -1,11 +1,12 @@
 import os
 import argparse
 
+from dkarchiver.arch_wrappers import sevenz_app_w
+
 from .base import msi
 from . import base, tables, cabs
 from ... import olefilew
 from ....print_api import print_api
-from ....archiver import sevenz_app_w
 
 
 # Directory names.
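Both MSI helpers above, like web.py earlier, now take their archive utilities from the external dkarchiver package rather than the removed atomicshop.archiver subpackage. A short sketch of the relocated zip helper, using only the keyword arguments that appear in the web.py hunk above (the exact signature is an assumption):

# Hypothetical standalone use of the relocated archive helper.
from dkarchiver.arch_wrappers import zips

zips.extract_archive_with_zipfile(
    archive_path='/tmp/downloads/archive.zip',   # placeholder path
    extract_directory='/tmp/extracted',          # placeholder path
    remove_first_directory=True)                 # drop the top-level folder inside the archive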
atomicshop/wrappers/dockerw/dockerw.py CHANGED
@@ -68,8 +68,8 @@ def change_image_content(
         dockerw.change_image_content(
             image_id_or_name="your_docker_image_id_or_name",
             list_of_commands=[
-                "apt-get update",
-                "apt-get install -y python3"
+                "apt update",
+                "apt install -y python3"
             ]
         )
     ----------------------
atomicshop/wrappers/elasticsearchw/config_basic.py CHANGED
@@ -7,17 +7,5 @@ DEFAULT_KIBANA_PORT: str = '5601'
 DEFAULT_KIBANA_HOST: str = 'localhost'
 DEFAULT_KIBANA_URL: str = f"http://{DEFAULT_KIBANA_HOST}:{DEFAULT_KIBANA_PORT}"
 
-ELASTIC_SEARCH_CONFIG_DIRECTORY: str = "/etc/elasticsearch"
-
-ELASTIC_CONFIG_FILE: str = f"{ELASTIC_SEARCH_CONFIG_DIRECTORY}/elasticsearch.yml"
-XPACK_SECURITY_SETTING_NAME: str = "xpack.security.enabled"
-
-ELASTIC_JVM_OPTIONS_DIRECTORY: str = f"{ELASTIC_SEARCH_CONFIG_DIRECTORY}/jvm.options.d"
-ELASTIC_JVM_OPTIONS_4GB_CUSTOM_FILE: str = f"{ELASTIC_JVM_OPTIONS_DIRECTORY}/4gb_memory_heap.options"
-ELASTIC_JVM_OPTIONS_4GB_MEMORY_USAGE: list[str] = ['-Xms4g', '-Xmx4g']
-
-UBUNTU_DEPENDENCY_PACKAGES: list[str] = ['apt-transport-https', 'openjdk-11-jdk', 'wget']
-UBUNTU_ELASTIC_PACKAGE_NAME: str = 'elasticsearch'
 UBUNTU_ELASTIC_SERVICE_NAME: str = 'elasticsearch'
-UBUNTU_KIBANA_PACKAGE_NAME: str = 'kibana'
 UBUNTU_KIBANA_SERVICE_NAME: str = 'kibana'
atomicshop/wrappers/elasticsearchw/elastic_infra.py CHANGED
@@ -32,174 +32,6 @@ def start_kibana_service():
     ubuntu_terminal.start_service(config_basic.UBUNTU_KIBANA_SERVICE_NAME, sudo=True)
 
 
-def start_elastic_and_check_service_availability(wait_time_seconds: float = 30, exit_on_error: bool = True):
-    """
-    Function starts the Elasticsearch service and checks its availability.
-    :param wait_time_seconds: float, the time to wait after starting the service before checking
-        the service availability.
-    :param exit_on_error: bool, if True, the function will exit the program if the service is not available.
-    :return:
-    """
-
-    # Start, enable and check the Elasticsearch service.
-    ubuntu_terminal.start_enable_service_check_availability(
-        service_name=config_basic.UBUNTU_ELASTIC_SERVICE_NAME,
-        wait_time_seconds=wait_time_seconds,
-        exit_on_error=exit_on_error
-    )
-
-    # Check if Elasticsearch is running.
-    if not is_server_available():
-        if exit_on_error:
-            sys.exit(1)
-
-
-def start_kibana_and_check_service_availability(wait_time_seconds: float = 30, exit_on_error: bool = True):
-    """
-    Function starts the Kibana service and checks its availability.
-    :param wait_time_seconds: float, the time to wait after starting the service before checking
-        the service availability.
-    :param exit_on_error: bool, if True, the function will exit the program if the service is not available.
-    :return:
-    """
-
-    # Start, enable and check the Elasticsearch service.
-    ubuntu_terminal.start_enable_service_check_availability(
-        service_name=config_basic.UBUNTU_KIBANA_SERVICE_NAME,
-        wait_time_seconds=wait_time_seconds,
-        exit_on_error=exit_on_error
-    )
-
-
-def is_elastic_config_file_exists(
-        config_file_path: str = None,
-        exit_on_error: bool = False,
-        output_message: bool = False
-) -> bool:
-    """
-    The function checks if the Elasticsearch configuration file exists.
-
-    :param config_file_path: str, the path to the configuration file.
-    :param exit_on_error: bool, if True, the function will exit the program if the file does not exist.
-    :param output_message: bool, if True, the function will print a message if the file does not exist.
-    :return:
-    """
-
-    if not config_file_path:
-        config_file_path = config_basic.ELASTIC_CONFIG_FILE
-
-    # if not ubuntu_terminal.is_sudo_file_exists(config_file_path):
-    if not filesystem.is_file_exists(config_file_path):
-        if output_message:
-            message = f"Configuration file does not exist at {config_file_path}."
-            print_api(message, color='red', error_type=True)
-        if exit_on_error:
-            sys.exit(1)
-        return False
-    else:
-        return True
-
-
-def check_xpack_security_setting(config_file_path: str = None):
-    """
-    The function checks if the 'xpack.security.enabled' setting is set to 'false' in the Elasticsearch
-    configuration file.
-
-    :param config_file_path:
-    :return:
-    """
-
-    if not config_file_path:
-        config_file_path = config_basic.ELASTIC_CONFIG_FILE
-
-    with open(config_file_path, 'r') as file:
-        # Read the file contents
-        contents = file.read()
-        # Check if the specific setting exists
-        if f"{config_basic.XPACK_SECURITY_SETTING_NAME}: false" in contents:
-            return False
-        elif f"{config_basic.XPACK_SECURITY_SETTING_NAME}: true" in contents:
-            return True
-        # If the setting doesn't exist, return None.
-        else:
-            return None
-
-
-def modify_xpack_security_setting(
-        config_file_path: str = None,
-        setting: bool = False,
-        output_message: bool = True
-):
-    """
-    The function modifies the 'xpack.security.enabled' setting in the Elasticsearch configuration file.
-    :param config_file_path: str, the path to the configuration file.
-    :param setting: bool, the setting to change to. Will be added, if doesn't exist.
-    :param output_message: bool, if True, the function will print a message.
-    :return:
-    """
-
-    if not config_file_path:
-        config_file_path = config_basic.ELASTIC_CONFIG_FILE
-
-    # The setting to set in the configuration file.
-    xpack_setting_to_set: str = f'{config_basic.XPACK_SECURITY_SETTING_NAME}: {str(setting).lower()}'
-
-    # Check if the setting exists in the configuration file and get its value.
-    current_xpack_security_setting = check_xpack_security_setting(config_file_path)
-
-    # If the setting doesn't exist, add it to the configuration file.
-    if current_xpack_security_setting is None:
-        with open(config_file_path, 'a') as file:
-            file.write(f'{xpack_setting_to_set}\n')
-        if output_message:
-            print_api(f"Added [{xpack_setting_to_set}] to the configuration.")
-    # If the setting exists and is different from the desired setting, change it.
-    elif current_xpack_security_setting != setting:
-        with open(config_file_path, 'r') as file:
-            lines = file.readlines()
-        with open(config_file_path, 'w') as file:
-            for line in lines:
-                if f"{config_basic.XPACK_SECURITY_SETTING_NAME}:" in line:
-                    file.write(f'{xpack_setting_to_set}\n')
-                else:
-                    file.write(line)
-        if output_message:
-            print_api(f"Changed [{config_basic.XPACK_SECURITY_SETTING_NAME}] to [{setting}].")
-    # If the setting is already set to the desired value, print a message.
-    elif current_xpack_security_setting == setting:
-        if output_message:
-            print_api(f"The setting is already set to [{setting}].")
-
-
-def create_jvm_options_custom_file(file_path: str, options: list):
-    """
-    The function creates a custom JVM options file for Elasticsearch.
-    You can use the default directory path as 'config_basic.ELASTIC_JVM_OPTIONS_DIRECTORY'.
-    :param file_path: str, the path to the custom JVM options file.
-    :param options: list, the list of JVM options.
-    :return:
-    """
-
-    # Write the options to the file.
-    with open(file_path, 'w') as file:
-        for option in options:
-            file.write(f"{option}\n")
-
-
-def create_jvm_options_custom_4gb_memory_heap_file(file_path: str = None):
-    """
-    The function creates a custom JVM options file with 4GB memory heap usage.
-    The 4GB memory usage options are needed for the Elasticsearch to work properly and not to crash.
-    :param file_path: str, the path to the custom JVM options file.
-    :return:
-    """
-
-    if not file_path:
-        file_path = config_basic.ELASTIC_JVM_OPTIONS_4GB_CUSTOM_FILE
-
-    create_jvm_options_custom_file(file_path, config_basic.ELASTIC_JVM_OPTIONS_4GB_MEMORY_USAGE)
-
-
 def is_server_available(
         max_attempts: int = 5,
         wait_between_attempts_seconds: float = 10,
@@ -241,25 +73,3 @@ def is_server_available(
 
     print_api("Elasticsearch did not start within the expected time.", color='red', **print_kwargs)
     return False
-
-
-def is_4gb_memory_heap_options_applied_on_server() -> bool:
-    """
-    The function checks if the 4GB memory heap options are applied on the Elasticsearch server.
-    :return: bool.
-    """
-
-    # Send a GET request
-    response = requests.get(config_basic.DEFAULT_ELASTIC_URL_JVM_OPTIONS)
-    response.raise_for_status()  # Raise an exception for HTTP errors
-
-    # Load JSON data from the response
-    jvm_data = response.json()
-
-    # Check if memory heap options are applied in 'input_arguments' key.
-    for node in jvm_data['nodes'].values():
-        # Get the JVM input arguments values.
-        input_arguments = node['jvm']['input_arguments']
-
-        # Check that the 4GB memory heap options are applied.
-        return all(options in input_arguments for options in config_basic.ELASTIC_JVM_OPTIONS_4GB_MEMORY_USAGE)
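With the xpack and JVM-options helpers removed, the wrapper keeps only the service start helpers and the availability poll. A short usage sketch, assuming the module path from the file list above and the partial is_server_available signature visible in this hunk:

# Hypothetical usage; only the parameter names shown in the diff are used.
from atomicshop.wrappers.elasticsearchw import elastic_infra

# Poll the local Elasticsearch instance before doing any indexing work.
if not elastic_infra.is_server_available(max_attempts=5, wait_between_attempts_seconds=10):
    raise SystemExit('Elasticsearch is not reachable.')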
atomicshop/wrappers/factw/install/pre_install_and_install_before_restart.py CHANGED
@@ -1,13 +1,13 @@
-import sys
 import subprocess
 from pathlib import Path
 
+from dkarchiver.arch_wrappers import zips
+from dkinst.installers.helpers import docker_installer
+
 from .... import filesystem
 from ....permissions import ubuntu_permissions
-from ....archiver import zips
 from ....print_api import print_api
 from ... import githubw, pipw, ubuntu_terminal
-from ...dockerw import install_docker
 from .. import config_install
 
 
@@ -87,7 +87,7 @@ def install_before_restart(
 
     # Install docker. FACT installs the docker, but there can be a problem with permissions, so we need to add
     # the user permissions to the docker group before restart.
-    if not install_docker.add_current_user_to_docker_group():
+    if not docker_installer.add_current_user_to_docker_group():
         print_api("Docker is installed, but the current user was not added to the docker group.", color='red')
         return 1
     else:
@@ -108,7 +108,7 @@ def install_before_restart(
     #     use_docker_installer=True, rootless=True, add_current_user_to_docker_group_bool=False)
 
     # Install docker in regular mode.
-    result: int = install_docker.install_docker_ubuntu(
+    result: int = docker_installer.install_docker_ubuntu(
         use_docker_installer=True, rootless=False, add_current_user_to_docker_group_bool=True)
     if result != 0:
         print_api("Docker installation failed. Please install Docker manually.", color='red')