atomicshop 2.10.7__py3-none-any.whl → 2.11.0__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
Potentially problematic release: this version of atomicshop might be problematic.
- atomicshop/__init__.py +1 -1
- atomicshop/addons/mains/msi_unpacker.py +5 -0
- atomicshop/archiver/sevenz_app_w.py +86 -0
- atomicshop/basics/strings.py +67 -9
- atomicshop/process.py +18 -0
- atomicshop/wrappers/ctyping/__init__.py +0 -0
- atomicshop/wrappers/ctyping/msi_windows_installer/__init__.py +0 -0
- atomicshop/wrappers/ctyping/msi_windows_installer/base.py +134 -0
- atomicshop/wrappers/ctyping/msi_windows_installer/cabs.py +75 -0
- atomicshop/wrappers/ctyping/msi_windows_installer/extract_msi_main.py +136 -0
- atomicshop/wrappers/ctyping/msi_windows_installer/tables.py +419 -0
- atomicshop/wrappers/dockerw/install_docker.py +68 -30
- atomicshop/wrappers/factw/install/install_after_restart.py +1 -1
- atomicshop/wrappers/olefilew.py +63 -0
- atomicshop/wrappers/ubuntu_terminal.py +72 -9
- {atomicshop-2.10.7.dist-info → atomicshop-2.11.0.dist-info}/METADATA +2 -1
- {atomicshop-2.10.7.dist-info → atomicshop-2.11.0.dist-info}/RECORD +20 -11
- {atomicshop-2.10.7.dist-info → atomicshop-2.11.0.dist-info}/LICENSE.txt +0 -0
- {atomicshop-2.10.7.dist-info → atomicshop-2.11.0.dist-info}/WHEEL +0 -0
- {atomicshop-2.10.7.dist-info → atomicshop-2.11.0.dist-info}/top_level.txt +0 -0
atomicshop/wrappers/ctyping/msi_windows_installer/tables.py (new file)

@@ -0,0 +1,419 @@
+import os
+import csv
+import ctypes
+from ctypes import wintypes
+
+from . import base
+from .base import msi
+
+
+def get_column_names(db_handle, table_name):
+    """Fetch column names for a specific table."""
+    column_names = []
+    query = f"SELECT `Name` FROM `_Columns` WHERE `Table`='{table_name}' ORDER BY `Number`"
+    view_handle = base.create_open_execute_view_handle(db_handle, query)
+
+    while True:
+        record_handle = base.create_fetch_record_from_view_handle(view_handle)
+        if not record_handle:
+            break
+
+        column_name = base.get_table_field_data_from_record(record_handle, field_index=1, data_type='stringw')
+        if column_name:
+            column_names.append(column_name)
+
+        msi.MsiCloseHandle(record_handle)
+    msi.MsiCloseHandle(view_handle)
+    return column_names
+
+
+def list_all_table_names(db_handle) -> list:
+    """List all tables in the MSI database."""
+    query = "SELECT `Name` FROM `_Tables`"
+    view_handle = base.create_open_execute_view_handle(db_handle, query)
+
+    tables: list = []
+    while True:
+        record_handle = base.create_fetch_record_from_view_handle(view_handle)
+
+        # If record handle is empty then there is nothing more in the buffer, and we can stop the loop.
+        if not record_handle:
+            break
+
+        tables.append(base.get_table_field_data_from_record(record_handle, field_index=1, data_type='stringw'))
+
+        msi.MsiCloseHandle(record_handle)
+    msi.MsiViewClose(view_handle)
+
+    return tables
+
+
+def get_table_contents(db_handle, table_name):
+    """Fetch all contents of a specific table using ctypes."""
+    view_handle = base.create_open_execute_view_handle(db_handle, f"SELECT * FROM `{table_name}`")
+    contents = []
+
+    # Fetch column names
+    column_names = get_column_names(db_handle, table_name)
+    contents.append(column_names)
+
+    while True:
+        record_handle = base.create_fetch_record_from_view_handle(view_handle)
+        if not record_handle:
+            break
+
+        row = []
+        field_count = msi.MsiRecordGetFieldCount(record_handle)
+
+        for i in range(1, field_count + 1):
+            # Try to fetch as a string
+            buf_size = wintypes.DWORD(1024)
+            buf = ctypes.create_unicode_buffer(buf_size.value)
+            result = msi.MsiRecordGetStringW(record_handle, i, buf, ctypes.byref(buf_size))
+
+            if result == 0:
+                row.append(buf.value)
+            elif result == 234:  # ERROR_MORE_DATA
+                # Increase buffer size and try again
+                buf_size = wintypes.DWORD(buf_size.value + 1)
+                buf = ctypes.create_unicode_buffer(buf_size.value)
+                result = msi.MsiRecordGetStringW(record_handle, i, buf, ctypes.byref(buf_size))
+                if result == 0:
+                    row.append(buf.value)
+                else:
+                    row.append(None)
+            else:
+                # Try to fetch as an integer
+                int_value = ctypes.c_int()
+                result = msi.MsiRecordGetInteger(record_handle, i, ctypes.byref(int_value))
+                if result == 0:
+                    row.append(int_value.value)
+                else:
+                    # Try to fetch as a stream
+                    stream_size = wintypes.DWORD()
+                    result = msi.MsiRecordReadStream(record_handle, i, None, ctypes.byref(stream_size))
+                    if result == 0:
+                        stream_data = ctypes.create_string_buffer(stream_size.value)
+                        msi.MsiRecordReadStream(record_handle, i, stream_data, ctypes.byref(stream_size))
+                        row.append(stream_data.raw)
+                    else:
+                        row.append(None)
+
+        contents.append(row)
+        msi.MsiCloseHandle(record_handle)
+
+    msi.MsiCloseHandle(view_handle)
+    return contents
+
+
+def extract_table_contents_to_csv(db_handle, output_directory):
+    """Extracts all table contents to separate CSV files."""
+
+    os.makedirs(output_directory, exist_ok=True)
+
+    # Get all the table names.
+    table_names: list = list_all_table_names(db_handle)
+    # Get all the table contents by fetching each table.
+    table_contents = {table: get_table_contents(db_handle, table) for table in table_names}
+    print(f"Tables and their contents have been fetched.")
+
+    # Save each table to a separate CSV file
+    for table, contents in table_contents.items():
+        csv_file_path = os.path.join(output_directory, f"{table}.csv")
+        with open(csv_file_path, "w", newline='') as csv_file:
+            writer = csv.writer(csv_file)
+            writer.writerows(contents)
+        print(f"Table {table} contents saved to {csv_file_path}")
+
+    print("All table contents saved to separate CSV files in the 'tables' directory.")
+
+
+def extract_binary_table_entries(db_handle, output_directory: str):
+    os.makedirs(output_directory, exist_ok=True)
+
+    # Create and execute a view to query the Binary table
+    query = "SELECT Name, Data FROM Binary"
+    view_handle = base.create_open_execute_view_handle(db_handle, query)
+
+    while True:
+        # Fetch a record from the view
+        record_handle = base.create_fetch_record_from_view_handle(view_handle)
+        if not record_handle:
+            break
+
+        # Get the binary name
+        name = base.get_table_field_data_from_record(record_handle, field_index=1, data_type='stringw')
+
+        # Get the size of the binary data
+        data_size = msi.MsiRecordDataSize(record_handle, 2)
+        if data_size == 0:
+            continue
+
+        # Read the binary data
+        binary_data = base.get_table_field_data_from_record(
+            record_handle, field_index=2, data_type='stream', buffer_size=data_size)
+
+        # Save the binary data to a file
+        output_filepath = os.path.join(output_directory, name)
+        print(f"Extracting binary file [{name}] to {output_filepath}")
+        with open(output_filepath, 'wb') as f:
+            f.write(binary_data)
+
+        msi.MsiCloseHandle(record_handle)
+    msi.MsiCloseHandle(view_handle)
+
+
+def extract_icon_table_entries(db_handle, output_directory: str):
+    """Extracts icons from the Icon table in the specified MSI file."""
+    os.makedirs(output_directory, exist_ok=True)
+
+    query = "SELECT Name, Data FROM Icon"
+    view_handle = base.create_open_execute_view_handle(db_handle, query)
+
+    while True:
+        # Fetch a record from the view
+        record_handle = base.create_fetch_record_from_view_handle(view_handle)
+        if not record_handle:
+            break
+
+        # Read the Name (field 1) and Data (field 2) from the record
+        icon_filename = base.get_table_field_data_from_record(record_handle, 1, 'stringw')
+        icon_data = base.get_table_field_data_from_record(record_handle, 2, 'stream')
+
+        # Define the output file path
+        output_file_path = os.path.join(output_directory, f"{icon_filename}")
+
+        # Write the icon data to a file
+        with open(output_file_path, 'wb') as icon_file:
+            icon_file.write(icon_data)
+
+        print(f"Extracted icon: {output_file_path}")
+
+        # Close handles.
+        msi.MsiCloseHandle(record_handle)
+    msi.MsiCloseHandle(view_handle)
+
+
+def extract_issetupfiles_table_entries(db_handle, output_directory: str):
+    """Extracts icons from the Icon table in the specified MSI file."""
+    os.makedirs(output_directory, exist_ok=True)
+
+    query = "SELECT FileName, Stream FROM ISSetupFile"
+    view_handle = base.create_open_execute_view_handle(db_handle, query)
+
+    while True:
+        # Fetch a record from the view
+        record_handle = base.create_fetch_record_from_view_handle(view_handle)
+        if not record_handle:
+            break
+
+        # Read the Name (field 1) and Data (field 2) from the record
+        file_name = base.get_table_field_data_from_record(record_handle, 1, 'stringw')
+        file_data = base.get_table_field_data_from_record(record_handle, 2, 'stream')
+
+        # Define the output file path
+        output_file_path = os.path.join(output_directory, f"{file_name}")
+
+        # Write the icon data to a file
+        with open(output_file_path, 'wb') as icon_file:
+            icon_file.write(file_data)
+
+        print(f"Extracted IsSetupFile: {output_file_path}")
+
+        # Close handles.
+        msi.MsiCloseHandle(record_handle)
+    msi.MsiCloseHandle(view_handle)
+
+
+def extract_registry_changes(db_handle, output_directory: str):
+    """Extracts registry changes from the MSI file and writes them to a .reg file."""
+
+    os.makedirs(output_directory, exist_ok=True)
+    registry_file_path: str = os.path.join(output_directory, "registry_changes.reg")
+
+    # Create and execute a view for the Registry table
+    query = "SELECT `Root`, `Key`, `Name`, `Value` FROM `Registry`"
+    view_handle = base.create_open_execute_view_handle(db_handle, query)
+
+    with open(registry_file_path, 'w') as reg_file:
+        # Write the .reg file header
+        reg_file.write("Windows Registry Editor Version 5.00\n\n")
+
+        while True:
+            # Fetch a record from the view
+            record_handle = base.create_fetch_record_from_view_handle(view_handle)
+            if not record_handle:
+                break
+
+            # Read the Root (field 1), Key (field 2), Name (field 3), and Value (field 4) from the record
+            root = int(base.get_table_field_data_from_record(record_handle, 1, 'stringw'))
+            key = base.get_table_field_data_from_record(record_handle, 2, 'stringw')
+            name = base.get_table_field_data_from_record(record_handle, 3, 'stringw')
+            value = base.get_table_field_data_from_record(record_handle, 4, 'stringw')
+
+            # Determine the root key name
+            root_key = {
+                0: "HKEY_CLASSES_ROOT",
+                1: "HKEY_CURRENT_USER",
+                2: "HKEY_LOCAL_MACHINE",
+                3: "HKEY_USERS"
+            }.get(root, "UNKNOWN_ROOT")
+
+            # Format the registry entry
+            if name:
+                reg_entry = f'[{root_key}\\{key}]\n"{name}"="{value}"\n\n'
+            else:
+                reg_entry = f'[{root_key}\\{key}]\n@="{value}"\n\n'
+
+            # Write the registry entry to the .reg file
+            reg_file.write(reg_entry)
+
+            msi.MsiCloseHandle(record_handle)
+    msi.MsiCloseHandle(view_handle)
+
+
+def list_media_table_entries(db_handle):
+    """List all CAB files from the Media table."""
+    query = "SELECT `Cabinet` FROM `Media`"
+    media_view = base.create_open_execute_view_handle(db_handle, query)
+
+    cab_files = []
+    while True:
+        record_handle = base.create_fetch_record_from_view_handle(media_view)
+        if not record_handle:
+            break
+
+        # The CAB file names are prefixed with #
+        cab_name = base.get_table_field_data_from_record(record_handle, field_index=1, data_type='stringw')
+        cab_files.append(cab_name.strip("#"))
+
+        msi.MsiCloseHandle(record_handle)
+    msi.MsiCloseHandle(media_view)
+    return cab_files
+
+
+def extract_cab_files_from_media(db_handle, output_directory: str):
+    """Extract all CAB files from the list."""
+    os.makedirs(output_directory, exist_ok=True)
+
+    query = "SELECT `Cabinet` FROM `Media` WHERE `Cabinet` IS NOT NULL"
+    # Query to fetch CAB files from the Media table
+    view_handle = base.create_open_execute_view_handle(db_handle, query)
+
+    cabinet_name_list: list = []
+    while True:
+        record_handle = base.create_fetch_record_from_view_handle(view_handle)
+        if not record_handle:
+            break
+
+        cabinet_name = base.get_table_field_data_from_record(record_handle, field_index=1, data_type='stringw')
+        cabinet_name_list.append(cabinet_name)
+        msi.MsiCloseHandle(record_handle)
+    msi.MsiCloseHandle(view_handle)
+
+    cab_file_paths: list = []
+    for cabinet_name in cabinet_name_list:
+        if cabinet_name.startswith("#"):
+            cabinet_name = cabinet_name[1:]  # Remove the leading #
+
+        cab_file_path = os.path.join(output_directory, cabinet_name)
+        with open(cab_file_path, 'wb') as f:
+            # Read the binary stream from the MSI package
+            stream_query = f"SELECT `Data` FROM `_Streams` WHERE `Name`='{cabinet_name}'"
+            stream_view = base.create_open_execute_view_handle(db_handle, stream_query)
+
+            while True:
+                stream_record = base.create_fetch_record_from_view_handle(stream_view)
+                if not stream_record:
+                    break
+
+                data = base.get_table_field_data_from_record(stream_record, field_index=1, data_type='stream')
+
+                f.write(data)
+
+                msi.MsiCloseHandle(stream_record)
+            msi.MsiViewClose(stream_view)
+            msi.MsiCloseHandle(stream_view)
+
+        print(f"Extracted: {cabinet_name}")
+        cab_file_paths.append(cab_file_path)
+
+    print(f"CAB files extraction completed. Files are saved to {output_directory}")
+    return cab_file_paths
+
+
+def get_file_table_info(db_handle):
+    query = "SELECT `File`, `FileName`, `Component_` FROM `File`"
+    view_handle = base.create_open_execute_view_handle(db_handle, query)
+
+    file_info = {}
+
+    # Fetch the records
+    while True:
+        record_handle = base.create_fetch_record_from_view_handle(view_handle)
+        if not record_handle:
+            break
+
+        file_key = base.get_table_field_data_from_record(record_handle, field_index=1, data_type='stringw')
+        # Handle cases with multiple file names
+        file_name = base.get_table_field_data_from_record(record_handle, field_index=2, data_type='stringw')
+        file_name = file_name.split('|')[-1]
+        component = base.get_table_field_data_from_record(record_handle, field_index=3, data_type='stringw')
+
+        file_info[file_key] = {'file_name': file_name, 'component': component}
+
+        msi.MsiCloseHandle(record_handle)
+    msi.MsiCloseHandle(view_handle)
+
+    return file_info
+
+
+def get_component_table_info(db_handle):
+    component_info = {}
+
+    query = "SELECT `Component`, `Directory_` FROM `Component`"
+    view_handle = base.create_open_execute_view_handle(db_handle, query)
+
+    # Fetch the records
+    while True:
+        record_handle = base.create_fetch_record_from_view_handle(view_handle)
+        if not record_handle:
+            break
+
+        component_key = base.get_table_field_data_from_record(record_handle, field_index=1, data_type='stringw')
+        directory = base.get_table_field_data_from_record(record_handle, field_index=2, data_type='stringw')
+
+        component_info[component_key] = {'directory': directory}
+
+        msi.MsiCloseHandle(record_handle)
+    msi.MsiCloseHandle(view_handle)
+
+    return component_info
+
+
+def get_directory_table_info(db_handle):
+    directory_info = {}
+
+    # Open a view to the Directory table
+    query = "SELECT `Directory`, `Directory_Parent`, `DefaultDir` FROM `Directory`"
+    view_handle = base.create_open_execute_view_handle(db_handle, query)
+
+    # Fetch the records
+    while True:
+        record_handle = base.create_fetch_record_from_view_handle(view_handle)
+        if not record_handle:
+            break
+
+        directory_key = base.get_table_field_data_from_record(record_handle, field_index=1, data_type='stringw')
+        parent_key = base.get_table_field_data_from_record(record_handle, field_index=2, data_type='stringw')
+
+        # Handle cases with multiple directory names with '|' character.
+        default_dir_buffer = base.get_table_field_data_from_record(record_handle, field_index=3, data_type='stringw')
+        default_dir = default_dir_buffer.split('|')[-1]
+
+        directory_info[directory_key] = {'parent': parent_key, 'default_dir': default_dir}
+
+        msi.MsiCloseHandle(record_handle)
+    msi.MsiCloseHandle(view_handle)

+    return directory_info
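For orientation, the following is a minimal usage sketch of the new tables module, not code from the package. The sibling base.py (added in the same release, +134 lines) is not shown in this diff, so the sketch opens the database handle directly against msi.dll with ctypes instead of using whatever helper base.py provides; the installer path, output directories, and the choice to pass the raw handle value straight through to the wrapper functions are assumptions.

import ctypes
from ctypes import wintypes

from atomicshop.wrappers.ctyping.msi_windows_installer import tables

_msi = ctypes.WinDLL('msi')
_msi.MsiOpenDatabaseW.argtypes = (wintypes.LPCWSTR, wintypes.LPCWSTR, ctypes.POINTER(wintypes.DWORD))
_msi.MsiOpenDatabaseW.restype = wintypes.UINT

db_handle = wintypes.DWORD(0)  # MSIHANDLE is an unsigned 32-bit value
# szPersist=None corresponds to MSIDBOPEN_READONLY (the NULL LPCWSTR constant): open read-only.
result = _msi.MsiOpenDatabaseW(r"C:\path\to\installer.msi", None, ctypes.byref(db_handle))
if result != 0:
    raise OSError(f"MsiOpenDatabaseW failed with Win32 error {result}")

try:
    print(tables.list_all_table_names(db_handle.value))   # e.g. ['Binary', 'File', 'Media', ...]
    tables.extract_table_contents_to_csv(db_handle.value, r"C:\out\tables")
    tables.extract_cab_files_from_media(db_handle.value, r"C:\out\cabs")
finally:
    _msi.MsiCloseHandle(db_handle.value)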
atomicshop/wrappers/dockerw/install_docker.py

@@ -1,8 +1,10 @@
+import os
 import subprocess
 import getpass
 
 from ... import process, filesystem, permissions
 from ...print_api import print_api
+from .. import ubuntu_terminal
 
 
 def is_docker_installed():

@@ -59,10 +61,21 @@ def add_current_user_to_docker_group(print_kwargs: dict = None):
     return False
 
 
-def install_docker_ubuntu(add_current_user_to_docker_group_bool: bool = True):
+def install_docker_ubuntu(
+        use_docker_installer: bool = True,
+        rootless: bool = False,
+        add_current_user_to_docker_group_bool: bool = False
+):
     """
     The function will install docker on ubuntu.
+    :param rootless: bool, if True, the rootless installation will be performed.
+        Meaning, you will be able to run the 'docker' command without sudo and you will not need to add the
+        current user to the docker group.
+    :param use_docker_installer: bool, if True, the docker installer will be used.
+        If False, the docker will be installed using the apt package manager, custom repo and keyring.
     :param add_current_user_to_docker_group_bool: bool, if True, the current user will be added to the docker group.
+        So the user will be able to run the 'docker' command without sudo. If you install docker in rootless mode
+        this is not needed.
 
     Usage in main.py (run with sudo):
         from atomicshop.wrappers.dockerw import install_docker

@@ -76,41 +89,66 @@ def install_docker_ubuntu(add_current_user_to_docker_group_bool: bool = True):
         main()
     """
 
[27 lines removed here (old lines 79-105); their content is not rendered in this diff view]
+    if use_docker_installer:
+        # Use the docker installer script.
+        # The script will install docker and add the current user to the docker group.
+        # The script will also install docker-compose and docker-buildx.
+        process.execute_script('curl -fsSL https://get.docker.com -o get-docker.sh && sh get-docker.sh', shell=True)
+    else:
+        # Remove the existing keyrings, so we will not be asked to overwrite it if it exists.
+        docker_keyring_file_path: str = "/etc/apt/keyrings/docker.gpg"
+        filesystem.remove_file(docker_keyring_file_path)
+
+        script = f"""
+        # Step 1: Set up Docker's apt repository
+        sudo apt-get update
+        sudo apt-get install -y ca-certificates curl gnupg
+        sudo install -m 0755 -d /etc/apt/keyrings
+        curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg
+        sudo chmod a+r /etc/apt/keyrings/docker.gpg
+
+        # Add the repository to Apt sources
+        echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
+        sudo apt-get update
+
+        # Step 2: Install the Docker packages
+        sudo apt-get install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
+
+        # Step 3: Verify the installation
+        # sudo docker run hello-world
+
+        # Add Privileges to run docker without sudo. Add current user to Docker superuser group.
+        # sudo usermod -aG docker $USER
+        """
+
+        process.execute_script(script, shell=True)
+
+    if rootless:
+        # Install uidmap package.
+        ubuntu_terminal.update_system_packages()
+        ubuntu_terminal.install_packages(['uidmap'])
+
+        # After 'get-docker.sh' execution, we will install docker in rootless mode.
+        subprocess.run(['dockerd-rootless-setuptool.sh', 'install'], shell=True)
+
+        # Start and enable the docker service in user mode.
+        ubuntu_terminal.start_enable_service_check_availability('docker.service', user_mode=True, sudo=False)
+
+        # Enable lingering so Docker runs when the user is not logged in
+        subprocess.run(['sudo', 'loginctl', 'enable-linger', os.getlogin()], shell=True)
+
+        # Add $HOME/bin to your PATH if it's not already there.
+        ubuntu_terminal.add_path_to_bashrc()
 
     if add_current_user_to_docker_group_bool:
         # Check if current user that executed the script is a sudo user. If not, use the current user.
         # Add the current user to the docker group.
         add_current_user_to_docker_group()
 
[2 lines removed here (old lines 112-113); their content is not rendered in this diff view]
+        # Verify the installation.
+        result: list = process.execute_with_live_output('sudo docker run hello-world')
+    else:
+        result: list = process.execute_with_live_output('docker run hello-world')
 
     print_api('\n'.join(result))
 
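Below is a short, hedged sketch of calling the reworked installer function, following the "Usage in main.py (run with sudo)" pattern from its docstring; the particular parameter combination shown (convenience-script install plus rootless setup) is only an illustration.

from atomicshop.wrappers.dockerw import install_docker


def main():
    # Default path: the get.docker.com convenience script, then a rootless setup,
    # so there is no need to add the current user to the docker group.
    install_docker.install_docker_ubuntu(
        use_docker_installer=True,
        rootless=True,
        add_current_user_to_docker_group_bool=False,
    )


if __name__ == '__main__':
    main()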
atomicshop/wrappers/factw/install/install_after_restart.py

@@ -32,4 +32,4 @@ def install_after_restart(
     process.execute_with_live_output(cmd=install_command, verbose=True)
     # Remove the FACT_core installation log.
     working_directory_path: str = filesystem.get_working_directory()
-    filesystem.remove_file(str(Path(working_directory_path, config_install.INSTALL_LOG_FILE_NAME)))
+    # filesystem.remove_file(str(Path(working_directory_path, config_install.INSTALL_LOG_FILE_NAME)))
atomicshop/wrappers/olefilew.py (new file)

@@ -0,0 +1,63 @@
+import os
+import datetime
+import json
+
+import olefile
+
+
+def convert_object_to_string(obj):
+    if isinstance(obj, bytes):
+        # MSI Database uses latin-1 encoding for strings, could be that other ole files too.
+        return obj.decode('latin-1')
+    elif isinstance(obj, datetime.datetime):
+        return obj.strftime('%Y-%m-%d-%H:%M:%S')
+    return obj
+
+
+def extract_ole_metadata(ole_file_path: str, output_directory: str):
+    """
+    Extract metadata from an OLE2 file.
+    :param ole_file_path:
+    :param output_directory:
+    :return:
+    """
+    os.makedirs(output_directory, exist_ok=True)
+    metadata_file_path = os.path.join(output_directory, "metadata.json")
+
+    # Check if the file is ole2 file.
+    if not olefile.isOleFile(ole_file_path):
+        message = f"The file {ole_file_path} is not an OLE2 file."
+        print(message)
+        with open(metadata_file_path, "w") as metadata_file:
+            metadata_file.write(message)
+        return
+
+    # Open the OLE2 file.
+    ole = olefile.OleFileIO(ole_file_path)
+
+    # Get the metadata of the OLE2 file.
+    metadata = ole.get_metadata()
+
+    meta_properties: dict = {
+        'SummaryInformation': {},
+        'DocumentSummaryInformation': {}
+    }
+    # Properties from SummaryInformation stream.
+    for prop in metadata.SUMMARY_ATTRIBS:
+        value = getattr(metadata, prop)
+        value = convert_object_to_string(value)
+        meta_properties['SummaryInformation'][prop] = value
+    # Properties from DocumentSummaryInformation stream.
+    for prop in metadata.DOCSUM_ATTRIBS:
+        value = getattr(metadata, prop)
+        value = convert_object_to_string(value)
+        meta_properties['DocumentSummaryInformation'][prop] = value
+
+    # Save the metadata to a file.
+    with open(metadata_file_path, "w") as metadata_file:
+        json.dump(meta_properties, metadata_file, indent=4)
+
+    print(f"Metadata of the OLE2 file saved to {metadata_file_path}")
+
+    # Close the OLE2 file.
+    ole.close()
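A minimal usage sketch for the new OLE metadata wrapper follows; the input and output paths are placeholders.

from atomicshop.wrappers import olefilew

# Writes the SummaryInformation and DocumentSummaryInformation properties of an
# OLE2 compound file (an MSI, for example) to <output_directory>/metadata.json.
olefilew.extract_ole_metadata(
    ole_file_path=r"C:\path\to\installer.msi",
    output_directory=r"C:\out\metadata",
)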