atomicshop-2.12.25-py3-none-any.whl → atomicshop-2.12.26-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- atomicshop/__init__.py +1 -1
- atomicshop/wrappers/ctyping/etw_winapi/const.py +81 -0
- atomicshop/wrappers/loggingw/reading.py +115 -98
- {atomicshop-2.12.25.dist-info → atomicshop-2.12.26.dist-info}/METADATA +1 -1
- {atomicshop-2.12.25.dist-info → atomicshop-2.12.26.dist-info}/RECORD +8 -8
- {atomicshop-2.12.25.dist-info → atomicshop-2.12.26.dist-info}/LICENSE.txt +0 -0
- {atomicshop-2.12.25.dist-info → atomicshop-2.12.26.dist-info}/WHEEL +0 -0
- {atomicshop-2.12.25.dist-info → atomicshop-2.12.26.dist-info}/top_level.txt +0 -0
atomicshop/__init__.py
CHANGED
atomicshop/wrappers/ctyping/etw_winapi/const.py
CHANGED

@@ -10,6 +10,16 @@ WNODE_FLAG_TRACED_GUID = 0x00020000
 MAXIMUM_LOGGERS = 64


+"""
+wintypes.DWORD = wintypes.ULONG = ctypes.c_ulong: 32-bit unsigned integer
+wintypes.WORD = wintypes.USHORT = ctypes.c_ushort: 16-bit unsigned integer
+wintypes.BYTE = ctypes.c_ubyte: 8-bit unsigned integer
+wintypes.LARGE_INTEGER is a structure (or union in C terms), can represent both signed and unsigned
+64-bit values depending on context.
+ctypes.c_ulonglong is a simple data type representing an unsigned 64-bit integer.
+"""
+
+
 # Define GUID structure
 class GUID(ctypes.Structure):
     _fields_ = [
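The type notes added in this docstring can be sanity-checked with ctypes.sizeof. The snippet below is a minimal sketch, not part of the package, and it assumes a Windows build of CPython (where c_ulong is 32-bit):

import ctypes
from ctypes import wintypes

# DWORD/ULONG are 4 bytes, WORD/USHORT are 2 bytes, BYTE is 1 byte on Windows.
assert ctypes.sizeof(wintypes.DWORD) == ctypes.sizeof(wintypes.ULONG) == 4
assert ctypes.sizeof(wintypes.WORD) == ctypes.sizeof(wintypes.USHORT) == 2
assert ctypes.sizeof(wintypes.BYTE) == 1
# LARGE_INTEGER and c_ulonglong both occupy 8 bytes; they differ in signedness, not size.
assert ctypes.sizeof(wintypes.LARGE_INTEGER) == ctypes.sizeof(ctypes.c_ulonglong) == 8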
@@ -60,6 +70,65 @@ class EVENT_TRACE_PROPERTIES(ctypes.Structure):
     ]


+# Define the EVENT_TRACE_LOGFILE structure
+class EVENT_TRACE_LOGFILE(ctypes.Structure):
+    _fields_ = [
+        ("LogFileName", wintypes.LPWSTR),
+        ("LoggerName", wintypes.LPWSTR),
+        ("CurrentTime", wintypes.LARGE_INTEGER),
+        ("BuffersRead", wintypes.ULONG),
+        ("ProcessTraceMode", wintypes.ULONG),
+        ("EventRecordCallback", wintypes.LPVOID),
+        ("BufferSize", wintypes.ULONG),
+        ("Filled", wintypes.ULONG),
+        ("EventsLost", wintypes.ULONG),
+        ("BuffersLost", wintypes.ULONG),
+        ("RealTimeBuffersLost", wintypes.ULONG),
+        ("LogBuffersLost", wintypes.ULONG),
+        ("BuffersWritten", wintypes.ULONG),
+        ("LogFileMode", wintypes.ULONG),
+        ("IsKernelTrace", wintypes.ULONG),
+        ("Context", wintypes.ULONG)  # Placeholder for context pointer
+    ]
+
+
+# Define the EVENT_TRACE_HEADER structure
+class EVENT_TRACE_HEADER(ctypes.Structure):
+    _fields_ = [
+        ("Size", wintypes.USHORT),
+        ("FieldTypeFlags", wintypes.USHORT),
+        ("Version", wintypes.USHORT),
+        ("Class", wintypes.USHORT),  # EVENT_TRACE_CLASS
+        ("Type", ctypes.c_ubyte),
+        ("Level", ctypes.c_ubyte),
+        ("Channel", ctypes.c_ubyte),
+        ("Flags", ctypes.c_ubyte),
+        ("InstanceId", wintypes.USHORT),
+        ("ParentInstanceId", wintypes.USHORT),
+        ("ParentGuid", GUID),
+        ("Timestamp", wintypes.LARGE_INTEGER),
+        ("Guid", GUID),
+        ("ProcessorTime", wintypes.ULONG),
+        ("ThreadId", wintypes.ULONG),
+        ("ProcessId", wintypes.ULONG),
+        ("KernelTime", wintypes.ULONG),
+        ("UserTime", wintypes.ULONG),
+    ]
+
+
+# Define the EVENT_RECORD structure
+class EVENT_RECORD(ctypes.Structure):
+    _fields_ = [
+        ("EventHeader", EVENT_TRACE_HEADER),
+        ("BufferContext", wintypes.ULONG),
+        ("ExtendedDataCount", wintypes.USHORT),
+        ("UserDataLength", wintypes.USHORT),
+        ("ExtendedData", wintypes.LPVOID),
+        ("UserData", wintypes.LPVOID),
+        ("UserContext", wintypes.LPVOID)
+    ]
+
+
 class PROVIDER_ENUMERATION_INFO(ctypes.Structure):
     _fields_ = [
         ("NumberOfProviders", ULONG),
@@ -92,3 +161,15 @@ QueryAllTraces.argtypes = [
     ctypes.POINTER(wintypes.ULONG)
 ]
 QueryAllTraces.restype = wintypes.ULONG
+
+OpenTrace = advapi32.OpenTraceW
+OpenTrace.argtypes = [ctypes.POINTER(EVENT_TRACE_LOGFILE)]
+OpenTrace.restype = wintypes.ULONG
+
+ProcessTrace = advapi32.ProcessTrace
+ProcessTrace.argtypes = [ctypes.POINTER(wintypes.ULONG), wintypes.ULONG, wintypes.LARGE_INTEGER, wintypes.LARGE_INTEGER]
+ProcessTrace.restype = wintypes.ULONG
+
+CloseTrace = advapi32.CloseTrace
+CloseTrace.argtypes = [wintypes.ULONG]
+CloseTrace.restype = wintypes.ULONG
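Taken together, these additions give const.py the pieces of a minimal ETW consumer: an EVENT_TRACE_LOGFILE describing the session, plus OpenTrace, ProcessTrace and CloseTrace from advapi32. The sketch below shows one way they might be wired up; it is not code from the package. It assumes the names defined in this module are in scope, PROCESS_TRACE_MODE_REAL_TIME and INVALID_PROCESSTRACE_HANDLE are the standard Win32 values restated here, and because the simplified structure keeps EventRecordCallback as a bare LPVOID, no Python callback is registered:

import ctypes
from ctypes import wintypes

# Standard Win32 constants, not defined in the diff above.
PROCESS_TRACE_MODE_REAL_TIME = 0x00000100
INVALID_PROCESSTRACE_HANDLE = 0xFFFFFFFF  # as seen through the ULONG restype declared above

def consume_realtime_session(session_name: str) -> int:
    """Attach to an existing real-time ETW session by logger name and pump it until it stops."""
    logfile = EVENT_TRACE_LOGFILE()
    logfile.LoggerName = session_name                       # live session, not an .etl file
    logfile.ProcessTraceMode = PROCESS_TRACE_MODE_REAL_TIME

    trace_handle = OpenTrace(ctypes.byref(logfile))
    if trace_handle == INVALID_PROCESSTRACE_HANDLE:
        raise ctypes.WinError()

    handles = (wintypes.ULONG * 1)(trace_handle)
    # ProcessTrace blocks, dispatching buffers until the session is stopped or CloseTrace is called.
    status = ProcessTrace(handles, 1, 0, 0)                  # 0/0: no start/end time filtering
    CloseTrace(trace_handle)
    return status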
atomicshop/wrappers/loggingw/reading.py
CHANGED

@@ -6,10 +6,6 @@ from ... import filesystem, datetimes
 from ...file_io import csvs


-READING_EXISTING_LINES: list = []
-EXISTING_LOGS_FILE_COUNT: int = 0
-
-
 def get_logs_paths(
         log_files_directory_path: str = None,
         log_file_path: str = None,
@@ -212,31 +208,10 @@ def get_logs(
     return logs_content


-def get_latest_lines(
-        log_file_path: str,
-        date_pattern: str = None,
-        log_type: Literal['csv'] = 'csv',
-        get_previous_file: bool = False,
-        header: list = None
-) -> tuple:
+class LogReader:
     """
-    This function gets the latest lines from the log file.
+    This class gets the latest lines from the log file.

-    :param log_file_path: Path to the log file.
-    :param date_pattern: Pattern to match the date in the log file name.
-        If specified, the function will get the log file by the date pattern.
-        If not specified, the function will get the file date by file last modified time.
-    :param log_type: Type of log to get.
-    :param get_previous_file: Boolean, if True, the function will get the previous log file.
-        For example, your log is set to rotate every Midnight.
-        Meaning, once the day will change, the function will get the log file from the previous day in the third entry
-        of the return tuple. This happens only once each 24 hours. Not from the time the function was called, but from
-        the time the day changed.
-    :param header: List of strings that will be the header of the CSV file. Default is 'None'.
-        None: the header from the CSV file will be used. The first row of the CSV file will be the header.
-        Meaning, that the first line will be skipped and the second line will be the first row of the content.
-        List: the list will be used as header.
-        All the lines of the CSV file will be considered as content.
     return: List of new lines.

     Usage:
@@ -246,14 +221,15 @@ def get_latest_lines(
     # The header of the log file will be read from the first iteration of the log file.
     # When the file is rotated, this header will be used to not read the header again.
     header: Union[list, None] = None
+    log_reader = reading.LogReader(
+        log_file_path='/path/to/log.csv',
+        log_type='csv',
+        date_pattern='%Y_%m_%d',
+        get_previous_file=True,
+        header=header
+    )
     while True:
-        latest_lines, previous_day_24h_lines, header = reading.get_latest_lines(
-            log_file_path='/path/to/log.csv',
-            log_type='csv',
-            date_pattern='%Y_%m_%d',
-            get_previous_file=True,
-            header=header
-        )
+        latest_lines, previous_day_24h_lines, header = log_reader.get_latest_lines(header=header)

         if latest_lines:
             # Do something with the new lines.
@@ -262,91 +238,132 @@ def get_latest_lines(
             # Do something with the last 24 hours lines. Reminder, this will happen once a day on log rotation.

         time.sleep(1)
-
-
-    def _extract_new_lines_only(content_lines: list):
+    """
+
+    def __init__(
+            self,
+            log_file_path: str,
+            date_pattern: str = None,
+            log_type: Literal['csv'] = 'csv',
+            get_previous_file: bool = False,
+            header: list = None
+    ):
+        """
+        :param log_file_path: Path to the log file.
+        :param date_pattern: Pattern to match the date in the log file name.
+            If specified, the function will get the log file by the date pattern.
+            If not specified, the function will get the file date by file last modified time.
+        :param log_type: Type of log to get.
+        :param get_previous_file: Boolean, if True, the function will get the previous log file.
+            For example, your log is set to rotate every Midnight.
+            Meaning, once the day will change, the function will get the log file from the previous day in the third entry
+            of the return tuple. This happens only once each 24 hours. Not from the time the function was called, but from
+            the time the day changed.
+        :param header: List of strings that will be the header of the CSV file. Default is 'None'.
+            None: the header from the CSV file will be used. The first row of the CSV file will be the header.
+            Meaning, that the first line will be skipped and the second line will be the first row of the content.
+            List: the list will be used as header.
+            All the lines of the CSV file will be considered as content.
+        """
+
+        self.log_file_path: str = log_file_path
+        self.date_pattern: str = date_pattern
+        self.log_type: Literal['csv'] = log_type
+        self.get_previous_file: bool = get_previous_file
+        self.header: list = header
+
+        self._reading_existing_lines: list = []
+        self._existing_logs_file_count: int = 0
+
+    def _extract_new_lines_only(self, content_lines: list):
         new_lines: list = []
         for row in content_lines:
             # If the row is not in the existing lines, then add it to the new lines.
-            if row not in READING_EXISTING_LINES:
+            if row not in self._reading_existing_lines:
                 new_lines.append(row)

         if new_lines:
-
+            self._reading_existing_lines.extend(new_lines)

         return new_lines

-
+    def get_latest_lines(self, header: list = None) -> tuple:
+        if header:
+            self.header = header

-
-
-
-
-
-
+        # If the existing logs file count is 0, it means that this is the first check. We need to get the current count.
+        if self._existing_logs_file_count == 0:
+            self._existing_logs_file_count = len(get_logs_paths(
+                log_file_path=self.log_file_path,
+                log_type='csv'
+            ))

-
-
-
+        # If the count is still 0, then there are no logs to read.
+        if self._existing_logs_file_count == 0:
+            return [], [], self.header

-
-
+        if self.log_type != 'csv':
+            raise ValueError('Only "csv" log type is supported.')

-
+        previous_file_lines: list = []

-
-
-
-
-            log_type='csv',
-            latest_only=True
-        )
-
-        latest_statistics_file_path: str = latest_statistics_file_path_object[0]['file_path']
-
-        # Get the previous day statistics file path.
-        previous_day_statistics_file_path: Union[str, None] = None
-        try:
-            previous_day_statistics_file_path = get_logs_paths(
-                log_file_path=log_file_path,
-                date_pattern=date_pattern,
+        # Get the latest statistics file path.
+        latest_statistics_file_path_object = get_logs_paths(
+            log_file_path=self.log_file_path,
+            date_pattern=self.date_pattern,
             log_type='csv',
-
-        )
-
-
-
-
-
-
-
-
-
+            latest_only=True
+        )
+
+        # # If there are no logs to read, return empty lists.
+        # if not latest_statistics_file_path_object:
+        #     return [], [], self.header
+
+        latest_statistics_file_path: str = latest_statistics_file_path_object[0]['file_path']
+
+        # Get the previous day statistics file path.
+        previous_day_statistics_file_path: Union[str, None] = None
+        try:
+            previous_day_statistics_file_path = get_logs_paths(
+                log_file_path=self.log_file_path,
+                date_pattern=self.date_pattern,
+                log_type='csv',
+                previous_day_only=True
+            )[0]['file_path']
+        # If you get IndexError, it means that there are no previous day logs to read.
+        except IndexError:
+            pass
+
+        # Count all the rotated files.
+        current_log_files_count: int = len(get_logs_paths(
+            log_file_path=self.log_file_path,
+            log_type='csv'
+        ))

-
-
-
-
-
-
+        # If the count of the log files is greater than the existing logs file count, it means that the rotation
+        # happened. We will read the previous day statistics file.
+        new_lines_from_previous_file: list = []
+        if current_log_files_count > self._existing_logs_file_count:
+            current_lines, self.header = csvs.read_csv_to_list_of_dicts_by_header(
+                previous_day_statistics_file_path, header=self.header, stdout=False)

-
-
+            if self.get_previous_file:
+                previous_file_lines = current_lines

-
+            self._existing_logs_file_count = current_log_files_count

-
+            new_lines_from_previous_file = self._extract_new_lines_only(current_lines)

-
-
+            # empty the previous file lines, since the file is rotated.
+            self._reading_existing_lines.clear()

-
-
+        current_lines, self.header = csvs.read_csv_to_list_of_dicts_by_header(
+            latest_statistics_file_path, header=self.header, stdout=False)

-
+        new_lines = self._extract_new_lines_only(current_lines)

-
-
-
+        # If we have new lines from the previous file, we will add the new lines from the latest file.
+        if new_lines_from_previous_file:
+            new_lines = new_lines_from_previous_file + new_lines

-
+        return new_lines, previous_file_lines, self.header
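For reference, the new API boils down to constructing a LogReader once and polling get_latest_lines(). The loop below restates the docstring's usage example as a runnable sketch; the file path, date pattern and one-second sleep are illustrative, not prescribed by the package:

import time
from atomicshop.wrappers.loggingw import reading

header = None
log_reader = reading.LogReader(
    log_file_path='/path/to/log.csv',   # illustrative path
    log_type='csv',
    date_pattern='%Y_%m_%d',
    get_previous_file=True,
    header=header
)

while True:
    # Returns (new lines, previous day's lines on rotation, header detected so far).
    latest_lines, previous_day_24h_lines, header = log_reader.get_latest_lines(header=header)

    if latest_lines:
        print(f'{len(latest_lines)} new rows')  # do something with the new rows

    if previous_day_24h_lines:
        print(f'rotated: {len(previous_day_24h_lines)} rows from the previous file')

    time.sleep(1)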
{atomicshop-2.12.25.dist-info → atomicshop-2.12.26.dist-info}/RECORD
CHANGED

@@ -1,4 +1,4 @@
-atomicshop/__init__.py,sha256=
+atomicshop/__init__.py,sha256=Wq0bf4jFBHfU0lsKuJb7kZ5yruxZGg3_luyY2rWwPss,124
 atomicshop/_basics_temp.py,sha256=6cu2dd6r2dLrd1BRNcVDKTHlsHs_26Gpw8QS6v32lQ0,3699
 atomicshop/_create_pdf_demo.py,sha256=Yi-PGZuMg0RKvQmLqVeLIZYadqEZwUm-4A9JxBl_vYA,3713
 atomicshop/_patch_import.py,sha256=ENp55sKVJ0e6-4lBvZnpz9PQCt3Otbur7F6aXDlyje4,6334
@@ -170,7 +170,7 @@ atomicshop/wrappers/certauthw/certauthw.py,sha256=4WvhjANI7Kzqrr_nKmtA8Kf7B6rute
 atomicshop/wrappers/ctyping/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 atomicshop/wrappers/ctyping/process_winapi.py,sha256=QcXL-ETtlSSkoT8F7pYle97ubGWsjYp8cx8HxkVMgAc,2762
 atomicshop/wrappers/ctyping/etw_winapi/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-atomicshop/wrappers/ctyping/etw_winapi/const.py,sha256=
+atomicshop/wrappers/ctyping/etw_winapi/const.py,sha256=jq5nms2qu4FsuXuq3vaHjz9W4ILt9GY-C7CQ8VKcpyg,5764
 atomicshop/wrappers/ctyping/etw_winapi/etw_functions.py,sha256=3DLVXpTeOyTND35T_dKGzKnlLVQ0R3zt3AEcW2bNLNc,5304
 atomicshop/wrappers/ctyping/msi_windows_installer/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 atomicshop/wrappers/ctyping/msi_windows_installer/base.py,sha256=Uu9SlWLsQQ6mjE-ek-ptHcmgiI3Ruah9bdZus70EaVY,4884
@@ -223,7 +223,7 @@ atomicshop/wrappers/loggingw/formatters.py,sha256=mUtcJJfmhLNrwUVYShXTmdu40dBaJu
 atomicshop/wrappers/loggingw/handlers.py,sha256=2A_3Qy1B0RvVWZmQocAB6CmpqlXoKJ-yi6iBWG2jNLo,8274
 atomicshop/wrappers/loggingw/loggers.py,sha256=DHOOTAtqkwn1xgvLHSkOiBm6yFGNuQy1kvbhG-TDog8,2374
 atomicshop/wrappers/loggingw/loggingw.py,sha256=m6YySEedP3_4Ik1S_uGMxETSbmRkmMYmAZxhHBlXSlo,16616
-atomicshop/wrappers/loggingw/reading.py,sha256=
+atomicshop/wrappers/loggingw/reading.py,sha256=bsSUM9_epMO2L-lHBEULFxeqdxXOHfICt-1BtQZn7lA,16712
 atomicshop/wrappers/nodejsw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 atomicshop/wrappers/nodejsw/install_nodejs.py,sha256=QZg-R2iTQt7kFb8wNtnTmwraSGwvUs34JIasdbNa7ZU,5154
 atomicshop/wrappers/playwrightw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -260,8 +260,8 @@ atomicshop/wrappers/socketw/socket_server_tester.py,sha256=AhpurHJmP2kgzHaUbq5ey
 atomicshop/wrappers/socketw/socket_wrapper.py,sha256=aXBwlEIJhFT0-c4i8iNlFx2It9VpCEpsv--5Oqcpxao,11624
 atomicshop/wrappers/socketw/ssl_base.py,sha256=k4V3gwkbq10MvOH4btU4onLX2GNOsSfUAdcHmL1rpVE,2274
 atomicshop/wrappers/socketw/statistics_csv.py,sha256=t3dtDEfN47CfYVi0CW6Kc2QHTEeZVyYhc57IYYh5nmA,826
-atomicshop-2.12.
-atomicshop-2.12.
-atomicshop-2.12.
-atomicshop-2.12.
-atomicshop-2.12.
+atomicshop-2.12.26.dist-info/LICENSE.txt,sha256=lLU7EYycfYcK2NR_1gfnhnRC8b8ccOTElACYplgZN88,1094
+atomicshop-2.12.26.dist-info/METADATA,sha256=HYm5lvYyf77XdD_MWANy8aWX2kpFD7kD-EmOZg19yOQ,10479
+atomicshop-2.12.26.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+atomicshop-2.12.26.dist-info/top_level.txt,sha256=EgKJB-7xcrAPeqTRF2laD_Np2gNGYkJkd4OyXqpJphA,11
+atomicshop-2.12.26.dist-info/RECORD,,
{atomicshop-2.12.25.dist-info → atomicshop-2.12.26.dist-info}/LICENSE.txt
File without changes

{atomicshop-2.12.25.dist-info → atomicshop-2.12.26.dist-info}/WHEEL
File without changes

{atomicshop-2.12.25.dist-info → atomicshop-2.12.26.dist-info}/top_level.txt
File without changes