github-monitor 1.9rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- github_monitor-1.9rc1.dist-info/METADATA +477 -0
- github_monitor-1.9rc1.dist-info/RECORD +7 -0
- github_monitor-1.9rc1.dist-info/WHEEL +5 -0
- github_monitor-1.9rc1.dist-info/entry_points.txt +2 -0
- github_monitor-1.9rc1.dist-info/licenses/LICENSE +674 -0
- github_monitor-1.9rc1.dist-info/top_level.txt +1 -0
- github_monitor.py +2836 -0
github_monitor.py
ADDED
|
@@ -0,0 +1,2836 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Author: Michal Szymanski <misiektoja-github@rm-rf.ninja>
|
|
4
|
+
v1.9
|
|
5
|
+
|
|
6
|
+
OSINT tool implementing real-time tracking of Github users activities including profile and repositories changes:
|
|
7
|
+
https://github.com/misiektoja/github_monitor/
|
|
8
|
+
|
|
9
|
+
Python pip3 requirements:
|
|
10
|
+
|
|
11
|
+
PyGithub
|
|
12
|
+
requests
|
|
13
|
+
python-dateutil
|
|
14
|
+
pytz
|
|
15
|
+
tzlocal (optional)
|
|
16
|
+
python-dotenv (optional)
|
|
17
|
+
"""
|
|
18
|
+
|
|
19
|
+
VERSION = "1.9"
|
|
20
|
+
|
|
21
|
+
# ---------------------------
|
|
22
|
+
# CONFIGURATION SECTION START
|
|
23
|
+
# ---------------------------
|
|
24
|
+
|
|
25
|
+
CONFIG_BLOCK = """
|
|
26
|
+
# Get your Github personal access token (classic) by visiting:
|
|
27
|
+
# https://github.com/settings/apps
|
|
28
|
+
#
|
|
29
|
+
# Then go to: Personal access tokens -> Tokens (classic) -> Generate new token (classic)
|
|
30
|
+
#
|
|
31
|
+
# Provide the GITHUB_TOKEN secret using one of the following methods:
|
|
32
|
+
# - Pass it at runtime with -t / --github-token
|
|
33
|
+
# - Set it as an environment variable (e.g. export GITHUB_TOKEN=...)
|
|
34
|
+
# - Add it to ".env" file (GITHUB_TOKEN=...) for persistent use
|
|
35
|
+
# Fallback:
|
|
36
|
+
# - Hard-code it in the code or config file
|
|
37
|
+
GITHUB_TOKEN = "your_github_classic_personal_access_token"
|
|
38
|
+
|
|
39
|
+
# The URL of the Github API
|
|
40
|
+
#
|
|
41
|
+
# For Public Web Github use the default: https://api.github.com
|
|
42
|
+
# For Github Enterprise change to: https://{your_hostname}/api/v3
|
|
43
|
+
#
|
|
44
|
+
# Can also be set using the -x flag
|
|
45
|
+
GITHUB_API_URL = "https://api.github.com"
|
|
46
|
+
|
|
47
|
+
# SMTP settings for sending email notifications
|
|
48
|
+
# If left as-is, no notifications will be sent
|
|
49
|
+
#
|
|
50
|
+
# Provide the SMTP_PASSWORD secret using one of the following methods:
|
|
51
|
+
# - Set it as an environment variable (e.g. export SMTP_PASSWORD=...)
|
|
52
|
+
# - Add it to ".env" file (SMTP_PASSWORD=...) for persistent use
|
|
53
|
+
# Fallback:
|
|
54
|
+
# - Hard-code it in the code or config file
|
|
55
|
+
SMTP_HOST = "your_smtp_server_ssl"
|
|
56
|
+
SMTP_PORT = 587
|
|
57
|
+
SMTP_USER = "your_smtp_user"
|
|
58
|
+
SMTP_PASSWORD = "your_smtp_password"
|
|
59
|
+
SMTP_SSL = True
|
|
60
|
+
SENDER_EMAIL = "your_sender_email"
|
|
61
|
+
RECEIVER_EMAIL = "your_receiver_email"
|
|
62
|
+
|
|
63
|
+
# Whether to send an email when user's profile changes
|
|
64
|
+
# Can also be enabled via the -p flag
|
|
65
|
+
PROFILE_NOTIFICATION = False
|
|
66
|
+
|
|
67
|
+
# Whether to send an email when new GitHub events appear
|
|
68
|
+
# Can also be enabled via the -s flag
|
|
69
|
+
EVENT_NOTIFICATION = False
|
|
70
|
+
|
|
71
|
+
# Whether to send an email when user's repositories change (stargazers, watchers, forks, issues,
|
|
72
|
+
# PRs, description etc., except for update date)
|
|
73
|
+
# Can also be enabled via the -q flag
|
|
74
|
+
REPO_NOTIFICATION = False
|
|
75
|
+
|
|
76
|
+
# Whether to send an email when user's repositories update date changes
|
|
77
|
+
# Can also be enabled via the -u flag
|
|
78
|
+
REPO_UPDATE_DATE_NOTIFICATION = False
|
|
79
|
+
|
|
80
|
+
# Whether to send an email on errors
|
|
81
|
+
# Can also be disabled via the -e flag
|
|
82
|
+
ERROR_NOTIFICATION = True
|
|
83
|
+
|
|
84
|
+
# How often to check for user profile changes / activities; in seconds
|
|
85
|
+
# Can also be set using the -c flag
|
|
86
|
+
GITHUB_CHECK_INTERVAL = 1800 # 30 mins
|
|
87
|
+
|
|
88
|
+
# Set your local time zone so that Github API timestamps are converted accordingly (e.g. 'Europe/Warsaw')
|
|
89
|
+
# Use this command to list all time zones supported by pytz:
|
|
90
|
+
# python3 -c "import pytz; print('\\n'.join(pytz.all_timezones))"
|
|
91
|
+
# If set to 'Auto', the tool will try to detect your local time zone automatically (requires tzlocal)
|
|
92
|
+
LOCAL_TIMEZONE = 'Auto'
|
|
93
|
+
|
|
94
|
+
# Events to monitor
|
|
95
|
+
# Use 'ALL' to monitor all available event types
|
|
96
|
+
EVENTS_TO_MONITOR = [
|
|
97
|
+
'ALL',
|
|
98
|
+
'PushEvent',
|
|
99
|
+
'PullRequestEvent',
|
|
100
|
+
'PullRequestReviewEvent',
|
|
101
|
+
'PullRequestReviewCommentEvent',
|
|
102
|
+
'IssueCommentEvent',
|
|
103
|
+
'IssuesEvent',
|
|
104
|
+
'CommitCommentEvent',
|
|
105
|
+
'CreateEvent',
|
|
106
|
+
'DeleteEvent',
|
|
107
|
+
'ForkEvent',
|
|
108
|
+
'PublicEvent',
|
|
109
|
+
'GollumEvent',
|
|
110
|
+
'MemberEvent',
|
|
111
|
+
'WatchEvent',
|
|
112
|
+
'ReleaseEvent',
|
|
113
|
+
'DeploymentEvent',
|
|
114
|
+
'CheckRunEvent',
|
|
115
|
+
'WorkflowRunEvent',
|
|
116
|
+
]
|
|
117
|
+
|
|
118
|
+
# Number of recent events to fetch when a change in the last event ID is detected
|
|
119
|
+
# Note: if more than EVENTS_NUMBER events occur between two checks,
|
|
120
|
+
# any events older than the most recent EVENTS_NUMBER will be missed
|
|
121
|
+
EVENTS_NUMBER = 30 # 1 page
|
|
122
|
+
|
|
123
|
+
# How often to print a "liveness check" message to the output; in seconds
|
|
124
|
+
# Set to 0 to disable
|
|
125
|
+
LIVENESS_CHECK_INTERVAL = 43200 # 12 hours
|
|
126
|
+
|
|
127
|
+
# URL used to verify internet connectivity at startup
|
|
128
|
+
CHECK_INTERNET_URL = GITHUB_API_URL
|
|
129
|
+
|
|
130
|
+
# Timeout used when checking initial internet connectivity; in seconds
|
|
131
|
+
CHECK_INTERNET_TIMEOUT = 5
|
|
132
|
+
|
|
133
|
+
# CSV file to write new events & profile changes
|
|
134
|
+
# Can also be set using the -b flag
|
|
135
|
+
CSV_FILE = ""
|
|
136
|
+
|
|
137
|
+
# Location of the optional dotenv file which can keep secrets
|
|
138
|
+
# If not specified it will try to auto-search for .env files
|
|
139
|
+
# To disable auto-search, set this to the literal string "none"
|
|
140
|
+
# Can also be set using the --env-file flag
|
|
141
|
+
DOTENV_FILE = ""
|
|
142
|
+
|
|
143
|
+
# Base name for the log file. Output will be saved to github_monitor_<username>.log
|
|
144
|
+
# Can include a directory path to specify the location, e.g. ~/some_dir/github_monitor
|
|
145
|
+
GITHUB_LOGFILE = "github_monitor"
|
|
146
|
+
|
|
147
|
+
# Whether to disable logging to github_monitor_<username>.log
|
|
148
|
+
# Can also be disabled via the -d flag
|
|
149
|
+
DISABLE_LOGGING = False
|
|
150
|
+
|
|
151
|
+
# Width of main horizontal line (─)
|
|
152
|
+
HORIZONTAL_LINE1 = 105
|
|
153
|
+
|
|
154
|
+
# Width of horizontal line for repositories list output (─)
|
|
155
|
+
HORIZONTAL_LINE2 = 80
|
|
156
|
+
|
|
157
|
+
# Whether to clear the terminal screen after starting the tool
|
|
158
|
+
CLEAR_SCREEN = True
|
|
159
|
+
|
|
160
|
+
# Maximum number of times to retry a failed GitHub API/network call
|
|
161
|
+
NET_MAX_RETRIES = 5
|
|
162
|
+
|
|
163
|
+
# Base number of seconds to wait before each retry, multiplied by the attempt count
|
|
164
|
+
NET_BASE_BACKOFF_SEC = 5
|
|
165
|
+
|
|
166
|
+
# Value used by signal handlers increasing/decreasing profile/user activity check (GITHUB_CHECK_INTERVAL); in seconds
|
|
167
|
+
GITHUB_CHECK_SIGNAL_VALUE = 60 # 1 minute
|
|
168
|
+
"""
|
|
169
|
+
|
|
170
|
+
# -------------------------
|
|
171
|
+
# CONFIGURATION SECTION END
|
|
172
|
+
# -------------------------
|
|
173
|
+
|
|
174
|
+
# Default dummy values so linters shut up
# Do not change values below - modify them in the configuration section or config file instead
GITHUB_TOKEN = ""
GITHUB_API_URL = ""
SMTP_HOST = ""
SMTP_PORT = 0
SMTP_USER = ""
SMTP_PASSWORD = ""
SMTP_SSL = False
SENDER_EMAIL = ""
RECEIVER_EMAIL = ""
PROFILE_NOTIFICATION = False
EVENT_NOTIFICATION = False
REPO_NOTIFICATION = False
REPO_UPDATE_DATE_NOTIFICATION = False
ERROR_NOTIFICATION = False
GITHUB_CHECK_INTERVAL = 0
LOCAL_TIMEZONE = ""
EVENTS_TO_MONITOR = []
EVENTS_NUMBER = 0
LIVENESS_CHECK_INTERVAL = 0
CHECK_INTERNET_URL = ""
CHECK_INTERNET_TIMEOUT = 0
CSV_FILE = ""
DOTENV_FILE = ""
GITHUB_LOGFILE = ""
DISABLE_LOGGING = False
HORIZONTAL_LINE1 = 0
HORIZONTAL_LINE2 = 0
CLEAR_SCREEN = False
NET_MAX_RETRIES = 0
NET_BASE_BACKOFF_SEC = 0
GITHUB_CHECK_SIGNAL_VALUE = 0

# Overwrite the dummy defaults above with the real values from CONFIG_BLOCK
# NOTE(review): exec() of an in-file trusted string; must never be fed external input
exec(CONFIG_BLOCK, globals())

# Default name for the optional config file
DEFAULT_CONFIG_FILENAME = "github_monitor.conf"

# List of secret keys to load from env/config
SECRET_KEYS = ("GITHUB_TOKEN", "SMTP_PASSWORD")

# Number of check cycles between liveness messages (both values come from CONFIG_BLOCK)
LIVENESS_CHECK_COUNTER = LIVENESS_CHECK_INTERVAL / GITHUB_CHECK_INTERVAL

# Original stdout object, saved so the Ctrl+C handler can restore it
stdout_bck = None
# Column headers used by the CSV output file
csvfieldnames = ['Date', 'Type', 'Name', 'Old', 'New']

# Runtime flags toggled from the command line
TRACK_REPOS_CHANGES = False
DO_NOT_MONITOR_GITHUB_EVENTS = False

# Config file path given on the command line (if any)
CLI_CONFIG_PATH = None

# to solve the issue: 'SyntaxError: f-string expression part cannot include a backslash'
nl_ch = "\n"
|
|
228
|
+
|
|
229
|
+
|
|
230
|
+
import sys
|
|
231
|
+
|
|
232
|
+
if sys.version_info < (3, 10):
|
|
233
|
+
print("* Error: Python version 3.10 or higher required !")
|
|
234
|
+
sys.exit(1)
|
|
235
|
+
|
|
236
|
+
import time
|
|
237
|
+
import string
|
|
238
|
+
import os
|
|
239
|
+
from datetime import datetime, timezone
|
|
240
|
+
from dateutil import relativedelta
|
|
241
|
+
from dateutil.parser import isoparse
|
|
242
|
+
import calendar
|
|
243
|
+
import requests as req
|
|
244
|
+
import signal
|
|
245
|
+
import smtplib
|
|
246
|
+
import ssl
|
|
247
|
+
from email.header import Header
|
|
248
|
+
from email.mime.multipart import MIMEMultipart
|
|
249
|
+
from email.mime.text import MIMEText
|
|
250
|
+
import argparse
|
|
251
|
+
import csv
|
|
252
|
+
try:
|
|
253
|
+
import pytz
|
|
254
|
+
except ModuleNotFoundError:
|
|
255
|
+
raise SystemExit("Error: Couldn't find the pytz library !\n\nTo install it, run:\n pip3 install pytz\n\nOnce installed, re-run this tool")
|
|
256
|
+
try:
|
|
257
|
+
from tzlocal import get_localzone
|
|
258
|
+
except ImportError:
|
|
259
|
+
get_localzone = None
|
|
260
|
+
import platform
|
|
261
|
+
import re
|
|
262
|
+
import ipaddress
|
|
263
|
+
try:
|
|
264
|
+
from github import Github, Auth, GithubException, UnknownObjectException
|
|
265
|
+
from github.GithubException import BadCredentialsException
|
|
266
|
+
except ModuleNotFoundError:
|
|
267
|
+
raise SystemExit("Error: Couldn't find the PyGitHub library !\n\nTo install it, run:\n pip3 install PyGithub\n\nOnce installed, re-run this tool. For more help, visit:\nhttps://github.com/PyGithub/PyGithub")
|
|
268
|
+
from itertools import islice
|
|
269
|
+
import textwrap
|
|
270
|
+
import urllib3
|
|
271
|
+
import socket
|
|
272
|
+
from typing import Any, Callable
|
|
273
|
+
import shutil
|
|
274
|
+
from pathlib import Path
|
|
275
|
+
|
|
276
|
+
|
|
277
|
+
# Exception types treated as transient network failures; callers catch this
# tuple when wrapping GitHub API / HTTP calls with retry logic
NET_ERRORS = (
    req.exceptions.RequestException,
    urllib3.exceptions.HTTPError,
    socket.gaierror,
    GithubException,
)
|
|
283
|
+
|
|
284
|
+
|
|
285
|
+
# Logger class to output messages to stdout and log file
class Logger(object):
    """Tee-like stream: everything written is mirrored to the real stdout
    and appended to a log file, both flushed immediately."""

    def __init__(self, filename):
        # Remember the real stdout and open the log in line-buffered append
        # mode so entries survive abrupt termination
        self.terminal = sys.stdout
        self.logfile = open(filename, "a", buffering=1, encoding="utf-8")

    def write(self, message):
        # Duplicate the message to both destinations, then flush both
        for stream in (self.terminal, self.logfile):
            stream.write(message)
        for stream in (self.terminal, self.logfile):
            stream.flush()

    def flush(self):
        # Writes are flushed eagerly in write(); nothing left to do here
        pass
|
|
299
|
+
|
|
300
|
+
|
|
301
|
+
# Signal handler when user presses Ctrl+C
def signal_handler(sig, frame):
    # Restore the original stdout (it may have been swapped for a Logger)
    # so the farewell message reaches the real console, then exit cleanly
    sys.stdout = stdout_bck
    print('\n* You pressed Ctrl+C, tool is terminated.')
    sys.exit(0)
|
|
306
|
+
|
|
307
|
+
|
|
308
|
+
# Checks internet connectivity
def check_internet(url=CHECK_INTERNET_URL, timeout=CHECK_INTERNET_TIMEOUT):
    """Return True when an HTTP GET to *url* succeeds within *timeout* seconds,
    otherwise print a diagnostic and return False."""
    try:
        req.get(url, timeout=timeout)
    except req.RequestException as e:
        print(f"* No connectivity, please check your network:\n\n{e}")
        return False
    return True
|
|
316
|
+
|
|
317
|
+
|
|
318
|
+
# Clears the terminal screen
def clear_screen(enabled=True):
    """Clear the terminal via the platform's shell command; no-op when disabled."""
    if not enabled:
        return
    try:
        command = 'cls' if platform.system() == 'Windows' else 'clear'
        os.system(command)
    except Exception:
        print("* Cannot clear the screen contents")
|
|
329
|
+
|
|
330
|
+
|
|
331
|
+
# Converts absolute value of seconds to human readable format
def display_time(seconds, granularity=2):
    """Convert a duration in seconds to a human-readable string.

    The absolute value of *seconds* is used, so negative spans format the
    same as positive ones (fix: previously any negative input collapsed to
    '0 seconds' despite the documented "absolute value" contract). At most
    *granularity* units are shown, largest first; a zero duration yields
    '0 seconds'.
    """
    intervals = (
        ('years', 31556952),  # approximation
        ('months', 2629746),  # approximation
        ('weeks', 604800),    # 60 * 60 * 24 * 7
        ('days', 86400),      # 60 * 60 * 24
        ('hours', 3600),      # 60 * 60
        ('minutes', 60),
        ('seconds', 1),
    )

    seconds = abs(seconds)  # honor the documented absolute-value behavior
    if seconds <= 0:
        return '0 seconds'

    result = []
    for name, count in intervals:
        value = seconds // count
        if value:
            seconds -= value * count
            if value == 1:
                # Singular form, e.g. "1 hour" instead of "1 hours"
                name = name.rstrip('s')
            result.append(f"{value} {name}")
    return ', '.join(result[:granularity])
|
|
355
|
+
|
|
356
|
+
|
|
357
|
+
# Calculates time span between two timestamps, accepts timestamp integers, floats and datetime objects
def calculate_timespan(timestamp1, timestamp2, show_weeks=True, show_hours=True, show_minutes=True, show_seconds=True, granularity=3):
    """Return a human-readable span between two moments in time.

    Either argument may be an ISO-8601 string, an epoch int/float or a
    datetime (naive datetimes are assumed UTC). At most *granularity* units
    are included, largest first. A show_* flag suppresses its unit unless the
    whole span is small enough that the unit is the only detail left (e.g.
    seconds are always shown for spans <= 60 s). Returns "" for unsupported
    types or unparsable strings, '0 seconds' when the moments are equal.
    """
    result = []
    intervals = ['years', 'months', 'weeks', 'days', 'hours', 'minutes', 'seconds']
    ts1 = timestamp1
    ts2 = timestamp2

    # Parse ISO-8601 strings up-front; parse failure yields an empty string
    if isinstance(timestamp1, str):
        try:
            timestamp1 = isoparse(timestamp1)
        except Exception:
            return ""

    # Normalize timestamp1 into an aware UTC datetime (dt1) plus epoch seconds (ts1)
    if isinstance(timestamp1, int):
        dt1 = datetime.fromtimestamp(int(ts1), tz=timezone.utc)
    elif isinstance(timestamp1, float):
        ts1 = int(round(ts1))
        dt1 = datetime.fromtimestamp(ts1, tz=timezone.utc)
    elif isinstance(timestamp1, datetime):
        dt1 = timestamp1
        if dt1.tzinfo is None:
            # Naive datetimes are interpreted as UTC
            dt1 = pytz.utc.localize(dt1)
        else:
            dt1 = dt1.astimezone(pytz.utc)
        ts1 = int(round(dt1.timestamp()))
    else:
        return ""

    if isinstance(timestamp2, str):
        try:
            timestamp2 = isoparse(timestamp2)
        except Exception:
            return ""

    # Same normalization for timestamp2
    if isinstance(timestamp2, int):
        dt2 = datetime.fromtimestamp(int(ts2), tz=timezone.utc)
    elif isinstance(timestamp2, float):
        ts2 = int(round(ts2))
        dt2 = datetime.fromtimestamp(ts2, tz=timezone.utc)
    elif isinstance(timestamp2, datetime):
        dt2 = timestamp2
        if dt2.tzinfo is None:
            dt2 = pytz.utc.localize(dt2)
        else:
            dt2 = dt2.astimezone(pytz.utc)
        ts2 = int(round(dt2.timestamp()))
    else:
        return ""

    # Order the pair so dt1 always holds the later moment
    if ts1 >= ts2:
        ts_diff = ts1 - ts2
    else:
        ts_diff = ts2 - ts1
        dt1, dt2 = dt2, dt1

    if ts_diff > 0:
        # relativedelta gives calendar-accurate years/months/days
        date_diff = relativedelta.relativedelta(dt1, dt2)
        years = date_diff.years
        months = date_diff.months
        days_total = date_diff.days

        if show_weeks:
            weeks = days_total // 7
            days = days_total % 7
        else:
            weeks = 0
            days = days_total

        # A suppressed unit is still shown when the whole span fits inside it,
        # otherwise the output would be empty for short spans
        hours = date_diff.hours if show_hours or ts_diff <= 86400 else 0
        minutes = date_diff.minutes if show_minutes or ts_diff <= 3600 else 0
        seconds = date_diff.seconds if show_seconds or ts_diff <= 60 else 0

        date_list = [years, months, weeks, days, hours, minutes, seconds]

        for index, interval in enumerate(date_list):
            if interval > 0:
                name = intervals[index]
                if interval == 1:
                    # Singular form, e.g. "1 hour"
                    name = name.rstrip('s')
                result.append(f"{interval} {name}")

        return ', '.join(result[:granularity])
    else:
        return '0 seconds'
|
|
441
|
+
|
|
442
|
+
|
|
443
|
+
# Sends email notification
def send_email(subject, body, body_html, use_ssl, smtp_timeout=15):
    """Validate the SMTP configuration and send a plain-text and/or HTML email.

    Returns 0 on success, 1 on any validation failure or send error.
    Fixes: the original built each MIME part twice (the first build was dead
    code, immediately overwritten by the utf-8 variant) and constructed the
    identical smtplib.SMTP object in both branches of the use_ssl check.
    """
    fqdn_re = re.compile(r'(?=^.{4,253}$)(^((?!-)[a-zA-Z0-9-]{1,63}(?<!-)\.)+[a-zA-Z]{2,63}\.?$)')
    email_re = re.compile(r'[^@]+@[^@]+\.[^@]+')

    # SMTP_HOST must be either a literal IP address or a valid FQDN
    try:
        ipaddress.ip_address(str(SMTP_HOST))
    except ValueError:
        if not fqdn_re.search(str(SMTP_HOST)):
            print("Error sending email - SMTP settings are incorrect (invalid IP address/FQDN in SMTP_HOST)")
            return 1

    try:
        port = int(SMTP_PORT)
        if not (1 <= port <= 65535):
            raise ValueError
    except ValueError:
        print("Error sending email - SMTP settings are incorrect (invalid port number in SMTP_PORT)")
        return 1

    if not email_re.search(str(SENDER_EMAIL)) or not email_re.search(str(RECEIVER_EMAIL)):
        print("Error sending email - SMTP settings are incorrect (invalid email in SENDER_EMAIL or RECEIVER_EMAIL)")
        return 1

    # Reject unset or placeholder credentials
    if not SMTP_USER or not isinstance(SMTP_USER, str) or SMTP_USER == "your_smtp_user" or not SMTP_PASSWORD or not isinstance(SMTP_PASSWORD, str) or SMTP_PASSWORD == "your_smtp_password":
        print("Error sending email - SMTP settings are incorrect (check SMTP_USER & SMTP_PASSWORD variables)")
        return 1

    if not subject or not isinstance(subject, str):
        print("Error sending email - SMTP settings are incorrect (subject is not a string or is empty)")
        return 1

    if not body and not body_html:
        print("Error sending email - SMTP settings are incorrect (body and body_html cannot be empty at the same time)")
        return 1

    try:
        # Single SMTP connection; STARTTLS upgrade only when requested
        smtpObj = smtplib.SMTP(SMTP_HOST, SMTP_PORT, timeout=smtp_timeout)
        if use_ssl:
            smtpObj.starttls(context=ssl.create_default_context())
        smtpObj.login(SMTP_USER, SMTP_PASSWORD)

        email_msg = MIMEMultipart('alternative')
        email_msg["From"] = SENDER_EMAIL
        email_msg["To"] = RECEIVER_EMAIL
        email_msg["Subject"] = str(Header(subject, 'utf-8'))

        # Attach plain part first so HTML (when present) is preferred by clients
        if body:
            email_msg.attach(MIMEText(body.encode('utf-8'), 'plain', _charset='utf-8'))
        if body_html:
            email_msg.attach(MIMEText(body_html.encode('utf-8'), 'html', _charset='utf-8'))

        smtpObj.sendmail(SENDER_EMAIL, RECEIVER_EMAIL, email_msg.as_string())
        smtpObj.quit()
    except Exception as e:
        print(f"Error sending email: {e}")
        return 1
    return 0
|
|
508
|
+
|
|
509
|
+
|
|
510
|
+
# Initializes the CSV file
def init_csv_file(csv_file_name):
    """Create the CSV file with a header row when it is missing or empty.

    Raises RuntimeError (wrapping the original error) on any I/O failure.
    """
    try:
        needs_header = not os.path.isfile(csv_file_name) or os.path.getsize(csv_file_name) == 0
        if needs_header:
            with open(csv_file_name, 'a', newline='', buffering=1, encoding="utf-8") as f:
                csv.DictWriter(f, fieldnames=csvfieldnames, quoting=csv.QUOTE_NONNUMERIC).writeheader()
    except Exception as e:
        raise RuntimeError(f"Could not initialize CSV file '{csv_file_name}': {e}")
|
|
519
|
+
|
|
520
|
+
|
|
521
|
+
# Writes CSV entry
def write_csv_entry(csv_file_name, timestamp, object_type, object_name, old, new):
    """Append one change-record row to the CSV file.

    Raises RuntimeError (wrapping the original error) on any I/O failure.
    """
    row = {'Date': timestamp, 'Type': object_type, 'Name': object_name, 'Old': old, 'New': new}
    try:
        with open(csv_file_name, 'a', newline='', buffering=1, encoding="utf-8") as csv_file:
            csv.DictWriter(csv_file, fieldnames=csvfieldnames, quoting=csv.QUOTE_NONNUMERIC).writerow(row)
    except Exception as e:
        raise RuntimeError(f"Failed to write to CSV file '{csv_file_name}': {e}")
|
|
531
|
+
|
|
532
|
+
|
|
533
|
+
# Converts a datetime to local timezone and removes timezone info (naive)
def convert_to_local_naive(dt: datetime | None = None):
    """Convert *dt* to LOCAL_TIMEZONE and strip tzinfo; None maps to None.

    Naive inputs are assumed to be UTC.
    """
    local_tz = pytz.timezone(LOCAL_TIMEZONE)
    if dt is None:
        return None
    if dt.tzinfo is None:
        # Naive datetimes are interpreted as UTC
        dt = pytz.utc.localize(dt)
    return dt.astimezone(local_tz).replace(tzinfo=None)
|
|
546
|
+
|
|
547
|
+
|
|
548
|
+
# Returns current local time without timezone info (naive)
def now_local_naive():
    """Current time in LOCAL_TIMEZONE, naive and truncated to whole seconds."""
    local_tz = pytz.timezone(LOCAL_TIMEZONE)
    return datetime.now(local_tz).replace(microsecond=0, tzinfo=None)
|
|
551
|
+
|
|
552
|
+
|
|
553
|
+
# Returns the current date/time in human readable format; eg. Sun 21 Apr 2024, 15:08:45
def get_cur_ts(ts_str=""):
    """Return *ts_str* followed by the current local time, e.g. 'Sun, 21 Apr 2024, 15:08:45'.

    Fix: the original called now_local_naive() twice, so the weekday and the
    formatted time could come from different seconds (visible at a midnight
    rollover); the moment is now captured once.
    """
    now = now_local_naive()
    return f'{ts_str}{calendar.day_abbr[now.weekday()]}, {now.strftime("%d %b %Y, %H:%M:%S")}'
|
|
556
|
+
|
|
557
|
+
|
|
558
|
+
# Prints the current date/time in human readable format with separator; eg. Sun 21 Apr 2024, 15:08:45
def print_cur_ts(ts_str=""):
    """Print the current timestamp line followed by a double horizontal rule."""
    print(get_cur_ts(str(ts_str)))
    rule = '─' * HORIZONTAL_LINE1
    print(f"{rule}\n{rule}")
|
|
562
|
+
|
|
563
|
+
|
|
564
|
+
# Returns the timestamp/datetime object in human readable format (long version); eg. Sun 21 Apr 2024, 15:08:45
def get_date_from_ts(ts):
    """Format *ts* as a long local date string in LOCAL_TIMEZONE.

    Accepts ISO-8601 strings, epoch ints/floats or datetimes (naive datetimes
    are assumed UTC). Returns "" for unsupported types or unparsable strings.
    """
    tz = pytz.timezone(LOCAL_TIMEZONE)

    # ISO strings are parsed first, then fall through the datetime branch
    if isinstance(ts, str):
        try:
            ts = isoparse(ts)
        except Exception:
            return ""

    if isinstance(ts, datetime):
        if ts.tzinfo is None:
            # Naive datetimes are interpreted as UTC
            ts = pytz.utc.localize(ts)
        ts_new = ts.astimezone(tz)

    elif isinstance(ts, int):
        ts_new = datetime.fromtimestamp(ts, tz)

    elif isinstance(ts, float):
        ts_rounded = int(round(ts))
        ts_new = datetime.fromtimestamp(ts_rounded, tz)

    else:
        return ""

    return (f'{calendar.day_abbr[ts_new.weekday()]} {ts_new.strftime("%d %b %Y, %H:%M:%S")}')
|
|
590
|
+
|
|
591
|
+
|
|
592
|
+
# Returns the timestamp/datetime object in human readable format (short version); eg.
# Sun 21 Apr 15:08
# Sun 21 Apr 24, 15:08 (if show_year == True and current year is different)
# Sun 21 Apr 25, 15:08 (if always_show_year == True and current year can be the same)
# Sun 21 Apr (if show_hour == False)
# Sun 21 Apr 15:08:32 (if show_seconds == True)
# 21 Apr 15:08 (if show_weekday == False)
def get_short_date_from_ts(ts, show_year=False, show_hour=True, show_weekday=True, show_seconds=False, always_show_year=False):
    """Format *ts* as a short local date string; see the examples above.

    Accepts ISO-8601 strings, epoch ints/floats or datetimes (naive datetimes
    are assumed UTC). Returns "" for unsupported types or unparsable strings.
    """
    tz = pytz.timezone(LOCAL_TIMEZONE)
    if always_show_year:
        show_year = True

    if isinstance(ts, str):
        try:
            ts = isoparse(ts)
        except Exception:
            return ""

    if isinstance(ts, datetime):
        if ts.tzinfo is None:
            # Naive datetimes are interpreted as UTC
            ts = pytz.utc.localize(ts)
        ts_new = ts.astimezone(tz)

    elif isinstance(ts, int):
        ts_new = datetime.fromtimestamp(ts, tz)

    elif isinstance(ts, float):
        ts_rounded = int(round(ts))
        ts_new = datetime.fromtimestamp(ts_rounded, tz)

    else:
        return ""

    # Build the optional hour suffix
    if show_hour:
        hour_strftime = " %H:%M:%S" if show_seconds else " %H:%M"
    else:
        hour_strftime = ""

    weekday_str = f"{calendar.day_abbr[ts_new.weekday()]} " if show_weekday else ""

    # Year is included when forced, or requested and different from the current year
    if (show_year and ts_new.year != datetime.now(tz).year) or always_show_year:
        # A comma separates the 2-digit year from the hour part (when present)
        hour_prefix = "," if show_hour else ""
        return f'{weekday_str}{ts_new.strftime(f"%d %b %y{hour_prefix}{hour_strftime}")}'
    else:
        return f'{weekday_str}{ts_new.strftime(f"%d %b{hour_strftime}")}'
|
|
637
|
+
|
|
638
|
+
|
|
639
|
+
# Returns the timestamp/datetime object in human readable format (only hour, minutes and optionally seconds): eg. 15:08:12
def get_hour_min_from_ts(ts, show_seconds=False):
    """Format the time-of-day of *ts* in LOCAL_TIMEZONE as HH:MM or HH:MM:SS.

    Accepts ISO-8601 strings, epoch ints/floats or datetimes (naive datetimes
    are assumed UTC). Returns "" for unsupported types or unparsable strings.
    """
    tz = pytz.timezone(LOCAL_TIMEZONE)

    if isinstance(ts, str):
        try:
            ts = isoparse(ts)
        except Exception:
            return ""

    if isinstance(ts, datetime):
        if ts.tzinfo is None:
            # Naive datetimes are interpreted as UTC
            ts = pytz.utc.localize(ts)
        ts_new = ts.astimezone(tz)

    elif isinstance(ts, int):
        ts_new = datetime.fromtimestamp(ts, tz)

    elif isinstance(ts, float):
        ts_rounded = int(round(ts))
        ts_new = datetime.fromtimestamp(ts_rounded, tz)

    else:
        return ""

    out_strf = "%H:%M:%S" if show_seconds else "%H:%M"
    return ts_new.strftime(out_strf)
|
|
666
|
+
|
|
667
|
+
|
|
668
|
+
# Returns the range between two timestamps/datetime objects; eg. Sun 21 Apr 14:09 - 14:15
def get_range_of_dates_from_tss(ts1, ts2, between_sep=" - ", short=False):
    """Format the span between *ts1* and *ts2* as a date-range string.

    When both fall on the same local calendar day only the hour is shown
    after the separator; otherwise two full dates are printed. *short*
    selects the short date format. Returns "" for unsupported input types.
    """
    tz = pytz.timezone(LOCAL_TIMEZONE)

    # Normalize ts1 to epoch seconds
    if isinstance(ts1, datetime):
        ts1_new = int(round(ts1.timestamp()))
    elif isinstance(ts1, int):
        ts1_new = ts1
    elif isinstance(ts1, float):
        ts1_new = int(round(ts1))
    else:
        return ""

    # Normalize ts2 to epoch seconds
    if isinstance(ts2, datetime):
        ts2_new = int(round(ts2.timestamp()))
    elif isinstance(ts2, int):
        ts2_new = ts2
    elif isinstance(ts2, float):
        ts2_new = int(round(ts2))
    else:
        return ""

    # Compare the two local calendar days to pick the output shape
    ts1_strf = datetime.fromtimestamp(ts1_new, tz).strftime("%Y%m%d")
    ts2_strf = datetime.fromtimestamp(ts2_new, tz).strftime("%Y%m%d")

    if ts1_strf == ts2_strf:
        # Same day: repeat only the hour after the separator
        if short:
            out_str = f"{get_short_date_from_ts(ts1_new)}{between_sep}{get_hour_min_from_ts(ts2_new)}"
        else:
            out_str = f"{get_date_from_ts(ts1_new)}{between_sep}{get_hour_min_from_ts(ts2_new, show_seconds=True)}"
    else:
        # Different days: two full dates
        if short:
            out_str = f"{get_short_date_from_ts(ts1_new)}{between_sep}{get_short_date_from_ts(ts2_new)}"
        else:
            out_str = f"{get_date_from_ts(ts1_new)}{between_sep}{get_date_from_ts(ts2_new)}"

    return str(out_str)
|
|
705
|
+
|
|
706
|
+
|
|
707
|
+
# Checks if the timezone name is correct
def is_valid_timezone(tz_name):
    """Return True when *tz_name* is a timezone name known to pytz."""
    return tz_name in pytz.all_timezones
|
|
710
|
+
|
|
711
|
+
|
|
712
|
+
# Prints and returns the printed text with new line
def print_v(text=""):
    """Print *text* and return it with a trailing newline appended."""
    output = f"{text}\n"
    print(text)
    return output
|
|
716
|
+
|
|
717
|
+
|
|
718
|
+
# Signal handler for SIGUSR1 allowing to switch email notifications for user's profile changes
def toggle_profile_changes_notifications_signal_handler(sig, frame):
    """Flip PROFILE_NOTIFICATION and report the new state with a timestamp."""
    global PROFILE_NOTIFICATION
    PROFILE_NOTIFICATION = not PROFILE_NOTIFICATION
    print(f"* Signal {signal.Signals(sig).name} received")
    print(f"* Email notifications: [profile changes = {PROFILE_NOTIFICATION}]")
    print_cur_ts("Timestamp:\t\t\t")
|
|
726
|
+
|
|
727
|
+
|
|
728
|
+
# Signal handler for SIGUSR2 allowing to switch email notifications for user's new events
def toggle_new_events_notifications_signal_handler(sig, frame):
    """Flip EVENT_NOTIFICATION and report the new state with a timestamp."""
    global EVENT_NOTIFICATION
    EVENT_NOTIFICATION = not EVENT_NOTIFICATION
    print(f"* Signal {signal.Signals(sig).name} received")
    print(f"* Email notifications: [new events = {EVENT_NOTIFICATION}]")
    print_cur_ts("Timestamp:\t\t\t")
|
|
736
|
+
|
|
737
|
+
|
|
738
|
+
# Signal handler for SIGCONT allowing to switch email notifications for user's repositories changes (except for update date)
def toggle_repo_changes_notifications_signal_handler(sig, frame):
    """Flip REPO_NOTIFICATION and report the new state with a timestamp."""
    global REPO_NOTIFICATION
    REPO_NOTIFICATION = not REPO_NOTIFICATION
    print(f"* Signal {signal.Signals(sig).name} received")
    print(f"* Email notifications: [repos changes = {REPO_NOTIFICATION}]")
    print_cur_ts("Timestamp:\t\t\t")
|
|
746
|
+
|
|
747
|
+
|
|
748
|
+
# Signal handler for SIGPIPE allowing to switch email notifications for user's repositories update date changes
def toggle_repo_update_date_changes_notifications_signal_handler(sig, frame):
    """Flip REPO_UPDATE_DATE_NOTIFICATION and report the new state with a timestamp."""
    global REPO_UPDATE_DATE_NOTIFICATION
    REPO_UPDATE_DATE_NOTIFICATION = not REPO_UPDATE_DATE_NOTIFICATION
    print(f"* Signal {signal.Signals(sig).name} received")
    print(f"* Email notifications: [repos update date = {REPO_UPDATE_DATE_NOTIFICATION}]")
    print_cur_ts("Timestamp:\t\t\t")
|
|
756
|
+
|
|
757
|
+
|
|
758
|
+
# Signal handler for SIGTRAP allowing to increase check timer by GITHUB_CHECK_SIGNAL_VALUE seconds
|
|
759
|
+
def increase_check_signal_handler(sig, frame):
    """Signal handler (SIGTRAP): lengthen the polling interval by GITHUB_CHECK_SIGNAL_VALUE seconds."""
    global GITHUB_CHECK_INTERVAL
    GITHUB_CHECK_INTERVAL += GITHUB_CHECK_SIGNAL_VALUE
    print(f"* Signal {signal.Signals(sig).name} received")
    print(f"* Github timers: [check interval: {display_time(GITHUB_CHECK_INTERVAL)}]")
    print_cur_ts("Timestamp:\t\t\t")
|
|
766
|
+
|
|
767
|
+
|
|
768
|
+
# Signal handler for SIGABRT allowing to decrease check timer by GITHUB_CHECK_SIGNAL_VALUE seconds
|
|
769
|
+
def decrease_check_signal_handler(sig, frame):
    """Signal handler (SIGABRT): shorten the polling interval by GITHUB_CHECK_SIGNAL_VALUE seconds.

    The interval is only reduced while the result stays positive; otherwise it
    is left untouched and the current value is simply re-printed.
    """
    global GITHUB_CHECK_INTERVAL
    candidate = GITHUB_CHECK_INTERVAL - GITHUB_CHECK_SIGNAL_VALUE
    if candidate > 0:
        GITHUB_CHECK_INTERVAL = candidate
    print(f"* Signal {signal.Signals(sig).name} received")
    print(f"* Github timers: [check interval: {display_time(GITHUB_CHECK_INTERVAL)}]")
    print_cur_ts("Timestamp:\t\t\t")
|
|
777
|
+
|
|
778
|
+
|
|
779
|
+
# Signal handler for SIGHUP allowing to reload secrets from .env
|
|
780
|
+
def reload_secrets_signal_handler(sig, frame):
    """Signal handler (SIGHUP): re-read secret values from the .env file.

    When DOTENV_FILE is the literal string 'none' the reload is disabled.
    Otherwise python-dotenv (if installed) re-loads the file and every key in
    SECRET_KEYS whose environment value changed is written back into module
    globals.
    """
    print(f"* Signal {signal.Signals(sig).name} received")

    dotenv_path = None

    # Autoscan is disabled when DOTENV_FILE is explicitly set to 'none'
    reload_enabled = not (DOTENV_FILE and DOTENV_FILE.lower() == 'none')

    if reload_enabled:
        try:
            from dotenv import load_dotenv, find_dotenv

            dotenv_path = DOTENV_FILE if DOTENV_FILE else find_dotenv()
            if dotenv_path:
                load_dotenv(dotenv_path, override=True)
            else:
                print("* No .env file found, skipping env-var reload")
        except ImportError:
            # python-dotenv is an optional dependency
            dotenv_path = None
            print("* python-dotenv not installed, skipping env-var reload")

    if dotenv_path:
        for secret in SECRET_KEYS:
            previous = globals().get(secret)
            current = os.getenv(secret)
            if current is not None and current != previous:
                globals()[secret] = current
                print(f"* Reloaded {secret} from {dotenv_path}")

    print_cur_ts("Timestamp:\t\t\t")
|
|
812
|
+
|
|
813
|
+
|
|
814
|
+
# List subclass used as a safe fallback for paginated responses
|
|
815
|
+
class EmptyPaginatedList(list):
    """Empty list used as a safe stand-in for a PyGithub paginated response.

    Exposes a ``totalCount`` attribute (always 0) so callers that query
    paginated results keep working when the real API call failed.
    """

    def __init__(self):
        list.__init__(self)
        # Mirror PaginatedList's totalCount attribute
        self.totalCount = 0
|
|
819
|
+
|
|
820
|
+
|
|
821
|
+
# Wraps GitHub API call with retry and linear back-off, returning a specified default on failure
|
|
822
|
+
def gh_call(fn: Callable[..., Any], retries=NET_MAX_RETRIES, backoff=NET_BASE_BACKOFF_SEC, default: Any = None) -> Callable[..., Any]:
    """Wrap a GitHub API call with retry and linear back-off.

    Args:
        fn: callable performing the API request.
        retries: total number of attempts before giving up.
        backoff: base delay in seconds; attempt *i* sleeps ``backoff * i``.
        default: value returned when every attempt fails.

    Returns:
        A wrapper that invokes ``fn`` and retries on any NET_ERRORS exception.
    """
    def wrapped(*args: Any, **kwargs: Any) -> Any:
        for attempt in range(1, retries + 1):
            try:
                return fn(*args, **kwargs)
            except NET_ERRORS as e:
                print(f"* {fn.__name__} error: {e} (retry {attempt}/{retries})")
                # Fix: don't sleep after the final attempt - the original slept
                # backoff * retries seconds for nothing before returning default
                if attempt < retries:
                    time.sleep(backoff * attempt)
        return default
    return wrapped
|
|
832
|
+
|
|
833
|
+
|
|
834
|
+
# Prints followers and followings for a GitHub user (-f)
|
|
835
|
+
def _github_print_user_list(accounts, list_kind):
    # Shared printer for follower/following account lists; failures are non-fatal
    try:
        if accounts:
            for account in accounts:
                account_str = f"\n- {account.login}"
                if account.name:
                    account_str += f" ({account.name})"
                if account.html_url:
                    account_str += f"\n[ {account.html_url}/ ]"
                print(account_str)
    except Exception as e:
        print(f"* Cannot fetch user's {list_kind} list: {e}")


def github_print_followers_and_followings(user):
    """Print followers and followings for a GitHub user (-f).

    Fetches the user's profile plus both relation lists via the GitHub API
    and prints them to stdout.

    Raises:
        RuntimeError: if the user's profile details cannot be fetched.
        (List fetch failures are reported but non-fatal.)
    """
    user_name_str = user
    user_url = "-"
    followers_count = 0
    followings_count = 0
    followers_list = []
    followings_list = []

    print(f"* Getting followers & followings for user '{user}' ...")

    g = None
    try:
        try:
            auth = Auth.Token(GITHUB_TOKEN)
            g = Github(base_url=GITHUB_API_URL, auth=auth)

            g_user = g.get_user(user)
            user_login = g_user.login
            user_name = g_user.name
            user_url = g_user.html_url

            followers_count = g_user.followers
            followings_count = g_user.following

            followers_list = g_user.get_followers()
            followings_list = g_user.get_following()

            user_name_str = user_login
            if user_name:
                user_name_str += f" ({user_name})"
        except Exception as e:
            raise RuntimeError(f"Cannot fetch user {user} details: {e}")

        print(f"\nUsername:\t\t{user_name_str}")
        print(f"User URL:\t\t{user_url}/")
        print(f"Github API URL:\t\t{GITHUB_API_URL}")
        print(f"Local timezone:\t\t{LOCAL_TIMEZONE}")

        print(f"\nFollowers:\t\t{followers_count}")
        _github_print_user_list(followers_list, "followers")

        print(f"\nFollowings:\t\t{followings_count}")
        _github_print_user_list(followings_list, "followings")
    finally:
        # Fix: the client was leaked when RuntimeError was raised before g.close()
        if g:
            g.close()
|
|
902
|
+
|
|
903
|
+
|
|
904
|
+
# Processes items from all passed repositories and returns a list of dictionaries
|
|
905
|
+
def github_process_repos(repos_list):
    """Process items from all passed repositories into a list of dictionaries.

    Args:
        repos_list: iterable of PyGithub Repository objects (may be empty/None).

    Returns:
        A list of dicts, one per repo, with counts, dates, stargazer/subscriber/
        fork name lists and open issue/PR summaries. Repos that raise during
        processing are skipped with a printed warning.
    """
    list_of_repos = []

    if repos_list:
        for repo in repos_list:
            try:
                repo_created_date = repo.created_at
                repo_updated_date = repo.updated_at
                # Fix: these collections were initialized once at function scope,
                # which was dead/misleading state; build them fresh per repo
                stargazers_list = [star.login for star in repo.get_stargazers()]
                subscribers_list = [subscriber.login for subscriber in repo.get_subscribers()]
                forked_repos = [fork.full_name for fork in repo.get_forks()]

                issues = list(repo.get_issues(state='open'))
                pulls = list(repo.get_pulls(state='open'))

                # GitHub's issues endpoint also returns PRs; keep only real issues
                real_issues = [i for i in issues if not i.pull_request]
                issue_count = len(real_issues)
                pr_count = len(pulls)

                issues_list = [f"#{i.number} {i.title} ({i.user.login}) [ {i.html_url} ]" for i in real_issues]
                pr_list = [f"#{pr.number} {pr.title} ({pr.user.login}) [ {pr.html_url} ]" for pr in pulls]

                list_of_repos.append({"name": repo.name, "descr": repo.description, "is_fork": repo.fork, "forks": repo.forks_count, "stars": repo.stargazers_count, "subscribers": repo.subscribers_count, "url": repo.html_url, "language": repo.language, "date": repo_created_date, "update_date": repo_updated_date, "stargazers_list": stargazers_list, "forked_repos": forked_repos, "subscribers_list": subscribers_list, "issues": issue_count, "pulls": pr_count, "issues_list": issues_list, "pulls_list": pr_list})
            except Exception as e:
                print(f"* Error while processing info for repo '{repo.name}', skipping for now: {e}")
                print_cur_ts("Timestamp:\t\t\t")
                continue

    return list_of_repos
|
|
937
|
+
|
|
938
|
+
|
|
939
|
+
# Prints a list of public repositories for a GitHub user (-r)
|
|
940
|
+
def github_print_repos(user):
    """Print the list of public repositories for a GitHub user (-r).

    For each repo prints URL, language, star/fork/watcher counts, split
    issue/PR counts, license, default branch, dates and description.

    Raises:
        RuntimeError: if user details or the repository list cannot be fetched.
    """
    user_name_str = user
    user_url = "-"
    repos_count = 0
    repos_list = []

    print(f"* Getting public repositories for user '{user}' ...")

    g = None
    try:
        try:
            auth = Auth.Token(GITHUB_TOKEN)
            g = Github(base_url=GITHUB_API_URL, auth=auth)

            g_user = g.get_user(user)
            user_login = g_user.login
            user_name = g_user.name
            user_url = g_user.html_url

            repos_count = g_user.public_repos
            repos_list = g_user.get_repos()

            user_name_str = user_login
            if user_name:
                user_name_str += f" ({user_name})"
        except Exception as e:
            raise RuntimeError(f"Cannot fetch user {user} details: {e}")

        print(f"\nUsername:\t\t{user_name_str}")
        print(f"User URL:\t\t{user_url}/")
        print(f"Github API URL:\t\t{GITHUB_API_URL}")
        print(f"Local timezone:\t\t{LOCAL_TIMEZONE}")

        print(f"\nRepositories:\t\t{repos_count}\n")

        try:
            if repos_list:
                print("─" * HORIZONTAL_LINE2)
                for repo in repos_list:
                    print(f"🔸 {repo.name} {'(fork)' if repo.fork else ''} \n")

                    # open_issues_count mixes issues and PRs; subtract PRs to
                    # get real issue count (extra API call, may fail -> "?")
                    try:
                        pr_count = repo.get_pulls(state='open').totalCount
                        issue_count = repo.open_issues_count - pr_count
                    except Exception:
                        pr_count = "?"
                        issue_count = "?"

                    print(f" - 🌐 URL:\t\t{repo.html_url}")
                    print(f" - 💻 Language:\t\t{repo.language}")

                    print(f"\n - ⭐ Stars:\t\t{repo.stargazers_count}")
                    print(f" - 🍴 Forks:\t\t{repo.forks_count}")
                    print(f" - 👓 Watchers:\t\t{repo.subscribers_count}")

                    print(f" - 🐞 Issues:\t\t{issue_count}")
                    print(f" - 📬 PRs:\t\t{pr_count}")

                    print(f"\n - 📝 License:\t\t{repo.license.name if repo.license else 'None'}")
                    print(f" - 🌿 Branch (default):\t{repo.default_branch}")

                    print(f"\n - 📅 Created:\t\t{get_date_from_ts(repo.created_at)} ({calculate_timespan(int(time.time()), repo.created_at, granularity=2)} ago)")
                    print(f" - 🔄 Updated:\t\t{get_date_from_ts(repo.updated_at)} ({calculate_timespan(int(time.time()), repo.updated_at, granularity=2)} ago)")
                    print(f" - 🔃 Last push:\t{get_date_from_ts(repo.pushed_at)} ({calculate_timespan(int(time.time()), repo.pushed_at, granularity=2)} ago)")

                    if repo.description:
                        print(f"\n - 📝 Desc:\t\t{repo.description}")
                    print("─" * HORIZONTAL_LINE2)
        except Exception as e:
            raise RuntimeError(f"Cannot fetch user's repositories list: {e}")
    finally:
        # Fix: the client was leaked when RuntimeError was raised before g.close()
        if g:
            g.close()
|
|
1011
|
+
|
|
1012
|
+
|
|
1013
|
+
# Prints a list of starred repositories by a GitHub user (-g)
|
|
1014
|
+
def github_print_starred_repos(user):
    """Print the list of repositories starred by a GitHub user (-g).

    Raises:
        RuntimeError: if user details or the starred list cannot be fetched.
    """
    user_name_str = user
    user_url = "-"
    starred_count = 0
    starred_list = []

    print(f"* Getting repositories starred by user '{user}' ...")

    g = None
    try:
        try:
            auth = Auth.Token(GITHUB_TOKEN)
            g = Github(base_url=GITHUB_API_URL, auth=auth)

            g_user = g.get_user(user)
            user_login = g_user.login
            user_name = g_user.name
            user_url = g_user.html_url

            starred_list = g_user.get_starred()
            starred_count = starred_list.totalCount

            user_name_str = user_login
            if user_name:
                user_name_str += f" ({user_name})"
        except Exception as e:
            raise RuntimeError(f"Cannot fetch user {user} details: {e}")

        print(f"\nUsername:\t\t{user_name_str}")
        print(f"User URL:\t\t{user_url}/")
        print(f"Github API URL:\t\t{GITHUB_API_URL}")
        print(f"Local timezone:\t\t{LOCAL_TIMEZONE}")

        print(f"\nRepos starred by user:\t{starred_count}")

        try:
            if starred_list:
                for star in starred_list:
                    star_str = f"\n- {star.full_name}"
                    if star.html_url:
                        star_str += f" [ {star.html_url}/ ]"
                    print(star_str)
        except Exception as e:
            raise RuntimeError(f"Cannot fetch user's starred list: {e}")
    finally:
        # Fix: the client was leaked when RuntimeError was raised before g.close()
        if g:
            g.close()
|
|
1058
|
+
|
|
1059
|
+
|
|
1060
|
+
# Returns size in human readable format
|
|
1061
|
+
def human_readable_size(num):
    """Render a byte count in human readable form, e.g. 2048 -> '2.0 KB'.

    Divides by 1024 per unit step up to PB; values still >= 1024 PB after the
    last step are reported in PB.
    """
    units = ("B", "KB", "MB", "GB", "TB", "PB")
    size = float(num)
    steps = 0
    while steps < len(units) and abs(size) >= 1024.0:
        size /= 1024.0
        steps += 1
    return f"{size:.1f} {units[min(steps, len(units) - 1)]}"
|
|
1068
|
+
|
|
1069
|
+
|
|
1070
|
+
# Formats the given string as a quoted, indented block
|
|
1071
|
+
def format_body_block(content, indent=" "):
    """Return *content* single-quoted and indented as a block, preceded by a newline."""
    quoted = f"'{content}'"
    return "\n" + textwrap.indent(quoted.strip(), indent)
|
|
1075
|
+
|
|
1076
|
+
|
|
1077
|
+
# Prints details about passed GitHub event
|
|
1078
|
+
def github_print_event(event, g, time_passed=False, ts: datetime | None = None):
    """Print details about the passed GitHub event.

    Args:
        event: a PyGithub Event object (profile activity feed entry).
        g: an authenticated Github client, used to resolve the event's repo,
           commits, PRs and comments.
        time_passed: when True, append relative time info to the event date.
        ts: reference timestamp; when given together with time_passed, the
            elapsed time is computed relative to it instead of "now".

    Returns:
        Tuple ``(event_date, repo_name, repo_url, st)`` where ``st`` is the
        accumulated text produced by all the print_v() calls.

    NOTE(review): indentation below was reconstructed from a flattened diff
    view of this file - nesting of some branches should be confirmed against
    the original source.
    """

    event_date: datetime | None = None
    repo_name = ""
    repo_url = ""
    st = ""  # accumulated printable text, returned to the caller
    tp = ""  # optional "time passed" suffix for the event date line
    repo = None

    event_date = event.created_at
    if time_passed and not ts:
        tp = f" ({calculate_timespan(int(time.time()), event_date, show_seconds=False, granularity=2)} ago)"
    elif time_passed and ts:
        tp = f" (after {calculate_timespan(event_date, ts, show_seconds=False, granularity=2)}: {get_short_date_from_ts(ts)})"
    st += print_v(f"Event date:\t\t\t{get_date_from_ts(event_date)}{tp}")
    st += print_v(f"Event ID:\t\t\t{event.id}")
    st += print_v(f"Event type:\t\t\t{event.type}")

    # Repo header + short description (repo fetch may fail for removed repos)
    if event.repo.id:
        repo_name = event.repo.name
        repo_url = event.repo.url.replace("https://api.github.com/repos/", "https://github.com/")
        st += print_v(f"\nRepo name:\t\t\t{repo_name}")
        st += print_v(f"Repo URL:\t\t\t{repo_url}")

        try:
            desc_len = 80
            repo = g.get_repo(event.repo.name)
            desc = repo.description or ''
            cleaned = desc.replace('\n', ' ')
            short_desc = cleaned[:desc_len] + '...' if len(cleaned) > desc_len else cleaned
            if short_desc:
                st += print_v(f"Repo description:\t\t{short_desc}")
        except UnknownObjectException:
            repo = None
            st += print_v("\nRepository not found or has been removed")
        except GithubException as e:
            repo = None
            st += print_v(f"\n* Error occurred while getting repo details: {e}")

    # Actor info (attributes may be absent on some event payloads)
    if hasattr(event.actor, 'login'):
        if event.actor.login:
            st += print_v(f"\nEvent actor login:\t\t{event.actor.login}")
    if hasattr(event.actor, 'name'):
        if event.actor.name:
            st += print_v(f"Event actor name:\t\t{event.actor.name}")
    if hasattr(event.actor, 'html_url'):
        if event.actor.html_url:
            st += print_v(f"Event actor URL:\t\t{event.actor.html_url}")

    # Ref info (branch/tag create & delete events)
    if event.payload.get("ref"):
        st += print_v(f"\nObject name:\t\t\t{event.payload.get('ref')}")
    if event.payload.get("ref_type"):
        st += print_v(f"Object type:\t\t\t{event.payload.get('ref_type')}")
    if event.payload.get("description"):
        st += print_v(f"Description:\t\t\t'{event.payload.get('description')}'")

    if event.payload.get("action"):
        st += print_v(f"\nAction:\t\t\t\t{event.payload.get('action')}")

    # Push events: per-commit details (dates/stats/files need the repo object)
    if event.payload.get("commits"):
        commits = event.payload["commits"]
        commits_total = len(commits)
        st += print_v(f"\nNumber of commits:\t\t{commits_total}")
        for commit_count, commit in enumerate(commits, start=1):
            st += print_v(f"\n=== Commit {commit_count}/{commits_total} ===")
            st += print_v("." * HORIZONTAL_LINE1)

            commit_details = None
            if repo:
                commit_details = repo.get_commit(commit["sha"])

            if commit_details:
                commit_date = commit_details.commit.author.date
                st += print_v(f" - Commit date:\t\t\t{get_date_from_ts(commit_date)}")

            st += print_v(f" - Commit SHA:\t\t\t{commit['sha']}")
            st += print_v(f" - Commit author:\t\t{commit['author']['name']}")

            if commit_details and commit_details.author:
                st += print_v(f" - Commit author URL:\t\t{commit_details.author.html_url}")

            if commit_details:
                st += print_v(f" - Commit URL:\t\t\t{commit_details.html_url}")
                st += print_v(f" - Commit raw patch URL:\t{commit_details.html_url}.patch")

            # Stats may be unavailable; fall back to zeros
            stats = getattr(commit_details, "stats", None)
            additions = stats.additions if stats else 0
            deletions = stats.deletions if stats else 0
            stats_total = stats.total if stats else 0
            st += print_v(f"\n - Additions/Deletions:\t\t+{additions} / -{deletions} ({stats_total})")

            if commit_details:
                try:
                    file_count = sum(1 for _ in commit_details.files)
                except Exception:
                    file_count = "N/A"
                st += print_v(f" - Files changed:\t\t{file_count}")
                # NOTE(review): "N/A" is truthy, so the file list is also
                # attempted when counting failed - confirm intended
                if file_count:
                    st += print_v(f" - Changed files list:")
                    for f in commit_details.files:
                        st += print_v(f" • '{f.filename}' - {f.status} (+{f.additions} / -{f.deletions})")

            st += print_v(f"\n - Commit message:\t\t'{commit['message']}'")
            st += print_v("." * HORIZONTAL_LINE1)

    # An explicitly empty commits list means a ref update with no new commits
    if event.payload.get("commits") == []:
        st += print_v("\nNo new commits (forced push, tag push, branch reset or other ref update)")

    # Release events
    if event.payload.get("release"):
        st += print_v(f"\nRelease name:\t\t\t{event.payload['release'].get('name')}")
        st += print_v(f"Release tag name:\t\t{event.payload['release'].get('tag_name')}")
        st += print_v(f"Release URL:\t\t\t{event.payload['release'].get('html_url')}")

        st += print_v(f"\nPublished by:\t\t\t{event.payload['release']['author']['login']}")
        if event.payload['release']['author'].get('html_url'):
            st += print_v(f"Published by URL:\t\t{event.payload['release']['author']['html_url']}")
        if event.payload['release'].get('published_at'):
            pub_ts = event.payload['release']['published_at']
            st += print_v(f"Published at:\t\t\t{get_date_from_ts(pub_ts)}")
        st += print_v(f"Target commitish:\t\t{event.payload['release'].get('target_commitish')}")
        st += print_v(f"Draft:\t\t\t\t{event.payload['release'].get('draft')}")
        st += print_v(f"Prerelease:\t\t\t{event.payload['release'].get('prerelease')}")

        if event.payload["release"].get("assets"):
            # NOTE(review): bare print() here (not print_v) - the blank line
            # is not captured in st; confirm intended
            print()
            assets = event.payload['release'].get('assets', [])
            for asset in assets:
                size_bytes = asset.get("size", 0)
                st += print_v(f" - Asset name:\t\t\t{asset.get('name')}")
                st += print_v(f" - Asset size:\t\t\t{human_readable_size(size_bytes)}")
                st += print_v(f" - Download URL:\t\t{asset.get('browser_download_url')}")
                if asset != assets[-1]:
                    st += print_v()

        st += print_v(f"\nRelease notes:\n\n'{event.payload['release'].get('body')}'")

    # Pull request events: fetch the live PR object for full details
    if repo and event.payload.get("pull_request"):
        pr_number = event.payload["pull_request"]["number"]
        pr = repo.get_pull(pr_number)

        st += print_v(f"\n=== PR #{pr.number}: {pr.title} ===")
        st += print_v("." * HORIZONTAL_LINE1)

        st += print_v(f"Author:\t\t\t\t{pr.user.login}")
        st += print_v(f"Author URL:\t\t\t{pr.user.html_url}")
        st += print_v(f"State:\t\t\t\t{pr.state}")
        st += print_v(f"Merged:\t\t\t\t{pr.merged}")
        st += print_v(f"PR URL:\t\t\t\t{pr.html_url}")

        if pr.created_at:
            pr_created_date = get_date_from_ts(pr.created_at)
            st += print_v(f"Created at:\t\t\t{pr_created_date}")
        if pr.closed_at:
            pr_closed_date = get_date_from_ts(pr.closed_at)
            st += print_v(f"Closed at:\t\t\t{pr_closed_date}")
        if pr.merged_at:
            pr_merged_date = get_date_from_ts(pr.merged_at)
            st += print_v(f"Merged at:\t\t\t{pr_merged_date} by {pr.merged_by.login}")

        st += print_v(f"Head → Base:\t\t\t{pr.head.ref} → {pr.base.ref}")
        st += print_v(f"Mergeable state:\t\t{pr.mergeable_state}")

        if pr.labels:
            st += print_v(f"Labels:\t\t\t\t{', '.join(label.name for label in pr.labels)}")

        st += print_v(f"\nCommits:\t\t\t{pr.commits}")
        st += print_v(f"Comments (issue/review):\t{pr.comments} / {pr.review_comments}")

        st += print_v(f"Additions/Deletions:\t\t+{pr.additions} / -{pr.deletions}")
        st += print_v(f"Files changed:\t\t\t{pr.changed_files}")

        if pr.body:
            st += print_v(f"\nPR description:\n\n'{pr.body.strip()}'")

        if pr.requested_reviewers:
            for reviewer in pr.requested_reviewers:
                st += print_v(f"\n - Requested reviewer:\t{reviewer.login} ({reviewer.html_url})")

        if pr.assignees:
            for assignee in pr.assignees:
                st += print_v(f"\nAssignee:\t\t\t{assignee.login} ({assignee.html_url})")

        st += print_v("." * HORIZONTAL_LINE1)

    # PR review events
    if event.payload.get("review"):
        review_date = event.payload["review"].get("submitted_at")
        st += print_v(f"\nReview submitted at:\t\t{get_date_from_ts(review_date)}")
        st += print_v(f"Review URL:\t\t\t{event.payload['review'].get('html_url')}")

        if event.payload["review"].get("author_association"):
            st += print_v(f"Author association:\t\t{event.payload['review'].get('author_association')}")

        if event.payload["review"].get("id"):
            st += print_v(f"Review ID:\t\t\t{event.payload['review'].get('id')}")
        if event.payload["review"].get("commit_id"):
            st += print_v(f"Commit SHA reviewed:\t\t{event.payload['review'].get('commit_id')}")
        if event.payload["review"].get("state"):
            st += print_v(f"Review state:\t\t\t{event.payload['review'].get('state')}")
        if event.payload["review"].get("body"):
            review_body = event.payload['review'].get('body')
            if len(review_body) > 750:
                review_body = review_body[:750] + " ... <cut>"
            st += print_v(f"Review body:")
            st += print_v(format_body_block(review_body))

        # Count of comments attached to this review (best effort)
        if repo:
            try:
                pr_number = event.payload["pull_request"]["number"]
                pr_obj = repo.get_pull(pr_number)
                count = sum(1 for _ in pr_obj.get_single_review_comments(event.payload["review"].get("id")))
                st += print_v(f"Comments in this review:\t{count}")
            except Exception:
                pass

    # Issue events
    if event.payload.get("issue"):
        st += print_v(f"\nIssue title:\t\t\t{event.payload['issue'].get('title')}")

        issue_date = event.payload["issue"].get("created_at")
        st += print_v(f"Issue date:\t\t\t{get_date_from_ts(issue_date)}")

        issue_author = event.payload["issue"].get("user", {}).get("login")
        if issue_author:
            st += print_v(f"Issue author:\t\t\t{issue_author}")

        issue_author_url = event.payload["issue"].get("user", {}).get("html_url")
        if issue_author_url:
            st += print_v(f"Issue author URL:\t\t{issue_author_url}")

        st += print_v(f"Issue URL:\t\t\t{event.payload['issue'].get('html_url')}")

        if event.payload["issue"].get("state"):
            st += print_v(f"Issue state:\t\t\t{event.payload['issue'].get('state')}")

        st += print_v(f"Issue comments:\t\t\t{event.payload['issue'].get('comments', 0)}")

        labels = event.payload["issue"].get("labels", [])
        if labels:
            label_names = ", ".join(label.get("name") for label in labels if label.get("name"))
            if label_names:
                st += print_v(f"Issue labels:\t\t\t{label_names}")

        if event.payload["issue"].get("assignees"):
            assignees = event.payload["issue"].get("assignees")
            for assignee in assignees:
                st += print_v(f" - Assignee name:\t\t{assignee.get('name')}")
                if assignee != assignees[-1]:
                    st += print_v()

        reactions = event.payload["issue"].get("reactions", {})

        # Map GitHub reaction keys to emoji for display
        reaction_map = {
            "+1": "👍",
            "-1": "👎",
            "laugh": "😄",
            "hooray": "🎉",
            "confused": "😕",
            "heart": "❤️",
            "rocket": "🚀",
            "eyes": "👀",
        }

        reaction_display = []
        for key, emoji in reaction_map.items():
            count = reactions.get(key, 0)
            if count > 0:
                reaction_display.append(f"{emoji} {count}")

        if reaction_display:
            st += print_v(f"Issue reactions:\t\t{' / '.join(reaction_display)}")

        if event.payload["issue"].get("body"):
            issue_body = event.payload['issue'].get('body')
            issue_snippet = issue_body if len(issue_body) <= 750 else issue_body[:750] + " ... <cut>"
            st += print_v(f"\nIssue body:")
            st += print_v(format_body_block(issue_snippet))

    # Comment events (issue / commit / PR review comments)
    if event.payload.get("comment"):
        comment = event.payload["comment"]

        comment_date = comment.get("created_at")
        st += print_v(f"\nComment date:\t\t\t{get_date_from_ts(comment_date)}")

        comment_author = comment.get("user", {}).get("login")
        if comment_author:
            st += print_v(f"Comment author:\t\t\t{comment_author}")

        comment_author_url = comment.get("user", {}).get("html_url")
        if comment_author_url:
            st += print_v(f"Comment author URL:\t\t{comment_author_url}")

        st += print_v(f"Comment URL:\t\t\t{comment.get('html_url')}")
        if comment.get("path"):
            st += print_v(f"Comment path:\t\t\t{comment.get('path')}")

        comment_body = comment.get("body")
        if comment_body:
            if len(comment_body) > 750:
                comment_body = comment_body[:750] + " ... <cut>"
            st += print_v(f"\nComment body:")
            st += print_v(format_body_block(comment_body))

        # Thread context: locate the comment this one replies to
        if event.type == "PullRequestReviewCommentEvent":
            parent_id = comment.get("in_reply_to_id")
            if parent_id and repo:
                try:
                    pr_number = event.payload["pull_request"]["number"]
                    pr = repo.get_pull(pr_number)

                    parent = pr.get_review_comment(parent_id)
                    parent_date = get_date_from_ts(parent.created_at)

                    st += print_v(f"\nPrevious comment:\n\n↳ In reply to {parent.user.login} (@ {parent_date}):")

                    parent_body = parent.body
                    if len(parent_body) > 750:
                        parent_body = parent_body[:750] + " ... <cut>"
                    st += print_v(format_body_block(parent_body))

                    st += print_v(f"\nPrevious comment URL:\t\t{parent.html_url}")
                except Exception as e:
                    st += print_v(f"\n* Could not fetch parent comment (ID {parent_id}): {e}")
            else:
                st += print_v("\n(This is the first comment in its thread)")
        elif event.type in ("IssueCommentEvent", "CommitCommentEvent"):
            if repo:

                comment_id = comment["id"]
                comment_created = datetime.fromisoformat(comment["created_at"].replace("Z", "+00:00"))

                if event.type == "IssueCommentEvent":

                    issue_number = event.payload["issue"]["number"]
                    issue = repo.get_issue(issue_number)

                    # Build a synthetic comment list: issue body first, then comments
                    virtual_comment_list = []

                    if issue.body:
                        virtual_comment_list.append({
                            "id": f"issue-{issue.id}",  # fake ID so it doesn't collide
                            "created_at": issue.created_at,
                            "user": issue.user,
                            "body": issue.body,
                            "html_url": issue.html_url
                        })

                    for c in issue.get_comments():
                        virtual_comment_list.append({
                            "id": c.id,
                            "created_at": c.created_at,
                            "user": c.user,
                            "body": c.body,
                            "html_url": c.html_url
                        })

                    # Find the latest entry strictly older than this comment
                    previous = None
                    for c in virtual_comment_list:
                        if c["id"] == comment_id or (isinstance(c["id"], int) and c["id"] == comment_id):
                            continue

                        if c["created_at"] < comment_created:
                            if not previous or c["created_at"] > previous["created_at"]:
                                previous = c

                    if previous:
                        prev_date = get_date_from_ts(previous["created_at"])
                        st += print_v(f"\nPrevious comment:\n\n↳ In reply to {previous['user'].login} (@ {prev_date}):")

                        parent_body = previous["body"]
                        if len(parent_body) > 750:
                            parent_body = parent_body[:750] + " ... <cut>"
                        st += print_v(format_body_block(parent_body))

                        st += print_v(f"\nPrevious comment URL:\t\t{previous['html_url']}")
                    else:
                        st += print_v("\n(This is the first comment in this thread)")

                elif event.type == "CommitCommentEvent":
                    commit_sha = comment["commit_id"]
                    comments = list(repo.get_commit(commit_sha).get_comments())

                    # Find the latest comment strictly older than this one
                    previous = None
                    for c in comments:
                        if c.id == comment_id:
                            continue
                        if c.created_at < comment_created:
                            if not previous or c.created_at > previous.created_at:
                                previous = c

                    if previous:
                        prev_date = get_date_from_ts(previous.created_at)
                        st += print_v(f"\nPrevious comment:\n\n↳ In reply to {previous.user.login} (@ {prev_date}):")

                        parent_body = previous.body
                        if len(parent_body) > 750:
                            parent_body = parent_body[:750] + " ... <cut>"
                        st += print_v(format_body_block(parent_body))

                        st += print_v(f"\nPrevious comment URL:\t\t{previous.html_url}")
                    else:
                        st += print_v("\n(This is the first comment in this thread)")

    # Fork events
    if event.payload.get("forkee"):
        st += print_v(f"\nForked to repo:\t\t\t{event.payload['forkee'].get('full_name')}")
        st += print_v(f"Forked to repo (URL):\t\t{event.payload['forkee'].get('html_url')}")

    # Collaborator added
    if event.type == "MemberEvent":
        member_login = event.payload.get("member", {}).get("login")
        member_role = event.payload.get("membership", {}).get("role")
        if member_login:
            st += print_v(f"\nMember added:\t\t\t{member_login}")
            member_url = event.payload.get("member", {}).get("html_url")
            if member_url:
                st += print_v(f"Member added URL:\t\t{member_url}")
            if member_role:
                st += print_v(f"Permission level:\t\t{member_role}")

    if event.type == "PublicEvent":
        st += print_v("\nRepository is now public")

    # Discussion events
    if event.type == "DiscussionEvent":
        discussion_title = event.payload.get("discussion", {}).get("title")
        discussion_url = event.payload.get("discussion", {}).get("html_url")
        discussion_category = event.payload.get("discussion", {}).get("category", {}).get("name")
        if discussion_title:
            st += print_v(f"\nDiscussion title:\t\t{discussion_title}")
        if discussion_url:
            st += print_v(f"Discussion URL:\t\t\t{discussion_url}")
        if discussion_category:
            st += print_v(f"Discussion category:\t\t{discussion_category}")

    if event.type == "DiscussionCommentEvent":
        comment_author = event.payload.get("comment", {}).get("user", {}).get("login")
        comment_body = event.payload.get("comment", {}).get("body")
        if comment_author:
            st += print_v(f"\nDiscussion comment by:\t\t{comment_author}")
        if comment_body:
            if len(comment_body) > 750:
                comment_body = comment_body[:750] + " ... <cut>"
            st += print_v(f"\nComment body:")
            st += print_v(format_body_block(comment_body))

    return event_date, repo_name, repo_url, st
|
|
1520
|
+
|
|
1521
|
+
|
|
1522
|
+
# Lists recent events for the user (-l) and potentially dumps the entries to CSV file (if -b is used)
def github_list_events(user, number, csv_file_name):
    """Print the most recent `number` events for `user`; optionally append each entry to a CSV file."""
    events = []
    available_events = 0

    # Best-effort CSV initialization - listing continues even if the file cannot be prepared
    try:
        if csv_file_name:
            init_csv_file(csv_file_name)
    except Exception as e:
        print(f"* Error: {e}")

    list_operation = "* Listing & saving" if csv_file_name else "* Listing"
    print(f"{list_operation} {number} recent events for '{user}' ...\n")

    try:
        g = Github(base_url=GITHUB_API_URL, auth=Auth.Token(GITHUB_TOKEN))
        g_user = g.get_user(user)

        all_events = list(g_user.get_events())
        total_available = len(all_events)
        events = all_events[:number]
        available_events = len(events)

        user_login = g_user.login
        user_name = g_user.name
        user_url = g_user.html_url

        user_name_str = f"{user_login} ({user_name})" if user_name else user_login
    except Exception as e:
        print(f"* Cannot fetch user details: {e}")
        return

    print(f"Username:\t\t\t{user_name_str}")
    print(f"User URL:\t\t\t{user_url}/")
    print(f"Github API URL:\t\t\t{GITHUB_API_URL}")
    if csv_file_name:
        print(f"CSV export enabled:\t\t{bool(csv_file_name)}" + (f" ({csv_file_name})" if csv_file_name else ""))
    print(f"Local timezone:\t\t\t{LOCAL_TIMEZONE}")
    print(f"Available events:\t\t{total_available}")
    print(f"\n{'─' * HORIZONTAL_LINE1}\n{'─' * HORIZONTAL_LINE1}")

    if available_events == 0:
        print("There are no events yet")
        return

    try:
        # Walk oldest-first; event #1 is the most recent entry, so numbers count down
        for offset, event in enumerate(reversed(events)):
            if event.type not in EVENTS_TO_MONITOR and 'ALL' not in EVENTS_TO_MONITOR:
                continue

            print(f"Event number:\t\t\t#{available_events - offset}")

            try:
                event_date, repo_name, repo_url, event_text = github_print_event(event, g)
            except Exception as e:
                print(f"\n* Warning, cannot fetch all event details, skipping: {e}")
                print_cur_ts("\nTimestamp:\t\t\t")
                continue

            try:
                if csv_file_name:
                    write_csv_entry(csv_file_name, convert_to_local_naive(event_date), str(event.type), str(repo_name), "", "")
            except Exception as e:
                print(f"* Error: {e}")

            print_cur_ts("\nTimestamp:\t\t\t")
    except Exception as e:
        print(f"* Cannot fetch events: {e}")
|
|
1592
|
+
|
|
1593
|
+
|
|
1594
|
+
# Detects and reports changes in a user's profile-level entities (followers, followings, public repos, starred repos)
def handle_profile_change(label, count_old, count_new, list_old, raw_list, user, csv_file_name, field):
    """Compare a profile-level list against its previously seen state and report changes.

    Args:
        label: Human-readable entity name (e.g. "Followers"); also used in CSV event names.
        count_old: Previously stored count for this entity.
        count_new: Count reported by the GitHub API in the current pass.
        list_old: Previously stored list of item identifiers.
        raw_list: Iterable of PyGithub objects fetched in the current pass.
        user: Login of the monitored user (used in messages and URLs).
        csv_file_name: Optional CSV file to append change rows to (falsy to disable).
        field: Attribute extracted from each raw item (e.g. "login" or "full_name").

    Returns:
        Tuple (list, count) to store as the new baseline; on fetch error or when
        nothing changed, the old baseline is returned unchanged.
    """
    try:
        # Fix: removed a dead `list_new = []` store that was immediately overwritten
        list_new = [getattr(item, field) for item in raw_list]
        # An empty list while the API still reports a positive count usually means a
        # partial/failed fetch - keep the old baseline instead of reporting mass removal
        if not list_new and count_new > 0:
            return list_old, count_old
    except Exception as e:
        print(f"* Error while trying to get the list of {label.lower()}: {e}")
        print_cur_ts("Timestamp:\t\t\t")
        return list_old, count_old

    new_count = len(list_new)
    old_count = len(list_old)

    if list_new == list_old:
        return list_old, count_old

    diff = new_count - old_count

    diff_str = f"+{diff}" if diff > 0 else f"{diff}"

    # Grammar helper: entities the user acts on read "changed by", others "changed for"
    label_context = "by" if label.lower() in ["followings", "starred repos"] else "for"

    if diff == 0:
        print(f"* {label} list changed {label_context} user {user}\n")
    else:
        print(f"* {label} number changed {label_context} user {user} from {old_count} to {new_count} ({diff_str})\n")
    try:
        if csv_file_name:
            write_csv_entry(csv_file_name, now_local_naive(), f"{label} Count", user, old_count, new_count)
    except Exception as e:
        print(f"* Error: {e}")

    added_list_str = ""
    removed_list_str = ""
    added_mbody = ""
    removed_mbody = ""

    removed_items = list(set(list_old) - set(list_new))
    added_items = list(set(list_new) - set(list_old))

    if removed_items:
        print(f"Removed {label.lower()}:\n")
        removed_mbody = f"\nRemoved {label.lower()}:\n\n"
        for item in removed_items:
            # Follower/following/starred entries name other accounts or foreign repos;
            # "Repos" entries belong to the monitored user
            item_url = (f"https://github.com/{item}/" if label.lower() in ["followers", "followings", "starred repos"]
                        else f"https://github.com/{user}/{item}/")
            print(f"- {item} [ {item_url} ]")
            removed_list_str += f"- {item} [ {item_url} ]\n"
            try:
                if csv_file_name:
                    # label[:-1] is a naive singularization ("Followers" -> "Follower")
                    write_csv_entry(csv_file_name, now_local_naive(), f"Removed {label[:-1]}", user, item, "")
            except Exception as e:
                print(f"* Error: {e}")
        print()

    if added_items:
        print(f"Added {label.lower()}:\n")
        added_mbody = f"\nAdded {label.lower()}:\n\n"
        for item in added_items:
            item_url = (f"https://github.com/{item}/" if label.lower() in ["followers", "followings", "starred repos"]
                        else f"https://github.com/{user}/{item}/")
            print(f"- {item} [ {item_url} ]")
            added_list_str += f"- {item} [ {item_url} ]\n"
            try:
                if csv_file_name:
                    write_csv_entry(csv_file_name, now_local_naive(), f"Added {label[:-1]}", user, "", item)
            except Exception as e:
                print(f"* Error: {e}")
        print()

    if diff == 0:
        m_subject = f"Github user {user} {label.lower()} list changed"
        m_body = (f"{label} list changed {label_context} user {user}\n"
                  f"{removed_mbody}{removed_list_str}{added_mbody}{added_list_str}\n"
                  f"Check interval: {display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)}){get_cur_ts(nl_ch + 'Timestamp: ')}")
    else:
        m_subject = f"Github user {user} {label.lower()} number has changed! ({diff_str}, {old_count} -> {new_count})"
        m_body = (f"{label} number changed {label_context} user {user} from {old_count} to {new_count} ({diff_str})\n"
                  f"{removed_mbody}{removed_list_str}{added_mbody}{added_list_str}\n"
                  f"Check interval: {display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)}){get_cur_ts(nl_ch + 'Timestamp: ')}")

    if PROFILE_NOTIFICATION:
        print(f"Sending email notification to {RECEIVER_EMAIL}")
        send_email(m_subject, m_body, "", SMTP_SSL)

    print(f"Check interval:\t\t\t{display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)})")
    print_cur_ts("Timestamp:\t\t\t")
    return list_new, new_count
|
|
1684
|
+
|
|
1685
|
+
|
|
1686
|
+
# Detects and reports changes in repository-level entities (like stargazers, watchers, forks, issues, pull requests)
def check_repo_list_changes(count_old, count_new, list_old, list_new, label, repo_name, repo_url, user, csv_file_name):
    """Compare a repository-level list against its previous state and report additions/removals.

    Args:
        count_old: Previously stored count (currently unused; counts are derived from the lists).
        count_new: Count reported by the GitHub API in the current pass.
        list_old: Previously stored list of item identifiers.
        list_new: Freshly fetched list of item identifiers.
        label: Human-readable entity name (e.g. "Stargazers", "Issues", "Pull Requests").
        repo_name: Name of the repository being checked.
        repo_url: URL of the repository (used in messages).
        user: Login of the monitored user (used in email subjects).
        csv_file_name: Optional CSV file to append change rows to (falsy to disable).

    Returns:
        None. Side effects only: console output, optional CSV rows, optional email.
    """
    # An empty new list while the API reports a positive count likely means a
    # partial/failed fetch - skip this pass rather than reporting mass removal
    if not list_new and count_new > 0:
        return

    old_count = len(list_old)
    new_count = len(list_new)

    if list_old == list_new:
        return

    diff = new_count - old_count

    diff_str = f"{'+' if diff > 0 else ''}{diff}"

    if diff == 0:
        print(f"* Repo '{repo_name}': {label.lower()} list changed\n* Repo URL: {repo_url}")
    else:
        print(f"* Repo '{repo_name}': number of {label.lower()} changed from {old_count} to {new_count} ({diff_str})\n* Repo URL: {repo_url}")
    try:
        if csv_file_name:
            write_csv_entry(csv_file_name, now_local_naive(), f"Repo {label} Count", repo_name, old_count, new_count)
    except Exception as e:
        print(f"* Error: {e}")

    added_list_str = ""
    removed_list_str = ""
    added_mbody = ""
    removed_mbody = ""

    removed_items = list(set(list_old) - set(list_new))
    added_items = list(set(list_new) - set(list_old))

    # Issues/PRs that disappear from the list are "closed", not "removed"
    removal_text = "Closed" if label in ["Issues", "Pull Requests"] else "Removed"

    # Fix: the original guarded this print with `if list_old != list_new:`, which is
    # always true here (equal lists already returned early above)
    print()

    if removed_items:
        print(f"{removal_text} {label.lower()}:\n")
        removed_mbody = f"\n{removal_text} {label.lower()}:\n\n"
        for item in removed_items:
            # Stargazer/watcher/fork entries name accounts, so they get a profile URL
            item_line = f"- {item} [ https://github.com/{item}/ ]" if label.lower() in ["stargazers", "watchers", "forks"] else f"- {item}"
            print(item_line)
            removed_list_str += item_line + "\n"
            try:
                if csv_file_name:
                    # Issue/PR entries carry a trailing "(...)" suffix - strip it for the CSV value
                    value = item.rsplit("(", 1)[0].strip() if label in ["Issues", "Pull Requests"] else item
                    write_csv_entry(csv_file_name, now_local_naive(), f"{removal_text} {label[:-1]}", repo_name, value, "")
            except Exception as e:
                print(f"* Error: {e}")
        print()

    if added_items:
        print(f"Added {label.lower()}:\n")
        added_mbody = f"\nAdded {label.lower()}:\n\n"
        for item in added_items:
            item_line = f"- {item} [ https://github.com/{item}/ ]" if label.lower() in ["stargazers", "watchers", "forks"] else f"- {item}"
            print(item_line)
            added_list_str += item_line + "\n"
            try:
                if csv_file_name:
                    value = item.rsplit("(", 1)[0].strip() if label in ["Issues", "Pull Requests"] else item
                    write_csv_entry(csv_file_name, now_local_naive(), f"Added {label[:-1]}", repo_name, "", value)
            except Exception as e:
                print(f"* Error: {e}")
        print()

    if diff == 0:
        m_subject = f"Github user {user} {label.lower()} list changed for repo '{repo_name}'!"
        m_body = (f"* Repo '{repo_name}': {label.lower()} list changed\n"
                  f"* Repo URL: {repo_url}\n{removed_mbody}{removed_list_str}{added_mbody}{added_list_str}\n"
                  f"Check interval: {display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)}){get_cur_ts(nl_ch + 'Timestamp: ')}")
    else:
        m_subject = f"Github user {user} number of {label.lower()} for repo '{repo_name}' has changed! ({diff_str}, {old_count} -> {new_count})"
        m_body = (f"* Repo '{repo_name}': number of {label.lower()} changed from {old_count} to {new_count} ({diff_str})\n"
                  f"* Repo URL: {repo_url}\n{removed_mbody}{removed_list_str}{added_mbody}{added_list_str}\n"
                  f"Check interval: {display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)}){get_cur_ts(nl_ch + 'Timestamp: ')}")

    if REPO_NOTIFICATION:
        print(f"Sending email notification to {RECEIVER_EMAIL}")
        send_email(m_subject, m_body, "", SMTP_SSL)
    print(f"Check interval:\t\t\t{display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)})")
    print_cur_ts("Timestamp:\t\t\t")
|
|
1770
|
+
|
|
1771
|
+
|
|
1772
|
+
# Finds an optional config file
def find_config_file(cli_path=None):
    """
    Search for an optional config file in:
    1) CLI-provided path (must exist if given)
    2) ./{DEFAULT_CONFIG_FILENAME}
    3) ~/.{DEFAULT_CONFIG_FILENAME}
    4) script-directory/{DEFAULT_CONFIG_FILENAME}
    """

    # An explicitly supplied path wins - but if it does not exist we do NOT
    # fall back to the default locations
    if cli_path:
        explicit = Path(os.path.expanduser(cli_path))
        return str(explicit) if explicit.is_file() else None

    search_order = (
        Path.cwd() / DEFAULT_CONFIG_FILENAME,
        Path.home() / f".{DEFAULT_CONFIG_FILENAME}",
        Path(__file__).parent / DEFAULT_CONFIG_FILENAME,
    )

    # First existing candidate wins; None when nothing is found
    return next((str(candidate) for candidate in search_order if candidate.is_file()), None)
|
|
1796
|
+
|
|
1797
|
+
|
|
1798
|
+
# Resolves an executable path by checking if it's a valid file or searching in $PATH
def resolve_executable(path):
    """Return `path` when it is an executable file, otherwise look the name up in $PATH.

    Raises:
        FileNotFoundError: when neither the direct path nor a $PATH lookup yields an executable.
    """
    # Direct hit: the argument already names an executable file
    if os.path.isfile(path) and os.access(path, os.X_OK):
        return path

    # Otherwise fall back to a $PATH search
    resolved = shutil.which(path)
    if resolved is not None:
        return resolved

    raise FileNotFoundError(f"Could not find executable '{path}'")
|
|
1808
|
+
|
|
1809
|
+
|
|
1810
|
+
# Main function that monitors activity of the specified GitHub user
|
|
1811
|
+
def github_monitor_user(user, csv_file_name):
|
|
1812
|
+
|
|
1813
|
+
try:
|
|
1814
|
+
if csv_file_name:
|
|
1815
|
+
init_csv_file(csv_file_name)
|
|
1816
|
+
except Exception as e:
|
|
1817
|
+
print(f"* Error: {e}")
|
|
1818
|
+
|
|
1819
|
+
followers_count = 0
|
|
1820
|
+
followings_count = 0
|
|
1821
|
+
repos_count = 0
|
|
1822
|
+
starred_count = 0
|
|
1823
|
+
available_events = 0
|
|
1824
|
+
events = []
|
|
1825
|
+
repos_list = []
|
|
1826
|
+
event_date: datetime | None = None
|
|
1827
|
+
|
|
1828
|
+
try:
|
|
1829
|
+
auth = Auth.Token(GITHUB_TOKEN)
|
|
1830
|
+
g = Github(base_url=GITHUB_API_URL, auth=auth)
|
|
1831
|
+
g_user_myself = g.get_user()
|
|
1832
|
+
user_myself_login = g_user_myself.login
|
|
1833
|
+
user_myself_name = g_user_myself.name
|
|
1834
|
+
user_myself_url = g_user_myself.html_url
|
|
1835
|
+
|
|
1836
|
+
g_user = g.get_user(user)
|
|
1837
|
+
user_login = g_user.login
|
|
1838
|
+
user_name = g_user.name
|
|
1839
|
+
user_url = g_user.html_url
|
|
1840
|
+
location = g_user.location
|
|
1841
|
+
bio = g_user.bio
|
|
1842
|
+
company = g_user.company
|
|
1843
|
+
email = g_user.email
|
|
1844
|
+
blog = g_user.blog
|
|
1845
|
+
account_created_date = g_user.created_at
|
|
1846
|
+
account_updated_date = g_user.updated_at
|
|
1847
|
+
|
|
1848
|
+
followers_count = g_user.followers
|
|
1849
|
+
followings_count = g_user.following
|
|
1850
|
+
repos_count = g_user.public_repos
|
|
1851
|
+
|
|
1852
|
+
followers_list = g_user.get_followers()
|
|
1853
|
+
followings_list = g_user.get_following()
|
|
1854
|
+
repos_list = g_user.get_repos()
|
|
1855
|
+
|
|
1856
|
+
starred_list = g_user.get_starred()
|
|
1857
|
+
starred_count = starred_list.totalCount
|
|
1858
|
+
|
|
1859
|
+
if not DO_NOT_MONITOR_GITHUB_EVENTS:
|
|
1860
|
+
events = list(islice(g_user.get_events(), EVENTS_NUMBER))
|
|
1861
|
+
available_events = len(events)
|
|
1862
|
+
|
|
1863
|
+
except Exception as e:
|
|
1864
|
+
print(f"* Error: {e}")
|
|
1865
|
+
sys.exit(1)
|
|
1866
|
+
|
|
1867
|
+
last_event_id = 0
|
|
1868
|
+
last_event_ts: datetime | None = None
|
|
1869
|
+
events_list_of_ids = set()
|
|
1870
|
+
|
|
1871
|
+
if not DO_NOT_MONITOR_GITHUB_EVENTS:
|
|
1872
|
+
if available_events:
|
|
1873
|
+
try:
|
|
1874
|
+
for event in reversed(events):
|
|
1875
|
+
events_list_of_ids.add(event.id)
|
|
1876
|
+
|
|
1877
|
+
newest = events[0]
|
|
1878
|
+
last_event_id = newest.id
|
|
1879
|
+
if last_event_id:
|
|
1880
|
+
last_event_ts = newest.created_at
|
|
1881
|
+
except Exception as e:
|
|
1882
|
+
print(f"* Cannot get event IDs / timestamps: {e}\n")
|
|
1883
|
+
pass
|
|
1884
|
+
|
|
1885
|
+
followers_old_count = followers_count
|
|
1886
|
+
followings_old_count = followings_count
|
|
1887
|
+
repos_old_count = repos_count
|
|
1888
|
+
starred_old_count = starred_count
|
|
1889
|
+
|
|
1890
|
+
user_name_old = user_name
|
|
1891
|
+
location_old = location
|
|
1892
|
+
bio_old = bio
|
|
1893
|
+
company_old = company
|
|
1894
|
+
email_old = email
|
|
1895
|
+
blog_old = blog
|
|
1896
|
+
|
|
1897
|
+
last_event_id_old = last_event_id
|
|
1898
|
+
last_event_ts_old = last_event_ts
|
|
1899
|
+
events_list_of_ids_old = events_list_of_ids.copy()
|
|
1900
|
+
|
|
1901
|
+
user_myself_name_str = user_myself_login
|
|
1902
|
+
if user_myself_name:
|
|
1903
|
+
user_myself_name_str += f" ({user_myself_name})"
|
|
1904
|
+
|
|
1905
|
+
print(f"Token belongs to:\t\t{user_myself_name_str}" + f"\n\t\t\t\t[ {user_myself_url} ]" if user_myself_url else "")
|
|
1906
|
+
|
|
1907
|
+
user_name_str = user_login
|
|
1908
|
+
if user_name:
|
|
1909
|
+
user_name_str += f" ({user_name})"
|
|
1910
|
+
|
|
1911
|
+
print(f"\nUsername:\t\t\t{user_name_str}")
|
|
1912
|
+
print(f"User URL:\t\t\t{user_url}/")
|
|
1913
|
+
|
|
1914
|
+
if location:
|
|
1915
|
+
print(f"Location:\t\t\t{location}")
|
|
1916
|
+
|
|
1917
|
+
if company:
|
|
1918
|
+
print(f"Company:\t\t\t{company}")
|
|
1919
|
+
|
|
1920
|
+
if email:
|
|
1921
|
+
print(f"Email:\t\t\t\t{email}")
|
|
1922
|
+
|
|
1923
|
+
if blog:
|
|
1924
|
+
print(f"Blog URL:\t\t\t{blog}")
|
|
1925
|
+
|
|
1926
|
+
print(f"\nAccount creation date:\t\t{get_date_from_ts(account_created_date)} ({calculate_timespan(int(time.time()), account_created_date, show_seconds=False)} ago)")
|
|
1927
|
+
print(f"Account updated date:\t\t{get_date_from_ts(account_updated_date)} ({calculate_timespan(int(time.time()), account_updated_date, show_seconds=False)} ago)")
|
|
1928
|
+
account_updated_date_old = account_updated_date
|
|
1929
|
+
|
|
1930
|
+
print(f"\nFollowers:\t\t\t{followers_count}")
|
|
1931
|
+
print(f"Followings:\t\t\t{followings_count}")
|
|
1932
|
+
print(f"Repositories:\t\t\t{repos_count}")
|
|
1933
|
+
print(f"Starred repos:\t\t\t{starred_count}")
|
|
1934
|
+
if not DO_NOT_MONITOR_GITHUB_EVENTS:
|
|
1935
|
+
print(f"Available events:\t\t{available_events}{'+' if available_events == EVENTS_NUMBER else ''}")
|
|
1936
|
+
|
|
1937
|
+
if bio:
|
|
1938
|
+
print(f"\nBio:\n\n'{bio}'")
|
|
1939
|
+
|
|
1940
|
+
print_cur_ts("\nTimestamp:\t\t\t")
|
|
1941
|
+
|
|
1942
|
+
list_of_repos = []
|
|
1943
|
+
if repos_list and TRACK_REPOS_CHANGES:
|
|
1944
|
+
print("Processing list of public repositories (be patient, it might take a while) ...")
|
|
1945
|
+
try:
|
|
1946
|
+
list_of_repos = github_process_repos(repos_list)
|
|
1947
|
+
except Exception as e:
|
|
1948
|
+
print(f"* Cannot process list of public repositories: {e}")
|
|
1949
|
+
print_cur_ts("\nTimestamp:\t\t\t")
|
|
1950
|
+
|
|
1951
|
+
list_of_repos_old = list_of_repos
|
|
1952
|
+
|
|
1953
|
+
if not DO_NOT_MONITOR_GITHUB_EVENTS:
|
|
1954
|
+
print(f"Latest event:\n")
|
|
1955
|
+
|
|
1956
|
+
if available_events == 0:
|
|
1957
|
+
print("There are no events yet")
|
|
1958
|
+
else:
|
|
1959
|
+
try:
|
|
1960
|
+
github_print_event(events[0], g, True)
|
|
1961
|
+
except Exception as e:
|
|
1962
|
+
print(f"\n* Warning: cannot fetch last event details: {e}")
|
|
1963
|
+
|
|
1964
|
+
print_cur_ts("\nTimestamp:\t\t\t")
|
|
1965
|
+
|
|
1966
|
+
followers_old = []
|
|
1967
|
+
followings_old = []
|
|
1968
|
+
repos_old = []
|
|
1969
|
+
starred_old = []
|
|
1970
|
+
|
|
1971
|
+
try:
|
|
1972
|
+
followers_old = [follower.login for follower in followers_list]
|
|
1973
|
+
followings_old = [following.login for following in followings_list]
|
|
1974
|
+
repos_old = [repo.name for repo in repos_list]
|
|
1975
|
+
starred_old = [star.full_name for star in starred_list]
|
|
1976
|
+
except Exception as e:
|
|
1977
|
+
print(f"* Error: {e}")
|
|
1978
|
+
sys.exit(1)
|
|
1979
|
+
|
|
1980
|
+
g.close()
|
|
1981
|
+
|
|
1982
|
+
time.sleep(GITHUB_CHECK_INTERVAL)
|
|
1983
|
+
alive_counter = 0
|
|
1984
|
+
email_sent = False
|
|
1985
|
+
|
|
1986
|
+
# main loop
|
|
1987
|
+
while True:
|
|
1988
|
+
|
|
1989
|
+
try:
|
|
1990
|
+
g_user = g.get_user(user)
|
|
1991
|
+
email_sent = False
|
|
1992
|
+
|
|
1993
|
+
except (GithubException, Exception) as e:
|
|
1994
|
+
print(f"* Error, retrying in {display_time(GITHUB_CHECK_INTERVAL)}: {e}")
|
|
1995
|
+
|
|
1996
|
+
should_notify = False
|
|
1997
|
+
reason_msg = None
|
|
1998
|
+
|
|
1999
|
+
if isinstance(e, BadCredentialsException):
|
|
2000
|
+
reason_msg = "GitHub token might not be valid anymore (bad credentials error)!"
|
|
2001
|
+
else:
|
|
2002
|
+
matched = next((msg for msg in ["Forbidden", "Bad Request"] if msg in str(e)), None)
|
|
2003
|
+
if matched:
|
|
2004
|
+
reason_msg = f"Session might not be valid ('{matched}' error)"
|
|
2005
|
+
|
|
2006
|
+
if reason_msg:
|
|
2007
|
+
print(f"* {reason_msg}")
|
|
2008
|
+
should_notify = True
|
|
2009
|
+
|
|
2010
|
+
if should_notify and ERROR_NOTIFICATION and not email_sent:
|
|
2011
|
+
m_subject = f"github_monitor: session error! (user: {user})"
|
|
2012
|
+
m_body = f"{reason_msg}\n{e}{get_cur_ts(nl_ch + nl_ch + 'Timestamp: ')}"
|
|
2013
|
+
print(f"Sending email notification to {RECEIVER_EMAIL}")
|
|
2014
|
+
send_email(m_subject, m_body, "", SMTP_SSL)
|
|
2015
|
+
email_sent = True
|
|
2016
|
+
|
|
2017
|
+
print_cur_ts("Timestamp:\t\t\t")
|
|
2018
|
+
time.sleep(GITHUB_CHECK_INTERVAL)
|
|
2019
|
+
continue
|
|
2020
|
+
|
|
2021
|
+
# Changed followings
|
|
2022
|
+
followings_raw = list(gh_call(g_user.get_following)())
|
|
2023
|
+
followings_count = gh_call(lambda: g_user.following)()
|
|
2024
|
+
if followings_raw is not None and followings_count is not None:
|
|
2025
|
+
followings_old, followings_old_count = handle_profile_change("Followings", followings_old_count, followings_count, followings_old, followings_raw, user, csv_file_name, field="login")
|
|
2026
|
+
|
|
2027
|
+
# Changed followers
|
|
2028
|
+
followers_raw = list(gh_call(g_user.get_followers)())
|
|
2029
|
+
followers_count = gh_call(lambda: g_user.followers)()
|
|
2030
|
+
if followers_raw is not None and followers_count is not None:
|
|
2031
|
+
followers_old, followers_old_count = handle_profile_change("Followers", followers_old_count, followers_count, followers_old, followers_raw, user, csv_file_name, field="login")
|
|
2032
|
+
|
|
2033
|
+
# Changed public repositories
|
|
2034
|
+
repos_raw = list(gh_call(g_user.get_repos)())
|
|
2035
|
+
repos_count = gh_call(lambda: g_user.public_repos)()
|
|
2036
|
+
if repos_raw is not None and repos_count is not None:
|
|
2037
|
+
repos_old, repos_old_count = handle_profile_change("Repos", repos_old_count, repos_count, repos_old, repos_raw, user, csv_file_name, field="name")
|
|
2038
|
+
|
|
2039
|
+
# Changed starred repositories
|
|
2040
|
+
starred_raw = gh_call(g_user.get_starred)()
|
|
2041
|
+
if starred_raw is not None:
|
|
2042
|
+
starred_list = list(starred_raw)
|
|
2043
|
+
starred_count = starred_raw.totalCount
|
|
2044
|
+
starred_old, starred_old_count = handle_profile_change("Starred Repos", starred_old_count, starred_count, starred_old, starred_list, user, csv_file_name, field="full_name")
|
|
2045
|
+
|
|
2046
|
+
# Changed bio
|
|
2047
|
+
bio = gh_call(lambda: g_user.bio)()
|
|
2048
|
+
if bio is not None and bio != bio_old:
|
|
2049
|
+
print(f"* Bio has changed for user {user} !\n")
|
|
2050
|
+
print(f"Old bio:\n\n{bio_old}\n")
|
|
2051
|
+
print(f"New bio:\n\n{bio}\n")
|
|
2052
|
+
|
|
2053
|
+
try:
|
|
2054
|
+
if csv_file_name:
|
|
2055
|
+
write_csv_entry(csv_file_name, now_local_naive(), "Bio", user, bio_old, bio)
|
|
2056
|
+
except Exception as e:
|
|
2057
|
+
print(f"* Error: {e}")
|
|
2058
|
+
|
|
2059
|
+
m_subject = f"Github user {user} bio has changed!"
|
|
2060
|
+
m_body = f"Github user {user} bio has changed\n\nOld bio:\n\n{bio_old}\n\nNew bio:\n\n{bio}\n\nCheck interval: {display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)}){get_cur_ts(nl_ch + 'Timestamp: ')}"
|
|
2061
|
+
|
|
2062
|
+
if PROFILE_NOTIFICATION:
|
|
2063
|
+
print(f"Sending email notification to {RECEIVER_EMAIL}")
|
|
2064
|
+
send_email(m_subject, m_body, "", SMTP_SSL)
|
|
2065
|
+
|
|
2066
|
+
bio_old = bio
|
|
2067
|
+
print(f"Check interval:\t\t\t{display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)})")
|
|
2068
|
+
print_cur_ts("Timestamp:\t\t\t")
|
|
2069
|
+
|
|
2070
|
+
# Changed location
|
|
2071
|
+
location = gh_call(lambda: g_user.location)()
|
|
2072
|
+
if location is not None and location != location_old:
|
|
2073
|
+
print(f"* Location has changed for user {user} !\n")
|
|
2074
|
+
print(f"Old location:\t\t\t{location_old}\n")
|
|
2075
|
+
print(f"New location:\t\t\t{location}\n")
|
|
2076
|
+
|
|
2077
|
+
try:
|
|
2078
|
+
if csv_file_name:
|
|
2079
|
+
write_csv_entry(csv_file_name, now_local_naive(), "Location", user, location_old, location)
|
|
2080
|
+
except Exception as e:
|
|
2081
|
+
print(f"* Error: {e}")
|
|
2082
|
+
|
|
2083
|
+
m_subject = f"Github user {user} location has changed!"
|
|
2084
|
+
m_body = f"Github user {user} location has changed\n\nOld location: {location_old}\n\nNew location: {location}\n\nCheck interval: {display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)}){get_cur_ts(nl_ch + 'Timestamp: ')}"
|
|
2085
|
+
|
|
2086
|
+
if PROFILE_NOTIFICATION:
|
|
2087
|
+
print(f"Sending email notification to {RECEIVER_EMAIL}")
|
|
2088
|
+
send_email(m_subject, m_body, "", SMTP_SSL)
|
|
2089
|
+
|
|
2090
|
+
location_old = location
|
|
2091
|
+
print(f"Check interval:\t\t\t{display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)})")
|
|
2092
|
+
print_cur_ts("Timestamp:\t\t\t")
|
|
2093
|
+
|
|
2094
|
+
# Changed user name
|
|
2095
|
+
user_name = gh_call(lambda: g_user.name)()
|
|
2096
|
+
if user_name is not None and user_name != user_name_old:
|
|
2097
|
+
print(f"* User name has changed for user {user} !\n")
|
|
2098
|
+
print(f"Old user name:\t\t\t{user_name_old}\n")
|
|
2099
|
+
print(f"New user name:\t\t\t{user_name}\n")
|
|
2100
|
+
|
|
2101
|
+
try:
|
|
2102
|
+
if csv_file_name:
|
|
2103
|
+
write_csv_entry(csv_file_name, now_local_naive(), "User Name", user, user_name_old, user_name)
|
|
2104
|
+
except Exception as e:
|
|
2105
|
+
print(f"* Error: {e}")
|
|
2106
|
+
|
|
2107
|
+
m_subject = f"Github user {user} name has changed!"
|
|
2108
|
+
m_body = f"Github user {user} name has changed\n\nOld user name: {user_name_old}\n\nNew user name: {user_name}\n\nCheck interval: {display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)}){get_cur_ts(nl_ch + 'Timestamp: ')}"
|
|
2109
|
+
|
|
2110
|
+
if PROFILE_NOTIFICATION:
|
|
2111
|
+
print(f"Sending email notification to {RECEIVER_EMAIL}")
|
|
2112
|
+
send_email(m_subject, m_body, "", SMTP_SSL)
|
|
2113
|
+
|
|
2114
|
+
user_name_old = user_name
|
|
2115
|
+
print(f"Check interval:\t\t\t{display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)})")
|
|
2116
|
+
print_cur_ts("Timestamp:\t\t\t")
|
|
2117
|
+
|
|
2118
|
+
# Changed company
|
|
2119
|
+
company = gh_call(lambda: g_user.company)()
|
|
2120
|
+
if company is not None and company != company_old:
|
|
2121
|
+
print(f"* User company has changed for user {user} !\n")
|
|
2122
|
+
print(f"Old company:\t\t\t{company_old}\n")
|
|
2123
|
+
print(f"New company:\t\t\t{company}\n")
|
|
2124
|
+
|
|
2125
|
+
try:
|
|
2126
|
+
if csv_file_name:
|
|
2127
|
+
write_csv_entry(csv_file_name, now_local_naive(), "Company", user, company_old, company)
|
|
2128
|
+
except Exception as e:
|
|
2129
|
+
print(f"* Error: {e}")
|
|
2130
|
+
|
|
2131
|
+
m_subject = f"Github user {user} company has changed!"
|
|
2132
|
+
m_body = f"Github user {user} company has changed\n\nOld company: {company_old}\n\nNew company: {company}\n\nCheck interval: {display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)}){get_cur_ts(nl_ch + 'Timestamp: ')}"
|
|
2133
|
+
|
|
2134
|
+
if PROFILE_NOTIFICATION:
|
|
2135
|
+
print(f"Sending email notification to {RECEIVER_EMAIL}")
|
|
2136
|
+
send_email(m_subject, m_body, "", SMTP_SSL)
|
|
2137
|
+
|
|
2138
|
+
company_old = company
|
|
2139
|
+
print(f"Check interval:\t\t\t{display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)})")
|
|
2140
|
+
print_cur_ts("Timestamp:\t\t\t")
|
|
2141
|
+
|
|
2142
|
+
# Changed email
|
|
2143
|
+
email = gh_call(lambda: g_user.email)()
|
|
2144
|
+
if email is not None and email != email_old:
|
|
2145
|
+
print(f"* User email has changed for user {user} !\n")
|
|
2146
|
+
print(f"Old email:\t\t\t{email_old}\n")
|
|
2147
|
+
print(f"New email:\t\t\t{email}\n")
|
|
2148
|
+
|
|
2149
|
+
try:
|
|
2150
|
+
if csv_file_name:
|
|
2151
|
+
write_csv_entry(csv_file_name, now_local_naive(), "Email", user, email_old, email)
|
|
2152
|
+
except Exception as e:
|
|
2153
|
+
print(f"* Error: {e}")
|
|
2154
|
+
|
|
2155
|
+
m_subject = f"Github user {user} email has changed!"
|
|
2156
|
+
m_body = f"Github user {user} email has changed\n\nOld email: {email_old}\n\nNew email: {email}\n\nCheck interval: {display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)}){get_cur_ts(nl_ch + 'Timestamp: ')}"
|
|
2157
|
+
|
|
2158
|
+
if PROFILE_NOTIFICATION:
|
|
2159
|
+
print(f"Sending email notification to {RECEIVER_EMAIL}")
|
|
2160
|
+
send_email(m_subject, m_body, "", SMTP_SSL)
|
|
2161
|
+
|
|
2162
|
+
email_old = email
|
|
2163
|
+
print(f"Check interval:\t\t\t{display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)})")
|
|
2164
|
+
print_cur_ts("Timestamp:\t\t\t")
|
|
2165
|
+
|
|
2166
|
+
# Changed blog URL
|
|
2167
|
+
blog = gh_call(lambda: g_user.blog)()
|
|
2168
|
+
if blog is not None and blog != blog_old:
|
|
2169
|
+
print(f"* User blog URL has changed for user {user} !\n")
|
|
2170
|
+
print(f"Old blog URL:\t\t\t{blog_old}\n")
|
|
2171
|
+
print(f"New blog URL:\t\t\t{blog}\n")
|
|
2172
|
+
|
|
2173
|
+
try:
|
|
2174
|
+
if csv_file_name:
|
|
2175
|
+
write_csv_entry(csv_file_name, now_local_naive(), "Blog URL", user, blog_old, blog)
|
|
2176
|
+
except Exception as e:
|
|
2177
|
+
print(f"* Error: {e}")
|
|
2178
|
+
|
|
2179
|
+
m_subject = f"Github user {user} blog URL has changed!"
|
|
2180
|
+
m_body = f"Github user {user} blog URL has changed\n\nOld blog URL: {blog_old}\n\nNew blog URL: {blog}\n\nCheck interval: {display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)}){get_cur_ts(nl_ch + 'Timestamp: ')}"
|
|
2181
|
+
|
|
2182
|
+
if PROFILE_NOTIFICATION:
|
|
2183
|
+
print(f"Sending email notification to {RECEIVER_EMAIL}")
|
|
2184
|
+
send_email(m_subject, m_body, "", SMTP_SSL)
|
|
2185
|
+
|
|
2186
|
+
blog_old = blog
|
|
2187
|
+
print(f"Check interval:\t\t\t{display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)})")
|
|
2188
|
+
print_cur_ts("Timestamp:\t\t\t")
|
|
2189
|
+
|
|
2190
|
+
# Changed account update date
|
|
2191
|
+
account_updated_date = gh_call(lambda: g_user.updated_at)()
|
|
2192
|
+
if account_updated_date is not None and account_updated_date != account_updated_date_old:
|
|
2193
|
+
print(f"* User account has been updated for user {user} ! (after {calculate_timespan(account_updated_date, account_updated_date_old, show_seconds=False, granularity=2)})\n")
|
|
2194
|
+
print(f"Old account update date:\t{get_date_from_ts(account_updated_date_old)}\n")
|
|
2195
|
+
print(f"New account update date:\t{get_date_from_ts(account_updated_date)}\n")
|
|
2196
|
+
|
|
2197
|
+
try:
|
|
2198
|
+
if csv_file_name:
|
|
2199
|
+
write_csv_entry(csv_file_name, convert_to_local_naive(account_updated_date), "Account Update Date", user, convert_to_local_naive(account_updated_date_old), convert_to_local_naive(account_updated_date))
|
|
2200
|
+
except Exception as e:
|
|
2201
|
+
print(f"* Error: {e}")
|
|
2202
|
+
|
|
2203
|
+
m_subject = f"Github user {user} account has been updated! (after {calculate_timespan(account_updated_date, account_updated_date_old, show_seconds=False, granularity=2)})"
|
|
2204
|
+
m_body = f"Github user {user} account has been updated (after {calculate_timespan(account_updated_date, account_updated_date_old, show_seconds=False, granularity=2)})\n\nOld account update date: {get_date_from_ts(account_updated_date_old)}\n\nNew account update date: {get_date_from_ts(account_updated_date)}\n\nCheck interval: {display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)}){get_cur_ts(nl_ch + 'Timestamp: ')}"
|
|
2205
|
+
|
|
2206
|
+
if PROFILE_NOTIFICATION:
|
|
2207
|
+
print(f"Sending email notification to {RECEIVER_EMAIL}")
|
|
2208
|
+
send_email(m_subject, m_body, "", SMTP_SSL)
|
|
2209
|
+
|
|
2210
|
+
account_updated_date_old = account_updated_date
|
|
2211
|
+
print(f"Check interval:\t\t\t{display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)})")
|
|
2212
|
+
print_cur_ts("Timestamp:\t\t\t")
|
|
2213
|
+
|
|
2214
|
+
list_of_repos = []
|
|
2215
|
+
|
|
2216
|
+
# Changed repos details
|
|
2217
|
+
if TRACK_REPOS_CHANGES:
|
|
2218
|
+
repos_list = gh_call(g_user.get_repos)()
|
|
2219
|
+
if repos_list is not None:
|
|
2220
|
+
try:
|
|
2221
|
+
list_of_repos = github_process_repos(repos_list)
|
|
2222
|
+
list_of_repos_ok = True
|
|
2223
|
+
except Exception as e:
|
|
2224
|
+
list_of_repos = list_of_repos_old
|
|
2225
|
+
print(f"* Cannot process list of public repositories, keeping old list: {e}")
|
|
2226
|
+
list_of_repos_ok = False
|
|
2227
|
+
|
|
2228
|
+
if list_of_repos_ok:
|
|
2229
|
+
|
|
2230
|
+
for repo in list_of_repos:
|
|
2231
|
+
r_name = repo.get("name")
|
|
2232
|
+
r_descr = repo.get("descr", "")
|
|
2233
|
+
r_forks = repo.get("forks", 0)
|
|
2234
|
+
r_stars = repo.get("stars", 0)
|
|
2235
|
+
r_subscribers = repo.get("subscribers", 0)
|
|
2236
|
+
r_url = repo.get("url", "")
|
|
2237
|
+
r_update = repo.get("update_date")
|
|
2238
|
+
r_stargazers_list = repo.get("stargazers_list")
|
|
2239
|
+
r_subscribers_list = repo.get("subscribers_list")
|
|
2240
|
+
r_forked_repos = repo.get("forked_repos")
|
|
2241
|
+
r_issues = repo.get("issues")
|
|
2242
|
+
r_pulls = repo.get("pulls")
|
|
2243
|
+
r_issues_list = repo.get("issues_list")
|
|
2244
|
+
r_pulls_list = repo.get("pulls_list")
|
|
2245
|
+
|
|
2246
|
+
for repo_old in list_of_repos_old:
|
|
2247
|
+
r_name_old = repo_old.get("name")
|
|
2248
|
+
if r_name_old == r_name:
|
|
2249
|
+
r_descr_old = repo_old.get("descr", "")
|
|
2250
|
+
r_forks_old = repo_old.get("forks", 0)
|
|
2251
|
+
r_stars_old = repo_old.get("stars", 0)
|
|
2252
|
+
r_subscribers_old = repo_old.get("subscribers", 0)
|
|
2253
|
+
r_url_old = repo_old.get("url", "")
|
|
2254
|
+
r_update_old = repo_old.get("update_date")
|
|
2255
|
+
r_stargazers_list_old = repo_old.get("stargazers_list")
|
|
2256
|
+
r_subscribers_list_old = repo_old.get("subscribers_list")
|
|
2257
|
+
r_forked_repos_old = repo_old.get("forked_repos")
|
|
2258
|
+
r_issues_old = repo_old.get("issues")
|
|
2259
|
+
r_pulls_old = repo_old.get("pulls")
|
|
2260
|
+
r_issues_list_old = repo_old.get("issues_list")
|
|
2261
|
+
r_pulls_list_old = repo_old.get("pulls_list")
|
|
2262
|
+
|
|
2263
|
+
# Update date for repo changed
|
|
2264
|
+
if r_update != r_update_old:
|
|
2265
|
+
r_message = f"* Repo '{r_name}' update date changed (after {calculate_timespan(r_update, r_update_old, show_seconds=False, granularity=2)})\n* Repo URL: {r_url}\n\nOld repo update date:\t{get_date_from_ts(r_update_old)}\n\nNew repo update date:\t{get_date_from_ts(r_update)}\n"
|
|
2266
|
+
print(r_message)
|
|
2267
|
+
try:
|
|
2268
|
+
if csv_file_name:
|
|
2269
|
+
write_csv_entry(csv_file_name, now_local_naive(), "Repo Update Date", r_name, convert_to_local_naive(r_update_old), convert_to_local_naive(r_update))
|
|
2270
|
+
except Exception as e:
|
|
2271
|
+
print(f"* Error: {e}")
|
|
2272
|
+
m_subject = f"Github user {user} repo '{r_name}' update date has changed ! (after {calculate_timespan(r_update, r_update_old, show_seconds=False, granularity=2)})"
|
|
2273
|
+
m_body = f"{r_message}\nCheck interval: {display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)}){get_cur_ts(nl_ch + 'Timestamp: ')}"
|
|
2274
|
+
if REPO_UPDATE_DATE_NOTIFICATION:
|
|
2275
|
+
print(f"Sending email notification to {RECEIVER_EMAIL}")
|
|
2276
|
+
send_email(m_subject, m_body, "", SMTP_SSL)
|
|
2277
|
+
print(f"Check interval:\t\t\t{display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)})")
|
|
2278
|
+
print_cur_ts("Timestamp:\t\t\t")
|
|
2279
|
+
|
|
2280
|
+
# Number of stars for repo changed
|
|
2281
|
+
check_repo_list_changes(r_stars_old, r_stars, r_stargazers_list_old, r_stargazers_list, "Stargazers", r_name, r_url, user, csv_file_name)
|
|
2282
|
+
|
|
2283
|
+
# Number of watchers/subscribers for repo changed
|
|
2284
|
+
check_repo_list_changes(r_subscribers_old, r_subscribers, r_subscribers_list_old, r_subscribers_list, "Watchers", r_name, r_url, user, csv_file_name)
|
|
2285
|
+
|
|
2286
|
+
# Number of forks for repo changed
|
|
2287
|
+
check_repo_list_changes(r_forks_old, r_forks, r_forked_repos_old, r_forked_repos, "Forks", r_name, r_url, user, csv_file_name)
|
|
2288
|
+
|
|
2289
|
+
# Number of issues for repo changed
|
|
2290
|
+
check_repo_list_changes(r_issues_old, r_issues, r_issues_list_old, r_issues_list, "Issues", r_name, r_url, user, csv_file_name)
|
|
2291
|
+
|
|
2292
|
+
# Number of PRs for repo changed
|
|
2293
|
+
check_repo_list_changes(r_pulls_old, r_pulls, r_pulls_list_old, r_pulls_list, "Pull Requests", r_name, r_url, user, csv_file_name)
|
|
2294
|
+
|
|
2295
|
+
# Repo description changed
|
|
2296
|
+
if r_descr != r_descr_old:
|
|
2297
|
+
r_message = f"* Repo '{r_name}' description changed from:\n\n'{r_descr_old}'\n\nto:\n\n'{r_descr}'\n\n* Repo URL: {r_url}\n"
|
|
2298
|
+
print(r_message)
|
|
2299
|
+
try:
|
|
2300
|
+
if csv_file_name:
|
|
2301
|
+
write_csv_entry(csv_file_name, now_local_naive(), "Repo Description", r_name, r_descr_old, r_descr)
|
|
2302
|
+
except Exception as e:
|
|
2303
|
+
print(f"* Error: {e}")
|
|
2304
|
+
m_subject = f"Github user {user} repo '{r_name}' description has changed !"
|
|
2305
|
+
m_body = f"{r_message}\nCheck interval: {display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)}){get_cur_ts(nl_ch + 'Timestamp: ')}"
|
|
2306
|
+
if REPO_NOTIFICATION:
|
|
2307
|
+
print(f"Sending email notification to {RECEIVER_EMAIL}")
|
|
2308
|
+
send_email(m_subject, m_body, "", SMTP_SSL)
|
|
2309
|
+
print(f"Check interval:\t\t\t{display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)})")
|
|
2310
|
+
print_cur_ts("Timestamp:\t\t\t")
|
|
2311
|
+
|
|
2312
|
+
list_of_repos_old = list_of_repos
|
|
2313
|
+
|
|
2314
|
+
# New Github events
|
|
2315
|
+
if not DO_NOT_MONITOR_GITHUB_EVENTS:
|
|
2316
|
+
events = list(gh_call(lambda: list(islice(g_user.get_events(), EVENTS_NUMBER)))())
|
|
2317
|
+
if events is not None:
|
|
2318
|
+
available_events = len(events)
|
|
2319
|
+
if available_events == 0:
|
|
2320
|
+
last_event_id = 0
|
|
2321
|
+
last_event_ts = None
|
|
2322
|
+
else:
|
|
2323
|
+
try:
|
|
2324
|
+
newest = events[0]
|
|
2325
|
+
last_event_id = newest.id
|
|
2326
|
+
if last_event_id:
|
|
2327
|
+
last_event_ts = newest.created_at
|
|
2328
|
+
except Exception as e:
|
|
2329
|
+
last_event_id = 0
|
|
2330
|
+
last_event_ts = None
|
|
2331
|
+
print(f"* Cannot get last event ID / timestamp: {e}")
|
|
2332
|
+
print_cur_ts("Timestamp:\t\t\t")
|
|
2333
|
+
|
|
2334
|
+
events_list_of_ids = set()
|
|
2335
|
+
first_new = True
|
|
2336
|
+
|
|
2337
|
+
# New events showed up
|
|
2338
|
+
if last_event_id and last_event_id != last_event_id_old:
|
|
2339
|
+
|
|
2340
|
+
for event in reversed(events):
|
|
2341
|
+
|
|
2342
|
+
events_list_of_ids.add(event.id)
|
|
2343
|
+
|
|
2344
|
+
if event.id in events_list_of_ids_old:
|
|
2345
|
+
continue
|
|
2346
|
+
|
|
2347
|
+
if event.type in EVENTS_TO_MONITOR or 'ALL' in EVENTS_TO_MONITOR:
|
|
2348
|
+
|
|
2349
|
+
event_date = None
|
|
2350
|
+
repo_name = ""
|
|
2351
|
+
repo_url = ""
|
|
2352
|
+
event_text = ""
|
|
2353
|
+
|
|
2354
|
+
try:
|
|
2355
|
+
event_date, repo_name, repo_url, event_text = github_print_event(event, g, first_new, last_event_ts_old)
|
|
2356
|
+
except Exception as e:
|
|
2357
|
+
print(f"\n* Warning, cannot fetch all event details: {e}")
|
|
2358
|
+
|
|
2359
|
+
first_new = False
|
|
2360
|
+
|
|
2361
|
+
if event_date and repo_name and event_text:
|
|
2362
|
+
|
|
2363
|
+
try:
|
|
2364
|
+
if csv_file_name:
|
|
2365
|
+
write_csv_entry(csv_file_name, convert_to_local_naive(event_date), str(event.type), str(repo_name), "", "")
|
|
2366
|
+
except Exception as e:
|
|
2367
|
+
print(f"* Error: {e}")
|
|
2368
|
+
|
|
2369
|
+
m_subject = f"Github user {user} has new {event.type} (repo: {repo_name})"
|
|
2370
|
+
m_body = f"Github user {user} has new {event.type} event\n\n{event_text}\nCheck interval: {display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)}){get_cur_ts(nl_ch + 'Timestamp: ')}"
|
|
2371
|
+
|
|
2372
|
+
if EVENT_NOTIFICATION:
|
|
2373
|
+
print(f"\nSending email notification to {RECEIVER_EMAIL}")
|
|
2374
|
+
send_email(m_subject, m_body, "", SMTP_SSL)
|
|
2375
|
+
|
|
2376
|
+
print(f"Check interval:\t\t\t{display_time(GITHUB_CHECK_INTERVAL)} ({get_range_of_dates_from_tss(int(time.time()) - GITHUB_CHECK_INTERVAL, int(time.time()), short=True)})")
|
|
2377
|
+
print_cur_ts("Timestamp:\t\t\t")
|
|
2378
|
+
|
|
2379
|
+
last_event_id_old = last_event_id
|
|
2380
|
+
last_event_ts_old = last_event_ts
|
|
2381
|
+
events_list_of_ids_old = events_list_of_ids.copy()
|
|
2382
|
+
|
|
2383
|
+
alive_counter += 1
|
|
2384
|
+
|
|
2385
|
+
if LIVENESS_CHECK_COUNTER and alive_counter >= LIVENESS_CHECK_COUNTER:
|
|
2386
|
+
print_cur_ts("Liveness check, timestamp:\t")
|
|
2387
|
+
alive_counter = 0
|
|
2388
|
+
|
|
2389
|
+
g.close()
|
|
2390
|
+
|
|
2391
|
+
time.sleep(GITHUB_CHECK_INTERVAL)
|
|
2392
|
+
|
|
2393
|
+
|
|
2394
|
+
def main():
    """Parse CLI arguments, load config/dotenv secrets, then start monitoring.

    One-shot modes (--generate-config, --version, the listing switches and
    --send-test-email) print their output and exit early; otherwise settings
    are validated and control is handed to github_monitor_user(), which loops
    until interrupted.
    """
    global CLI_CONFIG_PATH, DOTENV_FILE, LOCAL_TIMEZONE, LIVENESS_CHECK_COUNTER, GITHUB_TOKEN, GITHUB_API_URL, CSV_FILE, DISABLE_LOGGING, GITHUB_LOGFILE, PROFILE_NOTIFICATION, EVENT_NOTIFICATION, REPO_NOTIFICATION, REPO_UPDATE_DATE_NOTIFICATION, ERROR_NOTIFICATION, GITHUB_CHECK_INTERVAL, SMTP_PASSWORD, stdout_bck, DO_NOT_MONITOR_GITHUB_EVENTS, TRACK_REPOS_CHANGES

    # Handled before argparse so these work even with otherwise-invalid args
    if "--generate-config" in sys.argv:
        print(CONFIG_BLOCK.strip("\n"))
        sys.exit(0)

    if "--version" in sys.argv:
        print(f"{os.path.basename(sys.argv[0])} v{VERSION}")
        sys.exit(0)

    # Remember real stdout so it can be restored after Logger() redirection
    stdout_bck = sys.stdout

    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    clear_screen(CLEAR_SCREEN)

    print(f"Github Monitoring Tool v{VERSION}\n")

    parser = argparse.ArgumentParser(
        prog="github_monitor",
        description=("Monitor a GitHub user's profile and activity with customizable email alerts [ https://github.com/misiektoja/github_monitor/ ]"), formatter_class=argparse.RawTextHelpFormatter
    )

    # Positional
    parser.add_argument(
        "username",
        nargs="?",
        metavar="GITHUB_USERNAME",
        help="GitHub username",
        type=str
    )

    # Version, just to list in help, it is handled earlier
    parser.add_argument(
        "--version",
        action="version",
        version=f"%(prog)s v{VERSION}"
    )

    # Configuration & dotenv files
    conf = parser.add_argument_group("Configuration & dotenv files")
    conf.add_argument(
        "--config-file",
        dest="config_file",
        metavar="PATH",
        help="Location of the optional config file",
    )
    conf.add_argument(
        "--generate-config",
        action="store_true",
        help="Print default config template and exit",
    )
    conf.add_argument(
        "--env-file",
        dest="env_file",
        metavar="PATH",
        help="Path to optional dotenv file (auto-search if not set, disable with 'none')",
    )

    # API settings
    creds = parser.add_argument_group("API settings")
    creds.add_argument(
        "-t", "--github-token",
        dest="github_token",
        metavar="GITHUB_TOKEN",
        type=str,
        help="GitHub personal access token (classic)"
    )
    creds.add_argument(
        "-x", "--github-url",
        dest="github_url",
        metavar="GITHUB_URL",
        type=str,
        help="GitHub API URL"
    )

    # Notifications (default=None so "not passed" is distinguishable from False)
    notify = parser.add_argument_group("Notifications")
    notify.add_argument(
        "-p", "--notify-profile",
        dest="notify_profile",
        action="store_true",
        default=None,
        help="Email when user's profile changes"
    )
    notify.add_argument(
        "-s", "--notify-events",
        dest="notify_events",
        action="store_true",
        default=None,
        help="Email when new GitHub events appear"
    )
    notify.add_argument(
        "-q", "--notify-repo-changes",
        dest="notify_repo_changes",
        action="store_true",
        default=None,
        help="Email when user's repositories change (stargazers, watchers, forks, issues, PRs, description etc., except for update date)"
    )
    notify.add_argument(
        "-u", "--notify-repo-update-date",
        dest="notify_repo_update_date",
        action="store_true",
        default=None,
        help="Email when user's repositories update date changes"
    )
    notify.add_argument(
        "-e", "--no-error-notify",
        dest="notify_errors",
        action="store_false",
        default=None,
        help="Disable email on errors"
    )
    notify.add_argument(
        "--send-test-email",
        dest="send_test_email",
        action="store_true",
        help="Send test email to verify SMTP settings"
    )

    # Intervals & timers
    times = parser.add_argument_group("Intervals & timers")
    times.add_argument(
        "-c", "--check-interval",
        dest="check_interval",
        metavar="SECONDS",
        type=int,
        help="Time between monitoring checks, in seconds"
    )

    # Listing
    listing = parser.add_argument_group("Listing")
    listing.add_argument(
        "-r", "--list-repos",
        dest="list_repos",
        action="store_true",
        default=None,
        help="List user's repositories with stats"
    )
    listing.add_argument(
        "-g", "--list-starred-repos",
        dest="list_starred_repos",
        action="store_true",
        default=None,
        help="List user's starred repositories"
    )
    listing.add_argument(
        "-f", "--list-followers-followings",
        dest="list_followers_and_followings",
        action="store_true",
        default=None,
        help="List user's followers & followings"
    )
    listing.add_argument(
        "-l", "--list-recent-events",
        dest="list_recent_events",
        action="store_true",
        default=None,
        help="List user's recent GitHub events"
    )
    listing.add_argument(
        "-n", "--recent-events-count",
        dest="recent_events_count",
        metavar="N",
        type=int,
        help="Number of events to list (use with -l)"
    )

    # Features & output
    opts = parser.add_argument_group("Features & output")
    opts.add_argument(
        "-j", "--track-repos-changes",
        dest="track_repos_changes",
        action="store_true",
        default=None,
        help="Track user's repository changes (changed stargazers, watchers, forks, description, update date etc.)"
    )
    opts.add_argument(
        "-k", "--no-monitor-events",
        dest="no_monitor_events",
        action="store_true",
        default=None,
        help="Disable event monitoring"
    )
    opts.add_argument(
        "-b", "--csv-file",
        dest="csv_file",
        metavar="CSV_FILE",
        type=str,
        help="Write new events & profile changes to CSV"
    )
    opts.add_argument(
        "-d", "--disable-logging",
        dest="disable_logging",
        action="store_true",
        default=None,
        help="Disable logging to github_monitor_<username>.log"
    )

    args = parser.parse_args()

    if len(sys.argv) == 1:
        parser.print_help(sys.stderr)
        sys.exit(1)

    # Optional config file: executed as Python to override module globals.
    # NOTE: exec() of the config file means it must come from a trusted source.
    if args.config_file:
        CLI_CONFIG_PATH = os.path.expanduser(args.config_file)

    cfg_path = find_config_file(CLI_CONFIG_PATH)

    if not cfg_path and CLI_CONFIG_PATH:
        print(f"* Error: Config file '{CLI_CONFIG_PATH}' does not exist")
        sys.exit(1)

    if cfg_path:
        try:
            with open(cfg_path, "r") as cf:
                exec(cf.read(), globals())
        except Exception as e:
            print(f"* Error loading config file '{cfg_path}': {e}")
            sys.exit(1)

    # Dotenv resolution: CLI flag wins over configured DOTENV_FILE; the
    # literal value 'none' disables dotenv loading entirely
    if args.env_file:
        DOTENV_FILE = os.path.expanduser(args.env_file)
    else:
        if DOTENV_FILE:
            DOTENV_FILE = os.path.expanduser(DOTENV_FILE)

    if DOTENV_FILE and DOTENV_FILE.lower() == 'none':
        env_path = None
    else:
        try:
            from dotenv import load_dotenv, find_dotenv

            if DOTENV_FILE:
                env_path = DOTENV_FILE
                if not os.path.isfile(env_path):
                    print(f"* Warning: dotenv file '{env_path}' does not exist\n")
                else:
                    load_dotenv(env_path, override=True)
            else:
                env_path = find_dotenv() or None
                if env_path:
                    load_dotenv(env_path, override=True)
        except ImportError:
            # python-dotenv is an optional dependency; warn and continue
            env_path = DOTENV_FILE if DOTENV_FILE else None
            if env_path:
                print(f"* Warning: Cannot load dotenv file '{env_path}' because 'python-dotenv' is not installed\n\nTo install it, run:\n pip3 install python-dotenv\n\nOnce installed, re-run this tool\n")

    # Environment variables (possibly loaded from dotenv) override secrets
    if env_path:
        for secret in SECRET_KEYS:
            val = os.getenv(secret)
            if val is not None:
                globals()[secret] = val

    # Resolve the local timezone; "Auto" requires tzlocal's get_localzone()
    local_tz = None
    if LOCAL_TIMEZONE == "Auto":
        if get_localzone is not None:
            try:
                local_tz = get_localzone()
            except Exception:
                pass
        if local_tz:
            LOCAL_TIMEZONE = str(local_tz)
        else:
            print("* Error: Cannot detect local timezone, consider setting LOCAL_TIMEZONE to your local timezone manually !")
            sys.exit(1)
    else:
        if not is_valid_timezone(LOCAL_TIMEZONE):
            print(f"* Error: Configured LOCAL_TIMEZONE '{LOCAL_TIMEZONE}' is not valid. Please use a valid pytz timezone name.")
            sys.exit(1)

    if not check_internet():
        sys.exit(1)

    if args.send_test_email:
        print("* Sending test email notification ...\n")
        if send_email("github_monitor: test email", "This is test email - your SMTP settings seems to be correct !", "", SMTP_SSL, smtp_timeout=5) == 0:
            print("* Email sent successfully !")
        else:
            sys.exit(1)
        sys.exit(0)

    if args.github_token:
        GITHUB_TOKEN = args.github_token

    # Fixed: error text referenced "--github_token" but the defined option
    # is "--github-token" (hyphen), matching the argparse definition above
    if not GITHUB_TOKEN or GITHUB_TOKEN == "your_github_classic_personal_access_token":
        print("* Error: GITHUB_TOKEN (-t / --github-token) value is empty or incorrect")
        sys.exit(1)

    if not args.username:
        print("* Error: GITHUB_USERNAME argument is required !")
        sys.exit(1)

    if args.github_url:
        GITHUB_API_URL = args.github_url

    # Fixed: error text referenced "--github_url" but the defined option
    # is "--github-url" (hyphen)
    if not GITHUB_API_URL:
        print("* Error: GITHUB_API_URL (-x / --github-url) value is empty")
        sys.exit(1)

    # One-shot listing modes: print and exit without entering the monitor loop
    if args.list_followers_and_followings:
        try:
            github_print_followers_and_followings(args.username)
        except Exception as e:
            print(f"* Error: {e}")
            sys.exit(1)
        sys.exit(0)

    if args.list_repos:
        try:
            github_print_repos(args.username)
        except Exception as e:
            print(f"* Error: {e}")
            sys.exit(1)
        sys.exit(0)

    if args.list_starred_repos:
        try:
            github_print_starred_repos(args.username)
        except Exception as e:
            print(f"* Error: {e}")
            sys.exit(1)
        sys.exit(0)

    if args.check_interval:
        GITHUB_CHECK_INTERVAL = args.check_interval
        LIVENESS_CHECK_COUNTER = LIVENESS_CHECK_INTERVAL / GITHUB_CHECK_INTERVAL

    if args.csv_file:
        CSV_FILE = os.path.expanduser(args.csv_file)
    else:
        if CSV_FILE:
            CSV_FILE = os.path.expanduser(CSV_FILE)

    # Fail fast if the CSV file is not writable before monitoring starts
    if CSV_FILE:
        try:
            with open(CSV_FILE, 'a', newline='', buffering=1, encoding="utf-8") as _:
                pass
        except Exception as e:
            print(f"* Error: CSV file cannot be opened for writing: {e}")
            sys.exit(1)

    if args.list_recent_events:
        if args.recent_events_count and args.recent_events_count > 0:
            events_n = args.recent_events_count
        else:
            events_n = 5
        try:
            github_list_events(args.username, events_n, CSV_FILE)
        except Exception as e:
            print(f"* Error: {e}")
            sys.exit(1)
        sys.exit(0)

    if args.disable_logging is True:
        DISABLE_LOGGING = True

    # Build the per-user log file path (append "_<username>.log" when the
    # configured name has no extension) and redirect stdout through Logger
    if not DISABLE_LOGGING:
        log_path = Path(os.path.expanduser(GITHUB_LOGFILE))
        if log_path.parent != Path('.'):
            if log_path.suffix == "":
                log_path = log_path.parent / f"{log_path.name}_{args.username}.log"
        else:
            if log_path.suffix == "":
                log_path = Path(f"{log_path.name}_{args.username}.log")
        log_path.parent.mkdir(parents=True, exist_ok=True)
        FINAL_LOG_PATH = str(log_path)
        sys.stdout = Logger(FINAL_LOG_PATH)
    else:
        FINAL_LOG_PATH = None

    # CLI notification switches only ever enable (or, for errors, disable);
    # None means "not passed", leaving the configured value untouched
    if args.notify_profile is True:
        PROFILE_NOTIFICATION = True

    if args.notify_events is True:
        EVENT_NOTIFICATION = True

    if args.notify_repo_changes is True:
        REPO_NOTIFICATION = True

    if args.notify_repo_update_date is True:
        REPO_UPDATE_DATE_NOTIFICATION = True

    if args.notify_errors is False:
        ERROR_NOTIFICATION = False

    if args.track_repos_changes is True:
        TRACK_REPOS_CHANGES = True

    if args.no_monitor_events is True:
        DO_NOT_MONITOR_GITHUB_EVENTS = True

    # Notifications that depend on disabled features are forced off
    if not TRACK_REPOS_CHANGES:
        REPO_NOTIFICATION = False
        REPO_UPDATE_DATE_NOTIFICATION = False

    if DO_NOT_MONITOR_GITHUB_EVENTS:
        EVENT_NOTIFICATION = False

    # Placeholder SMTP host from the config template -> disable all email
    if SMTP_HOST.startswith("your_smtp_server_"):
        EVENT_NOTIFICATION = False
        PROFILE_NOTIFICATION = False
        REPO_NOTIFICATION = False
        REPO_UPDATE_DATE_NOTIFICATION = False
        ERROR_NOTIFICATION = False

    print(f"* Github polling interval:\t[ {display_time(GITHUB_CHECK_INTERVAL)} ]")
    print(f"* Email notifications:\t\t[profile changes = {PROFILE_NOTIFICATION}] [new events = {EVENT_NOTIFICATION}]\n*\t\t\t\t[repos changes = {REPO_NOTIFICATION}] [repos update date = {REPO_UPDATE_DATE_NOTIFICATION}]\n*\t\t\t\t[errors = {ERROR_NOTIFICATION}]")
    print(f"* Github API URL:\t\t{GITHUB_API_URL}")
    print(f"* Track repos changes:\t\t{TRACK_REPOS_CHANGES}")
    print(f"* Monitor Github events:\t{not DO_NOT_MONITOR_GITHUB_EVENTS}")
    print(f"* Liveness check:\t\t{bool(LIVENESS_CHECK_INTERVAL)}" + (f" ({display_time(LIVENESS_CHECK_INTERVAL)})" if LIVENESS_CHECK_INTERVAL else ""))
    print(f"* CSV logging enabled:\t\t{bool(CSV_FILE)}" + (f" ({CSV_FILE})" if CSV_FILE else ""))
    print(f"* Output logging enabled:\t{not DISABLE_LOGGING}" + (f" ({FINAL_LOG_PATH})" if not DISABLE_LOGGING else ""))
    print(f"* Configuration file:\t\t{cfg_path}")
    print(f"* Dotenv file:\t\t\t{env_path or 'None'}")
    print(f"* Local timezone:\t\t{LOCAL_TIMEZONE}")

    out = f"\nMonitoring Github user {args.username}"
    print(out)
    print("-" * len(out))

    # We define signal handlers only for Linux, Unix & MacOS since Windows has limited number of signals supported
    if platform.system() != 'Windows':
        signal.signal(signal.SIGUSR1, toggle_profile_changes_notifications_signal_handler)
        signal.signal(signal.SIGUSR2, toggle_new_events_notifications_signal_handler)
        signal.signal(signal.SIGCONT, toggle_repo_changes_notifications_signal_handler)
        signal.signal(signal.SIGPIPE, toggle_repo_update_date_changes_notifications_signal_handler)
        signal.signal(signal.SIGTRAP, increase_check_signal_handler)
        signal.signal(signal.SIGABRT, decrease_check_signal_handler)
        signal.signal(signal.SIGHUP, reload_secrets_signal_handler)

    # Blocks until the monitor loop is interrupted
    github_monitor_user(args.username, CSV_FILE)

    sys.stdout = stdout_bck
    sys.exit(0)
|
2835
|
+
# Script entry point: invoke main() only when run directly, not on import
if __name__ == "__main__":
    main()