pynop 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pynop-0.1.0/PKG-INFO ADDED
@@ -0,0 +1,13 @@
1
+ Metadata-Version: 2.4
2
+ Name: pynop
3
+ Version: 0.1.0
4
+ Summary: Shared DB/core + NopCommerce tooling for ABC StoreDataSync
5
+ Author-email: ABC Sync <noreply@example.com>
6
+ License: MIT
7
+ Requires-Python: >=3.9
8
+ Description-Content-Type: text/markdown
9
+
10
+ # pynop (ABC StoreDataSync)
11
+
12
+ This package contains the shared `core` utilities (DB access, Biz/log framework) and the `NopTool`/`NopQuery` helpers used by the `sync` project.
13
+
pynop-0.1.0/README.md ADDED
@@ -0,0 +1,4 @@
1
+ # pynop (ABC StoreDataSync)
2
+
3
+ This package contains the shared `core` utilities (DB access, Biz/log framework) and the `NopTool`/`NopQuery` helpers used by the `sync` project.
4
+
@@ -0,0 +1,4 @@
1
+ """
2
+ Shared DB/core layer + NopCommerce DB tooling for StoreDataSync.
3
+ """
4
+
@@ -0,0 +1,4 @@
1
+ """
2
+ Shared core layer used by the StoreDataSync project.
3
+ """
4
+
@@ -0,0 +1,95 @@
1
+ import functools
2
+ import traceback
3
+ from datetime import datetime
4
+
5
+ from .log import DataLogger
6
+
7
+ import settings
8
+ from .exceptions import BizError
9
+
10
+
11
def timestamp():
    """Current wall-clock time rendered as 'YYYY-MM-DD HH:MM:SS'."""
    return "{:%Y-%m-%d %H:%M:%S}".format(datetime.now())
14
+
15
+
16
def mark(func):
    """Decorator that logs START/END markers around *func*, and the error on failure.

    If the first positional argument exposes a ``logger`` attribute (the Biz
    convention for methods), markers are routed through it; otherwise they are
    printed to stdout. Any exception is logged (with a traceback in the print
    path) and re-raised unchanged.
    """

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # Methods pass `self` first; plain functions may have no args at all,
        # so guard the lookup instead of assuming args[0] exists.
        logger = getattr(args[0], "logger", None) if args else None

        def emit(msg, error=False):
            # Route through the logger when available, else stdout.
            if logger:
                (logger.error if error else logger.info)(msg)
            else:
                print(msg)

        emit(f"[{timestamp()}] --- START {func.__qualname__} ---")
        try:
            result = func(*args, **kwargs)
        except Exception as e:
            emit(f"[{timestamp()}] !!! ERROR in {func.__qualname__}: {e}", error=True)
            if not logger:
                traceback.print_exc()
            raise
        # Timestamp taken *after* the call, so the END marker reflects the real
        # finish time (the original computed end_msg up front, next to START,
        # making END markers repeat the start timestamp).
        emit(f"[{timestamp()}] --- END {func.__qualname__} ---")
        return result

    return wrapper
52
+
53
+
54
def auto_mark_methods(cls):
    """Class decorator: wrap every plain (non-dunder) method of *cls* with `mark`.

    Only plain functions are wrapped. The original wrapped any non-dunder
    *callable* attribute, which would replace nested classes (and, on
    Python 3.10+, staticmethod objects) with plain wrapper functions,
    breaking them.
    """
    import inspect

    for attr_name, attr_value in cls.__dict__.items():
        if inspect.isfunction(attr_value) and not attr_name.startswith("__"):
            setattr(cls, attr_name, mark(attr_value))
    return cls
60
+
61
+
62
class Biz:
    """
    Abstract base class for one business task of the sync program.

    Subclasses override `start_biz`; `run` drives the lifecycle and logs
    (then re-raises) any failure. `self.reports` collects per-task data
    that `end_biz` dumps at the end of the run.
    """

    def __init__(self, settings):
        """Build a per-task DataLogger from the supplied settings object."""
        log_level = settings.LOG_LEVEL_OPTION
        log_dir_path = settings.LOG_DIR_PATH
        app_started_time = settings.APP_STARTED_TIME
        self.logger = DataLogger(str(type(self).__name__), log_level, log_dir_path, app_started_time)

        # Free-form report dict filled in by subclasses, logged by end_biz().
        self.reports = {}

    def start_biz(self):
        """Business entry point -- overridden by concrete tasks."""
        pass

    def end_biz(self):
        """Log the collected reports and timings, prune old logs, close handlers."""
        self.logger.log_divider()
        self.logger.log_biz_msg("Biz Reports", str(self.reports))
        self.logger.log_current_time()
        self.logger.log_divider()
        # NOTE(review): this reads the module-level `settings` import, not the
        # settings object handed to __init__ -- confirm the two always agree.
        self.logger.cleanup_expired_log(settings.LOG_DIR_PATH, settings.LOG_MAX_RETENTION_DAYS)
        self.logger.end()

    def run(self):
        """Run the task; log and propagate any exception unchanged."""
        try:
            self.start_biz()
            self.end_biz()
        except Exception as e:
            self.logger.log_biz_msg("Biz Error", str(e))
            # Bare `raise` preserves the original traceback as-is
            # (the original `raise e` re-raised from this frame).
            raise
95
+
@@ -0,0 +1,6 @@
1
# pyodbc connection-string templates, filled via old-style %-formatting with
# (server, database, username, password) in that order.
PYODBC_MSSQL_CONNECTION_STRING = "DRIVER={ODBC Driver 17 for SQL Server};SERVER=%s;DATABASE=%s;UID=%s;PWD=%s"
# Legacy driver alternative, kept for reference:
#PYODBC_MSSQL_CONNECTION_STRING = "DRIVER={SQL Server};SERVER=%s;DATABASE=%s;UID=%s;PWD=%s"
PYODBC_MYSQL_CONNECTION_STRING = "DRIVER={MySQL ODBC 8.0 Unicode Driver};SERVER=%s;DATABASE=%s;UID=%s;PWD=%s"
# MongoDB URI template, filled via str.format(server=...); default port 27017,
# no authentication.
PYMONGO_CONNECTION_STRING = "mongodb://{server}:27017/"
# Authenticated Atlas-style alternative, kept for reference:
#PYMONGO_CONNECTION_STRING = "mongodb+srv://{username}:{password}@{server}/{db_name}"
6
+
@@ -0,0 +1,160 @@
1
+ import pyodbc
2
+ from pymongo import MongoClient
3
+ from .config import *
4
+ from settings import DB_CONFIG
5
+
6
+
7
class DB:
    """
    Thin connection holder: opens either a MongoDB client or a pyodbc
    connection depending on *db_engine*, exposing the raw handle in
    `_conn` and a cursor (or, for Mongo, the database object) in `_cursor`.
    """

    _conn = None
    _cursor = None

    def __init__(self, db_engine, db_server, db_name, db_username, db_password):
        """
        :param db_engine: one of "MONGO", "MSSQL", "MYSQL", "DSN"
        :param db_server: host name, or the full ODBC connection/DSN string
                          when db_engine == "DSN"
        :raises ValueError: for an unrecognized db_engine
        """
        if db_engine == "MONGO":
            self._conn = MongoClient(PYMONGO_CONNECTION_STRING.format(server=db_server))
            # For Mongo, "_cursor" is actually the database handle.
            self._cursor = self._conn[db_name]
            return

        if db_engine == "MSSQL":
            conn_str = PYODBC_MSSQL_CONNECTION_STRING % (db_server, db_name, db_username, db_password)
        elif db_engine == "MYSQL":
            conn_str = PYODBC_MYSQL_CONNECTION_STRING % (db_server, db_name, db_username, db_password)
        elif db_engine == "DSN":
            # Caller supplies a complete ODBC connection string in db_server.
            conn_str = db_server
        else:
            # The original fell through with an empty conn_str and let pyodbc
            # raise an opaque driver error; fail fast with a clear message.
            raise ValueError(f"unsupported db_engine: {db_engine!r}")

        self._conn = pyodbc.connect(conn_str)
        self._cursor = self._conn.cursor()
39
+
40
+
41
class AutoDao:
    """
    Generic DAO over a `DB` connection, returning plain Python containers
    (dicts / lists / scalars) from SQL queries.

    All query helpers take an optional parameter sequence for the usual
    pyodbc ``?`` placeholders. Defaults were changed from the mutable ``[]``
    to the immutable ``()`` (both mean "no parameters" to pyodbc) to avoid
    the shared-mutable-default pitfall.
    """

    def __init__(self, db_type="", db_engine="", db_server="", db_name="", db_username="", db_password=""):
        """Open a DB either from the named settings.DB_CONFIG entry or ad hoc."""
        env = DB_CONFIG["env"]

        if db_type and db_type in DB_CONFIG[env]:
            # Named configuration: everything comes from settings.DB_CONFIG.
            cfg = DB_CONFIG[env][db_type]
            self.db = DB(
                cfg["db_engine"],
                cfg["db_server"],
                cfg["db_name"],
                cfg["db_username"],
                cfg["db_password"],
            )
        else:
            # NOTE(review): db_type is forwarded as the *engine* here and the
            # db_engine parameter is never used -- looks intentional for ad-hoc
            # connections, but confirm against callers before changing.
            self.db = DB(db_type, db_server, db_name, db_username, db_password)

    def get_hashmap(self, sql, param=()):
        """Run *sql* and map first column -> second column (last row wins)."""
        self.db._cursor.execute(sql, param)
        return {row[0]: row[1] for row in self.db._cursor.fetchall()}

    def get_hashtable(self, sql, param=(), key=0):
        """Map row[key] -> {column: value} dict; duplicate keys keep the last row."""
        result = {}
        self.db._cursor.execute(sql, param)
        rows = self.db._cursor.fetchall()
        if rows:
            cols = [column[0] for column in self.db._cursor.description]
            for row in rows:
                result[row[key]] = dict(zip(cols, row))
        return result

    def get_hasharrtable(self, sql, param=(), key=0):
        """Like get_hashtable, but groups duplicate keys into lists of row dicts."""
        result = {}
        self.db._cursor.execute(sql, param)
        rows = self.db._cursor.fetchall()
        if rows:
            cols = [column[0] for column in self.db._cursor.description]
            for row in rows:
                result.setdefault(row[key], []).append(dict(zip(cols, row)))
        return result

    def get_rows_and_keys(self, sql, param=(), count=0):
        """Return (rows, column_names); fetch at most *count* rows when count > 0."""
        self.db._cursor.execute(sql, param)
        rows = self.db._cursor.fetchmany(count) if count else self.db._cursor.fetchall()
        cols = [column[0] for column in self.db._cursor.description]
        return rows, cols

    def get_one_row_dict(self, sql, param=()):
        """Return the first row as a {column: value} dict, or None when empty."""
        self.db._cursor.execute(sql, param)
        row = self.db._cursor.fetchone()
        if row:
            cols = [column[0] for column in self.db._cursor.description]
            row = dict(zip(cols, row))
        return row

    def get_one_col_list(self, sql, param=(), count=0):
        """Return the first column of each row as a list."""
        self.db._cursor.execute(sql, param)
        rows = self.db._cursor.fetchmany(count) if count else self.db._cursor.fetchall()
        return [row[0] for row in rows]

    def get_one_value(self, sql, param=()):
        """Return the first column of the first row, or None when empty."""
        self.db._cursor.execute(sql, param)
        row = self.db._cursor.fetchone()
        return row[0] if row else row

    def get_list(self, sql, param=()):
        """Return all rows as a list of {column: value} dicts."""
        self.db._cursor.execute(sql, param)
        rows = self.db._cursor.fetchall()
        if not rows:
            return []
        cols = [column[0] for column in self.db._cursor.description]
        return [dict(zip(cols, row)) for row in rows]

    def update(self, sql, param=()):
        """Execute a DML statement and commit; always returns True."""
        self.db._cursor.execute(sql, param)
        self.db._cursor.commit()
        return True

    def update_return_rowcount(self, sql, param=()):
        """Execute a DML statement, commit, and return the affected row count."""
        self.db._cursor.execute(sql, param)
        n = self.db._cursor.rowcount
        self.db._cursor.commit()
        return n

    def update_r_dict(self, sql, param=()):
        """Execute a statement that returns one row; commit and return it as a dict (or None)."""
        self.db._cursor.execute(sql, param)
        row = self.db._cursor.fetchone()
        if row:
            cols = [column[0] for column in self.db._cursor.description]
            row = dict(zip(cols, row))
        self.db._cursor.commit()
        return row

    def update_r_list(self, sql, param=()):
        """Execute a statement that returns rows; commit and return (rows, column_names)."""
        self.db._cursor.execute(sql, param)
        rows = self.db._cursor.fetchall()
        cols = [column[0] for column in self.db._cursor.description]
        self.db._cursor.commit()
        return rows, cols
156
+
157
+
158
class Query:
    """Placeholder base class for shared SQL query definitions; intentionally empty."""
    pass
160
+
@@ -0,0 +1,3 @@
1
class BizError(Exception):
    """Raised by Biz implementations to signal a business-level failure."""
    pass
3
+
@@ -0,0 +1,145 @@
1
+ import logging
2
+ from logging.handlers import TimedRotatingFileHandler
3
+ from datetime import datetime
4
+ import os
5
+ import pickle
6
+ import settings
7
+
8
# Visual separators written into the log stream; each renders as a blank
# line, a 70-character rule, then a newline.
BIG_DIVIDER = """
======================================================================
"""

DIVIDER = """
----------------------------------------------------------------------
"""

# Plain spacing helpers.
BIG_LINEFEED = "\n\n"
LINEFEED = "\n"
18
+
19
+
20
class DataLogger:
    """
    File logger for one Biz run: writes to a timestamped, daily-rotating
    log file and offers divider/title/report formatting helpers, plus
    pickle-based dump/load and expired-log cleanup.
    """

    # Accepted log_level names (case-insensitive); FATAL is an alias of CRITICAL.
    _LEVELS = {
        'DEBUG': logging.DEBUG,
        'INFO': logging.INFO,
        'WARNING': logging.WARNING,
        'ERROR': logging.ERROR,
        'FATAL': logging.CRITICAL,
        'CRITICAL': logging.CRITICAL,
    }

    def __init__(self, biz_name='DataBiz', log_level='INFO', log_dir_path='./', app_started_time=None):
        """
        :param biz_name: logger name; also embedded in the log file name
        :param log_level: DEBUG/INFO/WARNING/ERROR/FATAL/CRITICAL (any case)
        :param log_dir_path: directory for log files (created if missing)
        :param app_started_time: run start; defaults to now.  The original
            used ``datetime.now()`` as the default *value*, which is evaluated
            once at import time -- callers relying on the default got the
            import timestamp, not the call timestamp.
        :raises ValueError: for an unrecognized log_level
        """
        if app_started_time is None:
            app_started_time = datetime.now()

        if not os.path.exists(log_dir_path):
            os.makedirs(log_dir_path)

        self.log_level = log_level.upper()
        self.log_dir_path = log_dir_path
        self.app_started_time = app_started_time

        logging.basicConfig(
            #level=logging.DEBUG,
            #format="%(asctime)s [%(biz_name)-2s] %(levelname)-2s: %(message)s",
        )

        self.logger = logging.getLogger(biz_name)

        try:
            # The original compared the raw (non-uppercased) argument for
            # 'CRITICAL', so a lowercase 'critical' was wrongly rejected.
            self.logger.setLevel(self._LEVELS[self.log_level])
        except KeyError:
            raise ValueError('unknown logging level')

        log_filename = self.app_started_time.strftime("%Y%m%d_%H%M%S") + "_" + biz_name + ".log"
        self.log_filename = os.path.join(self.log_dir_path, log_filename)

        # Daily rotation; retention mirrors the cleanup policy in settings.
        trh = TimedRotatingFileHandler(self.log_filename, when='D', interval=1, backupCount=settings.LOG_MAX_RETENTION_DAYS, encoding="utf8")
        trh.suffix = "%Y%m%d_%H%M%S"
        self.logger.addHandler(trh)

        ABC_SIGNATURE = f"""
======================================================================
*Task Title : ABC Sync -> {biz_name}
*Started Time : {app_started_time}
======================================================================
"""
        self.log(ABC_SIGNATURE, log_level)

    def end(self):
        """Close and detach every handler so the log file is released."""
        handlers = self.logger.handlers[:]
        for handler in handlers:
            handler.close()
            self.logger.removeHandler(handler)

    def log(self, msg, log_level="warning"):
        """Emit *msg* at the named level; unrecognized level names are dropped silently."""
        level = log_level.lower()
        if level == "warning":
            self.logger.warning(msg)
        elif level == "info":
            self.logger.info(msg)
        elif level == "debug":
            self.logger.debug(msg)
        elif level == "error":
            self.logger.error(msg)

    def log_big_divider(self, log_level="warning"):
        """Log the heavy '=' divider."""
        self.log(BIG_DIVIDER, log_level)

    def log_divider(self, log_level="warning"):
        """Log the light '-' divider."""
        self.log(DIVIDER, log_level)

    def log_big_linefeed(self, log_level="warning"):
        """Log two blank lines."""
        self.log(BIG_LINEFEED, log_level)

    def log_linefeed(self, log_level="warning"):
        """Log one blank line."""
        self.log(LINEFEED, log_level)

    def log_task_title(self, title="", log_level="warning"):
        """Log a big framed task title; defaults to the logger name."""
        if not title:
            title = self.logger.name
        TITLE_SPACES = " "
        msg = BIG_DIVIDER + LINEFEED + TITLE_SPACES + title + DIVIDER + LINEFEED
        self.log(msg, log_level)

    def log_func_title(self, title="", log_level="info"):
        """Log a small underlined section title."""
        msg = LINEFEED + LINEFEED + "- " + title + DIVIDER
        self.log(msg, log_level)

    def log_current_time(self):
        """Log start / current / elapsed time for this run."""
        self.log(LINEFEED + "** Started Time : {s}".format(s = self.app_started_time))
        self.log(LINEFEED + "** Current Time : {c}".format(c = datetime.now()))
        self.log(LINEFEED + "** Wasted Time : {w}".format(w = datetime.now() - self.app_started_time))

    def log_step(self, step="", log_level="info"):
        """Log a progress step preceded by a big divider."""
        msg = LINEFEED + BIG_DIVIDER + LINEFEED + ".. " + step + LINEFEED
        self.log(msg, log_level)

    def log_data(self, data=None, log_level="debug"):
        """Log each key/value pair of *data* on its own line.

        The original used a mutable default ``{}``; ``None`` keeps callers
        identical without sharing state between calls.
        """
        for k, v in (data or {}).items():
            self.log(f"{k} : {v}", log_level)

    def log_biz_msg(self, msg_title, msg_text, log_level="info"):
        """Log a titled business message block."""
        msg = LINEFEED + "*" + msg_title + ":" + LINEFEED + " " + msg_text
        self.log(msg, log_level)

    def log_query(self, sql_name, sql):
        """Debug-log a named SQL statement."""
        self.log_divider("debug")
        self.log_biz_msg(sql_name + "=\n", sql, "debug")

    def log_query_result(self, sql_name, result):
        """Debug-log a row count for a named SQL statement."""
        self.log_biz_msg(sql_name + "_RESULT", str(result) + " rows", "debug")
        self.log_divider("debug")

    def dump_data(self, filename, data):
        """Pickle *data* to *filename*."""
        with open(filename, 'wb') as filehandle:
            pickle.dump(data, filehandle, protocol=pickle.HIGHEST_PROTOCOL)

    def load_data(self, filename):
        """Unpickle and return the object stored in *filename*.

        NOTE(review): ``pickle.load`` on an untrusted file can execute
        arbitrary code -- only use with files this application wrote itself.
        """
        with open(filename, 'rb') as filehandle:
            return pickle.load(filehandle)

    def cleanup_expired_log(self, log_dir, max_retention_days):
        """Delete files in *log_dir* older than *max_retention_days*.

        Despite the original variable name ("creation time"),
        ``os.path.getmtime`` is the last *modification* time.
        """
        today = datetime.now()
        for filename in os.listdir(log_dir):
            file_path = os.path.join(log_dir, filename)
            if os.path.isfile(file_path):
                file_mtime = datetime.fromtimestamp(os.path.getmtime(file_path))
                if (today - file_mtime).days > max_retention_days:
                    os.remove(file_path)
145
+
@@ -0,0 +1,157 @@
1
+ import smtplib
2
+ import settings
3
+ from datetime import datetime
4
+ import pymsteams
5
+ from email.mime.text import MIMEText
6
+ from email.mime.multipart import MIMEMultipart
7
+ from slack_sdk import WebClient
8
+ from slack_sdk.errors import SlackApiError
9
+
10
+
11
class Notify:
    """Fan-out notifier that forwards one message to every configured channel."""

    def __init__(self, name):
        """
        :param name: label prepended to every outgoing notification
        """
        self.name = name
        self.notifiers = self._initialize_notifiers()

    def _initialize_notifiers(self):
        # Every supported backend is instantiated up front; each one degrades
        # gracefully on its own when its settings are missing.
        return [
            SlackNotify(self.name),
            EmailNotify(self.name),
        ]

    def notify(self, subject, content):
        """Send (subject, content) through every backend, isolating failures."""
        for target in self.notifiers:
            if not hasattr(target, "notify"):
                continue
            try:
                target.notify(subject, content)
            except Exception as e:
                print(f"Error in {target.__class__.__name__} notification: {e}")

    def notify_started(self):
        """Announce that the batch task has begun."""
        self.notify("Started", "Batch task started successfully.")

    def notify_exited(self):
        """Announce a clean exit."""
        self.notify("Exited", "Batch task exited successfully.")

    def notify_error(self, tb):
        """Forward a traceback / error text to all channels."""
        self.notify("Has some error", "Error details are below:\n" + tb)

    def notify_report(self, report_dict):
        """Send each report entry as one 'name: content' line."""
        lines = [f"{report_name}: {str(report_content)}\n" for report_name, report_content in report_dict.items()]
        self.notify("Biz Reports", "".join(lines).strip())

    def close(self):
        """Release every backend that supports close(), isolating failures."""
        for target in self.notifiers:
            if not hasattr(target, "close"):
                continue
            try:
                target.close()
            except Exception as e:
                print(f"Error closing {target.__class__.__name__} notifier: {e}")
59
+
60
+
61
class SlackNotify:
    """Slack channel notifier backed by slack_sdk's WebClient."""

    def __init__(self, name):
        """
        :param name: label shown in bold at the start of each message

        Degrades to a no-op (client = None) when the token is missing or the
        client cannot be built.
        """
        self.name = name
        try:
            self.channel_id = settings.SLACK_CHANNEL_ID
            if hasattr(settings, "SLACK_BOT_TOKEN") and settings.SLACK_BOT_TOKEN:
                self.client = WebClient(token=settings.SLACK_BOT_TOKEN)
            else:
                self.client = None
            # SECURITY: the original printed settings.SLACK_BOT_TOKEN to stdout
            # here as leftover debug output; removed -- never log credentials.
        except Exception as e:
            print(f"Error instantiates Slack client: {e}")
            self.client = None  # Make sure client is None for subsequent checks

    def now(self):
        """Current time as 'YYYY-MM-DD HH:MM'."""
        return datetime.now().strftime("%Y-%m-%d %H:%M")

    def notify(self, subject, content):
        """Post '*[name]* (time) subject' plus content to the configured channel."""
        if self.client is None:
            print("Slack client is not initialized.")
            return
        try:
            result = self.client.chat_postMessage(
                channel=self.channel_id,
                text="*[{name}]* ({time}) {subject}\n{content}".format(
                    name=self.name, time=self.now(), subject=subject, content=content
                ),
            )
            # chat_postMessage responses carry an "ok" flag; surface failures.
            if not result["ok"]:
                print(result)

        except SlackApiError as e:
            print(f"Failed to send Slack message: {e}")
95
+
96
+
97
class EmailNotify:
    """SMTP email notifier; deliberately sends only error notifications."""

    def __init__(self, name):
        """
        Open the SMTP connection from settings; on any failure the notifier
        degrades to a no-op (self.server stays None).

        :param name: label embedded in the subject and body of each mail
        """
        self.name = name
        self.server = None  # initialized below; None means "disabled"
        try:
            # connect to SMTP server
            self.sender_email = settings.SENDER_EMAIL
            # RECEIVER_EMAIL is a comma-separated address list in settings
            self.receiver_email = settings.RECEIVER_EMAIL.split(",")
            self.email_server_addr = settings.EMAIL_SERVER_ADDR
            self.email_server_port = settings.EMAIL_SERVER_PORT

            if self.email_server_addr and self.email_server_port and self.sender_email:
                self.server = smtplib.SMTP(self.email_server_addr, self.email_server_port)
        except Exception as e:
            print(f"Error initializing SMTP connection: {e}")
            self.server = None  # Make sure server is None for subsequent checks

    def now(self):
        """Current time as 'YYYY-MM-DD HH:MM'."""
        return datetime.now().strftime("%Y-%m-%d %H:%M")

    def notify(self, subject, content):
        """Send an HTML mail -- but only for the error subject.

        NOTE(review): the "Has some error" guard is string-coupled to
        Notify.notify_error; keep the two in sync.
        """
        # Suppress success mail: only the error subject goes out by email.
        if subject != "Has some error":
            return
        if self.server is None:
            print("SMTP server is not initialized.")
            return

        # Create the message object
        message = MIMEMultipart()
        message["From"] = self.sender_email
        message["To"] = ", ".join(self.receiver_email)
        message["Subject"] = "[{name}]{subject}".format(name=self.name, subject=subject)

        # email body (HTML)
        msg = """
<!DOCTYPE html>
<html>
<body>
<p><b>[{name}]({time}) {subject}</b></p>
<p>{content}</p>
</body>
</html>
""".format(name=self.name, time=self.now(), subject=subject, content=content)
        message.attach(MIMEText(msg, "html"))

        try:
            if self.sender_email and self.receiver_email:
                # send email
                self.server.sendmail(self.sender_email, self.receiver_email, message.as_string())
        except Exception as e:
            print(f"Failed to send email: {e}")

    def close(self):
        """Quit the SMTP connection if one was opened."""
        if self.server is not None:
            try:
                self.server.quit()
            except Exception as e:
                print(f"Error closing SMTP connection: {e}")
157
+