medicafe-0.240419.2-py3-none-any.whl → medicafe-0.240613.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of medicafe has been flagged as possibly problematic by the registry.
- MediBot/MediBot.bat +174 -38
- MediBot/MediBot.py +80 -77
- MediBot/MediBot_Charges.py +0 -28
- MediBot/MediBot_Crosswalk_Library.py +281 -0
- MediBot/MediBot_Post.py +0 -0
- MediBot/MediBot_Preprocessor.py +138 -211
- MediBot/MediBot_Preprocessor_lib.py +496 -0
- MediBot/MediBot_UI.py +80 -35
- MediBot/MediBot_dataformat_library.py +79 -35
- MediBot/MediBot_docx_decoder.py +295 -0
- MediBot/update_medicafe.py +46 -8
- MediLink/MediLink.py +207 -108
- MediLink/MediLink_837p_encoder.py +299 -214
- MediLink/MediLink_837p_encoder_library.py +445 -245
- MediLink/MediLink_API_v2.py +174 -0
- MediLink/MediLink_APIs.py +139 -0
- MediLink/MediLink_ConfigLoader.py +44 -32
- MediLink/MediLink_DataMgmt.py +297 -89
- MediLink/MediLink_Decoder.py +63 -0
- MediLink/MediLink_Down.py +73 -102
- MediLink/MediLink_ERA_decoder.py +4 -4
- MediLink/MediLink_Gmail.py +479 -4
- MediLink/MediLink_Mailer.py +0 -0
- MediLink/MediLink_Parser.py +111 -0
- MediLink/MediLink_Scan.py +0 -0
- MediLink/MediLink_Scheduler.py +2 -131
- MediLink/MediLink_StatusCheck.py +0 -4
- MediLink/MediLink_UI.py +87 -27
- MediLink/MediLink_Up.py +301 -45
- MediLink/MediLink_batch.bat +1 -1
- MediLink/test.py +74 -0
- medicafe-0.240613.0.dist-info/METADATA +55 -0
- medicafe-0.240613.0.dist-info/RECORD +43 -0
- {medicafe-0.240419.2.dist-info → medicafe-0.240613.0.dist-info}/WHEEL +5 -5
- medicafe-0.240419.2.dist-info/METADATA +0 -19
- medicafe-0.240419.2.dist-info/RECORD +0 -32
- {medicafe-0.240419.2.dist-info → medicafe-0.240613.0.dist-info}/LICENSE +0 -0
- {medicafe-0.240419.2.dist-info → medicafe-0.240613.0.dist-info}/top_level.txt +0 -0
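To confirm which of these two versions is present in a given environment before reading the diff, one option is to query the installed distribution metadata. This is a small sketch, not part of the package itself; importlib.metadata needs Python 3.8+, with pkg_resources as a setuptools fallback on older interpreters:

# Print the locally installed medicafe version, if any.
try:
    from importlib.metadata import version  # Python 3.8+
except ImportError:
    from pkg_resources import get_distribution  # setuptools fallback for older interpreters

    def version(name):
        return get_distribution(name).version

print(version("medicafe"))  # e.g. 0.240419.2 or 0.240613.0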
MediLink/MediLink_Gmail.py
CHANGED
@@ -1,4 +1,479 @@
(The four lines of the previous version are removed; the new module source follows.)

import sys
import os
import subprocess
import time
import webbrowser
from MediLink_ConfigLoader import log, load_configuration

import requests
import json
from http.server import BaseHTTPRequestHandler, HTTPServer
import ssl
import signal
from threading import Thread, Event

import platform

config, _ = load_configuration()
local_storage_path = config['MediLink_Config']['local_storage_path']
downloaded_emails_file = os.path.join(local_storage_path, 'downloaded_emails.txt')

server_port = 8000
cert_file = 'server.cert'
key_file = 'server.key'
openssl_cnf = 'MediLink\\openssl.cnf' # This file needs to be located in the same place as where MediCafe is run from (so MediBot folder?)

httpd = None # Global variable for the HTTP server
shutdown_event = Event() # Event to signal shutdown

# Define the scopes for the Gmail API and other required APIs
SCOPES = ' '.join([
    'https://www.googleapis.com/auth/gmail.modify',
    'https://www.googleapis.com/auth/gmail.compose',
    'https://www.googleapis.com/auth/gmail.readonly',
    'https://www.googleapis.com/auth/script.external_request',
    'https://www.googleapis.com/auth/userinfo.email',
    'https://www.googleapis.com/auth/script.scriptapp',
    'https://www.googleapis.com/auth/drive'
])

# Path to token.json file
TOKEN_PATH = 'token.json'

# Determine the operating system and version
os_name = platform.system()
os_version = platform.release()

# Set the credentials path based on the OS and version
if os_name == 'Windows' and 'XP' in os_version:
    CREDENTIALS_PATH = 'F:\\Medibot\\json\\credentials.json'
else:
    CREDENTIALS_PATH = 'json\\credentials.json'

# Log the selected path for verification
log("Using CREDENTIALS_PATH: {}".format(CREDENTIALS_PATH), config, level="INFO")

REDIRECT_URI = 'https://127.0.0.1:8000'

def get_authorization_url():
    with open(CREDENTIALS_PATH, 'r') as credentials_file:
        credentials = json.load(credentials_file)
    client_id = credentials['web']['client_id']
    auth_url = (
        "https://accounts.google.com/o/oauth2/v2/auth?"
        "response_type=code&"
        "client_id={}&"
        "redirect_uri={}&"
        "scope={}&"
        "access_type=offline"
    ).format(client_id, REDIRECT_URI, SCOPES)
    return auth_url

def exchange_code_for_token(auth_code):
    with open(CREDENTIALS_PATH, 'r') as credentials_file:
        credentials = json.load(credentials_file)
    token_url = "https://oauth2.googleapis.com/token"
    data = {
        'code': auth_code,
        'client_id': credentials['web']['client_id'],
        'client_secret': credentials['web']['client_secret'],
        'redirect_uri': REDIRECT_URI,
        'grant_type': 'authorization_code'
    }
    response = requests.post(token_url, data=data)
    log("Token exchange response: Status code {}, Body: {}".format(response.status_code, response.text))
    token_response = response.json()
    token_response['token_time'] = time.time()
    # Ensure refresh_token is saved
    if 'refresh_token' in token_response:
        with open(TOKEN_PATH, 'w') as token_file:
            json.dump(token_response, token_file)
    return token_response

def get_access_token():
    if os.path.exists(TOKEN_PATH):
        with open(TOKEN_PATH, 'r') as token_file:
            token_data = json.load(token_file)
        log("Loaded token data:\n {}".format(token_data))

        if 'access_token' in token_data and 'expires_in' in token_data:
            try:
                token_expiry_time = token_data['token_time'] + token_data['expires_in']
            except KeyError as e:
                log("KeyError: {}".format(e))
                return None

            if token_expiry_time > time.time():
                log("Access token is still valid.")
                return token_data['access_token']
            else:
                log("Access token has expired, refreshing token.")
                new_token_data = refresh_access_token(token_data.get('refresh_token'))
                if 'access_token' in new_token_data:
                    new_token_data['token_time'] = time.time()
                    with open(TOKEN_PATH, 'w') as token_file:
                        json.dump(new_token_data, token_file)
                    return new_token_data['access_token']
                else:
                    log("Failed to refresh access token.")
                    return None
    log("Access token not found.")
    return None

def refresh_access_token(refresh_token):
    log("Refreshing access token.")
    with open(CREDENTIALS_PATH, 'r') as credentials_file:
        credentials = json.load(credentials_file)
    token_url = "https://oauth2.googleapis.com/token"
    data = {
        'client_id': credentials['web']['client_id'],
        'client_secret': credentials['web']['client_secret'],
        'refresh_token': refresh_token,
        'grant_type': 'refresh_token'
    }
    response = requests.post(token_url, data=data)
    log("Refresh token response: Status code {}, Body:\n {}".format(response.status_code, response.text))
    if response.status_code == 200:
        log("Access token refreshed successfully.")
        return response.json()
    else:
        log("Failed to refresh access token. Status code: {}".format(response.status_code))
        return {}

class RequestHandler(BaseHTTPRequestHandler):
    def _set_headers(self):
        self.send_header('Access-Control-Allow-Origin', '*')
        self.send_header('Access-Control-Allow-Methods', 'POST, OPTIONS')
        self.send_header('Access-Control-Allow-Headers', 'Content-Type')
        self.send_header('Content-type', 'application/json')

    def do_OPTIONS(self):
        self.send_response(200)
        self._set_headers()
        self.end_headers()

    def do_POST(self):
        if self.path == '/download':
            content_length = int(self.headers['Content-Length'])
            post_data = self.rfile.read(content_length)
            data = json.loads(post_data.decode('utf-8'))
            links = data.get('links', [])

            # Log the content of links
            log("Received links: {}".format(links))

            file_ids = [link.get('fileId', None) for link in links if link.get('fileId')]
            log("File IDs received from client: {}".format(file_ids))

            # Proceed with downloading files
            download_docx_files(links)
            self.send_response(200)
            self._set_headers() # Include CORS headers
            self.end_headers()
            response = json.dumps({"status": "success", "message": "All files downloaded", "fileIds": file_ids})
            self.wfile.write(response.encode('utf-8'))
            shutdown_event.set()
        elif self.path == '/shutdown':
            log("Shutdown request received.")
            self.send_response(200)
            self._set_headers()
            self.end_headers()
            response = json.dumps({"status": "success", "message": "Server is shutting down."})
            self.wfile.write(response.encode('utf-8'))
            shutdown_event.set() # Signal shutdown event instead of calling stop_server directly
        elif self.path == '/delete-files':
            content_length = int(self.headers['Content-Length'])
            post_data = self.rfile.read(content_length)
            data = json.loads(post_data.decode('utf-8'))
            file_ids = data.get('fileIds', [])
            log("File IDs to delete received from client: {}".format(file_ids))

            if not isinstance(file_ids, list):
                self.send_response(400)
                self._set_headers()
                self.end_headers()
                response = json.dumps({"status": "error", "message": "Invalid fileIds parameter."})
                self.wfile.write(response.encode('utf-8'))
                return

            self.send_response(200)
            self._set_headers() # Include CORS headers
            self.end_headers()
            response = json.dumps({"status": "success", "message": "Files deleted successfully."})
            self.wfile.write(response.encode('utf-8'))
        else:
            self.send_response(404)
            self.end_headers()

    def do_GET(self):
        if self.path.startswith("/?code="):
            auth_code = self.path.split('=')[1]
            log("Received authorization code: {}".format(auth_code)) # Add this line
            token_response = exchange_code_for_token(auth_code)
            log("Token response: {}".format(token_response)) # Add this line
            if 'access_token' in token_response:
                with open(TOKEN_PATH, 'w') as token_file:
                    json.dump(token_response, token_file)
                self.send_response(200)
                self.send_header('Content-type', 'text/html')
                self.end_headers()
                self.wfile.write("Authentication successful. You can close this window now.".encode())
                initiate_link_retrieval() # Proceed with link retrieval
            else:
                log("Authentication failed with response: {}".format(token_response)) # Add this line
                self.send_response(400)
                self.send_header('Content-type', 'text/html')
                self.end_headers()
                self.wfile.write("Authentication failed. Please check the logs for more details.".encode())
                shutdown_event.set() # Signal shutdown event after failed authentication
        elif self.path == '/downloaded-emails':
            self.send_response(200)
            self._set_headers()
            self.end_headers()
            downloaded_emails = load_downloaded_emails()
            response = json.dumps({"downloadedEmails": list(downloaded_emails)})
            self.wfile.write(response.encode('utf-8'))
        else:
            self.send_response(200)
            self.send_header('Access-Control-Allow-Origin', '*')
            self.send_header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
            self.send_header('Access-Control-Allow-Headers', 'Content-Type')
            self.send_header('Content-type', 'text/html')
            self.end_headers()
            self.wfile.write(b'HTTPS server is running.')

def generate_self_signed_cert(cert_file, key_file):
    log("Checking if certificate file exists: " + cert_file)
    log("Checking if key file exists: " + key_file)
    if not os.path.exists(cert_file) or not os.path.exists(key_file):
        log("Generating self-signed SSL certificate...")
        cmd = [
            'openssl', 'req', '-config', openssl_cnf, '-nodes', '-new', '-x509',
            '-keyout', key_file,
            '-out', cert_file,
            '-days', '365'
            #'-subj', '/C=US/ST=...' The openssl.cnf file contains default values for these fields, but they can be overridden by the -subj option.
        ]
        try:
            log("Running command: " + ' '.join(cmd))
            result = subprocess.call(cmd)
            log("Command finished with result: " + str(result))
            if result != 0:
                raise RuntimeError("Failed to generate self-signed certificate")
            log("Self-signed SSL certificate generated.")
        except Exception as e:
            log("Error generating self-signed certificate: {}".format(e))
            raise

def run_server():
    global httpd
    try:
        log("Attempting to start server on port " + str(server_port))
        server_address = ('0.0.0.0', server_port) # Bind to all interfaces
        httpd = HTTPServer(server_address, RequestHandler)
        log("Attempting to wrap socket with SSL. cert_file=" + cert_file + ", key_file=" + key_file)

        if not os.path.exists(cert_file):
            log("Error: Certificate file not found: " + cert_file)
        if not os.path.exists(key_file):
            log("Error: Key file not found: " + key_file)

        httpd.socket = ssl.wrap_socket(httpd.socket, certfile=cert_file, keyfile=key_file, server_side=True)
        log("Starting HTTPS server on port {}".format(server_port))
        httpd.serve_forever()
    except Exception as e:
        log("Error in serving: {}".format(e))
        stop_server()

def stop_server():
    global httpd
    if httpd:
        log("Stopping HTTPS server.")
        httpd.shutdown()
        httpd.server_close()
        log("HTTPS server stopped.")
    shutdown_event.set() # Signal shutdown event

def load_downloaded_emails():
    downloaded_emails = set()
    if os.path.exists(downloaded_emails_file):
        with open(downloaded_emails_file, 'r') as file:
            downloaded_emails = set(line.strip() for line in file)
    log("Loaded downloaded emails: {}".format(downloaded_emails))
    return downloaded_emails

def download_docx_files(links):
    # Load the set of downloaded emails
    # TODO Test if any of these have a .csv extension and then move those to the right location locally.
    downloaded_emails = load_downloaded_emails()

    for link in links:
        try:
            url = link.get('url', '')
            filename = link.get('filename', '')

            # Log the variables to debug
            log("Processing link: url='{}', filename='{}'".format(url, filename))

            # Skip if email already downloaded
            if filename in downloaded_emails:
                log("Skipping already downloaded email: {}".format(filename))
                continue

            log("Downloading .docx file from URL: {}".format(url))
            response = requests.get(url, verify=False) # Set verify to False for self-signed certs
            if response.status_code == 200:
                file_path = os.path.join(local_storage_path, filename)
                with open(file_path, 'wb') as file:
                    file.write(response.content)
                log("Downloaded .docx file: {}".format(filename))
                # Add to the set and save the updated list
                downloaded_emails.add(filename)
                with open(downloaded_emails_file, 'a') as file:
                    file.write(filename + '\n')
            else:
                log("Failed to download .docx file from URL: {}. Status code: {}".format(url, response.status_code))
        except Exception as e:
            log("Error downloading .docx file from URL: {}. Error: {}".format(url, e))

def open_browser_with_executable(url, browser_path=None):
    try:
        if browser_path:
            log("Attempting to open URL with provided executable: {} {}".format(browser_path, url))
            process = subprocess.Popen([browser_path, url], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            stdout, stderr = process.communicate()
            if process.returncode == 0:
                log("Browser opened with provided executable path using subprocess.Popen.")
            else:
                log("Browser failed to open using subprocess.Popen. Return code: {}. Stderr: {}".format(process.returncode, stderr))
        else:
            log("No browser path provided. Attempting to open URL with default browser: {}".format(url))
            webbrowser.open(url)
            log("Default browser opened.")
    except Exception as e:
        log("Failed to open browser: {}".format(e))

def initiate_link_retrieval():
    log("Initiating browser via implicit GET.")
    url_get = "https://script.google.com/macros/s/AKfycbzlq8d32mDlLdtFxgL_zvLJernlGPB64ftyxyH8F1nNlr3P-VBH6Yd0NGa1pbBc5AozvQ/exec?action=get_link"
    open_browser_with_executable(url_get)

    log("Preparing POST call.")
    url = "https://script.google.com/macros/s/AKfycbzlq8d32mDlLdtFxgL_zvLJernlGPB64ftyxyH8F1nNlr3P-VBH6Yd0NGa1pbBc5AozvQ/exec"
    downloaded_emails = list(load_downloaded_emails())
    payload = {
        "downloadedEmails": downloaded_emails
    }

    access_token = get_access_token()
    if not access_token:
        log("Access token not found. Please authenticate first.")
        shutdown_event.set() # Signal shutdown event if token is not found
        return

    # Inspect the token to check its validity and permissions
    try:
        inspect_token(access_token)
    except Exception as e:
        log("Error inspecting token: {}".format(e))
        shutdown_event.set() # Signal shutdown event on exception
        return

    headers = {
        'Authorization': 'Bearer {}'.format(access_token),
        'Content-Type': 'application/json'
    }

    log("Request headers: {}".format(headers))
    log("Request payload: {}".format(payload))

    handle_post_response(url, payload, headers)

def handle_post_response(url, payload, headers):
    try:
        response = requests.post(url, json=payload, headers=headers)
        log("Response status code: {}".format(response.status_code))
        log("Response body: {}".format(response.text))

        if response.status_code == 200:
            response_data = response.json()
            log("Parsed response data: {}".format(response_data)) # Log the parsed response data
            if response_data.get("status") == "error":
                log("Error message from server: {}".format(response_data.get("message")))
                print("Error: {}".format(response_data.get("message")))
                shutdown_event.set() # Signal shutdown event after error
            else:
                log("Link retrieval initiated successfully...")
                # What happens now? I think the webapp is supposed to make a call back to the server here to say its ready to download?
        elif response.status_code == 401:
            log("Unauthorized. Check if the token has the necessary scopes.")
            shutdown_event.set()
        elif response.status_code == 403:
            log("Forbidden. Ensure that the OAuth client has the correct permissions and the Apps Script is configured correctly.")
            shutdown_event.set()
        elif response.status_code == 404:
            log("Not Found. Verify the URL and ensure the Apps Script is deployed correctly.")
            shutdown_event.set()
        else:
            log("Failed to initiate link retrieval. Status code: {}".format(response.status_code))
            shutdown_event.set() # Signal shutdown event on failure
    except requests.exceptions.RequestException as e:
        log("RequestException during link retrieval initiation: {}".format(e))
        shutdown_event.set()
    except Exception as e:
        log("Unexpected error during link retrieval initiation: {}".format(e))
        shutdown_event.set()

def inspect_token(access_token):
    info_url = "https://www.googleapis.com/oauth2/v1/tokeninfo?access_token={}".format(access_token)
    response = requests.get(info_url)
    log("Token info: Status code {}, Body: {}".format(response.status_code, response.text))
    return response.json()

def signal_handler(sig, frame):
    log("Signal received: {}. Initiating shutdown.".format(sig))
    stop_server()
    sys.exit(0)

def auth_and_retrieval():
    access_token = get_access_token()
    if not access_token:
        log("Access token not found or expired. Please authenticate first.")
        print("If the browser does not open automatically, please open the following URL in your browser to authorize the application:")
        auth_url = get_authorization_url()
        print(auth_url)
        open_browser_with_executable(auth_url)
        shutdown_event.wait() # Wait for the shutdown event to be set after authentication
    else:
        log("Access token found. Proceeding.")
        initiate_link_retrieval()
        shutdown_event.wait() # Wait for the shutdown event to be set

if __name__ == "__main__":
    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    try:
        # Generate SSL certificate if it doesn't exist
        generate_self_signed_cert(cert_file, key_file)

        from threading import Thread
        log("Starting server thread.")
        server_thread = Thread(target=run_server)
        server_thread.daemon = True
        server_thread.start()

        auth_and_retrieval()

        log("Stopping HTTPS server.")
        stop_server() # Ensure the server is stopped
        log("Waiting for server thread to finish.")
        server_thread.join() # Wait for the server thread to finish
    except KeyboardInterrupt:
        log("KeyboardInterrupt received, stopping server.")
        stop_server()
        sys.exit(0)
    except Exception as e:
        log("An error occurred: {}".format(e))
        stop_server()
        sys.exit(1)
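For orientation, the /download handler above expects a JSON body with a links array whose entries carry url, filename, and (optionally) fileId, and it replies with a JSON status plus the file IDs it received. Below is a minimal client-side sketch against the local helper, using purely hypothetical values; certificate verification is disabled because the server runs on the self-signed certificate generated above:

import requests

payload = {
    "links": [
        {
            "url": "https://example.com/visit-note.docx",  # hypothetical document URL
            "filename": "visit-note.docx",
            "fileId": "abc123"                              # hypothetical Drive file ID
        }
    ]
}

# The helper listens on https://127.0.0.1:8000 with a self-signed certificate,
# so verification is turned off for this local call.
resp = requests.post("https://127.0.0.1:8000/download", json=payload, verify=False)
print(resp.status_code, resp.json())  # e.g. {"status": "success", "message": "All files downloaded", ...}

Note that a successful /download also sets the shutdown event, so the helper winds down after handling one batch.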
MediLink/MediLink_Parser.py
ADDED
@@ -0,0 +1,111 @@

# MediLink_Parser.py
def parse_era_content(content):
    extracted_data = []
    normalized_content = content.replace('~\n', '~')
    lines = normalized_content.split('~')

    record = {}
    check_eft, payer_address = None, None
    allowed_amount, write_off, patient_responsibility, adjustment_amount = 0, 0, 0, 0
    is_payer_section = False

    for line in lines:
        segments = line.split('*')

        if segments[0] == 'TRN' and len(segments) > 2:
            check_eft = segments[2]

        if segments[0] == 'N1':
            if segments[1] == 'PR':
                is_payer_section = True
            elif segments[1] == 'PE':
                is_payer_section = False

        if is_payer_section and segments[0] == 'N3' and len(segments) > 1:
            payer_address = segments[1]

        if segments[0] == 'CLP' and len(segments) >= 5:
            if record:
                if adjustment_amount == 0 and (write_off > 0 or patient_responsibility > 0):
                    adjustment_amount = write_off + patient_responsibility

                record.update({
                    'Payer Address': payer_address,
                    'Allowed Amount': allowed_amount,
                    'Write Off': write_off,
                    'Patient Responsibility': patient_responsibility,
                    'Adjustment Amount': adjustment_amount,
                })
                extracted_data.append(record)

            allowed_amount, write_off, patient_responsibility, adjustment_amount = 0, 0, 0, 0

            record = {
                'Check EFT': check_eft,
                'Chart Number': segments[1],
                'Payer Address': payer_address,
                'Amount Paid': segments[4],
                'Charge': segments[3],
            }

        elif segments[0] == 'CAS':
            if segments[1] == 'CO':
                write_off += float(segments[3])
            elif segments[1] == 'PR':
                patient_responsibility += float(segments[3])
            elif segments[1] == 'OA':
                adjustment_amount += float(segments[3])

        elif segments[0] == 'AMT' and segments[1] == 'B6':
            allowed_amount += float(segments[2])

        elif segments[0] == 'DTM' and (segments[1] == '232' or segments[1] == '472'):
            record['Date of Service'] = segments[2]

    if record:
        if adjustment_amount == 0 and (write_off > 0 or patient_responsibility > 0):
            adjustment_amount = write_off + patient_responsibility
        record.update({
            'Allowed Amount': allowed_amount,
            'Write Off': write_off,
            'Patient Responsibility': patient_responsibility,
            'Adjustment Amount': adjustment_amount,
        })
        extracted_data.append(record)

    return extracted_data

def parse_277_content(content):
    segments = content.split('~')
    records = []
    current_record = {}
    for segment in segments:
        parts = segment.split('*')
        if parts[0] == 'HL':
            if current_record:
                records.append(current_record)
                current_record = {}
        elif parts[0] == 'NM1':
            if parts[1] == 'QC':
                current_record['Last'] = parts[3]
                current_record['First'] = parts[4]
            elif parts[1] == '41':
                current_record['Clearing House'] = parts[3]
        elif parts[0] == 'TRN':
            current_record['Claim Status Tracking #'] = parts[2]
        elif parts[0] == 'STC':
            current_record['Status'] = parts[1]
            current_record['Acknowledged Amt'] = parts[4]
        elif parts[0] == 'DTP':
            if parts[1] == '472':
                current_record['Date of Service'] = parts[3]
            elif parts[1] == '050':
                current_record['Received Date'] = parts[3]
        elif parts[0] == 'AMT':
            if parts[1] == 'YU':
                current_record['Billed Amt'] = parts[2]

    if current_record:
        records.append(current_record)

    return records
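As a quick illustration of what parse_era_content returns, the synthetic fragment below (invented values, not real claim data) exercises the TRN, N1/N3, CLP, CAS, AMT, and DTM branches; the import assumes the MediLink folder is on sys.path, matching the sibling-style imports the package itself uses:

from MediLink_Parser import parse_era_content

# Segments end with '~' and elements are separated by '*', as in an X12 835 file.
sample_era = (
    "TRN*1*1234567890~"              # check / EFT trace number
    "N1*PR*EXAMPLE PAYER~"           # start of the payer section
    "N3*PO BOX 1000~"                # payer address
    "CLP*CHART001*1*150.00*100.00~"  # claim: chart number, charge, amount paid
    "CAS*CO*45*30.00~"               # contractual write-off
    "CAS*PR*1*20.00~"                # patient responsibility
    "AMT*B6*120.00~"                 # allowed amount
    "DTM*232*20240601~"              # date of service
)

for claim in parse_era_content(sample_era):
    print(claim)
# Expect a single record: Chart Number 'CHART001', Charge '150.00', Amount Paid '100.00',
# Write Off 30.0, Patient Responsibility 20.0, Allowed Amount 120.0, Adjustment Amount 50.0.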