medicafe 0.250728.8__py3-none-any.whl → 0.250805.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of medicafe might be problematic. Click here for more details.
- MediBot/MediBot.bat +233 -19
- MediBot/MediBot.py +138 -46
- MediBot/MediBot_Crosswalk_Library.py +127 -623
- MediBot/MediBot_Crosswalk_Utils.py +618 -0
- MediBot/MediBot_Preprocessor.py +72 -17
- MediBot/MediBot_Preprocessor_lib.py +470 -76
- MediBot/MediBot_UI.py +32 -17
- MediBot/MediBot_dataformat_library.py +68 -20
- MediBot/MediBot_docx_decoder.py +120 -19
- MediBot/MediBot_smart_import.py +180 -0
- MediBot/__init__.py +89 -0
- MediBot/get_medicafe_version.py +25 -0
- MediBot/update_json.py +35 -6
- MediBot/update_medicafe.py +19 -1
- MediCafe/MediLink_ConfigLoader.py +160 -0
- MediCafe/__init__.py +171 -0
- MediCafe/__main__.py +222 -0
- MediCafe/api_core.py +1098 -0
- MediCafe/api_core_backup.py +427 -0
- MediCafe/api_factory.py +306 -0
- MediCafe/api_utils.py +356 -0
- MediCafe/core_utils.py +450 -0
- MediCafe/graphql_utils.py +445 -0
- MediCafe/logging_config.py +123 -0
- MediCafe/logging_demo.py +61 -0
- MediCafe/migration_helpers.py +463 -0
- MediCafe/smart_import.py +436 -0
- MediLink/MediLink.py +66 -26
- MediLink/MediLink_837p_cob_library.py +28 -28
- MediLink/MediLink_837p_encoder.py +33 -34
- MediLink/MediLink_837p_encoder_library.py +243 -151
- MediLink/MediLink_837p_utilities.py +129 -5
- MediLink/MediLink_API_Generator.py +83 -60
- MediLink/MediLink_API_v3.py +1 -1
- MediLink/MediLink_ClaimStatus.py +177 -31
- MediLink/MediLink_DataMgmt.py +405 -72
- MediLink/MediLink_Decoder.py +20 -1
- MediLink/MediLink_Deductible.py +155 -28
- MediLink/MediLink_Display_Utils.py +72 -0
- MediLink/MediLink_Down.py +127 -5
- MediLink/MediLink_Gmail.py +712 -653
- MediLink/MediLink_PatientProcessor.py +257 -0
- MediLink/MediLink_UI.py +85 -61
- MediLink/MediLink_Up.py +28 -4
- MediLink/MediLink_insurance_utils.py +227 -264
- MediLink/MediLink_main.py +248 -0
- MediLink/MediLink_smart_import.py +264 -0
- MediLink/__init__.py +93 -0
- MediLink/insurance_type_integration_test.py +66 -76
- MediLink/test.py +1 -1
- MediLink/test_timing.py +59 -0
- {medicafe-0.250728.8.dist-info → medicafe-0.250805.0.dist-info}/METADATA +1 -1
- medicafe-0.250805.0.dist-info/RECORD +81 -0
- medicafe-0.250805.0.dist-info/entry_points.txt +2 -0
- {medicafe-0.250728.8.dist-info → medicafe-0.250805.0.dist-info}/top_level.txt +1 -0
- medicafe-0.250728.8.dist-info/RECORD +0 -59
- {medicafe-0.250728.8.dist-info → medicafe-0.250805.0.dist-info}/LICENSE +0 -0
- {medicafe-0.250728.8.dist-info → medicafe-0.250805.0.dist-info}/WHEEL +0 -0
MediLink/MediLink_Gmail.py
CHANGED
|
@@ -1,654 +1,713 @@
|
|
|
1
|
-
# MediLink_Gmail.py
|
|
2
|
-
import sys, os, subprocess, time, webbrowser, requests, json, ssl, signal
|
|
3
|
-
from
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
self.
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
log("
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
self.
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
|
|
236
|
-
log("
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
|
|
293
|
-
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
|
|
325
|
-
|
|
326
|
-
|
|
327
|
-
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
self.
|
|
331
|
-
self.
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
|
|
360
|
-
|
|
361
|
-
|
|
362
|
-
|
|
363
|
-
|
|
364
|
-
|
|
365
|
-
|
|
366
|
-
|
|
367
|
-
|
|
368
|
-
|
|
369
|
-
|
|
370
|
-
|
|
371
|
-
|
|
372
|
-
|
|
373
|
-
|
|
374
|
-
|
|
375
|
-
|
|
376
|
-
|
|
377
|
-
|
|
378
|
-
|
|
379
|
-
|
|
380
|
-
|
|
381
|
-
|
|
382
|
-
|
|
383
|
-
|
|
384
|
-
|
|
385
|
-
log("
|
|
386
|
-
|
|
387
|
-
log("
|
|
388
|
-
|
|
389
|
-
|
|
390
|
-
|
|
391
|
-
|
|
392
|
-
|
|
393
|
-
|
|
394
|
-
|
|
395
|
-
|
|
396
|
-
|
|
397
|
-
|
|
398
|
-
|
|
399
|
-
log("Error
|
|
400
|
-
|
|
401
|
-
|
|
402
|
-
|
|
403
|
-
|
|
404
|
-
|
|
405
|
-
|
|
406
|
-
|
|
407
|
-
|
|
408
|
-
|
|
409
|
-
|
|
410
|
-
|
|
411
|
-
|
|
412
|
-
|
|
413
|
-
|
|
414
|
-
|
|
415
|
-
httpd.
|
|
416
|
-
log("HTTPS server
|
|
417
|
-
|
|
418
|
-
|
|
419
|
-
|
|
420
|
-
|
|
421
|
-
|
|
422
|
-
|
|
423
|
-
|
|
424
|
-
|
|
425
|
-
|
|
426
|
-
|
|
427
|
-
|
|
428
|
-
|
|
429
|
-
#
|
|
430
|
-
#
|
|
431
|
-
|
|
432
|
-
|
|
433
|
-
|
|
434
|
-
|
|
435
|
-
|
|
436
|
-
|
|
437
|
-
|
|
438
|
-
|
|
439
|
-
|
|
440
|
-
|
|
441
|
-
|
|
442
|
-
|
|
443
|
-
|
|
444
|
-
|
|
445
|
-
|
|
446
|
-
|
|
447
|
-
|
|
448
|
-
|
|
449
|
-
|
|
450
|
-
|
|
451
|
-
|
|
452
|
-
|
|
453
|
-
|
|
454
|
-
|
|
455
|
-
|
|
456
|
-
|
|
457
|
-
|
|
458
|
-
|
|
459
|
-
|
|
460
|
-
|
|
461
|
-
|
|
462
|
-
|
|
463
|
-
|
|
464
|
-
|
|
465
|
-
|
|
466
|
-
|
|
467
|
-
|
|
468
|
-
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
|
|
473
|
-
|
|
474
|
-
|
|
475
|
-
|
|
476
|
-
|
|
477
|
-
|
|
478
|
-
|
|
479
|
-
|
|
480
|
-
|
|
481
|
-
|
|
482
|
-
|
|
483
|
-
|
|
484
|
-
|
|
485
|
-
|
|
486
|
-
|
|
487
|
-
|
|
488
|
-
|
|
489
|
-
|
|
490
|
-
|
|
491
|
-
|
|
492
|
-
|
|
493
|
-
|
|
494
|
-
|
|
495
|
-
|
|
496
|
-
|
|
497
|
-
|
|
498
|
-
|
|
499
|
-
|
|
500
|
-
|
|
501
|
-
|
|
502
|
-
|
|
503
|
-
|
|
504
|
-
|
|
505
|
-
|
|
506
|
-
|
|
507
|
-
|
|
508
|
-
|
|
509
|
-
|
|
510
|
-
|
|
511
|
-
|
|
512
|
-
|
|
513
|
-
|
|
514
|
-
|
|
515
|
-
|
|
516
|
-
|
|
517
|
-
|
|
518
|
-
|
|
519
|
-
|
|
520
|
-
|
|
521
|
-
|
|
522
|
-
|
|
523
|
-
|
|
524
|
-
|
|
525
|
-
|
|
526
|
-
|
|
527
|
-
|
|
528
|
-
|
|
529
|
-
|
|
530
|
-
|
|
531
|
-
|
|
532
|
-
|
|
533
|
-
|
|
534
|
-
log("
|
|
535
|
-
|
|
536
|
-
|
|
537
|
-
|
|
538
|
-
|
|
539
|
-
|
|
540
|
-
|
|
541
|
-
|
|
542
|
-
|
|
543
|
-
|
|
544
|
-
|
|
545
|
-
|
|
546
|
-
|
|
547
|
-
|
|
548
|
-
|
|
549
|
-
|
|
550
|
-
|
|
551
|
-
|
|
552
|
-
|
|
553
|
-
|
|
554
|
-
|
|
555
|
-
|
|
556
|
-
|
|
557
|
-
|
|
558
|
-
|
|
559
|
-
|
|
560
|
-
|
|
561
|
-
|
|
562
|
-
|
|
563
|
-
|
|
564
|
-
|
|
565
|
-
|
|
566
|
-
|
|
567
|
-
|
|
568
|
-
|
|
569
|
-
|
|
570
|
-
|
|
571
|
-
|
|
572
|
-
|
|
573
|
-
|
|
574
|
-
def
|
|
575
|
-
try:
|
|
576
|
-
|
|
577
|
-
|
|
578
|
-
|
|
579
|
-
|
|
580
|
-
|
|
581
|
-
|
|
582
|
-
|
|
583
|
-
|
|
584
|
-
|
|
585
|
-
|
|
586
|
-
|
|
587
|
-
|
|
588
|
-
|
|
589
|
-
|
|
590
|
-
|
|
591
|
-
|
|
592
|
-
|
|
593
|
-
|
|
594
|
-
|
|
595
|
-
|
|
596
|
-
|
|
597
|
-
|
|
598
|
-
|
|
599
|
-
|
|
600
|
-
|
|
601
|
-
|
|
602
|
-
|
|
603
|
-
|
|
604
|
-
|
|
605
|
-
|
|
606
|
-
|
|
607
|
-
|
|
608
|
-
|
|
609
|
-
|
|
610
|
-
|
|
611
|
-
|
|
612
|
-
|
|
613
|
-
|
|
614
|
-
|
|
615
|
-
|
|
616
|
-
|
|
617
|
-
|
|
618
|
-
|
|
619
|
-
|
|
620
|
-
""
|
|
621
|
-
|
|
622
|
-
|
|
623
|
-
|
|
624
|
-
|
|
625
|
-
""
|
|
626
|
-
|
|
627
|
-
|
|
628
|
-
|
|
629
|
-
|
|
630
|
-
|
|
631
|
-
|
|
632
|
-
|
|
633
|
-
|
|
634
|
-
|
|
635
|
-
|
|
636
|
-
|
|
637
|
-
|
|
638
|
-
|
|
639
|
-
|
|
640
|
-
|
|
641
|
-
|
|
642
|
-
|
|
643
|
-
|
|
644
|
-
|
|
645
|
-
|
|
646
|
-
|
|
647
|
-
|
|
648
|
-
|
|
649
|
-
|
|
650
|
-
|
|
651
|
-
|
|
652
|
-
|
|
653
|
-
|
|
1
|
+
# MediLink_Gmail.py
|
|
2
|
+
import sys, os, subprocess, time, webbrowser, requests, json, ssl, signal
|
|
3
|
+
from MediCafe.core_utils import get_shared_config_loader
|
|
4
|
+
|
|
5
|
+
# Get shared config loader
|
|
6
|
+
MediLink_ConfigLoader = get_shared_config_loader()
|
|
7
|
+
if MediLink_ConfigLoader:
|
|
8
|
+
load_configuration = MediLink_ConfigLoader.load_configuration
|
|
9
|
+
log = MediLink_ConfigLoader.log
|
|
10
|
+
else:
|
|
11
|
+
# Fallback functions if config loader is not available
|
|
12
|
+
def load_configuration():
|
|
13
|
+
return {}, {}
|
|
14
|
+
def log(message, level="INFO"):
|
|
15
|
+
print("[{}] {}".format(level, message))
|
|
16
|
+
from http.server import BaseHTTPRequestHandler, HTTPServer
|
|
17
|
+
from threading import Thread, Event
|
|
18
|
+
import platform
|
|
19
|
+
import ctypes
|
|
20
|
+
|
|
21
|
+
config, _ = load_configuration()
|
|
22
|
+
local_storage_path = config['MediLink_Config']['local_storage_path']
|
|
23
|
+
downloaded_emails_file = os.path.join(local_storage_path, 'downloaded_emails.txt')
|
|
24
|
+
|
|
25
|
+
server_port = 8000
|
|
26
|
+
cert_file = 'server.cert'
|
|
27
|
+
key_file = 'server.key'
|
|
28
|
+
# Try to find openssl.cnf in various locations
|
|
29
|
+
openssl_cnf = 'MediLink\\openssl.cnf'
|
|
30
|
+
if not os.path.exists(openssl_cnf):
|
|
31
|
+
log("Could not find openssl.cnf at: " + os.path.abspath(openssl_cnf))
|
|
32
|
+
# Try one directory up
|
|
33
|
+
parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
|
34
|
+
alternative_path = os.path.join(parent_dir, 'MediBot', 'openssl.cnf')
|
|
35
|
+
log("Trying alternative path: " + alternative_path)
|
|
36
|
+
if os.path.exists(alternative_path):
|
|
37
|
+
openssl_cnf = alternative_path
|
|
38
|
+
log("Found openssl.cnf at: " + openssl_cnf)
|
|
39
|
+
else:
|
|
40
|
+
log("Could not find openssl.cnf at alternative path either")
|
|
41
|
+
|
|
42
|
+
httpd = None # Global variable for the HTTP server
|
|
43
|
+
shutdown_event = Event() # Event to signal shutdown
|
|
44
|
+
|
|
45
|
+
# Define the scopes for the Gmail API and other required APIs
|
|
46
|
+
SCOPES = ' '.join([
|
|
47
|
+
'https://www.googleapis.com/auth/gmail.modify',
|
|
48
|
+
'https://www.googleapis.com/auth/gmail.compose',
|
|
49
|
+
'https://www.googleapis.com/auth/gmail.readonly',
|
|
50
|
+
'https://www.googleapis.com/auth/script.external_request',
|
|
51
|
+
'https://www.googleapis.com/auth/userinfo.email',
|
|
52
|
+
'https://www.googleapis.com/auth/script.scriptapp',
|
|
53
|
+
'https://www.googleapis.com/auth/drive'
|
|
54
|
+
])
|
|
55
|
+
|
|
56
|
+
# Path to token.json file
|
|
57
|
+
TOKEN_PATH = 'token.json'
|
|
58
|
+
|
|
59
|
+
# Determine the operating system and version
|
|
60
|
+
os_name = platform.system()
|
|
61
|
+
os_version = platform.release()
|
|
62
|
+
|
|
63
|
+
# Set the credentials path based on the OS and version
|
|
64
|
+
if os_name == 'Windows' and 'XP' in os_version:
|
|
65
|
+
CREDENTIALS_PATH = 'F:\\Medibot\\json\\credentials.json'
|
|
66
|
+
else:
|
|
67
|
+
CREDENTIALS_PATH = 'json\\credentials.json'
|
|
68
|
+
|
|
69
|
+
# Log the selected path for verification
|
|
70
|
+
log("Using CREDENTIALS_PATH: {}".format(CREDENTIALS_PATH), config, level="INFO")
|
|
71
|
+
|
|
72
|
+
REDIRECT_URI = 'https://127.0.0.1:8000'
|
|
73
|
+
|
|
74
|
+
def get_authorization_url():
|
|
75
|
+
with open(CREDENTIALS_PATH, 'r') as credentials_file:
|
|
76
|
+
credentials = json.load(credentials_file)
|
|
77
|
+
client_id = credentials['web']['client_id']
|
|
78
|
+
auth_url = (
|
|
79
|
+
"https://accounts.google.com/o/oauth2/v2/auth?"
|
|
80
|
+
"response_type=code&"
|
|
81
|
+
"client_id={}&"
|
|
82
|
+
"redirect_uri={}&"
|
|
83
|
+
"scope={}&"
|
|
84
|
+
"access_type=offline&" # Requesting offline access allows the application to obtain a refresh token, enabling it to access resources even when the user is not actively using the app. This is useful for long-lived sessions.
|
|
85
|
+
# To improve user experience, consider changing this to 'online' if you don't need offline access:
|
|
86
|
+
# "access_type=online&" # Use this if you only need access while the user is actively using the app and don't require a refresh token.
|
|
87
|
+
|
|
88
|
+
"prompt=consent" # This forces the user to re-consent to the requested scopes every time they authenticate. While this is useful for ensuring the user is aware of the permissions being granted, it can be modified to 'none' or omitted entirely if the application is functioning correctly and tokens are being refreshed properly.
|
|
89
|
+
# To improve user experience, consider changing this to 'none' if you want to avoid showing the consent screen every time:
|
|
90
|
+
# "prompt=none" # Use this if you want to skip the consent screen for users who have already granted permissions.
|
|
91
|
+
# Alternatively, you can omit the prompt parameter entirely to use the default behavior:
|
|
92
|
+
# # "prompt=" # Omitting this will show the consent screen only when necessary.
|
|
93
|
+
).format(client_id, REDIRECT_URI, SCOPES)
|
|
94
|
+
log("Generated authorization URL: {}".format(auth_url))
|
|
95
|
+
return auth_url
|
|
96
|
+
|
|
97
|
+
def exchange_code_for_token(auth_code, retries=3):
|
|
98
|
+
for attempt in range(retries):
|
|
99
|
+
try:
|
|
100
|
+
with open(CREDENTIALS_PATH, 'r') as credentials_file:
|
|
101
|
+
credentials = json.load(credentials_file)
|
|
102
|
+
token_url = "https://oauth2.googleapis.com/token"
|
|
103
|
+
data = {
|
|
104
|
+
'code': auth_code,
|
|
105
|
+
'client_id': credentials['web']['client_id'],
|
|
106
|
+
'client_secret': credentials['web']['client_secret'],
|
|
107
|
+
'redirect_uri': REDIRECT_URI,
|
|
108
|
+
'grant_type': 'authorization_code'
|
|
109
|
+
}
|
|
110
|
+
response = requests.post(token_url, data=data)
|
|
111
|
+
log("Token exchange response: Status code {}, Body: {}".format(response.status_code, response.text))
|
|
112
|
+
token_response = response.json()
|
|
113
|
+
if response.status_code == 200:
|
|
114
|
+
token_response['token_time'] = time.time()
|
|
115
|
+
return token_response
|
|
116
|
+
else:
|
|
117
|
+
log("Token exchange failed: {}".format(token_response))
|
|
118
|
+
if attempt < retries - 1:
|
|
119
|
+
log("Retrying token exchange... (Attempt {}/{})".format(attempt + 1, retries))
|
|
120
|
+
except Exception as e:
|
|
121
|
+
log("Error during token exchange: {}".format(e))
|
|
122
|
+
return {}
|
|
123
|
+
|
|
124
|
+
def get_access_token():
|
|
125
|
+
if os.path.exists(TOKEN_PATH):
|
|
126
|
+
with open(TOKEN_PATH, 'r') as token_file:
|
|
127
|
+
token_data = json.load(token_file)
|
|
128
|
+
log("Loaded token data:\n {}".format(token_data))
|
|
129
|
+
|
|
130
|
+
if 'access_token' in token_data and 'expires_in' in token_data:
|
|
131
|
+
try:
|
|
132
|
+
# Use current time if 'token_time' is missing
|
|
133
|
+
token_time = token_data.get('token_time', time.time())
|
|
134
|
+
token_expiry_time = token_time + token_data['expires_in']
|
|
135
|
+
|
|
136
|
+
except KeyError as e:
|
|
137
|
+
log("KeyError while accessing token data: {}".format(e))
|
|
138
|
+
return None
|
|
139
|
+
|
|
140
|
+
if token_expiry_time > time.time():
|
|
141
|
+
log("Access token is still valid. Expires in {} seconds.".format(token_expiry_time - time.time()))
|
|
142
|
+
return token_data['access_token']
|
|
143
|
+
else:
|
|
144
|
+
log("Access token has expired. Current time: {}, Expiry time: {}".format(time.time(), token_expiry_time))
|
|
145
|
+
new_token_data = refresh_access_token(token_data.get('refresh_token'))
|
|
146
|
+
if 'access_token' in new_token_data:
|
|
147
|
+
new_token_data['token_time'] = time.time()
|
|
148
|
+
with open(TOKEN_PATH, 'w') as token_file:
|
|
149
|
+
json.dump(new_token_data, token_file)
|
|
150
|
+
log("Access token refreshed successfully. New token data: {}".format(new_token_data))
|
|
151
|
+
return new_token_data['access_token']
|
|
152
|
+
else:
|
|
153
|
+
log("Failed to refresh access token. New token data: {}".format(new_token_data))
|
|
154
|
+
return None
|
|
155
|
+
log("Access token not found. Please authenticate.")
|
|
156
|
+
return None
|
|
157
|
+
|
|
158
|
+
def refresh_access_token(refresh_token):
|
|
159
|
+
log("Refreshing access token.")
|
|
160
|
+
with open(CREDENTIALS_PATH, 'r') as credentials_file:
|
|
161
|
+
credentials = json.load(credentials_file)
|
|
162
|
+
token_url = "https://oauth2.googleapis.com/token"
|
|
163
|
+
data = {
|
|
164
|
+
'client_id': credentials['web']['client_id'],
|
|
165
|
+
'client_secret': credentials['web']['client_secret'],
|
|
166
|
+
'refresh_token': refresh_token,
|
|
167
|
+
'grant_type': 'refresh_token'
|
|
168
|
+
}
|
|
169
|
+
response = requests.post(token_url, data=data)
|
|
170
|
+
log("Refresh token response: Status code {}, Body:\n {}".format(response.status_code, response.text))
|
|
171
|
+
if response.status_code == 200:
|
|
172
|
+
log("Access token refreshed successfully.")
|
|
173
|
+
return response.json()
|
|
174
|
+
else:
|
|
175
|
+
log("Failed to refresh access token. Status code: {}".format(response.status_code))
|
|
176
|
+
return {}
|
|
177
|
+
|
|
178
|
+
def bring_window_to_foreground():
|
|
179
|
+
"""Brings the current window to the foreground on Windows."""
|
|
180
|
+
try:
|
|
181
|
+
if platform.system() == 'Windows':
|
|
182
|
+
# Get the current process ID
|
|
183
|
+
pid = os.getpid()
|
|
184
|
+
# Get the window handle for the current process
|
|
185
|
+
hwnd = ctypes.windll.user32.GetForegroundWindow()
|
|
186
|
+
# Get the process ID of the window
|
|
187
|
+
current_pid = ctypes.c_ulong()
|
|
188
|
+
ctypes.windll.user32.GetWindowThreadProcessId(hwnd, ctypes.byref(current_pid))
|
|
189
|
+
|
|
190
|
+
# If the window is not ours, try to bring it to front
|
|
191
|
+
if current_pid.value != pid:
|
|
192
|
+
# Try to set the window to foreground
|
|
193
|
+
ctypes.windll.user32.SetForegroundWindow(hwnd)
|
|
194
|
+
# If that fails, try the alternative method
|
|
195
|
+
if ctypes.windll.user32.GetForegroundWindow() != hwnd:
|
|
196
|
+
ctypes.windll.user32.ShowWindow(hwnd, 9) # SW_RESTORE = 9
|
|
197
|
+
ctypes.windll.user32.SetForegroundWindow(hwnd)
|
|
198
|
+
except Exception as e:
|
|
199
|
+
log("Error bringing window to foreground: {}".format(e))
|
|
200
|
+
|
|
201
|
+
class RequestHandler(BaseHTTPRequestHandler):
|
|
202
|
+
def _set_headers(self):
|
|
203
|
+
self.send_header('Access-Control-Allow-Origin', '*')
|
|
204
|
+
self.send_header('Access-Control-Allow-Methods', 'POST, OPTIONS')
|
|
205
|
+
self.send_header('Access-Control-Allow-Headers', 'Content-Type')
|
|
206
|
+
self.send_header('Content-type', 'application/json')
|
|
207
|
+
|
|
208
|
+
def do_OPTIONS(self):
|
|
209
|
+
self.send_response(200)
|
|
210
|
+
self._set_headers()
|
|
211
|
+
self.end_headers()
|
|
212
|
+
|
|
213
|
+
def do_POST(self):
|
|
214
|
+
if self.path == '/download':
|
|
215
|
+
content_length = int(self.headers['Content-Length'])
|
|
216
|
+
post_data = self.rfile.read(content_length)
|
|
217
|
+
data = json.loads(post_data.decode('utf-8'))
|
|
218
|
+
links = data.get('links', [])
|
|
219
|
+
|
|
220
|
+
# Log the content of links
|
|
221
|
+
log("Received links: {}".format(links))
|
|
222
|
+
|
|
223
|
+
file_ids = [link.get('fileId', None) for link in links if link.get('fileId')]
|
|
224
|
+
log("File IDs received from client: {}".format(file_ids))
|
|
225
|
+
|
|
226
|
+
# Proceed with downloading files
|
|
227
|
+
download_docx_files(links)
|
|
228
|
+
self.send_response(200)
|
|
229
|
+
self._set_headers() # Include CORS headers
|
|
230
|
+
self.end_headers()
|
|
231
|
+
response = json.dumps({"status": "success", "message": "All files downloaded", "fileIds": file_ids})
|
|
232
|
+
self.wfile.write(response.encode('utf-8'))
|
|
233
|
+
shutdown_event.set()
|
|
234
|
+
bring_window_to_foreground() # Bring window to foreground after download
|
|
235
|
+
elif self.path == '/shutdown':
|
|
236
|
+
log("Shutdown request received.")
|
|
237
|
+
self.send_response(200)
|
|
238
|
+
self._set_headers()
|
|
239
|
+
self.end_headers()
|
|
240
|
+
response = json.dumps({"status": "success", "message": "Server is shutting down."})
|
|
241
|
+
self.wfile.write(response.encode('utf-8'))
|
|
242
|
+
shutdown_event.set() # Signal shutdown event instead of calling stop_server directly
|
|
243
|
+
elif self.path == '/delete-files':
|
|
244
|
+
content_length = int(self.headers['Content-Length'])
|
|
245
|
+
post_data = self.rfile.read(content_length)
|
|
246
|
+
data = json.loads(post_data.decode('utf-8'))
|
|
247
|
+
file_ids = data.get('fileIds', [])
|
|
248
|
+
log("File IDs to delete received from client: {}".format(file_ids))
|
|
249
|
+
|
|
250
|
+
if not isinstance(file_ids, list):
|
|
251
|
+
self.send_response(400)
|
|
252
|
+
self._set_headers()
|
|
253
|
+
self.end_headers()
|
|
254
|
+
response = json.dumps({"status": "error", "message": "Invalid fileIds parameter."})
|
|
255
|
+
self.wfile.write(response.encode('utf-8'))
|
|
256
|
+
return
|
|
257
|
+
|
|
258
|
+
self.send_response(200)
|
|
259
|
+
self._set_headers() # Include CORS headers
|
|
260
|
+
self.end_headers()
|
|
261
|
+
response = json.dumps({"status": "success", "message": "Files deleted successfully."})
|
|
262
|
+
self.wfile.write(response.encode('utf-8'))
|
|
263
|
+
else:
|
|
264
|
+
self.send_response(404)
|
|
265
|
+
self.end_headers()
|
|
266
|
+
|
|
267
|
+
def do_GET(self):
|
|
268
|
+
log("Full request path: {}".format(self.path)) # Log the full path for debugging
|
|
269
|
+
if self.path.startswith("/?code="):
|
|
270
|
+
auth_code = self.path.split('=')[1].split('&')[0]
|
|
271
|
+
auth_code = requests.utils.unquote(auth_code) # Decode if URL-encoded
|
|
272
|
+
log("Received authorization code: {}".format(auth_code))
|
|
273
|
+
if is_valid_authorization_code(auth_code):
|
|
274
|
+
try:
|
|
275
|
+
token_response = exchange_code_for_token(auth_code)
|
|
276
|
+
if 'access_token' not in token_response:
|
|
277
|
+
# Check for specific error message
|
|
278
|
+
if token_response.get("status") == "error":
|
|
279
|
+
self.send_response(400)
|
|
280
|
+
self.send_header('Content-type', 'text/html')
|
|
281
|
+
self.end_headers()
|
|
282
|
+
self.wfile.write(token_response["message"].encode())
|
|
283
|
+
return
|
|
284
|
+
# Handle other cases
|
|
285
|
+
raise ValueError("Access token not found in response.")
|
|
286
|
+
except Exception as e:
|
|
287
|
+
log("Error during token exchange: {}".format(e))
|
|
288
|
+
self.send_response(500)
|
|
289
|
+
self.send_header('Content-type', 'text/html')
|
|
290
|
+
self.end_headers()
|
|
291
|
+
self.wfile.write("An error occurred during authentication. Please try again.".encode())
|
|
292
|
+
else:
|
|
293
|
+
log("Token response: {}".format(token_response)) # Add this line
|
|
294
|
+
if 'access_token' in token_response:
|
|
295
|
+
with open(TOKEN_PATH, 'w') as token_file:
|
|
296
|
+
json.dump(token_response, token_file)
|
|
297
|
+
self.send_response(200)
|
|
298
|
+
self.send_header('Content-type', 'text/html')
|
|
299
|
+
self.end_headers()
|
|
300
|
+
self.wfile.write("Authentication successful. You can close this window now.".encode())
|
|
301
|
+
initiate_link_retrieval(config) # Pass config here
|
|
302
|
+
else:
|
|
303
|
+
log("Authentication failed with response: {}".format(token_response)) # Log the full response
|
|
304
|
+
if 'error' in token_response:
|
|
305
|
+
error_description = token_response.get('error_description', 'No description provided.')
|
|
306
|
+
log("Error details: {}".format(error_description)) # Log specific error details
|
|
307
|
+
|
|
308
|
+
# Provide user feedback based on the error
|
|
309
|
+
if token_response.get('error') == 'invalid_grant':
|
|
310
|
+
log("Invalid grant error encountered. Authorization code: {}, Response: {}".format(auth_code, token_response))
|
|
311
|
+
check_invalid_grant_causes(auth_code)
|
|
312
|
+
clear_token_cache() # Clear the cache on invalid grant
|
|
313
|
+
user_message = "Authentication failed: Invalid or expired authorization code. Please try again."
|
|
314
|
+
else:
|
|
315
|
+
user_message = "Authentication failed. Please check the logs for more details."
|
|
316
|
+
|
|
317
|
+
self.send_response(400)
|
|
318
|
+
self.send_header('Content-type', 'text/html')
|
|
319
|
+
self.end_headers()
|
|
320
|
+
self.wfile.write(user_message.encode())
|
|
321
|
+
shutdown_event.set() # Signal shutdown event after failed authentication
|
|
322
|
+
else:
|
|
323
|
+
log("Invalid authorization code format: {}".format(auth_code))
|
|
324
|
+
self.send_response(400)
|
|
325
|
+
self.send_header('Content-type', 'text/html')
|
|
326
|
+
self.end_headers()
|
|
327
|
+
self.wfile.write("Invalid authorization code format. Please try again.".encode())
|
|
328
|
+
shutdown_event.set() # Signal shutdown event after failed authentication
|
|
329
|
+
elif self.path == '/downloaded-emails':
|
|
330
|
+
self.send_response(200)
|
|
331
|
+
self._set_headers()
|
|
332
|
+
self.end_headers()
|
|
333
|
+
downloaded_emails = load_downloaded_emails()
|
|
334
|
+
response = json.dumps({"downloadedEmails": list(downloaded_emails)})
|
|
335
|
+
self.wfile.write(response.encode('utf-8'))
|
|
336
|
+
else:
|
|
337
|
+
self.send_response(200)
|
|
338
|
+
self.send_header('Access-Control-Allow-Origin', '*')
|
|
339
|
+
self.send_header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
|
|
340
|
+
self.send_header('Access-Control-Allow-Headers', 'Content-Type')
|
|
341
|
+
self.send_header('Content-type', 'text/html')
|
|
342
|
+
self.end_headers()
|
|
343
|
+
self.wfile.write(b'HTTPS server is running.')
|
|
344
|
+
|
|
345
|
+
def generate_self_signed_cert(cert_file, key_file):
|
|
346
|
+
log("Checking if certificate file exists: " + cert_file)
|
|
347
|
+
log("Checking if key file exists: " + key_file)
|
|
348
|
+
|
|
349
|
+
# Check if certificate exists and is not expired
|
|
350
|
+
cert_needs_regeneration = True
|
|
351
|
+
if os.path.exists(cert_file):
|
|
352
|
+
try:
|
|
353
|
+
# Check certificate expiration
|
|
354
|
+
check_cmd = ['openssl', 'x509', '-in', cert_file, '-checkend', '86400', '-noout'] # Check if expires in next 24 hours
|
|
355
|
+
result = subprocess.call(check_cmd)
|
|
356
|
+
if result == 0:
|
|
357
|
+
log("Certificate is still valid")
|
|
358
|
+
cert_needs_regeneration = False
|
|
359
|
+
else:
|
|
360
|
+
log("Certificate is expired or will expire soon")
|
|
361
|
+
# Delete expired certificate and key files
|
|
362
|
+
try:
|
|
363
|
+
if os.path.exists(cert_file):
|
|
364
|
+
os.remove(cert_file)
|
|
365
|
+
log("Deleted expired certificate file: {}".format(cert_file))
|
|
366
|
+
if os.path.exists(key_file):
|
|
367
|
+
os.remove(key_file)
|
|
368
|
+
log("Deleted expired key file: {}".format(key_file))
|
|
369
|
+
except Exception as e:
|
|
370
|
+
log("Error deleting expired certificate files: {}".format(e))
|
|
371
|
+
except Exception as e:
|
|
372
|
+
log("Error checking certificate expiration: {}".format(e))
|
|
373
|
+
|
|
374
|
+
if cert_needs_regeneration:
|
|
375
|
+
log("Generating self-signed SSL certificate...")
|
|
376
|
+
cmd = [
|
|
377
|
+
'openssl', 'req', '-config', openssl_cnf, '-nodes', '-new', '-x509',
|
|
378
|
+
'-keyout', key_file,
|
|
379
|
+
'-out', cert_file,
|
|
380
|
+
'-days', '365',
|
|
381
|
+
'-sha256' # Use SHA-256 for better security
|
|
382
|
+
#'-subj', '/C=US/ST=...' The openssl.cnf file contains default values for these fields, but they can be overridden by the -subj option.
|
|
383
|
+
]
|
|
384
|
+
try:
|
|
385
|
+
log("Running command: " + ' '.join(cmd))
|
|
386
|
+
result = subprocess.call(cmd)
|
|
387
|
+
log("Command finished with result: " + str(result))
|
|
388
|
+
if result != 0:
|
|
389
|
+
raise RuntimeError("Failed to generate self-signed certificate")
|
|
390
|
+
|
|
391
|
+
# Verify the certificate was generated correctly
|
|
392
|
+
verify_cmd = ['openssl', 'x509', '-in', cert_file, '-text', '-noout']
|
|
393
|
+
verify_result = subprocess.call(verify_cmd)
|
|
394
|
+
if verify_result != 0:
|
|
395
|
+
raise RuntimeError("Generated certificate verification failed")
|
|
396
|
+
|
|
397
|
+
log("Self-signed SSL certificate generated and verified successfully.")
|
|
398
|
+
except Exception as e:
|
|
399
|
+
log("Error generating self-signed certificate: {}".format(e))
|
|
400
|
+
raise
|
|
401
|
+
|
|
402
|
+
def run_server():
|
|
403
|
+
global httpd
|
|
404
|
+
try:
|
|
405
|
+
log("Attempting to start server on port " + str(server_port))
|
|
406
|
+
server_address = ('0.0.0.0', server_port) # Bind to all interfaces
|
|
407
|
+
httpd = HTTPServer(server_address, RequestHandler)
|
|
408
|
+
log("Attempting to wrap socket with SSL. cert_file=" + cert_file + ", key_file=" + key_file)
|
|
409
|
+
|
|
410
|
+
if not os.path.exists(cert_file):
|
|
411
|
+
log("Error: Certificate file not found: " + cert_file)
|
|
412
|
+
if not os.path.exists(key_file):
|
|
413
|
+
log("Error: Key file not found: " + key_file)
|
|
414
|
+
|
|
415
|
+
httpd.socket = ssl.wrap_socket(httpd.socket, certfile=cert_file, keyfile=key_file, server_side=True)
|
|
416
|
+
log("Starting HTTPS server on port {}".format(server_port))
|
|
417
|
+
httpd.serve_forever()
|
|
418
|
+
except Exception as e:
|
|
419
|
+
log("Error in serving: {}".format(e))
|
|
420
|
+
stop_server()
|
|
421
|
+
|
|
422
|
+
def stop_server():
    """Shut the HTTPS server down (if running) and signal global shutdown.

    Always sets shutdown_event and restores the console window, even when
    the server was never started.
    """
    global httpd
    if httpd:
        log("Stopping HTTPS server.")
        httpd.shutdown()
        httpd.server_close()
        log("HTTPS server stopped.")
    # Release any thread blocked on shutdown_event.wait().
    shutdown_event.set()
    # Return focus to our console window after the browser interaction.
    bring_window_to_foreground()
|
|
431
|
+
|
|
432
|
+
def load_downloaded_emails():
    """Return the set of filenames already recorded as downloaded.

    Reads one filename per line from downloaded_emails_file; returns an
    empty set when the file does not exist yet.
    """
    seen = set()
    if os.path.exists(downloaded_emails_file):
        with open(downloaded_emails_file, 'r') as handle:
            for line in handle:
                seen.add(line.strip())
    log("Loaded downloaded emails: {}".format(seen))
    return seen
|
|
439
|
+
|
|
440
|
+
def download_docx_files(links):
    """Download each linked .docx file into local_storage_path.

    links: iterable of dicts with 'url' and 'filename' keys.

    Files whose names are already listed in downloaded_emails_file are
    skipped. Each successful download appends its filename to that file so
    future runs de-duplicate. Per-link errors are logged and do not abort
    the remaining downloads.
    """
    # TODO (LOW-MEDIUM PRIORITY - CSV File Detection and Routing):
    # Downloaded batches may include CSV files (including variants such as
    # .txt/.tsv/.dat, multi-extension names like "report.csv.zip", and CSVs
    # inside ZIP archives) that need special handling: detect them by
    # extension and optionally by content, then move them to a dedicated
    # processing directory. Planned configuration keys:
    #   config['csv_processing_dir'], config['csv_file_extensions'],
    #   config['csv_content_detection'].
    # Planned helpers: detect_csv_files(downloaded_files) -> list and
    # move_csv_to_processing_dir(csv_file, destination_dir), with logging
    # for the audit trail, preserved permissions/timestamps, and error
    # handling for permission, disk-space, and locked/corrupted files.
    # Currently all files are treated identically regardless of extension.
    downloaded_emails = load_downloaded_emails()

    for link in links:
        # Pre-bind so the except handler can always reference url, even if
        # the failure happens before (or during) its assignment.
        url = ''
        try:
            url = link.get('url', '')
            filename = link.get('filename', '')

            # Log the variables to debug
            log("Processing link: url='{}', filename='{}'".format(url, filename))

            # Skip if email already downloaded
            if filename in downloaded_emails:
                log("Skipping already downloaded email: {}".format(filename))
                continue

            log("Downloading .docx file from URL: {}".format(url))
            # verify=False is required for the self-signed certificate;
            # NOTE(review): do not reuse this call pattern for external hosts.
            response = requests.get(url, verify=False)
            if response.status_code == 200:
                file_path = os.path.join(local_storage_path, filename)
                with open(file_path, 'wb') as file:
                    file.write(response.content)
                log("Downloaded .docx file: {}".format(filename))
                # Record the filename so this loop and future runs skip it.
                downloaded_emails.add(filename)
                with open(downloaded_emails_file, 'a') as file:
                    file.write(filename + '\n')
            else:
                log("Failed to download .docx file from URL: {}. Status code: {}".format(url, response.status_code))
        except Exception as e:
            log("Error downloading .docx file from URL: {}. Error: {}".format(url, e))
|
|
520
|
+
|
|
521
|
+
def open_browser_with_executable(url, browser_path=None):
    """Open *url* in a browser.

    Uses the executable at *browser_path* when given; otherwise falls back
    to the system default browser. All failures are logged; never raises.
    """
    try:
        if not browser_path:
            log("No browser path provided. Attempting to open URL with default browser: {}".format(url))
            webbrowser.open(url)
            log("Default browser opened.")
            return
        log("Attempting to open URL with provided executable: {} {}".format(browser_path, url))
        # Launch as an argument list (no shell) and wait for completion so
        # the return code and stderr can be logged.
        proc = subprocess.Popen([browser_path, url], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        _out, err = proc.communicate()
        if proc.returncode == 0:
            log("Browser opened with provided executable path using subprocess.Popen.")
        else:
            log("Browser failed to open using subprocess.Popen. Return code: {}. Stderr: {}".format(proc.returncode, err))
    except Exception as e:
        log("Failed to open browser: {}".format(e))
|
|
537
|
+
|
|
538
|
+
def initiate_link_retrieval(config):
    """Kick off link retrieval against the Apps Script web app.

    First opens the browser with an implicit GET, then POSTs the list of
    already-downloaded emails with an OAuth bearer token. Sets
    shutdown_event and returns early when no valid token is available.
    """
    deployment_id = config['MediLink_Config']['webapp_deployment_id']
    base_url = "https://script.google.com/macros/s/{}/exec".format(deployment_id)

    log("Initiating browser via implicit GET.")
    open_browser_with_executable(base_url + "?action=get_link")

    log("Preparing POST call.")
    payload = {
        "downloadedEmails": list(load_downloaded_emails())
    }

    access_token = get_access_token()
    if not access_token:
        log("Access token not found. Please authenticate first.")
        shutdown_event.set()  # Unblock the waiting main thread
        return

    # Validate the token before using it so a stale token fails fast.
    if inspect_token(access_token) is None:
        log("Access token is invalid. Please re-authenticate.")
        shutdown_event.set()  # Unblock the waiting main thread
        return

    headers = {
        'Authorization': 'Bearer {}'.format(access_token),
        'Content-Type': 'application/json'
    }

    log("Request headers: {}".format(headers))
    log("Request payload: {}".format(payload))

    handle_post_response(base_url, payload, headers)
|
|
573
|
+
|
|
574
|
+
def handle_post_response(url, payload, headers):
    """POST *payload* (JSON) to *url* with *headers* and act on the reply.

    On 200 the body is parsed as JSON and server-reported errors are
    surfaced; 401/403/404 and unexpected codes are logged individually.
    Every failure path sets shutdown_event so the waiting main thread can
    exit. Never raises.
    """
    try:
        response = requests.post(url, json=payload, headers=headers)
        log("Response status code: {}".format(response.status_code))
        log("Response body: {}".format(response.text))

        if response.status_code == 200:
            try:
                response_data = response.json()
            except ValueError:
                # A 200 with a non-JSON body previously fell into the
                # generic handler with a misleading message; name the
                # actual failure instead.
                log("Response body was not valid JSON despite status 200.")
                shutdown_event.set()
                return
            log("Parsed response data: {}".format(response_data))  # Log the parsed response data
            if response_data.get("status") == "error":
                log("Error message from server: {}".format(response_data.get("message")))
                print("Error: {}".format(response_data.get("message")))
                shutdown_event.set()  # Signal shutdown event after error
            else:
                log("Link retrieval initiated successfully.")
        elif response.status_code == 401:
            log("Unauthorized. Check if the token has the necessary scopes. Response body: {}".format(response.text))
            # Inspect the token to log its details
            token_info = inspect_token(headers['Authorization'].split(' ')[1])
            log("Token details: {}".format(token_info))
            shutdown_event.set()
        elif response.status_code == 403:
            log("Forbidden access. Ensure that the OAuth client has the correct permissions. Response body: {}".format(response.text))
            shutdown_event.set()
        elif response.status_code == 404:
            log("Not Found. Verify the URL and ensure the Apps Script is deployed correctly. Response body: {}".format(response.text))
            shutdown_event.set()
        else:
            log("Failed to initiate link retrieval. Unexpected status code: {}. Response body: {}".format(response.status_code, response.text))
            shutdown_event.set()
    except requests.exceptions.RequestException as e:
        log("RequestException during link retrieval initiation: {}".format(e))
        shutdown_event.set()
    except Exception as e:
        log("Unexpected error during link retrieval initiation: {}".format(e))
        shutdown_event.set()
|
|
610
|
+
|
|
611
|
+
def inspect_token(access_token):
    """Query Google's tokeninfo endpoint for *access_token*.

    Returns the parsed token-info dict on success, or None when the token
    is invalid or the lookup fails. A definitively invalid token (400 with
    "invalid_token" in the body) also deletes token.json and stops the
    server so the user can re-authenticate.
    """
    info_url = "https://www.googleapis.com/oauth2/v1/tokeninfo?access_token={}".format(access_token)
    try:
        response = requests.get(info_url)
        log("Token info: Status code {}, Body: {}".format(response.status_code, response.text))

        if response.status_code == 200:
            return response.json()

        log("Failed to inspect token. Status code: {}, Body: {}".format(response.status_code, response.text))
        if response.status_code == 400 and "invalid_token" in response.text:
            # The token is definitively bad: force a fresh auth cycle.
            log("Access token is invalid. Deleting token.json and stopping the server.")
            delete_token_file()
            print("Access token is invalid. Please re-authenticate and restart the server.")
            stop_server()
        return None
    except Exception as e:
        log("Exception during token inspection: {}".format(e))
        return None
|
|
632
|
+
|
|
633
|
+
def delete_token_file():
    """Remove token.json if present, logging the outcome; never raises."""
    try:
        if not os.path.exists(TOKEN_PATH):
            log("token.json does not exist.")
            return
        os.remove(TOKEN_PATH)
        log("Deleted token.json successfully.")
    except Exception as e:
        log("Error deleting token.json: {}".format(e))
|
|
642
|
+
|
|
643
|
+
def signal_handler(sig, frame):
    """OS signal hook: log, stop the HTTPS server, and exit with status 0."""
    message = "Signal received: {}. Initiating shutdown.".format(sig)
    log(message)
    stop_server()
    sys.exit(0)
|
|
647
|
+
|
|
648
|
+
def auth_and_retrieval():
    """Run the OAuth flow or start link retrieval, then wait for shutdown.

    With a cached token, proceeds straight to link retrieval; otherwise
    opens the authorization URL in a browser so the local server can
    capture the callback. Either way, blocks until shutdown_event is set.
    """
    token = get_access_token()
    if token:
        log("Access token found. Proceeding.")
        initiate_link_retrieval(config)  # module-level config
    else:
        log("Access token not found or expired. Please authenticate first.")
        open_browser_with_executable(get_authorization_url())
    # Released by stop_server() or by one of the error paths.
    shutdown_event.wait()
|
|
661
|
+
|
|
662
|
+
def is_valid_authorization_code(auth_code):
    """Return True for a non-empty string authorization code.

    Anything else (None, empty string, non-string) is logged as invalid
    and returns False.
    """
    # A truthy str is necessarily non-empty, so no explicit length check
    # is needed.
    if isinstance(auth_code, str) and auth_code:
        return True
    log("Invalid authorization code format: {}".format(auth_code))
    return False
|
|
668
|
+
|
|
669
|
+
def clear_token_cache():
    """Delete the cached OAuth token file, logging only when one existed."""
    if not os.path.exists(TOKEN_PATH):
        return
    os.remove(TOKEN_PATH)
    log("Cleared token cache.")
|
|
673
|
+
|
|
674
|
+
def check_invalid_grant_causes(auth_code):
    """Placeholder: diagnose common causes of an OAuth 'invalid_grant' error.

    Currently only logs that the check would run; the sketched checks in
    the string literal below (code reuse, redirect-URI mismatch) are for a
    future implementation and are never executed.
    """
    # TODO Implement this function in the future to check for common causes of invalid_grant error
    # Log potential causes for invalid_grant
    log("FUTURE IMPLEMENTATION: Checking common causes for invalid_grant error with auth code: {}".format(auth_code))
    # Example checks (you can expand this based on your needs)
    """
    if is_code_used(auth_code):
        log("Authorization code has already been used.")
    if not is_redirect_uri_correct():
        log("Redirect URI does not match the registered URI.")
    """
|
|
685
|
+
|
|
686
|
+
if __name__ == "__main__":
    # Route Ctrl+C and termination requests through the common shutdown path.
    for signum in (signal.SIGINT, signal.SIGTERM):
        signal.signal(signum, signal_handler)

    try:
        # Make sure a certificate/key pair exists before the server starts.
        generate_self_signed_cert(cert_file, key_file)

        from threading import Thread
        log("Starting server thread.")
        server_thread = Thread(target=run_server)
        server_thread.daemon = True  # don't let the server block interpreter exit
        server_thread.start()

        # Blocks until shutdown_event fires (auth complete or retrieval done).
        auth_and_retrieval()

        log("Stopping HTTPS server.")
        stop_server()  # Ensure the server is stopped
        log("Waiting for server thread to finish.")
        server_thread.join()
    except KeyboardInterrupt:
        log("KeyboardInterrupt received, stopping server.")
        stop_server()
        sys.exit(0)
    except Exception as e:
        log("An error occurred: {}".format(e))
        stop_server()
        sys.exit(1)
|