appier 1.34.6__py2.py3-none-any.whl → 1.34.8__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- appier/__init__.py +1 -1
- appier/api.py +10 -0
- appier/asgi.py +10 -0
- appier/async_neo.py +2 -2
- appier/async_old.py +1 -1
- appier/base.py +15 -2
- appier/bus.py +10 -0
- appier/config.py +409 -401
- appier/data.py +2 -0
- appier/exceptions.py +450 -442
- appier/http.py +1292 -1283
- appier/model.py +7 -2
- appier/mongo.py +24 -0
- appier/scheduler.py +342 -334
- appier/test/data.py +10 -0
- appier/test/error_handler.py +142 -0
- appier/test/exception_handler.py +146 -0
- appier/test/http.py +24 -0
- appier/test/tags.py +109 -0
- appier/util.py +2517 -2503
- {appier-1.34.6.dist-info → appier-1.34.8.dist-info}/METADATA +1 -1
- {appier-1.34.6.dist-info → appier-1.34.8.dist-info}/RECORD +25 -22
- {appier-1.34.6.dist-info → appier-1.34.8.dist-info}/LICENSE +0 -0
- {appier-1.34.6.dist-info → appier-1.34.8.dist-info}/WHEEL +0 -0
- {appier-1.34.6.dist-info → appier-1.34.8.dist-info}/top_level.txt +0 -0
appier/util.py
CHANGED
@@ -1,2503 +1,2517 @@
|
|
|
1
|
-
#!/usr/bin/python
|
|
2
|
-
# -*- coding: utf-8 -*-
|
|
3
|
-
|
|
4
|
-
# Hive Appier Framework
|
|
5
|
-
# Copyright (c) 2008-2024 Hive Solutions Lda.
|
|
6
|
-
#
|
|
7
|
-
# This file is part of Hive Appier Framework.
|
|
8
|
-
#
|
|
9
|
-
# Hive Appier Framework is free software: you can redistribute it and/or modify
|
|
10
|
-
# it under the terms of the Apache License as published by the Apache
|
|
11
|
-
# Foundation, either version 2.0 of the License, or (at your option) any
|
|
12
|
-
# later version.
|
|
13
|
-
#
|
|
14
|
-
# Hive Appier Framework is distributed in the hope that it will be useful,
|
|
15
|
-
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
16
|
-
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
17
|
-
# Apache License for more details.
|
|
18
|
-
#
|
|
19
|
-
# You should have received a copy of the Apache License along with
|
|
20
|
-
# Hive Appier Framework. If not, see <http://www.apache.org/licenses/>.
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
"""
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
import
|
|
41
|
-
import
|
|
42
|
-
import
|
|
43
|
-
import
|
|
44
|
-
import
|
|
45
|
-
import
|
|
46
|
-
import
|
|
47
|
-
import
|
|
48
|
-
import
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
""" The
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
"""
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
if
|
|
95
|
-
return
|
|
96
|
-
return
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
def
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
"
|
|
125
|
-
"
|
|
126
|
-
"
|
|
127
|
-
|
|
128
|
-
"""
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
mobile
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
mobile
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
tablet
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
tablet
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
|
|
236
|
-
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
the provided user agent.
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
|
|
293
|
-
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
version_f=
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
|
|
325
|
-
|
|
326
|
-
|
|
327
|
-
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
email string
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
|
|
360
|
-
|
|
361
|
-
|
|
362
|
-
|
|
363
|
-
if
|
|
364
|
-
return (
|
|
365
|
-
|
|
366
|
-
|
|
367
|
-
|
|
368
|
-
|
|
369
|
-
|
|
370
|
-
|
|
371
|
-
|
|
372
|
-
|
|
373
|
-
|
|
374
|
-
|
|
375
|
-
|
|
376
|
-
|
|
377
|
-
|
|
378
|
-
|
|
379
|
-
|
|
380
|
-
|
|
381
|
-
|
|
382
|
-
|
|
383
|
-
|
|
384
|
-
|
|
385
|
-
|
|
386
|
-
|
|
387
|
-
|
|
388
|
-
|
|
389
|
-
|
|
390
|
-
|
|
391
|
-
|
|
392
|
-
|
|
393
|
-
|
|
394
|
-
|
|
395
|
-
|
|
396
|
-
|
|
397
|
-
|
|
398
|
-
|
|
399
|
-
|
|
400
|
-
|
|
401
|
-
|
|
402
|
-
|
|
403
|
-
|
|
404
|
-
|
|
405
|
-
|
|
406
|
-
|
|
407
|
-
|
|
408
|
-
|
|
409
|
-
|
|
410
|
-
|
|
411
|
-
|
|
412
|
-
|
|
413
|
-
|
|
414
|
-
|
|
415
|
-
|
|
416
|
-
|
|
417
|
-
|
|
418
|
-
|
|
419
|
-
|
|
420
|
-
|
|
421
|
-
|
|
422
|
-
|
|
423
|
-
|
|
424
|
-
|
|
425
|
-
|
|
426
|
-
|
|
427
|
-
|
|
428
|
-
|
|
429
|
-
|
|
430
|
-
|
|
431
|
-
|
|
432
|
-
|
|
433
|
-
|
|
434
|
-
|
|
435
|
-
|
|
436
|
-
|
|
437
|
-
|
|
438
|
-
|
|
439
|
-
|
|
440
|
-
|
|
441
|
-
|
|
442
|
-
|
|
443
|
-
|
|
444
|
-
|
|
445
|
-
|
|
446
|
-
|
|
447
|
-
|
|
448
|
-
|
|
449
|
-
|
|
450
|
-
|
|
451
|
-
|
|
452
|
-
|
|
453
|
-
|
|
454
|
-
|
|
455
|
-
|
|
456
|
-
|
|
457
|
-
|
|
458
|
-
|
|
459
|
-
|
|
460
|
-
|
|
461
|
-
|
|
462
|
-
|
|
463
|
-
|
|
464
|
-
|
|
465
|
-
|
|
466
|
-
|
|
467
|
-
|
|
468
|
-
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
|
|
473
|
-
|
|
474
|
-
|
|
475
|
-
|
|
476
|
-
|
|
477
|
-
|
|
478
|
-
|
|
479
|
-
|
|
480
|
-
|
|
481
|
-
|
|
482
|
-
|
|
483
|
-
|
|
484
|
-
|
|
485
|
-
|
|
486
|
-
|
|
487
|
-
|
|
488
|
-
|
|
489
|
-
|
|
490
|
-
|
|
491
|
-
|
|
492
|
-
|
|
493
|
-
|
|
494
|
-
|
|
495
|
-
|
|
496
|
-
|
|
497
|
-
|
|
498
|
-
|
|
499
|
-
|
|
500
|
-
|
|
501
|
-
|
|
502
|
-
|
|
503
|
-
|
|
504
|
-
|
|
505
|
-
|
|
506
|
-
|
|
507
|
-
|
|
508
|
-
|
|
509
|
-
|
|
510
|
-
|
|
511
|
-
|
|
512
|
-
|
|
513
|
-
|
|
514
|
-
|
|
515
|
-
|
|
516
|
-
|
|
517
|
-
|
|
518
|
-
|
|
519
|
-
|
|
520
|
-
|
|
521
|
-
#
|
|
522
|
-
#
|
|
523
|
-
data
|
|
524
|
-
|
|
525
|
-
|
|
526
|
-
|
|
527
|
-
|
|
528
|
-
|
|
529
|
-
|
|
530
|
-
|
|
531
|
-
|
|
532
|
-
|
|
533
|
-
|
|
534
|
-
|
|
535
|
-
|
|
536
|
-
|
|
537
|
-
|
|
538
|
-
|
|
539
|
-
|
|
540
|
-
|
|
541
|
-
|
|
542
|
-
|
|
543
|
-
|
|
544
|
-
|
|
545
|
-
|
|
546
|
-
|
|
547
|
-
|
|
548
|
-
|
|
549
|
-
|
|
550
|
-
|
|
551
|
-
|
|
552
|
-
|
|
553
|
-
|
|
554
|
-
|
|
555
|
-
|
|
556
|
-
|
|
557
|
-
#
|
|
558
|
-
#
|
|
559
|
-
|
|
560
|
-
|
|
561
|
-
|
|
562
|
-
|
|
563
|
-
|
|
564
|
-
|
|
565
|
-
|
|
566
|
-
|
|
567
|
-
|
|
568
|
-
|
|
569
|
-
|
|
570
|
-
|
|
571
|
-
|
|
572
|
-
|
|
573
|
-
|
|
574
|
-
#
|
|
575
|
-
|
|
576
|
-
|
|
577
|
-
|
|
578
|
-
|
|
579
|
-
# retrieves the
|
|
580
|
-
#
|
|
581
|
-
|
|
582
|
-
|
|
583
|
-
|
|
584
|
-
#
|
|
585
|
-
#
|
|
586
|
-
|
|
587
|
-
|
|
588
|
-
|
|
589
|
-
|
|
590
|
-
|
|
591
|
-
|
|
592
|
-
|
|
593
|
-
|
|
594
|
-
|
|
595
|
-
|
|
596
|
-
|
|
597
|
-
|
|
598
|
-
|
|
599
|
-
|
|
600
|
-
|
|
601
|
-
|
|
602
|
-
|
|
603
|
-
|
|
604
|
-
|
|
605
|
-
|
|
606
|
-
|
|
607
|
-
|
|
608
|
-
|
|
609
|
-
|
|
610
|
-
|
|
611
|
-
|
|
612
|
-
|
|
613
|
-
|
|
614
|
-
|
|
615
|
-
|
|
616
|
-
|
|
617
|
-
|
|
618
|
-
|
|
619
|
-
|
|
620
|
-
|
|
621
|
-
|
|
622
|
-
|
|
623
|
-
|
|
624
|
-
|
|
625
|
-
|
|
626
|
-
|
|
627
|
-
|
|
628
|
-
def
|
|
629
|
-
|
|
630
|
-
|
|
631
|
-
|
|
632
|
-
|
|
633
|
-
|
|
634
|
-
|
|
635
|
-
|
|
636
|
-
|
|
637
|
-
|
|
638
|
-
|
|
639
|
-
|
|
640
|
-
|
|
641
|
-
|
|
642
|
-
|
|
643
|
-
|
|
644
|
-
|
|
645
|
-
|
|
646
|
-
|
|
647
|
-
|
|
648
|
-
object[
|
|
649
|
-
|
|
650
|
-
|
|
651
|
-
def
|
|
652
|
-
for name, value in legacy.
|
|
653
|
-
if name in
|
|
654
|
-
|
|
655
|
-
|
|
656
|
-
|
|
657
|
-
object[name] = value
|
|
658
|
-
|
|
659
|
-
|
|
660
|
-
|
|
661
|
-
|
|
662
|
-
|
|
663
|
-
|
|
664
|
-
|
|
665
|
-
|
|
666
|
-
|
|
667
|
-
|
|
668
|
-
|
|
669
|
-
|
|
670
|
-
|
|
671
|
-
|
|
672
|
-
|
|
673
|
-
|
|
674
|
-
|
|
675
|
-
|
|
676
|
-
|
|
677
|
-
|
|
678
|
-
|
|
679
|
-
|
|
680
|
-
|
|
681
|
-
|
|
682
|
-
|
|
683
|
-
|
|
684
|
-
|
|
685
|
-
|
|
686
|
-
|
|
687
|
-
|
|
688
|
-
|
|
689
|
-
|
|
690
|
-
|
|
691
|
-
#
|
|
692
|
-
#
|
|
693
|
-
#
|
|
694
|
-
|
|
695
|
-
|
|
696
|
-
|
|
697
|
-
|
|
698
|
-
|
|
699
|
-
|
|
700
|
-
|
|
701
|
-
#
|
|
702
|
-
# the
|
|
703
|
-
|
|
704
|
-
|
|
705
|
-
|
|
706
|
-
|
|
707
|
-
|
|
708
|
-
for
|
|
709
|
-
|
|
710
|
-
|
|
711
|
-
|
|
712
|
-
|
|
713
|
-
|
|
714
|
-
|
|
715
|
-
|
|
716
|
-
|
|
717
|
-
|
|
718
|
-
|
|
719
|
-
|
|
720
|
-
|
|
721
|
-
|
|
722
|
-
|
|
723
|
-
|
|
724
|
-
|
|
725
|
-
|
|
726
|
-
|
|
727
|
-
|
|
728
|
-
|
|
729
|
-
|
|
730
|
-
|
|
731
|
-
|
|
732
|
-
|
|
733
|
-
|
|
734
|
-
:
|
|
735
|
-
|
|
736
|
-
|
|
737
|
-
|
|
738
|
-
|
|
739
|
-
|
|
740
|
-
|
|
741
|
-
|
|
742
|
-
|
|
743
|
-
|
|
744
|
-
|
|
745
|
-
|
|
746
|
-
|
|
747
|
-
|
|
748
|
-
#
|
|
749
|
-
|
|
750
|
-
|
|
751
|
-
#
|
|
752
|
-
|
|
753
|
-
|
|
754
|
-
|
|
755
|
-
|
|
756
|
-
|
|
757
|
-
|
|
758
|
-
|
|
759
|
-
|
|
760
|
-
|
|
761
|
-
|
|
762
|
-
|
|
763
|
-
|
|
764
|
-
|
|
765
|
-
|
|
766
|
-
|
|
767
|
-
|
|
768
|
-
|
|
769
|
-
|
|
770
|
-
|
|
771
|
-
|
|
772
|
-
|
|
773
|
-
|
|
774
|
-
|
|
775
|
-
|
|
776
|
-
|
|
777
|
-
|
|
778
|
-
|
|
779
|
-
|
|
780
|
-
|
|
781
|
-
|
|
782
|
-
|
|
783
|
-
|
|
784
|
-
|
|
785
|
-
|
|
786
|
-
|
|
787
|
-
|
|
788
|
-
|
|
789
|
-
|
|
790
|
-
|
|
791
|
-
|
|
792
|
-
|
|
793
|
-
|
|
794
|
-
|
|
795
|
-
|
|
796
|
-
|
|
797
|
-
|
|
798
|
-
#
|
|
799
|
-
#
|
|
800
|
-
#
|
|
801
|
-
|
|
802
|
-
|
|
803
|
-
|
|
804
|
-
|
|
805
|
-
leafs_l.
|
|
806
|
-
|
|
807
|
-
|
|
808
|
-
|
|
809
|
-
|
|
810
|
-
|
|
811
|
-
|
|
812
|
-
|
|
813
|
-
|
|
814
|
-
|
|
815
|
-
|
|
816
|
-
|
|
817
|
-
|
|
818
|
-
|
|
819
|
-
|
|
820
|
-
|
|
821
|
-
|
|
822
|
-
|
|
823
|
-
|
|
824
|
-
|
|
825
|
-
|
|
826
|
-
|
|
827
|
-
|
|
828
|
-
|
|
829
|
-
|
|
830
|
-
|
|
831
|
-
|
|
832
|
-
|
|
833
|
-
|
|
834
|
-
|
|
835
|
-
|
|
836
|
-
|
|
837
|
-
|
|
838
|
-
|
|
839
|
-
|
|
840
|
-
|
|
841
|
-
|
|
842
|
-
|
|
843
|
-
|
|
844
|
-
#
|
|
845
|
-
|
|
846
|
-
|
|
847
|
-
|
|
848
|
-
|
|
849
|
-
|
|
850
|
-
|
|
851
|
-
|
|
852
|
-
|
|
853
|
-
|
|
854
|
-
|
|
855
|
-
|
|
856
|
-
|
|
857
|
-
|
|
858
|
-
|
|
859
|
-
|
|
860
|
-
|
|
861
|
-
|
|
862
|
-
|
|
863
|
-
|
|
864
|
-
|
|
865
|
-
|
|
866
|
-
|
|
867
|
-
|
|
868
|
-
|
|
869
|
-
|
|
870
|
-
|
|
871
|
-
|
|
872
|
-
|
|
873
|
-
|
|
874
|
-
|
|
875
|
-
|
|
876
|
-
|
|
877
|
-
|
|
878
|
-
|
|
879
|
-
|
|
880
|
-
|
|
881
|
-
|
|
882
|
-
|
|
883
|
-
|
|
884
|
-
|
|
885
|
-
|
|
886
|
-
|
|
887
|
-
|
|
888
|
-
|
|
889
|
-
|
|
890
|
-
|
|
891
|
-
|
|
892
|
-
|
|
893
|
-
|
|
894
|
-
|
|
895
|
-
|
|
896
|
-
|
|
897
|
-
|
|
898
|
-
|
|
899
|
-
|
|
900
|
-
|
|
901
|
-
|
|
902
|
-
|
|
903
|
-
|
|
904
|
-
|
|
905
|
-
data =
|
|
906
|
-
|
|
907
|
-
|
|
908
|
-
|
|
909
|
-
|
|
910
|
-
|
|
911
|
-
|
|
912
|
-
|
|
913
|
-
|
|
914
|
-
|
|
915
|
-
|
|
916
|
-
|
|
917
|
-
|
|
918
|
-
|
|
919
|
-
|
|
920
|
-
|
|
921
|
-
|
|
922
|
-
|
|
923
|
-
|
|
924
|
-
|
|
925
|
-
|
|
926
|
-
|
|
927
|
-
|
|
928
|
-
|
|
929
|
-
|
|
930
|
-
|
|
931
|
-
|
|
932
|
-
|
|
933
|
-
|
|
934
|
-
|
|
935
|
-
|
|
936
|
-
|
|
937
|
-
|
|
938
|
-
|
|
939
|
-
|
|
940
|
-
:
|
|
941
|
-
:
|
|
942
|
-
|
|
943
|
-
|
|
944
|
-
|
|
945
|
-
|
|
946
|
-
|
|
947
|
-
|
|
948
|
-
|
|
949
|
-
|
|
950
|
-
|
|
951
|
-
|
|
952
|
-
|
|
953
|
-
|
|
954
|
-
|
|
955
|
-
|
|
956
|
-
|
|
957
|
-
|
|
958
|
-
|
|
959
|
-
|
|
960
|
-
|
|
961
|
-
|
|
962
|
-
|
|
963
|
-
|
|
964
|
-
|
|
965
|
-
|
|
966
|
-
|
|
967
|
-
|
|
968
|
-
|
|
969
|
-
|
|
970
|
-
|
|
971
|
-
|
|
972
|
-
:
|
|
973
|
-
:
|
|
974
|
-
used
|
|
975
|
-
|
|
976
|
-
|
|
977
|
-
|
|
978
|
-
|
|
979
|
-
|
|
980
|
-
|
|
981
|
-
|
|
982
|
-
|
|
983
|
-
|
|
984
|
-
"""
|
|
985
|
-
|
|
986
|
-
|
|
987
|
-
|
|
988
|
-
|
|
989
|
-
|
|
990
|
-
|
|
991
|
-
|
|
992
|
-
|
|
993
|
-
|
|
994
|
-
|
|
995
|
-
|
|
996
|
-
|
|
997
|
-
|
|
998
|
-
|
|
999
|
-
|
|
1000
|
-
|
|
1001
|
-
|
|
1002
|
-
|
|
1003
|
-
|
|
1004
|
-
|
|
1005
|
-
|
|
1006
|
-
|
|
1007
|
-
|
|
1008
|
-
|
|
1009
|
-
|
|
1010
|
-
return camel
|
|
1011
|
-
|
|
1012
|
-
|
|
1013
|
-
|
|
1014
|
-
|
|
1015
|
-
|
|
1016
|
-
|
|
1017
|
-
|
|
1018
|
-
|
|
1019
|
-
|
|
1020
|
-
|
|
1021
|
-
|
|
1022
|
-
|
|
1023
|
-
|
|
1024
|
-
|
|
1025
|
-
|
|
1026
|
-
|
|
1027
|
-
|
|
1028
|
-
|
|
1029
|
-
|
|
1030
|
-
|
|
1031
|
-
:
|
|
1032
|
-
:
|
|
1033
|
-
used
|
|
1034
|
-
|
|
1035
|
-
|
|
1036
|
-
if
|
|
1037
|
-
|
|
1038
|
-
|
|
1039
|
-
parts
|
|
1040
|
-
|
|
1041
|
-
|
|
1042
|
-
|
|
1043
|
-
|
|
1044
|
-
|
|
1045
|
-
|
|
1046
|
-
|
|
1047
|
-
|
|
1048
|
-
|
|
1049
|
-
|
|
1050
|
-
|
|
1051
|
-
|
|
1052
|
-
|
|
1053
|
-
|
|
1054
|
-
|
|
1055
|
-
|
|
1056
|
-
|
|
1057
|
-
|
|
1058
|
-
according to the
|
|
1059
|
-
|
|
1060
|
-
|
|
1061
|
-
|
|
1062
|
-
|
|
1063
|
-
|
|
1064
|
-
|
|
1065
|
-
|
|
1066
|
-
|
|
1067
|
-
|
|
1068
|
-
|
|
1069
|
-
|
|
1070
|
-
|
|
1071
|
-
"""
|
|
1072
|
-
|
|
1073
|
-
|
|
1074
|
-
|
|
1075
|
-
|
|
1076
|
-
|
|
1077
|
-
|
|
1078
|
-
|
|
1079
|
-
|
|
1080
|
-
|
|
1081
|
-
|
|
1082
|
-
|
|
1083
|
-
|
|
1084
|
-
|
|
1085
|
-
|
|
1086
|
-
|
|
1087
|
-
|
|
1088
|
-
|
|
1089
|
-
|
|
1090
|
-
|
|
1091
|
-
|
|
1092
|
-
|
|
1093
|
-
|
|
1094
|
-
|
|
1095
|
-
|
|
1096
|
-
|
|
1097
|
-
|
|
1098
|
-
|
|
1099
|
-
|
|
1100
|
-
|
|
1101
|
-
|
|
1102
|
-
|
|
1103
|
-
|
|
1104
|
-
|
|
1105
|
-
|
|
1106
|
-
|
|
1107
|
-
|
|
1108
|
-
|
|
1109
|
-
|
|
1110
|
-
:
|
|
1111
|
-
|
|
1112
|
-
|
|
1113
|
-
|
|
1114
|
-
|
|
1115
|
-
|
|
1116
|
-
|
|
1117
|
-
|
|
1118
|
-
|
|
1119
|
-
|
|
1120
|
-
|
|
1121
|
-
|
|
1122
|
-
|
|
1123
|
-
|
|
1124
|
-
|
|
1125
|
-
|
|
1126
|
-
|
|
1127
|
-
|
|
1128
|
-
|
|
1129
|
-
|
|
1130
|
-
|
|
1131
|
-
|
|
1132
|
-
|
|
1133
|
-
|
|
1134
|
-
|
|
1135
|
-
|
|
1136
|
-
|
|
1137
|
-
|
|
1138
|
-
|
|
1139
|
-
|
|
1140
|
-
|
|
1141
|
-
|
|
1142
|
-
|
|
1143
|
-
|
|
1144
|
-
|
|
1145
|
-
|
|
1146
|
-
|
|
1147
|
-
|
|
1148
|
-
|
|
1149
|
-
|
|
1150
|
-
|
|
1151
|
-
|
|
1152
|
-
|
|
1153
|
-
|
|
1154
|
-
|
|
1155
|
-
|
|
1156
|
-
|
|
1157
|
-
|
|
1158
|
-
|
|
1159
|
-
|
|
1160
|
-
|
|
1161
|
-
|
|
1162
|
-
|
|
1163
|
-
:
|
|
1164
|
-
:
|
|
1165
|
-
|
|
1166
|
-
|
|
1167
|
-
|
|
1168
|
-
|
|
1169
|
-
|
|
1170
|
-
|
|
1171
|
-
|
|
1172
|
-
|
|
1173
|
-
|
|
1174
|
-
|
|
1175
|
-
|
|
1176
|
-
|
|
1177
|
-
|
|
1178
|
-
|
|
1179
|
-
|
|
1180
|
-
|
|
1181
|
-
|
|
1182
|
-
|
|
1183
|
-
|
|
1184
|
-
|
|
1185
|
-
|
|
1186
|
-
|
|
1187
|
-
|
|
1188
|
-
|
|
1189
|
-
|
|
1190
|
-
|
|
1191
|
-
|
|
1192
|
-
the
|
|
1193
|
-
|
|
1194
|
-
|
|
1195
|
-
|
|
1196
|
-
|
|
1197
|
-
|
|
1198
|
-
|
|
1199
|
-
:type
|
|
1200
|
-
:param
|
|
1201
|
-
delimiter
|
|
1202
|
-
:type
|
|
1203
|
-
:param
|
|
1204
|
-
|
|
1205
|
-
:
|
|
1206
|
-
:
|
|
1207
|
-
|
|
1208
|
-
|
|
1209
|
-
|
|
1210
|
-
|
|
1211
|
-
|
|
1212
|
-
|
|
1213
|
-
|
|
1214
|
-
|
|
1215
|
-
|
|
1216
|
-
|
|
1217
|
-
|
|
1218
|
-
|
|
1219
|
-
|
|
1220
|
-
|
|
1221
|
-
|
|
1222
|
-
|
|
1223
|
-
|
|
1224
|
-
|
|
1225
|
-
|
|
1226
|
-
|
|
1227
|
-
|
|
1228
|
-
|
|
1229
|
-
|
|
1230
|
-
|
|
1231
|
-
|
|
1232
|
-
|
|
1233
|
-
|
|
1234
|
-
|
|
1235
|
-
|
|
1236
|
-
|
|
1237
|
-
|
|
1238
|
-
|
|
1239
|
-
|
|
1240
|
-
|
|
1241
|
-
|
|
1242
|
-
|
|
1243
|
-
|
|
1244
|
-
to
|
|
1245
|
-
|
|
1246
|
-
|
|
1247
|
-
|
|
1248
|
-
|
|
1249
|
-
|
|
1250
|
-
|
|
1251
|
-
|
|
1252
|
-
|
|
1253
|
-
|
|
1254
|
-
|
|
1255
|
-
|
|
1256
|
-
|
|
1257
|
-
|
|
1258
|
-
|
|
1259
|
-
|
|
1260
|
-
|
|
1261
|
-
|
|
1262
|
-
|
|
1263
|
-
|
|
1264
|
-
#
|
|
1265
|
-
#
|
|
1266
|
-
#
|
|
1267
|
-
|
|
1268
|
-
|
|
1269
|
-
|
|
1270
|
-
|
|
1271
|
-
|
|
1272
|
-
|
|
1273
|
-
#
|
|
1274
|
-
#
|
|
1275
|
-
|
|
1276
|
-
|
|
1277
|
-
|
|
1278
|
-
|
|
1279
|
-
|
|
1280
|
-
|
|
1281
|
-
|
|
1282
|
-
|
|
1283
|
-
|
|
1284
|
-
|
|
1285
|
-
|
|
1286
|
-
|
|
1287
|
-
|
|
1288
|
-
|
|
1289
|
-
|
|
1290
|
-
|
|
1291
|
-
|
|
1292
|
-
|
|
1293
|
-
|
|
1294
|
-
|
|
1295
|
-
|
|
1296
|
-
|
|
1297
|
-
|
|
1298
|
-
|
|
1299
|
-
|
|
1300
|
-
|
|
1301
|
-
|
|
1302
|
-
|
|
1303
|
-
|
|
1304
|
-
|
|
1305
|
-
|
|
1306
|
-
|
|
1307
|
-
|
|
1308
|
-
|
|
1309
|
-
|
|
1310
|
-
|
|
1311
|
-
|
|
1312
|
-
|
|
1313
|
-
|
|
1314
|
-
|
|
1315
|
-
|
|
1316
|
-
|
|
1317
|
-
|
|
1318
|
-
|
|
1319
|
-
|
|
1320
|
-
|
|
1321
|
-
|
|
1322
|
-
|
|
1323
|
-
|
|
1324
|
-
|
|
1325
|
-
|
|
1326
|
-
|
|
1327
|
-
|
|
1328
|
-
|
|
1329
|
-
|
|
1330
|
-
|
|
1331
|
-
|
|
1332
|
-
|
|
1333
|
-
|
|
1334
|
-
|
|
1335
|
-
|
|
1336
|
-
|
|
1337
|
-
|
|
1338
|
-
|
|
1339
|
-
|
|
1340
|
-
|
|
1341
|
-
|
|
1342
|
-
|
|
1343
|
-
|
|
1344
|
-
|
|
1345
|
-
mime
|
|
1346
|
-
|
|
1347
|
-
|
|
1348
|
-
|
|
1349
|
-
|
|
1350
|
-
|
|
1351
|
-
|
|
1352
|
-
|
|
1353
|
-
|
|
1354
|
-
|
|
1355
|
-
|
|
1356
|
-
|
|
1357
|
-
|
|
1358
|
-
|
|
1359
|
-
|
|
1360
|
-
|
|
1361
|
-
|
|
1362
|
-
|
|
1363
|
-
|
|
1364
|
-
|
|
1365
|
-
|
|
1366
|
-
|
|
1367
|
-
|
|
1368
|
-
|
|
1369
|
-
|
|
1370
|
-
|
|
1371
|
-
|
|
1372
|
-
|
|
1373
|
-
|
|
1374
|
-
|
|
1375
|
-
|
|
1376
|
-
|
|
1377
|
-
|
|
1378
|
-
|
|
1379
|
-
|
|
1380
|
-
|
|
1381
|
-
#
|
|
1382
|
-
|
|
1383
|
-
|
|
1384
|
-
|
|
1385
|
-
|
|
1386
|
-
|
|
1387
|
-
|
|
1388
|
-
|
|
1389
|
-
|
|
1390
|
-
#
|
|
1391
|
-
|
|
1392
|
-
|
|
1393
|
-
|
|
1394
|
-
|
|
1395
|
-
|
|
1396
|
-
|
|
1397
|
-
|
|
1398
|
-
|
|
1399
|
-
#
|
|
1400
|
-
|
|
1401
|
-
|
|
1402
|
-
|
|
1403
|
-
|
|
1404
|
-
#
|
|
1405
|
-
#
|
|
1406
|
-
|
|
1407
|
-
|
|
1408
|
-
|
|
1409
|
-
|
|
1410
|
-
|
|
1411
|
-
|
|
1412
|
-
|
|
1413
|
-
|
|
1414
|
-
|
|
1415
|
-
|
|
1416
|
-
|
|
1417
|
-
|
|
1418
|
-
|
|
1419
|
-
#
|
|
1420
|
-
#
|
|
1421
|
-
|
|
1422
|
-
|
|
1423
|
-
|
|
1424
|
-
|
|
1425
|
-
|
|
1426
|
-
|
|
1427
|
-
|
|
1428
|
-
|
|
1429
|
-
|
|
1430
|
-
|
|
1431
|
-
|
|
1432
|
-
|
|
1433
|
-
|
|
1434
|
-
|
|
1435
|
-
|
|
1436
|
-
|
|
1437
|
-
|
|
1438
|
-
|
|
1439
|
-
|
|
1440
|
-
|
|
1441
|
-
|
|
1442
|
-
|
|
1443
|
-
|
|
1444
|
-
|
|
1445
|
-
|
|
1446
|
-
|
|
1447
|
-
|
|
1448
|
-
|
|
1449
|
-
|
|
1450
|
-
|
|
1451
|
-
|
|
1452
|
-
|
|
1453
|
-
|
|
1454
|
-
|
|
1455
|
-
#
|
|
1456
|
-
#
|
|
1457
|
-
|
|
1458
|
-
|
|
1459
|
-
|
|
1460
|
-
|
|
1461
|
-
|
|
1462
|
-
|
|
1463
|
-
|
|
1464
|
-
|
|
1465
|
-
|
|
1466
|
-
|
|
1467
|
-
|
|
1468
|
-
|
|
1469
|
-
|
|
1470
|
-
|
|
1471
|
-
|
|
1472
|
-
|
|
1473
|
-
|
|
1474
|
-
|
|
1475
|
-
|
|
1476
|
-
|
|
1477
|
-
|
|
1478
|
-
|
|
1479
|
-
|
|
1480
|
-
|
|
1481
|
-
|
|
1482
|
-
|
|
1483
|
-
|
|
1484
|
-
|
|
1485
|
-
|
|
1486
|
-
|
|
1487
|
-
|
|
1488
|
-
|
|
1489
|
-
|
|
1490
|
-
|
|
1491
|
-
|
|
1492
|
-
|
|
1493
|
-
|
|
1494
|
-
|
|
1495
|
-
|
|
1496
|
-
|
|
1497
|
-
|
|
1498
|
-
|
|
1499
|
-
|
|
1500
|
-
|
|
1501
|
-
|
|
1502
|
-
|
|
1503
|
-
|
|
1504
|
-
|
|
1505
|
-
|
|
1506
|
-
|
|
1507
|
-
|
|
1508
|
-
|
|
1509
|
-
|
|
1510
|
-
|
|
1511
|
-
|
|
1512
|
-
#
|
|
1513
|
-
#
|
|
1514
|
-
|
|
1515
|
-
|
|
1516
|
-
|
|
1517
|
-
|
|
1518
|
-
|
|
1519
|
-
|
|
1520
|
-
|
|
1521
|
-
headers
|
|
1522
|
-
|
|
1523
|
-
headers.
|
|
1524
|
-
|
|
1525
|
-
|
|
1526
|
-
#
|
|
1527
|
-
#
|
|
1528
|
-
#
|
|
1529
|
-
|
|
1530
|
-
|
|
1531
|
-
|
|
1532
|
-
|
|
1533
|
-
|
|
1534
|
-
#
|
|
1535
|
-
#
|
|
1536
|
-
#
|
|
1537
|
-
|
|
1538
|
-
|
|
1539
|
-
|
|
1540
|
-
|
|
1541
|
-
|
|
1542
|
-
|
|
1543
|
-
#
|
|
1544
|
-
#
|
|
1545
|
-
|
|
1546
|
-
|
|
1547
|
-
|
|
1548
|
-
|
|
1549
|
-
|
|
1550
|
-
|
|
1551
|
-
|
|
1552
|
-
|
|
1553
|
-
|
|
1554
|
-
|
|
1555
|
-
|
|
1556
|
-
|
|
1557
|
-
|
|
1558
|
-
|
|
1559
|
-
|
|
1560
|
-
|
|
1561
|
-
|
|
1562
|
-
|
|
1563
|
-
|
|
1564
|
-
|
|
1565
|
-
#
|
|
1566
|
-
#
|
|
1567
|
-
|
|
1568
|
-
|
|
1569
|
-
|
|
1570
|
-
|
|
1571
|
-
|
|
1572
|
-
|
|
1573
|
-
#
|
|
1574
|
-
#
|
|
1575
|
-
|
|
1576
|
-
|
|
1577
|
-
|
|
1578
|
-
|
|
1579
|
-
|
|
1580
|
-
|
|
1581
|
-
|
|
1582
|
-
|
|
1583
|
-
|
|
1584
|
-
|
|
1585
|
-
|
|
1586
|
-
|
|
1587
|
-
|
|
1588
|
-
|
|
1589
|
-
|
|
1590
|
-
|
|
1591
|
-
|
|
1592
|
-
|
|
1593
|
-
|
|
1594
|
-
|
|
1595
|
-
|
|
1596
|
-
|
|
1597
|
-
|
|
1598
|
-
|
|
1599
|
-
|
|
1600
|
-
|
|
1601
|
-
|
|
1602
|
-
|
|
1603
|
-
|
|
1604
|
-
|
|
1605
|
-
|
|
1606
|
-
|
|
1607
|
-
|
|
1608
|
-
|
|
1609
|
-
|
|
1610
|
-
|
|
1611
|
-
|
|
1612
|
-
|
|
1613
|
-
|
|
1614
|
-
|
|
1615
|
-
|
|
1616
|
-
|
|
1617
|
-
|
|
1618
|
-
|
|
1619
|
-
|
|
1620
|
-
|
|
1621
|
-
|
|
1622
|
-
|
|
1623
|
-
|
|
1624
|
-
|
|
1625
|
-
|
|
1626
|
-
|
|
1627
|
-
|
|
1628
|
-
|
|
1629
|
-
|
|
1630
|
-
|
|
1631
|
-
|
|
1632
|
-
|
|
1633
|
-
|
|
1634
|
-
|
|
1635
|
-
|
|
1636
|
-
|
|
1637
|
-
|
|
1638
|
-
|
|
1639
|
-
|
|
1640
|
-
|
|
1641
|
-
|
|
1642
|
-
|
|
1643
|
-
|
|
1644
|
-
|
|
1645
|
-
|
|
1646
|
-
|
|
1647
|
-
|
|
1648
|
-
|
|
1649
|
-
|
|
1650
|
-
|
|
1651
|
-
|
|
1652
|
-
|
|
1653
|
-
|
|
1654
|
-
|
|
1655
|
-
|
|
1656
|
-
|
|
1657
|
-
|
|
1658
|
-
|
|
1659
|
-
#
|
|
1660
|
-
|
|
1661
|
-
|
|
1662
|
-
|
|
1663
|
-
|
|
1664
|
-
|
|
1665
|
-
|
|
1666
|
-
|
|
1667
|
-
|
|
1668
|
-
|
|
1669
|
-
|
|
1670
|
-
#
|
|
1671
|
-
#
|
|
1672
|
-
|
|
1673
|
-
|
|
1674
|
-
|
|
1675
|
-
|
|
1676
|
-
|
|
1677
|
-
|
|
1678
|
-
#
|
|
1679
|
-
#
|
|
1680
|
-
|
|
1681
|
-
|
|
1682
|
-
|
|
1683
|
-
|
|
1684
|
-
|
|
1685
|
-
|
|
1686
|
-
#
|
|
1687
|
-
#
|
|
1688
|
-
|
|
1689
|
-
|
|
1690
|
-
|
|
1691
|
-
|
|
1692
|
-
|
|
1693
|
-
|
|
1694
|
-
|
|
1695
|
-
|
|
1696
|
-
|
|
1697
|
-
|
|
1698
|
-
|
|
1699
|
-
|
|
1700
|
-
|
|
1701
|
-
#
|
|
1702
|
-
|
|
1703
|
-
|
|
1704
|
-
|
|
1705
|
-
|
|
1706
|
-
|
|
1707
|
-
|
|
1708
|
-
|
|
1709
|
-
|
|
1710
|
-
#
|
|
1711
|
-
|
|
1712
|
-
|
|
1713
|
-
|
|
1714
|
-
|
|
1715
|
-
|
|
1716
|
-
|
|
1717
|
-
#
|
|
1718
|
-
|
|
1719
|
-
|
|
1720
|
-
|
|
1721
|
-
|
|
1722
|
-
|
|
1723
|
-
|
|
1724
|
-
|
|
1725
|
-
|
|
1726
|
-
#
|
|
1727
|
-
|
|
1728
|
-
|
|
1729
|
-
|
|
1730
|
-
|
|
1731
|
-
|
|
1732
|
-
|
|
1733
|
-
|
|
1734
|
-
|
|
1735
|
-
#
|
|
1736
|
-
|
|
1737
|
-
|
|
1738
|
-
|
|
1739
|
-
|
|
1740
|
-
|
|
1741
|
-
|
|
1742
|
-
|
|
1743
|
-
|
|
1744
|
-
|
|
1745
|
-
|
|
1746
|
-
|
|
1747
|
-
|
|
1748
|
-
|
|
1749
|
-
|
|
1750
|
-
#
|
|
1751
|
-
|
|
1752
|
-
|
|
1753
|
-
|
|
1754
|
-
#
|
|
1755
|
-
#
|
|
1756
|
-
|
|
1757
|
-
|
|
1758
|
-
|
|
1759
|
-
|
|
1760
|
-
|
|
1761
|
-
|
|
1762
|
-
|
|
1763
|
-
|
|
1764
|
-
|
|
1765
|
-
|
|
1766
|
-
|
|
1767
|
-
|
|
1768
|
-
|
|
1769
|
-
|
|
1770
|
-
|
|
1771
|
-
|
|
1772
|
-
|
|
1773
|
-
|
|
1774
|
-
|
|
1775
|
-
|
|
1776
|
-
|
|
1777
|
-
|
|
1778
|
-
|
|
1779
|
-
|
|
1780
|
-
#
|
|
1781
|
-
|
|
1782
|
-
|
|
1783
|
-
|
|
1784
|
-
|
|
1785
|
-
|
|
1786
|
-
|
|
1787
|
-
|
|
1788
|
-
|
|
1789
|
-
|
|
1790
|
-
|
|
1791
|
-
|
|
1792
|
-
|
|
1793
|
-
|
|
1794
|
-
|
|
1795
|
-
|
|
1796
|
-
|
|
1797
|
-
|
|
1798
|
-
)
|
|
1799
|
-
|
|
1800
|
-
|
|
1801
|
-
|
|
1802
|
-
|
|
1803
|
-
|
|
1804
|
-
|
|
1805
|
-
|
|
1806
|
-
|
|
1807
|
-
|
|
1808
|
-
|
|
1809
|
-
|
|
1810
|
-
|
|
1811
|
-
|
|
1812
|
-
|
|
1813
|
-
|
|
1814
|
-
|
|
1815
|
-
|
|
1816
|
-
|
|
1817
|
-
|
|
1818
|
-
:
|
|
1819
|
-
|
|
1820
|
-
|
|
1821
|
-
|
|
1822
|
-
|
|
1823
|
-
|
|
1824
|
-
|
|
1825
|
-
|
|
1826
|
-
|
|
1827
|
-
|
|
1828
|
-
|
|
1829
|
-
|
|
1830
|
-
#
|
|
1831
|
-
|
|
1832
|
-
|
|
1833
|
-
|
|
1834
|
-
|
|
1835
|
-
|
|
1836
|
-
|
|
1837
|
-
|
|
1838
|
-
|
|
1839
|
-
|
|
1840
|
-
|
|
1841
|
-
|
|
1842
|
-
|
|
1843
|
-
|
|
1844
|
-
|
|
1845
|
-
#
|
|
1846
|
-
|
|
1847
|
-
|
|
1848
|
-
|
|
1849
|
-
|
|
1850
|
-
|
|
1851
|
-
|
|
1852
|
-
|
|
1853
|
-
|
|
1854
|
-
|
|
1855
|
-
#
|
|
1856
|
-
|
|
1857
|
-
|
|
1858
|
-
|
|
1859
|
-
|
|
1860
|
-
|
|
1861
|
-
|
|
1862
|
-
|
|
1863
|
-
|
|
1864
|
-
|
|
1865
|
-
|
|
1866
|
-
|
|
1867
|
-
|
|
1868
|
-
|
|
1869
|
-
|
|
1870
|
-
|
|
1871
|
-
|
|
1872
|
-
|
|
1873
|
-
|
|
1874
|
-
|
|
1875
|
-
|
|
1876
|
-
|
|
1877
|
-
|
|
1878
|
-
|
|
1879
|
-
|
|
1880
|
-
#
|
|
1881
|
-
|
|
1882
|
-
|
|
1883
|
-
|
|
1884
|
-
|
|
1885
|
-
|
|
1886
|
-
|
|
1887
|
-
|
|
1888
|
-
|
|
1889
|
-
|
|
1890
|
-
|
|
1891
|
-
|
|
1892
|
-
|
|
1893
|
-
|
|
1894
|
-
|
|
1895
|
-
|
|
1896
|
-
|
|
1897
|
-
|
|
1898
|
-
|
|
1899
|
-
|
|
1900
|
-
|
|
1901
|
-
|
|
1902
|
-
|
|
1903
|
-
|
|
1904
|
-
|
|
1905
|
-
|
|
1906
|
-
|
|
1907
|
-
|
|
1908
|
-
|
|
1909
|
-
|
|
1910
|
-
|
|
1911
|
-
|
|
1912
|
-
|
|
1913
|
-
|
|
1914
|
-
|
|
1915
|
-
|
|
1916
|
-
|
|
1917
|
-
|
|
1918
|
-
|
|
1919
|
-
|
|
1920
|
-
|
|
1921
|
-
|
|
1922
|
-
|
|
1923
|
-
|
|
1924
|
-
:type
|
|
1925
|
-
:param
|
|
1926
|
-
|
|
1927
|
-
:type
|
|
1928
|
-
:param
|
|
1929
|
-
|
|
1930
|
-
|
|
1931
|
-
|
|
1932
|
-
|
|
1933
|
-
:
|
|
1934
|
-
|
|
1935
|
-
|
|
1936
|
-
|
|
1937
|
-
|
|
1938
|
-
|
|
1939
|
-
|
|
1940
|
-
|
|
1941
|
-
|
|
1942
|
-
|
|
1943
|
-
|
|
1944
|
-
|
|
1945
|
-
|
|
1946
|
-
|
|
1947
|
-
|
|
1948
|
-
|
|
1949
|
-
|
|
1950
|
-
|
|
1951
|
-
|
|
1952
|
-
|
|
1953
|
-
|
|
1954
|
-
|
|
1955
|
-
|
|
1956
|
-
|
|
1957
|
-
|
|
1958
|
-
|
|
1959
|
-
|
|
1960
|
-
|
|
1961
|
-
|
|
1962
|
-
|
|
1963
|
-
|
|
1964
|
-
|
|
1965
|
-
|
|
1966
|
-
#
|
|
1967
|
-
|
|
1968
|
-
|
|
1969
|
-
|
|
1970
|
-
|
|
1971
|
-
#
|
|
1972
|
-
#
|
|
1973
|
-
|
|
1974
|
-
|
|
1975
|
-
|
|
1976
|
-
|
|
1977
|
-
|
|
1978
|
-
)
|
|
1979
|
-
|
|
1980
|
-
#
|
|
1981
|
-
#
|
|
1982
|
-
|
|
1983
|
-
|
|
1984
|
-
|
|
1985
|
-
|
|
1986
|
-
|
|
1987
|
-
|
|
1988
|
-
|
|
1989
|
-
|
|
1990
|
-
|
|
1991
|
-
|
|
1992
|
-
|
|
1993
|
-
|
|
1994
|
-
|
|
1995
|
-
#
|
|
1996
|
-
|
|
1997
|
-
|
|
1998
|
-
|
|
1999
|
-
|
|
2000
|
-
|
|
2001
|
-
|
|
2002
|
-
|
|
2003
|
-
|
|
2004
|
-
|
|
2005
|
-
return final
|
|
2006
|
-
|
|
2007
|
-
|
|
2008
|
-
|
|
2009
|
-
|
|
2010
|
-
|
|
2011
|
-
|
|
2012
|
-
|
|
2013
|
-
|
|
2014
|
-
|
|
2015
|
-
|
|
2016
|
-
|
|
2017
|
-
|
|
2018
|
-
|
|
2019
|
-
|
|
2020
|
-
|
|
2021
|
-
|
|
2022
|
-
|
|
2023
|
-
|
|
2024
|
-
|
|
2025
|
-
|
|
2026
|
-
|
|
2027
|
-
|
|
2028
|
-
|
|
2029
|
-
|
|
2030
|
-
|
|
2031
|
-
|
|
2032
|
-
|
|
2033
|
-
|
|
2034
|
-
|
|
2035
|
-
|
|
2036
|
-
|
|
2037
|
-
|
|
2038
|
-
|
|
2039
|
-
|
|
2040
|
-
|
|
2041
|
-
|
|
2042
|
-
|
|
2043
|
-
|
|
2044
|
-
|
|
2045
|
-
|
|
2046
|
-
|
|
2047
|
-
|
|
2048
|
-
|
|
2049
|
-
|
|
2050
|
-
|
|
2051
|
-
|
|
2052
|
-
|
|
2053
|
-
|
|
2054
|
-
|
|
2055
|
-
|
|
2056
|
-
|
|
2057
|
-
|
|
2058
|
-
|
|
2059
|
-
|
|
2060
|
-
|
|
2061
|
-
|
|
2062
|
-
|
|
2063
|
-
|
|
2064
|
-
|
|
2065
|
-
|
|
2066
|
-
|
|
2067
|
-
|
|
2068
|
-
|
|
2069
|
-
|
|
2070
|
-
|
|
2071
|
-
|
|
2072
|
-
#
|
|
2073
|
-
#
|
|
2074
|
-
value
|
|
2075
|
-
|
|
2076
|
-
|
|
2077
|
-
|
|
2078
|
-
|
|
2079
|
-
|
|
2080
|
-
|
|
2081
|
-
|
|
2082
|
-
|
|
2083
|
-
|
|
2084
|
-
|
|
2085
|
-
|
|
2086
|
-
|
|
2087
|
-
|
|
2088
|
-
|
|
2089
|
-
|
|
2090
|
-
|
|
2091
|
-
|
|
2092
|
-
|
|
2093
|
-
|
|
2094
|
-
|
|
2095
|
-
|
|
2096
|
-
|
|
2097
|
-
|
|
2098
|
-
|
|
2099
|
-
|
|
2100
|
-
|
|
2101
|
-
|
|
2102
|
-
|
|
2103
|
-
|
|
2104
|
-
|
|
2105
|
-
|
|
2106
|
-
|
|
2107
|
-
|
|
2108
|
-
|
|
2109
|
-
|
|
2110
|
-
|
|
2111
|
-
|
|
2112
|
-
|
|
2113
|
-
|
|
2114
|
-
|
|
2115
|
-
return
|
|
2116
|
-
|
|
2117
|
-
return
|
|
2118
|
-
|
|
2119
|
-
|
|
2120
|
-
def
|
|
2121
|
-
|
|
2122
|
-
|
|
2123
|
-
|
|
2124
|
-
|
|
2125
|
-
|
|
2126
|
-
|
|
2127
|
-
|
|
2128
|
-
|
|
2129
|
-
|
|
2130
|
-
|
|
2131
|
-
|
|
2132
|
-
|
|
2133
|
-
|
|
2134
|
-
|
|
2135
|
-
|
|
2136
|
-
|
|
2137
|
-
|
|
2138
|
-
|
|
2139
|
-
|
|
2140
|
-
|
|
2141
|
-
|
|
2142
|
-
|
|
2143
|
-
|
|
2144
|
-
|
|
2145
|
-
|
|
2146
|
-
|
|
2147
|
-
|
|
2148
|
-
|
|
2149
|
-
|
|
2150
|
-
|
|
2151
|
-
|
|
2152
|
-
|
|
2153
|
-
|
|
2154
|
-
|
|
2155
|
-
|
|
2156
|
-
|
|
2157
|
-
|
|
2158
|
-
def
|
|
2159
|
-
|
|
2160
|
-
|
|
2161
|
-
|
|
2162
|
-
|
|
2163
|
-
|
|
2164
|
-
|
|
2165
|
-
|
|
2166
|
-
|
|
2167
|
-
|
|
2168
|
-
|
|
2169
|
-
|
|
2170
|
-
|
|
2171
|
-
|
|
2172
|
-
|
|
2173
|
-
|
|
2174
|
-
|
|
2175
|
-
|
|
2176
|
-
|
|
2177
|
-
|
|
2178
|
-
|
|
2179
|
-
|
|
2180
|
-
|
|
2181
|
-
def
|
|
2182
|
-
|
|
2183
|
-
|
|
2184
|
-
|
|
2185
|
-
|
|
2186
|
-
|
|
2187
|
-
|
|
2188
|
-
|
|
2189
|
-
|
|
2190
|
-
|
|
2191
|
-
|
|
2192
|
-
|
|
2193
|
-
|
|
2194
|
-
|
|
2195
|
-
def
|
|
2196
|
-
|
|
2197
|
-
|
|
2198
|
-
|
|
2199
|
-
|
|
2200
|
-
|
|
2201
|
-
|
|
2202
|
-
|
|
2203
|
-
|
|
2204
|
-
|
|
2205
|
-
|
|
2206
|
-
|
|
2207
|
-
|
|
2208
|
-
|
|
2209
|
-
|
|
2210
|
-
function
|
|
2211
|
-
|
|
2212
|
-
|
|
2213
|
-
|
|
2214
|
-
|
|
2215
|
-
|
|
2216
|
-
|
|
2217
|
-
|
|
2218
|
-
def
|
|
2219
|
-
|
|
2220
|
-
|
|
2221
|
-
|
|
2222
|
-
|
|
2223
|
-
|
|
2224
|
-
|
|
2225
|
-
|
|
2226
|
-
|
|
2227
|
-
|
|
2228
|
-
|
|
2229
|
-
|
|
2230
|
-
|
|
2231
|
-
|
|
2232
|
-
|
|
2233
|
-
function
|
|
2234
|
-
|
|
2235
|
-
|
|
2236
|
-
|
|
2237
|
-
|
|
2238
|
-
|
|
2239
|
-
|
|
2240
|
-
|
|
2241
|
-
|
|
2242
|
-
|
|
2243
|
-
|
|
2244
|
-
|
|
2245
|
-
|
|
2246
|
-
|
|
2247
|
-
|
|
2248
|
-
|
|
2249
|
-
|
|
2250
|
-
|
|
2251
|
-
|
|
2252
|
-
|
|
2253
|
-
|
|
2254
|
-
|
|
2255
|
-
|
|
2256
|
-
|
|
2257
|
-
|
|
2258
|
-
|
|
2259
|
-
|
|
2260
|
-
|
|
2261
|
-
|
|
2262
|
-
|
|
2263
|
-
|
|
2264
|
-
|
|
2265
|
-
|
|
2266
|
-
|
|
2267
|
-
|
|
2268
|
-
|
|
2269
|
-
|
|
2270
|
-
|
|
2271
|
-
|
|
2272
|
-
|
|
2273
|
-
|
|
2274
|
-
|
|
2275
|
-
#
|
|
2276
|
-
|
|
2277
|
-
|
|
2278
|
-
|
|
2279
|
-
|
|
2280
|
-
|
|
2281
|
-
|
|
2282
|
-
|
|
2283
|
-
|
|
2284
|
-
|
|
2285
|
-
|
|
2286
|
-
|
|
2287
|
-
|
|
2288
|
-
|
|
2289
|
-
|
|
2290
|
-
|
|
2291
|
-
|
|
2292
|
-
|
|
2293
|
-
|
|
2294
|
-
|
|
2295
|
-
|
|
2296
|
-
|
|
2297
|
-
|
|
2298
|
-
|
|
2299
|
-
|
|
2300
|
-
|
|
2301
|
-
|
|
2302
|
-
|
|
2303
|
-
|
|
2304
|
-
|
|
2305
|
-
|
|
2306
|
-
|
|
2307
|
-
|
|
2308
|
-
|
|
2309
|
-
|
|
2310
|
-
|
|
2311
|
-
|
|
2312
|
-
|
|
2313
|
-
|
|
2314
|
-
|
|
2315
|
-
)
|
|
2316
|
-
|
|
2317
|
-
|
|
2318
|
-
def
|
|
2319
|
-
|
|
2320
|
-
|
|
2321
|
-
|
|
2322
|
-
|
|
2323
|
-
|
|
2324
|
-
|
|
2325
|
-
|
|
2326
|
-
|
|
2327
|
-
|
|
2328
|
-
|
|
2329
|
-
|
|
2330
|
-
|
|
2331
|
-
|
|
2332
|
-
|
|
2333
|
-
|
|
2334
|
-
|
|
2335
|
-
|
|
2336
|
-
|
|
2337
|
-
|
|
2338
|
-
|
|
2339
|
-
|
|
2340
|
-
|
|
2341
|
-
|
|
2342
|
-
|
|
2343
|
-
|
|
2344
|
-
|
|
2345
|
-
|
|
2346
|
-
|
|
2347
|
-
|
|
2348
|
-
|
|
2349
|
-
|
|
2350
|
-
|
|
2351
|
-
|
|
2352
|
-
|
|
2353
|
-
|
|
2354
|
-
|
|
2355
|
-
|
|
2356
|
-
|
|
2357
|
-
|
|
2358
|
-
|
|
2359
|
-
|
|
2360
|
-
|
|
2361
|
-
|
|
2362
|
-
|
|
2363
|
-
|
|
2364
|
-
|
|
2365
|
-
|
|
2366
|
-
|
|
2367
|
-
|
|
2368
|
-
|
|
2369
|
-
|
|
2370
|
-
|
|
2371
|
-
|
|
2372
|
-
yield
|
|
2373
|
-
|
|
2374
|
-
|
|
2375
|
-
|
|
2376
|
-
|
|
2377
|
-
|
|
2378
|
-
|
|
2379
|
-
|
|
2380
|
-
|
|
2381
|
-
|
|
2382
|
-
|
|
2383
|
-
|
|
2384
|
-
|
|
2385
|
-
|
|
2386
|
-
|
|
2387
|
-
|
|
2388
|
-
|
|
2389
|
-
|
|
2390
|
-
|
|
2391
|
-
|
|
2392
|
-
|
|
2393
|
-
|
|
2394
|
-
|
|
2395
|
-
|
|
2396
|
-
|
|
2397
|
-
|
|
2398
|
-
|
|
2399
|
-
|
|
2400
|
-
|
|
2401
|
-
|
|
2402
|
-
|
|
2403
|
-
|
|
2404
|
-
|
|
2405
|
-
|
|
2406
|
-
|
|
2407
|
-
|
|
2408
|
-
|
|
2409
|
-
|
|
2410
|
-
|
|
2411
|
-
|
|
2412
|
-
|
|
2413
|
-
|
|
2414
|
-
|
|
2415
|
-
|
|
2416
|
-
|
|
2417
|
-
|
|
2418
|
-
|
|
2419
|
-
|
|
2420
|
-
|
|
2421
|
-
|
|
2422
|
-
|
|
2423
|
-
|
|
2424
|
-
|
|
2425
|
-
|
|
2426
|
-
|
|
2427
|
-
|
|
2428
|
-
|
|
2429
|
-
|
|
2430
|
-
|
|
2431
|
-
|
|
2432
|
-
|
|
2433
|
-
|
|
2434
|
-
|
|
2435
|
-
|
|
2436
|
-
|
|
2437
|
-
if
|
|
2438
|
-
|
|
2439
|
-
|
|
2440
|
-
|
|
2441
|
-
|
|
2442
|
-
|
|
2443
|
-
|
|
2444
|
-
|
|
2445
|
-
|
|
2446
|
-
|
|
2447
|
-
|
|
2448
|
-
|
|
2449
|
-
|
|
2450
|
-
|
|
2451
|
-
|
|
2452
|
-
|
|
2453
|
-
|
|
2454
|
-
|
|
2455
|
-
|
|
2456
|
-
|
|
2457
|
-
|
|
2458
|
-
|
|
2459
|
-
|
|
2460
|
-
|
|
2461
|
-
|
|
2462
|
-
|
|
2463
|
-
|
|
2464
|
-
|
|
2465
|
-
|
|
2466
|
-
|
|
2467
|
-
|
|
2468
|
-
|
|
2469
|
-
|
|
2470
|
-
|
|
2471
|
-
|
|
2472
|
-
|
|
2473
|
-
|
|
2474
|
-
|
|
2475
|
-
|
|
2476
|
-
|
|
2477
|
-
|
|
2478
|
-
|
|
2479
|
-
|
|
2480
|
-
|
|
2481
|
-
|
|
2482
|
-
self
|
|
2483
|
-
|
|
2484
|
-
|
|
2485
|
-
|
|
2486
|
-
|
|
2487
|
-
|
|
2488
|
-
|
|
2489
|
-
|
|
2490
|
-
|
|
2491
|
-
|
|
2492
|
-
|
|
2493
|
-
|
|
2494
|
-
def __init__(self, *args, **kwargs):
|
|
2495
|
-
|
|
2496
|
-
|
|
2497
|
-
|
|
2498
|
-
|
|
2499
|
-
|
|
2500
|
-
|
|
2501
|
-
if self.
|
|
2502
|
-
return
|
|
2503
|
-
|
|
1
|
+
#!/usr/bin/python
|
|
2
|
+
# -*- coding: utf-8 -*-
|
|
3
|
+
|
|
4
|
+
# Hive Appier Framework
|
|
5
|
+
# Copyright (c) 2008-2024 Hive Solutions Lda.
|
|
6
|
+
#
|
|
7
|
+
# This file is part of Hive Appier Framework.
|
|
8
|
+
#
|
|
9
|
+
# Hive Appier Framework is free software: you can redistribute it and/or modify
|
|
10
|
+
# it under the terms of the Apache License as published by the Apache
|
|
11
|
+
# Foundation, either version 2.0 of the License, or (at your option) any
|
|
12
|
+
# later version.
|
|
13
|
+
#
|
|
14
|
+
# Hive Appier Framework is distributed in the hope that it will be useful,
|
|
15
|
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
16
|
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
17
|
+
# Apache License for more details.
|
|
18
|
+
#
|
|
19
|
+
# You should have received a copy of the Apache License along with
|
|
20
|
+
# Hive Appier Framework. If not, see <http://www.apache.org/licenses/>.
|
|
21
|
+
|
|
22
|
+
"""appier.util
|
|
23
|
+
|
|
24
|
+
General-purpose utilities used across the Appier code-base.
|
|
25
|
+
Offers helpers for I/O, date handling, MIME types, hashing and
|
|
26
|
+
thread-safe counters. Includes convenience wrappers for subprocess,
|
|
27
|
+
virtualenv and dynamic module import. Widely depended on by other
|
|
28
|
+
modules; keep changes here backward-compatible.
|
|
29
|
+
"""
|
|
30
|
+
|
|
31
|
+
__author__ = "João Magalhães <joamag@hive.pt>"
|
|
32
|
+
""" The author(s) of the module """
|
|
33
|
+
|
|
34
|
+
__copyright__ = "Copyright (c) 2008-2024 Hive Solutions Lda."
|
|
35
|
+
""" The copyright for the module """
|
|
36
|
+
|
|
37
|
+
__license__ = "Apache License, Version 2.0"
|
|
38
|
+
""" The license for the module """
|
|
39
|
+
|
|
40
|
+
import os
|
|
41
|
+
import re
|
|
42
|
+
import sys
|
|
43
|
+
import json
|
|
44
|
+
import copy
|
|
45
|
+
import uuid
|
|
46
|
+
import types
|
|
47
|
+
import locale
|
|
48
|
+
import hashlib
|
|
49
|
+
import calendar
|
|
50
|
+
import datetime
|
|
51
|
+
import warnings
|
|
52
|
+
import functools
|
|
53
|
+
import threading
|
|
54
|
+
import mimetypes
|
|
55
|
+
import contextlib
|
|
56
|
+
import subprocess
|
|
57
|
+
import multiprocessing
|
|
58
|
+
|
|
59
|
+
from . import smtp
|
|
60
|
+
from . import config
|
|
61
|
+
from . import legacy
|
|
62
|
+
from . import common
|
|
63
|
+
from . import defines
|
|
64
|
+
from . import exceptions
|
|
65
|
+
|
|
66
|
+
CREATION_COUNTER = 0
|
|
67
|
+
""" The global static creation counter value that
|
|
68
|
+
will be used to create an order in the declaration
|
|
69
|
+
of attributes for a set of classes """
|
|
70
|
+
|
|
71
|
+
FIRST_CAP_REGEX = re.compile(r"(.)([A-Z][a-z]+)")
|
|
72
|
+
""" Regular expression that ensures that the first
|
|
73
|
+
token of each camel string is properly capitalized """
|
|
74
|
+
|
|
75
|
+
ALL_CAP_REGEX = re.compile(r"([a-z0-9])([A-Z])")
|
|
76
|
+
""" The generalized transition from lower case to
|
|
77
|
+
upper case letter regex that will provide a way of
|
|
78
|
+
putting the underscore in the middle of the transition """
|
|
79
|
+
|
|
80
|
+
SORT_MAP = {"1": 1, "-1": -1, "ascending": 1, "descending": -1}
|
|
81
|
+
""" The map associating the normalized (text) way of
|
|
82
|
+
representing sorting with the current infra-structure
|
|
83
|
+
number way of representing the same information """
|
|
84
|
+
|
|
85
|
+
SEQUENCE_TYPES = (list, tuple)
|
|
86
|
+
""" The sequence defining the various types that are
|
|
87
|
+
considered to be sequence based for python """
|
|
88
|
+
|
|
89
|
+
defines = defines
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
def to_limit(limit_s):
|
|
93
|
+
limit = int(limit_s)
|
|
94
|
+
if limit < 0:
|
|
95
|
+
return 0
|
|
96
|
+
return limit
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def to_find(find_s):
|
|
100
|
+
if not find_s:
|
|
101
|
+
return []
|
|
102
|
+
find_t = type(find_s)
|
|
103
|
+
if find_t == list:
|
|
104
|
+
return find_s
|
|
105
|
+
return [find_s]
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
def to_sort(sort_s):
|
|
109
|
+
sort_l = []
|
|
110
|
+
sorts = sort_s.split(",")
|
|
111
|
+
for sort_i in sorts:
|
|
112
|
+
values = sort_i.split(":", 1)
|
|
113
|
+
if len(values) == 1:
|
|
114
|
+
values.append("descending")
|
|
115
|
+
name, direction = values
|
|
116
|
+
if name == "default":
|
|
117
|
+
return None
|
|
118
|
+
values[1] = SORT_MAP.get(direction, 1)
|
|
119
|
+
sort_l.append(tuple(values))
|
|
120
|
+
return sort_l
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
ALIAS = {
|
|
124
|
+
"context": "find_d",
|
|
125
|
+
"filters": "find_d",
|
|
126
|
+
"filters[]": "find_d",
|
|
127
|
+
"filter_def": "find_d",
|
|
128
|
+
"filter_string": "find_s",
|
|
129
|
+
"filter_name": "find_n",
|
|
130
|
+
"filter_operator": "find_o",
|
|
131
|
+
"insensitive": "find_i",
|
|
132
|
+
"order": "sort",
|
|
133
|
+
"offset": "skip",
|
|
134
|
+
"start_record": "skip",
|
|
135
|
+
"number_records": "limit",
|
|
136
|
+
}
|
|
137
|
+
""" The map containing the various attribute alias
|
|
138
|
+
between the normalized manned and the appier manner """
|
|
139
|
+
|
|
140
|
+
FIND_TYPES = dict(
|
|
141
|
+
skip=int,
|
|
142
|
+
limit=to_limit,
|
|
143
|
+
find_s=legacy.UNICODE,
|
|
144
|
+
find_d=to_find,
|
|
145
|
+
find_i=bool,
|
|
146
|
+
find_t=legacy.UNICODE,
|
|
147
|
+
find_n=legacy.UNICODE,
|
|
148
|
+
find_o=legacy.UNICODE,
|
|
149
|
+
sort=to_sort,
|
|
150
|
+
meta=bool,
|
|
151
|
+
fields=list,
|
|
152
|
+
)
|
|
153
|
+
""" The map associating the various find fields with
|
|
154
|
+
their respective types, note that in case a special
|
|
155
|
+
conversion operation is required the associated value
|
|
156
|
+
may represent a conversion function instead """
|
|
157
|
+
|
|
158
|
+
FIND_DEFAULTS = dict(limit=10)
|
|
159
|
+
""" The map that defines the various default values
|
|
160
|
+
for a series of find related attributes """
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def is_iterable(object):
|
|
164
|
+
"""
|
|
165
|
+
Verifies if the provided object (value) is iterable
|
|
166
|
+
meaning that the type of it is listed in a list of
|
|
167
|
+
sequence based data types.
|
|
168
|
+
|
|
169
|
+
:type object: Object
|
|
170
|
+
:param object: The value that is going to be tested
|
|
171
|
+
for iterable type.
|
|
172
|
+
:rtype: bool
|
|
173
|
+
:return: If the provided object represents an iterable
|
|
174
|
+
object meaning that it belongs to sequence type.
|
|
175
|
+
"""
|
|
176
|
+
|
|
177
|
+
return isinstance(object, defines.ITERABLES)
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
def is_mobile(user_agent):
|
|
181
|
+
"""
|
|
182
|
+
Verifies if the provided user agent string represents a
|
|
183
|
+
mobile agent, for that a series of regular expressions
|
|
184
|
+
are matched against the user agent string.
|
|
185
|
+
|
|
186
|
+
:type user_agent: String
|
|
187
|
+
:param user_agent: The string containing the user agent
|
|
188
|
+
value that is going to be verified against a series of
|
|
189
|
+
regular expressions for mobile verification.
|
|
190
|
+
:rtype: bool
|
|
191
|
+
:return: If the provided user agent string represents a
|
|
192
|
+
mobile browser or a regular (desktop) one.
|
|
193
|
+
"""
|
|
194
|
+
|
|
195
|
+
if not user_agent:
|
|
196
|
+
return False
|
|
197
|
+
prefix = user_agent[:4]
|
|
198
|
+
mobile = defines.MOBILE_REGEX.search(user_agent)
|
|
199
|
+
mobile_prefix = defines.MOBILE_PREFIX_REGEX.search(prefix)
|
|
200
|
+
is_mobile = True if mobile or mobile_prefix else False
|
|
201
|
+
return is_mobile
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
def is_tablet(user_agent):
|
|
205
|
+
"""
|
|
206
|
+
Verifies if the provided user agent string represents a
|
|
207
|
+
tablet agent, for that a series of regular expressions
|
|
208
|
+
are matched against the user agent string.
|
|
209
|
+
|
|
210
|
+
:type user_agent: String
|
|
211
|
+
:param user_agent: The string containing the user agent
|
|
212
|
+
value that is going to be verified against a series of
|
|
213
|
+
regular expressions for tablet verification.
|
|
214
|
+
:rtype: bool
|
|
215
|
+
:return: If the provided user agent string represents a
|
|
216
|
+
tablet browser or a regular (desktop) one.
|
|
217
|
+
"""
|
|
218
|
+
|
|
219
|
+
if not user_agent:
|
|
220
|
+
return False
|
|
221
|
+
prefix = user_agent[:4]
|
|
222
|
+
tablet = defines.TABLET_REGEX.search(user_agent)
|
|
223
|
+
mobile_prefix = defines.MOBILE_PREFIX_REGEX.search(prefix)
|
|
224
|
+
is_tablet = True if tablet or mobile_prefix else False
|
|
225
|
+
return is_tablet
|
|
226
|
+
|
|
227
|
+
|
|
228
|
+
def is_browser(user_agent):
|
|
229
|
+
"""
|
|
230
|
+
Verifies if the provided user agent string represents a
|
|
231
|
+
browser (interactive) agent, for that a series of verifications
|
|
232
|
+
are going to be performed against the user agent string.
|
|
233
|
+
|
|
234
|
+
:type user_agent: String
|
|
235
|
+
:param user_agent: The string containing the user agent
|
|
236
|
+
value that is going to be verified for browser presence.
|
|
237
|
+
:rtype: bool
|
|
238
|
+
:return: If the provided user agent string represents an
|
|
239
|
+
interactive browser or not.
|
|
240
|
+
"""
|
|
241
|
+
|
|
242
|
+
info = browser_info(user_agent)
|
|
243
|
+
if not info:
|
|
244
|
+
return False
|
|
245
|
+
interactive = info.get("interactive", False)
|
|
246
|
+
if not interactive:
|
|
247
|
+
return False
|
|
248
|
+
return True
|
|
249
|
+
|
|
250
|
+
|
|
251
|
+
def is_bot(user_agent):
|
|
252
|
+
"""
|
|
253
|
+
Verifies if the provided user agent string represents a
|
|
254
|
+
bot (automated) agent, for that a series of verifications
|
|
255
|
+
are going to be performed against the user agent string.
|
|
256
|
+
|
|
257
|
+
:type user_agent: String
|
|
258
|
+
:param user_agent: The string containing the user agent
|
|
259
|
+
value that is going to be verified for bot presence.
|
|
260
|
+
:rtype: bool
|
|
261
|
+
:return: If the provided user agent string represents an
|
|
262
|
+
automated bot or not.
|
|
263
|
+
"""
|
|
264
|
+
|
|
265
|
+
info = browser_info(user_agent=user_agent)
|
|
266
|
+
if not info:
|
|
267
|
+
return False
|
|
268
|
+
bot = info.get("bot", False)
|
|
269
|
+
if not bot:
|
|
270
|
+
return False
|
|
271
|
+
return True
|
|
272
|
+
|
|
273
|
+
|
|
274
|
+
def browser_info(user_agent):
|
|
275
|
+
"""
|
|
276
|
+
Retrieves a dictionary containing information about the browser
|
|
277
|
+
and the operative system associated with the provided user agent.
|
|
278
|
+
|
|
279
|
+
The retrieval of the information depends on the kind of user
|
|
280
|
+
agent string provided, as coverage is limited.
|
|
281
|
+
|
|
282
|
+
:type user_agent: String
|
|
283
|
+
:param user_agent: The HTTP based user agent string to be processed.
|
|
284
|
+
:rtype: Dictionary
|
|
285
|
+
:return: The dictionary/map containing the information processed from
|
|
286
|
+
the provided user agent.
|
|
287
|
+
"""
|
|
288
|
+
|
|
289
|
+
if not user_agent:
|
|
290
|
+
return None
|
|
291
|
+
|
|
292
|
+
info = dict()
|
|
293
|
+
|
|
294
|
+
for browser_i in defines.BROWSER_INFO:
|
|
295
|
+
identity = browser_i["identity"]
|
|
296
|
+
sub_string = browser_i.get("sub_string", identity)
|
|
297
|
+
version_search = browser_i.get("version_search", sub_string + "/")
|
|
298
|
+
interactive = browser_i.get("interactive", True)
|
|
299
|
+
bot = browser_i.get("bot", False)
|
|
300
|
+
|
|
301
|
+
if not sub_string in user_agent:
|
|
302
|
+
continue
|
|
303
|
+
if not version_search in user_agent:
|
|
304
|
+
continue
|
|
305
|
+
|
|
306
|
+
version_i = user_agent.index(version_search) + len(version_search)
|
|
307
|
+
version = user_agent[version_i:].split(" ", 1)[0].strip(" ;")
|
|
308
|
+
try:
|
|
309
|
+
version_f = float(".".join(version.split(".")[:2]))
|
|
310
|
+
except ValueError:
|
|
311
|
+
version_f = 0.0
|
|
312
|
+
try:
|
|
313
|
+
version_i = int(version_f)
|
|
314
|
+
except ValueError:
|
|
315
|
+
version_f = 0
|
|
316
|
+
|
|
317
|
+
info.update(
|
|
318
|
+
name=identity,
|
|
319
|
+
version=version,
|
|
320
|
+
version_f=version_f,
|
|
321
|
+
version_i=version_i,
|
|
322
|
+
interactive=interactive,
|
|
323
|
+
bot=bot,
|
|
324
|
+
)
|
|
325
|
+
break
|
|
326
|
+
|
|
327
|
+
for os_i in defines.OS_INFO:
|
|
328
|
+
identity = os_i["identity"]
|
|
329
|
+
sub_string = os_i.get("sub_string", identity)
|
|
330
|
+
|
|
331
|
+
if not sub_string in user_agent:
|
|
332
|
+
continue
|
|
333
|
+
|
|
334
|
+
info.update(os=identity)
|
|
335
|
+
break
|
|
336
|
+
|
|
337
|
+
return info if info else None
|
|
338
|
+
|
|
339
|
+
|
|
340
|
+
def email_parts(base, strip=True):
|
|
341
|
+
"""
|
|
342
|
+
Unpacks the complete set of parts (name and email) from the
|
|
343
|
+
provided generalized email string. The provided string may
|
|
344
|
+
be a single email or the more complex form (eg: Name <email>).
|
|
345
|
+
|
|
346
|
+
Note that the provided base argument may be a single string
|
|
347
|
+
or a sequence of strings and the returning type will reflect
|
|
348
|
+
that same provided parameter.
|
|
349
|
+
|
|
350
|
+
:type base: String/List
|
|
351
|
+
:param base: The base value that is going to be parsed as an
|
|
352
|
+
email string or a sequence of such values.
|
|
353
|
+
:type strip: bool
|
|
354
|
+
:param strip: If the provided base value should be stripped
|
|
355
|
+
of any extra space characters before processing.
|
|
356
|
+
:rtype: Tuple/List
|
|
357
|
+
:return: The resulting parsed tuple/tuples for the provided
|
|
358
|
+
email strings, these tuples contain name and emails for each
|
|
359
|
+
of the parsed values.
|
|
360
|
+
"""
|
|
361
|
+
|
|
362
|
+
base_t = type(base)
|
|
363
|
+
if base_t in SEQUENCE_TYPES:
|
|
364
|
+
return [email_parts(base, strip=strip) for base in base]
|
|
365
|
+
|
|
366
|
+
if not base:
|
|
367
|
+
return (None, None)
|
|
368
|
+
if strip:
|
|
369
|
+
base = base.strip()
|
|
370
|
+
|
|
371
|
+
match = defines.EMAIL_REGEX.match(base)
|
|
372
|
+
if not match:
|
|
373
|
+
return (None, None)
|
|
374
|
+
|
|
375
|
+
email = match.group("email_a") or match.group("email_b")
|
|
376
|
+
name = match.group("name") or email
|
|
377
|
+
|
|
378
|
+
return (name, email)
|
|
379
|
+
|
|
380
|
+
|
|
381
|
+
def email_mime(base, encoding="utf-8"):
|
|
382
|
+
if legacy.PYTHON_3:
|
|
383
|
+
encoding = None
|
|
384
|
+
|
|
385
|
+
base_t = type(base)
|
|
386
|
+
if base_t in SEQUENCE_TYPES:
|
|
387
|
+
return [
|
|
388
|
+
value
|
|
389
|
+
for value in (email_mime(item, encoding=encoding) for item in base)
|
|
390
|
+
if value
|
|
391
|
+
]
|
|
392
|
+
|
|
393
|
+
name, email = email_parts(base)
|
|
394
|
+
if not name or not email:
|
|
395
|
+
return None
|
|
396
|
+
|
|
397
|
+
name = smtp.header(name, encoding=encoding)
|
|
398
|
+
|
|
399
|
+
return "%s <%s>" % (name, email)
|
|
400
|
+
|
|
401
|
+
|
|
402
|
+
def email_name(base):
|
|
403
|
+
base_t = type(base)
|
|
404
|
+
if base_t in SEQUENCE_TYPES:
|
|
405
|
+
return [
|
|
406
|
+
value
|
|
407
|
+
for value in (email_name(base) for base in base if email_name(base))
|
|
408
|
+
if value
|
|
409
|
+
]
|
|
410
|
+
name, _email = email_parts(base)
|
|
411
|
+
return name
|
|
412
|
+
|
|
413
|
+
|
|
414
|
+
def email_base(base):
|
|
415
|
+
base_t = type(base)
|
|
416
|
+
if base_t in SEQUENCE_TYPES:
|
|
417
|
+
return [
|
|
418
|
+
value
|
|
419
|
+
for value in (email_base(base) for base in base if email_base(base))
|
|
420
|
+
if value
|
|
421
|
+
]
|
|
422
|
+
_name, email = email_parts(base)
|
|
423
|
+
return email
|
|
424
|
+
|
|
425
|
+
|
|
426
|
+
def date_to_timestamp(value, format="%d/%m/%Y"):
|
|
427
|
+
if not value:
|
|
428
|
+
return None
|
|
429
|
+
try:
|
|
430
|
+
value = datetime.datetime.strptime(value, format)
|
|
431
|
+
except Exception:
|
|
432
|
+
return None
|
|
433
|
+
value = value.utctimetuple()
|
|
434
|
+
return calendar.timegm(value)
|
|
435
|
+
|
|
436
|
+
|
|
437
|
+
def obfuscate(value, display_l=3, token="*"):
|
|
438
|
+
value_l = len(value)
|
|
439
|
+
display_l = min([value_l, display_l])
|
|
440
|
+
obfuscated = value[:display_l] + ((value_l - display_l) * token)
|
|
441
|
+
return obfuscated
|
|
442
|
+
|
|
443
|
+
|
|
444
|
+
def import_pip(name, package=None, default=None):
|
|
445
|
+
package = package or name
|
|
446
|
+
try:
|
|
447
|
+
module = __import__(name)
|
|
448
|
+
except ImportError:
|
|
449
|
+
try:
|
|
450
|
+
module = install_pip_s(package)
|
|
451
|
+
except Exception:
|
|
452
|
+
return default
|
|
453
|
+
try:
|
|
454
|
+
module = __import__(name)
|
|
455
|
+
except ImportError:
|
|
456
|
+
return default
|
|
457
|
+
return module
|
|
458
|
+
|
|
459
|
+
|
|
460
|
+
def ensure_pip(name, package=None, delayed=False):
|
|
461
|
+
package = package or name
|
|
462
|
+
try:
|
|
463
|
+
__import__(name)
|
|
464
|
+
except ImportError:
|
|
465
|
+
install_pip_s(package, delayed=delayed)
|
|
466
|
+
|
|
467
|
+
|
|
468
|
+
def install_pip(package, delayed=False, isolated=True, user=None):
|
|
469
|
+
try:
|
|
470
|
+
import pip
|
|
471
|
+
|
|
472
|
+
pip_internal = pip
|
|
473
|
+
finally:
|
|
474
|
+
pass
|
|
475
|
+
try:
|
|
476
|
+
import pip._internal
|
|
477
|
+
|
|
478
|
+
pip_internal = pip._internal
|
|
479
|
+
except ImportError:
|
|
480
|
+
pass
|
|
481
|
+
try:
|
|
482
|
+
import pip._internal.main
|
|
483
|
+
|
|
484
|
+
pip_internal = pip._internal.main
|
|
485
|
+
except ImportError:
|
|
486
|
+
pass
|
|
487
|
+
user = config.conf("PIP_USER", False, cast=bool)
|
|
488
|
+
args = ["install", package]
|
|
489
|
+
if hasattr(pip_internal, "main"):
|
|
490
|
+
pip_main = pip_internal.main
|
|
491
|
+
elif hasattr(pip, "main"):
|
|
492
|
+
pip_main = pip.main # @UndefinedVariable
|
|
493
|
+
else:
|
|
494
|
+
raise exceptions.OperationalError(message="pip not found")
|
|
495
|
+
if user:
|
|
496
|
+
args.insert(1, "--user")
|
|
497
|
+
if delayed:
|
|
498
|
+
process = multiprocessing.Process(target=pip_main, args=(args,))
|
|
499
|
+
process.start()
|
|
500
|
+
result = 0
|
|
501
|
+
elif isolated:
|
|
502
|
+
process = multiprocessing.Process(target=pip_main, args=(args,))
|
|
503
|
+
process.start()
|
|
504
|
+
process.join()
|
|
505
|
+
result = process.exitcode
|
|
506
|
+
else:
|
|
507
|
+
result = pip_main(args)
|
|
508
|
+
if result == 0:
|
|
509
|
+
return
|
|
510
|
+
raise exceptions.OperationalError(message="pip error, exit code (%d)" % result)
|
|
511
|
+
|
|
512
|
+
|
|
513
|
+
def install_pip_s(package, delayed=False):
|
|
514
|
+
try:
|
|
515
|
+
install_pip(package, delayed=delayed, user=False)
|
|
516
|
+
except exceptions.OperationalError:
|
|
517
|
+
install_pip(package, delayed=delayed, user=True)
|
|
518
|
+
|
|
519
|
+
|
|
520
|
+
def request_json(request=None, encoding="utf-8"):
|
|
521
|
+
# retrieves the proper request object, either the provided
|
|
522
|
+
# request or the default base request object and then in
|
|
523
|
+
# case the the JSON data is already in the request properties
|
|
524
|
+
# it is used (cached value) otherwise continues with the parse
|
|
525
|
+
request = request or common.base().get_request()
|
|
526
|
+
if "_data_j" in request.properties:
|
|
527
|
+
return request.properties["_data_j"]
|
|
528
|
+
|
|
529
|
+
# retrieves the current request data and tries to
|
|
530
|
+
# "load" it as JSON data, in case it fails gracefully
|
|
531
|
+
# handles the failure setting the value as an empty map
|
|
532
|
+
data = request.data
|
|
533
|
+
try:
|
|
534
|
+
is_bytes = legacy.is_bytes(data)
|
|
535
|
+
if is_bytes:
|
|
536
|
+
data = data.decode(encoding)
|
|
537
|
+
data_j = json.loads(data)
|
|
538
|
+
except Exception:
|
|
539
|
+
data_j = {}
|
|
540
|
+
request.properties["_data_j"] = data_j
|
|
541
|
+
|
|
542
|
+
# returns the JSON data object to the caller method so that it
|
|
543
|
+
# may be used as the parsed value (post information)
|
|
544
|
+
return data_j
|
|
545
|
+
|
|
546
|
+
|
|
547
|
+
def get_context(self):
|
|
548
|
+
"""
|
|
549
|
+
Retrieves the "best" possible context object for the current
|
|
550
|
+
execution life-cycle, typically this should be an "attached"
|
|
551
|
+
request object.
|
|
552
|
+
|
|
553
|
+
Multiple strategies should be used while trying to retrieved
|
|
554
|
+
the "current" context.
|
|
555
|
+
"""
|
|
556
|
+
|
|
557
|
+
# tries to retrieve the request attached to the current instance
|
|
558
|
+
# (typically a property) and verifies the object compliance,
|
|
559
|
+
# returning the object to the caller in case it's valid
|
|
560
|
+
if hasattr(self, "request"):
|
|
561
|
+
request = self.request
|
|
562
|
+
is_valid = hasattr(request, "is_mock") and not request.is_mock()
|
|
563
|
+
if is_valid:
|
|
564
|
+
return request
|
|
565
|
+
|
|
566
|
+
# uses the global strategy to try to retrieve a request for the
|
|
567
|
+
# current execution environment (not thread safe)
|
|
568
|
+
request = common.base().get_request()
|
|
569
|
+
is_valid = hasattr(request, "is_mock") and not request.is_mock()
|
|
570
|
+
if is_valid:
|
|
571
|
+
return request
|
|
572
|
+
|
|
573
|
+
# fallback return value meaning that it was not possible to retrieve
|
|
574
|
+
# any valid execution context for the current environment
|
|
575
|
+
return None
|
|
576
|
+
|
|
577
|
+
|
|
578
|
+
def get_object(object=None, alias=False, page=False, find=False, norm=True, **kwargs):
|
|
579
|
+
# retrieves the base request object that is going to be used in
|
|
580
|
+
# the construction of the object
|
|
581
|
+
request = common.base().get_request()
|
|
582
|
+
|
|
583
|
+
# verifies if the provided object is valid in such case creates
|
|
584
|
+
# a copy of it and uses it as the base object for validation
|
|
585
|
+
# otherwise used an empty map (form validation)
|
|
586
|
+
object = object and copy.copy(object) or {}
|
|
587
|
+
|
|
588
|
+
# retrieves the current request data and tries to
|
|
589
|
+
# "load" it as JSON data, in case it fails gracefully
|
|
590
|
+
# handles the failure setting the value as an empty map
|
|
591
|
+
data_j = request_json()
|
|
592
|
+
|
|
593
|
+
# uses all the values referencing data in the request to try
|
|
594
|
+
# to populate the object this way it may be constructed using
|
|
595
|
+
# any of theses strategies (easier for the developer)
|
|
596
|
+
for name, value in data_j.items():
|
|
597
|
+
object[name] = value
|
|
598
|
+
for name, value in request.files_s.items():
|
|
599
|
+
object[name] = value
|
|
600
|
+
for name, value in request.post_s.items():
|
|
601
|
+
object[name] = value
|
|
602
|
+
for name, value in request.params_s.items():
|
|
603
|
+
object[name] = value
|
|
604
|
+
|
|
605
|
+
# in case the alias flag is set tries to resolve the attribute
|
|
606
|
+
# alias and in case the find types are set converts the find
|
|
607
|
+
# based attributes using the currently defined mapping map
|
|
608
|
+
if alias:
|
|
609
|
+
resolve_alias(object)
|
|
610
|
+
if page:
|
|
611
|
+
page_types(object)
|
|
612
|
+
if find:
|
|
613
|
+
find_types(object)
|
|
614
|
+
if find:
|
|
615
|
+
find_defaults(object, kwargs)
|
|
616
|
+
|
|
617
|
+
# in case the normalization flag is set runs the normalization
|
|
618
|
+
# of the provided object so that sequences are properly handled
|
|
619
|
+
# as defined in the specification (this allows multiple references)
|
|
620
|
+
if norm:
|
|
621
|
+
norm_object(object)
|
|
622
|
+
|
|
623
|
+
# returns the constructed object to the caller method this object
|
|
624
|
+
# should be a structured representation of the data in the request
|
|
625
|
+
return object
|
|
626
|
+
|
|
627
|
+
|
|
628
|
+
def resolve_alias(object):
|
|
629
|
+
for name, value in legacy.eager(object.items()):
|
|
630
|
+
if not name in ALIAS:
|
|
631
|
+
continue
|
|
632
|
+
_alias = ALIAS[name]
|
|
633
|
+
object[_alias] = value
|
|
634
|
+
del object[name]
|
|
635
|
+
|
|
636
|
+
|
|
637
|
+
def page_types(object, size=50):
|
|
638
|
+
page = object.get("page", 1)
|
|
639
|
+
size = object.get("size", size)
|
|
640
|
+
sorter = object.get("sorter", None)
|
|
641
|
+
direction = object.get("direction", "descending")
|
|
642
|
+
page = int(page)
|
|
643
|
+
size = int(size)
|
|
644
|
+
offset = page - 1
|
|
645
|
+
object["skip"] = offset * size
|
|
646
|
+
object["limit"] = size
|
|
647
|
+
if sorter:
|
|
648
|
+
object["sort"] = "%s:%s" % (sorter, direction)
|
|
649
|
+
|
|
650
|
+
|
|
651
|
+
def find_types(object):
|
|
652
|
+
for name, value in legacy.eager(object.items()):
|
|
653
|
+
if not name in FIND_TYPES:
|
|
654
|
+
del object[name]
|
|
655
|
+
continue
|
|
656
|
+
find_type = FIND_TYPES[name]
|
|
657
|
+
object[name] = find_type(value)
|
|
658
|
+
|
|
659
|
+
|
|
660
|
+
def find_defaults(object, kwargs):
|
|
661
|
+
for name, value in legacy.iteritems(kwargs):
|
|
662
|
+
if name in object:
|
|
663
|
+
continue
|
|
664
|
+
if not name in FIND_TYPES:
|
|
665
|
+
continue
|
|
666
|
+
object[name] = value
|
|
667
|
+
|
|
668
|
+
for name, value in legacy.iteritems(FIND_DEFAULTS):
|
|
669
|
+
if name in object:
|
|
670
|
+
continue
|
|
671
|
+
object[name] = value
|
|
672
|
+
|
|
673
|
+
|
|
674
|
+
def norm_object(object):
|
|
675
|
+
# iterates over all the key value association in the
|
|
676
|
+
# object, trying to find the ones that refer sequences
|
|
677
|
+
# so that they may be normalized
|
|
678
|
+
for name, value in object.items():
|
|
679
|
+
# verifies if the current name references a sequence
|
|
680
|
+
# and if that's not the case continues the loop trying
|
|
681
|
+
# to find any other sequence based value
|
|
682
|
+
if not name.endswith("[]"):
|
|
683
|
+
continue
|
|
684
|
+
|
|
685
|
+
# removes the current reference to the name as the value
|
|
686
|
+
# is not in the valid structure and then normalizes the
|
|
687
|
+
# name by removing the extra sequence indication value
|
|
688
|
+
del object[name]
|
|
689
|
+
name = name[:-2]
|
|
690
|
+
|
|
691
|
+
# in case the current value is not valid (empty) the object
|
|
692
|
+
# is set with an empty list for the current iteration as this
|
|
693
|
+
# is considered to be the default value
|
|
694
|
+
if not value:
|
|
695
|
+
object[name] = []
|
|
696
|
+
continue
|
|
697
|
+
|
|
698
|
+
# retrieves the normalized and linearized list of leafs
|
|
699
|
+
# for the current value and then verifies the size of each
|
|
700
|
+
# of its values and uses it to measure the number of
|
|
701
|
+
# dictionary elements that are going to be contained in
|
|
702
|
+
# the sequence to be "generated", then uses this (size)
|
|
703
|
+
# value to pre-generate the complete set of dictionaries
|
|
704
|
+
leafs_l = leafs(value)
|
|
705
|
+
first = leafs_l[0] if leafs_l else (None, [])
|
|
706
|
+
_fqn, values = first
|
|
707
|
+
size = len(values)
|
|
708
|
+
list = [dict() for _index in range(size)]
|
|
709
|
+
|
|
710
|
+
# sets the list of generated dictionaries in the object for
|
|
711
|
+
# the newly normalized name of structure
|
|
712
|
+
object[name] = list
|
|
713
|
+
|
|
714
|
+
# iterates over the complete set of key value pairs in the
|
|
715
|
+
# leafs list to gather the value into the various objects that
|
|
716
|
+
# are contained in the sequence (normalization process)
|
|
717
|
+
for _name, _value in leafs_l:
|
|
718
|
+
for index in range(size):
|
|
719
|
+
_object = list[index]
|
|
720
|
+
_name_l = _name.split(".")
|
|
721
|
+
set_object(_object, _name_l, _value[index])
|
|
722
|
+
|
|
723
|
+
|
|
724
|
+
def set_object(object, name_l, value):
|
|
725
|
+
"""
|
|
726
|
+
Sets a composite value in an object, allowing for
|
|
727
|
+
dynamic setting of random size key values.
|
|
728
|
+
|
|
729
|
+
This method is useful for situations where one wants
|
|
730
|
+
to set a value at a randomly defined depth inside
|
|
731
|
+
an object without having too much work with the creation
|
|
732
|
+
of the inner dictionaries.
|
|
733
|
+
|
|
734
|
+
:type object: Dictionary
|
|
735
|
+
:param object: The target object that is going to be
|
|
736
|
+
changed and set with the target value.
|
|
737
|
+
:type name_l: List
|
|
738
|
+
:param name_l: The list of names that define the fully
|
|
739
|
+
qualified name to be used in the setting of the value
|
|
740
|
+
for example path.to.end will be a three element list containing
|
|
741
|
+
each of the partial names.
|
|
742
|
+
:type value: Object
|
|
743
|
+
:param value: The value that is going to be set in the
|
|
744
|
+
defined target of the object.
|
|
745
|
+
"""
|
|
746
|
+
|
|
747
|
+
# retrieves the first name in the names list this is the
|
|
748
|
+
# value that is going to be used for the current iteration
|
|
749
|
+
name = name_l[0]
|
|
750
|
+
|
|
751
|
+
# in case the length of the current names list has reached
|
|
752
|
+
# one this is the final iteration and so the value is set
|
|
753
|
+
# at the current naming point
|
|
754
|
+
if len(name_l) == 1:
|
|
755
|
+
object[name] = value
|
|
756
|
+
|
|
757
|
+
# otherwise this is a "normal" step and so a new map must
|
|
758
|
+
# be created/retrieved and the iteration step should be
|
|
759
|
+
# performed on this new map as it's set on the current naming
|
|
760
|
+
# place (recursion step)
|
|
761
|
+
else:
|
|
762
|
+
map = object.get(name, {})
|
|
763
|
+
object[name] = map
|
|
764
|
+
set_object(map, name_l[1:], value)
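# Editorial sketch, not part of the original appier source: shows how
# set_object builds the nested dictionaries for a dotted name, assuming the
# function defined above is in scope; the helper name is illustrative only.
def _example_set_object():
    target = {}
    set_object(target, "person.address.city".split("."), "Lisbon")
    assert target == {"person": {"address": {"city": "Lisbon"}}}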
|
|
765
|
+
|
|
766
|
+
|
|
767
|
+
def leafs(object):
|
|
768
|
+
"""
|
|
769
|
+
Retrieves a list containing a series of tuples that
|
|
770
|
+
each represent a leaf of the current object structure.
|
|
771
|
+
|
|
772
|
+
A leaf is the last element of an object that is not a
|
|
773
|
+
map, the other intermediary maps are considered to be
|
|
774
|
+
trunks and should be percolated recursively.
|
|
775
|
+
|
|
776
|
+
This is a recursive function that takes some memory for
|
|
777
|
+
the construction of the list, and so should be used with
|
|
778
|
+
the proper care to avoid bottlenecks.
|
|
779
|
+
|
|
780
|
+
:type object: Dictionary
|
|
781
|
+
:param object: The object for which the leafs list
|
|
782
|
+
structure is meant to be retrieved.
|
|
783
|
+
:rtype: List
|
|
784
|
+
:return: The list of leaf node tuples for the provided
|
|
785
|
+
object, as requested for each of the sequences.
|
|
786
|
+
"""
|
|
787
|
+
|
|
788
|
+
# creates the list that will hold the various leaf nodes
|
|
789
|
+
# "gathered" by the current recursion function
|
|
790
|
+
leafs_l = []
|
|
791
|
+
|
|
792
|
+
# iterates over all the key and value relations in the
|
|
793
|
+
# object trying to find the leaf nodes (no map nodes)
|
|
794
|
+
# creating a tuple of fqn (fully qualified name) and value
|
|
795
|
+
for name, value in object.items():
|
|
796
|
+
# retrieves the data type for the current value and
|
|
797
|
+
# validation if it is a dictionary or any other type
|
|
798
|
+
# in case it's a dictionary a new iteration step must
|
|
799
|
+
# be performed retrieving the leafs of the value and
|
|
800
|
+
# then incrementing the name with the current prefix
|
|
801
|
+
value_t = type(value)
|
|
802
|
+
if value_t == dict:
|
|
803
|
+
_leafs = leafs(value)
|
|
804
|
+
_leafs = [(name + "." + _name, value) for _name, value in _leafs]
|
|
805
|
+
leafs_l.extend(_leafs)
|
|
806
|
+
|
|
807
|
+
# otherwise this is a leaf node and so the leaf tuple
|
|
808
|
+
# node must be constructed with the current value
|
|
809
|
+
# (properly validated for sequence presence)
|
|
810
|
+
else:
|
|
811
|
+
value_t = type(value)
|
|
812
|
+
if not value_t == list:
|
|
813
|
+
value = [value]
|
|
814
|
+
leafs_l.append((name, value))
|
|
815
|
+
|
|
816
|
+
# returns the list of leaf nodes that was "just" created
|
|
817
|
+
# to the caller method so that it may be used there
|
|
818
|
+
return leafs_l
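# Editorial sketch, not part of the original appier source: shows the leaf
# tuples produced for a simple nested map, assuming the function defined
# above is in scope; the helper name is illustrative only.
def _example_leafs():
    result = leafs({"person": {"name": "John", "age": 42}})
    # non sequence values are wrapped in a list as described in the docstring
    assert sorted(result) == [("person.age", [42]), ("person.name", ["John"])]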
|
|
819
|
+
|
|
820
|
+
|
|
821
|
+
def gather_errors(lazy_dict, resolve=True):
|
|
822
|
+
"""
|
|
823
|
+
Function responsible for the iterative gathering of
|
|
824
|
+
lazy evaluation errors, allowing for a complete gathering
|
|
825
|
+
of errors instead of a single evaluation.
|
|
826
|
+
|
|
827
|
+
:type lazy_dict: LazyDict
|
|
828
|
+
:param lazy_dict: The lazy dictionary that is going to be
|
|
829
|
+
percolated and evaluated sequentially.
|
|
830
|
+
:type resolve: bool
|
|
831
|
+
:param resolve: If the lazy dictionary values should be evaluated
|
|
832
|
+
even if they have already been eager loaded, by unsetting this value
|
|
833
|
+
there's a risk of not gathering all of the errors.
|
|
834
|
+
:rtype: Dictionary
|
|
835
|
+
:return: The final dictionary containing the complete set of
|
|
836
|
+
errors that have been found.
|
|
837
|
+
"""
|
|
838
|
+
|
|
839
|
+
# creates the dictionary that is going to hold sequences of
|
|
840
|
+
# string based errors indexed by parameter name
|
|
841
|
+
errors = dict()
|
|
842
|
+
|
|
843
|
+
# iterates over the complete set of keys in the lazy dictionary
|
|
844
|
+
# to evaluate the values and check if there are errors associated
|
|
845
|
+
for key in lazy_dict:
|
|
846
|
+
try:
|
|
847
|
+
_value = lazy_dict.__getitem__(key, resolve=resolve)
|
|
848
|
+
except (exceptions.AppierException, exceptions.BaseInternalError) as exception:
|
|
849
|
+
_errors = errors.get(key, [])
|
|
850
|
+
_errors.append(exception.message)
|
|
851
|
+
errors[key] = _errors
|
|
852
|
+
|
|
853
|
+
# returns the final dictionary of errors (indexed by name) to
|
|
854
|
+
# the caller method so that it may be used for error handling
|
|
855
|
+
return errors
|
|
856
|
+
|
|
857
|
+
|
|
858
|
+
def gen_token(limit=None, hash=hashlib.sha256):
|
|
859
|
+
"""
|
|
860
|
+
Generates a random cryptographic ready token according
|
|
861
|
+
to the framework specification, this is generated using
|
|
862
|
+
a truly random UUID based seed and hashed using the
|
|
863
|
+
provided hash digest strategy (SHA256 by default).
|
|
864
|
+
|
|
865
|
+
The resulting value is returned as an hexadecimal based
|
|
866
|
+
string according to the standard.
|
|
867
|
+
|
|
868
|
+
:type limit: int
|
|
869
|
+
:param limit: The maximum number of characters allowed
|
|
870
|
+
for the token to be generated.
|
|
871
|
+
:type hash: Function
|
|
872
|
+
:param hash: The hashing method that is going to be used
|
|
873
|
+
for the hash of the generated token, this should be compliant
|
|
874
|
+
with the base python hashing infra-structure.
|
|
875
|
+
:rtype: String
|
|
876
|
+
:return: The hexadecimal based string value
|
|
877
|
+
"""
|
|
878
|
+
|
|
879
|
+
token_s = str(uuid.uuid4())
|
|
880
|
+
token_s = token_s.encode("utf-8")
|
|
881
|
+
token = hash(token_s).hexdigest()
|
|
882
|
+
if limit:
|
|
883
|
+
token = token[:limit]
|
|
884
|
+
return token
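# Editorial sketch, not part of the original appier source: shows the shape
# of the value produced by gen_token, assuming the function defined above is
# in scope; the helper name is illustrative only.
def _example_gen_token():
    token = gen_token(limit=16)
    # the token is an hexadecimal digest truncated to the requested limit
    assert len(token) == 16
    assert all(char in "0123456789abcdef" for char in token)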
|
|
885
|
+
|
|
886
|
+
|
|
887
|
+
def html_to_text(data):
|
|
888
|
+
"""
|
|
889
|
+
Converts the provided HTML textual data into a plain text
|
|
890
|
+
representation of it. This method uses a series of heuristics
|
|
891
|
+
for this conversion, and such conversion should not be considered
|
|
892
|
+
to be completely reliable.
|
|
893
|
+
|
|
894
|
+
The current implementation is not memory or processor efficient
|
|
895
|
+
and should be used carefully to avoid performance problems.
|
|
896
|
+
|
|
897
|
+
:type data: String
|
|
898
|
+
:param data: The HTML string of text that is going to be used for
|
|
899
|
+
the conversion into the plain text representation.
|
|
900
|
+
:rtype: String
|
|
901
|
+
:return: The approximate plain text representation to the provided
|
|
902
|
+
HTML contents.
|
|
903
|
+
"""
|
|
904
|
+
|
|
905
|
+
data = data.strip()
|
|
906
|
+
data = data.replace("\n", "\r")
|
|
907
|
+
|
|
908
|
+
data = data.replace("©", "Copyright")
|
|
909
|
+
data = data.replace("·", "-")
|
|
910
|
+
|
|
911
|
+
result = re.findall(defines.BODY_REGEX, data)
|
|
912
|
+
data = result[0] if result else ""
|
|
913
|
+
|
|
914
|
+
data = defines.TAG_REGEX.sub("", data)
|
|
915
|
+
|
|
916
|
+
valid = []
|
|
917
|
+
lines = data.splitlines(False)
|
|
918
|
+
for line in lines:
|
|
919
|
+
line = line.strip()
|
|
920
|
+
if not line:
|
|
921
|
+
continue
|
|
922
|
+
valid.append(line)
|
|
923
|
+
|
|
924
|
+
data = "\n".join(valid)
|
|
925
|
+
data = data.replace("\n.", ".")
|
|
926
|
+
return data
|
|
927
|
+
|
|
928
|
+
|
|
929
|
+
def camel_to_underscore(camel, separator="_", lower=True):
|
|
930
|
+
"""
|
|
931
|
+
Converts the provided camel cased based value into
|
|
932
|
+
a normalized underscore based string.
|
|
933
|
+
|
|
934
|
+
An optional lower parameter may be used to prevent the letters
|
|
935
|
+
from being lower cased.
|
|
936
|
+
|
|
937
|
+
This is useful as most of the python string standards
|
|
938
|
+
are compliant with the underscore strategy.
|
|
939
|
+
|
|
940
|
+
:type camel: String
|
|
941
|
+
:param camel: The camel cased string that is going to be
|
|
942
|
+
converted into an underscore based string.
|
|
943
|
+
:type separator: String
|
|
944
|
+
:param separator: The separator token that is going to
|
|
945
|
+
be used in the camel to underscore conversion.
|
|
946
|
+
:type lower: bool
|
|
947
|
+
:param lower: If the letter casing should be changed while
|
|
948
|
+
converting the value from camel to underscore.
|
|
949
|
+
:rtype: String
|
|
950
|
+
:return: The underscore based string resulting from the
|
|
951
|
+
conversion of the provided camel cased one.
|
|
952
|
+
"""
|
|
953
|
+
|
|
954
|
+
if not camel:
|
|
955
|
+
return camel
|
|
956
|
+
value = FIRST_CAP_REGEX.sub(r"\1" + separator + r"\2", camel)
|
|
957
|
+
value = ALL_CAP_REGEX.sub(r"\1" + separator + r"\2", value)
|
|
958
|
+
if lower:
|
|
959
|
+
value = value.lower()
|
|
960
|
+
return value
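# Editorial sketch, not part of the original appier source: expected results
# of camel_to_underscore assuming the usual camel case regexes defined
# earlier in this module; the helper name is illustrative only.
def _example_camel_to_underscore():
    assert camel_to_underscore("HelloWorld") == "hello_world"
    assert camel_to_underscore("HelloWorld", lower=False) == "Hello_World"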
|
|
961
|
+
|
|
962
|
+
|
|
963
|
+
def camel_to_readable(camel, lower=False, capitalize=False):
|
|
964
|
+
"""
|
|
965
|
+
Converts the given camel cased oriented string value
|
|
966
|
+
into a readable one meaning that the returned value
|
|
967
|
+
is a set of strings separated by spaces.
|
|
968
|
+
|
|
969
|
+
This method may be used to convert class names into
|
|
970
|
+
something that is readable by an end user.
|
|
971
|
+
|
|
972
|
+
:type camel: String
|
|
973
|
+
:param camel: The camel case string value that is going
|
|
974
|
+
to be used in the conversion into a readable string.
|
|
975
|
+
:type lower: bool
|
|
976
|
+
:param lower: If the camel based value should be lower
|
|
977
|
+
cased before the conversion to readable.
|
|
978
|
+
:type capitalize: bool
|
|
979
|
+
:param capitalize: If all of the words should be capitalized
|
|
980
|
+
or if instead only the first one should.
|
|
981
|
+
:rtype: String
|
|
982
|
+
:return: The final human readable string that may be
|
|
983
|
+
used to display a value to an end user.
|
|
984
|
+
"""
|
|
985
|
+
|
|
986
|
+
if not camel:
|
|
987
|
+
return camel
|
|
988
|
+
underscore = camel_to_underscore(camel, lower=lower)
|
|
989
|
+
return underscore_to_readable(underscore, capitalize=capitalize)
|
|
990
|
+
|
|
991
|
+
|
|
992
|
+
def underscore_to_camel(underscore, lower=False):
|
|
993
|
+
"""
|
|
994
|
+
Converts the provided underscore cased based value into
|
|
995
|
+
a normalized camel cased string.
|
|
996
|
+
|
|
997
|
+
An optional lower parameter may be provided to obtain a
|
|
998
|
+
lower camel case version of the string.
|
|
999
|
+
|
|
1000
|
+
This is useful as most of the python string standards
|
|
1001
|
+
are compliant with the underscore strategy.
|
|
1002
|
+
|
|
1003
|
+
:type underscore: String
|
|
1004
|
+
:param underscore: The underscore cased string that is going to be
|
|
1005
|
+
converted into a camel case based string.
|
|
1006
|
+
:type lower: bool
|
|
1007
|
+
:param lower: If the first letter of the resulting camel
|
|
1008
|
+
case string should be lower case (lower camel case).
|
|
1009
|
+
:rtype: String
|
|
1010
|
+
:return: The camel case based string resulting from the
|
|
1011
|
+
conversion of the provided underscore cased one.
|
|
1012
|
+
"""
|
|
1013
|
+
|
|
1014
|
+
if not underscore:
|
|
1015
|
+
return underscore
|
|
1016
|
+
camel = underscore_to_readable(underscore, capitalize=True, separator="")
|
|
1017
|
+
if not lower:
|
|
1018
|
+
return camel
|
|
1019
|
+
return camel[0].lower() + camel[1:]
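# Editorial sketch, not part of the original appier source: a minimal usage
# example of underscore_to_camel, assuming the function defined above is in
# scope; the helper name is illustrative only.
def _example_underscore_to_camel():
    assert underscore_to_camel("hello_world") == "HelloWorld"
    assert underscore_to_camel("hello_world", lower=True) == "helloWorld"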
|
|
1020
|
+
|
|
1021
|
+
|
|
1022
|
+
def underscore_to_readable(underscore, capitalize=False, separator=" "):
|
|
1023
|
+
"""
|
|
1024
|
+
Converts the given underscore oriented string value
|
|
1025
|
+
into a readable one meaning that the returned value
|
|
1026
|
+
is a set of strings separated by spaces.
|
|
1027
|
+
|
|
1028
|
+
This method may be used to convert class attributes into
|
|
1029
|
+
something that is readable by an end user.
|
|
1030
|
+
|
|
1031
|
+
:type underscore: String
|
|
1032
|
+
:param underscore: The underscore string value that is going
|
|
1033
|
+
to be used in the conversion into a readable string.
|
|
1034
|
+
:type capitalize: bool
|
|
1035
|
+
:param capitalize: If all of the words should be capitalized
|
|
1036
|
+
or if instead only the first one should.
|
|
1037
|
+
:type separator: String
|
|
1038
|
+
:param separator: The separator to be used to join the multiple
|
|
1039
|
+
parts of the resulting readable tokens.
|
|
1040
|
+
:rtype: String
|
|
1041
|
+
:return: The final human readable string that may be
|
|
1042
|
+
used to display a value to an end user.
|
|
1043
|
+
"""
|
|
1044
|
+
|
|
1045
|
+
if not underscore:
|
|
1046
|
+
return underscore
|
|
1047
|
+
parts = underscore.split("_")
|
|
1048
|
+
parts = [part for part in parts if part]
|
|
1049
|
+
if capitalize:
|
|
1050
|
+
parts = [part[0].upper() + part[1:] for part in parts]
|
|
1051
|
+
else:
|
|
1052
|
+
parts[0] = parts[0][0].upper() + parts[0][1:]
|
|
1053
|
+
return separator.join(parts)
|
|
1054
|
+
|
|
1055
|
+
|
|
1056
|
+
def quote(value, *args, **kwargs):
|
|
1057
|
+
"""
|
|
1058
|
+
Quotes the passed value according to the defined
|
|
1059
|
+
standard for URL escaping, the value is first encoded
|
|
1060
|
+
into the expected UTF-8 encoding as defined by standard.
|
|
1061
|
+
|
|
1062
|
+
This method should be used instead of a direct call to
|
|
1063
|
+
the equivalent call in the URL library.
|
|
1064
|
+
|
|
1065
|
+
:type value: String
|
|
1066
|
+
:param value: The string value that is going to be quoted
|
|
1067
|
+
according to the URL escaping scheme.
|
|
1068
|
+
:rtype: String
|
|
1069
|
+
:return: The quoted value according to the URL scheme this
|
|
1070
|
+
value may be safely used in urls.
|
|
1071
|
+
"""
|
|
1072
|
+
|
|
1073
|
+
is_unicode = isinstance(value, legacy.UNICODE)
|
|
1074
|
+
if is_unicode:
|
|
1075
|
+
value = value.encode("utf-8")
|
|
1076
|
+
return legacy.quote(value, *args, **kwargs)
|
|
1077
|
+
|
|
1078
|
+
|
|
1079
|
+
def unquote(value, *args, **kwargs):
|
|
1080
|
+
"""
|
|
1081
|
+
Unquotes the provided value according to the URL scheme
|
|
1082
|
+
the resulting value should be an unicode string representing
|
|
1083
|
+
the same value, the intermediary string value from the decoding
|
|
1084
|
+
should be an UTF-8 based value.
|
|
1085
|
+
|
|
1086
|
+
This method should be used instead of a direct call to
|
|
1087
|
+
the equivalent call in the URL library.
|
|
1088
|
+
|
|
1089
|
+
:type value: String
|
|
1090
|
+
:param value: The string value that is going to be unquoted
|
|
1091
|
+
according to the URL escaping scheme.
|
|
1092
|
+
:rtype: String
|
|
1093
|
+
:return: The unquoted value extracted as an unicode
|
|
1094
|
+
string that represents the same value.
|
|
1095
|
+
"""
|
|
1096
|
+
|
|
1097
|
+
value = legacy.unquote(value, *args, **kwargs)
|
|
1098
|
+
is_bytes = isinstance(value, legacy.BYTES)
|
|
1099
|
+
if is_bytes:
|
|
1100
|
+
value = value.decode("utf-8")
|
|
1101
|
+
return value
|
|
1102
|
+
|
|
1103
|
+
|
|
1104
|
+
def escape(value, char, escape="\\"):
|
|
1105
|
+
"""
|
|
1106
|
+
Escapes the provided string value according to the requested
|
|
1107
|
+
target character(s) and escape value, meaning that all the target characters
|
|
1108
|
+
are going to be replaced by the escape plus character sequence.
|
|
1109
|
+
|
|
1110
|
+
:type value: String
|
|
1111
|
+
:param value: The string that is going to have the target characters
|
|
1112
|
+
escaped according to the escape character.
|
|
1113
|
+
:type char: String/List/Tuple
|
|
1114
|
+
:param char: The character(s) that is going to be "target" of escaping
|
|
1115
|
+
or a list of characters for escaping.
|
|
1116
|
+
:type escape: String
|
|
1117
|
+
:param escape: The character to be used for escaping (typically '\').
|
|
1118
|
+
:rtype: String
|
|
1119
|
+
:return: The final string with the target character properly escaped.
|
|
1120
|
+
"""
|
|
1121
|
+
|
|
1122
|
+
if not isinstance(char, (list, tuple)):
|
|
1123
|
+
char = (char,)
|
|
1124
|
+
value = value.replace(escape, escape + escape)
|
|
1125
|
+
for _char in char:
|
|
1126
|
+
value = value.replace(_char, escape + _char)
|
|
1127
|
+
return value
|
|
1128
|
+
|
|
1129
|
+
|
|
1130
|
+
def unescape(value, escape="\\"):
|
|
1131
|
+
"""
|
|
1132
|
+
Unescapes the provided string value using the provided escape
|
|
1133
|
+
character as the reference for the unescape operation.
|
|
1134
|
+
|
|
1135
|
+
This is considered to be a very expensive operation and so it
|
|
1136
|
+
should be used carefully.
|
|
1137
|
+
|
|
1138
|
+
:type value: String
|
|
1139
|
+
:param value: The string value that is going to be unescaped.
|
|
1140
|
+
:rtype: String
|
|
1141
|
+
:return: The final unescaped value.
|
|
1142
|
+
"""
|
|
1143
|
+
|
|
1144
|
+
result = []
|
|
1145
|
+
iterator = iter(value)
|
|
1146
|
+
for char in iterator:
|
|
1147
|
+
if char == escape:
|
|
1148
|
+
try:
|
|
1149
|
+
result.append(next(iterator))
|
|
1150
|
+
except StopIteration:
|
|
1151
|
+
result.append(escape)
|
|
1152
|
+
else:
|
|
1153
|
+
result.append(char)
|
|
1154
|
+
return "".join(result)
|
|
1155
|
+
|
|
1156
|
+
|
|
1157
|
+
def count_unescape(value, sub, escape="\\"):
|
|
1158
|
+
"""
|
|
1159
|
+
Runs the sub string count operation on an escaped string
|
|
1160
|
+
so that it takes into account the escaped values avoiding
|
|
1161
|
+
them for the count operation.
|
|
1162
|
+
|
|
1163
|
+
:type value: String
|
|
1164
|
+
:param value: The base string value to have the number of
|
|
1165
|
+
occurrences of a sub string counted.
|
|
1166
|
+
:type sub: String
|
|
1167
|
+
:param sub: The sub string to be evaluated for occurrences,
|
|
1168
|
+
this value should be constrained to strings of single character.
|
|
1169
|
+
:type escape: String
|
|
1170
|
+
:param escape: The "special" escape character that will allow the
|
|
1171
|
+
sub string to also appear escaped in the value without being counted.
|
|
1172
|
+
:rtype: int
|
|
1173
|
+
:return: The final count of occurrences of the sub string
|
|
1174
|
+
taking into account the proper escaping of the string.
|
|
1175
|
+
"""
|
|
1176
|
+
|
|
1177
|
+
count = 0
|
|
1178
|
+
iterator = iter(value)
|
|
1179
|
+
for char in iterator:
|
|
1180
|
+
if char == escape:
|
|
1181
|
+
try:
|
|
1182
|
+
next(iterator)
|
|
1183
|
+
except StopIteration:
|
|
1184
|
+
pass
|
|
1185
|
+
elif char == sub:
|
|
1186
|
+
count += 1
|
|
1187
|
+
return count
|
|
1188
|
+
|
|
1189
|
+
|
|
1190
|
+
def split_unescape(value, delimiter=" ", max=-1, escape="\\", unescape=True):
|
|
1191
|
+
"""
|
|
1192
|
+
Splits the provided string around the delimiter character that
|
|
1193
|
+
has been provided and allows proper escaping of it using the
|
|
1194
|
+
provided escape character.
|
|
1195
|
+
|
|
1196
|
+
This is considered to be a very expensive operation when compared
|
|
1197
|
+
to the simple split operation and so it should be used carefully.
|
|
1198
|
+
|
|
1199
|
+
:type value: String
|
|
1200
|
+
:param value: The string value that is going to be split around
|
|
1201
|
+
the proper delimiter value taking into account the escaping.
|
|
1202
|
+
:type delimiter: String
|
|
1203
|
+
:param delimiter: The delimiter character to be used in the split
|
|
1204
|
+
operation.
|
|
1205
|
+
:type max: int
|
|
1206
|
+
:param max: The maximum number of split operations that are going
|
|
1207
|
+
to be performed by this operation.
|
|
1208
|
+
:type escape: String
|
|
1209
|
+
:param escape: The "special" escape character that will allow the
|
|
1210
|
+
delimiter to be also present inside the resulting split parts.
|
|
1211
|
+
:type unescape: bool
|
|
1212
|
+
:param unescape: If the final resulting string should be already
|
|
1213
|
+
unescaped (normalized).
|
|
1214
|
+
:rtype: List
|
|
1215
|
+
:return: The final list containing the multiple string parts separated
|
|
1216
|
+
by the delimiter character and respecting the escape sequences.
|
|
1217
|
+
"""
|
|
1218
|
+
|
|
1219
|
+
result = []
|
|
1220
|
+
current = []
|
|
1221
|
+
iterator = iter(value)
|
|
1222
|
+
count = 0
|
|
1223
|
+
for char in iterator:
|
|
1224
|
+
if char == escape:
|
|
1225
|
+
try:
|
|
1226
|
+
if not unescape:
|
|
1227
|
+
current.append(escape)
|
|
1228
|
+
current.append(next(iterator))
|
|
1229
|
+
except StopIteration:
|
|
1230
|
+
if unescape:
|
|
1231
|
+
current.append(escape)
|
|
1232
|
+
elif char == delimiter and not count == max:
|
|
1233
|
+
result.append("".join(current))
|
|
1234
|
+
current = []
|
|
1235
|
+
count += 1
|
|
1236
|
+
else:
|
|
1237
|
+
current.append(char)
|
|
1238
|
+
result.append("".join(current))
|
|
1239
|
+
return result
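# Editorial sketch, not part of the original appier source: a minimal usage
# example of split_unescape, assuming the function defined above is in scope;
# the helper name is illustrative only.
def _example_split_unescape():
    assert split_unescape("first second", " ") == ["first", "second"]
    # an escaped delimiter is kept inside the resulting part
    assert split_unescape("first\\ second", " ") == ["first second"]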
|
|
1240
|
+
|
|
1241
|
+
|
|
1242
|
+
def call_safe(callable, *args, **kwargs):
|
|
1243
|
+
"""
|
|
1244
|
+
Method used to call a callable object using a "safe" approach,
|
|
1245
|
+
meaning that each of its keyword arguments will be validated
|
|
1246
|
+
for existence in the target callable definition.
|
|
1247
|
+
|
|
1248
|
+
In case the validation of the keyword argument fails the same
|
|
1249
|
+
argument is removed from the map of keyword arguments.
|
|
1250
|
+
|
|
1251
|
+
Note that in case the wildcard based kwargs value exists in
|
|
1252
|
+
the callable definition the callable is immediately considered
|
|
1253
|
+
to be valid and the call is run.
|
|
1254
|
+
|
|
1255
|
+
:type callable: Callable
|
|
1256
|
+
:param callable: The callable that is going to have the keyword
|
|
1257
|
+
based arguments validated and then get called.
|
|
1258
|
+
:rtype: object
|
|
1259
|
+
:return: The resulting value from the safe call of the provided
|
|
1260
|
+
callable, this may have any data type.
|
|
1261
|
+
"""
|
|
1262
|
+
|
|
1263
|
+
# retrieves the arguments specification to the provided callable
|
|
1264
|
+
# and retrieves the various argument names and the existence or
|
|
1265
|
+
# not of the wildcard kwargs value in the callable and in case it
|
|
1266
|
+
# exists runs the callable call immediately
|
|
1267
|
+
argspec = legacy.getargspec(callable)
|
|
1268
|
+
method_args = argspec[0]
|
|
1269
|
+
method_kwargs = argspec[2]
|
|
1270
|
+
if method_kwargs:
|
|
1271
|
+
return callable(*args, **kwargs)
|
|
1272
|
+
|
|
1273
|
+
# iterates over the complete set of keyword based arguments to be
|
|
1274
|
+
# used in the call and validates them against the method specification
|
|
1275
|
+
# in case they do not exist in the specification deletes them from
|
|
1276
|
+
# the map of keyword based arguments (not going to be sent)
|
|
1277
|
+
for name in legacy.keys(kwargs):
|
|
1278
|
+
if name in method_args:
|
|
1279
|
+
continue
|
|
1280
|
+
del kwargs[name]
|
|
1281
|
+
|
|
1282
|
+
# runs the callable with the "remaining" arguments and keyword arguments
|
|
1283
|
+
# returning the value to the caller method
|
|
1284
|
+
return callable(*args, **kwargs)
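# Editorial sketch, not part of the original appier source: a minimal usage
# example of call_safe, assuming the function defined above is in scope; the
# helper name is illustrative only.
def _example_call_safe():
    greet = lambda name: "hello %s" % name
    # the extra keyword argument is dropped as it's not part of the signature
    assert call_safe(greet, name="world", extra=1) == "hello world"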
|
|
1285
|
+
|
|
1286
|
+
|
|
1287
|
+
def base_name(name, suffix="_controller"):
|
|
1288
|
+
"""
|
|
1289
|
+
Retrieves the base name of a class name that contains
|
|
1290
|
+
a suffix (eg: controller) the resulting value is the
|
|
1291
|
+
underscore version of the name without the suffix.
|
|
1292
|
+
|
|
1293
|
+
This method provides an easy way to expose class names
|
|
1294
|
+
in external environments.
|
|
1295
|
+
|
|
1296
|
+
:type name: String
|
|
1297
|
+
:param name: The name from which the base name will be
|
|
1298
|
+
extracted and treated.
|
|
1299
|
+
:type suffix: String
|
|
1300
|
+
:param suffix: The optional suffix value that if sent will
|
|
1301
|
+
be removed from the last part of the name string.
|
|
1302
|
+
:rtype: String
|
|
1303
|
+
:return: The resulting base name for the provided name, treated
|
|
1304
|
+
and with the suffix removed (in case it exists).
|
|
1305
|
+
"""
|
|
1306
|
+
|
|
1307
|
+
suffix_l = len(suffix)
|
|
1308
|
+
name = camel_to_underscore(name)
|
|
1309
|
+
if name.endswith(suffix):
|
|
1310
|
+
name = name[: suffix_l * -1]
|
|
1311
|
+
return name
|
|
1312
|
+
|
|
1313
|
+
|
|
1314
|
+
def base_name_m(name, suffixes=("_controller", "_part", "_app")):
|
|
1315
|
+
"""
|
|
1316
|
+
Multiple iteration version of the base name function that provides
|
|
1317
|
+
a simple strategy for the retrieval of a "base name" without the
|
|
1318
|
+
complete set of provided suffixes attached to the value.
|
|
1319
|
+
|
|
1320
|
+
:type name: String
|
|
1321
|
+
:param name: The name from which the base name will be
|
|
1322
|
+
extracted and treated, with multiple value strategy.
|
|
1323
|
+
:type suffixes: List/Tuple
|
|
1324
|
+
:param suffixes: The complete set of suffixes that are going
|
|
1325
|
+
to be removed from the provided value creating the base name.
|
|
1326
|
+
:rtype: String
|
|
1327
|
+
:return: The resulting base name for the provided name, treated
|
|
1328
|
+
and without the complete set of provided suffixes.
|
|
1329
|
+
"""
|
|
1330
|
+
|
|
1331
|
+
for suffix in suffixes:
|
|
1332
|
+
name = base_name(name, suffix=suffix)
|
|
1333
|
+
return name
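# Editorial sketch, not part of the original appier source: expected results
# of base_name and base_name_m assuming the usual camel case regexes defined
# earlier in this module; the helper name is illustrative only.
def _example_base_name():
    assert base_name("AccountController") == "account"
    assert base_name_m("AccountPart") == "account"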
|
|
1334
|
+
|
|
1335
|
+
|
|
1336
|
+
def is_content_type(data, target):
|
|
1337
|
+
"""
|
|
1338
|
+
Verifies if any of the provided mime types (target) is
|
|
1339
|
+
valid for the provided content type string.
|
|
1340
|
+
|
|
1341
|
+
:type data: String
|
|
1342
|
+
:param data: The content type string to be parsed and matched
|
|
1343
|
+
against the target mime type values.
|
|
1344
|
+
:type target: Tuple/String
|
|
1345
|
+
:param target: The tuple containing the multiple mime type values
|
|
1346
|
+
to be verified against the content type mime strings.
|
|
1347
|
+
:rtype: bool
|
|
1348
|
+
:return: If any of the provided mime types is considered valid
|
|
1349
|
+
for the content type.
|
|
1350
|
+
"""
|
|
1351
|
+
|
|
1352
|
+
if not isinstance(target, (list, tuple)):
|
|
1353
|
+
target = (target,)
|
|
1354
|
+
mime, _extra = parse_content_type(data)
|
|
1355
|
+
for item in target:
|
|
1356
|
+
type, _sub_type = item.split("/")
|
|
1357
|
+
wildcard = type + "/*"
|
|
1358
|
+
if item in mime:
|
|
1359
|
+
return True
|
|
1360
|
+
if wildcard in mime:
|
|
1361
|
+
return True
|
|
1362
|
+
return False
|
|
1363
|
+
|
|
1364
|
+
|
|
1365
|
+
def parse_content_type(data):
|
|
1366
|
+
"""
|
|
1367
|
+
Parses the provided content type string retrieving both the multiple
|
|
1368
|
+
mime types associated with the resource and the extra key to value
|
|
1369
|
+
items associated with the string in case they are defined (it's optional).
|
|
1370
|
+
|
|
1371
|
+
:type data: String
|
|
1372
|
+
:param data: The content type data that is going to be parsed to
|
|
1373
|
+
obtain the structure of values for the content type string, this must
|
|
1374
|
+
be a plain unicode string and not a binary string.
|
|
1375
|
+
:rtype: Tuple
|
|
1376
|
+
:return: The sequence of mime types of the content and the multiple
|
|
1377
|
+
extra values associated with the content type (eg: charset, boundary, etc.)
|
|
1378
|
+
"""
|
|
1379
|
+
|
|
1380
|
+
# creates the list of final normalized mime types and the
|
|
1381
|
+
# dictionary to store the extra values.
|
|
1382
|
+
types = []
|
|
1383
|
+
extra_m = dict()
|
|
1384
|
+
|
|
1385
|
+
# in case no valid type has been sent returns the values
|
|
1386
|
+
# immediately to avoid further problems
|
|
1387
|
+
if not data:
|
|
1388
|
+
return types, extra_m
|
|
1389
|
+
|
|
1390
|
+
# extracts the mime and the extra parts from the data string
|
|
1391
|
+
# they are the basis of the processing method
|
|
1392
|
+
data = data.strip(";")
|
|
1393
|
+
parts = data.split(";")
|
|
1394
|
+
mime = parts[0]
|
|
1395
|
+
extra = parts[1:]
|
|
1396
|
+
mime = mime.strip()
|
|
1397
|
+
|
|
1398
|
+
# runs a series of verifications on the base mime value and in
|
|
1399
|
+
# case it's not valid returns the default values immediately
|
|
1400
|
+
if not "/" in mime:
|
|
1401
|
+
return types, extra_m
|
|
1402
|
+
|
|
1403
|
+
# strips the complete set of valid extra values, note
|
|
1404
|
+
# that these values are going to be processed as key
|
|
1405
|
+
# to value items
|
|
1406
|
+
extra = [value.strip() for value in extra if extra]
|
|
1407
|
+
|
|
1408
|
+
# splits the complete mime type into its type and sub
|
|
1409
|
+
# type components (first step of normalization)
|
|
1410
|
+
type, sub_type = mime.split("/", 1)
|
|
1411
|
+
sub_types = sub_type.split("+")
|
|
1412
|
+
|
|
1413
|
+
# iterates over the complete set of sub types to
|
|
1414
|
+
# create the full mime type for each of them and
|
|
1415
|
+
# add the new full items to the types list (normalization)
|
|
1416
|
+
for sub_type in sub_types:
|
|
1417
|
+
types.append(type + "/" + sub_type)
|
|
1418
|
+
|
|
1419
|
+
# goes through all of the extra key to value items
|
|
1420
|
+
# and converts them into proper dictionary values
|
|
1421
|
+
for extra_item in extra:
|
|
1422
|
+
if not "=" in extra_item:
|
|
1423
|
+
continue
|
|
1424
|
+
extra_item = extra_item.strip()
|
|
1425
|
+
key, value = extra_item.split("=")
|
|
1426
|
+
extra_m[key] = value
|
|
1427
|
+
|
|
1428
|
+
# returns the final tuple containing both the normalized
|
|
1429
|
+
# mime types for the content and the extra key to value items
|
|
1430
|
+
return types, extra_m
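# Editorial sketch, not part of the original appier source: a minimal usage
# example of parse_content_type, assuming the function defined above is in
# scope; the helper name is illustrative only.
def _example_parse_content_type():
    types, extra = parse_content_type("application/json; charset=utf-8")
    assert types == ["application/json"]
    assert extra == {"charset": "utf-8"}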
|
|
1431
|
+
|
|
1432
|
+
|
|
1433
|
+
def parse_cookie(data):
|
|
1434
|
+
"""
|
|
1435
|
+
Parses/interprets the provided cookie data string, returning a
|
|
1436
|
+
map structure containing key to value associations of the various
|
|
1437
|
+
parts of the cookie.
|
|
1438
|
+
|
|
1439
|
+
In case no key value association exists for the cookie the value
|
|
1440
|
+
for such cookie (key) is stored as an empty string (unset).
|
|
1441
|
+
|
|
1442
|
+
:type data: String
|
|
1443
|
+
:param data: The cookie serialized data that is going to be parsed
|
|
1444
|
+
in order to create the final cookie dictionary/map.
|
|
1445
|
+
:rtype: Dictionary
|
|
1446
|
+
:return: The final map containing key the value association for the
|
|
1447
|
+
various parts of the provided cookie string.
|
|
1448
|
+
"""
|
|
1449
|
+
|
|
1450
|
+
# creates the dictionary that is going to hold the various cookie
|
|
1451
|
+
# key to value associations parsed from the "raw" data
|
|
1452
|
+
cookie_m = dict()
|
|
1453
|
+
|
|
1454
|
+
# splits the data information around the proper cookie separator
|
|
1455
|
+
# and then iterates over each of the cookies to set them in the
|
|
1456
|
+
# final cookie map (with the key to value associations)
|
|
1457
|
+
cookies = [cookie.strip() for cookie in data.split(";")]
|
|
1458
|
+
for cookie in cookies:
|
|
1459
|
+
if not "=" in cookie:
|
|
1460
|
+
cookie += "="
|
|
1461
|
+
name, value = cookie.split("=", 1)
|
|
1462
|
+
cookie_m[name] = value
|
|
1463
|
+
|
|
1464
|
+
# returns the final map of cookies to the caller method so that
|
|
1465
|
+
# proper and easy access is possible to the cookie
|
|
1466
|
+
return cookie_m
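# Editorial sketch, not part of the original appier source: a minimal usage
# example of parse_cookie, assuming the function defined above is in scope;
# the helper name is illustrative only.
def _example_parse_cookie():
    cookies = parse_cookie("sid=abc123; secure")
    # a cookie without a value is stored with an empty string
    assert cookies == {"sid": "abc123", "secure": ""}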
|
|
1467
|
+
|
|
1468
|
+
|
|
1469
|
+
def parse_multipart(data, boundary):
|
|
1470
|
+
"""
|
|
1471
|
+
Parses the provided data buffer as a set of multipart data,
|
|
1472
|
+
the content type is not verified inside this method.
|
|
1473
|
+
|
|
1474
|
+
The function returns a tuple containing a map of "basic"
|
|
1475
|
+
form parameters, a map containing the set of file tuples and
|
|
1476
|
+
a sequence containing the name and values tuples in order.
|
|
1477
|
+
|
|
1478
|
+
:type data: String
|
|
1479
|
+
:param data: The string containing the complete set of data
|
|
1480
|
+
that is going to be processed as multipart.
|
|
1481
|
+
:type boundary: String
|
|
1482
|
+
:param boundary: The string containing the basic boundary header
|
|
1483
|
+
value, should be provided from the caller function.
|
|
1484
|
+
:rtype: Tuple
|
|
1485
|
+
:return: A tuple containing both the map of post attributes,
|
|
1486
|
+
the map of file attributes and a list with the various name and
|
|
1487
|
+
value tuples (to be able to access ordered values).
|
|
1488
|
+
"""
|
|
1489
|
+
|
|
1490
|
+
ordered = []
|
|
1491
|
+
ordered_m = dict()
|
|
1492
|
+
post = dict()
|
|
1493
|
+
files = dict()
|
|
1494
|
+
|
|
1495
|
+
boundary = str(boundary)
|
|
1496
|
+
boundary = boundary.strip()
|
|
1497
|
+
boundary_base = "--" + boundary[9:].strip('"')
|
|
1498
|
+
boundary_value = legacy.bytes(boundary_base + "\r\n")
|
|
1499
|
+
boundary_extra = legacy.bytes(boundary_base + "--" + "\r\n")
|
|
1500
|
+
boundary_extra_l = len(boundary_extra)
|
|
1501
|
+
parts = data.split(boundary_value)
|
|
1502
|
+
parts[-1] = parts[-1][: boundary_extra_l * -1]
|
|
1503
|
+
|
|
1504
|
+
# iterates over the complete set of parts in the multi part payload
|
|
1505
|
+
# to process them and add them to the appropriate dictionary and list
|
|
1506
|
+
for part in parts:
|
|
1507
|
+
# in case the current part is not valid or empty skips the
|
|
1508
|
+
# current cycle (nothing to be done)
|
|
1509
|
+
if not part:
|
|
1510
|
+
continue
|
|
1511
|
+
|
|
1512
|
+
# splits the current part around the beginning of part sequence
|
|
1513
|
+
# and retrieves the proper contents if they exist
|
|
1514
|
+
part_s = part.split(b"\r\n\r\n", 1)
|
|
1515
|
+
headers = part_s[0]
|
|
1516
|
+
if len(part_s) > 1:
|
|
1517
|
+
contents = part_s[1]
|
|
1518
|
+
else:
|
|
1519
|
+
contents = None
|
|
1520
|
+
|
|
1521
|
+
# strips the current headers string and then splits it around
|
|
1522
|
+
# the various lines that define the various headers
|
|
1523
|
+
headers_data = headers.strip()
|
|
1524
|
+
headers_lines = headers_data.split(b"\r\n")
|
|
1525
|
+
|
|
1526
|
+
# creates the initial headers map of the headers that contains
|
|
1527
|
+
# the association between the byte based key and the data value
|
|
1528
|
+
# then retrieves the tuple of values and resets the map as it's
|
|
1529
|
+
# going to be changed and normalized with the new values
|
|
1530
|
+
headers = dict([line.split(b":", 1) for line in headers_lines])
|
|
1531
|
+
headers_t = legacy.eager(headers.items())
|
|
1532
|
+
headers.clear()
|
|
1533
|
+
|
|
1534
|
+
# runs the normalization process using the header tuples, this
|
|
1535
|
+
# should create a map of headers with the key as a normal string
|
|
1536
|
+
# and the values encoded as byte based strings (contain data)
|
|
1537
|
+
# note that the headers are defined
|
|
1538
|
+
for key, value in headers_t:
|
|
1539
|
+
key = legacy.str(key).lower()
|
|
1540
|
+
value = value.strip()
|
|
1541
|
+
headers[key] = value
|
|
1542
|
+
|
|
1543
|
+
# tries to retrieve the content disposition header for the current
|
|
1544
|
+
# part and in case there's none it's not possible to process the
|
|
1545
|
+
# current part (this header is considered required)
|
|
1546
|
+
disposition = headers.get("content-disposition", None)
|
|
1547
|
+
if not disposition:
|
|
1548
|
+
continue
|
|
1549
|
+
|
|
1550
|
+
# creates the dictionary that will hold the various parts of the
|
|
1551
|
+
# content disposition header that are going to be extracted for
|
|
1552
|
+
# later processing, this is required to make some decisions on
|
|
1553
|
+
# the type of part that is currently being processed
|
|
1554
|
+
parts = dict()
|
|
1555
|
+
parts_data = disposition.split(b";")
|
|
1556
|
+
for value in parts_data:
|
|
1557
|
+
value_s = value.split(b"=", 1)
|
|
1558
|
+
key = legacy.str(value_s[0]).strip().lower()
|
|
1559
|
+
if len(value_s) > 1:
|
|
1560
|
+
value = value_s[1].strip()
|
|
1561
|
+
else:
|
|
1562
|
+
value = None
|
|
1563
|
+
parts[key] = value
|
|
1564
|
+
|
|
1565
|
+
# retrieves the various characteristics values from the headers
|
|
1566
|
+
# and from the content disposition of the current part, these
|
|
1567
|
+
# values are going to be used to decide on whether the current
|
|
1568
|
+
# part is a file or a normal key value attribute
|
|
1569
|
+
content_type = headers.get("content-type", None)
|
|
1570
|
+
name = parts.get("name", b'"undefined"').strip(b'"')
|
|
1571
|
+
filename = parts.get("filename", b"").strip(b'"')
|
|
1572
|
+
|
|
1573
|
+
# decodes the various content disposition values into an unicode
|
|
1574
|
+
# based string so that it may later be used safely inside the
|
|
1575
|
+
# application environment (as expected by the current structure)
|
|
1576
|
+
if content_type:
|
|
1577
|
+
content_type = content_type.decode("utf-8")
|
|
1578
|
+
name = name.decode("utf-8")
|
|
1579
|
+
filename = filename.decode("utf-8")
|
|
1580
|
+
|
|
1581
|
+
# in case the currently discovered contents are valid they
|
|
1582
|
+
# must be stripped from the last two bytes so that the real
|
|
1583
|
+
# value is retrieved from the provided contents
|
|
1584
|
+
contents = contents if contents == None else contents[:-2]
|
|
1585
|
+
|
|
1586
|
+
# verifies if the file name is included in the parts unpacked
|
|
1587
|
+
# from the content type in case it does this is considered to be
|
|
1588
|
+
# file part otherwise it's a normal key value part
|
|
1589
|
+
if "filename" in parts:
|
|
1590
|
+
is_file = True
|
|
1591
|
+
else:
|
|
1592
|
+
is_file = False
|
|
1593
|
+
|
|
1594
|
+
if is_file:
|
|
1595
|
+
target = files
|
|
1596
|
+
file_tuple = (filename, content_type, contents)
|
|
1597
|
+
value = FileTuple(file_tuple)
|
|
1598
|
+
else:
|
|
1599
|
+
target = post
|
|
1600
|
+
value = contents if contents == None else contents.decode("utf-8")
|
|
1601
|
+
|
|
1602
|
+
exists = name in ordered_m
|
|
1603
|
+
|
|
1604
|
+
sequence = target.get(name, [])
|
|
1605
|
+
sequence.append(value)
|
|
1606
|
+
target[name] = sequence
|
|
1607
|
+
|
|
1608
|
+
sequence_o = ordered_m.get(name, [])
|
|
1609
|
+
sequence_o.append(value)
|
|
1610
|
+
ordered_m[name] = sequence_o
|
|
1611
|
+
|
|
1612
|
+
if exists:
|
|
1613
|
+
continue
|
|
1614
|
+
|
|
1615
|
+
tuple_s = (name, sequence_o)
|
|
1616
|
+
ordered.append(tuple_s)
|
|
1617
|
+
|
|
1618
|
+
return (post, files, ordered)
|
|
1619
|
+
|
|
1620
|
+
|
|
1621
|
+
def decode_params(params):
|
|
1622
|
+
"""
|
|
1623
|
+
Decodes the complete set of parameters defined in the
|
|
1624
|
+
provided map so that all of the keys and values are created
|
|
1625
|
+
as unicode strings instead of UTF-8 based strings.
|
|
1626
|
+
|
|
1627
|
+
This method's execution is mandatory on the retrieval of
|
|
1628
|
+
the parameters from the sent data.
|
|
1629
|
+
|
|
1630
|
+
:type params: Dictionary
|
|
1631
|
+
:param params: The map containing the encoded set of values
|
|
1632
|
+
that are going to be decoded from the UTF-8 form.
|
|
1633
|
+
:rtype: Dictionary
|
|
1634
|
+
:return: The decoded map meaning that all the keys and values
|
|
1635
|
+
are in the unicode form instead of the string form.
|
|
1636
|
+
"""
|
|
1637
|
+
|
|
1638
|
+
# creates the dictionary that will hold the processed/decoded
|
|
1639
|
+
# sequences of parameters created from the provided (and original)
|
|
1640
|
+
# map of encoded parameters (raw values)
|
|
1641
|
+
_params = dict()
|
|
1642
|
+
|
|
1643
|
+
for key, value in params.items():
|
|
1644
|
+
items = []
|
|
1645
|
+
for item in value:
|
|
1646
|
+
is_bytes = legacy.is_bytes(item)
|
|
1647
|
+
if is_bytes:
|
|
1648
|
+
item = item.decode("utf-8")
|
|
1649
|
+
items.append(item)
|
|
1650
|
+
is_bytes = legacy.is_bytes(key)
|
|
1651
|
+
if is_bytes:
|
|
1652
|
+
key = key.decode("utf-8")
|
|
1653
|
+
_params[key] = items
|
|
1654
|
+
|
|
1655
|
+
return _params
|
|
1656
|
+
|
|
1657
|
+
|
|
1658
|
+
def load_form(form):
|
|
1659
|
+
# creates the map that is going to hold the "structured"
|
|
1660
|
+
# version of the form with key value associations
|
|
1661
|
+
form_s = dict()
|
|
1662
|
+
|
|
1663
|
+
# iterates over all the form items to parse their values
|
|
1664
|
+
# and populate the form structured version of it, note that
|
|
1665
|
+
# for the sake of parsing the order of the elements in the
|
|
1666
|
+
# form is relevant, in case there's multiple values for the
|
|
1667
|
+
# same name they are considered as a list, otherwise they are
|
|
1668
|
+
# considered as a single value
|
|
1669
|
+
for name in form:
|
|
1670
|
+
# retrieves the value (as a list) for the current name, then
|
|
1671
|
+
# in case the sequence is larger than one element sets it,
|
|
1672
|
+
# otherwise retrieves and sets the value as the first element
|
|
1673
|
+
value = form[name]
|
|
1674
|
+
value = (
|
|
1675
|
+
value[0] if isinstance(value, (list, tuple)) and len(value) == 1 else value
|
|
1676
|
+
)
|
|
1677
|
+
|
|
1678
|
+
# splits the complete name into its various components
|
|
1679
|
+
# and retrieves both the final (last) element and the
|
|
1680
|
+
# various partial elements from it
|
|
1681
|
+
names = name.split(".")
|
|
1682
|
+
final = names[-1]
|
|
1683
|
+
partials = names[:-1]
|
|
1684
|
+
|
|
1685
|
+
# sets the initial "struct" reference as the form structured
|
|
1686
|
+
# that has just been created (initial structure for iteration)
|
|
1687
|
+
# then starts the iteration to retrieve or create the various
|
|
1688
|
+
# intermediate structures
|
|
1689
|
+
struct = form_s
|
|
1690
|
+
for _name in partials:
|
|
1691
|
+
_struct = struct.get(_name, {})
|
|
1692
|
+
struct[_name] = _struct
|
|
1693
|
+
struct = _struct
|
|
1694
|
+
|
|
1695
|
+
# sets the current value in the currently loaded "struct" element
|
|
1696
|
+
# so that the reference gets properly updated
|
|
1697
|
+
struct[final] = value
|
|
1698
|
+
|
|
1699
|
+
# retrieves the final "normalized" form structure containing
|
|
1700
|
+
# a series of chained maps resulting from the parsing of the
|
|
1701
|
+
# linear version of the attribute names
|
|
1702
|
+
return form_s
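# Editorial sketch, not part of the original appier source: a minimal usage
# example of load_form, assuming the function defined above is in scope; the
# helper name is illustrative only.
def _example_load_form():
    form = load_form({"person.name": ["John"], "person.age": ["42"]})
    assert form == {"person": {"name": "John", "age": "42"}}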
|
|
1703
|
+
|
|
1704
|
+
|
|
1705
|
+
def check_login(self, token=None, request=None):
|
|
1706
|
+
# tries to retrieve the request from the current context
|
|
1707
|
+
# in case it has not been passed through other manner
|
|
1708
|
+
request = request or (self.request if self else None)
|
|
1709
|
+
|
|
1710
|
+
# retrieves the data type of the token and creates the
|
|
1711
|
+
# tokens sequence value taking into account its type
|
|
1712
|
+
if isinstance(token, SEQUENCE_TYPES):
|
|
1713
|
+
tokens = token
|
|
1714
|
+
else:
|
|
1715
|
+
tokens = (token,)
|
|
1716
|
+
|
|
1717
|
+
# in case the username value is set in session and there's
|
|
1718
|
+
# no token to be validated returns valid and in case the checking
|
|
1719
|
+
# of the complete set of tokens is valid also returns valid
|
|
1720
|
+
if check_user(self, request=request) and not token:
|
|
1721
|
+
return True
|
|
1722
|
+
if check_tokens(self, tokens, request=request):
|
|
1723
|
+
return True
|
|
1724
|
+
|
|
1725
|
+
# returns the default value as invalid because if all the
|
|
1726
|
+
# validation procedures have failed the check is invalid
|
|
1727
|
+
return False
|
|
1728
|
+
|
|
1729
|
+
|
|
1730
|
+
def check_user(self, request=None):
|
|
1731
|
+
# tries to retrieve the reference to the current request
|
|
1732
|
+
# either from the provided arguments or from the current context
|
|
1733
|
+
request = request or (self.request if self else None)
|
|
1734
|
+
|
|
1735
|
+
# runs the multiple verification strategies available and
|
|
1736
|
+
# in case at least one of them succeeds the user is considered
|
|
1737
|
+
# to be currently authenticated
|
|
1738
|
+
if request and "username" in request.session:
|
|
1739
|
+
return True
|
|
1740
|
+
if request and hasattr(request, "tokens_p"):
|
|
1741
|
+
return True
|
|
1742
|
+
|
|
1743
|
+
# by default the user is considered to be not authenticated, all
|
|
1744
|
+
# of the tests for authentication have failed
|
|
1745
|
+
return False
|
|
1746
|
+
|
|
1747
|
+
|
|
1748
|
+
def check_token(self, token, tokens_m=None, request=None):
|
|
1749
|
+
# in case the provided token is invalid or empty the method
|
|
1750
|
+
# returns immediately in success (simple validation)
|
|
1751
|
+
if not token:
|
|
1752
|
+
return True
|
|
1753
|
+
|
|
1754
|
+
# tries to retrieve the tokens map from the provided argument
|
|
1755
|
+
# defaulting to the session one in case none is provided
|
|
1756
|
+
if tokens_m == None:
|
|
1757
|
+
tokens_m = get_tokens_m(self, request=request)
|
|
1758
|
+
|
|
1759
|
+
# splits the provided token string into its parts, note that
|
|
1760
|
+
# a namespace is defined around the dot character
|
|
1761
|
+
token_l = token.split(".")
|
|
1762
|
+
|
|
1763
|
+
# iterates over the complete set of parts in the token list
|
|
1764
|
+
# of parts to validate the complete chain of values against
|
|
1765
|
+
# the map of token parts (namespace validation)
|
|
1766
|
+
for token_p in token_l:
|
|
1767
|
+
if not isinstance(tokens_m, dict):
|
|
1768
|
+
return False
|
|
1769
|
+
if "*" in tokens_m and tokens_m["*"] == True:
|
|
1770
|
+
return True
|
|
1771
|
+
if not token_p in tokens_m:
|
|
1772
|
+
return False
|
|
1773
|
+
tokens_m = tokens_m[token_p]
|
|
1774
|
+
|
|
1775
|
+
# determines if the final tokens map value is a dictionary
|
|
1776
|
+
# and "selects" the proper validation result accordingly
|
|
1777
|
+
is_dict = isinstance(tokens_m, dict)
|
|
1778
|
+
result = tokens_m.get("_", False) if is_dict else tokens_m
|
|
1779
|
+
|
|
1780
|
+
# verifies if the "final" result value is valid and returns
|
|
1781
|
+
# the final validation result accordingly
|
|
1782
|
+
return True if result == True else False
|
|
1783
|
+
|
|
1784
|
+
|
|
1785
|
+
def check_tokens(self, tokens, tokens_m=None, request=None):
|
|
1786
|
+
# iterates over the complete set of tokens that are going
|
|
1787
|
+
# to be validated against the current context and if any of
|
|
1788
|
+
# them fails an invalid result is returned otherwise a valid
|
|
1789
|
+
# result is returned (indicating that all is valid)
|
|
1790
|
+
for token in tokens:
|
|
1791
|
+
if not check_token(self, token, tokens_m=tokens_m, request=request):
|
|
1792
|
+
return False
|
|
1793
|
+
return True
|
|
1794
|
+
|
|
1795
|
+
|
|
1796
|
+
def ensure_login(self, token=None, context=None, request=None):
|
|
1797
|
+
request = request or (self.request if self else None)
|
|
1798
|
+
is_auth = check_user(self, request=request)
|
|
1799
|
+
if not is_auth:
|
|
1800
|
+
raise exceptions.AppierException(
|
|
1801
|
+
message="User not authenticated", code=403, token=token, context=context
|
|
1802
|
+
)
|
|
1803
|
+
if check_token(self, token, request=request):
|
|
1804
|
+
return
|
|
1805
|
+
raise exceptions.AppierException(
|
|
1806
|
+
message="Not enough permissions", code=403, token=token, context=context
|
|
1807
|
+
)
|
|
1808
|
+
|
|
1809
|
+
|
|
1810
|
+
def get_tokens_m(self, request=None, set=None):
|
|
1811
|
+
"""
|
|
1812
|
+
Retrieves the map of tokens from the current session so that
|
|
1813
|
+
they can be used for proper ACL validation.
|
|
1814
|
+
|
|
1815
|
+
In case the current session contains a sequence based representation
|
|
1816
|
+
of the tokens they are converted to their equivalent map value.
|
|
1817
|
+
|
|
1818
|
+
:type request: Request
|
|
1819
|
+
:param request: The request that is going to be used to access
|
|
1820
|
+
the session information, if any.
|
|
1821
|
+
:type set: bool
|
|
1822
|
+
:param set: If the possibly converted tokens list should be persisted
|
|
1823
|
+
into the current session, sparing some CPU cycles on next execution,
|
|
1824
|
+
in case no value is provided a default value is applied taking into
|
|
1825
|
+
account the current execution context.
|
|
1826
|
+
:rtype: Dictionary
|
|
1827
|
+
:return: The map of tokens to be used for ACL validation.
|
|
1828
|
+
"""
|
|
1829
|
+
|
|
1830
|
+
# tries to retrieve the request from the current context
|
|
1831
|
+
# in case it has not been passed through other manner, if
|
|
1832
|
+
# no valid context is found returns invalid value immediately
|
|
1833
|
+
request = request or (self.request if self else None)
|
|
1834
|
+
if not request:
|
|
1835
|
+
return dict()
|
|
1836
|
+
|
|
1837
|
+
# verifies if the set flag is set and if that's not the case
|
|
1838
|
+
# ensures proper default value taking into account if there's
|
|
1839
|
+
# a token "provider method" defined or not
|
|
1840
|
+
if set == None:
|
|
1841
|
+
set = False if hasattr(request, "tokens_p") else True
|
|
1842
|
+
|
|
1843
|
+
# tries to retrieve the "provider method "for the tokens under the
|
|
1844
|
+
# current request and in case it's not available uses the default
|
|
1845
|
+
# one (simple session access)
|
|
1846
|
+
try:
|
|
1847
|
+
if hasattr(request, "tokens_p"):
|
|
1848
|
+
tokens_m = request.tokens_p()
|
|
1849
|
+
else:
|
|
1850
|
+
tokens_m = request.session.get("tokens", {})
|
|
1851
|
+
except Exception:
|
|
1852
|
+
return dict()
|
|
1853
|
+
|
|
1854
|
+
# verifies if the resulting value is either a map or a sequence,
|
|
1855
|
+
# going to be used for decisions on normalization
|
|
1856
|
+
is_map = isinstance(tokens_m, dict)
|
|
1857
|
+
is_sequence = isinstance(tokens_m, (list, tuple))
|
|
1858
|
+
|
|
1859
|
+
# if the tokens value is already a map then an immediate return
|
|
1860
|
+
# is going to be performed (it is a valid tokens map)
|
|
1861
|
+
if is_map:
|
|
1862
|
+
return tokens_m
|
|
1863
|
+
|
|
1864
|
+
# in case the value present in the tokens value is a sequence
|
|
1865
|
+
# it must be properly converted into the equivalent map value
|
|
1866
|
+
if is_sequence:
|
|
1867
|
+
# converts the tokens sequence into a map version of it
|
|
1868
|
+
# so that proper structured verification is possible
|
|
1869
|
+
tokens_m = to_tokens_m(tokens_m)
|
|
1870
|
+
|
|
1871
|
+
# in case the set flag is set the tokens map should
|
|
1872
|
+
# be set in the request session (may be dangerous)
|
|
1873
|
+
# and then returns the tokens map to the caller method
|
|
1874
|
+
if set:
|
|
1875
|
+
request.session["tokens"] = tokens_m
|
|
1876
|
+
return tokens_m
|
|
1877
|
+
|
|
1878
|
+
# returns the "default" empty tokens map as it was not possible
|
|
1879
|
+
# to retrieve any information regarding tokens from the
|
|
1880
|
+
# current context and environment
|
|
1881
|
+
return dict()
|
|
1882
|
+
|
|
1883
|
+

def to_tokens_m(tokens):
    # creates a new map to be used to store the tokens map that is
    # going to be created from the list/sequence version
    tokens_m = dict()

    # iterates over the complete set of tokens in the
    # sequence to properly add their namespace parts
    # to the tokens map (as specified)
    for token in tokens:
        tokens_c = tokens_m
        token_l = token.split(".")
        head, tail = token_l[:-1], token_l[-1]

        for token_p in head:
            current = tokens_c.get(token_p, {})
            is_dict = isinstance(current, dict)
            if not is_dict:
                current = {"_": current}
            tokens_c[token_p] = current
            tokens_c = current

        leaf = tokens_c.get(tail, None)
        if leaf and isinstance(leaf, dict):
            leaf["_"] = True
        else:
            tokens_c[tail] = True

    # returns the final map version of the tokens to the caller
    # method so that it may be used for structure verification
    return tokens_m
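
# Minimal usage sketch of the conversion above (values and the helper name
# are illustrative only): a flat sequence of dotted ACL tokens becomes the
# nested map structure used for verification.
def _example_to_tokens_m():
    tokens = ["admin", "user.read", "user.write"]
    tokens_m = to_tokens_m(tokens)
    # tokens_m == {"admin": True, "user": {"read": True, "write": True}}
    return tokens_m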

def dict_merge(first, second, override=True, recursive=False, callback=None):
    """
    Merges two dictionaries, optionally using a deep (recursive)
    strategy to achieve the merge.

    The default "way" of the merge is from the second to the first
    and overriding the values of the first dictionary.

    :type first: Dictionary
    :param first: The target dictionary of the merge operation and
    that will have its contents overridden if requested.
    :type second: Dictionary
    :param second: The base dictionary of the merge that will be
    "copied" into the first one.
    :type override: bool
    :param override: If the contents of the first dictionary should
    be overridden (overwritten) in case of "collision".
    :type recursive: bool
    :param recursive: If the merge operation should be performed using
    a deep and recursive approach for dictionary types.
    :type callback: Function
    :param callback: Optional function to be called in case there's
    a conflicting value for the same key with both the first and second
    values to be merged, allowing control over merge operations, this
    is only used in case of a recursive approach.
    :rtype: Dictionary
    :return: The resulting dictionary (new instance) from the merge
    operation of the second dictionary into the first.
    """

    # in case no override exists then the order of the items is
    # exchanged so that the first overrides the second values
    # and not the exact opposite
    if not override:
        first, second = second, first

    # in case the recursive flag is set, must iterate over all
    # of the first items to try to merge any possible dictionary
    # value using a recursive strategy
    if recursive:
        # creates the dictionary that is going to store the final
        # merged value resulting from both dictionaries
        final = dict()

        # runs the main iteration cycles around the first dictionary
        # trying to find possible conflicts that would require a
        # smarter merge strategy
        for key, value in legacy.iteritems(first):
            # in case the current key is not present in the second
            # dictionary (there's no conflict) and so a simple set
            # strategy should be applied
            if not key in second:
                final[key] = value
                continue

            # grabs the other (second) value that is going to be used
            # as the basis for the merge operation
            other = second[key]

            # in case a callback is defined calls it to determine the
            # final merged value from both the original and the other
            if callback:
                final[key] = callback(value, other)

            # if it represents a dictionary (smart merge) then both
            # values are going to be merged recursively
            elif isinstance(value, dict) and isinstance(other, dict):
                if not override:
                    value, other = other, value
                final[key] = dict_merge(
                    value, other, override=override, recursive=recursive
                )

            # otherwise the previous value is simply replaced with
            # the other value (fallback operation), this is considered
            # to be a non smart merge operation
            else:
                final[key] = other

        # runs the final iteration cycles around the second dictionary
        # values to try to set the unique second values in the final
        for key, value in legacy.iteritems(second):
            if key in final:
                continue
            final[key] = value

        # returns the final merged result to the caller method, this
        # result should contain all of its dictionary values properly
        # merged within both the first and second values
        return final

    # otherwise uses a simple strategy, creating a new dictionary
    # for the first value, then updating it with the second set of
    # dictionary values, returning then the newly created dictionary
    # to the caller method (basic update strategy)
    else:
        final = dict(first)
        final.update(second)
        return final
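
# A small sketch of the recursive merge semantics documented above (values
# and the helper name are illustrative): nested dictionaries are merged key
# by key, the second dictionary winning on conflicts while override is set.
def _example_dict_merge():
    first = {"logging": {"level": "info"}, "debug": False}
    second = {"logging": {"file": "out.log"}, "debug": True}
    merged = dict_merge(first, second, recursive=True)
    # merged == {"logging": {"level": "info", "file": "out.log"}, "debug": True}
    return merged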

def deprecated(message="Function %s is now deprecated"):
    """
    Decorator that marks a certain function or method as
    deprecated so that whenever such function is called
    an output message warns the developer about the
    deprecation (incentive).

    :type message: String
    :param message: The message template to be used in the
    output operation of the error.
    :rtype: Decorator
    :return: The decorator that should be used to wrap a
    function and mark it as deprecated (send warning).
    """

    def decorator(function):
        name = function.__name__ if hasattr(function, "__name__") else None

        @functools.wraps(function)
        def interceptor(*args, **kwargs):
            warnings.simplefilter("always", DeprecationWarning)
            warnings.warn(message % name, category=DeprecationWarning, stacklevel=2)
            warnings.simplefilter("default", DeprecationWarning)
            return function(*args, **kwargs)

        return interceptor

    return decorator
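
# Usage sketch for the decorator above (the wrapped function is illustrative):
# calling the wrapped function emits a DeprecationWarning built from the
# message template and the original function name.
@deprecated(message="Function %s is deprecated, use the new API instead")
def _old_sum_example(first, second):
    return first + second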

def cached(function):
    """
    Decorator that marks a certain function as cached meaning that
    the local context of the instance associated with the function
    (method) is going to be used to store the result and further
    requests to the function will use the cached result, resulting
    in an improved resolution time.

    The life-cycle of the context is critical to avoid issues with
    invalid cache invalidation.

    :rtype: Decorator
    :return: The decorator that should be used to wrap a function
    marking it as ready to cache its return value on the current context.
    """

    name = function.__name__

    @functools.wraps(function)
    def _cached(self, *args, **kwargs):
        # tries to retrieve the current execution context, most
        # of the times this should be a request object for the
        # current temporary execution life-cycle
        context = get_context(self)

        # retrieves the properties map (if possible) and then
        # verifies the existence or not of the name in such map
        # returning the value immediately if it's cached
        properties = context.properties if context else None
        exists = name in properties if properties else False
        if exists:
            return properties[name]

        # as no cache retrieval was possible executes the function
        # operation and caches the resulting value into the properties
        # map (in case it exists)
        value = function(self, *args, **kwargs)
        if not properties == None:
            properties[name] = value
        return value

    return _cached
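
# Usage sketch for the caching decorator above, assuming a controller or
# model instance whose execution context exposes a properties map (the
# controller and method names below are illustrative, not part of appier):
#
#     class AccountController(appier.Controller):
#         @cached
#         def plan(self):
#             return self.owner.get_plan()
#
# Repeated calls to plan() within the same request reuse the first result.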

def private(function):
    @functools.wraps(function)
    def _private(self, *args, **kwargs):
        ensure = kwargs.get("ensure", True)
        request = kwargs.get("request", self.request)
        if ensure:
            ensure_login(self, request=request)
        sanitize(function, kwargs)
        return function(self, *args, **kwargs)

    return _private


def ensure(token=None, context=None):
    def decorator(function):
        @functools.wraps(function)
        def interceptor(self, *args, **kwargs):
            ensure = kwargs.get("ensure", True)
            request = kwargs.get("request", self.request)
            if ensure:
                ensure_login(self, token=token, context=context, request=request)
            sanitize(function, kwargs)
            return function(self, *args, **kwargs)

        return interceptor

    return decorator


def delayed(function):
    @functools.wraps(function)
    def _delayed(self, *args, **kwargs):
        _args = [self] + list(args)
        return self.owner.delay(function, _args, kwargs)

    return _delayed
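
# Usage sketch for the access control decorators above, assuming controller
# methods bound to an appier application (names below are illustrative):
#
#     class AdminController(appier.Controller):
#         @private
#         def dashboard(self):
#             ...
#
#         @ensure(token="admin")
#         def settings(self):
#             ...
#
# Both decorators run ensure_login() before the handler, the second one also
# requiring the "admin" ACL token to be available for the current session.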

def route(url, method="GET", asynchronous=False, json=False, opts=None, priority=1):
    def decorator(function, *args, **kwargs):
        if is_detached(function):
            delay(function, *args, **kwargs)
        else:
            common.base().App.add_route(
                method,
                url,
                function,
                asynchronous=asynchronous,
                json=json,
                opts=opts,
                priority=priority,
            )
        return function

    def delay(function, *args, **kwargs):
        global CREATION_COUNTER
        route = (url, method, asynchronous, json, opts, priority)
        if not hasattr(function, "_routes"):
            function._routes = []
        function._routes.append(route)
        function.creation_counter = CREATION_COUNTER
        CREATION_COUNTER += 1

    return decorator
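
# Usage sketch for the route decorator above (URL and handler name are
# illustrative): detached methods (first argument named "self") are queued
# in _routes and registered later when the owning class is bound to the app.
#
#     @route("/api/ping", method="GET", json=True)
#     def ping(self):
#         return dict(result="pong")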

def error_handler(code, scope=None, json=None, opts=None, priority=1):
    def decorator(function, *args, **kwargs):
        if is_detached(function):
            delay(function, *args, **kwargs)
        else:
            common.base().App.add_error(
                code, function, scope=scope, json=json, opts=opts, priority=priority
            )
        return function

    def delay(function, *args, **kwargs):
        global CREATION_COUNTER
        error = (code, scope, json, opts, priority)
        if not hasattr(function, "_errors"):
            function._errors = []
        function._errors.append(error)
        function.creation_counter = CREATION_COUNTER
        CREATION_COUNTER += 1
        return function

    return decorator


def exception_handler(exception, scope=None, json=None, opts=None, priority=1):
    def decorator(function, *args, **kwargs):
        if is_detached(function):
            delay(function, *args, **kwargs)
        else:
            common.base().App.add_exception(
                exception,
                function,
                scope=scope,
                json=json,
                opts=opts,
                priority=priority,
            )
        return function

    def delay(function, *args, **kwargs):
        global CREATION_COUNTER
        _exception = (exception, scope, json, opts, priority)
        if not hasattr(function, "_exceptions"):
            function._exceptions = []
        function._exceptions.append(_exception)
        function.creation_counter = CREATION_COUNTER
        CREATION_COUNTER += 1
        return function

    return decorator


def before_request(scope="all", opts=None, priority=1):
    def decorator(function, *args, **kwargs):
        if is_detached(function):
            delay(function, *args, **kwargs)
        else:
            common.base().App.add_custom(
                "before_request", function, opts=opts, priority=priority
            )
        return function

    def delay(function, *args, **kwargs):
        global CREATION_COUNTER
        _custom = ("before_request", opts, priority)
        if not hasattr(function, "_customs"):
            function._customs = []
        function._customs.append(_custom)
        function.creation_counter = CREATION_COUNTER
        CREATION_COUNTER += 1
        return function

    return decorator


def after_request(scope="all", opts=None, priority=1):
    def decorator(function, *args, **kwargs):
        if is_detached(function):
            delay(function, *args, **kwargs)
        else:
            common.base().App.add_custom(
                "after_request", function, opts=opts, priority=priority
            )
        return function

    def delay(function, *args, **kwargs):
        global CREATION_COUNTER
        _custom = ("after_request", opts, priority)
        if not hasattr(function, "_customs"):
            function._customs = []
        function._customs.append(_custom)
        function.creation_counter = CREATION_COUNTER
        CREATION_COUNTER += 1
        return function

    return decorator
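
# Usage sketch for the handler decorators above (handler names and bodies
# are illustrative, the exact handler signature depends on the registering
# application):
#
#     @error_handler(404, json=True)
#     def not_found(self, error):
#         return dict(error="not found")
#
#     @exception_handler(appier.OperationalError)
#     def on_operational(self, exception):
#         return dict(error=exception.message)
#
# As with route(), detached methods are queued (in _errors/_exceptions) and
# registered once their owner class is attached to the application.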

def is_detached(function):
    """
    Verifies if the provided function value is considered to be
    a detached method from a class, this is valid for situations
    where the type of the value is a function and there's a reference
    to the parent class of definition.

    This method is not completely safe as it relies on the fact that
    by convention the first argument of a "future" method is the "self"
    one, meaning that a "normal function" would be detected as a
    method if the first argument of it is named self.

    :type function: Function
    :param function: The function value that is going to be evaluated
    for the presence of a detached method.
    :rtype: bool
    :return: If the provided function value refers to a detached method
    of a certain class.
    """

    # verifies if the provided value is a valid function type
    # and in case it's not, it's considered not to be a detached method
    is_function = isinstance(function, types.FunctionType)
    if not is_function:
        return False

    # retrieves the function's specification (should include arguments)
    # and then verifies that they are valid and that at least one valid
    # argument exists for the specification (as required by methods)
    spec = legacy.getargspec(function)
    if not spec:
        return False
    if not spec.args:
        return False

    # verifies that the name of the first argument of the function is
    # the instance one, if that's the case this should be a detached method
    # that is currently being identified as a function
    return spec.args[0] == "self"


def sanitize(function, kwargs):
    removal = []
    method_a = legacy.getargspec(function)[0]
    for name in kwargs:
        if name in method_a:
            continue
        removal.append(name)
    for name in removal:
        del kwargs[name]
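
# Small sketch of the detection heuristic above: a plain function whose
# first argument is named "self" is treated as a detached method (the
# helper and inner function names are illustrative).
def _example_is_detached():
    def handler(self, value):
        return value

    def helper(value):
        return value

    return is_detached(handler), is_detached(helper)  # (True, False)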

def verify(condition, message=None, code=None, exception=None, **kwargs):
    if condition:
        return
    exception = exception or exceptions.AssertionError
    kwargs = dict(kwargs)
    if not message == None:
        kwargs["message"] = message
    if not code == None:
        kwargs["code"] = code
    raise exception(**kwargs)


def verify_equal(first, second, message=None, code=None, exception=None, **kwargs):
    message = message or "Expected %s got %s" % (repr(second), repr(first))
    return verify(
        first == second, message=message, code=code, exception=exception, **kwargs
    )


def verify_not_equal(first, second, message=None, code=None, exception=None, **kwargs):
    message = message or "Expected %s not equal to %s" % (repr(first), repr(second))
    return verify(
        not first == second, message=message, code=code, exception=exception, **kwargs
    )


def verify_type(
    value, types, null=True, message=None, code=None, exception=None, **kwargs
):
    message = message or "Expected %s to have type %s" % (repr(value), repr(types))
    return verify(
        (null and value == None) or isinstance(value, types),
        message=message,
        code=code,
        exception=exception,
        **kwargs
    )


def verify_many(sequence, message=None, code=None, exception=None, **kwargs):
    for condition in sequence:
        verify(condition, message=message, code=code, exception=exception, **kwargs)
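
# Usage sketch for the verification helpers above (the helper name and the
# error values are illustrative): failed conditions raise an appier
# AssertionError, or the provided exception, carrying the message and code.
def _example_verify(age):
    verify_type(age, int, null=False)
    verify(age >= 18, message="Underage account", code=403)
    return True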

def execute(args, command=None, path=None, shell=None, encoding=None):
    if shell == None:
        shell = os.name == "nt"
    if not encoding:
        encoding = sys.getfilesystemencoding()
    if command:
        args = command.split(" ")
    process = subprocess.Popen(
        args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=shell, cwd=path
    )
    code = process.wait()
    stdout = process.stdout.read()
    stderr = process.stderr.read()
    stdout = stdout.decode(encoding)
    stderr = stderr.decode(encoding)
    return dict(stdout=stdout, stderr=stderr, code=code)
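
# Usage sketch for the subprocess helper above (the command is illustrative):
# the result is a dictionary with the decoded stdout/stderr and exit code.
def _example_execute():
    result = execute(["python", "--version"])
    return result["code"], result["stdout"], result["stderr"]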

@contextlib.contextmanager
def ctx_locale(name="", force=False):
    saved = locale.setlocale(locale.LC_ALL)
    if saved == name and not force:
        yield saved
        return
    try:
        yield locale.setlocale(locale.LC_ALL, name)
    finally:
        locale.setlocale(locale.LC_ALL, saved)


@contextlib.contextmanager
def ctx_request(app=None):
    app = app or common.base().get_app()
    _request = app._request
    app._request = app._mock
    try:
        yield True
    finally:
        app._request = _request
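
# Usage sketch for the locale context manager above: the previous locale is
# restored once the block exits (the "C" locale is used here as it should be
# available on most systems, the helper name is illustrative).
def _example_ctx_locale():
    with ctx_locale("C"):
        return locale.localeconv()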

class FileTuple(tuple):
    """
    Tuple class (inherits from tuple) that represents
    the name, content type and (data) contents of a file
    in the context of the appier infra-structure.

    This class shares much of its signature with the
    typical Python file interface, allowing most of
    the operations to be performed (eg: read, seek,
    tell, etc.).
    """

    def __init__(self, *args, **kwargs):
        tuple.__init__(*args, **kwargs)
        self._position = 0

    @classmethod
    def from_data(cls, data, name=None, mime=None):
        file_tuple = cls((name, mime, data))
        return file_tuple

    @classmethod
    def from_file(cls, file, name=None, mime=None):
        data = file.read()
        file_tuple = cls.from_data(data, name=name, mime=mime)
        return file_tuple

    @classmethod
    def from_path(cls, path, name=None, mime=None, guess=True):
        mime = cls.guess(path) if mime == None and guess else mime
        file = open(path, "rb")
        try:
            file_tuple = cls.from_file(file, name=name, mime=mime)
        finally:
            file.close()
        return file_tuple

    @classmethod
    def guess(cls, name):
        mime = mimetypes.guess_type(name, strict=False)[0]
        if mime:
            return mime
        return None

    def read(self, count=None):
        data, data_l = self[2], len(self[2])
        if not count and self._position == 0:
            data, offset = data, data_l
        elif not count:
            data, offset = data[self._position :], data_l - self._position
        else:
            data, offset = data[self._position : self._position + count], count
        self._position += offset
        return data

    def seek(self, offset, whence=os.SEEK_SET):
        if whence == os.SEEK_SET:
            self._position = offset
        if whence == os.SEEK_CUR:
            self._position += offset
        if whence == os.SEEK_END:
            self._position = len(self[2]) + offset

    def tell(self):
        return self._position

    def save(self, path, close=True):
        contents = self[2]
        if legacy.is_string(path):
            file = open(path, "wb")
        else:
            file = path
        try:
            file.write(contents)
        finally:
            if close:
                file.close()

    def seekable(self):
        return True

    @property
    def name(self):
        return self[0]

    @property
    def mime(self):
        return self[1]

    @property
    def data(self):
        return self[2]
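
# Small sketch of the file like behaviour described above (values are
# illustrative): a FileTuple is built from raw bytes and then consumed
# through the usual read/seek interface.
def _example_file_tuple():
    file_tuple = FileTuple.from_data(
        b"hello world", name="hello.txt", mime="text/plain"
    )
    chunk = file_tuple.read(5)  # b"hello"
    file_tuple.seek(0)
    return chunk, file_tuple.name, file_tuple.mime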

class BaseThread(threading.Thread):
    """
    The top level thread class that is meant to encapsulate
    a running base object and run it in a new context.

    This base thread may be used to run a network loop allowing
    a main thread to continue with execution logic.
    """

    def __init__(self, owner=None, daemon=False, *args, **kwargs):
        threading.Thread.__init__(self, *args, **kwargs)
        self.owner = owner
        self.daemon = daemon

    def run(self):
        threading.Thread.run(self)
        if not self.owner:
            return
        self.owner.start()
        self.owner = None


class JSONEncoder(json.JSONEncoder):
    def __init__(self, *args, **kwargs):
        self.permissive = kwargs.pop("permissive", True)
        json.JSONEncoder.__init__(self, *args, **kwargs)

    def default(self, obj, **kwargs):
        if hasattr(obj, "json_v"):
            return obj.json_v()
        if self.permissive:
            return str(obj)
        return json.JSONEncoder.default(self, obj, **kwargs)
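
# Usage sketch for the permissive encoder above: objects exposing json_v()
# are serialized through it, everything else falls back to str() while the
# permissive flag is set (the helper name and values are illustrative).
def _example_json_encoder():
    import datetime

    value = dict(when=datetime.datetime(2024, 1, 1))
    return json.dumps(value, cls=JSONEncoder)  # '{"when": "2024-01-01 00:00:00"}'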