python-obfuscation-framework 1.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (147) hide show
  1. pof/__init__.py +21 -0
  2. pof/__main__.py +22 -0
  3. pof/cli.py +187 -0
  4. pof/errors.py +2 -0
  5. pof/evasion/__init__.py +57 -0
  6. pof/evasion/argv.py +44 -0
  7. pof/evasion/base.py +48 -0
  8. pof/evasion/cpu/__init__.py +0 -0
  9. pof/evasion/cpu/cpu_count.py +27 -0
  10. pof/evasion/fs/__init__.py +0 -0
  11. pof/evasion/fs/directory_exist.py +29 -0
  12. pof/evasion/fs/directory_list_exist.py +46 -0
  13. pof/evasion/fs/directory_list_missing.py +45 -0
  14. pof/evasion/fs/directory_missing.py +28 -0
  15. pof/evasion/fs/exec_method.py +51 -0
  16. pof/evasion/fs/executable_path.py +66 -0
  17. pof/evasion/fs/file_exist.py +29 -0
  18. pof/evasion/fs/file_list_exist.py +46 -0
  19. pof/evasion/fs/file_list_missing.py +45 -0
  20. pof/evasion/fs/file_missing.py +31 -0
  21. pof/evasion/fs/tmp.py +112 -0
  22. pof/evasion/hardware/__init__.py +0 -0
  23. pof/evasion/hardware/ram_count.py +50 -0
  24. pof/evasion/hooks/__init__.py +0 -0
  25. pof/evasion/hooks/debugger.py +36 -0
  26. pof/evasion/hooks/tracemalloc.py +23 -0
  27. pof/evasion/human/__init__.py +0 -0
  28. pof/evasion/human/p.py +45 -0
  29. pof/evasion/human/prompt.py +69 -0
  30. pof/evasion/integrity.py +129 -0
  31. pof/evasion/multi.py +41 -0
  32. pof/evasion/os/__init__.py +0 -0
  33. pof/evasion/os/domain.py +27 -0
  34. pof/evasion/os/hostname.py +27 -0
  35. pof/evasion/os/uid.py +28 -0
  36. pof/evasion/os/username.py +27 -0
  37. pof/evasion/processes/__init__.py +0 -0
  38. pof/evasion/processes/proc_count.py +47 -0
  39. pof/evasion/time/__init__.py +0 -0
  40. pof/evasion/time/expire.py +75 -0
  41. pof/evasion/time/uptime.py +48 -0
  42. pof/evasion/time/utc.py +26 -0
  43. pof/evasion/utils.py +198 -0
  44. pof/main.py +369 -0
  45. pof/obfuscator/__init__.py +86 -0
  46. pof/obfuscator/builtins.py +482 -0
  47. pof/obfuscator/cipher/__init__.py +0 -0
  48. pof/obfuscator/cipher/deep_encryption.py +194 -0
  49. pof/obfuscator/cipher/rc4.py +22 -0
  50. pof/obfuscator/cipher/shift.py +19 -0
  51. pof/obfuscator/cipher/xor.py +121 -0
  52. pof/obfuscator/compression/__init__.py +0 -0
  53. pof/obfuscator/compression/bz2.py +22 -0
  54. pof/obfuscator/compression/gzip.py +22 -0
  55. pof/obfuscator/compression/lzma.py +22 -0
  56. pof/obfuscator/compression/zlib.py +22 -0
  57. pof/obfuscator/constants.py +294 -0
  58. pof/obfuscator/definitions.py +341 -0
  59. pof/obfuscator/encoding/__init__.py +0 -0
  60. pof/obfuscator/encoding/a85.py +21 -0
  61. pof/obfuscator/encoding/b16.py +21 -0
  62. pof/obfuscator/encoding/b32.py +21 -0
  63. pof/obfuscator/encoding/b32hex.py +21 -0
  64. pof/obfuscator/encoding/b64.py +21 -0
  65. pof/obfuscator/encoding/b85.py +25 -0
  66. pof/obfuscator/encoding/binascii.py +22 -0
  67. pof/obfuscator/encoding/snt.py +23 -0
  68. pof/obfuscator/esoteric/__init__.py +0 -0
  69. pof/obfuscator/esoteric/call.py +49 -0
  70. pof/obfuscator/esoteric/doc.py +237 -0
  71. pof/obfuscator/esoteric/globals.py +62 -0
  72. pof/obfuscator/esoteric/imports.py +55 -0
  73. pof/obfuscator/extract_variables.py +297 -0
  74. pof/obfuscator/junk/__init__.py +0 -0
  75. pof/obfuscator/junk/add_comments.py +102 -0
  76. pof/obfuscator/junk/add_newlines.py +36 -0
  77. pof/obfuscator/names.py +474 -0
  78. pof/obfuscator/names_rope.py +375 -0
  79. pof/obfuscator/numbers.py +271 -0
  80. pof/obfuscator/other/__init__.py +0 -0
  81. pof/obfuscator/other/tokens.py +47 -0
  82. pof/obfuscator/remove/__init__.py +0 -0
  83. pof/obfuscator/remove/comments.py +36 -0
  84. pof/obfuscator/remove/exceptions.py +75 -0
  85. pof/obfuscator/remove/indents.py +28 -0
  86. pof/obfuscator/remove/loggings.py +120 -0
  87. pof/obfuscator/remove/loggings_old.py +45 -0
  88. pof/obfuscator/remove/newline.py +27 -0
  89. pof/obfuscator/remove/print.py +40 -0
  90. pof/obfuscator/restructure.py +15 -0
  91. pof/obfuscator/stegano/__init__.py +0 -0
  92. pof/obfuscator/stegano/docstrings.py +111 -0
  93. pof/obfuscator/stegano/ipv6encoding.py +21 -0
  94. pof/obfuscator/stegano/macencoding.py +21 -0
  95. pof/obfuscator/stegano/uuidencoding.py +21 -0
  96. pof/obfuscator/strings.py +359 -0
  97. pof/stager/__init__.py +17 -0
  98. pof/stager/cipher/__init__.py +0 -0
  99. pof/stager/cipher/rc4.py +36 -0
  100. pof/stager/download.py +80 -0
  101. pof/stager/image.py +374 -0
  102. pof/stager/lots/__init__.py +1 -0
  103. pof/stager/lots/cl1pnet.py +51 -0
  104. pof/stager/lots/pastebin.py +35 -0
  105. pof/stager/lots/pasters.py +30 -0
  106. pof/stager/quine.py +135 -0
  107. pof/utils/__init__.py +0 -0
  108. pof/utils/cipher/__init__.py +7 -0
  109. pof/utils/cipher/rc4.py +407 -0
  110. pof/utils/cipher/shift.py +41 -0
  111. pof/utils/compression/__init__.py +11 -0
  112. pof/utils/compression/bz2.py +38 -0
  113. pof/utils/compression/gzip.py +38 -0
  114. pof/utils/compression/lzma.py +38 -0
  115. pof/utils/compression/zlib.py +38 -0
  116. pof/utils/encoding/__init__.py +19 -0
  117. pof/utils/encoding/a85.py +35 -0
  118. pof/utils/encoding/b16.py +30 -0
  119. pof/utils/encoding/b3.py +93 -0
  120. pof/utils/encoding/b32.py +30 -0
  121. pof/utils/encoding/b32hex.py +30 -0
  122. pof/utils/encoding/b64.py +30 -0
  123. pof/utils/encoding/b85.py +35 -0
  124. pof/utils/encoding/binascii.py +38 -0
  125. pof/utils/encoding/snt.py +97 -0
  126. pof/utils/entropy.py +24 -0
  127. pof/utils/extract_names.py +204 -0
  128. pof/utils/generator/__init__.py +17 -0
  129. pof/utils/generator/advanced.py +53 -0
  130. pof/utils/generator/base.py +178 -0
  131. pof/utils/generator/basic.py +107 -0
  132. pof/utils/generator/names.txt +37241 -0
  133. pof/utils/generator/unicode.py +171 -0
  134. pof/utils/se/__init__.py +3 -0
  135. pof/utils/se/homoglyphs.py +99 -0
  136. pof/utils/se/homoglyphs.txt +96 -0
  137. pof/utils/stegano/__init__.py +5 -0
  138. pof/utils/stegano/ipv6encoding.py +97 -0
  139. pof/utils/stegano/macencoding.py +96 -0
  140. pof/utils/stegano/uuidencoding.py +102 -0
  141. pof/utils/tokens.py +68 -0
  142. python_obfuscation_framework-1.4.1.dist-info/LICENSE +674 -0
  143. python_obfuscation_framework-1.4.1.dist-info/METADATA +851 -0
  144. python_obfuscation_framework-1.4.1.dist-info/RECORD +147 -0
  145. python_obfuscation_framework-1.4.1.dist-info/WHEEL +5 -0
  146. python_obfuscation_framework-1.4.1.dist-info/entry_points.txt +2 -0
  147. python_obfuscation_framework-1.4.1.dist-info/top_level.txt +1 -0
@@ -0,0 +1,93 @@
1
+ from tokenize import DEDENT, INDENT, LPAR, NAME, NEWLINE, NUMBER, OP, RPAR, STRING
2
+
3
+
4
+ # TODO (deoktr): add 2 way of doing the encoding, one (the current) where you need the
5
+ # function at the top of the file, and another where it's inline, so there would
6
+ # be no need for a function definition
7
class Base3Encoding:
    """Integer encoding over the three 'round' symbols ``0oO``.

    A troll encoding: integers are rendered as base-3 strings whose digits
    are ``0`` (0), ``o`` (1) and ``O`` (2), so every number looks like a
    string of round characters.
    """

    @staticmethod
    def encode(number: int):
        """Convert an integer to its base-3 ``0oO`` string representation."""
        if not isinstance(number, int):
            raise TypeError("number must be an integer")

        alphabet = "0oO"
        prefix = "-" if number < 0 else ""
        value = abs(number)

        # Single-digit fast path (also handles 0, which the loop below
        # would render as the empty string).
        if value < len(alphabet):
            return prefix + alphabet[value]

        digits = []
        while value:
            value, rem = divmod(value, len(alphabet))
            digits.append(alphabet[rem])
        return prefix + "".join(reversed(digits))

    @classmethod
    def encode_tokens(cls, number: int):
        """Return a single STRING token holding the encoded literal."""
        encoded = cls.encode(number)
        return [(STRING, repr(encoded))]

    @staticmethod
    def definition_tokens():
        """Base 3 decode function definition tokens.

        ```
        def b3decode(number):
            return int(number.replace("o", "1").replace("O", "2"), 3)
        ```.
        """
        signature = [
            (NAME, "def"),
            (NAME, "b3decode"),
            (OP, "("),
            (NAME, "number"),
            (OP, ")"),
            (OP, ":"),
            (NEWLINE, "\n"),
            (INDENT, " "),
        ]
        body = [
            (NAME, "return"),
            (NAME, "int"),
            (OP, "("),
            (NAME, "number"),
            (OP, "."),
            (NAME, "replace"),
            (OP, "("),
            (STRING, repr("o")),
            (OP, ","),
            (STRING, repr("1")),
            (OP, ")"),
            (OP, "."),
            (NAME, "replace"),
            (OP, "("),
            (STRING, repr("O")),
            (OP, ","),
            (STRING, repr("2")),
            (OP, ")"),
            (OP, ","),
            (NUMBER, "3"),
            (OP, ")"),
            (NEWLINE, "\n"),
            (DEDENT, ""),
        ]
        return signature + body

    @staticmethod
    def decode_tokens(encoded_tokens):
        """Wrap *encoded_tokens* in a ``b3decode(...)`` call."""
        return [(NAME, "b3decode"), (LPAR, "(")] + list(encoded_tokens) + [(RPAR, ")")]
@@ -0,0 +1,30 @@
1
+ from base64 import b32encode
2
+ from tokenize import LPAR, NAME, RPAR, STRING
3
+
4
+
5
class Base32Encoding:
    """Base32 string encoding backed by the stdlib :mod:`base64` module."""

    @staticmethod
    def encode(string):
        """Return *string* (bytes) encoded as a base32 ``str``."""
        raw = b32encode(string)
        return raw.decode()

    @classmethod
    def encode_tokens(cls, string):
        """Return a single STRING token holding the encoded literal."""
        encoded = cls.encode(string)
        return [(STRING, repr(encoded))]

    @staticmethod
    def import_tokens():
        """Tokens for ``from base64 import b32decode``."""
        return [(NAME, word) for word in ("from", "base64", "import", "b32decode")]

    @staticmethod
    def decode_tokens(encoded_tokens):
        """Wrap *encoded_tokens* in a ``b32decode(...)`` call."""
        return [(NAME, "b32decode"), (LPAR, "(")] + list(encoded_tokens) + [(RPAR, ")")]
@@ -0,0 +1,30 @@
1
+ from base64 import b32hexencode
2
+ from tokenize import LPAR, NAME, RPAR, STRING
3
+
4
+
5
+ class Base32HexEncoding:
6
+ @staticmethod
7
+ def encode(string):
8
+ return b32hexencode(string).decode()
9
+
10
+ @classmethod
11
+ def encode_tokens(cls, string):
12
+ return [(STRING, repr(cls.encode(string)))]
13
+
14
+ @staticmethod
15
+ def import_tokens():
16
+ return [
17
+ (NAME, "from"),
18
+ (NAME, "base64"),
19
+ (NAME, "import"),
20
+ (NAME, "b32hexdecode"),
21
+ ]
22
+
23
+ @staticmethod
24
+ def decode_tokens(encoded_tokens):
25
+ return [
26
+ (NAME, "b32hexdecode"),
27
+ (LPAR, "("),
28
+ *encoded_tokens,
29
+ (RPAR, ")"),
30
+ ]
@@ -0,0 +1,30 @@
1
+ from base64 import b64encode
2
+ from tokenize import LPAR, NAME, RPAR, STRING
3
+
4
+
5
class Base64Encoding:
    """Base64 string encoding backed by the stdlib :mod:`base64` module."""

    @staticmethod
    def encode(string):
        """Return *string* (bytes) encoded as a base64 ``str``."""
        raw = b64encode(string)
        return raw.decode()

    @classmethod
    def encode_tokens(cls, string):
        """Return a single STRING token holding the encoded literal."""
        encoded = cls.encode(string)
        return [(STRING, repr(encoded))]

    @staticmethod
    def import_tokens():
        """Tokens for ``from base64 import b64decode``."""
        return [(NAME, word) for word in ("from", "base64", "import", "b64decode")]

    @staticmethod
    def decode_tokens(encoded_tokens):
        """Wrap *encoded_tokens* in a ``b64decode(...)`` call."""
        return [(NAME, "b64decode"), (LPAR, "(")] + list(encoded_tokens) + [(RPAR, ")")]
@@ -0,0 +1,35 @@
1
+ from base64 import b85encode
2
+ from tokenize import LPAR, NAME, RPAR, STRING
3
+
4
+
5
class Base85Encoding:
    """Base85 string encoding.

    Relies on :func:`base64.b85encode`, which is new in Python 3.4.
    """

    @staticmethod
    def encode(string):
        """Return *string* (bytes) encoded as a base85 ``str``."""
        raw = b85encode(string)
        return raw.decode()

    @classmethod
    def encode_tokens(cls, string):
        """Return a single STRING token holding the encoded literal."""
        encoded = cls.encode(string)
        return [(STRING, repr(encoded))]

    @staticmethod
    def import_tokens():
        """Tokens for ``from base64 import b85decode``."""
        return [(NAME, word) for word in ("from", "base64", "import", "b85decode")]

    @staticmethod
    def decode_tokens(encoded_tokens):
        """Wrap *encoded_tokens* in a ``b85decode(...)`` call."""
        return [(NAME, "b85decode"), (LPAR, "(")] + list(encoded_tokens) + [(RPAR, ")")]
@@ -0,0 +1,38 @@
1
+ import binascii
2
+ import marshal
3
+ from tokenize import COMMA, LPAR, NAME, OP, RPAR, STRING
4
+
5
+
6
class BinasciiEncoding:
    """Encoding that marshals an object then base64-encodes it via binascii."""

    @staticmethod
    def encode(string):
        """Marshal *string* and base64-encode the result.

        NOTE(review): unlike the other encoders this returns ``bytes``
        (b2a_base64 output is not decoded) — kept as-is for compatibility.
        """
        dumped = marshal.dumps(string)
        return binascii.b2a_base64(dumped)

    @classmethod
    def encode_tokens(cls, string):
        """Return a single STRING token holding the encoded literal."""
        encoded = cls.encode(string)
        return [(STRING, repr(encoded))]

    @staticmethod
    def import_tokens():
        """Tokens for ``import binascii, marshal``."""
        return [
            (NAME, "import"),
            (NAME, "binascii"),
            (COMMA, ","),
            (NAME, "marshal"),
        ]

    @staticmethod
    def decode_tokens(encoded_tokens):
        """Wrap *encoded_tokens* in ``marshal.loads(binascii.a2b_base64(...))``."""
        prefix = [
            (NAME, "marshal"),
            (OP, "."),
            (NAME, "loads"),
            (LPAR, "("),
            (NAME, "binascii"),
            (OP, "."),
            (NAME, "a2b_base64"),
            (LPAR, "("),
        ]
        suffix = [(RPAR, ")"), (RPAR, ")")]
        return prefix + list(encoded_tokens) + suffix
@@ -0,0 +1,97 @@
1
+ from tokenize import DEDENT, INDENT, LPAR, NAME, NEWLINE, NUMBER, OP, RPAR, STRING
2
+
3
+
4
class SpacenTabEncoding:
    r"""Whitespace-only binary encoding.

    Every bit of the input is rendered as whitespace: 0 becomes a space ( )
    and 1 becomes a tab (\t), producing an invisible-looking string.
    """

    @staticmethod
    def encode(string):
        """Encode *string* (bytes) as a space/tab bit string.

        NOTE(review): leading zero bits of the first byte are dropped by
        ``bin()``; the generated decoder reconstructs the length from the
        bit count, so round-trips are preserved.
        """
        bits = bin(int.from_bytes(string, "big")).replace("0b", "")
        translation = {"0": " ", "1": "\t"}
        return "".join(translation[bit] for bit in bits)

    @classmethod
    def encode_tokens(cls, string):
        """Return a single STRING token holding the encoded literal."""
        encoded = cls.encode(string)
        return [(STRING, repr(encoded))]

    @staticmethod
    def import_tokens():
        """No imports are needed by the generated decoder."""
        return []

    @classmethod
    def definition_tokens(cls):
        """Tokens for the ``sntdecode`` helper.

        ```
        def sntdecode(encoded):
            msg_bin = encoded.replace(" ", "0").replace("\\t", "1")
            n = int(msg_bin, 2)
            return n.to_bytes((n.bit_length() + 7) // 8, "big")
        ```.
        """
        signature = [
            (NAME, "def"),
            (NAME, "sntdecode"),
            (OP, "("),
            (NAME, "encoded"),
            (OP, ")"),
            (OP, ":"),
            (NEWLINE, "\n"),
            (INDENT, " "),
        ]
        # msg_bin = encoded.replace(" ", "0").replace("\t", "1")
        to_bits = [
            (NAME, "msg_bin"),
            (OP, "="),
            (NAME, "encoded"),
            (OP, "."),
            (NAME, "replace"),
            (OP, "("),
            (STRING, '" "'),
            (OP, ","),
            (STRING, '"0"'),
            (OP, ")"),
            (OP, "."),
            (NAME, "replace"),
            (OP, "("),
            (STRING, '"\\t"'),
            (OP, ","),
            (STRING, '"1"'),
            (OP, ")"),
            (NEWLINE, "\n"),
        ]
        # n = int(msg_bin, 2)
        to_int = [
            (NAME, "n"),
            (OP, "="),
            (NAME, "int"),
            (OP, "("),
            (NAME, "msg_bin"),
            (OP, ","),
            (NUMBER, "2"),
            (OP, ")"),
            (NEWLINE, "\n"),
        ]
        # return n.to_bytes((n.bit_length() + 7) // 8, "big")
        to_bytes = [
            (NAME, "return"),
            (NAME, "n"),
            (OP, "."),
            (NAME, "to_bytes"),
            (OP, "("),
            (OP, "("),
            (NAME, "n"),
            (OP, "."),
            (NAME, "bit_length"),
            (OP, "("),
            (OP, ")"),
            (OP, "+"),
            (NUMBER, "7"),
            (OP, ")"),
            (OP, "//"),
            (NUMBER, "8"),
            (OP, ","),
            (STRING, '"big"'),
            (OP, ")"),
            (NEWLINE, "\n"),
            (DEDENT, ""),
        ]
        return signature + to_bits + to_int + to_bytes

    @staticmethod
    def decode_tokens(encoded_tokens):
        """Wrap *encoded_tokens* in a ``sntdecode(...)`` call."""
        return [(NAME, "sntdecode"), (LPAR, "(")] + list(encoded_tokens) + [(RPAR, ")")]
pof/utils/entropy.py ADDED
@@ -0,0 +1,24 @@
1
+ """Shannon entropy.
2
+
3
+ - https://practicalsecurityanalytics.com/file-entropy/
4
+ - https://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.entropy.html
5
+
6
+ from tests:
7
+ a basic entropy of a Python source code is around 4.3
8
+ when compressing the entropy is between 4.25 and 4.4
9
+ when using unicode variables the entropy is around 6.2
10
+ """
11
+
12
+ import collections
13
+ import math
14
+
15
+
16
def entropy(data):
    """Return the Shannon entropy (base 2) of *data*.

    Args:
        data: an iterable of characters (e.g. a source-code string).

    Returns:
        float: bits of entropy per symbol; 0 for empty input or a single
        repeated symbol.
    """
    count = collections.Counter(map(ord, data))

    # Hoist the total out of the comprehension: the original recomputed
    # sum(count.values()) once per distinct symbol (accidental O(k^2)).
    total = sum(count.values())
    pk = [occurrences / total for occurrences in count.values()]

    base = 2

    # Shannon entropy: -sum(p * log_base(p)); generator avoids a temp list.
    return -sum(p * math.log(p, base) for p in pk)
@@ -0,0 +1,204 @@
1
+ """Extract names.
2
+
3
+ Extract names present in Python source files.
4
+ """
5
+
6
+ import io
7
+ import keyword
8
+ from tokenize import NAME, generate_tokens
9
+
10
# Names available as builtins: shadowing them would change program behavior,
# so they are never treated as user-defined identifiers.
BUILTINS = (
    "__name__",
    "__doc__",
    "__package__",
    "__loader__",
    "__spec__",
    "__build_class__",
    "__import__",
    "abs",
    "all",
    "any",
    "ascii",
    "bin",
    "breakpoint",
    "callable",
    "chr",
    "compile",
    "delattr",
    "dir",
    "divmod",
    "eval",
    "exec",
    "format",
    "getattr",
    "globals",
    "hasattr",
    "hash",
    "hex",
    "id",
    "input",
    "isinstance",
    "issubclass",
    "iter",
    "aiter",
    "len",
    "locals",
    "max",
    "min",
    "next",
    "anext",
    "oct",
    "ord",
    "pow",
    "print",
    "repr",
    "round",
    "setattr",
    "sorted",
    "sum",
    "vars",
    "None",
    "Ellipsis",
    "NotImplemented",
    "False",
    "True",
    "bool",
    "memoryview",
    "bytearray",
    "bytes",
    "classmethod",
    "complex",
    "dict",
    "enumerate",
    "filter",
    "float",
    "frozenset",
    "property",
    "int",
    "list",
    "map",
    "object",
    "range",
    "reversed",
    "set",
    "slice",
    "staticmethod",
    "str",
    "super",
    "tuple",
    "type",
    "zip",
    "__debug__",
    "BaseException",
    "Exception",
    "TypeError",
    "StopAsyncIteration",
    "StopIteration",
    "GeneratorExit",
    "SystemExit",
    "KeyboardInterrupt",
    "ImportError",
    "ModuleNotFoundError",
    "OSError",
    "EnvironmentError",
    "IOError",
    "EOFError",
    "RuntimeError",
    "RecursionError",
    "NotImplementedError",
    "NameError",
    "UnboundLocalError",
    "AttributeError",
    "SyntaxError",
    "IndentationError",
    "TabError",
    "LookupError",
    "IndexError",
    "KeyError",
    "ValueError",
    "UnicodeError",
    "UnicodeEncodeError",
    "UnicodeDecodeError",
    "UnicodeTranslateError",
    "AssertionError",
    "ArithmeticError",
    "FloatingPointError",
    "OverflowError",
    "ZeroDivisionError",
    "SystemError",
    "ReferenceError",
    "MemoryError",
    "BufferError",
    "Warning",
    "UserWarning",
    "EncodingWarning",
    "DeprecationWarning",
    "PendingDeprecationWarning",
    "SyntaxWarning",
    "RuntimeWarning",
    "FutureWarning",
    "ImportWarning",
    "UnicodeWarning",
    "BytesWarning",
    "ResourceWarning",
    "ConnectionError",
    "BlockingIOError",
    "BrokenPipeError",
    "ChildProcessError",
    "ConnectionAbortedError",
    "ConnectionRefusedError",
    "ConnectionResetError",
    "FileExistsError",
    "FileNotFoundError",
    "IsADirectoryError",
    "NotADirectoryError",
    "InterruptedError",
    "PermissionError",
    "ProcessLookupError",
    "TimeoutError",
    "open",
    "quit",
    "exit",
    "copyright",
    "credits",
    "license",
    "help",
)

# Common method / parameter names that must keep their spelling even though
# they are not builtins.
RESERVED_WORDS = (
    "__init__",
    "__eq__",
    "__lt__",
    "append",  # on list
    "update",  # on dict
    "copy",  # copy dict or list
    "join",  # on string "".join()
    "self",
    "args",
    "kwargs",
)

# Everything that must never be reported as a user-defined name.
RESERVED = RESERVED_WORDS + BUILTINS + tuple(keyword.kwlist)


class NameExtract:
    """Extract user-defined identifiers from tokenized Python source."""

    @staticmethod
    def get_names(tokens):
        """Return non-reserved NAME tokens, in first-seen order, without duplicates.

        Args:
            tokens: an iterable of tokenize 5-tuples (or (type, value, ...) tuples).

        Returns:
            list[str]: unique identifiers longer than one character.
        """
        names = []
        # Set mirror of `names` for O(1) membership; the original scanned
        # the list on every token (O(n^2) over large files).
        seen = set()
        for toknum, tokval, *_ in tokens:
            if toknum != NAME or len(tokval) <= 1:
                continue
            if tokval in RESERVED or tokval in seen:
                continue
            seen.add(tokval)
            names.append(tokval)
        return names

    @classmethod
    def get_from_file(cls, file):
        """Extract names from a Python source file (a pathlib.Path-like with .open())."""
        with file.open() as f:
            code = f.read()
        io_obj = io.StringIO(code)
        tokens = list(generate_tokens(io_obj.readline))
        return cls.get_names(tokens)
@@ -0,0 +1,17 @@
1
+ """Random names generators.
2
+
3
+ https://docs.python.org/3/reference/lexical_analysis.html#identifiers.
4
+
5
+ this is invisible unicode? In VIM the second unicode character doesn't appear!
6
+
7
+
8
+ ݻ
9
+
10
+ """
11
+
12
+ from .advanced import AdvancedGenerator
13
+ from .base import BaseGenerator
14
+ from .basic import BasicGenerator
15
+ from .unicode import UnicodeGenerator
16
+
17
+ __all__ = ["AdvancedGenerator", "BaseGenerator", "BasicGenerator", "UnicodeGenerator"]
@@ -0,0 +1,53 @@
1
+ """Advanced random names generators."""
2
+
3
+ import random
4
+ from pathlib import Path
5
+
6
+ from .base import BaseGenerator
7
+ from .basic import BasicGenerator
8
+
9
+
10
class AdvancedGenerator(BaseGenerator):
    """Name generators built on top of the basic ones."""

    @classmethod
    def realistic_generator(cls):
        """Yield unique, realistic-looking names drawn from ``names.txt``.

        Skips names already yielded and names in ``cls.RESERVED``.
        """
        # Keep the Path and the file handle as distinct names (the original
        # shadowed `file` with the handle inside the `with` block).
        path = Path(__file__).parent / "names.txt"
        with path.open() as fp:
            name_list = [line.rstrip() for line in fp]
        # Set instead of list: membership test per yielded name is O(1).
        previous = set()
        while True:
            name = random.choice(name_list)
            if name in previous or name in cls.RESERVED:
                continue
            previous.add(name)
            yield name

    @classmethod
    def fixed_length_generator(cls, chars="O0", first_chars="O", length=17):
        """Yield names of exactly *length* characters over *chars*.

        Inspired by: https://pyob.oxyry.com/
        """
        gen = BasicGenerator.alphabet_generator(
            chars=chars,
            first_chars=first_chars,
            min_length=length,
            max_length=length,
        )
        # Delegate directly: the original `while True: yield next(gen)` turns
        # exhaustion of `gen` into a RuntimeError under PEP 479.
        yield from gen

    @classmethod
    def multi_generator(cls, gen_dict):
        """Combine multiple generators.

        Take a dict of generators keyed by an int weight: a generator's
        chance of being picked is proportional to its key.
        """
        list_generators = []
        for weight, generator in gen_dict.items():
            list_generators.extend(weight * [generator])
        previous = set()  # O(1) membership instead of a list scan
        while True:
            generator = random.choice(list_generators)
            name = next(generator)
            if name in previous or name in cls.RESERVED:
                continue
            previous.add(name)
            yield name