howler-api 2.13.0.dev329__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of howler-api might be problematic.
- howler/__init__.py +0 -0
- howler/actions/__init__.py +167 -0
- howler/actions/add_label.py +111 -0
- howler/actions/add_to_bundle.py +159 -0
- howler/actions/change_field.py +76 -0
- howler/actions/demote.py +160 -0
- howler/actions/example_plugin.py +104 -0
- howler/actions/prioritization.py +93 -0
- howler/actions/promote.py +147 -0
- howler/actions/remove_from_bundle.py +133 -0
- howler/actions/remove_label.py +111 -0
- howler/actions/transition.py +200 -0
- howler/api/__init__.py +249 -0
- howler/api/base.py +88 -0
- howler/api/socket.py +114 -0
- howler/api/v1/__init__.py +97 -0
- howler/api/v1/action.py +372 -0
- howler/api/v1/analytic.py +748 -0
- howler/api/v1/auth.py +382 -0
- howler/api/v1/borealis.py +101 -0
- howler/api/v1/configs.py +55 -0
- howler/api/v1/dossier.py +222 -0
- howler/api/v1/help.py +28 -0
- howler/api/v1/hit.py +1181 -0
- howler/api/v1/notebook.py +82 -0
- howler/api/v1/overview.py +191 -0
- howler/api/v1/search.py +715 -0
- howler/api/v1/template.py +206 -0
- howler/api/v1/tool.py +183 -0
- howler/api/v1/user.py +414 -0
- howler/api/v1/utils/__init__.py +0 -0
- howler/api/v1/utils/etag.py +84 -0
- howler/api/v1/view.py +288 -0
- howler/app.py +235 -0
- howler/common/README.md +144 -0
- howler/common/__init__.py +0 -0
- howler/common/classification.py +979 -0
- howler/common/classification.yml +107 -0
- howler/common/exceptions.py +167 -0
- howler/common/hexdump.py +48 -0
- howler/common/iprange.py +171 -0
- howler/common/loader.py +154 -0
- howler/common/logging/__init__.py +241 -0
- howler/common/logging/audit.py +138 -0
- howler/common/logging/format.py +38 -0
- howler/common/net.py +79 -0
- howler/common/net_static.py +1494 -0
- howler/common/random_user.py +316 -0
- howler/common/swagger.py +117 -0
- howler/config.py +64 -0
- howler/cronjobs/__init__.py +29 -0
- howler/cronjobs/retention.py +61 -0
- howler/cronjobs/rules.py +274 -0
- howler/cronjobs/view_cleanup.py +88 -0
- howler/datastore/README.md +112 -0
- howler/datastore/__init__.py +0 -0
- howler/datastore/bulk.py +72 -0
- howler/datastore/collection.py +2327 -0
- howler/datastore/constants.py +117 -0
- howler/datastore/exceptions.py +41 -0
- howler/datastore/howler_store.py +105 -0
- howler/datastore/migrations/fix_process.py +41 -0
- howler/datastore/operations.py +130 -0
- howler/datastore/schemas.py +90 -0
- howler/datastore/store.py +231 -0
- howler/datastore/support/__init__.py +0 -0
- howler/datastore/support/build.py +214 -0
- howler/datastore/support/schemas.py +90 -0
- howler/datastore/types.py +22 -0
- howler/error.py +91 -0
- howler/external/__init__.py +0 -0
- howler/external/generate_mitre.py +96 -0
- howler/external/generate_sigma_rules.py +31 -0
- howler/external/generate_tlds.py +47 -0
- howler/external/reindex_data.py +46 -0
- howler/external/wipe_databases.py +58 -0
- howler/gunicorn_config.py +25 -0
- howler/healthz.py +47 -0
- howler/helper/__init__.py +0 -0
- howler/helper/azure.py +50 -0
- howler/helper/discover.py +59 -0
- howler/helper/hit.py +236 -0
- howler/helper/oauth.py +247 -0
- howler/helper/search.py +92 -0
- howler/helper/workflow.py +110 -0
- howler/helper/ws.py +378 -0
- howler/odm/README.md +102 -0
- howler/odm/__init__.py +1 -0
- howler/odm/base.py +1504 -0
- howler/odm/charter.txt +146 -0
- howler/odm/helper.py +416 -0
- howler/odm/howler_enum.py +25 -0
- howler/odm/models/__init__.py +0 -0
- howler/odm/models/action.py +33 -0
- howler/odm/models/analytic.py +90 -0
- howler/odm/models/assemblyline.py +48 -0
- howler/odm/models/aws.py +23 -0
- howler/odm/models/azure.py +16 -0
- howler/odm/models/cbs.py +44 -0
- howler/odm/models/config.py +558 -0
- howler/odm/models/dossier.py +33 -0
- howler/odm/models/ecs/__init__.py +0 -0
- howler/odm/models/ecs/agent.py +17 -0
- howler/odm/models/ecs/autonomous_system.py +16 -0
- howler/odm/models/ecs/client.py +149 -0
- howler/odm/models/ecs/cloud.py +141 -0
- howler/odm/models/ecs/code_signature.py +27 -0
- howler/odm/models/ecs/container.py +32 -0
- howler/odm/models/ecs/dns.py +62 -0
- howler/odm/models/ecs/egress.py +10 -0
- howler/odm/models/ecs/elf.py +74 -0
- howler/odm/models/ecs/email.py +122 -0
- howler/odm/models/ecs/error.py +14 -0
- howler/odm/models/ecs/event.py +140 -0
- howler/odm/models/ecs/faas.py +24 -0
- howler/odm/models/ecs/file.py +84 -0
- howler/odm/models/ecs/geo.py +30 -0
- howler/odm/models/ecs/group.py +18 -0
- howler/odm/models/ecs/hash.py +16 -0
- howler/odm/models/ecs/host.py +17 -0
- howler/odm/models/ecs/http.py +37 -0
- howler/odm/models/ecs/ingress.py +12 -0
- howler/odm/models/ecs/interface.py +21 -0
- howler/odm/models/ecs/network.py +30 -0
- howler/odm/models/ecs/observer.py +45 -0
- howler/odm/models/ecs/organization.py +12 -0
- howler/odm/models/ecs/os.py +21 -0
- howler/odm/models/ecs/pe.py +17 -0
- howler/odm/models/ecs/process.py +216 -0
- howler/odm/models/ecs/registry.py +26 -0
- howler/odm/models/ecs/related.py +45 -0
- howler/odm/models/ecs/rule.py +51 -0
- howler/odm/models/ecs/server.py +24 -0
- howler/odm/models/ecs/threat.py +247 -0
- howler/odm/models/ecs/tls.py +58 -0
- howler/odm/models/ecs/url.py +51 -0
- howler/odm/models/ecs/user.py +57 -0
- howler/odm/models/ecs/user_agent.py +20 -0
- howler/odm/models/ecs/vulnerability.py +41 -0
- howler/odm/models/gcp.py +16 -0
- howler/odm/models/hit.py +356 -0
- howler/odm/models/howler_data.py +328 -0
- howler/odm/models/lead.py +33 -0
- howler/odm/models/localized_label.py +13 -0
- howler/odm/models/overview.py +16 -0
- howler/odm/models/pivot.py +40 -0
- howler/odm/models/template.py +24 -0
- howler/odm/models/user.py +83 -0
- howler/odm/models/view.py +34 -0
- howler/odm/random_data.py +888 -0
- howler/odm/randomizer.py +606 -0
- howler/patched.py +5 -0
- howler/plugins/__init__.py +25 -0
- howler/plugins/config.py +123 -0
- howler/remote/__init__.py +0 -0
- howler/remote/datatypes/README.md +355 -0
- howler/remote/datatypes/__init__.py +98 -0
- howler/remote/datatypes/counters.py +63 -0
- howler/remote/datatypes/events.py +66 -0
- howler/remote/datatypes/hash.py +206 -0
- howler/remote/datatypes/lock.py +42 -0
- howler/remote/datatypes/queues/__init__.py +0 -0
- howler/remote/datatypes/queues/comms.py +59 -0
- howler/remote/datatypes/queues/multi.py +32 -0
- howler/remote/datatypes/queues/named.py +93 -0
- howler/remote/datatypes/queues/priority.py +215 -0
- howler/remote/datatypes/set.py +118 -0
- howler/remote/datatypes/user_quota_tracker.py +54 -0
- howler/security/__init__.py +253 -0
- howler/security/socket.py +108 -0
- howler/security/utils.py +185 -0
- howler/services/__init__.py +0 -0
- howler/services/action_service.py +111 -0
- howler/services/analytic_service.py +128 -0
- howler/services/auth_service.py +323 -0
- howler/services/config_service.py +128 -0
- howler/services/dossier_service.py +252 -0
- howler/services/event_service.py +93 -0
- howler/services/hit_service.py +893 -0
- howler/services/jwt_service.py +158 -0
- howler/services/lucene_service.py +286 -0
- howler/services/notebook_service.py +119 -0
- howler/services/overview_service.py +44 -0
- howler/services/template_service.py +45 -0
- howler/services/user_service.py +330 -0
- howler/utils/__init__.py +0 -0
- howler/utils/annotations.py +28 -0
- howler/utils/chunk.py +38 -0
- howler/utils/dict_utils.py +200 -0
- howler/utils/isotime.py +17 -0
- howler/utils/list_utils.py +11 -0
- howler/utils/lucene.py +77 -0
- howler/utils/path.py +27 -0
- howler/utils/socket_utils.py +61 -0
- howler/utils/str_utils.py +256 -0
- howler/utils/uid.py +47 -0
- howler_api-2.13.0.dev329.dist-info/METADATA +71 -0
- howler_api-2.13.0.dev329.dist-info/RECORD +200 -0
- howler_api-2.13.0.dev329.dist-info/WHEEL +4 -0
- howler_api-2.13.0.dev329.dist-info/entry_points.txt +8 -0
howler/plugins/config.py
ADDED
@@ -0,0 +1,123 @@

import logging
from typing import Any

from pydantic import BaseModel, ImportString, model_validator
from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, YamlConfigSettingsSource

from howler.common.logging import HWL_DATE_FORMAT, HWL_LOG_FORMAT

logger = logging.getLogger("howler.odm.models.config")
logger.setLevel(logging.INFO)
console = logging.StreamHandler()
console.setLevel(logging.INFO)
console.setFormatter(logging.Formatter(HWL_LOG_FORMAT, HWL_DATE_FORMAT))
logger.addHandler(console)


class ODMModules(BaseModel):
    "A set of fields for adding additional fields to Howler's ODM."

    modify_odm: dict[str, ImportString] = {}
    generation: dict[str, ImportString] = {}


class Modules(BaseModel):
    "A list of components exposed for use in Howler by this plugin."

    routes: list[ImportString] = []
    operations: list[ImportString] = []
    token_functions: dict[str, ImportString] = {}

    odm: ODMModules = ODMModules()


class BasePluginConfig(BaseSettings):
    "Configuration File for Plugin"

    name: str
    features: dict[str, bool] = {}

    modules: Modules = Modules()

    @model_validator(mode="before")
    @classmethod
    def initialize_plugin_configuration(cls, data: Any) -> Any:  # noqa: C901
        "Convert a raw yaml config into an object ready for validation by pydantic"
        if not isinstance(data, dict):
            return data

        # Default mutation requires plugin name
        if "name" not in data:
            logger.warning("Name is missing from configuration")
            return data

        plugin_name = data["name"]
        logger.debug("Beginning configuration parsing for plugin %s", plugin_name)

        if "modules" not in data:
            return data

        if "routes" in data["modules"] and isinstance(data["modules"]["routes"], list):
            new_routes: list[str] = []
            for route in data["modules"]["routes"]:
                new_routes.append(f"{plugin_name}.routes.{route}" if "." not in route else route)

            data["modules"]["routes"] = new_routes

        if "operations" in data["modules"] and isinstance(data["modules"]["operations"], list):
            new_operations: list[str] = []
            for operation in data["modules"]["operations"]:
                new_operations.append(f"{plugin_name}.actions.{operation}" if "." not in operation else operation)

            data["modules"]["operations"] = new_operations

        if "token_functions" in data["modules"] and isinstance(data["modules"]["token_functions"], dict):
            new_token_functions_dict: dict[str, str] = {}

            for application, value in data["modules"]["token_functions"].items():
                if value is True:
                    new_token_functions_dict[application] = f"{plugin_name}.token.{application}:get_token"
                elif value is False:
                    continue
                else:
                    new_token_functions_dict[application] = value

            data["modules"]["token_functions"] = new_token_functions_dict

        if "odm" not in data["modules"] or not isinstance(data["modules"]["odm"], dict):
            return data

        if "modify_odm" in data["modules"]["odm"] and isinstance(data["modules"]["odm"]["modify_odm"], dict):
            new_modify_odm_dict: dict[str, str] = {}
            for odm_name, value in data["modules"]["odm"]["modify_odm"].items():
                if value is True:
                    new_modify_odm_dict[odm_name] = f"{plugin_name}.odm.{odm_name}:modify_odm"
                elif value is False:
                    continue
                else:
                    new_modify_odm_dict[odm_name] = value

            data["modules"]["odm"]["modify_odm"] = new_modify_odm_dict

        if "generation" in data["modules"]["odm"] and isinstance(data["modules"]["odm"]["generation"], dict):
            new_generation_dict: dict[str, str] = {}
            for odm_name, value in data["modules"]["odm"]["generation"].items():
                if value is True:
                    new_generation_dict[odm_name] = f"{plugin_name}.odm.{odm_name}:generate"
                elif value is False:
                    continue
                else:
                    new_generation_dict[odm_name] = value

            data["modules"]["odm"]["generation"] = new_generation_dict

        return data

    @classmethod
    def settings_customise_sources(
        cls,  # noqa: ANN102
        *args,  # noqa: ANN002
        **kwargs,  # noqa: ANN002, ANN102
    ) -> tuple[PydanticBaseSettingsSource, ...]:
        "Adds a YamlConfigSettingsSource object at the end of the settings_customize_sources response."
        return (*super().settings_customise_sources(*args, **kwargs), YamlConfigSettingsSource(cls))
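For reference, a minimal sketch of what the `initialize_plugin_configuration` "before" validator above does to a shorthand plugin configuration. The plugin name `my_plugin`, the route `hits`, the operation `custom_op`, and the token target `nbgallery` are made-up values for illustration; in normal use the expansion runs automatically when the settings object is loaded from YAML, and full model validation would additionally try to import each resulting `ImportString`.

```python
from howler.plugins.config import BasePluginConfig

# Hypothetical shorthand configuration for a plugin named "my_plugin".
raw = {
    "name": "my_plugin",
    "modules": {
        "routes": ["hits"],                      # -> "my_plugin.routes.hits"
        "operations": ["custom_op"],             # -> "my_plugin.actions.custom_op"
        "token_functions": {"nbgallery": True},  # -> "my_plugin.token.nbgallery:get_token"
        "odm": {
            "modify_odm": {"hit": True},         # -> "my_plugin.odm.hit:modify_odm"
            "generation": {"hit": False},        # False entries are dropped
        },
    },
}

# Exercise the "before" validator directly on the raw dictionary.
expanded = BasePluginConfig.initialize_plugin_configuration(raw)
assert expanded["modules"]["routes"] == ["my_plugin.routes.hits"]
assert expanded["modules"]["odm"]["generation"] == {}
```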
howler/remote/__init__.py
File without changes
howler/remote/datatypes/README.md
ADDED
@@ -0,0 +1,355 @@

# Remote Datatypes

Now that most projects are distributed as containers, there is sometimes a need for data structures that are available live to different processes running on different hosts. This is where the remote datatypes come into play. These are essentially data structures stored in Redis that are available to all processes in your cluster.

We support a range of data types to account for various scenarios:

## Counters

Counters are very useful to gather metrics throughout your system.

They support simple integer operations:

- `inc(name, x)`: Increment the `name` counter by X
- `dec(name, x)`: Decrement the `name` counter by X
- `set(name, x)`: Set the `name` counter to X

They also have other functions to inspect the values:

- `get_queues()`: List the names of all the counters
- `get_queues_sizes()`: List the names of all the counters with their current sizes
- `reset_queues()`: Reset all counters to 0
- `delete()`: Delete any trace of the counters

Example:

```python
from howler.remote.datatypes.counters import Counters

with Counters('test-counter') as ct:
    # Increment counter by 4
    ct.inc('value_1', 4)

    # Get counter values
    ct.get_queues_sizes()
```

Output:

```json
{
    "test-counter-value_1": 4
}
```

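The other operations follow the same pattern; a minimal sketch building on the example above (the printed values are illustrative):

```python
from howler.remote.datatypes.counters import Counters

with Counters('test-counter') as ct:
    ct.inc('value_1', 4)
    ct.dec('value_1')        # value_1 is now 3
    print(ct.get_queues())   # ['test-counter-value_1']
    ct.reset_queues()        # every counter under this prefix is set back to 0
```
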
## Event Senders/Watchers

Event senders and watchers are used to trigger code execution in multiple containers when another container performs an action. If multiple containers register to watch the same event, they will all receive the same message when the event fires.

Methods:

- EventSender:
  - `send(event_name, event_data)`: Sends the event_data as an event of type event_name
- EventWatcher:
  - `register(event_name, callback)`: Register a callback function for all events of type event_name (the wildcard * can be used to register a single callback for multiple events)
  - `start()`: Start listening for events
  - `stop()`: Stop listening for events

Example: a container registers a watcher for a specific event:

```python
import time
from typing import Any

from howler.remote.datatypes.events import EventWatcher

def callback_func(data: dict[str, Any]):
    print(data)

watcher = EventWatcher()
try:
    # register for the test event in the event group
    watcher.register('event.test', callback_func)
    watcher.start()
    while True:
        time.sleep(1)
finally:
    watcher.stop()
```

Another container creates an event to wake up the first container:

```python
from howler.remote.datatypes.events import EventSender

# create a sender for the event group
sender = EventSender('event.')

# send a test event
sender.send('test', {'payload': 100})
# After this call, the first container will have woken up and executed its callback function,
# which would print: {'payload': 100}
```

## Hash Table

Hash tables are used to keep dictionary-like structures available to all containers in your cluster.

They support the following methods:

- `add(key, value)`: Add the specified value at the key in the hash table; if the key already exists it is not added
- `increment(key, count)`: Increment the value at the key by the count
- `limited_add(key, value, limit)`: Add the specified value at the key in the hash table, if the size of the table is smaller than the limit
- `exists(key)`: Check if a key exists
- `get(key)`: Get the value at the specified key
- `keys()`: Return the list of keys in the hash table
- `length()`: Return the number of keys in the hash table
- `items()`: Get the whole hash table's keys and values as a dictionary
- `conditional_remove(key, value)`: Remove the key from the hash table if its value is the same as specified
- `pop(key)`: Get the value of the key and remove it from the hash table
- `set(key, value)`: Set the specified key to a specific value
- `multi_set(dict)`: Set multiple keys at once
- `delete()`: Completely delete the hash structure

Example:

```python
from howler.remote.datatypes.hash import Hash

with Hash('test-hashmap') as h:
    h.add("key", "value")
    print(h.exists("key"))
    print(h.get("key"))
    print(h.items())
```

Output:

```python
True
"value"
{"key": "value"}
```

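The remaining operations, sketched with illustrative values (exact return values depend on what was stored):

```python
from howler.remote.datatypes.hash import Hash

with Hash('test-hashmap') as h:
    h.multi_set({"a": 1, "b": 2})   # set several keys at once
    h.increment("a", 5)             # "a" is now 6
    h.limited_add("c", 3, 5)        # added only while the table holds fewer than 5 keys
    print(h.pop("b"))               # 2, and "b" is removed
    h.conditional_remove("c", 3)    # removed only because the stored value matches
    print(h.length())               # 1, "a" is the only key left
```
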
## Global Locks

If you need to make sure that a certain set of operations does not take place at the same time in two different processes/containers/threads, you can use the global lock.

The global lock can only be used via a `with` statement and specifies the maximum amount of time you can hold the lock.

Example:

```python
from howler.remote.datatypes.lock import Lock

def process_1():
    with Lock('test', 10):
        # Do sensitive execution for up to 10 seconds
        ...

def process_2():
    with Lock('test', 20):
        # Do sensitive execution for up to 20 seconds
        ...

# Process 2 has to wait until process 1 is done before executing
```

## Sets

Sets are very useful to keep a list of non-duplicated items available to all containers in your cluster.

They support the following methods:

- `add(value1, ...)`: Add one or many values to the set
- `limited_add(value, limit)`: Add a value only if the number of items already in the set is smaller than the limit
- `remove(value)`: Remove a specific value from the set
- `exist(value)`: Check if a value exists in the set
- `pop()`: Return a random value from the set and remove it
- `pop_all()`: Return all values from the set and remove them all
- `random()`: Return a random member of the set but keep it in the set
- `members()`: Return all members of the set and leave them there
- `length()`: Return the length of the set
- `delete()`: Delete the set entirely

Example:

```python
from howler.remote.datatypes.set import Set

with Set('test-set') as s:
    values = ['a', 'b', 1, 2]
    s.add(*values)
    print(s.exist('b'))
    print(s.pop())
    print(s.length())
```

Output:

```python
True
'a' or 'b' or 1 or 2
3
```

## Quota trackers

Quota trackers are used to track user quotas in the system. They have only two functions: one to start tracking an operation and another to end it. If the call that begins tracking returns `False`, the user is over their quota.

Methods:

- `begin(unique_identifier, max_quota)`: Increment the current quota of the unique identifier if it is not already at the maximum; returns `False` if over the quota.
- `end(unique_identifier)`: Decrease the current quota of the unique identifier

Example:

```python
from howler.remote.datatypes.user_quota_tracker import UserQuotaTracker

uqt = UserQuotaTracker('test-quota')

ok = uqt.begin('uid', 3)
try:
    if ok:
        # Do processing which has quota
        ...
    else:
        raise Exception('Over your quota...')
finally:
    if ok:
        uqt.end('uid')
```

## Queues

To support distributed processing in a container-based environment, this library also supports multiple queue types so that containers can pass messages from one to another.

### Communication queues

Communication queues are similar to a chat: connected users receive the messages, but they do not know about messages that were sent while they were offline.

Available methods:

- `close()`: Stop listening for messages and disconnect from the queue
- `listen(blocking)`: A generator that either waits for messages or returns None
- `publish(message)`: Send a message to the comms queue

Example of message receiver:

```python
from howler.remote.datatypes.queues.comms import CommsQueue

with CommsQueue('test-comms-queue') as cq:
    for msg in cq.listen(blocking=True):
        if msg == "stop":
            break

        print(msg)
```

Example of message sender:

```python
from howler.remote.datatypes.queues.comms import CommsQueue

with CommsQueue('test-comms-queue') as cq:
    # send a message to the receiver
    cq.publish("This is my message!")

    # tell the receiver to stop
    cq.publish("stop")
```

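Listening can also be non-blocking; following the `listen(blocking)` description above, the generator is assumed to yield `None` whenever nothing is queued, as sketched below:

```python
from howler.remote.datatypes.queues.comms import CommsQueue

with CommsQueue('test-comms-queue') as cq:
    for msg in cq.listen(blocking=False):
        if msg is None:
            # nothing waiting right now: do other work or exit
            break

        print(msg)
```
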
### Named queues (FIFO)

Named queues are essentially First-In First-Out (FIFO) queues where messages are processed in the order in which they were pushed to the queue.

The following methods are available:

- `delete()`: Delete the queue with all of its messages
- `length()`: Return the length of the queue
- `peek_next()`: Get the next item in the queue without removing it
- `pop_batch(count)`: Get up to `count` items from the queue
- `pop(blocking)`: Get the next item from the queue. If blocking is True, wait for the next message
- `push(msg_1, ...)`: Push message(s) to the queue
- `unpop()`: Put the message back at the head of the FIFO queue

Example:

```python
from howler.remote.datatypes.queues.named import NamedQueue

with NamedQueue('test-named-queue') as nq:
    for x in range(5):
        nq.push(x)

    print(nq.pop_batch(100))
```

Output:

```python
[0, 1, 2, 3, 4]
```

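A second sketch covering the single-item operations from the list above (assuming a non-blocking `pop` on an empty queue returns `None`):

```python
from howler.remote.datatypes.queues.named import NamedQueue

with NamedQueue('test-named-queue') as nq:
    nq.push('a', 'b')
    print(nq.peek_next())          # 'a', still at the head of the queue
    print(nq.pop())                # 'a', removed from the head
    print(nq.pop(blocking=False))  # 'b'
    print(nq.pop(blocking=False))  # None, the queue is now empty
```
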
### Priority queues

Priority queues are queues where each message sets its priority on insertion to determine its position in the queue. Messages with the same priority behave as a FIFO queue.

The following methods are available:

- `count(lowest, highest)`: Count the number of items between the lowest and highest priority (inclusive)
- `delete()`: Delete the queue with all of its items
- `length()`: Return the full length of the queue
- `pop(num)`: Pop the specified number of items from the queue
- `blocking_pop(timeout, low_priority)`: Pop the next item from the queue and wait for it if the queue is empty (low_priority pops from lower-priority items first)
- `dequeue_range(lower_limit, upper_limit)`: Dequeue a number of elements within a specified range of scores
- `push(priority, message)`: Push a message at a given priority
- `rank(message)`: Return the rank (position) of a message in the queue
- `remove(message)`: Remove a message from the queue by its value
- `unpush(num)`: Pop the specified number of lower-priority items from the queue

Example:

```python
from howler.remote.datatypes.queues.priority import PriorityQueue

with PriorityQueue('test-priority-queue') as pq:
    for x in range(10):
        pq.push(100, x)

    a_key = pq.push(101, 'a')
    z_key = pq.push(99, 'z')
    print(pq.rank(a_key))
    print(pq.rank(z_key))
    print(pq.pop())
```

Output:

```python
0
11
'a'
```

#### Unique Priority queues

Unique Priority queues function the same way as Priority queues except that all messages in the queue must be unique, so new messages with the same value are not added again.

Example:

```python
from howler.remote.datatypes.queues.priority import PriorityQueue

with PriorityQueue('test-priority-queue') as pq:
    for x in range(10):
        pq.push(100, x)

    print(pq.length())

    # These values were already added in the previous loop so they won't be added again.
    for x in range(10):
        pq.push(100, x)

    print(pq.length())
```

Output:

```python
10
10
```
howler/remote/datatypes/__init__.py
ADDED
@@ -0,0 +1,98 @@

#!/usr/bin/env python

import json
import logging
import time
from datetime import datetime

import redis
from packaging.version import parse

from howler.common import loader
from howler.odm.models.config import config
from howler.utils.uid import get_random_id

# Add a version warning if redis python client is < 2.10.0. Older versions
# have a connection bug that can manifest with the dispatcher.
if parse(redis.__version__) <= parse("2.10.0"):
    import warnings

    warnings.warn(
        "%s works best with redis > 2.10.0. You're running"
        " redis %s. You should upgrade." % (__name__, redis.__version__)
    )


log = logging.getLogger(f"{loader.APP_NAME}.queue")
pool: dict[tuple[str, str], redis.BlockingConnectionPool] = {}


def now_as_iso():
    s = datetime.utcfromtimestamp(time.time()).isoformat()
    return f"{s}Z"


def reply_queue_name(prefix=None, suffix=None):
    if prefix:
        components = [prefix]
    else:
        components = []

    components.append(get_random_id())

    if suffix:
        components.append(str(suffix))

    return "-".join(components)


def retry_call(func, *args, **kw):
    maximum = 2
    exponent = -7

    while True:
        try:
            ret_val = func(*args, **kw)

            if exponent != -7:
                log.info("Reconnected to Redis!")

            return ret_val
        except (redis.ConnectionError, ConnectionResetError) as ce:
            log.warning(f"No connection to Redis, reconnecting... [{ce}]")
            time.sleep(2**exponent)
            exponent = exponent + 1 if exponent < maximum else exponent


def get_client(host, port, private):
    # In case a structure is passed a client as host
    if isinstance(host, (redis.Redis, redis.StrictRedis)):
        return host

    if not host or not port:
        host = host or config.core.redis.nonpersistent.host
        port = int(port or config.core.redis.nonpersistent.port)

    if private:
        return redis.StrictRedis(host=host, port=port)
    else:
        return redis.StrictRedis(connection_pool=get_pool(host, port))


def get_pool(host, port):
    key = (host, port)

    connection_pool = pool.get(key, None)
    if not connection_pool:
        connection_pool = redis.BlockingConnectionPool(host=host, port=port, max_connections=200)
        pool[key] = connection_pool

    return connection_pool


def decode(data):
    try:
        return json.loads(data)
    except ValueError:
        log.warning("Invalid data on queue: %s", str(data))
        return None
howler/remote/datatypes/counters.py
ADDED
@@ -0,0 +1,63 @@

from typing import Optional

from redis.exceptions import ConnectionError

from howler.remote.datatypes import get_client, now_as_iso, retry_call
from howler.remote.datatypes.hash import Hash


class Counters(object):
    def __init__(self, prefix="counter", host=None, port=None, track_counters=False):
        self.c = get_client(host, port, False)
        self.prefix = prefix
        if track_counters:
            self.tracker: Optional[Hash] = Hash("c-tracker-%s" % prefix, host=host, port=port)
        else:
            self.tracker = None

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.delete()

    def inc(self, name, value=1, track_id=None):
        if self.tracker:
            self.tracker.add(track_id or name, now_as_iso())
        return retry_call(self.c.incr, "%s-%s" % (self.prefix, name), value)

    def dec(self, name, value=1, track_id=None):
        if self.tracker:
            self.tracker.pop(str(track_id or name))
        return retry_call(self.c.decr, "%s-%s" % (self.prefix, name), value)

    def get_queues_sizes(self):
        out = {}
        for queue in retry_call(self.c.keys, "%s-*" % self.prefix):
            queue_size = int(retry_call(self.c.get, queue))
            out[queue] = queue_size

        return {k.decode("utf-8"): v for k, v in out.items()}

    def get_queues(self):
        return [k.decode("utf-8") for k in retry_call(self.c.keys, "%s-*" % self.prefix)]

    def ready(self):
        try:
            self.c.ping()
        except ConnectionError:
            return False

        return True

    def reset_queues(self):
        if self.tracker:
            self.tracker.delete()
        for queue in retry_call(self.c.keys, "%s-*" % self.prefix):
            retry_call(self.c.set, queue, "0")

    def delete(self):
        if self.tracker:
            self.tracker.delete()
        for queue in retry_call(self.c.keys, "%s-*" % self.prefix):
            retry_call(self.c.delete, queue)
howler/remote/datatypes/events.py
ADDED
@@ -0,0 +1,66 @@

from __future__ import annotations

import json
import threading
from typing import TYPE_CHECKING, Any, Callable, Generic, Optional, TypeVar

from howler.common.logging import get_logger
from howler.remote.datatypes import get_client, retry_call

if TYPE_CHECKING:
    from redis import Redis


logger = get_logger(__name__)


MessageType = TypeVar("MessageType")


class EventSender(Generic[MessageType]):
    def __init__(
        self,
        prefix: str,
        host=None,
        port=None,
        private=None,
        serializer: Callable[[MessageType], str] = json.dumps,
    ):
        self.client: Redis[Any] = get_client(host, port, private)
        self.prefix = prefix.lower()
        if not self.prefix.endswith("."):
            self.prefix += "."
        self.serializer = serializer

    def send(self, name: str, data: MessageType):
        path = self.prefix + name.lower().lstrip(".")
        retry_call(self.client.publish, path, self.serializer(data))


class EventWatcher(Generic[MessageType]):
    def __init__(
        self,
        host=None,
        port=None,
        private=None,
        deserializer: Callable[[str], MessageType] = json.loads,
    ):
        client: Redis[Any] = get_client(host, port, private)
        self.pubsub = retry_call(client.pubsub)
        self.worker: Optional[threading.Thread] = None
        self.deserializer = deserializer

    def register(self, path: str, callback: Callable[[MessageType], None]):
        def _callback(message: dict[str, Any]):
            if message["type"] == "pmessage":
                data = self.deserializer(message.get("data", ""))
                callback(data)

        self.pubsub.psubscribe(**{path.lower(): _callback})

    def start(self):
        self.worker = self.pubsub.run_in_thread(0.01, daemon=True)

    def stop(self):
        if self.worker is not None:
            self.worker.stop()  # type: ignore