clickdetect 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
api/detector.py ADDED
@@ -0,0 +1,70 @@
1
+ from fastapi import APIRouter, HTTPException
2
+ from detector.manager import get_manager_instance
3
+ from detector.detector import Detector
4
+ from datetime import datetime
5
+
6
+ router = APIRouter(prefix='/detector')
7
+
8
def detector_to_dict(job_id: str, d: Detector):
    """Serialize a Detector plus its scheduler job id into a JSON-friendly dict.

    NOTE(review): reads private fields (_rules, _last_time, _next_time); the
    epoch timestamps are rendered as naive local-time ISO strings.
    """
    last_exec = datetime.fromtimestamp(d._last_time).isoformat()
    next_exec = datetime.fromtimestamp(d._next_time).isoformat()
    return {
        'id': job_id,
        'name': d.name,
        'description': d.description,
        'tenant': d.tenant,
        'active': d.active,
        'for_time': d.for_time,
        'rules_count': len(d._rules),
        'webhooks': d.webhooks,
        'last_time_exec': last_exec,
        'next_time_exec': next_exec,
    }
21
+
22
+
23
@router.get('/list')
async def listDetectors():
    """Return every scheduled detector as a list of serialized dicts."""
    all_detectors = await get_manager_instance().get_detectors()
    return [detector_to_dict(key, det) for key, det in all_detectors.items()]
28
+
29
+
30
@router.get('/tenant/{tenant}')
async def getDetectorsByTenant(tenant: str):
    """List only the detectors that belong to the given tenant."""
    all_detectors = await get_manager_instance().get_detectors()
    matching = []
    for key, det in all_detectors.items():
        if det.tenant == tenant:
            matching.append(detector_to_dict(key, det))
    return matching
35
+
36
+
37
@router.get('/{id}')
async def getDetector(id: str):
    """Fetch a single detector by scheduler id; 404 when unknown."""
    found = await get_manager_instance().get_detector_by_id(id)
    if not found:
        raise HTTPException(status_code=404, detail='Detector not found')
    return detector_to_dict(id, found)
44
+
45
+
46
@router.delete('/{id}')
async def deleteDetector(id: str):
    """Remove a detector's scheduler; 404 when the id is unknown."""
    removed = await get_manager_instance().remove_scheduler(id)
    if not removed:
        raise HTTPException(status_code=404, detail='Detector not found')
    return {'deleted': id}
53
+
54
+
55
@router.post('/{id}/stop')
async def stopDetector(id: str):
    """Stop a detector's scheduler; 404 when the id is unknown."""
    stopped = await get_manager_instance().stop_scheduler(id)
    if not stopped:
        raise HTTPException(status_code=404, detail='Detector not found')
    return {'stopped': id}
62
+
63
+
64
@router.post('/{id}/resume')
async def resumeDetector(id: str):
    """Resume a previously stopped detector; 404 when the id is unknown."""
    resumed = await get_manager_instance().resume_scheduler(id)
    if not resumed:
        raise HTTPException(status_code=404, detail='Detector not found')
    return {'resumed': id}
api/health.py ADDED
@@ -0,0 +1,6 @@
1
from fastapi import APIRouter

# Router for liveness probes mounted under /health.
router = APIRouter(prefix='/health')


@router.get('/ok')
def isOk():
    """Liveness endpoint: always reports the service as up."""
    return {'ok': True}
api/rules.py ADDED
@@ -0,0 +1,45 @@
1
+ from fastapi import APIRouter, HTTPException
2
+ from detector.manager import get_manager_instance
3
+ router = APIRouter(prefix='/rules')
4
+
5
@router.get('/{detector_id}')
async def listRules(detector_id: str):
    """Return every rule of a detector as dicts; 404 when the detector is unknown."""
    detector = await get_manager_instance().get_detector_by_id(detector_id)
    if not detector:
        raise HTTPException(status_code=404, detail='Detector not found')
    # NOTE(review): reads the private _rules list directly.
    return [rule.to_dict() for rule in detector._rules]
12
+
13
@router.get('/{detector_id}/{rule_id}')
async def getRuleById(detector_id: str, rule_id: str):
    """Fetch one rule from a detector; 404 on missing detector or rule."""
    detector = await get_manager_instance().get_detector_by_id(detector_id)
    if not detector:
        raise HTTPException(status_code=404, detail='Detector not found')
    found = await detector.get_rule_by_id(rule_id)
    if not found:
        raise HTTPException(status_code=404, detail='Rule not found')
    return found.to_dict()
23
+
24
@router.get('/{detector_id}/{rule_id}/pause')
async def pauseRule(detector_id: str, rule_id: str):
    """Deactivate one rule of a detector.

    NOTE(review): this mutates state over GET; POST would be more RESTful.
    """
    detector = await get_manager_instance().get_detector_by_id(detector_id)
    if not detector:
        raise HTTPException(status_code=404, detail='Detector not found')
    changed = await detector.setRuleActive(rule_id, False)
    if not changed:
        raise HTTPException(status_code=404, detail='Rule not found')
    return {'ok': True}
34
+
35
@router.get('/{detector_id}/{rule_id}/resume')
async def resumeRule(detector_id: str, rule_id: str):
    """Re-activate one rule of a detector.

    NOTE(review): this mutates state over GET; POST would be more RESTful.
    """
    detector = await get_manager_instance().get_detector_by_id(detector_id)
    if not detector:
        raise HTTPException(status_code=404, detail='Detector not found')
    changed = await detector.setRuleActive(rule_id, True)
    if not changed:
        raise HTTPException(status_code=404, detail='Rule not found')
    return {'ok': True}
45
+
@@ -0,0 +1,78 @@
1
+ Metadata-Version: 2.4
2
+ Name: clickdetect
3
+ Version: 0.1.0
4
+ Summary: Generic SIEM detector
5
+ Requires-Python: >=3.13
6
+ Description-Content-Type: text/markdown
7
+ Requires-Dist: aiohttp[speedups]>=3.13.2
8
+ Requires-Dist: apscheduler>=3.11.1
9
+ Requires-Dist: asyncpg>=0.31.0
10
+ Requires-Dist: clickhouse-connect>=0.10.0
11
+ Requires-Dist: colorlog>=6.10.1
12
+ Requires-Dist: fastapi[standard]>=0.127.1
13
+ Requires-Dist: jinja2>=3.1.6
14
+ Requires-Dist: matrix-nio[e2e]>=0.25.2
15
+ Requires-Dist: pyyaml>=6.0.3
16
+
17
+ # Overview
18
+
19
+ **Clickdetect** is a framework for threshold-based detection and alerting. It periodically queries your data sources, evaluates rules against the results, and sends alerts to one or more destinations when conditions are met.
20
+
21
+ You can pull events from any DataSource implemented, and push alerts to any webhook.
22
+
23
+ If you use elastalert, you will like this!
24
+
25
+ ## Documentation
26
+
27
+ Documentation: [https://clickdetect.souzo.me](https://clickdetect.souzo.me)
28
+
29
+ ## Next steps
30
+
31
+ * Implement timeframe []
32
+ * Grouping alerts []
33
+ * Suppress alerts []
34
+ * Hot reload rules []
35
+ * Add new rules using api []
36
+ * Add api endpoint to silence detectors []
37
+ * Sigma converter in rule (sigma: true) []
38
+ * Sync schedulers for scalability []
39
+ * Get rules from S3
40
+
41
+
42
+ ## Installation
43
+
44
+ ### Using uv
45
+
46
+ Install `uv` first by following the official guide:
47
+
48
+ 1. https://docs.astral.sh/uv/getting-started/installation/
49
+
50
+ ```sh
51
+ git clone https://github.com/clicksiem/clickdetect
52
+ cd clickdetect
53
+ uv sync --no-dev
54
+ uv run clickdetect
55
+ ```
56
+
57
+ ### Using Docker / Podman
58
+
59
+ From repository
60
+
61
+ ```sh
62
+ git clone https://github.com/clicksiem/clickdetect
63
+ cd clickdetect
64
+ podman build -t clickdetect .
65
+ podman run --rm -v ./runner.yml:/app/runner.yml -p 8080:8080 clickdetect --api
66
+ ```
67
+
68
+ ## Contribution
69
+
70
+ * Like this project
71
+ * Help me create a Sigma converter for ClickHouse.
72
+ * Report bugs in the issue tracker
73
+
74
+ ## Contact
75
+
76
+ * E-mail: me@souzo.me <vinicius morais>
77
+ * [Linkedin](https://www.linkedin.com/in/vinicius-f-a76ba51b5/)
78
+
@@ -0,0 +1,28 @@
1
+ clickdetect.py,sha256=DoHaFBaRYsyTGwjU7jbynWC3uLfJf7CD_9FUSixbxo0,2640
2
+ api/detector.py,sha256=4FNsF_ORmsPXaCk-IFtUBPeDCHXUff5gHkNZM5HEDu0,2217
3
+ api/health.py,sha256=r7c6FNfnvrfb7Pt84HY1obCeyKsWe4QesmAuZEaJTG0,124
4
+ api/rules.py,sha256=AKMyy8MNfeRC1JM_95i9ybIRZNmxpVkypQ6deiSOvS0,1818
5
+ detector/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
6
+ detector/config.py,sha256=XUerloUPlY9adeQKFkCy0cwP9MlPdUXsS7ABCY8G6Hk,763
7
+ detector/detector.py,sha256=WymJitGhbfPT1GDPgF3RmCjdzH_HRnWqmxXiT6RTuTA,8726
8
+ detector/manager.py,sha256=0PC3VD4bkl25U-SnCDTv-1b_26a-kaC3vYlBDB-uVNg,2767
9
+ detector/rules.py,sha256=ZbKAm7a52zhskk6Uht-LwVGuQgyZIzcdYsOmFtB9vtM,1896
10
+ detector/runner.py,sha256=hBMQ3lrar4yHriW7XhLMAXmZlURDiQGlGmJZv_Bh7Bk,4630
11
+ detector/utils.py,sha256=QqvTPzlixY62xE6VFX37ViEY-z7WydVAgiV9MdvdGyw,1831
12
+ detector/datasource/__init__.py,sha256=_1WhW6-ggEMlv-RQM4sjXoqjr45KvuhGjXEzSLGX730,487
13
+ detector/datasource/base.py,sha256=3LUMpXKoddkFYp_TIinHUXXGKsQmAf_HwuF8Bio68kw,686
14
+ detector/datasource/clickhouse.py,sha256=Og365v4T1etJ3xV3C_GJRFLvdEm4qVyi47BU_02zz-g,2390
15
+ detector/datasource/elasticsearch.py,sha256=wFV8l1PzOd5xt-VuZVmNYAuxILFE7FQDPdynp3RQwlU,3734
16
+ detector/datasource/loki.py,sha256=1xXk2t2VmzWpu4loTm0dQtTKZTPpTaFddN09qLDHQgA,4332
17
+ detector/datasource/postgresql.py,sha256=iYz12W6zD7IArHJNj3l_SX-PvafkgX1Xsb5B6yN3FWg,2281
18
+ detector/webhooks/__init__.py,sha256=_GqWAgBXVbmF2HL9A7PAlU06kCPX6rl4H4TZCLapDOc,396
19
+ detector/webhooks/base.py,sha256=0blxe5C04ik7eL6THvJfwtob9xnFWxPnuOA1z531kPY,668
20
+ detector/webhooks/email.py,sha256=Fi-uLWTvHTgMUiSZGav7H2NV40kFTCid8G2XXYpjPsc,3430
21
+ detector/webhooks/generic.py,sha256=iy6jruuxZIgqBT8PJOeChltYfNidjTU7KBHfNcB3F1c,2176
22
+ detector/webhooks/matrix.py,sha256=-7qUvA7CTmOxKUwN818m08MMiJZkcjQSh_eek9ho-CY,2932
23
+ detector/webhooks/teams.py,sha256=ZCk7qfnDKySxCys8SEFwaBsdvQy9B_A1qWVbaR4sOHM,2766
24
+ clickdetect-0.1.0.dist-info/METADATA,sha256=fE8u4mCnheqXFuq37_3BDpeQKkMv_5aXMkrjDrJrGAI,1908
25
+ clickdetect-0.1.0.dist-info/WHEEL,sha256=aeYiig01lYGDzBgS8HxWXOg3uV61G9ijOsup-k9o1sk,91
26
+ clickdetect-0.1.0.dist-info/entry_points.txt,sha256=8_PkJbEHQgVjvJHjmBQnMP6uvD0sUzM-Gd5xTi0gafQ,48
27
+ clickdetect-0.1.0.dist-info/top_level.txt,sha256=zV-XLGS2Ry805jitX8Ib3o663-AQG86vrgUlVQGaGt0,25
28
+ clickdetect-0.1.0.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (82.0.1)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ clickdetect = clickdetect:run
@@ -0,0 +1,3 @@
1
+ api
2
+ clickdetect
3
+ detector
clickdetect.py ADDED
@@ -0,0 +1,79 @@
1
+ import asyncio
2
+ import uvicorn
3
+ import detector.config as config
4
+ import argparse
5
+ from typing import Any
6
+ from detector.runner import Runner
7
+ from detector.manager import Manager, get_manager_instance
8
+ from logging import getLogger
9
+ from fastapi import FastAPI
10
+ from api.detector import router as detector_router
11
+ from api.rules import router as rules_router
12
+ from os.path import exists as f_exists
13
+
14
+ config.logConfig()
15
+ logger = getLogger(__name__)
16
+
17
async def load_api(args: Any):
    """Build the FastAPI app, mount the routers and serve with uvicorn.

    Blocks until the uvicorn server shuts down.
    """
    app = FastAPI(title=config.app_name)
    for sub_router in (detector_router, rules_router):
        app.include_router(sub_router)

    # Bind on all interfaces at the CLI-selected port.
    uv_cfg = uvicorn.Config(app, host='0.0.0.0', port=args.port, log_level='info')
    await uvicorn.Server(uv_cfg).serve()
25
+
26
async def load_runner(args: Any) -> Runner | None:
    """Initialise the runner file and start every configured detector.

    Returns the Runner on success, or None when the runner could not be
    loaded or defines no detectors.
    """
    runner = await Runner(args.runner).init()
    if not runner:
        logger.debug('Runner not loaded')
        return None

    detectors = await runner.get_detectors()
    manager = Manager()

    if not detectors:
        logger.error('No detector found')
        return None

    for det in detectors:
        await manager.run_detector(det)

    return runner
43
+
44
async def loop_run(runner: Runner | None = None):
    """Idle until the global running flag drops or the task is cancelled.

    Always shuts the manager down on exit; closes the runner when one was
    supplied.
    """
    try:
        while True:
            if not await config.is_running():
                break
            await asyncio.sleep(1)
    except asyncio.CancelledError:
        logger.warning('received kill event')
    finally:
        await get_manager_instance().shutdown()
        if runner:
            await runner.close()
54
+
55
async def main():
    """Parse CLI arguments, start the runner and/or the API, and wait on both."""
    parser = argparse.ArgumentParser(description=f'{config.app_name} is a tool to detect patterns and alerts in clickhouse and other databases')
    parser.add_argument('--api', required=False, default=False, action='store_true', help='Enable api, required for clicksiem-backend')
    parser.add_argument('-p', '--port', default=config.default_port, type=int, help=f'specify api port, default: {config.default_port}')
    parser.add_argument('-r', '--runner', default=config.default_runner, type=str, help=f'Runner file containing webhook, datasources, detectors and rules. Default: {config.default_runner}')
    # BUG FIX: `type=bool` turns ANY non-empty value (even "false") into True;
    # a store_true flag matches the documented behavior.
    parser.add_argument('--stdin', default=False, action='store_true', help='Read file from stdin')
    args = parser.parse_args()

    tasks = []
    runner = None
    if args.runner:
        if not f_exists(args.runner):
            # `fatal` is a deprecated alias for `critical`.
            logger.critical(f'File {args.runner} does not exist')
            exit(1)
        # BUG FIX: the old code appended the *result* of `await load_runner(args)`
        # (a Runner or None) to the gather list; asyncio.gather() only accepts
        # awaitables, so it raised TypeError. Start the detectors here, then keep
        # the process alive (and shut down cleanly) with loop_run().
        runner = await load_runner(args)
        tasks.append(loop_run(runner))
    if args.api:
        tasks.append(load_api(args))
    if tasks:
        await asyncio.gather(*tasks)
73
+
74
+
75
def run():
    """Console-script entry point (see entry_points.txt): drive main() with asyncio."""
    asyncio.run(main())


if __name__ == "__main__":
    run()
detector/__init__.py ADDED
@@ -0,0 +1 @@
1
+
detector/config.py ADDED
@@ -0,0 +1,36 @@
1
+ import logging
2
+ from asyncio import Lock
3
+ import colorlog
4
+
5
# Shared mutable state: `_lock` guards `running`, which is read by
# is_running() and cleared by stop_running() to request shutdown.
_lock = Lock()
running = True
# Presumably concurrency limits for rule evaluation and webhook delivery —
# TODO confirm against detector/runner.py and detector/webhooks/.
rule_eval_semaphore = 7
webhook_send_semaphore = 7
9
+
10
def logConfig():
    """Configure root logging at DEBUG with colorized level names (colorlog)."""
    level_colors = {
        'DEBUG': 'cyan',
        'INFO': 'green',
        'WARNING': 'yellow',
        'ERROR': 'red',
        'CRITICAL': 'red,bg_white',
    }
    colorlog.basicConfig(
        level=colorlog.DEBUG,
        format="%(asctime)s | %(log_color)s%(levelname)-8s%(reset)s | %(name)s | %(message)s",
        log_colors=level_colors,
    )
22
+
23
async def is_running():
    """Return the shared running flag, read under the module lock."""
    async with _lock:
        return running
27
+
28
async def stop_running():
    """Clear the shared running flag under the module lock, requesting shutdown."""
    global running
    async with _lock:
        running = False
32
+
33
+
34
# Application-wide defaults; the runner file and port can be overridden
# via the CLI flags defined in clickdetect.py.
app_name = 'ClickDetector'
default_runner = 'runner.yml'
default_port = 8080
@@ -0,0 +1,14 @@
1
from typing import Dict, List, Type
from detector.datasource.base import BaseDataSource
from detector.datasource.clickhouse import ClickhouseDataSource
from detector.datasource.loki import LokiDataSource
from detector.datasource.elasticsearch import ElasticsearchDataSource
from detector.datasource.postgresql import PostgreSQLDataSource


# Registry of every concrete DataSource implementation shipped with the
# package. NOTE(review): `Dict` is imported but unused here.
datasources: List[Type[BaseDataSource]] = [
    ClickhouseDataSource,
    LokiDataSource,
    ElasticsearchDataSource,
    PostgreSQLDataSource,
]
@@ -0,0 +1,31 @@
1
+ from dataclasses import dataclass
2
+ from typing import Any, Dict
3
+
4
+
5
@dataclass()
class DataSourceQueryResult:
    """Result of a datasource query: a row count plus the raw value/rows.

    NOTE(review): the field name `len` shadows the builtin; kept for
    backward compatibility with existing callers.
    """
    len: int
    value: Any

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict (e.g. for JSON responses or templates)."""
        return dict(len=self.len, value=self.value)
15
+
16
class BaseDataSource:
    """Abstract interface that every datasource backend implements.

    Concrete subclasses (clickhouse, loki, elasticsearch, postgresql)
    override all of these; every method here simply raises.
    """

    async def connect(self):
        """Open the backend connection."""
        raise NotImplementedError()

    async def query(self, data: str) -> DataSourceQueryResult | None:
        """Execute a backend-specific query string; None signals failure."""
        raise NotImplementedError()

    @classmethod
    def _name(cls) -> str:
        """Short registry name of the backend (e.g. 'clickhouse')."""
        raise NotImplementedError()

    async def _parse(self, _obj: Any):
        """Populate connection settings from a parsed config mapping."""
        raise NotImplementedError()

    def to_dict(self) -> Dict:
        """Serialize the connection settings."""
        raise NotImplementedError()
@@ -0,0 +1,76 @@
1
+ from typing import Any, Dict
2
+ from clickhouse_connect import get_async_client
3
+ from clickhouse_connect.driver.asyncclient import AsyncClient
4
+ from logging import getLogger
5
+
6
+ from .base import BaseDataSource, DataSourceQueryResult
7
+
8
+ logger = getLogger(__name__)
9
+
10
class ClickhouseDataSource(BaseDataSource):
    """DataSource backed by ClickHouse via clickhouse-connect's async client.

    Connection settings are filled in by `_parse` from the runner config;
    `client` stays None until the first successful `connect`.
    """
    database: str
    host: str
    port: int
    username: str
    password: str
    # NOTE(review): `verify` is forwarded to clickhouse-connect as `secure=`
    # (TLS on/off), not certificate verification — confirm intended semantics.
    verify: bool = False
    client: AsyncClient | None = None

    async def connect(self):
        """Open (or reopen) the async client; on failure log and leave client=None."""
        try:
            self.client = await get_async_client(
                database=self.database,
                host=self.host,
                username=self.username,
                password=self.password,
                port=self.port,
                secure=self.verify
            )
        except Exception as ex:
            logger.error(f'Failed to connect to ClickHouse at {self.host}:{self.port} | {ex}')
            self.client = None

    async def query(self, data: str) -> DataSourceQueryResult | None:
        """Run a SQL query, reconnecting lazily; returns None on any failure."""
        if not self.client:
            await self.connect()
        if not self.client:
            return None
        try:
            result = await self.client.query(data)
            return DataSourceQueryResult(result.row_count, list(result.named_results()))
        except Exception as ex:
            # Drop the client so the next query() attempts a fresh connection.
            logger.error(f'Query failed, resetting client | {ex}')
            self.client = None
            return None

    @classmethod
    def _name(cls) -> str:
        """Registry name used in runner configs."""
        return 'clickhouse'

    def to_dict(self) -> Dict:
        """Serialize connection settings (includes the plaintext password)."""
        return {
            'database': self.database,
            'host': self.host,
            'port': self.port,
            'username': self.username,
            'password': self.password,
            'verify': self.verify
        }

    async def _parse(self, _obj: Any):
        """Populate settings from a runner-config mapping.

        Raises Exception when host, port, username or password is missing.
        """
        database = _obj.get('database', 'default')
        host = _obj.get('host')
        port = _obj.get('port')
        username = _obj.get('username')
        password = _obj.get('password')
        verify = _obj.get('verify', False)

        if not host or not port or not username or not password:
            # BUG FIX: previously raised with self.to_dict(), but the instance
            # attributes are not assigned yet at this point (class-level
            # annotations create no attributes), so to_dict() itself raised
            # AttributeError and masked the intended error message. Build the
            # report from the parsed local values instead.
            params = {'database': database, 'host': host, 'port': port,
                      'username': username, 'password': password, 'verify': verify}
            raise Exception(f'Invalid parameters: {params.items()}')

        self.database = database
        self.host = host
        self.port = port
        self.username = username
        self.password = password
        self.verify = verify
@@ -0,0 +1,109 @@
1
+ from typing import Any, Dict
2
+ from logging import getLogger
3
+ import json
4
+ import aiohttp
5
+
6
+ from .base import BaseDataSource, DataSourceQueryResult
7
+
8
+ logger = getLogger(__name__)
9
+
10
class ElasticsearchDataSource(BaseDataSource):
    """DataSource backed by Elasticsearch's HTTP `_search` API via aiohttp."""
    host: str
    port: int
    index: str
    username: str | None = None
    password: str | None = None
    api_key: str | None = None
    # NOTE(review): `verify` selects the URL scheme (https vs http) in
    # _base_url(), yet the connector below is created with ssl=False, i.e.
    # certificate verification is always disabled — confirm this is intentional.
    verify: bool = False
    _session: aiohttp.ClientSession | None = None

    def _base_url(self) -> str:
        # `verify` doubles as the https switch here.
        scheme = 'https' if self.verify else 'http'
        return f'{scheme}://{self.host}:{self.port}'

    def _headers(self) -> Dict[str, str]:
        # Default JSON headers; adds ApiKey auth only when configured.
        headers = {'Content-Type': 'application/json'}
        if self.api_key:
            headers['Authorization'] = f'ApiKey {self.api_key}'
        return headers

    def _auth(self) -> aiohttp.BasicAuth | None:
        # Basic auth only when BOTH username and password are set.
        if self.username and self.password:
            return aiohttp.BasicAuth(self.username, self.password)
        return None

    async def connect(self):
        """Create a session and probe /_cluster/health.

        On any failure the partially-created session is closed and reset to
        None so callers can detect the error.
        """
        try:
            connector = aiohttp.TCPConnector(ssl=False)
            self._session = aiohttp.ClientSession(
                connector=connector,
                auth=self._auth(),
                headers=self._headers()
            )
            async with self._session.get(f'{self._base_url()}/_cluster/health') as resp:
                if resp.status != 200:
                    raise Exception(f'Elasticsearch not healthy, status: {resp.status}')
        except Exception as ex:
            logger.error(f'Failed to connect to Elasticsearch at {self.host}:{self.port} | {ex}')
            if self._session:
                await self._session.close()
            self._session = None

    async def query(self, data: str) -> DataSourceQueryResult | None:
        """POST a JSON query-DSL string to /{index}/_search.

        `data` must be a JSON document. Returns None on any failure
        (connect, bad JSON, non-200 response).
        """
        if not self._session:
            await self.connect()
        if not self._session:
            return None
        try:
            body = json.loads(data)
            async with self._session.post(
                f'{self._base_url()}/{self.index}/_search',
                json=body
            ) as resp:
                if resp.status != 200:
                    text = await resp.text()
                    raise Exception(f'HTTP {resp.status}: {text}')
                payload = await resp.json()
                return self._parse_result(payload)
        except Exception as ex:
            # Tear the session down so the next query() reconnects from scratch.
            logger.error(f'Query failed, resetting session | {ex}')
            if self._session:
                await self._session.close()
            self._session = None
            return None

    def _parse_result(self, payload: Any) -> DataSourceQueryResult:
        # Flatten hits into rows of _id/_index plus the _source fields.
        hits = payload.get('hits', {}).get('hits', [])
        rows = [{'_id': h['_id'], '_index': h['_index'], **h['_source']} for h in hits]
        return DataSourceQueryResult(len(rows), rows)

    @classmethod
    def _name(cls) -> str:
        # Registry name used in runner configs.
        return 'elasticsearch'

    def to_dict(self) -> Dict:
        """Serialize connection settings (includes credentials in plaintext)."""
        return {
            'host': self.host,
            'port': self.port,
            'index': self.index,
            'username': self.username,
            'password': self.password,
            'api_key': self.api_key,
            'verify': self.verify,
        }

    async def _parse(self, _obj: Any):
        """Populate settings from a runner-config mapping.

        host, port and index are required; the remaining fields are optional.
        """
        host = _obj.get('host')
        port = _obj.get('port')
        index = _obj.get('index')

        if not host or not port or not index:
            raise Exception('Invalid parameters: host, port and index are required')

        self.host = host
        self.port = port
        self.index = index
        self.username = _obj.get('username')
        self.password = _obj.get('password')
        self.api_key = _obj.get('api_key')
        self.verify = _obj.get('verify', False)