hik-check 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hik_check-0.1.0/LICENSE +21 -0
- hik_check-0.1.0/PKG-INFO +56 -0
- hik_check-0.1.0/README.md +24 -0
- hik_check-0.1.0/hik_check/__init__.py +1 -0
- hik_check-0.1.0/hik_check/script.py +1073 -0
- hik_check-0.1.0/hik_check.egg-info/PKG-INFO +56 -0
- hik_check-0.1.0/hik_check.egg-info/SOURCES.txt +11 -0
- hik_check-0.1.0/hik_check.egg-info/dependency_links.txt +1 -0
- hik_check-0.1.0/hik_check.egg-info/entry_points.txt +2 -0
- hik_check-0.1.0/hik_check.egg-info/requires.txt +1 -0
- hik_check-0.1.0/hik_check.egg-info/top_level.txt +1 -0
- hik_check-0.1.0/pyproject.toml +17 -0
- hik_check-0.1.0/setup.cfg +4 -0
hik_check-0.1.0/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Migara Mewantha
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
hik_check-0.1.0/PKG-INFO
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: hik_check
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: A script for hikvision device management
|
|
5
|
+
License: MIT License
|
|
6
|
+
|
|
7
|
+
Copyright (c) 2026 Migara Mewantha
|
|
8
|
+
|
|
9
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
10
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
11
|
+
in the Software without restriction, including without limitation the rights
|
|
12
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
13
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
14
|
+
furnished to do so, subject to the following conditions:
|
|
15
|
+
|
|
16
|
+
The above copyright notice and this permission notice shall be included in all
|
|
17
|
+
copies or substantial portions of the Software.
|
|
18
|
+
|
|
19
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
20
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
21
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
22
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
23
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
24
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
25
|
+
SOFTWARE.
|
|
26
|
+
|
|
27
|
+
Requires-Python: >=3.7
|
|
28
|
+
Description-Content-Type: text/markdown
|
|
29
|
+
License-File: LICENSE
|
|
30
|
+
Requires-Dist: requests>=2.25.0
|
|
31
|
+
Dynamic: license-file
|
|
32
|
+
|
|
33
|
+
# hik_check
|
|
34
|
+
|
|
35
|
+
A command-line tool for Hikvision device management.
|
|
36
|
+
|
|
37
|
+
## Installation
|
|
38
|
+
|
|
39
|
+
```bash
|
|
40
|
+
pip install hik_check
|
|
41
|
+
```
|
|
42
|
+
|
|
43
|
+
## Requirements
|
|
44
|
+
|
|
45
|
+
- Python >= 3.7
|
|
46
|
+
- `requests` library (installed automatically)
|
|
47
|
+
|
|
48
|
+
## Usage
|
|
49
|
+
|
|
50
|
+
```bash
|
|
51
|
+
hik_check [options]
|
|
52
|
+
```
|
|
53
|
+
|
|
54
|
+
## Developer
|
|
55
|
+
|
|
56
|
+
Migara Mewantha
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
# hik_check
|
|
2
|
+
|
|
3
|
+
A command-line tool for Hikvision device management.
|
|
4
|
+
|
|
5
|
+
## Installation
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
pip install hik_check
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## Requirements
|
|
12
|
+
|
|
13
|
+
- Python >= 3.7
|
|
14
|
+
- `requests` library (installed automatically)
|
|
15
|
+
|
|
16
|
+
## Usage
|
|
17
|
+
|
|
18
|
+
```bash
|
|
19
|
+
hik_check [options]
|
|
20
|
+
```
|
|
21
|
+
|
|
22
|
+
## Developer
|
|
23
|
+
|
|
24
|
+
Migara Mewantha
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
from .script import main
|
|
@@ -0,0 +1,1073 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
# Developer: Migara Mewantha
|
|
3
|
+
# Hikvision Device Management Script (Python Port)
|
|
4
|
+
# Version: 6.0 (py)
|
|
5
|
+
|
|
6
|
+
# =====================================
|
|
7
|
+
# Installation
|
|
8
|
+
# =====================================
|
|
9
|
+
# pip3 install requests
|
|
10
|
+
# chmod +x hikcheck.py
|
|
11
|
+
# sudo python3 hikcheck.py install
|
|
12
|
+
|
|
13
|
+
import sys
|
|
14
|
+
import os
|
|
15
|
+
import json
|
|
16
|
+
import time
|
|
17
|
+
import argparse
|
|
18
|
+
import random
|
|
19
|
+
import csv
|
|
20
|
+
import subprocess
|
|
21
|
+
import re
|
|
22
|
+
import shutil
|
|
23
|
+
import xml.etree.ElementTree as ET
|
|
24
|
+
from datetime import datetime
|
|
25
|
+
|
|
26
|
+
# Check for requests library
|
|
27
|
+
try:
|
|
28
|
+
import requests
|
|
29
|
+
from requests.auth import HTTPDigestAuth
|
|
30
|
+
from requests.packages.urllib3.exceptions import InsecureRequestWarning
|
|
31
|
+
# Suppress SSL warnings for local IPs
|
|
32
|
+
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
|
|
33
|
+
except ImportError:
|
|
34
|
+
print("\033[0;31mERROR: Python 'requests' library is missing.\033[0m")
|
|
35
|
+
print("Please run: pip3 install requests")
|
|
36
|
+
sys.exit(1)
|
|
37
|
+
|
|
38
|
+
# ================================
|
|
39
|
+
# Configuration & Globals
|
|
40
|
+
# ================================
|
|
41
|
+
|
|
42
|
+
SCRIPT_NAME = "hikcheck"
# Destination used by the `install` / `uninstall` sub-commands.
INSTALL_PATH = f"/usr/local/bin/{SCRIPT_NAME}"
# CONFIG_DIR = os.path.expanduser("~/.config/hikcheck")
# CONFIG_FILE = os.path.join(CONFIG_DIR, "config.json")
# Placeholders only; the real paths are assigned later, once get_user_home()
# is defined (so sudo invocations resolve the invoking user's home).
CONFIG_DIR = ""
CONFIG_FILE = ""

# ANSI colour escape codes used by the print_* helpers.
RED = '\033[0;31m'
GREEN = '\033[0;32m'
BLUE = '\033[1;34m'
YELLOW = '\033[1;33m'
NC = '\033[0m'  # reset / "no colour"

# Default Config — overridden by values loaded from CONFIG_FILE.
# NOTE(review): shipping a default device password in source is a security
# smell; confirm whether credentials should instead be required from config.
DEFAULT_CONFIG = {
    "username": "admin",
    "password": "Hik12345",
    "base_ip": "192.168.1",
    "timeout": 5
}

# Current runtime config (mutated in place by load_config()).
CONFIG = DEFAULT_CONFIG.copy()
|
|
66
|
+
|
|
67
|
+
# ================================
|
|
68
|
+
# Utility Functions
|
|
69
|
+
# ================================
|
|
70
|
+
|
|
71
|
+
def print_error(msg):
    """Write a red ERROR-prefixed message to stderr."""
    sys.stderr.write(f"{RED}ERROR: {msg}{NC}\n")
|
|
73
|
+
|
|
74
|
+
def print_success(msg):
    """Print a green, check-marked success message."""
    print(GREEN + "✓ " + msg + NC)
|
|
76
|
+
|
|
77
|
+
def print_info(msg):
    """Print a blue informational message."""
    print(BLUE + "ℹ " + msg + NC)
|
|
79
|
+
|
|
80
|
+
def print_warning(msg):
    """Print a yellow warning message."""
    print(YELLOW + "⚠ " + msg + NC)
|
|
82
|
+
|
|
83
|
+
def get_user_home():
    """Resolve the invoking user's home directory, honouring sudo.

    When run under sudo, SUDO_USER names the original user; expand that
    user's home instead of root's so config lands in the right place.
    """
    sudo_user = os.environ.get('SUDO_USER')
    if sudo_user is not None:
        return os.path.expanduser(f"~{sudo_user}")
    return os.path.expanduser("~")
|
|
88
|
+
|
|
89
|
+
# Resolve the per-user configuration location once get_user_home() exists,
# so sudo invocations still read the invoking user's config.
# (Removed a leftover debug print of CONFIG_FILE here: this module is imported
# by the package __init__, so the print fired on every `import hik_check`.)
CONFIG_DIR = os.path.join(get_user_home(), ".config", "hikcheck")
CONFIG_FILE = os.path.join(CONFIG_DIR, "config.json")
|
|
93
|
+
|
|
94
|
+
def load_config():
    """Merge values from CONFIG_FILE into the runtime CONFIG, if present.

    Missing file is silently ignored; unreadable/invalid JSON only warns,
    leaving the defaults in effect.
    """
    global CONFIG
    if not os.path.exists(CONFIG_FILE):
        return
    try:
        with open(CONFIG_FILE, 'r') as handle:
            CONFIG.update(json.load(handle))
    except Exception as exc:
        print_warning(f"Failed to load config: {exc}")
|
|
103
|
+
|
|
104
|
+
def create_config():
    """Write the current CONFIG to CONFIG_FILE, creating the directory if needed."""
    os.makedirs(CONFIG_DIR, exist_ok=True)
    with open(CONFIG_FILE, 'w') as handle:
        handle.write(json.dumps(CONFIG, indent=4))
    print_success(f"Configuration file created at {CONFIG_FILE}")
|
|
109
|
+
|
|
110
|
+
def get_session():
    """Build a requests session pre-configured with the device's Digest credentials."""
    session = requests.Session()
    session.auth = HTTPDigestAuth(CONFIG['username'], CONFIG['password'])
    session.headers['Content-Type'] = 'application/json'
    return session
|
|
116
|
+
|
|
117
|
+
def clean_xml(xml_string):
    """Drop the first default xmlns declaration so ElementTree lookups need no prefix."""
    namespace_decl = re.compile(r' xmlns="[^"]+"')
    return namespace_decl.sub('', xml_string, count=1)
|
|
120
|
+
|
|
121
|
+
# ================================
|
|
122
|
+
# Device Query Functions
|
|
123
|
+
# ================================
|
|
124
|
+
|
|
125
|
+
def get_device_info(ip_addr):
    """Fetch and display model/firmware details from /ISAPI/System/deviceInfo.

    Returns:
        True when the info was retrieved and parsed, False on any failure.
    """
    url = f"http://{ip_addr}/ISAPI/System/deviceInfo"
    try:
        resp = get_session().get(url, timeout=CONFIG['timeout'])
    except requests.RequestException:
        print_error("Connection failed")
        return False

    if resp.status_code != 200:
        print_error(f"HTTP Error {resp.status_code}")
        return False

    try:
        root = ET.fromstring(clean_xml(resp.text))
    except ET.ParseError:
        print_error("Failed to parse device info XML")
        return False

    print(f"{BLUE}Device Information:{NC}")
    print(f" Device Model: {root.findtext('model')}")
    print(f" Firmware Version: {root.findtext('firmwareVersion')}")
    print(f" Firmware Build: {root.findtext('firmwareReleasedDate')}")
    print("")
    return True
|
|
150
|
+
|
|
151
|
+
def get_device_time(ip_addr):
    """Fetch and display the device clock and timezone from /ISAPI/System/time.

    Returns:
        True on success, False on connection/HTTP failure.
    """
    url = f"http://{ip_addr}/ISAPI/System/time"
    try:
        resp = get_session().get(url, timeout=CONFIG['timeout'])
        if resp.status_code == 200:
            root = ET.fromstring(clean_xml(resp.text))
            local_time = root.findtext('localTime')
            tz_node = root.find('timeZone')
            tz_string = "Unknown" if tz_node is None else tz_node.text

            # Extract a bare UTC-style offset (e.g. +05:30) from the raw
            # timezone string reported by the device.
            offset_match = re.search(r'[+-][0-9]{1,2}:[0-9]{2}', tz_string)
            tz_offset = offset_match.group(0) if offset_match else "Unknown"

            print(f"{BLUE}Time Details:{NC}")
            print(f" Local Time: {local_time}")
            print(f" Timezone: {tz_offset}")
            print(f" Timezone Data: {tz_string}")
            print("")
            return True
        print_error("Could not retrieve time information")
    except requests.RequestException:
        print_error("Connection failed")
    return False
|
|
176
|
+
|
|
177
|
+
def get_device_counts(ip_addr):
    """Print user, card and face record counts for the device.

    Each count is fetched from its own ISAPI endpoint; a failure on one
    endpoint prints "ERROR" for that line without aborting the others.

    Fix: the original used bare `except:` clauses, which also swallow
    SystemExit/KeyboardInterrupt — narrowed to `except Exception`.
    """
    print(f"{BLUE}Counts:{NC}")
    session = get_session()

    def _simple_count(path, outer_key, inner_key):
        # Fetch a {outer_key: {inner_key: N}} JSON count; None signals failure.
        try:
            r = session.get(f"http://{ip_addr}{path}", timeout=CONFIG['timeout'])
            if r.status_code == 200:
                return r.json().get(outer_key, {}).get(inner_key, 'Error')
        except Exception:
            pass
        return None

    # 1. User Count
    count = _simple_count("/ISAPI/AccessControl/UserInfo/Count?format=json",
                          'UserInfoCount', 'userNumber')
    if count is None:
        print(" User Count: ERROR")
    else:
        print(f" User Count: {count}")

    # 2. Card Count
    count = _simple_count("/ISAPI/AccessControl/CardInfo/Count?format=json",
                          'CardInfoCount', 'cardNumber')
    if count is None:
        print(" Card Count: ERROR")
    else:
        print(f" Card Count: {count}")

    # 3. Face Count — only the "blackFD" face library is reported.
    try:
        r = session.get(f"http://{ip_addr}/ISAPI/Intelligent/FDLib/Count?format=json",
                        timeout=CONFIG['timeout'])
        face_count = "ERROR"
        if r.status_code == 200:
            data = r.json()
            if 'FDRecordDataInfo' in data:
                # Firmware may return a bare dict when there is one library.
                infos = data['FDRecordDataInfo'] if isinstance(data['FDRecordDataInfo'], list) else [data['FDRecordDataInfo']]
                for item in infos:
                    if item.get('faceLibType') == 'blackFD':
                        face_count = item.get('recordDataNumber', 'Error')
                        break

        if face_count == "ERROR" or face_count is None:
            print(" Face Count: ERROR")
        else:
            print(f" Face Count: {face_count}")

    except Exception:
        print(" Face Count: ERROR")
    print("")
|
|
229
|
+
|
|
230
|
+
def test_connection(ip_addr):
    """Diagnose connectivity to a device: ICMP ping, then an ISAPI probe.

    Fix: `subprocess.run(..., check=True)` raises FileNotFoundError when the
    `ping` binary is missing (e.g. minimal containers); the original only
    caught CalledProcessError and would crash — now handled gracefully.
    """
    print_info(f"Testing connection to {ip_addr}...")

    # Ping (1 packet, 2 s deadline)
    try:
        subprocess.run(['ping', '-c', '1', '-W', '2', ip_addr],
                       stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=True)
        print_success("Device is reachable (ping successful)")
    except subprocess.CalledProcessError:
        print_warning("Device did not respond to ping")
    except FileNotFoundError:
        print_warning("'ping' command not found; skipping reachability check")

    # API Check — a 200 from deviceInfo also validates the credentials.
    try:
        r = get_session().get(f"http://{ip_addr}/ISAPI/System/deviceInfo", timeout=CONFIG['timeout'])
        if r.status_code == 200:
            print_success("API connection successful")
            get_device_info(ip_addr)
        else:
            print_error(f"API connection failed (HTTP {r.status_code})")
    except requests.RequestException as e:
        print_error(f"API connection failed: {e}")
|
|
250
|
+
|
|
251
|
+
# ================================
|
|
252
|
+
# Fetch Logic
|
|
253
|
+
# ================================
|
|
254
|
+
|
|
255
|
+
def draw_progress_bar(current, total, width=40):
    """Render a one-line textual progress bar on stdout (carriage-return refresh)."""
    total = max(total, 1)  # guard against division by zero
    percent = min(int(current * 100 / total), 100)

    hashes = int(percent * width / 100)
    bar = ('#' * hashes).ljust(width, '.')

    sys.stdout.write(f"\rProgress: [{bar}] {percent}% ({current}/{total})")
    sys.stdout.flush()
|
|
266
|
+
|
|
267
|
+
def fetch_events(ip_ending, start_time, end_time, timezone):
    """Interactively page AcsEvent records out of a device and optionally save them.

    Args:
        ip_ending: last octet appended to CONFIG['base_ip'] to form the device IP.
        start_time: search-window start timestamp, without UTC offset.
        end_time: search-window end timestamp, same format.
        timezone: UTC offset string appended verbatim to both timestamps
            (e.g. "+05:30"); surrounding whitespace is stripped.

    Side effects: prompts on stdin for device type, event filter, continuation
    and output format; may write <device_ip>.acs_events.json or .csv into the
    current working directory.
    """
    timezone = timezone.strip()
    device_ip = f"{CONFIG['base_ip']}.{ip_ending}"

    print(f"\n{YELLOW}Starting Fetching Events from Device {device_ip}{NC}\n")

    # Input Device Type — controls the major/minor event-type filter below.
    print("Select the device type")
    print(" 1. FR (default)")
    print(" 2. HAC")
    print(" 3. SFR")
    dtype_in = input("Enter the No. : ").strip()

    if dtype_in == "2":
        device_type = "HAC"
    elif dtype_in == "3":
        device_type = "SFR"
    else:
        device_type = "FR"

    print(f"\nStart Time: {start_time}")
    print(f"End Time : {end_time}\n")
    print(f"{GREEN}Choose an option...{NC}")
    print(" 1. Fetch success events only")
    print(" 2. Fetch all events")
    opt_in = input("Enter option: ").strip()

    # major=0/minor=0 means "all events"; specific values filter to
    # success events per device family. HAC uses a smaller page size.
    major = 0
    minor = 0
    page_size = 30

    if opt_in == "1":
        if device_type == "HAC":
            major = 5
            minor = 1
            page_size = 5
        elif device_type in ["SFR", "FR"]:
            major = 5
            minor = 75

    print(f"\nUsing Major={major}, Minor={minor} (Device: {device_type})\n")
    print(f"{GREEN}Connecting to device {device_ip}...{NC}")

    position = 0        # pagination cursor (searchResultPosition)
    total_matches = 0   # reported by the device on each page
    total_fetched = 0
    checkin_count = 0
    checkout_count = 0
    all_events = []

    session = get_session()
    url = f"http://{device_ip}/ISAPI/AccessControl/AcsEvent?format=json"

    while True:
        payload = {
            "AcsEventCond": {
                "searchID": "1",
                "searchResultPosition": position,
                "maxResults": page_size,
                "major": major,
                "minor": minor,
                "startTime": f"{start_time}{timezone}",
                "endTime": f"{end_time}{timezone}"
            }
        }

        # print("=== DEBUG INFO ===")
        # print(f"URL: {url}")
        # print(f"Position: {position}, Page Size: {page_size}")
        # print(f"Major: {major}, Minor: {minor}")
        # print(f"Start Time: {start_time}{timezone}")
        # print(f"End Time: {end_time}{timezone}")
        # print(f"Payload: {json.dumps(payload, indent=2)}")

        try:
            r = session.post(url, json=payload, timeout=CONFIG['timeout'] + 5)
            # print(f"Status Code: {r.status_code}")
            # print(f"Response Headers: {dict(r.headers)}")
            # print(f"Response Body: {r.text}")
            if r.status_code != 200:
                print_error(f"HTTP Error {r.status_code}")
                print_error(r)
                print(payload)
                break

            data = r.json()
            acs_event = data.get("AcsEvent", {})

            # Update Total Matches
            tm = acs_event.get("totalMatches", 0)
            if isinstance(tm, int) and tm > 0:
                total_matches = tm
                print(f"Total Matches: {total_matches}")

                # NOTE(review): this prompt fires on every page that reports
                # totalMatches, not just the first — confirm that is intended.
                is_continue = input("Continue? (y/n): ").strip().lower()
                if is_continue != "y":
                    break

            info_list = acs_event.get("InfoList", [])
            # Sometimes InfoList is a single dict if one result, or list if multiple
            if isinstance(info_list, dict):
                info_list = [info_list]
            elif not info_list:
                info_list = []

            count = len(info_list)
            if count == 0:
                break

            # Store Data
            all_events.extend(info_list)
            total_fetched += count

            draw_progress_bar(total_fetched, total_matches)

            # Count check-ins/outs for SFR
            if device_type == "SFR":
                for ev in info_list:
                    status = ev.get("attendanceStatus")
                    if status == "checkIn":
                        checkin_count += 1
                    elif status == "checkOut":
                        checkout_count += 1

            # Break conditions: stop when the cursor reaches the reported total.
            if position + count >= total_matches:
                break

            position += count

        except Exception as e:
            print_error(f"Exception during fetch: {e}")
            break

    print("\n\n============ SUMMARY ============")
    print(f"IP Address: {device_ip}")
    print(f"Total Actions: {total_matches}")
    print(f"Total Saved: {total_fetched}")
    if device_type == "SFR":
        print(f"Check-ins: {checkin_count}")
        print(f"Check-outs: {checkout_count}")
    print("=================================\n")

    save_q = input("Do you want to save results? (y/n): ").strip().lower()
    if save_q != 'y':
        print("Exiting without saving.")
        return

    print("\nSelect output format:")
    print("1) Pretty JSON")
    print("2) CSV")
    fmt = input("Choose option: ").strip()

    base_filename = f"{device_ip}.acs_events"

    if fmt == "1":
        out_file = f"{base_filename}.json"
        with open(out_file, 'w') as f:
            json.dump(all_events, f, indent=4)
        print(f"Saved pretty JSON → {out_file}")

    elif fmt == "2":
        out_file = f"{base_filename}.csv"
        # CSV Headers: time,name,employeeNo,major,minor,attendanceStatus,serialNo
        with open(out_file, 'w', newline='') as f:
            writer = csv.writer(f)
            writer.writerow(["time", "name", "employeeNo", "major", "minor", "attendanceStatus", "serialNo"])
            for ev in all_events:
                writer.writerow([
                    ev.get("time", ""),
                    ev.get("name", ""),
                    ev.get("employeeNoString", ""),
                    ev.get("major", ""),
                    ev.get("minor", ""),
                    ev.get("attendanceStatus", "N/A"),
                    ev.get("serialNo", "")
                ])
        print(f"Saved CSV → {out_file}")
    else:
        print("Invalid option.")
|
|
447
|
+
|
|
448
|
+
# ================================
|
|
449
|
+
# Wipe Logic
|
|
450
|
+
# ================================
|
|
451
|
+
|
|
452
|
+
def wipe_device(ip_ending, dev_type_arg="FR"):
    """Delete ALL users/events from a device after a random-code confirmation.

    Args:
        ip_ending: last octet appended to CONFIG['base_ip'].
        dev_type_arg: device family; "HAC" additionally polls the card count
            until it reaches zero (up to 24 * 5 s) to verify the wipe.

    Fix: the HAC polling loop used a bare `except:` which also swallowed
    KeyboardInterrupt — narrowed to `except Exception` so Ctrl-C works.
    """
    if not ip_ending:
        print_error("No IP ending provided.")
        return

    ip = f"{CONFIG['base_ip']}.{ip_ending}"
    dev_type = dev_type_arg.upper()

    print(f"{RED}========================================={NC}")
    print(f"{RED} ⚠ DANGER ZONE: Wiping Device {ip} {NC}")
    print(f"{RED}========================================={NC}")

    if dev_type == "HAC":
        print_info("Mode: High Assurance (HAC) - Will verify deletion.")

    print("This action will delete ALL users and events.\n")

    # Random 4-digit confirmation code guards against accidental wipes.
    captcha = random.randint(1000, 9999)
    print(f"To confirm, type the code: {YELLOW}{captcha}{NC}")
    user_input = input("Enter code: ").strip()

    if user_input != str(captcha):
        print(f"\n{RED}Incorrect code. Operation cancelled.{NC}")
        return

    print(f"\n{YELLOW}Sending wipe command...{NC}")

    session = get_session()
    # PUT request to delete all
    wipe_payload = {
        "UserInfoDetail": {
            "mode": "all",
            "EmployeeNoList": []  # Empty list required by some firmware
        }
    }

    try:
        r = session.put(
            f"http://{ip}/ISAPI/AccessControl/UserInfoDetail/Delete?format=json",
            json=wipe_payload,
            timeout=CONFIG['timeout']
        )

        if r.status_code == 200:
            print_success("Wipe command accepted.")
        else:
            print_error(f"Failed to wipe. Code: {r.status_code} Msg: {r.text}")
            return
    except Exception as e:
        print_error(f"Connection failed: {e}")
        return

    # HAC Polling — the device clears its database asynchronously, so watch
    # the card count until it hits zero or we give up after ~2 minutes.
    if dev_type == "HAC":
        print(f"{YELLOW}Waiting for database to clear (checking every 5s)...{NC}")
        loop_limit = 24
        loop_count = 0

        while loop_count < loop_limit:
            try:
                r = session.get(f"http://{ip}/ISAPI/AccessControl/CardInfo/Count?format=json", timeout=2)
                if r.status_code == 200:
                    cnt = r.json().get('CardInfoCount', {}).get('cardNumber', -1)
                    if cnt == 0:
                        print(f"\r{GREEN}Database cleared! (Cards: 0){NC} ")
                        break
                    sys.stdout.write(f"\r{BLUE}Processing... Current Card Count: {cnt} ...{NC}")
                    sys.stdout.flush()
            except Exception:
                # Transient polling failures are expected while the device
                # is busy wiping; just retry on the next tick.
                pass

            time.sleep(5)
            loop_count += 1
        print("")  # newline
    else:
        print("Refreshing data...")
        time.sleep(2)

    print("\n-----------------------------------------")
    print(" Final Device Status")
    print("-----------------------------------------")
    print("")
    get_device_counts(ip)
|
|
535
|
+
|
|
536
|
+
|
|
537
|
+
|
|
538
|
+
|
|
539
|
+
# Query User details
|
|
540
|
+
def fetch_user(ip_addr, user_state_id, mode):
    """
    Fetch user information from access control device.

    Args:
        ip_addr: IP address of the device
        user_state_id: Employee number to search for
        mode: If False, show simple logs. If True, return full response data.

    Returns:
        dict if mode is True, None otherwise

    Fixes: uses the configured credentials (the original hard-coded
    'admin'/'Hik12345', ignoring CONFIG and disagreeing with get_session);
    guards against an empty UserInfo list, which previously raised
    IndexError even when the device reported "OK".
    """

    url = f"http://{ip_addr}/ISAPI/AccessControl/UserInfo/Search?format=json"

    payload = {
        "UserInfoSearchCond": {
            "maxResults": 10,
            "searchID": "1",
            "searchResultPosition": 0,
            "EmployeeNoList": [{"employeeNo": user_state_id}]
        }
    }

    try:
        response = requests.post(
            url,
            auth=HTTPDigestAuth(CONFIG['username'], CONFIG['password']),
            headers={"Content-Type": "application/json"},
            json=payload,
            timeout=10
        )

        response.raise_for_status()
        data = response.json()

        # Treat "OK" with a non-empty UserInfo list as a match.
        search = data.get("UserInfoSearch", {})
        user_list = search.get("UserInfo") or []
        if search.get("responseStatusStrg") == "OK" and user_list:
            user_name = user_list[0].get("name", "Unknown")

            if not mode:
                print(f"User name: {user_name}")
                print(f"Successfully synced to device {ip_addr}")
            else:
                return data
        else:
            # No match found
            if not mode:
                print(f"No match at device {ip_addr}")
            else:
                return data

    except requests.exceptions.RequestException as e:
        if not mode:
            print(f"Error connecting to device {ip_addr}: {str(e)}")
        else:
            return {"error": str(e)}
|
|
598
|
+
|
|
599
|
+
|
|
600
|
+
# ================================
|
|
601
|
+
# Installation
|
|
602
|
+
# ================================
|
|
603
|
+
|
|
604
|
+
def install_script():
    """Copy this script to INSTALL_PATH and create the default config (root only)."""
    if os.geteuid() != 0:
        print_warning("Please run installation with sudo")
        sys.exit(1)

    print_info(f"Installing to {INSTALL_PATH}...")

    # Install whatever file is currently being executed.
    source_path = os.path.abspath(sys.argv[0])
    try:
        shutil.copy(source_path, INSTALL_PATH)
        os.chmod(INSTALL_PATH, 0o755)
        create_config()
        print_success("Installation complete!")
        print_info(f"You can now use '{SCRIPT_NAME}' from anywhere")
    except Exception as exc:
        print_error(f"Installation failed: {exc}")
|
|
621
|
+
|
|
622
|
+
def uninstall_script():
    """Remove the installed script and, on request, the configuration directory."""
    print_warning("Uninstalling hikcheck...")
    if os.path.exists(INSTALL_PATH):
        try:
            # Removing from /usr/local/bin needs write access (i.e. root).
            if not os.access(INSTALL_PATH, os.W_OK):
                print_error("Permission denied. Run with sudo.")
                return
            os.remove(INSTALL_PATH)
            print_success(f"Removed {INSTALL_PATH}")
        except Exception as exc:
            print_error(f"Error removing script: {exc}")

    if os.path.exists(CONFIG_DIR):
        answer = input("Remove configuration directory? (y/n): ").strip().lower()
        if answer == 'y':
            shutil.rmtree(CONFIG_DIR)
            print_success("Configuration removed")
|
|
640
|
+
|
|
641
|
+
# ================================
|
|
642
|
+
# Main
|
|
643
|
+
# ================================
|
|
644
|
+
|
|
645
|
+
|
|
646
|
+
# ================================
|
|
647
|
+
# User Data Batch Fetch
|
|
648
|
+
# ================================
|
|
649
|
+
|
|
650
|
+
def save_users_csv(users, ip_addr, output_dir):
    """
    Write fetched user records to users_<ip>.csv inside output_dir.

    Returns the CSV path on success, or None when there is nothing to
    save or the write fails.
    """
    if not users:
        print_warning(f"No users to save for {ip_addr}")
        return None

    try:
        os.makedirs(output_dir, exist_ok=True)
        csv_path = os.path.join(output_dir, f"users_{ip_addr}.csv")

        with open(csv_path, 'w', newline='') as handle:
            out = csv.writer(handle)
            # Headers: userstate, name, starttime, endtime
            out.writerow(["userstate", "name", "starttime", "endtime"])

            for record in users:
                validity = record.get("Valid", {})
                out.writerow([
                    record.get("employeeNo", ""),
                    record.get("name", ""),
                    validity.get("beginTime", ""),
                    validity.get("endTime", ""),
                ])

        print_success(f"Saved {len(users)} users to {csv_path}")
        return csv_path
    except Exception as exc:
        print_error(f"Failed to save CSV: {exc}")
        return None
|
|
681
|
+
|
|
682
|
+
def fetch_all_users(ip_addr: str) -> list:
    """
    Fetches all users from the device using pagination.
    Returns a list of user dicts.

    Pages through /ISAPI/AccessControl/UserInfo/Search in chunks of 30,
    refreshing the HTTP session on 401 responses (up to MAX_RETRIES times)
    before giving up. Partial results gathered before any error are still
    returned.
    """
    print(f"\n{BLUE}Fetching all users from {ip_addr}...{NC}")

    session = get_session()
    url = f"http://{ip_addr}/ISAPI/AccessControl/UserInfo/Search?format=json"

    all_users = []        # accumulated user dicts across all pages
    position = 0          # searchResultPosition for the next request
    page_size = 30        # maxResults requested per page
    total_matches = 0     # total reported by the device; drives the progress bar

    # Progress bar initialization (overwritten in place via \r)
    sys.stdout.write(f"\rInitializing fetch...")
    sys.stdout.flush()

    retry_count = 0
    MAX_RETRIES = 3

    while True:
        payload = {
            "UserInfoSearchCond": {
                "maxResults": page_size,
                "searchID": "1",
                "searchResultPosition": position,
                # Empty list == no employee-number filter (fetch everyone)
                "EmployeeNoList": []
            }
        }

        try:
            r = session.post(url, json=payload, timeout=CONFIG['timeout'] + 5)

            # Handle 401 specifically by refreshing the session; `position`
            # is NOT advanced, so the same page is requested again.
            if r.status_code == 401:
                if retry_count < MAX_RETRIES:
                    retry_count += 1
                    session = get_session()  # Refresh session/nonce
                    time.sleep(1)  # Small backoff
                    continue  # Retry the same request
                else:
                    print_error("Multiple 401 errors. credentials might be wrong or device is rejecting requests.")
                    break

            if r.status_code != 200:
                print_error(f"HTTP Error {r.status_code}")
                break

            # Reset retry count on success so each page gets a fresh budget
            retry_count = 0

            data = r.json()
            search_result = data.get("UserInfoSearch", {})

            # Update total matches for progress bar (device may omit/zero it)
            tm = search_result.get("totalMatches", 0)
            if tm > 0:
                total_matches = tm

            # Extract users; a single result may come back as a bare dict
            users = search_result.get("UserInfo", [])
            if isinstance(users, dict):
                users = [users]

            if not users:
                break

            all_users.extend(users)
            current_count = len(all_users)

            # Draw progress
            draw_progress_bar(current_count, total_matches)

            # "MORE" means additional pages remain; anything else ends the loop
            status = search_result.get("responseStatusStrg", "OK")

            if status != "MORE":
                break

            # Safety stop even if the device keeps reporting MORE
            if current_count >= total_matches and total_matches > 0:
                break

            position += len(users)

        except Exception as e:
            print_error(f"Error during fetch: {e}")
            break

    print(f"\n{GREEN}Fetch complete. Total users: {len(all_users)}{NC}")
    return all_users
|
|
776
|
+
|
|
777
|
+
|
|
778
|
+
# ================================
|
|
779
|
+
# Card Data Batch Fetch
|
|
780
|
+
# ================================
|
|
781
|
+
|
|
782
|
+
def save_cards_csv(cards, ip_addr, output_dir):
    """
    Save a list of card dicts to ``cards_<ip>.csv`` inside *output_dir*.

    Args:
        cards: Card dicts as returned by ``fetch_all_cards``; each may carry
            "employeeNo", "cardNo", "cardType" and "leaderCard".
        ip_addr: Device IP address, used only to name the output file.
        output_dir: Directory to create (if missing) and write into.

    Returns:
        The path of the written CSV file, or None when there is nothing to
        save or the write fails.
    """
    if not cards:
        print_warning(f"No cards to save for {ip_addr}")
        return None

    try:
        os.makedirs(output_dir, exist_ok=True)
        csv_file = os.path.join(output_dir, f"cards_{ip_addr}.csv")

        # Explicit utf-8 for consistency with save_users_csv and to avoid
        # platform-default-encoding failures on non-ASCII field values.
        with open(csv_file, 'w', newline='', encoding='utf-8') as f:
            writer = csv.writer(f)
            writer.writerow(["employeeNo", "cardNo", "cardType", "leaderCard"])
            for c in cards:
                writer.writerow([
                    c.get("employeeNo", ""),
                    c.get("cardNo", ""),
                    c.get("cardType", ""),
                    c.get("leaderCard", "")
                ])

        print_success(f"Saved {len(cards)} cards to {csv_file}")
        return csv_file
    except Exception as e:
        print_error(f"Failed to save CSV: {e}")
        return None
|
|
808
|
+
|
|
809
|
+
|
|
810
|
+
def fetch_all_cards(ip_addr: str) -> list:
    """
    Fetches all cards from the device using pagination.

    Pages through /ISAPI/AccessControl/CardInfo/Search in chunks of 100,
    refreshing the HTTP session on 401 responses (up to MAX_RETRIES times)
    before giving up. Partial results gathered before any error are still
    returned.
    """
    print(f"\n{BLUE}Fetching all cards from {ip_addr}...{NC}")

    session = get_session()
    url = f"http://{ip_addr}/ISAPI/AccessControl/CardInfo/Search?format=json"

    all_cards = []        # accumulated card dicts across all pages
    position = 0          # searchResultPosition for the next request
    page_size = 100       # maxResults requested per page
    total_matches = 0     # total reported by the device; drives the progress bar

    # Progress bar initialization (overwritten in place via \r)
    sys.stdout.write(f"\rInitializing fetch...")
    sys.stdout.flush()

    retry_count = 0
    MAX_RETRIES = 3

    while True:
        payload = {
            "CardInfoSearchCond": {
                "searchID": "1",
                "searchResultPosition": position,
                "maxResults": page_size
            }
        }

        try:
            r = session.post(url, json=payload, timeout=CONFIG['timeout'] + 5)

            # 401: refresh session and retry the SAME page (position unchanged)
            if r.status_code == 401:
                if retry_count < MAX_RETRIES:
                    retry_count += 1
                    session = get_session()
                    time.sleep(1)
                    continue
                else:
                    print_error("Multiple 401 errors. Credentials may be wrong.")
                    break

            if r.status_code != 200:
                print_error(f"HTTP Error {r.status_code}")
                break

            # Reset retry budget after each successful page
            retry_count = 0

            data = r.json()
            search_result = data.get("CardInfoSearch", {})

            # Device may omit/zero totalMatches; keep the last positive value
            tm = search_result.get("totalMatches", 0)
            if tm > 0:
                total_matches = tm

            # A single result may come back as a bare dict instead of a list
            cards = search_result.get("CardInfo", [])
            if isinstance(cards, dict):
                cards = [cards]

            if not cards:
                break

            all_cards.extend(cards)
            current_count = len(all_cards)

            draw_progress_bar(current_count, total_matches)

            # "MORE" means additional pages remain; anything else ends the loop
            status = search_result.get("responseStatusStrg", "OK")
            if status != "MORE":
                break

            # Safety stop even if the device keeps reporting MORE
            if current_count >= total_matches and total_matches > 0:
                break

            position += len(cards)

        except Exception as e:
            print_error(f"Error during fetch: {e}")
            break

    print(f"\n{GREEN}Fetch complete. Total cards: {len(all_cards)}{NC}")
    return all_cards
|
|
890
|
+
|
|
891
|
+
|
|
892
|
+
# ================================
|
|
893
|
+
# Main
|
|
894
|
+
# ================================
|
|
895
|
+
|
|
896
|
+
def _batch_export(ips, dir_prefix, noun, banner, fetch_fn, save_fn):
    """
    Fetch records from every device in *ips*, write one CSV per device into
    a timestamped directory, and print an execution summary.

    Args:
        ips: IP endings, combined with CONFIG['base_ip'].
        dir_prefix: Prefix for the timestamped output directory.
        noun: Record label for the summary ("Users" or "Cards").
        banner: Per-device header template; formatted with ``ip=<full ip>``.
        fetch_fn: Callable(full_ip) -> list of record dicts.
        save_fn: Callable(records, full_ip, out_dir) -> saved path or None.
    """
    summary_logs = []
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    batch_dir = f"{dir_prefix}_{timestamp}"

    print(f"\n{BLUE}Output Directory: {batch_dir}{NC}\n")

    for ip_end in ips:
        full_ip = f"{CONFIG['base_ip']}.{ip_end}"
        print(banner.format(ip=full_ip))
        records = fetch_fn(full_ip)
        saved_path = save_fn(records, full_ip, batch_dir)

        summary_logs.append({
            "device": full_ip,
            "count": len(records),
            "saved_at": saved_path if saved_path else "FAILED"
        })

    print("\n" + "="*40)
    print(" EXECUTION SUMMARY")
    print("="*40)
    for log in summary_logs:
        print(f"Device: {log['device']}")
        print(f" - {noun} Fetched: {log['count']}")
        print(f" - Saved To: {log['saved_at']}")
        print("-" * 20)
    print("="*40 + "\n")


def main():
    """
    CLI entry point: load config, parse the command line, apply overrides,
    and dispatch to the requested subcommand.
    """
    load_config()

    parser = argparse.ArgumentParser(description="Hikvision Device Management Tool (Python)")
    subparsers = parser.add_subparsers(dest="command")

    # Subcommands without extra arguments
    subparsers.add_parser("install", help="Install hikcheck to system")
    subparsers.add_parser("uninstall", help="Remove hikcheck from system")
    subparsers.add_parser("config", help="Edit configuration")

    # Count
    p_count = subparsers.add_parser("count", help="Get device counts")
    p_count.add_argument("ips", nargs="+", help="IP endings")

    # Data
    p_data = subparsers.add_parser("data", help="Get device data")
    p_data.add_argument("ips", nargs="+", help="IP endings (e.g. 165 166)")

    # Info
    p_info = subparsers.add_parser("info", help="Get device info only")
    p_info.add_argument("ips", nargs="+", help="IP endings")

    # Time
    p_time = subparsers.add_parser("time", help="Get device time settings")
    p_time.add_argument("ips", nargs="+", help="IP endings")

    # Test
    p_test = subparsers.add_parser("test", help="Test connection")
    p_test.add_argument("ip", help="IP ending")

    # Wipe
    p_wipe = subparsers.add_parser("wipe", help="Wipe device data")
    p_wipe.add_argument("ip", help="IP ending")
    p_wipe.add_argument("type", nargs="?", default="FR", help="Device type (default: FR)")

    # Fetch (access events)
    p_fetch = subparsers.add_parser("fetch", help="Fetch access events")
    p_fetch.add_argument("ip", help="IP ending")
    p_fetch.add_argument("start", help="Start time (YYYY-MM-DDThh:mm:ss)")
    p_fetch.add_argument("end", help="End time (YYYY-MM-DDThh:mm:ss)")
    p_fetch.add_argument("tz", help="Timezone offset (e.g. +05:30). For negative, use ' -- -05:00'")

    # Global overrides (apply to every subcommand)
    parser.add_argument("-u", "--username", help="Override username")
    parser.add_argument("-p", "--password", help="Override password")
    parser.add_argument("-b", "--base-ip", help="Override base IP")
    parser.add_argument("-t", "--timeout", type=int, help="Override timeout")

    # User lookup on readers
    p_user_data = subparsers.add_parser("user_data", help="Get user details from reader")
    p_user_data.add_argument("user_state_id", help="Latest user state Id")
    p_user_data.add_argument("ips", nargs="+", help="IP endings")
    p_user_data.add_argument("--json", dest="mode", action="store_true", help="Output raw JSON instead of text logs")

    # Users batch fetch
    p_users = subparsers.add_parser("users", help="Fetch all users and save to CSV")
    p_users.add_argument("ips", nargs="+", help="IP endings")

    # Cards batch fetch
    p_cards = subparsers.add_parser("cards", help="Fetch all cards and save to CSV")
    p_cards.add_argument("ips", nargs="+", help="IP endings")

    args = parser.parse_args()

    # Apply overrides. `is not None` (not truthiness) so explicit "empty" or
    # zero values (e.g. -p "" or -t 0) are honored instead of ignored.
    if args.username is not None:
        CONFIG['username'] = args.username
    if args.password is not None:
        CONFIG['password'] = args.password
    if args.base_ip is not None:
        CONFIG['base_ip'] = args.base_ip
    if args.timeout is not None:
        CONFIG['timeout'] = args.timeout

    if args.command == "install":
        install_script()
    elif args.command == "uninstall":
        uninstall_script()
    elif args.command == "config":
        # Open the config file in the user's preferred editor
        editor = os.environ.get('EDITOR', 'nano')
        subprocess.call([editor, CONFIG_FILE])
    elif args.command == "data":
        for ip_end in args.ips:
            full_ip = f"{CONFIG['base_ip']}.{ip_end}"
            print(f"=========================================\n Device Report for: {full_ip}\n=========================================")
            get_device_info(full_ip)
            get_device_time(full_ip)
            get_device_counts(full_ip)
    elif args.command == "info":
        for ip_end in args.ips:
            full_ip = f"{CONFIG['base_ip']}.{ip_end}"
            print(f"=========================================\n Device Info for: {full_ip}\n=========================================")
            get_device_info(full_ip)
    elif args.command == "time":
        for ip_end in args.ips:
            full_ip = f"{CONFIG['base_ip']}.{ip_end}"
            print(f"=========================================\n Time Settings for: {full_ip}\n=========================================")
            get_device_time(full_ip)
    elif args.command == "test":
        test_connection(f"{CONFIG['base_ip']}.{args.ip}")
    elif args.command == "wipe":
        wipe_device(args.ip, args.type)
    elif args.command == "fetch":
        fetch_events(args.ip, args.start, args.end, args.tz)
    elif args.command == "count":
        for ip_end in args.ips:
            full_ip = f"{CONFIG['base_ip']}.{ip_end}"
            print(f"=========================================\n Device Counts for: {full_ip}\n=========================================")
            get_device_counts(full_ip)
    elif args.command == "user_data":
        for ip_end in args.ips:
            full_ip = f"{CONFIG['base_ip']}.{ip_end}"
            print(f"=========================================\n Checking the Readers for user: {full_ip} \n================================")
            res = fetch_user(full_ip, args.user_state_id, args.mode)
            if args.mode and res:
                print(json.dumps(res, indent=4))
    elif args.command == "users":
        _batch_export(
            args.ips,
            "hikcheck_users_batch",
            "Users",
            "=========================================\n Fetching Users from: {ip} \n================================",
            fetch_all_users,
            save_users_csv,
        )
    elif args.command == "cards":
        _batch_export(
            args.ips,
            "hikcheck_cards_batch",
            "Cards",
            "=========================================\n Fetching Cards from: {ip} \n=========================================",
            fetch_all_cards,
            save_cards_csv,
        )
    else:
        parser.print_help()
|
|
1071
|
+
|
|
1072
|
+
# Script entry point (also invoked via the console_scripts hook in pyproject).
if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: hik_check
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: A script for hikvision device management
|
|
5
|
+
License: MIT License
|
|
6
|
+
|
|
7
|
+
Copyright (c) 2026 Migara Mewantha
|
|
8
|
+
|
|
9
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
10
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
11
|
+
in the Software without restriction, including without limitation the rights
|
|
12
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
13
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
14
|
+
furnished to do so, subject to the following conditions:
|
|
15
|
+
|
|
16
|
+
The above copyright notice and this permission notice shall be included in all
|
|
17
|
+
copies or substantial portions of the Software.
|
|
18
|
+
|
|
19
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
20
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
21
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
22
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
23
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
24
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
25
|
+
SOFTWARE.
|
|
26
|
+
|
|
27
|
+
Requires-Python: >=3.7
|
|
28
|
+
Description-Content-Type: text/markdown
|
|
29
|
+
License-File: LICENSE
|
|
30
|
+
Requires-Dist: requests>=2.25.0
|
|
31
|
+
Dynamic: license-file
|
|
32
|
+
|
|
33
|
+
# hik_check
|
|
34
|
+
|
|
35
|
+
A command-line tool for Hikvision device management.
|
|
36
|
+
|
|
37
|
+
## Installation
|
|
38
|
+
|
|
39
|
+
```bash
|
|
40
|
+
pip install hik_check
|
|
41
|
+
```
|
|
42
|
+
|
|
43
|
+
## Requirements
|
|
44
|
+
|
|
45
|
+
- Python >= 3.7
|
|
46
|
+
- `requests` library (installed automatically)
|
|
47
|
+
|
|
48
|
+
## Usage
|
|
49
|
+
|
|
50
|
+
```bash
|
|
51
|
+
hik_check [options]
|
|
52
|
+
```
|
|
53
|
+
|
|
54
|
+
## Developer
|
|
55
|
+
|
|
56
|
+
Migara Mewantha
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
LICENSE
|
|
2
|
+
README.md
|
|
3
|
+
pyproject.toml
|
|
4
|
+
hik_check/__init__.py
|
|
5
|
+
hik_check/script.py
|
|
6
|
+
hik_check.egg-info/PKG-INFO
|
|
7
|
+
hik_check.egg-info/SOURCES.txt
|
|
8
|
+
hik_check.egg-info/dependency_links.txt
|
|
9
|
+
hik_check.egg-info/entry_points.txt
|
|
10
|
+
hik_check.egg-info/requires.txt
|
|
11
|
+
hik_check.egg-info/top_level.txt
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
requests>=2.25.0
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
hik_check
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["setuptools>=61.0"]
|
|
3
|
+
build-backend = "setuptools.build_meta"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "hik_check"
|
|
7
|
+
version = "0.1.0"
|
|
8
|
+
description = "A script for hikvision device management"
|
|
9
|
+
readme = "README.md"
|
|
10
|
+
license = { file = "LICENSE" }
|
|
11
|
+
requires-python = ">=3.7"
|
|
12
|
+
dependencies = [
|
|
13
|
+
"requests>=2.25.0",
|
|
14
|
+
]
|
|
15
|
+
|
|
16
|
+
[project.scripts]
|
|
17
|
+
hik_check = "hik_check.script:main"
|