qlsdk2 0.4.1__tar.gz → 0.5.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. qlsdk2-0.5.0/PKG-INFO +40 -0
  2. qlsdk2-0.5.0/README.md +12 -0
  3. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/setup.py +2 -2
  4. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/core/entity/__init__.py +1 -0
  5. qlsdk2-0.5.0/src/qlsdk/core/utils.py +75 -0
  6. qlsdk2-0.5.0/src/qlsdk/persist/__init__.py +3 -0
  7. qlsdk2-0.5.0/src/qlsdk/persist/ars_edf.py +278 -0
  8. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/persist/rsc_edf.py +47 -28
  9. qlsdk2-0.5.0/src/qlsdk/persist/stream.py +161 -0
  10. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/rsc/command/__init__.py +28 -30
  11. qlsdk2-0.5.0/src/qlsdk/rsc/device/__init__.py +7 -0
  12. qlsdk2-0.5.0/src/qlsdk/rsc/device/arskindling.py +386 -0
  13. qlsdk2-0.5.0/src/qlsdk/rsc/device/base.py +455 -0
  14. qlsdk2-0.5.0/src/qlsdk/rsc/device/c16_rs.py +205 -0
  15. qlsdk2-0.4.1/src/qlsdk/rsc/device/c64_rs.py → qlsdk2-0.5.0/src/qlsdk/rsc/device/c256_rs.py +3 -2
  16. qlsdk2-0.5.0/src/qlsdk/rsc/device/c64_rs.py +359 -0
  17. qlsdk2-0.4.1/src/qlsdk/rsc/device/base.py → qlsdk2-0.5.0/src/qlsdk/rsc/device/c64s1.py +27 -50
  18. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/rsc/device/device_factory.py +6 -1
  19. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/rsc/interface/device.py +12 -2
  20. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/rsc/interface/parser.py +4 -1
  21. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/rsc/manager/container.py +21 -17
  22. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/rsc/network/discover.py +25 -36
  23. qlsdk2-0.5.0/src/qlsdk/rsc/parser/base.py +150 -0
  24. qlsdk2-0.5.0/src/qlsdk2.egg-info/PKG-INFO +40 -0
  25. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk2.egg-info/SOURCES.txt +7 -1
  26. qlsdk2-0.5.0/test/test.222.py +29 -0
  27. qlsdk2-0.4.1/PKG-INFO +0 -121
  28. qlsdk2-0.4.1/README.md +0 -93
  29. qlsdk2-0.4.1/src/qlsdk/core/utils.py +0 -70
  30. qlsdk2-0.4.1/src/qlsdk/persist/__init__.py +0 -2
  31. qlsdk2-0.4.1/src/qlsdk/rsc/command/message.py +0 -336
  32. qlsdk2-0.4.1/src/qlsdk/rsc/device/__init__.py +0 -2
  33. qlsdk2-0.4.1/src/qlsdk/rsc/parser/base.py +0 -66
  34. qlsdk2-0.4.1/src/qlsdk2.egg-info/PKG-INFO +0 -121
  35. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/setup.cfg +0 -0
  36. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/__init__.py +0 -0
  37. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/ar4/__init__.py +0 -0
  38. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/ar4m/__init__.py +0 -0
  39. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/core/__init__.py +0 -0
  40. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/core/crc/__init__.py +0 -0
  41. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/core/crc/crctools.py +0 -0
  42. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/core/device.py +0 -0
  43. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/core/exception.py +0 -0
  44. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/core/filter/__init__.py +0 -0
  45. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/core/filter/norch.py +0 -0
  46. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/core/local.py +0 -0
  47. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/core/message/__init__.py +0 -0
  48. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/core/message/command.py +0 -0
  49. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/core/message/tcp.py +0 -0
  50. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/core/message/udp.py +0 -0
  51. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/core/network/__init__.py +0 -0
  52. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/core/network/monitor.py +0 -0
  53. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/persist/edf.py +0 -0
  54. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/rsc/__init__.py +0 -0
  55. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/rsc/eegion.py +0 -0
  56. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/rsc/entity.py +0 -0
  57. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/rsc/interface/__init__.py +0 -0
  58. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/rsc/interface/command.py +0 -0
  59. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/rsc/interface/handler.py +0 -0
  60. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/rsc/manager/__init__.py +0 -0
  61. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/rsc/manager/search.py +0 -0
  62. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/rsc/network/__init__.py +0 -0
  63. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/rsc/paradigm.py +0 -0
  64. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/rsc/parser/__init__.py +0 -0
  65. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/rsc/proxy.py +0 -0
  66. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/sdk/__init__.py +0 -0
  67. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/sdk/ar4sdk.py +0 -0
  68. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/sdk/hub.py +0 -0
  69. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/sdk/libs/libAr4SDK.dll +0 -0
  70. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/sdk/libs/libwinpthread-1.dll +0 -0
  71. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/x8/__init__.py +0 -0
  72. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/x8m/__init__.py +0 -0
  73. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk2.egg-info/dependency_links.txt +0 -0
  74. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk2.egg-info/requires.txt +0 -0
  75. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk2.egg-info/top_level.txt +0 -0
  76. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/test/test.py +0 -0
  77. {qlsdk2-0.4.1 → qlsdk2-0.5.0}/test/test_ar4m.py +0 -0
qlsdk2-0.5.0/PKG-INFO ADDED
@@ -0,0 +1,40 @@
+ Metadata-Version: 2.2
+ Name: qlsdk2
+ Version: 0.5.0
+ Summary: SDK for quanlan device
+ Home-page: https://github.com/hehuajun/qlsdk
+ Author: hehuajun
+ Author-email: hehuajun@eegion.com
+ Classifier: Programming Language :: Python :: 3
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Operating System :: OS Independent
+ Requires-Python: >=3.9
+ Description-Content-Type: text/markdown
+ Requires-Dist: loguru>=0.6.0
+ Requires-Dist: numpy>=1.23.5
+ Requires-Dist: bitarray>=1.5.3
+ Provides-Extra: dev
+ Requires-Dist: pytest>=6.0; extra == "dev"
+ Requires-Dist: twine>=3.0; extra == "dev"
+ Dynamic: author
+ Dynamic: author-email
+ Dynamic: classifier
+ Dynamic: description
+ Dynamic: description-content-type
+ Dynamic: home-page
+ Dynamic: requires-dist
+ Dynamic: requires-python
+ Dynamic: summary
+
+ Version: v0.5.0
+ Date: 2025-07-29
+ [New features]
+ 1. C16R device search
+ 2. C16R device connection
+ 3. C16R signal acquisition start/stop
+ 4. C16R data automatically recorded to file
+ 5. C16R acquisition channels can be selected by number or by name (the two modes can be mixed)
+
+ [Improvements]
+ 1. Faster signal reception and command parsing
+ 2. Refined log levels and log messages
qlsdk2-0.5.0/README.md ADDED
@@ -0,0 +1,12 @@
+ Version: v0.5.0
+ Date: 2025-07-29
+ [New features]
+ 1. C16R device search
+ 2. C16R device connection
+ 3. C16R signal acquisition start/stop
+ 4. C16R data automatically recorded to file
+ 5. C16R acquisition channels can be selected by number or by name (the two modes can be mixed)
+
+ [Improvements]
+ 1. Faster signal reception and command parsing
+ 2. Refined log levels and log messages
{qlsdk2-0.4.1 → qlsdk2-0.5.0}/setup.py CHANGED
@@ -1,12 +1,12 @@
  # setup.py
  import setuptools

- with open("README.md", "r") as fh:
+ with open("README.md", "r", encoding='utf-8' ) as fh:
      long_description = fh.read()

  setuptools.setup(
      name="qlsdk2",
-     version="0.4.1",
+     version="0.5.0",
      author="hehuajun",
      author_email="hehuajun@eegion.com",
      description="SDK for quanlan device",
{qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/core/entity/__init__.py CHANGED
@@ -26,6 +26,7 @@ class RscPacket(object):
          self.time_stamp = int.from_bytes(body[0:8], 'little')
          self.result = body[8]
          self.pkg_id = int.from_bytes(body[9: 13], 'little')
+         logger.trace(f"pkg_id: {self.pkg_id}")
          self.channels = to_channels(body[13: 45])
          self.origin_sample_rate = int.from_bytes(body[45: 49], 'little')
          self.sample_rate = int.from_bytes(body[49: 53], 'little')
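The added logger.trace call sits inside the header parsing shown above, which fixes the byte layout of an RSC data packet. A self-contained illustration of just that layout, a sketch with offsets and little-endian order taken from the lines above (field names mirror the attributes used there; anything past byte 53 is not shown in this diff):

# Sketch of the RscPacket header layout visible in the hunk above.
# Only bytes 0-52 are covered; the rest of the packet is not shown in this diff.
def parse_rsc_header(body: bytes) -> dict:
    return {
        "time_stamp": int.from_bytes(body[0:8], "little"),            # 8-byte timestamp
        "result": body[8],                                             # 1-byte result field
        "pkg_id": int.from_bytes(body[9:13], "little"),                # 4-byte packet counter
        "channel_mask": body[13:45],                                   # 32-byte channel bitmask (decoded by to_channels)
        "origin_sample_rate": int.from_bytes(body[45:49], "little"),   # 4 bytes
        "sample_rate": int.from_bytes(body[49:53], "little"),          # 4 bytes
    }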
qlsdk2-0.5.0/src/qlsdk/core/utils.py ADDED
@@ -0,0 +1,75 @@
+ from bitarray import bitarray
+ from loguru import logger
+ # Convert a channel list [1, 2, 3, ...] to bytes
+ def to_bytes(channels: list[int], upper=256) -> bytes:
+     byte_len = int(upper / 8)
+     channel = [0] * byte_len
+     result = bitarray(upper)
+     result.setall(0)
+     for i in range(len(channels)):
+         if channels[i] > 0 and channels[i] <= upper:
+             # Bits are counted from the low end of each byte
+             m = (channels[i] - 1) % 8
+             result[channels[i] + 6 - 2 * m] = 1
+     return result.tobytes()
+
+ # Convert channel bytes back to a list [1, 2, 3, ...]
+ def to_channels(data: bytes) -> list[int]:
+     ba = bitarray()
+     ba.frombytes(data)
+     channels = []
+     for i in range(len(ba)):
+         if ba[i] == 1:
+             m = i % 8
+             channels.append(i + 8 - 2 * m)
+
+     channels.sort()
+     return channels
+
+ def bytes_to_ints(b):
+     """Convert little-endian 3-byte values to a list of ints"""
+     if len(b) % 3 != 0:
+         raise ValueError("The length of the input bytes must be a multiple of 3")
+     return [
+         b[i] | (b[i + 1] << 8) | (b[i + 2] << 16)
+         for i in range(0, len(b), 3)
+     ]
+
+ def bytes_to_int(b: bytes) -> int:
+     """Convert a single little-endian 3-byte value to an int"""
+     if len(b) != 3:
+         raise ValueError("The input must be exactly 3 bytes")
+     return b[0] | (b[1] << 8) | (b[2] << 16)
+ def bytes_to_ints2(b):
+     """Convert little-endian 3-byte values to a list of ints"""
+     if len(b) % 3 != 0:
+         raise ValueError("The length of the input bytes must be a multiple of 3")
+     return [
+         b[i] | (b[i + 1] << 8) | (b[i + 2] << 16)
+         for i in range(0, len(b), 3)
+     ]
+
+ import numpy as np
+ if __name__ == "__main__":
+
+ # channels = [1]
+ # channels1 = [1, 2, 3, 4]
+ # channels2 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46,47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64]
+ # channels3 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86,87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145,146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176,177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207,208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238,239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256]
+ # channels4 = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63]
+     # logger.info(to_bytes(channels).hex())
+     # logger.info(to_bytes(channels1).hex())
+     # logger.info(to_bytes(channels2).hex())
+     # logger.info(to_bytes(channels3).hex())
+     # logger.info(to_bytes(channels4).hex())
+
+     # bs = 'ffffffffffffff7f000000000000000000000000000000000000000000000000'
+     # bs1 = '8000000000000000000000000000000000000000000000000000000000000000'
+     # bs2 = '0100000000000000000000000000000000000000000000000000000000000000'
+
+     # logger.info(to_channels(bytes.fromhex(bs1)))
+     # logger.info(to_channels(bytes.fromhex(bs2)))
+
+     aa = 'ff3fff3fff3fff3f000000000000000000000000000000000000000000000000'
+
+     logger.info(to_channels(bytes.fromhex(aa)))
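The two helpers above define the channel bitmask used by the protocol: each byte of the mask carries eight channels, with the lowest-numbered channel of that byte in its least significant bit, which is why the ff3f-patterned mask in the __main__ block decodes to channels 1-14, 17-30, 33-46 and 49-62. A small round-trip check, a sketch that assumes qlsdk2 0.5.0 is installed so qlsdk.core.utils is importable (otherwise paste the functions inline):

# Round-trip sketch for to_bytes/to_channels and the 3-byte sample helper.
from qlsdk.core.utils import to_bytes, to_channels, bytes_to_ints

mask = to_bytes([1, 2, 8, 9, 62])      # channel list -> 32-byte bitmask (upper=256)
assert to_channels(mask) == [1, 2, 8, 9, 62]
print(mask[:2].hex())                  # "8301": byte 0 holds channels 1-8, byte 1 holds 9-16

raw = bytes([0x01, 0x00, 0x00, 0xFF, 0xFF, 0x7F])
print(bytes_to_ints(raw))              # [1, 8388607] -- unsigned 3-byte little-endian values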
qlsdk2-0.5.0/src/qlsdk/persist/__init__.py ADDED
@@ -0,0 +1,3 @@
+ from .edf import EdfHandler
+ from .rsc_edf import RscEDFHandler
+ from .ars_edf import ARSKindlingEDFHandler
qlsdk2-0.5.0/src/qlsdk/persist/ars_edf.py ADDED
@@ -0,0 +1,278 @@
+ from datetime import datetime
+ import os
+ from threading import Lock
+ from loguru import logger
+ import numpy as np
+
+ from qlsdk.core.entity import RscPacket
+ from qlsdk.persist.rsc_edf import RscEDFHandler
+ from qlsdk.persist.stream import EDF_FILE_TYPE, EDFStreamWriter
+
+
+ def intersection_positions(A, B):
+     setB = set(B)
+     seen = set()
+     return [idx for idx, elem in enumerate(A)
+             if elem in setB and elem not in seen and not seen.add(elem)]
+
+ # Normalizes the data structure and writes EDF files through the shared writer class
+ # The constructor takes every parameter required before writing EDF
+ # Real-time packets are device-specific and carry the EEG data payload
+ class ARSKindlingEDFHandler(object):
+     '''
+     Rsc EDF Handler
+     Handles reading and writing of EDF files.
+     The RSC device channel count depends on the selection; all channels share the same sample rate.
+     eeg_sample_rate: sample rate
+     physical_max: physical maximum (uV)
+     physical_min: physical minimum (uV)
+     resolution: sample resolution
+     storage_path: storage path
+
+     @author: qlsdk
+     @since: 0.4.0
+     '''
+     def __init__(self, eeg_sample_rate, physical_max, physical_min, resolution=24, storage_path = None):
+         # EDF file parameters
+         self.physical_max = physical_max
+         self.physical_min = physical_min
+         self.digital_max = 8388607 if resolution == 24 else 32767
+         self.digital_min = -8388607 if resolution == 24 else - 32768
+         self.file_type = EDF_FILE_TYPE["bdf"] if resolution == 24 else EDF_FILE_TYPE["edf"]
+         # Sample resolution
+         self.resolution = resolution
+         # Number of EEG channels
+         self.channels = None
+         # EEG sample rate
+         self.sample_rate = eeg_sample_rate
+         # bytes per second
+         self.bytes_per_second = 0
+         self._edf_writer = None
+         self._cache2 = tuple()
+         self._recording = False
+         self._edf_writer = None
+         self.annotations = None
+         # Size of each data chunk
+         self._chunk = np.array([])
+         self._duration = 0
+         self._points = 0
+         self._first_pkg_id = None
+         self._last_pkg_id = None
+         self._first_timestamp = None
+         self._start_time = None
+         self._end_time = None
+         self._patient_code = "patient_code"
+         self._patient_name = "patient_name"
+         self._device_type = "24130032"
+         self._device_no = "24130032"
+         self._total_packets = 0
+         self._lost_packets = 0
+         self._storage_path = storage_path
+         self._edf_writer_thread = None
+         self._file_prefix = None
+
+         # The kindling stimulator is 1-to-4; this layout is fixed by the device and hard-coded here
+         self._edf_handler = {}
+         self._edf_Handler_A = None
+         self._edf_Handler_B = None
+         self._edf_Handler_C = None
+         self._edf_Handler_D = None
+
+         self._channel_spilt = {
+             "A" : [49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62],
+             "B" : [33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46],
+             "C" : [17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30],
+             "D" : [7, 8, 5, 6, 3, 4, 1, 2, 9, 10, 11, 12, 13, 14],
+         }
+
+         self._channel_mapping = {
+             1: "D7",
+             2: "D8",
+             3: "D5",
+             4: "D6",
+             5: "D3",
+             6: "D4",
+             7: "D1",
+             8: "D2",
+             9: "D9",
+             10: "D10",
+             11: "D11",
+             12: "D12",
+             13: "D13",
+             14: "D14",
+
+             17: "C7",
+             18: "C8",
+             19: "C5",
+             20: "C6",
+             21: "C3",
+             22: "C4",
+             23: "C1",
+             24: "C2",
+             25: "C9",
+             26: "C10",
+             27: "C11",
+             28: "C12",
+             29: "C13",
+             30: "C14",
+
+             33: "B7",
+             34: "B8",
+             35: "B5",
+             36: "B6",
+             37: "B3",
+             38: "B4",
+             39: "B1",
+             40: "B2",
+             41: "B9",
+             42: "B10",
+             43: "B11",
+             44: "B12",
+             45: "B13",
+             46: "B14",
+
+             49: "A7",
+             50: "A8",
+             51: "A5",
+             52: "A6",
+             53: "A3",
+             54: "A4",
+             55: "A1",
+             56: "A2",
+             57: "A9",
+             58: "A10",
+             59: "A11",
+             60: "A12",
+             61: "A13",
+             62: "A14"
+         }
+
+         self._lock = Lock()
+
+     @property
+     def file_name(self):
+         suffix = "bdf" if self.resolution == 24 else "edf"
+
+         # File name
+         file_name = f"{self._file_prefix}_{self._device_no}_{self._start_time.strftime('%y%m%d%H%I%M')}.{suffix}" if self._file_prefix else f"{self._device_no}_{self._start_time.strftime('%y%m%d%H%I%M')}.{suffix}"
+
+         if self._storage_path:
+             try:
+                 # Create the directory automatically; ignore it if it already exists
+                 os.makedirs(self._storage_path, exist_ok=True)
+
+                 return f"{self._storage_path}/{file_name}"
+             except Exception as e:
+                 logger.error(f"Failed to create directory [{self._storage_path}]: {e}")
+
+         return file_name
+
+     def set_device_type(self, device_type):
+         if device_type == 0x39:
+             self._device_type = "C64RS"
+         elif device_type == 0x40:
+             self._device_type = "LJ64S1"
+         else:
+             self._device_type = hex(device_type)
+
+     def set_device_no(self, device_no):
+         self._device_no = device_no
+
+     def set_storage_path(self, storage_path):
+         self._storage_path = storage_path
+
+     def set_file_prefix(self, file_prefix):
+         self._file_prefix = file_prefix
+
+     def set_patient_code(self, patient_code):
+         self._patient_code = patient_code
+
+     def set_patient_name(self, patient_name):
+         self._patient_name = patient_name
+
+     def write(self, packet: RscPacket):
+         # logger.trace(f"packet: {packet}")
+         if packet is None:
+             # self._edf_writer_thread.stop_recording()
+             for k in self._edf_handler.keys():
+                 self._edf_handler[k].append(None)
+             return
+
+         # Write the data partition by partition
+
+         for k in self._channel_spilt.keys():
+             logger.info(f'Partition {k}, {self._channel_spilt[k]}')
+             p = packet
+             self.writeA(p, self._channel_spilt[k], k)
+
+         # with self._lock:
+         #     if self.channels is None:
+         #         logger.info(f"Start recording data to file...")
+         #         self.channels = packet.channels
+         #         self._first_pkg_id = packet.pkg_id if self._first_pkg_id is None else self._first_pkg_id
+         #         self._first_timestamp = packet.time_stamp if self._first_timestamp is None else self._first_timestamp
+         #         self._start_time = datetime.now()
+         #         logger.info(f"First packet id: {self._first_pkg_id }, timestamp: {self._first_timestamp}, current time: {datetime.now().timestamp()} offset: {datetime.now().timestamp() - self._first_timestamp}")
+
+         #     if self._last_pkg_id and self._last_pkg_id != packet.pkg_id - 1:
+         #         self._lost_packets += packet.pkg_id - self._last_pkg_id - 1
+         #         logger.warning(f"Packets lost: {self._last_pkg_id} -> {packet.pkg_id}, lost count: {packet.pkg_id - self._last_pkg_id - 1}")
+
+         #     self._last_pkg_id = packet.pkg_id
+         #     self._total_packets += 1
+
+         #     if self._edf_writer_thread is None:
+         #         self._edf_writer_thread = EDFStreamWriter(self.channels, self.sample_rate, self.physical_max, self.physical_min, self.file_type, self.file_name)
+         #         self._edf_writer_thread.set_start_time(self._start_time)
+         #         self._edf_writer_thread.start()
+         #         logger.info(f"Start writing data: {self.file_name}")
+
+         #     self._edf_writer_thread.append(packet.eeg)
+
+     def writeA(self, packet: RscPacket, channel_filter, name='A'):
+         # Parameter checks
+         if packet is None or channel_filter is None:
+             logger.warning("Empty data, ignored")
+             return
+
+         channel_pos = intersection_positions(packet.channels, channel_filter)
+
+         if channel_pos is None or len(channel_pos) == 0 :
+             logger.debug(f"No channels selected for partition {name}, skipping")
+             pass
+
+         # Write the packet for this partition
+         if name not in self._edf_handler.keys():
+             edf_handler = RscEDFHandler(self.sample_rate, self.digital_max , self.digital_min, self.resolution)
+             edf_handler.set_device_type(self._device_type)
+             edf_handler.set_device_no(self._device_no)
+             edf_handler.set_storage_path(self._storage_path)
+             edf_handler.set_file_prefix(f'{self._file_prefix}_{name}' if self._file_prefix else name)
+             logger.info(f"Start writing partition {name} data to file")
+             self._edf_handler[name] = edf_handler
+
+         # Keep only this partition's channels and data
+         channels = [packet.channels[p] for p in channel_pos]
+         eeg = [packet.eeg[p] for p in channel_pos]
+
+         # Build a new packet instance
+         data = RscPacket()
+         data.time_stamp = packet.time_stamp
+         data.pkg_id = packet.pkg_id
+         data.channels = channels
+         data.origin_sample_rate = packet.origin_sample_rate
+         data.sample_rate = packet.sample_rate
+         data.sample_num = packet.sample_num
+         data.resolution = packet.resolution
+         data.trigger = packet.trigger
+         data.eeg = eeg
+
+         self._edf_handler[name].write(data)
+
+     # Trigger annotation
+     # desc: annotation text
+     # cur_time: device timestamp; do not set it for triggers that were not issued by the device
+     def trigger(self, desc, cur_time=None):
+         # Triggers are currently (20250702) shared by all partitions, so the mark is written to every partition file
+         for k in self._edf_handler.keys():
+             self._edf_handler[k].trigger(desc, cur_time)
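ARSKindlingEDFHandler fans each incoming RscPacket out to up to four per-partition RscEDFHandler files (A-D), using intersection_positions to locate each partition's channel numbers inside packet.channels and then copying only those positions into a new packet. A self-contained sketch of that selection step, with made-up example values standing in for a real packet:

# The selection step used by writeA above, shown with example values.
def intersection_positions(A, B):
    setB = set(B)
    seen = set()
    return [idx for idx, elem in enumerate(A)
            if elem in setB and elem not in seen and not seen.add(elem)]

packet_channels = [1, 2, 7, 8, 17, 18, 49, 50]                      # channels present in one packet (example)
partition_d = [7, 8, 5, 6, 3, 4, 1, 2, 9, 10, 11, 12, 13, 14]       # the fixed "D" partition from _channel_spilt

pos = intersection_positions(packet_channels, partition_d)
print(pos)                                   # [0, 1, 2, 3]: indices of channels 1, 2, 7, 8
print([packet_channels[p] for p in pos])     # [1, 2, 7, 8]: the partition-D channels in this packet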
{qlsdk2-0.4.1 → qlsdk2-0.5.0}/src/qlsdk/persist/rsc_edf.py CHANGED
@@ -1,4 +1,5 @@
  from datetime import datetime
+ import time
  from multiprocessing import Lock, Queue
  from time import time_ns
  from pyedflib import FILETYPE_BDFPLUS, FILETYPE_EDFPLUS, EdfWriter
@@ -77,8 +78,9 @@ class EDFStreamWriter(Thread):
          if self._writer is None:
              self._init_writer()

-         while True:
-             if self._recording or (not self.data_queue.empty()):
+         waits = 300
+         while waits > 0:
+             if not self.data_queue.empty():
                  try:
                      data = self.data_queue.get(timeout=30)
                      if data is None:
@@ -88,13 +90,20 @@ class EDFStreamWriter(Thread):
                      self._points += len(data[1])
                      logger.trace(f"Data points processed: {self._points}")
                      self._write_file(data)
+                     # Data arrived, reset the counter
+                     waits = 100  # reset the wait counter
                  except Exception as e:
                      logger.error(f"Stopped on exception or timeout (30s): {str(e)}")
                      break
              else:
-                 logger.debug("Data recording finished")
-                 break
+                 time.sleep(0.1)
+                 # Wait up to 30s while recording, 3s otherwise
+                 if self._recording:
+                     waits -= 1
+                 else:
+                     waits -= 10

+         logger.info(f"Data recording finished: {self.file_path}")
          self.close()

      def _init_writer(self):
@@ -159,6 +168,9 @@
          # Transpose to (samples, channels) when writing
          self._writer.writeSamples(data_float64)
          self._duration += 1
+
+         if self._duration % 10 == 0:  # log progress every 10 seconds
+             logger.info(f"Recording... file: {self.file_path}, recorded duration: {self._duration}s")

  # Normalizes the data structure and writes EDF files through the shared writer class
  # The constructor takes every parameter required before writing EDF
@@ -199,7 +211,6 @@ class RscEDFHandler(object):
          self.annotations = None
          # Size of each data chunk
          self._chunk = np.array([])
-         self._Lock = Lock()
          self._duration = 0
          self._points = 0
          self._first_pkg_id = None
@@ -217,6 +228,8 @@
          self._edf_writer_thread = None
          self._file_prefix = None

+         self._lock = Lock()
+
      @property
      def file_name(self):
          suffix = "bdf" if self.resolution == 24 else "edf"
@@ -240,8 +253,12 @@
              self._device_type = "C64RS"
          elif device_type == 0x40:
              self._device_type = "LJ64S1"
+         elif device_type == 0x60:
+             self._device_type = "ARSKindling"
+         elif device_type == 0x339:
+             self._device_type = "C16R"
          else:
-             self._device_type = hex(device_type)
+             self._device_type = device_type

      def set_device_no(self, device_no):
          self._device_no = device_no
@@ -259,33 +276,35 @@
          self._patient_name = patient_name

      def write(self, packet: RscPacket):
-         logger.debug(f"packet: {packet}")
+         # logger.trace(f"packet: {packet}")
          if packet is None:
+             logger.info(f"Stop signal received, about to stop writing data: {self.file_name}")
              self._edf_writer_thread.stop_recording()
              return

-         if self.channels is None:
-             logger.info(f"Start recording data to file...")
-             self.channels = packet.channels
-             self._first_pkg_id = packet.pkg_id if self._first_pkg_id is None else self._first_pkg_id
-             self._first_timestamp = packet.time_stamp if self._first_timestamp is None else self._first_timestamp
-             self._start_time = datetime.now()
-             logger.info(f"First packet id: {self._first_pkg_id }, timestamp: {self._first_timestamp}, current time: {datetime.now().timestamp()} offset: {datetime.now().timestamp() - self._first_timestamp}")
-
-         if self._last_pkg_id and self._last_pkg_id != packet.pkg_id - 1:
-             self._lost_packets += packet.pkg_id - self._last_pkg_id - 1
-             logger.warning(f"Packets lost: {self._last_pkg_id} -> {packet.pkg_id}, lost count: {packet.pkg_id - self._last_pkg_id - 1}")
-
-         self._last_pkg_id = packet.pkg_id
-         self._total_packets += 1
-
-         if self._edf_writer_thread is None:
-             self._edf_writer_thread = EDFStreamWriter(self.channels, self.sample_rate, self.physical_max, self.physical_min, self.file_type, self.file_name)
-             self._edf_writer_thread.set_start_time(self._start_time)
-             self._edf_writer_thread.start()
-             logger.info(f"Start writing data: {self.file_name}")
+         with self._lock:
+             if self.channels is None:
+                 logger.debug(f"Start recording data to file...")
+                 self.channels = packet.channels
+                 self._first_pkg_id = packet.pkg_id if self._first_pkg_id is None else self._first_pkg_id
+                 self._first_timestamp = packet.time_stamp if self._first_timestamp is None else self._first_timestamp
+                 self._start_time = datetime.now()
+                 logger.debug(f"First packet id: {self._first_pkg_id }, timestamp: {self._first_timestamp}, current time: {datetime.now().timestamp()} offset: {datetime.now().timestamp() - self._first_timestamp}")
+
+             if self._last_pkg_id and self._last_pkg_id != packet.pkg_id - 1:
+                 self._lost_packets += packet.pkg_id - self._last_pkg_id - 1
+                 logger.warning(f"Packets lost: {self._last_pkg_id} -> {packet.pkg_id}, lost count: {packet.pkg_id - self._last_pkg_id - 1}")
+
+             self._last_pkg_id = packet.pkg_id
+             self._total_packets += 1

-         self._edf_writer_thread.append(packet.eeg)
+             if self._edf_writer_thread is None:
+                 self._edf_writer_thread = EDFStreamWriter(self.channels, self.sample_rate, self.physical_max, self.physical_min, self.file_type, self.file_name)
+                 self._edf_writer_thread.set_start_time(self._start_time)
+                 self._edf_writer_thread.start()
+                 logger.info(f"Start writing data: {self.file_name}")
+
+             self._edf_writer_thread.append(packet.eeg)


  # Trigger annotation
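The EDFStreamWriter changes above replace the old unbounded while True loop with a countdown: waits starts at 300, every empty poll sleeps 0.1 s and subtracts 1 while recording (or 10 once recording has stopped), and any dequeued chunk resets it to 100. A stripped-down, self-contained sketch of that idle-timeout pattern (the queue, the recording flag and the write callback are stand-ins local to this example, not qlsdk classes):

# Idle-timeout drain loop mirroring the waits counter introduced above.
import queue
import time
from typing import Any, Callable

def drain(q: "queue.Queue[Any]", is_recording: Callable[[], bool], write: Callable[[Any], None]) -> None:
    waits = 300                                   # ~30 s of polling at 0.1 s per empty check
    while waits > 0:
        if not q.empty():
            write(q.get(timeout=30))
            waits = 100                           # data arrived: reset the countdown (~10 s at the recording rate)
        else:
            time.sleep(0.1)
            waits -= 1 if is_recording() else 10  # count down 10x faster once recording has stopped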