@superhero/eventflow-db 4.0.0

Files changed (42)
  1. package/README.md +401 -0
  2. package/config.js +30 -0
  3. package/index.js +662 -0
  4. package/index.test.js +266 -0
  5. package/package.json +34 -0
  6. package/sql/event/delete-by-id.sql +3 -0
  7. package/sql/event/delete-by-pid-domain.sql +4 -0
  8. package/sql/event/persist.sql +3 -0
  9. package/sql/event/read-by-id.sql +3 -0
  10. package/sql/event/read-by-pid-domain.sql +4 -0
  11. package/sql/event/schema.sql +19 -0
  12. package/sql/event_cpid/delete.sql +4 -0
  13. package/sql/event_cpid/persist.sql +3 -0
  14. package/sql/event_cpid/read-by-cpid-domain.sql +6 -0
  15. package/sql/event_cpid/read-by-event_id.sql +3 -0
  16. package/sql/event_cpid/schema.sql +12 -0
  17. package/sql/event_eid/delete.sql +4 -0
  18. package/sql/event_eid/persist.sql +3 -0
  19. package/sql/event_eid/read-by-eid-domain.sql +6 -0
  20. package/sql/event_eid/read-by-eid.sql +5 -0
  21. package/sql/event_eid/read-by-event_id.sql +3 -0
  22. package/sql/event_eid/schema.sql +12 -0
  23. package/sql/event_published/persist.sql +3 -0
  24. package/sql/event_published/schema.sql +23 -0
  25. package/sql/event_published/update-to-consumed-by-hub.sql +5 -0
  26. package/sql/event_published/update-to-consumed-by-spoke.sql +5 -0
  27. package/sql/event_published/update-to-failed.sql +3 -0
  28. package/sql/event_published/update-to-orphan.sql +3 -0
  29. package/sql/event_published/update-to-success.sql +3 -0
  30. package/sql/event_scheduled/persist.sql +3 -0
  31. package/sql/event_scheduled/read.sql +4 -0
  32. package/sql/event_scheduled/schema.sql +20 -0
  33. package/sql/event_scheduled/update-executed.sql +3 -0
  34. package/sql/event_scheduled/update-failed.sql +3 -0
  35. package/sql/event_scheduled/update-success.sql +3 -0
  36. package/sql/hub/persist.sql +3 -0
  37. package/sql/hub/read-online-hubs.sql +3 -0
  38. package/sql/hub/schema.sql +16 -0
  39. package/sql/hub/update-to-quit.sql +3 -0
  40. package/sql/log/archive.sql +5 -0
  41. package/sql/log/persist.sql +3 -0
  42. package/sql/log/schema.sql +14 -0
package/index.test.js ADDED
@@ -0,0 +1,266 @@
+ import Config from '@superhero/config'
+ import Locator from '@superhero/locator'
+ import { locate } from '@superhero/eventflow-db'
+ import assert from 'node:assert/strict'
+ import { after, suite, test } from 'node:test'
+
+ suite('@superhero/eventflow-db', async () =>
+ {
+   const
+     config = new Config(),
+     locator = new Locator()
+
+   locator.set('@superhero/config', config)
+   await config.add('./config.js')
+
+   const db = locate(locator)
+
+   test('Setup table schemas', async (sub) =>
+   {
+     await assert.doesNotReject(db.setupTableSchemas())
+
+     await sub.test('Persist a hub', async (sub) =>
+     {
+       const hub =
+       {
+         id : 'test_hub_id_' + Date.now().toString(36),
+         external_ip : '127.0.0.1',
+         external_port : 50001,
+         internal_ip : '127.0.0.1',
+         internal_port : 50001,
+       }
+
+       await assert.doesNotReject(db.persistHub(hub))
+
+       await sub.test('Read online hubs', async () =>
+       {
+         const hubs = await db.readOnlineHubs()
+         assert.ok(hubs.length > 0, 'Online hubs should be returned')
+         assert.equal(hubs[0].id, hub.id)
+         assert.equal(hubs[0].external_ip, hub.external_ip)
+         assert.equal(hubs[0].internal_ip, hub.internal_ip)
+         assert.equal(hubs[0].external_port, hub.external_port)
+         assert.equal(hubs[0].internal_port, hub.internal_port)
+       })
+
+       await sub.test('Persisting an event should generate an ID if not provided', async (sub) =>
+       {
+         const
+           domain = 'foo',
+           pid = 'bar',
+           name = 'baz',
+           event = { domain, pid, name, data: { qux: 'foobar' } },
+           id = await db.persistEvent(event)
+
+         assert.ok(id)
+         event.id = id
+
+         await sub.test('Read an event by id should return the same data as when persisted the event', async () =>
+         {
+           const readEvent = await db.readEvent(event.id)
+
+           assert.equal(readEvent.id, event.id)
+           assert.equal(readEvent.domain, event.domain)
+           assert.equal(readEvent.pid, event.pid)
+           assert.equal(readEvent.name, event.name)
+           assert.deepEqual(readEvent.data, event.data)
+         })
+
+         await sub.test('Schedule a persisted event', async (sub) =>
+         {
+           const scheduledEvent = { event_id:event.id, scheduled: new Date() }
+
+           const success = await db.persistEventScheduled(scheduledEvent)
+           assert.ok(success, 'Event should be scheduled')
+
+           await sub.test('Read all scheduled events', async () =>
+           {
+             const events = await db.readEventsScheduled()
+             assert.ok(events.length > 0, 'Scheduled events should be returned')
+           })
+
+           await sub.test('Update scheduled event as executed', async () =>
+           {
+             const updated = await db.updateEventScheduledExecuted(scheduledEvent.event_id)
+             assert.ok(updated, 'Scheduled event should be marked as executed')
+           })
+
+           await sub.test('Update scheduled event as success', async () =>
+           {
+             const updated = await db.updateEventScheduledSuccess(scheduledEvent.event_id)
+             assert.ok(updated, 'Scheduled event should be marked as success')
+           })
+
+           await sub.test('Update scheduled event as failed', async () =>
+           {
+             const updated = await db.updateEventScheduledFailed(scheduledEvent.event_id)
+             assert.ok(updated, 'Scheduled event should be marked as failed')
+           })
+         })
+
+         await sub.test('Publish a persisted event', async (sub) =>
+         {
+           const publishedEvent = { event_id:event.id, publisher:'spoke_id' }
+           const success = await db.persistEventPublished(publishedEvent)
+           assert.ok(success, 'Event should be published')
+
+           await sub.test('Update published event to consumed by hub', async () =>
+           {
+             const success = await db.updateEventPublishedToConsumedByHub(event.id, hub.id)
+             assert.ok(success, 'Event should be updated to consumed by hub')
+           })
+
+           await sub.test('Update published event to consumed by spoke', async () =>
+           {
+             const success = await db.updateEventPublishedToConsumedBySpoke(event.id, publishedEvent.publisher)
+             assert.ok(success, 'Event should be updated to consumed by spoke')
+           })
+
+           await sub.test('Update published event to success', async () =>
+           {
+             const success = await db.updateEventPublishedToSuccess(event.id)
+             assert.ok(success, 'Event should be updated to success')
+           })
+
+           await sub.test('Update published event to failed', async () =>
+           {
+             const success = await db.updateEventPublishedToFailed(event.id)
+             assert.ok(success, 'Event should be updated to failed')
+           })
+
+           await sub.test('Update published event to orphan', async () =>
+           {
+             const success = await db.updateEventPublishedToOrphan(id)
+             assert.ok(success, 'Event should be updated to orphan')
+           })
+         })
+
+         await sub.test('Persist event cpid association', async (sub) =>
+         {
+           const cpid = 'test_cpid'
+           const success = await db.persistEventCpid(event.id, cpid)
+           assert.ok(success, 'Event cpid association should be persisted')
+
+           await sub.test('Read events by domain and cpid', async () =>
+           {
+             const events = await db.readEventsByDomainAndCpid(event.domain, cpid)
+             assert.ok(events.length, 'Events found by domain and cpid')
+           })
+
+           await sub.test('Read associated cpid by event id', async () =>
+           {
+             const readCpid = await db.readEventCpidByEventId(event.id)
+             assert.ok(readCpid.length > 0, 'Event cpid association should be found')
+             assert.equal(readCpid[0], cpid)
+           })
+
+           await sub.test('Delete associated cpid by event id', async () =>
+           {
+             const success = await db.deleteEventCpid(event.id, cpid)
+             assert.ok(success, 'Event cpid association should be deleted')
+           })
+
+           await sub.test('Read deleted associated cpid by event id returns empty', async () =>
+           {
+             const readCpid = await db.readEventCpidByEventId(event.id)
+             assert.ok(readCpid.length === 0, 'Event cpid association should be empty')
+           })
+         })
+
+         await sub.test('Persist event eid association', async (sub) =>
+         {
+           const eid = 'test_eid'
+           const success = await db.persistEventEid(event.id, eid)
+           assert.ok(success, 'Event eid association should be persisted')
+
+           await sub.test('Read events by eid', async () =>
+           {
+             const events = await db.readEventsByEid(eid)
+             assert.ok(events.length, 'Events found by eid')
+           })
+
+           await sub.test('Read events by domain and eid', async () =>
+           {
+             const events = await db.readEventsByDomainAndEid(event.domain, eid)
+             assert.ok(events.length, 'Events found by domain and eid')
+           })
+
+           await sub.test('Read associated eid by event id', async () =>
+           {
+             const readEid = await db.readEventEidByEventId(event.id)
+             assert.ok(readEid.length > 0, 'Event eid association should be found')
+             assert.equal(readEid[0], eid)
+           })
+
+           await sub.test('Delete associated eid by event id', async () =>
+           {
+             const success = await db.deleteEventEid(event.id, eid)
+             assert.ok(success, 'Event eid association should be deleted')
+           })
+
+           await sub.test('Read deleted associated eid by event id returns empty', async () =>
+           {
+             const readEid = await db.readEventEidByEventId(event.id)
+             assert.ok(readEid.length === 0, 'Event eid association should be empty')
+           })
+         })
+
+         await sub.test('Delete event', async (sub) =>
+         {
+           const success = await db.deleteEvent(event.id)
+           assert.ok(success, 'Event should be deleted')
+
+           await sub.test('Reading a deleted event rejects', async () =>
+           {
+             await assert.rejects(db.readEvent(event.id))
+           })
+         })
+
+         await sub.test('By domain and pid', async (sub) =>
+         {
+           const event_id = await db.persistEvent(event)
+           assert.ok(event_id, 'Event should be persisted')
+
+           await sub.test('Read event by domain and pid', async () =>
+           {
+             const eventlog = await db.readEventsByDomainAndPid(domain, pid)
+             assert.ok(eventlog.length > 0, 'Event should be found')
+           })
+
+           await sub.test('Delete event by domain and pid', async () =>
+           {
+             const success = await db.deleteEventByDomainAndPid(event.domain, event.pid)
+             assert.ok(success, 'Should be possible to delete event by domain and pid')
+           })
+
+           await sub.test('Read empty eventlog by domain and pid', async () =>
+           {
+             const eventlog = await db.readEventsByDomainAndPid(domain, pid)
+             assert.ok(eventlog.length === 0, 'Eventlog should be empty')
+           })
+         })
+       })
+
+       await sub.test('Persist log', async () =>
+       {
+         const log = { agent:hub.id, message: 'Test log', error: { message: 'Test error' } }
+         await assert.doesNotReject(db.persistLog(log))
+       })
+
+       await sub.test('Update hub to quit', async () =>
+       {
+         const updated = await db.updateHubToQuit(hub.id)
+         assert.ok(updated, 'Hub should be updated to quit')
+       })
+     })
+
+     await sub.test('Reading a non existing event should reject with an error', async () =>
+     {
+       await assert.rejects(
+         db.readEvent('nonExistingId'),
+         { code : 'E_EVENTFLOW_DB_EVENT_NOT_FOUND' })
+     })
+   })
+
+   after(() => db.destroy())
+ })
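
The test file above doubles as a walkthrough of the public API. Below is a minimal bootstrap sketch distilled from it; it assumes a local ./config.js that supplies the MySQL connection settings (the file is listed in this diff, but its contents are not shown), and the hub values are illustrative:

  import Config     from '@superhero/config'
  import Locator    from '@superhero/locator'
  import { locate } from '@superhero/eventflow-db'

  const config  = new Config()
  const locator = new Locator()

  // the db service resolves its connection settings through the locator
  locator.set('@superhero/config', config)
  await config.add('./config.js')

  const db = locate(locator)
  await db.setupTableSchemas() // creates the tables defined under sql/

  // register this process as a hub (see sql/hub/persist.sql further down);
  // the id and addresses here are placeholder values
  const hub = { id:'hub_1', external_ip:'127.0.0.1', external_port:50001,
                internal_ip:'127.0.0.1', internal_port:50001 }
  await db.persistHub(hub)

  // persistEvent generates and returns an id when none is provided
  const id    = await db.persistEvent({ domain:'foo', pid:'bar', name:'baz', data:{} })
  const event = await db.readEvent(id)
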
package/package.json ADDED
@@ -0,0 +1,34 @@
+ {
+   "name": "@superhero/eventflow-db",
+   "version": "4.0.0",
+   "description": "Eventflow db is a set of common database logic in the eventflow ecosystem.",
+   "keywords": [
+     "eventflow"
+   ],
+   "main": "src/config.js",
+   "license": "MIT",
+   "type": "module",
+   "exports": {
+     ".": "./index.js"
+   },
+   "dependencies": {
+     "mysql2": "^3.11.5",
+     "@superhero/db": "^0.5.0"
+   },
+   "devDependencies": {
+     "@superhero/config": "4.1.2",
+     "@superhero/locator": "4.2.0"
+   },
+   "scripts": {
+     "build": "docker run --rm --name eventflow-mysql -e MYSQL_ROOT_PASSWORD=root -e MYSQL_DATABASE=eventflow -p 3306:3306 -d mysql:latest",
+     "test": "node --trace-warnings --test --experimental-test-coverage"
+   },
+   "author": {
+     "name": "Erik Landvall",
+     "email": "erik@landvall.se"
+   },
+   "repository": {
+     "type": "git",
+     "url": "git+https://github.com/superhero/eventflow-db.git"
+   }
+ }
package/sql/event/delete-by-id.sql ADDED
@@ -0,0 +1,3 @@
+ DELETE
+ FROM event
+ WHERE id = ?
package/sql/event/delete-by-pid-domain.sql ADDED
@@ -0,0 +1,4 @@
+ DELETE
+ FROM event
+ WHERE pid = ?
+ AND domain = ?
package/sql/event/persist.sql ADDED
@@ -0,0 +1,3 @@
+ INSERT
+ INTO event
+ SET ?
package/sql/event/read-by-id.sql ADDED
@@ -0,0 +1,3 @@
+ SELECT *
+ FROM event
+ WHERE id = ?
package/sql/event/read-by-pid-domain.sql ADDED
@@ -0,0 +1,4 @@
+ SELECT *
+ FROM event
+ WHERE pid = ?
+ AND domain = ?
package/sql/event/schema.sql ADDED
@@ -0,0 +1,19 @@
+ CREATE TABLE IF NOT EXISTS event
+ (
+   id VARCHAR(64) NOT NULL,
+   timestamp DATETIME(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3),
+   domain VARCHAR(64) NOT NULL,
+   rid VARCHAR(64) NULL,
+   pid VARCHAR(64) NOT NULL,
+   name VARCHAR(64) NOT NULL,
+   data JSON NOT NULL,
+
+   PRIMARY KEY (id),
+   FOREIGN KEY (rid) REFERENCES event (id) ON DELETE SET NULL,
+
+   INDEX idx_rid (rid),
+   INDEX idx_name (name, timestamp),
+   INDEX idx_domain (domain, timestamp),
+   INDEX idx_domain_pid (domain, pid, timestamp)
+ )
+ ENGINE=InnoDB
package/sql/event_cpid/delete.sql ADDED
@@ -0,0 +1,4 @@
+ DELETE
+ FROM event_cpid
+ WHERE event_id = ?
+ AND cpid = ?
package/sql/event_cpid/persist.sql ADDED
@@ -0,0 +1,3 @@
+ INSERT
+ INTO event_cpid
+ SET event_id = ?, cpid = ?
package/sql/event_cpid/read-by-cpid-domain.sql ADDED
@@ -0,0 +1,6 @@
+ SELECT event.*
+ FROM event_cpid
+ JOIN event
+ ON event_id = id
+ WHERE cpid = ?
+ AND domain = ?
package/sql/event_cpid/read-by-event_id.sql ADDED
@@ -0,0 +1,3 @@
+ SELECT cpid
+ FROM event_cpid
+ WHERE event_id = ?
package/sql/event_cpid/schema.sql ADDED
@@ -0,0 +1,12 @@
+ CREATE TABLE IF NOT EXISTS event_cpid
+ (
+   event_id VARCHAR(64) NOT NULL,
+   cpid VARCHAR(64) NOT NULL,
+
+   PRIMARY KEY (event_id, cpid),
+   FOREIGN KEY (event_id) REFERENCES event (id)
+     ON UPDATE CASCADE
+     ON DELETE CASCADE,
+   INDEX idx_cpid (cpid)
+ )
+ ENGINE=InnoDB
package/sql/event_eid/delete.sql ADDED
@@ -0,0 +1,4 @@
+ DELETE
+ FROM event_eid
+ WHERE event_id = ?
+ AND eid = ?
package/sql/event_eid/persist.sql ADDED
@@ -0,0 +1,3 @@
+ INSERT
+ INTO event_eid
+ SET event_id = ?, eid = ?
package/sql/event_eid/read-by-eid-domain.sql ADDED
@@ -0,0 +1,6 @@
+ SELECT event.*
+ FROM event_eid
+ JOIN event
+ ON event_id = id
+ WHERE eid = ?
+ AND domain = ?
package/sql/event_eid/read-by-eid.sql ADDED
@@ -0,0 +1,5 @@
+ SELECT event.*
+ FROM event_eid
+ JOIN event
+ ON event_id = id
+ WHERE eid = ?
package/sql/event_eid/read-by-event_id.sql ADDED
@@ -0,0 +1,3 @@
+ SELECT eid
+ FROM event_eid
+ WHERE event_id = ?
package/sql/event_eid/schema.sql ADDED
@@ -0,0 +1,12 @@
+ CREATE TABLE IF NOT EXISTS event_eid
+ (
+   event_id VARCHAR(64) NOT NULL,
+   eid VARCHAR(64) NOT NULL,
+
+   PRIMARY KEY (event_id, eid),
+   FOREIGN KEY (event_id) REFERENCES event (id)
+     ON UPDATE CASCADE
+     ON DELETE CASCADE,
+   INDEX idx_eid (eid)
+ )
+ ENGINE=InnoDB
package/sql/event_published/persist.sql ADDED
@@ -0,0 +1,3 @@
+ INSERT
+ INTO event_published
+ SET ?
package/sql/event_published/schema.sql ADDED
@@ -0,0 +1,23 @@
+ CREATE TABLE IF NOT EXISTS event_published
+ (
+   event_id VARCHAR(64) NOT NULL,
+   published DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP(),
+   publisher VARCHAR(64) NOT NULL,
+   consumer VARCHAR(64) NULL,
+   hub VARCHAR(64) NULL,
+   consumed_hub DATETIME NULL,
+   consumed_spoke DATETIME NULL,
+   success DATETIME NULL,
+   failed DATETIME NULL,
+   orphan DATETIME NULL,
+
+   PRIMARY KEY (event_id),
+   FOREIGN KEY (hub) REFERENCES hub (id),
+   FOREIGN KEY (event_id) REFERENCES event (id)
+     ON UPDATE CASCADE
+     ON DELETE CASCADE,
+   INDEX idx_published (published),
+   INDEX idx_consumed_hub (consumed_hub),
+   INDEX idx_consumed_spoke (consumed_spoke)
+ )
+ ENGINE=InnoDB
package/sql/event_published/update-to-consumed-by-hub.sql ADDED
@@ -0,0 +1,5 @@
+ UPDATE event_published
+ SET consumed_hub = UTC_TIMESTAMP(),
+ hub = ?
+ WHERE event_id = ?
+ AND consumed_hub IS NULL
package/sql/event_published/update-to-consumed-by-spoke.sql ADDED
@@ -0,0 +1,5 @@
+ UPDATE event_published
+ SET consumed_spoke = UTC_TIMESTAMP(),
+ consumer = ?
+ WHERE event_id = ?
+ AND consumed_spoke IS NULL
package/sql/event_published/update-to-failed.sql ADDED
@@ -0,0 +1,3 @@
+ UPDATE event_published
+ SET failed = UTC_TIMESTAMP()
+ WHERE event_id = ?
package/sql/event_published/update-to-orphan.sql ADDED
@@ -0,0 +1,3 @@
+ UPDATE event_published
+ SET orphan = UTC_TIMESTAMP()
+ WHERE event_id = ?
package/sql/event_published/update-to-success.sql ADDED
@@ -0,0 +1,3 @@
+ UPDATE event_published
+ SET success = UTC_TIMESTAMP()
+ WHERE event_id = ?
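
The event_published statements above track a published event through its delivery lifecycle: consumed by a hub, consumed by a spoke, and finally success, failed, or orphan. Continuing the sketch shown after the test file, these are the methods the test uses to drive that lifecycle; pairing each method with a statement file is an inference, since index.js is not shown in this diff, and the publisher id is a placeholder value:

  await db.persistEventPublished({ event_id: id, publisher: 'spoke_1' })
  await db.updateEventPublishedToConsumedByHub(id, hub.id)       // stamps consumed_hub and the hub column
  await db.updateEventPublishedToConsumedBySpoke(id, 'spoke_1')  // stamps consumed_spoke and the consumer column
  await db.updateEventPublishedToSuccess(id)                     // or ...ToFailed(id) / ...ToOrphan(id)
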
package/sql/event_scheduled/persist.sql ADDED
@@ -0,0 +1,3 @@
+ INSERT
+ INTO event_scheduled
+ SET ?
package/sql/event_scheduled/read.sql ADDED
@@ -0,0 +1,4 @@
+ SELECT *
+ FROM event_scheduled
+ WHERE scheduled >= UTC_TIMESTAMP()
+ AND executed IS NULL
package/sql/event_scheduled/schema.sql ADDED
@@ -0,0 +1,20 @@
+ CREATE TABLE IF NOT EXISTS event_scheduled
+ (
+   event_id VARCHAR(64) NOT NULL,
+   timestamp DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP(),
+   scheduled DATETIME NOT NULL,
+   executed DATETIME NULL,
+   success DATETIME NULL,
+   failed DATETIME NULL,
+
+   PRIMARY KEY (event_id),
+   FOREIGN KEY (event_id) REFERENCES event (id)
+     ON UPDATE CASCADE
+     ON DELETE CASCADE,
+   INDEX idx_timestamp (timestamp),
+   INDEX idx_scheduled (scheduled),
+   INDEX idx_executed (executed),
+   INDEX idx_success (success),
+   INDEX idx_failed (failed)
+ )
+ ENGINE=InnoDB
package/sql/event_scheduled/update-executed.sql ADDED
@@ -0,0 +1,3 @@
+ UPDATE event_scheduled
+ SET executed = UTC_TIMESTAMP()
+ WHERE event_id = ?
package/sql/event_scheduled/update-failed.sql ADDED
@@ -0,0 +1,3 @@
+ UPDATE event_scheduled
+ SET failed = UTC_TIMESTAMP()
+ WHERE event_id = ?
package/sql/event_scheduled/update-success.sql ADDED
@@ -0,0 +1,3 @@
+ UPDATE event_scheduled
+ SET success = UTC_TIMESTAMP()
+ WHERE event_id = ?
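
The event_scheduled statements above store a deferred execution for an event and record its progress. Continuing the same sketch, the test drives them through the following methods; as before, the method-to-statement mapping is inferred:

  await db.persistEventScheduled({ event_id: id, scheduled: new Date() })
  const pending = await db.readEventsScheduled()     // rows not yet marked as executed
  await db.updateEventScheduledExecuted(id)
  await db.updateEventScheduledSuccess(id)           // or db.updateEventScheduledFailed(id)
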
package/sql/hub/persist.sql ADDED
@@ -0,0 +1,3 @@
+ INSERT
+ INTO hub
+ SET ?
package/sql/hub/read-online-hubs.sql ADDED
@@ -0,0 +1,3 @@
+ SELECT *
+ FROM hub
+ WHERE quit IS NULL
package/sql/hub/schema.sql ADDED
@@ -0,0 +1,16 @@
+ CREATE TABLE IF NOT EXISTS hub
+ (
+   id VARCHAR(64) NOT NULL,
+   timestamp DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP(),
+   external_ip VARCHAR(16) NOT NULL,
+   external_port SMALLINT UNSIGNED NOT NULL,
+   internal_ip VARCHAR(16) NOT NULL,
+   internal_port SMALLINT UNSIGNED NOT NULL,
+   quit DATETIME NULL,
+
+   PRIMARY KEY (id),
+   INDEX idx_timestamp (timestamp),
+   INDEX idx_external_ip_port (external_ip, external_port),
+   INDEX idx_internal_ip_port (internal_ip, internal_port)
+ )
+ ENGINE=InnoDB
package/sql/hub/update-to-quit.sql ADDED
@@ -0,0 +1,3 @@
+ UPDATE hub
+ SET quit = UTC_TIMESTAMP()
+ WHERE id = ?
package/sql/log/archive.sql ADDED
@@ -0,0 +1,5 @@
+ ALTER TABLE log REORGANIZE PARTITION p_hot INTO
+ (
+   PARTITION p_cold_?? VALUES LESS THAN (TO_DAYS(?)),
+   PARTITION p_hot VALUES LESS THAN MAXVALUE
+ )
package/sql/log/persist.sql ADDED
@@ -0,0 +1,3 @@
+ INSERT
+ INTO log
+ SET ?
package/sql/log/schema.sql ADDED
@@ -0,0 +1,14 @@
+ CREATE TABLE IF NOT EXISTS log
+ (
+   timestamp DATETIME(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3),
+   agent VARCHAR(64) NOT NULL,
+   message TEXT NOT NULL,
+   error JSON NOT NULL,
+   INDEX idx_timestamp (timestamp),
+   INDEX idx_agent (agent)
+ )
+ ENGINE=InnoDB
+ PARTITION BY RANGE (TO_DAYS(timestamp))
+ (
+   PARTITION p_hot VALUES LESS THAN MAXVALUE
+ )
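
The hub and log statements complete the picture: hubs are listed while their quit column is NULL and marked as quit on shutdown, and log rows land in the p_hot partition of a range-partitioned table (archive.sql reorganizes p_hot to split off dated cold partitions; how its ?? identifier placeholder is filled is not visible in this diff). Continuing the sketch one last time, with placeholder message and error values:

  const online = await db.readOnlineHubs()  // hubs where quit IS NULL
  await db.persistLog({ agent: hub.id, message: 'example message', error: { message: 'example error' } })
  await db.updateHubToQuit(hub.id)
  await db.destroy()                        // tear down, as in the test's after() hook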