hive-stream 2.0.6 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87) hide show
  1. package/.claude/settings.local.json +12 -0
  2. package/.env.example +2 -2
  3. package/.travis.yml +11 -11
  4. package/CHANGELOG.md +166 -0
  5. package/CLAUDE.md +75 -0
  6. package/LICENSE +21 -21
  7. package/README.md +338 -238
  8. package/dist/actions.d.ts +41 -10
  9. package/dist/actions.js +126 -23
  10. package/dist/actions.js.map +1 -1
  11. package/dist/adapters/base.adapter.d.ts +25 -25
  12. package/dist/adapters/base.adapter.js +63 -49
  13. package/dist/adapters/base.adapter.js.map +1 -1
  14. package/dist/adapters/mongodb.adapter.d.ts +50 -37
  15. package/dist/adapters/mongodb.adapter.js +363 -158
  16. package/dist/adapters/mongodb.adapter.js.map +1 -1
  17. package/dist/adapters/postgresql.adapter.d.ts +49 -0
  18. package/dist/adapters/postgresql.adapter.js +507 -0
  19. package/dist/adapters/postgresql.adapter.js.map +1 -0
  20. package/dist/adapters/sqlite.adapter.d.ts +40 -41
  21. package/dist/adapters/sqlite.adapter.js +470 -397
  22. package/dist/adapters/sqlite.adapter.js.map +1 -1
  23. package/dist/api.d.ts +6 -6
  24. package/dist/api.js +95 -55
  25. package/dist/api.js.map +1 -1
  26. package/dist/config.d.ts +16 -16
  27. package/dist/config.js +18 -18
  28. package/dist/config.js.map +1 -1
  29. package/dist/contracts/coinflip.contract.d.ts +27 -14
  30. package/dist/contracts/coinflip.contract.js +253 -94
  31. package/dist/contracts/coinflip.contract.js.map +1 -1
  32. package/dist/contracts/dice.contract.d.ts +37 -29
  33. package/dist/contracts/dice.contract.js +282 -155
  34. package/dist/contracts/dice.contract.js.map +1 -1
  35. package/dist/contracts/lotto.contract.d.ts +20 -20
  36. package/dist/contracts/lotto.contract.js +246 -246
  37. package/dist/contracts/nft.contract.d.ts +24 -0
  38. package/dist/contracts/nft.contract.js +533 -0
  39. package/dist/contracts/nft.contract.js.map +1 -0
  40. package/dist/contracts/token.contract.d.ts +18 -0
  41. package/dist/contracts/token.contract.js +263 -0
  42. package/dist/contracts/token.contract.js.map +1 -0
  43. package/dist/exchanges/bittrex.d.ts +6 -6
  44. package/dist/exchanges/bittrex.js +34 -34
  45. package/dist/exchanges/coingecko.d.ts +5 -0
  46. package/dist/exchanges/coingecko.js +40 -0
  47. package/dist/exchanges/coingecko.js.map +1 -0
  48. package/dist/exchanges/exchange.d.ts +9 -9
  49. package/dist/exchanges/exchange.js +26 -26
  50. package/dist/hive-rates.d.ts +9 -9
  51. package/dist/hive-rates.js +121 -75
  52. package/dist/hive-rates.js.map +1 -1
  53. package/dist/index.d.ts +12 -11
  54. package/dist/index.js +33 -32
  55. package/dist/index.js.map +1 -1
  56. package/dist/streamer.d.ts +140 -93
  57. package/dist/streamer.js +793 -545
  58. package/dist/streamer.js.map +1 -1
  59. package/dist/test.d.ts +1 -1
  60. package/dist/test.js +25 -25
  61. package/dist/types/hive-stream.d.ts +35 -6
  62. package/dist/types/hive-stream.js +2 -2
  63. package/dist/utils.d.ts +27 -27
  64. package/dist/utils.js +271 -261
  65. package/dist/utils.js.map +1 -1
  66. package/ecosystem.config.js +17 -17
  67. package/jest.config.js +8 -8
  68. package/package.json +53 -48
  69. package/test-contract-block.md +18 -18
  70. package/tests/actions.spec.ts +252 -0
  71. package/tests/adapters/actions-persistence.spec.ts +144 -0
  72. package/tests/adapters/postgresql.adapter.spec.ts +127 -0
  73. package/tests/adapters/sqlite.adapter.spec.ts +180 -42
  74. package/tests/contracts/coinflip.contract.spec.ts +221 -131
  75. package/tests/contracts/dice.contract.spec.ts +202 -159
  76. package/tests/contracts/entrants.json +728 -728
  77. package/tests/contracts/lotto.contract.spec.ts +323 -323
  78. package/tests/contracts/nft.contract.spec.ts +948 -0
  79. package/tests/contracts/token.contract.spec.ts +334 -0
  80. package/tests/helpers/mock-adapter.ts +214 -0
  81. package/tests/setup.ts +29 -18
  82. package/tests/streamer-actions.spec.ts +263 -0
  83. package/tests/streamer.spec.ts +248 -151
  84. package/tests/utils.spec.ts +91 -94
  85. package/tsconfig.build.json +3 -22
  86. package/tslint.json +20 -20
  87. package/wallaby.js +26 -26
package/README.md CHANGED
@@ -1,238 +1,338 @@
1
- # Hive Stream
2
-
3
- A Node.js layer for Hive that allows you to watch for specific actions on the Hive blockchain.
4
-
5
- ## Install
6
-
7
- ```shell
8
- npm install hive-stream
9
- ```
10
-
11
- ## Quick Usage
12
-
13
- ```javascript
14
- const { Streamer } = require('hive-stream');
15
-
16
- const ss = new Streamer();
17
-
18
- // Watch for all custom JSON operations
19
- ss.onCustomJson((op, { sender, isSignedWithActiveKey }, blockNumber, blockId, prevBlockId, trxId, blockTime) => {
20
- // React to custom JSON operations
21
- });
22
- ```
23
-
24
- ## Configuration
25
-
26
- The `Streamer` object can accept an object of configuration values which are all optional. However, some operations like transferring Hive Engine tokens or other operations on the blockchain that are not READ ONLY, will require the active key and/or posting keys supplied as well as a username.
27
-
28
- The `BLOCK_CHECK_INTERVAL` value is how often to check for new blocks or in cases of error or falling behind, to poll for new blocks. You should keep this as the default 1000ms value which is one second. This allows you to account for situations where blocks fall behind the main block.
29
-
30
- The `BLOCKS_BEHIND_WARNING` value is a numeric value of the number of blocks your API will fall behind from the master before warning to the console.
31
-
32
- The `API_URL` is the Hive API. If you want to enable debug mode, set to `DEBUG_MODE` to `true`. The configuration values and their defaults can be found [here](https://github.com/Vheissu/hive-stream/blob/master/config.js).
33
-
34
- ```
35
- const options = {
36
- ACTIVE_KEY: '',
37
- POSTING_KEY: '',
38
- APP_NAME: 'hive-stream',
39
- USERNAME: '',
40
- LAST_BLOCK_NUMBER: 0,
41
- BLOCK_CHECK_INTERVAL: 1000,
42
- BLOCKS_BEHIND_WARNING: 25,
43
- API_URL: 'https://api.hiveit.com',
44
- DEBUG_MODE: false
45
- }
46
-
47
- const ss = new Streamer(options);
48
- ```
49
-
50
- The configuration itself can also be overloaded using the `setConfig` method which allows you to pass one or more of the above configuration options, useful in situations where multiple keys might be used for issuing.
51
-
52
- ```
53
- ss.setConfig({
54
- ACTIVE_KEY: 'newactivekey',
55
- USERNAME: 'newusername'
56
- });
57
- ```
58
-
59
- ## Streamer
60
-
61
- The following subscription methods are read only methods, they allow you to react to certain Hive and Hive Engine events on the blockchain. You do not need to pass in any keys to use these methods as they're purely read only.
62
-
63
- **The following actions DO require calling the `start` method first to watch the blockchain**
64
-
65
- #### Watch for transfers
66
-
67
- ```javascript
68
- ss.onTransfer((op, blockNumber, blockId, prevBlockId, trxId, blockTime) => {
69
-
70
- })
71
- ```
72
-
73
- #### Watch for custom JSON operations
74
- ```javascript
75
- ss.onCustomJson((op, { sender, isSignedWithActiveKey }, blockNumber, blockId, prevBlockId, trxId, blockTime) => {
76
-
77
- })
78
- ```
79
-
80
- #### Watch for custom JSON operations (with a specific ID)
81
- ```javascript
82
- ss.onCustomJsonId((op, { sender, isSignedWithActiveKey }, blockNumber, blockId, prevBlockId, trxId, blockTime) => {
83
-
84
- })
85
- ```
86
-
87
- #### Watch for post operations
88
- ```javascript
89
- ss.onPost((op, blockNumber, blockId, prevBlockId, trxId, blockTime) => {
90
-
91
- });
92
- ```
93
-
94
- #### Watch for comment operations
95
- ```javascript
96
- ss.onComment((op, blockNumber, blockId, prevBlockId, trxId, blockTime) => {
97
-
98
- });
99
- ```
100
-
101
- ## Actions (active key)
102
-
103
- All of the below methods require an active key has been supplied in the constructor above called `ACTIVE_KEY`. The methods below are all promised based, so you can `await` them or use `then` to confirm a successful result.
104
-
105
- **The following actions do NOT require calling the `start` method first to watch the blockchain**
106
-
107
- ```javascript
108
- const ss = new Streamer({
109
- ACTIVE_KEY: 'youractivekey'
110
- });
111
- ```
112
-
113
- ### Transfer Hive (HIVE or HBD)
114
- ```javascript
115
- transferHiveTokens(from, to, amount, symbol, memo = '') {
116
-
117
- }
118
- ```
119
-
120
- ## Contracts
121
-
122
- Hive Stream allows you to write contracts which get executed when a custom JSON operation matches. The only requirement is sending a payload which contains `hiveContract` inside of it.
123
-
124
- The payload consists of:
125
-
126
- `name` the name of the smart contract you registered.
127
-
128
- `action` matches the name of a function defined inside of your contract
129
-
130
- `payload` an object of data which will be provided to the action
131
-
132
- ### Writing contracts
133
-
134
- Really, a contract is nothing more than a bunch of functions which get matched to values inside of JSON payloads.
135
-
136
- ### Register a contract
137
-
138
- Register a file containing contract code which will be executed.
139
-
140
- ```javascript
141
- import contract from './my-contract';
142
-
143
- registerContract('mycontract', Contract);
144
- ```
145
-
146
- ### Unregister a contract
147
-
148
- Unregister a contract that has been registered.
149
-
150
- ```javascript
151
- unregisterContract('mycontract');
152
- ```
153
-
154
- ### Example Payload
155
-
156
- ```javascript
157
- JSON.stringify({ hiveContract: { name: 'hivedice', action: 'roll', payload: { roll: 22, amount: '1'} } })
158
- ```
159
-
160
- This will match a registered contract called `hivedice` and inside of the contract code, a function called `roll` and finally, the payload is sent to the function as an argument, allowing you to access the values inside of it. See the example file `dice.contract.ts` in the `src/contracts` folder in the repository. there is also a coinflip and lotto contract showing you how to build a coinflip or lottery based contract.
161
-
162
- ## Time-based Actions
163
-
164
- It's like a cron job for your contracts. Time-based actions allow you to execute contract functions over a wide variety of different periods. Want to call a function every 3 seconds block time or want to call a function once per day? Time-based actions are an easy way to run time code.
165
-
166
- The following example will run a contract action every 30 seconds. All you do is register a new `TimeAction` instance.
167
-
168
- ```
169
- import { TimeAction, Streamer } from 'hive-stream';
170
-
171
- const streamer = new Streamer({
172
- ACTIVE_KEY: ''
173
- });
174
-
175
- const testAction = new TimeAction('30s', 'test30s', 'hivedice', 'testauto');
176
-
177
- streamer.registerAction(testAction);
178
-
179
- streamer.start();
180
- ```
181
-
182
- The `TimeAction` instance accepts the following values:
183
-
184
- - timeValue - When should this action be run?
185
- - uniqueId - A unique ID to describe your action
186
- - contractName - The name of the contract
187
- - contractMethod - The method we are calling inside of the contract
188
- - date - An optional final parameter that accepts a date of creation
189
-
190
- ```
191
- new TimeAction(timeValue, uniqueId, contractName, contractMethod, date)
192
- ```
193
-
194
- ### Valid time values
195
-
196
- At the moment, the `timeValue` passed in as the first argument to `TimeAction` cannot accept just any value. However, there are many available out-of-the-box with more flexibility to come in the future.
197
-
198
- - `3s` or `block` will run a task every block (3 seconds, approximately)
199
- - `30s` will run a task every 30 seconds
200
- - `1m` or `minute` will run a task every 60 seconds (1 minute)
201
- - `15m` or `quarter` will run a task every 15 minutes
202
- - `30m` or `halfhour` will run a task every 30 minutes
203
- - `1h` or `hourly` will run a task every 60 minutes (every hour)
204
- - `12h` or `halfday` will run a task every 12 hours (half a day)
205
- - `24h` or `day` will run a task every 24 hours (day)
206
-
207
- Values will be persisted if using one of the three adapters that ship with the library.
208
-
209
- ## Adapters
210
-
211
- The Hive Stream library supports custom adapters for various actions that take place in the library. When the library first loads, it makes a call to get the last block number or when a block is processed, storing the processed block number. This library ships with two adapters; File and SQLite, both of which are file based adapters. The SQLite database works more like a traditional database and shows how you might create an adapter for a database like MongoDB or MySQL.
212
-
213
- When creating an adapter, at a minimum your adapter requires two methods: `loadState` and `saveState`. It must also extend `AdapterBase` which is exported from the package.
214
-
215
- You can see a few adapters that ship with Hive Stream in the `src/adapters` directory.
216
-
217
- ## Permanently running with PM2
218
-
219
- Simply copy the `ecosystem.config.js` file from this repository into your application, globally install `pm2` via `npm install pm2 -g` and change the `script` value below to reflect the main file of your application.
220
-
221
- **ecosystem.config.js**
222
-
223
- ```
224
- module.exports = {
225
- apps: [
226
- {
227
- name: 'hive-stream',
228
- script: 'index.js',
229
- ignore_watch: ['node_modules'],
230
- env: {
231
- NODE_ENV: 'development'
232
- },
233
- env_production: {
234
- NODE_ENV: 'production'
235
- }
236
- }
237
- ]
238
- ```
1
+ # Hive Stream
2
+
3
+ A Node.js layer for Hive that allows you to watch for specific actions on the Hive blockchain.
4
+
5
+ ## Install
6
+
7
+ ```shell
8
+ npm install hive-stream
9
+ ```
10
+
11
+ ## Quick Usage
12
+
13
+ ```javascript
14
+ const { Streamer } = require('hive-stream');
15
+
16
+ const ss = new Streamer();
17
+
18
+ // Watch for all custom JSON operations
19
+ ss.onCustomJson((op, { sender, isSignedWithActiveKey }, blockNumber, blockId, prevBlockId, trxId, blockTime) => {
20
+ // React to custom JSON operations
21
+ });
22
+ ```
23
+
24
+ ## Configuration
25
+
26
+ The `Streamer` object can accept an object of configuration values which are all optional. However, some operations like transferring Hive Engine tokens or other operations on the blockchain that are not READ ONLY, will require the active key and/or posting keys supplied as well as a username.
27
+
28
+ The `BLOCK_CHECK_INTERVAL` value is how often to check for new blocks or in cases of error or falling behind, to poll for new blocks. You should keep this as the default 1000ms value which is one second. This allows you to account for situations where blocks fall behind the main block.
29
+
30
+ The `BLOCKS_BEHIND_WARNING` value is the number of blocks your stream may fall behind the head block before a warning is printed to the console.
31
+
32
+ The `API_NODES` are the Hive API endpoints used for failover. If you want to enable debug mode, set `DEBUG_MODE` to `true`. The configuration values and their defaults can be found in `src/config.ts`.
33
+
34
+ ```
35
+ const options = {
36
+ ACTIVE_KEY: '',
37
+ POSTING_KEY: '',
38
+ APP_NAME: 'hive-stream',
39
+ USERNAME: '',
40
+ LAST_BLOCK_NUMBER: 0,
41
+ BLOCK_CHECK_INTERVAL: 1000,
42
+ BLOCKS_BEHIND_WARNING: 25,
43
+ API_NODES: ['https://api.hive.blog', 'https://api.openhive.network', 'https://rpc.ausbit.dev'],
44
+ DEBUG_MODE: false
45
+ }
46
+
47
+ const ss = new Streamer(options);
48
+ ```
49
+
50
+ The configuration itself can also be overloaded using the `setConfig` method which allows you to pass one or more of the above configuration options, useful in situations where multiple keys might be used for issuing.
51
+
52
+ ```
53
+ ss.setConfig({
54
+ ACTIVE_KEY: 'newactivekey',
55
+ USERNAME: 'newusername'
56
+ });
57
+ ```
58
+
59
+ ## Streamer
60
+
61
+ The following subscription methods are read only methods, they allow you to react to certain Hive and Hive Engine events on the blockchain. You do not need to pass in any keys to use these methods as they're purely read only.
62
+
63
+ **The following actions DO require calling the `start` method first to watch the blockchain**
64
+
65
+ #### Watch for transfers
66
+
67
+ ```javascript
68
+ ss.onTransfer((op, blockNumber, blockId, prevBlockId, trxId, blockTime) => {
69
+
70
+ })
71
+ ```
72
+
73
+ #### Watch for custom JSON operations
74
+ ```javascript
75
+ ss.onCustomJson((op, { sender, isSignedWithActiveKey }, blockNumber, blockId, prevBlockId, trxId, blockTime) => {
76
+
77
+ })
78
+ ```
79
+
80
+ #### Watch for custom JSON operations (with a specific ID)
81
+ ```javascript
82
+ ss.onCustomJsonId((op, { sender, isSignedWithActiveKey }, blockNumber, blockId, prevBlockId, trxId, blockTime) => {
83
+
84
+ })
85
+ ```
86
+
87
+ #### Watch for post operations
88
+ ```javascript
89
+ ss.onPost((op, blockNumber, blockId, prevBlockId, trxId, blockTime) => {
90
+
91
+ });
92
+ ```
93
+
94
+ #### Watch for comment operations
95
+ ```javascript
96
+ ss.onComment((op, blockNumber, blockId, prevBlockId, trxId, blockTime) => {
97
+
98
+ });
99
+ ```
100
+
101
+ ## Actions (active key)
102
+
103
+ All of the below methods require that an active key has been supplied in the constructor above called `ACTIVE_KEY`. The methods below are all promise-based, so you can `await` them or use `then` to confirm a successful result.
104
+
105
+ **The following actions do NOT require calling the `start` method first to watch the blockchain**
106
+
107
+ ```javascript
108
+ const ss = new Streamer({
109
+ ACTIVE_KEY: 'youractivekey'
110
+ });
111
+ ```
112
+
113
+ ### Transfer Hive (HIVE or HBD)
114
+ ```javascript
115
+ transferHiveTokens(from, to, amount, symbol, memo = '') {
116
+
117
+ }
118
+ ```
119
+
120
+ ### Transfer Hive Engine Tokens
121
+ ```javascript
122
+ transferHiveEngineTokens(from, to, symbol, quantity, memo = '') {
123
+
124
+ }
125
+ ```
126
+
127
+ ### Transfer Hive Engine Tokens to Multiple Accounts
128
+ ```javascript
129
+ transferHiveEngineTokensMultiple(from, accounts = [], symbol, memo = '', amount = '0') {
130
+
131
+ }
132
+ ```
133
+
134
+ ### Issue Hive Engine Tokens
135
+ ```javascript
136
+ issueHiveEngineTokens(from, to, symbol, quantity, memo = '') {
137
+
138
+ }
139
+ ```
140
+
141
+ ### Issue Hive Engine Tokens to Multiple Accounts
142
+ ```javascript
143
+ issueHiveEngineTokensMultiple(from, accounts = [], symbol, memo = '', amount = '0') {
144
+
145
+ }
146
+ ```
147
+
148
+ ### Upvote/Downvote Posts
149
+ ```javascript
150
+ upvote(votePercentage = '100.0', username, permlink) {
151
+
152
+ }
153
+
154
+ downvote(votePercentage = '100.0', username, permlink) {
155
+
156
+ }
157
+ ```
158
+
159
+ ## Contracts
160
+
161
+ Hive Stream allows you to write contracts which get executed when a custom JSON operation matches. The only requirement is sending a payload which contains `hivePayload` inside of it.
162
+
163
+ The payload consists of:
164
+
165
+ `name` the name of the smart contract you registered.
166
+
167
+ `action` matches the name of a function defined inside of your contract
168
+
169
+ `payload` an object of data which will be provided to the action
170
+
171
+ ### Writing contracts
172
+
173
+ Really, a contract is nothing more than a bunch of functions which get matched to values inside of JSON payloads.
174
+
175
+ ### Register a contract
176
+
177
+ Register a file containing contract code which will be executed.
178
+
179
+ ```javascript
180
+ import contract from './my-contract';
181
+
182
+ registerContract('mycontract', Contract);
183
+ ```
184
+
185
+ ### Unregister a contract
186
+
187
+ Unregister a contract that has been registered.
188
+
189
+ ```javascript
190
+ unregisterContract('mycontract');
191
+ ```
192
+
193
+ ### Example Payload
194
+
195
+ ```javascript
196
+ JSON.stringify({ hivePayload: { name: 'hivedice', action: 'roll', payload: { roll: 22, amount: '1'} } })
197
+ ```
198
+
199
+ This will match a registered contract called `hivedice` and inside of the contract code, a function called `roll` and finally, the payload is sent to the function as an argument, allowing you to access the values inside of it.
200
+
201
+ ### Built-in Contract Examples
202
+
203
+ The library includes several built-in contract examples in the `src/contracts` folder:
204
+
205
+ - `DiceContract` - A dice rolling game contract
206
+ - `CoinflipContract` - A coin flip game contract
207
+ - `LottoContract` - A lottery-style game contract
208
+ - `TokenContract` - A contract for token operations
209
+ - `NFTContract` - A contract for NFT operations
210
+
211
+ These can be imported and used as examples for building your own contracts:
212
+
213
+ ```javascript
214
+ import { DiceContract, CoinflipContract, LottoContract } from 'hive-stream';
215
+ ```
216
+
217
+ ## Time-based Actions
218
+
219
+ It's like a cron job for your contracts. Time-based actions allow you to execute contract functions over a wide variety of different periods. Want to call a function every block (roughly every 3 seconds) or once per day? Time-based actions are an easy way to run scheduled code.
220
+
221
+ The following example will run a contract action every 30 seconds. All you do is register a new `TimeAction` instance.
222
+
223
+ ```
224
+ import { TimeAction, Streamer } from 'hive-stream';
225
+
226
+ const streamer = new Streamer({
227
+ ACTIVE_KEY: ''
228
+ });
229
+
230
+ const testAction = new TimeAction('30s', 'test30s', 'hivedice', 'testauto');
231
+
232
+ streamer.registerAction(testAction);
233
+
234
+ streamer.start();
235
+ ```
236
+
237
+ The `TimeAction` instance accepts the following values:
238
+
239
+ - timeValue - When should this action be run?
240
+ - uniqueId - A unique ID to describe your action
241
+ - contractName - The name of the contract
242
+ - contractMethod - The method we are calling inside of the contract
243
+ - date - An optional final parameter that accepts a date of creation
244
+
245
+ ```
246
+ new TimeAction(timeValue, uniqueId, contractName, contractMethod, date)
247
+ ```
248
+
249
+ ### Valid time values
250
+
251
+ At the moment, the `timeValue` passed in as the first argument to `TimeAction` cannot accept just any value. However, there are many available out-of-the-box with more flexibility to come in the future.
252
+
253
+ - `3s` or `block` will run a task every block (3 seconds, approximately)
254
+ - `10s` will run a task every 10 seconds
255
+ - `30s` will run a task every 30 seconds
256
+ - `1m` or `minute` will run a task every 60 seconds (1 minute)
257
+ - `5m` will run a task every 5 minutes
258
+ - `15m` or `quarter` will run a task every 15 minutes
259
+ - `30m` or `halfhour` will run a task every 30 minutes
260
+ - `1h` or `hourly` will run a task every 60 minutes (every hour)
261
+ - `12h` or `halfday` will run a task every 12 hours (half a day)
262
+ - `24h`, `day`, or `daily` will run a task every 24 hours (day)
263
+ - `week` or `weekly` will run a task every 7 days (week)
264
+
265
+ Values will be persisted if using one of the database adapters that ship with the library.
266
+
267
+ ## Adapters
268
+
269
+ The Hive Stream library supports custom adapters for various actions that take place in the library. When the library first loads, it makes a call to get the last block number or when a block is processed, storing the processed block number. This library ships with three adapters: SQLite, MongoDB, and PostgreSQL. These provide robust database storage for blockchain state and operations.
270
+
271
+ By default, Streamer uses the SQLite adapter. To use a different adapter, use the `registerAdapter()` method:
272
+
273
+ ### SQLite Adapter (Default)
274
+ ```javascript
275
+ import { Streamer, SqliteAdapter } from 'hive-stream';
276
+
277
+ const streamer = new Streamer(config);
278
+ // SQLite is used by default, but you can explicitly register a custom SQLite database:
279
+ const adapter = new SqliteAdapter('./hive-stream.db');
280
+ await streamer.registerAdapter(adapter);
281
+ ```
282
+
283
+ ### MongoDB Adapter
284
+ ```javascript
285
+ import { Streamer, MongodbAdapter } from 'hive-stream';
286
+
287
+ const streamer = new Streamer(config);
288
+ const adapter = new MongodbAdapter('mongodb://localhost:27017', 'hive_stream');
289
+ await streamer.registerAdapter(adapter);
290
+ ```
291
+
292
+ ### PostgreSQL Adapter
293
+ ```javascript
294
+ import { Streamer, PostgreSQLAdapter } from 'hive-stream';
295
+
296
+ const streamer = new Streamer(config);
297
+ const adapter = new PostgreSQLAdapter({
298
+ host: 'localhost',
299
+ port: 5432,
300
+ user: 'postgres',
301
+ password: 'your_password',
302
+ database: 'hive_stream'
303
+ });
304
+
305
+ // Or with connection string
306
+ const adapter = new PostgreSQLAdapter({
307
+ connectionString: 'postgresql://user:pass@localhost:5432/hive_stream'
308
+ });
309
+
310
+ await streamer.registerAdapter(adapter);
311
+ ```
312
+
313
+ When creating an adapter, at a minimum your adapter requires two methods: `loadState` and `saveState`. It must also extend `AdapterBase` which is exported from the package.
314
+
315
+ You can see a few adapters that ship with Hive Stream in the `src/adapters` directory.
316
+
317
+ ## Permanently running with PM2
318
+
319
+ Simply copy the `ecosystem.config.js` file from this repository into your application, globally install `pm2` via `npm install pm2 -g` and change the `script` value below to reflect the main file of your application.
320
+
321
+ **ecosystem.config.js**
322
+
323
+ ```
324
+ module.exports = {
325
+ apps: [
326
+ {
327
+ name: 'hive-stream',
328
+ script: 'index.js',
329
+ ignore_watch: ['node_modules'],
330
+ env: {
331
+ NODE_ENV: 'development'
332
+ },
333
+ env_production: {
334
+ NODE_ENV: 'production'
335
+ }
336
+ }
337
+ ]
338
+ ```
package/dist/actions.d.ts CHANGED
@@ -1,10 +1,41 @@
1
- export declare class TimeAction {
2
- timeValue: string;
3
- id: string;
4
- contractName: string;
5
- contractMethod: string;
6
- payload: any;
7
- date: Date;
8
- constructor(timeValue: string, id: string, contractName: string, contractMethod: string, payload?: any, date?: Date);
9
- reset(): void;
10
- }
1
+ export interface TimeActionInterface {
2
+ timeValue: string;
3
+ id: string;
4
+ contractName: string;
5
+ contractMethod: string;
6
+ payload: any;
7
+ date: Date;
8
+ enabled: boolean;
9
+ lastExecution?: Date;
10
+ executionCount: number;
11
+ maxExecutions?: number;
12
+ timezone?: string;
13
+ }
14
+ export declare class TimeAction implements TimeActionInterface {
15
+ timeValue: string;
16
+ id: string;
17
+ contractName: string;
18
+ contractMethod: string;
19
+ payload: any;
20
+ date: Date;
21
+ enabled: boolean;
22
+ lastExecution?: Date;
23
+ executionCount: number;
24
+ maxExecutions?: number;
25
+ timezone?: string;
26
+ private static readonly VALID_TIME_VALUES;
27
+ constructor(timeValue: string, id: string, contractName: string, contractMethod: string, payload?: any, date?: Date | string, enabled?: boolean, executionCount?: number, maxExecutions?: number, timezone?: string);
28
+ private validateTimeValue;
29
+ private validateId;
30
+ private validateContractName;
31
+ private validateMethodName;
32
+ private parseDate;
33
+ reset(): void;
34
+ disable(): void;
35
+ enable(): void;
36
+ hasReachedMaxExecutions(): boolean;
37
+ incrementExecutionCount(): void;
38
+ toJSON(): Record<string, any>;
39
+ static fromJSON(data: any): TimeAction;
40
+ static getValidTimeValues(): string[];
41
+ }