s3db.js 6.1.0 → 7.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/PLUGINS.md +2724 -0
- package/README.md +377 -492
- package/UNLICENSE +24 -0
- package/dist/s3db.cjs.js +30054 -18189
- package/dist/s3db.cjs.min.js +1 -1
- package/dist/s3db.d.ts +373 -72
- package/dist/s3db.es.js +30040 -18186
- package/dist/s3db.es.min.js +1 -1
- package/dist/s3db.iife.js +29727 -17863
- package/dist/s3db.iife.min.js +1 -1
- package/package.json +44 -69
- package/src/behaviors/body-only.js +110 -0
- package/src/behaviors/body-overflow.js +153 -0
- package/src/behaviors/enforce-limits.js +195 -0
- package/src/behaviors/index.js +39 -0
- package/src/behaviors/truncate-data.js +204 -0
- package/src/behaviors/user-managed.js +147 -0
- package/src/client.class.js +515 -0
- package/src/concerns/base62.js +61 -0
- package/src/concerns/calculator.js +204 -0
- package/src/concerns/crypto.js +142 -0
- package/src/concerns/id.js +8 -0
- package/src/concerns/index.js +5 -0
- package/src/concerns/try-fn.js +151 -0
- package/src/connection-string.class.js +75 -0
- package/src/database.class.js +599 -0
- package/src/errors.js +261 -0
- package/src/index.js +17 -0
- package/src/plugins/audit.plugin.js +442 -0
- package/src/plugins/cache/cache.class.js +53 -0
- package/src/plugins/cache/index.js +6 -0
- package/src/plugins/cache/memory-cache.class.js +164 -0
- package/src/plugins/cache/s3-cache.class.js +189 -0
- package/src/plugins/cache.plugin.js +275 -0
- package/src/plugins/consumers/index.js +24 -0
- package/src/plugins/consumers/rabbitmq-consumer.js +56 -0
- package/src/plugins/consumers/sqs-consumer.js +102 -0
- package/src/plugins/costs.plugin.js +81 -0
- package/src/plugins/fulltext.plugin.js +473 -0
- package/src/plugins/index.js +12 -0
- package/src/plugins/metrics.plugin.js +603 -0
- package/src/plugins/plugin.class.js +210 -0
- package/src/plugins/plugin.obj.js +13 -0
- package/src/plugins/queue-consumer.plugin.js +134 -0
- package/src/plugins/replicator.plugin.js +769 -0
- package/src/plugins/replicators/base-replicator.class.js +85 -0
- package/src/plugins/replicators/bigquery-replicator.class.js +328 -0
- package/src/plugins/replicators/index.js +44 -0
- package/src/plugins/replicators/postgres-replicator.class.js +427 -0
- package/src/plugins/replicators/s3db-replicator.class.js +352 -0
- package/src/plugins/replicators/sqs-replicator.class.js +427 -0
- package/src/resource.class.js +2626 -0
- package/src/s3db.d.ts +1263 -0
- package/src/schema.class.js +706 -0
- package/src/stream/index.js +16 -0
- package/src/stream/resource-ids-page-reader.class.js +10 -0
- package/src/stream/resource-ids-reader.class.js +63 -0
- package/src/stream/resource-reader.class.js +81 -0
- package/src/stream/resource-writer.class.js +92 -0
- package/src/validator.class.js +97 -0
|
@@ -0,0 +1,189 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* S3 Cache Configuration Documentation
|
|
3
|
+
*
|
|
4
|
+
* This cache implementation stores data in Amazon S3, providing persistent storage
|
|
5
|
+
* that survives process restarts and can be shared across multiple instances.
|
|
6
|
+
* It's suitable for large datasets and distributed caching scenarios.
|
|
7
|
+
*
|
|
8
|
+
* @typedef {Object} S3CacheConfig
|
|
9
|
+
* @property {string} bucket - The name of the S3 bucket to use for cache storage
|
|
10
|
+
* @property {string} [region='us-east-1'] - AWS region where the S3 bucket is located
|
|
11
|
+
* @property {string} [accessKeyId] - AWS access key ID (if not using IAM roles)
|
|
12
|
+
* @property {string} [secretAccessKey] - AWS secret access key (if not using IAM roles)
|
|
13
|
+
* @property {string} [sessionToken] - AWS session token for temporary credentials
|
|
14
|
+
* @property {string} [prefix='cache/'] - S3 key prefix for all cache objects
|
|
15
|
+
* @property {number} [ttl=3600000] - Time to live in milliseconds (1 hour default)
|
|
16
|
+
* @property {boolean} [enableCompression=true] - Whether to compress cache values using gzip
|
|
17
|
+
* @property {number} [compressionThreshold=1024] - Minimum size in bytes to trigger compression
|
|
18
|
+
* @property {string} [storageClass='STANDARD'] - S3 storage class: 'STANDARD', 'STANDARD_IA', 'ONEZONE_IA', 'GLACIER', 'DEEP_ARCHIVE'
|
|
19
|
+
* @property {boolean} [enableEncryption=true] - Whether to use S3 server-side encryption (AES256)
|
|
20
|
+
* @property {string} [encryptionAlgorithm='AES256'] - Encryption algorithm: 'AES256' or 'aws:kms'
|
|
21
|
+
* @property {string} [kmsKeyId] - KMS key ID for encryption (if using aws:kms)
|
|
22
|
+
* @property {number} [maxConcurrency=10] - Maximum number of concurrent S3 operations
|
|
23
|
+
* @property {number} [retryAttempts=3] - Number of retry attempts for failed S3 operations
|
|
24
|
+
* @property {number} [retryDelay=1000] - Delay in milliseconds between retry attempts
|
|
25
|
+
* @property {boolean} [logOperations=false] - Whether to log S3 operations to console for debugging
|
|
26
|
+
* @property {Object} [metadata] - Additional metadata to include with all cache objects
|
|
27
|
+
* - Key: metadata name (e.g., 'environment', 'version')
|
|
28
|
+
* - Value: metadata value (e.g., 'production', '1.0.0')
|
|
29
|
+
* @property {string} [contentType='application/json'] - Content type for cache objects
|
|
30
|
+
* @property {boolean} [enableVersioning=false] - Whether to enable S3 object versioning for cache objects
|
|
31
|
+
* @property {number} [maxKeys=1000] - Maximum number of keys to retrieve in list operations
|
|
32
|
+
* @property {boolean} [enableCacheControl=false] - Whether to set Cache-Control headers on S3 objects
|
|
33
|
+
* @property {string} [cacheControl='max-age=3600'] - Cache-Control header value for S3 objects
|
|
34
|
+
* @property {Object} [s3ClientOptions] - Additional options to pass to the S3 client constructor
|
|
35
|
+
* @property {boolean} [enableLocalCache=false] - Whether to use local memory cache as a layer on top of S3
|
|
36
|
+
* @property {number} [localCacheSize=100] - Size of local memory cache when enabled
|
|
37
|
+
* @property {number} [localCacheTtl=300000] - TTL for local memory cache in milliseconds (5 minutes default)
|
|
38
|
+
*
|
|
39
|
+
* @example
|
|
40
|
+
* // Basic configuration with compression and encryption
|
|
41
|
+
* {
|
|
42
|
+
* bucket: 'my-cache-bucket',
|
|
43
|
+
* region: 'us-west-2',
|
|
44
|
+
* accessKeyId: 'AKIAIOSFODNN7EXAMPLE',
|
|
45
|
+
* secretAccessKey: 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY',
|
|
46
|
+
* prefix: 'app-cache/',
|
|
47
|
+
* ttl: 7200000, // 2 hours
|
|
48
|
+
* enableCompression: true,
|
|
49
|
+
* enableEncryption: true,
|
|
50
|
+
* storageClass: 'STANDARD_IA'
|
|
51
|
+
* }
|
|
52
|
+
*
|
|
53
|
+
* @example
|
|
54
|
+
* // Configuration with KMS encryption and local caching
|
|
55
|
+
* {
|
|
56
|
+
* bucket: 'secure-cache-bucket',
|
|
57
|
+
* region: 'eu-west-1',
|
|
58
|
+
* prefix: 'encrypted-cache/',
|
|
59
|
+
* enableEncryption: true,
|
|
60
|
+
* encryptionAlgorithm: 'aws:kms',
|
|
61
|
+
* kmsKeyId: 'arn:aws:kms:eu-west-1:123456789012:key/abcd1234-5678-90ef-ghij-klmnopqrstuv',
|
|
62
|
+
* enableLocalCache: true,
|
|
63
|
+
* localCacheSize: 500,
|
|
64
|
+
* localCacheTtl: 600000, // 10 minutes
|
|
65
|
+
* metadata: {
|
|
66
|
+
* 'environment': 'production',
|
|
67
|
+
* 'cache_type': 's3'
|
|
68
|
+
* }
|
|
69
|
+
* }
|
|
70
|
+
*
|
|
71
|
+
* @example
|
|
72
|
+
* // Configuration with cost optimization
|
|
73
|
+
* {
|
|
74
|
+
* bucket: 'cost-optimized-cache',
|
|
75
|
+
* region: 'us-east-1',
|
|
76
|
+
* prefix: 'cache/',
|
|
77
|
+
* storageClass: 'STANDARD_IA',
|
|
78
|
+
* ttl: 86400000, // 24 hours
|
|
79
|
+
* enableCompression: true,
|
|
80
|
+
* compressionThreshold: 512,
|
|
81
|
+
* maxConcurrency: 5,
|
|
82
|
+
* enableCacheControl: true,
|
|
83
|
+
* cacheControl: 'max-age=86400, public'
|
|
84
|
+
* }
|
|
85
|
+
*
|
|
86
|
+
* @example
|
|
87
|
+
* // Minimal configuration using IAM roles
|
|
88
|
+
* {
|
|
89
|
+
* bucket: 'my-cache-bucket',
|
|
90
|
+
* region: 'us-east-1'
|
|
91
|
+
* }
|
|
92
|
+
*
|
|
93
|
+
* @notes
|
|
94
|
+
* - Requires AWS credentials with S3 read/write permissions
|
|
95
|
+
* - S3 storage costs depend on storage class and data transfer
|
|
96
|
+
* - Compression reduces storage costs but increases CPU usage
|
|
97
|
+
* - Encryption provides security but may impact performance
|
|
98
|
+
* - Local cache layer improves performance for frequently accessed data
|
|
99
|
+
* - Storage class affects cost, availability, and retrieval time
|
|
100
|
+
* - Versioning allows recovery of deleted cache objects
|
|
101
|
+
* - Cache-Control headers help with CDN integration
|
|
102
|
+
* - Retry mechanism handles temporary S3 service issues
|
|
103
|
+
* - Concurrent operations improve performance but may hit rate limits
|
|
104
|
+
* - Metadata is useful for cache management and monitoring
|
|
105
|
+
* - TTL is enforced by checking object creation time
|
|
106
|
+
*/
|
|
107
|
+
import zlib from "zlib";
|
|
108
|
+
import { join } from "path";
|
|
109
|
+
|
|
110
|
+
import { Cache } from "./cache.class.js"
|
|
111
|
+
import { streamToString } from "#src/stream/index.js";
|
|
112
|
+
import tryFn from "../../concerns/try-fn.js";
|
|
113
|
+
|
|
114
|
+
export class S3Cache extends Cache {
  /**
   * Cache driver that persists entries as gzip-compressed, base64-encoded
   * JSON objects in S3 under a common key prefix.
   *
   * @param {Object} options
   * @param {Object} options.client - s3db client used for all S3 operations (putObject/getObject/deleteObject/...)
   * @param {string} [options.keyPrefix='cache'] - S3 key prefix under which cache entries are stored
   * @param {number} [options.ttl=0] - time-to-live in ms (0 = no expiration); enforcement happens in the base class
   * @param {string} [options.prefix] - explicit prefix override; defaults to keyPrefix with a trailing '/'
   */
  constructor({
    client,
    keyPrefix = 'cache',
    ttl = 0,
    prefix = undefined
  }) {
    super({ client, keyPrefix, ttl, prefix });
    this.client = client;
    this.keyPrefix = keyPrefix;
    this.config.ttl = ttl;
    this.config.client = client;
    // Guarantee a trailing '/' on the effective prefix unless the caller overrides it.
    this.config.prefix = prefix !== undefined ? prefix : keyPrefix + (keyPrefix.endsWith('/') ? '' : '/');
  }

  /**
   * Serialize, gzip and store a value under `keyPrefix/key`.
   *
   * Fix: `length-compressed` and `compression-gain` previously measured the
   * base64 STRING length (≈ 4/3 of the payload); they now record the actual
   * gzip byte length, so the metadata reflects real compression.
   *
   * @param {string} key - cache key (relative to the key prefix)
   * @param {*} data - any JSON-serializable value
   * @returns {Promise<*>} result of the underlying putObject call
   */
  async _set(key, data) {
    const serialized = JSON.stringify(data);
    const compressed = zlib.gzipSync(serialized);
    const body = compressed.toString('base64');

    return this.client.putObject({
      key: join(this.keyPrefix, key),
      body,
      contentEncoding: "gzip",
      contentType: "application/gzip",
      metadata: {
        compressor: "zlib",
        compressed: 'true',
        "client-id": this.client.id,
        "length-serialized": String(serialized.length),
        "length-compressed": String(compressed.length),
        "compression-gain": (compressed.length / serialized.length).toFixed(2),
      },
    });
  }

  /**
   * Fetch and decode a cached value.
   *
   * @param {string} key - cache key (relative to the key prefix)
   * @returns {Promise<*|null>} the parsed value, or null when the object does not exist
   * @throws rethrows any S3 error other than a missing key
   */
  async _get(key) {
    const [ok, err, result] = await tryFn(async () => {
      const { Body } = await this.client.getObject(join(this.keyPrefix, key));
      let content = await streamToString(Body);
      // Reverse of _set: base64 -> gzip buffer -> JSON string -> value.
      content = Buffer.from(content, 'base64');
      content = zlib.unzipSync(content).toString();
      return JSON.parse(content);
    });
    if (ok) return result;
    // A missing object is a cache miss, not an error.
    if (err.name === 'NoSuchKey' || err.name === 'NotFound') return null;
    throw err;
  }

  /**
   * Delete a single cache entry.
   * @param {string} key - cache key (relative to the key prefix)
   * @returns {Promise<boolean>} always true once the delete call resolves
   */
  async _del(key) {
    await this.client.deleteObject(join(this.keyPrefix, key));
    return true;
  }

  /** Remove every object stored under this cache's key prefix. */
  async _clear() {
    const keys = await this.client.getAllKeys({
      prefix: this.keyPrefix,
    });

    await this.client.deleteObjects(keys);
  }

  /** @returns {Promise<number>} number of entries currently stored */
  async size() {
    const keys = await this.keys();
    return keys.length;
  }

  /**
   * List all cache keys with the key prefix stripped off.
   * @returns {Promise<string[]>} keys relative to the cache prefix
   */
  async keys() {
    const allKeys = await this.client.getAllKeys({ prefix: this.keyPrefix });
    const prefix = this.keyPrefix.endsWith('/') ? this.keyPrefix : this.keyPrefix + '/';
    return allKeys.map(k => k.startsWith(prefix) ? k.slice(prefix.length) : k);
  }
}

export default S3Cache
|
|
@@ -0,0 +1,275 @@
|
|
|
1
|
+
import { join } from "path";
|
|
2
|
+
|
|
3
|
+
import { sha256 } from "../concerns/crypto.js";
|
|
4
|
+
import Plugin from "./plugin.class.js";
|
|
5
|
+
import S3Cache from "./cache/s3-cache.class.js";
|
|
6
|
+
import MemoryCache from "./cache/memory-cache.class.js";
|
|
7
|
+
import tryFn from "../concerns/try-fn.js";
|
|
8
|
+
|
|
9
|
+
export class CachePlugin extends Plugin {
  /**
   * Plugin that transparently caches read operations on database resources
   * (via middleware) and invalidates cached entries on writes.
   *
   * @param {Object} [options]
   * @param {Object} [options.driver] - pre-built cache driver instance (takes precedence)
   * @param {string} [options.driverType] - 'memory' selects MemoryCache; anything else defaults to S3Cache
   * @param {Object} [options.memoryOptions] - options forwarded to MemoryCache
   * @param {Object} [options.s3Options] - options forwarded to S3Cache
   * @param {boolean} [options.includePartitions=true] - whether partition caches are cleared on writes
   */
  constructor(options = {}) {
    super(options);
    this.driver = options.driver;
    this.config = {
      includePartitions: options.includePartitions !== false,
      ...options
    };
  }

  async setup(database) {
    await super.setup(database);
  }

  async onSetup() {
    // Initialize the cache driver: explicit instance > memory > S3 (default).
    if (this.config.driver) {
      this.driver = this.config.driver;
    } else if (this.config.driverType === 'memory') {
      this.driver = new MemoryCache(this.config.memoryOptions || {});
    } else {
      // Default to S3Cache, always backed by the database's own client.
      this.driver = new S3Cache({ client: this.database.client, ...(this.config.s3Options || {}) });
    }

    // Proxy createResource so resources created later are cached too.
    this.installDatabaseProxy();

    // Install hooks for resources that already exist.
    this.installResourceHooks();
  }

  async onStart() {
    // Plugin is ready
  }

  async onStop() {
    // Cleanup if needed
  }

  installDatabaseProxy() {
    if (this.database._cacheProxyInstalled) {
      return; // Already installed
    }

    const installHooksForResource = this.installResourceHooksForResource.bind(this);

    // Store original method so the wrapper does not call itself.
    this.database._originalCreateResourceForCache = this.database.createResource;

    this.database.createResource = async function (...args) {
      const resource = await this._originalCreateResourceForCache(...args);
      // Fix: hook ONLY the newly created resource. The previous wiring called
      // installResourceHooks() (no-arg, loops all resources), which stacked a
      // fresh copy of every cache middleware onto every existing resource each
      // time a new resource was created.
      installHooksForResource(resource);
      return resource;
    };

    // Mark as installed
    this.database._cacheProxyInstalled = true;
  }

  /** Install cache hooks on every resource currently known to the database. */
  installResourceHooks() {
    for (const resource of Object.values(this.database.resources)) {
      this.installResourceHooksForResource(resource);
    }
  }

  /**
   * Attach the cache driver plus read/write middleware to a single resource.
   * Read methods are served from cache when possible; write methods clear
   * the relevant cache entries after they complete.
   */
  installResourceHooksForResource(resource) {
    if (!this.driver) return;

    // Expose the driver on the resource without making it enumerable.
    Object.defineProperty(resource, 'cache', {
      value: this.driver,
      writable: true,
      configurable: true,
      enumerable: false
    });
    resource.cacheKeyFor = async (options = {}) => {
      const { action, params = {}, partition, partitionValues } = options;
      return this.generateCacheKey(resource, action, params, partition, partitionValues);
    };

    // Read methods whose results are cached.
    const cacheMethods = [
      'count', 'listIds', 'getMany', 'getAll', 'page', 'list', 'get'
    ];
    for (const method of cacheMethods) {
      resource.useMiddleware(method, async (ctx, next) => {
        // Build a cache key from the method name and its arguments.
        let key;
        if (method === 'getMany') {
          key = await resource.cacheKeyFor({ action: method, params: { ids: ctx.args[0] } });
        } else if (method === 'page') {
          const { offset, size, partition, partitionValues } = ctx.args[0] || {};
          key = await resource.cacheKeyFor({ action: method, params: { offset, size }, partition, partitionValues });
        } else if (method === 'list' || method === 'listIds' || method === 'count') {
          const { partition, partitionValues } = ctx.args[0] || {};
          key = await resource.cacheKeyFor({ action: method, partition, partitionValues });
        } else if (method === 'getAll') {
          key = await resource.cacheKeyFor({ action: method });
        } else if (method === 'get') {
          key = await resource.cacheKeyFor({ action: method, params: { id: ctx.args[0] } });
        }
        // Serve from cache when present; a missing key falls through to next().
        const [ok, err, cached] = await tryFn(() => resource.cache.get(key));
        if (ok && cached !== null && cached !== undefined) return cached;
        if (!ok && err.name !== 'NoSuchKey') throw err;
        // Not cached: run the real method and store its result.
        const result = await next();
        await resource.cache.set(key, result);
        return result;
      });
    }

    // Write methods that invalidate cached entries.
    const writeMethods = ['insert', 'update', 'delete', 'deleteMany'];
    for (const method of writeMethods) {
      resource.useMiddleware(method, async (ctx, next) => {
        const result = await next();
        // Determine which records to clear
        if (method === 'insert') {
          await this.clearCacheForResource(resource, ctx.args[0]);
        } else if (method === 'update') {
          await this.clearCacheForResource(resource, { id: ctx.args[0], ...ctx.args[1] });
        } else if (method === 'delete') {
          // Best effort: fetch the full record so partition values can be derived.
          let data = { id: ctx.args[0] };
          if (typeof resource.get === 'function') {
            const [ok, err, full] = await tryFn(() => resource.get(ctx.args[0]));
            if (ok && full) data = full;
          }
          await this.clearCacheForResource(resource, data);
        } else if (method === 'deleteMany') {
          // After all deletions, clear all aggregate and partition caches
          await this.clearCacheForResource(resource);
        }
        return result;
      });
    }
  }

  /**
   * Clear cached entries for a resource, optionally narrowing partition
   * clears to the partitions touched by `data`.
   */
  async clearCacheForResource(resource, data) {
    if (!resource.cache) return; // Skip if no cache is available

    const keyPrefix = `resource=${resource.name}`;

    // Always clear main cache for this resource; this prefix covers every key
    // generateCacheKey() produces, so it also wipes partition-scoped entries.
    await resource.cache.clear(keyPrefix);

    // NOTE(review): the targeted partition clears below use the prefix
    // `resource=<name>/partition=<p>`, but generateCacheKey() places
    // `action=<a>` before `partition:<p>` (and uses ':' not '='), so these
    // prefixes never match generated keys. They are kept for compatibility;
    // correctness is carried by the full clear above. Confirm before removing.
    if (this.config.includePartitions === true && resource.config?.partitions && Object.keys(resource.config.partitions).length > 0) {
      if (!data) {
        // If no data, clear all partition caches
        for (const partitionName of Object.keys(resource.config.partitions)) {
          const partitionKeyPrefix = join(keyPrefix, `partition=${partitionName}`);
          await resource.cache.clear(partitionKeyPrefix);
        }
      } else {
        const partitionValues = this.getPartitionValues(data, resource);
        for (const [partitionName, values] of Object.entries(partitionValues)) {
          // Only clear partition cache if there are actual values
          if (values && Object.keys(values).length > 0 && Object.values(values).some(v => v !== null && v !== undefined)) {
            const partitionKeyPrefix = join(keyPrefix, `partition=${partitionName}`);
            await resource.cache.clear(partitionKeyPrefix);
          }
        }
      }
    }
  }

  /**
   * Build a deterministic cache key of the form
   * `resource=<name>/action=<action>[/partition:<p>/<field>:<v>...][/<paramsHash>].json.gz`.
   */
  async generateCacheKey(resource, action, params = {}, partition = null, partitionValues = null) {
    const keyParts = [
      `resource=${resource.name}`,
      `action=${action}`
    ];

    // Add partition information if available
    if (partition && partitionValues && Object.keys(partitionValues).length > 0) {
      keyParts.push(`partition:${partition}`);
      for (const [field, value] of Object.entries(partitionValues)) {
        if (value !== null && value !== undefined) {
          keyParts.push(`${field}:${value}`);
        }
      }
    }

    // Add params if they exist
    if (Object.keys(params).length > 0) {
      const paramsHash = await this.hashParams(params);
      keyParts.push(paramsHash);
    }

    return join(...keyParts) + '.json.gz';
  }

  /**
   * Hash a params object into a stable digest (keys sorted so that
   * equivalent param objects always map to the same cache key).
   */
  async hashParams(params) {
    const sortedParams = Object.keys(params)
      .sort()
      .map(key => `${key}:${params[key]}`)
      .join('|') || 'empty';

    return await sha256(sortedParams);
  }

  /** @returns {Promise<?{size:number,keys:string[],driver:string}>} driver stats, or null without a driver */
  async getCacheStats() {
    if (!this.driver) return null;

    return {
      size: await this.driver.size(),
      keys: await this.driver.keys(),
      driver: this.driver.constructor.name
    };
  }

  /** Clear the cache prefix of every resource known to the database. */
  async clearAllCache() {
    if (!this.driver) return;

    for (const resource of Object.values(this.database.resources)) {
      if (resource.cache) {
        const keyPrefix = `resource=${resource.name}`;
        await resource.cache.clear(keyPrefix);
      }
    }
  }

  /**
   * Pre-populate the cache for a resource by running the wrapped read methods.
   *
   * Fixes over the original: the sampled-record loop no longer shadows the
   * `partitionValues` Set with an inner `const` of the same name, and the
   * records fetched to warm the main cache are reused for partition sampling
   * instead of calling getAll() again per partition.
   *
   * @param {string} resourceName - name of the resource to warm
   * @param {Object} [options]
   * @param {boolean} [options.includePartitions=true] - also warm partition list caches
   * @throws {Error} when the resource does not exist
   */
  async warmCache(resourceName, options = {}) {
    const resource = this.database.resources[resourceName];
    if (!resource) {
      throw new Error(`Resource '${resourceName}' not found`);
    }

    const { includePartitions = true } = options;

    // Warm main cache using the wrapped method (which will call the original).
    const allRecords = await resource.getAll();

    // Warm partition caches if enabled
    if (includePartitions && resource.config.partitions) {
      // Ensure we always have an array to sample from.
      const recordsArray = Array.isArray(allRecords) ? allRecords : [];

      for (const [partitionName, partitionDef] of Object.entries(resource.config.partitions)) {
        if (!partitionDef.fields) continue;

        // Sample the first 10 records to discover representative partition values.
        const sampledValues = new Set();
        for (const record of recordsArray.slice(0, 10)) {
          const values = this.getPartitionValues(record, resource);
          if (values[partitionName]) {
            sampledValues.add(JSON.stringify(values[partitionName]));
          }
        }

        // Warm cache for each discovered partition value.
        for (const serializedValues of sampledValues) {
          const partitionValues = JSON.parse(serializedValues);
          await resource.list({ partition: partitionName, partitionValues });
        }
      }
    }
  }
}

export default CachePlugin;
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { SqsConsumer } from './sqs-consumer.js';
|
|
2
|
+
import { RabbitMqConsumer } from './rabbitmq-consumer.js';
|
|
3
|
+
|
|
4
|
+
export { SqsConsumer, RabbitMqConsumer };
|
|
5
|
+
|
|
6
|
+
export const CONSUMER_DRIVERS = {
  sqs: SqsConsumer,
  rabbitmq: RabbitMqConsumer,
  // kafka: KafkaConsumer, // future
};

/**
 * Create a consumer instance for the requested driver.
 *
 * @param {string} driver - driver key ('sqs', 'rabbitmq', 'kafka'...)
 * @param {Object} config - configuration forwarded to the consumer constructor
 * @returns {SqsConsumer|RabbitMqConsumer} a new consumer instance
 * @throws {Error} when the driver name is not registered in CONSUMER_DRIVERS
 */
export function createConsumer(driver, config) {
  const ConsumerClass = CONSUMER_DRIVERS[driver];
  if (ConsumerClass === undefined) {
    throw new Error(`Unknown consumer driver: ${driver}. Available: ${Object.keys(CONSUMER_DRIVERS).join(', ')}`);
  }
  return new ConsumerClass(config);
}
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
import amqp from 'amqplib';
|
|
2
|
+
import tryFn from "../../concerns/try-fn.js";
|
|
3
|
+
|
|
4
|
+
export class RabbitMqConsumer {
  /**
   * RabbitMQ queue consumer with automatic reconnection.
   *
   * @param {Object} options
   * @param {string} options.amqpUrl - AMQP connection URL
   * @param {string} options.queue - queue name to consume from
   * @param {number} [options.prefetch=10] - channel prefetch (max unacked messages)
   * @param {number} [options.reconnectInterval=2000] - ms to wait before reconnecting after a failure
   * @param {Function} options.onMessage - async handler invoked with { $body, $raw }
   * @param {Function} [options.onError] - invoked with (error[, rawMessage]) on failures
   * @param {string} [options.driver='rabbitmq'] - driver identifier
   */
  constructor({ amqpUrl, queue, prefetch = 10, reconnectInterval = 2000, onMessage, onError, driver = 'rabbitmq' }) {
    this.amqpUrl = amqpUrl;
    this.queue = queue;
    this.prefetch = prefetch;
    this.reconnectInterval = reconnectInterval;
    this.onMessage = onMessage;
    this.onError = onError;
    this.driver = driver;
    this.connection = null;
    this.channel = null;
    this._stopped = false;
  }

  /** Begin consuming; resets the stopped flag so reconnects resume. */
  async start() {
    this._stopped = false;
    await this._connect();
  }

  /**
   * Stop consuming and close the channel and connection.
   * Fix: the original line had stray URL text fused onto the statement
   * (`this._stopped = true;https://...`), which is a syntax error.
   */
  async stop() {
    this._stopped = true;
    if (this.channel) await this.channel.close();
    if (this.connection) await this.connection.close();
  }

  /**
   * Connect, assert the queue, and start consuming. On any failure the
   * error is reported via onError and a reconnect is scheduled unless
   * the consumer has been stopped.
   */
  async _connect() {
    const [ok, err] = await tryFn(async () => {
      this.connection = await amqp.connect(this.amqpUrl);
      this.channel = await this.connection.createChannel();
      await this.channel.assertQueue(this.queue, { durable: true });
      this.channel.prefetch(this.prefetch);
      this.channel.consume(this.queue, async (msg) => {
        if (msg !== null) {
          const [okMsg, errMsg] = await tryFn(async () => {
            const content = JSON.parse(msg.content.toString());
            await this.onMessage({ $body: content, $raw: msg });
            // Ack only after the handler completed successfully.
            this.channel.ack(msg);
          });
          if (!okMsg) {
            if (this.onError) this.onError(errMsg, msg);
            // Reject without requeue so poison messages don't loop forever.
            this.channel.nack(msg, false, false);
          }
        }
      });
    });
    if (!ok) {
      if (this.onError) this.onError(err);
      if (!this._stopped) {
        setTimeout(() => this._connect(), this.reconnectInterval);
      }
    }
  }
}
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
import tryFn from "../../concerns/try-fn.js";
|
|
2
|
+
// Remover o import estático do SDK
|
|
3
|
+
// import { SQSClient, ReceiveMessageCommand, DeleteMessageCommand } from '@aws-sdk/client-sqs';
|
|
4
|
+
|
|
5
|
+
export class SqsConsumer {
  /**
   * SQS queue consumer that long-polls on an interval and deletes messages
   * after successful processing. The AWS SDK is loaded lazily in start().
   *
   * @param {Object} options
   * @param {string} options.queueUrl - SQS queue URL
   * @param {Function} options.onMessage - async handler invoked with ({ $body, $attributes, $raw }, rawMessage)
   * @param {Function} [options.onError] - invoked with (error[, rawMessage]) on failures
   * @param {number} [options.poolingInterval=5000] - ms between polls
   * @param {number} [options.maxMessages=10] - max messages per receive call
   * @param {string} [options.region='us-east-1'] - AWS region
   * @param {Object} [options.credentials] - AWS credentials (optional when using IAM roles)
   * @param {string} [options.endpoint] - custom SQS endpoint (e.g. LocalStack)
   * @param {string} [options.driver='sqs'] - driver identifier
   */
  constructor({ queueUrl, onMessage, onError, poolingInterval = 5000, maxMessages = 10, region = 'us-east-1', credentials, endpoint, driver = 'sqs' }) {
    this.driver = driver;
    this.queueUrl = queueUrl;
    this.onMessage = onMessage;
    this.onError = onError;
    this.poolingInterval = poolingInterval;
    this.maxMessages = maxMessages;
    this.region = region;
    this.credentials = credentials;
    this.endpoint = endpoint;
    // SQS client and SDK command classes are resolved dynamically in start().
    this.sqs = null;
    this._stopped = false;
    this._timer = null;
    this._pollPromise = null;
    this._pollResolve = null;
    this._SQSClient = null;
    this._ReceiveMessageCommand = null;
    this._DeleteMessageCommand = null;
  }

  /**
   * Load the AWS SDK, build the client, and kick off the polling loop.
   * @throws {Error} when @aws-sdk/client-sqs is not installed
   */
  async start() {
    const [loaded, loadError, sqsModule] = await tryFn(() => import('@aws-sdk/client-sqs'));
    if (!loaded) throw new Error('SqsConsumer: @aws-sdk/client-sqs is not installed. Please install it to use the SQS consumer.');

    this._SQSClient = sqsModule.SQSClient;
    this._ReceiveMessageCommand = sqsModule.ReceiveMessageCommand;
    this._DeleteMessageCommand = sqsModule.DeleteMessageCommand;
    this.sqs = new this._SQSClient({ region: this.region, credentials: this.credentials, endpoint: this.endpoint });

    this._stopped = false;
    this._pollPromise = new Promise((resolve) => { this._pollResolve = resolve; });
    this._poll();
  }

  /**
   * Stop polling. Does not wait for an in-flight receive (long poll can take
   * up to 10s); _poll observes _stopped and resolves the poll promise.
   */
  async stop() {
    this._stopped = true;
    if (this._timer !== null) {
      clearTimeout(this._timer);
      this._timer = null;
    }
    this._pollResolve?.();
  }

  /**
   * One polling iteration: receive up to maxMessages, hand each to the
   * handler, delete on success, report failures, then reschedule itself.
   */
  async _poll() {
    if (this._stopped) {
      if (this._pollResolve) this._pollResolve();
      return;
    }
    const [received, receiveError] = await tryFn(async () => {
      const receiveCommand = new this._ReceiveMessageCommand({
        QueueUrl: this.queueUrl,
        MaxNumberOfMessages: this.maxMessages,
        WaitTimeSeconds: 10,
        MessageAttributeNames: ['All'],
      });
      const { Messages } = await this.sqs.send(receiveCommand);
      if (!Messages || Messages.length === 0) return;
      for (const rawMessage of Messages) {
        const [handled, handleError] = await tryFn(async () => {
          await this.onMessage(this._parseMessage(rawMessage), rawMessage);
          // Only delete once the handler has completed successfully.
          const deleteCommand = new this._DeleteMessageCommand({
            QueueUrl: this.queueUrl,
            ReceiptHandle: rawMessage.ReceiptHandle
          });
          await this.sqs.send(deleteCommand);
        });
        if (!handled && this.onError) {
          this.onError(handleError, rawMessage);
        }
      }
    });
    if (!received && this.onError) {
      this.onError(receiveError);
    }
    this._timer = setTimeout(() => this._poll(), this.poolingInterval);
  }

  /**
   * Normalize a raw SQS message: JSON-parse the body when possible (falling
   * back to the raw string) and flatten message attributes to their
   * StringValue.
   */
  _parseMessage(msg) {
    const [parsedOk, , parsedBody] = tryFn(() => JSON.parse(msg.Body));
    const body = parsedOk ? parsedBody : msg.Body;
    const attributes = {};
    if (msg.MessageAttributes) {
      for (const [attrName, attr] of Object.entries(msg.MessageAttributes)) {
        attributes[attrName] = attr.StringValue;
      }
    }
    return { $body: body, $attributes: attributes, $raw: msg };
  }
}
|