seyfert 1.2.1 → 1.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/cache/index.d.ts +1 -5
- package/lib/cache/resources/voice-states.d.ts +7 -2
- package/lib/cache/resources/voice-states.js +11 -0
- package/lib/client/http/adapters/bun.d.ts +14 -0
- package/lib/client/http/adapters/bun.js +117 -0
- package/lib/client/http/adapters/index.d.ts +15 -0
- package/lib/client/http/adapters/index.js +27 -0
- package/lib/client/http/adapters/uws.d.ts +17 -0
- package/lib/client/http/adapters/uws.js +142 -0
- package/lib/client/http/httpclient.d.ts +13 -0
- package/lib/client/http/httpclient.js +48 -0
- package/lib/client/oninteractioncreate.js +7 -7
- package/lib/client/onmessagecreate.js +12 -3
- package/lib/client/workerclient.js +1 -1
- package/lib/commands/applications/chat.d.ts +11 -9
- package/lib/commands/applications/chat.js +9 -6
- package/lib/commands/applications/chatcontext.d.ts +2 -1
- package/lib/commands/applications/chatcontext.js +3 -0
- package/lib/commands/applications/menu.d.ts +4 -3
- package/lib/commands/applications/menu.js +9 -6
- package/lib/commands/applications/menucontext.d.ts +4 -1
- package/lib/commands/applications/menucontext.js +9 -0
- package/lib/commands/basecontex.d.ts +1 -1
- package/lib/commands/basecontex.js +5 -11
- package/lib/commands/decorators.d.ts +3 -3
- package/lib/commands/decorators.js +5 -5
- package/lib/commands/handler.js +2 -0
- package/lib/common/shorters/channels.d.ts +9 -2
- package/lib/common/shorters/channels.js +13 -0
- package/lib/common/shorters/members.d.ts +6 -0
- package/lib/common/shorters/members.js +6 -0
- package/lib/common/shorters/messages.d.ts +2 -2
- package/lib/common/shorters/messages.js +7 -3
- package/lib/components/componentcontext.d.ts +1 -0
- package/lib/components/componentcontext.js +3 -0
- package/lib/structures/GuildMember.d.ts +1 -5
- package/lib/structures/GuildMember.js +2 -2
- package/lib/structures/Message.d.ts +1 -1
- package/lib/structures/User.js +1 -1
- package/lib/structures/VoiceState.d.ts +18 -0
- package/lib/structures/VoiceState.js +48 -0
- package/lib/structures/channels.d.ts +6 -3
- package/lib/structures/channels.js +14 -1
- package/lib/structures/extra/Permissions.d.ts +1 -1
- package/lib/structures/index.d.ts +1 -0
- package/lib/structures/index.js +1 -0
- package/lib/websocket/discord/shard.d.ts +4 -4
- package/lib/websocket/discord/shard.js +12 -17
- package/lib/websocket/discord/sharder.d.ts +1 -1
- package/lib/websocket/discord/sharder.js +2 -2
- package/lib/websocket/discord/workermanager.js +1 -1
- package/lib/websocket/structures/index.d.ts +6 -99
- package/lib/websocket/structures/index.js +29 -211
- package/lib/websocket/structures/timeout.d.ts +2 -2
- package/lib/websocket/structures/timeout.js +4 -4
- package/package.json +2 -2
package/lib/websocket/discord/shard.js
CHANGED

(Lines ending in … were truncated in the original diff view.)

@@ -23,7 +23,7 @@ class Shard {
         ack: true,
     };
     bucket;
-    offlineSendQueue = …
+    offlineSendQueue = [];
     constructor(id, options) {
         this.id = id;
         this.options = options;
@@ -34,12 +34,7 @@ class Shard {
         if (options.debugger)
             this.debugger = options.debugger;
         const safe = this.calculateSafeRequests();
-        this.bucket = new structures_1.DynamicBucket({
-            limit: safe,
-            refillAmount: safe,
-            refillInterval: 6e4,
-            debugger: this.debugger,
-        });
+        this.bucket = new structures_1.DynamicBucket({ refillInterval: 6e4, limit: safe, debugger: options.debugger });
     }
     get latency() {
         return this.heart.lastAck && this.heart.lastBeat
@@ -76,7 +71,7 @@ class Shard {
             this.heart.ack = true;
         };
     }
-    async send(…
+    async send(force, message) {
         this.debugger?.info(`[Shard #${this.id}] Sending: ${v10_1.GatewayOpcodes[message.op]} ${JSON.stringify(message.d, (_, value) => {
             if (typeof value === 'string')
                 return value.replace(this.options.token, v => {
@@ -85,13 +80,13 @@ class Shard {
                 });
             return value;
         }, 1)}`);
-        await this.checkOffline(…
-        await this.bucket.acquire(…
-        await this.checkOffline(…
+        await this.checkOffline(force);
+        await this.bucket.acquire(force);
+        await this.checkOffline(force);
         this.websocket?.send(JSON.stringify(message));
     }
     async identify() {
-        await this.send(…
+        await this.send(true, {
             op: v10_1.GatewayOpcodes.Identify,
             d: {
                 token: `Bot ${this.options.token}`,
@@ -107,7 +102,7 @@ class Shard {
         return !!(this.data.resume_gateway_url && this.data.session_id && this.data.resumeSeq !== null);
     }
     async resume() {
-        await this.send(…
+        await this.send(true, {
             op: v10_1.GatewayOpcodes.Resume,
             d: {
                 seq: this.data.resumeSeq,
@@ -185,13 +180,13 @@ class Shard {
             {
                 switch (packet.t) {
                     case v10_1.GatewayDispatchEvents.Resumed:
-                        this.offlineSendQueue.…
+                        this.offlineSendQueue.map((resolve) => resolve());
                         this.options.handlePayload(this.id, packet);
                         break;
                     case v10_1.GatewayDispatchEvents.Ready: {
                         this.data.resume_gateway_url = packet.d.resume_gateway_url;
                         this.data.session_id = packet.d.session_id;
-                        this.offlineSendQueue.…
+                        this.offlineSendQueue.map((resolve) => resolve());
                         this.options.handlePayload(this.id, packet);
                         break;
                     }
@@ -251,9 +246,9 @@ class Shard {
         }
         return this.onpacket(JSON.parse(data));
     }
-    checkOffline(…
+    checkOffline(force) {
         if (!this.isOpen) {
-            return new Promise(resolve => this.offlineSendQueue…
+            return new Promise(resolve => this.offlineSendQueue[force ? 'unshift' : 'push'](resolve));
         }
         return Promise.resolve();
     }
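The Shard changes above replace the old offline queue with a plain array of promise resolvers and thread a force flag through send, checkOffline and bucket.acquire, so Identify and Resume payloads (sent with force = true) skip ahead of ordinary gateway sends. A minimal TypeScript sketch of that queueing pattern; the class and method names below are illustrative, not seyfert's exact internals:

// Sketch: pending sends wait on a promise; `force` puts the resolver at the
// front of the queue so it is released first when the shard comes back online.
type Resolver = () => void;

class OfflineQueueSketch {
    private offlineSendQueue: Resolver[] = [];
    isOpen = false;

    checkOffline(force: boolean): Promise<void> {
        if (!this.isOpen) {
            return new Promise<void>(resolve => {
                this.offlineSendQueue[force ? 'unshift' : 'push'](resolve);
            });
        }
        return Promise.resolve();
    }

    // Called when the shard is usable again: release every queued send.
    onOpen(): void {
        this.isOpen = true;
        this.offlineSendQueue.map(resolve => resolve());
        this.offlineSendQueue = []; // cleared here for clarity of the sketch
    }
}

On Ready or Resumed the shard resolves every queued sender, which is what the two offlineSendQueue.map((resolve) => resolve()) lines in the diff do.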
package/lib/websocket/discord/sharder.d.ts
CHANGED

@@ -22,7 +22,7 @@ export declare class ShardManager extends Map<number, Shard> {
     disconnect(shardId: number): Promise<void> | undefined;
     disconnectAll(): Promise<unknown>;
     setShardPresence(shardId: number, payload: GatewayUpdatePresence['d']): void;
-    setPresence(payload: GatewayUpdatePresence['d']): Promise<void…
+    setPresence(payload: GatewayUpdatePresence['d']): Promise<void>;
     joinVoice(guild_id: string, channel_id: string, options: ObjectToLower<Pick<GatewayVoiceStateUpdate['d'], 'self_deaf' | 'self_mute'>>): void;
     leaveVoice(guild_id: string): void;
     send<T extends GatewaySendPayload>(shardId: number, payload: T): void;
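The single change here gives setPresence an explicit Promise<void> return type. A hedged usage sketch of the surrounding ShardManager surface: the import paths are assumptions, the presence payload follows discord-api-types' GatewayUpdatePresence['d'], and the camel-cased voice options assume seyfert's ObjectToLower key mapping.

// Assumed import paths; adjust to however your project resolves seyfert's lib output.
import { PresenceUpdateStatus } from 'discord-api-types/v10';
import type { ShardManager } from 'seyfert/lib/websocket/discord/sharder';

async function updatePresence(shards: ShardManager) {
    // GatewayUpdatePresence['d'] shape from discord-api-types:
    await shards.setPresence({
        since: null,
        afk: false,
        status: PresenceUpdateStatus.Online,
        activities: [],
    }); // declared as Promise<void> as of 1.2.2

    // Pick<GatewayVoiceStateUpdate['d'], 'self_deaf' | 'self_mute'> run through
    // ObjectToLower — assumed to camel-case the keys:
    shards.joinVoice('guildId', 'channelId', { selfDeaf: true, selfMute: false });
    shards.leaveVoice('guildId');
}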
package/lib/websocket/discord/sharder.js
CHANGED

@@ -82,7 +82,7 @@ class ShardManager extends Map {
      */
     spawnBuckets() {
         this.debugger?.info('#0 Preparing buckets');
-        const chunks = structures_1.…
+        const chunks = structures_1.DynamicBucket.chunk(new Array(this.shardEnd - this.shardStart), this.concurrency);
         chunks.forEach((arr, index) => {
             for (let i = 0; i < arr.length; i++) {
                 const id = i + (index > 0 ? index * this.concurrency : 0) + this.shardStart;
@@ -155,7 +155,7 @@ class ShardManager extends Map {
                 payload,
             });
         }
-        this.get(shardId)?.send(…
+        this.get(shardId)?.send(false, payload);
     }
 }
 exports.ShardManager = ShardManager;
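spawnBuckets here (and prepareSpaces in workermanager.js, next) now call DynamicBucket.chunk directly. chunk(array, size) simply slices the array into groups of size elements, as the index.js diff further down shows, so with seven shards and a max_concurrency of 2 you get four buckets. A quick illustration, assuming DynamicBucket is imported from seyfert's websocket structures module:

// DynamicBucket.chunk(array, size) slices `array` into groups of `size`.
const shardIds = [0, 1, 2, 3, 4, 5, 6];
const buckets = DynamicBucket.chunk(shardIds, 2);
console.log(buckets); // [[0, 1], [2, 3], [4, 5], [6]]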
package/lib/websocket/discord/workermanager.js
CHANGED

@@ -83,7 +83,7 @@ class WorkerManager extends Map {
     }
     prepareSpaces() {
         this.debugger?.info('Preparing buckets');
-        const chunks = structures_1.…
+        const chunks = structures_1.DynamicBucket.chunk(new Array(this.shardEnd - this.shardStart), this.options.shardsPerWorker);
         chunks.forEach((shards, index) => {
             for (let i = 0; i < shards.length; i++) {
                 const id = i + (index > 0 ? index * this.options.shardsPerWorker : 0) + this.shardStart;
package/lib/websocket/structures/index.d.ts
CHANGED

@@ -1,117 +1,24 @@
 /// <reference types="node" />
-import type …
-/**
- * just any kind of request to queue and resolve later
- */
-export type QueuedRequest = (value: void | Promise<void>) => Promise<unknown> | any;
+import { type Logger } from '../../common';
 /**
  * options of the dynamic bucket
  */
 export interface DynamicBucketOptions {
     limit: number;
     refillInterval: number;
-    refillAmount: number;
     debugger?: Logger;
 }
-/**
- * generally useless for interaction based bots
- * ideally this would only be triggered on certain paths
- * example: a huge amount of messages being spammed
- *
- * a dynamic bucket is just a priority queue implemented using linked lists
- * we create an empty bucket for every path
- * dynamically allocating memory improves the final memory footprint
- */
 export declare class DynamicBucket {
-    …
-    …
-    refillAmount: number;
-    /** The queue of requests to acquire an available request. Mapped by <shardId, resolve()> */
-    queue: PriorityQueue<QueuedRequest>;
-    /** The amount of requests that have been used up already. */
+    options: DynamicBucketOptions;
+    queue: ((value?: unknown) => any)[];
     used: number;
-    …
-    processing: boolean;
-    /** The timeout id for the timer to reduce the used amount by the refill amount. */
-    timeoutId?: NodeJS.Timeout;
-    /** The timestamp in milliseconds when the next refill is scheduled. */
+    processing?: boolean;
     refillsAt?: number;
-    …
+    timeoutId?: NodeJS.Timeout;
     constructor(options: DynamicBucketOptions);
     get remaining(): number;
     refill(): void;
-    /** Begin processing the queue. */
     processQueue(): Promise<void>;
-    …
-    acquire(priority: number): Promise<void>;
-    toString(): string;
-}
-/**
- * abstract node lol
- */
-export interface AbstractNode<T> {
-    data: T;
-    next: this | null;
-}
-export interface QueuePusher<T> {
-    push(data: T): NonNullable<TNode<T>>;
-}
-export interface QueuePusherWithPriority<T> {
-    push(data: T, priority: number): NonNullable<PNode<T>>;
-}
-export declare class TNode<T> implements AbstractNode<T> {
-    data: T;
-    next: this | null;
-    constructor(data: T);
-    static null<T>(list: AbstractNode<T> | null): list is null;
-}
-export declare class PNode<T> extends TNode<T> {
-    priority: number;
-    constructor(data: T, priority: number);
-}
-export declare abstract class Queue<T> {
-    protected abstract head: AbstractNode<T> | null;
-    /**
-     * O(1)
-     */
-    pop(): AbstractNode<T> | null;
-    /**
-     * O(1)
-     */
-    peek(): T;
-    /**
-     * O(n)
-     */
-    size(): number;
-    /**
-     * O(1)
-     */
-    isEmpty(): boolean;
-    [Symbol.iterator](): IterableIterator<T>;
-    toArray(): T[];
-    toString(): string;
-}
-export declare class LinkedList<T> extends Queue<T> implements QueuePusher<T> {
-    protected head: TNode<T> | null;
-    /**
-     * O(1)
-     */
-    push(data: T): NonNullable<TNode<T>>;
-}
-export declare class PriorityQueue<T> extends Queue<T> implements QueuePusherWithPriority<T> {
-    protected head: PNode<T> | null;
-    /**
-     * O(#priorities)
-     */
-    push(data: T, priority: number): NonNullable<PNode<T>>;
-}
-export declare class SequentialBucket {
-    private connections;
-    private capacity;
-    private spawnTimeout;
-    constructor(maxCapacity: number);
-    destroy(): Promise<void>;
-    push(promise: QueuedRequest): Promise<void>;
-    acquire(promises?: LinkedList<QueuedRequest>): Promise<boolean>;
+    acquire(force?: boolean): Promise<unknown>;
     static chunk<T>(array: T[], chunks: number): T[][];
 }
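The slimmed declaration keeps a single options object instead of copied fields, drops refillAmount (a refill now simply resets used, see the index.js diff below), and acquire takes an optional force flag rather than a numeric priority. A hedged construction sketch against the new shape, with DynamicBucket and DynamicBucketOptions assumed to be imported from the module declared above:

async function bucketExample() {
    // New options shape: `refillAmount` no longer exists; a refill resets `used`.
    const options: DynamicBucketOptions = {
        limit: 120,          // requests allowed per window
        refillInterval: 6e4, // window length in ms (the value Shard passes)
        // debugger: logger, // optional seyfert Logger
    };

    const bucket = new DynamicBucket(options);
    await bucket.acquire();     // queued at the back
    await bucket.acquire(true); // force: queued at the front (used for Identify/Resume)
}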
package/lib/websocket/structures/index.js
CHANGED

@@ -1,256 +1,74 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.…
+exports.DynamicBucket = void 0;
 const common_1 = require("../../common");
-/**
- * generally useless for interaction based bots
- * ideally this would only be triggered on certain paths
- * example: a huge amount of messages being spammed
- *
- * a dynamic bucket is just a priority queue implemented using linked lists
- * we create an empty bucket for every path
- * dynamically allocating memory improves the final memory footprint
- */
 class DynamicBucket {
-    …
-    …
-    refillAmount;
-    /** The queue of requests to acquire an available request. Mapped by <shardId, resolve()> */
-    queue = new PriorityQueue();
-    /** The amount of requests that have been used up already. */
+    options;
+    queue = [];
     used = 0;
-    …
-    processing = false;
-    /** The timeout id for the timer to reduce the used amount by the refill amount. */
-    timeoutId;
-    /** The timestamp in milliseconds when the next refill is scheduled. */
+    processing;
     refillsAt;
-    …
+    timeoutId;
     constructor(options) {
-        this.…
-        this.refillInterval = options.refillInterval;
-        this.refillAmount = options.refillAmount;
-        if (options.debugger) {
-            this.debugger = options.debugger;
-        }
+        this.options = options;
     }
     get remaining() {
-        if (this.limit < this.used) {
+        if (this.options.limit < this.used) {
             return 0;
         }
-        return this.limit - this.used;
+        return this.options.limit - this.used;
     }
     refill() {
-        // Lower the used amount by the refill amount
-        this.used = this.refillAmount > this.used ? 0 : this.used - this.refillAmount;
-        // Reset the refillsAt timestamp since it just got refilled
         this.refillsAt = undefined;
+        if (this.timeoutId) {
+            clearTimeout(this.timeoutId);
+            this.timeoutId = undefined;
+        }
         if (this.used > 0) {
-            …
-            clearTimeout(this.timeoutId);
-            }
+            this.used = 0;
             this.timeoutId = setTimeout(() => {
                 this.refill();
-            }, this.refillInterval);
-            this.refillsAt = Date.now() + this.refillInterval;
+            }, this.options.refillInterval);
+            this.refillsAt = Date.now() + this.options.refillInterval;
         }
     }
-    /** Begin processing the queue. */
     async processQueue() {
-        …
-        if (this.processing) {
+        if (this.processing)
             return;
-        …
-        …
-        while (!this.queue.isEmpty()) {
+        this.processing = true;
+        while (this.queue.length) {
             if (this.remaining) {
-                this.debugger?.debug(`Processing queue. Remaining: ${this.remaining} Length: ${this.queue.…
-                …
-                this.queue.peek()();
-                this.queue.pop();
-                // A request can be made
+                this.options.debugger?.debug(`Processing queue. Remaining: ${this.remaining} Length: ${this.queue.length}`);
+                this.queue.shift()();
                 this.used++;
-                // Create a new timeout for this request if none exists.
                 if (!this.timeoutId) {
                     this.timeoutId = setTimeout(() => {
                         this.refill();
-                    }, this.refillInterval);
-                    …
-                    this.refillsAt = Date.now() + this.refillInterval;
+                    }, this.options.refillInterval);
+                    this.refillsAt = Date.now() + this.options.refillInterval;
                 }
-                // Check if a refill is scheduled, since we have used up all available requests
             }
             else if (this.refillsAt) {
                 const now = Date.now();
-                // If there is time left until next refill, just delay execution.
                 if (this.refillsAt > now) {
+                    this.options.debugger?.info(`Waiting ${this.refillsAt - now}ms to process queue`);
                     await (0, common_1.delay)(this.refillsAt - now);
+                    this.used = 0;
                 }
             }
         }
-        // Loop has ended mark false so it can restart later when needed
         this.processing = false;
     }
-    …
-    …
-    …
-            this.queue.push(resolve, priority);
+    acquire(force = false) {
+        return new Promise(res => {
+            this.queue[force ? 'unshift' : 'push'](res);
             void this.processQueue();
         });
     }
-    toString() {
-        return [...this.queue].toString();
-    }
-}
-exports.DynamicBucket = DynamicBucket;
-class TNode {
-    data;
-    next;
-    constructor(data) {
-        this.data = data;
-        this.next = null;
-    }
-    static null(list) {
-        return !list;
-    }
-}
-exports.TNode = TNode;
-class PNode extends TNode {
-    priority;
-    constructor(data, priority) {
-        super(data);
-        this.priority = priority;
-    }
-}
-exports.PNode = PNode;
-class Queue {
-    /**
-     * O(1)
-     */
-    pop() {
-        if (TNode.null(this.head)) {
-            throw new Error('cannot pop a list without elements');
-        }
-        return (this.head = this.head.next);
-    }
-    /**
-     * O(1)
-     */
-    peek() {
-        if (TNode.null(this.head)) {
-            throw new Error('cannot peek an empty list');
-        }
-        return this.head.data;
-    }
-    /**
-     * O(n)
-     */
-    size() {
-        let aux = this.head;
-        if (TNode.null(aux)) {
-            return 0;
-        }
-        let count = 1;
-        while (aux.next !== null) {
-            count++;
-            aux = aux.next;
-        }
-        return count;
-    }
-    /**
-     * O(1)
-     */
-    isEmpty() {
-        return TNode.null(this.head);
-    }
-    *[Symbol.iterator]() {
-        let temp = this.head;
-        while (temp !== null) {
-            yield temp.data;
-            temp = temp.next;
-        }
-    }
-    toArray() {
-        return Array.from(this);
-    }
-    toString() {
-        return this.head?.toString() || '';
-    }
-}
-exports.Queue = Queue;
-class LinkedList extends Queue {
-    head = null;
-    /**
-     * O(1)
-     */
-    push(data) {
-        const temp = new TNode(data);
-        temp.next = this.head;
-        this.head = temp;
-        return this.head;
-    }
-}
-exports.LinkedList = LinkedList;
-class PriorityQueue extends Queue {
-    head = null;
-    /**
-     * O(#priorities)
-     */
-    push(data, priority) {
-        let start = this.head;
-        const temp = new PNode(data, priority);
-        if (TNode.null(this.head) || TNode.null(start)) {
-            this.head = temp;
-            return this.head;
-        }
-        if (this.head.priority > priority) {
-            temp.next = this.head;
-            this.head = temp;
-            return this.head;
-        }
-        while (start.next !== null && start.next.priority < priority) {
-            start = start.next;
-        }
-        temp.next = start.next;
-        start.next = temp;
-        return this.head;
-    }
-}
-exports.PriorityQueue = PriorityQueue;
-class SequentialBucket {
-    connections;
-    capacity; // max_concurrency
-    spawnTimeout;
-    constructor(maxCapacity) {
-        this.connections = new LinkedList();
-        this.capacity = maxCapacity;
-        this.spawnTimeout = 5000;
-    }
-    async destroy() {
-        this.connections = new LinkedList();
-    }
-    async push(promise) {
-        this.connections.push(promise);
-        if (this.capacity <= this.connections.size()) {
-            await this.acquire();
-            await (0, common_1.delay)(this.spawnTimeout);
-        }
-        return;
-    }
-    async acquire(promises = this.connections) {
-        while (!promises.isEmpty()) {
-            const item = promises.peek();
-            item().catch((...args) => {
-                Promise.reject(...args);
-            });
-            promises.pop();
-        }
-        return Promise.resolve(true);
-    }
     static chunk(array, chunks) {
         let index = 0;
         let resIndex = 0;
-        const result = Array(Math.ceil(array.length / chunks));
+        const result = new Array(Math.ceil(array.length / chunks));
         while (index < array.length) {
             result[resIndex] = array.slice(index, (index += chunks));
             resIndex++;
@@ -258,4 +76,4 @@ class SequentialBucket {
         return result;
     }
 }
-exports.…
+exports.DynamicBucket = DynamicBucket;
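The rewritten implementation removes the priority queue, linked list, TNode/PNode and SequentialBucket entirely: the bucket is now a FIFO array of resolvers, used counts requests in the current window, and a single timeout plus a delay on refillsAt throttles the queue. The observable effect, as a hedged sketch that again assumes DynamicBucket is importable from this module:

async function throttleDemo() {
    // limit 2 per 1s window: the first two acquires resolve immediately,
    // the third waits for the refill.
    const bucket = new DynamicBucket({ limit: 2, refillInterval: 1_000 });
    const t0 = Date.now();
    for (let i = 1; i <= 3; i++) {
        await bucket.acquire();
        console.log(`request ${i} released after ~${Date.now() - t0}ms`);
    }
    // request 1 released after ~0ms
    // request 2 released after ~0ms
    // request 3 released after ~1000ms
}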
package/lib/websocket/structures/timeout.d.ts
CHANGED

@@ -15,6 +15,6 @@ export declare class ConnectQueue {
     }[];
     protected interval?: NodeJS.Timeout;
     constructor(intervalTime?: number, concurrency?: number);
-    push(callback: () => any): void;
-    shift(): any;
+    push(callback: () => any): Promise<void>;
+    shift(): Promise<any>;
 }
package/lib/websocket/structures/timeout.js
CHANGED

@@ -40,11 +40,11 @@ class ConnectQueue {
         this.intervalTime = intervalTime;
         this.concurrency = concurrency;
     }
-    push(callback) {
+    async push(callback) {
         this.queue.push({ cb: callback });
         if (this.queue.length === this.concurrency) {
             for (let i = 0; i < this.concurrency; i++) {
-                this.queue[i].cb?.();
+                await this.queue[i].cb?.();
                 this.queue[i].cb = undefined;
             }
             this.interval = setInterval(() => {
@@ -54,7 +54,7 @@ class ConnectQueue {
             }, this.intervalTime);
         }
     }
-    shift() {
+    async shift() {
         const shift = this.queue.shift();
         if (!shift) {
             if (!this.queue.length) {
@@ -65,7 +65,7 @@ class ConnectQueue {
         }
         if (!shift.cb)
             return this.shift();
-        shift.cb?.();
+        await shift.cb?.();
         if (!this.queue.length) {
             clearInterval(this.interval);
             this.interval = undefined;
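ConnectQueue.push and shift are now async and await each stored callback, so connect callbacks that return promises finish before the next one fires instead of being invoked fire-and-forget. A hedged usage sketch — ConnectQueue is assumed to be imported from the module above and the shard connect body is hypothetical:

// Constructor signature from timeout.d.ts above: (intervalTime?: number, concurrency?: number).
declare const someShard: { connect(): Promise<void> }; // hypothetical shard object
const connectQueue = new ConnectQueue(5_000, 1);

await connectQueue.push(async () => {
    await someShard.connect(); // awaited by the queue as of 1.2.2
});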
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "seyfert",
-  "version": "1.2.1",
+  "version": "1.2.2",
   "description": "The most advanced framework for discord bots",
   "main": "./lib/index.js",
   "module": "./lib/index.js",
@@ -71,4 +71,4 @@
       "url": "https://github.com/socram03"
     }
   ]
-}
+}