corecdtl 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +21 -0
- package/README.md +202 -0
- package/build/Release/hypernode.node +0 -0
- package/dist/http/chunker/ChunkParser.d.ts +8 -0
- package/dist/http/chunker/ChunkParser.js +2 -0
- package/dist/http/chunker/ChunkProgression.d.ts +25 -0
- package/dist/http/chunker/ChunkProgression.js +55 -0
- package/dist/http/chunker/FixedChunkedParser.d.ts +14 -0
- package/dist/http/chunker/FixedChunkedParser.js +49 -0
- package/dist/http/chunker/StreamingChunkedParser.d.ts +19 -0
- package/dist/http/chunker/StreamingChunkedParser.js +94 -0
- package/dist/http/chunker/UntilEndChunkerParser.d.ts +37 -0
- package/dist/http/chunker/UntilEndChunkerParser.js +64 -0
- package/dist/http/content/encoding.d.ts +3 -0
- package/dist/http/content/encoding.js +38 -0
- package/dist/http/content/parser.d.ts +1 -0
- package/dist/http/content/parser.js +64 -0
- package/dist/http/context/ApiContext.d.ts +14 -0
- package/dist/http/context/ApiContext.js +151 -0
- package/dist/http/context/HttpContext.d.ts +42 -0
- package/dist/http/context/HttpContext.js +231 -0
- package/dist/http/context/WebContext.d.ts +31 -0
- package/dist/http/context/WebContext.js +320 -0
- package/dist/http/factory/accumulator.d.ts +13 -0
- package/dist/http/factory/accumulator.js +221 -0
- package/dist/http/factory/factory.d.ts +5 -0
- package/dist/http/factory/factory.js +97 -0
- package/dist/http/factory/pipeline.d.ts +3 -0
- package/dist/http/factory/pipeline.js +102 -0
- package/dist/http/response/HttpResponseBase.d.ts +10 -0
- package/dist/http/response/HttpResponseBase.js +2 -0
- package/dist/http/response/PipeResponseBase.d.ts +31 -0
- package/dist/http/response/PipeResponseBase.js +115 -0
- package/dist/http.d.ts +567 -0
- package/dist/http.js +59 -0
- package/dist/hypernode.d.ts +31 -0
- package/dist/hypernode.js +8 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.js +39 -0
- package/package.json +63 -0
package/LICENSE.md
ADDED
@@ -0,0 +1,21 @@
MIT License

Copyright (c) [2026] [DirikTi]

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
package/README.md
ADDED
@@ -0,0 +1,202 @@
<p align="center">
  <img src="docs/assets/logo.png" alt="<Project Name> Logo" width="180"/>
</p>

<h1 align="center"><CoreCDTL></h1>

<p align="center">
  <a href="<CI_LINK>">
    <img src="https://img.shields.io/badge/build-passing-brightgreen" />
  </a>
  <a href="<TEST_LINK>">
    <img src="https://img.shields.io/badge/tests-rfc-blue" />
  </a>
  <a href="<LICENSE_LINK>">
    <img src="https://img.shields.io/badge/license-MIT-black" />
  </a>
  <img src="https://img.shields.io/badge/HTTP-1.1-orange" />
  <img src="https://img.shields.io/badge/SIMD-required-red" />
  <a href="<API_DOCS_LINK>">
    <img src="https://img.shields.io/badge/docs-API-informational" />
  </a>
</p>

<p align="center">
  High-performance • Low-level • Fully customizable HTTP engine
</p>

# CoreCDTL

**A high-performance, fully customizable HTTP engine for building web and API servers.**

Designed for teams that need maximum control over their HTTP stack without sacrificing performance.
Suitable for large-scale, high-traffic production systems and internal infrastructure.

---

# Overview

`CoreCDTL` is a low-level HTTP engine distributed as a **library**, not a framework.

It operates above the socket layer and provides more than just parsing.
Beyond request parsing, it includes a full processing pipeline that enables building complete HTTP servers with deep customization.

The engine allows developers to intervene and replace core behaviors such as:

* request accumulation
* response models
* content parsers
* pipeline stages

This makes it ideal for teams that want to design and control their own architecture instead of adapting to opinionated frameworks.

---

# Design Goals

* High performance
* Minimal overhead parsing
* Deterministic state machines
* Fully customizable request/response pipeline
* Pluggable internal components
* Production-grade reliability
* Designed for large-scale and high-traffic systems
* Suitable for web and API servers

---

# Architecture

## High-Level Architecture

```
Socket → Parser → State Machines → Pipeline → Handlers → Response
```

The engine processes data in a single pass and drives the request lifecycle through explicit states and a customizable execution pipeline.

---

## State Machines

The HTTP protocol is handled using deterministic state machines:

* Request line
* Headers
* Body
* Chunked transfer encoding

This approach ensures:

* predictable behavior
* low branching overhead
* high cache efficiency
* safe incremental parsing

---

## Customization Points

The engine is designed to be extended or replaced at multiple levels:

* Accumulators
* Response models
* Content parsers
* Pipeline stages / middleware
* Server behavior customization

Users can build their own HTTP server behavior on top of the core engine without modifying internals.

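As a rough sketch of what one such replacement could look like: the compiled output in this release exposes a `contentParserTable` keyed by string (see `dist/http/content/parser.d.ts` further down in this diff). Assuming the keys are content types and the table may be extended before the server starts — both assumptions rather than documented behavior — a custom body parser might be registered like this:

```ts
// Hypothetical sketch: plugging a custom content parser into the table
// declared in dist/http/content/parser.d.ts. The key ("text/csv") and the
// idea that the table is meant to be mutated are assumptions, not documented API.
import { contentParserTable } from "corecdtl/dist/http/content/parser";

// Parse "text/csv" bodies into rows of string cells.
contentParserTable["text/csv"] = (body: Buffer): string[][] =>
    body
        .toString("utf8")
        .split("\n")
        .filter((line) => line.length > 0)
        .map((line) => line.split(","));
```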
---

# RFC Compliance

The engine follows the HTTP/1.1 specifications and validates protocol rules strictly.

* RFC-compliant parsing
* Strict header validation
* Deterministic behavior on malformed input
* Non-compliant or ambiguous inputs are intentionally rejected

---

# Security Considerations

Security is handled as a first-class concern.

The engine includes protections against:

* header injection
* CRLF attacks
* request smuggling vectors
* malformed or ambiguous requests
* unsafe parsing states

Invalid inputs fail fast and do not propagate undefined behavior.

---

# Performance Characteristics

The engine is built with performance as a core principle:

* single-pass parsing
* low/zero-copy design
* minimal allocations
* cache-friendly structures
* branch-predictable state machines
* SIMD optimizations

Designed for high-throughput, low-latency workloads.

---

# Public API

Full API documentation is available here:

👉 **[API Documentation](https://corecdtl.com/docs/intro)**

---

# Testing

The project includes:

* Unit tests
* RFC compliance tests

---

# Platform Support

Requires modern CPUs with **SIMD support**.

---

# Limitations

See:

👉 **[Limitations](https://corecdtl.com/docs/limitations)**

Current known limitations include:

* HTTP/2 not supported
* TLS not included

---

# Roadmap

Planned features and future work:

👉 **[Roadmap](https://corecdtl.com/docs/roadmap)**

---

# License

Open-source.
See **[LICENSE](LICENSE)** for details.
package/build/Release/hypernode.node
ADDED
Binary file
package/dist/http/chunker/ChunkParser.d.ts
ADDED
@@ -0,0 +1,8 @@
import FixedChunkedParser from "./FixedChunkedParser";
import StreamingChunkedParser from "./StreamingChunkedParser";
import { UntilEndChunkedParser } from "./UntilEndChunkerParser";
export interface ChunkParser {
    streaming: StreamingChunkedParser;
    fixed: FixedChunkedParser;
    untilEnd: UntilEndChunkedParser;
}
package/dist/http/chunker/ChunkProgression.d.ts
ADDED
@@ -0,0 +1,25 @@
import { ChunkParser } from "./ChunkParser";
import { Http } from "../../http";
declare class ChunkProgression {
    objId: number;
    fn: Function;
    chunkParser: ChunkParser;
    contentLen?: number;
    routePipe: any;
    params: string[];
    headers: Record<string, string | Array<string>>;
    query: any;
    method: Http.HttpMethod;
    headerSize: number;
    mainOffset: number;
    retFlag: number;
    rawBuf: Buffer;
    private respCpool;
    private cPool;
    private parseInitial;
    constructor(cPool: any, parseInitial: Function, respCpool: any);
    allocateResp(): any;
    reset(): void;
    free(): void;
}
export default ChunkProgression;
package/dist/http/chunker/ChunkProgression.js
ADDED
@@ -0,0 +1,55 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const http_1 = require("../../http");
const FixedChunkedParser_1 = __importDefault(require("./FixedChunkedParser"));
const StreamingChunkedParser_1 = __importDefault(require("./StreamingChunkedParser"));
const UntilEndChunkerParser_1 = require("./UntilEndChunkerParser");
class ChunkProgression {
    constructor(cPool, parseInitial, respCpool) {
        this.cPool = cPool;
        this.fn = parseInitial;
        this.chunkParser = {
            streaming: new StreamingChunkedParser_1.default(),
            fixed: new FixedChunkedParser_1.default(),
            untilEnd: new UntilEndChunkerParser_1.UntilEndChunkedParser()
        };
        this.contentLen = undefined;
        this.routePipe = null;
        this.params = [];
        this.headers = {};
        this.query = {};
        this.method = http_1.Http.HttpMethod.GET;
        this.headerSize = 0;
        this.mainOffset = 0;
        this.retFlag = http_1.Http.RetFlagBits.FLAG_OK;
        this.rawBuf = Buffer.allocUnsafe(0);
        this.objId = cPool.registerObj(this);
        this.respCpool = respCpool;
        this.parseInitial = parseInitial;
    }
    allocateResp() {
        let ret = this.respCpool.allocate();
        return ret;
    }
    reset() {
        this.fn = this.parseInitial;
        this.contentLen = undefined;
        this.routePipe = null;
        this.params = [];
        this.headers = {};
        this.query = {};
        this.method = http_1.Http.HttpMethod.GET;
        this.headerSize = 0;
        this.mainOffset = 0;
        this.retFlag = http_1.Http.RetFlagBits.FLAG_OK;
        this.rawBuf = Buffer.allocUnsafe(0);
    }
    free() {
        this.reset();
        this.cPool.free(this.objId);
    }
}
exports.default = ChunkProgression;
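`ChunkProgression` leans on two external object pools: `cPool.registerObj(this)` hands back the numeric `objId`, `cPool.free(objId)` returns the instance to its pool, and `respCpool.allocate()` produces response objects. The pools themselves are not shown in this hunk; the following is only a minimal sketch of a pool shape compatible with those calls (the name `SimplePool` is hypothetical), not the package's actual implementation:

```ts
// Hypothetical sketch of a pool matching the calls made by ChunkProgression.
class SimplePool<T> {
    private objs: T[] = [];
    private freeIds: number[] = [];

    constructor(private create: () => T) {}

    // Store a new object and hand back its numeric id (cf. objId above).
    registerObj(obj: T): number {
        this.objs.push(obj);
        return this.objs.length - 1;
    }

    // Reuse a freed object if one exists, otherwise build and register a fresh one.
    allocate(): T {
        const freedId = this.freeIds.pop();
        if (freedId !== undefined) {
            return this.objs[freedId];
        }
        const obj = this.create();
        this.registerObj(obj);
        return obj;
    }

    // Mark the slot as reusable; a later allocate() hands the object out again.
    free(id: number): void {
        this.freeIds.push(id);
    }
}
```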
package/dist/http/chunker/FixedChunkedParser.d.ts
ADDED
@@ -0,0 +1,14 @@
declare class FixedChunkedParser {
    private buffer;
    private writeCursor;
    private expectedLength;
    constructor();
    allocateBuffer(size: number): void;
    write(data: Buffer): void;
    isFinished(): boolean;
    getBody(): Buffer;
    getTotalWrittenSize(): number;
    getExpectedSize(): number;
    free(): void;
}
export default FixedChunkedParser;
package/dist/http/chunker/FixedChunkedParser.js
ADDED
@@ -0,0 +1,49 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
class FixedChunkedParser {
    constructor() {
        this.buffer = null;
        this.writeCursor = 0;
        this.expectedLength = 0;
    }
    allocateBuffer(size) {
        if (size <= 0) {
            this.buffer = Buffer.alloc(0);
            this.expectedLength = 0;
            return;
        }
        this.expectedLength = size;
        this.buffer = Buffer.allocUnsafe(size);
        this.writeCursor = 0;
    }
    write(data) {
        if (!this.buffer) {
            throw new Error("Accumulator not initialized. Call allocateBuffer first.");
        }
        const remainingSpace = this.expectedLength - this.writeCursor;
        const dataLength = data.length;
        if (dataLength > remainingSpace) {
            throw new Error(`Fixed buffer overflow: Received ${dataLength} bytes, only ${remainingSpace} remaining.`);
        }
        data.copy(this.buffer, this.writeCursor);
        this.writeCursor += dataLength;
    }
    isFinished() {
        return this.writeCursor === this.expectedLength;
    }
    getBody() {
        return this.buffer || Buffer.alloc(0);
    }
    getTotalWrittenSize() {
        return this.writeCursor;
    }
    getExpectedSize() {
        return this.expectedLength;
    }
    free() {
        this.buffer = null;
        this.writeCursor = 0;
        this.expectedLength = 0;
    }
}
exports.default = FixedChunkedParser;
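A small usage sketch of the accumulator above, for the case where `Content-Length` is known up front; the deep import path is an assumption, not documented API:

```ts
// Sketch: accumulate a body whose Content-Length is known in advance.
// The import path assumes the dist file can be imported directly.
import FixedChunkedParser from "corecdtl/dist/http/chunker/FixedChunkedParser";

const parser = new FixedChunkedParser();
parser.allocateBuffer(11);                    // e.g. Content-Length: 11

parser.write(Buffer.from("hello "));          // first TCP segment (6 bytes)
parser.write(Buffer.from("world"));           // second TCP segment (5 bytes)

if (parser.isFinished()) {                    // writeCursor === expectedLength
    console.log(parser.getBody().toString()); // "hello world"
}
parser.free();                                // drop the buffer for reuse
```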
package/dist/http/chunker/StreamingChunkedParser.d.ts
ADDED
@@ -0,0 +1,19 @@
declare class StreamingChunkedParser {
    private residualBuffer;
    private state;
    private expected;
    private bodyParts;
    private totalSize;
    constructor();
    write(data: Buffer): void;
    /**
     * Mark the parser as finished. This is optional: external HTTP logic
     * may simply stop calling `.write()` after the stream ends.
     */
    finish(): void;
    isFinished(): boolean;
    getBody(): Buffer;
    getTotalSize(): Number;
    free(): void;
}
export default StreamingChunkedParser;
package/dist/http/chunker/StreamingChunkedParser.js
ADDED
@@ -0,0 +1,94 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var StreamingChunkedState;
(function (StreamingChunkedState) {
    StreamingChunkedState[StreamingChunkedState["READ_SIZE"] = 0] = "READ_SIZE";
    StreamingChunkedState[StreamingChunkedState["READ_DATA"] = 1] = "READ_DATA";
    StreamingChunkedState[StreamingChunkedState["READ_CRLF"] = 2] = "READ_CRLF";
    StreamingChunkedState[StreamingChunkedState["FINISHED"] = 3] = "FINISHED";
})(StreamingChunkedState || (StreamingChunkedState = {}));
class StreamingChunkedParser {
    constructor() {
        this.residualBuffer = null;
        this.state = StreamingChunkedState.READ_SIZE;
        this.expected = 0;
        this.bodyParts = [];
        this.totalSize = 0;
    }
    write(data) {
        let currentBuffer;
        if (this.residualBuffer) {
            currentBuffer = Buffer.concat([this.residualBuffer, data]);
            this.residualBuffer = null;
        }
        else {
            currentBuffer = data;
        }
        let readIndex = 0;
        const writeIndex = currentBuffer.length;
        // 2. State machine (advances only over currentBuffer)
        while (readIndex < writeIndex && this.state !== StreamingChunkedState.FINISHED) {
            const available = writeIndex - readIndex;
            if (this.state === StreamingChunkedState.READ_SIZE) {
                // Look for \r\n only within the bounds of the current buffer
                const idx = currentBuffer.indexOf('\r\n', readIndex);
                if (idx === -1 || idx >= writeIndex)
                    break;
                // Extract the hex chunk size
                const hexSize = currentBuffer.toString('ascii', readIndex, idx);
                this.expected = parseInt(hexSize, 16);
                readIndex = idx + 2;
                if (this.expected === 0) {
                    this.state = StreamingChunkedState.FINISHED;
                    break;
                }
                this.state = StreamingChunkedState.READ_DATA;
            }
            if (this.state === StreamingChunkedState.READ_DATA) {
                if (available < this.expected)
                    break;
                const chunk = currentBuffer.subarray(readIndex, readIndex + this.expected);
                this.bodyParts.push(chunk);
                this.totalSize += chunk.length;
                readIndex += this.expected;
                this.state = StreamingChunkedState.READ_CRLF;
            }
            if (this.state === StreamingChunkedState.READ_CRLF) {
                if (available < 2)
                    break;
                readIndex += 2;
                this.expected = 0;
                this.state = StreamingChunkedState.READ_SIZE;
            }
        }
        if (readIndex < writeIndex) {
            this.residualBuffer = currentBuffer.subarray(readIndex);
        }
        else {
            this.residualBuffer = null;
        }
    }
    /**
     * Mark the parser as finished. This is optional: external HTTP logic
     * may simply stop calling `.write()` after the stream ends.
     */
    finish() {
    }
    isFinished() {
        return this.state === StreamingChunkedState.FINISHED;
    }
    getBody() {
        return Buffer.concat(this.bodyParts, this.totalSize);
    }
    getTotalSize() {
        return this.totalSize;
    }
    free() {
        this.residualBuffer = null;
        this.state = StreamingChunkedState.READ_SIZE;
        this.expected = 0;
        this.bodyParts.length = 0;
        this.totalSize = 0;
    }
}
exports.default = StreamingChunkedParser;
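A usage sketch of the chunked-body state machine above: data can be written as it arrives, and an incomplete chunk-size line is carried over as the residual buffer until a later write completes it. The import path is assumed; the chunked bytes follow the standard `size CRLF data CRLF` framing terminated by a zero-size chunk.

```ts
// Sketch: de-chunk a Transfer-Encoding: chunked body fed in two writes.
import StreamingChunkedParser from "corecdtl/dist/http/chunker/StreamingChunkedParser";

const parser = new StreamingChunkedParser();
parser.write(Buffer.from("4\r\nWiki\r\n5\r"));      // second size line incomplete → kept as residual
parser.write(Buffer.from("\npedia\r\n0\r\n\r\n"));  // completes "5\r\n", then the terminating 0-chunk

console.log(parser.isFinished());                   // true
console.log(parser.getBody().toString());           // "Wikipedia"
console.log(parser.getTotalSize());                 // 9
parser.free();                                       // reset for reuse
```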
package/dist/http/chunker/UntilEndChunkerParser.d.ts
ADDED
@@ -0,0 +1,37 @@
export declare class UntilEndChunkedParser {
    private bodyParts;
    private totalSize;
    private finished;
    constructor();
    /**
     * Appends incoming data to the internal buffer list.
     * This method performs zero validation and assumes that
     * all data belongs to the body until the stream terminates.
     *
     * @param {Buffer} data - Raw incoming bytes.
     */
    write(data: Buffer): void;
    /**
     * Mark the parser as finished. This is optional: external HTTP logic
     * may simply stop calling `.write()` after the stream ends.
     */
    finish(): void;
    /**
     * Returns true only if `.finish()` was explicitly called.
     * Streaming HTTP readers normally won't call this; instead,
     * they treat socket-close as the terminal event.
     */
    isFinished(): boolean;
    /**
     * Returns the full concatenated body buffer.
     */
    getBody(): Buffer;
    /**
     * Returns number of bytes accumulated so far.
     */
    getTotalSize(): number;
    /**
     * Reset the parser to initial state.
     */
    free(): void;
}
package/dist/http/chunker/UntilEndChunkerParser.js
ADDED
@@ -0,0 +1,64 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.UntilEndChunkedParser = void 0;
class UntilEndChunkedParser {
    constructor() {
        this.bodyParts = [];
        this.totalSize = 0;
        this.finished = false;
    }
    /**
     * Appends incoming data to the internal buffer list.
     * This method performs zero validation and assumes that
     * all data belongs to the body until the stream terminates.
     *
     * @param {Buffer} data - Raw incoming bytes.
     */
    write(data) {
        if (this.finished) {
            throw new Error("Cannot write after parser is finished.");
        }
        if (data.length > 0) {
            this.bodyParts.push(data);
            this.totalSize += data.length;
        }
    }
    /**
     * Mark the parser as finished. This is optional: external HTTP logic
     * may simply stop calling `.write()` after the stream ends.
     */
    finish() {
        this.finished = true;
    }
    /**
     * Returns true only if `.finish()` was explicitly called.
     * Streaming HTTP readers normally won't call this; instead,
     * they treat socket-close as the terminal event.
     */
    isFinished() {
        return this.finished;
    }
    /**
     * Returns the full concatenated body buffer.
     */
    getBody() {
        if (this.bodyParts.length === 0)
            return Buffer.alloc(0);
        return Buffer.concat(this.bodyParts, this.totalSize);
    }
    /**
     * Returns number of bytes accumulated so far.
     */
    getTotalSize() {
        return this.totalSize;
    }
    /**
     * Reset the parser to initial state.
     */
    free() {
        this.bodyParts.length = 0;
        this.totalSize = 0;
        this.finished = false;
    }
}
exports.UntilEndChunkedParser = UntilEndChunkedParser;
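A usage sketch of the read-until-close accumulator above, i.e. for bodies framed neither by `Content-Length` nor by chunked encoding; the import path is assumed, and wiring `finish()` to the socket's "end" event is only one possible choice:

```ts
// Sketch: every byte is body until the connection ends.
import { UntilEndChunkedParser } from "corecdtl/dist/http/chunker/UntilEndChunkerParser";

const parser = new UntilEndChunkedParser();
parser.write(Buffer.from("partial "));
parser.write(Buffer.from("body"));
parser.finish();                              // e.g. called from the socket's "end" handler

console.log(parser.isFinished());             // true (only because finish() was called)
console.log(parser.getBody().toString());     // "partial body"
console.log(parser.getTotalSize());           // 12
parser.free();
```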
package/dist/http/content/encoding.js
ADDED
@@ -0,0 +1,38 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.contentEncodingTable = exports.contentDecodingTable = void 0;
const zlib_1 = require("zlib");
function gzipDecodeFn(b) {
    try {
        return (0, zlib_1.gunzipSync)(b);
    }
    catch (e) {
        return null;
    }
}
function brotliDecodeFn(b) {
    try {
        return (0, zlib_1.brotliDecompressSync)(b);
    }
    catch (e) {
        return null;
    }
}
function deflateFn(b) {
    try {
        return (0, zlib_1.inflateSync)(b);
    }
    catch (e) {
        return null;
    }
}
exports.contentDecodingTable = {
    gzip: gzipDecodeFn,
    br: brotliDecodeFn,
    deflate: deflateFn,
};
exports.contentEncodingTable = {
    gzip: (b) => (0, zlib_1.gzipSync)(b),
    br: (b) => (0, zlib_1.brotliCompressSync)(b),
    deflate: (b) => (0, zlib_1.deflateSync)(b)
};
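A usage sketch of the tables above, which map coding names (`gzip`, `br`, `deflate`) to zlib-backed transforms; as the code shows, the decode functions swallow errors and return `null` on corrupt input. The deep import path is assumed:

```ts
// Sketch: round-trip a body through the "gzip" content coding.
import { contentEncodingTable, contentDecodingTable } from "corecdtl/dist/http/content/encoding";

const original = Buffer.from("hello, corecdtl");
const compressed = contentEncodingTable["gzip"](original);
const restored = contentDecodingTable["gzip"](compressed);
console.log(restored?.toString());                                   // "hello, corecdtl"

// Corrupt input does not throw; the decode table returns null instead.
console.log(contentDecodingTable["gzip"](Buffer.from("not gzip")));  // null
```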
package/dist/http/content/parser.d.ts
ADDED
@@ -0,0 +1 @@
export declare const contentParserTable: Record<string, (b: any) => any>;