@loadmill/core 0.3.48 → 0.3.51
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/conf/extrema.d.ts +1 -1
- package/dist/conf/extrema.js +2 -2
- package/dist/conf/extrema.js.map +1 -1
- package/dist/conf/types.d.ts +12 -0
- package/dist/conf/validate.d.ts +3 -1
- package/dist/conf/validate.js +12 -1
- package/dist/conf/validate.js.map +1 -1
- package/dist/multipart-form-data/form-data-utils.js +2 -2
- package/dist/multipart-form-data/form-data-utils.js.map +1 -1
- package/dist/multipart-form-data/is-binary-file.d.ts +2 -0
- package/dist/multipart-form-data/is-binary-file.js +215 -0
- package/dist/multipart-form-data/is-binary-file.js.map +1 -0
- package/dist/multipart-form-data/multipart-text-to-post-form-data.d.ts +5 -1
- package/dist/multipart-form-data/multipart-text-to-post-form-data.js +96 -35
- package/dist/multipart-form-data/multipart-text-to-post-form-data.js.map +1 -1
- package/dist/parameters/extractions.d.ts +2 -1
- package/dist/parameters/extractions.js.map +1 -1
- package/dist/parameters/index.d.ts +1 -1
- package/dist/parameters/index.js +4 -0
- package/dist/parameters/index.js.map +1 -1
- package/dist/parameters/parameter-regex-providers.d.ts +2 -0
- package/dist/parameters/parameter-regex-providers.js +3 -1
- package/dist/parameters/parameter-regex-providers.js.map +1 -1
- package/dist/request/index.d.ts +10 -1
- package/dist/request/index.js +16 -6
- package/dist/request/index.js.map +1 -1
- package/package.json +10 -5
- package/src/conf/extrema.ts +1 -0
- package/src/conf/types.ts +14 -0
- package/src/conf/validate.ts +19 -1
- package/src/multipart-form-data/form-data-utils.ts +4 -4
- package/src/multipart-form-data/is-binary-file.ts +206 -0
- package/src/multipart-form-data/multipart-text-to-post-form-data.ts +107 -47
- package/src/parameters/extractions.ts +10 -8
- package/src/parameters/index.ts +4 -1
- package/src/parameters/parameter-regex-providers.ts +4 -0
- package/src/request/index.ts +15 -0
- package/test/multipart-form-data/form-data-utils.spec.ts +28 -7
|
@@ -0,0 +1,206 @@
|
|
|
1
|
+
const MAX_BYTES = 512;
|
|
2
|
+
|
|
3
|
+
// A very basic non-exception raising reader. Read bytes and
|
|
4
|
+
// at the end use hasError() to check whether this worked.
|
|
5
|
+
class Reader {
|
|
6
|
+
public fileBuffer: Buffer;
|
|
7
|
+
public size: number;
|
|
8
|
+
public offset: number;
|
|
9
|
+
public error: boolean;
|
|
10
|
+
|
|
11
|
+
constructor(fileBuffer: Buffer, size: number) {
|
|
12
|
+
this.fileBuffer = fileBuffer;
|
|
13
|
+
this.size = size;
|
|
14
|
+
this.offset = 0;
|
|
15
|
+
this.error = false;
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
public hasError(): boolean {
|
|
19
|
+
return this.error;
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
public nextByte(): number {
|
|
23
|
+
if (this.offset === this.size || this.hasError()) {
|
|
24
|
+
this.error = true;
|
|
25
|
+
return 0xff;
|
|
26
|
+
}
|
|
27
|
+
return this.fileBuffer[this.offset++];
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
public next(len: number): number[] {
|
|
31
|
+
const n = new Array();
|
|
32
|
+
for (let i = 0; i < len; i++) {
|
|
33
|
+
n[i] = this.nextByte();
|
|
34
|
+
}
|
|
35
|
+
return n;
|
|
36
|
+
}
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
// Read a Google Protobuf var(iable)int from the buffer.
|
|
40
|
+
function readProtoVarInt(reader: Reader): number {
|
|
41
|
+
let idx = 0;
|
|
42
|
+
let varInt = 0;
|
|
43
|
+
|
|
44
|
+
while (!reader.hasError()) {
|
|
45
|
+
const b = reader.nextByte();
|
|
46
|
+
varInt = varInt | ((b & 0x7f) << (7 * idx));
|
|
47
|
+
if ((b & 0x80) === 0) {
|
|
48
|
+
break;
|
|
49
|
+
}
|
|
50
|
+
idx++;
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
return varInt;
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
// Attempt to taste a full Google Protobuf message.
|
|
57
|
+
function readProtoMessage(reader: Reader): boolean {
|
|
58
|
+
const varInt = readProtoVarInt(reader);
|
|
59
|
+
const wireType = varInt & 0x7;
|
|
60
|
+
|
|
61
|
+
switch (wireType) {
|
|
62
|
+
case 0:
|
|
63
|
+
readProtoVarInt(reader);
|
|
64
|
+
return true;
|
|
65
|
+
case 1:
|
|
66
|
+
reader.next(8);
|
|
67
|
+
return true;
|
|
68
|
+
case 2: {
|
|
69
|
+
const len = readProtoVarInt(reader);
|
|
70
|
+
reader.next(len);
|
|
71
|
+
return true;
|
|
72
|
+
}
|
|
73
|
+
case 5:
|
|
74
|
+
reader.next(4);
|
|
75
|
+
return true;
|
|
76
|
+
}
|
|
77
|
+
return false;
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
// Check whether this seems to be a valid protobuf file.
|
|
81
|
+
function isBinaryProto(fileBuffer: Buffer, totalBytes: number): boolean {
|
|
82
|
+
const reader = new Reader(fileBuffer, totalBytes);
|
|
83
|
+
let numMessages = 0;
|
|
84
|
+
|
|
85
|
+
// eslint-disable-next-line no-constant-condition
|
|
86
|
+
while (true) {
|
|
87
|
+
// Definitely not a valid protobuf
|
|
88
|
+
if (!readProtoMessage(reader) && !reader.hasError()) {
|
|
89
|
+
return false;
|
|
90
|
+
}
|
|
91
|
+
// Short read?
|
|
92
|
+
if (reader.hasError()) {
|
|
93
|
+
break;
|
|
94
|
+
}
|
|
95
|
+
numMessages++;
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
return numMessages > 0;
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
export function isBinaryFile(file: Buffer, size?: number): boolean {
|
|
102
|
+
if (size === undefined) {
|
|
103
|
+
size = file.length;
|
|
104
|
+
}
|
|
105
|
+
return isBinaryCheck(file, size);
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
// Heuristic binary-content sniffer over the first min(bytesRead, MAX_BYTES) bytes:
// known text BOMs => text; PDF magic or NUL byte => binary; otherwise counts
// "suspicious" (non-printable, non-UTF-8) bytes and flags binary above a 10% ratio,
// with a protobuf taste as a final tie-breaker.
function isBinaryCheck(fileBuffer: Buffer, bytesRead: number): boolean {
  // empty file. no clue what it is.
  if (bytesRead === 0) {
    return false;
  }

  let suspiciousBytes = 0;
  const totalBytes = Math.min(bytesRead, MAX_BYTES);

  // UTF-8 BOM
  if (bytesRead >= 3 && fileBuffer[0] === 0xef && fileBuffer[1] === 0xbb && fileBuffer[2] === 0xbf) {
    return false;
  }

  // UTF-32 BOM
  if (
    bytesRead >= 4 &&
    fileBuffer[0] === 0x00 &&
    fileBuffer[1] === 0x00 &&
    fileBuffer[2] === 0xfe &&
    fileBuffer[3] === 0xff
  ) {
    return false;
  }

  // UTF-32 LE BOM
  if (
    bytesRead >= 4 &&
    fileBuffer[0] === 0xff &&
    fileBuffer[1] === 0xfe &&
    fileBuffer[2] === 0x00 &&
    fileBuffer[3] === 0x00
  ) {
    return false;
  }

  // GB BOM
  if (
    bytesRead >= 4 &&
    fileBuffer[0] === 0x84 &&
    fileBuffer[1] === 0x31 &&
    fileBuffer[2] === 0x95 &&
    fileBuffer[3] === 0x33
  ) {
    return false;
  }

  if (totalBytes >= 5 && fileBuffer.slice(0, 5).toString() === '%PDF-') {
    /* PDF. This is binary. */
    return true;
  }

  // UTF-16 BE BOM
  if (bytesRead >= 2 && fileBuffer[0] === 0xfe && fileBuffer[1] === 0xff) {
    return false;
  }

  // UTF-16 LE BOM
  if (bytesRead >= 2 && fileBuffer[0] === 0xff && fileBuffer[1] === 0xfe) {
    return false;
  }

  for (let i = 0; i < totalBytes; i++) {
    if (fileBuffer[i] === 0) {
      // NULL byte--it's binary!
      return true;
    } else if ((fileBuffer[i] < 7 || fileBuffer[i] > 14) && (fileBuffer[i] < 32 || fileBuffer[i] > 127)) {
      // UTF-8 detection
      // Lead byte 0xC2-0xDF: expect one continuation byte (0x80-0xBF).
      if (fileBuffer[i] > 193 && fileBuffer[i] < 224 && i + 1 < totalBytes) {
        i++;
        if (fileBuffer[i] > 127 && fileBuffer[i] < 192) {
          continue;
        }
      // Lead byte 0xE0-0xEF: expect two continuation bytes.
      // NOTE(review): 4-byte UTF-8 sequences (lead 0xF0-0xF4) are not recognized
      // and count as suspicious — confirm this matches upstream intent.
      } else if (fileBuffer[i] > 223 && fileBuffer[i] < 240 && i + 2 < totalBytes) {
        i++;
        if (fileBuffer[i] > 127 && fileBuffer[i] < 192 && fileBuffer[i + 1] > 127 && fileBuffer[i + 1] < 192) {
          i++;
          continue;
        }
      }

      suspiciousBytes++;
      // Read at least 32 fileBuffer before making a decision
      if (i >= 32 && (suspiciousBytes * 100) / totalBytes > 10) {
        return true;
      }
    }
  }

  // Final ratio check over everything scanned.
  if ((suspiciousBytes * 100) / totalBytes > 10) {
    return true;
  }

  // Borderline case: a parseable protobuf stream with some suspicious bytes
  // is treated as binary.
  if (suspiciousBytes > 1 && isBinaryProto(fileBuffer, totalBytes)) {
    return true;
  }

  return false;
}
|
|
@@ -1,26 +1,25 @@
|
|
|
1
|
+
import { extension } from 'mime-types';
|
|
2
|
+
import { isBase64 } from 'validator';
|
|
3
|
+
|
|
4
|
+
import { HarParam } from '../har';
|
|
1
5
|
import { PostFormData, PostFormDataEntry } from '../request';
|
|
6
|
+
import { isBinaryFile } from './is-binary-file';
|
|
2
7
|
|
|
3
|
-
export const multipartFormDataTextToPostFormData = (multipartFormDataText: string): PostFormData =>
|
|
4
|
-
|
|
8
|
+
export const multipartFormDataTextToPostFormData = (multipartFormDataText: string = ''): PostFormData =>
|
|
9
|
+
getLines(multipartFormDataText)
|
|
10
|
+
.map(line => extractPostFormDataEntry(line));
|
|
5
11
|
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
const getLines = (multipartFormDataText: string) => {
|
|
15
|
-
const boundary = getBoundary(multipartFormDataText);
|
|
16
|
-
const boundaryRegex = splitByBoundaryRegex(boundary);
|
|
17
|
-
const linesSplittedByBoundary = multipartFormDataText.split(boundaryRegex);
|
|
18
|
-
return removeEmptyLines(linesSplittedByBoundary);
|
|
19
|
-
};
|
|
12
|
+
const getLines = (multipartFormDataText: string = ''): string[] =>
|
|
13
|
+
removeEmptyLines(
|
|
14
|
+
multipartFormDataText.split(
|
|
15
|
+
splitByBoundaryRegex(
|
|
16
|
+
getBoundary(multipartFormDataText)
|
|
17
|
+
)
|
|
18
|
+
)
|
|
19
|
+
);
|
|
20
20
|
|
|
21
|
-
const getBoundary = (multipartFormDataText: string): string =>
|
|
22
|
-
|
|
23
|
-
};
|
|
21
|
+
const getBoundary = (multipartFormDataText: string): string =>
|
|
22
|
+
multipartFormDataText.substring(0, multipartFormDataText.indexOf('\r\n'));
|
|
24
23
|
|
|
25
24
|
const splitByBoundaryRegex = (boundary: string) => {
|
|
26
25
|
const normalBoundary = boundary + '\r\n';
|
|
@@ -28,7 +27,7 @@ const splitByBoundaryRegex = (boundary: string) => {
|
|
|
28
27
|
return new RegExp(`${normalBoundary}|${lastBoundary}`);
|
|
29
28
|
};
|
|
30
29
|
|
|
31
|
-
const removeEmptyLines = (lines: string[]): string[] => lines.filter(Boolean);
|
|
30
|
+
const removeEmptyLines = (lines: string[] = []): string[] => lines.filter(Boolean);
|
|
32
31
|
|
|
33
32
|
const extractPostFormDataEntry = (line: string): PostFormDataEntry => {
|
|
34
33
|
const indexOfFirst2Newlines = line.indexOf('\r\n\r\n');
|
|
@@ -38,52 +37,113 @@ const extractPostFormDataEntry = (line: string): PostFormDataEntry => {
|
|
|
38
37
|
|
|
39
38
|
const postFormDataEntry: PostFormDataEntry = { name: fields.name, value };
|
|
40
39
|
fields.fileName && (postFormDataEntry.fileName = fields.fileName);
|
|
40
|
+
fields.contentType && (postFormDataEntry.contentType = fields.contentType);
|
|
41
41
|
|
|
42
42
|
return postFormDataEntry;
|
|
43
43
|
};
|
|
44
44
|
|
|
45
|
-
type Fields =
|
|
45
|
+
// Part header fields reuse the HAR parameter shape (name / value / fileName / contentType).
type Fields = HarParam;
|
|
46
46
|
|
|
47
47
|
const extractFields = (line: string, indexOfFirst2Newlines: number): Fields => {
|
|
48
48
|
const rawFields = extractRawFields(line, indexOfFirst2Newlines);
|
|
49
49
|
const res: Fields = { name: '' };
|
|
50
|
-
const
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
const [k, v] = token.split('=');
|
|
54
|
-
if (k === 'name') {
|
|
55
|
-
res.name = removeDoubleQuotes(v);
|
|
56
|
-
} else if (k === 'filename') {
|
|
57
|
-
res.fileName = removeDoubleQuotes(v);
|
|
58
|
-
}
|
|
59
|
-
}
|
|
50
|
+
const fieldsLines = rawFields.split('\r\n');
|
|
51
|
+
addNameFields(fieldsLines[0], res);
|
|
52
|
+
fieldsLines.length > 1 && addContentType(fieldsLines[1], res);
|
|
60
53
|
return res;
|
|
61
54
|
};
|
|
62
55
|
|
|
63
|
-
const extractRawFields = (line: string, indexOfFieldsValueSeperator: number): string =>
|
|
64
|
-
|
|
56
|
+
const extractRawFields = (line: string, indexOfFieldsValueSeperator: number): string =>
|
|
57
|
+
line.substring(0, indexOfFieldsValueSeperator);
|
|
58
|
+
|
|
59
|
+
const removeDoubleQuotes = (v: string): string =>
|
|
60
|
+
v.replace(/"/g, '');
|
|
61
|
+
|
|
62
|
+
const calculateValue = ({ fileName, contentType }: Fields, line: string, indexOfFirst2Newlines: number): string | '' => {
|
|
63
|
+
const value = extractValue(line, indexOfFirst2Newlines);
|
|
64
|
+
return getBinaryOrTextValue({ value, fileName, contentType });
|
|
65
|
+
};
|
|
66
|
+
|
|
67
|
+
export const getBinaryOrTextValue = (fields: Omit<Fields, 'name'>): string => {
|
|
68
|
+
const { value = '', fileName } = fields;
|
|
69
|
+
if (fileName) {
|
|
70
|
+
const isBass64 = isBase64(value);
|
|
71
|
+
const toBinary = isToBinary(fields);
|
|
72
|
+
if (isBass64 || toBinary) {
|
|
73
|
+
return myAtob(value, isBass64, toBinary);
|
|
74
|
+
}
|
|
75
|
+
}
|
|
76
|
+
return value;
|
|
65
77
|
};
|
|
66
78
|
|
|
67
|
-
const
|
|
68
|
-
|
|
79
|
+
const myAtob = (target = '', fromBase64?: boolean, toBinary?: boolean) => {
|
|
80
|
+
const defaultEncoding = 'utf8';
|
|
81
|
+
const encodingFrom = fromBase64 ? 'base64' : defaultEncoding;
|
|
82
|
+
const encodingTo = toBinary ? 'binary' : defaultEncoding;
|
|
83
|
+
return Buffer.from(target, encodingFrom).toString(encodingTo);
|
|
69
84
|
};
|
|
70
85
|
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
86
|
+
const isToBinary = (fields: Omit<Fields, 'name'>): boolean => {
|
|
87
|
+
const { value = '', fileName, contentType } = fields;
|
|
88
|
+
if (isAlreadyBinary(value)) {
|
|
89
|
+
return false;
|
|
90
|
+
}
|
|
91
|
+
if (fileName) {
|
|
92
|
+
const ext = getExtension(fileName);
|
|
93
|
+
if (ext) {
|
|
94
|
+
return isBinaryFileExtension(ext);
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
if (contentType) {
|
|
98
|
+
return isBinaryContentType(contentType);
|
|
99
|
+
}
|
|
100
|
+
return false;
|
|
101
|
+
};
|
|
74
102
|
|
|
75
|
-
const
|
|
76
|
-
const
|
|
77
|
-
|
|
103
|
+
const isAlreadyBinary = (target = '') => {
|
|
104
|
+
const bytes = Buffer.from(target);
|
|
105
|
+
const size = target.length;
|
|
106
|
+
return isBinaryFile(bytes, size);
|
|
78
107
|
};
|
|
79
108
|
|
|
80
|
-
const
|
|
81
|
-
|
|
82
|
-
|
|
109
|
+
const getExtension = (fileName: string): string | undefined =>
|
|
110
|
+
fileName.split('.').pop();
|
|
111
|
+
|
|
112
|
+
const isBinaryFileExtension = (ext: string): boolean =>
|
|
113
|
+
['docx', 'pdf', 'doc', 'jpg', 'jpeg', 'png'].includes(ext);
|
|
114
|
+
|
|
115
|
+
const isBinaryContentType = (contentType: string): boolean =>
|
|
116
|
+
['docx', 'pdf', 'bin'].includes(extension(contentType) || '');
|
|
117
|
+
|
|
118
|
+
const extractValue = (line: string, indexOfFieldsValueSeperator: number): string =>
|
|
119
|
+
removeEndingNewline(
|
|
120
|
+
line.substring(indexOfFieldsValueSeperator + 4)
|
|
121
|
+
);
|
|
122
|
+
|
|
123
|
+
const removeEndingNewline = (s: string): string =>
|
|
124
|
+
s.endsWith('\r\n') ? removeLast2Chars(s) : s;
|
|
125
|
+
|
|
126
|
+
const removeLast2Chars = (s: string): string =>
|
|
127
|
+
s.substring(0, s.length - 2);
|
|
128
|
+
|
|
129
|
+
const addNameFields = (nameFieldsLine: string, res: HarParam) => {
|
|
130
|
+
const tokens = extractTokens(nameFieldsLine);
|
|
131
|
+
for (const token of tokens) {
|
|
132
|
+
const [k, v] = token.split('=');
|
|
133
|
+
if (k === 'name') {
|
|
134
|
+
res.name = removeDoubleQuotes(v);
|
|
135
|
+
} else if (k === 'filename') {
|
|
136
|
+
res.fileName = removeDoubleQuotes(v);
|
|
137
|
+
}
|
|
83
138
|
}
|
|
84
|
-
return s;
|
|
85
139
|
};
|
|
86
140
|
|
|
87
|
-
const
|
|
88
|
-
|
|
141
|
+
const extractTokens = (nameFieldsLine: string): string[] =>
|
|
142
|
+
nameFieldsLine.split(';').map(t => t.trim());
|
|
143
|
+
|
|
144
|
+
const addContentType = (contentTypeLine: string, res: HarParam) => {
|
|
145
|
+
const [k, v] = contentTypeLine.split(': ');
|
|
146
|
+
if (k === 'content-type') {
|
|
147
|
+
res.contentType = v;
|
|
148
|
+
}
|
|
89
149
|
};
|
|
@@ -17,14 +17,7 @@ const { getParameterName } = parameterUtils;
|
|
|
17
17
|
* @param ext The extraction object to take a part
|
|
18
18
|
* @returns The spreaded parts of the given extraction object
|
|
19
19
|
*/
|
|
20
|
-
export function getExtractionParts(ext: Extractions): {
|
|
21
|
-
name: string,
|
|
22
|
-
value: string | {
|
|
23
|
-
query: string;
|
|
24
|
-
attr?: string;
|
|
25
|
-
}
|
|
26
|
-
type?: string,
|
|
27
|
-
} {
|
|
20
|
+
export function getExtractionParts(ext: Extractions): ExtractionParts {
|
|
28
21
|
const name = getParameterName(ext);
|
|
29
22
|
const extContent = ext[name];
|
|
30
23
|
if (typeof extContent === 'string') {
|
|
@@ -35,6 +28,15 @@ export function getExtractionParts(ext: Extractions): {
|
|
|
35
28
|
return { name, value, type };
|
|
36
29
|
}
|
|
37
30
|
|
|
31
|
+
// The decomposed parts of an Extractions object: the parameter name, its
// value (a plain string, or a query descriptor with an optional attribute),
// and an optional extraction type.
export type ExtractionParts = {
  name: string;
  value: string | {
    query: string;
    attr?: string;
  };
  type?: string;
};
|
|
39
|
+
|
|
38
40
|
export function requestsExtractionsArr(requests, toIndex: number) {
|
|
39
41
|
const extractionParamNames = [] as string[];
|
|
40
42
|
for (let i = 0; i <= toIndex; i++) {
|
package/src/parameters/index.ts
CHANGED
|
@@ -177,6 +177,9 @@ export const parameterUtils = {
|
|
|
177
177
|
},
|
|
178
178
|
|
|
179
179
|
findIndex(key, arr) {
|
|
180
|
+
if (!arr) {
|
|
181
|
+
return -1;
|
|
182
|
+
}
|
|
180
183
|
return arr.findIndex((obj) => {
|
|
181
184
|
return Object.keys(obj)[0] === key;
|
|
182
185
|
});
|
|
@@ -244,7 +247,7 @@ export const parameterUtils = {
|
|
|
244
247
|
return this.getUsedConfParams({ requests: [request] }, parameters);
|
|
245
248
|
},
|
|
246
249
|
|
|
247
|
-
getValueByKeyFromArr(key: string, parameters: Parameters[], returnArray: boolean = false) {
|
|
250
|
+
getValueByKeyFromArr(key: string, parameters: Parameters[] | undefined = [], returnArray: boolean = false) {
|
|
248
251
|
let res;
|
|
249
252
|
const param = getParameterByKey(key, parameters);
|
|
250
253
|
if (param) {
|
|
@@ -64,6 +64,8 @@ export const CAPTURE_REGEX = /(([^\\]\(|^\().*[^\\]\))/;
|
|
|
64
64
|
|
|
65
65
|
export const PARAM_USAGE_REGEXP = /\$\{(.+)\}/;
|
|
66
66
|
|
|
67
|
+
export const SIGNLE_PARAM_USAGE_REGEXP = /^\${\w+}$/;
|
|
68
|
+
|
|
67
69
|
export const CAPTURE_ALL_REGEX = /(.*)/;
|
|
68
70
|
|
|
69
71
|
export function isCaptureAllRegExp(regexp: RegExp): boolean {
|
|
@@ -72,3 +74,5 @@ export function isCaptureAllRegExp(regexp: RegExp): boolean {
|
|
|
72
74
|
|
|
73
75
|
export const CSRF_REGEXP = '[^,; ]*?[cCxX](?:srf|SRF)[^\'" =:]*? *\\\\?["\']?(?:=|:) *\\\\?["\']([^\'"]+?)\\\\?["\']';
|
|
74
76
|
export const getCsrfTokenByRegexp = (data: string) => new RegExp(CSRF_REGEXP).exec(data);
|
|
77
|
+
|
|
78
|
+
export const CONTAINS_APP_JSON_REGEXP = /application\/(.*)json/;
|
package/src/request/index.ts
CHANGED
|
@@ -39,6 +39,7 @@ export class LoadmillRequest implements RequestLike {
|
|
|
39
39
|
delay?: number | string;
|
|
40
40
|
postData?: RequestPostData;
|
|
41
41
|
postFormData?: PostFormData;
|
|
42
|
+
file?: RequestRawFileData;
|
|
42
43
|
cachePenetration?: CachePenetration;
|
|
43
44
|
|
|
44
45
|
method: HttpMethod = 'GET';
|
|
@@ -80,6 +81,7 @@ export function createRequest(from: RequestLike): LoadmillRequest {
|
|
|
80
81
|
headers,
|
|
81
82
|
postData,
|
|
82
83
|
postFormData,
|
|
84
|
+
file,
|
|
83
85
|
stopBefore,
|
|
84
86
|
skipBefore,
|
|
85
87
|
loop,
|
|
@@ -114,6 +116,10 @@ export function createRequest(from: RequestLike): LoadmillRequest {
|
|
|
114
116
|
request.postFormData = postFormData;
|
|
115
117
|
}
|
|
116
118
|
|
|
119
|
+
if (file) {
|
|
120
|
+
request.file = file;
|
|
121
|
+
}
|
|
122
|
+
|
|
117
123
|
if (delay !== null && delay !== undefined) {
|
|
118
124
|
request.delay = delay;
|
|
119
125
|
}
|
|
@@ -360,6 +366,7 @@ export interface RequestLike {
|
|
|
360
366
|
delay?: number | string;
|
|
361
367
|
postData?: RequestPostData;
|
|
362
368
|
postFormData?: PostFormData;
|
|
369
|
+
file?: RequestRawFileData;
|
|
363
370
|
extract?: Extractions | Extractions[];
|
|
364
371
|
headers?: LoadmillHeaders | LoadmillHeaders[];
|
|
365
372
|
expectedStatus?: HttpResponseStatus;
|
|
@@ -391,11 +398,19 @@ export type PostFormDataEntry = {
|
|
|
391
398
|
name: string;
|
|
392
399
|
value: string;
|
|
393
400
|
fileName?: string;
|
|
401
|
+
contentType?: string;
|
|
402
|
+
};
|
|
403
|
+
|
|
404
|
+
// Reference to a raw file attached to a request: `key` identifies the stored
// object and `signedUrl` is a pre-signed download URL (created by AWS —
// presumably S3; confirm against the upload flow).
export type RequestRawFileData = {
  name: string;
  key: string;
  signedUrl?: string; // created by aws
};
|
|
395
409
|
|
|
396
410
|
export enum BodyTypes {
|
|
397
411
|
POST_DATA = 'postData',
|
|
398
412
|
FORM_DATA = 'postFormData',
|
|
413
|
+
RAW_FILE = 'rawFile',
|
|
399
414
|
}
|
|
400
415
|
|
|
401
416
|
export type LoadmillHeaders = { [headerName: string]: string };
|
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
/* eslint-disable max-len */
|
|
1
2
|
const fs = require('fs');
|
|
2
3
|
const path = require('path');
|
|
3
4
|
import { suite, describe, it } from 'mocha';
|
|
@@ -34,7 +35,6 @@ suite('form-data-utils - multipart form data', () => {
|
|
|
34
35
|
describe('Form Class - manages and manipulates multipart form data', () => {
|
|
35
36
|
it('to string', () => {
|
|
36
37
|
const boundary = '------WebKitFormBoundary12345678ASDFGHJ';
|
|
37
|
-
// eslint-disable-next-line max-len
|
|
38
38
|
const expectedText = '--------WebKitFormBoundary12345678ASDFGHJ\r\nContent-Disposition: form-data; name="the_name_of_the_thing"; filename="some file name.ext"\r\ncontent-type: application/vnd.mspowerpoint-sniffing.x-rar-compressed\r\n\r\nthe_contents_of_the_file\r\n--------WebKitFormBoundary12345678ASDFGHJ\r\nContent-Disposition: form-data; name="entityId"\r\n\r\n123456789123456789123456789\r\n--------WebKitFormBoundary12345678ASDFGHJ\r\nContent-Disposition: form-data; name="shouldDoStuff"\r\n\r\ntrue\r\n--------WebKitFormBoundary12345678ASDFGHJ\r\nContent-Disposition: form-data; name="content-transfer-encoding"\r\n\r\nquoted-printable\r\n--------WebKitFormBoundary12345678ASDFGHJ--\r\n';
|
|
39
39
|
const form = new Form(params, boundary);
|
|
40
40
|
const result = form.toString();
|
|
@@ -49,7 +49,7 @@ suite('form-data-utils - multipart form data', () => {
|
|
|
49
49
|
postFormData: [
|
|
50
50
|
{
|
|
51
51
|
name: 'the_name_of_the_thing',
|
|
52
|
-
value: '',
|
|
52
|
+
value: 'the_contents_of_the_file',
|
|
53
53
|
fileName: 'some file name.ext',
|
|
54
54
|
},
|
|
55
55
|
{
|
|
@@ -75,7 +75,7 @@ suite('form-data-utils - multipart form data', () => {
|
|
|
75
75
|
postFormData: [
|
|
76
76
|
{
|
|
77
77
|
name: 'file',
|
|
78
|
-
value: '',
|
|
78
|
+
value: 'ÿØÿà\u0000\u0010JFIF\u0000\u0001\u0001\u0000\u0000\u0001\u0000\u0001\u0000\u0000ÿÛ\u0000\u0000\u0005\u0003\u0004\t\t\b\t\t\t\t\t\t\t\t\t\u0005u001c\u0017\t\b\u001a\t\t\u0006\u0018!\u0018\u001a\u001d\u001d\u001f\u001f\u001f\u0007\u000b"\u0018"\u001e\u0018\u001c\u001e\u001f\u001e\u0001\u0005\u0005\u0005\b\u0007\b\u000e\b\b\r\u0012\r\u000e\r\u0012\u0012\u0012\u0012\u0012\u0012\u0012\u0012\u0012\u0012\u0012\u0012\u0012\u0012\u0012\u0012\u0012\u0012\u0012\u0012\u0012\u0012R#B?\u00005Ù',
|
|
79
79
|
fileName: 'silk_sonic.jpeg',
|
|
80
80
|
},
|
|
81
81
|
]
|
|
@@ -98,24 +98,45 @@ suite('form-data-utils - multipart form data', () => {
|
|
|
98
98
|
});
|
|
99
99
|
|
|
100
100
|
it('should return 2 name-values', () => {
|
|
101
|
-
// eslint-disable-next-line max-len
|
|
102
101
|
const text = '----10453916712\r\nContent-Disposition: form-data; name="arnon"\r\n\r\npeleg\r\n----10453916712\r\nContent-Disposition: form-data; name="rivi"\r\n\r\nshimi\r\n----10453916712--\r\n';
|
|
103
102
|
const result = multipartFormDataTextToPostFormData(text);
|
|
104
103
|
expect(result).to.have.deep.ordered.members([{ name: 'arnon', value: 'peleg' }, { name: 'rivi', value: 'shimi' }]);
|
|
105
104
|
});
|
|
106
105
|
|
|
107
106
|
it('should return name-value-filename', () => {
|
|
108
|
-
// eslint-disable-next-line max-len
|
|
109
107
|
const text = '-----------------------------10453916712809342873442003634\r\nContent-Disposition: form-data; name="file"; filename="silk_sonic.jpeg"\r\nContent-Type: image/jpeg\r\n\r\nXCXCXCXCXCXCXCXCXCXCXCXCXCXC\t\r\n\t \n\n\nXXC101010101010101101010101010\r\n-----------------------------10453916712809342873442003634--\r\n';
|
|
110
108
|
const result = multipartFormDataTextToPostFormData(text);
|
|
111
109
|
expect(result).to.have.deep.ordered.members([
|
|
112
110
|
{
|
|
113
111
|
name: 'file',
|
|
114
|
-
value: '',
|
|
112
|
+
value: 'XCXCXCXCXCXCXCXCXCXCXCXCXCXC\t\r\n\t \n\n\nXXC101010101010101101010101010',
|
|
115
113
|
fileName: 'silk_sonic.jpeg'
|
|
116
114
|
}
|
|
117
115
|
]);
|
|
118
116
|
});
|
|
119
|
-
});
|
|
120
117
|
|
|
118
|
+
it('should return also the value of a simple file', () => {
|
|
119
|
+
const text = '------WebKitFormBoundaryntAaM2MokVD4YVOO\r\nContent-Disposition: form-data; name="file"; filename="Cars.csv"\r\nContent-Type: text/csv\r\n\r\nID,First_Name,Last_Name,Car_Make,Car_Model,Car_Color,Car_Model Year,Car_VIN\n1,Lenora,Hegdonne,Lincoln,Town Car,Blue,1994,WAUUL98E68A703943\n2,Jermaine,Bradane,Ford,GT,Pink,2005,1N6AA0EC1FN193806\r\n------WebKitFormBoundaryntAaM2MokVD4YVOO--\r\n';
|
|
120
|
+
const result = multipartFormDataTextToPostFormData(text);
|
|
121
|
+
expect(result).to.have.deep.ordered.members([
|
|
122
|
+
{
|
|
123
|
+
name: 'file',
|
|
124
|
+
value: 'ID,First_Name,Last_Name,Car_Make,Car_Model,Car_Color,Car_Model Year,Car_VIN\n1,Lenora,Hegdonne,Lincoln,Town Car,Blue,1994,WAUUL98E68A703943\n2,Jermaine,Bradane,Ford,GT,Pink,2005,1N6AA0EC1FN193806',
|
|
125
|
+
fileName: 'Cars.csv'
|
|
126
|
+
}
|
|
127
|
+
]);
|
|
128
|
+
});
|
|
129
|
+
it('should detect content-type', () => {
|
|
130
|
+
const text = '----------------------------148649485734387205386089\r\nContent-Disposition: form-data; name="file"; filename="Bell data engineer.docx"\r\ncontent-type: application/vnd.openxmlformats-officedocument.wordprocessingml.document\r\n\r\nUEsDBBQABgAIAAAAIQCTkoLJhQEAACkHAAATAAgCW0NvbnRlbnRfVHlwZXNdLnhtbCCiBAIo\r\n----------------------------148649485734387205386089--\r\n';
|
|
131
|
+
const result = multipartFormDataTextToPostFormData(text);
|
|
132
|
+
expect(result).to.have.deep.ordered.members([
|
|
133
|
+
{
|
|
134
|
+
name: 'file',
|
|
135
|
+
value: 'PK\u0003\u0004\u0014\u0000\u0006\u0000\b\u0000\u0000\u0000!\u0000É
\u0001\u0000\u0000)\u0007\u0000\u0000\u0013\u0000\b\u0002[Content_Types].xml ¢\u0004\u0002(',
|
|
136
|
+
fileName: 'Bell data engineer.docx',
|
|
137
|
+
contentType: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
|
|
138
|
+
}
|
|
139
|
+
]);
|
|
140
|
+
});
|
|
141
|
+
});
|
|
121
142
|
});
|