@luca324/yandex-evershop-s3 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +60 -0
- package/package.json +45 -0
- package/patches/@evershop+s3_file_storage+2.0.0.patch +236 -0
package/README.md
ADDED
@@ -0,0 +1,60 @@
# @luca324/yandex-evershop-s3

[![npm version](https://badge.fury.io/js/%40luca324%2Fyandex-evershop-s3.svg)](https://badge.fury.io/js/%40luca324%2Fyandex-evershop-s3)

A patched version of `@evershop/s3_file_storage` with Yandex Cloud S3 support.

## Installation

```bash
npm install @luca324/yandex-evershop-s3
```

Or with Yarn:

```bash
yarn add @luca324/yandex-evershop-s3
```

## What it fixes

This package automatically applies a patch to `@evershop/s3_file_storage` (see the sketch after this list):

1. **Yandex Cloud S3 endpoint support**: every `S3Client` is created with the `AWS_ENDPOINT` environment variable and with `forcePathStyle: true`.
2. **Path-style object URLs**: file URLs are built as `<endpoint>/<bucket>/<key>` instead of `https://<bucket>.s3.amazonaws.com/<key>`, and the product-image subscriber extracts the bucket name and object key accordingly.

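The core of the patch, as a minimal sketch; the real changes live in `patches/@evershop+s3_file_storage+2.0.0.patch` (listed in full below) and touch every service that constructs an `S3Client`:

```js
import { S3Client } from "@aws-sdk/client-s3";
import { getEnv } from "@evershop/evershop/lib/util/getEnv";

// Before the patch, the client was built with a region only, so it talked to
// AWS and produced https://<bucket>.s3.amazonaws.com/<key> URLs:
//   const s3Client = new S3Client({ region: getEnv("AWS_REGION") });

// After the patch, the endpoint comes from .env and path-style addressing is
// forced, so object URLs take the form <endpoint>/<bucket>/<key>:
const s3Client = new S3Client({
  region: getEnv("AWS_REGION"),     // e.g. ru-central1
  endpoint: getEnv("AWS_ENDPOINT"), // e.g. https://storage.yandexcloud.net
  forcePathStyle: true
});
```
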
## Usage

Use it exactly as you would the original package. Add the following variables to your `.env` file:

```env
AWS_ACCESS_KEY_ID=your_access_key_id         # Access key ID (shown in the Yandex S3 interface)
AWS_SECRET_ACCESS_KEY=your_secret_access_key # Secret access key
AWS_BUCKET_NAME=your_bucket_name             # Bucket name
AWS_REGION=ru-central1                       # Region (`ru-central1` works for Yandex S3)
AWS_ENDPOINT=https://storage.yandexcloud.net # Endpoint for Yandex S3
AWS_S3_FORCE_PATH_STYLE=true
```

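With these variables set, the patched services build public object URLs from `AWS_ENDPOINT` and the bucket name rather than from the `*.s3.amazonaws.com` hostname (the patched client constructors also set `forcePathStyle: true` directly). A rough sketch of the resulting URL shape, with example values:

```js
// Sketch of the URL shape the patched services produce.
// The values below are examples; in the patch they come from getEnv(...).
const endpoint = "https://storage.yandexcloud.net"; // AWS_ENDPOINT
const bucketName = "your_bucket_name";              // AWS_BUCKET_NAME
const objectKey = "catalog/product/photo.png";      // hypothetical object key

// Patched:   <endpoint>/<bucket>/<key>
const fileURL = `${endpoint}/${bucketName}/${objectKey}`;
console.log(fileURL);
// -> https://storage.yandexcloud.net/your_bucket_name/catalog/product/photo.png

// Unpatched: `https://${bucketName}.s3.amazonaws.com/${objectKey}`
```
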
## How it works

On install, the `postinstall` script runs `patch-package`, which automatically applies the patch to the installed copy of `@evershop/s3_file_storage`.

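To confirm the patch actually landed, you can check the installed dist files for the `AWS_ENDPOINT` lookup that only the patched build contains. A hypothetical helper script (not shipped with this package), assuming it is run from your project root:

```js
// check-patch.js: hypothetical verification helper, run with `node check-patch.js`.
const fs = require("fs");
const path = require("path");

// One of the files the patch rewrites.
const patchedFile = path.join(
  process.cwd(), "node_modules", "@evershop", "s3_file_storage",
  "dist", "services", "awsFileUploader.js"
);

const source = fs.readFileSync(patchedFile, "utf8");
// The patched build reads AWS_ENDPOINT; the stock 2.0.0 build does not.
if (source.includes('getEnv("AWS_ENDPOINT")')) {
  console.log("Patch applied: awsFileUploader.js uses AWS_ENDPOINT");
} else {
  console.log("Patch NOT applied: awsFileUploader.js still targets the default AWS endpoint");
}
```
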
## Local development

```bash
# Clone the repository
git clone https://github.com/Luca324/yandex-evershop-s3.git
cd yandex-evershop-s3

# Install dependencies
npm install

# Recreate the patch (if it needs updating)
npx patch-package @evershop/s3_file_storage
```

## License

MIT © [Luca324](https://github.com/Luca324)

package/package.json
ADDED
@@ -0,0 +1,45 @@
{
  "name": "@luca324/yandex-evershop-s3",
  "version": "1.0.0",
  "description": "Patched version of @evershop/s3_file_storage with Yandex Cloud S3 support",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "scripts": {
    "prepare": "patch-package",
    "postinstall": "patch-package"
  },
  "dependencies": {
    "@evershop/s3_file_storage": "2.0.0",
    "patch-package": "^8.0.1"
  },
  "peerDependencies": {
    "@evershop/s3_file_storage": "^2.0.0"
  },
  "files": [
    "dist/",
    "patches/",
    "README.md",
    "LICENSE"
  ],
  "keywords": [
    "evershop",
    "yandex",
    "s3",
    "yandex-cloud",
    "storage",
    "patch"
  ],
  "author": "Luca324 <tanya.mosol@yandex.ru>",
  "license": "MIT",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/Luca324/yandex-evershop-s3.git"
  },
  "bugs": {
    "url": "https://github.com/Luca324/yandex-evershop-s3/issues"
  },
  "homepage": "https://github.com/Luca324/yandex-evershop-s3#readme",
  "publishConfig": {
    "access": "public"
  }
}
package/patches/@evershop+s3_file_storage+2.0.0.patch
ADDED
@@ -0,0 +1,236 @@
diff --git a/node_modules/@evershop/s3_file_storage/dist/services/awsFileBrowser.js b/node_modules/@evershop/s3_file_storage/dist/services/awsFileBrowser.js
index 09edceb..bec8a45 100644
--- a/node_modules/@evershop/s3_file_storage/dist/services/awsFileBrowser.js
+++ b/node_modules/@evershop/s3_file_storage/dist/services/awsFileBrowser.js
@@ -1,6 +1,11 @@
 import { S3Client, ListObjectsV2Command } from "@aws-sdk/client-s3";
 import { getEnv } from "@evershop/evershop/lib/util/getEnv";
-const s3Client = new S3Client({ region: getEnv("AWS_REGION") });
+// Patched for Yandex S3 *: endpoint + forcePathStyle
+const s3Client = new S3Client({
+ region: getEnv("AWS_REGION"),
+ endpoint: getEnv("AWS_ENDPOINT"),
+ forcePathStyle: true
+ });
 const bucketName = getEnv("AWS_BUCKET_NAME");
 export const awsFileBrowser = {
 list: async (path) => {
@@ -23,7 +28,8 @@ export const awsFileBrowser = {
 ? data.Contents.filter((item) => item.Size !== 0).map((object) => {
 var _a;
 const fileName = (_a = object.Key) === null || _a === void 0 ? void 0 : _a.split("/").pop();
- const fileURL = `https://${bucketName}.s3.amazonaws.com/${object.Key}`;
+ // Patched for Yandex S3 *
+ const fileURL = `${getEnv("AWS_ENDPOINT")}/${bucketName}/${object.Key}`;
 return {
 name: fileName,
 url: fileURL,
diff --git a/node_modules/@evershop/s3_file_storage/dist/services/awsFileDeleter.js b/node_modules/@evershop/s3_file_storage/dist/services/awsFileDeleter.js
index 30a7d64..1f8b906 100644
--- a/node_modules/@evershop/s3_file_storage/dist/services/awsFileDeleter.js
+++ b/node_modules/@evershop/s3_file_storage/dist/services/awsFileDeleter.js
@@ -1,6 +1,11 @@
 import { S3Client, DeleteObjectCommand, HeadObjectCommand, } from "@aws-sdk/client-s3";
 import { getEnv } from "@evershop/evershop/lib/util/getEnv";
-const s3Client = new S3Client({ region: getEnv("AWS_REGION") });
+// Patched for Yandex S3 *: endpoint + forcePathStyle
+const s3Client = new S3Client({
+ region: getEnv("AWS_REGION"),
+ endpoint: getEnv("AWS_ENDPOINT"),
+ forcePathStyle: true
+});
 const bucketName = getEnv("AWS_BUCKET_NAME");
 export const awsFileDeleter = {
 delete: async (path) => {
diff --git a/node_modules/@evershop/s3_file_storage/dist/services/awsFileUploader.js b/node_modules/@evershop/s3_file_storage/dist/services/awsFileUploader.js
index d5f3923..24f18bf 100644
--- a/node_modules/@evershop/s3_file_storage/dist/services/awsFileUploader.js
+++ b/node_modules/@evershop/s3_file_storage/dist/services/awsFileUploader.js
@@ -1,7 +1,14 @@
 import path from "path";
 import { S3Client, PutObjectCommand, } from "@aws-sdk/client-s3";
 import { getEnv } from "@evershop/evershop/lib/util/getEnv";
-const s3Client = new S3Client({ region: getEnv("AWS_REGION") });
+// Patched for Yandex S3 *: endpoint + forcePathStyle
+const s3clientparams = {
+ region: getEnv("AWS_REGION"),
+ endpoint: getEnv("AWS_ENDPOINT"),
+ forcePathStyle: true
+}
+const s3Client = new S3Client(s3clientparams);
+
 const bucketName = getEnv("AWS_BUCKET_NAME");
 export const awsFileUploader = {
 upload: async (files, requestedPath) => {
@@ -27,7 +34,8 @@ export const awsFileUploader = {
 name: files[index].filename,
 mimetype: files[index].mimetype,
 size: files[index].size,
- url: `https://${bucketName}.s3.amazonaws.com/${path.join(requestedPath, files[index].filename)}`,
+ // Patched for Yandex S3 *
+ url: `${getEnv("AWS_ENDPOINT")}/${bucketName}/${path.posix.join(requestedPath ?? "", files[index].filename)}`,
 });
 });
 return uploadedFiles;
diff --git a/node_modules/@evershop/s3_file_storage/dist/services/awsFolderCreator.js b/node_modules/@evershop/s3_file_storage/dist/services/awsFolderCreator.js
index 642c6ad..54f0157 100644
--- a/node_modules/@evershop/s3_file_storage/dist/services/awsFolderCreator.js
+++ b/node_modules/@evershop/s3_file_storage/dist/services/awsFolderCreator.js
@@ -1,6 +1,11 @@
 import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3";
 import { getEnv } from "@evershop/evershop/lib/util/getEnv";
-const s3Client = new S3Client({ region: getEnv("AWS_REGION") });
+// Patched for Yandex S3 *: endpoint + forcePathStyle
+const s3Client = new S3Client({
+ region: getEnv("AWS_REGION"),
+ endpoint: getEnv("AWS_ENDPOINT"),
+ forcePathStyle: true
+});
 const bucketName = getEnv("AWS_BUCKET_NAME");
 export const awsFolderCreator = {
 create: async (path) => {
diff --git a/node_modules/@evershop/s3_file_storage/dist/subscribers/product_image_added/awsGenerateProductImageVariant.js b/node_modules/@evershop/s3_file_storage/dist/subscribers/product_image_added/awsGenerateProductImageVariant.js
index 977ba54..e8c9919 100644
--- a/node_modules/@evershop/s3_file_storage/dist/subscribers/product_image_added/awsGenerateProductImageVariant.js
+++ b/node_modules/@evershop/s3_file_storage/dist/subscribers/product_image_added/awsGenerateProductImageVariant.js
@@ -9,14 +9,22 @@ import { error } from "@evershop/evershop/lib/log";
 async function downloadObjectToBuffer(objectUrl) {
 var _a;
 const parsedUrl = new URL(objectUrl);
- const bucketName = parsedUrl.host.split(".")[0]; // Extract the bucket name
- const objectKey = parsedUrl.pathname.substr(1); // Extract the object key (remove leading '/')
+ const bucketName = getEnv("AWS_BUCKET_NAME"); // Extract the bucket name for Yandex S3 *
+ // Extract the object key (remove leading '/' and bucket segment) for Yandex S3 *
+ const pathParts = parsedUrl.pathname.replace(/^\/+/, "").split("/");
+ pathParts.shift();
+ const objectKey = pathParts.join("/");
 const params = {
 Bucket: bucketName,
 Key: objectKey,
 };
 const getObjectCommand = new GetObjectCommand(params);
- const s3Client = new S3Client({ region: getEnv("AWS_REGION") });
+ const s3clientparams = {
+ region: getEnv("AWS_REGION"),
+ endpoint: getEnv("AWS_ENDPOINT"),
+ forcePathStyle: true
+ }
+ const s3Client = new S3Client(s3clientparams);
 const data = await s3Client.send(getObjectCommand);
 // Get content as a buffer from the data.Body object
 const buffer = await ((_a = data.Body) === null || _a === void 0 ? void 0 : _a.transformToByteArray());
@@ -31,7 +39,10 @@ async function resizeAndUploadImage(s3Client, originalObjectUrl, resizedObjectUr
 .toBuffer();
 // Upload the resized image
 const parsedUrl = new URL(resizedObjectUrl);
- const objectKey = parsedUrl.pathname.substr(1); // Extract the object key (remove leading '/')
+ // Extract the object key (remove leading '/' and bucket segment) for Yandex S3 *
+ const uploadPathParts = parsedUrl.pathname.replace(/^\/+/, "").split("/");
+ uploadPathParts.shift();
+ const objectKey = uploadPathParts.join("/");
 const uploadParams = {
 Bucket: bucketName,
 Key: objectKey,
@@ -43,12 +54,17 @@ async function resizeAndUploadImage(s3Client, originalObjectUrl, resizedObjectUr
 }
 export default async function awsGenerateProductImageVariant(data) {
 if (getConfig("system.file_storage") === "s3") {
- try {
- const s3Client = new S3Client({ region: getEnv("AWS_REGION") });
+ try {
+ const s3clientparams = {
+ region: getEnv("AWS_REGION"),
+ endpoint: getEnv("AWS_ENDPOINT"),
+ forcePathStyle: true
+ }
+ const s3Client = new S3Client(s3clientparams);
 const originalObjectUrl = data.origin_image;
 // The data.image is the full url of the Object, we need to get the Object path
 // by removing the container url
- const ext = path.extname(originalObjectUrl);
+ const ext = path.extname(originalObjectUrl)
 // Target path for single variant by adding a '-single' just before the extension
 const singleObjectUrl = originalObjectUrl.replace(ext, `-single${ext}`);
 // Target path for listing variant by adding a '-listing' just before the extension
diff --git a/node_modules/@evershop/s3_file_storage/src/services/awsFileBrowser.ts b/node_modules/@evershop/s3_file_storage/src/services/awsFileBrowser.ts
index b3f38d7..8d7bb42 100644
--- a/node_modules/@evershop/s3_file_storage/src/services/awsFileBrowser.ts
+++ b/node_modules/@evershop/s3_file_storage/src/services/awsFileBrowser.ts
@@ -27,7 +27,7 @@ export const awsFileBrowser = {
 const files = data.Contents
 ? data.Contents.filter((item) => item.Size !== 0).map((object) => {
 const fileName = object.Key?.split("/").pop();
- const fileURL = `https://${bucketName}.s3.amazonaws.com/${object.Key}`;
+ const fileURL = `${getEnv("AWS_ENDPOINT")}/${bucketName}/${object.Key}`;

 return {
 name: fileName,
diff --git a/node_modules/@evershop/s3_file_storage/src/services/awsFileDeleter.ts b/node_modules/@evershop/s3_file_storage/src/services/awsFileDeleter.ts
index 8563cef..ccb2852 100644
--- a/node_modules/@evershop/s3_file_storage/src/services/awsFileDeleter.ts
+++ b/node_modules/@evershop/s3_file_storage/src/services/awsFileDeleter.ts
@@ -5,7 +5,10 @@ import {
 } from "@aws-sdk/client-s3";
 import { getEnv } from "@evershop/evershop/lib/util/getEnv";

-const s3Client = new S3Client({ region: getEnv("AWS_REGION") });
+const s3Client = new S3Client({ region: getEnv("AWS_REGION"),
+ endpoint: getEnv("AWS_ENDPOINT"),
+ forcePathStyle: true
+});
 const bucketName = getEnv("AWS_BUCKET_NAME");

 export const awsFileDeleter = {
diff --git a/node_modules/@evershop/s3_file_storage/src/subscribers/product_image_added/awsGenerateProductImageVariant.ts b/node_modules/@evershop/s3_file_storage/src/subscribers/product_image_added/awsGenerateProductImageVariant.ts
index 80fc2e2..ef28b80 100644
--- a/node_modules/@evershop/s3_file_storage/src/subscribers/product_image_added/awsGenerateProductImageVariant.ts
+++ b/node_modules/@evershop/s3_file_storage/src/subscribers/product_image_added/awsGenerateProductImageVariant.ts
@@ -22,7 +22,11 @@ async function downloadObjectToBuffer(objectUrl: string) {
 };

 const getObjectCommand = new GetObjectCommand(params);
- const s3Client = new S3Client({ region: getEnv("AWS_REGION") });
+ const s3Client = new S3Client({
+ region: getEnv("AWS_REGION"),
+ endpoint: getEnv("AWS_ENDPOINT"),
+ forcePathStyle: true
+ });
 const data = await s3Client.send(getObjectCommand);
 // Get content as a buffer from the data.Body object
 const buffer = await data.Body?.transformToByteArray();
@@ -59,13 +63,23 @@ async function resizeAndUploadImage(
 }

 export default async function awsGenerateProductImageVariant(data) {
+ console.log('data (FROM SRC)', data);
 if (getConfig("system.file_storage") === "s3") {
+ console.log('getConfig("system.file_storage") === "s3"**', true);
 try {
- const s3Client = new S3Client({ region: getEnv("AWS_REGION") });
+ const s3clientparams = {
+ region: getEnv("AWS_REGION"),
+ endpoint: getEnv("AWS_ENDPOINT"),
+ forcePathStyle: true
+ }
+ console.log('s3clientparams', s3clientparams);
+ const s3Client = new S3Client(s3clientparams);
+ console.log('s3Client', s3Client);
 const originalObjectUrl = data.origin_image;
 // The data.image is the full url of the Object, we need to get the Object path
 // by removing the container url
 const ext = path.extname(originalObjectUrl);
+ console.log('ext', ext);
 // Target path for single variant by adding a '-single' just before the extension
 const singleObjectUrl = originalObjectUrl.replace(ext, `-single${ext}`);
 // Target path for listing variant by adding a '-listing' just before the extension
@@ -103,6 +117,10 @@ export default async function awsGenerateProductImageVariant(data) {
 getConfig("catalog.product.image.thumbnail.height", 100)
 );

+ console.log('singleUrl', singleUrl);
+ console.log('listingUrl', listingUrl);
+ console.log('thumnailUrl', thumnailUrl);
+
 // Update the record in the database with the new URLs in the variant columns
 await update("product_image")
 .given({