@nonphoto/sanity-image 3.2.0 → 5.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +516 -68
- package/dist/index.js +278 -114
- package/package.json +5 -2
- package/src/asset.ts +120 -9
- package/src/constants.ts +7 -0
- package/src/crop.ts +14 -0
- package/src/hotspot.ts +14 -0
- package/src/image.ts +69 -0
- package/src/index.ts +6 -5
- package/src/rect.ts +28 -0
- package/src/transformations.ts +155 -0
- package/src/imageObject.ts +0 -15
- package/src/params.ts +0 -118
- package/src/reference.ts +0 -12
- package/src/stub.ts +0 -44
- package/src/url.ts +0 -76
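For orientation before the file-by-file diff, here is a minimal sketch of how the reworked 5.0.0 API fits together, based only on the exports visible in the diffs below; the project ID, dataset, and asset ID are hypothetical, and the URL in the comment is an illustrative expectation rather than recorded output.

import {
  imageAssetFromSource,
  imageAssetWithTransformations,
  imageSrcset,
  imageUrl,
} from "@nonphoto/sanity-image";

// Any object exposing projectId and dataset works as the client argument.
const client = { projectId: "myproject", dataset: "production" };

// Build an ImageAsset from a plain asset ID (an image object or reference also works).
const asset = imageAssetFromSource("image-abc123-1200x800-jpg");

if (asset) {
  // Single URL with transformations appended as query parameters.
  const src = imageUrl(
    client,
    imageAssetWithTransformations(asset, { width: 800, format: "webp" }),
  );
  // e.g. https://cdn.sanity.io/images/myproject/production/abc123-1200x800.jpg?fm=webp&w=800

  // srcset entries for every default width below the asset's intrinsic width (240w through 960w here).
  const srcset = imageSrcset(client, asset);
}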
package/dist/index.js
CHANGED

@@ -1,20 +1,126 @@
 // src/asset.ts
-
-
+import {
+  is as is5,
+  nullish,
+  number as number5,
+  object as object5,
+  string as string2,
+  union as union2
+} from "valibot";
+
+// src/crop.ts
+import { is, number, object, optional } from "valibot";
+var cropSchema = object({
+  top: optional(number()),
+  bottom: optional(number()),
+  left: optional(number()),
+  right: optional(number())
+});
+function isCrop(input) {
+  return is(cropSchema, input);
 }

-// src/
-
-
+// src/hotspot.ts
+import { is as is2, number as number2, object as object2, optional as optional2 } from "valibot";
+var hotspotSchema = object2({
+  x: optional2(number2()),
+  y: optional2(number2()),
+  width: optional2(number2()),
+  height: optional2(number2())
+});
+function isHotspot(input) {
+  return is2(hotspotSchema, input);
 }

-// src/
-
-
+// src/rect.ts
+import { is as is3, number as number3, object as object3, tuple } from "valibot";
+var rectSchema = object3({
+  pos: tuple([number3(), number3()]),
+  size: tuple([number3(), number3()])
+});
+function isRect(input) {
+  return is3(rectSchema, input);
+}
+function rectFromCrop(asset, crop) {
+  const left = Math.max(crop.left ?? 0, 0);
+  const right = Math.max(crop.right ?? 0, 0);
+  const top = Math.max(crop.top ?? 0, 0);
+  const bottom = Math.max(crop.bottom ?? 0, 0);
+  return {
+    pos: [left * asset.width, top * asset.height],
+    size: [(1 - left - right) * asset.width, (1 - top - bottom) * asset.height]
+  };
 }

-// src/
-
+// src/transformations.ts
+import {
+  boolean,
+  is as is4,
+  literal,
+  number as number4,
+  object as object4,
+  partial,
+  string,
+  tuple as tuple2,
+  union
+} from "valibot";
+var transformationsSchema = partial(
+  object4({
+    auto: literal("format"),
+    background: string(),
+    blur: number4(),
+    crop: union([
+      literal("top"),
+      literal("bottom"),
+      literal("left"),
+      literal("right"),
+      literal("center"),
+      literal("focalpoint"),
+      literal("entropy")
+    ]),
+    download: union([string(), boolean()]),
+    dpr: union([literal(1), literal(2), literal(3)]),
+    fit: union([
+      literal("clip"),
+      literal("crop"),
+      literal("fill"),
+      literal("fillmax"),
+      literal("max"),
+      literal("scale"),
+      literal("min")
+    ]),
+    flipHorizontal: boolean(),
+    flipVertical: boolean(),
+    focalPoint: tuple2([number4(), number4()]),
+    format: union([
+      literal("jpg"),
+      literal("pjpg"),
+      literal("png"),
+      literal("webp")
+    ]),
+    frame: number4(),
+    height: number4(),
+    invert: boolean(),
+    maxHeight: number4(),
+    maxWidth: number4(),
+    minHeight: number4(),
+    minWidth: number4(),
+    orientation: union([literal(0), literal(90), literal(180), literal(270)]),
+    pad: number4(),
+    quality: number4(),
+    rect: rectSchema,
+    saturation: number4(),
+    sharpen: number4(),
+    width: number4()
+  })
+);
+function isTransformations(input) {
+  return is4(transformationsSchema, input);
+}
+function entry(key, value) {
+  return value == null || value === false ? void 0 : [key, String(typeof value === "number" ? Math.round(value) : value)];
+}
+function transformationsToURLSearch({
   auto,
   background,
   blur,
@@ -41,129 +147,187 @@ function sanityImageParamsToSearchParamEntries({
   sharpen,
   width
 }) {
-  return
-    auto,
-    bg
-    blur,
-    crop,
-    dl
-    dpr,
-    fit,
-
-
-
-
-
-
-
-    "
-    "
-    "
-    "
-
-
-
-
-
-
-
-
-
-
-
-  )
-
+  return "?" + [
+    entry("auto", auto),
+    entry("bg", background),
+    entry("blur", blur),
+    entry("crop", crop),
+    entry("dl", download),
+    entry("dpr", dpr),
+    entry("fit", fit),
+    entry(
+      "flip",
+      flipHorizontal || flipVertical ? [flipHorizontal && "h", flipVertical && "v"].filter(Boolean).join("") : void 0
+    ),
+    entry("fm", format),
+    entry("fp-x", focalPoint?.[0]),
+    entry("fp-y", focalPoint?.[1]),
+    entry("frame", frame),
+    entry("h", height),
+    entry("invert", invert),
+    entry("max-h", maxHeight),
+    entry("max-w", maxWidth),
+    entry("min-h", minHeight),
+    entry("min-w", minWidth),
+    entry("or", orientation),
+    entry("pad", pad),
+    entry("q", quality),
+    entry(
+      "rect",
+      rect ? [rect.pos[0], rect.pos[1], rect.size[0], rect.size[1]].map(Math.round).join(",") : void 0
+    ),
+    entry("sat", saturation),
+    entry("sharp", sharpen),
+    entry("w", width)
+  ].filter((entry2) => entry2 != null).map((entry2) => entry2.join("=")).join("&");
 }

-// src/
-
-
+// src/asset.ts
+var assetLikeSchema = object5({
+  _id: string2()
+});
+var referenceLikeSchema = object5({
+  _ref: string2()
+});
+var imageObjectSchema = object5({
+  asset: nullish(union2([assetLikeSchema, referenceLikeSchema, string2()])),
+  crop: nullish(cropSchema),
+  hotspot: nullish(hotspotSchema)
+});
+var imageAssetSchema = object5({
+  _id: string2(),
+  assetId: string2(),
+  width: number5(),
+  height: number5(),
+  extension: string2(),
+  vanityName: nullish(string2()),
+  transformations: nullish(transformationsSchema)
+});
+function isAssetLike(input) {
+  return is5(assetLikeSchema, input);
+}
+function isReferenceLike(input) {
+  return is5(referenceLikeSchema, input);
+}
+function isImageObject(input) {
+  return is5(imageObjectSchema, input);
+}
+function isImageAsset(input) {
+  return is5(imageAssetSchema, input);
+}
+function assetIdFromSource(source) {
+  return typeof source === "string" ? source : isAssetLike(source) ? source._id : isReferenceLike(source) ? source._ref : isImageObject(source) && source.asset ? assetIdFromSource(source.asset) : void 0;
+}
+function parseAssetId(id) {
+  const matches = id.match(/^image-(\w+)-(\d+)x(\d+)-(\w+)$/);
   if (matches) {
-    const [,
-    return {
+    const [, assetId, width, height, extension] = matches;
+    return {
+      _id: id,
+      assetId,
+      width: Number(width),
+      height: Number(height),
+      extension
+    };
   }
 }
-function
-
+function imageAssetFromSource(source) {
+  if (typeof source === "object" && "assetId" in source) {
+    return source;
+  } else {
+    const id = assetIdFromSource(source);
+    const asset = id ? parseAssetId(id) : void 0;
+    return asset ? imageAssetWithTransformations(asset, {
+      rect: typeof source === "object" && "crop" in source && source.crop ? rectFromCrop(asset, source.crop) : void 0
+    }) : void 0;
+  }
 }
-function
-
-
+function imageAssetWithTransformations(asset, transformations) {
+  return {
+    ...asset,
+    transformations: { ...asset.transformations, ...transformations }
+  };
 }

-// src/
-var
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-  // iPhone 6-8 Plus
-  960,
-  720,
-  // iPhone 6-8
-  640,
-  // 480p
-  480,
-  360,
-  240
-];
-function sanityImageUrl(client, image, params) {
+// src/constants.ts
+var srcsetWidths = {
+  default: [2560, 1920, 1280, 960, 640, 480, 360, 240],
+  expanded: [
+    3840,
+    3200,
+    2560,
+    2048,
+    1920,
+    1668,
+    1280,
+    1080,
+    960,
+    720,
+    640,
+    480,
+    360,
+    240
+  ]
+};
+
+// src/image.ts
+function imageUrl(client, asset) {
   const url = new URL(
     [
-
+      "https://cdn.sanity.io/images",
       client.projectId,
       client.dataset,
-      `${
-
+      `${asset.assetId}-${asset.width}x${asset.height}.${asset.extension}`,
+      asset.vanityName
     ].filter(Boolean).join("/")
   );
-  if (
-    url.search =
-      sanityImageParamsToSearchParamEntries(params)
-    ).toString();
+  if (asset.transformations) {
+    url.search = transformationsToURLSearch(asset.transformations);
   }
   return url.href;
 }
-function
-
-
-
-
-
-
-
-      width,
-      height: width * aspectRatio
-    });
+function imageSrcset(client, asset, widths = srcsetWidths.default) {
+  return widths.sort((a, b) => a - b).filter((width) => width < asset.width).map(Math.round).map((width) => {
+    const url = imageUrl(
+      client,
+      imageAssetWithTransformations(asset, {
+        width
+      })
+    );
     return `${url} ${width}w`;
   }).join(",");
 }
+function imageAspectRatio(asset) {
+  const size = asset.transformations && ["crop", "fill", "fillmax", "scale", "min"].includes(
+    asset.transformations.fit ?? ""
+  ) && asset.transformations.width != null && asset.transformations.height != null ? [asset.transformations.width, asset.transformations.height] : asset.transformations?.rect ? asset.transformations.rect.size : [asset.width, asset.height];
+  return size[0] / size[1];
+}
 export {
-
-
-
-
-
-
-
-
-
-
+  assetIdFromSource,
+  assetLikeSchema,
+  cropSchema,
+  hotspotSchema,
+  imageAspectRatio,
+  imageAssetFromSource,
+  imageAssetSchema,
+  imageAssetWithTransformations,
+  imageObjectSchema,
+  imageSrcset,
+  imageUrl,
+  isAssetLike,
+  isCrop,
+  isHotspot,
+  isImageAsset,
+  isImageObject,
+  isRect,
+  isReferenceLike,
+  isTransformations,
+  parseAssetId,
+  rectFromCrop,
+  rectSchema,
+  referenceLikeSchema,
+  srcsetWidths,
+  transformationsSchema,
+  transformationsToURLSearch
 };
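The bundled transformationsToURLSearch helper above maps the camelCase transformation names to Sanity image-pipeline query parameters, rounding numbers and dropping nullish or false values. A small sketch of its output, assuming it takes a single transformations object as the bundled code suggests; the values are illustrative:

import { transformationsToURLSearch } from "@nonphoto/sanity-image";

transformationsToURLSearch({ fit: "crop", format: "webp", width: 800.4 });
// -> "?fit=crop&fm=webp&w=800"   (numbers rounded, keys emitted in the order shown above)

transformationsToURLSearch({ flipHorizontal: true, flipVertical: true });
// -> "?flip=hv"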
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@nonphoto/sanity-image",
-  "version": "3.2.0",
+  "version": "5.0.0",
   "author": "Jonas Luebbers <jonas@jonasluebbers.com> (https://www.jonasluebbers.com)",
   "license": "MIT",
   "type": "module",
@@ -36,5 +36,8 @@
   "engines": {
     "node": ">=20"
   },
-  "packageManager": "pnpm@10.12.1"
+  "packageManager": "pnpm@10.12.1",
+  "dependencies": {
+    "valibot": "^1.1.0"
+  }
 }
package/src/asset.ts
CHANGED

@@ -1,12 +1,123 @@
-
-
+import {
+  InferOutput,
+  is,
+  nullish,
+  number,
+  object,
+  string,
+  union,
+} from "valibot";
+import { cropSchema } from "./crop";
+import { hotspotSchema } from "./hotspot";
+import { rectFromCrop } from "./rect";
+import { Transformations, transformationsSchema } from "./transformations";
+
+export type ImageSource =
+  | ImageObject
+  | ImageAsset
+  | AssetLike
+  | ReferenceLike
+  | string;
+
+export type AssetLike = InferOutput<typeof assetLikeSchema>;
+
+export type ReferenceLike = InferOutput<typeof referenceLikeSchema>;
+
+export type ImageObject = InferOutput<typeof imageObjectSchema>;
+
+export type ImageAsset = InferOutput<typeof imageAssetSchema>;
+
+export const assetLikeSchema = object({
+  _id: string(),
+});
+
+export const referenceLikeSchema = object({
+  _ref: string(),
+});
+
+export const imageObjectSchema = object({
+  asset: nullish(union([assetLikeSchema, referenceLikeSchema, string()])),
+  crop: nullish(cropSchema),
+  hotspot: nullish(hotspotSchema),
+});
+
+export const imageAssetSchema = object({
+  _id: string(),
+  assetId: string(),
+  width: number(),
+  height: number(),
+  extension: string(),
+  vanityName: nullish(string()),
+  transformations: nullish(transformationsSchema),
+});
+
+export function isAssetLike(input: unknown): input is AssetLike {
+  return is(assetLikeSchema, input);
+}
+
+export function isReferenceLike(input: unknown): input is ReferenceLike {
+  return is(referenceLikeSchema, input);
+}
+
+export function isImageObject(input: unknown): input is ImageObject {
+  return is(imageObjectSchema, input);
+}
+
+export function isImageAsset(input: unknown): input is ImageAsset {
+  return is(imageAssetSchema, input);
+}
+
+export function assetIdFromSource(source: ImageSource): string | undefined {
+  return typeof source === "string"
+    ? source
+    : isAssetLike(source)
+      ? source._id
+      : isReferenceLike(source)
+        ? source._ref
+        : isImageObject(source) && source.asset
+          ? assetIdFromSource(source.asset)
+          : undefined;
+}
+
+export function parseAssetId(id: string): ImageAsset | undefined {
+  const matches = id.match(/^image-(\w+)-(\d+)x(\d+)-(\w+)$/);
+  if (matches) {
+    const [, assetId, width, height, extension] = matches;
+    return {
+      _id: id,
+      assetId,
+      width: Number(width),
+      height: Number(height),
+      extension,
+    };
+  }
+}
+
+export function imageAssetFromSource(
+  source: ImageSource,
+): ImageAsset | undefined {
+  if (typeof source === "object" && "assetId" in source) {
+    return source;
+  } else {
+    const id = assetIdFromSource(source);
+    const asset = id ? parseAssetId(id) : undefined;
+    return asset
+      ? imageAssetWithTransformations(asset, {
+          rect:
+            typeof source === "object" && "crop" in source && source.crop
+              ? rectFromCrop(asset, source.crop)
+              : undefined,
+        })
+      : undefined;
+  }
 }

-export function
-
-
-
-
-
+export function imageAssetWithTransformations(
+  asset: ImageAsset,
+  transformations: Transformations,
+): ImageAsset {
+  return {
+    ...asset,
+    transformations: { ...asset.transformations, ...transformations },
+  };
 }
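The ID grammar parsed above is image-<assetId>-<width>x<height>-<extension>; a quick illustration with a made-up ID:

import { parseAssetId } from "./asset";

parseAssetId("image-abc123-1200x800-jpg");
// -> { _id: "image-abc123-1200x800-jpg", assetId: "abc123", width: 1200, height: 800, extension: "jpg" }

parseAssetId("not-an-asset-id");
// -> undefined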
package/src/constants.ts
ADDED
package/src/crop.ts
ADDED

@@ -0,0 +1,14 @@
+import { InferOutput, is, number, object, optional } from "valibot";
+
+export const cropSchema = object({
+  top: optional(number()),
+  bottom: optional(number()),
+  left: optional(number()),
+  right: optional(number()),
+});
+
+export type Crop = InferOutput<typeof cropSchema>;
+
+export function isCrop(input: unknown): input is Crop {
+  return is(cropSchema, input);
+}
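The new schema modules all follow the same shape: a valibot schema, an inferred type, and an is* guard. A brief sketch of the guard narrowing an unknown value (the input object is made up):

import { isCrop, type Crop } from "./crop";

const value: unknown = { top: 0, bottom: 0.25, left: 0.1, right: 0.1 };
if (isCrop(value)) {
  // Narrowed to Crop, so the optional numeric fields can be read safely.
  const crop: Crop = value;
  const bottom = crop.bottom ?? 0;
}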
package/src/hotspot.ts
ADDED

@@ -0,0 +1,14 @@
+import { InferOutput, is, number, object, optional } from "valibot";
+
+export const hotspotSchema = object({
+  x: optional(number()),
+  y: optional(number()),
+  width: optional(number()),
+  height: optional(number()),
+});
+
+export type Hotspot = InferOutput<typeof hotspotSchema>;
+
+export function isHotspot(input: unknown): input is Hotspot {
+  return is(hotspotSchema, input);
+}
package/src/image.ts
ADDED

@@ -0,0 +1,69 @@
+import { ImageAsset, imageAssetWithTransformations } from "./asset";
+import { srcsetWidths } from "./constants";
+import { transformationsToURLSearch } from "./transformations";
+
+export interface SanityClientLike {
+  projectId: string;
+  dataset: string;
+}
+
+export function imageUrl(client: SanityClientLike, asset: ImageAsset): string {
+  const url = new URL(
+    [
+      "https://cdn.sanity.io/images",
+      client.projectId,
+      client.dataset,
+      `${asset.assetId}-${asset.width}x${asset.height}.${asset.extension}`,
+      asset.vanityName,
+    ]
+      .filter(Boolean)
+      .join("/"),
+  );
+  if (asset.transformations) {
+    url.search = transformationsToURLSearch(asset.transformations);
+  }
+  return url.href;
+}
+
+export function imageSrcset(
+  client: SanityClientLike,
+  asset: ImageAsset,
+  widths: number[] = srcsetWidths.default,
+): string | undefined {
+  return widths
+    .sort((a, b) => a - b)
+    .filter((width) => width < asset.width)
+    .map(Math.round)
+    .map((width) => {
+      const url = imageUrl(
+        client,
+        imageAssetWithTransformations(asset, {
+          width,
+        }),
+      );
+      return `${url} ${width}w`;
+    })
+    .join(",");
+}
+
+/**
+ * Calculates the aspect ratio of an image, taking its transformations into account.
+ * @param asset - The asset to calculate the aspect ratio of
+ * @returns The aspect ratio of the image
+ * @todo This function currently ignores the `crop` mode settings including focal point
+ * and min/max height/width.
+ */
+export function imageAspectRatio(asset: ImageAsset): number {
+  const size: [number, number] =
+    asset.transformations &&
+    ["crop", "fill", "fillmax", "scale", "min"].includes(
+      asset.transformations.fit ?? "",
+    ) &&
+    asset.transformations.width != null &&
+    asset.transformations.height != null
+      ? [asset.transformations.width, asset.transformations.height]
+      : asset.transformations?.rect
+        ? asset.transformations.rect.size
+        : [asset.width, asset.height];
+  return size[0] / size[1];
+}
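A short sketch of how imageAspectRatio above chooses its dimensions (the asset values are made up): a cropping fit with explicit width and height takes precedence, then an explicit rect, then the intrinsic size.

import { imageAspectRatio } from "./image";

const asset = {
  _id: "image-abc123-1200x800-jpg",
  assetId: "abc123",
  width: 1200,
  height: 800,
  extension: "jpg",
};

imageAspectRatio(asset);
// -> 1.5 (intrinsic 1200 x 800)

imageAspectRatio({ ...asset, transformations: { fit: "crop", width: 600, height: 600 } });
// -> 1 (explicit width/height with a cropping fit)

imageAspectRatio({ ...asset, transformations: { rect: { pos: [0, 0], size: [960, 600] } } });
// -> 1.6 (rect size)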