@helia/verified-fetch 0.0.0-28d62f7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +4 -0
- package/README.md +531 -0
- package/dist/index.min.js +118 -0
- package/dist/src/index.d.ts +539 -0
- package/dist/src/index.d.ts.map +1 -0
- package/dist/src/index.js +505 -0
- package/dist/src/index.js.map +1 -0
- package/dist/src/singleton.d.ts +3 -0
- package/dist/src/singleton.d.ts.map +1 -0
- package/dist/src/singleton.js +15 -0
- package/dist/src/singleton.js.map +1 -0
- package/dist/src/types.d.ts +2 -0
- package/dist/src/types.d.ts.map +1 -0
- package/dist/src/types.js +2 -0
- package/dist/src/types.js.map +1 -0
- package/dist/src/utils/dag-cbor-to-safe-json.d.ts +7 -0
- package/dist/src/utils/dag-cbor-to-safe-json.d.ts.map +1 -0
- package/dist/src/utils/dag-cbor-to-safe-json.js +37 -0
- package/dist/src/utils/dag-cbor-to-safe-json.js.map +1 -0
- package/dist/src/utils/get-content-disposition-filename.d.ts +6 -0
- package/dist/src/utils/get-content-disposition-filename.d.ts.map +1 -0
- package/dist/src/utils/get-content-disposition-filename.js +16 -0
- package/dist/src/utils/get-content-disposition-filename.js.map +1 -0
- package/dist/src/utils/get-e-tag.d.ts +28 -0
- package/dist/src/utils/get-e-tag.d.ts.map +1 -0
- package/dist/src/utils/get-e-tag.js +18 -0
- package/dist/src/utils/get-e-tag.js.map +1 -0
- package/dist/src/utils/get-stream-from-async-iterable.d.ts +10 -0
- package/dist/src/utils/get-stream-from-async-iterable.d.ts.map +1 -0
- package/dist/src/utils/get-stream-from-async-iterable.js +38 -0
- package/dist/src/utils/get-stream-from-async-iterable.js.map +1 -0
- package/dist/src/utils/get-tar-stream.d.ts +4 -0
- package/dist/src/utils/get-tar-stream.d.ts.map +1 -0
- package/dist/src/utils/get-tar-stream.js +46 -0
- package/dist/src/utils/get-tar-stream.js.map +1 -0
- package/dist/src/utils/parse-resource.d.ts +18 -0
- package/dist/src/utils/parse-resource.d.ts.map +1 -0
- package/dist/src/utils/parse-resource.js +24 -0
- package/dist/src/utils/parse-resource.js.map +1 -0
- package/dist/src/utils/parse-url-string.d.ts +32 -0
- package/dist/src/utils/parse-url-string.d.ts.map +1 -0
- package/dist/src/utils/parse-url-string.js +115 -0
- package/dist/src/utils/parse-url-string.js.map +1 -0
- package/dist/src/utils/responses.d.ts +5 -0
- package/dist/src/utils/responses.d.ts.map +1 -0
- package/dist/src/utils/responses.js +27 -0
- package/dist/src/utils/responses.js.map +1 -0
- package/dist/src/utils/select-output-type.d.ts +12 -0
- package/dist/src/utils/select-output-type.d.ts.map +1 -0
- package/dist/src/utils/select-output-type.js +148 -0
- package/dist/src/utils/select-output-type.js.map +1 -0
- package/dist/src/utils/tlru.d.ts +15 -0
- package/dist/src/utils/tlru.d.ts.map +1 -0
- package/dist/src/utils/tlru.js +40 -0
- package/dist/src/utils/tlru.js.map +1 -0
- package/dist/src/utils/walk-path.d.ts +13 -0
- package/dist/src/utils/walk-path.d.ts.map +1 -0
- package/dist/src/utils/walk-path.js +17 -0
- package/dist/src/utils/walk-path.js.map +1 -0
- package/dist/src/verified-fetch.d.ts +59 -0
- package/dist/src/verified-fetch.d.ts.map +1 -0
- package/dist/src/verified-fetch.js +408 -0
- package/dist/src/verified-fetch.js.map +1 -0
- package/package.json +197 -0
- package/src/index.ts +596 -0
- package/src/singleton.ts +20 -0
- package/src/types.ts +1 -0
- package/src/utils/dag-cbor-to-safe-json.ts +44 -0
- package/src/utils/get-content-disposition-filename.ts +18 -0
- package/src/utils/get-e-tag.ts +36 -0
- package/src/utils/get-stream-from-async-iterable.ts +45 -0
- package/src/utils/get-tar-stream.ts +68 -0
- package/src/utils/parse-resource.ts +40 -0
- package/src/utils/parse-url-string.ts +154 -0
- package/src/utils/responses.ts +29 -0
- package/src/utils/select-output-type.ts +167 -0
- package/src/utils/tlru.ts +52 -0
- package/src/utils/walk-path.ts +34 -0
- package/src/verified-fetch.ts +509 -0
package/src/utils/get-e-tag.ts
@@ -0,0 +1,36 @@
+import type { RequestFormatShorthand } from '../types.js'
+import type { CID } from 'multiformats/cid'
+
+interface GetETagArg {
+  cid: CID
+  reqFormat?: RequestFormatShorthand
+  rangeStart?: number
+  rangeEnd?: number
+  /**
+   * Weak Etag is used when we can't guarantee byte-for-byte-determinism (generated, or mutable content).
+   * Some examples:
+   * - IPNS requests
+   * - CAR streamed with blocks in non-deterministic order
+   * - TAR streamed with files in non-deterministic order
+   */
+  weak?: boolean
+}
+
+/**
+ * etag
+ * you need to wrap cid with ""
+ * we use strong Etags for immutable responses and weak one (prefixed with W/ ) for mutable/generated ones (ipns and generated HTML).
+ * block and car responses should have different etag than deserialized one, so you can add some prefix like we do in existing gateway
+ *
+ * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/ETag
+ * @see https://specs.ipfs.tech/http-gateways/path-gateway/#etag-response-header
+ */
+export function getETag ({ cid, reqFormat, weak, rangeStart, rangeEnd }: GetETagArg): string {
+  const prefix = weak === true ? 'W/' : ''
+  let suffix = reqFormat == null ? '' : `.${reqFormat}`
+  if (rangeStart != null || rangeEnd != null) {
+    suffix += `.${rangeStart ?? '0'}-${rangeEnd ?? 'N'}`
+  }
+
+  return `${prefix}"${cid.toString()}${suffix}"`
+}
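A hedged usage sketch for the helper above (not part of the published files; the CID is only an example value and the expected strings are worked out by hand from the logic in getETag):

import { CID } from 'multiformats/cid'
import { getETag } from './get-e-tag.js'

const cid = CID.parse('bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3oclgtqy55fbzdi')

// strong ETag for an immutable, deserialized response: just the CID wrapped in quotes
console.info(getETag({ cid })) // '"bafybeig...fbzdi"'

// weak ETag for generated content (a CAR stream of a byte range): W/ prefix plus format and range suffixes
console.info(getETag({ cid, reqFormat: 'car', weak: true, rangeStart: 0, rangeEnd: 100 })) // 'W/"bafybeig...fbzdi.car.0-100"'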
package/src/utils/get-stream-from-async-iterable.ts
@@ -0,0 +1,45 @@
+import { CustomProgressEvent } from 'progress-events'
+import type { VerifiedFetchInit } from '../index.js'
+import type { ComponentLogger } from '@libp2p/interface'
+
+/**
+ * Converts an async iterator of Uint8Array bytes to a stream and returns the first chunk of bytes
+ */
+export async function getStreamFromAsyncIterable (iterator: AsyncIterable<Uint8Array>, path: string, logger: ComponentLogger, options?: Pick<VerifiedFetchInit, 'onProgress'>): Promise<{ stream: ReadableStream<Uint8Array>, firstChunk: Uint8Array }> {
+  const log = logger.forComponent('helia:verified-fetch:get-stream-from-async-iterable')
+  const reader = iterator[Symbol.asyncIterator]()
+  const { value: firstChunk, done } = await reader.next()
+
+  if (done === true) {
+    log.error('No content found for path', path)
+    throw new Error('No content found')
+  }
+
+  const stream = new ReadableStream({
+    async start (controller) {
+      // the initial value is already available
+      options?.onProgress?.(new CustomProgressEvent<void>('verified-fetch:request:progress:chunk'))
+      controller.enqueue(firstChunk)
+    },
+    async pull (controller) {
+      const { value, done } = await reader.next()
+
+      if (done === true) {
+        if (value != null) {
+          options?.onProgress?.(new CustomProgressEvent<void>('verified-fetch:request:progress:chunk'))
+          controller.enqueue(value)
+        }
+        controller.close()
+        return
+      }
+
+      options?.onProgress?.(new CustomProgressEvent<void>('verified-fetch:request:progress:chunk'))
+      controller.enqueue(value)
+    }
+  })
+
+  return {
+    stream,
+    firstChunk
+  }
+}
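A hedged sketch of how this conversion behaves, using a hand-rolled async generator in place of UnixFS file content and defaultLogger from @libp2p/logger (an assumption; the real callers pass Helia's ComponentLogger), with a placeholder path:

import { defaultLogger } from '@libp2p/logger'
import { getStreamFromAsyncIterable } from './get-stream-from-async-iterable.js'

async function * content (): AsyncGenerator<Uint8Array> {
  yield Uint8Array.from([0x68, 0x65])       // 'he'
  yield Uint8Array.from([0x6c, 0x6c, 0x6f]) // 'llo'
}

const { firstChunk, stream } = await getStreamFromAsyncIterable(content(), '/ipfs/bafy.../hello.txt', defaultLogger())

// firstChunk ('he') is available immediately, e.g. for content-type sniffing,
// while the stream replays it followed by the remaining chunks
await new Response(stream).text() // 'hello'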
package/src/utils/get-tar-stream.ts
@@ -0,0 +1,68 @@
+import { CodeError } from '@libp2p/interface'
+import { exporter, recursive, type UnixFSEntry } from 'ipfs-unixfs-exporter'
+import map from 'it-map'
+import { pipe } from 'it-pipe'
+import { pack, type TarEntryHeader, type TarImportCandidate } from 'it-tar'
+import type { AbortOptions } from '@libp2p/interface'
+import type { Blockstore } from 'interface-blockstore'
+
+const EXPORTABLE = ['file', 'raw', 'directory']
+
+function toHeader (file: UnixFSEntry): Partial<TarEntryHeader> & { name: string } {
+  let mode: number | undefined
+  let mtime: Date | undefined
+
+  if (file.type === 'file' || file.type === 'directory') {
+    mode = file.unixfs.mode
+    mtime = file.unixfs.mtime != null ? new Date(Number(file.unixfs.mtime.secs * 1000n)) : undefined
+  }
+
+  return {
+    name: file.path,
+    mode,
+    mtime,
+    size: Number(file.size),
+    type: file.type === 'directory' ? 'directory' : 'file'
+  }
+}
+
+function toTarImportCandidate (entry: UnixFSEntry): TarImportCandidate {
+  if (!EXPORTABLE.includes(entry.type)) {
+    throw new CodeError('Not a UnixFS node', 'ERR_NOT_UNIXFS')
+  }
+
+  const candidate: TarImportCandidate = {
+    header: toHeader(entry)
+  }
+
+  if (entry.type === 'file' || entry.type === 'raw') {
+    candidate.body = entry.content()
+  }
+
+  return candidate
+}
+
+export async function * tarStream (ipfsPath: string, blockstore: Blockstore, options?: AbortOptions): AsyncGenerator<Uint8Array> {
+  const file = await exporter(ipfsPath, blockstore, options)
+
+  if (file.type === 'file' || file.type === 'raw') {
+    yield * pipe(
+      [toTarImportCandidate(file)],
+      pack()
+    )
+
+    return
+  }
+
+  if (file.type === 'directory') {
+    yield * pipe(
+      recursive(ipfsPath, blockstore, options),
+      (source) => map(source, (entry) => toTarImportCandidate(entry)),
+      pack()
+    )
+
+    return
+  }
+
+  throw new CodeError('Not a UnixFS node', 'ERR_NOT_UNIXFS')
+}
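A hedged sketch of driving the generator above from a Helia node; the path is a placeholder for a UnixFS directory already present in the blockstore:

import { createHelia } from 'helia'
import { tarStream } from './get-tar-stream.js'

const helia = await createHelia()

for await (const chunk of tarStream('/ipfs/bafy...exampledir', helia.blockstore)) {
  // each chunk is a Uint8Array of tar-encoded bytes, suitable for writing to a
  // file or for enqueueing into the ReadableStream used as a Response body
  console.info('tar chunk of %d bytes', chunk.byteLength)
}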
package/src/utils/parse-resource.ts
@@ -0,0 +1,40 @@
+import { CID } from 'multiformats/cid'
+import { parseUrlString } from './parse-url-string.js'
+import type { ParsedUrlStringResults } from './parse-url-string.js'
+import type { Resource } from '../index.js'
+import type { IPNS, IPNSRoutingEvents, ResolveDnsLinkProgressEvents, ResolveProgressEvents } from '@helia/ipns'
+import type { ComponentLogger } from '@libp2p/interface'
+import type { ProgressOptions } from 'progress-events'
+
+export interface ParseResourceComponents {
+  ipns: IPNS
+  logger: ComponentLogger
+}
+
+export interface ParseResourceOptions extends ProgressOptions<ResolveProgressEvents | IPNSRoutingEvents | ResolveDnsLinkProgressEvents> {
+
+}
+/**
+ * Handles the different use cases for the `resource` argument.
+ * The resource can represent an IPFS path, IPNS path, or CID.
+ * If the resource represents an IPNS path, we need to resolve it to a CID.
+ */
+export async function parseResource (resource: Resource, { ipns, logger }: ParseResourceComponents, options?: ParseResourceOptions): Promise<ParsedUrlStringResults> {
+  if (typeof resource === 'string') {
+    return parseUrlString({ urlString: resource, ipns, logger }, { onProgress: options?.onProgress })
+  }
+
+  const cid = CID.asCID(resource)
+
+  if (cid != null) {
+    // an actual CID
+    return {
+      cid,
+      protocol: 'ipfs',
+      path: '',
+      query: {}
+    }
+  }
+
+  throw new TypeError(`Invalid resource. Cannot determine CID from resource: ${resource}`)
+}
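A hedged sketch of the two accepted resource shapes; it assumes the ipns() factory from @helia/ipns and the ComponentLogger exposed on a Helia node can be used to supply the components, and the IPNS name is only illustrative:

import { ipns } from '@helia/ipns'
import { createHelia } from 'helia'
import { CID } from 'multiformats/cid'
import { parseResource } from './parse-resource.js'

const helia = await createHelia()
const components = { ipns: ipns(helia), logger: helia.logger }

// a CID instance is returned as-is, with protocol 'ipfs', an empty path and an empty query
await parseResource(CID.parse('bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3oclgtqy55fbzdi'), components)

// a string is delegated to parseUrlString, so ipfs:// and ipns:// URLs (including DNSLink names) are handled there
await parseResource('ipns://docs.ipfs.tech/concepts/', components)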
package/src/utils/parse-url-string.ts
@@ -0,0 +1,154 @@
+import { peerIdFromString } from '@libp2p/peer-id'
+import { CID } from 'multiformats/cid'
+import { TLRU } from './tlru.js'
+import type { RequestFormatShorthand } from '../types.js'
+import type { IPNS, IPNSRoutingEvents, ResolveDnsLinkProgressEvents, ResolveProgressEvents, ResolveResult } from '@helia/ipns'
+import type { ComponentLogger } from '@libp2p/interface'
+import type { ProgressOptions } from 'progress-events'
+
+const ipnsCache = new TLRU<ResolveResult>(1000)
+
+export interface ParseUrlStringInput {
+  urlString: string
+  ipns: IPNS
+  logger: ComponentLogger
+}
+export interface ParseUrlStringOptions extends ProgressOptions<ResolveProgressEvents | IPNSRoutingEvents | ResolveDnsLinkProgressEvents> {
+
+}
+
+export interface ParsedUrlQuery extends Record<string, string | unknown> {
+  format?: RequestFormatShorthand
+  download?: boolean
+  filename?: string
+}
+
+export interface ParsedUrlStringResults {
+  protocol: string
+  path: string
+  cid: CID
+  query: ParsedUrlQuery
+}
+
+const URL_REGEX = /^(?<protocol>ip[fn]s):\/\/(?<cidOrPeerIdOrDnsLink>[^/$?]+)\/?(?<path>[^$?]*)\??(?<queryString>.*)$/
+
+/**
+ * A function that parses ipfs:// and ipns:// URLs, returning an object with easily recognizable properties.
+ *
+ * After determining the protocol successfully, we process the cidOrPeerIdOrDnsLink:
+ * * If it's ipfs, it parses the CID or throws an Aggregate error
+ * * If it's ipns, it attempts to resolve the PeerId and then the DNSLink. If both fail, an Aggregate error is thrown.
+ */
+export async function parseUrlString ({ urlString, ipns, logger }: ParseUrlStringInput, options?: ParseUrlStringOptions): Promise<ParsedUrlStringResults> {
+  const log = logger.forComponent('helia:verified-fetch:parse-url-string')
+  const match = urlString.match(URL_REGEX)
+
+  if (match == null || match.groups == null) {
+    throw new TypeError(`Invalid URL: ${urlString}, please use ipfs:// or ipns:// URLs only.`)
+  }
+
+  const { protocol, cidOrPeerIdOrDnsLink, path: urlPath, queryString } = match.groups
+
+  let cid: CID | undefined
+  let resolvedPath: string | undefined
+  const errors: Error[] = []
+
+  if (protocol === 'ipfs') {
+    try {
+      cid = CID.parse(cidOrPeerIdOrDnsLink)
+    } catch (err) {
+      log.error(err)
+      errors.push(new TypeError('Invalid CID for ipfs://<cid> URL'))
+    }
+  } else {
+    let resolveResult = ipnsCache.get(cidOrPeerIdOrDnsLink)
+
+    if (resolveResult != null) {
+      cid = resolveResult.cid
+      resolvedPath = resolveResult.path
+      log.trace('resolved %s to %c from cache', cidOrPeerIdOrDnsLink, cid)
+    } else {
+      // protocol is ipns
+      log.trace('Attempting to resolve PeerId for %s', cidOrPeerIdOrDnsLink)
+      let peerId = null
+
+      try {
+        peerId = peerIdFromString(cidOrPeerIdOrDnsLink)
+        resolveResult = await ipns.resolve(peerId, { onProgress: options?.onProgress })
+        cid = resolveResult?.cid
+        resolvedPath = resolveResult?.path
+        log.trace('resolved %s to %c', cidOrPeerIdOrDnsLink, cid)
+        ipnsCache.set(cidOrPeerIdOrDnsLink, resolveResult, 60 * 1000 * 2)
+      } catch (err) {
+        if (peerId == null) {
+          log.error('Could not parse PeerId string "%s"', cidOrPeerIdOrDnsLink, err)
+          errors.push(new TypeError(`Could not parse PeerId in ipns url "${cidOrPeerIdOrDnsLink}", ${(err as Error).message}`))
+        } else {
+          log.error('Could not resolve PeerId %c', peerId, err)
+          errors.push(new TypeError(`Could not resolve PeerId "${cidOrPeerIdOrDnsLink}", ${(err as Error).message}`))
+        }
+      }
+
+      if (cid == null) {
+        log.trace('Attempting to resolve DNSLink for %s', cidOrPeerIdOrDnsLink)
+
+        try {
+          resolveResult = await ipns.resolveDns(cidOrPeerIdOrDnsLink, { onProgress: options?.onProgress })
+          cid = resolveResult?.cid
+          resolvedPath = resolveResult?.path
+          log.trace('resolved %s to %c', cidOrPeerIdOrDnsLink, cid)
+          ipnsCache.set(cidOrPeerIdOrDnsLink, resolveResult, 60 * 1000 * 2)
+        } catch (err) {
+          log.error('Could not resolve DnsLink for "%s"', cidOrPeerIdOrDnsLink, err)
+          errors.push(err as Error)
+        }
+      }
+    }
+  }
+
+  if (cid == null) {
+    throw new AggregateError(errors, `Invalid resource. Cannot determine CID from URL "${urlString}"`)
+  }
+
+  // parse query string
+  const query: Record<string, any> = {}
+
+  if (queryString != null && queryString.length > 0) {
+    const queryParts = queryString.split('&')
+    for (const part of queryParts) {
+      const [key, value] = part.split('=')
+      query[key] = decodeURIComponent(value)
+    }
+
+    if (query.download != null) {
+      query.download = query.download === 'true'
+    }
+
+    if (query.filename != null) {
+      query.filename = query.filename.toString()
+    }
+  }
+
+  /**
+   * join the path from resolve result & given path.
+   * e.g. /ipns/<peerId>/ that is resolved to /ipfs/<cid>/<path1>, when requested as /ipns/<peerId>/<path2>, should be
+   * resolved to /ipfs/<cid>/<path1>/<path2>
+   */
+  const pathParts = []
+
+  if (urlPath.length > 0) {
+    pathParts.push(urlPath)
+  }
+
+  if (resolvedPath != null && resolvedPath.length > 0) {
+    pathParts.push(resolvedPath)
+  }
+  const path = pathParts.join('/')
+
+  return {
+    protocol,
+    cid,
+    path,
+    query
+  }
+}
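A hedged sketch of the parser's output for an immutable ipfs:// URL with a sub-path and query string (IPNS names and DNSLinks additionally go through the resolvers and the two-minute TLRU cache above); the Helia and @helia/ipns wiring is an assumption:

import { ipns } from '@helia/ipns'
import { createHelia } from 'helia'
import { parseUrlString } from './parse-url-string.js'

const helia = await createHelia()

const result = await parseUrlString({
  urlString: 'ipfs://bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3oclgtqy55fbzdi/docs/readme.md?format=car&download=true',
  ipns: ipns(helia),
  logger: helia.logger
})

// result.protocol === 'ipfs'
// result.cid is the parsed CID
// result.path === 'docs/readme.md'
// result.query is { format: 'car', download: true } (download is coerced to a boolean)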
package/src/utils/responses.ts
@@ -0,0 +1,29 @@
+export function okResponse (body?: BodyInit | null): Response {
+  return new Response(body, {
+    status: 200,
+    statusText: 'OK'
+  })
+}
+
+export function notSupportedResponse (body?: BodyInit | null): Response {
+  const response = new Response(body, {
+    status: 501,
+    statusText: 'Not Implemented'
+  })
+  response.headers.set('X-Content-Type-Options', 'nosniff') // see https://specs.ipfs.tech/http-gateways/path-gateway/#x-content-type-options-response-header
+  return response
+}
+
+export function notAcceptableResponse (body?: BodyInit | null): Response {
+  return new Response(body, {
+    status: 406,
+    statusText: 'Not Acceptable'
+  })
+}
+
+export function badRequestResponse (body?: BodyInit | null): Response {
+  return new Response(body, {
+    status: 400,
+    statusText: 'Bad Request'
+  })
+}
package/src/utils/select-output-type.ts
@@ -0,0 +1,167 @@
+import { code as dagCborCode } from '@ipld/dag-cbor'
+import { code as dagJsonCode } from '@ipld/dag-json'
+import { code as dagPbCode } from '@ipld/dag-pb'
+import { code as jsonCode } from 'multiformats/codecs/json'
+import { code as rawCode } from 'multiformats/codecs/raw'
+import type { RequestFormatShorthand } from '../types.js'
+import type { CID } from 'multiformats/cid'
+
+/**
+ * This maps supported response types for each codec supported by verified-fetch
+ */
+const CID_TYPE_MAP: Record<number, string[]> = {
+  [dagCborCode]: [
+    'application/json',
+    'application/vnd.ipld.dag-cbor',
+    'application/cbor',
+    'application/vnd.ipld.dag-json',
+    'application/octet-stream',
+    'application/vnd.ipld.raw',
+    'application/vnd.ipfs.ipns-record',
+    'application/vnd.ipld.car'
+  ],
+  [dagJsonCode]: [
+    'application/json',
+    'application/vnd.ipld.dag-cbor',
+    'application/cbor',
+    'application/vnd.ipld.dag-json',
+    'application/octet-stream',
+    'application/vnd.ipld.raw',
+    'application/vnd.ipfs.ipns-record',
+    'application/vnd.ipld.car'
+  ],
+  [jsonCode]: [
+    'application/json',
+    'application/vnd.ipld.dag-cbor',
+    'application/cbor',
+    'application/vnd.ipld.dag-json',
+    'application/octet-stream',
+    'application/vnd.ipld.raw',
+    'application/vnd.ipfs.ipns-record',
+    'application/vnd.ipld.car'
+  ],
+  [dagPbCode]: [
+    'application/octet-stream',
+    'application/json',
+    'application/vnd.ipld.dag-cbor',
+    'application/cbor',
+    'application/vnd.ipld.dag-json',
+    'application/vnd.ipld.raw',
+    'application/vnd.ipfs.ipns-record',
+    'application/vnd.ipld.car',
+    'application/x-tar'
+  ],
+  [rawCode]: [
+    'application/octet-stream',
+    'application/vnd.ipld.raw',
+    'application/vnd.ipfs.ipns-record',
+    'application/vnd.ipld.car',
+    'application/x-tar'
+  ]
+}
+
+/**
+ * Selects an output mime-type based on the CID and a passed `Accept` header
+ */
+export function selectOutputType (cid: CID, accept?: string): string | undefined {
+  const cidMimeTypes = CID_TYPE_MAP[cid.code]
+
+  if (accept != null) {
+    return chooseMimeType(accept, cidMimeTypes)
+  }
+}
+
+function chooseMimeType (accept: string, validMimeTypes: string[]): string | undefined {
+  const requestedMimeTypes = accept
+    .split(',')
+    .map(s => {
+      const parts = s.trim().split(';')
+
+      return {
+        mimeType: `${parts[0]}`.trim(),
+        weight: parseQFactor(parts[1])
+      }
+    })
+    .sort((a, b) => {
+      if (a.weight === b.weight) {
+        return 0
+      }
+
+      if (a.weight > b.weight) {
+        return -1
+      }
+
+      return 1
+    })
+    .map(s => s.mimeType)
+
+  for (const headerFormat of requestedMimeTypes) {
+    for (const mimeType of validMimeTypes) {
+      if (headerFormat.includes(mimeType)) {
+        return mimeType
+      }
+
+      if (headerFormat === '*/*') {
+        return mimeType
+      }
+
+      if (headerFormat.startsWith('*/') && mimeType.split('/')[1] === headerFormat.split('/')[1]) {
+        return mimeType
+      }
+
+      if (headerFormat.endsWith('/*') && mimeType.split('/')[0] === headerFormat.split('/')[0]) {
+        return mimeType
+      }
+    }
+  }
+}
+
+/**
+ * Parses q-factor weighting from the accept header to allow letting some mime
+ * types take precedence over others.
+ *
+ * If the q-factor for an acceptable mime representation is omitted it defaults
+ * to `1`.
+ *
+ * All specified values should be in the range 0-1.
+ *
+ * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept#q
+ */
+function parseQFactor (str?: string): number {
+  if (str != null) {
+    str = str.trim()
+  }
+
+  if (str == null || !str.startsWith('q=')) {
+    return 1
+  }
+
+  const factor = parseFloat(str.replace('q=', ''))
+
+  if (isNaN(factor)) {
+    return 0
+  }
+
+  return factor
+}
+
+const FORMAT_TO_MIME_TYPE: Record<RequestFormatShorthand, string> = {
+  raw: 'application/vnd.ipld.raw',
+  car: 'application/vnd.ipld.car',
+  'dag-json': 'application/vnd.ipld.dag-json',
+  'dag-cbor': 'application/vnd.ipld.dag-cbor',
+  json: 'application/json',
+  cbor: 'application/cbor',
+  'ipns-record': 'application/vnd.ipfs.ipns-record',
+  tar: 'application/x-tar'
+}
+
+/**
+ * Converts a `format=...` query param to a mime type as would be found in the
+ * `Accept` header, if a valid mapping is available
+ */
+export function queryFormatToAcceptHeader (format?: RequestFormatShorthand): string | undefined {
+  if (format != null) {
+    return FORMAT_TO_MIME_TYPE[format]
+  }
+}
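A hedged sketch of the content negotiation above for a dag-cbor block; acceptable types are keyed by the CID's codec, and q-factors decide the order in which they are tried:

import * as dagCbor from '@ipld/dag-cbor'
import { CID } from 'multiformats/cid'
import { sha256 } from 'multiformats/hashes/sha2'
import { queryFormatToAcceptHeader, selectOutputType } from './select-output-type.js'

const bytes = dagCbor.encode({ hello: 'world' })
const cid = CID.createV1(dagCbor.code, await sha256.digest(bytes))

console.info(selectOutputType(cid, 'application/vnd.ipld.dag-cbor;q=0.5, application/json'))
// -> 'application/json' (weight 1 beats weight 0.5)

console.info(selectOutputType(cid, '*/*'))
// -> 'application/json' (the first type registered for the dag-cbor codec)

console.info(queryFormatToAcceptHeader('car'))
// -> 'application/vnd.ipld.car'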
package/src/utils/tlru.ts
@@ -0,0 +1,52 @@
+import hashlru from 'hashlru'
+
+/**
+ * Time Aware Least Recent Used Cache
+ *
+ * @see https://arxiv.org/pdf/1801.00390
+ */
+export class TLRU<T> {
+  private readonly lru: ReturnType<typeof hashlru>
+
+  constructor (maxSize: number) {
+    this.lru = hashlru(maxSize)
+  }
+
+  get (key: string): T | undefined {
+    const value = this.lru.get(key)
+
+    if (value != null) {
+      if (value.expire != null && value.expire < Date.now()) {
+        this.lru.remove(key)
+
+        return undefined
+      }
+
+      return value.value
+    }
+
+    return undefined
+  }
+
+  set (key: string, value: T, ttl: number): void {
+    this.lru.set(key, { value, expire: Date.now() + ttl })
+  }
+
+  has (key: string): boolean {
+    const value = this.get(key)
+
+    if (value != null) {
+      return true
+    }
+
+    return false
+  }
+
+  remove (key: string): void {
+    this.lru.remove(key)
+  }
+
+  clear (): void {
+    this.lru.clear()
+  }
+}
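A hedged usage sketch of the same pattern parse-url-string.ts uses for caching IPNS answers: a bounded LRU whose entries also carry an absolute expiry time (the key and value below are placeholders):

import { TLRU } from './tlru.js'

const cache = new TLRU<string>(100)

// evicted either by LRU pressure (more than 100 keys) or once the 2 minute TTL elapses
cache.set('k51qzi5uqu5d...', '/ipfs/bafy.../index.html', 2 * 60 * 1000)

cache.get('k51qzi5uqu5d...') // the cached value while the TTL holds, undefined afterwards
cache.has('missing-key')     // false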
package/src/utils/walk-path.ts
@@ -0,0 +1,34 @@
+import { walkPath as exporterWalk, type ExporterOptions, type ReadableStorage, type UnixFSEntry } from 'ipfs-unixfs-exporter'
+import type { CID } from 'multiformats/cid'
+
+export interface PathWalkerOptions extends ExporterOptions {
+
+}
+export interface PathWalkerResponse {
+  ipfsRoots: CID[]
+  terminalElement: UnixFSEntry
+
+}
+
+export interface PathWalkerFn {
+  (blockstore: ReadableStorage, path: string, options?: PathWalkerOptions): Promise<PathWalkerResponse>
+}
+
+export async function walkPath (blockstore: ReadableStorage, path: string, options?: PathWalkerOptions): Promise<PathWalkerResponse> {
+  const ipfsRoots: CID[] = []
+  let terminalElement: UnixFSEntry | undefined
+
+  for await (const entry of exporterWalk(path, blockstore, options)) {
+    ipfsRoots.push(entry.cid)
+    terminalElement = entry
+  }
+
+  if (terminalElement == null) {
+    throw new Error('No terminal element found')
+  }
+
+  return {
+    ipfsRoots,
+    terminalElement
+  }
+}
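A hedged sketch of resolving a path through a DAG held in a Helia blockstore (the path is a placeholder); path gateways typically expose the collected roots via the X-Ipfs-Roots response header:

import { createHelia } from 'helia'
import { walkPath } from './walk-path.js'

const helia = await createHelia()

const { ipfsRoots, terminalElement } = await walkPath(helia.blockstore, 'bafy...rootdir/images/logo.png')

// ipfsRoots: every CID visited from the root directory down to logo.png
// terminalElement: the UnixFS entry for logo.png itself; for file entries, terminalElement.content() yields its bytes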