hypercore-fetch 8.3.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +226 -0
- package/bin.js +27 -0
- package/index.js +722 -0
- package/package.json +43 -0
- package/test.js +261 -0
package/LICENSE
ADDED
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2019 RangerMauve

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
package/README.md
ADDED
@@ -0,0 +1,226 @@
# hypercore-fetch

Implementation of Fetch that uses the Hyper SDK for loading p2p content

`npm install --save hypercore-fetch`

```javascript
const fetch = require('hypercore-fetch')()

const someURL = `hyper://blog.mauve.moe`

const response = await fetch(`${someURL}/index.json`)

const json = await response.json()

console.log(json)
```

You can also use the bundled CLI:

```
npm i -g hypercore-fetch

hypercore-fetch hyper://somethingorother

# Or

npx hypercore-fetch hyper://somethingorother
```

## API

### `makeFetch({Hyperdrive, resolveURL, base, session, writable}) => fetch()`

Creates a hypercore-fetch instance.

The `base` parameter can be used to specify the base URL for relative paths like `fetch('./dat.json')`.

You can pass in options for the [Dat SDK](https://github.com/datproject/sdk) to have it be auto-created,
or you can pass in both a function matching `const archive = Hyperdrive(key)` and a `const resolved = await resolveURL(url)` function (where `resolved` is an instance of URL; hyper-dns is used by default).

Set `session` to your Electron session if you want to enable setting the `body` of fetch requests to Electron's [UploadData](https://www.electronjs.org/docs/api/structures/upload-data) API in their protocol handlers.

If you don't want to allow write access to archives, pass in `writable: false`.

Typically you don't need to pass in any of these; they're there for more advanced users.

After you've created it, `fetch` will behave like it does in [browsers](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API).

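As a minimal sketch, wiring in custom options might look like this (the factory and resolver bodies are hypothetical placeholders, not part of the package):

```javascript
const makeFetch = require('hypercore-fetch')

const fetch = makeFetch({
  // Any function with this shape works; it must return a Hyperdrive instance
  Hyperdrive: (key) => getDriveSomehow(key),
  // Must resolve to a WHATWG URL instance; hyper-dns is the default
  resolveURL: async (url) => new URL(url),
  // Disallow PUT/POST/DELETE on archives
  writable: false
})
```
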
### `await fetch.close()`

Closes resources for the Dat SDK. This does nothing if you specified the `Hyperdrive` and `resolveURL` options.

### Common Headers

Each response will contain a header for the canonical URL, represented as a `Link` header with `rel=canonical`.

Each response will also contain an `Allow` header listing all the methods currently allowed. If the archive is writable, this will contain `PUT`.

There is also an `ETag` header, which will be a JSON string containing the drive's current `version`. This changes only when the drive has gotten an update of some sort, and it increases monotonically.

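For example, a quick sketch of inspecting these headers on a response (the URL is a placeholder):

```javascript
const response = await fetch('hyper://blog.mauve.moe/')

console.log(response.headers.get('Link')) // <hyper://KEY/>; rel="canonical"
console.log(response.headers.get('Allow')) // e.g. "GET, HEAD"
console.log(JSON.parse(response.headers.get('ETag'))) // the drive version, e.g. "42"
```
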
### `fetch('hyper://NAME/example.txt', {method: 'GET'})`

This will attempt to load `example.txt` from the archive labeled by `NAME`.

It will also load `index.html` files automatically for a folder.
You can find the details about how resolution works in the [resolve-dat-path](https://github.com/RangerMauve/resolve-dat-path/blob/master/index.js#L3) module.

`NAME` can either be the 64 character hex key for an archive, a domain to parse with [dat-dns](https://www.npmjs.com/package/dat-dns), or a name for an archive which allows you to write to it.

The response headers will contain `X-Blocks` for the number of blocks of data this file represents on disk, and `X-Blocks-Downloaded` which is the number of blocks from this file that have been downloaded locally.

### `fetch('hyper://NAME/.well-known/dat', {method: 'GET'})`

This is used by the dat-dns module for resolving DNS domains to `dat://` URLs.

This will return some text containing the `dat://` URL of your archive, followed by a newline and a TTL for the DNS record.

### `fetch('hyper://NAME/example/', {method: 'GET'})`

When doing a `GET` on a directory, you will get a directory listing.

By default it will return a JSON array of files and folders in that directory.

You can differentiate a folder from files by the fact that it ends with a `/`.

You can set the `Accept` header to `text/html` in order to have it return a basic HTML page with links to files and folders in that directory.

e.g.

```json
["example.txt", "posts/", "example2.md"]
```

Files in the directory will be listed under their name; sub-directories will have a `/` appended to them.

`NAME` can either be the 64 character hex key for an archive, a domain to parse with [dat-dns](https://www.npmjs.com/package/dat-dns), or a name for an archive which allows you to write to it.

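A sketch of fetching the same directory as JSON and as HTML (the URL is a placeholder):

```javascript
const dirURL = 'hyper://blog.mauve.moe/posts/'

const listing = await (await fetch(dirURL)).json() // e.g. ['one.md', 'drafts/']

const page = await (await fetch(dirURL, {
  headers: { Accept: 'text/html' }
})).text() // basic HTML index page
```
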
### `fetch('hyper://NAME/example/?noResolve', {method: 'GET'})`

Adding `?noResolve` to a URL will prevent resolving `index.html` files and will attempt to load the path as-is.
This can be useful for listing files in a directory that would normally render as a page.

`NAME` can either be the 64 character hex key for an archive, a domain to parse with [dat-dns](https://www.npmjs.com/package/dat-dns), or a name for an archive which allows you to write to it.

The response headers will contain `X-Blocks` for the number of blocks of data this file represents on disk, and `X-Blocks-Downloaded` which is the number of blocks from this file that have been downloaded locally.

### `fetch('hyper://NAME/', {headers: {'Accept': 'text/event-stream'}})`

Using the `text/event-stream` content type in the `Accept` header will get back an event stream full of `change` events for every time a file at that path changes.

This can be useful if you want to trigger a download every time a file changes.
The `data` for the event will contain the version at the time of the change.

This stream of data can be used with `EventSource` in browsers.

Currently there's no way to watch for changes to specific files, so that should be handled at the application level.

You can also watch for the `download` and `upload` events, which will be emitted whenever you download or upload blocks from the hyperdrive.

The `data` for the event will contain a JSON encoded object with the `index` of the block, and the `source` which is the public key of the hypercore (either the metadata feed of the hyperdrive, or the content feed).

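A sketch of subscribing from a browser context where this fetch backs the `hyper://` protocol (the URL is a placeholder):

```javascript
const source = new EventSource('hyper://blog.mauve.moe/')

source.addEventListener('change', ({ data }) => {
  console.log('Drive changed, now at version', JSON.parse(data))
})

source.addEventListener('download', ({ data }) => {
  const { index, source: feedKey } = JSON.parse(data)
  console.log(`Downloaded block ${index} of feed ${feedKey}`)
})
```
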
### `fetch('hyper://NAME/example.txt', {method: 'PUT', body: 'Hello World'})`

You can add files to archives using a `PUT` method along with a `body`.

The `body` can be either a `String`, an `ArrayBuffer`, a `Blob`, a WHATWG `ReadableStream`, a Node.js `Stream`, or Electron's [UploadData](https://www.electronjs.org/docs/api/structures/upload-data) object (make sure to specify the `session` argument in the `makeFetch` function for Electron support).

`NAME` can either be the 64 character hex key for an archive, a domain to parse with [dat-dns](https://www.npmjs.com/package/dat-dns), or a name for an archive which allows you to write to it.

Your `NAME` will likely be a `name` in most cases, to ensure you have a writable archive.

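A sketch of writing a file and reading it back, assuming the instance was created with `writable: true` and `example` names a writable archive:

```javascript
await fetch('hyper://example/hello.txt', {
  method: 'PUT',
  body: 'Hello World'
})

const text = await (await fetch('hyper://example/hello.txt')).text()
console.log(text) // Hello World
```
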
### `fetch('hyper://NAME/example.txt', {method: 'DELETE'})`

You can delete a file in an archive by using the `DELETE` method.

You cannot delete directories if they are not empty.

`NAME` can either be the 64 character hex key for an archive, a domain to parse with [dat-dns](https://www.npmjs.com/package/dat-dns), or a name for an archive which allows you to write to it.

### `fetch('hyper://NAME/example.txt', {method: 'GET', headers: {'x-download': 'cache'}})`

You can download a file or an entire folder to the local cache using the `x-download` header set to `cache` in a `GET` request.

`NAME` can either be the 64 character hex key for an archive, a domain to parse with [dat-dns](https://www.npmjs.com/package/dat-dns), or a name for an archive which allows you to write to it.

You can use `/` for the path to download the entire contents.

### `fetch('hyper://NAME/example.txt', {method: 'DELETE', headers: {'x-clear': 'cache'}})`

You can clear the data stored in the local cache for a file or folder using the `x-clear` header set to `cache` in a `DELETE` request.

This is the opposite of using `x-download` to download data.

This does not delete data from the archive; it only deletes the cached data from disk.

`NAME` can either be the 64 character hex key for an archive, a domain to parse with [dat-dns](https://www.npmjs.com/package/dat-dns), or a name for an archive which allows you to write to it.

You can use `/` for the path to clear all data for the archive.

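A sketch of pinning an archive locally and later freeing the disk space (the URL is a placeholder):

```javascript
// Download (pin) the entire archive to the local cache
await fetch('hyper://blog.mauve.moe/', {
  headers: { 'x-download': 'cache' }
})

// Later, clear the cached blocks; the archive itself is untouched
await fetch('hyper://blog.mauve.moe/', {
  method: 'DELETE',
  headers: { 'x-clear': 'cache' }
})
```
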
### `fetch('hyper://NAME/$/tags/TAG_NAME', {method: 'PUT'})`

You can tag a version of the archive with a human readable name (like `SPAGHETTI`), represented in the example as `TAG_NAME`, by doing a `PUT` into the special `/$/tags/` folder.

Afterwards you can load the archive at that given version with `hyper://NAME+TAG_NAME`.

E.g.

`PUT hyper://123kjh213kjh123/$/tags/v4.20`
`GET hyper://123kjh213kjh123+v4.20/example.txt`

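As a fetch sketch (the archive name is a placeholder):

```javascript
// Tag the archive's current version
const tagged = await fetch('hyper://example/$/tags/v1.0', { method: 'PUT' })
const version = await tagged.text() // the version number that was tagged

// Read a file as it existed at that tag
const oldFile = await fetch('hyper://example+v1.0/example.txt')
```
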
### `fetch('hyper://NAME/$/tags/', {method: 'GET'})`

You can get a list of all tags by doing a `GET` on the `/$/tags/` folder.

The response will be a JSON object which maps tag names to archive versions.

Use `await response.json()` to get the data out.

e.g.

```json
{
	"tagOne": 1,
	"example": 100000
}
```

### `fetch('hyper://NAME/$/tags/TAG_NAME', {method: 'DELETE'})`

You can delete a given tag with the `DELETE` method on a name within the special `/$/tags/` folder.

Specify the tag you want in the URL, and it'll be removed from the tags list.

### `fetch('hyper://NAME/$/extensions/')`

You can list the current [hypercore extensions](https://github.com/hypercore-protocol/hypercore#ext--feedregisterextensionname-handlers) that are enabled by doing a `GET` on the `/$/extensions/` directory.

This will give you a directory listing with the names of all the extensions.

### `fetch('hyper://NAME/$/extensions/EXTENSION_NAME')`

You can list the peers you are replicating with which have registered this extension by doing a `GET` on the extension's directory.

This is also how you can register an extension that hasn't been registered yet.

The list will be a JSON array of objects that contain the fields `remotePublicKey`, `remoteAddress`, `remoteType`, and `stats`.

### `fetch('hyper://NAME/$/extensions/', {headers: {'Accept': 'text/event-stream'}})`

Using the `text/event-stream` content type in the `Accept` header will get back an event stream with the extension events.

The `event` will be the name of the extension you got the data for, and the `id` (accessible via `e.lastEventId` in `EventSource`) will be set to the ID of the peer that sent it.

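A sketch of listening for extension messages with `EventSource` (the extension and archive names are placeholders):

```javascript
const source = new EventSource('hyper://example/$/extensions/')

source.addEventListener('example', (e) => {
  console.log('Peer', e.lastEventId, 'sent:', e.data)
})
```
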
### `fetch('hyper://NAME/$/extensions/EXTENSION_NAME', {method: 'POST', body: 'Example'})`

You can broadcast an extension message to all peers that are replicating that extension type with a `POST` to the extension's URL.

The `body` of the request will be used as the payload. Please note that only utf8 encoded text is currently supported due to limitations of the event-stream encoding.

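For example, a broadcast sketch (names are placeholders):

```javascript
await fetch('hyper://example/$/extensions/example', {
  method: 'POST',
  body: 'Hello everyone'
})
```
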
### `fetch('hyper://NAME/$/extensions/EXTENSION_NAME/REMOTE_PUBLIC_KEY', {method: 'POST', body: 'Example'})`

You can send an extension message to a specific peer by doing a `POST` to the extension with their remote public key ID.

The `body` of the request will be used as the payload. Please note that only utf8 encoded text is currently supported due to limitations of the event-stream encoding.

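And to a single peer, using a key taken from the peer list above (all names are placeholders):

```javascript
const peerKey = 'REMOTE_PUBLIC_KEY_IN_HEX'

await fetch(`hyper://example/$/extensions/example/${peerKey}`, {
  method: 'POST',
  body: 'Hello specifically'
})
```
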
package/bin.js
ADDED
@@ -0,0 +1,27 @@
#!/usr/bin/env node

const eosp = require('end-of-stream-promise')
const { Readable } = require('streamx')

run()
  .catch((e) => process.nextTick(() => {
    throw e
  }))

async function run () {
  const fetch = require('./')()

  try {
    const url = process.argv[2]

    const response = await fetch(url)

    const stream = Readable.from(response.body)

    stream.pipe(process.stdout)

    await eosp(stream)
  } finally {
    fetch.close()
  }
}
package/index.js
ADDED
@@ -0,0 +1,722 @@
const resolveDatPath = require('resolve-dat-path')
const Headers = require('fetch-headers')
const mime = require('mime/lite')
const SDK = require('hyper-sdk')
const parseRange = require('range-parser')
const makeDir = require('make-dir')
const { Readable } = require('streamx')
const makeFetch = require('make-fetch')
const { EventIterator } = require('event-iterator')

const DEFAULT_TIMEOUT = 5000

const NUMBER_REGEX = /^\d+$/
const PROTOCOL_REGEX = /^\w+:\/\//
const NOT_WRITABLE_ERROR = 'Archive not writable'

const READABLE_ALLOW = ['GET', 'HEAD']
const WRITABLE_ALLOW = ['PUT', 'POST', 'DELETE']
const ALL_ALLOW = READABLE_ALLOW.concat(WRITABLE_ALLOW)

const SPECIAL_FOLDER = '/$/'
const TAGS_FOLDER_NAME = 'tags/'
const TAGS_FOLDER = SPECIAL_FOLDER + TAGS_FOLDER_NAME
const EXTENSIONS_FOLDER_NAME = 'extensions/'
const EXTENSIONS_FOLDER = SPECIAL_FOLDER + EXTENSIONS_FOLDER_NAME
const EXTENSION_EVENT = 'extension-message'

// TODO: Add caching support
const { resolveURL: DEFAULT_RESOLVE_URL } = require('hyper-dns')

module.exports = function makeHyperFetch (opts = {}) {
  let {
    Hyperdrive,
    resolveURL = DEFAULT_RESOLVE_URL,
    base,
    timeout = DEFAULT_TIMEOUT,
    writable = false
  } = opts

  let sdk = null
  let gettingSDK = null
  let onClose = async () => undefined

  const isSourceDat = base && base.startsWith('hyper://')

  const fetch = makeFetch(hyperFetch)

  fetch.close = () => onClose()

  function getExtension (archive, name) {
    const existing = archive.metadata.extensions.get(name)
    if (existing) return existing

    const extension = archive.registerExtension(name, {
      encoding: 'utf8',
      onmessage: (content, peer) => {
        archive.emit(EXTENSION_EVENT, name, content, peer)
      }
    })

    return extension
  }

  function getExtensionPeers (archive, name) {
    // List peers with this extension
    const allPeers = archive.peers
    return allPeers.filter((peer) => {
      const { remoteExtensions } = peer

      if (!remoteExtensions) return false

      const { names } = remoteExtensions

      if (!names) return false

      return names.includes(name)
    })
  }

  function listExtensionNames (archive) {
    return archive.metadata.extensions.names()
  }

  async function loadArchive (key) {
    const Hyperdrive = await getHyperdrive()
    return Hyperdrive(key)
  }

  return fetch

  async function hyperFetch ({ url, headers: rawHeaders, method, signal, body }) {
    const isHyperURL = url.startsWith('hyper://')
    const urlHasProtocol = url.match(PROTOCOL_REGEX)

    const shouldIntercept = isHyperURL || (!urlHasProtocol && isSourceDat)

    if (!shouldIntercept) throw new Error('Invalid protocol, must be hyper://')

    const headers = new Headers(rawHeaders || {})

    const responseHeaders = {}
    responseHeaders['Access-Control-Allow-Origin'] = '*'
    responseHeaders['Allow-CSP-From'] = '*'
    responseHeaders['Access-Control-Allow-Headers'] = '*'

    try {
      let { pathname: path, key, version, searchParams } = parseDatURL(url)
      if (!path) path = '/'
      if (!path.startsWith('/')) path = '/' + path

      try {
        const resolvedURL = await resolveURL(`hyper://${key}`)
        key = resolvedURL.hostname
      } catch (e) {
        // Probably a domain that couldn't resolve
        if (key.includes('.')) throw e
      }

      let archive = await loadArchive(key)

      if (!archive) {
        return {
          statusCode: 404,
          headers: responseHeaders,
          data: intoAsyncIterable('Unknown drive')
        }
      }

      await archive.ready()

      if (!archive.version) {
        if (!archive.peers.length) {
          await new Promise((resolve, reject) => {
            setTimeout(() => reject(new Error('Timed out looking for peers')), timeout)
            archive.once('peer-open', resolve)
          })
        }
        await new Promise((resolve, reject) => {
          archive.metadata.update({ ifAvailable: true }, (err) => {
            if (err) reject(err)
            else resolve()
          })
        })
      }

      if (version) {
        if (NUMBER_REGEX.test(version)) {
          archive = await archive.checkout(version)
        } else {
          archive = await archive.checkout(await archive.getTaggedVersion(version))
        }
        await archive.ready()
      }

      const canonical = `hyper://${archive.key.toString('hex')}${path || ''}`
      responseHeaders.Link = `<${canonical}>; rel="canonical"`

      const isWritable = writable && archive.writable
      const allowHeaders = isWritable ? ALL_ALLOW : READABLE_ALLOW
      responseHeaders.Allow = allowHeaders.join(', ')

      // We can say the file hasn't changed if the drive version hasn't changed
      responseHeaders.ETag = `"${archive.version}"`

      if (path.startsWith(SPECIAL_FOLDER)) {
        if (path === SPECIAL_FOLDER) {
          const files = [
            TAGS_FOLDER_NAME,
            EXTENSIONS_FOLDER_NAME
          ]

          const data = renderFiles(headers, responseHeaders, url, path, files)
          if (method === 'HEAD') {
            return {
              statusCode: 204,
              headers: responseHeaders,
              data: intoAsyncIterable('')
            }
          } else {
            return {
              statusCode: 200,
              headers: responseHeaders,
              data
            }
          }
        } else if (path.startsWith(TAGS_FOLDER)) {
          if (method === 'GET') {
            if (path === TAGS_FOLDER) {
              responseHeaders['x-is-directory'] = 'true'
              const tags = await archive.getAllTags()
              const tagsObject = Object.fromEntries(tags)
              const json = JSON.stringify(tagsObject, null, '\t')

              responseHeaders['Content-Type'] = 'application/json; charset=utf-8'

              return {
                statusCode: 200,
                headers: responseHeaders,
                data: intoAsyncIterable(json)
              }
            } else {
              const tagName = path.slice(TAGS_FOLDER.length)
              try {
                const tagVersion = await archive.getTaggedVersion(tagName)

                return {
                  statusCode: 200,
                  headers: responseHeaders,
                  data: intoAsyncIterable(`${tagVersion}`)
                }
              } catch {
                return {
                  statusCode: 404,
                  headers: responseHeaders,
                  data: intoAsyncIterable('Tag Not Found')
                }
              }
            }
          } else if (method === 'DELETE') {
            checkWritable(archive)
            const tagName = path.slice(TAGS_FOLDER.length)
            await archive.deleteTag(tagName || version)
            responseHeaders.ETag = `"${archive.version}"`

            return {
              statusCode: 200,
              headers: responseHeaders,
              data: intoAsyncIterable('')
            }
          } else if (method === 'PUT') {
            checkWritable(archive)
            const tagName = path.slice(TAGS_FOLDER.length)
            const tagVersion = archive.version

            await archive.createTag(tagName, tagVersion)
            responseHeaders['Content-Type'] = 'text/plain; charset=utf-8'
            responseHeaders.ETag = `"${archive.version}"`

            return {
              statusCode: 200,
              headers: responseHeaders,
              data: intoAsyncIterable(`${tagVersion}`)
            }
          } else if (method === 'HEAD') {
            return {
              statusCode: 204,
              headers: responseHeaders,
              data: intoAsyncIterable('')
            }
          } else {
            return {
              statusCode: 405,
              headers: responseHeaders,
              data: intoAsyncIterable('Method Not Allowed')
            }
          }
        } else if (path.startsWith(EXTENSIONS_FOLDER)) {
          if (path === EXTENSIONS_FOLDER) {
            if (method === 'GET') {
              const accept = headers.get('Accept') || ''
              if (!accept.includes('text/event-stream')) {
                responseHeaders['x-is-directory'] = 'true'

                const extensions = listExtensionNames(archive)
                const data = renderFiles(headers, responseHeaders, url, path, extensions)

                return {
                  statusCode: 200,
                  headers: responseHeaders,
                  data
                }
              }

              const events = new EventIterator(({ push }) => {
                function onMessage (name, content, peer) {
                  const id = peer.remotePublicKey.toString('hex')
                  // TODO: Fancy verification on the `name`?
                  // Send each line of content separately on a `data` line
                  const data = content.split('\n').map((line) => `data:${line}\n`).join('')
                  push(`id:${id}\nevent:${name}\n${data}\n`)
                }
                archive.on(EXTENSION_EVENT, onMessage)
                return () => {
                  archive.removeListener(EXTENSION_EVENT, onMessage)
                }
              })

              responseHeaders['Content-Type'] = 'text/event-stream'

              return {
                statusCode: 200,
                headers: responseHeaders,
                data: events
              }
            } else {
              return {
                statusCode: 405,
                headers: responseHeaders,
                data: intoAsyncIterable('Method Not Allowed')
              }
            }
          } else {
            let extensionName = path.slice(EXTENSIONS_FOLDER.length)
            let extensionPeer = null
            if (extensionName.includes('/')) {
              const split = extensionName.split('/')
              extensionName = split[0]
              if (split[1]) extensionPeer = split[1]
            }
            if (method === 'POST') {
              const extension = getExtension(archive, extensionName)
              if (extensionPeer) {
                const peers = getExtensionPeers(archive, extensionName)
                const peer = peers.find(({ remotePublicKey }) => remotePublicKey.toString('hex') === extensionPeer)
                if (!peer) {
                  return {
                    statusCode: 404,
                    headers: responseHeaders,
                    data: intoAsyncIterable('Peer Not Found')
                  }
                }
                extension.send(await collect(body), peer)
              } else {
                extension.broadcast(await collect(body))
              }
              return {
                statusCode: 200,
                headers: responseHeaders,
                data: intoAsyncIterable('')
              }
            } else if (method === 'GET') {
              const accept = headers.get('Accept') || ''
              if (!accept.includes('text/event-stream')) {
                // Load up the extension into memory
                getExtension(archive, extensionName)

                const extensionPeers = getExtensionPeers(archive, extensionName)
                const finalPeers = formatPeers(extensionPeers)

                const json = JSON.stringify(finalPeers, null, '\t')

                return {
                  statusCode: 200,
                  headers: responseHeaders,
                  data: intoAsyncIterable(json)
                }
              }
            } else {
              return {
                statusCode: 405,
                headers: responseHeaders,
                data: intoAsyncIterable('Method Not Allowed')
              }
            }
          }
        } else {
          return {
            statusCode: 404,
            headers: responseHeaders,
            data: intoAsyncIterable('Not Found')
          }
        }
      }

      if (method === 'PUT') {
        checkWritable(archive)
        if (path.endsWith('/')) {
          await makeDir(path, { fs: archive })
        } else {
          const parentDir = path.split('/').slice(0, -1).join('/')
          if (parentDir) {
            await makeDir(parentDir, { fs: archive })
          }

          const source = Readable.from(body)
          const destination = archive.createWriteStream(path)
          // The sink is needed because Hyperdrive's write stream is duplex

          source.pipe(destination)

          await Promise.race([
            once(source, 'error'),
            once(destination, 'error'),
            once(source, 'end')
          ])
        }
        responseHeaders.ETag = `"${archive.version}"`

        return {
          statusCode: 200,
          headers: responseHeaders,
          data: intoAsyncIterable('')
        }
      } else if (method === 'DELETE') {
        if (headers.get('x-clear') === 'cache') {
          await archive.clear(path)
          return {
            statusCode: 200,
            headers: responseHeaders,
            data: intoAsyncIterable('')
          }
        } else {
          checkWritable(archive)

          const stats = await archive.stat(path)
          // Weird stuff happening up in here...
          const stat = Array.isArray(stats) ? stats[0] : stats

          if (stat.isDirectory()) {
            await archive.rmdir(path)
          } else {
            await archive.unlink(path)
          }
          responseHeaders.ETag = `"${archive.version}"`

          return {
            statusCode: 200,
            headers: responseHeaders,
            data: intoAsyncIterable('')
          }
        }
      } else if ((method === 'GET') || (method === 'HEAD')) {
        if (method === 'GET' && headers.get('Accept') === 'text/event-stream') {
          const contentFeed = await archive.getContent()
          const events = new EventIterator(({ push, fail }) => {
            const watcher = archive.watch(path, () => {
              const event = 'change'
              const data = archive.version
              push({ event, data })
            })
            watcher.on('error', fail)
            function onDownloadMetadata (index) {
              const event = 'download'
              const source = archive.metadata.key.toString('hex')
              const data = { index, source }
              push({ event, data })
            }
            function onUploadMetadata (index) {
              const event = 'upload'
              const source = archive.metadata.key.toString('hex')
              const data = { index, source }
              push({ event, data })
            }

            function onDownloadContent (index) {
              const event = 'download'
              const source = contentFeed.key.toString('hex')
              const data = { index, source }
              push({ event, data })
            }
            function onUploadContent (index) {
              const event = 'upload'
              const source = contentFeed.key.toString('hex')
              const data = { index, source }
              push({ event, data })
            }

            // TODO: Filter out indexes that don't belong to files?

            archive.metadata.on('download', onDownloadMetadata)
            archive.metadata.on('upload', onUploadMetadata)
            contentFeed.on('download', onDownloadContent)
            contentFeed.on('upload', onUploadContent)
            return () => {
              watcher.destroy()
              archive.metadata.removeListener('download', onDownloadMetadata)
              archive.metadata.removeListener('upload', onUploadMetadata)
              contentFeed.removeListener('download', onDownloadContent)
              contentFeed.removeListener('upload', onUploadContent)
            }
          })
          async function * startReader () {
            for await (const { event, data } of events) {
              yield `event:${event}\ndata:${JSON.stringify(data)}\n\n`
            }
          }

          responseHeaders['Content-Type'] = 'text/event-stream'

          return {
            statusCode: 200,
            headers: responseHeaders,
            data: startReader()
          }
        }

        let stat = null
        let finalPath = path

        if (headers.get('x-download') === 'cache') {
          await archive.download(path)
        }

        // Legacy DNS spec from Dat protocol: https://github.com/datprotocol/DEPs/blob/master/proposals/0005-dns.md
        if (finalPath === '/.well-known/dat') {
          const { key } = archive
          const entry = `dat://${key.toString('hex')}\nttl=3600`
          return {
            statusCode: 200,
            headers: responseHeaders,
            data: intoAsyncIterable(entry)
          }
        }

        // New spec from hyper-dns https://github.com/martinheidegger/hyper-dns
        if (finalPath === '/.well-known/hyper') {
          const { key } = archive
          const entry = `hyper://${key.toString('hex')}\nttl=3600`
          return {
            statusCode: 200,
            headers: responseHeaders,
            data: intoAsyncIterable(entry)
          }
        }
        try {
          if (searchParams.has('noResolve')) {
            const stats = await archive.stat(path)
            stat = stats[0]
          } else {
            const resolved = await resolveDatPath(archive, path)
            finalPath = resolved.path
            stat = resolved.stat
          }
        } catch (e) {
          responseHeaders['Content-Type'] = 'text/plain; charset=utf-8'
          return {
            statusCode: 404,
            headers: responseHeaders,
            data: intoAsyncIterable(e.stack)
          }
        }

        responseHeaders['Content-Type'] = getMimeType(finalPath)
        responseHeaders['Last-Modified'] = stat.mtime.toUTCString()

        let data = null
        const isRanged = headers.get('Range') || headers.get('range')
        let statusCode = 200

        if (stat.isDirectory()) {
          responseHeaders['x-is-directory'] = 'true'
          const stats = await archive.readdir(finalPath, { includeStats: true })
          const files = stats.map(({ stat, name }) => (stat.isDirectory() ? `${name}/` : name))

          // Add special directory
          if (finalPath === '/') files.unshift('$/')

          data = renderFiles(headers, responseHeaders, url, path, files)
        } else {
          responseHeaders['Accept-Ranges'] = 'bytes'

          try {
            const { blocks, downloadedBlocks } = await archive.stats(finalPath)
            responseHeaders['X-Blocks'] = `${blocks}`
            responseHeaders['X-Blocks-Downloaded'] = `${downloadedBlocks}`
          } catch (e) {
            // Don't worry about it, it's optional.
          }

          const { size } = stat
          responseHeaders['Content-Length'] = `${size}`

          if (isRanged) {
            const ranges = parseRange(size, isRanged)
            if (ranges && ranges.length && ranges.type === 'bytes') {
              statusCode = 206
              const [{ start, end }] = ranges
              const length = (end - start + 1)
              responseHeaders['Content-Length'] = `${length}`
              responseHeaders['Content-Range'] = `bytes ${start}-${end}/${size}`
              if (method !== 'HEAD') {
                data = archive.createReadStream(finalPath, {
                  start,
                  end
                })
              }
            } else {
              if (method !== 'HEAD') {
                data = archive.createReadStream(finalPath)
              }
            }
          } else if (method !== 'HEAD') {
            data = archive.createReadStream(finalPath)
          }
        }

        if (method === 'HEAD') {
          return {
            statusCode: 204,
            headers: responseHeaders,
            data: intoAsyncIterable('')
          }
        } else {
          return {
            statusCode,
            headers: responseHeaders,
            data
          }
        }
      } else {
        return {
          statusCode: 405,
          headers: responseHeaders,
          data: intoAsyncIterable('Method Not Allowed')
        }
      }
    } catch (e) {
      const isUnauthorized = (e.message === NOT_WRITABLE_ERROR)
      const statusCode = isUnauthorized ? 403 : 500
      const statusText = isUnauthorized ? 'Not Authorized' : 'Server Error'
      return {
        statusCode,
        statusText,
        headers: responseHeaders,
        data: intoAsyncIterable(e.stack)
      }
    }
  }

  function getHyperdrive () {
    if (Hyperdrive) return Hyperdrive
    return getSDK().then(({ Hyperdrive }) => Hyperdrive)
  }

  function getSDK () {
    if (sdk) return Promise.resolve(sdk)
    if (gettingSDK) return gettingSDK
    gettingSDK = SDK(opts).then((gotSDK) => {
      sdk = gotSDK
      gettingSDK = null
      onClose = async () => sdk.close()
      Hyperdrive = sdk.Hyperdrive

      return sdk
    })

    return gettingSDK
  }

  function checkWritable (archive) {
    if (!writable) throw new Error(NOT_WRITABLE_ERROR)
    if (!archive.writable) {
      throw new Error(NOT_WRITABLE_ERROR)
    }
  }
}

function parseDatURL (url) {
  const parsed = new URL(url)
  let key = parsed.hostname
  let version = null
  if (key.includes('+')) [key, version] = key.split('+')

  parsed.key = key
  parsed.version = version

  return parsed
}

async function * intoAsyncIterable (data) {
  yield Buffer.from(data)
}

function getMimeType (path) {
  let mimeType = mime.getType(path) || 'text/plain; charset=utf-8'
  if (mimeType.startsWith('text/')) mimeType = `${mimeType}; charset=utf-8`
  return mimeType
}

function renderDirectory (url, path, files) {
  return `<!DOCTYPE html>
<title>${url}</title>
<meta name="viewport" content="width=device-width, initial-scale=1" />
<h1>Index of ${path}</h1>
<ul>
  <li><a href="../">../</a></li>${files.map((file) => `
  <li><a href="${file}">./${file}</a></li>
`).join('')}
</ul>
`
}

function renderFiles (headers, responseHeaders, url, path, files) {
  if (headers.get('Accept') && headers.get('Accept').includes('text/html')) {
    const page = renderDirectory(url, path, files)
    responseHeaders['Content-Type'] = 'text/html; charset=utf-8'
    return intoAsyncIterable(page)
  } else {
    const json = JSON.stringify(files, null, '\t')
    responseHeaders['Content-Type'] = 'application/json; charset=utf-8'
    return intoAsyncIterable(json)
  }
}

function once (ee, name) {
  return new Promise((resolve, reject) => {
    const isError = name === 'error'
    const cb = isError ? reject : resolve
    ee.once(name, cb)
  })
}

async function collect (source) {
  let buffer = ''

  for await (const chunk of source) {
    buffer += chunk
  }

  return buffer
}

function formatPeers (peers) {
  return peers.map(({ remotePublicKey, remoteAddress, remoteType, stats }) => {
    return {
      remotePublicKey: remotePublicKey.toString('hex'),
      remoteType,
      remoteAddress,
      stats
    }
  })
}
package/package.json
ADDED
@@ -0,0 +1,43 @@
{
  "name": "hypercore-fetch",
  "version": "8.3.2",
  "description": "Implementation of Fetch that uses the Dat SDK for loading p2p content",
  "bin": {
    "hypercore-fetch": "bin.js"
  },
  "main": "index.js",
  "scripts": {
    "test": "node test"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/RangerMauve/hypercore-fetch.git"
  },
  "keywords": [
    "dat",
    "fetch"
  ],
  "author": "RangerMauve",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/RangerMauve/hypercore-fetch/issues"
  },
  "homepage": "https://github.com/RangerMauve/hypercore-fetch#readme",
  "dependencies": {
    "event-iterator": "^2.0.0",
    "fetch-headers": "^2.0.0",
    "hyper-dns": "^0.12.0",
    "hyper-sdk": "^3.0.8",
    "make-dir": "^3.1.0",
    "make-fetch": "^2.2.1",
    "mime": "^2.4.4",
    "range-parser": "^1.2.1",
    "resolve-dat-path": "^2.0.0",
    "sodium-universal": "^3.0.2",
    "streamx": "^2.10.0"
  },
  "devDependencies": {
    "random-access-memory": "^3.1.1",
    "tape": "^5.2.2"
  }
}
package/test.js
ADDED
@@ -0,0 +1,261 @@
const SDK = require('hyper-sdk')
const test = require('tape')

runTests()

const SAMPLE_CONTENT = 'Hello World'

async function runTests () {
  const { Hyperdrive, close } = await SDK({
    persist: false
  })

  const { Hyperdrive: Hyperdrive2, close: close2 } = await SDK({
    persist: false
  })

  const fetch = require('./')({
    Hyperdrive,
    writable: true
  })

  const fetch2 = require('./')({
    Hyperdrive: Hyperdrive2,
    writable: true
  })

  test.onFinish(() => {
    close()
    close2()
  })

  test('Read index.html', async (t) => {
    const archive = Hyperdrive('example1')

    const FILE_LOCATION = '/index.html'
    const FILE_DATA = '<h1>Hello World!</h1>'

    await archive.writeFile(FILE_LOCATION, FILE_DATA)

    const url = `hyper://${archive.key.toString('hex')}${FILE_LOCATION}`

    t.pass('Prepped archive ' + url)

    const response = await fetch(url)

    t.pass('got response')

    const text = await response.text()

    t.pass('got response text')

    const contentType = response.headers.get('content-type')

    t.equal(contentType, 'text/html; charset=utf-8')
    t.equal(text, FILE_DATA)
    t.pass('Headers ' + [...response.headers.entries()])
  })

  test('GET .well-known/dat', async (t) => {
    const response = await fetch('hyper://example/.well-known/dat')
    t.ok(response, 'Got response')
    t.equal(response.status, 200, 'Got OK response code')
    const text = await response.text()
    t.ok(text.startsWith('dat://'), 'Returned dat URL')
  })

  test('GET .well-known/hyper', async (t) => {
    const response = await fetch('hyper://example/.well-known/hyper')
    t.ok(response, 'Got response')
    t.equal(response.status, 200, 'Got OK response code')
    const text = await response.text()
    t.ok(text.startsWith('hyper://'), 'Returned hyper URL')
  })

  test('PUT file', async (t) => {
    const response1 = await fetch('hyper://example/checkthis.txt', { method: 'PUT', body: SAMPLE_CONTENT })

    t.equal(response1.status, 200, 'Got OK response on write')

    const response2 = await fetch('hyper://example/checkthis.txt')

    t.equal(response2.status, 200, 'Got OK response on read')

    t.equal(await response2.text(), SAMPLE_CONTENT, 'Read back written data')
  })

  test('PUT directory', async (t) => {
    const response1 = await fetch('hyper://example/foo/bar/', { method: 'PUT' })

    t.equal(response1.status, 200, 'Got OK response on directory creation')
  })

  test('PUT file in new directory', async (t) => {
    const response1 = await fetch('hyper://example/fizz/buzz/example.txt', { method: 'PUT', body: SAMPLE_CONTENT })

    t.equal(response1.status, 200, 'Got OK response on directory/file creation')
  })

  test('PUT to overwrite a file', async (t) => {
    const response1 = await fetch('hyper://example/baz/index.html', { method: 'PUT', body: SAMPLE_CONTENT })
    t.ok(response1.ok)
    const response2 = await fetch('hyper://example/baz/index.html', { method: 'PUT', body: SAMPLE_CONTENT })

    t.equal(response2.status, 200, 'Got OK response on file overwrite')
  })

  test('DELETE file', async (t) => {
    const response1 = await fetch('hyper://example/test.txt', { method: 'PUT', body: SAMPLE_CONTENT })
    t.ok(response1.ok)

    const response2 = await fetch('hyper://example/test.txt', { method: 'DELETE' })

    t.equal(response2.status, 200, 'Got OK response on file delete')

    const response3 = await fetch('hyper://example/test.txt', { method: 'GET' })

    t.equal(response3.status, 404, 'Got not found on deleted file')
  })

  test('GET index.html', async (t) => {
    const response1 = await fetch('hyper://example/baz')

    t.equal(await response1.text(), SAMPLE_CONTENT, 'Got index.html content')

    const response2 = await fetch('hyper://example/baz?noResolve')

    t.equal(response2.headers.get('content-type'), 'application/json; charset=utf-8', 'noResolve flag yields JSON by default')
    t.deepEqual(await response2.json(), ['index.html'], 'Listed directory')

    const response3 = await fetch('hyper://example/baz?noResolve')
    t.equal(response3.headers.get('content-type'), 'application/json; charset=utf-8', 'noResolve flag yields JSON by default')
    t.deepEqual(await response3.json(), ['index.html'], 'Listed directory')
  })

  test('Create and read tags', async (t) => {
    await fetch('hyper://example/test.txt', { method: 'PUT', body: SAMPLE_CONTENT })

    const response2 = await fetch('hyper://example/$/tags/tag1', { method: 'PUT' })
    t.ok(response2.ok, 'Able to create tag')

    const version = await response2.json()

    const response3 = await fetch('hyper://example/$/tags/')

    t.ok(response3.ok, 'Able to ask for tags')
    t.deepEqual(await response3.json(), { tag1: version }, 'Tag got created')

    // Insert a file which won't be available with the old tag
    await fetch('hyper://example/notaccessible.txt', { method: 'PUT', body: 'test' })

    const response4 = await fetch('hyper://example+tag1/notaccessible.txt')

    t.equal(response4.status, 404, 'Newer file not found in older tag')

    const response5 = await fetch('hyper://example/$/tags/tag1', { method: 'DELETE' })

    t.ok(response5.ok, 'Able to delete tag')

    const response6 = await fetch('hyper://example/$/tags/')

    t.deepEqual(await response6.json(), {}, 'No tags left after delete')
  })

  test('Load Mauve\'s blog', async (t) => {
    const response = await fetch('hyper://blog.mauve.moe/')

    t.ok(response.ok, 'Successfully loaded homepage')
  })

  test('Watch for changes', async (t) => {
    const response = await fetch('hyper://example/', {
      headers: {
        Accept: 'text/event-stream'
      }
    })

    t.ok(response.ok, 'Able to open request')
    t.equal(response.headers.get('Content-Type'), 'text/event-stream', 'Response is event stream')

    const reader = await response.body.getReader()

    const [data] = await Promise.all([
      reader.read(),
      fetch('hyper://example/example4.txt', { method: 'PUT', body: 'Hello World' })
    ])

    t.ok(data.value, 'Got eventsource data after writing')
    t.ok(data.value.includes('event:change'), 'Eventsource data represents a change event')
    t.ok(data.value.endsWith('\n\n'), 'Ends with two newlines')

    await reader.cancel()
  })

  test('Send extension from one peer to another', async (t) => {
    const domainResponse = await fetch('hyper://example/.well-known/hyper')
    const domain = (await domainResponse.text()).split('\n')[0]

    const extensionURL = `${domain}/$/extensions/example`
    const extensionListURL = `${domain}/$/extensions/`

    // Load up extension message on peer 1
    await fetch(extensionURL)
    // Load up extension message on peer 2
    await fetch2(extensionURL)

    t.pass('Able to initialize extensions')

    const extensionListRequest = await fetch(extensionListURL)
    const extensionList = await extensionListRequest.json()

    // Extension list will always be alphabetically sorted
    t.deepEqual(extensionList, ['example', 'hypertrie'], 'Got expected list of extensions')

    // Wait a bit for them to connect
    // TODO: Peers API
    await delay(2000)

    const peerResponse1 = await fetch(extensionURL)
    const peerList1 = await peerResponse1.json()

    t.equal(peerList1.length, 1, 'Got one peer for extension message on peer1')

    const peerResponse2 = await fetch2(extensionURL)
    const peerList2 = await peerResponse2.json()

    t.equal(peerList2.length, 1, 'Got one peer for extension message on peer2')

    const eventRequest = await fetch(extensionListURL, {
      headers: {
        Accept: 'text/event-stream'
      }
    })

    t.ok(eventRequest.ok, 'Able to open request')
    t.equal(eventRequest.headers.get('Content-Type'), 'text/event-stream', 'Response is event stream')

    const reader = await eventRequest.body.getReader()

    const toRead = reader.read()

    await delay(500)

    const broadcastRequest = await fetch2(extensionURL, { method: 'POST', body: 'Hello World' })

    t.ok(broadcastRequest.ok, 'Able to broadcast to peers')

    const data = await toRead

    t.ok(data.value, 'Got eventsource data after writing')
    t.ok(data.value.includes('event:example\n'), 'EventSource data represents an example event')
    t.ok(data.value.includes('data:Hello World\n'), 'EventSource data contains expected body')
    t.ok(data.value.includes('id:'), 'EventSource data contains an ID')
    t.ok(data.value.endsWith('\n\n'), 'Ends with two newlines')

    await reader.cancel()
  })
}

function delay (time) {
  return new Promise((resolve) => setTimeout(resolve, time))
}