@fireproof/core 0.8.0 → 0.10.1-dev

Files changed (57)
  1. package/README.md +5 -184
  2. package/dist/fireproof.browser.js +18879 -0
  3. package/dist/fireproof.browser.js.map +7 -0
  4. package/dist/fireproof.cjs.js +9305 -0
  5. package/dist/fireproof.cjs.js.map +7 -0
  6. package/dist/fireproof.esm.js +9295 -0
  7. package/dist/fireproof.esm.js.map +7 -0
  8. package/package.json +57 -105
  9. package/dist/blockstore.js +0 -268
  10. package/dist/clock.js +0 -459
  11. package/dist/crypto.js +0 -63
  12. package/dist/database.js +0 -434
  13. package/dist/db-index.js +0 -403
  14. package/dist/encrypted-block.js +0 -48
  15. package/dist/fireproof.js +0 -84
  16. package/dist/import.js +0 -29
  17. package/dist/listener.js +0 -111
  18. package/dist/loader.js +0 -13
  19. package/dist/prolly.js +0 -405
  20. package/dist/remote.js +0 -102
  21. package/dist/sha1.js +0 -74
  22. package/dist/src/fireproof.d.ts +0 -472
  23. package/dist/src/fireproof.js +0 -81191
  24. package/dist/src/fireproof.js.map +0 -1
  25. package/dist/src/fireproof.mjs +0 -81186
  26. package/dist/src/fireproof.mjs.map +0 -1
  27. package/dist/storage/base.js +0 -426
  28. package/dist/storage/blocksToEncryptedCarBlock.js +0 -144
  29. package/dist/storage/browser.js +0 -62
  30. package/dist/storage/filesystem.js +0 -67
  31. package/dist/storage/rest.js +0 -57
  32. package/dist/storage/ucan.js +0 -0
  33. package/dist/storage/utils.js +0 -144
  34. package/dist/sync.js +0 -218
  35. package/dist/utils.js +0 -16
  36. package/dist/valet.js +0 -102
  37. package/src/blockstore.js +0 -283
  38. package/src/clock.js +0 -486
  39. package/src/crypto.js +0 -70
  40. package/src/database.js +0 -469
  41. package/src/db-index.js +0 -426
  42. package/src/encrypted-block.js +0 -57
  43. package/src/fireproof.js +0 -98
  44. package/src/import.js +0 -34
  45. package/src/link.d.ts +0 -3
  46. package/src/loader.js +0 -16
  47. package/src/prolly.js +0 -445
  48. package/src/remote.js +0 -113
  49. package/src/sha1.js +0 -83
  50. package/src/storage/base.js +0 -463
  51. package/src/storage/browser.js +0 -67
  52. package/src/storage/filesystem.js +0 -73
  53. package/src/storage/rest.js +0 -59
  54. package/src/storage/ucan.js +0 -0
  55. package/src/storage/utils.js +0 -152
  56. package/src/sync.js +0 -237
  57. package/src/valet.js +0 -105
package/dist/prolly.js DELETED
@@ -1,405 +0,0 @@
- import { advance, EventFetcher, EventBlock, findCommonAncestorWithSortedEvents, findEventsToSync, vis as visClock } from './clock.js';
- // import { create, load } from '../../../../prolly-trees/src/map.js'
- // @ts-ignore
- import { create, load } from 'prolly-trees/map';
- // @ts-ignore
- import { nocache as cache } from 'prolly-trees/cache';
- // @ts-ignore
- import { CIDCounter, bf, simpleCompare as compare } from 'prolly-trees/utils';
- import * as codec from '@ipld/dag-cbor';
- import { sha256 as hasher } from 'multiformats/hashes/sha2';
- // import { blake2b256 as hasher } from '@multiformats/blake2/blake2b'
- import { doTransaction } from './blockstore.js';
- import { create as createBlock } from 'multiformats/block';
- const blockOpts = { cache, chunker: bf(30), codec, hasher, compare };
- // const SYNC_ROOT = 'fireproof' // change this if you want to break sync
- /**
-  * @typedef {import('./blockstore.js').TransactionBlockstore} TransactionBlockstore
-  */
- // const withLog = async (label, fn) => {
- // const resp = await fn()
- // // console.log('withLog', label, !!resp)
- // return resp
- // }
- // should also return a CIDCounter
- export const makeGetBlock = blocks => {
-   // const cids = new CIDCounter() // this could be used for proofs of mutations
-   const getBlockFn = async (address) => {
-     // const { cid, bytes } = await withLog(address, () => blocks.get(address))
-     const { cid, bytes } = await blocks.get(address);
-     // cids.add({ address: cid })
-     return createBlock({ cid, bytes, hasher, codec });
-   };
-   return {
-     // cids,
-     getBlock: getBlockFn
-   };
- };
- /**
-  *
-  * @param {*} param0
-  * @returns
-  */
- async function createAndSaveNewEvent({ inBlocks, bigPut, root, event: inEvent, head, additions, removals = [] }) {
-   let cids;
-   const { key, value, del } = inEvent;
-   // console.log('createAndSaveNewEvent', root.constructor.name, root.entryList)
-   // root = await root.block
-   const data = {
-     root: root
-       ? (await root.address)
-       : null,
-     key
-   };
-   // import('./clock').EventLink<import('./clock').EventData>
-   if (del) {
-     data.value = null;
-     data.type = 'del';
-   }
-   else {
-     data.value = value;
-     data.type = 'put';
-   }
-   // console.log('head', head)
-   // if (head.length === 0) {
-   // // create an empty prolly root
-   // let emptyRoot
-   // for await (const node of create({ get: getBlock, list: [{ key: '_sync', value: SYNC_ROOT }], ...blockOpts })) {
-   // emptyRoot = await node.block
-   // bigPut(emptyRoot)
-   // }
-   // console.log('emptyRoot', emptyRoot)
-   // const first = await EventBlock.create(
-   // {
-   // root: emptyRoot.cid,
-   // key: null,
-   // value: null,
-   // type: 'del'
-   // },
-   // []
-   // )
-   // bigPut(first)
-   // head = [first.cid]
-   // }
-   /** @type {import('./clock').EventData} */
-   // @ts-ignore
-   const event = await EventBlock.create(data, head);
-   bigPut(event);
-   ({ head, cids } = await advance(inBlocks, head, event.cid));
-   return {
-     root,
-     additions,
-     removals,
-     head,
-     clockCIDs: cids,
-     event
-   };
- }
- const makeGetAndPutBlock = inBlocks => {
-   // const mblocks = new MemoryBlockstore()
-   // const blocks = new MultiBlockFetcher(mblocks, inBlocks)
-   const { getBlock, cids } = makeGetBlock(inBlocks);
-   // const put = inBlocks.put.bind(inBlocks)
-   const bigPut = async (block, additions) => {
-     // console.log('bigPut', block.cid.toString())
-     const { cid, bytes } = block;
-     inBlocks.put(cid, bytes);
-     // mblocks.putSync(cid, bytes)
-     if (additions) {
-       additions.set(cid.toString(), block);
-     }
-   };
-   return { getBlock, bigPut, blocks: inBlocks, cids };
- };
- const bulkFromEvents = (sorted, event) => {
-   if (event) {
-     const update = { value: { data: { key: event.key } } };
-     if (event.del) {
-       update.value.data.type = 'del';
-     }
-     else {
-       update.value.data.type = 'put';
-       update.value.data.value = event.value;
-     }
-     sorted.push(update);
-   }
-   const bulk = new Map();
-   for (const { value: event } of sorted) {
-     const { data: { type, value, key } } = event;
-     if (!key) {
-       throw new Error('key is required');
-     }
-     const bulkEvent = type === 'put' ? { key, value } : { key, del: true };
-     bulk.set(bulkEvent.key, bulkEvent); // last wins
-   }
-   return Array.from(bulk.values());
- };
- // Get the value of the root from the ancestor event
- /**
-  *
-  * @param {EventFetcher} events
-  * @param {import('./clock').EventLink<import('./clock').EventData>} ancestor
-  * @param {*} getBlock
-  * @returns
-  */
- const prollyRootFromAncestor = async (events, ancestor, getBlock) => {
-   // console.log('prollyRootFromAncestor', ancestor)
-   const event = await events.get(ancestor);
-   const { root } = event.value.data;
-   if (root) {
-     return load({ cid: root, get: getBlock, ...blockOpts });
-   }
-   else {
-     // console.log('no root', root) // false means no common ancestor. null means empty database.
-     return root;
-   }
- };
- const doProllyBulk = async (inBlocks, head, event, doFull = false) => {
-   const { getBlock, blocks } = makeGetAndPutBlock(inBlocks); // this is doubled with eventfetcher
-   let bulkSorted = [];
-   let prollyRootNode = null;
-   const events = new EventFetcher(blocks);
-   if (head.length) {
-     if (!doFull && head.length === 1) {
-       prollyRootNode = await prollyRootFromAncestor(events, head[0], getBlock);
-     }
-     else {
-       // Otherwise, we find the common ancestor and update the root and other blocks
-       // todo this is returning more events than necessary, lets define the desired semantics from the top down
-       // good semantics mean we can cache the results of this call
-       // const {cids, events : bulkSorted } = await findEventsToSync(blocks, head)
-       const { ancestor, sorted } = await findCommonAncestorWithSortedEvents(events, head, doFull);
-       bulkSorted = sorted;
-       // console.log('sorted', !!ancestor, JSON.stringify(sorted.map(({ value: { data: { key, value } } }) => ({ key, value }))))
-       if (ancestor) {
-         prollyRootNode = await prollyRootFromAncestor(events, ancestor, getBlock);
-         // if (!prollyRootNode) {
-         // prollyRootNode = await bigMerge(events, head, getBlock)
-         // // throw new Error('no common ancestor')
-         // }
-       }
-       // console.log('event', event)
-     }
-   }
-   const bulkOperations = bulkFromEvents(bulkSorted, event);
-   // if prolly root node is null, we need to create a new one
-   if (!prollyRootNode) {
-     // console.log('make new root', bulkOperations.length)
-     let root;
-     // let rootNode
-     const newBlocks = [];
-     // if all operations are deletes, we can just return an empty root
-     if (bulkOperations.every(op => op.del)) {
-       return { root: null, blocks: [], clockCIDs: await events.all() };
-     }
-     for await (const node of create({ get: getBlock, list: bulkOperations, ...blockOpts })) {
-       // root = await node.block
-       root = node;
-       newBlocks.push(await node.block);
-     }
-     // throw new Error('not root time')
-     // root.isThisOne = 'yes'
-     // console.log('made new root', root.constructor.name, root.block.cid.toString())
-     return { root, blocks: newBlocks, clockCIDs: await events.all() };
-   }
-   else {
-     const writeResp = await prollyRootNode.bulk(bulkOperations); // { root: newProllyRootNode, blocks: newBlocks }
-     writeResp.clockCIDs = await events.all();
-     return writeResp;
-   }
- };
- /**
-  * Put a value (a CID) for the given key. If the key exists it's value is overwritten.
-  *
-  * @param {import('./blockstore.js').Blockstore} inBlocks Bucket block storage.
-  * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
-  * @param {{key: string, value: import('./clock').EventLink<import('./clock').EventData>}} event The key of the value to put.
-  * @param {object} [options]
-  * @returns {Promise<any>}
-  */
- export async function put(inBlocks, head, event, options) {
-   const { bigPut } = makeGetAndPutBlock(inBlocks);
-   // console.log('major put')
-   // If the head is empty, we create a new event and return the root and addition blocks
-   if (!head.length) {
-     const additions = new Map();
-     const { root, blocks } = await doProllyBulk(inBlocks, head, event);
-     for (const b of blocks) {
-       bigPut(b, additions);
-     }
-     return createAndSaveNewEvent({ inBlocks, bigPut, root, event, head, additions: Array.from(additions.values()) });
-   }
-   const { root: newProllyRootNode, blocks: newBlocks } = await doProllyBulk(inBlocks, head, event);
-   if (!newProllyRootNode) {
-     return createAndSaveNewEvent({
-       inBlocks,
-       bigPut,
-       root: null,
-       event,
-       head,
-       additions: []
-     });
-   }
-   else {
-     const prollyRootBlock = await newProllyRootNode.block;
-     const additions = new Map(); // ; const removals = new Map()
-     bigPut(prollyRootBlock, additions);
-     for (const nb of newBlocks) {
-       bigPut(nb, additions);
-     }
-     // additions are new blocks
-     return createAndSaveNewEvent({
-       inBlocks,
-       bigPut,
-       root: newProllyRootNode,
-       event,
-       head,
-       additions: Array.from(additions.values()) /*, todo? Array.from(removals.values()) */
-     });
-   }
- }
- /**
-  * Determine the effective prolly root given the current merkle clock head.
-  *
-  * @param {TransactionBlockstore} inBlocks Bucket block storage.
-  * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
-  */
- export async function root(inBlocks, head, doFull = false) {
-   if (!head.length) {
-     throw new Error('no head');
-   }
-   // console.log('root', head.map(h => h.toString()))
-   // todo maybe these should go to a temp blockstore?
-   return await doTransaction('root', inBlocks, async (transactionBlocks) => {
-     const { bigPut } = makeGetAndPutBlock(transactionBlocks);
-     const { root: newProllyRootNode, blocks: newBlocks, clockCIDs } = await doProllyBulk(inBlocks, head, null, doFull);
-     //
-     // const rootBlock = await newProllyRootNode.block
-     // bigPut(rootBlock)
-     for (const nb of newBlocks) {
-       bigPut(nb);
-     }
-     // console.log('root root', newProllyRootNode.constructor.name, newProllyRootNode)
-     return { clockCIDs, node: newProllyRootNode, head };
-   }, false);
-   // return { clockCIDs, node: newProllyRootNode }
- }
- /**
-  * Get the list of events not known by the `since` event
-  * @param {TransactionBlockstore} blocks Bucket block storage.
-  * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
-  * @param {import('./clock').EventLink<import('./clock').EventData>} since Event to compare against.
-  * @returns {Promise<{clockCIDs: CIDCounter, result: import('./clock').EventData[]}>}
-  */
- export async function eventsSince(blocks, head, since) {
-   if (!head.length) {
-     // throw new Error('no head')
-     return { clockCIDs: [], result: [] };
-   }
-   // @ts-ignore
-   const sinceHead = [...since, ...head].map(h => h.toString()); // ?
-   // console.log('eventsSince', sinceHead.map(h => h.toString()))
-   const { cids, events: unknownSorted3 } = await findEventsToSync(blocks, sinceHead);
-   return { clockCIDs: cids, result: unknownSorted3.map(({ value: { data } }) => data) };
- }
- /**
-  *
-  * @param {TransactionBlockstore} blocks Bucket block storage.
-  * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
-  *
-  * @returns {Promise<{root: any, cids: CIDCounter, clockCIDs: CIDCounter, result: import('./clock').EventData[]}>}
-  *
-  */
- export async function getAll(blocks, head, rootCache = null, doFull = false) {
-   if (!head.length) {
-     return { root: null, clockCIDs: new CIDCounter(), cids: new CIDCounter(), result: [] };
-   }
-   const { node: prollyRootNode, clockCIDs } = await rootOrCache(blocks, head, rootCache, doFull);
-   if (!prollyRootNode) {
-     return { root: null, clockCIDs, cids: new CIDCounter(), result: [] };
-   }
-   const { result, cids } = await prollyRootNode.getAllEntries(); // todo params
-   return { root: prollyRootNode, clockCIDs, cids, result: result.map(({ key, value }) => ({ key, value })) };
- }
- async function rootOrCache(blocks, head, rootCache, doFull = false) {
-   let node;
-   let clockCIDs;
-   if (!doFull && rootCache && rootCache.root) {
-     // console.log('get root from cache', rootCache)
-     node = rootCache.root;
-     clockCIDs = rootCache.clockCIDs;
-   }
-   else {
-     // console.log('finding root')
-     // const callTag = Math.random().toString(36).substring(7)
-     // console.time(callTag + '.root')
-     //
-     // const prevClock = [...this.clock]
-     ;
-     ({ node, clockCIDs } = await root(blocks, head, doFull));
-     // this.applyClock(prevClock, result.head)
-     // await this.notifyListeners([decodedEvent])
-     // console.timeEnd(callTag + '.root')
-     // console.log('found root', node.entryList)
-   }
-   return { node, clockCIDs };
- }
- /**
-  * @param {TransactionBlockstore} blocks Bucket block storage.
-  * @param {import('./clock').EventLink<import('./clock').EventData>[]} head Merkle clock head.
-  * @param {string} key The key of the value to retrieve.
-  */
- export async function get(blocks, head, key, rootCache = null) {
-   // instead pass root from db? and always update on change
-   if (!head.length) {
-     return { cids: new CIDCounter(), result: null };
-   }
-   const { node: prollyRootNode, clockCIDs } = await rootOrCache(blocks, head, rootCache);
-   if (!prollyRootNode) {
-     return { clockCIDs, cids: new CIDCounter(), result: null };
-   }
-   const { result, cids } = await prollyRootNode.get(key);
-   return { result, cids, clockCIDs, root: prollyRootNode };
- }
- export async function* vis(blocks, head) {
-   if (!head.length) {
-     return { cids: new CIDCounter(), result: null };
-   }
-   const { node: prollyRootNode, cids } = await root(blocks, head);
-   const lines = [];
-   for await (const line of prollyRootNode.vis()) {
-     yield line;
-     lines.push(line);
-   }
-   return { vis: lines.join('\n'), cids };
- }
- export async function visMerkleTree(blocks, head) {
-   // if (!head) return
-   if (head && !Array.isArray(head)) {
-     const getBl = makeGetBlock(blocks);
-     const prollyRootNode = await load({
-       cid: head,
-       get: getBl.getBlock,
-       ...blockOpts
-     });
-     const lines = [];
-     for await (const line of prollyRootNode.vis()) {
-       lines.push(line);
-     }
-     return { vis: lines.join('\n'), cids: new CIDCounter() };
-   }
-   const { node: prollyRootNode, cids } = await root(blocks, head);
-   const lines = [];
-   for await (const line of prollyRootNode.vis()) {
-     lines.push(line);
-   }
-   return { vis: lines.join('\n'), cids };
- }
- export async function visMerkleClock(blocks, head) {
-   const lines = [];
-   for await (const line of visClock(blocks, head)) {
-     // yield line
-     lines.push(line);
-   }
-   return { vis: lines.join('\n') };
- }
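
Context for the deletion above: prolly.js maintained the document index as a prolly-tree addressed by a Merkle clock head. The sketch below shows how its exported put and get were typically driven, wrapped in doTransaction the same way root() does. It is a hedged illustration, not code from the package: TransactionBlockstore's constructor arguments are not shown in this diff (the name argument is an assumption), and the putDoc helper, 'my-db', and 'doc1' are illustrative names only.

import { TransactionBlockstore, doTransaction } from './blockstore.js'
import { put, get } from './prolly.js'

// assumption: the blockstore is constructible with a database name, as in the
// (also deleted) blockstore.js; adjust to the real constructor if it differs
const blocks = new TransactionBlockstore('my-db')
let head = [] // an empty Merkle clock head means an empty database

// put() builds or updates the prolly root, stores the new tree blocks,
// appends a clock event, and returns the advanced head
async function putDoc (key, value) {
  await doTransaction('putDoc', blocks, async tblocks => {
    const { head: newHead } = await put(tblocks, head, { key, value })
    head = newHead
  })
}

await putDoc('doc1', { hello: 'world' })
// get() resolves the prolly root for the current head and looks up one key
const { result } = await get(blocks, head, 'doc1')
console.log(result) // { hello: 'world' }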
package/dist/remote.js DELETED
@@ -1,102 +0,0 @@
- // when you call database.connect(email)
- // it will return a promise that resolves when the user is logged in
- // and sends you an email
- import { create } from '@web3-storage/w3up-client';
- import * as w3clock from '@web3-storage/clock/client';
- import { CID } from 'multiformats';
- export class Remote {
-   client = null;
-   name = 'unset';
-   config = {};
-   constructor(database, name, config) {
-     this.name = name;
-     this.config = config;
-     this.database = database;
-   }
-   async clock(cid) {
-     // const did = this.client.currentSpace()
-     const agent = this.client.agent();
-     const head = await w3clock.head({ issuer: agent, with: agent.did(), proofs: [] });
-     console.log('head', head, JSON.stringify(head.root.data.ocm.out));
-     const headCids = head.root.data.ocm.out.ok.head;
-     const blocks = await Promise.all([this.database.blocks.get(CID.parse(cid)),
-       ...headCids.map(c => this.database.blocks.get(c))]);
-     console.log('blocks', blocks);
-     const adv = await w3clock.advance({ issuer: agent, with: agent.did(), proofs: [] }, CID.parse(cid), { blocks });
-     console.log('adv', adv, JSON.stringify(adv.root.data.ocm.out));
-     return { head, adv };
-   }
-   async sync(cid) {
-     // fetch the remote clock headCids using w3clock.head
-     const agent = this.client.agent();
-     const head = await w3clock.head({ issuer: agent, with: agent.did(), proofs: [] });
-     console.log('head', head, JSON.stringify(head.root.data.ocm.out));
-     const headCids = head.root.data.ocm.out.ok.head;
-     const lastSyncHead = await this.database.blocks.valet.primary.getLastSynced();
-     console.log('lastSyncHead', lastSyncHead);
-     const headSet = new Set(headCids.map(c => c.toString()));
-     const lastSyncSet = new Set(lastSyncHead.map(c => c.toString()));
-     // are they the same?
-     const same = headSet.size === lastSyncSet.size && [...headSet].every(value => lastSyncSet.has(value));
-     // if the headCids and the lastSyncHead are the same, we are in sync and can push
-     if (same) {
-       const currentHead = this.database.clock;
-       const currentHeadSet = new Set(currentHead.map(c => c.toString()));
-       console.log('synced with cloud', headSet, lastSyncSet);
-       // are they the same?
-       const currentSame = headSet.size === currentHeadSet.size && [...headSet].every(value => currentHeadSet.has(value));
-       if (currentSame) {
-         // we are in sync, do nothing
-         return true;
-       }
-       else {
-         console.log('push to cloud', headSet, currentHeadSet);
-         // we are ahead of the remote, push our clock
-         // const lastCompact = this.database.blocks.valet.primary.getLastCompact()
-         // get a compact since the last sync
-         console.log('we are ahead of the remote, push our clock');
-         // const compact = this.database.blocks.valet.primary.getCompactSince(lastSyncHead)
-       }
-     }
-     else {
-       // we are behind, fetch the remote
-       console.log('we are behind, fetch the remote');
-     }
-     // if it is the same as the local (current metadata carcid? `newValetCidCar` / sync clock), do nothing, we are in sync
-     // if it is the same as our previously pushed clock event, but our local clock is ahead of it, we need to push our clock
-     // - we can store the previous clock event cid in the metadata
-     // - sending our updates:
-     // - get the _last_sync and _last_compact values from our metadata
-     // - if last sync is after last compact
-     // - make a merged car file for the syncs
-     // - else
-     // - upload the car file for the last compact
-     // - make a merge car file for any uncompacted car files since the last compact, it should base its cidMap on the compact car file (as we go the sync stream will need to track it's own cidMap)
-     // - if there is only one car file, it is the merge car file (already based on last compact)
-     // - upload the merge car file
-     // - create a new clock block with the current w3clock.head as parent and the merge car file cid as the data
-     // - update the remote clock with the new clock block (it doesn't need to fetch the car file, and we dont need to store the clock blocks locally, just the most recent one)
-     //
-     // else if the remote head is not contained by our clock, it is is ahead of the local sync clock.
-     // - get the car file it points to from its data field
-     // - merge to the local clock (park that car so we have both carcid indexes)
-     // - calculate a new root from the merged head, and update the local clock
-   }
-   async connect(email) {
-     try {
-       const client = await create();
-       await client.authorize(email);
-       const claims = await client.capability.access.claim();
-       console.log('claims', claims);
-       const space = await client.createSpace('fp.' + this.name);
-       console.log('space', space);
-       await client.setCurrentSpace(space.did());
-       await client.registerSpace(email);
-       this.client = client;
-       console.log('client', client);
-     }
-     catch (err) {
-       console.error('registration failed: ', err);
-     }
-   }
- }
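
The comments inside sync() above outline the intended push/pull protocol against the w3clock head; only connect() is fully implemented. A hedged sketch of driving the class directly, assuming the Fireproof.storage() factory from the deleted fireproof.js; the database name, email, and carCid values are placeholders, and sync()'s cid argument is not used by the body shown:

import { Fireproof } from './fireproof.js'
import { Remote } from './remote.js'

// Remote expects a Fireproof database instance: the methods above read
// database.blocks, database.blocks.valet.primary, and database.clock
const database = Fireproof.storage('my-db')
const remote = new Remote(database, 'my-db', {})

// connect() creates a w3up-client, authorizes the email, creates and registers
// a space named 'fp.my-db', and keeps the client on remote.client
await remote.connect('user@example.com')

// sync() then compares the remote w3clock head with the last-synced head
// recorded by the valet and decides whether to push, pull, or do nothing
await remote.sync(carCid) // carCid: a CAR CID string, as clock(cid) expects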
package/dist/sha1.js DELETED
@@ -1,74 +0,0 @@
- // @ts-nocheck
- // from https://github.com/duzun/sync-sha1/blob/master/rawSha1.js
- // MIT License Copyright (c) 2020 Dumitru Uzun
- // Permission is hereby granted, free of charge, to any person obtaining a copy
- // of this software and associated documentation files (the "Software"), to deal
- // in the Software without restriction, including without limitation the rights
- // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- // copies of the Software, and to permit persons to whom the Software is
- // furnished to do so, subject to the following conditions:
- // The above copyright notice and this permission notice shall be included in all
- // copies or substantial portions of the Software.
- // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- // SOFTWARE.
- // import {
- // isLittleEndian, switchEndianness32
- // } from 'string-encode'
- /**
-  * SHA1 on binary array
-  *
-  * @param {Uint8Array} b Data to hash
-  *
-  * @return {Uint8Array} sha1 hash
-  */
- export function rawSha1(b) {
-   let i = b.byteLength;
-   let bs = 0;
-   let A;
-   let B;
-   let C;
-   let D;
-   let G;
-   const H = Uint32Array.from([A = 0x67452301, B = 0xEFCDAB89, ~A, ~B, 0xC3D2E1F0]);
-   const W = new Uint32Array(80);
-   const nrWords = (i / 4 + 2) | 15;
-   const words = new Uint32Array(nrWords + 1);
-   let j;
-   words[nrWords] = i * 8;
-   words[i >> 2] |= 0x80 << (~i << 3);
-   for (; i--;) {
-     words[i >> 2] |= b[i] << (~i << 3);
-   }
-   for (A = H.slice(); bs < nrWords; bs += 16, A.set(H)) {
-     for (i = 0; i < 80; A[0] = (G = ((b = A[0]) << 5 | b >>> 27) +
-       A[4] +
-       (W[i] = (i < 16) ? words[bs + i] : G << 1 | G >>> 31) +
-       0x5A827999,
-       B = A[1],
-       C = A[2],
-       D = A[3],
-       G + ((j = i / 5 >> 2) // eslint-disable-line no-cond-assign
-         ? j !== 2
-           ? (B ^ C ^ D) + (j & 2 ? 0x6FE0483D : 0x14577208)
-           : (B & C | B & D | C & D) + 0x34994343
-         : B & C | ~B & D))
-       , A[1] = b
-       , A[2] = B << 30 | B >>> 2
-       , A[3] = C
-       , A[4] = D
-       , ++i) {
-       G = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16];
-     }
-     for (i = 5; i;)
-       H[--i] = H[i] + A[i];
-   }
-   // if (isLittleEndian()) {
-   // H = H.map(switchEndianness32)
-   // }
-   return new Uint8Array(H.buffer, H.byteOffset, H.byteLength);
- }
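
rawSha1 is a small synchronous SHA-1 over a Uint8Array, returning the 20-byte digest as a view over the internal state words. A hedged usage sketch; the input string is illustrative, and note the endianness caveat implied by the commented-out switchEndianness32 lines above:

import { rawSha1 } from './sha1.js'

const digest = rawSha1(new TextEncoder().encode('hello world')) // Uint8Array(20)
const hex = [...digest].map(b => b.toString(16).padStart(2, '0')).join('')
// Because the endianness switch is commented out, the bytes come out in platform
// word order: on little-endian machines this hex differs from the canonical
// big-endian SHA-1 rendering by a byte swap within each 32-bit word.
console.log(hex)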