@kubun/mutation 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE.md ADDED
@@ -0,0 +1,57 @@
1
+ # The Prosperity Public License 3.0.0
2
+
3
+ Contributor: Paul Le Cam
4
+
5
+ Source Code: https://github.com/PaulLeCam/kubun
6
+
7
+ ## Purpose
8
+
9
+ This license allows you to use and share this software for noncommercial purposes for free and to try this software for commercial purposes for thirty days.
10
+
11
+ ## Agreement
12
+
13
+ In order to receive this license, you have to agree to its rules. Those rules are both obligations under that agreement and conditions to your license. Don't do anything with this software that triggers a rule you can't or won't follow.
14
+
15
+ ## Notices
16
+
17
+ Make sure everyone who gets a copy of any part of this software from you, with or without changes, also gets the text of this license and the contributor and source code lines above.
18
+
19
+ ## Commercial Trial
20
+
21
+ Limit your use of this software for commercial purposes to a thirty-day trial period. If you use this software for work, your company gets one trial period for all personnel, not one trial per person.
22
+
23
+ ## Contributions Back
24
+
25
+ Developing feedback, changes, or additions that you contribute back to the contributor on the terms of a standardized public software license such as [the Blue Oak Model License 1.0.0](https://blueoakcouncil.org/license/1.0.0), [the Apache License 2.0](https://www.apache.org/licenses/LICENSE-2.0.html), [the MIT license](https://spdx.org/licenses/MIT.html), or [the two-clause BSD license](https://spdx.org/licenses/BSD-2-Clause.html) doesn't count as use for a commercial purpose.
26
+
27
+ ## Personal Uses
28
+
29
+ Personal use for research, experiment, and testing for the benefit of public knowledge, personal study, private entertainment, hobby projects, amateur pursuits, or religious observance, without any anticipated commercial application, doesn't count as use for a commercial purpose.
30
+
31
+ ## Noncommercial Organizations
32
+
33
+ Use by any charitable organization, educational institution, public research organization, public safety or health organization, environmental protection organization, or government institution doesn't count as use for a commercial purpose regardless of the source of funding or obligations resulting from the funding.
34
+
35
+ ## Defense
36
+
37
+ Don't make any legal claim against anyone accusing this software, with or without changes, alone or with other technology, of infringing any patent.
38
+
39
+ ## Copyright
40
+
41
+ The contributor licenses you to do everything with this software that would otherwise infringe their copyright in it.
42
+
43
+ ## Patent
44
+
45
+ The contributor licenses you to do everything with this software that would otherwise infringe any patents they can license or become able to license.
46
+
47
+ ## Reliability
48
+
49
+ The contributor can't revoke this license.
50
+
51
+ ## Excuse
52
+
53
+ You're excused for unknowingly breaking [Notices](#notices) if you take all practical steps to comply within thirty days of learning you broke the rule.
54
+
55
+ ## No Liability
56
+
57
+ ***As far as the law allows, this software comes as is, without any warranty or condition, and the contributor won't be liable to anyone for any damages related to this software or this license, under any kind of legal claim.***
package/README.md ADDED
@@ -0,0 +1,11 @@
1
+ # Kubun mutation
2
+
3
+ ## Installation
4
+
5
+ ```sh
6
+ npm install @kubun/mutation
7
+ ```
8
+
9
+ ## License
10
+
11
+ [Prosperity Public License 3.0.0](LICENSE.md)
package/lib/apply.d.ts ADDED
@@ -0,0 +1,14 @@
1
+ import { type Validator } from '@enkaku/schema';
2
+ import type { KubunDB } from '@kubun/db';
3
+ import type { ChangeDocumentMutation, DocumentData, DocumentMutation, DocumentNode, SetDocumentMutation } from '@kubun/protocol';
4
+ export type DocumentValidator = Validator<DocumentData>;
5
+ export type ValidatorsRecord = Record<string, Promise<DocumentValidator>>;
6
+ export type MutationContext = {
7
+ db: KubunDB;
8
+ validators: ValidatorsRecord;
9
+ };
10
+ export declare function getDocumentValidator(ctx: MutationContext, id: string): Promise<DocumentValidator>;
11
+ export declare function applyChangeMutation(ctx: MutationContext, mutation: ChangeDocumentMutation): Promise<DocumentNode>;
12
+ export declare function applySetMutation(ctx: MutationContext, mutation: SetDocumentMutation): Promise<DocumentNode>;
13
+ export declare function applyMutation(ctx: MutationContext, mutation: DocumentMutation): Promise<DocumentNode>;
14
+ //# sourceMappingURL=apply.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"apply.d.ts","sourceRoot":"","sources":["../src/apply.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,KAAK,SAAS,EAA2B,MAAM,gBAAgB,CAAA;AACxE,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,WAAW,CAAA;AAExC,OAAO,KAAK,EACV,sBAAsB,EACtB,YAAY,EACZ,gBAAgB,EAChB,YAAY,EACZ,mBAAmB,EACpB,MAAM,iBAAiB,CAAA;AAIxB,MAAM,MAAM,iBAAiB,GAAG,SAAS,CAAC,YAAY,CAAC,CAAA;AACvD,MAAM,MAAM,gBAAgB,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,iBAAiB,CAAC,CAAC,CAAA;AAEzE,MAAM,MAAM,eAAe,GAAG;IAC5B,EAAE,EAAE,OAAO,CAAA;IACX,UAAU,EAAE,gBAAgB,CAAA;CAC7B,CAAA;AAED,wBAAgB,oBAAoB,CAAC,GAAG,EAAE,eAAe,EAAE,EAAE,EAAE,MAAM,GAAG,OAAO,CAAC,iBAAiB,CAAC,CAOjG;AAED,wBAAsB,mBAAmB,CACvC,GAAG,EAAE,eAAe,EACpB,QAAQ,EAAE,sBAAsB,GAC/B,OAAO,CAAC,YAAY,CAAC,CAoCvB;AAED,wBAAsB,gBAAgB,CACpC,GAAG,EAAE,eAAe,EACpB,QAAQ,EAAE,mBAAmB,GAC5B,OAAO,CAAC,YAAY,CAAC,CAqCvB;AAED,wBAAsB,aAAa,CACjC,GAAG,EAAE,eAAe,EACpB,QAAQ,EAAE,gBAAgB,GACzB,OAAO,CAAC,YAAY,CAAC,CASvB"}
package/lib/apply.js ADDED
@@ -0,0 +1,100 @@
1
+ import * as A from '@automerge/automerge/slim';
2
+ import { fromB64 } from '@enkaku/codec';
3
+ import { asType, createValidator } from '@enkaku/schema';
4
+ import { DocumentID } from '@kubun/id';
5
+ import { automergeReady, automergeToData } from './automerge.js';
6
export function getDocumentValidator(ctx, id) {
  // Return the (memoized) schema validator for a document model ID.
  // The promise is cached on the context so the model is fetched once.
  let pending = ctx.validators[id];
  if (pending == null) {
    pending = ctx.db
      .getDocumentModel(id)
      .then((model) => createValidator({ ...model.schema, $id: id }));
    ctx.validators[id] = pending;
  }
  return pending;
}
15
export async function applyChangeMutation(ctx, mutation) {
  // Apply a "change" mutation (incremental or full Automerge payload)
  // to an existing document, validating the merged result.
  const id = DocumentID.fromString(mutation.sub);
  const docID = id.toString();
  const doc = await ctx.db.getDocument(id);
  if (doc == null) {
    throw new Error(`Document not found: ${docID}`);
  }
  if (mutation.iss !== doc.owner) {
    // TODO: verify capabilities if issuer is not owner
    throw new Error('Invalid mutation issuer');
  }
  // A null payload clears both the document data and its saved state.
  if (mutation.data === null) {
    return await ctx.db.saveDocument({
      id,
      existing: doc,
      data: null,
      state: null,
    });
  }
  if (doc.data === null) {
    throw new Error(`Cannot apply changes to empty document: ${docID}`);
  }
  // Fetch the stored Automerge state and the model validator in
  // parallel, also waiting for the WASM module to be ready.
  const [docStates, validator] = await Promise.all([
    ctx.db.getDocumentStates([docID]),
    getDocumentValidator(ctx, id.model.toString()),
    automergeReady,
  ]);
  // Rebuild the doc from its saved state, or from the plain data
  // object when no state has been persisted yet.
  const savedState = docStates[docID];
  const baseDoc = savedState ? A.load(savedState) : A.from(doc.data);
  const payload = fromB64(mutation.data);
  // Incremental payloads extend the base doc in place; full payloads
  // are loaded as a separate doc and merged.
  let mergedDoc;
  if (mutation.inc) {
    mergedDoc = A.loadIncremental(baseDoc, payload);
  } else {
    mergedDoc = A.merge(baseDoc, A.load(payload));
  }
  // Validate the merged data against the document model schema.
  const data = asType(validator, automergeToData(mergedDoc));
  return await ctx.db.saveDocument({
    id,
    existing: doc,
    data,
    state: A.save(mergedDoc),
  });
}
58
export async function applySetMutation(ctx, mutation) {
  // Apply a "set" mutation: create the document if missing, otherwise
  // replace its contents, enforcing ownership.
  // The audience (when present) names the intended owner.
  const owner = mutation.aud ?? mutation.iss;
  if (mutation.iss !== owner) {
    // TODO: verify capabilities if issuer is not owner
    throw new Error('Invalid mutation issuer');
  }
  const id = DocumentID.fromString(mutation.sub);
  // Load the existing document and the model validator in parallel,
  // also waiting for the Automerge WASM module.
  const [doc, validator] = await Promise.all([
    ctx.db.getDocument(id),
    getDocumentValidator(ctx, id.model.toString()),
    automergeReady,
  ]);
  // A null payload means the document is being emptied.
  let incomingDoc = null;
  let data = null;
  if (mutation.data !== null) {
    incomingDoc = A.load(fromB64(mutation.data));
    data = asType(validator, automergeToData(incomingDoc));
  }
  if (doc === null) {
    return await ctx.db.createDocument({
      id,
      owner,
      data,
      state: incomingDoc ? A.save(incomingDoc) : null,
      unique: fromB64(mutation.unq),
    });
  }
  if (doc.owner !== owner) {
    throw new Error(`Cannot change owner from ${doc.owner} to ${owner} in document: ${id.toString()}`);
  }
  return await ctx.db.saveDocument({
    id,
    existing: doc,
    data,
    state: incomingDoc ? A.save(incomingDoc) : null,
  });
}
91
export async function applyMutation(ctx, mutation) {
  // Dispatch a mutation to its type-specific handler.
  if (mutation.typ === 'change') {
    return await applyChangeMutation(ctx, mutation);
  }
  if (mutation.typ === 'set') {
    return await applySetMutation(ctx, mutation);
  }
  throw new Error('Unsupported mutation type');
}
@@ -0,0 +1,5 @@
1
+ import * as A from '@automerge/automerge/slim';
2
+ import type { DocumentData } from '@kubun/protocol';
3
+ export declare const automergeReady: PromiseLike<void>;
4
+ export declare function automergeToData(doc: A.Doc<DocumentData>): DocumentData;
5
+ //# sourceMappingURL=automerge.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"automerge.d.ts","sourceRoot":"","sources":["../src/automerge.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,CAAC,MAAM,2BAA2B,CAAA;AAE9C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAA;AAEnD,eAAO,MAAM,cAAc,mBAA0D,CAAA;AAErF,wBAAgB,eAAe,CAAC,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,YAAY,CAAC,GAAG,YAAY,CAEtE"}
@@ -0,0 +1,8 @@
1
+ // @ts-ignore missing type definition
2
+ import { automergeWasmBase64 } from '@automerge/automerge/automerge.wasm.base64.js';
3
+ import * as A from '@automerge/automerge/slim';
4
+ import { lazy } from '@enkaku/async';
5
// Lazily initialize the Automerge WASM module from its base64 payload;
// awaiting this promise guarantees the module is ready.
export const automergeReady = lazy(() => A.initializeBase64Wasm(automergeWasmBase64));
6
export function automergeToData(doc) {
  // Deep-clone via a JSON round-trip so the result is plain data,
  // detached from the Automerge document it came from.
  const serialized = JSON.stringify(doc);
  return JSON.parse(serialized);
}
@@ -0,0 +1,25 @@
1
+ import type { PatchOperation } from '@kubun/graphql';
2
+ import { DocumentID, type DocumentModelID } from '@kubun/id';
3
+ import type { ChangeDocumentMutation, DocumentData, SetDocumentMutation } from '@kubun/protocol';
4
+ export type CreateSetMutationParams<Data extends DocumentData = DocumentData> = {
5
+ data: Data | null;
6
+ issuer: string;
7
+ modelID: DocumentModelID | string;
8
+ owner?: string;
9
+ unique: Uint8Array;
10
+ };
11
+ export declare function createSetMutation<Data extends DocumentData = DocumentData>(params: CreateSetMutationParams<Data>): Promise<SetDocumentMutation>;
12
+ export type CreateChangeMutationParams<Data extends DocumentData = DocumentData> = {
13
+ docID: DocumentID | string;
14
+ from?: Partial<Data>;
15
+ issuer: string;
16
+ loadState: (id: string) => Promise<Uint8Array | null>;
17
+ patch: Array<PatchOperation>;
18
+ };
19
+ export declare function createChangeMutation<Data extends DocumentData = DocumentData>(params: CreateChangeMutationParams<Data>): Promise<ChangeDocumentMutation>;
20
+ export type CreateRemoveMutationParams = {
21
+ docID: string;
22
+ issuer: string;
23
+ };
24
+ export declare function createRemoveMutation(params: CreateRemoveMutationParams): ChangeDocumentMutation;
25
+ //# sourceMappingURL=create.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"create.d.ts","sourceRoot":"","sources":["../src/create.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,gBAAgB,CAAA;AACpD,OAAO,EAAE,UAAU,EAAE,KAAK,eAAe,EAAE,MAAM,WAAW,CAAA;AAC5D,OAAO,KAAK,EAAE,sBAAsB,EAAE,YAAY,EAAE,mBAAmB,EAAE,MAAM,iBAAiB,CAAA;AAKhG,MAAM,MAAM,uBAAuB,CAAC,IAAI,SAAS,YAAY,GAAG,YAAY,IAAI;IAC9E,IAAI,EAAE,IAAI,GAAG,IAAI,CAAA;IACjB,MAAM,EAAE,MAAM,CAAA;IACd,OAAO,EAAE,eAAe,GAAG,MAAM,CAAA;IACjC,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,MAAM,EAAE,UAAU,CAAA;CACnB,CAAA;AAED,wBAAsB,iBAAiB,CAAC,IAAI,SAAS,YAAY,GAAG,YAAY,EAC9E,MAAM,EAAE,uBAAuB,CAAC,IAAI,CAAC,GACpC,OAAO,CAAC,mBAAmB,CAAC,CAe9B;AAED,MAAM,MAAM,0BAA0B,CAAC,IAAI,SAAS,YAAY,GAAG,YAAY,IAAI;IACjF,KAAK,EAAE,UAAU,GAAG,MAAM,CAAA;IAC1B,IAAI,CAAC,EAAE,OAAO,CAAC,IAAI,CAAC,CAAA;IACpB,MAAM,EAAE,MAAM,CAAA;IACd,SAAS,EAAE,CAAC,EAAE,EAAE,MAAM,KAAK,OAAO,CAAC,UAAU,GAAG,IAAI,CAAC,CAAA;IACrD,KAAK,EAAE,KAAK,CAAC,cAAc,CAAC,CAAA;CAC7B,CAAA;AAED,wBAAsB,oBAAoB,CAAC,IAAI,SAAS,YAAY,GAAG,YAAY,EACjF,MAAM,EAAE,0BAA0B,CAAC,IAAI,CAAC,GACvC,OAAO,CAAC,sBAAsB,CAAC,CAkBjC;AAED,MAAM,MAAM,0BAA0B,GAAG;IACvC,KAAK,EAAE,MAAM,CAAA;IACb,MAAM,EAAE,MAAM,CAAA;CACf,CAAA;AAED,wBAAgB,oBAAoB,CAAC,MAAM,EAAE,0BAA0B,GAAG,sBAAsB,CAQ/F"}
package/lib/create.js ADDED
@@ -0,0 +1,52 @@
1
+ import * as A from '@automerge/automerge/slim';
2
+ import { toB64 } from '@enkaku/codec';
3
+ import { DocumentID } from '@kubun/id';
4
+ import { automergeReady } from './automerge.js';
5
+ import { applyPatches } from './json-patch.js';
6
export async function createSetMutation(params) {
  // Build a "set" mutation payload for creating or replacing a document.
  const { issuer, unique } = params;
  const owner = params.owner ?? issuer;
  // Derive the document ID while the Automerge WASM module initializes.
  const [docID] = await Promise.all([
    DocumentID.create(params.modelID, owner, unique),
    automergeReady,
  ]);
  // Null/undefined data yields an empty document; otherwise the data is
  // snapshotted into a full Automerge state and base64-encoded.
  let data = null;
  if (params.data != null) {
    data = toB64(A.save(A.from(params.data)));
  }
  return {
    typ: 'set',
    iss: issuer,
    aud: owner,
    sub: docID.toString(),
    data,
    unq: toB64(unique),
  };
}
22
export async function createChangeMutation(params) {
  // Build a "change" mutation by applying JSON patches on top of the
  // document's current state (or a locally created doc when none exists).
  const docID = DocumentID.from(params.docID).toString();
  // Load the persisted state while the Automerge WASM module initializes.
  const [state] = await Promise.all([params.loadState(docID), automergeReady]);
  const baseDoc = state ? A.load(state) : null;
  // Mutate the loaded doc, or a fresh one seeded from params.from.
  const changedDoc = A.change(baseDoc ?? A.from(params.from ?? {}), (draft) => {
    applyPatches(draft, params.patch);
  });
  // Incremental save when a base existed, full snapshot otherwise.
  let payload;
  if (baseDoc) {
    payload = A.saveSince(changedDoc, A.getHeads(baseDoc));
  } else {
    payload = A.save(changedDoc);
  }
  return {
    typ: 'change',
    iss: params.issuer,
    sub: docID,
    data: toB64(payload),
    inc: baseDoc != null,
  };
}
44
export function createRemoveMutation(params) {
  // A "change" mutation carrying null data marks the document as removed.
  const { docID, issuer } = params;
  return {
    typ: 'change',
    iss: issuer,
    sub: docID,
    data: null,
    inc: false,
  };
}
package/lib/index.d.ts ADDED
@@ -0,0 +1,5 @@
1
+ export type { DocumentValidator, MutationContext, ValidatorsRecord } from './apply.js';
2
+ export { applyChangeMutation, applyMutation, applySetMutation } from './apply.js';
3
+ export type { CreateChangeMutationParams, CreateRemoveMutationParams, CreateSetMutationParams, } from './create.js';
4
+ export { createChangeMutation, createRemoveMutation, createSetMutation } from './create.js';
5
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,YAAY,EAAE,iBAAiB,EAAE,eAAe,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAA;AACtF,OAAO,EAAE,mBAAmB,EAAE,aAAa,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAA;AACjF,YAAY,EACV,0BAA0B,EAC1B,0BAA0B,EAC1B,uBAAuB,GACxB,MAAM,aAAa,CAAA;AACpB,OAAO,EAAE,oBAAoB,EAAE,oBAAoB,EAAE,iBAAiB,EAAE,MAAM,aAAa,CAAA"}
package/lib/index.js ADDED
@@ -0,0 +1,2 @@
1
+ export { applyChangeMutation, applyMutation, applySetMutation } from './apply.js';
2
+ export { createChangeMutation, createRemoveMutation, createSetMutation } from './create.js';
@@ -0,0 +1,8 @@
1
+ import type { PatchOperation } from '@kubun/graphql';
2
+ import type { DocumentData } from '@kubun/protocol';
3
+ export declare function parsePath(path: string): Array<string | number>;
4
+ export declare function getPath(obj: unknown, path: string): unknown;
5
+ export declare function setPath(obj: Record<string, unknown> | Array<unknown>, path: string, value: unknown): void;
6
+ export declare function deletePath(obj: Record<string, unknown> | Array<unknown>, path: string): void;
7
+ export declare function applyPatches(data: DocumentData, patches: Array<PatchOperation>): void;
8
+ //# sourceMappingURL=json-patch.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"json-patch.d.ts","sourceRoot":"","sources":["../src/json-patch.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,gBAAgB,CAAA;AACpD,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAA;AAEnD,wBAAgB,SAAS,CAAC,IAAI,EAAE,MAAM,GAAG,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC,CAS9D;AAED,wBAAgB,OAAO,CAAC,GAAG,EAAE,OAAO,EAAE,IAAI,EAAE,MAAM,GAAG,OAAO,CAI3D;AAED,wBAAgB,OAAO,CACrB,GAAG,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,KAAK,CAAC,OAAO,CAAC,EAC7C,IAAI,EAAE,MAAM,EACZ,KAAK,EAAE,OAAO,GACb,IAAI,CAYN;AAED,wBAAgB,UAAU,CAAC,GAAG,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,KAAK,CAAC,OAAO,CAAC,EAAE,IAAI,EAAE,MAAM,GAAG,IAAI,CAY5F;AAED,wBAAgB,YAAY,CAAC,IAAI,EAAE,YAAY,EAAE,OAAO,EAAE,KAAK,CAAC,cAAc,CAAC,GAAG,IAAI,CA0BrF"}
@@ -0,0 +1,70 @@
1
export function parsePath(path) {
  // Split a JSON-pointer-like path ("/a/0/b") into segment keys.
  // Only canonical non-negative integers ("0", "7", "42") become numeric
  // array indices, per RFC 6901's array-index syntax. The previous
  // blanket Number() coercion turned "" into 0 (Number('') === 0), and
  // also coerced "007", "1e3", and whitespace-only segments — silently
  // indexing arrays for keys that were never written as indices.
  return path.slice(1).split('/').map((key) => {
    return /^(0|[1-9]\d*)$/.test(key) ? Number(key) : key;
  });
}
8
export function getPath(obj, path) {
  // Walk obj following the parsed path segments; any missing
  // intermediate value short-circuits to undefined.
  let cursor = obj;
  for (const segment of parsePath(path)) {
    // @ts-ignore index signature
    cursor = cursor?.[segment];
  }
  return cursor;
}
13
export function setPath(obj, path, value) {
  // Write value at the given path, appending to an array when the
  // final segment is the index just past its current end.
  const segments = parsePath(path);
  const finalKey = segments.pop();
  if (finalKey === undefined) {
    return;
  }
  // @ts-ignore unknown object
  let target = obj;
  for (const segment of segments) {
    target = target[segment];
  }
  if (Array.isArray(target) && typeof finalKey === 'number' && finalKey === target.length) {
    // Index equals length: append rather than assign past the end.
    target.push(value);
  } else {
    target[finalKey] = value;
  }
}
28
export function deletePath(obj, path) {
  // Remove the value at the given path: splice for numeric array
  // indices (so the array stays dense), delete for object keys.
  const segments = parsePath(path);
  const finalKey = segments.pop();
  if (finalKey === undefined) {
    return;
  }
  // @ts-ignore unknown object
  let target = obj;
  for (const segment of segments) {
    target = target[segment];
  }
  if (Array.isArray(target) && typeof finalKey === 'number') {
    target.splice(finalKey, 1);
  } else {
    delete target[finalKey];
  }
}
42
export function applyPatches(data, patches) {
  // Apply a sequence of JSON-patch-style operations to data in place.
  for (const patch of patches) {
    const { op, path } = patch;
    if (op === 'add' || op === 'replace') {
      setPath(data, path, patch.value);
    } else if (op === 'remove') {
      deletePath(data, path);
    } else if (op === 'copy') {
      // NOTE(review): copies by reference, not deep clone — confirm
      // downstream consumers tolerate shared sub-objects.
      setPath(data, path, getPath(data, patch.from));
    } else if (op === 'move') {
      const moved = getPath(data, patch.from);
      deletePath(data, patch.from);
      setPath(data, path, moved);
    } else {
      throw new Error(`Unknown operation: ${op}`);
    }
  }
}
package/package.json ADDED
@@ -0,0 +1,39 @@
1
+ {
2
+ "name": "@kubun/mutation",
3
+ "version": "0.3.0",
4
+ "license": "see LICENSE.md",
5
+ "keywords": [],
6
+ "type": "module",
7
+ "main": "lib/index.js",
8
+ "types": "lib/index.d.ts",
9
+ "exports": {
10
+ ".": "./lib/index.js"
11
+ },
12
+ "files": [
13
+ "lib/*",
14
+ "LICENSE.md"
15
+ ],
16
+ "sideEffects": false,
17
+ "dependencies": {
18
+ "@automerge/automerge": "^2.2.8",
19
+ "@enkaku/async": "^0.12.0",
20
+ "@enkaku/codec": "^0.12.0",
21
+ "@enkaku/schema": "^0.12.0",
22
+ "@kubun/id": "^0.3.0"
23
+ },
24
+ "devDependencies": {
25
+ "@kubun/db": "^0.3.3",
26
+ "@kubun/graphql": "^0.3.6",
27
+ "@kubun/protocol": "^0.3.4"
28
+ },
29
+ "scripts": {
30
+ "build:clean": "del lib",
31
+ "build:js": "swc src -d ./lib --config-file ../../swc.json --strip-leading-paths",
32
+ "build:types": "tsc --emitDeclarationOnly --skipLibCheck",
33
+ "build:types:ci": "tsc --emitDeclarationOnly --declarationMap false",
34
+ "build": "pnpm run build:clean && pnpm run build:js && pnpm run build:types",
35
+ "test:types": "tsc --noEmit",
36
+ "test:unit": "node --experimental-vm-modules ../../node_modules/jest/bin/jest.js",
37
+ "test": "pnpm run test:types && pnpm run test:unit"
38
+ }
39
+ }