@sanity/sdk 2.0.0 → 2.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@sanity/sdk",
-  "version": "2.0.0",
+  "version": "2.0.2",
   "private": false,
   "description": "Sanity SDK",
   "keywords": [
@@ -45,10 +45,10 @@
     "@sanity/client": "^7.2.1",
     "@sanity/comlink": "^3.0.4",
     "@sanity/diff-match-patch": "^3.2.0",
+    "@sanity/diff-patch": "^6.0.0",
     "@sanity/message-protocol": "^0.12.0",
     "@sanity/mutate": "^0.12.4",
     "@sanity/types": "^3.83.0",
-    "@types/lodash-es": "^4.17.12",
     "groq": "3.88.1-typegen-experimental.0",
     "lodash-es": "^4.17.21",
     "reselect": "^5.1.1",
@@ -59,6 +59,7 @@
     "@sanity/browserslist-config": "^1.0.5",
     "@sanity/pkg-utils": "^7.2.2",
     "@sanity/prettier-config": "^1.0.3",
+    "@types/lodash-es": "^4.17.12",
     "@vitest/coverage-v8": "3.1.2",
     "eslint": "^9.22.0",
     "groq-js": "^1.16.1",
@@ -67,11 +68,11 @@
     "typescript": "^5.8.3",
     "vite": "^6.3.4",
     "vitest": "^3.1.2",
-    "@repo/config-eslint": "0.0.0",
-    "@repo/config-test": "0.0.1",
     "@repo/package.bundle": "3.82.0",
+    "@repo/config-eslint": "0.0.0",
     "@repo/package.config": "0.0.1",
-    "@repo/tsconfig": "0.0.1"
+    "@repo/tsconfig": "0.0.1",
+    "@repo/config-test": "0.0.1"
   },
   "engines": {
     "node": ">=20.0.0"
@@ -13,11 +13,13 @@ export {
   getAuthState,
   getCurrentUserState,
   getDashboardOrganizationId,
+  getIsInDashboardState,
   getLoginUrlState,
   getTokenState,
   type LoggedInAuthState,
   type LoggedOutAuthState,
   type LoggingInAuthState,
+  setAuthToken,
 } from '../auth/authStore'
 export {observeOrganizationVerificationState} from '../auth/getOrganizationVerificationState'
 export {handleAuthCallback} from '../auth/handleAuthCallback'
@@ -32,8 +34,14 @@ export {
   releaseChannel,
 } from '../comlink/controller/comlinkControllerStore'
 export type {ComlinkNodeState} from '../comlink/node/comlinkNodeStore'
+export {getOrCreateNode, releaseNode} from '../comlink/node/comlinkNodeStore'
 export {getNodeState, type NodeState} from '../comlink/node/getNodeState'
-export {type FrameMessage, type WindowMessage} from '../comlink/types'
+export {
+  type FrameMessage,
+  type NewTokenResponseMessage,
+  type RequestNewTokenMessage,
+  type WindowMessage,
+} from '../comlink/types'
 export {type AuthConfig, type AuthProvider} from '../config/authConfig'
 export {
   createDatasetHandle,
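
Taken together, these export changes widen the SDK's surface: the dashboard auth helpers, the comlink node helpers, and the two token-refresh message types all become importable. A hedged sketch of what a consumer can now import, assuming this exports module feeds the package's public entry point:

```ts
import {
  getIsInDashboardState,
  setAuthToken,
  getOrCreateNode,
  releaseNode,
  type NewTokenResponseMessage,
  type RequestNewTokenMessage,
} from '@sanity/sdk'
```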
@@ -280,3 +280,48 @@ export const getDashboardOrganizationId = bindActionGlobally(
   authStore,
   createStateSourceAction(({state: {dashboardContext}}) => dashboardContext?.orgId),
 )
+
+/**
+ * Returns a state source indicating if the SDK is running within a dashboard context.
+ * @public
+ */
+export const getIsInDashboardState = bindActionGlobally(
+  authStore,
+  createStateSourceAction(
+    ({state: {dashboardContext}}) =>
+      // Check if dashboardContext exists and is not empty
+      !!dashboardContext && Object.keys(dashboardContext).length > 0,
+  ),
+)
+
+/**
+ * Action to explicitly set the authentication token.
+ * Used internally by the Comlink token refresh.
+ * @internal
+ */
+export const setAuthToken = bindActionGlobally(authStore, ({state}, token: string | null) => {
+  const currentAuthState = state.get().authState
+  if (token) {
+    // Update state only if the new token is different or currently logged out
+    if (currentAuthState.type !== AuthStateType.LOGGED_IN || currentAuthState.token !== token) {
+      // This state update structure should trigger listeners in clientStore
+      state.set('setToken', {
+        authState: {
+          type: AuthStateType.LOGGED_IN,
+          token: token,
+          // Keep existing user or set to null? Setting to null forces refetch.
+          // Keep existing user to avoid unnecessary refetches if user data is still valid.
+          currentUser:
+            currentAuthState.type === AuthStateType.LOGGED_IN ? currentAuthState.currentUser : null,
+        },
+      })
+    }
+  } else {
+    // Handle setting token to null (logging out)
+    if (currentAuthState.type !== AuthStateType.LOGGED_OUT) {
+      state.set('setToken', {
+        authState: {type: AuthStateType.LOGGED_OUT, isDestroyingSession: false},
+      })
+    }
+  }
+})
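
The dashboard check added above reduces to a small predicate over the stored dashboardContext. A standalone sketch of the same logic (the helper name is illustrative, not part of the SDK):

```ts
// Illustrative equivalent of the createStateSourceAction callback above.
const isInDashboard = (dashboardContext?: Record<string, unknown>): boolean =>
  !!dashboardContext && Object.keys(dashboardContext).length > 0

isInDashboard(undefined)          // false – no dashboard context at all
isInDashboard({})                 // false – a context object exists but is empty
isInDashboard({orgId: 'org-123'}) // true  – running inside a dashboard
```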
@@ -11,3 +11,21 @@ export type FrameMessage = Message
  * @public
  */
 export type WindowMessage = Message
+
+/**
+ * Message from SDK (iframe) to Parent (dashboard) to request a new token
+ * @internal
+ */
+export type RequestNewTokenMessage = {
+  type: 'dashboard/v1/auth/tokens/create'
+  payload?: undefined
+}
+
+/**
+ * Message from Parent (dashboard) to SDK (iframe) with the new token
+ * @internal
+ */
+export type NewTokenResponseMessage = {
+  type: 'dashboard/v1/auth/tokens/create'
+  payload: {token: string | null; error?: string}
+}
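
Both message types share the same type string and differ only in payload, so a response can be matched to its request by type alone. A hedged sketch of the values these types describe (the token and error strings are placeholders):

```ts
// SDK (iframe) → dashboard: request a fresh token; the request carries no payload.
const request: RequestNewTokenMessage = {type: 'dashboard/v1/auth/tokens/create'}

// Dashboard → SDK: a successful refresh returns the new token...
const success: NewTokenResponseMessage = {
  type: 'dashboard/v1/auth/tokens/create',
  payload: {token: 'sk-example-token'},
}

// ...while a failed refresh carries a null token and an error message.
const failure: NewTokenResponseMessage = {
  type: 'dashboard/v1/auth/tokens/create',
  payload: {token: null, error: 'token refresh failed'},
}
```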
@@ -11,6 +11,7 @@ import {
   type UnfilteredResponseQueryOptions,
   type WelcomeEvent,
 } from '@sanity/client'
+import {diffValue} from '@sanity/diff-patch'
 import {type Mutation, type SanityDocument} from '@sanity/types'
 import {evaluate, parse} from 'groq-js'
 import {delay, first, firstValueFrom, from, Observable, of, ReplaySubject, Subject} from 'rxjs'
@@ -30,7 +31,6 @@ import {
   unpublishDocument,
 } from './actions'
 import {applyDocumentActions} from './applyDocumentActions'
-import {diffPatch} from './diffPatch'
 import {
   getDocumentState,
   getDocumentSyncStatus,
@@ -980,7 +980,7 @@ beforeEach(() => {
       documentId: id,
       eventId: `${transactionId}#${id}`,
       identity: 'example-user',
-      mutations: diffPatch(prevDoc, nextDoc).map(
+      mutations: diffValue(prevDoc, nextDoc).map(
         (patch): Mutation => ({patch: {...patch, id}}),
       ),
       timestamp,
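
The test now builds its mutation events with diffValue from the new @sanity/diff-patch dependency instead of the removed local diffPatch helper. A hedged sketch of the call pattern used above, with illustrative documents:

```ts
import {diffValue} from '@sanity/diff-patch'

// Illustrative before/after values; diffValue returns the patch operations
// needed to turn the first value into the second.
const prevDoc = {_id: 'doc-1', _type: 'post', title: 'Hello'}
const nextDoc = {_id: 'doc-1', _type: 'post', title: 'Hello world'}

// Mirrors the mapping in the test above: each patch is wrapped in a mutation.
const mutations = diffValue(prevDoc, nextDoc).map((patch) => ({
  patch: {...patch, id: prevDoc._id},
}))
```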
@@ -189,8 +189,9 @@ const _getDocumentState = bindActionByDataset(
     const draft = documentStates[draftId]?.local
     const published = documentStates[publishedId]?.local
 
+    // wait for draft and published to be loaded before returning a value
+    if (draft === undefined || published === undefined) return undefined
     const document = draft ?? published
-    if (document === undefined) return undefined
     if (path) return jsonMatch(document, path).at(0)?.value
     return document
   },
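
The reordering matters when only one of the two lookups has resolved: previously `draft ?? published` could hand back a published document while the draft was still loading, whereas the new guard returns undefined until both have settled. A minimal standalone sketch of the gate (names illustrative):

```ts
// undefined means "still loading"; a value is only produced once both have settled.
const resolveDocument = (draft: unknown, published: unknown): unknown =>
  draft === undefined || published === undefined ? undefined : (draft ?? published)
```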
@@ -301,7 +301,7 @@ describe('jsonMatch', () => {
   it('matches an element in an array by keyed segment', () => {
     const input = [{_key: 'bar'}, {_key: 'foo'}, {_key: 'baz'}]
     const result = jsonMatch(input, '[_key=="foo"]')
-    expect(result).toEqual([{value: {_key: 'foo'}, path: [1]}])
+    expect(result).toEqual([{value: {_key: 'foo'}, path: [{_key: 'foo'}]}])
   })
 
   it('returns no match for a keyed segment when the input is not an array', () => {
@@ -668,7 +668,7 @@ describe('unset', () => {
   it('unsets multiple array elements when using a range', () => {
     const input = {items: [1, 2, 3, 4, 5]}
     const output = unset(input, ['items[1:3]'])
-    expect(output).toEqual({items: [1, 3, 5]})
+    expect(output).toEqual({items: [1, 4, 5]})
   })
 
   it('leaves input unchanged if no path expression matches', () => {
@@ -758,10 +758,10 @@ describe('insert', () => {
     expect(output).toEqual({some: {array: ['a', '!', 'b', 'c']}})
   })
 
-  it('interprets a negative index for "before" as append', () => {
+  it('interprets a negative index for "before"', () => {
     const input = {some: {array: ['a', 'b', 'c']}}
     const output = insert(input, {before: 'some.array[-1]', items: ['!']})
-    expect(output).toEqual({some: {array: ['a', 'b', 'c', '!']}})
+    expect(output).toEqual({some: {array: ['a', 'b', '!', 'c']}})
   })
 
   it('inserts items after a given positive index ("after" operation)', () => {
@@ -793,10 +793,10 @@ describe('insert', () => {
     })
   })
 
-  it('inserts items after a negative index ("after" operation with negative index interpreted as prepend)', () => {
+  it('inserts items after a negative index ("after" operation with negative index interpreted as append)', () => {
     const input = {some: {array: ['a', 'b', 'c']}}
     const output = insert(input, {after: 'some.array[-1]', items: ['!']})
-    expect(output).toEqual({some: {array: ['!', 'a', 'b', 'c']}})
+    expect(output).toEqual({some: {array: ['a', 'b', 'c', '!']}})
   })
 
   it('replaces a single matched element ("replace" operation, single match)', () => {
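
Both updated tests reflect the new negative-index handling: instead of treating a negative index as append (for `before`) or prepend (for `after`), the index is resolved relative to the array length, as in the rewritten insert further down this diff. A short sketch of that normalization:

```ts
const arr = ['a', 'b', 'c']
const index = -1
const normalized = index < 0 ? arr.length + index : index // → 2, i.e. the last element

// before 'some.array[-1]' → insert ahead of 'c' → ['a', 'b', '!', 'c']
// after  'some.array[-1]' → insert behind 'c'   → ['a', 'b', 'c', '!']
```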
@@ -325,17 +325,9 @@ function matchRecursive(value: unknown, path: Path, currentPath: SingleValuePath
     const endIndex = end === '' ? value.length : end
 
     // We'll accumulate all matches from each index in the range
-    let results: MatchEntry[] = []
-
-    // Decide whether the range is exclusive or inclusive. The example in
-    // the doc says "array[1:9]" => element 1 through 9 (non-inclusive?).
-    // Typically, in slice terms, that is `array.slice(1, 9)` → includes
-    // indices 1..8. If that's your intention, do i < endIndex.
-    for (let i = startIndex; i < endIndex; i++) {
-      results = results.concat(matchRecursive(value[i], rest, [...currentPath, i]))
-    }
-
-    return results
+    return value
+      .slice(startIndex, endIndex)
+      .flatMap((item, i) => matchRecursive(item, rest, [...currentPath, i + startIndex]))
   }
 
   // 4) Keyed segment => find index in array
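
The rewrite keeps the exclusive-end range semantics but states it directly with slice and flatMap. A sketch of how a range segment maps onto Array.prototype.slice (values illustrative):

```ts
// Range segments are start-inclusive and end-exclusive, just like slice.
const items = [1, 2, 3, 4, 5]
items.slice(1, 3) // → [2, 3] – the elements 'items[1:3]' matches,
                  // which is why unset(input, ['items[1:3]']) now yields {items: [1, 4, 5]}
```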
@@ -347,7 +339,7 @@ function matchRecursive(value: unknown, path: Path, currentPath: SingleValuePath
   }
 
   const nextVal = value[arrIndex]
-  return matchRecursive(nextVal, rest, [...currentPath, arrIndex])
+  return matchRecursive(nextVal, rest, [...currentPath, {_key: keyed._key}])
 }
 
 // this is a similar array key to the studio:
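
With this change a keyed segment contributes the key itself to the returned path rather than the resolved array index, which is exactly what the updated jsonMatch test asserts:

```ts
// Sketch mirroring the updated test above.
jsonMatch([{_key: 'bar'}, {_key: 'foo'}, {_key: 'baz'}], '[_key=="foo"]')
// → [{value: {_key: 'foo'}, path: [{_key: 'foo'}]}]   (previously the path was [1])
```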
@@ -492,14 +484,14 @@ export function unset<R>(input: unknown, pathExpressions: string[]): R
 export function unset(input: unknown, pathExpressions: string[]): unknown {
   const result = pathExpressions
     .flatMap((pathExpression) => jsonMatch(input, pathExpression))
+    // ensure that we remove in the reverse order the paths were found in
+    // this is necessary for array unsets so the indexes don't change as we unset
+    .reverse()
     .reduce((acc, {path}) => unsetDeep(acc, path), input)
 
   return ensureArrayKeysDeep(result)
 }
 
-const operations = ['before', 'after', 'replace'] as const
-type Operation = (typeof operations)[number]
-
 /**
  * Given an input object, a path expression (inside the insert patch object), and an array of items,
  * this function will insert or replace the matched items.
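
The `.reverse()` added to unset above removes matches from the end of the array first, so earlier indexes stay valid while later elements are deleted. A small standalone sketch of why the order matters:

```ts
// Matched indexes 1 and 3 (values 2 and 4) in [1, 2, 3, 4, 5].
const ascending = [1, 2, 3, 4, 5]
ascending.splice(1, 1) // → [1, 3, 4, 5]
ascending.splice(3, 1) // removes 5, not 4 – the second index has shifted

const descending = [1, 2, 3, 4, 5]
descending.splice(3, 1) // → [1, 2, 3, 5]
descending.splice(1, 1) // → [1, 3, 5] – both matched values removed correctly
```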
@@ -579,105 +571,138 @@ type Operation = (typeof operations)[number]
  * ```
  */
 export function insert<R>(input: unknown, insertPatch: InsertPatch): R
-export function insert(input: unknown, insertPatch: InsertPatch): unknown {
-  const operation = operations.find((op) => op in insertPatch)
+export function insert(input: unknown, {items, ...insertPatch}: InsertPatch): unknown {
+  let operation
+  let pathExpression
+
+  // behavior observed from content-lake when inserting:
+  // 1. if the operation is before, out of all the matches, it will insert the
+  //    items before the first match that appears in the array
+  // 2. if the operation is after, it will insert the items after the first
+  //    match that appears in the array
+  // 3. if the operation is replace, then insert the items before the first
+  //    match and then delete the rest
+  if ('before' in insertPatch) {
+    operation = 'before' as const
+    pathExpression = insertPatch.before
+  } else if ('after' in insertPatch) {
+    operation = 'after' as const
+    pathExpression = insertPatch.after
+  } else if ('replace' in insertPatch) {
+    operation = 'replace' as const
+    pathExpression = insertPatch.replace
+  }
   if (!operation) return input
-
-  const {items} = insertPatch
-  const pathExpression = (insertPatch as {[K in Operation]?: string} & {items: unknown})[operation]
   if (typeof pathExpression !== 'string') return input
 
-  // Helper to normalize a matched index given the parent array's length.
-  function normalizeIndex(index: number, parentLength: number): number {
-    switch (operation) {
-      case 'before':
-        // A negative index means "append" (i.e. insert before a hypothetical element
-        // beyond the end of the array).
-        return index < 0 ? parentLength : index
-      case 'after':
-        // For "after", if the matched index is negative, we treat it as "prepend":
-        // by convention, we convert it to -1 so that later adding 1 produces 0.
-        return index < 0 ? -1 : index
-      default: // default to 'replace'
-        // For replace, convert a negative index to the corresponding positive one.
-        return index < 0 ? parentLength + index : index
-    }
-  }
+  const parsedPath = parsePath(pathExpression)
+  // in order to do an insert patch, you need to provide at least one path segment
+  if (!parsedPath.length) return input
 
-  // Group the matched array entries by their parent array.
-  interface GroupEntry {
-    array: unknown[]
-    pathToArray: SingleValuePath
-    indexes: number[]
-  }
-  const grouped = new Map<unknown, GroupEntry>()
-  jsonMatch(input, pathExpression)
-    .map(({path}) => {
-      const segment = path[path.length - 1]
-      let index: number | undefined
-      if (isKeySegment(segment)) {
-        index = getIndexForKey(input, segment._key)
-      } else if (typeof segment === 'number') {
-        index = segment
-      }
-      if (typeof index !== 'number') return null
+  const arrayPath = stringifyPath(parsedPath.slice(0, -1))
+  const positionPath = stringifyPath(parsedPath.slice(-1))
 
-      const parentPath = path.slice(0, path.length - 1)
-      const parent = getDeep(input, parentPath)
-      if (!Array.isArray(parent)) return null
+  const arrayMatches = jsonMatch<unknown[]>(input, arrayPath)
 
-      const normalizedIndex = normalizeIndex(index, parent.length)
-      return {parent, parentPath, normalizedIndex}
-    })
-    .filter(isNonNullable)
-    .forEach(({parent, parentPath, normalizedIndex}) => {
-      if (grouped.has(parent)) {
-        grouped.get(parent)!.indexes.push(normalizedIndex)
-      } else {
-        grouped.set(parent, {array: parent, pathToArray: parentPath, indexes: [normalizedIndex]})
-      }
-    })
+  let result = input
 
-  // Sort the indexes for each grouped entry.
-  const groupEntries = Array.from(grouped.values()).map((entry) => ({
-    ...entry,
-    indexes: entry.indexes.sort((a, b) => a - b),
-  }))
+  for (const {path, value} of arrayMatches) {
+    if (!Array.isArray(value)) continue
+    let arr = value
 
-  // For each group, update the parent array using setDeep.
-  const result = groupEntries.reduce<unknown>((acc, {array, indexes, pathToArray}) => {
     switch (operation) {
+      case 'replace': {
+        const indexesToRemove = new Set<number>()
+        let position = Infinity
+
+        for (const itemMatch of jsonMatch(arr, positionPath)) {
+          // there should only be one path segment for an insert patch, invalid otherwise
+          if (itemMatch.path.length !== 1) continue
+          const [segment] = itemMatch.path
+          if (typeof segment === 'string') continue
+
+          let index
+
+          if (typeof segment === 'number') index = segment
+          if (typeof index === 'number' && index < 0) index = arr.length + index
+          if (isKeySegment(segment)) index = getIndexForKey(arr, segment._key)
+          if (typeof index !== 'number') continue
+          if (index < 0) index = arr.length + index
+
+          indexesToRemove.add(index)
+          if (index < position) position = index
+        }
+
+        if (position === Infinity) continue
+
+        // remove all other indexes
+        arr = arr
+          .map((item, index) => ({item, index}))
+          .filter(({index}) => !indexesToRemove.has(index))
+          .map(({item}) => item)
+
+        // insert at the min index
+        arr = [...arr.slice(0, position), ...items, ...arr.slice(position, arr.length)]
+
+        break
+      }
       case 'before': {
-        // Insert items before the first matched index.
-        const firstIndex = indexes[0]
-        return setDeep(acc, pathToArray, [
-          ...array.slice(0, firstIndex),
-          ...items,
-          ...array.slice(firstIndex),
-        ])
+        let position = Infinity
+
+        for (const itemMatch of jsonMatch(arr, positionPath)) {
+          if (itemMatch.path.length !== 1) continue
+          const [segment] = itemMatch.path
+
+          if (typeof segment === 'string') continue
+
+          let index
+
+          if (typeof segment === 'number') index = segment
+          if (typeof index === 'number' && index < 0) index = arr.length + index
+          if (isKeySegment(segment)) index = getIndexForKey(arr, segment._key)
+          if (typeof index !== 'number') continue
+          if (index < 0) index = arr.length - index
+          if (index < position) position = index
+        }
+
+        if (position === Infinity) continue
+
+        arr = [...arr.slice(0, position), ...items, ...arr.slice(position, arr.length)]
+
+        break
       }
       case 'after': {
-        // Insert items after the last matched index.
-        const lastIndex = indexes[indexes.length - 1] + 1
-        return setDeep(acc, pathToArray, [
-          ...array.slice(0, lastIndex),
-          ...items,
-          ...array.slice(lastIndex),
-        ])
+        let position = -Infinity
+
+        for (const itemMatch of jsonMatch(arr, positionPath)) {
+          if (itemMatch.path.length !== 1) continue
+          const [segment] = itemMatch.path
+
+          if (typeof segment === 'string') continue
+
+          let index
+
+          if (typeof segment === 'number') index = segment
+          if (typeof index === 'number' && index < 0) index = arr.length + index
+          if (isKeySegment(segment)) index = getIndexForKey(arr, segment._key)
+          if (typeof index !== 'number') continue
+          if (index > position) position = index
+        }
+
+        if (position === -Infinity) continue
+
+        arr = [...arr.slice(0, position + 1), ...items, ...arr.slice(position + 1, arr.length)]
+
+        break
       }
-      // default to 'replace' behavior
+
       default: {
-        // Remove all matched items then insert the new items at the first match.
-        const firstIndex = indexes[0]
-        const indexSet = new Set(indexes)
-        return setDeep(acc, pathToArray, [
-          ...array.slice(0, firstIndex),
-          ...items,
-          ...array.slice(firstIndex).filter((_, idx) => !indexSet.has(idx + firstIndex)),
-        ])
+        continue
       }
     }
-  }, input)
+
+    result = setDeep(result, path, arr)
+  }
 
   return ensureArrayKeysDeep(result)
 }
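
The rewritten insert resolves the parent array and the final position segment separately, normalizes negative and keyed indexes, and then applies the before/after/replace semantics described in its comment. A hedged sketch of the resulting behavior, using the tests earlier in this diff as the reference:

```ts
const input = {array: ['a', 'b', 'c']}

insert(input, {before: 'array[1]', items: ['!']})  // → {array: ['a', '!', 'b', 'c']}
insert(input, {before: 'array[-1]', items: ['!']}) // → {array: ['a', 'b', '!', 'c']}
insert(input, {after: 'array[-1]', items: ['!']})  // → {array: ['a', 'b', 'c', '!']}
insert(input, {replace: 'array[1]', items: ['!']}) // → {array: ['a', '!', 'c']}
```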
@@ -807,10 +832,6 @@ export function ifRevisionID(input: unknown, revisionId: string): unknown {
   return input
 }
 
-function isNonNullable<T>(t: T): t is NonNullable<T> {
-  return t !== null && t !== undefined
-}
-
 const indexCache = new WeakMap<KeyedSegment[], Record<string, number | undefined>>()
 export function getIndexForKey(input: unknown, key: string): number | undefined {
   if (!Array.isArray(input)) return undefined
@@ -1,3 +1,4 @@
+import {diffValue} from '@sanity/diff-patch'
 import {
   type Mutation,
   type PatchOperations,
@@ -10,7 +11,6 @@ import {isEqual} from 'lodash-es'
 import {getDraftId, getPublishedId} from '../utils/ids'
 import {evaluateSync} from './_synchronous-groq-js.mjs'
 import {type DocumentAction} from './actions'
-import {diffPatch} from './diffPatch'
 import {type Grant} from './permissions'
 import {type DocumentSet, getId, processMutations} from './processMutations'
 import {type HttpAction} from './reducers'
@@ -300,11 +300,7 @@ export function processActions({
   // this one will always be defined because a patch mutation will never
   // delete an input document
   const baseAfter = base[draftId] as SanityDocument
-
-  // TODO: consider replacing with `sanity-diff-patch`. There seems to be
-  // bug in `sanity-diff-patch` where differing strings are not creating
-  // diff-match patches.
-  const patches = diffPatch(baseBefore, baseAfter)
+  const patches = diffValue(baseBefore, baseAfter)
 
   const workingMutations: Mutation[] = []
   if (!working[draftId] && working[publishedId]) {
@@ -26,7 +26,7 @@ export type DocumentSet<TDocument extends SanityDocument = SanityDocument> = {
   [TDocumentId in string]?: TDocument | null
 }
 
-type SupportPatchOperation = Exclude<keyof PatchOperations, 'merge'>
+type SupportedPatchOperation = Exclude<keyof PatchOperations, 'merge'>
 
 // > If multiple patches are included, then the order of execution is as follows:
 // > - set, setIfMissing, unset, inc, dec, insert.
@@ -41,7 +41,7 @@ const patchOperations = {
   insert,
   diffMatchPatch,
 } satisfies {
-  [K in SupportPatchOperation]: (
+  [K in SupportedPatchOperation]: (
     input: unknown,
     pathExpressions: NonNullable<PatchOperations[K]>,
   ) => unknown