@atcute/lexicon-resolver 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,21 @@
1
+ import type { LexiconDoc } from '@atcute/lexicon-doc';
2
+ import type { AtprotoDid, Nsid } from '@atcute/lexicons/syntax';
3
/** Options accepted by {@link LexiconAuthorityResolver.resolve}. */
export interface ResolveLexiconAuthorityOptions {
  /** Signal used to abort the resolution */
  signal?: AbortSignal;
  /** Whether to bypass HTTP caches when querying (maps to fetch `no-cache`) */
  noCache?: boolean;
}
/** Resolves the DID holding authority over a lexicon NSID. */
export interface LexiconAuthorityResolver {
  /** Returns the authority DID for `nsid`; rejects if none can be determined. */
  resolve(nsid: Nsid, options?: ResolveLexiconAuthorityOptions): Promise<AtprotoDid>;
}
/** Options accepted when resolving a lexicon schema record. */
export interface ResolveLexiconRecordOptions {
  /** Signal used to abort the resolution */
  signal?: AbortSignal;
  /** Whether to bypass HTTP caches when querying (maps to fetch `no-cache`) */
  noCache?: boolean;
}
/** A lexicon schema record that has been fetched, verified, and parsed. */
export interface ResolvedSchema {
  /** AT-URI of the lexicon record */
  uri: string;
  /** CID of the lexicon record */
  cid: string;
  /** Parsed lexicon schema document */
  schema: LexiconDoc;
}
package/dist/types.js ADDED
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=types.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"types.js","sourceRoot":"","sources":["../lib/types.ts"],"names":[],"mappings":""}
@@ -0,0 +1,2 @@
1
+ import type { Nsid } from '@atcute/lexicons/syntax';
2
/**
 * Derives the DNS lookup domain for an NSID by dropping the final (name)
 * segment and reversing the remaining authority segments,
 * e.g. `com.example.fooBar` -> `example.com`.
 */
export declare const nsidToLookupDomain: (nsid: Nsid) => string;
package/dist/utils.js ADDED
@@ -0,0 +1,6 @@
1
/**
 * Derives the DNS domain queried for `_lexicon` TXT records from an NSID:
 * the final (name) segment is dropped and the remaining authority segments
 * are flipped back into conventional domain order,
 * e.g. `com.example.fooBar` -> `example.com`.
 */
export const nsidToLookupDomain = (nsid) => {
  const parts = nsid.split('.');
  parts.pop(); // drop the record/method name segment
  return parts.reverse().join('.');
};
6
+ //# sourceMappingURL=utils.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"utils.js","sourceRoot":"","sources":["../lib/utils.ts"],"names":[],"mappings":"AAEA,MAAM,CAAC,MAAM,kBAAkB,GAAG,CAAC,IAAU,EAAU,EAAE;IACxD,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACjC,yEAAyE;IACzE,OAAO,QAAQ,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAClD,CAAC,CAAC"}
@@ -0,0 +1,142 @@
1
+ import * as v from '@badrap/valita';
2
+
3
+ import { isAtprotoDid } from '@atcute/identity';
4
+ import type { AtprotoDid, Nsid } from '@atcute/lexicons/syntax';
5
+ import { isResponseOk, parseResponseAsJson, pipe, validateJsonWith } from '@atcute/util-fetch';
6
+
7
+ import * as err from '../errors.js';
8
+ import type { LexiconAuthorityResolver, ResolveLexiconAuthorityOptions } from '../types.js';
9
+ import { nsidToLookupDomain } from '../utils.js';
10
+
11
// Unsigned 32-bit integer; DNS TTLs and RR type codes fit this range.
const uint32 = v.number().assert((input) => Number.isInteger(input) && input >= 0 && input <= 2 ** 32 - 1);

// Question section entry of a DoH JSON response; only TXT (type 16) is expected.
const question = v.object({
  name: v.string(),
  type: v.literal(16), // TXT
});

// Answer section entry; `data` is normalized by stripping the surrounding
// quotes and unescaping `\"` sequences that DoH providers emit for TXT data.
const answer = v.object({
  name: v.string(),
  type: v.literal(16), // TXT
  TTL: uint32,
  data: v.string().chain((input) => {
    return v.ok(input.replace(/^"|"$/g, '').replace(/\\"/g, '"'));
  }),
});
26
+
27
// Authority section entry; unlike `answer`, `type` is not restricted to TXT
// since authority records (e.g. SOA) use other RR types.
const authority = v.object({
  name: v.string(),
  type: uint32,
  TTL: uint32,
  data: v.string(),
});
33
+
34
// Shape of a DoH JSON response as returned by Google and Cloudflare.
const result = v.object({
  /** DNS response code */
  Status: uint32,
  /** Whether response is truncated */
  TC: v.boolean(),
  /** Whether recursive desired bit is set, always true for Google and Cloudflare DoH */
  RD: v.boolean(),
  /** Whether recursive available bit is set, always true for Google and Cloudflare DoH */
  RA: v.boolean(),
  /** Whether response data was validated with DNSSEC */
  AD: v.boolean(),
  /** Whether client asked to disable DNSSEC validation */
  CD: v.boolean(),
  /** Requested records */
  Question: v.tuple([question]),
  /** Answers; defaults to an empty array when the section is absent */
  Answer: v.array(answer).optional(() => []),
  /** Authority */
  Authority: v.array(authority).optional(),
  /** Comment from the DNS server */
  Comment: v.string().optional(),
});
56
+
57
// Lexicon authority records are published under the `_lexicon` DNS subdomain.
const SUBDOMAIN = '_lexicon';
// TXT records carrying the authority DID take the form `did=<did>`.
const PREFIX = 'did=';

// Response pipeline: require HTTP success, parse as (dns-)json capped at
// 16 KiB, then validate against the DoH schema (unknown fields passed through).
const fetchDohJsonHandler = pipe(
  isResponseOk,
  parseResponseAsJson(/^application\/(dns-)?json$/, 16 * 1024),
  validateJsonWith(result, { mode: 'passthrough' }),
);
65
+
66
/** Options for constructing a {@link DohJsonLexiconAuthorityResolver}. */
export interface DohJsonLexiconAuthorityResolverOptions {
  /** URL of a DNS-over-HTTPS endpoint speaking the JSON wire format */
  dohUrl: string;
  /** Custom fetch implementation; defaults to the global fetch */
  fetch?: typeof fetch;
}
70
+
71
+ export class DohJsonLexiconAuthorityResolver implements LexiconAuthorityResolver {
72
+ readonly dohUrl: string;
73
+ #fetch: typeof fetch;
74
+
75
+ constructor({ dohUrl, fetch: fetchThis = fetch }: DohJsonLexiconAuthorityResolverOptions) {
76
+ this.dohUrl = dohUrl;
77
+ this.#fetch = fetchThis;
78
+ }
79
+
80
+ async resolve(nsid: Nsid, options?: ResolveLexiconAuthorityOptions): Promise<AtprotoDid> {
81
+ const lookupDomain = nsidToLookupDomain(nsid);
82
+
83
+ let json: v.Infer<typeof result>;
84
+
85
+ try {
86
+ const url = new URL(this.dohUrl);
87
+ url.searchParams.set('name', `${SUBDOMAIN}.${lookupDomain}`);
88
+ url.searchParams.set('type', 'TXT');
89
+
90
+ const response = await (0, this.#fetch)(url, {
91
+ signal: options?.signal,
92
+ cache: options?.noCache ? 'no-cache' : undefined,
93
+ headers: { accept: 'application/dns-json' },
94
+ });
95
+
96
+ const handled = await fetchDohJsonHandler(response);
97
+
98
+ json = handled.json;
99
+ } catch (cause) {
100
+ throw new err.FailedAuthorityResolutionError(nsid, { cause });
101
+ }
102
+
103
+ const status = json.Status;
104
+ const answers = json.Answer;
105
+
106
+ if (status !== 0 /* NOERROR */) {
107
+ if (status === 3 /* NXDOMAIN */) {
108
+ throw new err.AuthorityNotFoundError(nsid);
109
+ }
110
+
111
+ throw new err.FailedAuthorityResolutionError(nsid, {
112
+ cause: new TypeError(`dns returned ${status}`),
113
+ });
114
+ }
115
+
116
+ for (let i = 0, il = answers.length; i < il; i++) {
117
+ const answer = answers[i];
118
+ const data = answer.data;
119
+
120
+ if (!data.startsWith(PREFIX)) {
121
+ continue;
122
+ }
123
+
124
+ for (let j = i + 1; j < il; j++) {
125
+ const data = answers[j].data;
126
+ if (data.startsWith(PREFIX)) {
127
+ throw new err.AmbiguousAuthorityError(nsid);
128
+ }
129
+ }
130
+
131
+ const did = data.slice(PREFIX.length);
132
+ if (!isAtprotoDid(did)) {
133
+ throw new err.InvalidResolvedAuthorityError(nsid, did);
134
+ }
135
+
136
+ return did;
137
+ }
138
+
139
+ // theoretically this shouldn't happen, it should've returned NXDOMAIN
140
+ throw new err.AuthorityNotFoundError(nsid);
141
+ }
142
+ }
@@ -0,0 +1 @@
1
/** NSID of the collection under which lexicon schema records are published. */
export const LEXICON_SCHEMA_COLLECTION = 'com.atproto.lexicon.schema';
package/lib/errors.ts ADDED
@@ -0,0 +1,85 @@
1
+ import type { Nsid } from '@atcute/lexicons/syntax';
2
+
3
+ // #region Lexicon authority resolution errors
4
+ export class LexiconAuthorityResolutionError extends Error {
5
+ override name = 'LexiconAuthorityResolutionError';
6
+ }
7
+
8
+ export class AuthorityNotFoundError extends LexiconAuthorityResolutionError {
9
+ override name = 'AuthorityNotFoundError';
10
+
11
+ constructor(public nsid: Nsid) {
12
+ super(`lexicon authority not found; nsid=${nsid}`);
13
+ }
14
+ }
15
+
16
+ export class FailedAuthorityResolutionError extends LexiconAuthorityResolutionError {
17
+ override name = 'FailedAuthorityResolutionError';
18
+
19
+ constructor(
20
+ public nsid: Nsid,
21
+ options?: ErrorOptions,
22
+ ) {
23
+ super(`failed to resolve lexicon authority; nsid=${nsid}`, options);
24
+ }
25
+ }
26
+
27
+ export class InvalidResolvedAuthorityError extends LexiconAuthorityResolutionError {
28
+ override name = 'InvalidResolvedAuthorityError';
29
+
30
+ constructor(
31
+ public nsid: Nsid,
32
+ public did: string,
33
+ ) {
34
+ super(`lexicon authority returned invalid did; nsid=${nsid}; did=${did}`);
35
+ }
36
+ }
37
+
38
+ export class AmbiguousAuthorityError extends LexiconAuthorityResolutionError {
39
+ override name = 'AmbiguousAuthorityError';
40
+
41
+ constructor(public nsid: Nsid) {
42
+ super(`lexicon authority returned multiple did values; nsid=${nsid}`);
43
+ }
44
+ }
45
+ // #endregion
46
+
47
+ // #region Lexicon resolution errors
48
+ export class LexiconResolutionError extends Error {
49
+ override name = 'LexiconResolutionError';
50
+ }
51
+
52
+
53
+ export class FailedLexiconResolutionError extends LexiconResolutionError {
54
+ override name = 'FailedLexiconResolutionError';
55
+
56
+ constructor(
57
+ public nsid: Nsid,
58
+ options?: ErrorOptions,
59
+ ) {
60
+ super(`failed to resolve lexicon; nsid=${nsid}`, options);
61
+ }
62
+ }
63
+
64
+ export class InvalidLexiconSchemaError extends LexiconResolutionError {
65
+ override name = 'InvalidLexiconSchemaError';
66
+
67
+ constructor(
68
+ public nsid: Nsid,
69
+ options?: ErrorOptions,
70
+ ) {
71
+ super(`invalid lexicon schema; nsid=${nsid}`, options);
72
+ }
73
+ }
74
+
75
+ export class InvalidLexiconProofError extends LexiconResolutionError {
76
+ override name = 'InvalidLexiconProofError';
77
+
78
+ constructor(
79
+ public nsid: Nsid,
80
+ options?: ErrorOptions,
81
+ ) {
82
+ super(`invalid lexicon record proof; nsid=${nsid}`, options);
83
+ }
84
+ }
85
+ // #endregion
package/lib/index.ts ADDED
@@ -0,0 +1,5 @@
1
// Public entrypoint: re-export every module of the package.
export * from './authority/doh-json.js';
export * from './errors.js';
export * from './schemas/xrpc.js';
export * from './types.js';
export * from './utils.js';
@@ -0,0 +1,245 @@
1
+ import * as CAR from '@atcute/car';
2
+ import { CarReader } from '@atcute/car/v4';
3
+ import * as CBOR from '@atcute/cbor';
4
+ import * as CID from '@atcute/cid';
5
+ import { type FoundPublicKey, getPublicKeyFromDidController, verifySig } from '@atcute/crypto';
6
+ import { type DidDocument, getAtprotoVerificationMaterial } from '@atcute/identity';
7
+ import { type AtprotoDid } from '@atcute/lexicons/syntax';
8
+ import { toSha256 } from '@atcute/uint8array';
9
+
10
/** A record that passed commit-signature and MST-inclusion verification. */
export interface VerifiedRecord {
  /** AT-URI of the record */
  uri: string;
  /** CID of the record */
  cid: string;
  /** Record data */
  record: unknown;
}
18
+
19
/** Inputs for {@link verifyRecord}. */
export interface VerifyRecordOptions {
  /** DID the commit in the CAR must match and be signed by */
  did: AtprotoDid;
  /** Collection (NSID) of the record to locate */
  collection: string;
  /** Record key of the record to locate */
  rkey: string;
  /** DID document providing the atproto signing key material */
  didDocument: DidDocument;
  /** CAR bytes containing the signed commit, MST nodes, and record block */
  carBytes: Uint8Array;
}
26
+
27
/**
 * Verifies that a record is included in a signed repository commit, using
 * only the blocks contained in the provided CAR file.
 *
 * Steps: extract the atproto signing key from the DID document, read and
 * CID-check every block in the CAR, check the commit's DID and signature,
 * then walk the MST to prove the record at `collection/rkey` belongs to
 * the commit.
 *
 * @returns The verified record with its AT-URI and CID.
 * @throws Error when any verification step fails.
 */
export const verifyRecord = async ({
  did,
  collection,
  rkey,
  didDocument,
  carBytes,
}: VerifyRecordOptions): Promise<VerifiedRecord> => {
  // grab public key from did document
  let publicKey: FoundPublicKey;
  {
    const controller = getAtprotoVerificationMaterial(didDocument);
    if (!controller) {
      throw new Error(`did document does not contain verification material`);
    }

    publicKey = getPublicKeyFromDidController(controller);
  }

  // read the car
  let blockmap: CAR.BlockMap;
  let commit: CAR.Commit;
  {
    const reader = CarReader.fromUint8Array(carBytes);
    if (reader.header.data.roots.length !== 1) {
      throw new Error(`car must have exactly one root`);
    }

    blockmap = new Map();
    for (const entry of reader) {
      const cidString = CID.toString(entry.cid);

      // Verify that `bytes` matches its associated CID.
      // 85 (raw) / 113 (dag-cbor) are the multicodec codes expected here;
      // the cast assumes no other codec appears in the CAR — any other
      // codec would be recomputed under a mismatched code and rejected.
      const expectedCid = CID.toString(await CID.create(entry.cid.codec as 85 | 113, entry.bytes));
      if (cidString !== expectedCid) {
        throw new Error(`cid does not match bytes`);
      }

      blockmap.set(cidString, entry);
    }

    if (blockmap.size === 0) {
      throw new Error(`car must have at least one block`);
    }

    commit = CAR.readBlock(blockmap, reader.header.data.roots[0], CAR.isCommit);
  }

  // verify did in commit matches the did
  if (commit.did !== did) {
    throw new Error(`did in commit does not match expected did`);
  }

  // verify signature contained in commit is valid
  {
    // the signature covers the CBOR encoding of the commit minus `sig`
    const { sig, ...unsigned } = commit;

    const data = CBOR.encode(unsigned);
    const valid = await verifySig(
      publicKey,
      CBOR.fromBytes(sig) as Uint8Array<ArrayBuffer>,
      data as Uint8Array<ArrayBuffer>,
    );

    if (!valid) {
      throw new Error(`signature verification failed`);
    }
  }

  // find and verify the record in the commit's MST
  const targetKey = `${collection}/${rkey}`;
  const { found } = await dfs(blockmap, commit.data.$link, targetKey);
  if (!found) {
    throw new Error(`could not find record in car`);
  }

  return {
    uri: `at://${did}/${collection}/${rkey}`,
    cid: found.cid,
    record: found.record,
  };
};
108
+
109
/** Result of traversing one MST subtree during {@link dfs}. */
interface DfsResult {
  /** Located record, or false if `targetKey` was not found in this subtree */
  found: false | { cid: string; record: unknown };
  /** Key of the visited node's first entry, if it has any (used for ordering checks) */
  min?: string;
  /** Last key accounted for in this subtree (last entry key or its right subtree's max) */
  max?: string;
  /** Depth shared by all entries of the visited node */
  depth?: number;
}
115
+
116
// Shared UTF-8 codecs for MST key handling.
const encoder = new TextEncoder();
const decoder = new TextDecoder();

/**
 * Depth-first traversal of the MST rooted at `from`, looking for `targetKey`
 * while validating structural invariants: no cycles, lexicographically
 * ordered entries, consistent depth within a node, and strictly decreasing
 * depth from parent to child.
 *
 * Blocks missing from `blockmap` terminate that branch with `found: false`
 * instead of throwing — a proof CAR only carries the blocks on the path to
 * the target record.
 */
const dfs = async (
  blockmap: CAR.BlockMap,
  from: string | undefined,
  targetKey: string,
  visited = new Set<string>(),
): Promise<DfsResult> => {
  // If there's no starting point, return empty state
  if (from == null) {
    return { found: false };
  }

  // Check for cycles
  {
    if (visited.has(from)) {
      throw new Error(`cycle detected; cid=${from}`);
    }

    visited.add(from);
  }

  // Get the block data
  let node: CAR.MstNode;
  {
    const entry = blockmap.get(from);
    if (!entry) {
      return { found: false };
    }

    const decoded = CBOR.decode(entry.bytes);
    if (!CAR.isMstNode(decoded)) {
      throw new Error(`invalid mst node; cid=${from}`);
    }

    node = decoded;
  }

  // Recursively process the left child
  const left = await dfs(blockmap, node.l?.$link, targetKey, visited);

  let key = '';
  let found = left.found;
  let depth: number | undefined;
  let firstKey: string | undefined;
  let lastKey: string | undefined;

  // Process all entries in this node
  for (const entry of node.e) {
    // Construct the key by truncating and appending — entries are
    // prefix-compressed: `p` is the length of the prefix shared with the
    // previous key, `k` the remaining suffix bytes
    key = key.substring(0, entry.p) + decoder.decode(CBOR.fromBytes(entry.k));

    // Check if this is our target key
    if (key === targetKey) {
      const recordBlock = blockmap.get(entry.v.$link);
      if (recordBlock) {
        const record = CBOR.decode(recordBlock.bytes);
        found = { cid: entry.v.$link, record };
      }
    }

    // Calculate depth based on leading zeros in the hash:
    // depth = floor(leading-zero bits of sha256(key) / 2)
    const keyDigest = await toSha256(encoder.encode(key));
    let zeroCount = 0;

    outerLoop: for (const byte of keyDigest) {
      for (let bit = 7; bit >= 0; bit--) {
        if (((byte >> bit) & 1) !== 0) {
          break outerLoop;
        }
        zeroCount++;
      }
    }

    const thisDepth = Math.floor(zeroCount / 2);

    // Ensure consistent depth across every entry of this node
    if (depth === undefined) {
      depth = thisDepth;
    } else if (depth !== thisDepth) {
      throw new Error(`node has entries with different depths; cid=${from}`);
    }

    // Track first and last keys
    if (lastKey === undefined) {
      firstKey = key;
      lastKey = key;
    }

    // Check key ordering against the previous key (or this key on first entry)
    if (lastKey > key) {
      throw new Error(`entries are out of order; cid=${from}`);
    }

    // Process right child
    const right = await dfs(blockmap, entry.t?.$link, targetKey, visited);

    // Check ordering with right subtree
    if (right.min && right.min < lastKey) {
      throw new Error(`entries are out of order; cid=${from}`);
    }

    found = found || right.found;

    // Check depth ordering: children must sit strictly below this node
    if (left.depth !== undefined && left.depth >= thisDepth) {
      throw new Error(`depths are out of order; cid=${from}`);
    }

    if (right.depth !== undefined && right.depth >= thisDepth) {
      throw new Error(`depths are out of order; cid=${from}`);
    }

    // Update last key based on right subtree
    lastKey = right.max ?? key;
  }

  // Check ordering with left subtree
  if (left.max && firstKey && left.max > firstKey) {
    throw new Error(`entries are out of order; cid=${from}`);
  }

  return {
    found,
    min: firstKey,
    max: lastKey,
    depth,
  };
};
@@ -0,0 +1,107 @@
1
+ import { getPdsEndpoint } from '@atcute/identity';
2
+ import type { DidDocumentResolver } from '@atcute/identity-resolver';
3
+ import { lexiconDoc, type LexiconDoc } from '@atcute/lexicon-doc';
4
+ import type { AtprotoDid, Nsid } from '@atcute/lexicons/syntax';
5
+
6
+ import { FailedResponseError } from '@atcute/util-fetch';
7
+
8
+ import { LEXICON_SCHEMA_COLLECTION } from '../constants.js';
9
+ import * as err from '../errors.js';
10
+ import type { ResolvedSchema, ResolveLexiconRecordOptions } from '../types.js';
11
+ import { verifyRecord, type VerifiedRecord } from './verify.js';
12
+
13
/** Options for constructing a {@link LexiconSchemaResolver}. */
export interface LexiconSchemaResolverOptions {
  /** Resolver used to turn the authority DID into a DID document */
  didDocumentResolver: DidDocumentResolver;
  /** Custom fetch implementation; defaults to the global fetch */
  fetch?: typeof fetch;
}
17
+
18
/**
 * Resolves lexicon schema records from the authority's PDS, verifying the
 * accompanying CAR proof before parsing the record as a lexicon document.
 */
export class LexiconSchemaResolver {
  readonly didDocumentResolver: DidDocumentResolver;
  #fetch: typeof fetch;

  constructor({ didDocumentResolver, fetch: fetchThis = fetch }: LexiconSchemaResolverOptions) {
    this.didDocumentResolver = didDocumentResolver;
    this.#fetch = fetchThis;
  }

  /**
   * Fetches and verifies the lexicon schema record `nsid` published by
   * `authority`.
   *
   * @throws err.FailedLexiconResolutionError when the DID document has no
   *   PDS endpoint or the record fetch fails.
   * @throws err.InvalidLexiconProofError when the CAR proof fails verification.
   * @throws err.InvalidLexiconSchemaError when the record is not a valid
   *   lexicon document for `nsid`.
   */
  async resolve(
    authority: AtprotoDid,
    nsid: Nsid,
    options?: ResolveLexiconRecordOptions,
  ): Promise<ResolvedSchema> {
    // Step 1: Resolve DID to get PDS service endpoint
    const didDocument = await this.didDocumentResolver.resolve(authority, {
      signal: options?.signal,
      noCache: options?.noCache,
    });

    const pdsEndpoint = getPdsEndpoint(didDocument);

    if (!pdsEndpoint) {
      throw new err.FailedLexiconResolutionError(nsid, {
        cause: new TypeError(`no pds service in did document; did=${authority}`),
      });
    }

    // Step 2: Fetch the record (as a CAR proof, via com.atproto.sync.getRecord)
    let carBytes: Uint8Array;
    try {
      const url = new URL('/xrpc/com.atproto.sync.getRecord', pdsEndpoint);
      url.searchParams.set('did', authority);
      url.searchParams.set('collection', LEXICON_SCHEMA_COLLECTION);
      url.searchParams.set('rkey', nsid);

      // `(0, fn)` detaches `this` so a global fetch isn't invoked with the
      // resolver instance as its receiver
      const response = await (0, this.#fetch)(url, {
        signal: options?.signal,
        cache: options?.noCache ? 'no-cache' : undefined,
        headers: { accept: 'application/vnd.ipld.car' },
      });

      if (!response.ok) {
        throw new FailedResponseError(response.status, `got http ${response.status}`);
      }

      carBytes = await response.bytes();
    } catch (cause) {
      throw new err.FailedLexiconResolutionError(nsid, { cause });
    }

    // Step 3: Verify record and extract data
    let verifiedRecord: VerifiedRecord;
    try {
      verifiedRecord = await verifyRecord({
        did: authority,
        collection: LEXICON_SCHEMA_COLLECTION,
        rkey: nsid,
        didDocument,
        carBytes,
      });
    } catch (cause) {
      throw new err.InvalidLexiconProofError(nsid, { cause });
    }

    // Step 4: Parse into lexicon schema; the record must self-identify with
    // the expected $type and carry an `id` matching the requested NSID
    const rawSchema = verifiedRecord.record;
    if (
      typeof rawSchema !== 'object' ||
      rawSchema === null ||
      (rawSchema as any).$type !== LEXICON_SCHEMA_COLLECTION ||
      (rawSchema as any).id !== nsid
    ) {
      throw new err.InvalidLexiconSchemaError(nsid);
    }

    let schema: LexiconDoc;
    try {
      schema = lexiconDoc.parse(rawSchema, { mode: 'passthrough' });
    } catch (cause) {
      throw new err.InvalidLexiconSchemaError(nsid, { cause });
    }

    return {
      uri: verifiedRecord.uri,
      cid: verifiedRecord.cid,
      schema,
    };
  }
}
package/lib/types.ts ADDED
@@ -0,0 +1,25 @@
1
+ import type { LexiconDoc } from '@atcute/lexicon-doc';
2
+ import type { AtprotoDid, Nsid } from '@atcute/lexicons/syntax';
3
+
4
/** Options accepted by {@link LexiconAuthorityResolver.resolve}. */
export interface ResolveLexiconAuthorityOptions {
  /** Signal used to abort the resolution */
  signal?: AbortSignal;
  /** Whether to bypass HTTP caches when querying (maps to fetch `no-cache`) */
  noCache?: boolean;
}

/** Resolves the DID holding authority over a lexicon NSID. */
export interface LexiconAuthorityResolver {
  /** Returns the authority DID for `nsid`; rejects if none can be determined. */
  resolve(nsid: Nsid, options?: ResolveLexiconAuthorityOptions): Promise<AtprotoDid>;
}

/** Options accepted when resolving a lexicon schema record. */
export interface ResolveLexiconRecordOptions {
  /** Signal used to abort the resolution */
  signal?: AbortSignal;
  /** Whether to bypass HTTP caches when querying (maps to fetch `no-cache`) */
  noCache?: boolean;
}

/** A lexicon schema record that has been fetched, verified, and parsed. */
export interface ResolvedSchema {
  /** AT-URI of the lexicon record */
  uri: string;
  /** CID of the lexicon record */
  cid: string;
  /** Parsed lexicon schema document */
  schema: LexiconDoc;
}
package/lib/utils.ts ADDED
@@ -0,0 +1,7 @@
1
+ import type { Nsid } from '@atcute/lexicons/syntax';
2
+
3
+ export const nsidToLookupDomain = (nsid: Nsid): string => {
4
+ const segments = nsid.split('.');
5
+ // Remove the last segment (method name) and reverse to get domain format
6
+ return segments.slice(0, -1).reverse().join('.');
7
+ };