@uniforge/core 0.1.0-alpha.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/auth/index.d.cts +165 -0
- package/dist/auth/index.d.ts +165 -0
- package/dist/auth/index.js +443 -0
- package/dist/auth/index.js.map +1 -0
- package/dist/auth/index.mjs +406 -0
- package/dist/auth/index.mjs.map +1 -0
- package/dist/billing/index.d.cts +34 -0
- package/dist/billing/index.d.ts +34 -0
- package/dist/billing/index.js +254 -0
- package/dist/billing/index.js.map +1 -0
- package/dist/billing/index.mjs +225 -0
- package/dist/billing/index.mjs.map +1 -0
- package/dist/config/index.d.cts +12 -0
- package/dist/config/index.d.ts +12 -0
- package/dist/config/index.js +186 -0
- package/dist/config/index.js.map +1 -0
- package/dist/config/index.mjs +156 -0
- package/dist/config/index.mjs.map +1 -0
- package/dist/database/index.d.cts +33 -0
- package/dist/database/index.d.ts +33 -0
- package/dist/database/index.js +127 -0
- package/dist/database/index.js.map +1 -0
- package/dist/database/index.mjs +95 -0
- package/dist/database/index.mjs.map +1 -0
- package/dist/graphql/index.d.cts +36 -0
- package/dist/graphql/index.d.ts +36 -0
- package/dist/graphql/index.js +209 -0
- package/dist/graphql/index.js.map +1 -0
- package/dist/graphql/index.mjs +179 -0
- package/dist/graphql/index.mjs.map +1 -0
- package/dist/index.d.cts +16 -0
- package/dist/index.d.ts +16 -0
- package/dist/index.js +36 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +10 -0
- package/dist/index.mjs.map +1 -0
- package/dist/multi-store/index.d.cts +11 -0
- package/dist/multi-store/index.d.ts +11 -0
- package/dist/multi-store/index.js +473 -0
- package/dist/multi-store/index.js.map +1 -0
- package/dist/multi-store/index.mjs +447 -0
- package/dist/multi-store/index.mjs.map +1 -0
- package/dist/multi-tenant/index.d.cts +23 -0
- package/dist/multi-tenant/index.d.ts +23 -0
- package/dist/multi-tenant/index.js +69 -0
- package/dist/multi-tenant/index.js.map +1 -0
- package/dist/multi-tenant/index.mjs +41 -0
- package/dist/multi-tenant/index.mjs.map +1 -0
- package/dist/performance/index.d.cts +34 -0
- package/dist/performance/index.d.ts +34 -0
- package/dist/performance/index.js +319 -0
- package/dist/performance/index.js.map +1 -0
- package/dist/performance/index.mjs +290 -0
- package/dist/performance/index.mjs.map +1 -0
- package/dist/platform/index.d.cts +25 -0
- package/dist/platform/index.d.ts +25 -0
- package/dist/platform/index.js +91 -0
- package/dist/platform/index.js.map +1 -0
- package/dist/platform/index.mjs +62 -0
- package/dist/platform/index.mjs.map +1 -0
- package/dist/rbac/index.d.cts +24 -0
- package/dist/rbac/index.d.ts +24 -0
- package/dist/rbac/index.js +267 -0
- package/dist/rbac/index.js.map +1 -0
- package/dist/rbac/index.mjs +236 -0
- package/dist/rbac/index.mjs.map +1 -0
- package/dist/schema-CM7mHj_H.d.cts +53 -0
- package/dist/schema-CM7mHj_H.d.ts +53 -0
- package/dist/security/index.d.cts +47 -0
- package/dist/security/index.d.ts +47 -0
- package/dist/security/index.js +505 -0
- package/dist/security/index.js.map +1 -0
- package/dist/security/index.mjs +474 -0
- package/dist/security/index.mjs.map +1 -0
- package/dist/session-storage/index.d.cts +70 -0
- package/dist/session-storage/index.d.ts +70 -0
- package/dist/session-storage/index.js +271 -0
- package/dist/session-storage/index.js.map +1 -0
- package/dist/session-storage/index.mjs +242 -0
- package/dist/session-storage/index.mjs.map +1 -0
- package/dist/webhooks/index.d.cts +89 -0
- package/dist/webhooks/index.d.ts +89 -0
- package/dist/webhooks/index.js +380 -0
- package/dist/webhooks/index.js.map +1 -0
- package/dist/webhooks/index.mjs +348 -0
- package/dist/webhooks/index.mjs.map +1 -0
- package/package.json +119 -0
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
// src/database/client.ts
import { PrismaClient } from "@prisma/client";

// Module-level singleton PrismaClient. createPrismaClient() populates it on
// first call and returns it thereafter; disconnectPrisma() clears it back to
// null. (Bundled output — presumably tsup/esbuild — of src/database/client.ts.)
var client = null;
|
|
4
|
+
/**
 * Create or return the shared PrismaClient singleton.
 *
 * Logging verbosity follows NODE_ENV: "development" enables query/warn/error
 * logs, anything else logs errors only. An optional datasource URL is applied
 * only when a new client is constructed.
 *
 * NOTE(review): once the singleton exists, a different `url` argument is
 * silently ignored — the existing client is returned as-is.
 *
 * @param {string} [url] optional datasource URL override
 * @returns the shared PrismaClient instance
 */
function createPrismaClient(url) {
  if (client) {
    return client;
  }
  const logLevels =
    process.env.NODE_ENV === "development" ? ["query", "warn", "error"] : ["error"];
  // Only attach datasourceUrl when a URL was actually supplied.
  const options = url ? { log: logLevels, datasourceUrl: url } : { log: logLevels };
  client = new PrismaClient(options);
  return client;
}
|
|
15
|
+
/**
 * Disconnect and discard the singleton PrismaClient.
 *
 * The module-level reference is cleared *before* awaiting the disconnect so a
 * concurrent createPrismaClient() call builds a fresh client rather than
 * reusing the one being torn down.
 *
 * @returns {Promise<void>} resolves when disconnected (immediately if no client)
 */
function disconnectPrisma() {
  const active = client;
  if (!active) {
    return Promise.resolve();
  }
  client = null;
  return active.$disconnect();
}
|
|
21
|
+
|
|
22
|
+
// src/database/utils.ts
|
|
23
|
+
/**
 * Convert a platform-core Shop into a Prisma-compatible record shape.
 *
 * Picks exactly the eight persisted fields; any extra properties on the
 * input are dropped. Values are copied through unchanged.
 *
 * @param shop platform-core Shop object
 * @returns plain record object for Prisma (no `id`)
 */
function shopToRecord(shop) {
  const {
    shopDomain,
    isInstalled,
    installedAt,
    uninstalledAt,
    scopes,
    shopifyPlan,
    createdAt,
    updatedAt
  } = shop;
  return {
    shopDomain,
    isInstalled,
    installedAt,
    uninstalledAt,
    scopes,
    shopifyPlan,
    createdAt,
    updatedAt
  };
}
|
|
35
|
+
/**
 * Convert a Prisma Shop record back into a platform-core Shop.
 *
 * Nullable columns (installedAt, uninstalledAt, shopifyPlan) are normalized
 * so that undefined becomes null; all other values pass through unchanged.
 *
 * @param record Prisma Shop row
 * @returns platform-core Shop object
 */
function recordToShop(record) {
  // undefined -> null; keeps every other value (including falsy ones) intact.
  const orNull = (value) => value ?? null;
  return {
    shopDomain: record.shopDomain,
    isInstalled: record.isInstalled,
    installedAt: orNull(record.installedAt),
    uninstalledAt: orNull(record.uninstalledAt),
    scopes: record.scopes,
    shopifyPlan: orNull(record.shopifyPlan),
    createdAt: record.createdAt,
    updatedAt: record.updatedAt
  };
}
|
|
47
|
+
/**
 * Convert a platform-core Session into a Prisma-compatible record.
 *
 * Renames `session.shop` to `shopDomain`. Optional token fields are
 * normalized from undefined to null; onlineAccessInfo is stored as-is when
 * truthy, otherwise null.
 *
 * @param session platform-core Session object
 * @returns plain record object for Prisma (no createdAt/updatedAt)
 */
function sessionToRecord(session) {
  const orNull = (value) => value ?? null;
  return {
    id: session.id,
    shopDomain: session.shop,
    state: session.state,
    isOnline: session.isOnline,
    scope: session.scope,
    expires: session.expires,
    accessToken: orNull(session.accessToken),
    refreshToken: orNull(session.refreshToken),
    refreshTokenExpiresAt: orNull(session.refreshTokenExpiresAt),
    // Truthy check (not nullish): any falsy onlineAccessInfo becomes null.
    onlineAccessInfo: session.onlineAccessInfo || null
  };
}
|
|
62
|
+
/**
 * Convert a Prisma Session record back into a platform-core Session.
 *
 * Renames `shopDomain` to `shop` and normalizes `expires` from undefined to
 * null. The optional token/access-info fields are only attached when truthy —
 * a falsy stored value (null, empty string) leaves the property absent on the
 * resulting Session rather than present-but-null.
 *
 * @param record Prisma Session row
 * @returns platform-core Session object
 */
function recordToSession(record) {
  const session = {
    id: record.id,
    shop: record.shopDomain,
    state: record.state,
    isOnline: record.isOnline,
    scope: record.scope,
    expires: record.expires ?? null,
    createdAt: record.createdAt,
    updatedAt: record.updatedAt
  };
  // Copy each optional field only when it holds a truthy value.
  for (const key of [
    "accessToken",
    "refreshToken",
    "refreshTokenExpiresAt",
    "onlineAccessInfo"
  ]) {
    const value = record[key];
    if (value) {
      session[key] = value;
    }
  }
  return session;
}
|
|
87
|
+
export {
|
|
88
|
+
createPrismaClient,
|
|
89
|
+
disconnectPrisma,
|
|
90
|
+
recordToSession,
|
|
91
|
+
recordToShop,
|
|
92
|
+
sessionToRecord,
|
|
93
|
+
shopToRecord
|
|
94
|
+
};
|
|
95
|
+
//# sourceMappingURL=index.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/database/client.ts","../../src/database/utils.ts"],"sourcesContent":["/**\n * PrismaClient singleton factory.\n *\n * Provides a single shared PrismaClient instance across the application.\n * Logging verbosity is controlled by NODE_ENV.\n */\n\nimport { PrismaClient } from '@prisma/client';\n\nlet client: PrismaClient | null = null;\n\n/** Create or return the singleton PrismaClient. */\nexport function createPrismaClient(url?: string): PrismaClient {\n if (client) return client;\n\n const options: ConstructorParameters<typeof PrismaClient>[0] = {\n log:\n process.env.NODE_ENV === 'development'\n ? ['query', 'warn', 'error']\n : ['error'],\n };\n if (url) {\n options.datasourceUrl = url;\n }\n\n client = new PrismaClient(options);\n return client;\n}\n\n/** Disconnect and discard the singleton PrismaClient. */\nexport function disconnectPrisma(): Promise<void> {\n if (!client) return Promise.resolve();\n const c = client;\n client = null;\n return c.$disconnect();\n}\n","/**\n * Converters between platform-core domain types and Prisma record types.\n *\n * The Prisma models use camelCase field names (shopDomain, isOnline, etc.)\n * while platform-core types use slightly different shapes (e.g. session.shop\n * vs record.shopDomain, onlineAccessInfo as object vs JSON).\n */\n\nimport type { Session, Shop } from '@uniforge/platform-core/auth';\nimport type {\n Shop as PrismaShop,\n Session as PrismaSession,\n Prisma,\n} from '@prisma/client';\n\n// ---------------------------------------------------------------------------\n// Shop converters\n// ---------------------------------------------------------------------------\n\n/** Convert a platform-core Shop to a Prisma-compatible record. 
*/\nexport function shopToRecord(\n shop: Shop,\n): Omit<PrismaShop, 'id'> {\n return {\n shopDomain: shop.shopDomain,\n isInstalled: shop.isInstalled,\n installedAt: shop.installedAt,\n uninstalledAt: shop.uninstalledAt,\n scopes: shop.scopes,\n shopifyPlan: shop.shopifyPlan,\n createdAt: shop.createdAt,\n updatedAt: shop.updatedAt,\n };\n}\n\n/** Convert a Prisma Shop record to a platform-core Shop. */\nexport function recordToShop(record: PrismaShop): Shop {\n return {\n shopDomain: record.shopDomain,\n isInstalled: record.isInstalled,\n installedAt: record.installedAt ?? null,\n uninstalledAt: record.uninstalledAt ?? null,\n scopes: record.scopes,\n shopifyPlan: record.shopifyPlan ?? null,\n createdAt: record.createdAt,\n updatedAt: record.updatedAt,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Session converters\n// ---------------------------------------------------------------------------\n\n/** Convert a platform-core Session to a Prisma-compatible create/update input. */\nexport function sessionToRecord(\n session: Session,\n): Omit<PrismaSession, 'createdAt' | 'updatedAt'> {\n const record: Omit<PrismaSession, 'createdAt' | 'updatedAt'> = {\n id: session.id,\n shopDomain: session.shop,\n state: session.state,\n isOnline: session.isOnline,\n scope: session.scope,\n expires: session.expires,\n accessToken: session.accessToken ?? null,\n refreshToken: session.refreshToken ?? null,\n refreshTokenExpiresAt: session.refreshTokenExpiresAt ?? null,\n onlineAccessInfo: session.onlineAccessInfo\n ? (session.onlineAccessInfo as unknown as Prisma.JsonValue)\n : null,\n };\n\n return record;\n}\n\n/** Convert a Prisma Session record to a platform-core Session. */\nexport function recordToSession(record: PrismaSession): Session {\n const session: Session = {\n id: record.id,\n shop: record.shopDomain,\n state: record.state,\n isOnline: record.isOnline,\n scope: record.scope,\n expires: record.expires ?? 
null,\n createdAt: record.createdAt,\n updatedAt: record.updatedAt,\n };\n\n if (record.accessToken) {\n session.accessToken = record.accessToken;\n }\n if (record.refreshToken) {\n session.refreshToken = record.refreshToken;\n }\n if (record.refreshTokenExpiresAt) {\n session.refreshTokenExpiresAt = record.refreshTokenExpiresAt;\n }\n if (record.onlineAccessInfo) {\n session.onlineAccessInfo = record.onlineAccessInfo as unknown as NonNullable<Session['onlineAccessInfo']>;\n }\n\n return session;\n}\n"],"mappings":";AAOA,SAAS,oBAAoB;AAE7B,IAAI,SAA8B;AAG3B,SAAS,mBAAmB,KAA4B;AAC7D,MAAI,OAAQ,QAAO;AAEnB,QAAM,UAAyD;AAAA,IAC7D,KACE,QAAQ,IAAI,aAAa,gBACrB,CAAC,SAAS,QAAQ,OAAO,IACzB,CAAC,OAAO;AAAA,EAChB;AACA,MAAI,KAAK;AACP,YAAQ,gBAAgB;AAAA,EAC1B;AAEA,WAAS,IAAI,aAAa,OAAO;AACjC,SAAO;AACT;AAGO,SAAS,mBAAkC;AAChD,MAAI,CAAC,OAAQ,QAAO,QAAQ,QAAQ;AACpC,QAAM,IAAI;AACV,WAAS;AACT,SAAO,EAAE,YAAY;AACvB;;;ACfO,SAAS,aACd,MACwB;AACxB,SAAO;AAAA,IACL,YAAY,KAAK;AAAA,IACjB,aAAa,KAAK;AAAA,IAClB,aAAa,KAAK;AAAA,IAClB,eAAe,KAAK;AAAA,IACpB,QAAQ,KAAK;AAAA,IACb,aAAa,KAAK;AAAA,IAClB,WAAW,KAAK;AAAA,IAChB,WAAW,KAAK;AAAA,EAClB;AACF;AAGO,SAAS,aAAa,QAA0B;AACrD,SAAO;AAAA,IACL,YAAY,OAAO;AAAA,IACnB,aAAa,OAAO;AAAA,IACpB,aAAa,OAAO,eAAe;AAAA,IACnC,eAAe,OAAO,iBAAiB;AAAA,IACvC,QAAQ,OAAO;AAAA,IACf,aAAa,OAAO,eAAe;AAAA,IACnC,WAAW,OAAO;AAAA,IAClB,WAAW,OAAO;AAAA,EACpB;AACF;AAOO,SAAS,gBACd,SACgD;AAChD,QAAM,SAAyD;AAAA,IAC7D,IAAI,QAAQ;AAAA,IACZ,YAAY,QAAQ;AAAA,IACpB,OAAO,QAAQ;AAAA,IACf,UAAU,QAAQ;AAAA,IAClB,OAAO,QAAQ;AAAA,IACf,SAAS,QAAQ;AAAA,IACjB,aAAa,QAAQ,eAAe;AAAA,IACpC,cAAc,QAAQ,gBAAgB;AAAA,IACtC,uBAAuB,QAAQ,yBAAyB;AAAA,IACxD,kBAAkB,QAAQ,mBACrB,QAAQ,mBACT;AAAA,EACN;AAEA,SAAO;AACT;AAGO,SAAS,gBAAgB,QAAgC;AAC9D,QAAM,UAAmB;AAAA,IACvB,IAAI,OAAO;AAAA,IACX,MAAM,OAAO;AAAA,IACb,OAAO,OAAO;AAAA,IACd,UAAU,OAAO;AAAA,IACjB,OAAO,OAAO;AAAA,IACd,SAAS,OAAO,WAAW;AAAA,IAC3B,WAAW,OAAO;AAAA,IAClB,WAAW,OAAO;AAAA,EACpB;AAEA,MAAI,OAAO,aAAa;AACtB,YAAQ,cAAc,OAAO;AAAA,EAC/B;AACA,MAAI,OAAO,cAAc;AACvB,YAAQ,eAAe,OAAO;AAAA,EAChC;AACA,MAAI,OAAO,uBAAuB;AAC
hC,YAAQ,wBAAwB,OAAO;AAAA,EACzC;AACA,MAAI,OAAO,kBAAkB;AAC3B,YAAQ,mBAAmB,OAAO;AAAA,EACpC;AAEA,SAAO;AACT;","names":[]}
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import { GraphQLClientConfig, GraphQLClient, RateLimitConfig, RateLimiter, CacheConfig, GraphQLCache } from '@uniforge/platform-core/graphql';
|
|
2
|
+
import { Redis } from 'ioredis';
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* Base GraphQL client implementation using Node 20+ built-in fetch.
|
|
6
|
+
*
|
|
7
|
+
* Provides retry logic with exponential backoff for 429 responses
|
|
8
|
+
* and timeout support via AbortController.
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
/** Create a base GraphQL client that communicates via HTTP POST. */
|
|
12
|
+
declare function createGraphQLClient(config: GraphQLClientConfig): GraphQLClient;
|
|
13
|
+
|
|
14
|
+
/**
|
|
15
|
+
* Leaky bucket rate limiter for Shopify GraphQL API.
|
|
16
|
+
*
|
|
17
|
+
* Tracks available query cost points and throttles requests when
|
|
18
|
+
* points are low, restoring them over time based on Shopify's restore rate.
|
|
19
|
+
*/
|
|
20
|
+
|
|
21
|
+
/** Create an in-memory rate limiter that tracks Shopify query cost points. */
|
|
22
|
+
declare function createRateLimiter(config?: RateLimitConfig): RateLimiter;
|
|
23
|
+
|
|
24
|
+
/**
|
|
25
|
+
* Redis-based GraphQL response cache.
|
|
26
|
+
*
|
|
27
|
+
* Caches query results keyed by SHA256 hash of query + variables.
|
|
28
|
+
* Supports TTL-based expiration and pattern-based invalidation.
|
|
29
|
+
*/
|
|
30
|
+
|
|
31
|
+
/** Generate a cache key from a GraphQL query and variables. */
|
|
32
|
+
declare function generateCacheKey(prefix: string, query: string, variables?: Record<string, unknown>): string;
|
|
33
|
+
/** Create a Redis-backed GraphQL cache. */
|
|
34
|
+
declare function createRedisGraphQLCache(redis: Redis, config?: CacheConfig): GraphQLCache;
|
|
35
|
+
|
|
36
|
+
export { createGraphQLClient, createRateLimiter, createRedisGraphQLCache, generateCacheKey };
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import { GraphQLClientConfig, GraphQLClient, RateLimitConfig, RateLimiter, CacheConfig, GraphQLCache } from '@uniforge/platform-core/graphql';
|
|
2
|
+
import { Redis } from 'ioredis';
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* Base GraphQL client implementation using Node 20+ built-in fetch.
|
|
6
|
+
*
|
|
7
|
+
* Provides retry logic with exponential backoff for 429 responses
|
|
8
|
+
* and timeout support via AbortController.
|
|
9
|
+
*/
|
|
10
|
+
|
|
11
|
+
/** Create a base GraphQL client that communicates via HTTP POST. */
|
|
12
|
+
declare function createGraphQLClient(config: GraphQLClientConfig): GraphQLClient;
|
|
13
|
+
|
|
14
|
+
/**
|
|
15
|
+
* Leaky bucket rate limiter for Shopify GraphQL API.
|
|
16
|
+
*
|
|
17
|
+
* Tracks available query cost points and throttles requests when
|
|
18
|
+
* points are low, restoring them over time based on Shopify's restore rate.
|
|
19
|
+
*/
|
|
20
|
+
|
|
21
|
+
/** Create an in-memory rate limiter that tracks Shopify query cost points. */
|
|
22
|
+
declare function createRateLimiter(config?: RateLimitConfig): RateLimiter;
|
|
23
|
+
|
|
24
|
+
/**
|
|
25
|
+
* Redis-based GraphQL response cache.
|
|
26
|
+
*
|
|
27
|
+
* Caches query results keyed by SHA256 hash of query + variables.
|
|
28
|
+
* Supports TTL-based expiration and pattern-based invalidation.
|
|
29
|
+
*/
|
|
30
|
+
|
|
31
|
+
/** Generate a cache key from a GraphQL query and variables. */
|
|
32
|
+
declare function generateCacheKey(prefix: string, query: string, variables?: Record<string, unknown>): string;
|
|
33
|
+
/** Create a Redis-backed GraphQL cache. */
|
|
34
|
+
declare function createRedisGraphQLCache(redis: Redis, config?: CacheConfig): GraphQLCache;
|
|
35
|
+
|
|
36
|
+
export { createGraphQLClient, createRateLimiter, createRedisGraphQLCache, generateCacheKey };
|
|
@@ -0,0 +1,209 @@
|
|
|
1
|
+
"use strict";
// --- Bundler-generated (esbuild-style) CommonJS interop helpers. ---
// Do not hand-edit: this file is built output; the source of truth is
// src/graphql/*.ts (see the adjacent source map).
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define each entry of `all` as a lazy, enumerable getter on `target`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties of `from` onto `to` as getters, skipping `except` and
// any key already present; preserves each property's enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wrap a module namespace object with `__esModule: true` for ESM interop.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/graphql/index.ts
// Public surface of the graphql entry point, wired through the helpers above.
var graphql_exports = {};
__export(graphql_exports, {
  createGraphQLClient: () => createGraphQLClient,
  createRateLimiter: () => createRateLimiter,
  createRedisGraphQLCache: () => createRedisGraphQLCache,
  generateCacheKey: () => generateCacheKey
});
module.exports = __toCommonJS(graphql_exports);
|
|
29
|
+
|
|
30
|
+
// src/graphql/client.ts
var DEFAULT_MAX_RETRIES = 3;
var DEFAULT_TIMEOUT_MS = 3e4; // 30s per-attempt timeout
var BASE_RETRY_DELAY_MS = 1e3; // first backoff step; doubles each attempt

/**
 * Create a GraphQL client bound to one shop + Admin API version.
 *
 * Each request POSTs to
 * https://{shop}/admin/api/{apiVersion}/graphql.json with the shop access
 * token, retrying with exponential backoff (1s, 2s, 4s, ...) on HTTP 429 and
 * aborting any single attempt after `timeoutMs` via AbortController.
 *
 * Error routing (subtle — every `throw` inside the try lands in the catch):
 *  - Timeout: the AbortError is converted to a plain Error and thrown
 *    immediately; it is NOT retried.
 *  - 429 past the final attempt: the "throttled" Error is thrown in the try,
 *    caught below, and rethrown because `attempt >= maxRetries` then holds.
 *  - Any other non-OK status: the Error is caught below and, when attempts
 *    remain, the loop retries it. NOTE(review): this means 4xx errors such as
 *    401/403 are retried too — confirm that is intended.
 *
 * @param config { shop, apiVersion, accessToken, maxRetries?, timeoutMs? }
 * @returns { query, mutate } — both delegate to the same execute() helper
 */
function createGraphQLClient(config) {
  const maxRetries = config.maxRetries ?? DEFAULT_MAX_RETRIES;
  const timeoutMs = config.timeoutMs ?? DEFAULT_TIMEOUT_MS;
  const endpoint = `https://${config.shop}/admin/api/${config.apiVersion}/graphql.json`;
  // Single attempt loop shared by query and mutate.
  async function execute(query, variables) {
    let lastError;
    for (let attempt = 0; attempt <= maxRetries; attempt++) {
      const controller = new AbortController();
      // Abort this attempt if it exceeds the configured timeout.
      const timer = setTimeout(() => controller.abort(), timeoutMs);
      try {
        const response = await fetch(endpoint, {
          method: "POST",
          headers: {
            "Content-Type": "application/json",
            "X-Shopify-Access-Token": config.accessToken
          },
          body: JSON.stringify({ query, variables }),
          signal: controller.signal
        });
        if (response.status === 429) {
          if (attempt < maxRetries) {
            // Exponential backoff before the next attempt.
            const delay = BASE_RETRY_DELAY_MS * Math.pow(2, attempt);
            await sleep(delay);
            continue;
          }
          // Caught by the catch below and rethrown (attempt >= maxRetries).
          throw new Error(
            `GraphQL request throttled after ${maxRetries + 1} attempts`
          );
        }
        if (!response.ok) {
          // Also caught below; retried while attempts remain.
          throw new Error(
            `GraphQL request failed with status ${response.status}: ${response.statusText}`
          );
        }
        const json = await response.json();
        return json;
      } catch (error) {
        // Timeout is terminal: surface it without retrying.
        if (error instanceof DOMException && error.name === "AbortError") {
          throw new Error(
            `GraphQL request timed out after ${timeoutMs}ms`
          );
        }
        lastError = error instanceof Error ? error : new Error(String(error));
        if (attempt >= maxRetries) {
          throw lastError;
        }
      } finally {
        // Always cancel the abort timer for this attempt.
        clearTimeout(timer);
      }
    }
    // Defensive: loop exits only via return/throw above, but keep a fallback.
    throw lastError ?? new Error("GraphQL request failed");
  }
  return {
    query: (query, variables) => execute(query, variables),
    mutate: (mutation, variables) => execute(mutation, variables)
  };
}
|
|
91
|
+
/**
 * Resolve after approximately `ms` milliseconds.
 * @param {number} ms delay in milliseconds
 * @returns {Promise<void>}
 */
function sleep(ms) {
  return new Promise(function (resolve) {
    setTimeout(resolve, ms);
  });
}
|
|
94
|
+
|
|
95
|
+
// src/graphql/rate-limiter.ts
var DEFAULT_MAX_AVAILABLE = 1e3;
var DEFAULT_RESTORE_RATE = 50;
var DEFAULT_MINIMUM_AVAILABLE = 100;

/**
 * In-memory leaky-bucket limiter mirroring Shopify's GraphQL cost throttle.
 *
 * The pool starts full at `maxAvailable` and refills continuously at
 * `restoreRatePerSecond`. acquire() waits (once) just long enough for the
 * pool to climb back to `minimumAvailable`; it never deducts points itself —
 * the caller feeds actual costs back via updateFromResponse().
 *
 * @param config optional { maxAvailable, restoreRatePerSecond, minimumAvailable }
 * @returns { acquire, getStatus, updateFromResponse }
 */
function createRateLimiter(config) {
  const capacity = config?.maxAvailable ?? DEFAULT_MAX_AVAILABLE;
  const refillPerSecond = config?.restoreRatePerSecond ?? DEFAULT_RESTORE_RATE;
  const lowWatermark = config?.minimumAvailable ?? DEFAULT_MINIMUM_AVAILABLE;
  let points = capacity;
  let stampMs = Date.now();
  // Credit elapsed time back into the pool, capped at capacity.
  const refill = () => {
    const nowMs = Date.now();
    points = Math.min(capacity, points + ((nowMs - stampMs) / 1e3) * refillPerSecond);
    stampMs = nowMs;
  };
  return {
    /** Resolve immediately when above the watermark; otherwise wait it out. */
    acquire() {
      refill();
      if (points >= lowWatermark) {
        return Promise.resolve();
      }
      const waitMs = ((lowWatermark - points) / refillPerSecond) * 1e3;
      return new Promise((resolve) => setTimeout(resolve, waitMs)).then(() => {
        refill();
      });
    },
    /** Snapshot of the pool after crediting elapsed time. */
    getStatus() {
      refill();
      return {
        available: Math.floor(points),
        maximum: capacity,
        restoreRate: refillPerSecond
      };
    },
    /** Sync local state to the authoritative cost info from a response. */
    updateFromResponse(cost) {
      const throttle = cost?.throttleStatus;
      if (throttle) {
        points = throttle.currentlyAvailable;
        stampMs = Date.now();
      }
    }
  };
}
|
|
136
|
+
|
|
137
|
+
// src/graphql/cache.ts
var import_node_crypto = require("crypto");
var DEFAULT_TTL_SECONDS = 300; // default cache entry lifetime: 5 minutes
var DEFAULT_KEY_PREFIX = "uniforge:gql:"; // namespace for cache keys in Redis
var SCAN_COUNT = 100; // batch size hint for Redis SCAN during invalidation
|
|
142
|
+
/**
 * Build a cache key for a GraphQL query.
 *
 * The key is `prefix` followed by the hex SHA-256 digest of the query text,
 * with the JSON-serialized variables appended when variables are supplied.
 * Deterministic for identical (query, variables) pairs.
 *
 * @param {string} prefix key namespace, prepended verbatim
 * @param {string} query GraphQL document text
 * @param {object} [variables] query variables folded into the digest
 * @returns {string} prefix + 64 hex chars
 */
function generateCacheKey(prefix, query, variables) {
  let payload = query;
  if (variables) {
    payload = query + JSON.stringify(variables);
  }
  const digest = import_node_crypto.createHash("sha256").update(payload).digest("hex");
  return `${prefix}${digest}`;
}
|
|
147
|
+
/**
 * Create a Redis-backed GraphQL response cache.
 *
 * Entries are stored as JSON strings; `set` applies an EX ttl when the
 * effective ttl is positive, otherwise stores without expiry. `invalidate`
 * and `clear` walk the keyspace with cursor-based SCAN (non-blocking, unlike
 * KEYS) and DEL matching keys in batches.
 *
 * NOTE(review): `get`/`set` use the key verbatim while `invalidate`/`clear`
 * prepend `keyPrefix` to the pattern — callers are expected to pass already-
 * prefixed keys (e.g. from generateCacheKey) to get/set; confirm.
 *
 * Fix vs. previous version: the SCAN/DEL loop was duplicated byte-for-byte
 * in `invalidate` and `clear`; it is now a single shared helper.
 *
 * @param redis ioredis-compatible client (get/set/scan/del)
 * @param config optional { defaultTtlSeconds, keyPrefix }
 * @returns { get, set, invalidate, clear }
 */
function createRedisGraphQLCache(redis, config) {
  const defaultTtl = config?.defaultTtlSeconds ?? DEFAULT_TTL_SECONDS;
  const keyPrefix = config?.keyPrefix ?? DEFAULT_KEY_PREFIX;
  // Delete every key matching `scanPattern`, one SCAN page at a time.
  async function deleteByPattern(scanPattern) {
    let cursor = "0";
    do {
      const [nextCursor, keys] = await redis.scan(
        cursor,
        "MATCH",
        scanPattern,
        "COUNT",
        SCAN_COUNT
      );
      cursor = nextCursor;
      if (keys.length > 0) {
        await redis.del(...keys);
      }
    } while (cursor !== "0");
  }
  return {
    /** Fetch and JSON-parse a cached value; undefined on miss. */
    async get(key) {
      const raw = await redis.get(key);
      if (raw === null) {
        return void 0;
      }
      return JSON.parse(raw);
    },
    /** Store a JSON-serialized value with an optional per-call ttl override. */
    async set(key, value, ttlSeconds) {
      const ttl = ttlSeconds ?? defaultTtl;
      if (ttl > 0) {
        await redis.set(key, JSON.stringify(value), "EX", ttl);
      } else {
        await redis.set(key, JSON.stringify(value));
      }
    },
    /** Delete all keys matching keyPrefix + pattern. */
    async invalidate(pattern) {
      await deleteByPattern(`${keyPrefix}${pattern}`);
    },
    /** Delete every key under keyPrefix. */
    async clear() {
      await deleteByPattern(`${keyPrefix}*`);
    }
  };
}
|
|
202
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
203
|
+
0 && (module.exports = {
|
|
204
|
+
createGraphQLClient,
|
|
205
|
+
createRateLimiter,
|
|
206
|
+
createRedisGraphQLCache,
|
|
207
|
+
generateCacheKey
|
|
208
|
+
});
|
|
209
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/graphql/index.ts","../../src/graphql/client.ts","../../src/graphql/rate-limiter.ts","../../src/graphql/cache.ts"],"sourcesContent":["/**\n * @uniforge/core - GraphQL\n *\n * Core GraphQL client, rate limiter, and caching implementations.\n */\n\n// Base client\nexport { createGraphQLClient } from './client';\n\n// Rate limiter\nexport { createRateLimiter } from './rate-limiter';\n\n// Cache\nexport { createRedisGraphQLCache, generateCacheKey } from './cache';\n","/**\n * Base GraphQL client implementation using Node 20+ built-in fetch.\n *\n * Provides retry logic with exponential backoff for 429 responses\n * and timeout support via AbortController.\n */\n\nimport type {\n GraphQLClient,\n GraphQLClientConfig,\n GraphQLResponse,\n} from '@uniforge/platform-core/graphql';\n\nconst DEFAULT_MAX_RETRIES = 3;\nconst DEFAULT_TIMEOUT_MS = 30_000;\nconst BASE_RETRY_DELAY_MS = 1_000;\n\n/** Create a base GraphQL client that communicates via HTTP POST. */\nexport function createGraphQLClient(config: GraphQLClientConfig): GraphQLClient {\n const maxRetries = config.maxRetries ?? DEFAULT_MAX_RETRIES;\n const timeoutMs = config.timeoutMs ?? 
DEFAULT_TIMEOUT_MS;\n const endpoint = `https://${config.shop}/admin/api/${config.apiVersion}/graphql.json`;\n\n async function execute<T>(\n query: string,\n variables?: Record<string, unknown>,\n ): Promise<GraphQLResponse<T>> {\n let lastError: Error | undefined;\n\n for (let attempt = 0; attempt <= maxRetries; attempt++) {\n const controller = new AbortController();\n const timer = setTimeout(() => controller.abort(), timeoutMs);\n\n try {\n const response = await fetch(endpoint, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'X-Shopify-Access-Token': config.accessToken,\n },\n body: JSON.stringify({ query, variables }),\n signal: controller.signal,\n });\n\n if (response.status === 429) {\n if (attempt < maxRetries) {\n const delay = BASE_RETRY_DELAY_MS * Math.pow(2, attempt);\n await sleep(delay);\n continue;\n }\n throw new Error(\n `GraphQL request throttled after ${maxRetries + 1} attempts`,\n );\n }\n\n if (!response.ok) {\n throw new Error(\n `GraphQL request failed with status ${response.status}: ${response.statusText}`,\n );\n }\n\n const json = (await response.json()) as GraphQLResponse<T>;\n return json;\n } catch (error) {\n if (error instanceof DOMException && error.name === 'AbortError') {\n throw new Error(\n `GraphQL request timed out after ${timeoutMs}ms`,\n );\n }\n lastError = error instanceof Error ? error : new Error(String(error));\n if (attempt >= maxRetries) {\n throw lastError;\n }\n } finally {\n clearTimeout(timer);\n }\n }\n\n throw lastError ?? 
new Error('GraphQL request failed');\n }\n\n return {\n query: <T>(query: string, variables?: Record<string, unknown>) =>\n execute<T>(query, variables),\n mutate: <T>(mutation: string, variables?: Record<string, unknown>) =>\n execute<T>(mutation, variables),\n };\n}\n\nfunction sleep(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\n","/**\n * Leaky bucket rate limiter for Shopify GraphQL API.\n *\n * Tracks available query cost points and throttles requests when\n * points are low, restoring them over time based on Shopify's restore rate.\n */\n\nimport type {\n QueryCost,\n RateLimiter,\n RateLimitConfig,\n RateLimitStatus,\n} from '@uniforge/platform-core/graphql';\n\nconst DEFAULT_MAX_AVAILABLE = 1_000;\nconst DEFAULT_RESTORE_RATE = 50;\nconst DEFAULT_MINIMUM_AVAILABLE = 100;\n\n/** Create an in-memory rate limiter that tracks Shopify query cost points. */\nexport function createRateLimiter(config?: RateLimitConfig): RateLimiter {\n const maximum = config?.maxAvailable ?? DEFAULT_MAX_AVAILABLE;\n const restoreRate = config?.restoreRatePerSecond ?? DEFAULT_RESTORE_RATE;\n const minimumAvailable = config?.minimumAvailable ?? 
DEFAULT_MINIMUM_AVAILABLE;\n\n let available = maximum;\n let lastUpdateTime = Date.now();\n\n function restorePoints(): void {\n const now = Date.now();\n const elapsed = (now - lastUpdateTime) / 1_000;\n available = Math.min(maximum, available + elapsed * restoreRate);\n lastUpdateTime = now;\n }\n\n return {\n async acquire(): Promise<void> {\n restorePoints();\n\n if (available < minimumAvailable) {\n const waitMs = ((minimumAvailable - available) / restoreRate) * 1_000;\n await new Promise<void>((resolve) => setTimeout(resolve, waitMs));\n restorePoints();\n }\n },\n\n getStatus(): RateLimitStatus {\n restorePoints();\n return {\n available: Math.floor(available),\n maximum,\n restoreRate,\n };\n },\n\n updateFromResponse(cost?: QueryCost): void {\n if (cost?.throttleStatus) {\n available = cost.throttleStatus.currentlyAvailable;\n lastUpdateTime = Date.now();\n }\n },\n };\n}\n","/**\n * Redis-based GraphQL response cache.\n *\n * Caches query results keyed by SHA256 hash of query + variables.\n * Supports TTL-based expiration and pattern-based invalidation.\n */\n\nimport { createHash } from 'node:crypto';\nimport type { Redis } from 'ioredis';\nimport type {\n GraphQLCache,\n CacheConfig,\n} from '@uniforge/platform-core/graphql';\n\nconst DEFAULT_TTL_SECONDS = 300;\nconst DEFAULT_KEY_PREFIX = 'uniforge:gql:';\nconst SCAN_COUNT = 100;\n\n/** Generate a cache key from a GraphQL query and variables. */\nexport function generateCacheKey(\n prefix: string,\n query: string,\n variables?: Record<string, unknown>,\n): string {\n const content = variables\n ? query + JSON.stringify(variables)\n : query;\n const hash = createHash('sha256').update(content).digest('hex');\n return `${prefix}${hash}`;\n}\n\n/** Create a Redis-backed GraphQL cache. */\nexport function createRedisGraphQLCache(\n redis: Redis,\n config?: CacheConfig,\n): GraphQLCache {\n const defaultTtl = config?.defaultTtlSeconds ?? DEFAULT_TTL_SECONDS;\n const keyPrefix = config?.keyPrefix ?? 
DEFAULT_KEY_PREFIX;\n\n return {\n async get<T = unknown>(key: string): Promise<T | undefined> {\n const raw = await redis.get(key);\n if (raw === null) {\n return undefined;\n }\n return JSON.parse(raw) as T;\n },\n\n async set<T = unknown>(\n key: string,\n value: T,\n ttlSeconds?: number,\n ): Promise<void> {\n const ttl = ttlSeconds ?? defaultTtl;\n if (ttl > 0) {\n await redis.set(key, JSON.stringify(value), 'EX', ttl);\n } else {\n await redis.set(key, JSON.stringify(value));\n }\n },\n\n async invalidate(pattern: string): Promise<void> {\n const scanPattern = `${keyPrefix}${pattern}`;\n let cursor = '0';\n do {\n const [nextCursor, keys] = await redis.scan(\n cursor,\n 'MATCH',\n scanPattern,\n 'COUNT',\n SCAN_COUNT,\n );\n cursor = nextCursor;\n if (keys.length > 0) {\n await redis.del(...keys);\n }\n } while (cursor !== '0');\n },\n\n async clear(): Promise<void> {\n const scanPattern = `${keyPrefix}*`;\n let cursor = '0';\n do {\n const [nextCursor, keys] = await redis.scan(\n cursor,\n 'MATCH',\n scanPattern,\n 'COUNT',\n SCAN_COUNT,\n );\n cursor = nextCursor;\n if (keys.length > 0) {\n await redis.del(...keys);\n }\n } while (cursor !== '0');\n },\n 
};\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACaA,IAAM,sBAAsB;AAC5B,IAAM,qBAAqB;AAC3B,IAAM,sBAAsB;AAGrB,SAAS,oBAAoB,QAA4C;AAC9E,QAAM,aAAa,OAAO,cAAc;AACxC,QAAM,YAAY,OAAO,aAAa;AACtC,QAAM,WAAW,WAAW,OAAO,IAAI,cAAc,OAAO,UAAU;AAEtE,iBAAe,QACb,OACA,WAC6B;AAC7B,QAAI;AAEJ,aAAS,UAAU,GAAG,WAAW,YAAY,WAAW;AACtD,YAAM,aAAa,IAAI,gBAAgB;AACvC,YAAM,QAAQ,WAAW,MAAM,WAAW,MAAM,GAAG,SAAS;AAE5D,UAAI;AACF,cAAM,WAAW,MAAM,MAAM,UAAU;AAAA,UACrC,QAAQ;AAAA,UACR,SAAS;AAAA,YACP,gBAAgB;AAAA,YAChB,0BAA0B,OAAO;AAAA,UACnC;AAAA,UACA,MAAM,KAAK,UAAU,EAAE,OAAO,UAAU,CAAC;AAAA,UACzC,QAAQ,WAAW;AAAA,QACrB,CAAC;AAED,YAAI,SAAS,WAAW,KAAK;AAC3B,cAAI,UAAU,YAAY;AACxB,kBAAM,QAAQ,sBAAsB,KAAK,IAAI,GAAG,OAAO;AACvD,kBAAM,MAAM,KAAK;AACjB;AAAA,UACF;AACA,gBAAM,IAAI;AAAA,YACR,mCAAmC,aAAa,CAAC;AAAA,UACnD;AAAA,QACF;AAEA,YAAI,CAAC,SAAS,IAAI;AAChB,gBAAM,IAAI;AAAA,YACR,sCAAsC,SAAS,MAAM,KAAK,SAAS,UAAU;AAAA,UAC/E;AAAA,QACF;AAEA,cAAM,OAAQ,MAAM,SAAS,KAAK;AAClC,eAAO;AAAA,MACT,SAAS,OAAO;AACd,YAAI,iBAAiB,gBAAgB,MAAM,SAAS,cAAc;AAChE,gBAAM,IAAI;AAAA,YACR,mCAAmC,SAAS;AAAA,UAC9C;AAAA,QACF;AACA,oBAAY,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AACpE,YAAI,WAAW,YAAY;AACzB,gBAAM;AAAA,QACR;AAAA,MACF,UAAE;AACA,qBAAa,KAAK;AAAA,MACpB;AAAA,IACF;AAEA,UAAM,aAAa,IAAI,MAAM,wBAAwB;AAAA,EACvD;AAEA,SAAO;AAAA,IACL,OAAO,CAAI,OAAe,cACxB,QAAW,OAAO,SAAS;AAAA,IAC7B,QAAQ,CAAI,UAAkB,cAC5B,QAAW,UAAU,SAAS;AAAA,EAClC;AACF;AAEA,SAAS,MAAM,IAA2B;AACxC,SAAO,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,EAAE,CAAC;AACzD;;;AC7EA,IAAM,wBAAwB;AAC9B,IAAM,uBAAuB;AAC7B,IAAM,4BAA4B;AAG3B,SAAS,kBAAkB,QAAuC;AACvE,QAAM,UAAU,QAAQ,gBAAgB;AACxC,QAAM,cAAc,QAAQ,wBAAwB;AACpD,QAAM,mBAAmB,QAAQ,oBAAoB;AAErD,MAAI,YAAY;AAChB,MAAI,iBAAiB,KAAK,IAAI;AAE9B,WAAS,gBAAsB;AAC7B,UAAM,MAAM,KAAK,IAAI;AACrB,UAAM,WAAW,MAAM,kBAAkB;AACzC,gBAAY,KAAK,IAAI,SAAS,YAAY,UAAU,WAAW;AAC/D,qBAAiB;AAAA,EACnB;AAEA,SAAO;AAAA,IACL,MAAM,UAAyB;AAC7B,oBAAc;AAEd,UAAI,YAAY,kBAAkB;AAChC,cAAM,UAAW,mBAAmB,aAAa,cAAe;AAChE,cAAM,IAAI,QAAc,CAAC,YAAY,WAAW,SAAS,MAAM,CAAC;AAChE,sBAAc;AAAA,MAChB;AAAA,IACF;AAAA,IAEA,YA
A6B;AAC3B,oBAAc;AACd,aAAO;AAAA,QACL,WAAW,KAAK,MAAM,SAAS;AAAA,QAC/B;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,IAEA,mBAAmB,MAAwB;AACzC,UAAI,MAAM,gBAAgB;AACxB,oBAAY,KAAK,eAAe;AAChC,yBAAiB,KAAK,IAAI;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AACF;;;ACtDA,yBAA2B;AAO3B,IAAM,sBAAsB;AAC5B,IAAM,qBAAqB;AAC3B,IAAM,aAAa;AAGZ,SAAS,iBACd,QACA,OACA,WACQ;AACR,QAAM,UAAU,YACZ,QAAQ,KAAK,UAAU,SAAS,IAChC;AACJ,QAAM,WAAO,+BAAW,QAAQ,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK;AAC9D,SAAO,GAAG,MAAM,GAAG,IAAI;AACzB;AAGO,SAAS,wBACd,OACA,QACc;AACd,QAAM,aAAa,QAAQ,qBAAqB;AAChD,QAAM,YAAY,QAAQ,aAAa;AAEvC,SAAO;AAAA,IACL,MAAM,IAAiB,KAAqC;AAC1D,YAAM,MAAM,MAAM,MAAM,IAAI,GAAG;AAC/B,UAAI,QAAQ,MAAM;AAChB,eAAO;AAAA,MACT;AACA,aAAO,KAAK,MAAM,GAAG;AAAA,IACvB;AAAA,IAEA,MAAM,IACJ,KACA,OACA,YACe;AACf,YAAM,MAAM,cAAc;AAC1B,UAAI,MAAM,GAAG;AACX,cAAM,MAAM,IAAI,KAAK,KAAK,UAAU,KAAK,GAAG,MAAM,GAAG;AAAA,MACvD,OAAO;AACL,cAAM,MAAM,IAAI,KAAK,KAAK,UAAU,KAAK,CAAC;AAAA,MAC5C;AAAA,IACF;AAAA,IAEA,MAAM,WAAW,SAAgC;AAC/C,YAAM,cAAc,GAAG,SAAS,GAAG,OAAO;AAC1C,UAAI,SAAS;AACb,SAAG;AACD,cAAM,CAAC,YAAY,IAAI,IAAI,MAAM,MAAM;AAAA,UACrC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AACA,iBAAS;AACT,YAAI,KAAK,SAAS,GAAG;AACnB,gBAAM,MAAM,IAAI,GAAG,IAAI;AAAA,QACzB;AAAA,MACF,SAAS,WAAW;AAAA,IACtB;AAAA,IAEA,MAAM,QAAuB;AAC3B,YAAM,cAAc,GAAG,SAAS;AAChC,UAAI,SAAS;AACb,SAAG;AACD,cAAM,CAAC,YAAY,IAAI,IAAI,MAAM,MAAM;AAAA,UACrC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AACA,iBAAS;AACT,YAAI,KAAK,SAAS,GAAG;AACnB,gBAAM,MAAM,IAAI,GAAG,IAAI;AAAA,QACzB;AAAA,MACF,SAAS,WAAW;AAAA,IACtB;AAAA,EACF;AACF;","names":[]}
|
|
@@ -0,0 +1,179 @@
|
|
|
1
|
+
// src/graphql/client.ts
|
|
2
|
+
var DEFAULT_MAX_RETRIES = 3;
|
|
3
|
+
var DEFAULT_TIMEOUT_MS = 3e4;
|
|
4
|
+
var BASE_RETRY_DELAY_MS = 1e3;
|
|
5
|
+
function createGraphQLClient(config) {
|
|
6
|
+
const maxRetries = config.maxRetries ?? DEFAULT_MAX_RETRIES;
|
|
7
|
+
const timeoutMs = config.timeoutMs ?? DEFAULT_TIMEOUT_MS;
|
|
8
|
+
const endpoint = `https://${config.shop}/admin/api/${config.apiVersion}/graphql.json`;
|
|
9
|
+
async function execute(query, variables) {
|
|
10
|
+
let lastError;
|
|
11
|
+
for (let attempt = 0; attempt <= maxRetries; attempt++) {
|
|
12
|
+
const controller = new AbortController();
|
|
13
|
+
const timer = setTimeout(() => controller.abort(), timeoutMs);
|
|
14
|
+
try {
|
|
15
|
+
const response = await fetch(endpoint, {
|
|
16
|
+
method: "POST",
|
|
17
|
+
headers: {
|
|
18
|
+
"Content-Type": "application/json",
|
|
19
|
+
"X-Shopify-Access-Token": config.accessToken
|
|
20
|
+
},
|
|
21
|
+
body: JSON.stringify({ query, variables }),
|
|
22
|
+
signal: controller.signal
|
|
23
|
+
});
|
|
24
|
+
if (response.status === 429) {
|
|
25
|
+
if (attempt < maxRetries) {
|
|
26
|
+
const delay = BASE_RETRY_DELAY_MS * Math.pow(2, attempt);
|
|
27
|
+
await sleep(delay);
|
|
28
|
+
continue;
|
|
29
|
+
}
|
|
30
|
+
throw new Error(
|
|
31
|
+
`GraphQL request throttled after ${maxRetries + 1} attempts`
|
|
32
|
+
);
|
|
33
|
+
}
|
|
34
|
+
if (!response.ok) {
|
|
35
|
+
throw new Error(
|
|
36
|
+
`GraphQL request failed with status ${response.status}: ${response.statusText}`
|
|
37
|
+
);
|
|
38
|
+
}
|
|
39
|
+
const json = await response.json();
|
|
40
|
+
return json;
|
|
41
|
+
} catch (error) {
|
|
42
|
+
if (error instanceof DOMException && error.name === "AbortError") {
|
|
43
|
+
throw new Error(
|
|
44
|
+
`GraphQL request timed out after ${timeoutMs}ms`
|
|
45
|
+
);
|
|
46
|
+
}
|
|
47
|
+
lastError = error instanceof Error ? error : new Error(String(error));
|
|
48
|
+
if (attempt >= maxRetries) {
|
|
49
|
+
throw lastError;
|
|
50
|
+
}
|
|
51
|
+
} finally {
|
|
52
|
+
clearTimeout(timer);
|
|
53
|
+
}
|
|
54
|
+
}
|
|
55
|
+
throw lastError ?? new Error("GraphQL request failed");
|
|
56
|
+
}
|
|
57
|
+
return {
|
|
58
|
+
query: (query, variables) => execute(query, variables),
|
|
59
|
+
mutate: (mutation, variables) => execute(mutation, variables)
|
|
60
|
+
};
|
|
61
|
+
}
|
|
62
|
+
function sleep(ms) {
|
|
63
|
+
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
// src/graphql/rate-limiter.ts
|
|
67
|
+
var DEFAULT_MAX_AVAILABLE = 1e3;
|
|
68
|
+
var DEFAULT_RESTORE_RATE = 50;
|
|
69
|
+
var DEFAULT_MINIMUM_AVAILABLE = 100;
|
|
70
|
+
function createRateLimiter(config) {
|
|
71
|
+
const maximum = config?.maxAvailable ?? DEFAULT_MAX_AVAILABLE;
|
|
72
|
+
const restoreRate = config?.restoreRatePerSecond ?? DEFAULT_RESTORE_RATE;
|
|
73
|
+
const minimumAvailable = config?.minimumAvailable ?? DEFAULT_MINIMUM_AVAILABLE;
|
|
74
|
+
let available = maximum;
|
|
75
|
+
let lastUpdateTime = Date.now();
|
|
76
|
+
function restorePoints() {
|
|
77
|
+
const now = Date.now();
|
|
78
|
+
const elapsed = (now - lastUpdateTime) / 1e3;
|
|
79
|
+
available = Math.min(maximum, available + elapsed * restoreRate);
|
|
80
|
+
lastUpdateTime = now;
|
|
81
|
+
}
|
|
82
|
+
return {
|
|
83
|
+
async acquire() {
|
|
84
|
+
restorePoints();
|
|
85
|
+
if (available < minimumAvailable) {
|
|
86
|
+
const waitMs = (minimumAvailable - available) / restoreRate * 1e3;
|
|
87
|
+
await new Promise((resolve) => setTimeout(resolve, waitMs));
|
|
88
|
+
restorePoints();
|
|
89
|
+
}
|
|
90
|
+
},
|
|
91
|
+
getStatus() {
|
|
92
|
+
restorePoints();
|
|
93
|
+
return {
|
|
94
|
+
available: Math.floor(available),
|
|
95
|
+
maximum,
|
|
96
|
+
restoreRate
|
|
97
|
+
};
|
|
98
|
+
},
|
|
99
|
+
updateFromResponse(cost) {
|
|
100
|
+
if (cost?.throttleStatus) {
|
|
101
|
+
available = cost.throttleStatus.currentlyAvailable;
|
|
102
|
+
lastUpdateTime = Date.now();
|
|
103
|
+
}
|
|
104
|
+
}
|
|
105
|
+
};
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
// src/graphql/cache.ts
|
|
109
|
+
import { createHash } from "crypto";
|
|
110
|
+
var DEFAULT_TTL_SECONDS = 300;
|
|
111
|
+
var DEFAULT_KEY_PREFIX = "uniforge:gql:";
|
|
112
|
+
var SCAN_COUNT = 100;
|
|
113
|
+
function generateCacheKey(prefix, query, variables) {
|
|
114
|
+
const content = variables ? query + JSON.stringify(variables) : query;
|
|
115
|
+
const hash = createHash("sha256").update(content).digest("hex");
|
|
116
|
+
return `${prefix}${hash}`;
|
|
117
|
+
}
|
|
118
|
+
function createRedisGraphQLCache(redis, config) {
|
|
119
|
+
const defaultTtl = config?.defaultTtlSeconds ?? DEFAULT_TTL_SECONDS;
|
|
120
|
+
const keyPrefix = config?.keyPrefix ?? DEFAULT_KEY_PREFIX;
|
|
121
|
+
return {
|
|
122
|
+
async get(key) {
|
|
123
|
+
const raw = await redis.get(key);
|
|
124
|
+
if (raw === null) {
|
|
125
|
+
return void 0;
|
|
126
|
+
}
|
|
127
|
+
return JSON.parse(raw);
|
|
128
|
+
},
|
|
129
|
+
async set(key, value, ttlSeconds) {
|
|
130
|
+
const ttl = ttlSeconds ?? defaultTtl;
|
|
131
|
+
if (ttl > 0) {
|
|
132
|
+
await redis.set(key, JSON.stringify(value), "EX", ttl);
|
|
133
|
+
} else {
|
|
134
|
+
await redis.set(key, JSON.stringify(value));
|
|
135
|
+
}
|
|
136
|
+
},
|
|
137
|
+
async invalidate(pattern) {
|
|
138
|
+
const scanPattern = `${keyPrefix}${pattern}`;
|
|
139
|
+
let cursor = "0";
|
|
140
|
+
do {
|
|
141
|
+
const [nextCursor, keys] = await redis.scan(
|
|
142
|
+
cursor,
|
|
143
|
+
"MATCH",
|
|
144
|
+
scanPattern,
|
|
145
|
+
"COUNT",
|
|
146
|
+
SCAN_COUNT
|
|
147
|
+
);
|
|
148
|
+
cursor = nextCursor;
|
|
149
|
+
if (keys.length > 0) {
|
|
150
|
+
await redis.del(...keys);
|
|
151
|
+
}
|
|
152
|
+
} while (cursor !== "0");
|
|
153
|
+
},
|
|
154
|
+
async clear() {
|
|
155
|
+
const scanPattern = `${keyPrefix}*`;
|
|
156
|
+
let cursor = "0";
|
|
157
|
+
do {
|
|
158
|
+
const [nextCursor, keys] = await redis.scan(
|
|
159
|
+
cursor,
|
|
160
|
+
"MATCH",
|
|
161
|
+
scanPattern,
|
|
162
|
+
"COUNT",
|
|
163
|
+
SCAN_COUNT
|
|
164
|
+
);
|
|
165
|
+
cursor = nextCursor;
|
|
166
|
+
if (keys.length > 0) {
|
|
167
|
+
await redis.del(...keys);
|
|
168
|
+
}
|
|
169
|
+
} while (cursor !== "0");
|
|
170
|
+
}
|
|
171
|
+
};
|
|
172
|
+
}
|
|
173
|
+
export {
|
|
174
|
+
createGraphQLClient,
|
|
175
|
+
createRateLimiter,
|
|
176
|
+
createRedisGraphQLCache,
|
|
177
|
+
generateCacheKey
|
|
178
|
+
};
|
|
179
|
+
//# sourceMappingURL=index.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/graphql/client.ts","../../src/graphql/rate-limiter.ts","../../src/graphql/cache.ts"],"sourcesContent":["/**\n * Base GraphQL client implementation using Node 20+ built-in fetch.\n *\n * Provides retry logic with exponential backoff for 429 responses\n * and timeout support via AbortController.\n */\n\nimport type {\n GraphQLClient,\n GraphQLClientConfig,\n GraphQLResponse,\n} from '@uniforge/platform-core/graphql';\n\nconst DEFAULT_MAX_RETRIES = 3;\nconst DEFAULT_TIMEOUT_MS = 30_000;\nconst BASE_RETRY_DELAY_MS = 1_000;\n\n/** Create a base GraphQL client that communicates via HTTP POST. */\nexport function createGraphQLClient(config: GraphQLClientConfig): GraphQLClient {\n const maxRetries = config.maxRetries ?? DEFAULT_MAX_RETRIES;\n const timeoutMs = config.timeoutMs ?? DEFAULT_TIMEOUT_MS;\n const endpoint = `https://${config.shop}/admin/api/${config.apiVersion}/graphql.json`;\n\n async function execute<T>(\n query: string,\n variables?: Record<string, unknown>,\n ): Promise<GraphQLResponse<T>> {\n let lastError: Error | undefined;\n\n for (let attempt = 0; attempt <= maxRetries; attempt++) {\n const controller = new AbortController();\n const timer = setTimeout(() => controller.abort(), timeoutMs);\n\n try {\n const response = await fetch(endpoint, {\n method: 'POST',\n headers: {\n 'Content-Type': 'application/json',\n 'X-Shopify-Access-Token': config.accessToken,\n },\n body: JSON.stringify({ query, variables }),\n signal: controller.signal,\n });\n\n if (response.status === 429) {\n if (attempt < maxRetries) {\n const delay = BASE_RETRY_DELAY_MS * Math.pow(2, attempt);\n await sleep(delay);\n continue;\n }\n throw new Error(\n `GraphQL request throttled after ${maxRetries + 1} attempts`,\n );\n }\n\n if (!response.ok) {\n throw new Error(\n `GraphQL request failed with status ${response.status}: ${response.statusText}`,\n );\n }\n\n const json = (await response.json()) as GraphQLResponse<T>;\n return json;\n } catch (error) 
{\n if (error instanceof DOMException && error.name === 'AbortError') {\n throw new Error(\n `GraphQL request timed out after ${timeoutMs}ms`,\n );\n }\n lastError = error instanceof Error ? error : new Error(String(error));\n if (attempt >= maxRetries) {\n throw lastError;\n }\n } finally {\n clearTimeout(timer);\n }\n }\n\n throw lastError ?? new Error('GraphQL request failed');\n }\n\n return {\n query: <T>(query: string, variables?: Record<string, unknown>) =>\n execute<T>(query, variables),\n mutate: <T>(mutation: string, variables?: Record<string, unknown>) =>\n execute<T>(mutation, variables),\n };\n}\n\nfunction sleep(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n}\n","/**\n * Leaky bucket rate limiter for Shopify GraphQL API.\n *\n * Tracks available query cost points and throttles requests when\n * points are low, restoring them over time based on Shopify's restore rate.\n */\n\nimport type {\n QueryCost,\n RateLimiter,\n RateLimitConfig,\n RateLimitStatus,\n} from '@uniforge/platform-core/graphql';\n\nconst DEFAULT_MAX_AVAILABLE = 1_000;\nconst DEFAULT_RESTORE_RATE = 50;\nconst DEFAULT_MINIMUM_AVAILABLE = 100;\n\n/** Create an in-memory rate limiter that tracks Shopify query cost points. */\nexport function createRateLimiter(config?: RateLimitConfig): RateLimiter {\n const maximum = config?.maxAvailable ?? DEFAULT_MAX_AVAILABLE;\n const restoreRate = config?.restoreRatePerSecond ?? DEFAULT_RESTORE_RATE;\n const minimumAvailable = config?.minimumAvailable ?? 
DEFAULT_MINIMUM_AVAILABLE;\n\n let available = maximum;\n let lastUpdateTime = Date.now();\n\n function restorePoints(): void {\n const now = Date.now();\n const elapsed = (now - lastUpdateTime) / 1_000;\n available = Math.min(maximum, available + elapsed * restoreRate);\n lastUpdateTime = now;\n }\n\n return {\n async acquire(): Promise<void> {\n restorePoints();\n\n if (available < minimumAvailable) {\n const waitMs = ((minimumAvailable - available) / restoreRate) * 1_000;\n await new Promise<void>((resolve) => setTimeout(resolve, waitMs));\n restorePoints();\n }\n },\n\n getStatus(): RateLimitStatus {\n restorePoints();\n return {\n available: Math.floor(available),\n maximum,\n restoreRate,\n };\n },\n\n updateFromResponse(cost?: QueryCost): void {\n if (cost?.throttleStatus) {\n available = cost.throttleStatus.currentlyAvailable;\n lastUpdateTime = Date.now();\n }\n },\n };\n}\n","/**\n * Redis-based GraphQL response cache.\n *\n * Caches query results keyed by SHA256 hash of query + variables.\n * Supports TTL-based expiration and pattern-based invalidation.\n */\n\nimport { createHash } from 'node:crypto';\nimport type { Redis } from 'ioredis';\nimport type {\n GraphQLCache,\n CacheConfig,\n} from '@uniforge/platform-core/graphql';\n\nconst DEFAULT_TTL_SECONDS = 300;\nconst DEFAULT_KEY_PREFIX = 'uniforge:gql:';\nconst SCAN_COUNT = 100;\n\n/** Generate a cache key from a GraphQL query and variables. */\nexport function generateCacheKey(\n prefix: string,\n query: string,\n variables?: Record<string, unknown>,\n): string {\n const content = variables\n ? query + JSON.stringify(variables)\n : query;\n const hash = createHash('sha256').update(content).digest('hex');\n return `${prefix}${hash}`;\n}\n\n/** Create a Redis-backed GraphQL cache. */\nexport function createRedisGraphQLCache(\n redis: Redis,\n config?: CacheConfig,\n): GraphQLCache {\n const defaultTtl = config?.defaultTtlSeconds ?? DEFAULT_TTL_SECONDS;\n const keyPrefix = config?.keyPrefix ?? 
DEFAULT_KEY_PREFIX;\n\n return {\n async get<T = unknown>(key: string): Promise<T | undefined> {\n const raw = await redis.get(key);\n if (raw === null) {\n return undefined;\n }\n return JSON.parse(raw) as T;\n },\n\n async set<T = unknown>(\n key: string,\n value: T,\n ttlSeconds?: number,\n ): Promise<void> {\n const ttl = ttlSeconds ?? defaultTtl;\n if (ttl > 0) {\n await redis.set(key, JSON.stringify(value), 'EX', ttl);\n } else {\n await redis.set(key, JSON.stringify(value));\n }\n },\n\n async invalidate(pattern: string): Promise<void> {\n const scanPattern = `${keyPrefix}${pattern}`;\n let cursor = '0';\n do {\n const [nextCursor, keys] = await redis.scan(\n cursor,\n 'MATCH',\n scanPattern,\n 'COUNT',\n SCAN_COUNT,\n );\n cursor = nextCursor;\n if (keys.length > 0) {\n await redis.del(...keys);\n }\n } while (cursor !== '0');\n },\n\n async clear(): Promise<void> {\n const scanPattern = `${keyPrefix}*`;\n let cursor = '0';\n do {\n const [nextCursor, keys] = await redis.scan(\n cursor,\n 'MATCH',\n scanPattern,\n 'COUNT',\n SCAN_COUNT,\n );\n cursor = nextCursor;\n if (keys.length > 0) {\n await redis.del(...keys);\n }\n } while (cursor !== '0');\n },\n 
};\n}\n"],"mappings":";AAaA,IAAM,sBAAsB;AAC5B,IAAM,qBAAqB;AAC3B,IAAM,sBAAsB;AAGrB,SAAS,oBAAoB,QAA4C;AAC9E,QAAM,aAAa,OAAO,cAAc;AACxC,QAAM,YAAY,OAAO,aAAa;AACtC,QAAM,WAAW,WAAW,OAAO,IAAI,cAAc,OAAO,UAAU;AAEtE,iBAAe,QACb,OACA,WAC6B;AAC7B,QAAI;AAEJ,aAAS,UAAU,GAAG,WAAW,YAAY,WAAW;AACtD,YAAM,aAAa,IAAI,gBAAgB;AACvC,YAAM,QAAQ,WAAW,MAAM,WAAW,MAAM,GAAG,SAAS;AAE5D,UAAI;AACF,cAAM,WAAW,MAAM,MAAM,UAAU;AAAA,UACrC,QAAQ;AAAA,UACR,SAAS;AAAA,YACP,gBAAgB;AAAA,YAChB,0BAA0B,OAAO;AAAA,UACnC;AAAA,UACA,MAAM,KAAK,UAAU,EAAE,OAAO,UAAU,CAAC;AAAA,UACzC,QAAQ,WAAW;AAAA,QACrB,CAAC;AAED,YAAI,SAAS,WAAW,KAAK;AAC3B,cAAI,UAAU,YAAY;AACxB,kBAAM,QAAQ,sBAAsB,KAAK,IAAI,GAAG,OAAO;AACvD,kBAAM,MAAM,KAAK;AACjB;AAAA,UACF;AACA,gBAAM,IAAI;AAAA,YACR,mCAAmC,aAAa,CAAC;AAAA,UACnD;AAAA,QACF;AAEA,YAAI,CAAC,SAAS,IAAI;AAChB,gBAAM,IAAI;AAAA,YACR,sCAAsC,SAAS,MAAM,KAAK,SAAS,UAAU;AAAA,UAC/E;AAAA,QACF;AAEA,cAAM,OAAQ,MAAM,SAAS,KAAK;AAClC,eAAO;AAAA,MACT,SAAS,OAAO;AACd,YAAI,iBAAiB,gBAAgB,MAAM,SAAS,cAAc;AAChE,gBAAM,IAAI;AAAA,YACR,mCAAmC,SAAS;AAAA,UAC9C;AAAA,QACF;AACA,oBAAY,iBAAiB,QAAQ,QAAQ,IAAI,MAAM,OAAO,KAAK,CAAC;AACpE,YAAI,WAAW,YAAY;AACzB,gBAAM;AAAA,QACR;AAAA,MACF,UAAE;AACA,qBAAa,KAAK;AAAA,MACpB;AAAA,IACF;AAEA,UAAM,aAAa,IAAI,MAAM,wBAAwB;AAAA,EACvD;AAEA,SAAO;AAAA,IACL,OAAO,CAAI,OAAe,cACxB,QAAW,OAAO,SAAS;AAAA,IAC7B,QAAQ,CAAI,UAAkB,cAC5B,QAAW,UAAU,SAAS;AAAA,EAClC;AACF;AAEA,SAAS,MAAM,IAA2B;AACxC,SAAO,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,EAAE,CAAC;AACzD;;;AC7EA,IAAM,wBAAwB;AAC9B,IAAM,uBAAuB;AAC7B,IAAM,4BAA4B;AAG3B,SAAS,kBAAkB,QAAuC;AACvE,QAAM,UAAU,QAAQ,gBAAgB;AACxC,QAAM,cAAc,QAAQ,wBAAwB;AACpD,QAAM,mBAAmB,QAAQ,oBAAoB;AAErD,MAAI,YAAY;AAChB,MAAI,iBAAiB,KAAK,IAAI;AAE9B,WAAS,gBAAsB;AAC7B,UAAM,MAAM,KAAK,IAAI;AACrB,UAAM,WAAW,MAAM,kBAAkB;AACzC,gBAAY,KAAK,IAAI,SAAS,YAAY,UAAU,WAAW;AAC/D,qBAAiB;AAAA,EACnB;AAEA,SAAO;AAAA,IACL,MAAM,UAAyB;AAC7B,oBAAc;AAEd,UAAI,YAAY,kBAAkB;AAChC,cAAM,UAAW,mBAAmB,aAAa,cAAe;AAChE,cAAM,IAAI,QAAc,CAAC,YAAY,WAAW,SAAS,MAAM,CAAC;AAChE,sBAAc;AAAA,MAChB;AAAA,IACF;AAAA,IAEA,YAA6B;AAC3B,oBAAc;AACd,aAAO;AAAA,QACL,WAAW,KAAK,MAAM,SAAS;AAAA,
QAC/B;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,IAEA,mBAAmB,MAAwB;AACzC,UAAI,MAAM,gBAAgB;AACxB,oBAAY,KAAK,eAAe;AAChC,yBAAiB,KAAK,IAAI;AAAA,MAC5B;AAAA,IACF;AAAA,EACF;AACF;;;ACtDA,SAAS,kBAAkB;AAO3B,IAAM,sBAAsB;AAC5B,IAAM,qBAAqB;AAC3B,IAAM,aAAa;AAGZ,SAAS,iBACd,QACA,OACA,WACQ;AACR,QAAM,UAAU,YACZ,QAAQ,KAAK,UAAU,SAAS,IAChC;AACJ,QAAM,OAAO,WAAW,QAAQ,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK;AAC9D,SAAO,GAAG,MAAM,GAAG,IAAI;AACzB;AAGO,SAAS,wBACd,OACA,QACc;AACd,QAAM,aAAa,QAAQ,qBAAqB;AAChD,QAAM,YAAY,QAAQ,aAAa;AAEvC,SAAO;AAAA,IACL,MAAM,IAAiB,KAAqC;AAC1D,YAAM,MAAM,MAAM,MAAM,IAAI,GAAG;AAC/B,UAAI,QAAQ,MAAM;AAChB,eAAO;AAAA,MACT;AACA,aAAO,KAAK,MAAM,GAAG;AAAA,IACvB;AAAA,IAEA,MAAM,IACJ,KACA,OACA,YACe;AACf,YAAM,MAAM,cAAc;AAC1B,UAAI,MAAM,GAAG;AACX,cAAM,MAAM,IAAI,KAAK,KAAK,UAAU,KAAK,GAAG,MAAM,GAAG;AAAA,MACvD,OAAO;AACL,cAAM,MAAM,IAAI,KAAK,KAAK,UAAU,KAAK,CAAC;AAAA,MAC5C;AAAA,IACF;AAAA,IAEA,MAAM,WAAW,SAAgC;AAC/C,YAAM,cAAc,GAAG,SAAS,GAAG,OAAO;AAC1C,UAAI,SAAS;AACb,SAAG;AACD,cAAM,CAAC,YAAY,IAAI,IAAI,MAAM,MAAM;AAAA,UACrC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AACA,iBAAS;AACT,YAAI,KAAK,SAAS,GAAG;AACnB,gBAAM,MAAM,IAAI,GAAG,IAAI;AAAA,QACzB;AAAA,MACF,SAAS,WAAW;AAAA,IACtB;AAAA,IAEA,MAAM,QAAuB;AAC3B,YAAM,cAAc,GAAG,SAAS;AAChC,UAAI,SAAS;AACb,SAAG;AACD,cAAM,CAAC,YAAY,IAAI,IAAI,MAAM,MAAM;AAAA,UACrC;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QACF;AACA,iBAAS;AACT,YAAI,KAAK,SAAS,GAAG;AACnB,gBAAM,MAAM,IAAI,GAAG,IAAI;AAAA,QACzB;AAAA,MACF,SAAS,WAAW;AAAA,IACtB;AAAA,EACF;AACF;","names":[]}
|
package/dist/index.d.cts
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @uniforge/core
|
|
3
|
+
*
|
|
4
|
+
* Core framework functionality including authentication, GraphQL client,
|
|
5
|
+
* webhooks, and billing integration.
|
|
6
|
+
*/
|
|
7
|
+
declare const VERSION = "0.0.0";
|
|
8
|
+
/**
|
|
9
|
+
* UniForge core module placeholder.
|
|
10
|
+
* Full implementation will include auth, GraphQL, webhooks, and billing.
|
|
11
|
+
*/
|
|
12
|
+
declare const uniforge: {
|
|
13
|
+
version: string;
|
|
14
|
+
};
|
|
15
|
+
|
|
16
|
+
export { VERSION, uniforge };
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* @uniforge/core
|
|
3
|
+
*
|
|
4
|
+
* Core framework functionality including authentication, GraphQL client,
|
|
5
|
+
* webhooks, and billing integration.
|
|
6
|
+
*/
|
|
7
|
+
declare const VERSION = "0.0.0";
|
|
8
|
+
/**
|
|
9
|
+
* UniForge core module placeholder.
|
|
10
|
+
* Full implementation will include auth, GraphQL, webhooks, and billing.
|
|
11
|
+
*/
|
|
12
|
+
declare const uniforge: {
|
|
13
|
+
version: string;
|
|
14
|
+
};
|
|
15
|
+
|
|
16
|
+
export { VERSION, uniforge };
|