@peam-ai/next 0.1.4 → 0.1.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +14 -12
- package/dist/index.d.ts +14 -12
- package/dist/index.js +150 -144
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +148 -140
- package/dist/index.mjs.map +1 -1
- package/dist/peam.adapter.js +3 -6
- package/dist/peam.adapter.js.map +1 -1
- package/dist/route.js +37 -9
- package/dist/route.js.map +1 -1
- package/dist/route.mjs +36 -9
- package/dist/route.mjs.map +1 -1
- package/package.json +2 -2
package/dist/peam.adapter.js
CHANGED
@@ -126,22 +126,19 @@ function createPeamAdapter(config) {
     }
   };
 }
-({
-  searchExporter: {
-    config: { baseDir: process.cwd()}
-  }});
 var getConfig = () => {
   if (!process.env.PEAM_SEARCH_EXPORTER_TYPE || !process.env.PEAM_SEARCH_EXPORTER_CONFIG) {
     throw new Error(
       "Peam configuration not found. Make sure withPeam() is properly configured in your next.config file."
     );
   }
-  const searchExporter = {
+  const searchExporterConfig = {
     type: process.env.PEAM_SEARCH_EXPORTER_TYPE,
     config: JSON.parse(process.env.PEAM_SEARCH_EXPORTER_CONFIG)
   };
   const resolvedConfig = {
-    searchIndexExporter: search$1.createExporterFromConfig(searchExporter),
+    searchExporter: searchExporterConfig,
+    searchIndexExporter: search$1.createExporterFromConfig(searchExporterConfig),
     respectRobotsTxt: process.env.PEAM_RESPECT_ROBOTS_TXT === "true",
     robotsTxtPath: process.env.PEAM_ROBOTS_TXT_PATH || void 0,
     exclude: process.env.PEAM_EXCLUDE ? JSON.parse(process.env.PEAM_EXCLUDE) : []
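The net effect of this hunk: getConfig() now keeps the parsed exporter settings on the resolved config as searchExporter, alongside the instantiated searchIndexExporter, and the orphaned default-config remnant left behind by tree-shaking in 0.1.4 is gone. A minimal TypeScript sketch of the new resolution, with stand-in values in place of the env vars that withPeam() serializes at build time:

import { createExporterFromConfig, type SearchExporterConfig } from '@peam-ai/search';

// Stand-ins for PEAM_SEARCH_EXPORTER_TYPE / PEAM_SEARCH_EXPORTER_CONFIG,
// which withPeam() writes into the environment during the build.
const rawType = 'fileBased';
const rawConfig = JSON.stringify({ indexPath: '.peam/index.json' });

const searchExporterConfig: SearchExporterConfig = {
  type: rawType as SearchExporterConfig['type'],
  config: JSON.parse(rawConfig),
};

// 0.1.5 keeps the raw config on the resolved object instead of discarding it
// after the exporter instance is created:
const resolvedConfig = {
  searchExporter: searchExporterConfig,
  searchIndexExporter: createExporterFromConfig(searchExporterConfig),
};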
package/dist/peam.adapter.js.map
CHANGED
@@ -1 +1 @@
[single-line sourcemap regenerated to match the code changes above; its embedded sourcesContent records the underlying src/config.ts edits: ResolvedPeamAdapterConfig no longer omits searchExporter, the default fileBased exporter config drops baseDir: process.cwd() (the exporter now resolves it itself), and getConfig() returns the parsed searchExporter config alongside the created searchIndexExporter]
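The sourcesContent also documents the public-facing shape of the change: PeamConfig itself is unchanged, but the default fileBased exporter config no longer bakes in baseDir: process.cwd() at config time. A usage sketch, assuming withPeam(nextConfig, peamConfig) is the wrapper the error message refers to; its exact import path and signature are not shown in this diff:

// next.config.ts
import type { NextConfig } from 'next';
import { withPeam } from '@peam-ai/next'; // hypothetical import path

const nextConfig: NextConfig = {};

export default withPeam(nextConfig, {
  searchExporter: {
    type: 'fileBased',
    config: { indexPath: '.peam/index.json' }, // baseDir is now optional
  },
  respectRobotsTxt: true,
  exclude: ['/admin/**'],
});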
package/dist/route.js
CHANGED
@@ -1,6 +1,7 @@
 'use strict';

 var server = require('peam/server');
+var fsSync = require('fs');
 var fs = require('fs/promises');
 var path = require('path');

@@ -22,6 +23,7 @@ function _interopNamespace(e) {
   return Object.freeze(n);
 }

+var fsSync__namespace = /*#__PURE__*/_interopNamespace(fsSync);
 var fs__namespace = /*#__PURE__*/_interopNamespace(fs);
 var path__namespace = /*#__PURE__*/_interopNamespace(path);

@@ -2468,7 +2470,8 @@ var log3 = loggers.search;
 var FileBasedSearchIndexExporter = class {
   constructor(options) {
     this.cachedData = null;
-    this.baseDir = options.baseDir;
+    var _a;
+    this.baseDir = (_a = options.baseDir) != null ? _a : process.cwd();
     this.indexPath = options.indexPath;
   }
   getFullPath() {
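This hunk makes baseDir optional on the file-based exporter: 0.1.4's default config injected process.cwd() from the outside, while 0.1.5 applies the fallback inside the constructor. A minimal TypeScript sketch of the new behavior (field names per the bundled source; the full options type is not shown in this diff):

class FileBasedExporterSketch {
  private baseDir: string;
  private indexPath: string;

  constructor(options: { baseDir?: string; indexPath: string }) {
    this.baseDir = options.baseDir ?? process.cwd(); // fallback now lives here
    this.indexPath = options.indexPath;
  }
}

// Both forms now resolve a base directory:
new FileBasedExporterSketch({ indexPath: '.peam/index.json' });
new FileBasedExporterSketch({ baseDir: '/srv/app', indexPath: '.peam/index.json' });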
@@ -2506,10 +2509,18 @@ var FileBasedSearchIndexExporter = class {
       return data;
     });
   }
-  export(data) {
-    return __async(this, null, function* () {
+  export(_0) {
+    return __async(this, arguments, function* (data, options = { override: true }) {
       const fullPath = this.getFullPath();
       try {
+        if (!(options == null ? void 0 : options.override)) {
+          try {
+            yield fs__namespace.access(fullPath);
+            log3.debug("Search index file already exists and override is false, skipping export:", fullPath);
+            return;
+          } catch (e) {
+          }
+        }
         const dir = path__namespace.dirname(fullPath);
         yield fs__namespace.mkdir(dir, { recursive: true });
         yield fs__namespace.writeFile(fullPath, JSON.stringify(data, null, 2), "utf-8");
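export() now takes an optional second argument defaulting to { override: true }, so existing callers keep the overwrite behavior; with override: false the write is skipped when the index file already exists (the fs.access probe throwing means the file is absent, so the write proceeds). A usage sketch, assuming the declared exporter type surfaces the new options parameter; the index payload shape is hypothetical, since the bundled code only relies on a keys array for logging:

import { createExporterFromConfig } from '@peam-ai/search';

const exporter = createExporterFromConfig({
  type: 'fileBased',
  config: { indexPath: '.peam/index.json' },
});

const searchIndexData = { keys: [] as string[] }; // hypothetical minimal payload

await exporter.export(searchIndexData);                      // default: overwrite
await exporter.export(searchIndexData, { override: false }); // skip if file exists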
@@ -2520,6 +2531,26 @@ var FileBasedSearchIndexExporter = class {
       }
     });
   }
+  exportSync(data, options = { override: true }) {
+    const fullPath = this.getFullPath();
+    try {
+      if (!(options == null ? void 0 : options.override)) {
+        try {
+          fsSync__namespace.accessSync(fullPath);
+          log3.debug("Search index file already exists and override is false, skipping export:", fullPath);
+          return;
+        } catch (e) {
+        }
+      }
+      const dir = path__namespace.dirname(fullPath);
+      fsSync__namespace.mkdirSync(dir, { recursive: true });
+      fsSync__namespace.writeFileSync(fullPath, JSON.stringify(data, null, 2), "utf-8");
+      log3.debug("Search index saved to file:", fullPath, "with", data.keys.length, "keys");
+    } catch (error) {
+      log3.error("Failed to save search index to file:", fullPath, error);
+      throw error;
+    }
+  }
 };
 function createExporterFromConfig(exporterConfig) {
   if (exporterConfig.type === "fileBased") {
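exportSync() is a new synchronous mirror of export(), built on accessSync/mkdirSync/writeFileSync via the fsSync require added at the top of the file, for call sites that cannot await. A short sketch with the same caveats as above (illustrative paths and payload; whether the declared SearchIndexExporter type surfaces exportSync is not visible in this diff):

import { createExporterFromConfig } from '@peam-ai/search';

const exporter = createExporterFromConfig({
  type: 'fileBased',
  config: { baseDir: '/tmp/build', indexPath: '.peam/index.json' },
});

// Blocking write with the same override semantics as export().
exporter.exportSync({ keys: [] }, { override: false });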
@@ -2529,22 +2560,19 @@ function createExporterFromConfig(exporterConfig) {
 }

 // src/config.ts
-({
-  searchExporter: {
-    config: { baseDir: process.cwd()}
-  }});
 var getConfig = () => {
   if (!process.env.PEAM_SEARCH_EXPORTER_TYPE || !process.env.PEAM_SEARCH_EXPORTER_CONFIG) {
     throw new Error(
       "Peam configuration not found. Make sure withPeam() is properly configured in your next.config file."
     );
   }
-  const searchExporter = {
+  const searchExporterConfig = {
     type: process.env.PEAM_SEARCH_EXPORTER_TYPE,
     config: JSON.parse(process.env.PEAM_SEARCH_EXPORTER_CONFIG)
   };
   const resolvedConfig = {
-    searchIndexExporter: createExporterFromConfig(searchExporter),
+    searchExporter: searchExporterConfig,
+    searchIndexExporter: createExporterFromConfig(searchExporterConfig),
     respectRobotsTxt: process.env.PEAM_RESPECT_ROBOTS_TXT === "true",
     robotsTxtPath: process.env.PEAM_ROBOTS_TXT_PATH || void 0,
     exclude: process.env.PEAM_EXCLUDE ? JSON.parse(process.env.PEAM_EXCLUDE) : []