@peam-ai/server 0.1.1 → 0.1.3

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
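The functional change in this release is how the chat handler obtains its search index: the `getSearchEngine` callback option and the `GetSearchEngine` type are removed from the public API, `CreateHandlerOptions` gains a `searchIndexExporter` option instead, and the exported `getSearchEngine` helper now takes a `SearchIndexExporter` argument rather than importing the index through the `peam_index/generated` bundler alias. A minimal sketch of the new usage, adapted from the JSDoc example in the updated type declarations (the `peam/server` import path, `FileBasedSearchIndexExporter`, and its `indexPath` option are taken from that example and are not verified beyond this diff):

```typescript
// 0.1.3 usage, adapted from the JSDoc example in the new type declarations.
// The 'peam/server' import path and the FileBasedSearchIndexExporter options
// come from that example and are assumptions, not independently verified.
import { createHandler } from 'peam/server';
import { openai } from '@ai-sdk/openai';
import { FileBasedSearchIndexExporter } from '@peam-ai/search';

// Next.js route handler: the exporter loads the prebuilt index from disk.
export const POST = createHandler({
  model: openai('gpt-4o'),
  searchIndexExporter: new FileBasedSearchIndexExporter({
    indexPath: 'generated/index.json',
  }),
});
```

Consumers that previously passed `getSearchEngine: async () => mySearchEngine` should construct an exporter instead; as the bundled handler code below shows, the handler now returns a 500 response when no exporter is configured.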
package/dist/index.d.mts CHANGED
@@ -1,4 +1,4 @@
- import { SearchEngine } from '@peam-ai/search';
+ import { SearchIndexExporter, SearchEngine } from '@peam-ai/search';
  import { LanguageModel, UIMessage } from 'ai';
 
  /**
@@ -18,10 +18,6 @@ interface CurrentPageMetadata {
  */
  path: string;
  }
- /**
- * Function to retrieve the search engine instance.
- */
- type GetSearchEngine = () => Promise<SearchEngine | undefined>;
  /**
  * Options for creating a chat handler.
  */
@@ -32,10 +28,10 @@ interface CreateHandlerOptions {
  */
  model?: LanguageModel;
  /**
- * Function to retrieve the search engine instance.
- * If not provided, the handler will return an error when no search engine is available.
+ * Search index exporter to use for loading the search index.
+ * If not provided, the handler will return an error when search is needed.
  */
- getSearchEngine?: GetSearchEngine;
+ searchIndexExporter?: SearchIndexExporter;
  }
  /**
  * Request body structure for chat API.
@@ -52,23 +48,25 @@ interface ChatRequestBody {
  *
  * @param options - Configuration options for the handler
  * @param options.model - The language model to use (default: GPT-4o)
- * @param options.getSearchEngine - An optional function to retrieve the search engine
+ * @param options.searchIndexExporter - The search index exporter to use for loading the search index (required)
  * @returns An async function that handles HTTP requests
  *
  * @example
  * ```typescript
- * // Next.js
  * import { createHandler } from 'peam/server';
  * import { openai } from '@ai-sdk/openai';
+ * import { FileBasedSearchIndexExporter } from '@peam-ai/search';
  *
  * export const POST = createHandler({
  * model: openai('gpt-4o'),
- * getSearchEngine: async () => mySearchEngine,
+ * searchIndexExporter: new FileBasedSearchIndexExporter({
+ * indexPath: 'generated/index.json'
+ * }),
  * });
  * ```
  */
  declare function createHandler(options?: CreateHandlerOptions): (req: Request) => Promise<Response>;
 
- declare function getSearchEngine(): Promise<SearchEngine | undefined>;
+ declare function getSearchEngine(exporter: SearchIndexExporter): Promise<SearchEngine | undefined>;
 
- export { type ChatRequestBody, type CreateHandlerOptions, type CurrentPageMetadata, type GetSearchEngine, createHandler, getSearchEngine };
+ export { type ChatRequestBody, type CreateHandlerOptions, type CurrentPageMetadata, createHandler, getSearchEngine };
package/dist/index.d.ts CHANGED
@@ -1,4 +1,4 @@
- import { SearchEngine } from '@peam-ai/search';
+ import { SearchIndexExporter, SearchEngine } from '@peam-ai/search';
  import { LanguageModel, UIMessage } from 'ai';
 
  /**
@@ -18,10 +18,6 @@ interface CurrentPageMetadata {
  */
  path: string;
  }
- /**
- * Function to retrieve the search engine instance.
- */
- type GetSearchEngine = () => Promise<SearchEngine | undefined>;
  /**
  * Options for creating a chat handler.
  */
@@ -32,10 +28,10 @@ interface CreateHandlerOptions {
  */
  model?: LanguageModel;
  /**
- * Function to retrieve the search engine instance.
- * If not provided, the handler will return an error when no search engine is available.
+ * Search index exporter to use for loading the search index.
+ * If not provided, the handler will return an error when search is needed.
  */
- getSearchEngine?: GetSearchEngine;
+ searchIndexExporter?: SearchIndexExporter;
  }
  /**
  * Request body structure for chat API.
@@ -52,23 +48,25 @@ interface ChatRequestBody {
  *
  * @param options - Configuration options for the handler
  * @param options.model - The language model to use (default: GPT-4o)
- * @param options.getSearchEngine - An optional function to retrieve the search engine
+ * @param options.searchIndexExporter - The search index exporter to use for loading the search index (required)
  * @returns An async function that handles HTTP requests
  *
  * @example
  * ```typescript
- * // Next.js
  * import { createHandler } from 'peam/server';
  * import { openai } from '@ai-sdk/openai';
+ * import { FileBasedSearchIndexExporter } from '@peam-ai/search';
  *
  * export const POST = createHandler({
  * model: openai('gpt-4o'),
- * getSearchEngine: async () => mySearchEngine,
+ * searchIndexExporter: new FileBasedSearchIndexExporter({
+ * indexPath: 'generated/index.json'
+ * }),
  * });
  * ```
  */
  declare function createHandler(options?: CreateHandlerOptions): (req: Request) => Promise<Response>;
 
- declare function getSearchEngine(): Promise<SearchEngine | undefined>;
+ declare function getSearchEngine(exporter: SearchIndexExporter): Promise<SearchEngine | undefined>;
 
- export { type ChatRequestBody, type CreateHandlerOptions, type CurrentPageMetadata, type GetSearchEngine, createHandler, getSearchEngine };
+ export { type ChatRequestBody, type CreateHandlerOptions, type CurrentPageMetadata, createHandler, getSearchEngine };
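The standalone `getSearchEngine` export changes accordingly: it no longer resolves the index from the bundler alias and instead loads it through the exporter it is given. A short sketch of calling it directly, for example to warm the module-level engine cache ahead of the first request (import paths follow the JSDoc example above; `count()` is the only `SearchEngine` method visible in the bundled output):

```typescript
import { getSearchEngine } from 'peam/server';
import { FileBasedSearchIndexExporter } from '@peam-ai/search';

// Warm the module-level cache before the first request; the loader returns
// undefined when the index has not been generated yet.
async function warmSearchIndex(): Promise<void> {
  const exporter = new FileBasedSearchIndexExporter({
    indexPath: 'generated/index.json',
  });
  const engine = await getSearchEngine(exporter);
  if (engine) {
    console.log(`Search index ready with ${engine.count()} documents`);
  }
}

void warmSearchIndex();
```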
package/dist/index.js CHANGED
@@ -1,9 +1,7 @@
  "use strict";
- var __create = Object.create;
  var __defProp = Object.defineProperty;
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __getOwnPropNames = Object.getOwnPropertyNames;
- var __getProtoOf = Object.getPrototypeOf;
  var __hasOwnProp = Object.prototype.hasOwnProperty;
  var __export = (target, all) => {
  for (var name in all)
@@ -17,14 +15,6 @@ var __copyProps = (to, from, except, desc) => {
  }
  return to;
  };
- var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
- // If the importer is in node compatibility mode or this is not an ESM
- // file that has been converted to a CommonJS file using a Babel-
- // compatible transform (i.e. "__esModule" has not been set), then set
- // "default" to the CommonJS "module.exports" for node compatibility.
- isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
- mod
- ));
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
  var __async = (__this, __arguments, generator) => {
  return new Promise((resolve, reject) => {
@@ -93,20 +83,19 @@ var import_logger = require("@peam-ai/logger");
  var import_search = require("@peam-ai/search");
  var log = import_logger.loggers.server;
  var searchEngine = null;
- function getSearchEngine() {
+ function getSearchEngine(exporter) {
  return __async(this, null, function* () {
- var _a;
  if (searchEngine) return searchEngine;
  try {
- const indexFile = (_a = yield import("peam_index/generated")) == null ? void 0 : _a.default;
- if (!indexFile || !indexFile.data || !indexFile.keys || indexFile.keys.length === 0) {
+ const indexData = yield exporter.import();
+ if (!indexData || !indexData.keys || indexData.keys.length === 0) {
  log.debug("Search index not yet generated. Run build first to generate the index.");
  return void 0;
  }
  searchEngine = new import_search.SearchEngine();
  yield searchEngine.import((key) => __async(null, null, function* () {
- return indexFile.data[key];
- }), indexFile.keys);
+ return indexData.data[key];
+ }), indexData.keys);
  const totalDocs = searchEngine.count();
  log.debug("Index loaded successfully with", totalDocs, "documents");
  return searchEngine;
@@ -163,7 +152,18 @@ function createHandler(options = {}) {
  const { summary } = body;
  const lastMessage = messages[messages.length - 1];
  const currentPage = getCurrentPage({ request: req, message: lastMessage });
- const searchEngine2 = options.getSearchEngine ? yield options.getSearchEngine() : yield getSearchEngine();
+ if (!options.searchIndexExporter) {
+ return new Response(
+ JSON.stringify({
+ error: "Search index exporter not configured"
+ }),
+ {
+ status: 500,
+ headers: { "Content-Type": "application/json" }
+ }
+ );
+ }
+ const searchEngine2 = yield getSearchEngine(options.searchIndexExporter);
  if (!searchEngine2) {
  return new Response(
  JSON.stringify({
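The bundled loader above only calls `exporter.import()` and expects something carrying `data` and `keys`. The `SearchIndexExporter` interface itself is defined in `@peam-ai/search` and does not appear in this diff, so the following custom exporter is just a sketch of the shape implied by that usage, for cases where the index lives somewhere other than the local filesystem:

```typescript
// Hypothetical exporter that fetches the generated index over HTTP.
// The return shape ({ data, keys }) is inferred from how the bundled
// getSearchEngine consumes it; the real SearchIndexExporter interface in
// @peam-ai/search may declare additional members.
interface ExportedIndex {
  data: Record<string, unknown>;
  keys: string[];
}

class RemoteSearchIndexExporter {
  constructor(private readonly url: string) {}

  async import(): Promise<ExportedIndex | undefined> {
    const res = await fetch(this.url);
    if (!res.ok) return undefined;
    return (await res.json()) as ExportedIndex;
  }
}

// Assuming the class satisfies SearchIndexExporter, it would be passed as:
// createHandler({ searchIndexExporter: new RemoteSearchIndexExporter('https://example.com/index.json') });
```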
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/index.ts","../src/createHandler.ts","../src/utils/getCurrentPage.ts","../src/utils/getSearchEngine.ts"],"sourcesContent":["export { createHandler } from './createHandler';\nexport type { ChatRequestBody, CreateHandlerOptions, CurrentPageMetadata, GetSearchEngine } from './types';\nexport { getSearchEngine } from './utils/getSearchEngine';\n","import { openai } from '@ai-sdk/openai';\nimport { streamSearchText, streamSummarize } from '@peam-ai/ai';\nimport { loggers } from '@peam-ai/logger';\nimport { createUIMessageStreamResponse } from 'ai';\nimport { type CreateHandlerOptions, type HandlerRequestBody } from './types';\nimport { getCurrentPage } from './utils/getCurrentPage';\nimport { getSearchEngine } from './utils/getSearchEngine';\n\nconst MAX_MESSAGE_LENGTH = 30000;\nconst log = loggers.server;\n\n/**\n * Creates a HTTP handler for the chat API.\n * This handler processes incoming chat messages and streams responses back to the client.\n *\n * @param options - Configuration options for the handler\n * @param options.model - The language model to use (default: GPT-4o)\n * @param options.getSearchEngine - An optional function to retrieve the search engine\n * @returns An async function that handles HTTP requests\n *\n * @example\n * ```typescript\n * // Next.js\n * import { createHandler } from 'peam/server';\n * import { openai } from '@ai-sdk/openai';\n *\n * export const POST = createHandler({\n * model: openai('gpt-4o'),\n * getSearchEngine: async () => mySearchEngine,\n * });\n * ```\n */\nexport function createHandler(options: CreateHandlerOptions = {}) {\n const model = options.model || openai('gpt-4o');\n\n const handler = async (req: Request): Promise<Response> => {\n try {\n const body = (await req.json()) as HandlerRequestBody;\n const { messages, mode } = body;\n\n if (!messages || messages.length === 0) {\n return new Response(\n JSON.stringify({\n error: 'No messages provided',\n }),\n {\n status: 400,\n headers: { 'Content-Type': 'application/json' },\n }\n );\n }\n\n // Validate message length\n for (const message of messages) {\n const messageContent = message.parts\n .filter((part) => part.type === 'text')\n .map((part) => ('text' in part ? part.text : ''))\n .join('');\n\n if (messageContent.length > MAX_MESSAGE_LENGTH) {\n return new Response(\n JSON.stringify({\n error: `Message exceeds maximum length of ${MAX_MESSAGE_LENGTH} characters`,\n }),\n {\n status: 400,\n headers: { 'Content-Type': 'application/json' },\n }\n );\n }\n }\n\n // Handle summarization\n if (mode === 'summarize') {\n const { previousSummary } = body;\n const stream = streamSummarize({\n model,\n messages,\n previousSummary,\n });\n\n return createUIMessageStreamResponse({ stream });\n }\n\n // Handle chat\n const { summary } = body;\n const lastMessage = messages[messages.length - 1];\n const currentPage = getCurrentPage({ request: req, message: lastMessage });\n\n // Get search engine using the provided function\n const searchEngine = options.getSearchEngine ? 
await options.getSearchEngine() : await getSearchEngine();\n\n if (!searchEngine) {\n return new Response(\n JSON.stringify({\n error: 'Search engine not available',\n }),\n {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n }\n );\n }\n\n const stream = streamSearchText({\n model,\n searchEngine,\n messages,\n currentPage,\n summary,\n });\n\n return createUIMessageStreamResponse({ stream });\n } catch (error) {\n log.error('Error in the chat route:', error);\n\n return new Response(\n JSON.stringify({\n error: 'Error while processing the chat request',\n }),\n {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n }\n );\n }\n };\n\n return handler;\n}\n","import { UIMessage } from 'ai';\nimport { type CurrentPageMetadata } from '../types';\n\n/**\n * Extracts the current page metadata from the request and message.\n */\nexport const getCurrentPage = ({\n request,\n message,\n}: {\n request: Request;\n message: UIMessage;\n}): CurrentPageMetadata | undefined => {\n const messageMetadata = message.metadata as { currentPage?: { title: string; origin: string; path: string } };\n const messageCurrentPage = messageMetadata.currentPage;\n\n if (messageCurrentPage && messageCurrentPage.path && messageCurrentPage.origin) {\n return {\n title: messageCurrentPage.title,\n origin: messageCurrentPage.origin,\n path: messageCurrentPage.path,\n };\n }\n\n try {\n if (request.headers.has('referer')) {\n const refererUrl = new URL(request.headers.get('referer') || '');\n return {\n path: refererUrl.pathname,\n origin: refererUrl.origin,\n };\n }\n } catch {\n // Invalid referer URL\n }\n\n return undefined;\n};\n","import { loggers } from '@peam-ai/logger';\nimport { SearchEngine } from '@peam-ai/search';\n\nconst log = loggers.server;\nlet searchEngine: SearchEngine | null = null;\n\nexport async function getSearchEngine(): Promise<SearchEngine | undefined> {\n if (searchEngine) return searchEngine;\n\n try {\n // @ts-expect-error - peam_index/generated is resolved via webpack/turbopack alias\n const indexFile = (await import('peam_index/generated'))?.default;\n\n // Check if this is the stub\n if (!indexFile || !indexFile.data || !indexFile.keys || indexFile.keys.length === 0) {\n log.debug('Search index not yet generated. 
Run build first to generate the index.');\n return undefined;\n }\n\n searchEngine = new SearchEngine();\n await searchEngine.import(async (key: string) => {\n return indexFile.data[key];\n }, indexFile.keys);\n\n const totalDocs = searchEngine.count();\n log.debug('Index loaded successfully with', totalDocs, 'documents');\n return searchEngine;\n } catch (error) {\n log.error('Failed to load search index:', error);\n }\n\n return undefined;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,oBAAuB;AACvB,gBAAkD;AAClD,IAAAA,iBAAwB;AACxB,IAAAC,aAA8C;;;ACGvC,IAAM,iBAAiB,CAAC;AAAA,EAC7B;AAAA,EACA;AACF,MAGuC;AACrC,QAAM,kBAAkB,QAAQ;AAChC,QAAM,qBAAqB,gBAAgB;AAE3C,MAAI,sBAAsB,mBAAmB,QAAQ,mBAAmB,QAAQ;AAC9E,WAAO;AAAA,MACL,OAAO,mBAAmB;AAAA,MAC1B,QAAQ,mBAAmB;AAAA,MAC3B,MAAM,mBAAmB;AAAA,IAC3B;AAAA,EACF;AAEA,MAAI;AACF,QAAI,QAAQ,QAAQ,IAAI,SAAS,GAAG;AAClC,YAAM,aAAa,IAAI,IAAI,QAAQ,QAAQ,IAAI,SAAS,KAAK,EAAE;AAC/D,aAAO;AAAA,QACL,MAAM,WAAW;AAAA,QACjB,QAAQ,WAAW;AAAA,MACrB;AAAA,IACF;AAAA,EACF,SAAQ;AAAA,EAER;AAEA,SAAO;AACT;;;ACrCA,oBAAwB;AACxB,oBAA6B;AAE7B,IAAM,MAAM,sBAAQ;AACpB,IAAI,eAAoC;AAExC,SAAsB,kBAAqD;AAAA;AAN3E;AAOE,QAAI,aAAc,QAAO;AAEzB,QAAI;AAEF,YAAM,aAAa,WAAM,OAAO,sBAAsB,MAAnC,mBAAuC;AAG1D,UAAI,CAAC,aAAa,CAAC,UAAU,QAAQ,CAAC,UAAU,QAAQ,UAAU,KAAK,WAAW,GAAG;AACnF,YAAI,MAAM,wEAAwE;AAClF,eAAO;AAAA,MACT;AAEA,qBAAe,IAAI,2BAAa;AAChC,YAAM,aAAa,OAAO,CAAO,QAAgB;AAC/C,eAAO,UAAU,KAAK,GAAG;AAAA,MAC3B,IAAG,UAAU,IAAI;AAEjB,YAAM,YAAY,aAAa,MAAM;AACrC,UAAI,MAAM,kCAAkC,WAAW,WAAW;AAClE,aAAO;AAAA,IACT,SAAS,OAAO;AACd,UAAI,MAAM,gCAAgC,KAAK;AAAA,IACjD;AAEA,WAAO;AAAA,EACT;AAAA;;;AFxBA,IAAM,qBAAqB;AAC3B,IAAMC,OAAM,uBAAQ;AAuBb,SAAS,cAAc,UAAgC,CAAC,GAAG;AAChE,QAAM,QAAQ,QAAQ,aAAS,sBAAO,QAAQ;AAE9C,QAAM,UAAU,CAAO,QAAoC;AACzD,QAAI;AACF,YAAM,OAAQ,MAAM,IAAI,KAAK;AAC7B,YAAM,EAAE,UAAU,KAAK,IAAI;AAE3B,UAAI,CAAC,YAAY,SAAS,WAAW,GAAG;AACtC,eAAO,IAAI;AAAA,UACT,KAAK,UAAU;AAAA,YACb,OAAO;AAAA,UACT,CAAC;AAAA,UACD;AAAA,YACE,QAAQ;AAAA,YACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAGA,iBAAW,WAAW,UAAU;AAC9B,cAAM,iBAAiB,QAAQ,MAC5B,OAAO,CAAC,SAAS,KAAK,SAAS,MAAM,EACrC,IAAI,CAAC,SAAU,UAAU,OAAO,KAAK,OAAO,EAAG,EAC/C,KAAK,EAAE;AAEV,YAAI,eAAe,SAAS,oBAAoB;AAC9C,iBAAO,IAAI;AAAA,YACT,KAAK,UAAU;AAAA,cACb,OAAO,qCAAqC,kBAAkB;AAAA,YAChE,CAAC;AAAA,YACD;AAAA,cACE,QAAQ;AAAA,cACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,YAChD;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAGA,UAAI,SAAS,aAAa;AACxB,cAAM,EAAE,gBAAgB,IAAI;AAC5B,cAAMC,cAAS,2BAAgB;AAAA,UAC7B;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAED,mBAAO,0CAA8B,EAAE,QAAAA,QAAO,CAAC;AAAA,MACjD;AAGA,YAAM,EAAE,QAAQ,IAAI;AACpB,YAAM,cAAc,SAAS,SAAS,SAAS,CAAC;AAChD,YAAM,cAAc,eAAe,EAAE,SAAS,KAAK,SAAS,YAAY,CAAC;AAGzE,YAAMC,gBAAe,QAAQ,kBAAkB,MAAM,QAAQ,gBAAgB,IAAI,MAAM,gBAAgB;AAEvG,UAAI,CAACA,eAAc;AACjB,eAAO,IAAI;AAAA,UACT,KAAK,UAAU;AAAA,YACb,OAAO;AAAA,UACT,CAAC;AAAA,UACD;AAAA,YACE,QAAQ;AAAA,YACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAEA,YAAM,aAAS,4BAAiB;AAAA,QAC9B;AAAA,QACA,cAAAA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAED,iBAAO,0CAA8B,EAAE,OAAO,CAAC;AAAA,IACjD,SAAS,OAAO;AACd,MAAAF,KAAI,MAAM,4BAA4B,KAAK;AAE3C,aAAO,IAAI;AAAA,QACT,KAAK,UAAU;AAAA,UACb,OAAO;AAAA,QACT,CAAC;AAAA,QACD;AAAA,UACE,QAAQ;AAAA,UACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,QAChD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;","names":["import_logger","import_ai","log","stream","searchEngine"]}
+ {"version":3,"sources":["../src/index.ts","../src/createHandler.ts","../src/utils/getCurrentPage.ts","../src/utils/getSearchEngine.ts"],"sourcesContent":["export { createHandler } from './createHandler';\nexport type { ChatRequestBody, CreateHandlerOptions, CurrentPageMetadata } from './types';\nexport { getSearchEngine } from './utils/getSearchEngine';\n","import { openai } from '@ai-sdk/openai';\nimport { streamSearchText, streamSummarize } from '@peam-ai/ai';\nimport { loggers } from '@peam-ai/logger';\nimport { createUIMessageStreamResponse } from 'ai';\nimport { type CreateHandlerOptions, type HandlerRequestBody } from './types';\nimport { getCurrentPage } from './utils/getCurrentPage';\nimport { getSearchEngine } from './utils/getSearchEngine';\n\nconst MAX_MESSAGE_LENGTH = 30000;\nconst log = loggers.server;\n\n/**\n * Creates a HTTP handler for the chat API.\n * This handler processes incoming chat messages and streams responses back to the client.\n *\n * @param options - Configuration options for the handler\n * @param options.model - The language model to use (default: GPT-4o)\n * @param options.searchIndexExporter - The search index exporter to use for loading the search index (required)\n * @returns An async function that handles HTTP requests\n *\n * @example\n * ```typescript\n * import { createHandler } from 'peam/server';\n * import { openai } from '@ai-sdk/openai';\n * import { FileBasedSearchIndexExporter } from '@peam-ai/search';\n *\n * export const POST = createHandler({\n * model: openai('gpt-4o'),\n * searchIndexExporter: new FileBasedSearchIndexExporter({\n * indexPath: 'generated/index.json'\n * }),\n * });\n * ```\n */\nexport function createHandler(options: CreateHandlerOptions = {}) {\n const model = options.model || openai('gpt-4o');\n\n const handler = async (req: Request): Promise<Response> => {\n try {\n const body = (await req.json()) as HandlerRequestBody;\n const { messages, mode } = body;\n\n if (!messages || messages.length === 0) {\n return new Response(\n JSON.stringify({\n error: 'No messages provided',\n }),\n {\n status: 400,\n headers: { 'Content-Type': 'application/json' },\n }\n );\n }\n\n // Validate message length\n for (const message of messages) {\n const messageContent = message.parts\n .filter((part) => part.type === 'text')\n .map((part) => ('text' in part ? 
part.text : ''))\n .join('');\n\n if (messageContent.length > MAX_MESSAGE_LENGTH) {\n return new Response(\n JSON.stringify({\n error: `Message exceeds maximum length of ${MAX_MESSAGE_LENGTH} characters`,\n }),\n {\n status: 400,\n headers: { 'Content-Type': 'application/json' },\n }\n );\n }\n }\n\n // Handle summarization\n if (mode === 'summarize') {\n const { previousSummary } = body;\n const stream = streamSummarize({\n model,\n messages,\n previousSummary,\n });\n\n return createUIMessageStreamResponse({ stream });\n }\n\n // Handle chat\n const { summary } = body;\n const lastMessage = messages[messages.length - 1];\n const currentPage = getCurrentPage({ request: req, message: lastMessage });\n\n if (!options.searchIndexExporter) {\n return new Response(\n JSON.stringify({\n error: 'Search index exporter not configured',\n }),\n {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n }\n );\n }\n\n const searchEngine = await getSearchEngine(options.searchIndexExporter);\n\n if (!searchEngine) {\n return new Response(\n JSON.stringify({\n error: 'Search engine not available',\n }),\n {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n }\n );\n }\n\n const stream = streamSearchText({\n model,\n searchEngine,\n messages,\n currentPage,\n summary,\n });\n\n return createUIMessageStreamResponse({ stream });\n } catch (error) {\n log.error('Error in the chat route:', error);\n\n return new Response(\n JSON.stringify({\n error: 'Error while processing the chat request',\n }),\n {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n }\n );\n }\n };\n\n return handler;\n}\n","import { UIMessage } from 'ai';\nimport { type CurrentPageMetadata } from '../types';\n\n/**\n * Extracts the current page metadata from the request and message.\n */\nexport const getCurrentPage = ({\n request,\n message,\n}: {\n request: Request;\n message: UIMessage;\n}): CurrentPageMetadata | undefined => {\n const messageMetadata = message.metadata as { currentPage?: { title: string; origin: string; path: string } };\n const messageCurrentPage = messageMetadata.currentPage;\n\n if (messageCurrentPage && messageCurrentPage.path && messageCurrentPage.origin) {\n return {\n title: messageCurrentPage.title,\n origin: messageCurrentPage.origin,\n path: messageCurrentPage.path,\n };\n }\n\n try {\n if (request.headers.has('referer')) {\n const refererUrl = new URL(request.headers.get('referer') || '');\n return {\n path: refererUrl.pathname,\n origin: refererUrl.origin,\n };\n }\n } catch {\n // Invalid referer URL\n }\n\n return undefined;\n};\n","import { loggers } from '@peam-ai/logger';\nimport { SearchEngine, type SearchIndexExporter } from '@peam-ai/search';\n\nconst log = loggers.server;\nlet searchEngine: SearchEngine | null = null;\n\nexport async function getSearchEngine(exporter: SearchIndexExporter): Promise<SearchEngine | undefined> {\n if (searchEngine) return searchEngine;\n\n try {\n const indexData = await exporter.import();\n\n if (!indexData || !indexData.keys || indexData.keys.length === 0) {\n log.debug('Search index not yet generated. 
Run build first to generate the index.');\n return undefined;\n }\n\n searchEngine = new SearchEngine();\n await searchEngine.import(async (key: string) => {\n return indexData.data[key];\n }, indexData.keys);\n\n const totalDocs = searchEngine.count();\n log.debug('Index loaded successfully with', totalDocs, 'documents');\n return searchEngine;\n } catch (error) {\n log.error('Failed to load search index:', error);\n }\n\n return undefined;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACAA,oBAAuB;AACvB,gBAAkD;AAClD,IAAAA,iBAAwB;AACxB,IAAAC,aAA8C;;;ACGvC,IAAM,iBAAiB,CAAC;AAAA,EAC7B;AAAA,EACA;AACF,MAGuC;AACrC,QAAM,kBAAkB,QAAQ;AAChC,QAAM,qBAAqB,gBAAgB;AAE3C,MAAI,sBAAsB,mBAAmB,QAAQ,mBAAmB,QAAQ;AAC9E,WAAO;AAAA,MACL,OAAO,mBAAmB;AAAA,MAC1B,QAAQ,mBAAmB;AAAA,MAC3B,MAAM,mBAAmB;AAAA,IAC3B;AAAA,EACF;AAEA,MAAI;AACF,QAAI,QAAQ,QAAQ,IAAI,SAAS,GAAG;AAClC,YAAM,aAAa,IAAI,IAAI,QAAQ,QAAQ,IAAI,SAAS,KAAK,EAAE;AAC/D,aAAO;AAAA,QACL,MAAM,WAAW;AAAA,QACjB,QAAQ,WAAW;AAAA,MACrB;AAAA,IACF;AAAA,EACF,SAAQ;AAAA,EAER;AAEA,SAAO;AACT;;;ACrCA,oBAAwB;AACxB,oBAAuD;AAEvD,IAAM,MAAM,sBAAQ;AACpB,IAAI,eAAoC;AAExC,SAAsB,gBAAgB,UAAkE;AAAA;AACtG,QAAI,aAAc,QAAO;AAEzB,QAAI;AACF,YAAM,YAAY,MAAM,SAAS,OAAO;AAExC,UAAI,CAAC,aAAa,CAAC,UAAU,QAAQ,UAAU,KAAK,WAAW,GAAG;AAChE,YAAI,MAAM,wEAAwE;AAClF,eAAO;AAAA,MACT;AAEA,qBAAe,IAAI,2BAAa;AAChC,YAAM,aAAa,OAAO,CAAO,QAAgB;AAC/C,eAAO,UAAU,KAAK,GAAG;AAAA,MAC3B,IAAG,UAAU,IAAI;AAEjB,YAAM,YAAY,aAAa,MAAM;AACrC,UAAI,MAAM,kCAAkC,WAAW,WAAW;AAClE,aAAO;AAAA,IACT,SAAS,OAAO;AACd,UAAI,MAAM,gCAAgC,KAAK;AAAA,IACjD;AAEA,WAAO;AAAA,EACT;AAAA;;;AFtBA,IAAM,qBAAqB;AAC3B,IAAMC,OAAM,uBAAQ;AAyBb,SAAS,cAAc,UAAgC,CAAC,GAAG;AAChE,QAAM,QAAQ,QAAQ,aAAS,sBAAO,QAAQ;AAE9C,QAAM,UAAU,CAAO,QAAoC;AACzD,QAAI;AACF,YAAM,OAAQ,MAAM,IAAI,KAAK;AAC7B,YAAM,EAAE,UAAU,KAAK,IAAI;AAE3B,UAAI,CAAC,YAAY,SAAS,WAAW,GAAG;AACtC,eAAO,IAAI;AAAA,UACT,KAAK,UAAU;AAAA,YACb,OAAO;AAAA,UACT,CAAC;AAAA,UACD;AAAA,YACE,QAAQ;AAAA,YACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAGA,iBAAW,WAAW,UAAU;AAC9B,cAAM,iBAAiB,QAAQ,MAC5B,OAAO,CAAC,SAAS,KAAK,SAAS,MAAM,EACrC,IAAI,CAAC,SAAU,UAAU,OAAO,KAAK,OAAO,EAAG,EAC/C,KAAK,EAAE;AAEV,YAAI,eAAe,SAAS,oBAAoB;AAC9C,iBAAO,IAAI;AAAA,YACT,KAAK,UAAU;AAAA,cACb,OAAO,qCAAqC,kBAAkB;AAAA,YAChE,CAAC;AAAA,YACD;AAAA,cACE,QAAQ;AAAA,cACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,YAChD;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAGA,UAAI,SAAS,aAAa;AACxB,cAAM,EAAE,gBAAgB,IAAI;AAC5B,cAAMC,cAAS,2BAAgB;AAAA,UAC7B;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAED,mBAAO,0CAA8B,EAAE,QAAAA,QAAO,CAAC;AAAA,MACjD;AAGA,YAAM,EAAE,QAAQ,IAAI;AACpB,YAAM,cAAc,SAAS,SAAS,SAAS,CAAC;AAChD,YAAM,cAAc,eAAe,EAAE,SAAS,KAAK,SAAS,YAAY,CAAC;AAEzE,UAAI,CAAC,QAAQ,qBAAqB;AAChC,eAAO,IAAI;AAAA,UACT,KAAK,UAAU;AAAA,YACb,OAAO;AAAA,UACT,CAAC;AAAA,UACD;AAAA,YACE,QAAQ;AAAA,YACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAEA,YAAMC,gBAAe,MAAM,gBAAgB,QAAQ,mBAAmB;AAEtE,UAAI,CAACA,eAAc;AACjB,eAAO,IAAI;AAAA,UACT,KAAK,UAAU;AAAA,YACb,OAAO;AAAA,UACT,CAAC;AAAA,UACD;AAAA,YACE,QAAQ;AAAA,YACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAEA,YAAM,aAAS,4BAAiB;AAAA,QAC9B;AAAA,QACA,cAAAA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAED,iBAAO,0CAA8B,EAAE,OAAO,CAAC;AAAA,IACjD,SAAS,OAAO;AACd,MAAAF,KAAI,MAAM,4BAA4B,KAAK;AAE3C,aAAO,IAAI;AAAA,QACT,KAAK,UAAU;AAAA,UACb,OAAO;AAAA,QACT,CAAC;AAAA,QACD;AAAA,UACE,QAAQ;AAAA,UACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,QAChD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;","names":["import_logger","import_ai","log","stream","searchEngine"]}
package/dist/index.mjs CHANGED
@@ -57,20 +57,19 @@ import { loggers } from "@peam-ai/logger";
  import { SearchEngine } from "@peam-ai/search";
  var log = loggers.server;
  var searchEngine = null;
- function getSearchEngine() {
+ function getSearchEngine(exporter) {
  return __async(this, null, function* () {
- var _a;
  if (searchEngine) return searchEngine;
  try {
- const indexFile = (_a = yield import("peam_index/generated")) == null ? void 0 : _a.default;
- if (!indexFile || !indexFile.data || !indexFile.keys || indexFile.keys.length === 0) {
+ const indexData = yield exporter.import();
+ if (!indexData || !indexData.keys || indexData.keys.length === 0) {
  log.debug("Search index not yet generated. Run build first to generate the index.");
  return void 0;
  }
  searchEngine = new SearchEngine();
  yield searchEngine.import((key) => __async(null, null, function* () {
- return indexFile.data[key];
- }), indexFile.keys);
+ return indexData.data[key];
+ }), indexData.keys);
  const totalDocs = searchEngine.count();
  log.debug("Index loaded successfully with", totalDocs, "documents");
  return searchEngine;
@@ -127,7 +126,18 @@ function createHandler(options = {}) {
  const { summary } = body;
  const lastMessage = messages[messages.length - 1];
  const currentPage = getCurrentPage({ request: req, message: lastMessage });
- const searchEngine2 = options.getSearchEngine ? yield options.getSearchEngine() : yield getSearchEngine();
+ if (!options.searchIndexExporter) {
+ return new Response(
+ JSON.stringify({
+ error: "Search index exporter not configured"
+ }),
+ {
+ status: 500,
+ headers: { "Content-Type": "application/json" }
+ }
+ );
+ }
+ const searchEngine2 = yield getSearchEngine(options.searchIndexExporter);
  if (!searchEngine2) {
  return new Response(
  JSON.stringify({
package/dist/index.mjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/createHandler.ts","../src/utils/getCurrentPage.ts","../src/utils/getSearchEngine.ts"],"sourcesContent":["import { openai } from '@ai-sdk/openai';\nimport { streamSearchText, streamSummarize } from '@peam-ai/ai';\nimport { loggers } from '@peam-ai/logger';\nimport { createUIMessageStreamResponse } from 'ai';\nimport { type CreateHandlerOptions, type HandlerRequestBody } from './types';\nimport { getCurrentPage } from './utils/getCurrentPage';\nimport { getSearchEngine } from './utils/getSearchEngine';\n\nconst MAX_MESSAGE_LENGTH = 30000;\nconst log = loggers.server;\n\n/**\n * Creates a HTTP handler for the chat API.\n * This handler processes incoming chat messages and streams responses back to the client.\n *\n * @param options - Configuration options for the handler\n * @param options.model - The language model to use (default: GPT-4o)\n * @param options.getSearchEngine - An optional function to retrieve the search engine\n * @returns An async function that handles HTTP requests\n *\n * @example\n * ```typescript\n * // Next.js\n * import { createHandler } from 'peam/server';\n * import { openai } from '@ai-sdk/openai';\n *\n * export const POST = createHandler({\n * model: openai('gpt-4o'),\n * getSearchEngine: async () => mySearchEngine,\n * });\n * ```\n */\nexport function createHandler(options: CreateHandlerOptions = {}) {\n const model = options.model || openai('gpt-4o');\n\n const handler = async (req: Request): Promise<Response> => {\n try {\n const body = (await req.json()) as HandlerRequestBody;\n const { messages, mode } = body;\n\n if (!messages || messages.length === 0) {\n return new Response(\n JSON.stringify({\n error: 'No messages provided',\n }),\n {\n status: 400,\n headers: { 'Content-Type': 'application/json' },\n }\n );\n }\n\n // Validate message length\n for (const message of messages) {\n const messageContent = message.parts\n .filter((part) => part.type === 'text')\n .map((part) => ('text' in part ? part.text : ''))\n .join('');\n\n if (messageContent.length > MAX_MESSAGE_LENGTH) {\n return new Response(\n JSON.stringify({\n error: `Message exceeds maximum length of ${MAX_MESSAGE_LENGTH} characters`,\n }),\n {\n status: 400,\n headers: { 'Content-Type': 'application/json' },\n }\n );\n }\n }\n\n // Handle summarization\n if (mode === 'summarize') {\n const { previousSummary } = body;\n const stream = streamSummarize({\n model,\n messages,\n previousSummary,\n });\n\n return createUIMessageStreamResponse({ stream });\n }\n\n // Handle chat\n const { summary } = body;\n const lastMessage = messages[messages.length - 1];\n const currentPage = getCurrentPage({ request: req, message: lastMessage });\n\n // Get search engine using the provided function\n const searchEngine = options.getSearchEngine ? 
await options.getSearchEngine() : await getSearchEngine();\n\n if (!searchEngine) {\n return new Response(\n JSON.stringify({\n error: 'Search engine not available',\n }),\n {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n }\n );\n }\n\n const stream = streamSearchText({\n model,\n searchEngine,\n messages,\n currentPage,\n summary,\n });\n\n return createUIMessageStreamResponse({ stream });\n } catch (error) {\n log.error('Error in the chat route:', error);\n\n return new Response(\n JSON.stringify({\n error: 'Error while processing the chat request',\n }),\n {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n }\n );\n }\n };\n\n return handler;\n}\n","import { UIMessage } from 'ai';\nimport { type CurrentPageMetadata } from '../types';\n\n/**\n * Extracts the current page metadata from the request and message.\n */\nexport const getCurrentPage = ({\n request,\n message,\n}: {\n request: Request;\n message: UIMessage;\n}): CurrentPageMetadata | undefined => {\n const messageMetadata = message.metadata as { currentPage?: { title: string; origin: string; path: string } };\n const messageCurrentPage = messageMetadata.currentPage;\n\n if (messageCurrentPage && messageCurrentPage.path && messageCurrentPage.origin) {\n return {\n title: messageCurrentPage.title,\n origin: messageCurrentPage.origin,\n path: messageCurrentPage.path,\n };\n }\n\n try {\n if (request.headers.has('referer')) {\n const refererUrl = new URL(request.headers.get('referer') || '');\n return {\n path: refererUrl.pathname,\n origin: refererUrl.origin,\n };\n }\n } catch {\n // Invalid referer URL\n }\n\n return undefined;\n};\n","import { loggers } from '@peam-ai/logger';\nimport { SearchEngine } from '@peam-ai/search';\n\nconst log = loggers.server;\nlet searchEngine: SearchEngine | null = null;\n\nexport async function getSearchEngine(): Promise<SearchEngine | undefined> {\n if (searchEngine) return searchEngine;\n\n try {\n // @ts-expect-error - peam_index/generated is resolved via webpack/turbopack alias\n const indexFile = (await import('peam_index/generated'))?.default;\n\n // Check if this is the stub\n if (!indexFile || !indexFile.data || !indexFile.keys || indexFile.keys.length === 0) {\n log.debug('Search index not yet generated. 
Run build first to generate the index.');\n return undefined;\n }\n\n searchEngine = new SearchEngine();\n await searchEngine.import(async (key: string) => {\n return indexFile.data[key];\n }, indexFile.keys);\n\n const totalDocs = searchEngine.count();\n log.debug('Index loaded successfully with', totalDocs, 'documents');\n return searchEngine;\n } catch (error) {\n log.error('Failed to load search index:', error);\n }\n\n return undefined;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,SAAS,cAAc;AACvB,SAAS,kBAAkB,uBAAuB;AAClD,SAAS,WAAAA,gBAAe;AACxB,SAAS,qCAAqC;;;ACGvC,IAAM,iBAAiB,CAAC;AAAA,EAC7B;AAAA,EACA;AACF,MAGuC;AACrC,QAAM,kBAAkB,QAAQ;AAChC,QAAM,qBAAqB,gBAAgB;AAE3C,MAAI,sBAAsB,mBAAmB,QAAQ,mBAAmB,QAAQ;AAC9E,WAAO;AAAA,MACL,OAAO,mBAAmB;AAAA,MAC1B,QAAQ,mBAAmB;AAAA,MAC3B,MAAM,mBAAmB;AAAA,IAC3B;AAAA,EACF;AAEA,MAAI;AACF,QAAI,QAAQ,QAAQ,IAAI,SAAS,GAAG;AAClC,YAAM,aAAa,IAAI,IAAI,QAAQ,QAAQ,IAAI,SAAS,KAAK,EAAE;AAC/D,aAAO;AAAA,QACL,MAAM,WAAW;AAAA,QACjB,QAAQ,WAAW;AAAA,MACrB;AAAA,IACF;AAAA,EACF,SAAQ;AAAA,EAER;AAEA,SAAO;AACT;;;ACrCA,SAAS,eAAe;AACxB,SAAS,oBAAoB;AAE7B,IAAM,MAAM,QAAQ;AACpB,IAAI,eAAoC;AAExC,SAAsB,kBAAqD;AAAA;AAN3E;AAOE,QAAI,aAAc,QAAO;AAEzB,QAAI;AAEF,YAAM,aAAa,WAAM,OAAO,sBAAsB,MAAnC,mBAAuC;AAG1D,UAAI,CAAC,aAAa,CAAC,UAAU,QAAQ,CAAC,UAAU,QAAQ,UAAU,KAAK,WAAW,GAAG;AACnF,YAAI,MAAM,wEAAwE;AAClF,eAAO;AAAA,MACT;AAEA,qBAAe,IAAI,aAAa;AAChC,YAAM,aAAa,OAAO,CAAO,QAAgB;AAC/C,eAAO,UAAU,KAAK,GAAG;AAAA,MAC3B,IAAG,UAAU,IAAI;AAEjB,YAAM,YAAY,aAAa,MAAM;AACrC,UAAI,MAAM,kCAAkC,WAAW,WAAW;AAClE,aAAO;AAAA,IACT,SAAS,OAAO;AACd,UAAI,MAAM,gCAAgC,KAAK;AAAA,IACjD;AAEA,WAAO;AAAA,EACT;AAAA;;;AFxBA,IAAM,qBAAqB;AAC3B,IAAMC,OAAMC,SAAQ;AAuBb,SAAS,cAAc,UAAgC,CAAC,GAAG;AAChE,QAAM,QAAQ,QAAQ,SAAS,OAAO,QAAQ;AAE9C,QAAM,UAAU,CAAO,QAAoC;AACzD,QAAI;AACF,YAAM,OAAQ,MAAM,IAAI,KAAK;AAC7B,YAAM,EAAE,UAAU,KAAK,IAAI;AAE3B,UAAI,CAAC,YAAY,SAAS,WAAW,GAAG;AACtC,eAAO,IAAI;AAAA,UACT,KAAK,UAAU;AAAA,YACb,OAAO;AAAA,UACT,CAAC;AAAA,UACD;AAAA,YACE,QAAQ;AAAA,YACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAGA,iBAAW,WAAW,UAAU;AAC9B,cAAM,iBAAiB,QAAQ,MAC5B,OAAO,CAAC,SAAS,KAAK,SAAS,MAAM,EACrC,IAAI,CAAC,SAAU,UAAU,OAAO,KAAK,OAAO,EAAG,EAC/C,KAAK,EAAE;AAEV,YAAI,eAAe,SAAS,oBAAoB;AAC9C,iBAAO,IAAI;AAAA,YACT,KAAK,UAAU;AAAA,cACb,OAAO,qCAAqC,kBAAkB;AAAA,YAChE,CAAC;AAAA,YACD;AAAA,cACE,QAAQ;AAAA,cACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,YAChD;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAGA,UAAI,SAAS,aAAa;AACxB,cAAM,EAAE,gBAAgB,IAAI;AAC5B,cAAMC,UAAS,gBAAgB;AAAA,UAC7B;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAED,eAAO,8BAA8B,EAAE,QAAAA,QAAO,CAAC;AAAA,MACjD;AAGA,YAAM,EAAE,QAAQ,IAAI;AACpB,YAAM,cAAc,SAAS,SAAS,SAAS,CAAC;AAChD,YAAM,cAAc,eAAe,EAAE,SAAS,KAAK,SAAS,YAAY,CAAC;AAGzE,YAAMC,gBAAe,QAAQ,kBAAkB,MAAM,QAAQ,gBAAgB,IAAI,MAAM,gBAAgB;AAEvG,UAAI,CAACA,eAAc;AACjB,eAAO,IAAI;AAAA,UACT,KAAK,UAAU;AAAA,YACb,OAAO;AAAA,UACT,CAAC;AAAA,UACD;AAAA,YACE,QAAQ;AAAA,YACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAEA,YAAM,SAAS,iBAAiB;AAAA,QAC9B;AAAA,QACA,cAAAA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAED,aAAO,8BAA8B,EAAE,OAAO,CAAC;AAAA,IACjD,SAAS,OAAO;AACd,MAAAH,KAAI,MAAM,4BAA4B,KAAK;AAE3C,aAAO,IAAI;AAAA,QACT,KAAK,UAAU;AAAA,UACb,OAAO;AAAA,QACT,CAAC;AAAA,QACD;AAAA,UACE,QAAQ;AAAA,UACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,QAChD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;","names":["loggers","log","loggers","stream","searchEngine"]}
+ {"version":3,"sources":["../src/createHandler.ts","../src/utils/getCurrentPage.ts","../src/utils/getSearchEngine.ts"],"sourcesContent":["import { openai } from '@ai-sdk/openai';\nimport { streamSearchText, streamSummarize } from '@peam-ai/ai';\nimport { loggers } from '@peam-ai/logger';\nimport { createUIMessageStreamResponse } from 'ai';\nimport { type CreateHandlerOptions, type HandlerRequestBody } from './types';\nimport { getCurrentPage } from './utils/getCurrentPage';\nimport { getSearchEngine } from './utils/getSearchEngine';\n\nconst MAX_MESSAGE_LENGTH = 30000;\nconst log = loggers.server;\n\n/**\n * Creates a HTTP handler for the chat API.\n * This handler processes incoming chat messages and streams responses back to the client.\n *\n * @param options - Configuration options for the handler\n * @param options.model - The language model to use (default: GPT-4o)\n * @param options.searchIndexExporter - The search index exporter to use for loading the search index (required)\n * @returns An async function that handles HTTP requests\n *\n * @example\n * ```typescript\n * import { createHandler } from 'peam/server';\n * import { openai } from '@ai-sdk/openai';\n * import { FileBasedSearchIndexExporter } from '@peam-ai/search';\n *\n * export const POST = createHandler({\n * model: openai('gpt-4o'),\n * searchIndexExporter: new FileBasedSearchIndexExporter({\n * indexPath: 'generated/index.json'\n * }),\n * });\n * ```\n */\nexport function createHandler(options: CreateHandlerOptions = {}) {\n const model = options.model || openai('gpt-4o');\n\n const handler = async (req: Request): Promise<Response> => {\n try {\n const body = (await req.json()) as HandlerRequestBody;\n const { messages, mode } = body;\n\n if (!messages || messages.length === 0) {\n return new Response(\n JSON.stringify({\n error: 'No messages provided',\n }),\n {\n status: 400,\n headers: { 'Content-Type': 'application/json' },\n }\n );\n }\n\n // Validate message length\n for (const message of messages) {\n const messageContent = message.parts\n .filter((part) => part.type === 'text')\n .map((part) => ('text' in part ? 
part.text : ''))\n .join('');\n\n if (messageContent.length > MAX_MESSAGE_LENGTH) {\n return new Response(\n JSON.stringify({\n error: `Message exceeds maximum length of ${MAX_MESSAGE_LENGTH} characters`,\n }),\n {\n status: 400,\n headers: { 'Content-Type': 'application/json' },\n }\n );\n }\n }\n\n // Handle summarization\n if (mode === 'summarize') {\n const { previousSummary } = body;\n const stream = streamSummarize({\n model,\n messages,\n previousSummary,\n });\n\n return createUIMessageStreamResponse({ stream });\n }\n\n // Handle chat\n const { summary } = body;\n const lastMessage = messages[messages.length - 1];\n const currentPage = getCurrentPage({ request: req, message: lastMessage });\n\n if (!options.searchIndexExporter) {\n return new Response(\n JSON.stringify({\n error: 'Search index exporter not configured',\n }),\n {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n }\n );\n }\n\n const searchEngine = await getSearchEngine(options.searchIndexExporter);\n\n if (!searchEngine) {\n return new Response(\n JSON.stringify({\n error: 'Search engine not available',\n }),\n {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n }\n );\n }\n\n const stream = streamSearchText({\n model,\n searchEngine,\n messages,\n currentPage,\n summary,\n });\n\n return createUIMessageStreamResponse({ stream });\n } catch (error) {\n log.error('Error in the chat route:', error);\n\n return new Response(\n JSON.stringify({\n error: 'Error while processing the chat request',\n }),\n {\n status: 500,\n headers: { 'Content-Type': 'application/json' },\n }\n );\n }\n };\n\n return handler;\n}\n","import { UIMessage } from 'ai';\nimport { type CurrentPageMetadata } from '../types';\n\n/**\n * Extracts the current page metadata from the request and message.\n */\nexport const getCurrentPage = ({\n request,\n message,\n}: {\n request: Request;\n message: UIMessage;\n}): CurrentPageMetadata | undefined => {\n const messageMetadata = message.metadata as { currentPage?: { title: string; origin: string; path: string } };\n const messageCurrentPage = messageMetadata.currentPage;\n\n if (messageCurrentPage && messageCurrentPage.path && messageCurrentPage.origin) {\n return {\n title: messageCurrentPage.title,\n origin: messageCurrentPage.origin,\n path: messageCurrentPage.path,\n };\n }\n\n try {\n if (request.headers.has('referer')) {\n const refererUrl = new URL(request.headers.get('referer') || '');\n return {\n path: refererUrl.pathname,\n origin: refererUrl.origin,\n };\n }\n } catch {\n // Invalid referer URL\n }\n\n return undefined;\n};\n","import { loggers } from '@peam-ai/logger';\nimport { SearchEngine, type SearchIndexExporter } from '@peam-ai/search';\n\nconst log = loggers.server;\nlet searchEngine: SearchEngine | null = null;\n\nexport async function getSearchEngine(exporter: SearchIndexExporter): Promise<SearchEngine | undefined> {\n if (searchEngine) return searchEngine;\n\n try {\n const indexData = await exporter.import();\n\n if (!indexData || !indexData.keys || indexData.keys.length === 0) {\n log.debug('Search index not yet generated. 
Run build first to generate the index.');\n return undefined;\n }\n\n searchEngine = new SearchEngine();\n await searchEngine.import(async (key: string) => {\n return indexData.data[key];\n }, indexData.keys);\n\n const totalDocs = searchEngine.count();\n log.debug('Index loaded successfully with', totalDocs, 'documents');\n return searchEngine;\n } catch (error) {\n log.error('Failed to load search index:', error);\n }\n\n return undefined;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,SAAS,cAAc;AACvB,SAAS,kBAAkB,uBAAuB;AAClD,SAAS,WAAAA,gBAAe;AACxB,SAAS,qCAAqC;;;ACGvC,IAAM,iBAAiB,CAAC;AAAA,EAC7B;AAAA,EACA;AACF,MAGuC;AACrC,QAAM,kBAAkB,QAAQ;AAChC,QAAM,qBAAqB,gBAAgB;AAE3C,MAAI,sBAAsB,mBAAmB,QAAQ,mBAAmB,QAAQ;AAC9E,WAAO;AAAA,MACL,OAAO,mBAAmB;AAAA,MAC1B,QAAQ,mBAAmB;AAAA,MAC3B,MAAM,mBAAmB;AAAA,IAC3B;AAAA,EACF;AAEA,MAAI;AACF,QAAI,QAAQ,QAAQ,IAAI,SAAS,GAAG;AAClC,YAAM,aAAa,IAAI,IAAI,QAAQ,QAAQ,IAAI,SAAS,KAAK,EAAE;AAC/D,aAAO;AAAA,QACL,MAAM,WAAW;AAAA,QACjB,QAAQ,WAAW;AAAA,MACrB;AAAA,IACF;AAAA,EACF,SAAQ;AAAA,EAER;AAEA,SAAO;AACT;;;ACrCA,SAAS,eAAe;AACxB,SAAS,oBAA8C;AAEvD,IAAM,MAAM,QAAQ;AACpB,IAAI,eAAoC;AAExC,SAAsB,gBAAgB,UAAkE;AAAA;AACtG,QAAI,aAAc,QAAO;AAEzB,QAAI;AACF,YAAM,YAAY,MAAM,SAAS,OAAO;AAExC,UAAI,CAAC,aAAa,CAAC,UAAU,QAAQ,UAAU,KAAK,WAAW,GAAG;AAChE,YAAI,MAAM,wEAAwE;AAClF,eAAO;AAAA,MACT;AAEA,qBAAe,IAAI,aAAa;AAChC,YAAM,aAAa,OAAO,CAAO,QAAgB;AAC/C,eAAO,UAAU,KAAK,GAAG;AAAA,MAC3B,IAAG,UAAU,IAAI;AAEjB,YAAM,YAAY,aAAa,MAAM;AACrC,UAAI,MAAM,kCAAkC,WAAW,WAAW;AAClE,aAAO;AAAA,IACT,SAAS,OAAO;AACd,UAAI,MAAM,gCAAgC,KAAK;AAAA,IACjD;AAEA,WAAO;AAAA,EACT;AAAA;;;AFtBA,IAAM,qBAAqB;AAC3B,IAAMC,OAAMC,SAAQ;AAyBb,SAAS,cAAc,UAAgC,CAAC,GAAG;AAChE,QAAM,QAAQ,QAAQ,SAAS,OAAO,QAAQ;AAE9C,QAAM,UAAU,CAAO,QAAoC;AACzD,QAAI;AACF,YAAM,OAAQ,MAAM,IAAI,KAAK;AAC7B,YAAM,EAAE,UAAU,KAAK,IAAI;AAE3B,UAAI,CAAC,YAAY,SAAS,WAAW,GAAG;AACtC,eAAO,IAAI;AAAA,UACT,KAAK,UAAU;AAAA,YACb,OAAO;AAAA,UACT,CAAC;AAAA,UACD;AAAA,YACE,QAAQ;AAAA,YACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAGA,iBAAW,WAAW,UAAU;AAC9B,cAAM,iBAAiB,QAAQ,MAC5B,OAAO,CAAC,SAAS,KAAK,SAAS,MAAM,EACrC,IAAI,CAAC,SAAU,UAAU,OAAO,KAAK,OAAO,EAAG,EAC/C,KAAK,EAAE;AAEV,YAAI,eAAe,SAAS,oBAAoB;AAC9C,iBAAO,IAAI;AAAA,YACT,KAAK,UAAU;AAAA,cACb,OAAO,qCAAqC,kBAAkB;AAAA,YAChE,CAAC;AAAA,YACD;AAAA,cACE,QAAQ;AAAA,cACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,YAChD;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAGA,UAAI,SAAS,aAAa;AACxB,cAAM,EAAE,gBAAgB,IAAI;AAC5B,cAAMC,UAAS,gBAAgB;AAAA,UAC7B;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAED,eAAO,8BAA8B,EAAE,QAAAA,QAAO,CAAC;AAAA,MACjD;AAGA,YAAM,EAAE,QAAQ,IAAI;AACpB,YAAM,cAAc,SAAS,SAAS,SAAS,CAAC;AAChD,YAAM,cAAc,eAAe,EAAE,SAAS,KAAK,SAAS,YAAY,CAAC;AAEzE,UAAI,CAAC,QAAQ,qBAAqB;AAChC,eAAO,IAAI;AAAA,UACT,KAAK,UAAU;AAAA,YACb,OAAO;AAAA,UACT,CAAC;AAAA,UACD;AAAA,YACE,QAAQ;AAAA,YACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAEA,YAAMC,gBAAe,MAAM,gBAAgB,QAAQ,mBAAmB;AAEtE,UAAI,CAACA,eAAc;AACjB,eAAO,IAAI;AAAA,UACT,KAAK,UAAU;AAAA,YACb,OAAO;AAAA,UACT,CAAC;AAAA,UACD;AAAA,YACE,QAAQ;AAAA,YACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,UAChD;AAAA,QACF;AAAA,MACF;AAEA,YAAM,SAAS,iBAAiB;AAAA,QAC9B;AAAA,QACA,cAAAA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,CAAC;AAED,aAAO,8BAA8B,EAAE,OAAO,CAAC;AAAA,IACjD,SAAS,OAAO;AACd,MAAAH,KAAI,MAAM,4BAA4B,KAAK;AAE3C,aAAO,IAAI;AAAA,QACT,KAAK,UAAU;AAAA,UACb,OAAO;AAAA,QACT,CAAC;AAAA,QACD;AAAA,UACE,QAAQ;AAAA,UACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,QAChD;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;","names":["loggers","log","loggers","stream","searchEngine"]}
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@peam-ai/server",
  "description": "Server handler for Peam",
- "version": "0.1.1",
+ "version": "0.1.3",
  "main": "./dist/index.js",
  "module": "./dist/index.mjs",
  "types": "./dist/index.d.ts",
@@ -30,9 +30,9 @@
  "dependencies": {
  "@ai-sdk/openai": "^3.0.0",
  "ai": "^6.0.1",
- "@peam-ai/ai": "0.1.1",
- "@peam-ai/search": "0.1.1",
- "@peam-ai/logger": "0.1.1"
+ "@peam-ai/ai": "0.1.3",
+ "@peam-ai/search": "0.1.3",
+ "@peam-ai/logger": "0.1.3"
  },
  "devDependencies": {
  "tsup": "^8.2.4",