@ncukondo/reference-manager 0.3.0 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (113)
  1. package/README.md +282 -107
  2. package/dist/chunks/file-watcher-B-SiUw5f.js +1557 -0
  3. package/dist/chunks/file-watcher-B-SiUw5f.js.map +1 -0
  4. package/dist/chunks/{search-Be9vzUIH.js → index-DLIGxQaB.js} +399 -89
  5. package/dist/chunks/index-DLIGxQaB.js.map +1 -0
  6. package/dist/chunks/loader-DuzyKV70.js +394 -0
  7. package/dist/chunks/loader-DuzyKV70.js.map +1 -0
  8. package/dist/cli/commands/add.d.ts +3 -3
  9. package/dist/cli/commands/add.d.ts.map +1 -1
  10. package/dist/cli/commands/cite.d.ts +3 -3
  11. package/dist/cli/commands/cite.d.ts.map +1 -1
  12. package/dist/cli/commands/fulltext.d.ts +5 -34
  13. package/dist/cli/commands/fulltext.d.ts.map +1 -1
  14. package/dist/cli/commands/list.d.ts +3 -3
  15. package/dist/cli/commands/list.d.ts.map +1 -1
  16. package/dist/cli/commands/mcp.d.ts +16 -0
  17. package/dist/cli/commands/mcp.d.ts.map +1 -0
  18. package/dist/cli/commands/remove.d.ts +3 -3
  19. package/dist/cli/commands/remove.d.ts.map +1 -1
  20. package/dist/cli/commands/search.d.ts +3 -3
  21. package/dist/cli/commands/search.d.ts.map +1 -1
  22. package/dist/cli/commands/server.d.ts +2 -0
  23. package/dist/cli/commands/server.d.ts.map +1 -1
  24. package/dist/cli/commands/update.d.ts +3 -3
  25. package/dist/cli/commands/update.d.ts.map +1 -1
  26. package/dist/cli/execution-context.d.ts +23 -36
  27. package/dist/cli/execution-context.d.ts.map +1 -1
  28. package/dist/cli/index.d.ts.map +1 -1
  29. package/dist/cli/server-client.d.ts +24 -40
  30. package/dist/cli/server-client.d.ts.map +1 -1
  31. package/dist/cli/server-detection.d.ts +1 -0
  32. package/dist/cli/server-detection.d.ts.map +1 -1
  33. package/dist/cli.js +21061 -317
  34. package/dist/cli.js.map +1 -1
  35. package/dist/config/defaults.d.ts.map +1 -1
  36. package/dist/config/loader.d.ts.map +1 -1
  37. package/dist/config/schema.d.ts +2 -3
  38. package/dist/config/schema.d.ts.map +1 -1
  39. package/dist/core/csl-json/types.d.ts +3 -0
  40. package/dist/core/csl-json/types.d.ts.map +1 -1
  41. package/dist/core/library-interface.d.ts +100 -0
  42. package/dist/core/library-interface.d.ts.map +1 -0
  43. package/dist/core/library.d.ts +29 -46
  44. package/dist/core/library.d.ts.map +1 -1
  45. package/dist/features/operations/add.d.ts +2 -2
  46. package/dist/features/operations/add.d.ts.map +1 -1
  47. package/dist/features/operations/cite.d.ts +2 -2
  48. package/dist/features/operations/cite.d.ts.map +1 -1
  49. package/dist/features/operations/fulltext/attach.d.ts +47 -0
  50. package/dist/features/operations/fulltext/attach.d.ts.map +1 -0
  51. package/dist/features/operations/fulltext/detach.d.ts +38 -0
  52. package/dist/features/operations/fulltext/detach.d.ts.map +1 -0
  53. package/dist/features/operations/fulltext/get.d.ts +41 -0
  54. package/dist/features/operations/fulltext/get.d.ts.map +1 -0
  55. package/dist/features/operations/fulltext/index.d.ts +9 -0
  56. package/dist/features/operations/fulltext/index.d.ts.map +1 -0
  57. package/dist/features/operations/index.d.ts +15 -0
  58. package/dist/features/operations/index.d.ts.map +1 -0
  59. package/dist/features/operations/library-operations.d.ts +64 -0
  60. package/dist/features/operations/library-operations.d.ts.map +1 -0
  61. package/dist/features/operations/list.d.ts +2 -2
  62. package/dist/features/operations/list.d.ts.map +1 -1
  63. package/dist/features/operations/operations-library.d.ts +36 -0
  64. package/dist/features/operations/operations-library.d.ts.map +1 -0
  65. package/dist/features/operations/remove.d.ts +4 -4
  66. package/dist/features/operations/remove.d.ts.map +1 -1
  67. package/dist/features/operations/search.d.ts +2 -2
  68. package/dist/features/operations/search.d.ts.map +1 -1
  69. package/dist/features/operations/update.d.ts +2 -2
  70. package/dist/features/operations/update.d.ts.map +1 -1
  71. package/dist/features/search/matcher.d.ts.map +1 -1
  72. package/dist/features/search/normalizer.d.ts +12 -0
  73. package/dist/features/search/normalizer.d.ts.map +1 -1
  74. package/dist/features/search/tokenizer.d.ts.map +1 -1
  75. package/dist/features/search/types.d.ts +1 -1
  76. package/dist/features/search/types.d.ts.map +1 -1
  77. package/dist/features/search/uppercase.d.ts +41 -0
  78. package/dist/features/search/uppercase.d.ts.map +1 -0
  79. package/dist/index.js +24 -192
  80. package/dist/index.js.map +1 -1
  81. package/dist/mcp/context.d.ts +19 -0
  82. package/dist/mcp/context.d.ts.map +1 -0
  83. package/dist/mcp/index.d.ts +20 -0
  84. package/dist/mcp/index.d.ts.map +1 -0
  85. package/dist/mcp/resources/index.d.ts +10 -0
  86. package/dist/mcp/resources/index.d.ts.map +1 -0
  87. package/dist/mcp/resources/library.d.ts +26 -0
  88. package/dist/mcp/resources/library.d.ts.map +1 -0
  89. package/dist/mcp/tools/add.d.ts +17 -0
  90. package/dist/mcp/tools/add.d.ts.map +1 -0
  91. package/dist/mcp/tools/cite.d.ts +15 -0
  92. package/dist/mcp/tools/cite.d.ts.map +1 -0
  93. package/dist/mcp/tools/fulltext.d.ts +51 -0
  94. package/dist/mcp/tools/fulltext.d.ts.map +1 -0
  95. package/dist/mcp/tools/index.d.ts +12 -0
  96. package/dist/mcp/tools/index.d.ts.map +1 -0
  97. package/dist/mcp/tools/list.d.ts +13 -0
  98. package/dist/mcp/tools/list.d.ts.map +1 -0
  99. package/dist/mcp/tools/remove.d.ts +19 -0
  100. package/dist/mcp/tools/remove.d.ts.map +1 -0
  101. package/dist/mcp/tools/search.d.ts +13 -0
  102. package/dist/mcp/tools/search.d.ts.map +1 -0
  103. package/dist/server/index.d.ts +23 -1
  104. package/dist/server/index.d.ts.map +1 -1
  105. package/dist/server/routes/references.d.ts.map +1 -1
  106. package/dist/server.js +5 -271
  107. package/dist/server.js.map +1 -1
  108. package/package.json +2 -1
  109. package/dist/chunks/detector-DHztTaFY.js +0 -619
  110. package/dist/chunks/detector-DHztTaFY.js.map +0 -1
  111. package/dist/chunks/loader-mQ25o6cV.js +0 -1054
  112. package/dist/chunks/loader-mQ25o6cV.js.map +0 -1
  113. package/dist/chunks/search-Be9vzUIH.js.map +0 -1
package/dist/server.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"server.js","sources":["../src/server/routes/add.ts","../src/server/routes/cite.ts","../src/server/routes/health.ts","../src/server/routes/list.ts","../src/server/routes/references.ts","../src/server/routes/search.ts","../src/server/index.ts"],"sourcesContent":["import { Hono } from \"hono\";\nimport type { Config } from \"../../config/schema.js\";\nimport type { Library } from \"../../core/library.js\";\nimport type { InputFormat } from \"../../features/import/detector.js\";\nimport type { PubmedConfig } from \"../../features/import/fetcher.js\";\nimport { type AddReferencesOptions, addReferences } from \"../../features/operations/add.js\";\n\n/**\n * Build PubmedConfig from config, filtering out undefined values\n * to satisfy exactOptionalPropertyTypes requirements.\n */\nfunction buildPubmedConfig(config: Config): PubmedConfig {\n const pubmedConfig: PubmedConfig = {};\n if (config.pubmed.email !== undefined) {\n pubmedConfig.email = config.pubmed.email;\n }\n if (config.pubmed.apiKey !== undefined) {\n pubmedConfig.apiKey = config.pubmed.apiKey;\n }\n return pubmedConfig;\n}\n\n/**\n * Create add route for importing references.\n * @param library - Library instance to use for operations\n * @param config - Configuration with PubMed settings\n * @returns Hono app with add route\n */\nexport function createAddRoute(library: Library, config: Config) {\n const route = new Hono();\n\n // POST / - Add references from inputs\n route.post(\"/\", async (c) => {\n // Parse request body\n let body: unknown;\n try {\n body = await c.req.json();\n } catch {\n return c.json({ error: \"Invalid JSON\" }, 400);\n }\n\n // Validate inputs\n if (!body || typeof body !== \"object\") {\n return c.json({ error: \"Request body must be an object\" }, 400);\n }\n\n const { inputs, options } = body as {\n inputs?: unknown;\n options?: { force?: boolean; format?: string };\n };\n\n if (!inputs || !Array.isArray(inputs) || inputs.length === 0) {\n return c.json({ error: \"inputs must be a non-empty array of strings\" }, 400);\n }\n\n // Ensure all inputs are strings\n if (!inputs.every((input) => typeof input === \"string\")) {\n return c.json({ error: \"All inputs must be strings\" }, 400);\n }\n\n // Build options\n const addOptions: AddReferencesOptions = {\n force: options?.force ?? 
false,\n pubmedConfig: buildPubmedConfig(config),\n };\n\n if (options?.format) {\n addOptions.format = options.format as InputFormat | \"auto\";\n }\n\n // Call addReferences\n const result = await addReferences(inputs as string[], library, addOptions);\n\n return c.json(result);\n });\n\n return route;\n}\n","import { Hono } from \"hono\";\nimport { z } from \"zod\";\nimport type { Library } from \"../../core/library.js\";\nimport {\n type CiteOperationOptions,\n type CiteResult,\n citeReferences,\n} from \"../../features/operations/cite.js\";\n\n/**\n * Zod schema for cite request body\n */\nconst CiteRequestSchema = z.object({\n identifiers: z.array(z.string()).min(1, \"identifiers must be a non-empty array\"),\n byUuid: z.boolean().optional(),\n inText: z.boolean().optional(),\n style: z.string().optional(),\n cslFile: z.string().optional(),\n locale: z.string().optional(),\n format: z.enum([\"text\", \"html\"]).optional(),\n});\n\ntype CiteRequestBody = z.infer<typeof CiteRequestSchema>;\n\n/**\n * Build cite operation options from validated request body.\n */\nfunction buildCiteOptions(body: CiteRequestBody): CiteOperationOptions {\n return {\n identifiers: body.identifiers,\n ...(body.byUuid !== undefined && { byUuid: body.byUuid }),\n ...(body.inText !== undefined && { inText: body.inText }),\n ...(body.style !== undefined && { style: body.style }),\n ...(body.cslFile !== undefined && { cslFile: body.cslFile }),\n ...(body.locale !== undefined && { locale: body.locale }),\n ...(body.format !== undefined && { format: body.format }),\n };\n}\n\n/**\n * Create cite route for generating citations.\n * @param library - Library instance to use for operations\n * @returns Hono app with cite route\n */\nexport function createCiteRoute(library: Library) {\n const route = new Hono();\n\n // POST / - Generate citations for identifiers\n route.post(\"/\", async (c) => {\n // Parse request body\n let rawBody: unknown;\n try {\n rawBody = await c.req.json();\n } catch {\n return c.json({ error: \"Invalid JSON\" }, 400);\n }\n\n // Validate with zod schema\n const parseResult = CiteRequestSchema.safeParse(rawBody);\n if (!parseResult.success) {\n const errorMessage = parseResult.error.issues[0]?.message ?? 
\"Invalid request body\";\n return c.json({ error: errorMessage }, 400);\n }\n\n // Call citeReferences operation\n const result: CiteResult = await citeReferences(library, buildCiteOptions(parseResult.data));\n\n return c.json(result);\n });\n\n return route;\n}\n","import { Hono } from \"hono\";\n\n/**\n * Health check route.\n * Returns a simple status to verify the server is running.\n */\nexport const healthRoute = new Hono();\n\nhealthRoute.get(\"/\", (c) => {\n return c.json({ status: \"ok\" });\n});\n","import { Hono } from \"hono\";\nimport { z } from \"zod\";\nimport type { Library } from \"../../core/library.js\";\nimport { type ListOptions, listReferences } from \"../../features/operations/list.js\";\n\n/**\n * Request body schema for list endpoint\n */\nconst listRequestBodySchema = z.object({\n format: z.enum([\"pretty\", \"json\", \"bibtex\", \"ids-only\", \"uuid\"]).optional(),\n});\n\n/**\n * Request body type for list endpoint\n */\nexport type ListRequestBody = z.infer<typeof listRequestBodySchema>;\n\n/**\n * Creates list route for HTTP server\n */\nexport function createListRoute(library: Library) {\n const route = new Hono();\n\n // POST / - List all references\n route.post(\"/\", async (c) => {\n // Parse request body\n let body: unknown;\n try {\n body = await c.req.json();\n } catch {\n return c.json({ error: \"Invalid JSON\" }, 400);\n }\n\n // Validate body with zod\n const parseResult = listRequestBodySchema.safeParse(body);\n if (!parseResult.success) {\n return c.json({ error: \"Request body must be an object\" }, 400);\n }\n\n const requestBody = parseResult.data;\n\n // Build options for listReferences\n const options: ListOptions = {};\n if (requestBody.format !== undefined) {\n options.format = requestBody.format;\n }\n\n // Call listReferences operation\n const result = listReferences(library, options);\n\n return c.json(result);\n });\n\n return route;\n}\n","import { Hono } from \"hono\";\nimport type { Library } from \"../../core/library.js\";\nimport { removeReference } from \"../../features/operations/remove.js\";\nimport { updateReference } from \"../../features/operations/update.js\";\n\n/**\n * Create references CRUD route with the given library.\n * @param library - Library instance to use for operations\n * @returns Hono app with references routes\n */\nexport function createReferencesRoute(library: Library) {\n const route = new Hono();\n\n // GET / - Get all references\n route.get(\"/\", (c) => {\n const references = library.getAll();\n const items = references.map((ref) => ref.getItem());\n return c.json(items);\n });\n\n // GET /uuid/:uuid - Get reference by UUID\n route.get(\"/uuid/:uuid\", (c) => {\n const uuid = c.req.param(\"uuid\");\n const ref = library.findByUuid(uuid);\n\n if (!ref) {\n return c.json({ error: \"Reference not found\" }, 404);\n }\n\n return c.json(ref.getItem());\n });\n\n // GET /id/:id - Get reference by citation ID\n route.get(\"/id/:id\", (c) => {\n const id = c.req.param(\"id\");\n const ref = library.findById(id);\n\n if (!ref) {\n return c.json({ error: \"Reference not found\" }, 404);\n }\n\n return c.json(ref.getItem());\n });\n\n // POST / - Create new reference\n route.post(\"/\", async (c) => {\n try {\n const body = await c.req.json();\n\n // Create and add reference (library.add handles validation)\n library.add(body);\n\n // Find the newly added reference by UUID (it was just added)\n const allRefs = library.getAll();\n const addedRef = allRefs[allRefs.length - 1];\n\n if (!addedRef) {\n return c.json({ 
error: \"Failed to add reference\" }, 500);\n }\n\n return c.json(addedRef.getItem(), 201);\n } catch (error) {\n return c.json(\n {\n error: \"Invalid request body\",\n details: error instanceof Error ? error.message : String(error),\n },\n 400\n );\n }\n });\n\n // PUT /uuid/:uuid - Update reference by UUID\n route.put(\"/uuid/:uuid\", async (c) => {\n const uuid = c.req.param(\"uuid\");\n\n let body: unknown;\n try {\n body = await c.req.json();\n } catch {\n return c.json({ error: \"Invalid JSON\" }, 400);\n }\n\n if (!body || typeof body !== \"object\") {\n return c.json({ error: \"Request body must be an object\" }, 400);\n }\n\n // Use updateReference operation\n const result = await updateReference(library, {\n identifier: uuid,\n byUuid: true,\n updates: body as Partial<import(\"../../core/csl-json/types.js\").CslItem>,\n onIdCollision: \"suffix\",\n });\n\n // Return operation result with appropriate status code\n if (!result.updated) {\n const status = result.idCollision ? 409 : 404;\n return c.json(result, status);\n }\n\n return c.json(result);\n });\n\n // PUT /id/:id - Update reference by citation ID\n route.put(\"/id/:id\", async (c) => {\n const id = c.req.param(\"id\");\n\n let body: unknown;\n try {\n body = await c.req.json();\n } catch {\n return c.json({ error: \"Invalid JSON\" }, 400);\n }\n\n if (!body || typeof body !== \"object\") {\n return c.json({ error: \"Request body must be an object\" }, 400);\n }\n\n // Use updateReference operation with byUuid: false\n const result = await updateReference(library, {\n identifier: id,\n byUuid: false,\n updates: body as Partial<import(\"../../core/csl-json/types.js\").CslItem>,\n onIdCollision: \"suffix\",\n });\n\n // Return operation result with appropriate status code\n if (!result.updated) {\n const status = result.idCollision ? 
409 : 404;\n return c.json(result, status);\n }\n\n return c.json(result);\n });\n\n // DELETE /uuid/:uuid - Delete reference by UUID\n route.delete(\"/uuid/:uuid\", async (c) => {\n const uuid = c.req.param(\"uuid\");\n\n // Use removeReference operation\n const result = await removeReference(library, {\n identifier: uuid,\n byUuid: true,\n });\n\n // Return operation result with appropriate status code\n if (!result.removed) {\n return c.json(result, 404);\n }\n\n return c.json(result);\n });\n\n // DELETE /id/:id - Delete reference by citation ID\n route.delete(\"/id/:id\", async (c) => {\n const id = c.req.param(\"id\");\n\n // Use removeReference operation with byUuid: false\n const result = await removeReference(library, {\n identifier: id,\n byUuid: false,\n });\n\n // Return operation result with appropriate status code\n if (!result.removed) {\n return c.json(result, 404);\n }\n\n return c.json(result);\n });\n\n return route;\n}\n","import { Hono } from \"hono\";\nimport { z } from \"zod\";\nimport type { Library } from \"../../core/library.js\";\nimport { type SearchOperationOptions, searchReferences } from \"../../features/operations/search.js\";\n\n/**\n * Request body schema for search endpoint\n */\nconst searchRequestBodySchema = z.object({\n query: z.string(),\n format: z.enum([\"pretty\", \"json\", \"bibtex\", \"ids-only\", \"uuid\"]).optional(),\n});\n\n/**\n * Request body type for search endpoint\n */\nexport type SearchRequestBody = z.infer<typeof searchRequestBodySchema>;\n\n/**\n * Creates search route for HTTP server\n */\nexport function createSearchRoute(library: Library) {\n const route = new Hono();\n\n // POST / - Search references\n route.post(\"/\", async (c) => {\n // Parse request body\n let body: unknown;\n try {\n body = await c.req.json();\n } catch {\n return c.json({ error: \"Invalid JSON\" }, 400);\n }\n\n // Validate body with zod\n const parseResult = searchRequestBodySchema.safeParse(body);\n if (!parseResult.success) {\n return c.json({ error: \"Invalid request body\" }, 400);\n }\n\n const requestBody = parseResult.data;\n\n // Build options for searchReferences\n const options: SearchOperationOptions = {\n query: requestBody.query,\n };\n if (requestBody.format !== undefined) {\n options.format = requestBody.format;\n }\n\n // Call searchReferences operation\n const result = searchReferences(library, options);\n\n return c.json(result);\n });\n\n return route;\n}\n","import { Hono } from \"hono\";\nimport type { Config } from \"../config/schema.js\";\nimport type { Library } from \"../core/library.js\";\nimport { createAddRoute } from \"./routes/add.js\";\nimport { createCiteRoute } from \"./routes/cite.js\";\nimport { healthRoute } from \"./routes/health.js\";\nimport { createListRoute } from \"./routes/list.js\";\nimport { createReferencesRoute } from \"./routes/references.js\";\nimport { createSearchRoute } from \"./routes/search.js\";\n\n/**\n * Create the main Hono server application.\n * @param library - Library instance for the references API\n * @param config - Configuration for the server\n * @returns Hono application\n */\nexport function createServer(library: Library, config: Config) {\n const app = new Hono();\n\n // Health check route\n app.route(\"/health\", healthRoute);\n\n // References API routes\n const referencesRoute = createReferencesRoute(library);\n app.route(\"/api/references\", referencesRoute);\n\n // Add references route\n const addRoute = createAddRoute(library, config);\n app.route(\"/api/add\", addRoute);\n\n // 
Cite route\n const citeRoute = createCiteRoute(library);\n app.route(\"/api/cite\", citeRoute);\n\n // List route\n const listRoute = createListRoute(library);\n app.route(\"/api/list\", listRoute);\n\n // Search route\n const searchRoute = createSearchRoute(library);\n app.route(\"/api/search\", searchRoute);\n\n return app;\n}\n"],"names":[],"mappings":";;;AAWA,SAAS,kBAAkB,QAA8B;AACvD,QAAM,eAA6B,CAAA;AACnC,MAAI,OAAO,OAAO,UAAU,QAAW;AACrC,iBAAa,QAAQ,OAAO,OAAO;AAAA,EACrC;AACA,MAAI,OAAO,OAAO,WAAW,QAAW;AACtC,iBAAa,SAAS,OAAO,OAAO;AAAA,EACtC;AACA,SAAO;AACT;AAQO,SAAS,eAAe,SAAkB,QAAgB;AAC/D,QAAM,QAAQ,IAAI,KAAA;AAGlB,QAAM,KAAK,KAAK,OAAO,MAAM;AAE3B,QAAI;AACJ,QAAI;AACF,aAAO,MAAM,EAAE,IAAI,KAAA;AAAA,IACrB,QAAQ;AACN,aAAO,EAAE,KAAK,EAAE,OAAO,eAAA,GAAkB,GAAG;AAAA,IAC9C;AAGA,QAAI,CAAC,QAAQ,OAAO,SAAS,UAAU;AACrC,aAAO,EAAE,KAAK,EAAE,OAAO,iCAAA,GAAoC,GAAG;AAAA,IAChE;AAEA,UAAM,EAAE,QAAQ,QAAA,IAAY;AAK5B,QAAI,CAAC,UAAU,CAAC,MAAM,QAAQ,MAAM,KAAK,OAAO,WAAW,GAAG;AAC5D,aAAO,EAAE,KAAK,EAAE,OAAO,8CAAA,GAAiD,GAAG;AAAA,IAC7E;AAGA,QAAI,CAAC,OAAO,MAAM,CAAC,UAAU,OAAO,UAAU,QAAQ,GAAG;AACvD,aAAO,EAAE,KAAK,EAAE,OAAO,6BAAA,GAAgC,GAAG;AAAA,IAC5D;AAGA,UAAM,aAAmC;AAAA,MACvC,OAAO,SAAS,SAAS;AAAA,MACzB,cAAc,kBAAkB,MAAM;AAAA,IAAA;AAGxC,QAAI,SAAS,QAAQ;AACnB,iBAAW,SAAS,QAAQ;AAAA,IAC9B;AAGA,UAAM,SAAS,MAAM,cAAc,QAAoB,SAAS,UAAU;AAE1E,WAAO,EAAE,KAAK,MAAM;AAAA,EACtB,CAAC;AAED,SAAO;AACT;ACjEA,MAAM,oBAAoB,EAAE,OAAO;AAAA,EACjC,aAAa,EAAE,MAAM,EAAE,QAAQ,EAAE,IAAI,GAAG,uCAAuC;AAAA,EAC/E,QAAQ,EAAE,QAAA,EAAU,SAAA;AAAA,EACpB,QAAQ,EAAE,QAAA,EAAU,SAAA;AAAA,EACpB,OAAO,EAAE,OAAA,EAAS,SAAA;AAAA,EAClB,SAAS,EAAE,OAAA,EAAS,SAAA;AAAA,EACpB,QAAQ,EAAE,OAAA,EAAS,SAAA;AAAA,EACnB,QAAQ,EAAE,KAAK,CAAC,QAAQ,MAAM,CAAC,EAAE,SAAA;AACnC,CAAC;AAOD,SAAS,iBAAiB,MAA6C;AACrE,SAAO;AAAA,IACL,aAAa,KAAK;AAAA,IAClB,GAAI,KAAK,WAAW,UAAa,EAAE,QAAQ,KAAK,OAAA;AAAA,IAChD,GAAI,KAAK,WAAW,UAAa,EAAE,QAAQ,KAAK,OAAA;AAAA,IAChD,GAAI,KAAK,UAAU,UAAa,EAAE,OAAO,KAAK,MAAA;AAAA,IAC9C,GAAI,KAAK,YAAY,UAAa,EAAE,SAAS,KAAK,QAAA;AAAA,IAClD,GAAI,KAAK,WAAW,UAAa,EAAE,QAAQ,KAAK,OAAA;AAAA,IAChD,GAAI,KAAK,WAAW,UAAa,EAAE,QAAQ,KAAK,OAAA;AAAA,EAAO;AAE3D;AAOO,SAAS,gBAAgB,SAAkB;AAChD,QAAM,QAAQ,IAAI,KAAA;AAGlB,QAAM,KAAK,KAAK,OAAO,MAAM;AAE3B,QAAI;AACJ,QAAI;AACF,gBAAU,MAAM,EAAE,IAAI,KAAA;AAAA,IACxB,QAAQ;AACN,aAAO,EAAE,KAAK,EAAE,OAAO,eAAA,GAAkB,GAAG;AAAA,IAC9C;AAGA,UAAM,cAAc,kBAAkB,UAAU,OAAO;AACvD,QAAI,CAAC,YAAY,SAAS;AACxB,YAAM,eAAe,YAAY,MAAM,OAAO,CAAC,GAAG,WAAW;AAC7D,aAAO,EAAE,KAAK,EAAE,OAAO,aAAA,GAAgB,GAAG;AAAA,IAC5C;AAGA,UAAM,SAAqB,MAAM,eAAe,SAAS,iBAAiB,YAAY,IAAI,CAAC;AAE3F,WAAO,EAAE,KAAK,MAAM;AAAA,EACtB,CAAC;AAED,SAAO;AACT;ACjEO,MAAM,cAAc,IAAI,KAAA;AAE/B,YAAY,IAAI,KAAK,CAAC,MAAM;AAC1B,SAAO,EAAE,KAAK,EAAE,QAAQ,MAAM;AAChC,CAAC;ACFD,MAAM,wBAAwB,EAAE,OAAO;AAAA,EACrC,QAAQ,EAAE,KAAK,CAAC,UAAU,QAAQ,UAAU,YAAY,MAAM,CAAC,EAAE,SAAA;AACnE,CAAC;AAUM,SAAS,gBAAgB,SAAkB;AAChD,QAAM,QAAQ,IAAI,KAAA;AAGlB,QAAM,KAAK,KAAK,OAAO,MAAM;AAE3B,QAAI;AACJ,QAAI;AACF,aAAO,MAAM,EAAE,IAAI,KAAA;AAAA,IACrB,QAAQ;AACN,aAAO,EAAE,KAAK,EAAE,OAAO,eAAA,GAAkB,GAAG;AAAA,IAC9C;AAGA,UAAM,cAAc,sBAAsB,UAAU,IAAI;AACxD,QAAI,CAAC,YAAY,SAAS;AACxB,aAAO,EAAE,KAAK,EAAE,OAAO,iCAAA,GAAoC,GAAG;AAAA,IAChE;AAEA,UAAM,cAAc,YAAY;AAGhC,UAAM,UAAuB,CAAA;AAC7B,QAAI,YAAY,WAAW,QAAW;AACpC,cAAQ,SAAS,YAAY;AAAA,IAC/B;AAGA,UAAM,SAAS,eAAe,SAAS,OAAO;AAE9C,WAAO,EAAE,KAAK,MAAM;AAAA,EACtB,CAAC;AAED,SAAO;AACT;AC5CO,SAAS,sBAAsB,SAAkB;AACtD,QAAM,QAAQ,IAAI,KAAA;AAGlB,QAAM,IAAI,KAAK,CAAC,MAAM;AACpB,UAAM,aAAa,QAAQ,OAAA;AAC3B,UAAM,QAAQ,WAAW,IAAI,CAAC,QAAQ,IAAI,SAAS;AACnD,WAAO,EAAE,KAAK,KAAK;AAAA,EACrB,CAAC;AAGD,QAAM,IAAI,eAAe,CAAC,MAAM;AAC9B,UAAM,OAAO,EAAE,IAAI,MAAM,MAAM;AAC/B,UAAM,MAAM,QAAQ,WAAW,IAAI;
AAEnC,QAAI,CAAC,KAAK;AACR,aAAO,EAAE,KAAK,EAAE,OAAO,sBAAA,GAAyB,GAAG;AAAA,IACrD;AAEA,WAAO,EAAE,KAAK,IAAI,QAAA,CAAS;AAAA,EAC7B,CAAC;AAGD,QAAM,IAAI,WAAW,CAAC,MAAM;AAC1B,UAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAC3B,UAAM,MAAM,QAAQ,SAAS,EAAE;AAE/B,QAAI,CAAC,KAAK;AACR,aAAO,EAAE,KAAK,EAAE,OAAO,sBAAA,GAAyB,GAAG;AAAA,IACrD;AAEA,WAAO,EAAE,KAAK,IAAI,QAAA,CAAS;AAAA,EAC7B,CAAC;AAGD,QAAM,KAAK,KAAK,OAAO,MAAM;AAC3B,QAAI;AACF,YAAM,OAAO,MAAM,EAAE,IAAI,KAAA;AAGzB,cAAQ,IAAI,IAAI;AAGhB,YAAM,UAAU,QAAQ,OAAA;AACxB,YAAM,WAAW,QAAQ,QAAQ,SAAS,CAAC;AAE3C,UAAI,CAAC,UAAU;AACb,eAAO,EAAE,KAAK,EAAE,OAAO,0BAAA,GAA6B,GAAG;AAAA,MACzD;AAEA,aAAO,EAAE,KAAK,SAAS,QAAA,GAAW,GAAG;AAAA,IACvC,SAAS,OAAO;AACd,aAAO,EAAE;AAAA,QACP;AAAA,UACE,OAAO;AAAA,UACP,SAAS,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,QAAA;AAAA,QAEhE;AAAA,MAAA;AAAA,IAEJ;AAAA,EACF,CAAC;AAGD,QAAM,IAAI,eAAe,OAAO,MAAM;AACpC,UAAM,OAAO,EAAE,IAAI,MAAM,MAAM;AAE/B,QAAI;AACJ,QAAI;AACF,aAAO,MAAM,EAAE,IAAI,KAAA;AAAA,IACrB,QAAQ;AACN,aAAO,EAAE,KAAK,EAAE,OAAO,eAAA,GAAkB,GAAG;AAAA,IAC9C;AAEA,QAAI,CAAC,QAAQ,OAAO,SAAS,UAAU;AACrC,aAAO,EAAE,KAAK,EAAE,OAAO,iCAAA,GAAoC,GAAG;AAAA,IAChE;AAGA,UAAM,SAAS,MAAM,gBAAgB,SAAS;AAAA,MAC5C,YAAY;AAAA,MACZ,QAAQ;AAAA,MACR,SAAS;AAAA,MACT,eAAe;AAAA,IAAA,CAChB;AAGD,QAAI,CAAC,OAAO,SAAS;AACnB,YAAM,SAAS,OAAO,cAAc,MAAM;AAC1C,aAAO,EAAE,KAAK,QAAQ,MAAM;AAAA,IAC9B;AAEA,WAAO,EAAE,KAAK,MAAM;AAAA,EACtB,CAAC;AAGD,QAAM,IAAI,WAAW,OAAO,MAAM;AAChC,UAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAE3B,QAAI;AACJ,QAAI;AACF,aAAO,MAAM,EAAE,IAAI,KAAA;AAAA,IACrB,QAAQ;AACN,aAAO,EAAE,KAAK,EAAE,OAAO,eAAA,GAAkB,GAAG;AAAA,IAC9C;AAEA,QAAI,CAAC,QAAQ,OAAO,SAAS,UAAU;AACrC,aAAO,EAAE,KAAK,EAAE,OAAO,iCAAA,GAAoC,GAAG;AAAA,IAChE;AAGA,UAAM,SAAS,MAAM,gBAAgB,SAAS;AAAA,MAC5C,YAAY;AAAA,MACZ,QAAQ;AAAA,MACR,SAAS;AAAA,MACT,eAAe;AAAA,IAAA,CAChB;AAGD,QAAI,CAAC,OAAO,SAAS;AACnB,YAAM,SAAS,OAAO,cAAc,MAAM;AAC1C,aAAO,EAAE,KAAK,QAAQ,MAAM;AAAA,IAC9B;AAEA,WAAO,EAAE,KAAK,MAAM;AAAA,EACtB,CAAC;AAGD,QAAM,OAAO,eAAe,OAAO,MAAM;AACvC,UAAM,OAAO,EAAE,IAAI,MAAM,MAAM;AAG/B,UAAM,SAAS,MAAM,gBAAgB,SAAS;AAAA,MAC5C,YAAY;AAAA,MACZ,QAAQ;AAAA,IAAA,CACT;AAGD,QAAI,CAAC,OAAO,SAAS;AACnB,aAAO,EAAE,KAAK,QAAQ,GAAG;AAAA,IAC3B;AAEA,WAAO,EAAE,KAAK,MAAM;AAAA,EACtB,CAAC;AAGD,QAAM,OAAO,WAAW,OAAO,MAAM;AACnC,UAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAG3B,UAAM,SAAS,MAAM,gBAAgB,SAAS;AAAA,MAC5C,YAAY;AAAA,MACZ,QAAQ;AAAA,IAAA,CACT;AAGD,QAAI,CAAC,OAAO,SAAS;AACnB,aAAO,EAAE,KAAK,QAAQ,GAAG;AAAA,IAC3B;AAEA,WAAO,EAAE,KAAK,MAAM;AAAA,EACtB,CAAC;AAED,SAAO;AACT;ACrKA,MAAM,0BAA0B,EAAE,OAAO;AAAA,EACvC,OAAO,EAAE,OAAA;AAAA,EACT,QAAQ,EAAE,KAAK,CAAC,UAAU,QAAQ,UAAU,YAAY,MAAM,CAAC,EAAE,SAAA;AACnE,CAAC;AAUM,SAAS,kBAAkB,SAAkB;AAClD,QAAM,QAAQ,IAAI,KAAA;AAGlB,QAAM,KAAK,KAAK,OAAO,MAAM;AAE3B,QAAI;AACJ,QAAI;AACF,aAAO,MAAM,EAAE,IAAI,KAAA;AAAA,IACrB,QAAQ;AACN,aAAO,EAAE,KAAK,EAAE,OAAO,eAAA,GAAkB,GAAG;AAAA,IAC9C;AAGA,UAAM,cAAc,wBAAwB,UAAU,IAAI;AAC1D,QAAI,CAAC,YAAY,SAAS;AACxB,aAAO,EAAE,KAAK,EAAE,OAAO,uBAAA,GAA0B,GAAG;AAAA,IACtD;AAEA,UAAM,cAAc,YAAY;AAGhC,UAAM,UAAkC;AAAA,MACtC,OAAO,YAAY;AAAA,IAAA;AAErB,QAAI,YAAY,WAAW,QAAW;AACpC,cAAQ,SAAS,YAAY;AAAA,IAC/B;AAGA,UAAM,SAAS,iBAAiB,SAAS,OAAO;AAEhD,WAAO,EAAE,KAAK,MAAM;AAAA,EACtB,CAAC;AAED,SAAO;AACT;ACzCO,SAAS,aAAa,SAAkB,QAAgB;AAC7D,QAAM,MAAM,IAAI,KAAA;AAGhB,MAAI,MAAM,WAAW,WAAW;AAGhC,QAAM,kBAAkB,sBAAsB,OAAO;AACrD,MAAI,MAAM,mBAAmB,eAAe;AAG5C,QAAM,WAAW,eAAe,SAAS,MAAM;AAC/C,MAAI,MAAM,YAAY,QAAQ;AAG9B,QAAM,YAAY,gBAAgB,OAAO;AACzC,MAAI,MAAM,aAAa,SAAS;AAGhC,QAAM,YAAY,gBAAgB,OAAO;AACzC,MAAI,MAAM,aAAa,SAAS;AAGhC,QAAM,cAAc,kBAAkB,OAAO;AAC7C,MAAI,MAAM,eAAe,WAAW;AAEpC,SAAO;AACT;"}
+ {"version":3,"file":"server.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@ncukondo/reference-manager",
- "version": "0.3.0",
+ "version": "0.5.0",
  "description": "A local reference management tool using CSL-JSON as the single source of truth",
  "publishConfig": {
  "access": "public"
@@ -49,6 +49,7 @@
  "@citation-js/plugin-doi": "^0.7.21",
  "@citation-js/plugin-ris": "^0.7.21",
  "@iarna/toml": "^2.2.5",
+ "@modelcontextprotocol/sdk": "^1.25.1",
  "chokidar": "^5.0.0",
  "commander": "^12.1.0",
  "hono": "^4.11.1",
package/dist/chunks/detector-DHztTaFY.js DELETED
@@ -1,619 +0,0 @@
- import { z } from "zod";
- const CslNameSchema = z.object({
- family: z.string().optional(),
- given: z.string().optional(),
- literal: z.string().optional(),
- "dropping-particle": z.string().optional(),
- "non-dropping-particle": z.string().optional(),
- suffix: z.string().optional()
- });
- const CslDateSchema = z.object({
- "date-parts": z.array(z.array(z.number())).optional(),
- raw: z.string().optional(),
- season: z.string().optional(),
- circa: z.boolean().optional(),
- literal: z.string().optional()
- });
- const CslFulltextSchema = z.object({
- pdf: z.string().optional(),
- markdown: z.string().optional()
- });
- const CslCustomSchema = z.object({
- uuid: z.string(),
- created_at: z.string(),
- timestamp: z.string(),
- additional_urls: z.array(z.string()).optional(),
- fulltext: CslFulltextSchema.optional()
- }).passthrough();
- const CslItemSchema = z.object({
- id: z.string(),
- type: z.string(),
- title: z.string().optional(),
- author: z.array(CslNameSchema).optional(),
- editor: z.array(CslNameSchema).optional(),
- issued: CslDateSchema.optional(),
- accessed: CslDateSchema.optional(),
- "container-title": z.string().optional(),
- volume: z.string().optional(),
- issue: z.string().optional(),
- page: z.string().optional(),
- DOI: z.string().optional(),
- PMID: z.string().optional(),
- PMCID: z.string().optional(),
- ISBN: z.string().optional(),
- ISSN: z.string().optional(),
- URL: z.string().optional(),
- abstract: z.string().optional(),
- publisher: z.string().optional(),
- "publisher-place": z.string().optional(),
- note: z.string().optional(),
- keyword: z.array(z.string()).optional(),
- custom: CslCustomSchema.optional()
- // Allow additional fields
- }).passthrough();
- const CslLibrarySchema = z.array(CslItemSchema);
- const VALID_FIELDS = /* @__PURE__ */ new Set([
- "author",
- "title",
- "year",
- "doi",
- "pmid",
- "pmcid",
- "url",
- "keyword"
- ]);
- function isWhitespace(query, index) {
- return /\s/.test(query.charAt(index));
- }
- function isQuote(query, index) {
- return query.charAt(index) === '"';
- }
- function tokenize(query) {
- const tokens = [];
- let i = 0;
- while (i < query.length) {
- if (isWhitespace(query, i)) {
- i++;
- continue;
- }
- const result = parseNextToken(query, i);
- if (result.token) {
- tokens.push(result.token);
- }
- i = result.nextIndex;
- }
- return {
- original: query,
- tokens
- };
- }
- function hasWhitespaceBetween(query, start, end) {
- for (let j = start; j < end; j++) {
- if (isWhitespace(query, j)) {
- return true;
- }
- }
- return false;
- }
- function tryParseFieldValue(query, startIndex) {
- const colonIndex = query.indexOf(":", startIndex);
- if (colonIndex === -1) {
- return null;
- }
- if (hasWhitespaceBetween(query, startIndex, colonIndex)) {
- return null;
- }
- const fieldName = query.substring(startIndex, colonIndex);
- if (!VALID_FIELDS.has(fieldName)) {
- return null;
- }
- const afterColon = colonIndex + 1;
- if (afterColon >= query.length || isWhitespace(query, afterColon)) {
- return { token: null, nextIndex: afterColon };
- }
- if (isQuote(query, afterColon)) {
- const quoteResult = parseQuotedValue(query, afterColon);
- if (quoteResult.value !== null) {
- return {
- token: {
- raw: query.substring(startIndex, quoteResult.nextIndex),
- value: quoteResult.value,
- field: fieldName,
- isPhrase: true
- },
- nextIndex: quoteResult.nextIndex
- };
- }
- return null;
- }
- const valueResult = parseUnquotedValue(query, afterColon);
- return {
- token: {
- raw: query.substring(startIndex, valueResult.nextIndex),
- value: valueResult.value,
- field: fieldName,
- isPhrase: false
- },
- nextIndex: valueResult.nextIndex
- };
- }
- function parseQuotedToken(query, startIndex) {
- const quoteResult = parseQuotedValue(query, startIndex);
- if (quoteResult.value !== null) {
- return {
- token: {
- raw: query.substring(startIndex, quoteResult.nextIndex),
- value: quoteResult.value,
- isPhrase: true
- },
- nextIndex: quoteResult.nextIndex
- };
- }
- if (quoteResult.nextIndex > startIndex) {
- return { token: null, nextIndex: quoteResult.nextIndex };
- }
- const valueResult = parseUnquotedValue(query, startIndex, true);
- return {
- token: {
- raw: valueResult.value,
- value: valueResult.value,
- isPhrase: false
- },
- nextIndex: valueResult.nextIndex
- };
- }
- function parseRegularToken(query, startIndex) {
- const valueResult = parseUnquotedValue(query, startIndex);
- return {
- token: {
- raw: valueResult.value,
- value: valueResult.value,
- isPhrase: false
- },
- nextIndex: valueResult.nextIndex
- };
- }
- function parseNextToken(query, startIndex) {
- const fieldResult = tryParseFieldValue(query, startIndex);
- if (fieldResult !== null) {
- return fieldResult;
- }
- if (isQuote(query, startIndex)) {
- return parseQuotedToken(query, startIndex);
- }
- return parseRegularToken(query, startIndex);
- }
- function parseQuotedValue(query, startIndex) {
- if (!isQuote(query, startIndex)) {
- return { value: null, nextIndex: startIndex };
- }
- let i = startIndex + 1;
- const valueStart = i;
- while (i < query.length && !isQuote(query, i)) {
- i++;
- }
- if (i >= query.length) {
- return { value: null, nextIndex: startIndex };
- }
- const value = query.substring(valueStart, i);
- i++;
- if (value.trim() === "") {
- return { value: null, nextIndex: i };
- }
- return { value, nextIndex: i };
- }
- function parseUnquotedValue(query, startIndex, includeQuotes = false) {
- let i = startIndex;
- while (i < query.length && !isWhitespace(query, i)) {
- if (!includeQuotes && isQuote(query, i)) {
- break;
- }
- i++;
- }
- return {
- value: query.substring(startIndex, i),
- nextIndex: i
- };
- }
- function normalize(text) {
- let normalized = text.normalize("NFKC");
- normalized = normalized.toLowerCase();
- normalized = normalized.normalize("NFD").replace(new RegExp("\\p{M}", "gu"), "");
- normalized = normalized.replace(/[^\p{L}\p{N}/\s]/gu, " ");
- normalized = normalized.replace(/\s+/g, " ").trim();
- return normalized;
- }
- const ID_FIELDS = /* @__PURE__ */ new Set(["DOI", "PMID", "PMCID", "URL"]);
- function extractYear$2(reference) {
- if (reference.issued?.["date-parts"]?.[0]?.[0]) {
- return String(reference.issued["date-parts"][0][0]);
- }
- return "0000";
- }
- function extractAuthors(reference) {
- if (!reference.author || reference.author.length === 0) {
- return "";
- }
- return reference.author.map((author) => {
- const family = author.family || "";
- const givenInitial = author.given ? author.given[0] : "";
- return givenInitial ? `${family} ${givenInitial}` : family;
- }).join(" ");
- }
- function getFieldValue(reference, field) {
- if (field === "year") {
- return extractYear$2(reference);
- }
- if (field === "author") {
- return extractAuthors(reference);
- }
- const value = reference[field];
- if (typeof value === "string") {
- return value;
- }
- if (field.startsWith("custom.")) {
- const customField = field.substring(7);
- const customValue = reference.custom?.[customField];
- if (typeof customValue === "string") {
- return customValue;
- }
- }
- return null;
- }
- function matchUrl(queryValue, reference) {
- if (reference.URL === queryValue) {
- return {
- field: "URL",
- strength: "exact",
- value: reference.URL
- };
- }
- const additionalUrls = reference.custom?.additional_urls;
- if (Array.isArray(additionalUrls)) {
- for (const url of additionalUrls) {
- if (typeof url === "string" && url === queryValue) {
- return {
- field: "custom.additional_urls",
- strength: "exact",
- value: url
- };
- }
- }
- }
- return null;
- }
- function matchKeyword(queryValue, reference) {
- if (!reference.keyword || !Array.isArray(reference.keyword)) {
- return null;
- }
- const normalizedQuery = normalize(queryValue);
- for (const keyword of reference.keyword) {
- if (typeof keyword === "string") {
- const normalizedKeyword = normalize(keyword);
- if (normalizedKeyword.includes(normalizedQuery)) {
- return {
- field: "keyword",
- strength: "partial",
- value: keyword
- };
- }
- }
- }
- return null;
- }
- const FIELD_MAP = {
- author: "author",
- title: "title",
- doi: "DOI",
- pmid: "PMID",
- pmcid: "PMCID"
- };
- function matchYearField(tokenValue, reference) {
- const year = extractYear$2(reference);
- if (year === tokenValue) {
- return {
- field: "year",
- strength: "exact",
- value: year
- };
- }
- return null;
- }
- function matchFieldValue(field, tokenValue, reference) {
- const fieldValue = getFieldValue(reference, field);
- if (fieldValue === null) {
- return null;
- }
- if (ID_FIELDS.has(field)) {
- if (fieldValue === tokenValue) {
- return {
- field,
- strength: "exact",
- value: fieldValue
- };
- }
- return null;
- }
- const normalizedFieldValue = normalize(fieldValue);
- const normalizedQuery = normalize(tokenValue);
- if (normalizedFieldValue.includes(normalizedQuery)) {
- return {
- field,
- strength: "partial",
- value: fieldValue
- };
- }
- return null;
- }
- function matchSpecificField(token, reference) {
- const matches = [];
- const fieldToSearch = token.field;
- if (fieldToSearch === "url") {
- const urlMatch = matchUrl(token.value, reference);
- if (urlMatch) matches.push(urlMatch);
- return matches;
- }
- if (fieldToSearch === "year") {
- const yearMatch = matchYearField(token.value, reference);
- if (yearMatch) matches.push(yearMatch);
- return matches;
- }
- if (fieldToSearch === "keyword") {
- const keywordMatch = matchKeyword(token.value, reference);
- if (keywordMatch) matches.push(keywordMatch);
- return matches;
- }
- const actualField = FIELD_MAP[fieldToSearch] || fieldToSearch;
- const match = matchFieldValue(actualField, token.value, reference);
- if (match) matches.push(match);
- return matches;
- }
- const STANDARD_SEARCH_FIELDS = [
- "title",
- "author",
- "container-title",
- "publisher",
- "DOI",
- "PMID",
- "PMCID",
- "abstract"
- ];
- function matchSingleField(field, tokenValue, reference) {
- if (field === "year") {
- return matchYearField(tokenValue, reference);
- }
- if (field === "URL") {
- return matchUrl(tokenValue, reference);
- }
- if (field === "keyword") {
- return matchKeyword(tokenValue, reference);
- }
- return matchFieldValue(field, tokenValue, reference);
- }
- function matchAllFields(token, reference) {
- const matches = [];
- const specialFields = ["year", "URL", "keyword"];
- for (const field of specialFields) {
- const match = matchSingleField(field, token.value, reference);
- if (match) matches.push(match);
- }
- for (const field of STANDARD_SEARCH_FIELDS) {
- const match = matchFieldValue(field, token.value, reference);
- if (match) matches.push(match);
- }
- return matches;
- }
- function matchToken(token, reference) {
- if (token.field) {
- return matchSpecificField(token, reference);
- }
- return matchAllFields(token, reference);
- }
- function matchReference(reference, tokens) {
- if (tokens.length === 0) {
- return null;
- }
- const tokenMatches = [];
- let overallStrength = "none";
- for (const token of tokens) {
- const matches = matchToken(token, reference);
- if (matches.length === 0) {
- return null;
- }
- const tokenStrength = matches.some((m) => m.strength === "exact") ? "exact" : "partial";
- if (tokenStrength === "exact") {
- overallStrength = "exact";
- } else if (tokenStrength === "partial" && overallStrength === "none") {
- overallStrength = "partial";
- }
- tokenMatches.push({
- token,
- matches
- });
- }
- const score = overallStrength === "exact" ? 100 + tokenMatches.length : 50 + tokenMatches.length;
- return {
- reference,
- tokenMatches,
- overallStrength,
- score
- };
- }
- function search(references, tokens) {
- const results = [];
- for (const reference of references) {
- const match = matchReference(reference, tokens);
- if (match) {
- results.push(match);
- }
- }
- return results;
- }
- function extractYear$1(reference) {
- if (reference.issued?.["date-parts"]?.[0]?.[0]) {
- return String(reference.issued["date-parts"][0][0]);
- }
- return "0000";
- }
- function extractFirstAuthorFamily(reference) {
- if (!reference.author || reference.author.length === 0) {
- return "";
- }
- return reference.author[0]?.family || "";
- }
- function extractTitle(reference) {
- return reference.title || "";
- }
- function compareStrength(a, b) {
- const strengthOrder = { exact: 2, partial: 1, none: 0 };
- return strengthOrder[b] - strengthOrder[a];
- }
- function compareYear(a, b) {
- const yearA = extractYear$1(a);
- const yearB = extractYear$1(b);
- return Number(yearB) - Number(yearA);
- }
- function compareAuthor(a, b) {
- const authorA = extractFirstAuthorFamily(a).toLowerCase();
- const authorB = extractFirstAuthorFamily(b).toLowerCase();
- if (authorA === "" && authorB !== "") return 1;
- if (authorA !== "" && authorB === "") return -1;
- return authorA.localeCompare(authorB);
- }
- function compareTitle(a, b) {
- const titleA = extractTitle(a).toLowerCase();
- const titleB = extractTitle(b).toLowerCase();
- if (titleA === "" && titleB !== "") return 1;
- if (titleA !== "" && titleB === "") return -1;
- return titleA.localeCompare(titleB);
- }
- function sortResults(results) {
- const indexed = results.map((result, index) => ({ result, index }));
- const sorted = indexed.sort((a, b) => {
- const strengthDiff = compareStrength(a.result.overallStrength, b.result.overallStrength);
- if (strengthDiff !== 0) return strengthDiff;
- const yearDiff = compareYear(a.result.reference, b.result.reference);
- if (yearDiff !== 0) return yearDiff;
- const authorDiff = compareAuthor(a.result.reference, b.result.reference);
- if (authorDiff !== 0) return authorDiff;
- const titleDiff = compareTitle(a.result.reference, b.result.reference);
- if (titleDiff !== 0) return titleDiff;
- return a.index - b.index;
- });
- return sorted.map((item) => item.result);
- }
- function normalizeDoi(doi) {
- const normalized = doi.replace(/^https?:\/\/doi\.org\//i, "").replace(/^https?:\/\/dx\.doi\.org\//i, "").replace(/^doi:/i, "");
- return normalized;
- }
- function extractYear(item) {
- const dateParts = item.issued?.["date-parts"]?.[0];
- if (!dateParts || dateParts.length === 0) {
- return null;
- }
- return String(dateParts[0]);
- }
- function normalizeAuthors(item) {
- if (!item.author || item.author.length === 0) {
- return null;
- }
- const authorStrings = item.author.map((author) => {
- const family = author.family || "";
- const givenInitial = author.given ? author.given.charAt(0) : "";
- return `${family} ${givenInitial}`.trim();
- });
- return normalize(authorStrings.join(" "));
- }
- function checkDoiMatch(item, existing) {
- if (!item.DOI || !existing.DOI) {
- return null;
- }
- const normalizedItemDoi = normalizeDoi(item.DOI);
- const normalizedExistingDoi = normalizeDoi(existing.DOI);
- if (normalizedItemDoi === normalizedExistingDoi) {
- return {
- type: "doi",
- existing,
- details: {
- doi: normalizedExistingDoi
- }
- };
- }
- return null;
- }
- function checkPmidMatch(item, existing) {
- if (!item.PMID || !existing.PMID) {
- return null;
- }
- if (item.PMID === existing.PMID) {
- return {
- type: "pmid",
- existing,
- details: {
- pmid: existing.PMID
- }
- };
- }
- return null;
- }
- function checkTitleAuthorYearMatch(item, existing) {
- const itemTitle = item.title ? normalize(item.title) : null;
- const existingTitle = existing.title ? normalize(existing.title) : null;
- const itemAuthors = normalizeAuthors(item);
- const existingAuthors = normalizeAuthors(existing);
- const itemYear = extractYear(item);
- const existingYear = extractYear(existing);
- if (!itemTitle || !existingTitle || !itemAuthors || !existingAuthors || !itemYear || !existingYear) {
- return null;
- }
- if (itemTitle === existingTitle && itemAuthors === existingAuthors && itemYear === existingYear) {
- return {
- type: "title-author-year",
- existing,
- details: {
- normalizedTitle: existingTitle,
- normalizedAuthors: existingAuthors,
- year: existingYear
- }
- };
- }
- return null;
- }
- function checkSingleDuplicate(item, existing) {
- const doiMatch = checkDoiMatch(item, existing);
- if (doiMatch) {
- return doiMatch;
- }
- const pmidMatch = checkPmidMatch(item, existing);
- if (pmidMatch) {
- return pmidMatch;
- }
- return checkTitleAuthorYearMatch(item, existing);
- }
- function detectDuplicate(item, existingReferences) {
- const matches = [];
- const itemUuid = item.custom?.uuid;
- for (const existing of existingReferences) {
- if (itemUuid && existing.custom?.uuid === itemUuid) {
- continue;
- }
- const match = checkSingleDuplicate(item, existing);
- if (match) {
- matches.push(match);
- }
- }
- return {
- isDuplicate: matches.length > 0,
- matches
- };
- }
- export {
- CslLibrarySchema as C,
- sortResults as a,
- CslItemSchema as b,
- detectDuplicate as d,
- normalize as n,
- search as s,
- tokenize as t
- };
- //# sourceMappingURL=detector-DHztTaFY.js.map