@hkdigital/lib-core 0.5.12 → 0.5.13

@@ -0,0 +1,37 @@
+ /** @typedef {import('./typedef.js').RobotsConfig} RobotsConfig */
+ /**
+ * Check if hostname matches allowed hosts pattern
+ *
+ * @param {string} hostname - Hostname to check (e.g., test.mysite.com)
+ * @param {string[] | '*' | undefined} allowedHosts - Allowed host patterns
+ *
+ * @returns {boolean} True if host is allowed
+ */
+ export function isHostAllowed(hostname: string, allowedHosts: string[] | "*" | undefined): boolean;
+ /**
+ * Generate robots.txt with sitemap reference
+ *
+ * NOTE: If deployed behind a reverse proxy (nginx, Cloudflare, etc.),
+ * ensure your adapter is configured to trust proxy headers for correct
+ * origin detection:
+ *
+ * // svelte.config.js
+ * export default {
+ *   kit: {
+ *     adapter: adapter({
+ *       // Trust X-Forwarded-* headers from proxy
+ *       trustProxy: true
+ *     })
+ *   }
+ * };
+ *
+ * Without this, url.origin may be http://localhost instead of your
+ * actual domain, and the sitemap directive will be omitted.
+ *
+ * @param {URL} url - Request URL object
+ * @param {RobotsConfig} [config] - Robots configuration object
+ *
+ * @returns {string} robots.txt content
+ */
+ export function generateRobotsTxt(url: URL, config?: RobotsConfig): string;
+ export type RobotsConfig = import("./typedef.js").RobotsConfig;
@@ -0,0 +1,83 @@
+ /** @typedef {import('./typedef.js').RobotsConfig} RobotsConfig */
+
+ /**
+ * Check if hostname matches allowed hosts pattern
+ *
+ * @param {string} hostname - Hostname to check (e.g., test.mysite.com)
+ * @param {string[] | '*' | undefined} allowedHosts - Allowed host patterns
+ *
+ * @returns {boolean} True if host is allowed
+ */
+ export function isHostAllowed(hostname, allowedHosts) {
+   // If not configured or set to '*', allow all hosts
+   if (!allowedHosts || allowedHosts === '*') {
+     return true;
+   }
+
+   if (typeof allowedHosts === 'string') {
+     allowedHosts = [allowedHosts];
+   }
+
+   // Check if hostname matches any allowed pattern
+   return allowedHosts.some((pattern) => {
+     // Convert wildcard pattern to regex
+     // Example: *.mysite.com -> ^.*\.mysite\.com$
+     const regexPattern = pattern
+       .replace(/\./g, '\\.') // Escape dots
+       .replace(/\*/g, '.*'); // * becomes .*
+
+     const regex = new RegExp(`^${regexPattern}$`, 'i');
+     return regex.test(hostname);
+   });
+ }
+
+ /**
+ * Generate robots.txt with sitemap reference
+ *
+ * NOTE: If deployed behind a reverse proxy (nginx, Cloudflare, etc.),
+ * ensure your adapter is configured to trust proxy headers for correct
+ * origin detection:
+ *
+ * // svelte.config.js
+ * export default {
+ *   kit: {
+ *     adapter: adapter({
+ *       // Trust X-Forwarded-* headers from proxy
+ *       trustProxy: true
+ *     })
+ *   }
+ * };
+ *
+ * Without this, url.origin may be http://localhost instead of your
+ * actual domain, and the sitemap directive will be omitted.
+ *
+ * @param {URL} url - Request URL object
+ * @param {RobotsConfig} [config] - Robots configuration object
+ *
+ * @returns {string} robots.txt content
+ */
+ export function generateRobotsTxt(url, config = {}) {
+   const hostAllowed = isHostAllowed(url.hostname, config.allowedHosts);
+
+   // Block entire site if host is not allowed
+   if (!hostAllowed) {
+     return 'User-agent: *\nDisallow: /';
+   }
+
+   // Allow site, but add specific path blocks
+   let content = 'User-agent: *\nAllow: /';
+
+   // Add disallowed paths
+   if (config.disallowedPaths && config.disallowedPaths.length > 0) {
+     config.disallowedPaths.forEach((path) => {
+       content += `\nDisallow: ${path}`;
+     });
+   }
+
+   // Add sitemap reference if enabled and origin is available
+   if (url.origin && config.includeSitemap !== false) {
+     content += `\nSitemap: ${url.origin}/sitemap.xml`;
+   }
+
+   return content;
+ }
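For orientation: the new `generateRobotsTxt` helper above is meant to be called from a robots.txt route handler. A minimal SvelteKit sketch follows; the route path and import specifier are assumptions for illustration, not part of this diff.

```js
// src/routes/robots.txt/+server.js — illustrative sketch only
// NOTE: import specifier is an assumption; adjust to the package's actual export path.
import { generateRobotsTxt } from '@hkdigital/lib-core';

export function GET({ url }) {
  // url is the request URL; its hostname/origin drive host filtering and the Sitemap line
  const body = generateRobotsTxt(url, {
    allowedHosts: ['mysite.com', '*.mysite.com'], // example values
    disallowedPaths: ['/admin']
  });

  return new Response(body, {
    headers: { 'content-type': 'text/plain' }
  });
}
```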
@@ -0,0 +1,17 @@
+ declare const _default: {};
+ export default _default;
+ export type RobotsConfig = {
+   /**
+    * Allowed host patterns. Use '*' or omit to allow all hosts.
+    * Supports wildcards (e.g., '*.example.com')
+    */
+   allowedHosts?: string[] | "*" | undefined;
+   /**
+    * Paths to block from indexing (e.g., '/admin', '/api/*')
+    */
+   disallowedPaths?: string[] | undefined;
+   /**
+    * Include sitemap reference in robots.txt (default: true)
+    */
+   includeSitemap?: boolean | undefined;
+ };
@@ -0,0 +1,12 @@
+ /**
+ * @typedef {Object} RobotsConfig
+ * @property {string[] | '*'} [allowedHosts]
+ *   Allowed host patterns. Use '*' or omit to allow all hosts.
+ *   Supports wildcards (e.g., '*.example.com')
+ * @property {string[]} [disallowedPaths]
+ *   Paths to block from indexing (e.g., '/admin', '/api/*')
+ * @property {boolean} [includeSitemap]
+ *   Include sitemap reference in robots.txt (default: true)
+ */
+
+ export default {};
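To illustrate the `RobotsConfig` shape documented above, a config value might look like this (all values invented for the example):

```js
// Example RobotsConfig object — values are illustrative only
const robotsConfig = {
  allowedHosts: ['example.com', '*.example.com'], // wildcard subdomains allowed
  disallowedPaths: ['/admin', '/api/*'],          // paths blocked from indexing
  includeSitemap: true                            // emit the Sitemap: directive (default)
};
```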
@@ -0,0 +1 @@
+ export { generateRobotsTxt, isHostAllowed } from "./robots/index.js";
@@ -0,0 +1,5 @@
+ /**
+ * Public exports for robots.txt utilities
+ */
+
+ export { generateRobotsTxt, isHostAllowed } from './robots/index.js';
@@ -0,0 +1,11 @@
+ /**
+ * Generate sitemap XML
+ *
+ * @param {string} origin - Base URL (e.g., https://example.com)
+ * @param {SitemapRoute[]} [routes=[]] - Array of routes
+ *
+ * @returns {string} XML sitemap
+ */
+ export function generateSitemap(origin: string, routes?: SitemapRoute[]): string;
+ export type SitemapRoute = import("./typedef.js").SitemapRoute;
+ export type SitemapRouteObject = import("./typedef.js").SitemapRouteObject;
@@ -0,0 +1,63 @@
+ // @see https://www.sitemaps.org/protocol.html
+
+ /** @typedef {import('./typedef.js').SitemapRoute} SitemapRoute */
+ /** @typedef {import('./typedef.js').SitemapRouteObject} SitemapRouteObject */
+
+ /**
+ * Normalize route to full route object with defaults
+ *
+ * @param {import('./typedef.js').SitemapRoute} route - Route path string or route object
+ *
+ * @returns {SitemapRouteObject} Normalized route object
+ */
+ function normalizeRoute(route) {
+   // Handle simple string format
+   if (typeof route === 'string') {
+     return {
+       path: route,
+       priority: route === '/' ? 1.0 : 0.8,
+       changefreq: route === '/' ? 'daily' : 'weekly'
+     };
+   }
+
+   // Handle object format with defaults
+   return {
+     priority: 0.8,
+     changefreq: 'weekly',
+     ...route
+   };
+ }
+
+ /**
+ * Generate sitemap XML
+ *
+ * @param {string} origin - Base URL (e.g., https://example.com)
+ * @param {SitemapRoute[]} [routes=[]] - Array of routes
+ *
+ * @returns {string} XML sitemap
+ */
+ export function generateSitemap(origin, routes = []) {
+   // Ensure root path is always included (failsafe)
+   const hasRoot = routes.some((route) => {
+     const path = typeof route === 'string' ? route : route.path;
+     return path === '/';
+   });
+
+   const normalizedRoutes = hasRoot ? routes : ['/', ...routes];
+
+   const urls = normalizedRoutes
+     .map(normalizeRoute)
+     .map(
+       (route) => `
+ <url>
+ <loc>${origin}${route.path}</loc>
+ <changefreq>${route.changefreq}</changefreq>
+ <priority>${route.priority}</priority>
+ </url>`
+     )
+     .join('');
+
+   return `<?xml version="1.0" encoding="UTF-8"?>
+ <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">${urls}
+ </urlset>`;
+ }
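A sitemap.xml endpoint could feed its request origin into `generateSitemap` along with a route list. Minimal sketch below; the route path and import specifier are assumed rather than taken from this diff.

```js
// src/routes/sitemap.xml/+server.js — illustrative sketch only
// NOTE: import specifier is an assumption; adjust to the package's actual export path.
import { generateSitemap } from '@hkdigital/lib-core';

export function GET({ url }) {
  // Routes may be plain paths or objects; '/' is added automatically if missing
  const xml = generateSitemap(url.origin, [
    '/',                                                       // string form, gets daily/1.0 defaults
    { path: '/about', priority: 0.5, changefreq: 'monthly' }   // object form with explicit values
  ]);

  return new Response(xml, {
    headers: { 'content-type': 'application/xml' }
  });
}
```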
@@ -0,0 +1,20 @@
+ declare const _default: {};
+ export default _default;
+ export type SitemapRouteObject = {
+   /**
+    * - Route path (e.g., '/about')
+    */
+   path: string;
+   /**
+    * - Priority (0.0 to 1.0)
+    */
+   priority?: number | undefined;
+   /**
+    * - Change frequency
+    */
+   changefreq?: "hourly" | "daily" | "weekly" | "always" | "monthly" | "yearly" | "never" | undefined;
+ };
+ /**
+ * Route can be a simple string path or an object with details
+ */
+ export type SitemapRoute = string | SitemapRouteObject;
@@ -0,0 +1,14 @@
+ /**
+ * @typedef {Object} SitemapRouteObject
+ * @property {string} path - Route path (e.g., '/about')
+ * @property {number} [priority] - Priority (0.0 to 1.0)
+ * @property {'always'|'hourly'|'daily'|'weekly'|'monthly'|'yearly'|'never'}
+ *   [changefreq] - Change frequency
+ */
+
+ /**
+ * @typedef {string | SitemapRouteObject} SitemapRoute
+ * Route can be a simple string path or an object with details
+ */
+
+ export default {};
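As a quick illustration of the two route shapes accepted by `SitemapRoute` (example values only):

```js
// Example SitemapRoute[] — values are illustrative only
const routes = [
  '/blog',                                           // string: normalized to priority 0.8, changefreq 'weekly'
  { path: '/', priority: 1.0, changefreq: 'daily' }, // fully specified object
  { path: '/contact' }                               // object: missing fields get the defaults
];
```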
@@ -0,0 +1 @@
+ export { generateSitemap } from "./sitemap/index.js";
@@ -0,0 +1,5 @@
+ /**
+ * Public exports for sitemap utilities
+ */
+
+ export { generateSitemap } from './sitemap/index.js';
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@hkdigital/lib-core",
-   "version": "0.5.12",
+   "version": "0.5.13",
    "author": {
      "name": "HKdigital",
      "url": "https://hkdigital.nl"