@seo-console/package 1.0.0 → 1.0.1
- package/dist/components/index.d.mts +48 -3
- package/dist/components/index.d.ts +48 -3
- package/dist/components/index.js +3 -1
- package/dist/components/index.js.map +1 -1
- package/dist/components/index.mjs +3 -1
- package/dist/components/index.mjs.map +1 -1
- package/dist/hooks/index.js +443 -3
- package/dist/hooks/index.js.map +1 -1
- package/dist/hooks/index.mjs +443 -3
- package/dist/hooks/index.mjs.map +1 -1
- package/dist/index.d.mts +71 -66
- package/dist/index.d.ts +71 -66
- package/dist/index.js +804 -692
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +788 -682
- package/dist/index.mjs.map +1 -1
- package/dist/robots-generator-B1KOf8vn.d.ts +166 -0
- package/dist/robots-generator-D6T5HVNx.d.mts +166 -0
- package/dist/{index-6lAOwFXQ.d.mts → seo-schema-D8EwzllB.d.mts} +1 -46
- package/dist/{index-6lAOwFXQ.d.ts → seo-schema-D8EwzllB.d.ts} +1 -46
- package/dist/server.d.mts +88 -0
- package/dist/server.d.ts +88 -0
- package/dist/server.js +1547 -0
- package/dist/server.js.map +1 -0
- package/dist/server.mjs +1485 -0
- package/dist/server.mjs.map +1 -0
- package/package.json +13 -3
package/dist/robots-generator-B1KOf8vn.d.ts
ADDED

@@ -0,0 +1,166 @@
+import { S as SEORecord } from './seo-schema-D8EwzllB.js';
+
+type ValidationSeverity = "critical" | "warning" | "info";
+interface ValidationIssue {
+    field: string;
+    severity: ValidationSeverity;
+    message: string;
+    expected?: string;
+    actual?: string;
+}
+interface ValidationResult {
+    isValid: boolean;
+    issues: ValidationIssue[];
+    validatedAt: Date;
+}
+/**
+ * Validate HTML content against SEO record requirements
+ */
+declare function validateHTML(html: string, record: SEORecord, _baseUrl?: string): Promise<ValidationResult>;
+/**
+ * Fetch HTML from a URL and validate it
+ */
+declare function validateURL(url: string, record: SEORecord): Promise<ValidationResult>;
+
+interface ImageValidationResult {
+    isValid: boolean;
+    issues: Array<{
+        field: string;
+        severity: "critical" | "warning" | "info";
+        message: string;
+        expected?: string;
+        actual?: string;
+    }>;
+    metadata?: {
+        width: number;
+        height: number;
+        format: string;
+        size: number;
+    };
+}
+/**
+ * Validate OG image URL
+ * Checks dimensions, format, file size, and accessibility
+ */
+declare function validateOGImage(imageUrl: string, expectedWidth?: number, expectedHeight?: number): Promise<ImageValidationResult>;
+
+/**
+ * Route Discovery Utility
+ * Automatically discovers Next.js routes from the file system
+ */
+interface DiscoveredRoute {
+    routePath: string;
+    filePath: string;
+    isDynamic: boolean;
+    isCatchAll: boolean;
+    params: string[];
+}
+/**
+ * Discover all Next.js routes from the app directory
+ */
+declare function discoverNextJSRoutes(appDir?: string, rootDir?: string): Promise<DiscoveredRoute[]>;
+/**
+ * Generate example route paths for dynamic routes
+ * Useful for creating sample SEO records
+ */
+declare function generateExamplePaths(route: DiscoveredRoute, count?: number): string[];
+
+/**
+ * Metadata Extractor
+ * Extracts SEO metadata from HTML pages or Next.js metadata exports
+ */
+
+interface ExtractedMetadata {
+    title?: string;
+    description?: string;
+    ogTitle?: string;
+    ogDescription?: string;
+    ogImageUrl?: string;
+    ogType?: string;
+    ogUrl?: string;
+    canonicalUrl?: string;
+    robots?: string;
+    keywords?: string[];
+}
+/**
+ * Extract metadata from HTML string
+ */
+declare function extractMetadataFromHTML(html: string, baseUrl?: string): ExtractedMetadata;
+/**
+ * Extract metadata from a live URL
+ */
+declare function extractMetadataFromURL(url: string): Promise<ExtractedMetadata>;
+/**
+ * Convert extracted metadata to SEO record format
+ */
+declare function metadataToSEORecord(metadata: ExtractedMetadata, routePath: string, userId?: string): Partial<SEORecord>;
+/**
+ * Crawl a site and extract metadata from all pages
+ */
+declare function crawlSiteForSEO(baseUrl: string, routes: string[]): Promise<Map<string, ExtractedMetadata>>;
+
+/**
+ * Sitemap Generator
+ * Generates sitemap.xml from SEO records
+ * Only includes routes with canonical URLs
+ */
+
+interface SitemapEntry {
+    loc: string;
+    lastmod?: string;
+    changefreq?: "always" | "hourly" | "daily" | "weekly" | "monthly" | "yearly" | "never";
+    priority?: number;
+}
+interface SitemapOptions {
+    baseUrl: string;
+    entries: SitemapEntry[];
+}
+/**
+ * Generate sitemap.xml content
+ */
+declare function generateSitemapXML(options: SitemapOptions): string;
+/**
+ * Convert SEO records to sitemap entries
+ * Only includes records with canonical URLs
+ */
+declare function seoRecordsToSitemapEntries(records: SEORecord[], baseUrl: string): SitemapEntry[];
+/**
+ * Generate sitemap from SEO records
+ */
+declare function generateSitemapFromRecords(records: SEORecord[], baseUrl: string): string;
+/**
+ * Validate sitemap entry
+ */
+declare function validateSitemapEntry(entry: SitemapEntry): {
+    valid: boolean;
+    errors: string[];
+};
+
+/**
+ * Robots.txt Generator
+ * Generates or updates robots.txt with sitemap reference
+ */
+interface RobotsTxtOptions {
+    userAgents?: Array<{
+        agent: string;
+        allow?: string[];
+        disallow?: string[];
+    }>;
+    sitemapUrl?: string;
+    crawlDelay?: number;
+}
+/**
+ * Generate robots.txt content
+ */
+declare function generateRobotsTxt(options?: RobotsTxtOptions): string;
+/**
+ * Update existing robots.txt to include sitemap
+ * Preserves existing content
+ */
+declare function updateRobotsTxtWithSitemap(existingContent: string, sitemapUrl: string): string;
+/**
+ * Extract sitemap URL from robots.txt
+ */
+declare function extractSitemapFromRobotsTxt(content: string): string | null;
+
+export { type DiscoveredRoute as D, type ExtractedMetadata as E, type ImageValidationResult as I, type RobotsTxtOptions as R, type SitemapEntry as S, type ValidationResult as V, type ValidationIssue as a, extractMetadataFromURL as b, crawlSiteForSEO as c, discoverNextJSRoutes as d, extractMetadataFromHTML as e, generateSitemapXML as f, generateExamplePaths as g, generateSitemapFromRecords as h, type SitemapOptions as i, generateRobotsTxt as j, extractSitemapFromRobotsTxt as k, validateOGImage as l, metadataToSEORecord as m, validateHTML as n, validateURL as o, seoRecordsToSitemapEntries as s, updateRobotsTxtWithSitemap as u, validateSitemapEntry as v };
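Taken together, the sitemap and robots helpers above form a small, self-contained API. A minimal usage sketch, assuming the new `dist/server.*` bundles are exposed as an `@seo-console/package/server` entry; the import path and all sample URLs are hypothetical:

```ts
// Sketch only: the "@seo-console/package/server" specifier is an assumption
// inferred from the new dist/server.* files; check the package "exports" map.
import { generateSitemapXML, generateRobotsTxt } from "@seo-console/package/server";

// Build sitemap.xml from hand-written entries (shape matches SitemapEntry above).
const sitemap: string = generateSitemapXML({
  baseUrl: "https://example.com", // hypothetical site
  entries: [
    { loc: "https://example.com/", changefreq: "daily", priority: 1.0 },
    { loc: "https://example.com/blog", changefreq: "weekly", priority: 0.7 },
  ],
});

// Emit a robots.txt that points crawlers at the sitemap (RobotsTxtOptions above).
const robots: string = generateRobotsTxt({
  userAgents: [{ agent: "*", disallow: ["/admin"] }],
  sitemapUrl: "https://example.com/sitemap.xml",
});

console.log(sitemap);
console.log(robots);
```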
package/dist/robots-generator-D6T5HVNx.d.mts
ADDED

@@ -0,0 +1,166 @@
+import { S as SEORecord } from './seo-schema-D8EwzllB.mjs';
+
+type ValidationSeverity = "critical" | "warning" | "info";
+interface ValidationIssue {
+    field: string;
+    severity: ValidationSeverity;
+    message: string;
+    expected?: string;
+    actual?: string;
+}
+interface ValidationResult {
+    isValid: boolean;
+    issues: ValidationIssue[];
+    validatedAt: Date;
+}
+/**
+ * Validate HTML content against SEO record requirements
+ */
+declare function validateHTML(html: string, record: SEORecord, _baseUrl?: string): Promise<ValidationResult>;
+/**
+ * Fetch HTML from a URL and validate it
+ */
+declare function validateURL(url: string, record: SEORecord): Promise<ValidationResult>;
+
+interface ImageValidationResult {
+    isValid: boolean;
+    issues: Array<{
+        field: string;
+        severity: "critical" | "warning" | "info";
+        message: string;
+        expected?: string;
+        actual?: string;
+    }>;
+    metadata?: {
+        width: number;
+        height: number;
+        format: string;
+        size: number;
+    };
+}
+/**
+ * Validate OG image URL
+ * Checks dimensions, format, file size, and accessibility
+ */
+declare function validateOGImage(imageUrl: string, expectedWidth?: number, expectedHeight?: number): Promise<ImageValidationResult>;
+
+/**
+ * Route Discovery Utility
+ * Automatically discovers Next.js routes from the file system
+ */
+interface DiscoveredRoute {
+    routePath: string;
+    filePath: string;
+    isDynamic: boolean;
+    isCatchAll: boolean;
+    params: string[];
+}
+/**
+ * Discover all Next.js routes from the app directory
+ */
+declare function discoverNextJSRoutes(appDir?: string, rootDir?: string): Promise<DiscoveredRoute[]>;
+/**
+ * Generate example route paths for dynamic routes
+ * Useful for creating sample SEO records
+ */
+declare function generateExamplePaths(route: DiscoveredRoute, count?: number): string[];
+
+/**
+ * Metadata Extractor
+ * Extracts SEO metadata from HTML pages or Next.js metadata exports
+ */
+
+interface ExtractedMetadata {
+    title?: string;
+    description?: string;
+    ogTitle?: string;
+    ogDescription?: string;
+    ogImageUrl?: string;
+    ogType?: string;
+    ogUrl?: string;
+    canonicalUrl?: string;
+    robots?: string;
+    keywords?: string[];
+}
+/**
+ * Extract metadata from HTML string
+ */
+declare function extractMetadataFromHTML(html: string, baseUrl?: string): ExtractedMetadata;
+/**
+ * Extract metadata from a live URL
+ */
+declare function extractMetadataFromURL(url: string): Promise<ExtractedMetadata>;
+/**
+ * Convert extracted metadata to SEO record format
+ */
+declare function metadataToSEORecord(metadata: ExtractedMetadata, routePath: string, userId?: string): Partial<SEORecord>;
+/**
+ * Crawl a site and extract metadata from all pages
+ */
+declare function crawlSiteForSEO(baseUrl: string, routes: string[]): Promise<Map<string, ExtractedMetadata>>;
+
+/**
+ * Sitemap Generator
+ * Generates sitemap.xml from SEO records
+ * Only includes routes with canonical URLs
+ */
+
+interface SitemapEntry {
+    loc: string;
+    lastmod?: string;
+    changefreq?: "always" | "hourly" | "daily" | "weekly" | "monthly" | "yearly" | "never";
+    priority?: number;
+}
+interface SitemapOptions {
+    baseUrl: string;
+    entries: SitemapEntry[];
+}
+/**
+ * Generate sitemap.xml content
+ */
+declare function generateSitemapXML(options: SitemapOptions): string;
+/**
+ * Convert SEO records to sitemap entries
+ * Only includes records with canonical URLs
+ */
+declare function seoRecordsToSitemapEntries(records: SEORecord[], baseUrl: string): SitemapEntry[];
+/**
+ * Generate sitemap from SEO records
+ */
+declare function generateSitemapFromRecords(records: SEORecord[], baseUrl: string): string;
+/**
+ * Validate sitemap entry
+ */
+declare function validateSitemapEntry(entry: SitemapEntry): {
+    valid: boolean;
+    errors: string[];
+};
+
+/**
+ * Robots.txt Generator
+ * Generates or updates robots.txt with sitemap reference
+ */
+interface RobotsTxtOptions {
+    userAgents?: Array<{
+        agent: string;
+        allow?: string[];
+        disallow?: string[];
+    }>;
+    sitemapUrl?: string;
+    crawlDelay?: number;
+}
+/**
+ * Generate robots.txt content
+ */
+declare function generateRobotsTxt(options?: RobotsTxtOptions): string;
+/**
+ * Update existing robots.txt to include sitemap
+ * Preserves existing content
+ */
+declare function updateRobotsTxtWithSitemap(existingContent: string, sitemapUrl: string): string;
+/**
+ * Extract sitemap URL from robots.txt
+ */
+declare function extractSitemapFromRobotsTxt(content: string): string | null;
+
+export { type DiscoveredRoute as D, type ExtractedMetadata as E, type ImageValidationResult as I, type RobotsTxtOptions as R, type SitemapEntry as S, type ValidationResult as V, type ValidationIssue as a, extractMetadataFromURL as b, crawlSiteForSEO as c, discoverNextJSRoutes as d, extractMetadataFromHTML as e, generateSitemapXML as f, generateExamplePaths as g, generateSitemapFromRecords as h, type SitemapOptions as i, generateRobotsTxt as j, extractSitemapFromRobotsTxt as k, validateOGImage as l, metadataToSEORecord as m, validateHTML as n, validateURL as o, seoRecordsToSitemapEntries as s, updateRobotsTxtWithSitemap as u, validateSitemapEntry as v };
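The `.d.mts` hunk above is the ESM twin of the `.d.ts` declarations, identical apart from its `.mjs` import specifiers. As a hedged sketch of the extraction pipeline those declarations describe (import path assumed as above, sample HTML hypothetical):

```ts
// Sketch: import path assumed; the HTML below is a hypothetical sample.
import { extractMetadataFromHTML, metadataToSEORecord } from "@seo-console/package/server";

const html = `<html><head>
  <title>Pricing</title>
  <meta name="description" content="Plans and pricing.">
  <meta property="og:image" content="https://example.com/og.png">
</head><body></body></html>`;

// Pull the tags into the ExtractedMetadata shape declared above...
const metadata = extractMetadataFromHTML(html, "https://example.com");
// ...then turn them into a Partial<SEORecord> draft for a route.
const draft = metadataToSEORecord(metadata, "/pricing");
console.log(draft.title); // "Pricing" (assuming the extractor maps <title>)
```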
package/dist/seo-schema-D8EwzllB.d.mts
RENAMED from package/dist/index-6lAOwFXQ.d.mts

@@ -1,8 +1,4 @@
-import * as react_jsx_runtime from 'react/jsx-runtime';
 import { z } from 'zod';
-import * as React from 'react';
-
-declare function SEORecordList(): react_jsx_runtime.JSX.Element;
 
 declare const seoRecordSchema: z.ZodObject<{
     title: z.ZodOptional<z.ZodString>;
@@ -285,45 +281,4 @@ type SEORecord = z.infer<typeof seoRecordSchema>;
 type CreateSEORecord = z.infer<typeof createSEORecordSchema>;
 type UpdateSEORecord = z.infer<typeof updateSEORecordSchema>;
 
-interface SEORecordFormProps {
-    record?: SEORecord;
-    onSuccess?: () => void;
-    onCancel?: () => void;
-}
-declare function SEORecordForm({ record, onSuccess, onCancel }: SEORecordFormProps): react_jsx_runtime.JSX.Element;
-
-declare function ValidationDashboard(): react_jsx_runtime.JSX.Element;
-
-interface OGImagePreviewProps {
-    imageUrl: string;
-    expectedWidth?: number;
-    expectedHeight?: number;
-    title?: string;
-    description?: string;
-}
-declare function OGImagePreview({ imageUrl, expectedWidth, expectedHeight, title, description, }: OGImagePreviewProps): react_jsx_runtime.JSX.Element;
-
-interface ButtonProps extends React.ButtonHTMLAttributes<HTMLButtonElement> {
-    variant?: "default" | "destructive" | "outline" | "secondary" | "ghost" | "link";
-    size?: "default" | "sm" | "lg" | "icon";
-}
-declare const Button: React.ForwardRefExoticComponent<ButtonProps & React.RefAttributes<HTMLButtonElement>>;
-
-declare const Card: React.ForwardRefExoticComponent<React.HTMLAttributes<HTMLDivElement> & React.RefAttributes<HTMLDivElement>>;
-declare const CardHeader: React.ForwardRefExoticComponent<React.HTMLAttributes<HTMLDivElement> & React.RefAttributes<HTMLDivElement>>;
-declare const CardTitle: React.ForwardRefExoticComponent<React.HTMLAttributes<HTMLHeadingElement> & React.RefAttributes<HTMLParagraphElement>>;
-declare const CardDescription: React.ForwardRefExoticComponent<React.HTMLAttributes<HTMLParagraphElement> & React.RefAttributes<HTMLParagraphElement>>;
-declare const CardContent: React.ForwardRefExoticComponent<React.HTMLAttributes<HTMLDivElement> & React.RefAttributes<HTMLDivElement>>;
-declare const CardFooter: React.ForwardRefExoticComponent<React.HTMLAttributes<HTMLDivElement> & React.RefAttributes<HTMLDivElement>>;
-
-interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {
-}
-declare const Input: React.ForwardRefExoticComponent<InputProps & React.RefAttributes<HTMLInputElement>>;
-
-interface SpinnerProps {
-    className?: string;
-    size?: "sm" | "default" | "lg";
-}
-declare function Spinner({ className, size }: SpinnerProps): react_jsx_runtime.JSX.Element;
-
-export { Button as B, type CreateSEORecord as C, Input as I, OGImagePreview as O, type SEORecord as S, type UpdateSEORecord as U, ValidationDashboard as V, SEORecordList as a, SEORecordForm as b, createSEORecordSchema as c, Card as d, CardHeader as e, CardFooter as f, CardTitle as g, CardDescription as h, CardContent as i, Spinner as j, updateSEORecordSchema as u };
+export { type CreateSEORecord as C, type SEORecord as S, type UpdateSEORecord as U, createSEORecordSchema as c, updateSEORecordSchema as u };
package/dist/seo-schema-D8EwzllB.d.ts
RENAMED from package/dist/index-6lAOwFXQ.d.ts

@@ -1,8 +1,4 @@
-import * as react_jsx_runtime from 'react/jsx-runtime';
 import { z } from 'zod';
-import * as React from 'react';
-
-declare function SEORecordList(): react_jsx_runtime.JSX.Element;
 
 declare const seoRecordSchema: z.ZodObject<{
     title: z.ZodOptional<z.ZodString>;
@@ -285,45 +281,4 @@ type SEORecord = z.infer<typeof seoRecordSchema>;
 type CreateSEORecord = z.infer<typeof createSEORecordSchema>;
 type UpdateSEORecord = z.infer<typeof updateSEORecordSchema>;
 
-interface SEORecordFormProps {
-    record?: SEORecord;
-    onSuccess?: () => void;
-    onCancel?: () => void;
-}
-declare function SEORecordForm({ record, onSuccess, onCancel }: SEORecordFormProps): react_jsx_runtime.JSX.Element;
-
-declare function ValidationDashboard(): react_jsx_runtime.JSX.Element;
-
-interface OGImagePreviewProps {
-    imageUrl: string;
-    expectedWidth?: number;
-    expectedHeight?: number;
-    title?: string;
-    description?: string;
-}
-declare function OGImagePreview({ imageUrl, expectedWidth, expectedHeight, title, description, }: OGImagePreviewProps): react_jsx_runtime.JSX.Element;
-
-interface ButtonProps extends React.ButtonHTMLAttributes<HTMLButtonElement> {
-    variant?: "default" | "destructive" | "outline" | "secondary" | "ghost" | "link";
-    size?: "default" | "sm" | "lg" | "icon";
-}
-declare const Button: React.ForwardRefExoticComponent<ButtonProps & React.RefAttributes<HTMLButtonElement>>;
-
-declare const Card: React.ForwardRefExoticComponent<React.HTMLAttributes<HTMLDivElement> & React.RefAttributes<HTMLDivElement>>;
-declare const CardHeader: React.ForwardRefExoticComponent<React.HTMLAttributes<HTMLDivElement> & React.RefAttributes<HTMLDivElement>>;
-declare const CardTitle: React.ForwardRefExoticComponent<React.HTMLAttributes<HTMLHeadingElement> & React.RefAttributes<HTMLParagraphElement>>;
-declare const CardDescription: React.ForwardRefExoticComponent<React.HTMLAttributes<HTMLParagraphElement> & React.RefAttributes<HTMLParagraphElement>>;
-declare const CardContent: React.ForwardRefExoticComponent<React.HTMLAttributes<HTMLDivElement> & React.RefAttributes<HTMLDivElement>>;
-declare const CardFooter: React.ForwardRefExoticComponent<React.HTMLAttributes<HTMLDivElement> & React.RefAttributes<HTMLDivElement>>;
-
-interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {
-}
-declare const Input: React.ForwardRefExoticComponent<InputProps & React.RefAttributes<HTMLInputElement>>;
-
-interface SpinnerProps {
-    className?: string;
-    size?: "sm" | "default" | "lg";
-}
-declare function Spinner({ className, size }: SpinnerProps): react_jsx_runtime.JSX.Element;
-
-export { Button as B, type CreateSEORecord as C, Input as I, OGImagePreview as O, type SEORecord as S, type UpdateSEORecord as U, ValidationDashboard as V, SEORecordList as a, SEORecordForm as b, createSEORecordSchema as c, Card as d, CardHeader as e, CardFooter as f, CardTitle as g, CardDescription as h, CardContent as i, Spinner as j, updateSEORecordSchema as u };
+export { type CreateSEORecord as C, type SEORecord as S, type UpdateSEORecord as U, createSEORecordSchema as c, updateSEORecordSchema as u };
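After the rename, this chunk is schema-only: the React component declarations move out, and just the zod schemas plus their inferred types remain. A hedged consumption sketch — only the optional `title` field is visible in this diff, so the payload is deliberately minimal; any required fields not shown here would surface as parse issues:

```ts
// Sketch: import path assumed; only `title` is known from this diff,
// so the schema's remaining shape is an open question here.
import { createSEORecordSchema, type CreateSEORecord } from "@seo-console/package";

const parsed = createSEORecordSchema.safeParse({ title: "Home page" });
if (parsed.success) {
  const record: CreateSEORecord = parsed.data;
  console.log("valid record:", record.title);
} else {
  // Fields required by the schema but absent above land here.
  console.error(parsed.error.issues);
}
```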
package/dist/server.d.mts
ADDED

@@ -0,0 +1,88 @@
+import { S as SEORecord, C as CreateSEORecord, U as UpdateSEORecord } from './seo-schema-D8EwzllB.mjs';
+export { c as createSEORecordSchema, u as updateSEORecordSchema } from './seo-schema-D8EwzllB.mjs';
+export { I as ImageValidationResult, a as ValidationIssue, V as ValidationResult, c as crawlSiteForSEO, d as discoverNextJSRoutes, b as extractMetadataFromURL, j as generateRobotsTxt, h as generateSitemapFromRecords, f as generateSitemapXML, m as metadataToSEORecord, s as seoRecordsToSitemapEntries, u as updateRobotsTxtWithSitemap, n as validateHTML, l as validateOGImage, o as validateURL } from './robots-generator-D6T5HVNx.mjs';
+export { GenerateMetadataOptions, getRoutePathFromParams, useGenerateMetadata } from './hooks/index.mjs';
+import 'zod';
+import 'next';
+
+type Result$1<T, E = Error> = {
+    success: true;
+    data: T;
+} | {
+    success: false;
+    error: E;
+};
+/**
+ * Get all SEO records for the current user
+ */
+declare function getSEORecords(): Promise<Result$1<SEORecord[]>>;
+/**
+ * Get a single SEO record by ID
+ */
+declare function getSEORecordById(id: string): Promise<Result$1<SEORecord>>;
+/**
+ * Get SEO record by route path
+ */
+declare function getSEORecordByRoute(routePath: string): Promise<Result$1<SEORecord | null>>;
+/**
+ * Create a new SEO record
+ */
+declare function createSEORecord(record: CreateSEORecord): Promise<Result$1<SEORecord>>;
+/**
+ * Update an existing SEO record
+ */
+declare function updateSEORecord(record: UpdateSEORecord): Promise<Result$1<SEORecord>>;
+/**
+ * Delete an SEO record
+ */
+declare function deleteSEORecord(id: string): Promise<Result$1<void>>;
+
+/**
+ * Crawlability Validator
+ * Validates that search engines can crawl and index pages
+ */
+interface CrawlabilityResult {
+    crawlable: boolean;
+    indexable: boolean;
+    issues: CrawlabilityIssue[];
+    warnings: CrawlabilityIssue[];
+}
+interface CrawlabilityIssue {
+    type: "noindex" | "nofollow" | "robots_blocked" | "auth_wall" | "redirect_loop" | "404" | "canonical_missing";
+    severity: "error" | "warning";
+    message: string;
+    page: string;
+}
+/**
+ * Validate that a page is crawlable and indexable
+ */
+declare function validateCrawlability(url: string, html?: string): Promise<CrawlabilityResult>;
+/**
+ * Validate robots.txt allows crawling
+ */
+declare function validateRobotsTxt(baseUrl: string, routePath: string): Promise<{
+    allowed: boolean;
+    reason?: string;
+}>;
+/**
+ * Check if site is publicly accessible (no auth wall)
+ */
+declare function validatePublicAccess(url: string): Promise<{
+    accessible: boolean;
+    requiresAuth: boolean;
+}>;
+
+/**
+ * Server-side only exports
+ * These can be safely imported in API routes and server components
+ */
+
+type Result<T> = {
+    success: true;
+    data: T;
+} | {
+    success: false;
+    error: Error;
+};
+
+export { CreateSEORecord, type Result, SEORecord, UpdateSEORecord, createSEORecord, deleteSEORecord, getSEORecords as getAllSEORecords, getSEORecordById, getSEORecordByRoute, getSEORecords, updateSEORecord, validateCrawlability, validatePublicAccess, validateRobotsTxt };
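Every server action above returns the `Result$1` discriminated union rather than throwing, so callers narrow on `success`. A minimal sketch of that pattern (entry path assumed as before):

```ts
// Sketch: the "@seo-console/package/server" path is assumed from dist/server.*.
import { getSEORecords } from "@seo-console/package/server";

async function countRecords(): Promise<number> {
  const result = await getSEORecords();
  if (!result.success) {
    // Failure branch: `result.error` is typed as Error.
    throw result.error;
  }
  // Success branch: `result.data` narrows to SEORecord[].
  return result.data.length;
}
```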
package/dist/server.d.ts
ADDED
@@ -0,0 +1,88 @@
+import { S as SEORecord, C as CreateSEORecord, U as UpdateSEORecord } from './seo-schema-D8EwzllB.js';
+export { c as createSEORecordSchema, u as updateSEORecordSchema } from './seo-schema-D8EwzllB.js';
+export { I as ImageValidationResult, a as ValidationIssue, V as ValidationResult, c as crawlSiteForSEO, d as discoverNextJSRoutes, b as extractMetadataFromURL, j as generateRobotsTxt, h as generateSitemapFromRecords, f as generateSitemapXML, m as metadataToSEORecord, s as seoRecordsToSitemapEntries, u as updateRobotsTxtWithSitemap, n as validateHTML, l as validateOGImage, o as validateURL } from './robots-generator-B1KOf8vn.js';
+export { GenerateMetadataOptions, getRoutePathFromParams, useGenerateMetadata } from './hooks/index.js';
+import 'zod';
+import 'next';
+
+type Result$1<T, E = Error> = {
+    success: true;
+    data: T;
+} | {
+    success: false;
+    error: E;
+};
+/**
+ * Get all SEO records for the current user
+ */
+declare function getSEORecords(): Promise<Result$1<SEORecord[]>>;
+/**
+ * Get a single SEO record by ID
+ */
+declare function getSEORecordById(id: string): Promise<Result$1<SEORecord>>;
+/**
+ * Get SEO record by route path
+ */
+declare function getSEORecordByRoute(routePath: string): Promise<Result$1<SEORecord | null>>;
+/**
+ * Create a new SEO record
+ */
+declare function createSEORecord(record: CreateSEORecord): Promise<Result$1<SEORecord>>;
+/**
+ * Update an existing SEO record
+ */
+declare function updateSEORecord(record: UpdateSEORecord): Promise<Result$1<SEORecord>>;
+/**
+ * Delete an SEO record
+ */
+declare function deleteSEORecord(id: string): Promise<Result$1<void>>;
+
+/**
+ * Crawlability Validator
+ * Validates that search engines can crawl and index pages
+ */
+interface CrawlabilityResult {
+    crawlable: boolean;
+    indexable: boolean;
+    issues: CrawlabilityIssue[];
+    warnings: CrawlabilityIssue[];
+}
+interface CrawlabilityIssue {
+    type: "noindex" | "nofollow" | "robots_blocked" | "auth_wall" | "redirect_loop" | "404" | "canonical_missing";
+    severity: "error" | "warning";
+    message: string;
+    page: string;
+}
+/**
+ * Validate that a page is crawlable and indexable
+ */
+declare function validateCrawlability(url: string, html?: string): Promise<CrawlabilityResult>;
+/**
+ * Validate robots.txt allows crawling
+ */
+declare function validateRobotsTxt(baseUrl: string, routePath: string): Promise<{
+    allowed: boolean;
+    reason?: string;
+}>;
+/**
+ * Check if site is publicly accessible (no auth wall)
+ */
+declare function validatePublicAccess(url: string): Promise<{
+    accessible: boolean;
+    requiresAuth: boolean;
+}>;
+
+/**
+ * Server-side only exports
+ * These can be safely imported in API routes and server components
+ */
+
+type Result<T> = {
+    success: true;
+    data: T;
+} | {
+    success: false;
+    error: Error;
+};
+
+export { CreateSEORecord, type Result, SEORecord, UpdateSEORecord, createSEORecord, deleteSEORecord, getSEORecords as getAllSEORecords, getSEORecordById, getSEORecordByRoute, getSEORecords, updateSEORecord, validateCrawlability, validatePublicAccess, validateRobotsTxt };
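The crawlability checks compose naturally with the robots.txt validator. A hedged end-to-end sketch against the signatures declared above (entry path and target site hypothetical):

```ts
// Sketch: import path assumed; baseUrl/routePath values are hypothetical.
import { validateCrawlability, validateRobotsTxt } from "@seo-console/package/server";

async function auditRoute(baseUrl: string, routePath: string): Promise<void> {
  // 1. Confirm robots.txt does not block the route.
  const robots = await validateRobotsTxt(baseUrl, routePath);
  if (!robots.allowed) {
    console.warn(`robots.txt blocks ${routePath}: ${robots.reason ?? "no reason given"}`);
  }

  // 2. Fetch the page and check for noindex, auth walls, redirect loops, etc.
  const report = await validateCrawlability(new URL(routePath, baseUrl).toString());
  for (const issue of report.issues) {
    console.error(`[${issue.severity}] ${issue.type} on ${issue.page}: ${issue.message}`);
  }
  console.log(report.crawlable && report.indexable ? "OK" : "Needs attention");
}

auditRoute("https://example.com", "/pricing").catch(console.error);
```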