@fractary/codex 0.7.0 → 0.8.0
This diff shows the contents of the two package versions as published to their public registry and is provided for informational purposes only.
- package/dist/index.cjs +209 -3
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +69 -1
- package/dist/index.d.ts +69 -1
- package/dist/index.js +210 -4
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.d.cts
CHANGED
@@ -424,6 +424,43 @@ declare const CodexConfigSchema: z.ZodObject<{
         default_to_codex?: string[] | undefined;
         default_from_codex?: string[] | undefined;
     }>>;
+    archive: z.ZodOptional<z.ZodObject<{
+        projects: z.ZodRecord<z.ZodString, z.ZodObject<{
+            enabled: z.ZodBoolean;
+            handler: z.ZodEnum<["s3", "r2", "gcs", "local"]>;
+            bucket: z.ZodOptional<z.ZodString>;
+            prefix: z.ZodOptional<z.ZodString>;
+            patterns: z.ZodOptional<z.ZodArray<z.ZodString, "many">>;
+        }, "strip", z.ZodTypeAny, {
+            enabled: boolean;
+            handler: "local" | "s3" | "r2" | "gcs";
+            patterns?: string[] | undefined;
+            bucket?: string | undefined;
+            prefix?: string | undefined;
+        }, {
+            enabled: boolean;
+            handler: "local" | "s3" | "r2" | "gcs";
+            patterns?: string[] | undefined;
+            bucket?: string | undefined;
+            prefix?: string | undefined;
+        }>>;
+    }, "strip", z.ZodTypeAny, {
+        projects: Record<string, {
+            enabled: boolean;
+            handler: "local" | "s3" | "r2" | "gcs";
+            patterns?: string[] | undefined;
+            bucket?: string | undefined;
+            prefix?: string | undefined;
+        }>;
+    }, {
+        projects: Record<string, {
+            enabled: boolean;
+            handler: "local" | "s3" | "r2" | "gcs";
+            patterns?: string[] | undefined;
+            bucket?: string | undefined;
+            prefix?: string | undefined;
+        }>;
+    }>>;
 }, "strict", z.ZodTypeAny, {
     organizationSlug: string;
     directories?: {
@@ -449,6 +486,15 @@ declare const CodexConfigSchema: z.ZodObject<{
         default_to_codex?: string[] | undefined;
        default_from_codex?: string[] | undefined;
     } | undefined;
+    archive?: {
+        projects: Record<string, {
+            enabled: boolean;
+            handler: "local" | "s3" | "r2" | "gcs";
+            patterns?: string[] | undefined;
+            bucket?: string | undefined;
+            prefix?: string | undefined;
+        }>;
+    } | undefined;
 }, {
     organizationSlug: string;
     directories?: {
@@ -474,6 +520,15 @@ declare const CodexConfigSchema: z.ZodObject<{
         default_to_codex?: string[] | undefined;
         default_from_codex?: string[] | undefined;
     } | undefined;
+    archive?: {
+        projects: Record<string, {
+            enabled: boolean;
+            handler: "local" | "s3" | "r2" | "gcs";
+            patterns?: string[] | undefined;
+            bucket?: string | undefined;
+            prefix?: string | undefined;
+        }>;
+    } | undefined;
 }>;
 type CodexConfig = z.infer<typeof CodexConfigSchema>;
 
@@ -549,7 +604,7 @@ interface CustomSyncDestination {
 declare function parseCustomDestination(value: string): CustomSyncDestination;
 declare function getCustomSyncDestinations(metadata: Metadata): CustomSyncDestination[];
 
-type StorageProviderType = 'local' | 'github' | 'http' | 's3' | 'r2' | 'gcs' | 'drive';
+type StorageProviderType = 'local' | 'github' | 'http' | 's3' | 's3-archive' | 'r2' | 'gcs' | 'drive';
 interface FetchResult {
     content: Buffer;
     contentType: string;
@@ -653,10 +708,23 @@ declare class HttpStorage implements StorageProvider {
 }
 declare function createHttpStorage(options?: HttpStorageOptions): HttpStorage;
 
+interface ArchiveProjectConfig {
+    enabled: boolean;
+    handler: 's3' | 'r2' | 'gcs' | 'local';
+    bucket?: string;
+    prefix?: string;
+    patterns?: string[];
+}
+interface S3ArchiveStorageOptions {
+    projects?: Record<string, ArchiveProjectConfig>;
+    fractaryCli?: string;
+}
+
 interface StorageManagerConfig {
     local?: LocalStorageOptions;
     github?: GitHubStorageOptions;
     http?: HttpStorageOptions;
+    s3Archive?: S3ArchiveStorageOptions;
     priority?: StorageProviderType[];
     enableCaching?: boolean;
 }
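
The declarations above add an optional s3Archive slot to StorageManagerConfig and an 's3-archive' member to StorageProviderType. Below is a minimal sketch of how a consumer might populate it, assuming StorageManager and StorageManagerConfig are exported from @fractary/codex; the org/project key, bucket name, and patterns are hypothetical.

import { StorageManager, type StorageManagerConfig } from '@fractary/codex'; // assumed exports

const config: StorageManagerConfig = {
  s3Archive: {
    projects: {
      // Keyed by "org/project"; only current-project references with enabled: true
      // are handled by the s3-archive provider (see dist/index.js below).
      'acme/widgets': {
        enabled: true,
        handler: 's3',                      // 's3' | 'r2' | 'gcs' | 'local'
        bucket: 'acme-codex-archive',       // hypothetical; passed to the CLI as --bucket
        prefix: 'archive/',                 // optional; 'archive/' is the default at fetch time
        patterns: ['specs/**', 'docs/*.md'] // optional; limits which paths are handled
      }
    },
    fractaryCli: 'fractary'                 // optional; default CLI binary name
  }
  // When s3Archive is set and no priority is given, the manager defaults to
  // ['local', 's3-archive', 'github', 'http'] (see the StorageManager hunk below).
};

const manager = new StorageManager(config);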
package/dist/index.d.ts
CHANGED
Identical hunks to package/dist/index.d.cts above (@@ -424,6 +424,43 @@, @@ -449,6 +486,15 @@, @@ -474,6 +520,15 @@, @@ -549,7 +604,7 @@, and @@ -653,10 +708,23 @@).
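
Both declaration files also add an optional archive block to the inferred CodexConfig type, validated at runtime by the new ArchiveConfigSchema in dist/index.js below. A sketch of a config object that should pass validation, assuming CodexConfigSchema is exported from the package; the organization, project key, and bucket are hypothetical.

import { CodexConfigSchema } from '@fractary/codex'; // assumed export

const parsed = CodexConfigSchema.parse({
  organizationSlug: 'acme',
  archive: {
    projects: {
      'acme/widgets': {
        enabled: true,
        handler: 'r2',          // must be one of 's3' | 'r2' | 'gcs' | 'local'
        bucket: 'codex-archive',
        prefix: 'archive/',     // optional
        patterns: ['specs/**']  // optional
      }
    }
  }
});

Because the schema is declared with .strict(), unrecognized top-level keys are rejected; archive itself stays optional, and getDefaultConfig() now seeds it with an empty projects record.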
package/dist/index.js
CHANGED
@@ -1,9 +1,10 @@
 import micromatch3 from 'micromatch';
 import path3 from 'path';
-import { execSync } from 'child_process';
+import { execFile, execSync } from 'child_process';
 import { z } from 'zod';
 import yaml from 'js-yaml';
 import fs2 from 'fs/promises';
+import { promisify } from 'util';
 
 var __defProp = Object.defineProperty;
 var __getOwnPropNames = Object.getOwnPropertyNames;
@@ -834,6 +835,16 @@ var DirectionalSyncSchema = z.object({
   default_to_codex: z.array(z.string()).optional(),
   default_from_codex: z.array(z.string()).optional()
 });
+var ArchiveProjectConfigSchema = z.object({
+  enabled: z.boolean(),
+  handler: z.enum(["s3", "r2", "gcs", "local"]),
+  bucket: z.string().optional(),
+  prefix: z.string().optional(),
+  patterns: z.array(z.string()).optional()
+});
+var ArchiveConfigSchema = z.object({
+  projects: z.record(ArchiveProjectConfigSchema)
+});
 var CodexConfigSchema = z.object({
   organizationSlug: z.string(),
   directories: z.object({
@@ -843,7 +854,9 @@ var CodexConfigSchema = z.object({
   }).optional(),
   rules: SyncRulesSchema.optional(),
   // Directional sync configuration
-  sync: DirectionalSyncSchema.optional()
+  sync: DirectionalSyncSchema.optional(),
+  // Archive configuration
+  archive: ArchiveConfigSchema.optional()
 }).strict();
 function parseMetadata(content, options = {}) {
   const { strict = true, normalize = true } = options;
@@ -968,11 +981,17 @@ function getDefaultRules() {
     defaultExclude: []
   };
 }
+function getDefaultArchiveConfig() {
+  return {
+    projects: {}
+  };
+}
 function getDefaultConfig(orgSlug) {
   return {
     organizationSlug: orgSlug,
     directories: getDefaultDirectories(orgSlug),
-    rules: getDefaultRules()
+    rules: getDefaultRules(),
+    archive: getDefaultArchiveConfig()
   };
 }
 
@@ -1694,6 +1713,190 @@ var HttpStorage = class {
 function createHttpStorage(options) {
   return new HttpStorage(options);
 }
+var execFileAsync = promisify(execFile);
+async function execFileNoThrow(command, args = [], options) {
+  try {
+    const { stdout, stderr } = await execFileAsync(command, args, {
+      ...options,
+      maxBuffer: options?.maxBuffer || 1024 * 1024 * 10
+      // 10MB default
+    });
+    return {
+      stdout: stdout || "",
+      stderr: stderr || "",
+      exitCode: 0
+    };
+  } catch (error) {
+    const exitCode = typeof error.exitCode === "number" ? error.exitCode : 1;
+    return {
+      stdout: error.stdout || "",
+      stderr: error.stderr || error.message || "",
+      exitCode
+    };
+  }
+}
+
+// src/storage/s3-archive.ts
+var S3ArchiveStorage = class {
+  name = "s3-archive";
+  type = "s3-archive";
+  projects;
+  fractaryCli;
+  constructor(options = {}) {
+    this.projects = options.projects || {};
+    this.fractaryCli = options.fractaryCli || "fractary";
+  }
+  /**
+   * Check if this provider can handle the reference
+   *
+   * S3 Archive provider handles references that:
+   * 1. Are for the current project (same org/project)
+   * 2. Have archive enabled in config
+   * 3. Match configured patterns (if specified)
+   */
+  canHandle(reference) {
+    if (!reference.isCurrentProject) {
+      return false;
+    }
+    const projectKey = `${reference.org}/${reference.project}`;
+    const config = this.projects[projectKey];
+    if (!config || !config.enabled) {
+      return false;
+    }
+    if (config.patterns && config.patterns.length > 0) {
+      return this.matchesPatterns(reference.path, config.patterns);
+    }
+    return true;
+  }
+  /**
+   * Fetch content from S3 archive via fractary-file CLI
+   */
+  async fetch(reference, options) {
+    const opts = mergeFetchOptions(options);
+    const projectKey = `${reference.org}/${reference.project}`;
+    const config = this.projects[projectKey];
+    if (!config) {
+      throw new Error(`No archive config for project: ${projectKey}`);
+    }
+    const archivePath = this.calculateArchivePath(reference, config);
+    try {
+      const result = await execFileNoThrow(
+        this.fractaryCli,
+        [
+          "file",
+          "read",
+          "--remote-path",
+          archivePath,
+          "--handler",
+          config.handler,
+          ...config.bucket ? ["--bucket", config.bucket] : []
+        ],
+        {
+          timeout: opts.timeout
+        }
+      );
+      if (result.exitCode !== 0) {
+        throw new Error(`fractary-file read failed: ${result.stderr}`);
+      }
+      const content = Buffer.from(result.stdout);
+      return {
+        content,
+        contentType: detectContentType(reference.path),
+        size: content.length,
+        source: "s3-archive",
+        metadata: {
+          archivePath,
+          bucket: config.bucket,
+          handler: config.handler
+        }
+      };
+    } catch (error) {
+      const message = error instanceof Error ? error.message : String(error);
+      throw new Error(`Failed to fetch from archive: ${message}`);
+    }
+  }
+  /**
+   * Check if archived file exists
+   *
+   * Note: This currently downloads the file to check existence.
+   * TODO: Optimize by using fractary-file 'stat' or 'head' command when available
+   * to avoid downloading full file for existence checks.
+   */
+  async exists(reference, options) {
+    const projectKey = `${reference.org}/${reference.project}`;
+    const config = this.projects[projectKey];
+    if (!config) {
+      return false;
+    }
+    try {
+      await this.fetch(reference, { ...options, timeout: 5e3 });
+      return true;
+    } catch {
+      return false;
+    }
+  }
+  /**
+   * Calculate archive path from reference
+   *
+   * Pattern: {prefix}/{type}/{org}/{project}/{original-path}
+   *
+   * Examples (with default prefix "archive/"):
+   *   specs/WORK-123.md → archive/specs/org/project/specs/WORK-123.md
+   *   docs/api.md → archive/docs/org/project/docs/api.md
+   *
+   * Examples (with custom prefix "archived-docs/"):
+   *   specs/WORK-123.md → archived-docs/specs/org/project/specs/WORK-123.md
+   */
+  calculateArchivePath(reference, config) {
+    const type = this.detectType(reference.path);
+    const prefix = config.prefix || "archive/";
+    const trimmedPrefix = prefix.trim();
+    if (!trimmedPrefix) {
+      throw new Error("Archive prefix cannot be empty or whitespace-only");
+    }
+    const normalizedPrefix = trimmedPrefix.endsWith("/") ? trimmedPrefix : `${trimmedPrefix}/`;
+    return `${normalizedPrefix}${type}/${reference.org}/${reference.project}/${reference.path}`;
+  }
+  /**
+   * Detect artifact type from path
+   *
+   * Used to organize archives by type
+   */
+  detectType(path6) {
+    if (path6.startsWith("specs/")) return "specs";
+    if (path6.startsWith("docs/")) return "docs";
+    if (path6.includes("/logs/")) return "logs";
+    return "misc";
+  }
+  /**
+   * Check if path matches any of the patterns
+   *
+   * Supports glob-style patterns:
+   *   - specs/** (all files in specs/)
+   *   - *.md (all markdown files)
+   *   - docs/*.md (markdown files in docs/)
+   */
+  matchesPatterns(path6, patterns) {
+    for (const pattern of patterns) {
+      if (this.matchesPattern(path6, pattern)) {
+        return true;
+      }
+    }
+    return false;
+  }
+  /**
+   * Check if path matches a single pattern
+   */
+  matchesPattern(path6, pattern) {
+    const DOUBLE_STAR = "\0DOUBLE_STAR\0";
+    let regexPattern = pattern.replace(/\*\*/g, DOUBLE_STAR);
+    regexPattern = regexPattern.replace(/[.[\](){}+^$|\\]/g, "\\$&");
+    regexPattern = regexPattern.replace(/\*/g, "[^/]*").replace(/\?/g, "[^/]");
+    regexPattern = regexPattern.replace(new RegExp(DOUBLE_STAR, "g"), ".*");
+    const regex = new RegExp(`^${regexPattern}$`);
+    return regex.test(path6);
+  }
+};
 
 // src/storage/manager.ts
 var StorageManager = class {
@@ -1703,7 +1906,10 @@ var StorageManager = class {
     this.providers.set("local", new LocalStorage(config.local));
     this.providers.set("github", new GitHubStorage(config.github));
     this.providers.set("http", new HttpStorage(config.http));
-
+    if (config.s3Archive) {
+      this.providers.set("s3-archive", new S3ArchiveStorage(config.s3Archive));
+    }
+    this.priority = config.priority || (config.s3Archive ? ["local", "s3-archive", "github", "http"] : ["local", "github", "http"]);
   }
   /**
    * Register a custom storage provider
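
The S3ArchiveStorage provider added above resolves archive keys as {prefix}/{type}/{org}/{project}/{original-path} and gates handling on simple glob patterns. A standalone sketch of that glob matching, adapted from matchesPattern() with a few illustrative checks; the paths are hypothetical.

// Mirrors S3ArchiveStorage.matchesPattern(): '*' and '?' stop at '/', while '**' crosses it.
function matchesPattern(path: string, pattern: string): boolean {
  const DOUBLE_STAR = '\0DOUBLE_STAR\0';
  let re = pattern.replace(/\*\*/g, DOUBLE_STAR);         // protect '**' before escaping
  re = re.replace(/[.[\](){}+^$|\\]/g, '\\$&');            // escape regex metacharacters
  re = re.replace(/\*/g, '[^/]*').replace(/\?/g, '[^/]');  // single-segment wildcards
  re = re.replace(new RegExp(DOUBLE_STAR, 'g'), '.*');     // '**' matches across segments
  return new RegExp(`^${re}$`).test(path);
}

console.log(matchesPattern('specs/WORK-123.md', 'specs/**')); // true
console.log(matchesPattern('docs/api.md', 'docs/*.md'));      // true
console.log(matchesPattern('docs/api.md', '*.md'));           // false: '*' does not cross '/'

So a project configured with patterns: ['specs/**'] routes only paths under specs/ to the s3-archive provider; everything else falls through to the remaining providers in the priority list.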