pgflow 0.0.12 → 0.0.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +5 -2
- package/CHANGELOG.md +0 -13
- package/eslint.config.cjs +0 -3
- package/project.json +0 -40
- package/src/commands/install/copy-migrations.ts +0 -101
- package/src/commands/install/index.ts +0 -45
- package/src/commands/install/supabase-path-prompt.ts +0 -28
- package/src/commands/install/update-config-toml.ts +0 -130
- package/src/index.ts +0 -76
- package/src/types.d.ts +0 -1
- package/tsconfig.json +0 -20
- package/tsconfig.lib.json +0 -19
- package/tsconfig.spec.json +0 -33
- package/vite.config.ts +0 -25
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "pgflow",
-  "version": "0.0.12",
+  "version": "0.0.13",
   "type": "module",
   "main": "./dist/index.js",
   "typings": "./dist/index.d.ts",
@@ -30,5 +30,8 @@
   "publishConfig": {
     "access": "public",
     "directory": "."
-  }
+  },
+  "files": [
+    "dist"
+  ]
 }
package/CHANGELOG.md
DELETED
package/eslint.config.cjs
DELETED
package/project.json
DELETED
@@ -1,40 +0,0 @@
-{
-  "name": "cli",
-  "$schema": "../../node_modules/nx/schemas/project-schema.json",
-  "sourceRoot": "{projectRoot}/src",
-  "projectType": "library",
-  "tags": [],
-  "// targets": "to see all targets run: nx show project cli --web",
-  "targets": {
-    "build": {
-      "executor": "@nx/js:tsc",
-      "options": {
-        "main": "{projectRoot}/src/index.ts",
-        "tsConfig": "{projectRoot}/tsconfig.lib.json",
-        "outputPath": "{projectRoot}/dist",
-        "assets": [
-          {
-            "input": "pkgs/core/supabase/migrations",
-            "glob": "*.sql",
-            "output": "migrations"
-          }
-        ]
-      }
-    },
-    "serve": {
-      "executor": "nx:run-commands",
-      "options": {
-        "command": "tsx src/index.ts",
-        "cwd": "{projectRoot}"
-      }
-    },
-    "test": {
-      "executor": "@nx/vite:test",
-      "outputs": ["{workspaceRoot}/coverage/{projectRoot}"],
-      "options": {
-        "passWithNoTests": true,
-        "reportsDirectory": "{workspaceRoot}/coverage/{projectRoot}"
-      }
-    }
-  }
-}
package/src/commands/install/copy-migrations.ts
DELETED
@@ -1,101 +0,0 @@
-import fs from 'fs';
-import path from 'path';
-import { fileURLToPath } from 'url';
-import { log, confirm, note } from '@clack/prompts';
-import chalk from 'chalk';
-
-// Get the directory name in ES modules
-const __filename = fileURLToPath(import.meta.url);
-const __dirname = path.dirname(__filename);
-
-// Path to the pgflow migrations
-const sourcePath = path.resolve(__dirname, '../../../migrations');
-
-export async function copyMigrations({
-  supabasePath,
-}: {
-  supabasePath: string;
-}): Promise<boolean> {
-  const migrationsPath = path.join(supabasePath, 'migrations');
-
-  if (!fs.existsSync(migrationsPath)) {
-    fs.mkdirSync(migrationsPath);
-  }
-
-  // Check if pgflow migrations directory exists
-  if (!fs.existsSync(sourcePath)) {
-    log.error(`Source migrations directory not found at ${sourcePath}`);
-    log.info(
-      "This might happen if you're running from source instead of the built package."
-    );
-    log.info('Try building the package first with: nx build cli');
-    return false;
-  }
-
-  const files = fs.readdirSync(sourcePath);
-  const filesToCopy: string[] = [];
-  const skippedFiles: string[] = [];
-
-  // Determine which files need to be copied
-  for (const file of files) {
-    const destination = path.join(migrationsPath, file);
-
-    if (fs.existsSync(destination)) {
-      skippedFiles.push(file);
-    } else {
-      filesToCopy.push(file);
-    }
-  }
-
-  // If no files to copy, show message and return false (no changes made)
-  if (filesToCopy.length === 0) {
-    log.info('No new migrations to copy - all migrations are already in place');
-    return false;
-  }
-
-  // Prepare summary message with colored output
-  const summaryParts = [];
-
-  if (filesToCopy.length > 0) {
-    summaryParts.push(`${chalk.green('Files to be copied:')}
-${filesToCopy.map((file) => `${chalk.green('+')} ${file}`).join('\n')}`);
-  }
-
-  if (skippedFiles.length > 0) {
-    summaryParts.push(`${chalk.yellow('Files to be skipped (already exist):')}
-${skippedFiles.map((file) => `${chalk.yellow('=')} ${file}`).join('\n')}`);
-  }
-
-  // Show summary and ask for confirmation
-  note(summaryParts.join('\n\n'), 'Migration Summary');
-
-  const shouldContinue = await confirm({
-    message: `Do you want to proceed with copying ${
-      filesToCopy.length
-    } migration file${filesToCopy.length !== 1 ? 's' : ''}?`,
-  });
-
-  if (!shouldContinue) {
-    log.info('Migration copy cancelled');
-    return false;
-  }
-
-  log.info(`Copying migrations`);
-
-  // Copy the files
-  for (const file of filesToCopy) {
-    const source = path.join(sourcePath, file);
-    const destination = path.join(migrationsPath, file);
-
-    fs.copyFileSync(source, destination);
-    log.step(`Copied ${file}`);
-  }
-
-  log.success(
-    `Successfully copied ${filesToCopy.length} migration file${
-      filesToCopy.length !== 1 ? 's' : ''
-    }`
-  );
-
-  return true; // Return true to indicate migrations were copied
-}
package/src/commands/install/index.ts
DELETED
@@ -1,45 +0,0 @@
-import { type Command } from 'commander';
-import { intro, isCancel, log } from '@clack/prompts';
-import { copyMigrations } from './copy-migrations.js';
-import { updateConfigToml } from './update-config-toml.js';
-import { supabasePathPrompt } from './supabase-path-prompt.js';
-
-export default (program: Command) => {
-  program
-    .command('install')
-    .description('Copies migrations and sets config.toml values')
-    .action(async () => {
-      intro('pgflow - Postgres-native workflows for Supabase');
-
-      const supabasePath = await supabasePathPrompt();
-
-      if (isCancel(supabasePath)) {
-        log.error('Aborting installation');
-        return;
-      }
-
-      const migrationsCopied = await copyMigrations({ supabasePath });
-      const configUpdated = await updateConfigToml({ supabasePath });
-
-      if (migrationsCopied || configUpdated) {
-        log.success('pgflow installation is completed');
-      }
-
-      if (!migrationsCopied && !configUpdated) {
-        log.success(
-          'No changes were made - pgflow is already properly configured.'
-        );
-      }
-
-      // Show specific reminders based on what was actually done
-      if (configUpdated) {
-        log.warn(
-          'Remember to restart Supabase for the configuration changes to take effect!'
-        );
-      }
-
-      if (migrationsCopied) {
-        log.warn('Remember to apply the migrations!');
-      }
-    });
-};
package/src/commands/install/supabase-path-prompt.ts
DELETED
@@ -1,28 +0,0 @@
-import fs from 'fs';
-import path from 'path';
-import { text } from '@clack/prompts';
-
-export async function supabasePathPrompt() {
-  return await text({
-    message: 'Enter the path to your supabase/ directory',
-    placeholder: 'supabase/',
-    validate,
-  });
-}
-
-function validate(inputPath: string) {
-  const pathsToTest = [
-    [inputPath, 'is not a valid path'],
-    [path.join(inputPath, 'config.toml'), 'does not contain config.toml'],
-  ];
-
-  // if any of the pathsToTest fail, return the error message
-  for (const [testPath, errorMessage] of pathsToTest) {
-    if (!fs.existsSync(testPath)) {
-      return `${testPath} ${errorMessage}`;
-    }
-  }
-
-  // otherwise, return undefined
-  return undefined;
-}
package/src/commands/install/update-config-toml.ts
DELETED
@@ -1,130 +0,0 @@
-import fs from 'fs';
-import path from 'path';
-import { log, confirm, note } from '@clack/prompts';
-import * as TOML from 'toml-patch';
-import chalk from 'chalk';
-
-/**
- * Type definition for the parsed config.toml structure
- */
-type SupabaseConfig = {
-  db?: {
-    pooler?: {
-      enabled?: boolean;
-      pool_mode?: string;
-    };
-  };
-  edge_runtime?: {
-    policy?: string;
-  };
-};
-
-/**
- * Updates the config.toml file with necessary configurations for EdgeWorker
- * while preserving comments and formatting
- *
- * Makes the following changes:
- * 1. Enables the connection pooler
- * 2. Ensures pool_mode is set to "transaction"
- * 3. Changes edge_runtime policy from "oneshot" to "per_worker"
- * 4. Creates a backup of the original config.toml file before making changes
- *
- * @param options.supabasePath - Path to the supabase directory
- * @returns Promise<boolean> - True if changes were made, false otherwise
- */
-export async function updateConfigToml({
-  supabasePath,
-}: {
-  supabasePath: string;
-}): Promise<boolean> {
-  const configPath = path.join(supabasePath, 'config.toml');
-  const backupPath = `${configPath}.backup`;
-
-  try {
-    if (!fs.existsSync(configPath)) {
-      throw new Error(`config.toml not found at ${configPath}`);
-    }
-
-    const configContent = fs.readFileSync(configPath, 'utf8');
-    const config = TOML.parse(configContent) as SupabaseConfig;
-
-    const currentSettings = {
-      poolerEnabled: config.db?.pooler?.enabled ?? false,
-      poolMode: config.db?.pooler?.pool_mode ?? 'none',
-      edgeRuntimePolicy: config.edge_runtime?.policy ?? 'oneshot',
-    };
-
-    const needsChanges =
-      currentSettings.poolerEnabled !== true ||
-      currentSettings.poolMode !== 'transaction' ||
-      currentSettings.edgeRuntimePolicy !== 'per_worker';
-
-    if (!needsChanges) {
-      log.info(
-        `No changes needed in config.toml - all required settings are already configured`
-      );
-      return false;
-    }
-
-    const changes = [];
-
-    if (currentSettings.poolerEnabled !== true) {
-      changes.push(`[db.pooler]
-${chalk.red(`- enabled = ${currentSettings.poolerEnabled}`)}
-${chalk.green('+ enabled = true')}`);
-    }
-
-    if (currentSettings.poolMode !== 'transaction') {
-      changes.push(`[db.pooler]
-${chalk.red(`- pool_mode = "${currentSettings.poolMode}"`)}
-${chalk.green('+ pool_mode = "transaction"')}`);
-    }
-
-    if (currentSettings.edgeRuntimePolicy !== 'per_worker') {
-      changes.push(`[edge_runtime]
-${chalk.red(`- policy = "${currentSettings.edgeRuntimePolicy}"`)}
-${chalk.green('+ policy = "per_worker"')}`);
-    }
-
-    note(changes.join('\n\n'), 'Config Changes');
-
-    const shouldContinue = await confirm({
-      message: `Do you want to proceed with these configuration changes? A backup will be created at ${backupPath}`,
-    });
-
-    if (!shouldContinue) {
-      log.info('Configuration update cancelled');
-      return false;
-    }
-
-    fs.copyFileSync(configPath, backupPath);
-    log.info(`Created backup at ${backupPath}`);
-    log.info(`Updating config.toml`);
-
-    const updatedConfig = { ...config };
-
-    // Ensure required objects exist and set values
-    if (!updatedConfig.db) updatedConfig.db = {};
-    if (!updatedConfig.db.pooler) updatedConfig.db.pooler = {};
-    if (!updatedConfig.edge_runtime) updatedConfig.edge_runtime = {};
-
-    updatedConfig.db.pooler.enabled = true;
-    updatedConfig.db.pooler.pool_mode = 'transaction';
-    updatedConfig.edge_runtime.policy = 'per_worker';
-
-    const updatedContent = TOML.patch(configContent, updatedConfig);
-    fs.writeFileSync(configPath, updatedContent);

-    log.success(
-      `Successfully updated ${configPath} (backup created at ${backupPath})`
-    );
-    return true;
-  } catch (error) {
-    log.error(
-      `Failed to update ${configPath}: ${
-        error instanceof Error ? error.message : String(error)
-      }`
-    );
-    throw error;
-  }
-}
package/src/index.ts
DELETED
@@ -1,76 +0,0 @@
-import { Command } from 'commander';
-import { fileURLToPath } from 'url';
-import { readFileSync } from 'fs';
-import { dirname, join } from 'path';
-import installCommand from './commands/install/index.js';
-
-// Create a function to handle errors
-const errorHandler = (error: unknown) => {
-  console.error(
-    'Error:',
-    error instanceof Error ? error.message : String(error)
-  );
-  process.exit(1);
-};
-
-// Set up process-wide unhandled rejection handler
-process.on('unhandledRejection', errorHandler);
-
-// Function to get version from package.json
-function getVersion(): string {
-  const __filename = fileURLToPath(import.meta.url);
-  const __dirname = dirname(__filename);
-  const packageJsonPath = join(__dirname, '..', 'package.json');
-
-  try {
-    const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf8'));
-    return packageJson.version || 'unknown';
-  } catch (error) {
-    // Log error but don't display it to the user when showing version
-    console.error('Error reading package.json:', error);
-    return 'unknown';
-  }
-}
-
-const program = new Command();
-program
-  .name('npx pgflow')
-  .description('Command line interface to help you work with pgflow')
-  .version(getVersion())
-  .exitOverride((err) => {
-    // Don't treat version display as an error
-    if (err.code === 'commander.version') {
-      process.exit(0);
-    }
-    throw err;
-  });
-
-installCommand(program);
-
-// Use a promise-aware approach to parse arguments
-async function main() {
-  try {
-    await program.parseAsync(process.argv);
-    // If we get here with no command specified, it's not an error
-    process.exitCode = 0;
-  } catch (err) {
-    // Commander throws a CommanderError when help or version is displayed
-    // We want to exit with code 0 in these cases
-    if (
-      err &&
-      typeof err === 'object' &&
-      'code' in err &&
-      (err.code === 'commander.helpDisplayed' ||
-        err.code === 'commander.help' ||
-        err.code === 'commander.version')
-    ) {
-      process.exitCode = 0;
-    } else {
-      // For other errors, use our error handler
-      errorHandler(err);
-    }
-  }
-}
-
-// Execute and handle any errors
-main();
package/src/types.d.ts
DELETED
@@ -1 +0,0 @@
-/// <reference types="@commander-js/extra-typings" />
package/tsconfig.json
DELETED
@@ -1,20 +0,0 @@
-{
-  "extends": "../../tsconfig.base.json",
-  "files": [],
-  "include": [],
-  "compilerOptions": {
-    "baseUrl": ".",
-    "rootDir": "src",
-    "outDir": "dist",
-    "tsBuildInfoFile": "dist/tsconfig.lib.tsbuildinfo",
-    "types": ["node"]
-  },
-  "references": [
-    {
-      "path": "./tsconfig.lib.json"
-    },
-    {
-      "path": "./tsconfig.spec.json"
-    }
-  ]
-}
package/tsconfig.lib.json
DELETED
@@ -1,19 +0,0 @@
-{
-  "extends": "./tsconfig.json",
-  "include": ["src/**/*.ts"],
-  "references": [],
-  "exclude": [
-    "vite.config.ts",
-    "vite.config.mts",
-    "vitest.config.ts",
-    "vitest.config.mts",
-    "src/**/*.test.ts",
-    "src/**/*.spec.ts",
-    "src/**/*.test.tsx",
-    "src/**/*.spec.tsx",
-    "src/**/*.test.js",
-    "src/**/*.spec.js",
-    "src/**/*.test.jsx",
-    "src/**/*.spec.jsx"
-  ]
-}
package/tsconfig.spec.json
DELETED
@@ -1,33 +0,0 @@
-{
-  "extends": "../../tsconfig.base.json",
-  "compilerOptions": {
-    "outDir": "./out-tsc/vitest",
-    "types": [
-      "vitest/globals",
-      "vitest/importMeta",
-      "vite/client",
-      "node",
-      "vitest"
-    ]
-  },
-  "include": [
-    "vite.config.ts",
-    "vite.config.mts",
-    "vitest.config.ts",
-    "vitest.config.mts",
-    "src/**/*.test.ts",
-    "src/**/*.spec.ts",
-    "src/**/*.test.tsx",
-    "src/**/*.spec.tsx",
-    "src/**/*.test.js",
-    "src/**/*.spec.js",
-    "src/**/*.test.jsx",
-    "src/**/*.spec.jsx",
-    "src/**/*.d.ts"
-  ],
-  "references": [
-    {
-      "path": "./tsconfig.lib.json"
-    }
-  ]
-}
package/vite.config.ts
DELETED
@@ -1,25 +0,0 @@
-/// <reference types='vitest' />
-import { defineConfig } from 'vite';
-
-export default defineConfig({
-  root: __dirname,
-  cacheDir: '../../node_modules/.vite/pkgs/cli',
-  plugins: [],
-  // Uncomment this if you are using workers.
-  // worker: {
-  //  plugins: [ nxViteTsPaths() ],
-  // },
-  test: {
-    watch: false,
-    globals: true,
-    environment: 'jsdom',
-    include: [
-      '__tests__/**/*.{test,spec,test-d,spec-d}.{js,mjs,cjs,ts,mts,cts,jsx,tsx}',
-    ],
-    reporters: ['default'],
-    coverage: {
-      reportsDirectory: './test-output/vitest/coverage',
-      provider: 'v8',
-    },
-  },
-});