@salte-common/terraflow 1.0.0-alpha.4 → 1.0.0-alpha.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,149 @@
1
+ ---
2
+ description: Terraflow project rules - MANDATORY AI code tracking and development guidelines
3
+ globs: ["*"]
4
+ alwaysApply: true
5
+ ---
6
+
7
+ # ⚠️ STOP: READ THIS FIRST
8
+
9
+ **BEFORE creating or modifying ANY file, you MUST:**
10
+ 1. Read the "AI Code Tracking" section below
11
+ 2. Understand that updating `.ai-metadata.json` is MANDATORY, not optional
12
+ 3. Update metadata IMMEDIATELY after each file, before proceeding
13
+
14
+ **If you create/modify a file without updating `.ai-metadata.json`, you have FAILED.**
15
+
16
+ # Terraflow Development Guidelines
17
+
18
+ This project follows the comprehensive specification in `SPECIFICATION.md`.
19
+
20
+ Additionally, follow these ongoing guidelines:
21
+
22
+ - Follow https://github.com/salte-common/standards recursively
23
+ - **MANDATORY: Update `.ai-metadata.json` immediately after each file creation/modification**
24
+ - Use TypeScript strict mode
25
+ - Maintain >80% test coverage
26
+ - Document all public APIs with JSDoc
27
+ - Use conventional commits
28
+ - Keep functions small and focused
29
+ - Prefer composition over inheritance
30
+
31
+ # AI Code Tracking
32
+
33
+ When you generate code, update the `.ai-metadata.json` file to track AI contributions.
34
+
35
+ ## Metadata File Structure
36
+
37
+ ```json
38
+ {
39
+ "files": {
40
+ "path/to/file.ts": {
41
+ "lines_total": 100,
42
+ "lines_ai_generated": 75,
43
+ "ai_percentage": 75.0,
44
+ "last_updated": "2024-12-04T15:30:00Z",
45
+ "tool": "cursor|copilot|jetbrains-ai"
46
+ }
47
+ },
48
+ "metadata_version": "1.0"
49
+ }
50
+ ```
51
+
52
+ **Field definitions:**
53
+ - `lines_total`: Total lines currently in the file (AI-generated + human-created)
54
+ - `lines_ai_generated`: Only lines created by AI (cumulative for files modified multiple times)
55
+ - `ai_percentage`: `(lines_ai_generated / lines_total) × 100`
56
+
57
+ ## Tracking Requirements
58
+
59
+ 1. **CRITICAL: Update immediately** - Update `.ai-metadata.json` IMMEDIATELY after creating or modifying each file, not at the end of the task. This ensures accurate line counts that only reflect AI-generated code, not user modifications.
60
+
61
+ 2. **Track every AI contribution** - Keep track of lines you generate or modify
62
+ 3. **Use relative paths** - Paths from repository root with forward slashes
63
+ 4. **Calculate percentages** - `(lines_ai_generated / lines_total) × 100`
64
+ 5. **ISO 8601 timestamps** - Format: `2024-12-04T15:30:00Z`
65
+ 6. **Preserve existing data** - Read the file first, merge your changes
66
+ 7. **Create if missing** - Initialize with empty files object
67
+
68
+ ## Required Workflow Pattern
69
+
70
+ **ALWAYS follow this pattern for each file you create or modify:**
71
+
72
+ 1. **Before creating/modifying**: Track the starting state:
73
+ - If creating a new file: start with `lines_ai_generated: 0`
74
+ - If modifying existing file: read `.ai-metadata.json` first to get current `lines_ai_generated` value
75
+
76
+ 2. **While generating code**: Count lines AS YOU ADD THEM:
77
+ - Track each line of code, comments, blank lines, etc. that you generate
78
+ - Keep a running count as you write the file
79
+ - Count everything you add, including:
80
+ - Import statements
81
+ - Type definitions
82
+ - Functions and methods
83
+ - Comments and JSDoc
84
+ - Blank lines for formatting
85
+
86
+ 3. **After creating/modifying**: Update metadata immediately:
87
+ - Read `.ai-metadata.json` (or create if missing)
88
+ - For NEW files:
89
+ - `lines_total`: Read the file to get total lines (all AI-generated in this case)
90
+ - `lines_ai_generated`: All lines you added (same as `lines_total` for new files)
91
+ - For MODIFIED files:
92
+ - `lines_total`: Read the current file to get total lines (includes AI-generated + any human-created lines)
93
+ - `lines_ai_generated`: Previous `lines_ai_generated` value from metadata + lines you just added (only AI-generated lines)
94
+ - `ai_percentage`: Calculate `(lines_ai_generated / lines_total) × 100`
95
+ - `last_updated`: Current UTC timestamp
96
+ - `tool`: "cursor"
97
+ - Write the updated `.ai-metadata.json` back
98
+
99
+ **Important distinctions:**
100
+ - `lines_total`: Always the current total lines in the file (AI + human created)
101
+ - `lines_ai_generated`: Only lines created by AI (cumulative for modified files)
102
+
103
+ 4. Then proceed to the next file
104
+
105
+ **DO NOT:**
106
+ - Wait until the end to update metadata
107
+ - Batch update multiple files at once
108
+ - Include human-created lines in `lines_ai_generated` - only count AI-generated lines
109
+ - Guess `lines_total` - always read the file to get the accurate total count
110
+
111
+ ## Example Workflow
112
+
113
+ **Example 1: Creating a new file**
114
+
115
+ User asks: "Create a feature following our standards"
116
+
117
+ 1. Reference salte-common/standards for service patterns
118
+ 2. **Create `src/features/my-feature.ts`**:
119
+ - Write the file content (tracking 50 lines as you generate them)
120
+ - Read the file: total lines = 50
121
+ - **IMMEDIATELY** read `.ai-metadata.json`
122
+ - Add entry: `{"src/features/my-feature.ts": {"lines_total": 50, "lines_ai_generated": 50, "ai_percentage": 100.0, ...}}`
123
+ - **IMMEDIATELY** write updated `.ai-metadata.json` back
124
+
125
+ **Example 2: Modifying an existing file**
126
+
127
+ User asks: "Add error handling to the validator"
128
+
129
+ 1. Read `.ai-metadata.json` - find existing entry shows `lines_ai_generated: 29`
130
+ 2. Read current `src/core/validator.ts` - file has 29 total lines
131
+ 3. Add error handling code (tracking 15 new lines as you generate them)
132
+ 4. Read the file again: now has 44 total lines
133
+ 5. **IMMEDIATELY** read `.ai-metadata.json`
134
+ 6. Update entry: `{"src/core/validator.ts": {"lines_total": 44, "lines_ai_generated": 44, "ai_percentage": 100.0, ...}}`
135
+ - `lines_total`: 44 (read from file - total lines currently in file)
136
+ - `lines_ai_generated`: 29 (previous) + 15 (new AI lines) = 44
137
+
138
+ **If user had added 10 lines between sessions:**
139
+ - File now has 54 total lines (29 original + 10 user-added + 15 new AI)
140
+ - `lines_total`: 54 (read from file)
141
+ - `lines_ai_generated`: 29 (previous) + 15 (new AI lines) = 44 (does NOT include user's 10 lines)
142
+ - `ai_percentage`: (44 / 54) × 100 = 81.5%
143
+ 7. **IMMEDIATELY** write updated `.ai-metadata.json` back
144
+
145
+ **Remember:**
146
+ - `lines_total` = current total lines in file (read the file to get this)
147
+ - `lines_ai_generated` = only lines created by AI (track as you generate, add to previous value)
148
+ - Track lines AS YOU GENERATE THEM, then update metadata immediately
149
+ - Update metadata immediately after EACH file, not after all files are done
package/README.md CHANGED
@@ -9,15 +9,17 @@ An opinionated Node.js CLI wrapper for Terraform that provides intelligent works
9
9
 
10
10
  ## About
11
11
 
12
- Terraflow was created by Dave Woodward, VP of Cloud & Enterprise Architecture with over 20 years of experience in financial services technology. Coming from a full-stack development background before evolving into cloud architecture, Dave brings a hands-on, practical approach to enterprise architecture that emphasizes solving real operational problems over theoretical ideals.
12
+ Terraflow was created by Dave Woodward, a technologist with over 20 years of experience building and operating infrastructure in regulated enterprise environments. Coming from a full-stack development background before moving deeper into cloud architecture, Dave brings a hands-on, pragmatic approach that emphasizes solving real operational problems over theoretical ideals.
13
13
 
14
14
  ### The Problem
15
15
 
16
- After half a decade of managing Terraform in enterprise environments, a pattern emerged: every infrastructure repository needed the same orchestration logic. Before Terraflow, this meant writing bash scripts in every repo to handle the multi-step process of initializing Terraform, selecting the appropriate workspace, assuming cloud provider roles, retrieving secrets, and finally applying changes. These scripts were essential for testing Terraform locally before CI/CD pipelines, but they became repetitive maintenance overhead. Environment-specific and branch-specific configuration—some sensitive, some not—required careful orchestration that varied slightly across projects. Additionally, while consistent project structure proved crucial for helping developers understand resource dependencies, manually scaffolding this structure was time-consuming and error-prone.
16
+ After half a decade of managing Terraform in enterprise environments, a clear pattern emerged: every infrastructure repository needed the same orchestration logic. Before Terraflow, this meant writing bash scripts in every repo to handle the multi-step process of initializing Terraform, selecting the appropriate workspace, assuming cloud provider roles, retrieving secrets, and finally applying changes.
17
+
18
+ These scripts were essential for testing Terraform locally before CI/CD pipelines, but they quickly became repetitive maintenance overhead. Environment-specific and branch-specific configuration—some sensitive, some not—required careful orchestration that varied slightly across projects. Additionally, while consistent project structure proved crucial for helping developers understand resource dependencies, manually scaffolding this structure was time-consuming and error-prone.
17
19
 
18
20
  ### The Developer Perspective
19
21
 
20
- Coming from a development background rather than pure infrastructure, the importance of preview branches became immediately apparent. Developers need to see their infrastructure changes in isolation before those changes overwrite shared development environments. This preview branch support is often overlooked by infrastructure-focused practitioners, but it's critical for modern development workflows where multiple feature branches need to coexist temporarily.
22
+ Coming from a development background rather than pure infrastructure, the importance of preview branches became immediately apparent. Developers need to see their infrastructure changes in isolation before those changes overwrite shared development environments. This preview branch support is often overlooked by infrastructure-focused practitioners, but it is critical for modern development workflows where multiple feature branches need to coexist temporarily.
21
23
 
22
24
  ### Terraflow's Approach
23
25
 
@@ -87,11 +89,16 @@ terraflow new my-infrastructure --provider gcp --language python
87
89
  ```
88
90
 
89
91
  This creates a complete project structure with:
92
+ - **Cursor-ready** — Optimized for [Cursor](https://cursor.com/) to get up and running quickly with AI-assisted development
90
93
  - Terraform configuration files for your cloud provider
91
94
  - Application code templates in your chosen language
92
95
  - Pre-configured `.tfwconfig.yml` with backend settings
93
96
  - Example `.env.example` file
94
97
  - Complete `.gitignore` and `README.md`
98
+ - `.ai-metadata.json` — AI code tracking (initialized with scaffold stats)
99
+ - `.cursor/rules/terraform.mdc` — Cursor instructions for Terraflow (delete if not using Cursor)
100
+ - `.cursor/rules/ai-metadata.mdc` — Cursor instructions for `.ai-metadata.json` maintenance
101
+ - `.cursor/rules/development-standards.mdc` — Cursor instructions (language, platform, salte-common/standards)
95
102
 
96
103
  See [Project Scaffolding Documentation](docs/scaffolding.md) for complete details.
97
104
 
@@ -108,6 +115,9 @@ This creates a `.tfwconfig.yml` file with examples for all backends, secrets pro
108
115
  2. **Configure your backend and secrets** in `.tfwconfig.yml`:
109
116
 
110
117
  ```yaml
118
+ # Required: Cloud provider (aws, gcp, or azure)
119
+ provider: aws
120
+
111
121
  backend:
112
122
  type: s3
113
123
  config:
@@ -179,6 +179,11 @@ class ConfigCommand {
179
179
  # This file defines your Terraflow configuration
180
180
  # See https://github.com/salte-common/terraflow/blob/main/docs/configuration.md for full documentation
181
181
 
182
+ # Cloud Provider (required)
183
+ # ========================
184
+ # Specify which cloud provider you are using: aws | gcp | azure
185
+ provider: aws # Change to 'gcp' or 'azure' as needed
186
+
182
187
  # Global Settings
183
188
  # ===============
184
189
 
@@ -315,9 +320,6 @@ backend:
315
320
  # Validation rules for Terraform operations
316
321
 
317
322
  # validations:
318
- # # Require git commit before apply/destroy
319
- # require_git_commit: true
320
- #
321
323
  # # List of allowed workspace names (empty = allow all)
322
324
  # allowed_workspaces:
323
325
  # - development
@@ -101,6 +101,7 @@ class NewCommand {
101
101
  await (0, scaffolding_1.generateTerraformFiles)(projectDir, provider, finalProjectName);
102
102
  await (0, scaffolding_1.generateApplicationFiles)(projectDir, language, finalProjectName);
103
103
  await (0, scaffolding_1.generateConfigFiles)(projectDir, provider, language, finalProjectName);
104
+ await (0, scaffolding_1.generateAiMetadata)(projectDir, language);
104
105
  logger_1.Logger.info(`✅ Project "${projectName || 'current directory'}" created successfully!`);
105
106
  logger_1.Logger.info('');
106
107
  logger_1.Logger.info('Next steps:');
@@ -13,6 +13,7 @@ const fs_1 = __importDefault(require("fs"));
13
13
  const path_1 = __importDefault(require("path"));
14
14
  const js_yaml_1 = __importDefault(require("js-yaml"));
15
15
  const logger_1 = require("../utils/logger");
16
+ const errors_1 = require("./errors");
16
17
  /**
17
18
  * Configuration manager for Terraflow
18
19
  * Handles loading and merging configuration from multiple sources
@@ -38,12 +39,22 @@ class ConfigManager {
38
39
  terraform_log: false,
39
40
  terraform_log_level: 'TRACE',
40
41
  },
42
+ // Note: provider is required and should be set in config file
41
43
  };
42
44
  // Load config file (higher priority than defaults)
43
45
  const configFile = ConfigManager.getConfigFilePath(cliOptions, cwd);
44
46
  const fileConfig = ConfigManager.loadConfigFile(configFile);
45
47
  // Get directory where config file is located (for .env file loading)
46
48
  const configFileDir = path_1.default.dirname(configFile);
49
+ // Validate that provider is set (only if config file exists)
50
+ if (fs_1.default.existsSync(configFile) && !fileConfig.provider) {
51
+ throw new errors_1.ConfigError('Configuration file must specify a "provider" (aws, gcp, or azure). ' +
52
+ 'Run "terraflow config init" to generate a template, or add "provider: <provider>" to your .tfwconfig.yml');
53
+ }
54
+ // Validate provider value (only if provider is set)
55
+ if (fileConfig.provider && !['aws', 'gcp', 'azure'].includes(fileConfig.provider)) {
56
+ throw new errors_1.ConfigError(`Invalid provider "${fileConfig.provider}". Must be one of: aws, gcp, azure.`);
57
+ }
47
58
  // Load environment variables (higher priority than config file)
48
59
  const envConfig = ConfigManager.loadFromEnvironment();
49
60
  // Merge: defaults < fileConfig < envConfig < cliOptions
@@ -188,6 +199,10 @@ class ConfigManager {
188
199
  if (!config || typeof config !== 'object') {
189
200
  continue;
190
201
  }
202
+ // Merge provider
203
+ if (config.provider !== undefined) {
204
+ result.provider = config.provider;
205
+ }
191
206
  // Merge workspace
192
207
  if (config.workspace !== undefined) {
193
208
  result.workspace = config.workspace;
@@ -30,7 +30,8 @@ export declare class ContextBuilder {
30
30
  private static deriveWorkspace;
31
31
  /**
32
32
  * Build cloud provider information
33
- * Detects cloud provider from environment (AWS, Azure, GCP)
33
+ * Detects cloud provider from configuration or environment (AWS, Azure, GCP)
34
+ * @param config - Terraflow configuration
34
35
  * @returns Cloud information
35
36
  */
36
37
  private static buildCloudInfo;
@@ -58,7 +58,7 @@ class ContextBuilder {
58
58
  static async build(config, cwd = process.cwd()) {
59
59
  const workspace = await ContextBuilder.deriveWorkspace(config, cwd);
60
60
  const workingDir = config_1.ConfigManager.getWorkingDir(config, cwd);
61
- const cloud = await ContextBuilder.buildCloudInfo();
61
+ const cloud = await ContextBuilder.buildCloudInfo(config);
62
62
  const vcs = await ContextBuilder.buildVcsInfo(cwd);
63
63
  const hostname = os_1.default.hostname();
64
64
  // Build template variables from environment and context
@@ -131,14 +131,15 @@ class ContextBuilder {
131
131
  }
132
132
  /**
133
133
  * Build cloud provider information
134
- * Detects cloud provider from environment (AWS, Azure, GCP)
134
+ * Detects cloud provider from configuration or environment (AWS, Azure, GCP)
135
+ * @param config - Terraflow configuration
135
136
  * @returns Cloud information
136
137
  */
137
- static async buildCloudInfo() {
138
+ static async buildCloudInfo(config) {
138
139
  // Use CloudUtils to detect cloud provider
139
140
  // This needs to happen early so validation can check it
140
141
  const { CloudUtils } = await Promise.resolve().then(() => __importStar(require('../utils/cloud')));
141
- return await CloudUtils.detectCloud();
142
+ return await CloudUtils.detectCloud(config);
142
143
  }
143
144
  /**
144
145
  * Build VCS information
@@ -19,9 +19,12 @@ export declare class EnvironmentSetup {
19
19
  * Setup cloud provider environment (AWS, Azure, GCP)
20
20
  * - Syncs AWS_REGION and AWS_DEFAULT_REGION
21
21
  * - Fetches account/subscription/project IDs
22
+ * @param config - Optional Terraflow configuration
22
23
  * @returns Updated cloud info
23
24
  */
24
- static setupCloud(): Promise<ExecutionContext['cloud']>;
25
+ static setupCloud(config?: {
26
+ provider?: 'aws' | 'gcp' | 'azure';
27
+ }): Promise<ExecutionContext['cloud']>;
25
28
  /**
26
29
  * Setup VCS environment (git variables)
27
30
  * Sets generic GIT_REPOSITORY variable and GitHub Actions/GitLab CI compatible variables
@@ -50,10 +50,11 @@ class EnvironmentSetup {
50
50
  * Setup cloud provider environment (AWS, Azure, GCP)
51
51
  * - Syncs AWS_REGION and AWS_DEFAULT_REGION
52
52
  * - Fetches account/subscription/project IDs
53
+ * @param config - Optional Terraflow configuration
53
54
  * @returns Updated cloud info
54
55
  */
55
- static async setupCloud() {
56
- const cloud = await cloud_1.CloudUtils.detectCloud();
56
+ static async setupCloud(config) {
57
+ const cloud = await cloud_1.CloudUtils.detectCloud(config);
57
58
  // Sync AWS region if AWS provider detected
58
59
  if (cloud.provider === 'aws') {
59
60
  const region = cloud_1.CloudUtils.getAwsRegion();
@@ -198,7 +199,7 @@ class EnvironmentSetup {
198
199
  const envDir = projectRoot || process.cwd();
199
200
  EnvironmentSetup.loadEnvFile(envDir);
200
201
  // 2. Setup cloud environment (detect account IDs, regions)
201
- const cloud = await EnvironmentSetup.setupCloud();
202
+ const cloud = await EnvironmentSetup.setupCloud(config);
202
203
  context.cloud = cloud;
203
204
  // 3. Setup VCS environment (git branch, commit, repository)
204
205
  await EnvironmentSetup.setupVcs(context);
@@ -49,7 +49,7 @@ const errors_1 = require("./errors");
49
49
  * Commands that require workspace and init
50
50
  * Exported so it can be used by the CLI for command registration
51
51
  */
52
- exports.WORKSPACE_SENSITIVE_COMMANDS = [].concat(validator_1.FULL_VALIDATION_COMMANDS, validator_1.BACKEND_REQUIRED_COMMANDS, ['init'] // terraform init also needs backend setup
52
+ exports.WORKSPACE_SENSITIVE_COMMANDS = [].concat(validator_1.FULL_VALIDATION_COMMANDS, validator_1.BACKEND_REQUIRED_COMMANDS, ['init', 'validate'] // terraform init and validate also need backend setup (providers initialized)
53
53
  );
54
54
  /**
55
55
  * Terraform executor for running terraform commands
@@ -70,7 +70,7 @@ class TerraformExecutor {
70
70
  environment_1.EnvironmentSetup.loadEnvFile(configFileDir);
71
71
  // Re-detect cloud after .env is loaded (credentials might be in .env)
72
72
  const { CloudUtils } = await Promise.resolve().then(() => __importStar(require('../utils/cloud')));
73
- context.cloud = await CloudUtils.detectCloud();
73
+ context.cloud = await CloudUtils.detectCloud(config);
74
74
  // 1. Run validations
75
75
  logger_1.Logger.info('🔍 Running validations...');
76
76
  const validationResult = await validator_1.Validator.validate(command, config, context, {
@@ -259,11 +259,18 @@ class TerraformExecutor {
259
259
  }
260
260
  try {
261
261
  logger_1.Logger.debug(`Executing: terraform ${args.join(' ')} in ${workingDir}`);
262
- (0, child_process_1.execSync)(`terraform ${args.join(' ')}`, {
262
+ // Use spawnSync with array to avoid shell interpretation of special characters
263
+ const result = (0, child_process_1.spawnSync)('terraform', args, {
263
264
  cwd: workingDir,
264
265
  stdio: 'inherit',
265
266
  encoding: 'utf8',
266
267
  });
268
+ if (result.error) {
269
+ throw result.error;
270
+ }
271
+ if (result.status !== 0) {
272
+ throw new Error(`Terraform init failed with exit code ${result.status}`);
273
+ }
267
274
  logger_1.Logger.info('✅ Terraform initialized successfully');
268
275
  }
269
276
  catch (error) {
@@ -280,28 +287,34 @@ class TerraformExecutor {
280
287
  try {
281
288
  // Try to select existing workspace
282
289
  logger_1.Logger.debug(`Selecting workspace: ${workspaceName}`);
283
- (0, child_process_1.execSync)(`terraform workspace select ${workspaceName}`, {
290
+ // Use spawnSync with array to avoid shell interpretation
291
+ const selectResult = (0, child_process_1.spawnSync)('terraform', ['workspace', 'select', workspaceName], {
284
292
  cwd: workingDir,
285
293
  stdio: 'pipe',
286
294
  encoding: 'utf8',
287
295
  });
288
- logger_1.Logger.debug(`Workspace ${workspaceName} selected`);
289
- }
290
- catch {
296
+ if (selectResult.status === 0) {
297
+ logger_1.Logger.debug(`Workspace ${workspaceName} selected`);
298
+ return;
299
+ }
291
300
  // Workspace doesn't exist, create it
292
- try {
293
- logger_1.Logger.debug(`Creating workspace: ${workspaceName}`);
294
- (0, child_process_1.execSync)(`terraform workspace new ${workspaceName}`, {
295
- cwd: workingDir,
296
- stdio: 'inherit',
297
- encoding: 'utf8',
298
- });
299
- logger_1.Logger.info(`✅ Workspace ${workspaceName} created and selected`);
301
+ logger_1.Logger.debug(`Creating workspace: ${workspaceName}`);
302
+ const createResult = (0, child_process_1.spawnSync)('terraform', ['workspace', 'new', workspaceName], {
303
+ cwd: workingDir,
304
+ stdio: 'inherit',
305
+ encoding: 'utf8',
306
+ });
307
+ if (createResult.error) {
308
+ throw createResult.error;
300
309
  }
301
- catch (error) {
302
- logger_1.Logger.error(`Failed to create workspace ${workspaceName}: ${error instanceof Error ? error.message : String(error)}`);
303
- throw error;
310
+ if (createResult.status !== 0) {
311
+ throw new Error(`Failed to create workspace: ${createResult.stderr?.toString() || 'Unknown error'}`);
304
312
  }
313
+ logger_1.Logger.info(`✅ Workspace ${workspaceName} created and selected`);
314
+ }
315
+ catch (error) {
316
+ logger_1.Logger.error(`Failed to select/create workspace ${workspaceName}: ${error instanceof Error ? error.message : String(error)}`);
317
+ throw error;
305
318
  }
306
319
  }
307
320
  /**
@@ -314,17 +327,35 @@ class TerraformExecutor {
314
327
  const terraformArgs = [command, ...args];
315
328
  try {
316
329
  logger_1.Logger.info(`🚀 Executing: terraform ${terraformArgs.join(' ')}`);
317
- (0, child_process_1.execSync)(`terraform ${terraformArgs.join(' ')}`, {
330
+ // Use spawnSync with array to avoid shell interpretation of special characters
331
+ // This ensures arguments with brackets, quotes, etc. are passed correctly to terraform
332
+ const result = (0, child_process_1.spawnSync)('terraform', terraformArgs, {
318
333
  cwd: workingDir,
319
334
  stdio: 'inherit',
320
335
  encoding: 'utf8',
321
336
  });
337
+ if (result.error) {
338
+ throw result.error;
339
+ }
340
+ if (result.status !== 0) {
341
+ // Check if this is a user cancellation
342
+ // Terraform exits with code 1 when cancelled and prints "Apply cancelled." etc.
343
+ // Since we use stdio: 'inherit', terraform's message is already shown to the user
344
+ // For interactive commands (apply, plan, destroy), exit code 1 often means cancellation
345
+ const isInteractiveCommand = ['apply', 'plan', 'destroy'].includes(command);
346
+ if (isInteractiveCommand && result.status === 1) {
347
+ // Likely a user cancellation - terraform already printed the message
348
+ // Just exit without adding our own error messages
349
+ process.exit(1);
350
+ }
351
+ // For other errors, throw with status code
352
+ const error = new Error(`Terraform command failed with exit code ${result.status ?? 'unknown'}`);
353
+ error.status = result.status ?? undefined;
354
+ throw error;
355
+ }
322
356
  }
323
357
  catch (error) {
324
358
  // Check if this is a user cancellation
325
- // Terraform exits with code 1 when cancelled and prints "Apply cancelled." etc.
326
- // Since we use stdio: 'inherit', terraform's message is already shown to the user
327
- // For interactive commands (apply, plan, destroy), exit code 1 often means cancellation
328
359
  const exitCode = error.status;
329
360
  const isInteractiveCommand = ['apply', 'plan', 'destroy'].includes(command);
330
361
  if (isInteractiveCommand && exitCode === 1) {
@@ -2,6 +2,8 @@
2
2
 
3
3
  Infrastructure as Code project managed with [Terraflow](https://github.com/salte-common/terraflow).
4
4
 
5
+ **Cursor-ready:** This project is scaffolded for [Cursor](https://cursor.com/) with rules for Terraflow, development standards, and AI code tracking. Open in Cursor to get up and running quickly with AI-assisted development.
6
+
5
7
  ## Prerequisites
6
8
 
7
9
  - [Terraform](https://www.terraform.io/downloads) >= 1.0
@@ -11,7 +13,12 @@ Infrastructure as Code project managed with [Terraflow](https://github.com/salte
11
13
 
12
14
  ## Getting Started
13
15
 
14
- 1. Copy `.env.example` to `.env` and configure your credentials:
16
+ 1. **Credentials** (optional if using standard locations):
17
+ - **AWS:** `~/.aws/credentials` and `~/.aws/config` — no `.env` needed
18
+ - **Azure:** `az login` — no `.env` needed
19
+ - **GCP:** `gcloud auth application-default login` — no `.env` needed
20
+
21
+ Otherwise, copy `.env.example` to `.env` and configure your credentials:
15
22
  ```bash
16
23
  cp .env.example .env
17
24
  # Edit .env with your credentials
@@ -19,10 +26,11 @@ Infrastructure as Code project managed with [Terraflow](https://github.com/salte
19
26
 
20
27
  2. Review and update `.tfwconfig.yml` with your backend configuration
21
28
 
22
- 3. Initialize Terraform:
29
+ 3. **Initialize Terraform** (optional — runs automatically before plan/apply):
23
30
  ```bash
24
31
  terraflow init
25
32
  ```
33
+ You can skip this step; Terraflow runs init automatically when needed.
26
34
 
27
35
  4. Plan your infrastructure:
28
36
  ```bash
@@ -67,15 +75,28 @@ terraflow config show
67
75
 
68
76
  ## Workspace Management
69
77
 
70
- Terraflow automatically derives workspace names from your git branch:
71
- - Main branch → `main` workspace
72
- - Feature branches (e.g., `feature/new-api`) hostname-based workspace
73
- - Can be overridden with `--workspace` flag
78
+ Terraflow derives workspace names in this order (first match wins):
79
+
80
+ 1. **CLI override** `--workspace` or `-w`
81
+ 2. **Environment variable** `TERRAFLOW_WORKSPACE`
82
+ 3. **Git tag** — if checked out on a tag (e.g. `v1.0.0` → `v1-0-0`)
83
+ 4. **Git branch** — if non-ephemeral (e.g. `main`, `my-preview-branch`). Ephemeral branches (`feature/foo`, `fix/bar`) fall through to hostname.
84
+ 5. **Hostname** — when none of the above apply (no git repo, no tag, or ephemeral branch)
85
+
86
+ Examples:
87
+ - `main` → `main` workspace
88
+ - `my-preview-branch` → `my-preview-branch` workspace
89
+ - `feature/new-api` → hostname
90
+ - No git repo → hostname
74
91
 
75
92
  ## Configuration
76
93
 
77
94
  See `.tfwconfig.yml` for all available options and the [documentation](https://github.com/salte-common/terraflow/blob/main/docs/configuration.md) for detailed configuration reference.
78
95
 
96
+ ## Project Specification
97
+
98
+ You can create a `SPECIFICATION.md` file in the project root to define project-specific requirements, architecture, and conventions. Cursor and other AI tools will read this file to inform code suggestions and maintain consistency with your specification.
99
+
79
100
  ## License
80
101
 
81
102
  MIT
@@ -0,0 +1,58 @@
1
+ ---
2
+ description: How to maintain .ai-metadata.json - MANDATORY AI code tracking
3
+ globs: [".ai-metadata.json", "**/*"]
4
+ alwaysApply: true
5
+ ---
6
+
7
+ # AI Code Tracking — .ai-metadata.json Maintenance
8
+
9
+ **When you create or modify any file that is checked into source control, you MUST update `.ai-metadata.json` immediately.**
10
+
11
+ ## Scope: Source-Controlled Files Only
12
+
13
+ **Only track files that are committed to git.** Do NOT add entries for:
14
+
15
+ - `node_modules/`, `dist/`, `build/`
16
+ - `.env`, `.env.local`, `.env.*.local`
17
+ - `__pycache__/`, `venv/`, `.pytest_cache/`, `*.pyc`
18
+ - `vendor/`, `*.exe`, `*.dylib`
19
+ - `.terraform/`, `*.tfstate`, `*.tfstate.*`
20
+ - `.cursor/` (if excluded from git)
21
+ - Any path listed in `.gitignore`
22
+
23
+ ## Metadata File Structure
24
+
25
+ ```json
26
+ {
27
+ "files": {
28
+ "path/to/file.ts": {
29
+ "lines_total": 100,
30
+ "lines_ai_generated": 75,
31
+ "ai_percentage": 75.0,
32
+ "last_updated": "2024-12-04T15:30:00Z",
33
+ "tool": "cursor"
34
+ }
35
+ },
36
+ "metadata_version": "1.0"
37
+ }
38
+ ```
39
+
40
+ - `lines_total`: Total lines in the file (read the file to get this)
41
+ - `lines_ai_generated`: Only lines created by AI (cumulative for modified files)
42
+ - `ai_percentage`: `(lines_ai_generated / lines_total) × 100`
43
+ - `last_updated`: ISO 8601 UTC timestamp
44
+ - `tool`: `"cursor"`
45
+
46
+ ## Required Workflow
47
+
48
+ 1. **After creating a new file**: Add entry with `lines_total` = file line count, `lines_ai_generated` = same, `ai_percentage` = 100
49
+ 2. **After modifying an existing file**: Read `.ai-metadata.json` for current `lines_ai_generated`, add new AI lines, update `lines_total` from file, recalculate `ai_percentage`
50
+ 3. **Preserve existing data**: Merge your changes; do not overwrite other file entries
51
+ 4. **Create if missing**: Initialize with `{"files": {}, "metadata_version": "1.0"}`
52
+
53
+ ## DO NOT
54
+
55
+ - Wait until the end of a task to update metadata
56
+ - Include human-created lines in `lines_ai_generated`
57
+ - Guess `lines_total` — always read the file
58
+ - Track files that are gitignored
@@ -0,0 +1,57 @@
1
+ ---
2
+ description: Development standards — language, runtime, and platform context from scaffold
3
+ globs: ["**/*"]
4
+ alwaysApply: true
5
+ ---
6
+
7
+ # Development Standards
8
+
9
+ This project was scaffolded with **<language-display>**, targets **<provider-display>**, and uses **<test-framework>** as the test framework. Assume these as the default context when suggesting code, tests, or architecture.
10
+
11
+ ## ⚠️ MANDATORY: External Standards Preflight
12
+
13
+ **Before performing any task, you MUST:**
14
+
15
+ 1. Read [salte-common/standards](https://github.com/salte-common/standards) recursively and confirm compliance.
16
+ 2. **Do not proceed until done.** This is a blocking prerequisite.
17
+ 3. In your response, cite the specific standards section(s) you applied (e.g. "Per Architecture Standards: ...", "Per Development Standards §X: ...") to prove you reviewed them.
18
+
19
+ **Required checklist — hard stop if not completed:**
20
+ - [ ] I have reviewed salte-common/standards and confirmed compliance
21
+ - [ ] I have cited the relevant standards section(s) in my response
22
+
23
+ If you cannot access or read the standards, stop and inform the user. Do not proceed without this confirmation.
24
+
25
+ ## Assumed Context (from `terraflow new`)
26
+
27
+ | Setting | Value |
28
+ |---------|-------|
29
+ | Language | <language-display> |
30
+ | Runtime | <runtime> |
31
+ | Target Platform | <provider-display> |
32
+ | Test Framework | <test-framework> |
33
+
34
+ ## Project Specification
35
+
36
+ **Before making significant changes, check for `SPECIFICATION.md` in the project root.** If it exists, read it and apply its requirements. Project-specific specifications override generic standards when they conflict.
37
+
38
+ ## External Standards (salte-common/standards)
39
+
40
+ Apply as appropriate for the task:
41
+
42
+ - **Architecture Standards** — Twelve-Factor App, modular design, testing pyramid
43
+ - **Development Standards** — Code formatting, quality, security
44
+ - **<language-standards>** — Language-specific guidelines
45
+ - **<platform-standards>** — Platform-specific deployment and architecture
46
+ - **Deployment Standards** — IaC, CI/CD, environment config
47
+ - **Security Standards** — OWASP Top 10, data protection, API security
48
+ - **Documentation Standards** — README, API docs, code comments
49
+ - **Git Workflow Standards** — Branch strategy, tagging, release process
50
+
51
+ ## When Suggesting Code
52
+
53
+ 1. Use **<language-display>** conventions (runtime: **<runtime>**)
54
+ 2. Write tests with **<test-framework>**
55
+ 3. Target **<provider-display>** for deployment and infrastructure
56
+ 4. Cite the specific salte-common/standards section(s) you applied in your response
57
+ 5. Honor `SPECIFICATION.md` if present
@@ -0,0 +1,71 @@
1
+ ---
2
+ description: How to use Terraflow in this project
3
+ globs: ["**/*.tf", "**/.tfwconfig.yml", "**/terraform/**", "**/.env.example"]
4
+ alwaysApply: false
5
+ ---
6
+
7
+ # Terraflow — Using This Project
8
+
9
+ This project uses [Terraflow](https://github.com/salte-common/terraflow), an opinionated Terraform CLI wrapper. Use `terraflow` instead of raw `terraform` for plan/apply/destroy — it handles init and workspace selection automatically.
10
+
11
+ ## First-Time Setup
12
+
13
+ 1. `cp .env.example .env` and add your cloud credentials
14
+ 2. Edit `.tfwconfig.yml` with your backend bucket/storage and secrets
15
+ 3. `terraflow init` — initialize Terraform
16
+ 4. `terraflow plan` — verify configuration
17
+
18
+ ## Day-to-Day Commands
19
+
20
+ ```bash
21
+ terraflow plan # Plan changes (auto init + workspace)
22
+ terraflow apply # Apply changes
23
+ terraflow destroy # Destroy infrastructure
24
+ terraflow fmt # Format Terraform files (no init)
25
+ terraflow config show # Show resolved config with sources
26
+ terraflow --dry-run plan # Preview without executing
27
+ ```
28
+
29
+ Prefer `terraflow` over `terraform` for plan/apply/destroy — it runs init and workspace selection when needed.
30
+
31
+ ## Workspace Derivation
32
+
33
+ Terraflow derives workspaces in this order (first match wins):
34
+
35
+ 1. **CLI** — `--workspace` or `-w`
36
+ 2. **Environment** — `TERRAFLOW_WORKSPACE`
37
+ 3. **Git tag** — e.g. `v1.0.0` → `v1-0-0`
38
+ 4. **Git branch** — if non-ephemeral (e.g. `main`, `my-preview-branch`). Ephemeral branches (`feature/foo`, `fix/bar`) fall through to hostname.
39
+ 5. **Hostname** — when none of the above apply (no git repo, no tag, or ephemeral branch)
40
+
41
+ Override explicitly:
42
+
43
+ ```bash
44
+ terraflow --workspace production plan
45
+ ```
46
+
47
+ ## Configuration
48
+
49
+ - **File:** `.tfwconfig.yml` — backend, secrets, auth, workspace strategy
50
+ - **Hierarchy:** defaults → `.tfwconfig.yml` → `TERRAFLOW_*` env vars → CLI args
51
+ - **Template variables:** `${AWS_REGION}`, `${GITHUB_REPOSITORY}`, `${WORKSPACE}`, etc.
52
+
53
+ Point users to `.tfwconfig.yml` for backend, secrets, and auth configuration.
54
+
55
+ ## Optional Cleanup
56
+
57
+ If the project doesn't need certain generated artifacts, you may delete them:
58
+
59
+ | Delete if... | Files/Directories |
60
+ |--------------|-------------------|
61
+ | No application code (pure Terraform) | `src/` directory |
62
+ | No serverless/lambda functions | Application-specific resources in `terraform/main.tf` |
63
+ | Different structure preferred | `terraform/modules/` (move or inline as needed) |
64
+
65
+ **Always keep:** `.tfwconfig.yml`, `terraform/_init.tf`, `terraform/inputs.tf` (or equivalent variables).
66
+
67
+ ## When Suggesting Terraflow Usage
68
+
69
+ - Prefer `terraflow` over raw `terraform` for plan/apply/destroy
70
+ - If generated `src/` or application code is irrelevant, suggest deleting it
71
+ - Point users to `.tfwconfig.yml` for configuration changes
@@ -12,6 +12,9 @@
12
12
  #
13
13
  # Environment variables take precedence over this config file.
14
14
 
15
+ # Cloud provider (required): aws | gcp | azure
16
+ provider: <cloud-provider>
17
+
15
18
  # Working directory for terraform files (optional - defaults to ./terraform)
16
19
  # working-dir: ./terraform
17
20
 
@@ -89,7 +92,6 @@
89
92
 
90
93
  # Validations (optional)
91
94
  # validations:
92
- # require_git_commit: true
93
95
  # allowed_workspaces:
94
96
  # - development
95
97
  # - staging
@@ -12,14 +12,14 @@ terraform {
12
12
  }
13
13
  }
14
14
 
15
- backend "s3" {
16
- # Backend configuration provided via:
17
- # - terraflow CLI flags
18
- # - environment variables (TERRAFLOW_*)
19
- # - .tfwconfig.yml
20
- #
21
- # Do not hardcode values here
22
- }
15
+ # Uncomment when ready for remote state. Configure in .tfwconfig.yml
16
+ # backend "s3" {
17
+ # # Backend configuration provided via:
18
+ # # - terraflow CLI flags
19
+ # # - environment variables (TERRAFLOW_*)
20
+ # # - .tfwconfig.yml
21
+ # # Do not hardcode values here
22
+ # }
23
23
  }
24
24
 
25
25
  provider "aws" {
@@ -12,9 +12,10 @@ terraform {
12
12
  }
13
13
  }
14
14
 
15
- backend "azurerm" {
16
- # Backend configuration provided via terraflow
17
- }
15
+ # Uncomment when ready for remote state. Configure in .tfwconfig.yml
16
+ # backend "azurerm" {
17
+ # # Backend configuration provided via terraflow
18
+ # }
18
19
  }
19
20
 
20
21
  provider "azurerm" {
@@ -12,9 +12,10 @@ terraform {
12
12
  }
13
13
  }
14
14
 
15
- backend "gcs" {
16
- # Backend configuration provided via terraflow
17
- }
15
+ # Uncomment when ready for remote state. Configure in .tfwconfig.yml
16
+ # backend "gcs" {
17
+ # # Backend configuration provided via terraflow
18
+ # }
18
19
  }
19
20
 
20
21
  provider "google" {
@@ -1,9 +1,11 @@
1
1
  # Service Account for Cloud Function
2
2
  resource "google_service_account" "function" {
3
- account_id = "${replace(local.project_name, "-", "")}-${replace(terraform.workspace, "-", "")}-sa"
3
+ # account_id must match regex: ^[a-z](?:[-a-z0-9]{4,28}[a-z0-9])$
4
+ # Must start with lowercase letter, 6-30 chars, end with letter/number
5
+ account_id = "${replace(local.sanitized_project_name, "-", "")}${replace(local.sanitized_workspace, "-", "")}sa"
4
6
  display_name = "${local.project_name} ${terraform.workspace} Function Service Account"
5
7
 
6
- labels = local.common_tags
8
+ # Note: google_service_account does not support labels
7
9
  }
8
10
 
9
11
  # IAM binding for service account to invoke Cloud Functions
@@ -15,7 +17,7 @@ resource "google_project_iam_member" "function_invoker" {
15
17
 
16
18
  # Storage bucket for function source code
17
19
  resource "google_storage_bucket" "function_source" {
18
- name = "${replace(local.project_name, "-", "")}-${replace(terraform.workspace, "-", "")}-funcs-source"
20
+ name = "${replace(local.sanitized_project_name, "-", "")}-${replace(local.sanitized_workspace, "-", "")}-funcs-source"
19
21
  location = var.gcp_region
20
22
 
21
23
  uniform_bucket_level_access = true
@@ -38,7 +40,7 @@ resource "google_storage_bucket_object" "function_source" {
38
40
  source = data.archive_file.function_zip.output_path
39
41
  }
40
42
 
41
- # Cloud Function
43
+ # Cloud Function (HTTP-triggered)
42
44
  resource "google_cloudfunctions_function" "main" {
43
45
  name = "${local.project_name}-${terraform.workspace}-function"
44
46
  description = "Serverless function for ${local.project_name}"
@@ -47,9 +49,9 @@ resource "google_cloudfunctions_function" "main" {
47
49
  available_memory_mb = 256
48
50
  source_archive_bucket = google_storage_bucket.function_source.name
49
51
  source_archive_object = google_storage_bucket_object.function_source.name
50
- trigger {
51
- http_trigger {}
52
- }
52
+
53
+ # HTTP-triggered. NOTE(review): the google provider generally requires either
54
+ # trigger_http = true or an event_trigger to be set; verify omitting both is accepted.
53
55
 
54
56
  entry_point = "handler"
55
57
 
@@ -2,6 +2,11 @@ locals {
2
2
  # Project name for resource naming
3
3
  project_name = "<project-name>"
4
4
 
5
+ # Sanitized names for GCP resources (lowercase, underscores replaced with dashes)
6
+ # GCP requires lowercase alphanumeric with dashes for many resource names
7
+ sanitized_project_name = lower(replace("<project-name>", "_", "-"))
8
+ sanitized_workspace = lower(replace(terraform.workspace, "_", "-"))
9
+
5
10
  # Common tags/labels for all resources
6
11
  common_tags = {
7
12
  Workspace = terraform.workspace
@@ -59,8 +59,6 @@ export interface LoggingConfig {
59
59
  * Validation configuration
60
60
  */
61
61
  export interface ValidationConfig {
62
- /** Require git commit before apply/destroy */
63
- require_git_commit?: boolean;
64
62
  /** List of allowed workspace names (empty = allow all) */
65
63
  allowed_workspaces?: string[];
66
64
  }
@@ -68,6 +66,8 @@ export interface ValidationConfig {
68
66
  * Main Terraflow configuration file structure
69
67
  */
70
68
  export interface TerraflowConfig {
69
+ /** Cloud provider: aws | gcp | azure (required) */
70
+ provider: 'aws' | 'gcp' | 'azure';
71
71
  /** Workspace name */
72
72
  workspace?: string;
73
73
  /** Terraform working directory */
@@ -8,10 +8,13 @@ import type { CloudInfo } from '../types/context.js';
8
8
  */
9
9
  export declare class CloudUtils {
10
10
  /**
11
- * Detect cloud provider from environment
11
+ * Detect cloud provider from configuration or environment
12
+ * @param config - Optional Terraflow configuration
12
13
  * @returns Cloud information
13
14
  */
14
- static detectCloud(): Promise<CloudInfo>;
15
+ static detectCloud(config?: {
16
+ provider?: 'aws' | 'gcp' | 'azure';
17
+ }): Promise<CloudInfo>;
15
18
  /**
16
19
  * Get AWS account ID via `aws sts get-caller-identity`
17
20
  * @returns AWS account ID or undefined
@@ -11,13 +11,48 @@ const child_process_1 = require("child_process");
11
11
  */
12
12
  class CloudUtils {
13
13
  /**
14
- * Detect cloud provider from environment
14
+ * Detect cloud provider from configuration or environment
15
+ * @param config - Optional Terraflow configuration
15
16
  * @returns Cloud information
16
17
  */
17
- static async detectCloud() {
18
+ static async detectCloud(config) {
18
19
  const cloud = {
19
20
  provider: 'none',
20
21
  };
22
+ // If provider is specified in config, use it as primary source
23
+ if (config?.provider) {
24
+ cloud.provider = config.provider;
25
+ if (config.provider === 'aws') {
26
+ cloud.awsRegion = CloudUtils.getAwsRegion();
27
+ try {
28
+ cloud.awsAccountId = await CloudUtils.getAwsAccountId();
29
+ }
30
+ catch {
31
+ // Account ID fetch failed, continue without it
32
+ }
33
+ return cloud;
34
+ }
35
+ else if (config.provider === 'azure') {
36
+ try {
37
+ cloud.azureSubscriptionId = await CloudUtils.getAzureSubscriptionId();
38
+ cloud.azureTenantId = await CloudUtils.getAzureTenantId();
39
+ }
40
+ catch {
41
+ // Subscription/Tenant ID fetch failed, continue without it
42
+ }
43
+ return cloud;
44
+ }
45
+ else if (config.provider === 'gcp') {
46
+ try {
47
+ cloud.gcpProjectId = await CloudUtils.getGcpProjectId();
48
+ }
49
+ catch {
50
+ // Project ID fetch failed, continue without it
51
+ }
52
+ return cloud;
53
+ }
54
+ }
55
+ // Fallback to environment-based detection (for backwards compatibility)
21
56
  // Check for AWS
22
57
  if (process.env.AWS_ACCESS_KEY_ID || process.env.AWS_PROFILE || process.env.AWS_REGION) {
23
58
  cloud.provider = 'aws';
@@ -43,6 +43,14 @@ export declare function generateApplicationFiles(projectDir: string, language: s
43
43
  * @param projectName - Name of the project
44
44
  */
45
45
  export declare function generateConfigFiles(projectDir: string, provider: string, language: string, projectName: string): Promise<void>;
46
+ /**
47
+ * Generate initial .ai-metadata.json with stats for all scaffolded files
48
+ * All scaffolded files are treated as 100% AI-authored (from templates)
49
+ * Only tracks files that are checked into source control
50
+ * @param projectDir - Root directory of the project
51
+ * @param language - Programming language (javascript, typescript, python, go)
52
+ */
53
+ export declare function generateAiMetadata(projectDir: string, language: string): Promise<void>;
46
54
  /**
47
55
  * Project structure creation
48
56
  */
@@ -9,6 +9,7 @@ exports.processTemplate = processTemplate;
9
9
  exports.generateTerraformFiles = generateTerraformFiles;
10
10
  exports.generateApplicationFiles = generateApplicationFiles;
11
11
  exports.generateConfigFiles = generateConfigFiles;
12
+ exports.generateAiMetadata = generateAiMetadata;
12
13
  exports.createProjectStructure = createProjectStructure;
13
14
  exports.validateProjectName = validateProjectName;
14
15
  exports.validateProvider = validateProvider;
@@ -254,6 +255,72 @@ function getBackendType(provider) {
254
255
  return 'local';
255
256
  }
256
257
  }
258
+ /**
259
+ * Build template variables for development standards Cursor rules
260
+ * @param language - Programming language (javascript, typescript, python, go)
261
+ * @param provider - Cloud provider (aws, azure, gcp)
262
+ * @returns Variables for cursor-development-standards.mdc.template
263
+ */
264
+ function getDevelopmentStandardsVariables(language, provider) {
265
+ const languageMap = {
266
+ javascript: {
267
+ display: 'JavaScript',
268
+ testFramework: 'Jest',
269
+ runtime: 'Node.js',
270
+ standards: 'JavaScript Standards',
271
+ },
272
+ typescript: {
273
+ display: 'TypeScript',
274
+ testFramework: 'Jest',
275
+ runtime: 'Node.js',
276
+ standards: 'JavaScript Standards',
277
+ },
278
+ python: {
279
+ display: 'Python',
280
+ testFramework: 'pytest',
281
+ runtime: 'Python',
282
+ standards: 'Python Standards',
283
+ },
284
+ go: {
285
+ display: 'Go',
286
+ testFramework: 'go test',
287
+ runtime: 'Go',
288
+ standards: 'Development Standards',
289
+ },
290
+ };
291
+ const providerMap = {
292
+ aws: {
293
+ display: 'AWS',
294
+ platformStandards: 'AWS Architecture Standards',
295
+ },
296
+ azure: {
297
+ display: 'Azure',
298
+ platformStandards: 'Azure deployment and architecture patterns',
299
+ },
300
+ gcp: {
301
+ display: 'GCP',
302
+ platformStandards: 'GCP deployment and architecture patterns',
303
+ },
304
+ };
305
+ const lang = languageMap[language] ?? {
306
+ display: language,
307
+ testFramework: 'tests',
308
+ runtime: language,
309
+ standards: 'Development Standards',
310
+ };
311
+ const prov = providerMap[provider] ?? {
312
+ display: provider,
313
+ platformStandards: 'platform-specific standards',
314
+ };
315
+ return {
316
+ 'language-display': lang.display,
317
+ 'provider-display': prov.display,
318
+ 'test-framework': lang.testFramework,
319
+ runtime: lang.runtime,
320
+ 'language-standards': lang.standards,
321
+ 'platform-standards': prov.platformStandards,
322
+ };
323
+ }
257
324
  /**
258
325
  * Generate configuration files for the project
259
326
  * @param projectDir - Root directory of the project
@@ -270,6 +337,7 @@ async function generateConfigFiles(projectDir, provider, language, projectName)
270
337
  const tfwconfigContent = processTemplate(tfwconfigTemplate, {
271
338
  'project-name': projectName,
272
339
  provider: backendType,
340
+ 'cloud-provider': provider, // Add provider field
273
341
  });
274
342
  (0, fs_1.writeFileSync)((0, path_1.join)(projectDir, '.tfwconfig.yml'), tfwconfigContent);
275
343
  // .env.example
@@ -286,8 +354,106 @@ async function generateConfigFiles(projectDir, provider, language, projectName)
286
354
  provider: provider, // Use original provider name for README
287
355
  });
288
356
  (0, fs_1.writeFileSync)((0, path_1.join)(projectDir, 'README.md'), readmeContent);
357
+ // .cursor/rules/terraform.mdc - Cursor instructions for Terraflow usage
358
+ const cursorRulesDir = (0, path_1.join)(projectDir, '.cursor', 'rules');
359
+ (0, fs_1.mkdirSync)(cursorRulesDir, { recursive: true });
360
+ const cursorRulesTemplate = loadTemplate((0, path_1.join)(templatesDir, 'cursor-terraflow-instructions.mdc.template'));
361
+ (0, fs_1.writeFileSync)((0, path_1.join)(cursorRulesDir, 'terraform.mdc'), cursorRulesTemplate);
362
+ // .cursor/rules/ai-metadata.mdc - Cursor instructions for .ai-metadata.json maintenance
363
+ const aiMetadataRulesTemplate = loadTemplate((0, path_1.join)(templatesDir, 'cursor-ai-metadata.mdc.template'));
364
+ (0, fs_1.writeFileSync)((0, path_1.join)(cursorRulesDir, 'ai-metadata.mdc'), aiMetadataRulesTemplate);
365
+ // .cursor/rules/development-standards.mdc - Cursor instructions for development standards
366
+ const devStandardsTemplate = loadTemplate((0, path_1.join)(templatesDir, 'cursor-development-standards.mdc.template'));
367
+ const devStandardsVars = getDevelopmentStandardsVariables(language, provider);
368
+ const devStandardsContent = processTemplate(devStandardsTemplate, devStandardsVars);
369
+ (0, fs_1.writeFileSync)((0, path_1.join)(cursorRulesDir, 'development-standards.mdc'), devStandardsContent);
289
370
  logger_1.Logger.debug('Configuration files generated successfully');
290
371
  }
372
+ /**
373
+ * Get list of scaffolded file paths (relative to project root) for AI metadata
374
+ * Only includes files that are checked into source control
375
+ * @param language - Programming language (javascript, typescript, python, go)
376
+ * @returns Array of relative file paths
377
+ */
378
+ function getScaffoldedFilePaths(language) {
379
+ const common = [
380
+ '.tfwconfig.yml',
381
+ '.env.example',
382
+ '.gitignore',
383
+ 'README.md',
384
+ '.cursor/rules/terraform.mdc',
385
+ '.cursor/rules/ai-metadata.mdc',
386
+ '.cursor/rules/development-standards.mdc',
387
+ 'terraform/_init.tf',
388
+ 'terraform/inputs.tf',
389
+ 'terraform/locals.tf',
390
+ 'terraform/main.tf',
391
+ 'terraform/outputs.tf',
392
+ 'terraform/modules/inputs.tf',
393
+ 'terraform/modules/main.tf',
394
+ 'terraform/modules/outputs.tf',
395
+ ];
396
+ const languageFiles = {
397
+ javascript: [
398
+ 'src/main/index.js',
399
+ 'src/test/index.spec.js',
400
+ 'package.json',
401
+ '.eslintrc.json',
402
+ 'jest.config.js',
403
+ '.prettierrc',
404
+ ],
405
+ typescript: [
406
+ 'src/main/index.ts',
407
+ 'src/test/index.spec.ts',
408
+ 'package.json',
409
+ 'tsconfig.json',
410
+ '.eslintrc.json',
411
+ 'jest.config.js',
412
+ '.prettierrc',
413
+ ],
414
+ python: [
415
+ 'src/main/index.py',
416
+ 'src/test/test_main.py',
417
+ 'requirements.txt',
418
+ 'pytest.ini',
419
+ '.pylintrc',
420
+ ],
421
+ go: ['src/main/index.go', 'src/test/main_test.go', 'go.mod', '.golangci.yml'],
422
+ };
423
+ return [...common, ...(languageFiles[language] ?? [])];
424
+ }
425
+ /**
426
+ * Generate initial .ai-metadata.json with stats for all scaffolded files
427
+ * All scaffolded files are treated as 100% AI-authored (from templates)
428
+ * Only tracks files that are checked into source control
429
+ * @param projectDir - Root directory of the project
430
+ * @param language - Programming language (javascript, typescript, python, go)
431
+ */
432
+ async function generateAiMetadata(projectDir, language) {
433
+ const filePaths = getScaffoldedFilePaths(language);
434
+ const files = {};
435
+ const timestamp = new Date().toISOString().replace(/\.\d{3}Z$/, 'Z');
436
+ for (const relPath of filePaths) {
437
+ const fullPath = (0, path_1.join)(projectDir, relPath);
438
+ if (!(0, fs_1.existsSync)(fullPath))
439
+ continue;
440
+ const content = (0, fs_1.readFileSync)(fullPath, 'utf8');
441
+ const linesTotal = content.split(/\r?\n/).length;
442
+ files[relPath] = {
443
+ lines_total: linesTotal,
444
+ lines_ai_generated: linesTotal,
445
+ ai_percentage: 100,
446
+ last_updated: timestamp,
447
+ tool: 'cursor',
448
+ };
449
+ }
450
+ const metadata = {
451
+ files,
452
+ metadata_version: '1.0',
453
+ };
454
+ (0, fs_1.writeFileSync)((0, path_1.join)(projectDir, '.ai-metadata.json'), JSON.stringify(metadata, null, 2));
455
+ logger_1.Logger.debug('Initial .ai-metadata.json generated');
456
+ }
291
457
  /**
292
458
  * Project structure creation
293
459
  */
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@salte-common/terraflow",
3
- "version": "1.0.0-alpha.4",
3
+ "version": "1.0.0-alpha.7",
4
4
  "description": "Opinionated Terraform workflow CLI with multi-cloud support",
5
5
  "main": "dist/index.js",
6
6
  "types": "dist/index.d.ts",