@stackql/provider-utils 0.3.1 → 0.3.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/providerdev/analyze.js +66 -6
package/package.json
CHANGED

package/src/providerdev/analyze.js
CHANGED
@@ -105,13 +105,60 @@ export async function analyze(options) {
   } = options;
 
   try {
-
+    // In the analyze function
     const outputPath = path.join(outputDir, 'all_services.csv');
-
-
-
-
-
+
+    // Check if output file already exists
+    let fileExists = false;
+    if (fs.existsSync(outputPath)) {
+      logger.info(`Output file already exists: ${outputPath}`);
+      fileExists = true;
+    } else if (!fs.existsSync(outputDir)) {
+      logger.info(`Output directory does not exist. Creating: ${outputDir}`);
+      fs.mkdirSync(outputDir, { recursive: true });
+    }
+
+    // get existing mappings
+    const existingMappings = {};
+    if (fileExists) {
+      try {
+        await new Promise((resolve, reject) => {
+          createReadStream(outputPath)
+            .pipe(csv())
+            .on('data', (row) => {
+              if (row.operationId) {
+                const key = `${row.filename}::${row.operationId}`;
+                existingMappings[key] = {
+                  resourceName: row.stackql_resource_name || '',
+                  methodName: row.stackql_method_name || '',
+                  sqlVerb: row.stackql_verb || ''
+                };
+              }
+            })
+            .on('end', () => {
+              logger.info(`Loaded ${Object.keys(existingMappings).length} mappings from existing CSV`);
+              resolve();
+            })
+            .on('error', (error) => {
+              logger.error(`Failed to load existing CSV: ${error.message}`);
+              reject(error);
+            });
+        });
+      } catch (error) {
+        logger.error(`Error processing CSV: ${error.message}`);
+      }
+    }
+
+    // Create write stream - append if file exists
+    const writer = fs.createWriteStream(outputPath, {
+      encoding: 'utf8',
+      flags: fileExists ? 'a' : 'w' // Use 'a' for append if file exists, 'w' for write if new
+    });
+
+    // Only write header if creating a new file
+    if (!fileExists) {
+      writer.write('filename,path,operationId,formatted_op_id,verb,response_object,tags,formatted_tags,stackql_resource_name,stackql_method_name,stackql_verb\n');
+    }
 
     const files = fs.readdirSync(inputDir);
 
@@ -129,7 +176,20 @@ export async function analyze(options) {
           continue;
         }
 
+        // Then in the operation processing loop:
         const operationId = operation.operationId || '';
+        // Check if operation is already mapped in CSV
+        const mappingKey = `${filename}::${operationId}`;
+        if (operationId && existingMappings[mappingKey]) {
+          const mapping = existingMappings[mappingKey];
+          if (mapping.resourceName && mapping.methodName && mapping.sqlVerb) {
+            logger.info(`Skipping already mapped operation: ${mappingKey} (${mapping.resourceName}.${mapping.methodName} - ${mapping.sqlVerb})`);
+            continue; // Skip to next operation
+          } else {
+            logger.warn(`Operation ${mappingKey} found in CSV but has incomplete mapping`);
+          }
+        }
+
         // Format operationId as snake_case
         const formattedOpId = operationId ? camelToSnake(operationId) : '';
 
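
The net effect of the analyze.js changes is that analyze can be re-run incrementally: an existing all_services.csv is opened in append mode, and any operation that already has a complete stackql_resource_name / stackql_method_name / stackql_verb mapping is skipped. A minimal usage sketch follows, assuming analyze is imported from the package and that inputDir and outputDir are the relevant options; the import path and the example directory paths are illustrative, not taken from the package docs.

import { analyze } from '@stackql/provider-utils'; // import path assumed for illustration

// First run: creates <outputDir>/all_services.csv and writes the CSV header row.
await analyze({ inputDir: './openapi-specs', outputDir: './workdir' });

// Second run against the same outputDir: the existing CSV is detected,
// filename::operationId keys with complete mappings are loaded, the file is
// opened with flags: 'a', and already-mapped operations are skipped.
await analyze({ inputDir: './openapi-specs', outputDir: './workdir' });

This makes repeated runs roughly resumable: only unmapped or incompletely mapped operations are appended instead of the whole CSV being regenerated from scratch.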