@burgan-tech/vnext-workflow-cli 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/build-and-publish.yml +702 -0
- package/.github/workflows/check-sonar.yml +69 -0
- package/README.md +382 -0
- package/bin/workflow.js +70 -0
- package/package.json +44 -0
- package/src/commands/check.js +74 -0
- package/src/commands/config.js +31 -0
- package/src/commands/csx.js +85 -0
- package/src/commands/reset.js +161 -0
- package/src/commands/sync.js +189 -0
- package/src/commands/update.js +203 -0
- package/src/lib/api.js +72 -0
- package/src/lib/config.js +29 -0
- package/src/lib/csx.js +191 -0
- package/src/lib/db.js +122 -0
- package/src/lib/discover.js +65 -0
- package/src/lib/workflow.js +162 -0
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
const Conf = require('conf');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
|
|
4
|
+
const config = new Conf({
|
|
5
|
+
projectName: 'vnext-workflow-cli',
|
|
6
|
+
defaults: {
|
|
7
|
+
PROJECT_ROOT: process.cwd(),
|
|
8
|
+
AUTO_DISCOVER: true,
|
|
9
|
+
API_BASE_URL: 'http://localhost:4201',
|
|
10
|
+
API_VERSION: 'v1',
|
|
11
|
+
DB_HOST: 'localhost',
|
|
12
|
+
DB_PORT: 5432,
|
|
13
|
+
DB_NAME: 'vNext_WorkflowDb',
|
|
14
|
+
DB_USER: 'postgres',
|
|
15
|
+
DB_PASSWORD: 'postgres',
|
|
16
|
+
USE_DOCKER: false,
|
|
17
|
+
DOCKER_POSTGRES_CONTAINER: 'vnext-postgres',
|
|
18
|
+
DEBUG_MODE: false
|
|
19
|
+
}
|
|
20
|
+
});
|
|
21
|
+
|
|
22
|
+
module.exports = {
|
|
23
|
+
get: (key) => config.get(key),
|
|
24
|
+
set: (key, value) => config.set(key, value),
|
|
25
|
+
getAll: () => config.store,
|
|
26
|
+
clear: () => config.clear(),
|
|
27
|
+
path: config.path
|
|
28
|
+
};
|
|
29
|
+
|
package/src/lib/csx.js
ADDED
|
@@ -0,0 +1,191 @@
|
|
|
1
|
+
const fs = require('fs').promises;
|
|
2
|
+
const path = require('path');
|
|
3
|
+
const { glob } = require('glob');
|
|
4
|
+
|
|
5
|
+
/**
 * Reads a CSX script from disk and returns its contents Base64-encoded.
 *
 * @param {string} csxPath - Path of the .csx file to encode.
 * @returns {Promise<string>} Base64 representation of the file contents.
 */
async function encodeToBase64(csxPath) {
  const source = await fs.readFile(csxPath, 'utf8');
  return Buffer.from(source, 'utf8').toString('base64');
}
|
|
12
|
+
|
|
13
|
+
/**
 * Finds the JSON definition files that reference a given CSX file.
 * A JSON "references" the CSX when its raw text contains the CSX basename.
 *
 * @param {string} csxPath - Path of the CSX file to search for.
 * @param {string} projectRoot - Root directory to scan for JSON files.
 * @returns {Promise<string[]>} Paths of matching JSON files.
 */
async function findJsonFilesForCsx(csxPath, projectRoot) {
  const csxBaseName = path.basename(csxPath);

  // Collect every candidate JSON, skipping build output and package/config files.
  const pattern = path.join(projectRoot, '**', '*.json');
  const candidates = await glob(pattern, {
    ignore: ['**/node_modules/**', '**/dist/**', '**/package*.json', '**/*config*.json']
  });

  const matches = [];
  for (const candidate of candidates) {
    try {
      const text = await fs.readFile(candidate, 'utf8');
      if (text.includes(csxBaseName)) {
        matches.push(candidate);
      }
    } catch (error) {
      // Best effort: an unreadable file is simply skipped.
    }
  }

  return matches;
}
|
|
41
|
+
|
|
42
|
+
/**
 * Computes the canonical "location" string for a CSX path, in the form
 * `./src/Rules/MyRule.csx` (everything from the last `src` segment on).
 * Falls back to `./<basename>` when no `src` segment is present.
 *
 * Fix: the original split only on '/', so Windows-style paths
 * (`C:\proj\src\Rules\MyRule.csx`) never matched the `src` segment.
 * Splitting on both separators keeps POSIX behavior unchanged while
 * handling backslash paths too; the emitted location is always POSIX-style.
 *
 * @param {string} csxPath - Absolute or relative path of the CSX file.
 * @returns {string} POSIX-style location beginning with './'.
 */
function getCsxLocation(csxPath) {
  const parts = csxPath.split(/[\\/]/);
  const srcIndex = parts.lastIndexOf('src');

  if (srcIndex !== -1) {
    return './' + parts.slice(srcIndex).join('/');
  }

  return './' + path.basename(csxPath);
}
|
|
57
|
+
|
|
58
|
+
/**
 * Replaces the Base64 `code` of the first node in a JSON document whose
 * `location` equals `csxLocation`. The file is rewritten (2-space indent)
 * only when a matching node was found and patched.
 *
 * @param {string} jsonPath - JSON file to patch.
 * @param {string} csxLocation - Location string to match (e.g. './src/A.csx').
 * @param {string} base64Code - New Base64 payload for the `code` field.
 * @returns {Promise<boolean>} true when a node was updated and saved.
 */
async function updateCodeInJson(jsonPath, csxLocation, base64Code) {
  const raw = await fs.readFile(jsonPath, 'utf8');
  const doc = JSON.parse(raw);

  // Depth-first search; stops at the first node carrying both a matching
  // `location` and an existing `code` property.
  const patchFirst = (node) => {
    if (node === null || typeof node !== 'object') {
      return false;
    }
    if (node.location === csxLocation && 'code' in node) {
      node.code = base64Code;
      return true;
    }
    for (const key of Object.keys(node)) {
      if (patchFirst(node[key])) {
        return true;
      }
    }
    return false;
  };

  if (!patchFirst(doc)) {
    return false;
  }

  await fs.writeFile(jsonPath, JSON.stringify(doc, null, 2), 'utf8');
  return true;
}
|
|
92
|
+
|
|
93
|
+
/**
 * Syncs a single CSX file: encodes it to Base64, locates every JSON that
 * references it, and patches the `code` field in each of them.
 *
 * @param {string} csxPath - CSX file to process.
 * @param {string} projectRoot - Root directory to search for JSON files.
 * @returns {Promise<object>} { success, updatedCount, jsonFiles } or
 *   { success: false, message } when no referencing JSON exists.
 */
async function processCsxFile(csxPath, projectRoot) {
  const base64Code = await encodeToBase64(csxPath);

  const jsonFiles = await findJsonFilesForCsx(csxPath, projectRoot);
  if (jsonFiles.length === 0) {
    return { success: false, message: 'İlgili JSON bulunamadı' };
  }

  const csxLocation = getCsxLocation(csxPath);

  // Patch each referencing JSON; a failure in one must not stop the rest.
  let updatedCount = 0;
  for (const jsonFile of jsonFiles) {
    try {
      if (await updateCodeInJson(jsonFile, csxLocation, base64Code)) {
        updatedCount += 1;
      }
    } catch (error) {
      // Best effort: skip malformed/unwritable JSON and continue.
    }
  }

  return {
    success: updatedCount > 0,
    updatedCount,
    jsonFiles: jsonFiles.map((f) => path.basename(f))
  };
}
|
|
129
|
+
|
|
130
|
+
/**
 * Finds .csx files with uncommitted changes according to `git status`.
 *
 * Fix: the original evaluated `require('fs')` inside the final filter for
 * every candidate file; the module lookup is now hoisted out of the loop.
 *
 * @param {string} projectRoot - Results are restricted to this directory.
 * @returns {Promise<string[]>} Absolute normalized paths of changed .csx
 *   files that still exist on disk; empty array when git is unavailable.
 */
async function getGitChangedCsx(projectRoot) {
  const { exec } = require('child_process');
  const util = require('util');
  const fsSync = require('fs'); // hoisted: was re-required per candidate
  const execPromise = util.promisify(exec);

  try {
    // `git status --porcelain` paths are relative to the repository root,
    // which is not necessarily projectRoot — resolve it first.
    const { stdout: gitRoot } = await execPromise('git rev-parse --show-toplevel', { cwd: projectRoot });
    const gitRootDir = gitRoot.trim();

    const { stdout } = await execPromise('git status --porcelain', { cwd: gitRootDir });
    const lines = stdout.split('\n').filter(Boolean);

    const normalizedRoot = path.normalize(projectRoot);
    const csxFiles = lines
      .filter(line => line.includes('.csx'))
      .map(line => {
        // Porcelain format: "XY filename" — strip the 2-char status + space.
        const file = line.substring(3).trim();
        return path.normalize(path.join(gitRootDir, file));
      })
      .filter(file =>
        file.endsWith('.csx') &&
        fsSync.existsSync(file) &&        // deleted files also appear in status
        file.startsWith(normalizedRoot)   // keep only files inside the project
      );

    return csxFiles;
  } catch (error) {
    // Not a git repo / git not installed — treat as "no changes".
    return [];
  }
}
|
|
170
|
+
|
|
171
|
+
/**
 * Lists every .csx file located under any `src` directory in the project.
 *
 * @param {string} projectRoot - Directory to scan.
 * @returns {Promise<string[]>} Matching .csx file paths.
 */
async function findAllCsx(projectRoot) {
  // Only scripts under a `src` folder count; build output is excluded.
  return glob(path.join(projectRoot, '**', 'src', '**', '*.csx'), {
    ignore: ['**/node_modules/**', '**/dist/**']
  });
}
|
|
181
|
+
|
|
182
|
+
// Public API of the CSX sync helper module.
module.exports = {
  encodeToBase64,
  findJsonFilesForCsx,
  getCsxLocation,
  updateCodeInJson,
  processCsxFile,
  getGitChangedCsx,
  findAllCsx
};
|
|
191
|
+
|
package/src/lib/db.js
ADDED
|
@@ -0,0 +1,122 @@
|
|
|
1
|
+
const { Client } = require('pg');
const { exec } = require('child_process');
const util = require('util');

// Promisified exec so the docker/psql shell commands below can be awaited.
const execPromise = util.promisify(exec);
|
|
6
|
+
|
|
7
|
+
/**
 * Verifies database connectivity with a trivial `SELECT 1` probe.
 *
 * Two modes: when `dbConfig.useDocker` is set, the probe runs via
 * `docker exec … psql`; otherwise a direct pg connection is opened.
 *
 * Fix: in the direct path the original never called `client.end()` when
 * `connect()`/`query()` threw, leaking the connection; a `finally` block
 * now guarantees cleanup.
 *
 * @param {object} dbConfig - { host, port, database, user, password,
 *   useDocker, dockerContainer }.
 * @returns {Promise<boolean>} true when the probe succeeds.
 */
async function testDbConnection(dbConfig) {
  if (dbConfig.useDocker) {
    // Probe through psql inside the container.
    try {
      const cmd = `docker exec ${dbConfig.dockerContainer} psql -U ${dbConfig.user} -d ${dbConfig.database} -c "SELECT 1;"`;
      await execPromise(cmd);
      return true;
    } catch (error) {
      return false;
    }
  }

  // Direct TCP connection.
  const client = new Client({
    host: dbConfig.host,
    port: dbConfig.port,
    database: dbConfig.database,
    user: dbConfig.user,
    password: dbConfig.password
  });

  try {
    await client.connect();
    await client.query('SELECT 1');
    return true;
  } catch (error) {
    return false;
  } finally {
    // Always release the connection, even when connect()/query() failed.
    await client.end().catch(() => {});
  }
}
|
|
40
|
+
|
|
41
|
+
/**
 * Looks up the newest Instance id for a workflow key.
 * NOTE: `version` is intentionally ignored (mirrors the original bash
 * script) — only the Key column is consulted.
 *
 * Fixes:
 *  - docker path: the key was interpolated unescaped into the SQL string
 *    (SQL injection / breakage on keys containing `'`); single quotes are
 *    now doubled. Shell metacharacters in config values remain a risk in
 *    the docker path — flagged, not silently rewritten.
 *  - direct path: `client.end()` was skipped when connect()/query() threw,
 *    leaking the connection; a `finally` block now guarantees cleanup.
 *
 * @param {object} dbConfig - Connection settings (see testDbConnection).
 * @param {string} schema - Flow schema name; dashes map to underscores.
 * @param {string} key - Workflow key to look up.
 * @param {string} version - Unused; kept for interface compatibility.
 * @returns {Promise<string|null>} Instance id, or null when absent/on error.
 */
async function getInstanceId(dbConfig, schema, key, version) {
  const dbSchema = schema.replace(/-/g, '_');
  const query = `SELECT "Id" FROM "${dbSchema}"."Instances" WHERE "Key" = $1 ORDER BY "CreatedAt" DESC LIMIT 1`;

  if (dbConfig.useDocker) {
    // Escape single quotes so the key cannot terminate the SQL literal.
    const safeKey = String(key).replace(/'/g, "''");
    const cmd = `docker exec ${dbConfig.dockerContainer} psql -U ${dbConfig.user} -d ${dbConfig.database} -t -c "SELECT \\"Id\\" FROM \\"${dbSchema}\\".\\"Instances\\" WHERE \\"Key\\" = '${safeKey}' ORDER BY \\"CreatedAt\\" DESC LIMIT 1"`;
    try {
      const { stdout } = await execPromise(cmd);
      const id = stdout.trim();
      return id || null;
    } catch (error) {
      return null;
    }
  }

  // Direct connection uses a parameterized query — no escaping needed.
  const client = new Client({
    host: dbConfig.host,
    port: dbConfig.port,
    database: dbConfig.database,
    user: dbConfig.user,
    password: dbConfig.password
  });

  try {
    await client.connect();
    const result = await client.query(query, [key]);
    return result.rows.length > 0 ? result.rows[0].Id : null;
  } catch (error) {
    return null;
  } finally {
    await client.end().catch(() => {});
  }
}
|
|
79
|
+
|
|
80
|
+
/**
 * Deletes a workflow instance row from the database by id.
 *
 * Fixes (mirror getInstanceId):
 *  - docker path: `instanceId` was interpolated unescaped into the SQL
 *    literal; single quotes are now doubled to prevent injection/breakage.
 *  - direct path: `client.end()` was skipped on error, leaking the
 *    connection; a `finally` block now guarantees cleanup.
 *
 * @param {object} dbConfig - Connection settings (see testDbConnection).
 * @param {string} schema - Flow schema name; dashes map to underscores.
 * @param {string} instanceId - Id of the Instances row to delete.
 * @returns {Promise<boolean>} true when the delete command succeeded.
 */
async function deleteWorkflow(dbConfig, schema, instanceId) {
  const dbSchema = schema.replace(/-/g, '_');

  if (dbConfig.useDocker) {
    // Escape single quotes so the id cannot terminate the SQL literal.
    const safeId = String(instanceId).replace(/'/g, "''");
    const cmd = `docker exec ${dbConfig.dockerContainer} psql -U ${dbConfig.user} -d ${dbConfig.database} -c "DELETE FROM \\"${dbSchema}\\".\\"Instances\\" WHERE \\"Id\\" = '${safeId}'"`;
    try {
      await execPromise(cmd);
      return true;
    } catch (error) {
      return false;
    }
  }

  // Direct connection uses a parameterized query — no escaping needed.
  const query = `DELETE FROM "${dbSchema}"."Instances" WHERE "Id" = $1`;
  const client = new Client({
    host: dbConfig.host,
    port: dbConfig.port,
    database: dbConfig.database,
    user: dbConfig.user,
    password: dbConfig.password
  });

  try {
    await client.connect();
    await client.query(query, [instanceId]);
    return true;
  } catch (error) {
    return false;
  } finally {
    await client.end().catch(() => {});
  }
}
|
|
116
|
+
|
|
117
|
+
// Public API of the database helper module.
module.exports = {
  testDbConnection,
  getInstanceId,
  deleteWorkflow
};
|
|
122
|
+
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
const { glob } = require('glob');
const path = require('path');
const fs = require('fs');

// Component folder names this CLI knows how to locate inside a project.
const COMPONENTS = ['Workflows', 'Tasks', 'Schemas', 'Views', 'Functions', 'Extensions'];
|
|
6
|
+
|
|
7
|
+
/**
 * Automatically locates the known component folders inside the project.
 * The first glob match (up to 3 levels deep) that is an existing directory
 * wins for each component.
 *
 * @param {string} projectRoot - Directory to search.
 * @returns {Promise<Object<string,string>>} Map of component name → path;
 *   components that were not found are simply absent.
 */
async function discoverComponents(projectRoot) {
  const discovered = {};

  for (const component of COMPONENTS) {
    try {
      // Search up to 3 directory levels deep, skipping vendored/output dirs.
      const matches = await glob(path.join(projectRoot, '**', component), {
        maxDepth: 3,
        ignore: ['**/node_modules/**', '**/dist/**', '**/.git/**']
      });

      const [first] = matches;
      if (first && fs.existsSync(first) && fs.statSync(first).isDirectory()) {
        discovered[component] = first;
      }
    } catch (error) {
      // Best effort — a failed glob just leaves the component undiscovered.
    }
  }

  return discovered;
}
|
|
36
|
+
|
|
37
|
+
/**
 * Returns the discovered directory for a component, or null when the
 * component was not found.
 *
 * @param {Object<string,string>} discovered - Output of discoverComponents.
 * @param {string} component - Component name to look up.
 * @returns {string|null} Directory path or null.
 */
function getComponentDir(discovered, component) {
  const dir = discovered[component];
  return dir ? dir : null;
}
|
|
43
|
+
|
|
44
|
+
/**
 * Produces a report row for every known component, whether discovered
 * or not, preserving the COMPONENTS ordering.
 *
 * @param {Object<string,string>} discovered - Output of discoverComponents.
 * @returns {Array<{name: string, path: string|null, found: boolean}>}
 */
function listDiscovered(discovered) {
  return COMPONENTS.map((name) => ({
    name,
    path: discovered[name] || null,
    found: Boolean(discovered[name])
  }));
}
|
|
58
|
+
|
|
59
|
+
// Public API of the component-discovery helper module.
module.exports = {
  discoverComponents,
  getComponentDir,
  listDiscovered,
  COMPONENTS
};
|
|
65
|
+
|
|
@@ -0,0 +1,162 @@
|
|
|
1
|
+
const fs = require('fs').promises;
|
|
2
|
+
const path = require('path');
|
|
3
|
+
const { glob } = require('glob');
|
|
4
|
+
const { getInstanceId, deleteWorkflow } = require('./db');
|
|
5
|
+
const { postWorkflow, activateWorkflow, reinitializeSystem } = require('./api');
|
|
6
|
+
|
|
7
|
+
/**
 * Reads a workflow JSON file and extracts its key, version and flow,
 * along with the full parsed document.
 *
 * @param {string} jsonPath - JSON file to read.
 * @returns {Promise<{key: ?string, version: ?string, flow: ?string, data: object}>}
 */
async function getJsonMetadata(jsonPath) {
  const raw = await fs.readFile(jsonPath, 'utf8');
  const parsed = JSON.parse(raw);

  return {
    key: parsed.key || null,
    version: parsed.version || null,
    flow: parsed.flow || null,
    data: parsed
  };
}
|
|
21
|
+
|
|
22
|
+
/**
 * Determines the sys-* flow type from the component folder in a file path.
 * Defaults to 'sys-flows' when no known folder appears in the path.
 *
 * Fix: the original matched only '/'-separated segments, so Windows
 * backslash paths always fell through to the default; separators are
 * now normalized before matching. POSIX behavior is unchanged.
 *
 * @param {string} jsonPath - Path of the workflow JSON file.
 * @returns {string} One of the sys-* flow identifiers.
 */
function detectFlowFromPath(jsonPath) {
  // Lowercase and normalize separators so both / and \ paths classify.
  const pathLower = jsonPath.toLowerCase().replace(/\\/g, '/');

  if (pathLower.includes('/workflows/')) return 'sys-flows';
  if (pathLower.includes('/tasks/')) return 'sys-tasks';
  if (pathLower.includes('/schemas/')) return 'sys-schemas';
  if (pathLower.includes('/views/')) return 'sys-views';
  if (pathLower.includes('/functions/')) return 'sys-functions';
  if (pathLower.includes('/extensions/')) return 'sys-extensions';

  return 'sys-flows'; // default
}
|
|
37
|
+
|
|
38
|
+
/**
 * Deploys a single workflow JSON: removes any existing DB instance
 * (matched by key), POSTs the definition to the API, then activates
 * the newly created instance.
 *
 * @param {string} jsonPath - Workflow JSON file to deploy.
 * @param {object} dbConfig - Database connection settings.
 * @param {object} apiConfig - { baseUrl, version } of the workflow API.
 * @returns {Promise<object>} Summary: key, version, flow, instanceId,
 *   wasDeleted (whether a previous instance was removed first).
 * @throws {Error} When key/version are missing or the API returns no id.
 */
async function processWorkflow(jsonPath, dbConfig, apiConfig) {
  const metadata = await getJsonMetadata(jsonPath);
  if (!metadata.key || !metadata.version) {
    throw new Error("JSON'da key veya version bulunamadı");
  }

  const flow = metadata.flow || detectFlowFromPath(jsonPath);

  // 1-2. Remove the previous instance, if one exists.
  const existingId = await getInstanceId(dbConfig, flow, metadata.key, metadata.version);
  if (existingId) {
    await deleteWorkflow(dbConfig, flow, existingId);
  }

  // 3. Publish the definition and read back the new instance id.
  const { baseUrl, version: apiVersion } = apiConfig;
  const postResult = await postWorkflow(baseUrl, apiVersion, flow, metadata.data);
  const newInstanceId = postResult.id || postResult.Id;
  if (!newInstanceId) {
    throw new Error('API POST başarılı ama instance ID alınamadı');
  }

  // 4. Activate the freshly posted instance.
  await activateWorkflow(baseUrl, apiVersion, flow, newInstanceId, metadata.version);

  return {
    key: metadata.key,
    version: metadata.version,
    flow: flow,
    instanceId: newInstanceId,
    wasDeleted: !!existingId
  };
}
|
|
80
|
+
|
|
81
|
+
/**
 * Finds workflow JSON files with uncommitted changes according to
 * `git status`, excluding package/config files and anything outside
 * the project root.
 *
 * @param {string} projectRoot - Results are restricted to this directory.
 * @returns {Promise<string[]>} Absolute normalized paths of changed JSON
 *   files that still exist on disk; empty array when git is unavailable.
 */
async function getGitChangedJson(projectRoot) {
  const { exec } = require('child_process');
  const util = require('util');
  const execPromise = util.promisify(exec);
  const fs = require('fs');

  try {
    // Porcelain paths are relative to the repository root, not projectRoot.
    const { stdout: gitRoot } = await execPromise('git rev-parse --show-toplevel', { cwd: projectRoot });
    const repoRoot = gitRoot.trim();

    const { stdout } = await execPromise('git status --porcelain', { cwd: repoRoot });
    const normalizedRoot = path.normalize(projectRoot);

    return stdout
      .split('\n')
      .filter(Boolean)
      .filter((line) => line.includes('.json'))
      // Porcelain format: "XY filename" — strip the 2-char status + space.
      .map((line) => path.normalize(path.join(repoRoot, line.substring(3).trim())))
      .filter((file) => {
        const fileName = path.basename(file);
        return (
          file.endsWith('.json') &&
          !fileName.includes('package') &&
          !fileName.includes('config') &&
          fs.existsSync(file) &&
          file.startsWith(normalizedRoot)
        );
      });
  } catch (error) {
    // Not a git repo / git not installed — treat as "no changes".
    return [];
  }
}
|
|
125
|
+
|
|
126
|
+
/**
 * Lists every workflow JSON file inside a component directory, excluding
 * package, config and diagram files.
 *
 * @param {string} componentDir - Component directory to scan.
 * @returns {Promise<string[]>} Matching JSON file paths.
 */
async function findAllJsonInComponent(componentDir) {
  return glob(path.join(componentDir, '**/*.json'), {
    ignore: ['**/package*.json', '**/*config*.json', '**/*.diagram.json']
  });
}
|
|
136
|
+
|
|
137
|
+
/**
 * Collects workflow JSON files across every discovered component.
 * Components are scanned sequentially; missing (falsy) entries are skipped.
 *
 * @param {Object<string,string>} discovered - Output of discoverComponents.
 * @returns {Promise<string[]>} All JSON file paths, concatenated.
 */
async function findAllJson(discovered) {
  const allJsons = [];

  for (const [, componentDir] of Object.entries(discovered)) {
    if (!componentDir) continue;
    const jsons = await findAllJsonInComponent(componentDir);
    allJsons.push(...jsons);
  }

  return allJsons;
}
|
|
153
|
+
|
|
154
|
+
// Public API of the workflow deployment helper module.
module.exports = {
  getJsonMetadata,
  detectFlowFromPath,
  processWorkflow,
  getGitChangedJson,
  findAllJsonInComponent,
  findAllJson
};
|
|
162
|
+
|