flowmo 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +131 -0
- package/bin/flowmo.js +72 -0
- package/package.json +39 -0
- package/src/commands/db-query.js +127 -0
- package/src/commands/db-seed.js +37 -0
- package/src/commands/db-setup.js +43 -0
- package/src/lib/db.js +36 -0
- package/src/lib/parser.js +105 -0
- package/src/lib/table.js +72 -0
package/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Izam Basiron
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
ADDED
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
# flowmo
|
|
2
|
+
|
|
3
|
+
The local, zero-infrastructure prototyping engine for OutSystems-Lite workflows.
|
|
4
|
+
|
|
5
|
+
Flowmo provides a lightning-fast, offline-first development environment to rapidly prototype screens, test complex SQL logic, and design workflows before ever touching an OutSystems Developer Cloud (ODC) environment.
|
|
6
|
+
|
|
7
|
+
Powered by [PGLite](https://pglite.dev/) (WASM PostgreSQL) — no containers, no servers, no cloud dependencies.
|
|
8
|
+
|
|
9
|
+
## Quick Start
|
|
10
|
+
|
|
11
|
+
Scaffold a new project with [`create-flowmo`](https://www.npmjs.com/package/create-flowmo), then install `flowmo` inside it:
|
|
12
|
+
|
|
13
|
+
```bash
|
|
14
|
+
npx create-flowmo
|
|
15
|
+
```
|
|
16
|
+
|
|
17
|
+
You'll be prompted for a project name, target platform, and app type. Then:
|
|
18
|
+
|
|
19
|
+
```bash
|
|
20
|
+
cd my-prototype
|
|
21
|
+
npm install
|
|
22
|
+
```
|
|
23
|
+
|
|
24
|
+
The generated project already has `flowmo` listed as a dependency and the `database/` folder pre-configured.
|
|
25
|
+
|
|
26
|
+
## Commands
|
|
27
|
+
|
|
28
|
+
### `flowmo db:setup`
|
|
29
|
+
|
|
30
|
+
Reads `database/schema.sql`, drops the existing schema, and provisions the local database from scratch.
|
|
31
|
+
|
|
32
|
+
```bash
|
|
33
|
+
npx flowmo db:setup
|
|
34
|
+
```
|
|
35
|
+
|
|
36
|
+
Run this any time you change your schema.
|
|
37
|
+
|
|
38
|
+
### `flowmo db:seed`
|
|
39
|
+
|
|
40
|
+
Reads `database/seeds.sql` and inserts dummy data.
|
|
41
|
+
|
|
42
|
+
```bash
|
|
43
|
+
npx flowmo db:seed
|
|
44
|
+
```
|
|
45
|
+
|
|
46
|
+
### `flowmo db:query <file> [params-json]`
|
|
47
|
+
|
|
48
|
+
Executes a `.sql` or `.advance.sql` file against the local database and prints results as an ASCII table.
|
|
49
|
+
|
|
50
|
+
**Standard SQL:**
|
|
51
|
+
```bash
|
|
52
|
+
npx flowmo db:query database/queries/get_users.sql
|
|
53
|
+
```
|
|
54
|
+
|
|
55
|
+
**OutSystems Advanced SQL** (`.advance.sql`) with parameters:
|
|
56
|
+
```bash
|
|
57
|
+
npx flowmo db:query database/queries/get_user.advance.sql '{"UserId": 1, "Status": true}'
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
The Advanced SQL parser handles OutSystems syntax automatically:
|
|
61
|
+
|
|
62
|
+
| OutSystems syntax | Translated to |
|
|
63
|
+
|---|---|
|
|
64
|
+
| `{Entity}.[Attribute]` | `entity.attribute` |
|
|
65
|
+
| `{Entity}` | `entity` |
|
|
66
|
+
| `@ParamName` | `$1`, `$2`, … |
|
|
67
|
+
|
|
68
|
+
**Example `.advance.sql`:**
|
|
69
|
+
```sql
|
|
70
|
+
SELECT {Users}.[Name], {Users}.[Email]
|
|
71
|
+
FROM {Users}
|
|
72
|
+
WHERE {Users}.[Id] = @UserId AND {Users}.[IsActive] = @Status
|
|
73
|
+
```
|
|
74
|
+
|
|
75
|
+
**Output:**
|
|
76
|
+
```
|
|
77
|
+
┌─────────┬──────────────────┐
|
|
78
|
+
│ Name │ Email │
|
|
79
|
+
├─────────┼──────────────────┤
|
|
80
|
+
│ Izam B. │ izam@example.com │
|
|
81
|
+
└─────────┴──────────────────┘
|
|
82
|
+
(1 row)
|
|
83
|
+
```
|
|
84
|
+
|
|
85
|
+
## Project Structure
|
|
86
|
+
|
|
87
|
+
A scaffolded Flowmo project looks like this:
|
|
88
|
+
|
|
89
|
+
```
|
|
90
|
+
my-prototype/
|
|
91
|
+
├── database/
|
|
92
|
+
│ ├── schema.sql # DDL — your single source of truth
|
|
93
|
+
│ ├── seeds.sql # Dummy data for UI prototyping
|
|
94
|
+
│ └── queries/ # .sql and .advance.sql files
|
|
95
|
+
├── logic/ # .flowchart.md server action flows
|
|
96
|
+
├── screens/ # .visual.html UI prototypes
|
|
97
|
+
├── theme/ # OutSystems UI CSS
|
|
98
|
+
└── .flowmo/ # Local database (auto-generated, gitignored)
|
|
99
|
+
└── database/
|
|
100
|
+
```
|
|
101
|
+
|
|
102
|
+
## The AI Bridge (ODC to Flowmo)
|
|
103
|
+
|
|
104
|
+
Because ODC restricts direct database access, the recommended workflow to mirror your schema locally is:
|
|
105
|
+
|
|
106
|
+
1. Open ODC Service Studio and ask **Mentor AI** to generate a PostgreSQL `CREATE TABLE` script from your data model.
|
|
107
|
+
2. Paste the output into `database/schema.sql`.
|
|
108
|
+
3. Run `npx flowmo db:setup`.
|
|
109
|
+
|
|
110
|
+
If you have the Flowmo Copilot skills installed (bundled by `create-flowmo`), your AI assistant can generate the schema directly from a description or screenshot of your ODC data model.
|
|
111
|
+
|
|
112
|
+
## VS Code Ecosystem
|
|
113
|
+
|
|
114
|
+
Flowmo CLI works alongside the Flowmo VS Code extensions:
|
|
115
|
+
|
|
116
|
+
- **[Flowmo Visual Inspector](https://marketplace.visualstudio.com/items?itemName=flowmo.flowmo-visual-inspector)** — Live layer panel and element inspector for `.visual.html` screens.
|
|
117
|
+
- **[Flowmo Flowchart Editor](https://marketplace.visualstudio.com/items?itemName=flowmo.flowmo-flowchart-editor)** — Drag-and-drop node editor for `.flowchart.md` server action flows.
|
|
118
|
+
- **[PGlite Explorer](https://marketplace.visualstudio.com/search?term=pglite&target=VSCode)** (third-party) — Browse your `.flowmo/database` tables and run queries directly in VS Code.
|
|
119
|
+
|
|
120
|
+
Install the Visual Inspector and Flowchart Editor together with the **[Flowmo Extension Pack](https://marketplace.visualstudio.com/items?itemName=flowmo.flowmo-extension-pack)**.
|
|
121
|
+
|
|
122
|
+
## Links and Support
|
|
123
|
+
|
|
124
|
+
- Web: [flowmo.lol](https://flowmo.lol)
|
|
125
|
+
- Issues: [GitHub Issues](https://github.com/izambasiron/flowmo/issues)
|
|
126
|
+
- Email: [support@flowmo.lol](mailto:support@flowmo.lol)
|
|
127
|
+
- Support model: best-effort, no response-time guarantee
|
|
128
|
+
|
|
129
|
+
## License
|
|
130
|
+
|
|
131
|
+
MIT
|
package/bin/flowmo.js
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
#!/usr/bin/env node
import picocolors from 'picocolors';

// argv layout: [node, script, command, ...command args]
const [, , command, ...args] = process.argv;

const HELP = `
${picocolors.cyan(`
      ___         ___         ___
     /\\__\\       /\\  \\       /\\__\\
    /:/  /      /::\\  \\     /:/  /
   /:/  /      /:/\\:\\  \\   /:/  /
  /:/  /      /:/  \\:\\  \\ /:/  /
 /:/__/      /:/__/ \\:\\__/:/__/
 \\:\\  \\      \\:\\  \\ /:/ /\\:\\  \\
  \\:\\  \\      \\:\\  /:/ /  \\:\\  \\
   \\:\\  \\      \\:\\/:/ /    \\:\\  \\
    \\:\\__\\      \\::/  /     \\:\\__\\
     \\/__/       \\/__/       \\/__/
`)}
${picocolors.bold('Flowmo CLI')} — Local prototyping engine for OutSystems-Lite workflows.

${picocolors.bold('Usage:')}
  flowmo <command> [options]

${picocolors.bold('Commands:')}
  ${picocolors.cyan('db:setup')}                        Reset and provision the local database from database/schema.sql
  ${picocolors.cyan('db:seed')}                         Insert seed data from database/seeds.sql
  ${picocolors.cyan('db:query')} <file> [params-json]   Execute a .sql or .advance.sql query file
    ${picocolors.dim('--limit <n>')}                   Max rows to show (default: 10)
    ${picocolors.dim('--simple')}                      Plain key: value output, no truncation

${picocolors.bold('Examples:')}
  flowmo db:setup
  flowmo db:seed
  flowmo db:query database/queries/get_users.sql
  flowmo db:query database/queries/get_user.advance.sql '{"UserId": 1}'
  flowmo db:query database/queries/get_users.sql --limit 25
  flowmo db:query database/queries/get_users.sql --simple
`;

/**
 * CLI entry point: print help or dispatch the requested sub-command.
 *
 * Exit codes: 0 on success (including an explicit --help/-h request),
 * 1 on a missing/unknown command or a command failure.
 */
async function run() {
  if (!command || command === '--help' || command === '-h') {
    console.log(HELP);
    // An explicit help request is a success; invoking with no command at all
    // is a usage error. (Previously these two exit codes were inverted.)
    process.exit(command ? 0 : 1);
  }

  // Commands are lazy-loaded so the help path stays fast and dependency-free.
  const { dbSetup } = await import('../src/commands/db-setup.js');
  const { dbSeed } = await import('../src/commands/db-seed.js');
  const { dbQuery } = await import('../src/commands/db-query.js');

  const commands = {
    'db:setup': () => dbSetup(),
    'db:seed': () => dbSeed(),
    // dbQuery receives all remaining args; it re-joins trailing positionals
    // itself in case the shell split the JSON parameter string.
    'db:query': () => dbQuery(args),
  };

  if (!commands[command]) {
    console.error(picocolors.red(`Unknown command: ${command}`));
    console.log(HELP);
    process.exit(1);
  }

  try {
    await commands[command]();
  } catch (err) {
    console.error(picocolors.red(`\nError: ${err.message}`));
    process.exit(1);
  }
}

run();
|
package/package.json
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "flowmo",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Local, zero-infrastructure prototyping engine for OutSystems-Lite workflows",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"license": "MIT",
|
|
7
|
+
"author": {
|
|
8
|
+
"name": "Flowmo",
|
|
9
|
+
"email": "support@flowmo.lol"
|
|
10
|
+
},
|
|
11
|
+
"homepage": "https://flowmo.lol",
|
|
12
|
+
"repository": {
|
|
13
|
+
"type": "git",
|
|
14
|
+
"url": "https://github.com/izambasiron/flowmo"
|
|
15
|
+
},
|
|
16
|
+
"bugs": {
|
|
17
|
+
"url": "https://github.com/izambasiron/flowmo/issues"
|
|
18
|
+
},
|
|
19
|
+
"keywords": [
|
|
20
|
+
"outsystems",
|
|
21
|
+
"flowmo",
|
|
22
|
+
"pglite",
|
|
23
|
+
"postgres",
|
|
24
|
+
"prototyping",
|
|
25
|
+
"low-code"
|
|
26
|
+
],
|
|
27
|
+
"bin": {
|
|
28
|
+
"flowmo": "./bin/flowmo.js"
|
|
29
|
+
},
|
|
30
|
+
"files": [
|
|
31
|
+
"bin",
|
|
32
|
+
"src"
|
|
33
|
+
],
|
|
34
|
+
"dependencies": {
|
|
35
|
+
"@electric-sql/pglite": "^0.2.0",
|
|
36
|
+
"cli-table3": "^0.6.3",
|
|
37
|
+
"picocolors": "^1.1.0"
|
|
38
|
+
}
|
|
39
|
+
}
|
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
import fs from 'fs';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import picocolors from 'picocolors';
|
|
4
|
+
import { getDb, closeDb } from '../lib/db.js';
|
|
5
|
+
import { parseAdvancedSql } from '../lib/parser.js';
|
|
6
|
+
import { renderTable } from '../lib/table.js';
|
|
7
|
+
|
|
8
|
+
/**
 * Parse a JSON parameter string coming straight from the shell.
 *
 * On Windows, cmd.exe (invoked via npx) strips double quotes from arguments,
 * turning {"Key":"val"} into {Key:val}. This helper attempts to recover by
 * re-quoting bare keys and string values before parsing.
 *
 * @param {string | undefined} raw - The raw CLI argument; may be quote-mangled.
 * @returns {object} The parsed parameters; {} when `raw` is falsy.
 * @throws {Error} When neither strict parsing nor quote recovery succeeds.
 */
function parseJsonArg(raw) {
  if (!raw) return {};

  // 1. Try strict JSON first — covers correctly-quoted input.
  try {
    return JSON.parse(raw);
  } catch {}

  // 2. Try to restore quotes stripped by Windows cmd.exe.
  //    The four replace() passes run in sequence over the same string.
  try {
    const fixed = raw
      // Restore empty strings collapsed to a lone " by cmd.exe: Key: ", or Key: "}
      .replace(/:\s*"(\s*[,}])/g, ': ""$1')
      // Quote unquoted object keys: {Key: or ,Key:
      .replace(/([{,]\s*)([A-Za-z_$][A-Za-z0-9_$-]*)\s*:/g, '$1"$2":')
      // Restore empty string values stripped by cmd.exe: "key":, or "key":}
      .replace(/("[\w$-]+":\s*)(,|})/g, (m, key, end) => `${key}""${end}`)
      // Quote unquoted string values (leave numbers, booleans, null untouched)
      .replace(/:\s*([^",{\[\]}\s][^,}\]]*?)(\s*[,}])/g, (m, val, end) => {
        const t = val.trim();
        if (t === 'true' || t === 'false' || t === 'null' || /^-?\d+(\.\d+)?$/.test(t)) {
          return m; // JSON primitives stay unquoted
        }
        return `:"${t}"${end}`;
      });
    return JSON.parse(fixed);
  } catch {}

  throw new Error(
    `Could not parse parameters as JSON.\nReceived: ${raw}\n\n` +
    `On Windows PowerShell, wrap the JSON in single quotes:\n` +
    ` '{"Key":"Value"}'`
  );
}
|
|
49
|
+
|
|
50
|
+
/**
 * Execute a .sql or .advance.sql file against the local database and render
 * the result set.
 *
 * Flags (anywhere in rawArgs): --simple, --limit <n>, --limit=<n>.
 * The first positional is the query file; remaining positionals are re-joined
 * with spaces to recover a JSON parameter string the shell may have split.
 *
 * @param {string[]} rawArgs - CLI arguments after the command name.
 * @throws {Error} On a missing/empty file, unparseable params JSON, or
 *   missing required @params for an Advanced SQL query.
 */
export async function dbQuery(rawArgs = []) {
  // Parse flags: --simple, --limit <n> / --limit=<n>
  let simple = false;
  let limit = 10;
  const positional = [];

  for (let i = 0; i < rawArgs.length; i++) {
    const a = rawArgs[i];
    if (a === '--simple') {
      simple = true;
    } else if (a === '--limit') {
      limit = Math.max(1, parseInt(rawArgs[++i], 10) || 10);
    } else if (a.startsWith('--limit=')) {
      limit = Math.max(1, parseInt(a.slice(8), 10) || 10);
    } else {
      positional.push(a);
    }
  }

  const filePath = positional[0];
  // Re-join trailing positionals in case the shell split the JSON string.
  const paramsJson = positional.length > 1 ? positional.slice(1).join(' ') : undefined;

  if (!filePath) {
    throw new Error('Usage: flowmo db:query <file> [params-json] [--limit <n>] [--simple]');
  }

  const resolved = path.resolve(process.cwd(), filePath);

  if (!fs.existsSync(resolved)) {
    throw new Error(`File not found: ${filePath}`);
  }

  const rawSql = fs.readFileSync(resolved, 'utf-8').trim();

  if (!rawSql) {
    throw new Error(`Query file is empty: ${filePath}`);
  }

  const isAdvanced = resolved.endsWith('.advance.sql');
  let sql = rawSql;
  let params = [];

  if (isAdvanced) {
    // Parse OutSystems syntax and map named @params to Postgres positional bindings.
    const { sql: parsedSql, paramNames } = parseAdvancedSql(rawSql);
    sql = parsedSql;

    if (paramNames.length > 0) {
      const paramsObj = paramsJson ? parseJsonArg(paramsJson) : {};

      // Validate all required params are supplied before touching the DB.
      const missing = paramNames.filter((n) => !(n in paramsObj));
      if (missing.length > 0) {
        throw new Error(
          `Missing required parameter(s): ${missing.map((n) => `@${n}`).join(', ')}\n` +
          `Pass them as a JSON string, e.g.: '${JSON.stringify(
            Object.fromEntries(paramNames.map((n) => [n, '...']))
          )}'`
        );
      }

      params = paramNames.map((n) => paramsObj[n]);

      const binding = paramNames.map((n, i) => `@${n} → $${i + 1}`).join(', ');
      console.log(picocolors.dim(`Bindings: ${binding}\n`));
    }
  } else if (paramsJson) {
    // For plain .sql files, accept a JSON array of positional params.
    const parsed = parseJsonArg(paramsJson);
    params = Array.isArray(parsed) ? parsed : Object.values(parsed);
  }

  const db = await getDb();
  try {
    const result = await db.query(sql, params);
    renderTable(result.fields, result.rows, { simple, limit });
  } finally {
    // Always release the PGlite file handle, even when the query fails;
    // previously a failing query left the database open.
    await closeDb();
  }
}
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
import fs from 'fs';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import picocolors from 'picocolors';
|
|
4
|
+
import { getDb, closeDb } from '../lib/db.js';
|
|
5
|
+
|
|
6
|
+
/**
 * Locate a database asset, preferring the conventional database/ folder and
 * falling back to the project root.
 *
 * @param {string} filename - Bare file name, e.g. "seeds.sql".
 * @returns {string | null} Absolute path of the first match, or null.
 */
function resolveFile(filename) {
  for (const dir of ['database', '.']) {
    const candidate = path.join(process.cwd(), dir, filename);
    if (fs.existsSync(candidate)) {
      return candidate;
    }
  }
  return null;
}
|
|
13
|
+
|
|
14
|
+
/**
 * Insert seed data from seeds.sql into the local database.
 *
 * Looks for the file in database/seeds.sql, then ./seeds.sql.
 *
 * @throws {Error} When seeds.sql is missing or empty, or the SQL fails.
 */
export async function dbSeed() {
  const seedsPath = resolveFile('seeds.sql');

  if (!seedsPath) {
    throw new Error(
      'seeds.sql not found. Looked in database/seeds.sql and ./seeds.sql.\nAre you in a Flowmo project directory?'
    );
  }

  const seeds = fs.readFileSync(seedsPath, 'utf-8').trim();

  if (!seeds) {
    throw new Error(`${seedsPath} is empty. Add your INSERT statements first.`);
  }

  console.log(picocolors.cyan('Seeding database…'));
  console.log(picocolors.dim(`Using: ${seedsPath}`));

  const db = await getDb();
  try {
    await db.exec(seeds);
  } finally {
    // Always release the PGlite file handle, even if the seed script fails;
    // previously a failing exec left the database open.
    await closeDb();
  }

  console.log(picocolors.green(`✓ Database seeded from ${seedsPath}`));
}
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
import fs from 'fs';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import picocolors from 'picocolors';
|
|
4
|
+
import { getDb, closeDb } from '../lib/db.js';
|
|
5
|
+
|
|
6
|
+
/**
 * Find `filename` in the database/ folder first, then in the current
 * working directory.
 *
 * @param {string} filename - Bare file name, e.g. "schema.sql".
 * @returns {string | null} Absolute path of the first existing candidate, or null.
 */
function resolveFile(filename) {
  const preferred = path.join(process.cwd(), 'database', filename);
  if (fs.existsSync(preferred)) {
    return preferred;
  }
  const fallback = path.join(process.cwd(), filename);
  return fs.existsSync(fallback) ? fallback : null;
}
|
|
13
|
+
|
|
14
|
+
/**
 * Drop and re-provision the local database from schema.sql.
 *
 * Looks for the file in database/schema.sql, then ./schema.sql. The entire
 * public schema is dropped first so all user objects are removed cleanly.
 *
 * @throws {Error} When schema.sql is missing or empty, or the DDL fails.
 */
export async function dbSetup() {
  const schemaPath = resolveFile('schema.sql');

  if (!schemaPath) {
    throw new Error(
      'schema.sql not found. Looked in database/schema.sql and ./schema.sql.\nAre you in a Flowmo project directory?'
    );
  }

  const schema = fs.readFileSync(schemaPath, 'utf-8').trim();

  if (!schema) {
    throw new Error(`${schemaPath} is empty. Add your CREATE TABLE statements first.`);
  }

  console.log(picocolors.cyan('Setting up database…'));
  console.log(picocolors.dim(`Using: ${schemaPath}`));

  const db = await getDb();
  try {
    // Wipe the public schema so all user objects are removed cleanly.
    await db.exec('DROP SCHEMA public CASCADE; CREATE SCHEMA public;');

    // Apply the user-provided DDL.
    await db.exec(schema);
  } finally {
    // Always release the PGlite file handle, even when the DDL fails;
    // previously a failing exec left the database open.
    await closeDb();
  }

  console.log(picocolors.green(`✓ Database schema applied from ${schemaPath}`));
}
|
package/src/lib/db.js
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import { PGlite } from '@electric-sql/pglite';
|
|
2
|
+
import fs from 'fs';
|
|
3
|
+
import path from 'path';
|
|
4
|
+
|
|
5
|
+
let _db = null;
|
|
6
|
+
|
|
7
|
+
/** Absolute path of the on-disk PGlite data directory: <cwd>/.flowmo/database. */
export function getDbPath() {
  const projectRoot = process.cwd();
  return path.join(projectRoot, '.flowmo', 'database');
}
|
|
10
|
+
|
|
11
|
+
/**
 * Return the shared PGlite instance, creating and initialising it on first use.
 * Data is persisted to <cwd>/.flowmo/database.
 */
export async function getDb() {
  if (_db !== null) {
    return _db;
  }

  // Ensure the persistence directory exists before PGlite opens it.
  const dataDir = getDbPath();
  fs.mkdirSync(dataDir, { recursive: true });

  _db = new PGlite(dataDir);
  await _db.waitReady;

  return _db;
}
|
|
26
|
+
|
|
27
|
+
/**
 * Close the shared database connection (no-op when nothing is open).
 * Call at the end of a command to release file locks.
 */
export async function closeDb() {
  if (_db === null) return;
  await _db.close();
  _db = null;
}
|
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
/**
 * Converts a PascalCase or camelCase identifier to snake_case.
 *
 * Examples:
 *   RollingWeek      -> rolling_week
 *   WorkLog          -> work_log
 *   SSRClient        -> ssr_client
 *   NonBillableRefId -> non_billable_ref_id
 *   TaskRoleId       -> task_role_id
 *
 * Trailing underscores are preserved intentionally, e.g.
 * NonBillableRefTaskId_ -> non_billable_ref_task_id_ (rolling_week uses a
 * trailing _ to disambiguate from identically-named columns on other tables).
 */
function toSnakeCase(str) {
  return str
    .replace(/([A-Z]+)([A-Z][a-z])/g, '$1_$2')
    .replace(/([a-z\d])([A-Z])/g, '$1_$2')
    .toLowerCase();
}

/**
 * Parses an OutSystems Advanced SQL file into standard PostgreSQL.
 *
 * Transformations applied (in order), outside of string literals:
 *   {Entity}.[Attribute] -> entity_name.attribute_name (snake_case)
 *   {Entity}             -> entity_name (snake_case)
 *   AS PascalAlias       -> AS snake_alias (CTE cols + table aliases)
 *   alias.[Attribute]    -> snake_alias.attribute_name (snake_case both sides)
 *   [Identifier]         -> identifier (standalone bracket refs)
 *   @ParamName           -> $1, $2, ... (first-appearance order)
 *
 * Single-quoted SQL string literals (with '' as the escaped quote) are left
 * untouched, so literal values such as 'JohnDoe' or an '@' inside a literal
 * are never rewritten. (Previously every rewrite also ran inside literals.)
 *
 * @param {string} sql - The raw Advanced SQL text.
 * @returns {{ sql: string, paramNames: string[] }} paramNames[i] is the
 *   original @-prefixed name that maps to positional binding $(i+1).
 */
export function parseAdvancedSql(sql) {
  const paramNames = [];
  const paramIndex = {};

  // Split on single-quoted literals, capturing them: odd-indexed segments are
  // the literals themselves and pass through unmodified.
  const segments = sql.split(/('(?:[^']|'')*')/);

  const transform = (chunk) => {
    // 1. Replace {Entity}.[Attribute] with snake_case table and column.
    //    'user' is a PostgreSQL reserved word; quote it so it resolves to our view.
    let parsed = chunk.replace(/\{(\w+)\}\.\[(\w+)\]/g, (_, entity, attr) => {
      const table = toSnakeCase(entity);
      return `${table === 'user' ? '"user"' : table}.${toSnakeCase(attr)}`;
    });

    // 2. Replace bare {Entity} (table reference without attribute).
    parsed = parsed.replace(/\{(\w+)\}/g, (_, entity) => {
      const name = toSnakeCase(entity);
      return name === 'user' ? '"user"' : name;
    });

    // 3a. Convert CTE definition names in the WITH clause to snake_case.
    //     The name comes BEFORE "AS", so it must be handled separately.
    //     e.g. WITH TimesheetSkeleton AS ( -> WITH timesheet_skeleton AS (
    parsed = parsed.replace(/\bWITH\s+([A-Z][A-Za-z0-9]+)\s+AS\b/g, (_, name) => {
      return `WITH ${toSnakeCase(name)} AS`;
    });

    // 3b. Convert bare "AS PascalCase" aliases to snake_case.
    //     Applies to CTE column aliases and table aliases in JOINs.
    //     Bracket aliases (AS [Col]) are left for step 5.
    parsed = parsed.replace(/\b(?:AS|as)\s+([A-Z][A-Za-z0-9]+)\b/g, (_, alias) => {
      return `AS ${toSnakeCase(alias)}`;
    });

    // 4. Replace alias.[Attribute] bracket notation still remaining,
    //    snake_casing the alias prefix so it matches step 3's output.
    //    e.g. NBRefTask.[Id] -> nb_ref_task.id
    parsed = parsed.replace(/(\w+)\.\[(\w+)\]/g, (_, alias, attr) => {
      return `${toSnakeCase(alias)}.${toSnakeCase(attr)}`;
    });

    // 5. Replace remaining standalone [Identifier] brackets.
    //    e.g. ORDER BY [Col] -> ORDER BY col
    parsed = parsed.replace(/\[(\w+)\]/g, (_, name) => name.toLowerCase());

    // 6. Collect @params in order of first appearance and replace with $N.
    //    Must run BEFORE step 7 so @UserId is captured as "UserId" and not
    //    first converted to "user_id". The index map is shared across
    //    segments so ordering is global over the whole statement.
    parsed = parsed.replace(/@(\w+)/g, (_, name) => {
      if (!(name in paramIndex)) {
        paramNames.push(name);
        paramIndex[name] = paramNames.length;
      }
      return `$${paramIndex[name]}`;
    });

    // 7. Convert any remaining bare PascalCase identifiers to snake_case.
    //    Catches CTE references like FROM TimesheetSkeleton. SQL keywords are
    //    ALL CAPS and never match because the regex requires at least one
    //    lowercase letter in the word.
    parsed = parsed.replace(/\b([A-Z][a-zA-Z0-9]*[a-z][a-zA-Z0-9]*)\b/g, (_, name) =>
      toSnakeCase(name)
    );

    return parsed;
  };

  const rewritten = segments
    .map((segment, i) => (i % 2 === 1 ? segment : transform(segment)))
    .join('');

  return { sql: rewritten, paramNames };
}
|
package/src/lib/table.js
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import Table from 'cli-table3';
|
|
2
|
+
import picocolors from 'picocolors';
|
|
3
|
+
|
|
4
|
+
// Terminal layout for the boxed key/value output.
const TERMINAL_WIDTH = process.stdout.columns || 120;
const KEY_COL_WIDTH = 28;
// 7 = borders + padding. Clamp to a sane minimum so a very narrow terminal
// can never yield a zero or negative column width (which would break cli-table3).
const VAL_COL_WIDTH = Math.max(20, TERMINAL_WIDTH - KEY_COL_WIDTH - 7);

// Outer frame + vertical divider only — no horizontal inner lines
const CHARS = {
  top: '─', 'top-mid': '─', 'top-left': '┌', 'top-right': '┐',
  bottom: '─', 'bottom-mid': '─', 'bottom-left': '└', 'bottom-right': '┘',
  left: '│', 'left-mid': '', mid: '', 'mid-mid': '',
  right: '│', 'right-mid': '',
  middle: '│',
};
|
|
16
|
+
|
|
17
|
+
/** Render a single cell value: NULL placeholder, dates shortened to YYYY-MM-DD. */
function formatVal(val) {
  // SQL NULL (and JS undefined) render as a dimmed placeholder.
  if (val === null || val === undefined) {
    return picocolors.dim('NULL');
  }
  // Dates — and ISO-8601 timestamp strings — collapse to the date part.
  if (val instanceof Date) {
    return val.toISOString().slice(0, 10);
  }
  const isIsoTimestamp = typeof val === 'string' && /^\d{4}-\d{2}-\d{2}T/.test(val);
  return isIsoTimestamp ? val.slice(0, 10) : String(val);
}
|
|
23
|
+
|
|
24
|
+
/** Clip `str` to `maxLen` characters, using a single ellipsis as the final character. */
function truncate(str, maxLen) {
  return str.length > maxLen ? `${str.slice(0, maxLen - 1)}…` : str;
}
|
|
28
|
+
|
|
29
|
+
/**
 * Renders query results as a vertical key/value block per row.
 *
 * @param {Array<{ name: string }>} fields - Column descriptors from the driver.
 * @param {Array<Record<string, unknown>>} rows - Result rows.
 * @param {{ simple?: boolean, limit?: number }} opts - simple: plain key: value
 *   output with no truncation; limit: max rows displayed (default 10).
 */
export function renderTable(fields, rows, { simple = false, limit = 10 } = {}) {
  if (rows.length === 0) {
    console.log(picocolors.dim('(0 rows)'));
    return;
  }

  const total = rows.length;
  const visible = rows.slice(0, limit);

  // Dimmed "-[ Row N ]----" separator printed before every row.
  const printRowHeader = (i) =>
    console.log(picocolors.dim(`-[ Row ${i + 1} ]` + '-'.repeat(30)));

  visible.forEach((row, i) => {
    printRowHeader(i);

    if (simple) {
      // Plain key: value lines — nothing is truncated.
      for (const field of fields) {
        console.log(`${picocolors.bold(field.name)}: ${formatVal(row[field.name])}`);
      }
      return;
    }

    // Boxed two-column layout; long values are clipped to the column width.
    const box = new Table({
      chars: CHARS,
      style: { head: [], border: ['grey'] },
      colWidths: [KEY_COL_WIDTH, VAL_COL_WIDTH],
    });
    for (const field of fields) {
      box.push([
        picocolors.bold(field.name),
        truncate(formatVal(row[field.name]), VAL_COL_WIDTH - 2),
      ]);
    }
    console.log(box.toString());
  });

  const rowLabel = `(${visible.length} row${visible.length === 1 ? '' : 's'})`;
  const limitNote = total > limit
    ? picocolors.yellow(` — showing ${limit} of ${total}, use --limit to change`)
    : '';
  console.log(picocolors.dim(rowLabel) + limitNote);
}
|