@agentuity/cli 0.0.55 → 0.0.57
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.d.ts.map +1 -1
- package/dist/cli.js +41 -2
- package/dist/cli.js.map +1 -1
- package/dist/cmd/ai/schema/generate.d.ts +3 -0
- package/dist/cmd/ai/schema/generate.d.ts.map +1 -0
- package/dist/cmd/ai/schema/generate.js +50 -0
- package/dist/cmd/ai/schema/generate.js.map +1 -0
- package/dist/cmd/ai/schema/index.d.ts.map +1 -1
- package/dist/cmd/ai/schema/index.js +2 -1
- package/dist/cmd/ai/schema/index.js.map +1 -1
- package/dist/cmd/build/ast.d.ts +4 -10
- package/dist/cmd/build/ast.d.ts.map +1 -1
- package/dist/cmd/build/ast.js +110 -91
- package/dist/cmd/build/ast.js.map +1 -1
- package/dist/cmd/build/ast.test.js +135 -370
- package/dist/cmd/build/ast.test.js.map +1 -1
- package/dist/cmd/build/bundler.d.ts +25 -2
- package/dist/cmd/build/bundler.d.ts.map +1 -1
- package/dist/cmd/build/bundler.js +138 -43
- package/dist/cmd/build/bundler.js.map +1 -1
- package/dist/cmd/build/plugin.d.ts.map +1 -1
- package/dist/cmd/build/plugin.js +16 -8
- package/dist/cmd/build/plugin.js.map +1 -1
- package/dist/cmd/build/workbench-templates.d.ts +4 -0
- package/dist/cmd/build/workbench-templates.d.ts.map +1 -0
- package/dist/cmd/build/workbench-templates.js +49 -0
- package/dist/cmd/build/workbench-templates.js.map +1 -0
- package/dist/cmd/cloud/deploy.d.ts.map +1 -1
- package/dist/cmd/cloud/deploy.js +11 -3
- package/dist/cmd/cloud/deploy.js.map +1 -1
- package/dist/cmd/cloud/deployment/show.d.ts.map +1 -1
- package/dist/cmd/cloud/deployment/show.js +73 -20
- package/dist/cmd/cloud/deployment/show.js.map +1 -1
- package/dist/cmd/cloud/session/get.d.ts.map +1 -1
- package/dist/cmd/cloud/session/get.js +77 -17
- package/dist/cmd/cloud/session/get.js.map +1 -1
- package/dist/cmd/index.d.ts.map +1 -1
- package/dist/cmd/index.js +2 -0
- package/dist/cmd/index.js.map +1 -1
- package/dist/cmd/project/template-flow.d.ts.map +1 -1
- package/dist/cmd/project/template-flow.js +1 -0
- package/dist/cmd/project/template-flow.js.map +1 -1
- package/dist/config.d.ts +27 -3
- package/dist/config.d.ts.map +1 -1
- package/dist/config.js +31 -3
- package/dist/config.js.map +1 -1
- package/dist/schema-generator.d.ts +1 -1
- package/dist/schema-generator.d.ts.map +1 -1
- package/dist/schema-parser.d.ts +2 -1
- package/dist/schema-parser.d.ts.map +1 -1
- package/dist/schema-parser.js +18 -2
- package/dist/schema-parser.js.map +1 -1
- package/dist/steps.d.ts +2 -1
- package/dist/steps.d.ts.map +1 -1
- package/dist/steps.js +26 -3
- package/dist/steps.js.map +1 -1
- package/dist/types.d.ts +39 -2
- package/dist/types.d.ts.map +1 -1
- package/dist/types.js +4 -75
- package/dist/types.js.map +1 -1
- package/package.json +3 -3
- package/src/cli.ts +49 -2
- package/src/cmd/ai/schema/generate.ts +64 -0
- package/src/cmd/ai/schema/index.ts +2 -1
- package/src/cmd/build/ast.test.ts +157 -549
- package/src/cmd/build/ast.ts +130 -116
- package/src/cmd/build/bundler.ts +157 -42
- package/src/cmd/build/plugin.ts +18 -9
- package/src/cmd/build/workbench-templates.ts +52 -0
- package/src/cmd/cloud/deploy.ts +11 -3
- package/src/cmd/cloud/deployment/show.ts +60 -17
- package/src/cmd/cloud/session/get.ts +91 -19
- package/src/cmd/index.ts +2 -0
- package/src/cmd/project/template-flow.ts +1 -0
- package/src/config.ts +44 -5
- package/src/schema-generator.ts +1 -1
- package/src/schema-parser.ts +19 -4
- package/src/steps.ts +27 -4
- package/src/types.ts +5 -84
package/src/cmd/build/ast.ts
CHANGED

@@ -3,6 +3,11 @@ import { basename, dirname, relative } from 'node:path';
 import { generate } from 'astring';
 import type { BuildMetadata } from '../../types';
 import { createLogger } from '@agentuity/server';
+import * as ts from 'typescript';
+import type { WorkbenchConfig } from '@agentuity/core';
+import type { LogLevel } from '../../types';
+
+const logger = createLogger((process.env.AGENTUITY_LOG_LEVEL || 'info') as LogLevel);
 
 interface ASTNode {
 	type: string;
@@ -790,123 +795,141 @@ export async function parseRoute(
 	const routes: RouteDefinition = [];
 	const routePrefix = filename.includes('src/agents') ? '/agent' : '/api';
 
-
-
-
-
-
-
-	if (
-
-
-
-
-
-
-
-
-
-
-
-
-
+	try {
+		for (const body of ast.body) {
+			if (body.type === 'ExpressionStatement') {
+				const statement = body as ASTExpressionStatement;
+
+				// Validate that the expression is a call expression (e.g. function call)
+				if (statement.expression.type !== 'CallExpression') {
+					continue;
+				}
+
+				const callee = statement.expression.callee;
+
+				// Validate that the callee is a member expression (e.g. object.method())
+				// This handles cases like 'console.log()' or 'router.get()'
+				// direct function calls like 'myFunc()' have type 'Identifier' and will be skipped
+				if (callee.type !== 'MemberExpression') {
+					continue;
+				}
+
+				if (callee.object.type === 'Identifier' && statement.expression.arguments?.length > 0) {
+					const identifier = callee.object as ASTNodeIdentifier;
+					if (identifier.name === variableName) {
+						let method = (callee.property as ASTNodeIdentifier).name;
+						let type = 'api';
+						const action = statement.expression.arguments[0];
+						let suffix = '';
+						let config: Record<string, unknown> | undefined;
+						switch (method) {
+							case 'get':
+							case 'put':
+							case 'post':
+							case 'patch':
+							case 'delete': {
+								if (action && (action as ASTLiteral).type === 'Literal') {
+									suffix = (action as ASTLiteral).value;
+								} else {
+									throw new Error(
+										`unsupported HTTP method ${method} in ${filename} at line ${body.start}`
+									);
+								}
 								break;
 							}
-
-
-
-
-
-
-
-
-
-
+							case 'stream':
+							case 'sse':
+							case 'websocket': {
+								type = method;
+								method = 'post';
+								const theaction = action as ASTLiteral;
+								if (theaction.type === 'Literal') {
+									suffix = theaction.value;
+									break;
+								}
 								break;
 							}
-
-
-
-
-
-
-
-
-
-
-
-
+							case 'sms': {
+								type = method;
+								method = 'post';
+								const theaction = action as ASTObjectExpression;
+								if (theaction.type === 'ObjectExpression') {
+									config = {};
+									theaction.properties.forEach((p) => {
+										if (p.value.type === 'Literal') {
+											const literal = p.value as ASTLiteral;
+											config![p.key.name] = literal.value;
+										}
+									});
+									const number = theaction.properties.find((p) => p.key.name === 'number');
+									if (number && number.value.type === 'Literal') {
+										const phoneNumber = number.value as ASTLiteral;
+										suffix = hash(phoneNumber.value);
+										break;
 									}
-								});
-								const number = theaction.properties.find((p) => p.key.name === 'number');
-								if (number && number.value.type === 'Literal') {
-									const phoneNumber = number.value as ASTLiteral;
-									suffix = hash(phoneNumber.value);
-									break;
 								}
+								break;
 							}
-
-
-
-
-
-
-
-
-
+							case 'email': {
+								type = method;
+								method = 'post';
+								const theaction = action as ASTLiteral;
+								if (theaction.type === 'Literal') {
+									const email = theaction.value;
+									suffix = hash(email);
+									break;
+								}
 								break;
 							}
-
-
-
-
-
-
-
-
-
+							case 'cron': {
+								type = method;
+								method = 'post';
+								const theaction = action as ASTLiteral;
+								if (theaction.type === 'Literal') {
+									const number = theaction.value;
+									suffix = hash(number);
+									break;
+								}
 								break;
 							}
-
+							default: {
+								throw new Error(
+									`unsupported router method ${method} in ${filename} at line ${body.start}`
+								);
+							}
 						}
+						const thepath = `${routePrefix}/${routeName}/${suffix}`
+							.replaceAll(/\/{2,}/g, '/')
+							.replaceAll(/\/$/g, '');
+						const id = generateRouteId(
+							projectId,
+							deploymentId,
+							type,
+							method,
+							rel,
+							thepath,
+							version
+						);
+						routes.push({
+							id,
+							method: method as 'get' | 'post' | 'put' | 'delete' | 'patch',
+							type: type as 'api' | 'sms' | 'email' | 'cron',
+							filename: rel,
+							path: thepath,
+							version,
+							config,
+						});
 					}
-				const thepath = `${routePrefix}/${routeName}/${suffix}`
-					.replaceAll(/\/{2,}/g, '/')
-					.replaceAll(/\/$/g, '');
-				const id = generateRouteId(
-					projectId,
-					deploymentId,
-					type,
-					method,
-					rel,
-					thepath,
-					version
-				);
-				routes.push({
-					id,
-					method: method as 'get' | 'post' | 'put' | 'delete' | 'patch',
-					type: type as 'api' | 'sms' | 'email' | 'cron',
-					filename: rel,
-					path: thepath,
-					version,
-					config,
-				});
 				}
 			}
 		}
+	} catch (error) {
+		const err = error instanceof Error ? error : new Error(String(error));
+		throw new Error(`Failed to parse route file ${filename}: ${err.message}`);
 	}
 	return routes;
 }
 
-/**
- * Configuration extracted from createWorkbench call
- */
-export interface WorkbenchConfig {
-	route: string;
-	headers?: Record<string, string>;
-}
-
 /**
  * Result of workbench analysis
  */
@@ -923,15 +946,14 @@ export interface WorkbenchAnalysis {
  * @param functionName - The function name to check for (e.g., 'createWorkbench')
  * @returns true if the function is both imported and called
  */
-export
+export function checkFunctionUsage(content: string, functionName: string): boolean {
 	try {
-		const ts = await import('typescript');
 		const sourceFile = ts.createSourceFile('temp.ts', content, ts.ScriptTarget.Latest, true);
 
 		let hasImport = false;
 		let hasUsage = false;
 
-		function visitNode(node:
+		function visitNode(node: ts.Node): void {
 			// Check for import declarations with the function
 			if (ts.isImportDeclaration(node) && node.importClause?.namedBindings) {
 				if (ts.isNamedImports(node.importClause.namedBindings)) {
@@ -957,7 +979,7 @@ export async function checkFunctionUsage(content: string, functionName: string):
 		return hasImport && hasUsage;
 	} catch (error) {
 		// Fallback to string check if AST parsing fails
-
+		logger.warn(`AST parsing failed for ${functionName}, falling back to string check:`, error);
 		return content.includes(functionName);
 	}
 }
@@ -965,17 +987,13 @@ export async function checkFunctionUsage(content: string, functionName: string):
 /**
  * Check if app.ts contains conflicting routes for a given endpoint
  */
-export
-	content: string,
-	workbenchEndpoint: string
-): Promise<boolean> {
+export function checkRouteConflicts(content: string, workbenchEndpoint: string): boolean {
 	try {
-		const ts = await import('typescript');
 		const sourceFile = ts.createSourceFile('app.ts', content, ts.ScriptTarget.Latest, true);
 
 		let hasConflict = false;
 
-		function visitNode(node:
+		function visitNode(node: ts.Node): void {
 			// Check for router.get calls
 			if (
 				ts.isCallExpression(node) &&
@@ -1007,16 +1025,15 @@ export async function checkRouteConflicts(
  * @param content - The TypeScript source code
  * @returns workbench analysis including usage and config
  */
-export
+export function analyzeWorkbench(content: string): WorkbenchAnalysis {
 	try {
-		const ts = await import('typescript');
 		const sourceFile = ts.createSourceFile('app.ts', content, ts.ScriptTarget.Latest, true);
 
 		let hasImport = false;
 		let hasUsage = false;
 		let config: WorkbenchConfig | null = null;
 
-		function visitNode(node:
+		function visitNode(node: ts.Node): void {
 			// Check for import declarations with createWorkbench
 			if (ts.isImportDeclaration(node) && node.importClause?.namedBindings) {
 				if (ts.isNamedImports(node.importClause.namedBindings)) {
@@ -1036,7 +1053,7 @@ export async function analyzeWorkbench(content: string): Promise<WorkbenchAnalys
 					// Extract configuration from the first argument (if any)
 					if (node.arguments.length > 0) {
 						const configArg = node.arguments[0];
-						config = parseConfigObject(configArg
+						config = parseConfigObject(configArg);
 					} else {
 						// Default config if no arguments provided
 						config = { route: '/workbench' };
@@ -1061,7 +1078,7 @@ export async function analyzeWorkbench(content: string): Promise<WorkbenchAnalys
 		};
 	} catch (error) {
 		// Fallback to simple check if AST parsing fails
-
+		logger.warn('Workbench AST parsing failed, falling back to string check:', error);
 		const hasWorkbench = content.includes('createWorkbench');
 		return {
 			hasWorkbench,
@@ -1073,10 +1090,7 @@ export async function analyzeWorkbench(content: string): Promise<WorkbenchAnalys
 /**
  * Parse a TypeScript object literal to extract configuration
  */
-function parseConfigObject(
-	node: import('typescript').Node,
-	ts: typeof import('typescript')
-): WorkbenchConfig | null {
+function parseConfigObject(node: ts.Node): WorkbenchConfig | null {
 	if (!ts.isObjectLiteralExpression(node)) {
 		return { route: '/workbench' }; // Default config
 	}
package/src/cmd/build/bundler.ts
CHANGED

@@ -1,6 +1,6 @@
 import { $ } from 'bun';
+import { z } from 'zod';
 import { join, relative, resolve, dirname } from 'node:path';
-import { createRequire } from 'node:module';
 import { cpSync, existsSync, mkdirSync, rmSync } from 'node:fs';
 import gitParseUrl from 'git-url-parse';
 import AgentuityBundler, { getBuildMetadata } from './plugin';
@@ -10,8 +10,37 @@ import type { Project } from '../../types';
 import { fixDuplicateExportsInDirectory } from './fix-duplicate-exports';
 import { createLogger } from '@agentuity/server';
 import type { LogLevel } from '../../types';
+import { generateWorkbenchMainTsx, generateWorkbenchIndexHtml } from './workbench-templates';
+import { analyzeWorkbench } from './ast';
+import { encodeWorkbenchConfig } from '@agentuity/core';
 
-export
+export const DeployOptionsSchema = z.object({
+	tag: z
+		.array(z.string())
+		.default(['latest'])
+		.optional()
+		.describe('One or more tags to add to the deployment'),
+	logsUrl: z.url().optional().describe('The url to the CI build logs'),
+	trigger: z
+		.enum(['cli', 'workflow', 'webhook'])
+		.default('cli')
+		.optional()
+		.describe('The trigger that caused the build'),
+	commitUrl: z.url().optional().describe('The url to the CI commit'),
+	provider: z.string().optional().describe('The CI provider name (attempts to autodetect)'),
+	event: z
+		.enum(['pull_request', 'push', 'manual', 'workflow'])
+		.default('manual')
+		.optional()
+		.describe('The event that triggered the deployment'),
+	pullRequestNumber: z.number().optional().describe('the pull request number'),
+	pullRequestCommentId: z.string().optional().describe('the pull request comment id'),
+	pullRequestURL: z.url().optional().describe('the pull request url'),
+});
+
+type DeployOptions = z.infer<typeof DeployOptionsSchema>;
+
+export interface BundleOptions extends DeployOptions {
 	rootDir: string;
 	dev?: boolean;
 	env?: Map<string, string>;
@@ -30,6 +59,15 @@ export async function bundle({
 	rootDir,
 	project,
 	port,
+	tag,
+	logsUrl,
+	commitUrl,
+	provider,
+	trigger,
+	event,
+	pullRequestNumber,
+	pullRequestCommentId,
+	pullRequestURL,
 }: BundleOptions) {
 	const appFile = join(rootDir, 'app.ts');
 	if (!existsSync(appFile)) {
@@ -254,14 +292,12 @@ export async function bundle({
 	}
 
 	// Bundle workbench app if detected via setupWorkbench
-	const { analyzeWorkbench } = await import('./ast');
 	if (existsSync(appFile)) {
 		const appContent = await Bun.file(appFile).text();
-		const analysis =
+		const analysis = analyzeWorkbench(appContent);
 
 		if (analysis.hasWorkbench) {
 			// Encode workbench config for environment variable
-			const { encodeWorkbenchConfig } = await import('@agentuity/core');
 			const config = analysis.config || { route: '/workbench', headers: {} };
 			// Add port to config (defaults to 3500 if not provided)
 			const configWithPort = { ...config, port: port || 3500 };
@@ -272,41 +308,55 @@ export async function bundle({
 			};
 			const logger = createLogger((process.env.AGENTUITY_LOG_LEVEL as LogLevel) || 'info');
 			try {
-
-				const
-
+				// Generate workbench files on the fly instead of using files from package
+				const tempWorkbenchDir = join(outDir, 'temp-workbench');
+				mkdirSync(tempWorkbenchDir, { recursive: true });
 
-
-
-
-
-				const workbenchBuildConfig: Bun.BuildConfig = {
-					entrypoints: [workbenchIndexFile],
-					root: workbenchAppDir,
-					outdir: join(outDir, 'workbench'),
-					define: workbenchDefine,
-					sourcemap: dev ? 'inline' : 'linked',
-					plugins: [AgentuityBundler],
-					target: 'browser',
-					format: 'esm',
-					banner: `// Generated file. DO NOT EDIT`,
-					minify: true,
-					splitting: true,
-					packages: 'bundle',
-					naming: {
-						entry: '[dir]/[name].[ext]',
-						chunk: 'workbench/chunk/[name]-[hash].[ext]',
-						asset: 'workbench/asset/[name]-[hash].[ext]',
-					},
-				};
+				// Generate files using templates
+				await Bun.write(join(tempWorkbenchDir, 'main.tsx'), generateWorkbenchMainTsx(config));
+				const workbenchIndexFile = join(tempWorkbenchDir, 'index.html');
+				await Bun.write(workbenchIndexFile, generateWorkbenchIndexHtml());
 
-
-
-
-
-
-
+				// Bundle workbench using generated files
+				const workbenchBuildConfig: Bun.BuildConfig = {
+					entrypoints: [workbenchIndexFile],
+					root: tempWorkbenchDir,
+					outdir: join(outDir, 'workbench'),
+					define: workbenchDefine,
+					sourcemap: dev ? 'inline' : 'linked',
+					plugins: [AgentuityBundler],
+					target: 'browser',
+					format: 'esm',
+					banner: `// Generated file. DO NOT EDIT`,
+					minify: true,
+					splitting: true,
+					packages: 'bundle',
+					naming: {
+						entry: '[dir]/[name].[ext]',
+						chunk: 'workbench/chunk/[name]-[hash].[ext]',
+						asset: 'workbench/asset/[name]-[hash].[ext]',
+					},
+				};
+
+				const workbenchResult = await Bun.build(workbenchBuildConfig);
+				if (workbenchResult.success) {
+					logger.debug('Workbench bundled successfully');
+					// Clean up temp directory
+					rmSync(tempWorkbenchDir, { recursive: true, force: true });
+				} else {
+					logger.error('Workbench bundling failed. Logs:', workbenchResult.logs);
+					if (workbenchResult.logs.length === 0) {
+						logger.error('No build logs available. Checking generated files...');
+						logger.error('Temp dir exists:', await Bun.file(tempWorkbenchDir).exists());
+						logger.error('Index file exists:', await Bun.file(workbenchIndexFile).exists());
+						logger.error(
+							'Main.tsx exists:',
+							await Bun.file(join(tempWorkbenchDir, 'main.tsx')).exists()
+						);
 					}
+					// Clean up temp directory even on failure
+					rmSync(tempWorkbenchDir, { recursive: true, force: true });
+					process.exit(1);
 				}
 			} catch (error) {
 				logger.error('Failed to bundle workbench:', error);
@@ -339,7 +389,14 @@ export async function bundle({
 			repo: process.env.GITHUB_REPOSITORY
 				? gitParseUrl(process.env.GITHUB_REPOSITORY).toString('https')
 				: '',
+			provider: 'git',
 		};
+		if (process.env.GITHUB_REPOSITORY) {
+			buildmetadata.deployment.git.provider = 'github';
+		}
+		if (process.env.CI && !trigger) {
+			buildmetadata.deployment.git.trigger = 'ci';
+		}
 		// pull out the git information if we have it
 		try {
 			let gitDir = join(rootDir, '.git');
@@ -358,13 +415,16 @@ export async function bundle({
 					.map((s) => s.trim())
 					.filter(Boolean);
 			}
-
-			if (branch) {
-				const
-				if (
-
+			let branch = process.env.GITHUB_HEAD_REF;
+			if (!branch) {
+				const branchText = $`git branch --show-current`.nothrow().quiet();
+				if (branchText) {
+					branch = await branchText.text();
 				}
 			}
+			if (branch) {
+				buildmetadata.deployment.git.branch = branch.trim();
+			}
 			const commit = $`git rev-parse HEAD`.nothrow().quiet();
 			if (commit) {
 				const sha = await commit.text();
@@ -393,6 +453,61 @@ export async function bundle({
 		}
 	}
 
+	// if in gitlab CI, set defaults before user overrides
+	if (process.env.GITLAB_CI && buildmetadata?.deployment) {
+		buildmetadata.deployment.git ??= {};
+		buildmetadata.deployment.git.provider ??= 'gitlab';
+		buildmetadata.deployment.git.branch ??= process.env.CI_COMMIT_REF_NAME;
+		buildmetadata.deployment.git.commit ??= process.env.CI_COMMIT_SHA;
+		buildmetadata.deployment.git.buildUrl ??=
+			process.env.CI_JOB_URL ?? process.env.CI_PIPELINE_URL;
+	}
+
+	// configure any overrides or any that aren't detected automatically
+	if (buildmetadata?.deployment) {
+		buildmetadata.deployment.git ??= {};
+
+		// build tags: start with existing discovered tags, add defaults, then merge explicit tags
+		const tags = new Set(buildmetadata.deployment.git.tags ?? []);
+		tags.add('latest');
+		if (buildmetadata.deployment.git.branch) {
+			tags.add(buildmetadata.deployment.git.branch);
+		}
+		if (buildmetadata.deployment.git.commit) {
+			tags.add(buildmetadata.deployment.git.commit.substring(0, 7));
+		}
+		if (tag?.length && !(tag.length === 1 && tag[0] === 'latest')) {
+			for (const t of tag) {
+				tags.add(t);
+			}
+			tags.delete('latest'); // if you specify explicit tags we remove latest
+		}
+		buildmetadata.deployment.git.tags = Array.from(tags);
+
+		if (provider) {
+			buildmetadata.deployment.git.provider = provider;
+		}
+		if (logsUrl) {
+			buildmetadata.deployment.git.buildUrl = logsUrl;
+		}
+		if (commitUrl) {
+			buildmetadata.deployment.git.url = commitUrl;
+		}
+		if (trigger) {
+			buildmetadata.deployment.git.trigger = trigger;
+		}
+		if (event) {
+			buildmetadata.deployment.git.event = event;
+		}
+		if (pullRequestNumber) {
+			buildmetadata.deployment.git.pull_request = {
+				number: pullRequestNumber,
+				url: pullRequestURL,
+				commentId: pullRequestCommentId,
+			};
+		}
+	}
+
 	await Bun.write(
 		`${outDir}/package.json`,
 		JSON.stringify({ name: pkgContents.name, version: pkgContents.version }, null, 2)