@forvibe/cli 1.0.3 → 1.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
|
@@ -1,5 +1,4 @@
|
|
|
1
1
|
// src/ai/aso-generator.ts
|
|
2
|
-
import { GoogleGenerativeAI } from "@google/generative-ai";
|
|
3
2
|
var ASO_SYSTEM_PROMPT = `You are a world-class App Store Optimization (ASO) specialist with deep expertise in keyword strategy, conversion optimization, and store listing copywriting.
|
|
4
3
|
|
|
5
4
|
Your task is to generate a complete, ASO-optimized store listing for a mobile app.
|
|
@@ -103,21 +102,9 @@ function enforceCharLimits(raw) {
|
|
|
103
102
|
}
|
|
104
103
|
return result;
|
|
105
104
|
}
|
|
106
|
-
async function generateASOContent(report,
|
|
107
|
-
const genAI = new GoogleGenerativeAI(apiKey);
|
|
108
|
-
const model = genAI.getGenerativeModel({
|
|
109
|
-
model: "gemini-2.5-flash",
|
|
110
|
-
generationConfig: {
|
|
111
|
-
temperature: 0.5,
|
|
112
|
-
responseMimeType: "application/json"
|
|
113
|
-
}
|
|
114
|
-
});
|
|
105
|
+
async function generateASOContent(report, provider) {
|
|
115
106
|
const userPrompt = buildASOPrompt(report);
|
|
116
|
-
const
|
|
117
|
-
{ text: ASO_SYSTEM_PROMPT },
|
|
118
|
-
{ text: userPrompt }
|
|
119
|
-
]);
|
|
120
|
-
const responseText = result.response.text();
|
|
107
|
+
const responseText = await provider.generateJSON(ASO_SYSTEM_PROMPT, userPrompt, 0.5);
|
|
121
108
|
let parsed;
|
|
122
109
|
try {
|
|
123
110
|
parsed = JSON.parse(responseText);
|
package/dist/index.js
CHANGED
|
@@ -1930,6 +1930,113 @@ var ForvibeClient = class {
|
|
|
1930
1930
|
}
|
|
1931
1931
|
};
|
|
1932
1932
|
|
|
1933
|
+
// src/ai/providers.ts
|
|
1934
|
+
import { GoogleGenerativeAI } from "@google/generative-ai";
|
|
1935
|
+
// Provider adapter for Google Gemini.
// Returns a provider object exposing generateJSON(systemPrompt, userPrompt, temperature),
// which resolves to the model's raw JSON text response.
function createGeminiProvider(apiKey) {
  const generateJSON = async (systemPrompt, userPrompt, temperature) => {
    // Client is built per call, mirroring the lazy construction of the other providers.
    const client = new GoogleGenerativeAI(apiKey);
    const model = client.getGenerativeModel({
      model: "gemini-2.5-flash",
      generationConfig: {
        temperature,
        // Ask the model to emit JSON directly so callers can JSON.parse the result.
        responseMimeType: "application/json"
      }
    });
    const result = await model.generateContent([
      { text: systemPrompt },
      { text: userPrompt }
    ]);
    return result.response.text();
  };
  return { name: "Gemini", generateJSON };
}
|
|
1955
|
+
// Provider adapter for the OpenAI Chat Completions API.
// Returns a provider object exposing generateJSON(systemPrompt, userPrompt, temperature),
// which resolves to the assistant message content (expected to be JSON text).
function createOpenAIProvider(apiKey) {
  const endpoint = "https://api.openai.com/v1/chat/completions";
  const generateJSON = async (systemPrompt, userPrompt, temperature) => {
    const payload = {
      model: "gpt-4o-mini",
      messages: [
        { role: "system", content: systemPrompt },
        { role: "user", content: userPrompt }
      ],
      // Force a JSON object response so downstream JSON.parse succeeds.
      response_format: { type: "json_object" },
      temperature
    };
    const response = await fetch(endpoint, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${apiKey}`
      },
      body: JSON.stringify(payload)
    });
    if (!response.ok) {
      // Include a truncated body for diagnostics; ignore body-read failures.
      const body = await response.text().catch(() => "");
      throw new Error(`OpenAI API error (${response.status}): ${body.substring(0, 200)}`);
    }
    const data = await response.json();
    const content = data.choices?.[0]?.message?.content;
    if (!content) {
      throw new Error("Empty response from OpenAI");
    }
    return content;
  };
  return { name: "OpenAI", generateJSON };
}
|
|
1993
|
+
// Provider adapter for the Anthropic Messages API (Claude).
// Returns a provider object exposing generateJSON(systemPrompt, userPrompt, temperature),
// which resolves to the first text block of the response (expected to be JSON text).
function createClaudeProvider(apiKey) {
  const endpoint = "https://api.anthropic.com/v1/messages";
  const generateJSON = async (systemPrompt, userPrompt, temperature) => {
    const payload = {
      model: "claude-sonnet-4-6",
      max_tokens: 8192,
      system: systemPrompt,
      messages: [{ role: "user", content: userPrompt }],
      temperature
    };
    const response = await fetch(endpoint, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "x-api-key": apiKey,
        "anthropic-version": "2023-06-01"
      },
      body: JSON.stringify(payload)
    });
    if (!response.ok) {
      // Include a truncated body for diagnostics; ignore body-read failures.
      const body = await response.text().catch(() => "");
      throw new Error(`Claude API error (${response.status}): ${body.substring(0, 200)}`);
    }
    const data = await response.json();
    // Responses may contain multiple content blocks; take the first text block.
    const text = data.content?.find((b) => b.type === "text")?.text;
    if (!text) {
      throw new Error("Empty response from Claude");
    }
    return text;
  };
  return { name: "Claude", generateJSON };
}
|
|
2030
|
+
function detectProvider() {
|
|
2031
|
+
const geminiKey = process.env.GEMINI_API_KEY || process.env.GOOGLE_AI_API_KEY;
|
|
2032
|
+
if (geminiKey) return createGeminiProvider(geminiKey);
|
|
2033
|
+
const openaiKey = process.env.OPENAI_API_KEY;
|
|
2034
|
+
if (openaiKey) return createOpenAIProvider(openaiKey);
|
|
2035
|
+
const anthropicKey = process.env.ANTHROPIC_API_KEY;
|
|
2036
|
+
if (anthropicKey) return createClaudeProvider(anthropicKey);
|
|
2037
|
+
throw new Error("NO_API_KEY");
|
|
2038
|
+
}
|
|
2039
|
+
|
|
1933
2040
|
// src/commands/analyze.ts
|
|
1934
2041
|
function askQuestion(question) {
|
|
1935
2042
|
const rl = createInterface({
|
|
@@ -1950,15 +2057,19 @@ async function analyzeCommand(options) {
|
|
|
1950
2057
|
chalk.bold(" Forvibe CLI") + chalk.gray(" \u2014 AI-powered App Store automation")
|
|
1951
2058
|
);
|
|
1952
2059
|
console.log();
|
|
1953
|
-
|
|
1954
|
-
|
|
1955
|
-
|
|
1956
|
-
|
|
1957
|
-
console.log(chalk.
|
|
1958
|
-
console.log(chalk.
|
|
2060
|
+
let provider;
|
|
2061
|
+
try {
|
|
2062
|
+
provider = detectProvider();
|
|
2063
|
+
} catch {
|
|
2064
|
+
console.log(chalk.red(" \u2717 No AI API key found. Set one of the following:\n"));
|
|
2065
|
+
console.log(chalk.cyan(" export GEMINI_API_KEY=your-key") + chalk.gray(" https://aistudio.google.com/apikey"));
|
|
2066
|
+
console.log(chalk.cyan(" export OPENAI_API_KEY=your-key") + chalk.gray(" https://platform.openai.com/api-keys"));
|
|
2067
|
+
console.log(chalk.cyan(" export ANTHROPIC_API_KEY=your-key") + chalk.gray(" https://console.anthropic.com/settings/keys"));
|
|
2068
|
+
console.log();
|
|
1959
2069
|
console.log(chalk.gray(" Your source code is analyzed locally \u2014 it never leaves your machine.\n"));
|
|
1960
2070
|
process.exit(1);
|
|
1961
2071
|
}
|
|
2072
|
+
console.log(chalk.gray(` AI Provider: ${provider.name} \u2713`));
|
|
1962
2073
|
const otcCode = await askQuestion(
|
|
1963
2074
|
chalk.cyan(" \u{1F517} Enter your Forvibe connection code: ")
|
|
1964
2075
|
);
|
|
@@ -2060,14 +2171,14 @@ async function analyzeCommand(options) {
|
|
|
2060
2171
|
console.log();
|
|
2061
2172
|
let report;
|
|
2062
2173
|
const aiSpinner = ora({
|
|
2063
|
-
text:
|
|
2174
|
+
text: `Analyzing locally with ${provider.name} (your source code never leaves your machine)...`,
|
|
2064
2175
|
prefixText: " "
|
|
2065
2176
|
}).start();
|
|
2066
2177
|
try {
|
|
2067
|
-
const { generateReport } = await import("./report-generator-
|
|
2178
|
+
const { generateReport } = await import("./report-generator-QV2BWI2J.js");
|
|
2068
2179
|
report = await generateReport(
|
|
2069
2180
|
{ techStack, config, sdkScan, branding, readmeContent, sourceCode, projectTree },
|
|
2070
|
-
|
|
2181
|
+
provider
|
|
2071
2182
|
);
|
|
2072
2183
|
if (appAssets.length > 0) {
|
|
2073
2184
|
report.app_assets = appAssets;
|
|
@@ -2084,8 +2195,8 @@ async function analyzeCommand(options) {
|
|
|
2084
2195
|
prefixText: " "
|
|
2085
2196
|
}).start();
|
|
2086
2197
|
try {
|
|
2087
|
-
const { generateASOContent } = await import("./aso-generator-
|
|
2088
|
-
const asoContent = await generateASOContent(report,
|
|
2198
|
+
const { generateASOContent } = await import("./aso-generator-5I3OEF5J.js");
|
|
2199
|
+
const asoContent = await generateASOContent(report, provider);
|
|
2089
2200
|
report.aso_content = asoContent;
|
|
2090
2201
|
asoSpinner.succeed(chalk.green("Store listing content generated!"));
|
|
2091
2202
|
} catch (error) {
|
|
@@ -1,5 +1,4 @@
|
|
|
1
1
|
// src/ai/report-generator.ts
|
|
2
|
-
import { GoogleGenerativeAI } from "@google/generative-ai";
|
|
3
2
|
var SYSTEM_PROMPT = `You are a senior mobile app analyst and App Store Optimization specialist. You will receive technical data about a mobile application project including its tech stack, dependencies, config files, README, project file tree, and source code excerpts.
|
|
4
3
|
|
|
5
4
|
Your job is to deeply analyze the project and produce a comprehensive structured JSON analysis report.
|
|
@@ -92,21 +91,9 @@ ${input.sourceCode.substring(0, 4e4)}`);
|
|
|
92
91
|
}`);
|
|
93
92
|
return parts.join("\n\n");
|
|
94
93
|
}
|
|
95
|
-
async function generateReport(input,
|
|
96
|
-
const genAI = new GoogleGenerativeAI(apiKey);
|
|
97
|
-
const model = genAI.getGenerativeModel({
|
|
98
|
-
model: "gemini-2.5-flash",
|
|
99
|
-
generationConfig: {
|
|
100
|
-
temperature: 0.4,
|
|
101
|
-
responseMimeType: "application/json"
|
|
102
|
-
}
|
|
103
|
-
});
|
|
94
|
+
async function generateReport(input, provider) {
|
|
104
95
|
const userPrompt = buildUserPrompt(input);
|
|
105
|
-
const
|
|
106
|
-
{ text: SYSTEM_PROMPT },
|
|
107
|
-
{ text: userPrompt }
|
|
108
|
-
]);
|
|
109
|
-
const responseText = result.response.text();
|
|
96
|
+
const responseText = await provider.generateJSON(SYSTEM_PROMPT, userPrompt, 0.4);
|
|
110
97
|
let aiAnalysis;
|
|
111
98
|
try {
|
|
112
99
|
aiAnalysis = JSON.parse(responseText);
|