@easybits.cloud/html-tailwind-generator 0.1.2 → 0.1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -0
- package/package.json +1 -1
- package/src/generate.ts +3 -3
- package/src/refine.ts +3 -3
package/README.md
CHANGED
|
@@ -32,6 +32,8 @@ All API keys can be set via environment variables instead of passing them explic
|
|
|
32
32
|
|
|
33
33
|
**Priority**: If both `OPENAI_API_KEY` and `ANTHROPIC_API_KEY` are set, OpenAI takes precedence. To force Anthropic, pass `anthropicApiKey` explicitly and omit `openaiApiKey`.
|
|
34
34
|
|
|
35
|
+
**Planned**: Mix providers — use Anthropic for text generation + DALL-E for images (best of both). This is on the roadmap; [open an issue](https://github.com/blissito/easybits/issues) to bump priority.
|
|
36
|
+
|
|
35
37
|
## Quick Start
|
|
36
38
|
|
|
37
39
|
### Generate a landing page (server-side)
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@easybits.cloud/html-tailwind-generator",
|
|
3
|
-
"version": "0.1.2",
|
|
3
|
+
"version": "0.1.4",
|
|
4
4
|
"description": "AI-powered landing page generator with Tailwind CSS — canvas editor, streaming generation, and one-click deploy",
|
|
5
5
|
"license": "PolyForm-Noncommercial-1.0.0",
|
|
6
6
|
"type": "module",
|
package/src/generate.ts
CHANGED
|
@@ -6,10 +6,10 @@ import { searchImage } from "./images/pexels";
|
|
|
6
6
|
import { generateImage } from "./images/dalleImages";
|
|
7
7
|
import type { Section3 } from "./types";
|
|
8
8
|
|
|
9
|
-
function resolveModel(opts: { openaiApiKey?: string; anthropicApiKey?: string; modelId?: string; defaultOpenai: string; defaultAnthropic: string }) {
|
|
9
|
+
async function resolveModel(opts: { openaiApiKey?: string; anthropicApiKey?: string; modelId?: string; defaultOpenai: string; defaultAnthropic: string }) {
|
|
10
10
|
const openaiKey = opts.openaiApiKey || process.env.OPENAI_API_KEY;
|
|
11
11
|
if (openaiKey) {
|
|
12
|
-
const { createOpenAI } = require("@ai-sdk/openai");
|
|
12
|
+
const { createOpenAI } = await import("@ai-sdk/openai");
|
|
13
13
|
const openai = createOpenAI({ apiKey: openaiKey });
|
|
14
14
|
return openai(opts.modelId || opts.defaultOpenai);
|
|
15
15
|
}
|
|
@@ -172,7 +172,7 @@ export async function generateLanding(options: GenerateOptions): Promise<Section
|
|
|
172
172
|
} = options;
|
|
173
173
|
|
|
174
174
|
const openaiApiKey = _openaiApiKey || process.env.OPENAI_API_KEY;
|
|
175
|
-
const model = resolveModel({ openaiApiKey, anthropicApiKey, modelId, defaultOpenai: "gpt-4o", defaultAnthropic: "claude-sonnet-4-6" });
|
|
175
|
+
const model = await resolveModel({ openaiApiKey, anthropicApiKey, modelId, defaultOpenai: "gpt-4o", defaultAnthropic: "claude-sonnet-4-6" });
|
|
176
176
|
|
|
177
177
|
// Build prompt content (supports multimodal with reference image)
|
|
178
178
|
const extra = extraInstructions ? `\nAdditional instructions: ${extraInstructions}` : "";
|
package/src/refine.ts
CHANGED
|
@@ -2,10 +2,10 @@ import { streamText } from "ai";
|
|
|
2
2
|
import { createAnthropic } from "@ai-sdk/anthropic";
|
|
3
3
|
import { enrichImages } from "./images/enrichImages";
|
|
4
4
|
|
|
5
|
-
function resolveModel(opts: { openaiApiKey?: string; anthropicApiKey?: string; modelId?: string; defaultOpenai: string; defaultAnthropic: string }) {
|
|
5
|
+
async function resolveModel(opts: { openaiApiKey?: string; anthropicApiKey?: string; modelId?: string; defaultOpenai: string; defaultAnthropic: string }) {
|
|
6
6
|
const openaiKey = opts.openaiApiKey || process.env.OPENAI_API_KEY;
|
|
7
7
|
if (openaiKey) {
|
|
8
|
-
const { createOpenAI } = require("@ai-sdk/openai");
|
|
8
|
+
const { createOpenAI } = await import("@ai-sdk/openai");
|
|
9
9
|
const openai = createOpenAI({ apiKey: openaiKey });
|
|
10
10
|
return openai(opts.modelId || opts.defaultOpenai);
|
|
11
11
|
}
|
|
@@ -83,7 +83,7 @@ export async function refineLanding(options: RefineOptions): Promise<string> {
|
|
|
83
83
|
const openaiApiKey = _openaiApiKey || process.env.OPENAI_API_KEY;
|
|
84
84
|
const defaultOpenai = referenceImage ? "gpt-4o" : "gpt-4o-mini";
|
|
85
85
|
const defaultAnthropic = referenceImage ? "claude-sonnet-4-6" : "claude-haiku-4-5-20251001";
|
|
86
|
-
const model = resolveModel({ openaiApiKey, anthropicApiKey, modelId, defaultOpenai, defaultAnthropic });
|
|
86
|
+
const model = await resolveModel({ openaiApiKey, anthropicApiKey, modelId, defaultOpenai, defaultAnthropic });
|
|
87
87
|
|
|
88
88
|
// Build content (supports multimodal with reference image)
|
|
89
89
|
const content: any[] = [];
|