create-expert 0.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,202 @@
1
+
2
+ Apache License
3
+ Version 2.0, January 2004
4
+ http://www.apache.org/licenses/
5
+
6
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7
+
8
+ 1. Definitions.
9
+
10
+ "License" shall mean the terms and conditions for use, reproduction,
11
+ and distribution as defined by Sections 1 through 9 of this document.
12
+
13
+ "Licensor" shall mean the copyright owner or entity authorized by
14
+ the copyright owner that is granting the License.
15
+
16
+ "Legal Entity" shall mean the union of the acting entity and all
17
+ other entities that control, are controlled by, or are under common
18
+ control with that entity. For the purposes of this definition,
19
+ "control" means (i) the power, direct or indirect, to cause the
20
+ direction or management of such entity, whether by contract or
21
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
22
+ outstanding shares, or (iii) beneficial ownership of such entity.
23
+
24
+ "You" (or "Your") shall mean an individual or Legal Entity
25
+ exercising permissions granted by this License.
26
+
27
+ "Source" form shall mean the preferred form for making modifications,
28
+ including but not limited to software source code, documentation
29
+ source, and configuration files.
30
+
31
+ "Object" form shall mean any form resulting from mechanical
32
+ transformation or translation of a Source form, including but
33
+ not limited to compiled object code, generated documentation,
34
+ and conversions to other media types.
35
+
36
+ "Work" shall mean the work of authorship, whether in Source or
37
+ Object form, made available under the License, as indicated by a
38
+ copyright notice that is included in or attached to the work
39
+ (an example is provided in the Appendix below).
40
+
41
+ "Derivative Works" shall mean any work, whether in Source or Object
42
+ form, that is based on (or derived from) the Work and for which the
43
+ editorial revisions, annotations, elaborations, or other modifications
44
+ represent, as a whole, an original work of authorship. For the purposes
45
+ of this License, Derivative Works shall not include works that remain
46
+ separable from, or merely link (or bind by name) to the interfaces of,
47
+ the Work and Derivative Works thereof.
48
+
49
+ "Contribution" shall mean any work of authorship, including
50
+ the original version of the Work and any modifications or additions
51
+ to that Work or Derivative Works thereof, that is intentionally
52
+ submitted to Licensor for inclusion in the Work by the copyright owner
53
+ or by an individual or Legal Entity authorized to submit on behalf of
54
+ the copyright owner. For the purposes of this definition, "submitted"
55
+ means any form of electronic, verbal, or written communication sent
56
+ to the Licensor or its representatives, including but not limited to
57
+ communication on electronic mailing lists, source code control systems,
58
+ and issue tracking systems that are managed by, or on behalf of, the
59
+ Licensor for the purpose of discussing and improving the Work, but
60
+ excluding communication that is conspicuously marked or otherwise
61
+ designated in writing by the copyright owner as "Not a Contribution."
62
+
63
+ "Contributor" shall mean Licensor and any individual or Legal Entity
64
+ on behalf of whom a Contribution has been received by Licensor and
65
+ subsequently incorporated within the Work.
66
+
67
+ 2. Grant of Copyright License. Subject to the terms and conditions of
68
+ this License, each Contributor hereby grants to You a perpetual,
69
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70
+ copyright license to reproduce, prepare Derivative Works of,
71
+ publicly display, publicly perform, sublicense, and distribute the
72
+ Work and such Derivative Works in Source or Object form.
73
+
74
+ 3. Grant of Patent License. Subject to the terms and conditions of
75
+ this License, each Contributor hereby grants to You a perpetual,
76
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77
+ (except as stated in this section) patent license to make, have made,
78
+ use, offer to sell, sell, import, and otherwise transfer the Work,
79
+ where such license applies only to those patent claims licensable
80
+ by such Contributor that are necessarily infringed by their
81
+ Contribution(s) alone or by combination of their Contribution(s)
82
+ with the Work to which such Contribution(s) was submitted. If You
83
+ institute patent litigation against any entity (including a
84
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
85
+ or a Contribution incorporated within the Work constitutes direct
86
+ or contributory patent infringement, then any patent licenses
87
+ granted to You under this License for that Work shall terminate
88
+ as of the date such litigation is filed.
89
+
90
+ 4. Redistribution. You may reproduce and distribute copies of the
91
+ Work or Derivative Works thereof in any medium, with or without
92
+ modifications, and in Source or Object form, provided that You
93
+ meet the following conditions:
94
+
95
+ (a) You must give any other recipients of the Work or
96
+ Derivative Works a copy of this License; and
97
+
98
+ (b) You must cause any modified files to carry prominent notices
99
+ stating that You changed the files; and
100
+
101
+ (c) You must retain, in the Source form of any Derivative Works
102
+ that You distribute, all copyright, patent, trademark, and
103
+ attribution notices from the Source form of the Work,
104
+ excluding those notices that do not pertain to any part of
105
+ the Derivative Works; and
106
+
107
+ (d) If the Work includes a "NOTICE" text file as part of its
108
+ distribution, then any Derivative Works that You distribute must
109
+ include a readable copy of the attribution notices contained
110
+ within such NOTICE file, excluding those notices that do not
111
+ pertain to any part of the Derivative Works, in at least one
112
+ of the following places: within a NOTICE text file distributed
113
+ as part of the Derivative Works; within the Source form or
114
+ documentation, if provided along with the Derivative Works; or,
115
+ within a display generated by the Derivative Works, if and
116
+ wherever such third-party notices normally appear. The contents
117
+ of the NOTICE file are for informational purposes only and
118
+ do not modify the License. You may add Your own attribution
119
+ notices within Derivative Works that You distribute, alongside
120
+ or as an addendum to the NOTICE text from the Work, provided
121
+ that such additional attribution notices cannot be construed
122
+ as modifying the License.
123
+
124
+ You may add Your own copyright statement to Your modifications and
125
+ may provide additional or different license terms and conditions
126
+ for use, reproduction, or distribution of Your modifications, or
127
+ for any such Derivative Works as a whole, provided Your use,
128
+ reproduction, and distribution of the Work otherwise complies with
129
+ the conditions stated in this License.
130
+
131
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
132
+ any Contribution intentionally submitted for inclusion in the Work
133
+ by You to the Licensor shall be under the terms and conditions of
134
+ this License, without any additional terms or conditions.
135
+ Notwithstanding the above, nothing herein shall supersede or modify
136
+ the terms of any separate license agreement you may have executed
137
+ with Licensor regarding such Contributions.
138
+
139
+ 6. Trademarks. This License does not grant permission to use the trade
140
+ names, trademarks, service marks, or product names of the Licensor,
141
+ except as required for reasonable and customary use in describing the
142
+ origin of the Work and reproducing the content of the NOTICE file.
143
+
144
+ 7. Disclaimer of Warranty. Unless required by applicable law or
145
+ agreed to in writing, Licensor provides the Work (and each
146
+ Contributor provides its Contributions) on an "AS IS" BASIS,
147
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148
+ implied, including, without limitation, any warranties or conditions
149
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150
+ PARTICULAR PURPOSE. You are solely responsible for determining the
151
+ appropriateness of using or redistributing the Work and assume any
152
+ risks associated with Your exercise of permissions under this License.
153
+
154
+ 8. Limitation of Liability. In no event and under no legal theory,
155
+ whether in tort (including negligence), contract, or otherwise,
156
+ unless required by applicable law (such as deliberate and grossly
157
+ negligent acts) or agreed to in writing, shall any Contributor be
158
+ liable to You for damages, including any direct, indirect, special,
159
+ incidental, or consequential damages of any character arising as a
160
+ result of this License or out of the use or inability to use the
161
+ Work (including but not limited to damages for loss of goodwill,
162
+ work stoppage, computer failure or malfunction, or any and all
163
+ other commercial damages or losses), even if such Contributor
164
+ has been advised of the possibility of such damages.
165
+
166
+ 9. Accepting Warranty or Additional Liability. While redistributing
167
+ the Work or Derivative Works thereof, You may choose to offer,
168
+ and charge a fee for, acceptance of support, warranty, indemnity,
169
+ or other liability obligations and/or rights consistent with this
170
+ License. However, in accepting such obligations, You may act only
171
+ on Your own behalf and on Your sole responsibility, not on behalf
172
+ of any other Contributor, and only if You agree to indemnify,
173
+ defend, and hold each Contributor harmless for any liability
174
+ incurred by, or claims asserted against, such Contributor by reason
175
+ of your accepting any such warranty or additional liability.
176
+
177
+ END OF TERMS AND CONDITIONS
178
+
179
+ APPENDIX: How to apply the Apache License to your work.
180
+
181
+ To apply the Apache License to your work, attach the following
182
+ boilerplate notice, with the fields enclosed by brackets "[]"
183
+ replaced with your own identifying information. (Don't include
184
+ the brackets!) The text should be enclosed in the appropriate
185
+ comment syntax for the file format. We also recommend that a
186
+ file or class name and description of purpose be included on the
187
+ same "printed page" as the copyright notice for easier
188
+ identification within third-party archives.
189
+
190
+ Copyright [2025] [Wintermute Technologies Inc.]
191
+
192
+ Licensed under the Apache License, Version 2.0 (the "License");
193
+ you may not use this file except in compliance with the License.
194
+ You may obtain a copy of the License at
195
+
196
+ http://www.apache.org/licenses/LICENSE-2.0
197
+
198
+ Unless required by applicable law or agreed to in writing, software
199
+ distributed under the License is distributed on an "AS IS" BASIS,
200
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201
+ See the License for the specific language governing permissions and
202
+ limitations under the License.
package/README.md ADDED
@@ -0,0 +1,69 @@
1
+ # create-expert
2
+
3
+ Interactive wizard to create Perstack Experts.
4
+
5
+ ## Usage
6
+
7
+ ```bash
8
+ # Create a new Expert project
9
+ npx create-expert
10
+
11
+ # Improve an existing Expert
12
+ npx create-expert my-expert "Add error handling for edge cases"
13
+ ```
14
+
15
+ ## What it does
16
+
17
+ ### New Project Setup
18
+
19
+ 1. **Detects available LLMs and runtimes**
20
+ - LLMs: Anthropic, OpenAI, Google (via environment variables)
21
+ - Runtimes: Cursor, Claude Code, Gemini CLI
22
+
23
+ 2. **Configures your environment**
24
+ - Prompts for API keys if needed
25
+    - Creates `.env` file with credentials (example after this list)
26
+
27
+ 3. **Creates project files**
28
+ - `AGENTS.md` - Instructions for AI agents working on this project
29
+ - `perstack.toml` - Expert definitions and runtime config
30
+
31
+ 4. **Runs the Expert creation flow**
32
+ - Asks what kind of Expert you want
33
+ - Uses `perstack run` to create and test the Expert
34
+ - Iterates until the Expert works correctly
35
+
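+ For example, choosing Anthropic in step 2 and pasting a key produces a one-line `.env` (the value below is a placeholder; OpenAI and Google keys are stored as `OPENAI_API_KEY` and `GOOGLE_GENERATIVE_AI_API_KEY`):
+
+ ```bash
+ # .env (created, or appended to if the variable is missing)
+ ANTHROPIC_API_KEY=sk-ant-xxxxxxxx
+ ```
+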
36
+ ### Improvement Mode
37
+
38
+ When called with an Expert name:
39
+
40
+ ```bash
41
+ npx create-expert my-expert "Add web search capability"
42
+ ```
43
+
44
+ Skips project setup and goes straight to improvement, using the existing configuration.
45
+
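+ Under the hood, both modes hand a query to the `create-expert` Expert defined in `perstack.toml`. The improvement call above is roughly equivalent to the following (a sketch based on the bundled CLI; quoting simplified):
+
+ ```bash
+ npx perstack start create-expert 'Improve the Expert "my-expert": Add web search capability'
+ ```
+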
46
+ ## Generated Files
47
+
48
+ ### AGENTS.md
49
+
50
+ Contains:
51
+ - Perstack overview
52
+ - CLI reference
53
+ - perstack.toml format
54
+ - Best practices for creating Experts
55
+ - MCP Registry usage for finding skills
56
+
57
+ ### perstack.toml
58
+
59
+ Initial config with the `create-expert` Expert (sketched after this list) that:
60
+ - Understands Expert design principles
61
+ - Creates and tests Experts
62
+ - Iterates on improvements
63
+
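+ A trimmed sketch of the generated file, assuming the default Anthropic provider (the real `instruction` string is much longer):
+
+ ```toml
+ model = "claude-sonnet-4-5"
+
+ [provider]
+ providerName = "anthropic"
+
+ [experts."create-expert"]
+ version = "1.0.0"
+ description = "Creates and tests new Perstack Experts based on user requirements"
+ instruction = """
+ You are an Expert creator for Perstack. ...
+ """
+
+ [experts."create-expert".skills."@perstack/base"]
+ type = "mcpStdioSkill"
+ command = "npx"
+ packageName = "@perstack/base"
+ ```
+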
64
+ ## Requirements
65
+
66
+ - Node.js >= 22.0.0
67
+ - One of:
68
+ - LLM API key (Anthropic, OpenAI, or Google)
69
+ - External runtime (Cursor, Claude Code, or Gemini CLI)
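+
+ A quick local check for these requirements (the wizard detects runtimes by running `<command> --version` and detects providers through environment variables, so the commands below mirror its detection):
+
+ ```bash
+ node --version          # should report v22 or newer
+ env | grep -E 'ANTHROPIC_API_KEY|OPENAI_API_KEY|GOOGLE_GENERATIVE_AI_API_KEY'
+ cursor --version; claude --version; gemini --version   # any one of these is enough
+ ```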
package/dist/cli.js ADDED
@@ -0,0 +1,800 @@
1
+ #!/usr/bin/env node
2
+
3
+
4
+ // bin/cli.ts
5
+ import { spawn } from "child_process";
6
+ import { existsSync, readFileSync, writeFileSync } from "fs";
7
+ import { join } from "path";
8
+ import { Command } from "commander";
9
+ import { config } from "dotenv";
10
+
11
+ // src/lib/agents-md-template.ts
12
+ function generateAgentsMd(options) {
13
+ const { provider, model, runtime } = options;
14
+ const isNonDefaultRuntime = runtime && runtime !== "default";
15
+ const runtimeSection = isNonDefaultRuntime ? `runtime = "${runtime}"` : "";
16
+ return `# AGENTS.md
17
+
18
+ ## What is Perstack
19
+
20
+ Perstack is a package manager and runtime for agent-first development. It enables you to define, test, and share modular AI agents called "Experts".
21
+
22
+ Key concepts:
23
+ - **Experts**: Modular micro-agents defined in TOML
24
+ - **Runtime**: Executes Experts with isolation, observability, and sandbox support
25
+ - **Registry**: Public registry for sharing and reusing Experts
26
+
27
+ ## Project Configuration
28
+
29
+ This project uses:
30
+ - Provider: ${provider}
31
+ - Model: ${model}
32
+ ${isNonDefaultRuntime ? `- Runtime: ${runtime}` : "- Runtime: docker (default)"}
33
+
34
+ ## CLI Reference
35
+
36
+ ### Running Experts
37
+
38
+ **\`perstack start\`** - Interactive workbench for developing and testing Experts
39
+ \`\`\`bash
40
+ perstack start [expertKey] [query]
41
+ \`\`\`
42
+
43
+ **\`perstack run\`** - Headless execution for production and automation
44
+ \`\`\`bash
45
+ perstack run <expertKey> <query> [options]
46
+ \`\`\`
47
+
48
+ ### Common Options
49
+
50
+ | Option | Description | Default |
51
+ |--------|-------------|---------|
52
+ | \`--provider <provider>\` | LLM provider | \`anthropic\` |
53
+ | \`--model <model>\` | Model name | \`claude-sonnet-4-5\` |
54
+ | \`--max-steps <n>\` | Maximum steps | unlimited |
55
+ | \`--runtime <runtime>\` | Execution runtime | \`docker\` |
56
+
57
+ ### Available Runtimes
58
+
59
+ - \`docker\` \u2014 Containerized runtime with network isolation (default)
60
+ - \`local\` \u2014 Built-in runtime without isolation
61
+ - \`cursor\` \u2014 Cursor CLI (experimental)
62
+ - \`claude-code\` \u2014 Claude Code CLI (experimental)
63
+ - \`gemini\` \u2014 Gemini CLI (experimental)
64
+
65
+ ## perstack.toml Format
66
+
67
+ \`\`\`toml
68
+ model = "${model}"
69
+ ${runtimeSection}
70
+
71
+ [provider]
72
+ providerName = "${provider}"
73
+
74
+ [experts."my-expert"]
75
+ version = "1.0.0"
76
+ description = "Brief description"
77
+ instruction = """
78
+ Detailed instructions for the expert.
79
+ """
80
+
81
+ [experts."my-expert".skills."@perstack/base"]
82
+ type = "mcpStdioSkill"
83
+ command = "npx"
84
+ packageName = "@perstack/base"
85
+ \`\`\`
86
+
87
+ ## Best Practices for Creating Experts
88
+
89
+ ### 1. Do One Thing Well
90
+
91
+ Bad:
92
+ \`\`\`toml
93
+ [experts."assistant"]
94
+ description = "Handles inquiries, reports, meetings, and expenses"
95
+ \`\`\`
96
+
97
+ Good:
98
+ \`\`\`toml
99
+ [experts."customer-support"]
100
+ description = "Answers customer questions about products and orders"
101
+ \`\`\`
102
+
103
+ ### 2. Trust the LLM, Define Domain Knowledge
104
+
105
+ Bad (procedural):
106
+ \`\`\`toml
107
+ instruction = """
108
+ 1. First, greet the customer
109
+ 2. Ask for their order number
110
+ 3. Look up the order
111
+ """
112
+ \`\`\`
113
+
114
+ Good (declarative):
115
+ \`\`\`toml
116
+ instruction = """
117
+ You are a customer support specialist.
118
+
119
+ Key policies:
120
+ - Orders ship within 2 business days
121
+ - Free returns within 30 days
122
+ - VIP customers get priority handling
123
+
124
+ Tone: Friendly but professional.
125
+ """
126
+ \`\`\`
127
+
128
+ ### 3. Let Them Collaborate
129
+
130
+ Use delegation for complex workflows:
131
+ \`\`\`toml
132
+ [experts."coordinator"]
133
+ delegates = ["researcher", "writer", "reviewer"]
134
+
135
+ [experts."researcher"]
136
+ description = "Gathers information from various sources"
137
+ \`\`\`
138
+
139
+ ### 4. Keep It Verifiable
140
+
141
+ Write clear, predictable instructions:
142
+ \`\`\`toml
143
+ instruction = """
144
+ Approval rules:
145
+ - Under $100: Auto-approve with receipt
146
+ - $100-$500: Approve if business purpose is clear
147
+ - Over $500: Flag for manager review
148
+ """
149
+ \`\`\`
150
+
151
+ ### 5. Ship Early
152
+
153
+ Start minimal, expand based on real usage. Don't over-engineer.
154
+
155
+ ## Finding Skills (MCP Servers)
156
+
157
+ Skills extend Experts with external capabilities via MCP (Model Context Protocol).
158
+
159
+ ### MCP Registry
160
+
161
+ Search for MCP servers at: https://registry.modelcontextprotocol.io
162
+
163
+ **API Reference:**
164
+ \`\`\`bash
165
+ # List all servers
166
+ curl "https://registry.modelcontextprotocol.io/v0.1/servers"
167
+
168
+ # Search by name
169
+ curl "https://registry.modelcontextprotocol.io/v0.1/servers?search=filesystem"
170
+
171
+ # Get specific server
172
+ curl "https://registry.modelcontextprotocol.io/v0.1/servers/{serverName}/versions/{version}"
173
+ \`\`\`
174
+
175
+ ### Using MCP Skills
176
+
177
+ **npm packages (most common):**
178
+ \`\`\`toml
179
+ [experts."my-expert".skills."web-search"]
180
+ type = "mcpStdioSkill"
181
+ command = "npx"
182
+ packageName = "exa-mcp-server"
183
+ requiredEnv = ["EXA_API_KEY"]
184
+ pick = ["web_search_exa"] # Optional: whitelist tools
185
+ \`\`\`
186
+
187
+ **Remote servers (SSE):**
188
+ \`\`\`toml
189
+ [experts."my-expert".skills."remote-api"]
190
+ type = "mcpSseSkill"
191
+ endpoint = "https://api.example.com/mcp"
192
+ \`\`\`
193
+
194
+ ### Built-in Base Skill
195
+
196
+ \`@perstack/base\` provides essential tools:
197
+ - File operations: \`readTextFile\`, \`writeTextFile\`, \`editTextFile\`, etc.
198
+ - Shell execution: \`exec\`
199
+ - Control flow: \`attemptCompletion\`, \`think\`, \`todo\`
200
+
201
+ \`\`\`toml
202
+ [experts."my-expert".skills."@perstack/base"]
203
+ type = "mcpStdioSkill"
204
+ command = "npx"
205
+ packageName = "@perstack/base"
206
+ \`\`\`
207
+
208
+ ## Testing Experts
209
+
210
+ 1. Start with \`perstack start\` for interactive development
211
+ 2. Test happy path scenarios first
212
+ 3. Test edge cases and error scenarios
213
+ 4. Verify outputs match expectations
214
+ 5. Use \`--max-steps\` to limit runaway executions
215
+
216
+ ## Project Files
217
+
218
+ - \`perstack.toml\` - Expert definitions and runtime config
219
+ - \`AGENTS.md\` - This file, for AI agent context
220
+ - \`.env\` - Environment variables (API keys)
221
+ - \`perstack/\` - Execution history (auto-managed)
222
+ `;
223
+ }
224
+
225
+ // src/lib/create-expert-toml.ts
226
+ function generateCreateExpertToml(options) {
227
+ const runtimeLine = options.runtime && options.runtime !== "default" ? `runtime = "${options.runtime}"
228
+ ` : "";
229
+ return `model = "${options.model}"
230
+ ${runtimeLine}
231
+ [provider]
232
+ providerName = "${options.provider}"
233
+
234
+ [experts."create-expert"]
235
+ version = "1.0.0"
236
+ description = "Creates and tests new Perstack Experts based on user requirements"
237
+ instruction = """
238
+ You are an Expert creator for Perstack. Your job is to create well-designed Experts based on user requirements.
239
+
240
+ ## Your Workflow
241
+
242
+ 1. **Understand Requirements**: Analyze what the user wants the Expert to do
243
+ 2. **Design the Expert**: Plan the structure, skills needed, and delegation patterns
244
+ 3. **Implement**: Write the Expert definition in perstack.toml
245
+ 4. **Test**: Run the Expert with realistic test cases
246
+ 5. **Iterate**: Fix issues and improve based on test results
247
+
248
+ ## Design Principles
249
+
250
+ 1. **Do One Thing Well**: Each Expert should focus on a single responsibility
251
+ 2. **Define Domain Knowledge**: Write declarative instructions with policies, not procedures
252
+ 3. **Use Delegation**: Break complex tasks into collaborating Experts
253
+ 4. **Keep It Verifiable**: Write clear, predictable behavior
254
+ 5. **Start Simple**: Begin minimal, expand based on real needs
255
+
256
+ ## Expert Structure
257
+
258
+ [experts."expert-name"]
259
+ version = "1.0.0"
260
+ description = "Brief description visible to delegators"
261
+ instruction = "Domain knowledge, policies, and constraints."
262
+ delegates = ["other-expert"]
263
+
264
+ [experts."expert-name".skills."skill-name"]
265
+ type = "mcpStdioSkill"
266
+ command = "npx"
267
+ packageName = "package-name"
268
+
269
+ ## Finding Skills
270
+
271
+ Search MCP Registry for available skills:
272
+ - Browse: https://registry.modelcontextprotocol.io
273
+ - API: GET /v0.1/servers?search=<query>
274
+
275
+ Common skills:
276
+ - \`@perstack/base\` - File operations, shell, control flow
277
+ - \`exa-mcp-server\` - Web search (requires EXA_API_KEY)
278
+ - \`@anthropic/mcp-server-memory\` - Persistent memory
279
+
280
+ ## Testing Protocol
281
+
282
+ For each Expert you create:
283
+
284
+ 1. **Happy Path Tests**
285
+ - Test the primary use case with normal inputs
286
+ - Verify the output matches expectations
287
+
288
+ 2. **Edge Case Tests**
289
+ - Empty or minimal inputs
290
+ - Unusual but valid inputs
291
+ - Boundary conditions
292
+
293
+ 3. **Error Handling Tests**
294
+ - Invalid inputs
295
+ - Missing required data
296
+ - Skill failures
297
+
298
+ Use \`npx -y perstack run\` to execute tests:
299
+ \`\`\`bash
300
+ npx -y perstack run expert-name "test query"
301
+ \`\`\`
302
+
303
+ ## Iteration Process
304
+
305
+ After each test:
306
+ 1. Review the output
307
+ 2. Identify issues or improvements
308
+ 3. Update the Expert definition
309
+ 4. Re-run tests to verify fixes
310
+
311
+ Continue until all tests pass and the Expert behaves as expected.
312
+ """
313
+
314
+ [experts."create-expert".skills."@perstack/base"]
315
+ type = "mcpStdioSkill"
316
+ command = "npx"
317
+ packageName = "@perstack/base"
318
+ `;
319
+ }
320
+
321
+ // src/lib/detect-llm.ts
322
+ var LLM_CONFIGS = {
323
+ anthropic: {
324
+ envVar: "ANTHROPIC_API_KEY",
325
+ displayName: "Anthropic (Claude)",
326
+ defaultModel: "claude-sonnet-4-5"
327
+ },
328
+ openai: { envVar: "OPENAI_API_KEY", displayName: "OpenAI", defaultModel: "gpt-4o" },
329
+ google: {
330
+ envVar: "GOOGLE_GENERATIVE_AI_API_KEY",
331
+ displayName: "Google (Gemini)",
332
+ defaultModel: "gemini-2.5-pro"
333
+ }
334
+ };
335
+ function detectLLM(provider) {
336
+ const config2 = LLM_CONFIGS[provider];
337
+ return {
338
+ provider,
339
+ envVar: config2.envVar,
340
+ available: Boolean(process.env[config2.envVar]),
341
+ displayName: config2.displayName,
342
+ defaultModel: config2.defaultModel
343
+ };
344
+ }
345
+ function detectAllLLMs() {
346
+ return Object.keys(LLM_CONFIGS).map(detectLLM);
347
+ }
348
+ function getDefaultModel(provider) {
349
+ return LLM_CONFIGS[provider].defaultModel;
350
+ }
351
+
352
+ // src/lib/detect-runtime.ts
353
+ import { execSync } from "child_process";
354
+ function checkCommand(command) {
355
+ try {
356
+ const result = execSync(`${command} --version`, {
357
+ encoding: "utf-8",
358
+ stdio: ["pipe", "pipe", "pipe"],
359
+ timeout: 5e3
360
+ });
361
+ const firstLine = result.trim().split("\n")[0];
362
+ return { available: true, version: firstLine };
363
+ } catch {
364
+ return { available: false };
365
+ }
366
+ }
367
+ function detectCursor() {
368
+ const result = checkCommand("cursor");
369
+ return { type: "cursor", ...result };
370
+ }
371
+ function detectClaudeCode() {
372
+ const result = checkCommand("claude");
373
+ return { type: "claude-code", ...result };
374
+ }
375
+ function detectGemini() {
376
+ const result = checkCommand("gemini");
377
+ return { type: "gemini", ...result };
378
+ }
379
+ function detectAllRuntimes() {
380
+ return [detectCursor(), detectClaudeCode(), detectGemini()];
381
+ }
382
+
383
+ // src/tui/wizard/render.tsx
384
+ import { render } from "ink";
385
+
386
+ // src/tui/wizard/app.tsx
387
+ import { Box, Text, useApp, useInput } from "ink";
388
+ import { useEffect, useState } from "react";
389
+ import { jsx, jsxs } from "react/jsx-runtime";
390
+ function SelectableList({
391
+ items,
392
+ selectedIndex,
393
+ renderItem
394
+ }) {
395
+ return /* @__PURE__ */ jsx(Box, { flexDirection: "column", children: items.map((item, index) => {
396
+ const isSelected = index === selectedIndex;
397
+ if (renderItem) {
398
+ return /* @__PURE__ */ jsx(Box, { children: renderItem(item, isSelected) }, item.key);
399
+ }
400
+ return /* @__PURE__ */ jsx(Box, { children: /* @__PURE__ */ jsxs(Text, { color: item.disabled ? "gray" : isSelected ? "cyan" : "white", children: [
401
+ isSelected ? "\u276F " : " ",
402
+ item.label,
403
+ item.disabled ? " (not available)" : ""
404
+ ] }) }, item.key);
405
+ }) });
406
+ }
407
+ function TextInputComponent({
408
+ value,
409
+ onChange,
410
+ onSubmit,
411
+ placeholder,
412
+ isSecret
413
+ }) {
414
+ useInput((input, key) => {
415
+ if (key.return) {
416
+ onSubmit();
417
+ } else if (key.backspace || key.delete) {
418
+ onChange(value.slice(0, -1));
419
+ } else if (!key.ctrl && !key.meta && input) {
420
+ onChange(value + input);
421
+ }
422
+ });
423
+ const displayValue = isSecret ? "\u2022".repeat(value.length) : value;
424
+ return /* @__PURE__ */ jsxs(Box, { children: [
425
+ /* @__PURE__ */ jsx(Text, { color: "cyan", children: displayValue || /* @__PURE__ */ jsx(Text, { color: "gray", children: placeholder }) }),
426
+ /* @__PURE__ */ jsx(Text, { color: "cyan", children: "\u2588" })
427
+ ] });
428
+ }
429
+ function getRuntimeDisplayName(type) {
430
+ switch (type) {
431
+ case "cursor":
432
+ return "Cursor";
433
+ case "claude-code":
434
+ return "Claude Code";
435
+ case "gemini":
436
+ return "Gemini CLI";
437
+ }
438
+ }
439
+ function App({ llms, runtimes, onComplete, isImprovement, improvementTarget }) {
440
+ const { exit } = useApp();
441
+ const [step, setStep] = useState("detecting");
442
+ const [selectedIndex, setSelectedIndex] = useState(0);
443
+ const [result, setResult] = useState({});
444
+ const [apiKeyInput, setApiKeyInput] = useState("");
445
+ const [expertDescInput, setExpertDescInput] = useState(improvementTarget || "");
446
+ const availableLLMs = llms.filter((l) => l.available);
447
+ const availableRuntimes = runtimes.filter((r) => r.available);
448
+ const runtimeOptions = [
449
+ { key: "default", type: "default", label: "Default (built-in)" },
450
+ ...availableRuntimes.map((r) => ({
451
+ key: r.type,
452
+ type: r.type,
453
+ label: `${getRuntimeDisplayName(r.type)}${r.version ? ` (${r.version})` : ""}`,
454
+ version: r.version
455
+ }))
456
+ ];
457
+ const llmOptionsWithOther = [
458
+ ...llms.map((l) => ({
459
+ key: l.provider,
460
+ label: `${l.displayName}${l.available ? " \u2713" : ""}`,
461
+ provider: l.provider,
462
+ available: l.available,
463
+ defaultModel: l.defaultModel
464
+ })),
465
+ {
466
+ key: "other",
467
+ label: "Other (configure new provider)",
468
+ provider: null,
469
+ available: false,
470
+ defaultModel: ""
471
+ }
472
+ ];
473
+ useEffect(() => {
474
+ if (step === "detecting") {
475
+ const timer = setTimeout(() => {
476
+ if (isImprovement) {
477
+ const llm = availableLLMs[0];
478
+ if (llm) {
479
+ setResult({
480
+ runtime: "default",
481
+ provider: llm.provider,
482
+ model: llm.defaultModel
483
+ });
484
+ setStep("input-expert-description");
485
+ } else {
486
+ setStep("select-runtime");
487
+ }
488
+ } else {
489
+ setStep("select-runtime");
490
+ }
491
+ }, 500);
492
+ return () => clearTimeout(timer);
493
+ }
494
+ return void 0;
495
+ }, [step, isImprovement, availableLLMs]);
496
+ useInput((_, key) => {
497
+ if (key.escape) {
498
+ exit();
499
+ return;
500
+ }
501
+ if (step === "input-api-key" || step === "input-expert-description") {
502
+ return;
503
+ }
504
+ if (key.upArrow) {
505
+ setSelectedIndex((prev) => Math.max(0, prev - 1));
506
+ } else if (key.downArrow) {
507
+ setSelectedIndex((prev) => {
508
+ const maxIndex = getMaxIndex();
509
+ return Math.min(maxIndex, prev + 1);
510
+ });
511
+ } else if (key.return) {
512
+ handleSelect();
513
+ }
514
+ });
515
+ function getMaxIndex() {
516
+ switch (step) {
517
+ case "select-runtime":
518
+ return runtimeOptions.length - 1;
519
+ case "select-llm":
520
+ return llmOptionsWithOther.length - 1;
521
+ case "select-provider":
522
+ return llms.length - 1;
523
+ default:
524
+ return 0;
525
+ }
526
+ }
527
+ function handleSelect() {
528
+ switch (step) {
529
+ case "select-runtime": {
530
+ const selected = runtimeOptions[selectedIndex];
531
+ if (!selected) break;
532
+ if (selected.type === "default") {
533
+ setResult({ runtime: "default" });
534
+ if (availableLLMs.length > 0) {
535
+ setStep("select-llm");
536
+ } else {
537
+ setStep("select-provider");
538
+ }
539
+ } else {
540
+ setResult({ runtime: selected.type });
541
+ setStep("input-expert-description");
542
+ }
543
+ setSelectedIndex(0);
544
+ break;
545
+ }
546
+ case "select-llm": {
547
+ const selected = llmOptionsWithOther[selectedIndex];
548
+ if (!selected) break;
549
+ if (selected.key === "other") {
550
+ setStep("select-provider");
551
+ } else if (selected.available && selected.provider) {
552
+ setResult((prev) => ({
553
+ ...prev,
554
+ provider: selected.provider,
555
+ model: selected.defaultModel
556
+ }));
557
+ setStep("input-expert-description");
558
+ } else if (selected.provider) {
559
+ setResult((prev) => ({ ...prev, provider: selected.provider }));
560
+ setStep("input-api-key");
561
+ }
562
+ setSelectedIndex(0);
563
+ break;
564
+ }
565
+ case "select-provider": {
566
+ const selected = llms[selectedIndex];
567
+ if (!selected) break;
568
+ setResult((prev) => ({ ...prev, provider: selected.provider }));
569
+ setStep("input-api-key");
570
+ setSelectedIndex(0);
571
+ break;
572
+ }
573
+ }
574
+ }
575
+ function handleApiKeySubmit() {
576
+ if (apiKeyInput.trim()) {
577
+ const selectedLlm = llms.find((l) => l.provider === result.provider) ?? llms[0];
578
+ if (!selectedLlm) return;
579
+ setResult((prev) => ({
580
+ ...prev,
581
+ provider: selectedLlm.provider,
582
+ model: selectedLlm.defaultModel,
583
+ apiKey: apiKeyInput.trim()
584
+ }));
585
+ setStep("input-expert-description");
586
+ }
587
+ }
588
+ function handleExpertDescSubmit() {
589
+ if (expertDescInput.trim()) {
590
+ const finalResult = {
591
+ runtime: result.runtime || "default",
592
+ provider: result.provider,
593
+ model: result.model,
594
+ apiKey: result.apiKey,
595
+ expertDescription: expertDescInput.trim()
596
+ };
597
+ onComplete(finalResult);
598
+ setStep("done");
599
+ exit();
600
+ }
601
+ }
602
+ return /* @__PURE__ */ jsxs(Box, { flexDirection: "column", padding: 1, children: [
603
+ /* @__PURE__ */ jsx(Box, { marginBottom: 1, children: /* @__PURE__ */ jsx(Text, { bold: true, color: "cyan", children: "\u{1F680} Create Expert Wizard" }) }),
604
+ step === "detecting" && /* @__PURE__ */ jsx(Box, { children: /* @__PURE__ */ jsx(Text, { color: "yellow", children: "Detecting available runtimes..." }) }),
605
+ step === "select-runtime" && /* @__PURE__ */ jsxs(Box, { flexDirection: "column", children: [
606
+ /* @__PURE__ */ jsx(Box, { marginBottom: 1, children: /* @__PURE__ */ jsx(Text, { children: "Select a runtime:" }) }),
607
+ /* @__PURE__ */ jsx(
608
+ SelectableList,
609
+ {
610
+ items: runtimeOptions.map((r) => ({ key: r.key, label: r.label })),
611
+ selectedIndex
612
+ }
613
+ ),
614
+ /* @__PURE__ */ jsx(Box, { marginTop: 1, children: /* @__PURE__ */ jsx(Text, { color: "gray", children: "\u2191\u2193 to move, Enter to select, Esc to exit" }) })
615
+ ] }),
616
+ step === "select-llm" && /* @__PURE__ */ jsxs(Box, { flexDirection: "column", children: [
617
+ /* @__PURE__ */ jsx(Box, { marginBottom: 1, children: /* @__PURE__ */ jsx(Text, { children: "Select an LLM provider:" }) }),
618
+ /* @__PURE__ */ jsx(
619
+ SelectableList,
620
+ {
621
+ items: llmOptionsWithOther.map((l) => ({ key: l.key, label: l.label })),
622
+ selectedIndex
623
+ }
624
+ ),
625
+ /* @__PURE__ */ jsx(Box, { marginTop: 1, children: /* @__PURE__ */ jsx(Text, { color: "gray", children: "\u2191\u2193 to move, Enter to select" }) })
626
+ ] }),
627
+ step === "select-provider" && /* @__PURE__ */ jsxs(Box, { flexDirection: "column", children: [
628
+ /* @__PURE__ */ jsx(Box, { marginBottom: 1, children: /* @__PURE__ */ jsx(Text, { color: "yellow", children: "\u26A0 No LLM API keys found." }) }),
629
+ /* @__PURE__ */ jsx(Box, { marginBottom: 1, children: /* @__PURE__ */ jsx(Text, { children: "Perstack requires an API key from one of these providers:" }) }),
630
+ /* @__PURE__ */ jsx(Box, { flexDirection: "column", marginBottom: 1, children: llms.map((l) => /* @__PURE__ */ jsxs(Text, { color: "gray", children: [
631
+ "\u2022 ",
632
+ l.displayName,
633
+ " (",
634
+ l.envVar,
635
+ ")"
636
+ ] }, l.provider)) }),
637
+ /* @__PURE__ */ jsx(Box, { marginBottom: 1, children: /* @__PURE__ */ jsx(Text, { children: "Select a provider to configure:" }) }),
638
+ /* @__PURE__ */ jsx(
639
+ SelectableList,
640
+ {
641
+ items: llms.map((l) => ({ key: l.provider, label: l.displayName })),
642
+ selectedIndex
643
+ }
644
+ ),
645
+ /* @__PURE__ */ jsx(Box, { marginTop: 1, children: /* @__PURE__ */ jsx(Text, { color: "gray", children: "\u2191\u2193 to move, Enter to select" }) })
646
+ ] }),
647
+ step === "input-api-key" && /* @__PURE__ */ jsxs(Box, { flexDirection: "column", children: [
648
+ /* @__PURE__ */ jsx(Box, { marginBottom: 1, children: /* @__PURE__ */ jsxs(Text, { children: [
649
+ "Enter your ",
650
+ llms.find((l) => l.provider === result.provider)?.displayName || "API",
651
+ " ",
652
+ "key:"
653
+ ] }) }),
654
+ /* @__PURE__ */ jsx(
655
+ TextInputComponent,
656
+ {
657
+ value: apiKeyInput,
658
+ onChange: setApiKeyInput,
659
+ onSubmit: handleApiKeySubmit,
660
+ placeholder: "sk-...",
661
+ isSecret: true
662
+ }
663
+ ),
664
+ /* @__PURE__ */ jsx(Box, { marginTop: 1, children: /* @__PURE__ */ jsx(Text, { color: "gray", children: "Type your API key and press Enter" }) })
665
+ ] }),
666
+ step === "input-expert-description" && /* @__PURE__ */ jsxs(Box, { flexDirection: "column", children: [
667
+ /* @__PURE__ */ jsx(Box, { marginBottom: 1, children: /* @__PURE__ */ jsx(Text, { bold: true, children: isImprovement ? "What improvements do you want?" : "What kind of Expert do you want to create?" }) }),
668
+ /* @__PURE__ */ jsx(Box, { marginBottom: 1, children: /* @__PURE__ */ jsx(Text, { color: "gray", children: "Describe the Expert's purpose, capabilities, or domain knowledge." }) }),
669
+ /* @__PURE__ */ jsx(
670
+ TextInputComponent,
671
+ {
672
+ value: expertDescInput,
673
+ onChange: setExpertDescInput,
674
+ onSubmit: handleExpertDescSubmit,
675
+ placeholder: "e.g., A code reviewer that checks for TypeScript best practices"
676
+ }
677
+ ),
678
+ /* @__PURE__ */ jsx(Box, { marginTop: 1, children: /* @__PURE__ */ jsx(Text, { color: "gray", children: "Type your description and press Enter" }) })
679
+ ] }),
680
+ step === "done" && /* @__PURE__ */ jsx(Box, { children: /* @__PURE__ */ jsx(Text, { color: "green", children: "\u2713 Configuration complete! Starting Expert creation..." }) })
681
+ ] });
682
+ }
683
+
684
+ // src/tui/wizard/render.tsx
685
+ import { jsx as jsx2 } from "react/jsx-runtime";
686
+ async function renderWizard(options) {
687
+ return new Promise((resolve) => {
688
+ let result = null;
689
+ const { waitUntilExit } = render(
690
+ /* @__PURE__ */ jsx2(
691
+ App,
692
+ {
693
+ llms: options.llms,
694
+ runtimes: options.runtimes,
695
+ isImprovement: options.isImprovement,
696
+ improvementTarget: options.improvementTarget,
697
+ onComplete: (wizardResult) => {
698
+ result = wizardResult;
699
+ }
700
+ }
701
+ )
702
+ );
703
+ waitUntilExit().then(() => resolve(result));
704
+ });
705
+ }
706
+
707
+ // bin/cli.ts
708
+ config();
709
+ function getEnvVarName(provider) {
710
+ switch (provider) {
711
+ case "anthropic":
712
+ return "ANTHROPIC_API_KEY";
713
+ case "openai":
714
+ return "OPENAI_API_KEY";
715
+ case "google":
716
+ return "GOOGLE_GENERATIVE_AI_API_KEY";
717
+ }
718
+ }
719
+ var program = new Command().name("create-expert").description("Create Perstack Experts interactively").version("0.0.1").argument("[expertName]", "Expert name to improve (for improvement mode)").argument("[improvements]", "Improvement description (for improvement mode)").option("--cwd <path>", "Working directory", process.cwd()).action(async (expertName, improvements, options) => {
720
+ const cwd = options?.cwd || process.cwd();
721
+ const isImprovement = Boolean(expertName);
722
+ const improvementTarget = improvements || "";
723
+ const perstackTomlPath = join(cwd, "perstack.toml");
724
+ const agentsMdPath = join(cwd, "AGENTS.md");
725
+ const envPath = join(cwd, ".env");
726
+ const llms = detectAllLLMs();
727
+ const runtimes = detectAllRuntimes();
728
+ const wizardResult = await renderWizard({
729
+ llms,
730
+ runtimes,
731
+ isImprovement,
732
+ improvementTarget
733
+ });
734
+ if (!wizardResult) {
735
+ console.log("Wizard cancelled.");
736
+ process.exit(0);
737
+ }
738
+ if (wizardResult.apiKey && wizardResult.provider) {
739
+ const envVarName = getEnvVarName(wizardResult.provider);
740
+ const envContent = `${envVarName}=${wizardResult.apiKey}
741
+ `;
742
+ if (existsSync(envPath)) {
743
+ const existing = readFileSync(envPath, "utf-8");
744
+ const hasEnvVar = new RegExp(`^${envVarName}=`, "m").test(existing);
745
+ if (!hasEnvVar) {
746
+ writeFileSync(envPath, `${existing}
747
+ ${envContent}`);
748
+ console.log(`\u2713 Added ${envVarName} to .env`);
749
+ }
750
+ } else {
751
+ writeFileSync(envPath, envContent);
752
+ console.log(`\u2713 Created .env with ${envVarName}`);
753
+ }
754
+ process.env[envVarName] = wizardResult.apiKey;
755
+ }
756
+ const isDefaultRuntime = wizardResult.runtime === "default";
757
+ if (!isImprovement) {
758
+ if (isDefaultRuntime) {
759
+ const provider = wizardResult.provider || "anthropic";
760
+ const model = wizardResult.model || getDefaultModel(provider);
761
+ const agentsMd = generateAgentsMd({ provider, model });
762
+ writeFileSync(agentsMdPath, agentsMd);
763
+ console.log("\u2713 Created AGENTS.md");
764
+ const createExpertToml = generateCreateExpertToml({ provider, model });
765
+ writeFileSync(perstackTomlPath, createExpertToml);
766
+ console.log("\u2713 Created perstack.toml with create-expert Expert");
767
+ } else {
768
+ const provider = wizardResult.provider || "anthropic";
769
+ const model = wizardResult.model || getDefaultModel(provider);
770
+ const agentsMd = generateAgentsMd({
771
+ provider,
772
+ model,
773
+ runtime: wizardResult.runtime
774
+ });
775
+ writeFileSync(agentsMdPath, agentsMd);
776
+ console.log("\u2713 Created AGENTS.md");
777
+ const createExpertToml = generateCreateExpertToml({
778
+ provider,
779
+ model,
780
+ runtime: wizardResult.runtime
781
+ });
782
+ writeFileSync(perstackTomlPath, createExpertToml);
783
+ console.log("\u2713 Created perstack.toml with create-expert Expert");
784
+ }
785
+ }
786
+ const expertDescription = wizardResult.expertDescription || "";
787
+ const query = isImprovement ? `Improve the Expert "${expertName}": ${expertDescription}` : `Create a new Expert based on these requirements: ${expertDescription}`;
788
+ const runtimeArg = isDefaultRuntime ? [] : ["--runtime", wizardResult.runtime];
789
+ const args = ["perstack", "start", "create-expert", query, ...runtimeArg];
790
+ const proc = spawn("npx", args, {
791
+ cwd,
792
+ env: process.env,
793
+ stdio: "inherit"
794
+ });
795
+ proc.on("exit", (code) => {
796
+ process.exit(code || 0);
797
+ });
798
+ });
799
+ program.parse();
800
+ //# sourceMappingURL=cli.js.map
package/dist/cli.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../bin/cli.ts","../src/lib/agents-md-template.ts","../src/lib/create-expert-toml.ts","../src/lib/detect-llm.ts","../src/lib/detect-runtime.ts","../src/tui/wizard/render.tsx","../src/tui/wizard/app.tsx"],"sourcesContent":["#!/usr/bin/env node\nimport { spawn } from \"node:child_process\"\nimport { existsSync, readFileSync, writeFileSync } from \"node:fs\"\nimport { join } from \"node:path\"\nimport { Command } from \"commander\"\nimport { config } from \"dotenv\"\nimport {\n detectAllLLMs,\n detectAllRuntimes,\n generateAgentsMd,\n generateCreateExpertToml,\n getDefaultModel,\n} from \"../src/index.js\"\nimport type { LLMProvider } from \"../src/tui/index.js\"\nimport { renderWizard } from \"../src/tui/index.js\"\n\nconfig()\n\nfunction getEnvVarName(provider: LLMProvider): string {\n switch (provider) {\n case \"anthropic\":\n return \"ANTHROPIC_API_KEY\"\n case \"openai\":\n return \"OPENAI_API_KEY\"\n case \"google\":\n return \"GOOGLE_GENERATIVE_AI_API_KEY\"\n }\n}\n\nconst program = new Command()\n .name(\"create-expert\")\n .description(\"Create Perstack Experts interactively\")\n .version(\"0.0.1\")\n .argument(\"[expertName]\", \"Expert name to improve (for improvement mode)\")\n .argument(\"[improvements]\", \"Improvement description (for improvement mode)\")\n .option(\"--cwd <path>\", \"Working directory\", process.cwd())\n .action(async (expertName?: string, improvements?: string, options?: { cwd: string }) => {\n const cwd = options?.cwd || process.cwd()\n const isImprovement = Boolean(expertName)\n const improvementTarget = improvements || \"\"\n const perstackTomlPath = join(cwd, \"perstack.toml\")\n const agentsMdPath = join(cwd, \"AGENTS.md\")\n const envPath = join(cwd, \".env\")\n const llms = detectAllLLMs()\n const runtimes = detectAllRuntimes()\n const wizardResult = await renderWizard({\n llms,\n runtimes,\n isImprovement,\n improvementTarget,\n })\n if (!wizardResult) {\n console.log(\"Wizard cancelled.\")\n process.exit(0)\n }\n if (wizardResult.apiKey && wizardResult.provider) {\n const envVarName = getEnvVarName(wizardResult.provider)\n const envContent = `${envVarName}=${wizardResult.apiKey}\\n`\n if (existsSync(envPath)) {\n const existing = readFileSync(envPath, \"utf-8\")\n const hasEnvVar = new RegExp(`^${envVarName}=`, \"m\").test(existing)\n if (!hasEnvVar) {\n writeFileSync(envPath, `${existing}\\n${envContent}`)\n console.log(`✓ Added ${envVarName} to .env`)\n }\n } else {\n writeFileSync(envPath, envContent)\n console.log(`✓ Created .env with ${envVarName}`)\n }\n process.env[envVarName] = wizardResult.apiKey\n }\n const isDefaultRuntime = wizardResult.runtime === \"default\"\n if (!isImprovement) {\n if (isDefaultRuntime) {\n const provider = wizardResult.provider || \"anthropic\"\n const model = wizardResult.model || getDefaultModel(provider)\n const agentsMd = generateAgentsMd({ provider, model })\n writeFileSync(agentsMdPath, agentsMd)\n console.log(\"✓ Created AGENTS.md\")\n const createExpertToml = generateCreateExpertToml({ provider, model })\n writeFileSync(perstackTomlPath, createExpertToml)\n console.log(\"✓ Created perstack.toml with create-expert Expert\")\n } else {\n const provider = wizardResult.provider || \"anthropic\"\n const model = wizardResult.model || getDefaultModel(provider)\n const agentsMd = generateAgentsMd({\n provider,\n model,\n runtime: wizardResult.runtime,\n })\n writeFileSync(agentsMdPath, agentsMd)\n console.log(\"✓ Created AGENTS.md\")\n const createExpertToml = generateCreateExpertToml({\n 
provider,\n model,\n runtime: wizardResult.runtime,\n })\n writeFileSync(perstackTomlPath, createExpertToml)\n console.log(\"✓ Created perstack.toml with create-expert Expert\")\n }\n }\n const expertDescription = wizardResult.expertDescription || \"\"\n const query = isImprovement\n ? `Improve the Expert \"${expertName}\": ${expertDescription}`\n : `Create a new Expert based on these requirements: ${expertDescription}`\n const runtimeArg = isDefaultRuntime ? [] : [\"--runtime\", wizardResult.runtime]\n const args = [\"perstack\", \"start\", \"create-expert\", query, ...runtimeArg]\n const proc = spawn(\"npx\", args, {\n cwd,\n env: process.env,\n stdio: \"inherit\",\n })\n proc.on(\"exit\", (code) => {\n process.exit(code || 0)\n })\n })\n\nprogram.parse()\n","import type { LLMProvider, RuntimeType } from \"../tui/index.js\"\n\ninterface AgentsMdOptions {\n provider: LLMProvider\n model: string\n runtime?: RuntimeType | \"default\"\n}\n\nexport function generateAgentsMd(options: AgentsMdOptions): string {\n const { provider, model, runtime } = options\n const isNonDefaultRuntime = runtime && runtime !== \"default\"\n const runtimeSection = isNonDefaultRuntime ? `runtime = \"${runtime}\"` : \"\"\n return `# AGENTS.md\n\n## What is Perstack\n\nPerstack is a package manager and runtime for agent-first development. It enables you to define, test, and share modular AI agents called \"Experts\".\n\nKey concepts:\n- **Experts**: Modular micro-agents defined in TOML\n- **Runtime**: Executes Experts with isolation, observability, and sandbox support\n- **Registry**: Public registry for sharing and reusing Experts\n\n## Project Configuration\n\nThis project uses:\n- Provider: ${provider}\n- Model: ${model}\n${isNonDefaultRuntime ? `- Runtime: ${runtime}` : \"- Runtime: docker (default)\"}\n\n## CLI Reference\n\n### Running Experts\n\n**\\`perstack start\\`** - Interactive workbench for developing and testing Experts\n\\`\\`\\`bash\nperstack start [expertKey] [query]\n\\`\\`\\`\n\n**\\`perstack run\\`** - Headless execution for production and automation\n\\`\\`\\`bash\nperstack run <expertKey> <query> [options]\n\\`\\`\\`\n\n### Common Options\n\n| Option | Description | Default |\n|--------|-------------|---------|\n| \\`--provider <provider>\\` | LLM provider | \\`anthropic\\` |\n| \\`--model <model>\\` | Model name | \\`claude-sonnet-4-5\\` |\n| \\`--max-steps <n>\\` | Maximum steps | unlimited |\n| \\`--runtime <runtime>\\` | Execution runtime | \\`docker\\` |\n\n### Available Runtimes\n\n- \\`docker\\` — Containerized runtime with network isolation (default)\n- \\`local\\` — Built-in runtime without isolation\n- \\`cursor\\` — Cursor CLI (experimental)\n- \\`claude-code\\` — Claude Code CLI (experimental)\n- \\`gemini\\` — Gemini CLI (experimental)\n\n## perstack.toml Format\n\n\\`\\`\\`toml\nmodel = \"${model}\"\n${runtimeSection}\n\n[provider]\nproviderName = \"${provider}\"\n\n[experts.\"my-expert\"]\nversion = \"1.0.0\"\ndescription = \"Brief description\"\ninstruction = \"\"\"\nDetailed instructions for the expert.\n\"\"\"\n\n[experts.\"my-expert\".skills.\"@perstack/base\"]\ntype = \"mcpStdioSkill\"\ncommand = \"npx\"\npackageName = \"@perstack/base\"\n\\`\\`\\`\n\n## Best Practices for Creating Experts\n\n### 1. 
Do One Thing Well\n\nBad:\n\\`\\`\\`toml\n[experts.\"assistant\"]\ndescription = \"Handles inquiries, reports, meetings, and expenses\"\n\\`\\`\\`\n\nGood:\n\\`\\`\\`toml\n[experts.\"customer-support\"]\ndescription = \"Answers customer questions about products and orders\"\n\\`\\`\\`\n\n### 2. Trust the LLM, Define Domain Knowledge\n\nBad (procedural):\n\\`\\`\\`toml\ninstruction = \"\"\"\n1. First, greet the customer\n2. Ask for their order number\n3. Look up the order\n\"\"\"\n\\`\\`\\`\n\nGood (declarative):\n\\`\\`\\`toml\ninstruction = \"\"\"\nYou are a customer support specialist.\n\nKey policies:\n- Orders ship within 2 business days\n- Free returns within 30 days\n- VIP customers get priority handling\n\nTone: Friendly but professional.\n\"\"\"\n\\`\\`\\`\n\n### 3. Let Them Collaborate\n\nUse delegation for complex workflows:\n\\`\\`\\`toml\n[experts.\"coordinator\"]\ndelegates = [\"researcher\", \"writer\", \"reviewer\"]\n\n[experts.\"researcher\"]\ndescription = \"Gathers information from various sources\"\n\\`\\`\\`\n\n### 4. Keep It Verifiable\n\nWrite clear, predictable instructions:\n\\`\\`\\`toml\ninstruction = \"\"\"\nApproval rules:\n- Under $100: Auto-approve with receipt\n- $100-$500: Approve if business purpose is clear\n- Over $500: Flag for manager review\n\"\"\"\n\\`\\`\\`\n\n### 5. Ship Early\n\nStart minimal, expand based on real usage. Don't over-engineer.\n\n## Finding Skills (MCP Servers)\n\nSkills extend Experts with external capabilities via MCP (Model Context Protocol).\n\n### MCP Registry\n\nSearch for MCP servers at: https://registry.modelcontextprotocol.io\n\n**API Reference:**\n\\`\\`\\`bash\n# List all servers\ncurl \"https://registry.modelcontextprotocol.io/v0.1/servers\"\n\n# Search by name\ncurl \"https://registry.modelcontextprotocol.io/v0.1/servers?search=filesystem\"\n\n# Get specific server\ncurl \"https://registry.modelcontextprotocol.io/v0.1/servers/{serverName}/versions/{version}\"\n\\`\\`\\`\n\n### Using MCP Skills\n\n**npm packages (most common):**\n\\`\\`\\`toml\n[experts.\"my-expert\".skills.\"web-search\"]\ntype = \"mcpStdioSkill\"\ncommand = \"npx\"\npackageName = \"exa-mcp-server\"\nrequiredEnv = [\"EXA_API_KEY\"]\npick = [\"web_search_exa\"] # Optional: whitelist tools\n\\`\\`\\`\n\n**Remote servers (SSE):**\n\\`\\`\\`toml\n[experts.\"my-expert\".skills.\"remote-api\"]\ntype = \"mcpSseSkill\"\nendpoint = \"https://api.example.com/mcp\"\n\\`\\`\\`\n\n### Built-in Base Skill\n\n\\`@perstack/base\\` provides essential tools:\n- File operations: \\`readTextFile\\`, \\`writeTextFile\\`, \\`editTextFile\\`, etc.\n- Shell execution: \\`exec\\`\n- Control flow: \\`attemptCompletion\\`, \\`think\\`, \\`todo\\`\n\n\\`\\`\\`toml\n[experts.\"my-expert\".skills.\"@perstack/base\"]\ntype = \"mcpStdioSkill\"\ncommand = \"npx\"\npackageName = \"@perstack/base\"\n\\`\\`\\`\n\n## Testing Experts\n\n1. Start with \\`perstack start\\` for interactive development\n2. Test happy path scenarios first\n3. Test edge cases and error scenarios\n4. Verify outputs match expectations\n5. 
Use \\`--max-steps\\` to limit runaway executions\n\n## Project Files\n\n- \\`perstack.toml\\` - Expert definitions and runtime config\n- \\`AGENTS.md\\` - This file, for AI agent context\n- \\`.env\\` - Environment variables (API keys)\n- \\`perstack/\\` - Execution history (auto-managed)\n`\n}\n","import type { LLMProvider, RuntimeType } from \"../tui/index.js\"\n\ninterface CreateExpertTomlOptions {\n provider: LLMProvider\n model: string\n runtime?: \"default\" | RuntimeType\n}\n\nexport function generateCreateExpertToml(options: CreateExpertTomlOptions): string {\n const runtimeLine =\n options.runtime && options.runtime !== \"default\" ? `runtime = \"${options.runtime}\"\\n` : \"\"\n return `model = \"${options.model}\"\n${runtimeLine}\n[provider]\nproviderName = \"${options.provider}\"\n\n[experts.\"create-expert\"]\nversion = \"1.0.0\"\ndescription = \"Creates and tests new Perstack Experts based on user requirements\"\ninstruction = \"\"\"\nYou are an Expert creator for Perstack. Your job is to create well-designed Experts based on user requirements.\n\n## Your Workflow\n\n1. **Understand Requirements**: Analyze what the user wants the Expert to do\n2. **Design the Expert**: Plan the structure, skills needed, and delegation patterns\n3. **Implement**: Write the Expert definition in perstack.toml\n4. **Test**: Run the Expert with realistic test cases\n5. **Iterate**: Fix issues and improve based on test results\n\n## Design Principles\n\n1. **Do One Thing Well**: Each Expert should focus on a single responsibility\n2. **Define Domain Knowledge**: Write declarative instructions with policies, not procedures\n3. **Use Delegation**: Break complex tasks into collaborating Experts\n4. **Keep It Verifiable**: Write clear, predictable behavior\n5. **Start Simple**: Begin minimal, expand based on real needs\n\n## Expert Structure\n\n[experts.\"expert-name\"]\nversion = \"1.0.0\"\ndescription = \"Brief description visible to delegators\"\ninstruction = \"Domain knowledge, policies, and constraints.\"\ndelegates = [\"other-expert\"]\n\n[experts.\"expert-name\".skills.\"skill-name\"]\ntype = \"mcpStdioSkill\"\ncommand = \"npx\"\npackageName = \"package-name\"\n\n## Finding Skills\n\nSearch MCP Registry for available skills:\n- Browse: https://registry.modelcontextprotocol.io\n- API: GET /v0.1/servers?search=<query>\n\nCommon skills:\n- \\`@perstack/base\\` - File operations, shell, control flow\n- \\`exa-mcp-server\\` - Web search (requires EXA_API_KEY)\n- \\`@anthropic/mcp-server-memory\\` - Persistent memory\n\n## Testing Protocol\n\nFor each Expert you create:\n\n1. **Happy Path Tests**\n - Test the primary use case with normal inputs\n - Verify the output matches expectations\n\n2. **Edge Case Tests**\n - Empty or minimal inputs\n - Unusual but valid inputs\n - Boundary conditions\n\n3. **Error Handling Tests**\n - Invalid inputs\n - Missing required data\n - Skill failures\n\nUse \\`npx -y perstack run\\` to execute tests:\n\\`\\`\\`bash\nnpx -y perstack run expert-name \"test query\"\n\\`\\`\\`\n\n## Iteration Process\n\nAfter each test:\n1. Review the output\n2. Identify issues or improvements\n3. Update the Expert definition\n4. 
Re-run tests to verify fixes\n\nContinue until all tests pass and the Expert behaves as expected.\n\"\"\"\n\n[experts.\"create-expert\".skills.\"@perstack/base\"]\ntype = \"mcpStdioSkill\"\ncommand = \"npx\"\npackageName = \"@perstack/base\"\n`\n}\n","import type { LLMInfo, LLMProvider } from \"../tui/index.js\"\n\nconst LLM_CONFIGS: Record<\n LLMProvider,\n { envVar: string; displayName: string; defaultModel: string }\n> = {\n anthropic: {\n envVar: \"ANTHROPIC_API_KEY\",\n displayName: \"Anthropic (Claude)\",\n defaultModel: \"claude-sonnet-4-5\",\n },\n openai: { envVar: \"OPENAI_API_KEY\", displayName: \"OpenAI\", defaultModel: \"gpt-4o\" },\n google: {\n envVar: \"GOOGLE_GENERATIVE_AI_API_KEY\",\n displayName: \"Google (Gemini)\",\n defaultModel: \"gemini-2.5-pro\",\n },\n}\n\nexport function detectLLM(provider: LLMProvider): LLMInfo {\n const config = LLM_CONFIGS[provider]\n return {\n provider,\n envVar: config.envVar,\n available: Boolean(process.env[config.envVar]),\n displayName: config.displayName,\n defaultModel: config.defaultModel,\n }\n}\n\nexport function detectAllLLMs(): LLMInfo[] {\n return (Object.keys(LLM_CONFIGS) as LLMProvider[]).map(detectLLM)\n}\n\nexport function getAvailableLLMs(): LLMInfo[] {\n return detectAllLLMs().filter((l) => l.available)\n}\n\nexport function getDefaultModel(provider: LLMProvider): string {\n return LLM_CONFIGS[provider].defaultModel\n}\n","import { execSync } from \"node:child_process\"\nimport type { RuntimeInfo } from \"../tui/index.js\"\n\nfunction checkCommand(command: string): { available: boolean; version?: string } {\n try {\n const result = execSync(`${command} --version`, {\n encoding: \"utf-8\",\n stdio: [\"pipe\", \"pipe\", \"pipe\"],\n timeout: 5000,\n })\n const firstLine = result.trim().split(\"\\n\")[0]\n return { available: true, version: firstLine }\n } catch {\n return { available: false }\n }\n}\n\nexport function detectCursor(): RuntimeInfo {\n const result = checkCommand(\"cursor\")\n return { type: \"cursor\", ...result }\n}\n\nexport function detectClaudeCode(): RuntimeInfo {\n const result = checkCommand(\"claude\")\n return { type: \"claude-code\", ...result }\n}\n\nexport function detectGemini(): RuntimeInfo {\n const result = checkCommand(\"gemini\")\n return { type: \"gemini\", ...result }\n}\n\nexport function detectAllRuntimes(): RuntimeInfo[] {\n return [detectCursor(), detectClaudeCode(), detectGemini()]\n}\n\nexport function getAvailableRuntimes(): RuntimeInfo[] {\n return detectAllRuntimes().filter((r) => r.available)\n}\n","import { render } from \"ink\"\nimport { App } from \"./app.js\"\nimport type { WizardOptions, WizardResult } from \"./types.js\"\n\nexport async function renderWizard(options: WizardOptions): Promise<WizardResult | null> {\n return new Promise((resolve) => {\n let result: WizardResult | null = null\n const { waitUntilExit } = render(\n <App\n llms={options.llms}\n runtimes={options.runtimes}\n isImprovement={options.isImprovement}\n improvementTarget={options.improvementTarget}\n onComplete={(wizardResult) => {\n result = wizardResult\n }}\n />,\n )\n waitUntilExit().then(() => resolve(result))\n })\n}\n","import { Box, Text, useApp, useInput } from \"ink\"\nimport { type ReactNode, useEffect, useState } from \"react\"\nimport type { LLMInfo, LLMProvider, RuntimeInfo, RuntimeType, WizardResult } from \"./types.js\"\n\ntype WizardStep =\n | \"detecting\"\n | \"select-runtime\"\n | \"select-llm\"\n | \"select-provider\"\n | \"input-api-key\"\n | \"input-expert-description\"\n | 
\"done\"\n\ninterface WizardProps {\n llms: LLMInfo[]\n runtimes: RuntimeInfo[]\n onComplete: (result: WizardResult) => void\n isImprovement?: boolean\n improvementTarget?: string\n}\n\ninterface RuntimeOption {\n key: string\n type: \"default\" | RuntimeType\n label: string\n version?: string\n}\n\nfunction SelectableList({\n items,\n selectedIndex,\n renderItem,\n}: {\n items: { key: string; label: string; disabled?: boolean }[]\n selectedIndex: number\n renderItem?: (\n item: { key: string; label: string; disabled?: boolean },\n selected: boolean,\n ) => ReactNode\n}) {\n return (\n <Box flexDirection=\"column\">\n {items.map((item, index) => {\n const isSelected = index === selectedIndex\n if (renderItem) {\n return <Box key={item.key}>{renderItem(item, isSelected)}</Box>\n }\n return (\n <Box key={item.key}>\n <Text color={item.disabled ? \"gray\" : isSelected ? \"cyan\" : \"white\"}>\n {isSelected ? \"❯ \" : \" \"}\n {item.label}\n {item.disabled ? \" (not available)\" : \"\"}\n </Text>\n </Box>\n )\n })}\n </Box>\n )\n}\n\nfunction TextInputComponent({\n value,\n onChange,\n onSubmit,\n placeholder,\n isSecret,\n}: {\n value: string\n onChange: (value: string) => void\n onSubmit: () => void\n placeholder?: string\n isSecret?: boolean\n}) {\n useInput((input, key) => {\n if (key.return) {\n onSubmit()\n } else if (key.backspace || key.delete) {\n onChange(value.slice(0, -1))\n } else if (!key.ctrl && !key.meta && input) {\n onChange(value + input)\n }\n })\n const displayValue = isSecret ? \"•\".repeat(value.length) : value\n return (\n <Box>\n <Text color=\"cyan\">{displayValue || <Text color=\"gray\">{placeholder}</Text>}</Text>\n <Text color=\"cyan\">█</Text>\n </Box>\n )\n}\n\nfunction getRuntimeDisplayName(type: RuntimeType): string {\n switch (type) {\n case \"cursor\":\n return \"Cursor\"\n case \"claude-code\":\n return \"Claude Code\"\n case \"gemini\":\n return \"Gemini CLI\"\n }\n}\n\nexport function App({ llms, runtimes, onComplete, isImprovement, improvementTarget }: WizardProps) {\n const { exit } = useApp()\n const [step, setStep] = useState<WizardStep>(\"detecting\")\n const [selectedIndex, setSelectedIndex] = useState(0)\n const [result, setResult] = useState<Partial<WizardResult>>({})\n const [apiKeyInput, setApiKeyInput] = useState(\"\")\n const [expertDescInput, setExpertDescInput] = useState(improvementTarget || \"\")\n const availableLLMs = llms.filter((l) => l.available)\n const availableRuntimes = runtimes.filter((r) => r.available)\n const runtimeOptions: RuntimeOption[] = [\n { key: \"default\", type: \"default\", label: \"Default (built-in)\" },\n ...availableRuntimes.map((r) => ({\n key: r.type,\n type: r.type,\n label: `${getRuntimeDisplayName(r.type)}${r.version ? ` (${r.version})` : \"\"}`,\n version: r.version,\n })),\n ]\n const llmOptionsWithOther = [\n ...llms.map((l) => ({\n key: l.provider,\n label: `${l.displayName}${l.available ? 
\" ✓\" : \"\"}`,\n provider: l.provider,\n available: l.available,\n defaultModel: l.defaultModel,\n })),\n {\n key: \"other\",\n label: \"Other (configure new provider)\",\n provider: null,\n available: false,\n defaultModel: \"\",\n },\n ]\n useEffect(() => {\n if (step === \"detecting\") {\n const timer = setTimeout(() => {\n if (isImprovement) {\n const llm = availableLLMs[0]\n if (llm) {\n setResult({\n runtime: \"default\",\n provider: llm.provider,\n model: llm.defaultModel,\n })\n setStep(\"input-expert-description\")\n } else {\n setStep(\"select-runtime\")\n }\n } else {\n setStep(\"select-runtime\")\n }\n }, 500)\n return () => clearTimeout(timer)\n }\n return undefined\n }, [step, isImprovement, availableLLMs])\n useInput((_, key) => {\n if (key.escape) {\n exit()\n return\n }\n if (step === \"input-api-key\" || step === \"input-expert-description\") {\n return\n }\n if (key.upArrow) {\n setSelectedIndex((prev) => Math.max(0, prev - 1))\n } else if (key.downArrow) {\n setSelectedIndex((prev) => {\n const maxIndex = getMaxIndex()\n return Math.min(maxIndex, prev + 1)\n })\n } else if (key.return) {\n handleSelect()\n }\n })\n function getMaxIndex(): number {\n switch (step) {\n case \"select-runtime\":\n return runtimeOptions.length - 1\n case \"select-llm\":\n return llmOptionsWithOther.length - 1\n case \"select-provider\":\n return llms.length - 1\n default:\n return 0\n }\n }\n function handleSelect() {\n switch (step) {\n case \"select-runtime\": {\n const selected = runtimeOptions[selectedIndex]\n if (!selected) break\n if (selected.type === \"default\") {\n setResult({ runtime: \"default\" })\n if (availableLLMs.length > 0) {\n setStep(\"select-llm\")\n } else {\n setStep(\"select-provider\")\n }\n } else {\n setResult({ runtime: selected.type })\n setStep(\"input-expert-description\")\n }\n setSelectedIndex(0)\n break\n }\n case \"select-llm\": {\n const selected = llmOptionsWithOther[selectedIndex]\n if (!selected) break\n if (selected.key === \"other\") {\n setStep(\"select-provider\")\n } else if (selected.available && selected.provider) {\n setResult((prev) => ({\n ...prev,\n provider: selected.provider as LLMProvider,\n model: selected.defaultModel,\n }))\n setStep(\"input-expert-description\")\n } else if (selected.provider) {\n setResult((prev) => ({ ...prev, provider: selected.provider as LLMProvider }))\n setStep(\"input-api-key\")\n }\n setSelectedIndex(0)\n break\n }\n case \"select-provider\": {\n const selected = llms[selectedIndex]\n if (!selected) break\n setResult((prev) => ({ ...prev, provider: selected.provider }))\n setStep(\"input-api-key\")\n setSelectedIndex(0)\n break\n }\n }\n }\n function handleApiKeySubmit() {\n if (apiKeyInput.trim()) {\n const selectedLlm = llms.find((l) => l.provider === result.provider) ?? 
llms[0]\n if (!selectedLlm) return\n setResult((prev) => ({\n ...prev,\n provider: selectedLlm.provider,\n model: selectedLlm.defaultModel,\n apiKey: apiKeyInput.trim(),\n }))\n setStep(\"input-expert-description\")\n }\n }\n function handleExpertDescSubmit() {\n if (expertDescInput.trim()) {\n const finalResult: WizardResult = {\n runtime: result.runtime || \"default\",\n provider: result.provider,\n model: result.model,\n apiKey: result.apiKey,\n expertDescription: expertDescInput.trim(),\n }\n onComplete(finalResult)\n setStep(\"done\")\n exit()\n }\n }\n return (\n <Box flexDirection=\"column\" padding={1}>\n <Box marginBottom={1}>\n <Text bold color=\"cyan\">\n 🚀 Create Expert Wizard\n </Text>\n </Box>\n {step === \"detecting\" && (\n <Box>\n <Text color=\"yellow\">Detecting available runtimes...</Text>\n </Box>\n )}\n {step === \"select-runtime\" && (\n <Box flexDirection=\"column\">\n <Box marginBottom={1}>\n <Text>Select a runtime:</Text>\n </Box>\n <SelectableList\n items={runtimeOptions.map((r) => ({ key: r.key, label: r.label }))}\n selectedIndex={selectedIndex}\n />\n <Box marginTop={1}>\n <Text color=\"gray\">↑↓ to move, Enter to select, Esc to exit</Text>\n </Box>\n </Box>\n )}\n {step === \"select-llm\" && (\n <Box flexDirection=\"column\">\n <Box marginBottom={1}>\n <Text>Select an LLM provider:</Text>\n </Box>\n <SelectableList\n items={llmOptionsWithOther.map((l) => ({ key: l.key, label: l.label }))}\n selectedIndex={selectedIndex}\n />\n <Box marginTop={1}>\n <Text color=\"gray\">↑↓ to move, Enter to select</Text>\n </Box>\n </Box>\n )}\n {step === \"select-provider\" && (\n <Box flexDirection=\"column\">\n <Box marginBottom={1}>\n <Text color=\"yellow\">⚠ No LLM API keys found.</Text>\n </Box>\n <Box marginBottom={1}>\n <Text>Perstack requires an API key from one of these providers:</Text>\n </Box>\n <Box flexDirection=\"column\" marginBottom={1}>\n {llms.map((l) => (\n <Text key={l.provider} color=\"gray\">\n • {l.displayName} ({l.envVar})\n </Text>\n ))}\n </Box>\n <Box marginBottom={1}>\n <Text>Select a provider to configure:</Text>\n </Box>\n <SelectableList\n items={llms.map((l) => ({ key: l.provider, label: l.displayName }))}\n selectedIndex={selectedIndex}\n />\n <Box marginTop={1}>\n <Text color=\"gray\">↑↓ to move, Enter to select</Text>\n </Box>\n </Box>\n )}\n {step === \"input-api-key\" && (\n <Box flexDirection=\"column\">\n <Box marginBottom={1}>\n <Text>\n Enter your {llms.find((l) => l.provider === result.provider)?.displayName || \"API\"}{\" \"}\n key:\n </Text>\n </Box>\n <TextInputComponent\n value={apiKeyInput}\n onChange={setApiKeyInput}\n onSubmit={handleApiKeySubmit}\n placeholder=\"sk-...\"\n isSecret={true}\n />\n <Box marginTop={1}>\n <Text color=\"gray\">Type your API key and press Enter</Text>\n </Box>\n </Box>\n )}\n {step === \"input-expert-description\" && (\n <Box flexDirection=\"column\">\n <Box marginBottom={1}>\n <Text bold>\n {isImprovement\n ? 
\"What improvements do you want?\"\n : \"What kind of Expert do you want to create?\"}\n </Text>\n </Box>\n <Box marginBottom={1}>\n <Text color=\"gray\">\n Describe the Expert's purpose, capabilities, or domain knowledge.\n </Text>\n </Box>\n <TextInputComponent\n value={expertDescInput}\n onChange={setExpertDescInput}\n onSubmit={handleExpertDescSubmit}\n placeholder=\"e.g., A code reviewer that checks for TypeScript best practices\"\n />\n <Box marginTop={1}>\n <Text color=\"gray\">Type your description and press Enter</Text>\n </Box>\n </Box>\n )}\n {step === \"done\" && (\n <Box>\n <Text color=\"green\">✓ Configuration complete! Starting Expert creation...</Text>\n </Box>\n )}\n </Box>\n )\n}\n"],"mappings":";;;;AACA,SAAS,aAAa;AACtB,SAAS,YAAY,cAAc,qBAAqB;AACxD,SAAS,YAAY;AACrB,SAAS,eAAe;AACxB,SAAS,cAAc;;;ACGhB,SAAS,iBAAiB,SAAkC;AACjE,QAAM,EAAE,UAAU,OAAO,QAAQ,IAAI;AACrC,QAAM,sBAAsB,WAAW,YAAY;AACnD,QAAM,iBAAiB,sBAAsB,cAAc,OAAO,MAAM;AACxE,SAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,cAcK,QAAQ;AAAA,WACX,KAAK;AAAA,EACd,sBAAsB,cAAc,OAAO,KAAK,6BAA6B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,WAoCpE,KAAK;AAAA,EACd,cAAc;AAAA;AAAA;AAAA,kBAGE,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAuJ1B;;;ACnNO,SAAS,yBAAyB,SAA0C;AACjF,QAAM,cACJ,QAAQ,WAAW,QAAQ,YAAY,YAAY,cAAc,QAAQ,OAAO;AAAA,IAAQ;AAC1F,SAAO,YAAY,QAAQ,KAAK;AAAA,EAChC,WAAW;AAAA;AAAA,kBAEK,QAAQ,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAuFlC;;;ACnGA,IAAM,cAGF;AAAA,EACF,WAAW;AAAA,IACT,QAAQ;AAAA,IACR,aAAa;AAAA,IACb,cAAc;AAAA,EAChB;AAAA,EACA,QAAQ,EAAE,QAAQ,kBAAkB,aAAa,UAAU,cAAc,SAAS;AAAA,EAClF,QAAQ;AAAA,IACN,QAAQ;AAAA,IACR,aAAa;AAAA,IACb,cAAc;AAAA,EAChB;AACF;AAEO,SAAS,UAAU,UAAgC;AACxD,QAAMA,UAAS,YAAY,QAAQ;AACnC,SAAO;AAAA,IACL;AAAA,IACA,QAAQA,QAAO;AAAA,IACf,WAAW,QAAQ,QAAQ,IAAIA,QAAO,MAAM,CAAC;AAAA,IAC7C,aAAaA,QAAO;AAAA,IACpB,cAAcA,QAAO;AAAA,EACvB;AACF;AAEO,SAAS,gBAA2B;AACzC,SAAQ,OAAO,KAAK,WAAW,EAAoB,IAAI,SAAS;AAClE;AAMO,SAAS,gBAAgB,UAA+B;AAC7D,SAAO,YAAY,QAAQ,EAAE;AAC/B;;;ACxCA,SAAS,gBAAgB;AAGzB,SAAS,aAAa,SAA2D;AAC/E,MAAI;AACF,UAAM,SAAS,SAAS,GAAG,OAAO,cAAc;AAAA,MAC9C,UAAU;AAAA,MACV,OAAO,CAAC,QAAQ,QAAQ,MAAM;AAAA,MAC9B,SAAS;AAAA,IACX,CAAC;AACD,UAAM,YAAY,OAAO,KAAK,EAAE,MAAM,IAAI,EAAE,CAAC;AAC7C,WAAO,EAAE,WAAW,MAAM,SAAS,UAAU;AAAA,EAC/C,QAAQ;AACN,WAAO,EAAE,WAAW,MAAM;AAAA,EAC5B;AACF;AAEO,SAAS,eAA4B;AAC1C,QAAM,S
AAS,aAAa,QAAQ;AACpC,SAAO,EAAE,MAAM,UAAU,GAAG,OAAO;AACrC;AAEO,SAAS,mBAAgC;AAC9C,QAAM,SAAS,aAAa,QAAQ;AACpC,SAAO,EAAE,MAAM,eAAe,GAAG,OAAO;AAC1C;AAEO,SAAS,eAA4B;AAC1C,QAAM,SAAS,aAAa,QAAQ;AACpC,SAAO,EAAE,MAAM,UAAU,GAAG,OAAO;AACrC;AAEO,SAAS,oBAAmC;AACjD,SAAO,CAAC,aAAa,GAAG,iBAAiB,GAAG,aAAa,CAAC;AAC5D;;;AClCA,SAAS,cAAc;;;ACAvB,SAAS,KAAK,MAAM,QAAQ,gBAAgB;AAC5C,SAAyB,WAAW,gBAAgB;AA4CnC,cAIL,YAJK;AAjBjB,SAAS,eAAe;AAAA,EACtB;AAAA,EACA;AAAA,EACA;AACF,GAOG;AACD,SACE,oBAAC,OAAI,eAAc,UAChB,gBAAM,IAAI,CAAC,MAAM,UAAU;AAC1B,UAAM,aAAa,UAAU;AAC7B,QAAI,YAAY;AACd,aAAO,oBAAC,OAAoB,qBAAW,MAAM,UAAU,KAAtC,KAAK,GAAmC;AAAA,IAC3D;AACA,WACE,oBAAC,OACC,+BAAC,QAAK,OAAO,KAAK,WAAW,SAAS,aAAa,SAAS,SACzD;AAAA,mBAAa,YAAO;AAAA,MACpB,KAAK;AAAA,MACL,KAAK,WAAW,qBAAqB;AAAA,OACxC,KALQ,KAAK,GAMf;AAAA,EAEJ,CAAC,GACH;AAEJ;AAEA,SAAS,mBAAmB;AAAA,EAC1B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAMG;AACD,WAAS,CAAC,OAAO,QAAQ;AACvB,QAAI,IAAI,QAAQ;AACd,eAAS;AAAA,IACX,WAAW,IAAI,aAAa,IAAI,QAAQ;AACtC,eAAS,MAAM,MAAM,GAAG,EAAE,CAAC;AAAA,IAC7B,WAAW,CAAC,IAAI,QAAQ,CAAC,IAAI,QAAQ,OAAO;AAC1C,eAAS,QAAQ,KAAK;AAAA,IACxB;AAAA,EACF,CAAC;AACD,QAAM,eAAe,WAAW,SAAI,OAAO,MAAM,MAAM,IAAI;AAC3D,SACE,qBAAC,OACC;AAAA,wBAAC,QAAK,OAAM,QAAQ,0BAAgB,oBAAC,QAAK,OAAM,QAAQ,uBAAY,GAAQ;AAAA,IAC5E,oBAAC,QAAK,OAAM,QAAO,oBAAC;AAAA,KACtB;AAEJ;AAEA,SAAS,sBAAsB,MAA2B;AACxD,UAAQ,MAAM;AAAA,IACZ,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,EACX;AACF;AAEO,SAAS,IAAI,EAAE,MAAM,UAAU,YAAY,eAAe,kBAAkB,GAAgB;AACjG,QAAM,EAAE,KAAK,IAAI,OAAO;AACxB,QAAM,CAAC,MAAM,OAAO,IAAI,SAAqB,WAAW;AACxD,QAAM,CAAC,eAAe,gBAAgB,IAAI,SAAS,CAAC;AACpD,QAAM,CAAC,QAAQ,SAAS,IAAI,SAAgC,CAAC,CAAC;AAC9D,QAAM,CAAC,aAAa,cAAc,IAAI,SAAS,EAAE;AACjD,QAAM,CAAC,iBAAiB,kBAAkB,IAAI,SAAS,qBAAqB,EAAE;AAC9E,QAAM,gBAAgB,KAAK,OAAO,CAAC,MAAM,EAAE,SAAS;AACpD,QAAM,oBAAoB,SAAS,OAAO,CAAC,MAAM,EAAE,SAAS;AAC5D,QAAM,iBAAkC;AAAA,IACtC,EAAE,KAAK,WAAW,MAAM,WAAW,OAAO,qBAAqB;AAAA,IAC/D,GAAG,kBAAkB,IAAI,CAAC,OAAO;AAAA,MAC/B,KAAK,EAAE;AAAA,MACP,MAAM,EAAE;AAAA,MACR,OAAO,GAAG,sBAAsB,EAAE,IAAI,CAAC,GAAG,EAAE,UAAU,KAAK,EAAE,OAAO,MAAM,EAAE;AAAA,MAC5E,SAAS,EAAE;AAAA,IACb,EAAE;AAAA,EACJ;AACA,QAAM,sBAAsB;AAAA,IAC1B,GAAG,KAAK,IAAI,CAAC,OAAO;AAAA,MAClB,KAAK,EAAE;AAAA,MACP,OAAO,GAAG,EAAE,WAAW,GAAG,EAAE,YAAY,YAAO,EAAE;AAAA,MACjD,UAAU,EAAE;AAAA,MACZ,WAAW,EAAE;AAAA,MACb,cAAc,EAAE;AAAA,IAClB,EAAE;AAAA,IACF;AAAA,MACE,KAAK;AAAA,MACL,OAAO;AAAA,MACP,UAAU;AAAA,MACV,WAAW;AAAA,MACX,cAAc;AAAA,IAChB;AAAA,EACF;AACA,YAAU,MAAM;AACd,QAAI,SAAS,aAAa;AACxB,YAAM,QAAQ,WAAW,MAAM;AAC7B,YAAI,eAAe;AACjB,gBAAM,MAAM,cAAc,CAAC;AAC3B,cAAI,KAAK;AACP,sBAAU;AAAA,cACR,SAAS;AAAA,cACT,UAAU,IAAI;AAAA,cACd,OAAO,IAAI;AAAA,YACb,CAAC;AACD,oBAAQ,0BAA0B;AAAA,UACpC,OAAO;AACL,oBAAQ,gBAAgB;AAAA,UAC1B;AAAA,QACF,OAAO;AACL,kBAAQ,gBAAgB;AAAA,QAC1B;AAAA,MACF,GAAG,GAAG;AACN,aAAO,MAAM,aAAa,KAAK;AAAA,IACjC;AACA,WAAO;AAAA,EACT,GAAG,CAAC,MAAM,eAAe,aAAa,CAAC;AACvC,WAAS,CAAC,GAAG,QAAQ;AACnB,QAAI,IAAI,QAAQ;AACd,WAAK;AACL;AAAA,IACF;AACA,QAAI,SAAS,mBAAmB,SAAS,4BAA4B;AACnE;AAAA,IACF;AACA,QAAI,IAAI,SAAS;AACf,uBAAiB,CAAC,SAAS,KAAK,IAAI,GAAG,OAAO,CAAC,CAAC;AAAA,IAClD,WAAW,IAAI,WAAW;AACxB,uBAAiB,CAAC,SAAS;AACzB,cAAM,WAAW,YAAY;AAC7B,eAAO,KAAK,IAAI,UAAU,OAAO,CAAC;AAAA,MACpC,CAAC;AAAA,IACH,WAAW,IAAI,QAAQ;AACrB,mBAAa;AAAA,IACf;AAAA,EACF,CAAC;AACD,WAAS,cAAsB;AAC7B,YAAQ,MAAM;AAAA,MACZ,KAAK;AACH,eAAO,eAAe,SAAS;AAAA,MACjC,KAAK;AACH,eAAO,oBAAoB,SAAS;AAAA,MACtC,KAAK;AACH,eAAO,KAAK,SAAS;AAAA,MACvB;AACE,eAAO;AAAA,IACX;AAAA,EACF;AACA,WAAS,eAAe;AACtB,YAAQ,MAAM;AAAA,MACZ,KAAK,kBAAkB;AACrB,cAAM,WAAW,eAAe,aAAa;AAC7C,YAAI,CAAC,SAAU;AACf,YAAI,SAAS,SAAS,WAAW;AAC/B,oBAAU,EAAE,SAAS,UAAU,CAAC;AAChC,cAAI,cAAc,SAAS,GAAG;AAC
5B,oBAAQ,YAAY;AAAA,UACtB,OAAO;AACL,oBAAQ,iBAAiB;AAAA,UAC3B;AAAA,QACF,OAAO;AACL,oBAAU,EAAE,SAAS,SAAS,KAAK,CAAC;AACpC,kBAAQ,0BAA0B;AAAA,QACpC;AACA,yBAAiB,CAAC;AAClB;AAAA,MACF;AAAA,MACA,KAAK,cAAc;AACjB,cAAM,WAAW,oBAAoB,aAAa;AAClD,YAAI,CAAC,SAAU;AACf,YAAI,SAAS,QAAQ,SAAS;AAC5B,kBAAQ,iBAAiB;AAAA,QAC3B,WAAW,SAAS,aAAa,SAAS,UAAU;AAClD,oBAAU,CAAC,UAAU;AAAA,YACnB,GAAG;AAAA,YACH,UAAU,SAAS;AAAA,YACnB,OAAO,SAAS;AAAA,UAClB,EAAE;AACF,kBAAQ,0BAA0B;AAAA,QACpC,WAAW,SAAS,UAAU;AAC5B,oBAAU,CAAC,UAAU,EAAE,GAAG,MAAM,UAAU,SAAS,SAAwB,EAAE;AAC7E,kBAAQ,eAAe;AAAA,QACzB;AACA,yBAAiB,CAAC;AAClB;AAAA,MACF;AAAA,MACA,KAAK,mBAAmB;AACtB,cAAM,WAAW,KAAK,aAAa;AACnC,YAAI,CAAC,SAAU;AACf,kBAAU,CAAC,UAAU,EAAE,GAAG,MAAM,UAAU,SAAS,SAAS,EAAE;AAC9D,gBAAQ,eAAe;AACvB,yBAAiB,CAAC;AAClB;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACA,WAAS,qBAAqB;AAC5B,QAAI,YAAY,KAAK,GAAG;AACtB,YAAM,cAAc,KAAK,KAAK,CAAC,MAAM,EAAE,aAAa,OAAO,QAAQ,KAAK,KAAK,CAAC;AAC9E,UAAI,CAAC,YAAa;AAClB,gBAAU,CAAC,UAAU;AAAA,QACnB,GAAG;AAAA,QACH,UAAU,YAAY;AAAA,QACtB,OAAO,YAAY;AAAA,QACnB,QAAQ,YAAY,KAAK;AAAA,MAC3B,EAAE;AACF,cAAQ,0BAA0B;AAAA,IACpC;AAAA,EACF;AACA,WAAS,yBAAyB;AAChC,QAAI,gBAAgB,KAAK,GAAG;AAC1B,YAAM,cAA4B;AAAA,QAChC,SAAS,OAAO,WAAW;AAAA,QAC3B,UAAU,OAAO;AAAA,QACjB,OAAO,OAAO;AAAA,QACd,QAAQ,OAAO;AAAA,QACf,mBAAmB,gBAAgB,KAAK;AAAA,MAC1C;AACA,iBAAW,WAAW;AACtB,cAAQ,MAAM;AACd,WAAK;AAAA,IACP;AAAA,EACF;AACA,SACE,qBAAC,OAAI,eAAc,UAAS,SAAS,GACnC;AAAA,wBAAC,OAAI,cAAc,GACjB,8BAAC,QAAK,MAAI,MAAC,OAAM,QAAO,4CAExB,GACF;AAAA,IACC,SAAS,eACR,oBAAC,OACC,8BAAC,QAAK,OAAM,UAAS,6CAA+B,GACtD;AAAA,IAED,SAAS,oBACR,qBAAC,OAAI,eAAc,UACjB;AAAA,0BAAC,OAAI,cAAc,GACjB,8BAAC,QAAK,+BAAiB,GACzB;AAAA,MACA;AAAA,QAAC;AAAA;AAAA,UACC,OAAO,eAAe,IAAI,CAAC,OAAO,EAAE,KAAK,EAAE,KAAK,OAAO,EAAE,MAAM,EAAE;AAAA,UACjE;AAAA;AAAA,MACF;AAAA,MACA,oBAAC,OAAI,WAAW,GACd,8BAAC,QAAK,OAAM,QAAO,gEAAwC,GAC7D;AAAA,OACF;AAAA,IAED,SAAS,gBACR,qBAAC,OAAI,eAAc,UACjB;AAAA,0BAAC,OAAI,cAAc,GACjB,8BAAC,QAAK,qCAAuB,GAC/B;AAAA,MACA;AAAA,QAAC;AAAA;AAAA,UACC,OAAO,oBAAoB,IAAI,CAAC,OAAO,EAAE,KAAK,EAAE,KAAK,OAAO,EAAE,MAAM,EAAE;AAAA,UACtE;AAAA;AAAA,MACF;AAAA,MACA,oBAAC,OAAI,WAAW,GACd,8BAAC,QAAK,OAAM,QAAO,mDAA2B,GAChD;AAAA,OACF;AAAA,IAED,SAAS,qBACR,qBAAC,OAAI,eAAc,UACjB;AAAA,0BAAC,OAAI,cAAc,GACjB,8BAAC,QAAK,OAAM,UAAS,2CAAwB,GAC/C;AAAA,MACA,oBAAC,OAAI,cAAc,GACjB,8BAAC,QAAK,uEAAyD,GACjE;AAAA,MACA,oBAAC,OAAI,eAAc,UAAS,cAAc,GACvC,eAAK,IAAI,CAAC,MACT,qBAAC,QAAsB,OAAM,QAAO;AAAA;AAAA,QAC/B,EAAE;AAAA,QAAY;AAAA,QAAG,EAAE;AAAA,QAAO;AAAA,WADpB,EAAE,QAEb,CACD,GACH;AAAA,MACA,oBAAC,OAAI,cAAc,GACjB,8BAAC,QAAK,6CAA+B,GACvC;AAAA,MACA;AAAA,QAAC;AAAA;AAAA,UACC,OAAO,KAAK,IAAI,CAAC,OAAO,EAAE,KAAK,EAAE,UAAU,OAAO,EAAE,YAAY,EAAE;AAAA,UAClE;AAAA;AAAA,MACF;AAAA,MACA,oBAAC,OAAI,WAAW,GACd,8BAAC,QAAK,OAAM,QAAO,mDAA2B,GAChD;AAAA,OACF;AAAA,IAED,SAAS,mBACR,qBAAC,OAAI,eAAc,UACjB;AAAA,0BAAC,OAAI,cAAc,GACjB,+BAAC,QAAK;AAAA;AAAA,QACQ,KAAK,KAAK,CAAC,MAAM,EAAE,aAAa,OAAO,QAAQ,GAAG,eAAe;AAAA,QAAO;AAAA,QAAI;AAAA,SAE1F,GACF;AAAA,MACA;AAAA,QAAC;AAAA;AAAA,UACC,OAAO;AAAA,UACP,UAAU;AAAA,UACV,UAAU;AAAA,UACV,aAAY;AAAA,UACZ,UAAU;AAAA;AAAA,MACZ;AAAA,MACA,oBAAC,OAAI,WAAW,GACd,8BAAC,QAAK,OAAM,QAAO,+CAAiC,GACtD;AAAA,OACF;AAAA,IAED,SAAS,8BACR,qBAAC,OAAI,eAAc,UACjB;AAAA,0BAAC,OAAI,cAAc,GACjB,8BAAC,QAAK,MAAI,MACP,0BACG,mCACA,8CACN,GACF;AAAA,MACA,oBAAC,OAAI,cAAc,GACjB,8BAAC,QAAK,OAAM,QAAO,+EAEnB,GACF;AAAA,MACA;AAAA,QAAC;AAAA;AAAA,UACC,OAAO;AAAA,UACP,UAAU;AAAA,UACV,UAAU;AAAA,UACV,aAAY;AAAA;AAAA,MACd;AAAA,MACA,oBAAC,OAAI,WAAW,GACd,8BAAC,QAAK,OAAM,QAAO,mDAAqC,GAC1D;AAAA,OACF;AAAA,IAED,SAAS,UACR,oBAAC,OACC,8BAAC,QAAK,OAAM,SAAQ,wEAAqD,GAC3E;AAAA,KAEJ;AAEJ;;;ADzXM,gBAAAC,YAAA;AAJN,eAAsB,aAAa,SAAsD;AACvF,SAAO,IAAI,QAAQ,CAAC,YA
AY;AAC9B,QAAI,SAA8B;AAClC,UAAM,EAAE,cAAc,IAAI;AAAA,MACxB,gBAAAA;AAAA,QAAC;AAAA;AAAA,UACC,MAAM,QAAQ;AAAA,UACd,UAAU,QAAQ;AAAA,UAClB,eAAe,QAAQ;AAAA,UACvB,mBAAmB,QAAQ;AAAA,UAC3B,YAAY,CAAC,iBAAiB;AAC5B,qBAAS;AAAA,UACX;AAAA;AAAA,MACF;AAAA,IACF;AACA,kBAAc,EAAE,KAAK,MAAM,QAAQ,MAAM,CAAC;AAAA,EAC5C,CAAC;AACH;;;ALJA,OAAO;AAEP,SAAS,cAAc,UAA+B;AACpD,UAAQ,UAAU;AAAA,IAChB,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,IACT,KAAK;AACH,aAAO;AAAA,EACX;AACF;AAEA,IAAM,UAAU,IAAI,QAAQ,EACzB,KAAK,eAAe,EACpB,YAAY,uCAAuC,EACnD,QAAQ,OAAO,EACf,SAAS,gBAAgB,+CAA+C,EACxE,SAAS,kBAAkB,gDAAgD,EAC3E,OAAO,gBAAgB,qBAAqB,QAAQ,IAAI,CAAC,EACzD,OAAO,OAAO,YAAqB,cAAuB,YAA8B;AACvF,QAAM,MAAM,SAAS,OAAO,QAAQ,IAAI;AACxC,QAAM,gBAAgB,QAAQ,UAAU;AACxC,QAAM,oBAAoB,gBAAgB;AAC1C,QAAM,mBAAmB,KAAK,KAAK,eAAe;AAClD,QAAM,eAAe,KAAK,KAAK,WAAW;AAC1C,QAAM,UAAU,KAAK,KAAK,MAAM;AAChC,QAAM,OAAO,cAAc;AAC3B,QAAM,WAAW,kBAAkB;AACnC,QAAM,eAAe,MAAM,aAAa;AAAA,IACtC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AACD,MAAI,CAAC,cAAc;AACjB,YAAQ,IAAI,mBAAmB;AAC/B,YAAQ,KAAK,CAAC;AAAA,EAChB;AACA,MAAI,aAAa,UAAU,aAAa,UAAU;AAChD,UAAM,aAAa,cAAc,aAAa,QAAQ;AACtD,UAAM,aAAa,GAAG,UAAU,IAAI,aAAa,MAAM;AAAA;AACvD,QAAI,WAAW,OAAO,GAAG;AACvB,YAAM,WAAW,aAAa,SAAS,OAAO;AAC9C,YAAM,YAAY,IAAI,OAAO,IAAI,UAAU,KAAK,GAAG,EAAE,KAAK,QAAQ;AAClE,UAAI,CAAC,WAAW;AACd,sBAAc,SAAS,GAAG,QAAQ;AAAA,EAAK,UAAU,EAAE;AACnD,gBAAQ,IAAI,gBAAW,UAAU,UAAU;AAAA,MAC7C;AAAA,IACF,OAAO;AACL,oBAAc,SAAS,UAAU;AACjC,cAAQ,IAAI,4BAAuB,UAAU,EAAE;AAAA,IACjD;AACA,YAAQ,IAAI,UAAU,IAAI,aAAa;AAAA,EACzC;AACA,QAAM,mBAAmB,aAAa,YAAY;AAClD,MAAI,CAAC,eAAe;AAClB,QAAI,kBAAkB;AACpB,YAAM,WAAW,aAAa,YAAY;AAC1C,YAAM,QAAQ,aAAa,SAAS,gBAAgB,QAAQ;AAC5D,YAAM,WAAW,iBAAiB,EAAE,UAAU,MAAM,CAAC;AACrD,oBAAc,cAAc,QAAQ;AACpC,cAAQ,IAAI,0BAAqB;AACjC,YAAM,mBAAmB,yBAAyB,EAAE,UAAU,MAAM,CAAC;AACrE,oBAAc,kBAAkB,gBAAgB;AAChD,cAAQ,IAAI,wDAAmD;AAAA,IACjE,OAAO;AACL,YAAM,WAAW,aAAa,YAAY;AAC1C,YAAM,QAAQ,aAAa,SAAS,gBAAgB,QAAQ;AAC5D,YAAM,WAAW,iBAAiB;AAAA,QAChC;AAAA,QACA;AAAA,QACA,SAAS,aAAa;AAAA,MACxB,CAAC;AACD,oBAAc,cAAc,QAAQ;AACpC,cAAQ,IAAI,0BAAqB;AACjC,YAAM,mBAAmB,yBAAyB;AAAA,QAChD;AAAA,QACA;AAAA,QACA,SAAS,aAAa;AAAA,MACxB,CAAC;AACD,oBAAc,kBAAkB,gBAAgB;AAChD,cAAQ,IAAI,wDAAmD;AAAA,IACjE;AAAA,EACF;AACA,QAAM,oBAAoB,aAAa,qBAAqB;AAC5D,QAAM,QAAQ,gBACV,uBAAuB,UAAU,MAAM,iBAAiB,KACxD,oDAAoD,iBAAiB;AACzE,QAAM,aAAa,mBAAmB,CAAC,IAAI,CAAC,aAAa,aAAa,OAAO;AAC7E,QAAM,OAAO,CAAC,YAAY,SAAS,iBAAiB,OAAO,GAAG,UAAU;AACxE,QAAM,OAAO,MAAM,OAAO,MAAM;AAAA,IAC9B;AAAA,IACA,KAAK,QAAQ;AAAA,IACb,OAAO;AAAA,EACT,CAAC;AACD,OAAK,GAAG,QAAQ,CAAC,SAAS;AACxB,YAAQ,KAAK,QAAQ,CAAC;AAAA,EACxB,CAAC;AACH,CAAC;AAEH,QAAQ,MAAM;","names":["config","jsx"]}
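The embedded `sourcesContent` above shows the wizard writing a `perstack.toml` (via `generateCreateExpertToml`) whose instruction text tells the scaffolded `create-expert` Expert to verify its work with `npx -y perstack run`. As a rough sketch only, assuming the wizard has already generated that config and stored the matching provider key (e.g. `ANTHROPIC_API_KEY`) in `.env`, the scaffolded Expert could presumably be exercised like this; the query string simply reuses the wizard's own placeholder description:

```bash
# Sketch, not a verified workflow: assumes perstack.toml already contains the
# [experts."create-expert"] entry generated above and that the provider's API
# key (e.g. ANTHROPIC_API_KEY) has been written to .env by the wizard.
npx -y perstack run create-expert "A code reviewer that checks for TypeScript best practices"
```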
package/package.json ADDED
@@ -0,0 +1,39 @@
1
+ {
2
+ "name": "create-expert",
3
+ "version": "0.0.5",
4
+ "description": "Create Perstack Experts interactively",
5
+ "author": "Wintermute Technologies, Inc.",
6
+ "license": "Apache-2.0",
7
+ "type": "module",
8
+ "bin": {
9
+ "create-expert": "dist/bin/cli.js"
10
+ },
11
+ "publishConfig": {
12
+ "access": "public"
13
+ },
14
+ "files": [
15
+ "dist"
16
+ ],
17
+ "dependencies": {
18
+ "commander": "^14.0.2",
19
+ "dotenv": "^17.2.3",
20
+ "ink": "^6.5.1",
21
+ "react": "^19.2.3"
22
+ },
23
+ "devDependencies": {
24
+ "@tsconfig/node22": "^22.0.5",
25
+ "@types/node": "^25.0.3",
26
+ "@types/react": "^19.2.7",
27
+ "tsup": "^8.5.1",
28
+ "typescript": "^5.9.3",
29
+ "vitest": "^4.0.16"
30
+ },
31
+ "engines": {
32
+ "node": ">=22.0.0"
33
+ },
34
+ "scripts": {
35
+ "clean": "rm -rf dist",
36
+ "build": "pnpm run clean && tsup --config ./tsup.config.ts",
37
+ "typecheck": "tsc --noEmit"
38
+ }
39
+ }
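For orientation, the manifest above declares a single `create-expert` bin pointing at `dist/bin/cli.js`, publishes only `dist/`, and requires Node >= 22. Assuming the published tarball is intact, either of the usual entry points below should resolve to that bin; this is a sketch of standard npm behaviour, not a command documented by the package itself:

```bash
# Both forms resolve to the "create-expert" bin declared in package.json
# (dist/bin/cli.js); Node >= 22 is required per the "engines" field.
npx create-expert
# npm's create-* convention expands "create expert" to the create-expert package:
npm create expert@latest
```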