@mindstudio-ai/remy 0.1.60 → 0.1.62

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,13 @@
1
+ ---
2
+ trigger: feelingLucky
3
+ ---
4
+
5
+ This is an automated message triggered by the user having clicked "I'm feeling lucky" in the MindStudio UI and selecting a category from a list of app archetypes. They can't see this message. They're likely new to MindStudio and Remy — this might even be their first time using it. They haven't described a specific idea yet and more than likely don't actually have anything particular in mind — they're exploring what they can build and clicked on something that looked interesting. Your job is to ask questions to help them conjure the seed of an idea that is unique, specific, and interesting, and that will be compelling to build as an MVP. Don't build something generic — help the user make something unique that allows you to truly show off the power of what you can build. Stay focused on web interfaces for now, unless the user specifically wants a different interface type. When you've aligned on something to build, focus on making it visually beautiful and highly usable from a UX perspective — at this point in the user's experience they're going to be much more "wowed" by something that looks pretty than by something with a ton of backend complexity.
6
+
7
+ <selected_category>
8
+ {{title}}
9
+
10
+ <directional_guidance>
11
+ {{seed}}
12
+ </directional_guidance>
13
+ </selected_category>
package/dist/headless.js CHANGED
@@ -2945,9 +2945,10 @@ var BROWSER_TOOLS = [
2945
2945
  "navigate",
2946
2946
  "evaluate",
2947
2947
  "styles",
2948
- "screenshot"
2948
+ "screenshotFullPage",
2949
+ "screenshotViewport"
2949
2950
  ],
2950
- description: "snapshot: accessibility tree of the page (waits for network to settle). click: click an element (animated cursor, full event sequence). type: type text into input (one char at a time, works with React/Vue/Svelte). select: select a dropdown option by text. wait: wait for an element to appear (polls 100ms, waits for network). navigate: navigate to a URL within the app (waits for load, subsequent steps run on new page). evaluate: run JS in the page. styles: read computed CSS styles from elements (pass properties array with camelCase names, or omit for defaults). screenshot: full-page viewport-stitched screenshot (returns CDN url with dimensions)."
2951
+ description: "snapshot: accessibility tree of the page (waits for network to settle). click: click an element (animated cursor, full event sequence). type: type text into input (one char at a time, works with React/Vue/Svelte). select: select a dropdown option by text. wait: wait for an element to appear (polls 100ms, waits for network). navigate: navigate to a URL within the app (waits for load, subsequent steps run on new page). evaluate: run JS in the page. styles: read computed CSS styles from elements (pass properties array with camelCase names, or omit for defaults). screenshotFullPage: full-page viewport-stitched screenshot (returns CDN url with dimensions). screenshotViewport: screenshot of just the visible viewport."
2951
2952
  },
2952
2953
  ref: {
2953
2954
  type: "string",
@@ -4840,6 +4841,7 @@ async function runTurn(params) {
4840
4841
  userMsg.attachments = attachments;
4841
4842
  }
4842
4843
  state.messages.push(userMsg);
4844
+ const isFirstMessage = state.messages.filter((m) => m.role === "user").length === 1;
4843
4845
  const STATUS_EXCLUDED_TOOLS = /* @__PURE__ */ new Set([
4844
4846
  "setProjectOnboardingState",
4845
4847
  "setProjectMetadata",
@@ -4875,7 +4877,8 @@ async function runTurn(params) {
4875
4877
  let stopReason = "end_turn";
4876
4878
  let subAgentText = "";
4877
4879
  let currentToolNames = "";
4878
- const statusWatcher = startStatusWatcher({
4880
+ const statusWatcher = isFirstMessage ? { stop() {
4881
+ } } : startStatusWatcher({
4879
4882
  apiConfig,
4880
4883
  getContext: () => {
4881
4884
  const parts = [];
package/dist/index.js CHANGED
@@ -2691,9 +2691,10 @@ var init_tools = __esm({
2691
2691
  "navigate",
2692
2692
  "evaluate",
2693
2693
  "styles",
2694
- "screenshot"
2694
+ "screenshotFullPage",
2695
+ "screenshotViewport"
2695
2696
  ],
2696
- description: "snapshot: accessibility tree of the page (waits for network to settle). click: click an element (animated cursor, full event sequence). type: type text into input (one char at a time, works with React/Vue/Svelte). select: select a dropdown option by text. wait: wait for an element to appear (polls 100ms, waits for network). navigate: navigate to a URL within the app (waits for load, subsequent steps run on new page). evaluate: run JS in the page. styles: read computed CSS styles from elements (pass properties array with camelCase names, or omit for defaults). screenshot: full-page viewport-stitched screenshot (returns CDN url with dimensions)."
2697
+ description: "snapshot: accessibility tree of the page (waits for network to settle). click: click an element (animated cursor, full event sequence). type: type text into input (one char at a time, works with React/Vue/Svelte). select: select a dropdown option by text. wait: wait for an element to appear (polls 100ms, waits for network). navigate: navigate to a URL within the app (waits for load, subsequent steps run on new page). evaluate: run JS in the page. styles: read computed CSS styles from elements (pass properties array with camelCase names, or omit for defaults). screenshotFullPage: full-page viewport-stitched screenshot (returns CDN url with dimensions). screenshotViewport: screenshot of just the visible viewport."
2697
2698
  },
2698
2699
  ref: {
2699
2700
  type: "string",
@@ -4880,6 +4881,7 @@ async function runTurn(params) {
4880
4881
  userMsg.attachments = attachments;
4881
4882
  }
4882
4883
  state.messages.push(userMsg);
4884
+ const isFirstMessage = state.messages.filter((m) => m.role === "user").length === 1;
4883
4885
  const STATUS_EXCLUDED_TOOLS = /* @__PURE__ */ new Set([
4884
4886
  "setProjectOnboardingState",
4885
4887
  "setProjectMetadata",
@@ -4915,7 +4917,8 @@ async function runTurn(params) {
4915
4917
  let stopReason = "end_turn";
4916
4918
  let subAgentText = "";
4917
4919
  let currentToolNames = "";
4918
- const statusWatcher = startStatusWatcher({
4920
+ const statusWatcher = isFirstMessage ? { stop() {
4921
+ } } : startStatusWatcher({
4919
4922
  apiConfig,
4920
4923
  getContext: () => {
4921
4924
  const parts = [];
@@ -27,7 +27,8 @@ Skip the rest: narrating what you're about to do, restating what the user asked,
27
27
  You will occasionally receive automated messages prefixed with `@@automated_message@@` - these are triggered by things like background agents returning their work, or by the user clicking a button in the UI (e.g., the user might click a "Build Feature" button in the product roadmap UI, and you will receive a message detailing what they want to build). You will be able to see these messages in your chat history but the user will not see them, so acknowledge them appropriately and then perform the requested work.
28
28
 
29
29
  ## Style
30
- - Your messages are rendered as markdown. Use formatting (headers, bold, lists, code blocks) when it helps readability. You can also include images using `![alt](url)` — use this to show the user screenshots, generated images, or other visual references inline in your messages.
30
+ - Your messages are rendered as markdown. Use formatting (headers, bold, lists, code blocks) when it helps readability. You can include images using `![alt](url)` — use this to show the user screenshots, generated images, or other visual references inline in your messages.
31
+ - When offering suggestions or options the user might want to quickly select in a conversation, format them as clickable suggestion links: `[suggestion text](suggest:suggestion text)`. These render as clickable chips in the UI. When clicked, the suggestion text is sent as the user's next message. Use these liberally when brainstorming, offering directions, or listing options in conversation. When explicitly gathering information from the user, however, always use the `promptUser` tool instead.
31
32
  - Keep language accessible. Describe what the app *does*, not how it's implemented, unless the user demonstrates technical fluency.
32
33
  - Always use full paths relative to the project root when mentioning files (`dist/interfaces/web/src/App.tsx`, not `App.tsx`). Paths will be rendered as clickable links for the user.
33
34
  - Use inline `code` formatting only for things the user needs to type or search for.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@mindstudio-ai/remy",
3
- "version": "0.1.60",
3
+ "version": "0.1.62",
4
4
  "description": "MindStudio coding agent",
5
5
  "repository": {
6
6
  "type": "git",