@photon-cli/flux 0.3.0 → 0.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -51,13 +51,169 @@ bun add @photon-cli/flux
51
51
 
52
52
  ---
53
53
 
54
+ ## Flux Number
55
+
56
+ Message +16286298650 with your phone number to text the LangChain agent that you built.
57
+
58
+ ---
59
+
60
+ ## Usage
61
+
62
+ ### Step 1: Create LangChain Agent
63
+
64
+ Create an agent.ts file containing your LangChain agent:
65
+
66
+ ```
67
+ // agent.ts
68
+ export default {
69
+ async invoke({ message }: { message: string }) {
70
+ return `You said: ${message}`;
71
+ }
72
+ };
73
+ ```
74
+
75
+ ### Step 2: Login
76
+
77
+ Authenticate with your phone number and iMessage:
78
+
79
+ ```
80
+ npx @photon-cli/flux login
81
+
82
+ Enter your phone number (e.g. +15551234567): +1234567890
83
+ [FLUX] Validating with server...
84
+ [FLUX] Logged in as +1234567890
85
+ ```
86
+
87
+ ### Step 3: Validate
88
+
89
+ Validate that your agent works and exports correctly:
90
+
91
+ ```
92
+ npx @photon-cli/flux validate
93
+
94
+ [FLUX] Validating agent.ts...
95
+ [FLUX] Agent is valid!
96
+ ```
97
+
98
+ ### Step 4: Testing Mode
99
+
100
+ Test your agent through your terminal (no iMessage connection):
101
+
102
+ ```
103
+ npx @photon-cli/flux run --local
104
+
105
+ [FLUX] Welcome to Flux! Your agent is loaded.
106
+ [FLUX] Type a message to test it. Press Ctrl+C to exit.
107
+
108
+ You: Hello!
109
+ [FLUX] Thinking...
110
+ Agent: Hello! How can I assist you today?
111
+ ```
112
+
113
+ ### Step 5: Live Connection
114
+
115
+ Run your agent locally and connect it to the iMessage bridge. When you message the Flux number from your phone, you will receive the output of your LangChain agent:
116
+
117
+ ```
118
+ npx @photon-cli/flux run --prod
119
+
120
+ [FLUX] Loading agent from agent.ts...
121
+ [FLUX] Agent loaded successfully!
122
+ [FLUX] Connected to server at fluxy.photon.codes:443
123
+ [FLUX] Registered agent for +1234567890
124
+ [FLUX] Agent running in production mode. Press Ctrl+C to stop.
125
+ [FLUX] Messages to +1234567890 will be processed by your agent.
126
+
127
+ ```
128
+
129
+ ---
130
+
54
131
  ## Why Flux
55
132
 
133
+ Right now, connecting agents to messaging platforms involves complex processes such as setting up servers, configuring webhooks, and dealing with platform APIs. Furthermore, most current options use SMS or WhatsApp, which is unintuitive for many users.
134
+
135
+ Flux solves these problems in the following ways:
136
+
137
+ - **Deploy in < 5 seconds**: Link your LangChain agent to iMessage with a single command.
138
+ - **Fully iMessage native**: Direct iMessage integration, not SMS or WhatsApp.
139
+ - **Zero Infrastructure**: No servers to manage, webhooks to configure, or Apple Developer account needed.
140
+ - **Open source**: Fully community driven.
141
+ - **Free to use**: No subscription fees.
142
+
56
143
  ---
57
144
 
58
145
  ## Examples
59
146
 
147
+ ### Echo Bot (No LLM)
148
+
149
+ ```
150
+ // agent.ts
151
+ export default {
152
+ async invoke({ message }: { message: string }) {
153
+ return `You said: ${message}`;
154
+ }
155
+ };
156
+ ```
157
+
158
+ ### ChatGPT Bot
60
159
 
160
+ ```
161
+ // agent.ts
162
+ import { ChatOpenAI } from "@langchain/openai";
163
+ import { SystemMessage, HumanMessage } from "@langchain/core/messages";
164
+
165
+ const llm = new ChatOpenAI({ modelName: "gpt-4o-mini" });
166
+
167
+ export default {
168
+ async invoke({ message }: { message: string }) {
169
+ const response = await llm.invoke([
170
+ new SystemMessage("You are a helpful assistant. Be concise."),
171
+ new HumanMessage(message),
172
+ ]);
173
+ return response.content as string;
174
+ }
175
+ };
176
+ ```
177
+
178
+ ### Chatbot with tools
179
+
180
+ ```
181
+ // agent.ts
182
+ import { ChatOpenAI } from "@langchain/openai";
183
+ import { tool } from "@langchain/core/tools";
184
+ import { createReactAgent } from "@langchain/langgraph/prebuilt";
185
+ import { z } from "zod";
186
+
187
+ const calculator = tool(
188
+ async ({ expression }: { expression: string }) => {
189
+ return String(eval(expression)); // NOTE: eval is unsafe on untrusted input — use a math-expression parser in production
190
+ },
191
+ {
192
+ name: "calculator",
193
+ description: "Evaluate a math expression",
194
+ schema: z.object({ expression: z.string() }),
195
+ }
196
+ );
197
+
198
+ const getTime = tool(
199
+ async () => new Date().toLocaleTimeString(),
200
+ {
201
+ name: "get_time",
202
+ description: "Get the current time",
203
+ schema: z.object({}),
204
+ }
205
+ );
206
+
207
+ const llm = new ChatOpenAI({ modelName: "gpt-4o-mini" });
208
+ const agent = createReactAgent({ llm, tools: [calculator, getTime] });
209
+
210
+ export default {
211
+ async invoke({ message }: { message: string }) {
212
+ const result = await agent.invoke({ messages: [{ role: "user", content: message }] });
213
+ return result.messages[result.messages.length - 1].content as string;
214
+ }
215
+ };
216
+ ```
61
217
 
62
218
  ---
63
219