@basestream/cli 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of @basestream/cli might be problematic. Click here for more details.
- package/README.md +380 -0
- package/bin/basestream.js +3 -0
- package/dist/cli/commands/hook-stop.d.ts +1 -0
- package/dist/cli/commands/hook-stop.js +247 -0
- package/dist/cli/commands/init.d.ts +1 -0
- package/dist/cli/commands/init.js +92 -0
- package/dist/cli/commands/login.d.ts +1 -0
- package/dist/cli/commands/login.js +75 -0
- package/dist/cli/commands/status.d.ts +1 -0
- package/dist/cli/commands/status.js +97 -0
- package/dist/cli/commands/sync.d.ts +7 -0
- package/dist/cli/commands/sync.js +77 -0
- package/dist/cli/config.d.ts +12 -0
- package/dist/cli/config.js +31 -0
- package/dist/cli/index.d.ts +2 -0
- package/dist/cli/index.js +57 -0
- package/dist/cli/util.d.ts +11 -0
- package/dist/cli/util.js +18 -0
- package/package.json +64 -0
package/README.md
ADDED
|
@@ -0,0 +1,380 @@
|
|
|
1
|
+
# Basestream.ai
|
|
2
|
+
|
|
3
|
+
**Your team uses AI every day. Now you can see what it's building.**
|
|
4
|
+
|
|
5
|
+
Basestream is an AI Work Intelligence platform that automatically tracks what developers build with AI tools (Claude Code, Cursor, etc.) and gives engineering teams visibility into AI-powered output.
|
|
6
|
+
|
|
7
|
+
A product of [Sublevel Inc.](https://sublevel.com)
|
|
8
|
+
|
|
9
|
+
---
|
|
10
|
+
|
|
11
|
+
## Quick Start
|
|
12
|
+
|
|
13
|
+
```bash
|
|
14
|
+
# 1. Start Postgres (from monorepo root — shared with packages/api)
|
|
15
|
+
cd packages/api && docker compose up -d && cd ../basestream
|
|
16
|
+
|
|
17
|
+
# 2. Install dependencies
|
|
18
|
+
npm install --legacy-peer-deps
|
|
19
|
+
|
|
20
|
+
# 3. Set up environment
|
|
21
|
+
cp .env.example .env
|
|
22
|
+
# .env is pre-configured for local dev with SKIP_PAYMENT=true
|
|
23
|
+
|
|
24
|
+
# 4. Push schema to database
|
|
25
|
+
npx drizzle-kit generate
|
|
26
|
+
# Then apply the migration:
|
|
27
|
+
cat src/db/migrations/0000_*.sql | docker exec -i basement-pg psql -U postgres -d basement_starter
|
|
28
|
+
|
|
29
|
+
# 5. Seed demo data (3 users, 44 entries, 1 org)
|
|
30
|
+
npx tsx src/db/seed.ts
|
|
31
|
+
|
|
32
|
+
# 6. Run dev server
|
|
33
|
+
npm run dev
|
|
34
|
+
```
|
|
35
|
+
|
|
36
|
+
Open [http://localhost:3000](http://localhost:3000) to see the landing page.
|
|
37
|
+
|
|
38
|
+
### End-to-End Local Testing
|
|
39
|
+
|
|
40
|
+
With `SKIP_PAYMENT=true` in `.env`, you can:
|
|
41
|
+
|
|
42
|
+
- Sign up via email magic link (check console for the link)
|
|
43
|
+
- Skip Stripe checkout — clicking "Get early access" instantly upgrades to Team
|
|
44
|
+
- Create orgs, invite members, manage integrations
|
|
45
|
+
- Ingest work entries via API key (`POST /api/entries`)
|
|
46
|
+
- Bulk sync entries (`POST /api/sync` — idempotent via sessionId)
|
|
47
|
+
|
|
48
|
+
---
|
|
49
|
+
|
|
50
|
+
## Architecture
|
|
51
|
+
|
|
52
|
+
```
|
|
53
|
+
packages/basestream/
|
|
54
|
+
├── src/
|
|
55
|
+
│ ├── db/
|
|
56
|
+
│ │ ├── schema.ts # Drizzle schema: 10 tables, 7 enums (bs_ prefixed)
|
|
57
|
+
│ │ ├── index.ts # Drizzle client (pg Pool → port 5434)
|
|
58
|
+
│ │ ├── seed.ts # Demo: 3 users, 44 entries, 1 org
|
|
59
|
+
│ │ └── migrations/ # Generated SQL migrations
|
|
60
|
+
│ ├── app/
|
|
61
|
+
│ │ ├── page.tsx # Landing page (marketing)
|
|
62
|
+
│ │ ├── layout.tsx # Root layout (fonts, metadata)
|
|
63
|
+
│ │ ├── login/page.tsx # Auth: email + GitHub + Google
|
|
64
|
+
│ │ ├── signup/page.tsx # Auth + Stripe checkout flow
|
|
65
|
+
│ │ ├── api/ # 16 API routes
|
|
66
|
+
│ │ │ ├── auth/ # NextAuth handler
|
|
67
|
+
│ │ │ ├── stripe/ # Checkout + webhook
|
|
68
|
+
│ │ │ ├── entries/ # CRUD + pagination + filters
|
|
69
|
+
│ │ │ ├── sync/ # Bulk upsert (CLI sync)
|
|
70
|
+
│ │ │ ├── summary/ # Aggregated stats + ROI
|
|
71
|
+
│ │ │ ├── search/ # Full-text search
|
|
72
|
+
│ │ │ ├── orgs/ # Org + members + invites
|
|
73
|
+
│ │ │ └── integrations/ # 5 integration stubs
|
|
74
|
+
│ │ └── dashboard/ # 9 authenticated pages
|
|
75
|
+
│ │ ├── page.tsx # Overview (feed + heatmap)
|
|
76
|
+
│ │ ├── team/ # Manager view (charts)
|
|
77
|
+
│ │ ├── projects/ # Project grid
|
|
78
|
+
│ │ ├── roi/ # ROI metrics + Recharts
|
|
79
|
+
│ │ ├── integrations/ # Connect/disconnect tools
|
|
80
|
+
│ │ └── settings/ # Profile, billing, team mgmt
|
|
81
|
+
│ ├── components/
|
|
82
|
+
│ │ ├── landing/ # 12 marketing components
|
|
83
|
+
│ │ ├── dashboard/ # 8 dashboard components
|
|
84
|
+
│ │ └── ui/ # 5 shared UI primitives
|
|
85
|
+
│ ├── lib/ # Core libraries
|
|
86
|
+
│ │ ├── auth.ts # NextAuth config (DrizzleAdapter)
|
|
87
|
+
│ │ ├── stripe.ts # Stripe helpers
|
|
88
|
+
│ │ ├── api-auth.ts # API key + session auth
|
|
89
|
+
│ │ ├── utils.ts # Utility functions
|
|
90
|
+
│ │ └── integrations/ # 6 integration modules
|
|
91
|
+
│ ├── hooks/ # 3 data hooks
|
|
92
|
+
│ ├── types/ # TypeScript types
|
|
93
|
+
│ └── middleware.ts # Route protection
|
|
94
|
+
├── drizzle.config.ts # Drizzle Kit config
|
|
95
|
+
├── tailwind.config.ts # Design system
|
|
96
|
+
└── package.json
|
|
97
|
+
```
|
|
98
|
+
|
|
99
|
+
**86 files total.** TypeScript strict mode, zero type errors.
|
|
100
|
+
|
|
101
|
+
---
|
|
102
|
+
|
|
103
|
+
## Tech Stack
|
|
104
|
+
|
|
105
|
+
| Layer | Technology |
|
|
106
|
+
| --------- | ----------------------------------------------------------------------------------- |
|
|
107
|
+
| Framework | Next.js 14 (App Router) |
|
|
108
|
+
| Language | TypeScript (strict) |
|
|
109
|
+
| Styling | Tailwind CSS + CSS variables |
|
|
110
|
+
| Database | PostgreSQL 16 via Drizzle ORM (shared `basement_starter` DB, `bs_` prefixed tables) |
|
|
111
|
+
| Auth | NextAuth.js (GitHub, Google, email magic link) |
|
|
112
|
+
| Payments | Stripe (Checkout, Webhooks, Customer Portal) |
|
|
113
|
+
| Charts | Recharts |
|
|
114
|
+
| Local dev | Docker Compose |
|
|
115
|
+
|
|
116
|
+
---
|
|
117
|
+
|
|
118
|
+
## Design System
|
|
119
|
+
|
|
120
|
+
The visual identity is **warm, editorial, premium** -- not the typical dark dev-tool aesthetic.
|
|
121
|
+
|
|
122
|
+
### Colors
|
|
123
|
+
|
|
124
|
+
| Token | Hex | Usage |
|
|
125
|
+
| ------------- | --------- | ----------------------- |
|
|
126
|
+
| `cream` | `#FAF8F4` | Primary background |
|
|
127
|
+
| `warm-white` | `#FFFDF9` | Card backgrounds |
|
|
128
|
+
| `ink` | `#1A1714` | Primary text |
|
|
129
|
+
| `ink-soft` | `#4A453D` | Secondary text |
|
|
130
|
+
| `ink-muted` | `#8A8379` | Tertiary text, labels |
|
|
131
|
+
| `accent` | `#C4653A` | Terracotta orange, CTAs |
|
|
132
|
+
| `accent-soft` | `#E8956A` | Hover states |
|
|
133
|
+
| `accent-bg` | `#FFF3ED` | Accent backgrounds |
|
|
134
|
+
| `border` | `#E8E4DE` | Card borders |
|
|
135
|
+
| `sage` | `#6B7E6B` | Success, dev indicators |
|
|
136
|
+
| `blue` | `#4A6FA5` | Info, links |
|
|
137
|
+
|
|
138
|
+
### Typography
|
|
139
|
+
|
|
140
|
+
- **Headings:** Newsreader (serif), weight 400, letter-spacing -0.025em
|
|
141
|
+
- **Body:** DM Sans (sans-serif), weight 300 body / 500 labels
|
|
142
|
+
- **Code:** SF Mono / Fira Code (monospace)
|
|
143
|
+
|
|
144
|
+
### Animations
|
|
145
|
+
|
|
146
|
+
- Word-by-word blur reveal (hero headline)
|
|
147
|
+
- Terminal typing sequence with cursor blink
|
|
148
|
+
- Scroll-triggered reveal (opacity + translateY)
|
|
149
|
+
- Animated counters (eased count-up)
|
|
150
|
+
- Tool chip stagger reveal
|
|
151
|
+
- Divider accent wipe
|
|
152
|
+
- Ambient glow orb drift
|
|
153
|
+
- Card hover transitions
|
|
154
|
+
- Nav scroll blur transition
|
|
155
|
+
|
|
156
|
+
---
|
|
157
|
+
|
|
158
|
+
## Landing Page
|
|
159
|
+
|
|
160
|
+
The landing page at `/` is a single-page marketing site with 12 sections:
|
|
161
|
+
|
|
162
|
+
1. **Navbar** -- Fixed, blur-on-scroll, logo + links + CTA
|
|
163
|
+
2. **Hero** -- Glow orbs, word-by-word blur reveal headline, badge, dual CTAs
|
|
164
|
+
3. **Terminal Demo** -- Typing animation showing `npx basestream init` flow
|
|
165
|
+
4. **Problem Section** -- "Sessions vanish", "Tokens != outcomes", "Status is manual"
|
|
166
|
+
5. **Divider** -- Accent wipe on scroll
|
|
167
|
+
6. **How It Works** -- 3 numbered steps with hover accent lines
|
|
168
|
+
7. **Divider**
|
|
169
|
+
8. **Two Views** -- Developer journal + Manager dashboard mockups with animated counters
|
|
170
|
+
9. **Integrations** -- Tool chips with stagger animation (6 active, 3 coming soon)
|
|
171
|
+
10. **Pricing** -- 3-tier (Free / $12 Team / Custom Enterprise)
|
|
172
|
+
11. **CTA** -- Final conversion with accent glow
|
|
173
|
+
12. **Footer** -- Copyright + links
|
|
174
|
+
|
|
175
|
+
---
|
|
176
|
+
|
|
177
|
+
## Authentication
|
|
178
|
+
|
|
179
|
+
Three providers via NextAuth.js:
|
|
180
|
+
|
|
181
|
+
- **GitHub OAuth** -- `GITHUB_CLIENT_ID` + `GITHUB_CLIENT_SECRET`
|
|
182
|
+
- **Google OAuth** -- `GOOGLE_CLIENT_ID` + `GOOGLE_CLIENT_SECRET`
|
|
183
|
+
- **Email magic link** -- Sends sign-in link to email
|
|
184
|
+
|
|
185
|
+
Session includes `user.id` and `user.plan` via callback augmentation.
|
|
186
|
+
|
|
187
|
+
Middleware at `src/middleware.ts` protects all `/dashboard/*` routes.
|
|
188
|
+
|
|
189
|
+
---
|
|
190
|
+
|
|
191
|
+
## Stripe Integration
|
|
192
|
+
|
|
193
|
+
### Flow
|
|
194
|
+
|
|
195
|
+
1. User clicks "Get early access" on Team pricing -> `/signup?plan=team`
|
|
196
|
+
2. After auth, redirects to `GET /api/stripe/checkout` -> Stripe Checkout
|
|
197
|
+
3. Stripe webhook (`POST /api/stripe/webhook`) handles:
|
|
198
|
+
- `checkout.session.completed` -- Upgrade user to TEAM plan
|
|
199
|
+
- `customer.subscription.updated` -- Update billing period
|
|
200
|
+
- `customer.subscription.deleted` -- Downgrade to FREE
|
|
201
|
+
|
|
202
|
+
### Plans
|
|
203
|
+
|
|
204
|
+
| Plan | Price | Features |
|
|
205
|
+
| ---------- | ----------- | ----------------------------------------------------- |
|
|
206
|
+
| Personal | Free | Auto-logging, personal dashboard, CLI, weekly digest |
|
|
207
|
+
| Team | $12/user/mo | Team dashboard, privacy controls, GitHub, ROI metrics |
|
|
208
|
+
| Enterprise | Custom | SSO/SAML, SOC 2, self-hosted, RBAC, audit logs |
|
|
209
|
+
|
|
210
|
+
---
|
|
211
|
+
|
|
212
|
+
## API Routes
|
|
213
|
+
|
|
214
|
+
### Data Ingestion
|
|
215
|
+
|
|
216
|
+
| Method | Route | Auth | Description |
|
|
217
|
+
| ------ | -------------- | ------- | -------------------------------------- |
|
|
218
|
+
| `POST` | `/api/entries` | API key | Create single work entry |
|
|
219
|
+
| `POST` | `/api/sync` | API key | Bulk upsert (idempotent via sessionId) |
|
|
220
|
+
|
|
221
|
+
### Data Retrieval
|
|
222
|
+
|
|
223
|
+
| Method | Route | Auth | Description |
|
|
224
|
+
| -------- | ------------------- | --------------- | -------------------------------------- |
|
|
225
|
+
| `GET` | `/api/entries` | Session/API key | List entries (paginated, filtered) |
|
|
226
|
+
| `GET` | `/api/entries/[id]` | Session | Get single entry |
|
|
227
|
+
| `PATCH` | `/api/entries/[id]` | Session | Update entry |
|
|
228
|
+
| `DELETE` | `/api/entries/[id]` | Session | Delete entry |
|
|
229
|
+
| `GET` | `/api/summary` | Session | Aggregated stats (optional `type=roi`) |
|
|
230
|
+
| `GET` | `/api/search?q=` | Session | Full-text search on summary/why |
|
|
231
|
+
|
|
232
|
+
### Organization
|
|
233
|
+
|
|
234
|
+
| Method | Route | Auth | Description |
|
|
235
|
+
| ----------------- | --------------------------- | ------------------ | ------------------------- |
|
|
236
|
+
| `GET/POST` | `/api/orgs` | Session | List/create organizations |
|
|
237
|
+
| `GET/POST` | `/api/orgs/[orgId]/members` | Session (Manager+) | List/add members |
|
|
238
|
+
| `GET/POST/DELETE` | `/api/orgs/[orgId]/invites` | Session (Admin) | Manage invites |
|
|
239
|
+
|
|
240
|
+
### Integrations
|
|
241
|
+
|
|
242
|
+
| Method | Route | Auth | Description |
|
|
243
|
+
| ----------------- | ----------------------------------- | -------------------- | --------------------- |
|
|
244
|
+
| `GET/POST/DELETE` | `/api/integrations/github` | Session + org member | GitHub integration |
|
|
245
|
+
| `GET/POST/DELETE` | `/api/integrations/hubspot` | Session + org member | HubSpot integration |
|
|
246
|
+
| `GET/POST/DELETE` | `/api/integrations/jira` | Session + org member | Jira integration |
|
|
247
|
+
| `GET/POST/DELETE` | `/api/integrations/google-calendar` | Session + org member | Google Calendar |
|
|
248
|
+
| `GET/POST/DELETE` | `/api/integrations/fireflies` | Session + org member | Fireflies integration |
|
|
249
|
+
|
|
250
|
+
All integration routes verify org membership. DELETE requires ADMIN role.
|
|
251
|
+
|
|
252
|
+
---
|
|
253
|
+
|
|
254
|
+
## Dashboard Pages
|
|
255
|
+
|
|
256
|
+
All pages use the warm cream aesthetic with shared components.
|
|
257
|
+
|
|
258
|
+
| Page | Path | Description |
|
|
259
|
+
| ------------ | ----------------------------- | ------------------------------------------------------------ |
|
|
260
|
+
| Overview | `/dashboard` | Personal feed + activity heatmap + quick stats |
|
|
261
|
+
| Team | `/dashboard/team` | Manager view: stacked bar chart, per-person activity, digest |
|
|
262
|
+
| Projects | `/dashboard/projects` | Project grid with category breakdown bars |
|
|
263
|
+
| ROI | `/dashboard/roi` | Snapshot metrics + bar/pie/line Recharts |
|
|
264
|
+
| Integrations | `/dashboard/integrations` | Connect/disconnect tool integrations |
|
|
265
|
+
| Settings | `/dashboard/settings` | Profile, API keys, notification prefs |
|
|
266
|
+
| Billing | `/dashboard/settings/billing` | Plan management, Stripe portal |
|
|
267
|
+
| Team Mgmt | `/dashboard/settings/team` | Members, roles, invites |
|
|
268
|
+
|
|
269
|
+
---
|
|
270
|
+
|
|
271
|
+
## Database Schema
|
|
272
|
+
|
|
273
|
+
10 models across auth, work tracking, organizations, and integrations:
|
|
274
|
+
|
|
275
|
+
- **User** -- Auth + Stripe billing fields + plan
|
|
276
|
+
- **Account/Session/VerificationToken** -- NextAuth adapter tables
|
|
277
|
+
- **Org** -- Organization with slug
|
|
278
|
+
- **OrgMember** -- User-org junction with role (ADMIN/MANAGER/DEVELOPER)
|
|
279
|
+
- **OrgInvite** -- Email invitations with expiry
|
|
280
|
+
- **WorkEntry** -- Core data: summary, category, why, outcome, git context, AI metadata
|
|
281
|
+
- **ApiKey** -- `bs_`-prefixed keys for CLI auth
|
|
282
|
+
- **Integration** -- Per-org tool connections
|
|
283
|
+
|
|
284
|
+
### Seed Data
|
|
285
|
+
|
|
286
|
+
Run `npm run db:seed` (i.e. `npx tsx src/db/seed.ts`) to populate:
|
|
287
|
+
|
|
288
|
+
- **Acme Engineering** org with 3 team members
|
|
289
|
+
- **Sarah Chen** (Manager), **James Rodriguez** (Developer), **Priya Patel** (Developer)
|
|
290
|
+
- **44 work entries** across 5 projects over 2 weeks
|
|
291
|
+
- Realistic summaries, PR URLs, branch names, session durations
|
|
292
|
+
|
|
293
|
+
---
|
|
294
|
+
|
|
295
|
+
## Claude Code Integration
|
|
296
|
+
|
|
297
|
+
The core product mechanic: `src/lib/integrations/claude-code-rules.ts` exports a CLAUDE.md template that instructs Claude Code to silently log structured work entries to `~/.basestream/buffer/` after every meaningful task.
|
|
298
|
+
|
|
299
|
+
Each entry captures:
|
|
300
|
+
|
|
301
|
+
- Summary, category, business context (why)
|
|
302
|
+
- Git context (repo, branch, files changed)
|
|
303
|
+
- Outcome and complexity
|
|
304
|
+
- AI metadata (model, tool version, session duration, turns)
|
|
305
|
+
|
|
306
|
+
The CLI syncs buffered entries to the API via `POST /api/sync`.
|
|
307
|
+
|
|
308
|
+
---
|
|
309
|
+
|
|
310
|
+
## Environment Variables
|
|
311
|
+
|
|
312
|
+
```bash
|
|
313
|
+
# Database (shared with packages/api — same Postgres instance)
|
|
314
|
+
DATABASE_URL=postgresql://postgres:postgres@localhost:5434/basement_starter
|
|
315
|
+
|
|
316
|
+
# Skip Stripe for local dev
|
|
317
|
+
SKIP_PAYMENT=true
|
|
318
|
+
|
|
319
|
+
# Auth
|
|
320
|
+
NEXTAUTH_URL=http://localhost:3000
|
|
321
|
+
NEXTAUTH_SECRET= # openssl rand -base64 32
|
|
322
|
+
|
|
323
|
+
# OAuth
|
|
324
|
+
GITHUB_CLIENT_ID=
|
|
325
|
+
GITHUB_CLIENT_SECRET=
|
|
326
|
+
GOOGLE_CLIENT_ID=
|
|
327
|
+
GOOGLE_CLIENT_SECRET=
|
|
328
|
+
|
|
329
|
+
# Stripe
|
|
330
|
+
STRIPE_SECRET_KEY=
|
|
331
|
+
STRIPE_PUBLISHABLE_KEY=
|
|
332
|
+
STRIPE_WEBHOOK_SECRET=
|
|
333
|
+
STRIPE_TEAM_PRICE_ID=
|
|
334
|
+
|
|
335
|
+
# Integrations (optional)
|
|
336
|
+
HUBSPOT_CLIENT_ID=
|
|
337
|
+
HUBSPOT_CLIENT_SECRET=
|
|
338
|
+
JIRA_CLIENT_ID=
|
|
339
|
+
JIRA_CLIENT_SECRET=
|
|
340
|
+
GOOGLE_CALENDAR_CLIENT_ID=
|
|
341
|
+
GOOGLE_CALENDAR_CLIENT_SECRET=
|
|
342
|
+
FIREFLIES_API_KEY=
|
|
343
|
+
```
|
|
344
|
+
|
|
345
|
+
---
|
|
346
|
+
|
|
347
|
+
## Scripts
|
|
348
|
+
|
|
349
|
+
```bash
|
|
350
|
+
npm run dev # Start dev server (port 3000)
|
|
351
|
+
npm run build # Production build
|
|
352
|
+
npm run start # Start production server
|
|
353
|
+
npm run db:generate # Generate SQL migration from schema
|
|
354
|
+
npm run db:push # Push schema to database
|
|
355
|
+
npm run db:seed # Seed demo data (npx tsx src/db/seed.ts)
|
|
356
|
+
npx drizzle-kit studio # Visual database browser
|
|
357
|
+
npx tsc --noEmit # Type check
|
|
358
|
+
```
|
|
359
|
+
|
|
360
|
+
---
|
|
361
|
+
|
|
362
|
+
## Verification Status
|
|
363
|
+
|
|
364
|
+
| Check | Status |
|
|
365
|
+
| ---------------------------------- | ------------------------------------------------------ |
|
|
366
|
+
| TypeScript strict (`tsc --noEmit`) | 0 errors |
|
|
367
|
+
| Next.js build (`next build`) | All 26 pages generated |
|
|
368
|
+
| Landing page sections | All 12 implemented |
|
|
369
|
+
| Dashboard pages | All 9 implemented |
|
|
370
|
+
| API routes | All 16 implemented |
|
|
371
|
+
| Shared components used | All 8 dashboard + 5 UI components wired in |
|
|
372
|
+
| Auth flow | NextAuth + DrizzleAdapter + middleware + 3 providers |
|
|
373
|
+
| Stripe flow | Checkout + webhook + portal + SKIP_PAYMENT bypass |
|
|
374
|
+
| Drizzle schema | 10 tables (bs\_ prefixed), 7 enums, all indexes |
|
|
375
|
+
| Seed data | 3 users, 44 entries, 1 org |
|
|
376
|
+
| Monorepo integration | `packages/basestream/`, shared Postgres on port 5434 |
|
|
377
|
+
| E2E API tested | POST/GET entries, bulk sync (idempotent), API key auth |
|
|
378
|
+
| Design system | Colors, fonts, animations all matching spec |
|
|
379
|
+
| Mobile responsive | Sidebar collapse, grid stack, nav hide |
|
|
380
|
+
| Branding | "Basestream" + "A Sublevel product" throughout |
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export declare function hookStop(): Promise<void>;
|
|
@@ -0,0 +1,247 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { execSync } from "node:child_process";
|
|
4
|
+
import { BUFFER_DIR, SESSION_DIR, ensureDirs, readConfig } from "../config.js";
|
|
5
|
+
function parseTranscriptLine(line) {
    // Transcripts are JSONL; tolerate malformed lines by yielding null
    // so callers can simply skip them.
    let parsed = null;
    try {
        parsed = JSON.parse(line);
    }
    catch {
        // not valid JSON — leave parsed as null
    }
    return parsed;
}
|
|
13
|
+
function extractGitInfo(cwd) {
    // Best-effort git context: each query can fail (not a repo, no remote,
    // missing directory) and is silently ignored.
    const runGit = (cmd) => execSync(cmd, {
        cwd,
        encoding: "utf-8",
        stdio: ["pipe", "pipe", "pipe"],
    }).trim();
    const info = {};
    try {
        info.branch = runGit("git rev-parse --abbrev-ref HEAD");
    }
    catch { }
    try {
        info.repo = runGit("git remote get-url origin");
    }
    catch { }
    // Fall back to the directory name as the project identifier.
    info.projectName = path.basename(cwd);
    return info;
}
|
|
34
|
+
function readSessionAccumulator(sessionId) {
    // Session state is persisted as JSON with Sets serialized as arrays;
    // rehydrate them here. Returns null when the file is missing or unreadable.
    const sessionFile = path.join(SESSION_DIR, `${sessionId}.json`);
    try {
        const stored = JSON.parse(fs.readFileSync(sessionFile, "utf-8"));
        stored.filesWritten = new Set(stored.filesWritten || []);
        stored.commitShas = new Set(stored.commitShas || []);
        return stored;
    }
    catch {
        // missing file or corrupt JSON — caller will start a fresh session
        return null;
    }
}
|
|
48
|
+
function writeSessionAccumulator(acc) {
    // Sets are not JSON-serializable — persist them as plain arrays.
    const serializable = {
        ...acc,
        filesWritten: [...acc.filesWritten],
        commitShas: [...acc.commitShas],
    };
    const sessionFile = path.join(SESSION_DIR, `${acc.sessionId}.json`);
    fs.writeFileSync(sessionFile, JSON.stringify(serializable, null, 2));
}
|
|
57
|
+
function analyzeTranscript(transcriptPath, acc) {
    // Fold a Claude Code JSONL transcript into the session accumulator:
    // assistant turn count, tool calls, files written, and commit SHAs.
    if (!fs.existsSync(transcriptPath))
        return acc;
    const rawLines = fs.readFileSync(transcriptPath, "utf-8").split("\n").filter(Boolean);
    for (const rawLine of rawLines) {
        const msg = parseTranscriptLine(rawLine);
        if (!msg)
            continue;
        // Each assistant message counts as one turn.
        if (msg.role === "assistant")
            acc.turns++;
        if (msg.type === "tool_use" || msg.type === "tool_call") {
            const toolName = msg.name || msg.tool_name || "unknown";
            acc.toolCalls.push({ tool: toolName, timestamp: msg.timestamp });
            // File-writing tools carry the touched path in their input.
            if (toolName === "Write" || toolName === "Edit" || toolName === "NotebookEdit") {
                const filePath = msg.input?.file_path || msg.input?.path;
                if (filePath)
                    acc.filesWritten.add(filePath);
            }
            // `git commit` Bash calls are detected via their tool_result below,
            // which contains the short SHA.
        }
        // Commit SHAs appear in tool results as e.g. "[abc1234] message".
        if (msg.type === "tool_result") {
            const shaMatch = String(msg.content || "").match(/\[([a-f0-9]{7,12})\]\s/);
            if (shaMatch)
                acc.commitShas.add(shaMatch[1]);
        }
    }
    return acc;
}
|
|
106
|
+
function categorizeWork(acc) {
    // Heuristically classify a session by the kinds of files it touched,
    // and grade complexity by how many files were modified.
    //
    // Returns { category, complexity } where category is one of
    // TESTING / DOCS / DEVOPS / FEATURE / OTHER and complexity is
    // LOW / MEDIUM / HIGH.
    const files = Array.from(acc.filesWritten);
    const fileCount = files.length;
    const testFiles = files.filter((f) => f.includes(".test.") || f.includes(".spec.") || f.includes("__tests__"));
    const docFiles = files.filter((f) => f.endsWith(".md") || f.includes("/docs/"));
    // CI/config detection: match real CI/config paths rather than any path
    // containing the substring "ci" — the old check wrongly classified files
    // like "pricing.tsx" or "circle.js" as DEVOPS.
    const configFiles = files.filter((f) => f.includes("Dockerfile") ||
        f.endsWith(".yml") ||
        f.endsWith(".yaml") ||
        /(^|\/)\.github\/workflows\//.test(f) ||
        /(^|\/)ci\//.test(f));
    // A session is tagged by whichever file class forms a strict majority;
    // any file activity without a majority class counts as FEATURE work.
    let category = "OTHER";
    if (testFiles.length > fileCount / 2)
        category = "TESTING";
    else if (docFiles.length > fileCount / 2)
        category = "DOCS";
    else if (configFiles.length > fileCount / 2)
        category = "DEVOPS";
    else if (fileCount > 0)
        category = "FEATURE";
    // Complexity scales with the touched-file count:
    // 0-2 files -> LOW, 3-6 -> MEDIUM, 7+ -> HIGH.
    let complexity = "LOW";
    if (fileCount > 6)
        complexity = "HIGH";
    else if (fileCount > 2)
        complexity = "MEDIUM";
    return { category, complexity };
}
|
|
137
|
+
function flushToBuffer(acc) {
    // Convert the accumulated session into a work-entry JSON file under
    // BUFFER_DIR, ready for the next sync to pick up.
    const { category, complexity } = categorizeWork(acc);
    const now = new Date();
    const elapsedMin = Math.round((now.getTime() - new Date(acc.startedAt).getTime()) / 60_000);
    const relativeFiles = Array.from(acc.filesWritten).map((f) => path.relative(acc.cwd, f) || f);
    const entry = {
        sessionId: acc.sessionId,
        tool: "claude-code",
        projectName: acc.projectName || path.basename(acc.cwd),
        repo: acc.gitRepo || null,
        branch: acc.gitBranch || null,
        summary: buildSummary(acc, category),
        category,
        why: null, // Hook-based: no "why" available (CLAUDE.md rules would provide this)
        whatChanged: relativeFiles,
        outcome: "IN_PROGRESS",
        filesTouched: acc.filesWritten.size,
        complexity,
        toolVersion: null,
        model: null,
        sessionDurationMin: elapsedMin > 0 ? elapsedMin : 1, // never report zero minutes
        turns: acc.turns,
        prUrl: null,
        ticketUrl: null,
        commitShas: Array.from(acc.commitShas),
        visibility: "TEAM",
        bufferedAt: now.toISOString(),
    };
    ensureDirs();
    // Millisecond-timestamp prefix keeps buffer filenames unique and
    // chronologically sortable.
    const filename = `${Date.now()}-${acc.sessionId.slice(0, 8)}.json`;
    fs.writeFileSync(path.join(BUFFER_DIR, filename), JSON.stringify(entry, null, 2));
}
|
|
169
|
+
function buildSummary(acc, category) {
    // Compose a short human-readable session summary, e.g.
    // "feature, 3 files modified, 1 commit, in my-app".
    const plural = (n, word) => `${n} ${word}${n !== 1 ? "s" : ""}`;
    const parts = [];
    if (category !== "OTHER") {
        parts.push(category.toLowerCase());
    }
    if (acc.filesWritten.size > 0) {
        parts.push(`${plural(acc.filesWritten.size, "file")} modified`);
    }
    if (acc.commitShas.size > 0) {
        parts.push(plural(acc.commitShas.size, "commit"));
    }
    if (acc.projectName) {
        parts.push(`in ${acc.projectName}`);
    }
    // Fallback when nothing notable was captured.
    if (parts.length === 0) {
        return `Claude Code session in ${acc.projectName || "unknown project"}`;
    }
    return parts.join(", ");
}
|
|
189
|
+
/**
 * Entry point for the Claude Code "Stop" hook.
 *
 * Reads the hook payload (JSON with `session_id`, `transcript_path`, `cwd`)
 * from stdin, folds the transcript into an on-disk per-session accumulator,
 * and — when the session produced meaningful work — writes a work-entry file
 * to the local buffer and best-effort syncs it to the API.
 *
 * Exits the process with code 0 on any unusable payload, so a bad invocation
 * never fails the parent tool.
 */
export async function hookStop() {
    // Read hook payload from stdin
    let payload;
    try {
        const chunks = [];
        for await (const chunk of process.stdin) {
            chunks.push(chunk);
        }
        payload = JSON.parse(Buffer.concat(chunks).toString("utf-8"));
    }
    catch {
        // No valid payload — nothing to do
        process.exit(0);
    }
    const { session_id, transcript_path, cwd } = payload;
    if (!session_id)
        process.exit(0);
    ensureDirs();
    // Load or create session accumulator
    let acc = readSessionAccumulator(session_id);
    if (!acc) {
        // First Stop event for this session: snapshot git context once.
        // NOTE(review): `cwd` is not validated — if the payload omits it,
        // path.basename(cwd) downstream would throw; confirm the hook contract.
        const gitInfo = extractGitInfo(cwd);
        acc = {
            sessionId: session_id,
            cwd,
            startedAt: new Date().toISOString(),
            lastUpdatedAt: new Date().toISOString(),
            turns: 0,
            toolCalls: [],
            filesWritten: new Set(),
            commitShas: new Set(),
            gitBranch: gitInfo.branch,
            gitRepo: gitInfo.repo,
            projectName: gitInfo.projectName,
        };
    }
    // Analyze transcript to accumulate data
    // NOTE(review): the whole transcript is re-scanned on every Stop event,
    // which looks like it double-counts turns/tool calls across repeated Stops
    // in one session — verify whether transcripts are cumulative.
    if (transcript_path) {
        acc = analyzeTranscript(transcript_path, acc);
    }
    acc.lastUpdatedAt = new Date().toISOString();
    // Save accumulator for future Stop events in this session
    writeSessionAccumulator(acc);
    // Flush to buffer if we have meaningful work
    // ("meaningful" = wrote files, recorded commits, or >= 3 assistant turns)
    if (acc.filesWritten.size > 0 || acc.commitShas.size > 0 || acc.turns >= 3) {
        flushToBuffer(acc);
        // Auto-sync if config exists
        const config = readConfig();
        if (config?.apiKey) {
            try {
                // Dynamic import: sync.js is only loaded when a sync is attempted.
                const { syncEntries } = await import("./sync.js");
                await syncEntries(config);
            }
            catch {
                // Sync failure is non-fatal — entries stay in buffer
            }
        }
    }
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export declare function init(): Promise<void>;
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import os from "node:os";
|
|
4
|
+
import { execSync } from "node:child_process";
|
|
5
|
+
import { ensureDirs, readConfig } from "../config.js";
|
|
6
|
+
import { c, check, warn } from "../util.js";
|
|
7
|
+
import { login } from "./login.js";
|
|
8
|
+
const CLAUDE_SETTINGS_PATH = path.join(os.homedir(), ".claude", "settings.json");
|
|
9
|
+
const HOOK_COMMAND = "npx @basestream/cli _hook-stop";
|
|
10
|
+
function detectClaudeCode() {
    // Probe for the `claude` binary; returns its version string, or null when
    // the command is missing or prints nothing.
    try {
        const output = execSync("claude --version 2>/dev/null", {
            encoding: "utf-8",
        });
        const version = output.trim();
        return version || null;
    }
    catch {
        return null;
    }
}
|
|
21
|
+
/**
 * Install the Basestream Stop hook into the user's global Claude Code
 * settings (~/.claude/settings.json).
 *
 * Idempotent: if a Stop hook whose command mentions "@basestream/cli" is
 * already present, nothing is written. Otherwise a new Stop entry running
 * HOOK_COMMAND (with a 30s timeout) is appended and the file rewritten.
 */
function injectClaudeCodeHook() {
    const settingsDir = path.dirname(CLAUDE_SETTINGS_PATH);
    fs.mkdirSync(settingsDir, { recursive: true });
    let settings = {};
    if (fs.existsSync(CLAUDE_SETTINGS_PATH)) {
        try {
            settings = JSON.parse(fs.readFileSync(CLAUDE_SETTINGS_PATH, "utf-8"));
        }
        catch {
            // Corrupted settings — start fresh
            // NOTE(review): this silently replaces an unparseable settings file
            // on write below, discarding the user's prior content — confirm
            // that is intended rather than aborting with a warning.
        }
    }
    // Initialize hooks structure
    if (!settings.hooks || typeof settings.hooks !== "object") {
        settings.hooks = {};
    }
    const hooks = settings.hooks;
    // Check if our hook is already installed
    if (Array.isArray(hooks.Stop)) {
        const existing = hooks.Stop;
        const alreadyInstalled = existing.some((entry) => entry.hooks?.some((h) => h.command?.includes("@basestream/cli")));
        if (alreadyInstalled) {
            check("Claude Code hook already installed");
            return;
        }
    }
    // Add our Stop hook
    if (!Array.isArray(hooks.Stop)) {
        hooks.Stop = [];
    }
    hooks.Stop.push({
        matcher: "*",
        hooks: [
            {
                type: "command",
                command: HOOK_COMMAND,
                timeout: 30,
            },
        ],
    });
    fs.writeFileSync(CLAUDE_SETTINGS_PATH, JSON.stringify(settings, null, 2));
    check("Injected tracking hook into ~/.claude/settings.json");
}
|
|
64
|
+
/**
 * `basestream init` — one-shot setup:
 *   1. detect the Claude Code CLI (informational only; setup proceeds either way),
 *   2. inject the tracking Stop hook into ~/.claude/settings.json,
 *   3. create the local buffer directories,
 *   4. authenticate via browser login unless an API key is already configured.
 */
export async function init() {
    console.log();
    // 1. Detect Claude Code
    const ccVersion = detectClaudeCode();
    if (ccVersion) {
        console.log(` ${c.dim(`Detected: Claude Code ${ccVersion}`)}`);
    }
    else {
        // Not fatal: the hook is installed regardless and fires once Claude
        // Code exists.
        warn("Claude Code not detected — hook will activate when installed");
    }
    console.log();
    // 2. Inject hook
    injectClaudeCodeHook();
    // 3. Create buffer directory
    ensureDirs();
    check(`Created ~/.basestream/buffer/`);
    // 4. Authenticate (skipped when a key is already stored in config)
    const existing = readConfig();
    if (existing?.apiKey) {
        check("Already authenticated");
    }
    else {
        console.log();
        await login();
    }
    console.log();
    console.log(` ${c.dim("That's it. Work normally — every session is now tracked.")}`);
    console.log();
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
/** Runs the browser-based auth flow; resolves with the API key it received and stored. */
export declare function login(): Promise<string>;
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
import http from "node:http";
|
|
2
|
+
import { execSync } from "node:child_process";
|
|
3
|
+
import { writeConfig, readConfig } from "../config.js";
|
|
4
|
+
import { c, check } from "../util.js";
|
|
5
|
+
// Production endpoint; a stored config baseUrl or the BASESTREAM_URL env var
// takes precedence (see login()).
const DEFAULT_BASE_URL = "https://basestream.ai";
|
|
6
|
+
/**
 * Open `url` in the user's default browser, falling back to printing the URL
 * when no opener is available.
 *
 * Fix: on Windows, `start "${url}"` treats the first quoted argument as the
 * console window TITLE, so the URL was never opened. The idiom is
 * `start "" "<url>"` — an explicit empty title followed by the target.
 *
 * NOTE(review): the URL is interpolated into a shell command; callers must
 * only pass URLs built from trusted parts (here it is derived from baseUrl).
 */
function openBrowser(url) {
    try {
        const platform = process.platform;
        if (platform === "darwin")
            execSync(`open "${url}"`);
        else if (platform === "win32")
            // First quoted arg to `start` is the window title — pass "" so the
            // URL is treated as the thing to open.
            execSync(`start "" "${url}"`);
        else
            execSync(`xdg-open "${url}"`);
    }
    catch {
        // No opener (headless box, missing xdg-open, …): let the user do it.
        console.log(` ${c.dim(`Open this URL in your browser: ${url}`)}`);
    }
}
|
|
20
|
+
/**
 * Browser-based authentication flow.
 *
 * Starts a throwaway local HTTP server on a random port, opens the Basestream
 * auth page with that server as the callback, and waits for the browser to
 * redirect back with `?key=…` (and optionally `orgId`). The key is persisted
 * via writeConfig() and returned.
 *
 * Fix: the 2-minute timeout timer was never cleared on success, which kept
 * the Node event loop alive (the CLI hung for up to 120 s after a successful
 * login) and later closed the server/rejected redundantly. All exit paths now
 * go through `finish()`, which clears the timer and closes the server.
 *
 * @returns {Promise<string>} the API key received from the callback
 * @throws when the callback server cannot start or authentication times out
 */
export async function login() {
    const existing = readConfig();
    const baseUrl = existing?.baseUrl ||
        process.env.BASESTREAM_URL ||
        DEFAULT_BASE_URL;
    console.log(` ${c.dim("Opening browser for authentication...")}`);
    // Start a temporary local server to receive the API key callback.
    const apiKey = await new Promise((resolve, reject) => {
        let timer;
        // Single settle path: stop the timeout, shut the server, settle the promise.
        const finish = (settle, value) => {
            clearTimeout(timer);
            server.close();
            settle(value);
        };
        const server = http.createServer((req, res) => {
            const url = new URL(req.url || "/", `http://localhost`);
            const key = url.searchParams.get("key");
            const orgId = url.searchParams.get("orgId");
            if (!key) {
                // Keep listening — the browser may retry with the key.
                res.writeHead(400);
                res.end("Missing key parameter");
                return;
            }
            res.writeHead(200, { "Content-Type": "text/html" });
            res.end(`
          <html>
            <body style="font-family: system-ui; display: flex; align-items: center; justify-content: center; height: 100vh; margin: 0; background: #0a0a0a; color: #fafafa;">
              <div style="text-align: center;">
                <h1 style="color: #22c55e;">✓ Authenticated</h1>
                <p style="color: #a1a1aa;">You can close this tab and return to your terminal.</p>
              </div>
            </body>
          </html>
        `);
            writeConfig({
                apiKey: key,
                baseUrl,
                orgId: orgId || undefined,
            });
            finish(resolve, key);
        });
        server.listen(0, "127.0.0.1", () => {
            const addr = server.address();
            if (!addr || typeof addr === "string") {
                finish(reject, new Error("Failed to start callback server"));
                return;
            }
            const authUrl = `${baseUrl}/api/auth/cli?callback=http://127.0.0.1:${addr.port}`;
            openBrowser(authUrl);
        });
        // Abort after 2 minutes; cleared by finish() on any earlier outcome.
        timer = setTimeout(() => {
            finish(reject, new Error("Authentication timed out"));
        }, 120_000);
    });
    check("Authenticated with Basestream");
    return apiKey;
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
/** Prints this week's session summary (local buffer merged with remote entries when authenticated). */
export declare function status(): Promise<void>;
|
|
@@ -0,0 +1,97 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { BUFFER_DIR, ensureDirs, readConfig } from "../config.js";
|
|
4
|
+
import { c } from "../util.js";
|
|
5
|
+
// Partition `arr` into an object keyed by fn(item); input order is preserved
// within each bucket. `fn` is invoked exactly once per item.
function groupBy(arr, fn) {
    const grouped = {};
    for (const item of arr) {
        const key = fn(item);
        if (grouped[key] == null) {
            grouped[key] = [];
        }
        grouped[key].push(item);
    }
    return grouped;
}
|
|
13
|
+
// Render a session outcome code as a colored, human-readable label.
// Unrecognized outcomes fall back to a dimmed lower-case version of the code.
function formatOutcome(outcome) {
    if (outcome === "COMPLETED")
        return c.green("completed");
    if (outcome === "IN_PROGRESS")
        return c.yellow("in progress");
    if (outcome === "BLOCKED")
        return c.red("blocked");
    return c.dim(outcome.toLowerCase());
}
|
|
25
|
+
// Fetch this week's entries from the API. Any failure — network error,
// non-2xx status, bad payload — degrades to an empty list so `status`
// still works offline.
async function fetchRemoteEntries(config) {
    const endpoint = `${config.baseUrl}/api/entries?period=week&limit=100`;
    try {
        const response = await fetch(endpoint, {
            headers: { Authorization: `Bearer ${config.apiKey}` },
        });
        if (!response.ok)
            return [];
        const payload = await response.json();
        return payload.entries || [];
    }
    catch {
        return [];
    }
}
|
|
39
|
+
// Load every parseable JSON entry from the local buffer directory.
// Corrupt or unreadable files are silently skipped (and left on disk).
function readLocalEntries() {
    ensureDirs();
    const entries = [];
    for (const file of fs.readdirSync(BUFFER_DIR)) {
        if (!file.endsWith(".json"))
            continue;
        try {
            const raw = fs.readFileSync(path.join(BUFFER_DIR, file), "utf-8");
            entries.push(JSON.parse(raw));
        }
        catch {
            // corrupt buffer file — ignore
        }
    }
    return entries;
}
|
|
53
|
+
/**
 * Print a summary of this week's sessions, grouped by project.
 *
 * Combines locally buffered entries with remote ones (when authenticated),
 * deduplicating by sessionId, then reports how many local entries still
 * await sync.
 *
 * Fix: the original called readLocalEntries() twice — a second full disk
 * read just to compute the pending-sync count. The buffer is now read once
 * and reused; output is unchanged.
 */
export async function status() {
    const config = readConfig();
    // Read the on-disk buffer once; needed for both the summary and the
    // pending-sync footer.
    const localEntries = readLocalEntries();
    const entries = [...localEntries];
    if (config?.apiKey) {
        const remote = await fetchRemoteEntries(config);
        // Merge, dedup by sessionId (local copies win).
        const seen = new Set(entries.map((e) => e.sessionId).filter(Boolean));
        for (const re of remote) {
            if (!re.sessionId || !seen.has(re.sessionId)) {
                entries.push(re);
            }
        }
    }
    if (entries.length === 0) {
        console.log();
        console.log(` ${c.dim("No sessions logged this week.")}`);
        console.log(` ${c.dim("Start using Claude Code — entries will appear here.")}`);
        console.log();
        return;
    }
    console.log();
    console.log(` ${c.yellow(`This week — ${entries.length} sessions logged`)}`);
    console.log();
    // One line per project: session count, commit count, latest outcome.
    const byProject = groupBy(entries, (e) => e.projectName || "unknown");
    for (const [project, projectEntries] of Object.entries(byProject)) {
        const commits = projectEntries.reduce((sum, e) => sum + (e.commitShas?.length || 0), 0);
        const latestOutcome = projectEntries[0]?.outcome || "IN_PROGRESS";
        const outcomeStr = formatOutcome(latestOutcome);
        const parts = [`${projectEntries.length} sessions`];
        if (commits > 0) {
            parts.push(`${commits} commit${commits !== 1 ? "s" : ""}`);
        }
        parts.push(outcomeStr);
        console.log(` ${c.bold(project)} ${c.dim(parts.join(" · "))}`);
    }
    // Footer: entries still sitting in the local buffer (not yet uploaded).
    if (localEntries.length > 0) {
        console.log();
        console.log(` ${c.dim(`${localEntries.length} entries pending sync`)}`);
    }
    console.log();
}
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { BUFFER_DIR, ensureDirs, readConfig } from "../config.js";
|
|
4
|
+
import { c, check, fail } from "../util.js";
|
|
5
|
+
/**
 * Upload all buffered entries to the Basestream API, then delete exactly the
 * files that were included in the successful upload.
 *
 * Exits the process when no API key is configured. Returns the server's
 * result summary, or undefined when there was nothing to send.
 * Throws on a non-2xx response (buffer files are left intact in that case).
 */
export async function syncEntries(config) {
    const cfg = config || readConfig();
    if (!cfg?.apiKey) {
        fail("Not authenticated. Run: basestream login");
        process.exit(1);
    }
    ensureDirs();
    const files = fs
        .readdirSync(BUFFER_DIR)
        .filter((f) => f.endsWith(".json"))
        .sort();
    if (files.length === 0)
        return;
    // Parse what we can; corrupt buffer files are skipped and left on disk.
    const entries = [];
    const uploadedFiles = [];
    for (const file of files) {
        try {
            const parsed = JSON.parse(fs.readFileSync(path.join(BUFFER_DIR, file), "utf-8"));
            entries.push(parsed);
            uploadedFiles.push(file);
        }
        catch {
            // Skip corrupt files
        }
    }
    if (entries.length === 0)
        return;
    const res = await fetch(`${cfg.baseUrl}/api/sync`, {
        method: "POST",
        headers: {
            "Content-Type": "application/json",
            Authorization: `Bearer ${cfg.apiKey}`,
        },
        body: JSON.stringify({ entries }),
    });
    if (!res.ok) {
        const body = await res.text().catch(() => "");
        throw new Error(`Sync failed (${res.status}): ${body}`);
    }
    const result = (await res.json());
    // Server accepted the batch — only now remove the uploaded files.
    for (const file of uploadedFiles) {
        fs.unlinkSync(path.join(BUFFER_DIR, file));
    }
    return result;
}
|
|
52
|
+
/**
 * CLI entry for `basestream sync`: report how many entries are buffered,
 * push them via syncEntries(), and surface success or failure to the user.
 * Exits non-zero when unauthenticated or when the upload fails.
 */
export async function sync() {
    const cfg = readConfig();
    if (!cfg?.apiKey) {
        fail("Not authenticated. Run: basestream login");
        process.exit(1);
    }
    ensureDirs();
    const pending = fs
        .readdirSync(BUFFER_DIR)
        .filter((f) => f.endsWith(".json"));
    if (pending.length === 0) {
        console.log(` ${c.dim("No entries to sync.")}`);
        return;
    }
    console.log(` ${c.dim(`Syncing ${pending.length} entries...`)}`);
    try {
        const result = await syncEntries(cfg);
        if (result) {
            check(`Synced ${result.synced} entries (${result.created} new, ${result.updated} updated)`);
        }
    }
    catch (err) {
        fail(`Sync failed: ${err instanceof Error ? err.message : String(err)}`);
        process.exit(1);
    }
}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
/** Root directory for all CLI state (~/.basestream). */
export declare const BASESTREAM_DIR: string;
/** Directory holding JSON entry files awaiting upload by `sync`. */
export declare const BUFFER_DIR: string;
/** Path of the persisted config file (config.json under BASESTREAM_DIR). */
export declare const CONFIG_FILE: string;
/** Per-session working directory; created by ensureDirs(). */
export declare const SESSION_DIR: string;
/** Shape of the persisted CLI configuration. */
export interface BasestreamConfig {
    /** API key obtained via the login flow. */
    apiKey: string;
    /** Basestream server origin used for all API calls. */
    baseUrl: string;
    /** Organization id returned by the auth callback, when present. */
    orgId?: string;
}
/** Create BASESTREAM_DIR, BUFFER_DIR and SESSION_DIR (idempotent). */
export declare function ensureDirs(): void;
/** Read the stored config; null when missing or unparseable. */
export declare function readConfig(): BasestreamConfig | null;
/** Persist the config, creating directories first. */
export declare function writeConfig(config: BasestreamConfig): void;
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import os from "node:os";
|
|
4
|
+
// Root directory for all Basestream CLI state, under the user's home dir.
export const BASESTREAM_DIR = path.join(os.homedir(), ".basestream");
// JSON entry files buffered here until `sync` uploads and deletes them.
export const BUFFER_DIR = path.join(BASESTREAM_DIR, "buffer");
// Persisted credentials/settings, managed by readConfig()/writeConfig().
export const CONFIG_FILE = path.join(BASESTREAM_DIR, "config.json");
// Session working directory — created by ensureDirs(); its consumers are not
// visible in this file (presumably the hook handler — TODO confirm).
export const SESSION_DIR = path.join(BASESTREAM_DIR, "sessions");
|
|
8
|
+
// Create the full ~/.basestream directory tree. Safe to call repeatedly:
// mkdirSync with `recursive` is a no-op for existing directories.
export function ensureDirs() {
    const required = [BASESTREAM_DIR, BUFFER_DIR, SESSION_DIR];
    required.forEach((dir) => fs.mkdirSync(dir, { recursive: true }));
}
|
|
13
|
+
// Load the stored CLI config from disk.
// Returns null when the file is absent or cannot be parsed.
export function readConfig() {
    if (!fs.existsSync(CONFIG_FILE))
        return null;
    try {
        const raw = fs.readFileSync(CONFIG_FILE, "utf-8");
        const config = JSON.parse(raw);
        // BASESTREAM_URL env var always overrides stored baseUrl (useful for local testing)
        const override = process.env.BASESTREAM_URL;
        if (override) {
            config.baseUrl = override;
        }
        return config;
    }
    catch {
        return null;
    }
}
|
|
28
|
+
// Persist the config as pretty-printed JSON, creating directories first.
export function writeConfig(config) {
    ensureDirs();
    const serialized = JSON.stringify(config, null, 2);
    fs.writeFileSync(CONFIG_FILE, serialized);
}
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { init } from "./commands/init.js";
|
|
3
|
+
import { status } from "./commands/status.js";
|
|
4
|
+
import { sync } from "./commands/sync.js";
|
|
5
|
+
import { hookStop } from "./commands/hook-stop.js";
|
|
6
|
+
import { login } from "./commands/login.js";
|
|
7
|
+
const HELP = `
|
|
8
|
+
basestream — AI work intelligence for teams
|
|
9
|
+
|
|
10
|
+
Usage:
|
|
11
|
+
basestream init Detect AI tools, inject hooks, authenticate
|
|
12
|
+
basestream login Authenticate with your Basestream account
|
|
13
|
+
basestream status Show this week's logged sessions
|
|
14
|
+
basestream sync Manually sync buffered entries to Basestream
|
|
15
|
+
basestream _hook-stop (internal) Claude Code Stop hook handler
|
|
16
|
+
|
|
17
|
+
Options:
|
|
18
|
+
--help, -h Show this help message
|
|
19
|
+
--version, -v Show version
|
|
20
|
+
`;
|
|
21
|
+
/**
 * CLI dispatcher: routes process.argv[2] to the matching subcommand after
 * handling --help / --version. Unknown commands print usage and exit 1.
 */
async function main() {
    const command = process.argv[2];
    if (!command || command === "--help" || command === "-h") {
        console.log(HELP.trim());
        process.exit(0);
    }
    if (command === "--version" || command === "-v") {
        // Read the version straight out of package.json via a JSON import.
        const pkg = await import("../../package.json", { with: { type: "json" } });
        console.log(pkg.default.version);
        process.exit(0);
    }
    // Dispatch table instead of a switch; all handlers are async.
    const handlers = {
        "init": init,
        "login": login,
        "status": status,
        "sync": sync,
        "_hook-stop": hookStop,
    };
    const handler = handlers[command];
    if (!handler) {
        console.error(`Unknown command: ${command}`);
        console.log(HELP.trim());
        process.exit(1);
    }
    await handler();
}
|
|
54
|
+
// Top-level error trap: print the failure message and exit non-zero so
// shell scripts and CI can detect that the command failed.
main().catch((err) => {
    console.error(err.message || err);
    process.exit(1);
});
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
/** ANSI terminal color helpers (dependency-free); each wraps a string in a color escape. */
export declare const c: {
    green: (s: string) => string;
    yellow: (s: string) => string;
    red: (s: string) => string;
    cyan: (s: string) => string;
    dim: (s: string) => string;
    bold: (s: string) => string;
};
/** Print a green checkmark status line. */
export declare function check(msg: string): void;
/** Print a yellow warning status line. */
export declare function warn(msg: string): void;
/** Print a red failure status line (does not exit). */
export declare function fail(msg: string): void;
|
package/dist/cli/util.js
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
// Terminal color helpers (no dependencies)
// Factory for SGR wrappers: `ansi(32)("x")` → "\x1b[32mx\x1b[0m".
const ansi = (code) => (s) => `\x1b[${code}m${s}\x1b[0m`;
export const c = {
    green: ansi(32),
    yellow: ansi(33),
    red: ansi(31),
    cyan: ansi(36),
    dim: ansi(2),
    bold: ansi(1),
};
// Status-line printers shared by every command.
export function check(msg) {
    console.log(` ${c.green("✓")} ${c.green(msg)}`);
}
export function warn(msg) {
    console.log(` ${c.yellow("!")} ${c.yellow(msg)}`);
}
export function fail(msg) {
    console.log(` ${c.red("✗")} ${c.red(msg)}`);
}
|
package/package.json
ADDED
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@basestream/cli",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "AI work intelligence for teams — automatic work tracking for Claude Code",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"bin": {
|
|
7
|
+
"basestream": "./bin/basestream.js"
|
|
8
|
+
},
|
|
9
|
+
"files": [
|
|
10
|
+
"bin/",
|
|
11
|
+
"dist/cli/",
|
|
12
|
+
"README.md"
|
|
13
|
+
],
|
|
14
|
+
"scripts": {
|
|
15
|
+
"dev": "NODE_OPTIONS='--max-old-space-size=4096' next dev",
|
|
16
|
+
"build": "next build",
|
|
17
|
+
"build:cli": "tsc -p tsconfig.cli.json",
|
|
18
|
+
"dev:cli": "tsc -p tsconfig.cli.json --watch",
|
|
19
|
+
"test:cli": "tsc -p tsconfig.cli.json && node bin/basestream.js",
|
|
20
|
+
"link:cli": "pnpm run build:cli && npm link",
|
|
21
|
+
"unlink:cli": "npm unlink -g @basestream/cli",
|
|
22
|
+
"prebuild:publish": "npm run build:cli",
|
|
23
|
+
"start": "next start",
|
|
24
|
+
"lint": "eslint .",
|
|
25
|
+
"db:generate": "drizzle-kit generate",
|
|
26
|
+
"db:migrate": "drizzle-kit migrate",
|
|
27
|
+
"db:push": "drizzle-kit push",
|
|
28
|
+
"db:seed": "tsx src/db/seed.ts"
|
|
29
|
+
},
|
|
30
|
+
"dependencies": {
|
|
31
|
+
"@neondatabase/serverless": "^1.0.2",
|
|
32
|
+
"@octokit/auth-app": "^8.2.0",
|
|
33
|
+
"@octokit/rest": "^22.0.1",
|
|
34
|
+
"@paralleldrive/cuid2": "^2.2.2",
|
|
35
|
+
"@stripe/stripe-js": "^3.0.0",
|
|
36
|
+
"better-auth": "^1.6.0",
|
|
37
|
+
"dotenv": "^17.4.1",
|
|
38
|
+
"drizzle-orm": "^0.45.2",
|
|
39
|
+
"next": "^16.2.2",
|
|
40
|
+
"pg": "^8.20.0",
|
|
41
|
+
"react": "^19.2.4",
|
|
42
|
+
"react-dom": "^19.2.4",
|
|
43
|
+
"react-markdown": "^10.1.0",
|
|
44
|
+
"recharts": "^2.12.7",
|
|
45
|
+
"stripe": "^15.0.0",
|
|
46
|
+
"swr": "^2.4.1",
|
|
47
|
+
"zod": "^4.3.6"
|
|
48
|
+
},
|
|
49
|
+
"devDependencies": {
|
|
50
|
+
"@tailwindcss/postcss": "^4.2.2",
|
|
51
|
+
"@types/node": "^25.5.2",
|
|
52
|
+
"@types/pg": "^8.20.0",
|
|
53
|
+
"@types/react": "^19.2.14",
|
|
54
|
+
"@types/react-dom": "^19.2.3",
|
|
55
|
+
"@eslint/eslintrc": "^3",
|
|
56
|
+
"drizzle-kit": "^0.31.10",
|
|
57
|
+
"eslint": "^9",
|
|
58
|
+
"eslint-config-next": "16.2.2",
|
|
59
|
+
"postcss": "^8.4.38",
|
|
60
|
+
"tailwindcss": "^4.2.2",
|
|
61
|
+
"tsx": "^4.19.0",
|
|
62
|
+
"typescript": "^5.4.5"
|
|
63
|
+
}
|
|
64
|
+
}
|