@forwardimpact/map 0.11.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83) hide show
  1. package/LICENSE +201 -0
  2. package/README.md +67 -0
  3. package/bin/fit-map.js +287 -0
  4. package/examples/behaviours/_index.yaml +8 -0
  5. package/examples/behaviours/outcome_ownership.yaml +43 -0
  6. package/examples/behaviours/polymathic_knowledge.yaml +41 -0
  7. package/examples/behaviours/precise_communication.yaml +39 -0
  8. package/examples/behaviours/relentless_curiosity.yaml +37 -0
  9. package/examples/behaviours/systems_thinking.yaml +40 -0
  10. package/examples/capabilities/_index.yaml +8 -0
  11. package/examples/capabilities/business.yaml +205 -0
  12. package/examples/capabilities/delivery.yaml +1001 -0
  13. package/examples/capabilities/people.yaml +68 -0
  14. package/examples/capabilities/reliability.yaml +349 -0
  15. package/examples/capabilities/scale.yaml +1672 -0
  16. package/examples/copilot-setup-steps.yaml +25 -0
  17. package/examples/devcontainer.yaml +21 -0
  18. package/examples/disciplines/_index.yaml +6 -0
  19. package/examples/disciplines/data_engineering.yaml +68 -0
  20. package/examples/disciplines/engineering_management.yaml +61 -0
  21. package/examples/disciplines/software_engineering.yaml +68 -0
  22. package/examples/drivers.yaml +202 -0
  23. package/examples/framework.yaml +73 -0
  24. package/examples/levels.yaml +115 -0
  25. package/examples/questions/behaviours/outcome_ownership.yaml +228 -0
  26. package/examples/questions/behaviours/polymathic_knowledge.yaml +275 -0
  27. package/examples/questions/behaviours/precise_communication.yaml +248 -0
  28. package/examples/questions/behaviours/relentless_curiosity.yaml +248 -0
  29. package/examples/questions/behaviours/systems_thinking.yaml +238 -0
  30. package/examples/questions/capabilities/business.yaml +107 -0
  31. package/examples/questions/capabilities/delivery.yaml +101 -0
  32. package/examples/questions/capabilities/people.yaml +106 -0
  33. package/examples/questions/capabilities/reliability.yaml +105 -0
  34. package/examples/questions/capabilities/scale.yaml +104 -0
  35. package/examples/questions/skills/architecture_design.yaml +115 -0
  36. package/examples/questions/skills/cloud_platforms.yaml +105 -0
  37. package/examples/questions/skills/code_quality.yaml +162 -0
  38. package/examples/questions/skills/data_modeling.yaml +107 -0
  39. package/examples/questions/skills/devops.yaml +111 -0
  40. package/examples/questions/skills/full_stack_development.yaml +118 -0
  41. package/examples/questions/skills/sre_practices.yaml +113 -0
  42. package/examples/questions/skills/stakeholder_management.yaml +116 -0
  43. package/examples/questions/skills/team_collaboration.yaml +106 -0
  44. package/examples/questions/skills/technical_writing.yaml +110 -0
  45. package/examples/self-assessments.yaml +64 -0
  46. package/examples/stages.yaml +191 -0
  47. package/examples/tracks/_index.yaml +5 -0
  48. package/examples/tracks/platform.yaml +47 -0
  49. package/examples/tracks/sre.yaml +46 -0
  50. package/examples/vscode-settings.yaml +21 -0
  51. package/package.json +49 -0
  52. package/schema/json/behaviour-questions.schema.json +95 -0
  53. package/schema/json/behaviour.schema.json +73 -0
  54. package/schema/json/capability-questions.schema.json +95 -0
  55. package/schema/json/capability.schema.json +229 -0
  56. package/schema/json/defs.schema.json +132 -0
  57. package/schema/json/discipline.schema.json +123 -0
  58. package/schema/json/drivers.schema.json +48 -0
  59. package/schema/json/framework.schema.json +68 -0
  60. package/schema/json/levels.schema.json +121 -0
  61. package/schema/json/self-assessments.schema.json +52 -0
  62. package/schema/json/skill-questions.schema.json +83 -0
  63. package/schema/json/stages.schema.json +88 -0
  64. package/schema/json/track.schema.json +95 -0
  65. package/schema/rdf/behaviour-questions.ttl +128 -0
  66. package/schema/rdf/behaviour.ttl +130 -0
  67. package/schema/rdf/capability.ttl +466 -0
  68. package/schema/rdf/defs.ttl +396 -0
  69. package/schema/rdf/discipline.ttl +313 -0
  70. package/schema/rdf/drivers.ttl +84 -0
  71. package/schema/rdf/framework.ttl +166 -0
  72. package/schema/rdf/levels.ttl +357 -0
  73. package/schema/rdf/self-assessments.ttl +147 -0
  74. package/schema/rdf/skill-questions.ttl +155 -0
  75. package/schema/rdf/stages.ttl +166 -0
  76. package/schema/rdf/track.ttl +225 -0
  77. package/src/index-generator.js +65 -0
  78. package/src/index.js +44 -0
  79. package/src/levels.js +553 -0
  80. package/src/loader.js +608 -0
  81. package/src/modifiers.js +23 -0
  82. package/src/schema-validation.js +438 -0
  83. package/src/validation.js +2136 -0
@@ -0,0 +1,1001 @@
1
+ # yaml-language-server: $schema=https://www.forwardimpact.team/schema/json/capability.schema.json
2
+
3
+ id: delivery
4
+ name: Delivery
5
+ emojiIcon: 🚀
6
+ ordinalRank: 3
7
+ description: |
8
+ Building and shipping solutions that solve real problems.
9
+ Encompasses full-stack development, data integration, problem discovery,
10
+ and rapid prototyping.
11
+ professionalResponsibilities:
12
+ awareness:
13
+ You complete assigned implementation tasks within established patterns with
14
+ guidance from senior engineers
15
+ foundational:
16
+ You deliver small features end-to-end with minimal guidance, understanding
17
+ how your code fits the broader system
18
+ working:
19
+ You own feature delivery from design through deployment, making sound
20
+ technical trade-offs to ship value on time
21
+ practitioner:
22
+ You lead technical delivery of complex projects across multiple teams,
23
+ unblock others through hands-on contributions, and ensure engineering
24
+ quality
25
+ expert:
26
+ You drive delivery of the most critical technical initiatives, establish
27
+ engineering delivery practices across the business unit, and are the
28
+ technical authority on high-stakes projects
29
+ managementResponsibilities:
30
+ awareness:
31
+ You track team progress and communicate status to stakeholders with guidance
32
+ foundational:
33
+ You coordinate team delivery by managing dependencies, removing blockers,
34
+ and keeping stakeholders informed
35
+ working:
36
+ You own team delivery outcomes—balance scope, staffing, and timeline; make
37
+ resourcing decisions to meet commitments
38
+ practitioner:
39
+ You drive delivery excellence across multiple teams, establish delivery
40
+ metrics and practices for your area, hold teams accountable, and escalate
41
+ cross-team risks
42
+ expert:
43
+ You shape delivery culture across the business unit, lead strategic delivery
44
+ transformations, and represent delivery commitments at executive level
45
+ skills:
46
+ - id: data_integration
47
+ name: Data Integration
48
+ human:
49
+ description:
50
+ Gaining access to enterprise data, cleaning messy real-world datasets,
51
+ and making information usable for decision-making—often with
52
+ inconsistent formats, missing values, and undocumented schemas. The
53
+ heart of embedded engineering work.
54
+ proficiencyDescriptions:
55
+ awareness:
56
+ You understand how data flows through systems and can use existing
57
+ pipelines, APIs, and data sources with guidance. You know to ask about
58
+ data quality.
59
+ foundational:
60
+ You create simple data transformations and handle common formats (CSV,
61
+ JSON, SQL). You identify and report data quality issues and understand
62
+ basic ETL concepts.
63
+ working:
64
+ You integrate multiple data sources independently, clean messy
65
+ datasets, handle inconsistent formats and missing values, and document
66
+ data lineage. You troubleshoot integration failures.
67
+ practitioner:
68
+ You navigate complex enterprise data landscapes across teams, build
69
+ relationships to gain data access, handle undocumented schemas through
70
+ investigation, and build robust, maintainable integration solutions.
71
+ You mentor engineers in your area on data integration challenges.
72
+ expert:
73
+ You define data integration patterns and best practices across the
74
+ business unit. You architect large-scale data flows, solve the most
75
+ complex integration challenges, and are the authority on enterprise
76
+ data integration.
77
+ agent:
78
+ name: data-integration
79
+ description: |
80
+ Guide for integrating data from multiple sources, cleaning messy
81
+ datasets, and handling data quality issues.
82
+ useWhen: |
83
+ Working with enterprise data, ETL pipelines, or data transformation
84
+ tasks.
85
+ stages:
86
+ specify:
87
+ focus: |
88
+ Define data integration requirements and acceptance criteria.
89
+ Clarify data sources, formats, and quality expectations.
90
+ readChecklist:
91
+ - Identify source and target data systems
92
+ - Document data format and schema requirements
93
+ - Define data quality acceptance criteria
94
+ - Clarify data freshness and latency requirements
95
+ - Mark ambiguities with [NEEDS CLARIFICATION]
96
+ confirmChecklist:
97
+ - Data sources are identified and accessible
98
+ - Data format requirements are documented
99
+ - Quality criteria are defined
100
+ - Latency requirements are clear
101
+ plan:
102
+ focus: |
103
+ Plan data integration approach. Identify sources, assess quality,
104
+ and plan transformation logic.
105
+ readChecklist:
106
+ - Identify data sources and access requirements
107
+ - Assess data quality and completeness
108
+ - Plan transformation logic and validation
109
+ - Document data lineage approach
110
+ confirmChecklist:
111
+ - Data sources are identified
112
+ - Data formats are understood
113
+ - Data quality requirements are defined
114
+ - Transformation logic is planned
115
+ onboard:
116
+ focus: |
117
+ Set up the data integration environment. Install data
118
+ processing tools, configure data source access, and verify
119
+ connectivity to all required systems.
120
+ readChecklist:
121
+ - Install data tools (DuckDB, Polars, Great Expectations)
122
+ - Configure database connections and API credentials
123
+ - Verify access to all identified data sources
124
+ - Set up virtual environment and pin dependency versions
125
+ - Create .env file with connection strings and credentials
126
+ confirmChecklist:
127
+ - All data processing libraries installed and importable
128
+ - Data source connections verified and working
129
+ - Credentials stored securely in .env (not committed to git)
130
+ - Sample queries run successfully against each data source
131
+ - Virtual environment is reproducible (requirements.txt or
132
+ pyproject.toml)
133
+ code:
134
+ focus: |
135
+ Implement data transformations with robust quality checks
136
+ and error handling for messy real-world data.
137
+ readChecklist:
138
+ - Implement data extraction and loading
139
+ - Handle data quality issues (nulls, formats, duplicates)
140
+ - Create transformation logic
141
+ - Add validation and error handling
142
+ - Document data lineage
143
+ confirmChecklist:
144
+ - Data transformations produce expected output
145
+ - Basic validation exists for input data
146
+ - Data formats are handled correctly
147
+ - Error handling exists for malformed data
148
+ - Pipeline is idempotent
149
+ review:
150
+ focus: |
151
+ Validate data quality, transformation correctness, and
152
+ operational readiness.
153
+ readChecklist:
154
+ - Verify data quality checks
155
+ - Test with edge cases and malformed data
156
+ - Review error handling coverage
157
+ - Validate documentation completeness
158
+ confirmChecklist:
159
+ - Data quality checks are implemented
160
+ - Edge cases are handled
161
+ - Data lineage is documented
162
+ - Failures are logged and alertable
163
+ deploy:
164
+ focus: |
165
+ Deploy data pipeline to production and verify data flow.
166
+ Monitor for data quality and latency issues.
167
+ readChecklist:
168
+ - Deploy pipeline configuration
169
+ - Verify data flows end-to-end in production
170
+ - Monitor data quality metrics
171
+ - Confirm alerting is operational
172
+ confirmChecklist:
173
+ - Pipeline deployed successfully
174
+ - Data flowing in production
175
+ - Quality metrics within thresholds
176
+ - Alerting verified working
177
+ toolReferences:
178
+ - name: DuckDB
179
+ url: https://duckdb.org/docs/
180
+ simpleIcon: duckdb
181
+ description: In-process analytical database
182
+ useWhen: Querying CSV/Parquet files with SQL or quick data exploration
183
+ - name: Polars
184
+ url: https://docs.pola.rs/
185
+ simpleIcon: polars
186
+ description: Fast DataFrame library with lazy evaluation
187
+ useWhen: Transforming and cleaning large datasets programmatically
188
+ - name: Great Expectations
189
+ url: https://docs.greatexpectations.io/
190
+ simpleIcon: python
191
+ description: Data validation and profiling framework
192
+ useWhen: Validating data quality and creating data documentation
193
+ instructions: |
194
+ ## Step 1: Explore the Source Data
195
+
196
+ Use DuckDB to quickly inspect files without loading into memory.
197
+ Check schema, data types, row counts, and null distributions.
198
+
199
+ ## Step 2: Transform with Polars
200
+
201
+ Use lazy evaluation for large datasets: filter, fill nulls,
202
+ parse dates, and aggregate. Collect only when the query plan
203
+ is complete. Write cleaned data to Parquet.
204
+
205
+ ## Step 3: Validate Data Quality
206
+
207
+ Define expectations with Great Expectations: not-null checks,
208
+ uniqueness constraints, value ranges. Run validation and
209
+ check results.
210
+
211
+ ## Step 4: Export to Target Format
212
+
213
+ Use DuckDB COPY or Polars write methods to export transformed
214
+ data to the target format and location.
215
+ installScript: |
216
+ set -e
217
+ pip install duckdb polars great-expectations
218
+ python -c "import duckdb, polars, great_expectations"
219
+ implementationReference: |
220
+ ## SQL Exploration
221
+
222
+ ```sql
223
+ SELECT * FROM read_csv('data.csv') LIMIT 10;
224
+ DESCRIBE SELECT * FROM read_csv('data.csv');
225
+ SELECT COUNT(*), COUNT(id), COUNT(email) FROM read_csv('data.csv');
226
+ ```
227
+
228
+ ## Polars Transformation
229
+
230
+ ```python
231
+ import polars as pl
232
+
233
+ df = (
234
+ pl.scan_csv("source_data.csv")
235
+ .filter(pl.col("status") == "active")
236
+ .with_columns(
237
+ pl.col("value").fill_null(0),
238
+ pl.col("date").str.to_date("%Y-%m-%d")
239
+ )
240
+ .group_by("category")
241
+ .agg(pl.col("value").sum())
242
+ .collect()
243
+ )
244
+ df.write_parquet("cleaned_data.parquet")
245
+ ```
246
+
247
+ ## Data Quality Validation
248
+
249
+ ```python
250
+ import great_expectations as gx
251
+
252
+ context = gx.get_context()
253
+ validator = context.sources.pandas_default.read_csv("cleaned_data.csv")
254
+ validator.expect_column_values_to_not_be_null("id")
255
+ validator.expect_column_values_to_be_unique("id")
256
+ validator.expect_column_values_to_be_between("age", 0, 120)
257
+ results = validator.validate()
258
+ ```
259
+
260
+ ## Verification
261
+
262
+ Your pipeline is working when:
263
+ - Source data loads without errors
264
+ - Transformation produces expected row counts
265
+ - Data quality checks pass
266
+ - Output file is readable and contains expected data
267
+
268
+ ```python
269
+ result = pl.read_parquet("output.parquet")
270
+ assert len(result) > 0, "Output should have rows"
271
+ ```
272
+
273
+ ## Common Pitfalls
274
+
275
+ - **Data leakage**: Using future data in training sets
276
+ - **Silent nulls**: Empty strings vs NULL vs placeholder values
277
+ - **Schema drift**: Columns change without warning
278
+ - **Encoding issues**: UTF-8 vs Latin-1 in CSV files
279
+ - id: full_stack_development
280
+ name: Full-Stack Development
281
+ human:
282
+ description:
283
+ Building complete solutions across frontend, APIs, databases, and
284
+ infrastructure without dependencies on specialists. JavaScript and
285
+ Python are our primary languages, with CloudFormation and Terraform for
286
+ infrastructure. Essential for rapid delivery and embedded engineering
287
+ work.
288
+ proficiencyDescriptions:
289
+ awareness:
290
+ You understand how frontend, backend, and database layers work
291
+ together. You can make changes in one layer with guidance and
292
+ understand the impact on other layers.
293
+ foundational:
294
+ You build simple features across frontend and backend using JavaScript
295
+ or Python. You understand how layers connect through APIs and can
296
+ debug across the stack.
297
+ working:
298
+ You deliver complete features end-to-end independently—frontend,
299
+ backend, database, and infrastructure (CloudFormation/Terraform). You
300
+ make pragmatic technology choices and deploy what you build.
301
+ practitioner:
302
+ You build complete applications rapidly across any technology stack
303
+ for teams in your area. You select the right tools for each problem,
304
+ balance technical debt with delivery speed, and mentor engineers on
305
+ full-stack development.
306
+ expert:
307
+ You work comfortably in any language and rapidly acquire new skills as
308
+ needed. You deliver production solutions in days not months, shape
309
+ full-stack practices across the business unit, and exemplify
310
+ polymathic engineering.
311
+ agent:
312
+ name: full-stack-development
313
+ description: |
314
+ Guide for building complete solutions across the full technology
315
+ stack.
316
+ useWhen: |
317
+ Asked to implement features spanning frontend, backend, database,
318
+ and infrastructure layers.
319
+ stages:
320
+ specify:
321
+ focus: |
322
+ Define full-stack feature requirements and acceptance criteria.
323
+ Clarify user needs and system integration points.
324
+ readChecklist:
325
+ - Identify user stories and acceptance criteria
326
+ - Document expected user interactions
327
+ - Clarify integration requirements with existing systems
328
+ - Define non-functional requirements (performance, security)
329
+ - Mark ambiguities with [NEEDS CLARIFICATION]
330
+ confirmChecklist:
331
+ - User stories are documented
332
+ - Acceptance criteria are defined
333
+ - Integration points are identified
334
+ - Non-functional requirements are clear
335
+ plan:
336
+ focus: |
337
+ Design the full-stack solution architecture. Define API
338
+ contracts and plan layer interactions.
339
+ readChecklist:
340
+ - Define the API contract first
341
+ - Plan frontend and backend responsibilities
342
+ - Design database schema
343
+ - Plan infrastructure requirements
344
+ confirmChecklist:
345
+ - API contract is defined
346
+ - Layer responsibilities are clear
347
+ - Database schema is planned
348
+ - Infrastructure approach is decided
349
+ onboard:
350
+ focus: |
351
+ Set up the full-stack development environment. Install
352
+ frameworks, configure services, set up database, and verify
353
+ the development server runs.
354
+ readChecklist:
355
+ - Install project dependencies (npm install, pip install)
356
+ - Configure environment variables in .env.local or .env
357
+ - Start local database and apply schema/migrations
358
+ - Configure linter, formatter, and pre-commit hooks
359
+ - Set up GitHub tokens for API access if needed
360
+ - Verify development server starts without errors
361
+ confirmChecklist:
362
+ - All dependencies installed and versions locked
363
+ - Environment variables configured for local development
364
+ - Database running locally with schema applied
365
+ - All credentials stored in .env — NEVER hardcoded in code,
366
+ including seed scripts and utility scripts
367
+ - Linter and formatter pass on existing code
368
+ - Development server starts and responds to requests
369
+ - CI pipeline configuration is valid
370
+ code:
371
+ focus: |
372
+ Build vertically—complete one feature end-to-end before
373
+ starting another. Validates assumptions early.
374
+ readChecklist:
375
+ - Implement API endpoints
376
+ - Build frontend integration
377
+ - Create database schema and queries
378
+ - Configure infrastructure as needed
379
+ - Test across layers
380
+ confirmChecklist:
381
+ - Frontend connects to backend correctly
382
+ - Database schema supports the feature
383
+ - Error handling spans all layers
384
+ - Feature works end-to-end
385
+ - At least one test exists for each API route and passes when run
386
+ - Deployment is automated
387
+ review:
388
+ focus: |
389
+ Verify integration across layers and ensure deployment
390
+ readiness.
391
+ readChecklist:
392
+ - Test integration across all layers
393
+ - Verify error handling end-to-end
394
+ - Check deployment configuration
395
+ - Review documentation
396
+ confirmChecklist:
397
+ - Integration tests pass
398
+ - Deployment verified
399
+ - Documentation is complete
400
+ - Feature is production-ready
401
+ deploy:
402
+ focus: |
403
+ Deploy full-stack feature to production and verify end-to-end
404
+ functionality in live environment.
405
+ readChecklist:
406
+ - Deploy backend services
407
+ - Deploy frontend changes
408
+ - Run database migrations
409
+ - Verify feature works in production
410
+ - Monitor for errors and performance issues
411
+ confirmChecklist:
412
+ - All components deployed successfully
413
+ - Feature works end-to-end in production
414
+ - No errors in monitoring
415
+ - Performance meets requirements
416
+ toolReferences:
417
+ - name: Supabase
418
+ url: https://supabase.com/docs
419
+ simpleIcon: supabase
420
+ description: Open source Firebase alternative with PostgreSQL
421
+ useWhen:
422
+ Building applications with PostgreSQL, auth, and real-time features
423
+ - name: Next.js
424
+ url: https://nextjs.org/docs
425
+ simpleIcon: nextdotjs
426
+ description: React framework for full-stack web applications
427
+ useWhen:
428
+ Building React applications with server-side rendering or API routes
429
+ - name: GitHub Actions
430
+ url: https://docs.github.com/en/actions
431
+ simpleIcon: githubactions
432
+ description: CI/CD and automation platform
433
+ useWhen: Automating builds, tests, and deployments
434
+ - name: Nixpacks
435
+ url: https://nixpacks.com/docs
436
+ simpleIcon: nixos
437
+ description: Build tool that auto-detects and builds applications
438
+ useWhen: Auto-building and deploying applications to containers
439
+ - name: Colima
440
+ url: https://github.com/abiosoft/colima
441
+ simpleIcon: docker
442
+ description:
443
+ Lightweight container runtime for macOS with Docker-compatible CLI
444
+ useWhen:
445
+ Running containers locally for development, building images, or
446
+ testing containerized apps
447
+ instructions: |
448
+ ## Step 1: Configure Environment
449
+
450
+ Get connection details from `supabase status`. Create `.env.local`
451
+ with Supabase URL and anon key. Create the Supabase client module.
452
+
453
+ ## Step 2: Create Database Schema
454
+
455
+ Create a migration with `supabase migration new`, define the
456
+ SQL schema with RLS enabled, and apply with `supabase db push`.
457
+
458
+ ## Step 3: Build API Routes
459
+
460
+ Create Next.js API routes for GET and POST operations using
461
+ the Supabase client.
462
+
463
+ ## Step 4: Build Frontend
464
+
465
+ Create a React component that fetches from the API and renders
466
+ data. Start with a simple list display.
467
+
468
+ ## Step 5: Deploy
469
+
470
+ Use Nixpacks to auto-detect and build the image. Run it
471
+ locally with Colima's Docker-compatible runtime to verify
472
+ before deploying to production.
473
+ installScript: |
474
+ set -e
475
+ brew install colima
476
+ colima start
477
+ brew install supabase/tap/supabase || npm install -g supabase
478
+ npx --yes create-next-app@latest my-app --typescript --yes
479
+ cd my-app
480
+ supabase init
481
+ supabase start
482
+ npm install @supabase/supabase-js
483
+ colima status
484
+ implementationReference: |
485
+ ## Supabase Client Setup
486
+
487
+ ```typescript
488
+ // lib/supabase.ts
489
+ import { createClient } from '@supabase/supabase-js'
490
+
491
+ export const supabase = createClient(
492
+ process.env.NEXT_PUBLIC_SUPABASE_URL!,
493
+ process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!
494
+ )
495
+ ```
496
+
497
+ ## Database Schema
498
+
499
+ ```sql
500
+ CREATE TABLE items (
501
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
502
+ name TEXT NOT NULL,
503
+ description TEXT,
504
+ created_at TIMESTAMPTZ DEFAULT NOW()
505
+ );
506
+ ALTER TABLE items ENABLE ROW LEVEL SECURITY;
507
+ ```
508
+
509
+ ## API Route
510
+
511
+ ```typescript
512
+ // app/api/items/route.ts
513
+ import { supabase } from '@/lib/supabase'
514
+ import { NextResponse } from 'next/server'
515
+
516
+ export async function GET() {
517
+ const { data, error } = await supabase.from('items').select('*')
518
+ if (error) return NextResponse.json({ error }, { status: 500 })
519
+ return NextResponse.json(data)
520
+ }
521
+ ```
522
+
523
+ ## Frontend Component
524
+
525
+ ```typescript
526
+ // app/page.tsx
527
+ 'use client'
528
+ import { useEffect, useState } from 'react'
529
+
530
+ export default function Home() {
531
+ const [items, setItems] = useState([])
532
+ useEffect(() => {
533
+ fetch('/api/items').then(r => r.json()).then(setItems)
534
+ }, [])
535
+ return (
536
+ <main>
537
+ <h1>Items</h1>
538
+ <ul>{items.map((item: any) => <li key={item.id}>{item.name}</li>)}</ul>
539
+ </main>
540
+ )
541
+ }
542
+ ```
543
+
544
+ ## Verification
545
+
546
+ Your full-stack app is working when:
547
+ - `npm run dev` starts without errors
548
+ - Frontend loads at http://localhost:3000
549
+ - API responds at http://localhost:3000/api/items
550
+ - Data persists in database (check Supabase Studio at http://localhost:54323)
551
+
552
+ ## Common Pitfalls
553
+
554
+ - **Missing env vars**: Supabase client fails silently
555
+ - **RLS without policies**: Queries return empty results
556
+ - **Type mismatch**: Generate types with `supabase gen types typescript`
557
+ - **Migration order**: Migrations apply alphabetically by filename
558
+
559
+ ## Local Container Testing with Colima
560
+
561
+ ```bash
562
+ # Start Colima (lightweight Docker-compatible runtime)
563
+ colima start
564
+
565
+ # Build with Nixpacks and run locally
566
+ nixpacks build . --name my-app
567
+ docker run --rm -p 3000:3000 --env-file .env.local my-app
568
+
569
+ # Verify app responds
570
+ curl http://localhost:3000
571
+ ```
572
+ - id: problem_discovery
573
+ name: Problem Discovery
574
+ human:
575
+ description:
576
+ Navigating undefined problem spaces to uncover real requirements through
577
+ observation and immersion. Where most engineers expect specifications,
578
+ FDEs embrace ambiguity—starting with open questions like "How can we
579
+ accelerate patient recruitment?" rather than detailed requirements
580
+ documents.
581
+ proficiencyDescriptions:
582
+ awareness:
583
+ You recognize that initial requirements are often incomplete. You ask
584
+ clarifying questions when you encounter gaps and don't make
585
+ assumptions.
586
+ foundational:
587
+ You actively seek context beyond initial requirements, interview
588
+ stakeholders to understand "why" behind requests, and document
589
+ discovered constraints and assumptions.
590
+ working:
591
+ You navigate ambiguous problem spaces independently. You discover
592
+ requirements through observation and user shadowing, reframe problems
593
+ to find higher-value solutions, and distinguish symptoms from root
594
+ causes.
595
+ practitioner:
596
+ You seek out undefined problems rather than avoiding them. You embed
597
+ with users to discover latent needs, coach engineers in your area on
598
+ problem discovery techniques, and turn ambiguity into clear problem
599
+ statements.
600
+ expert:
601
+ You shape approaches to problem discovery across the business unit.
602
+ You are recognized for transforming ambiguous situations into clear
603
+ opportunities, influence how teams engage with business problems, and
604
+ are the go-to person for the most undefined challenges.
605
+ agent:
606
+ name: problem-discovery
607
+ description: |
608
+ Guide for navigating undefined problem spaces and uncovering real
609
+ requirements.
610
+ useWhen: |
611
+ Facing ambiguous requests, exploring user needs, or translating vague
612
+ asks into clear problem statements.
613
+ stages:
614
+ specify:
615
+ focus: |
616
+ Explore the problem space and document what is known.
617
+ Surface ambiguities and unknowns before attempting solutions.
618
+ readChecklist:
619
+ - Document the initial problem statement as understood
620
+ - ASK the user who the stakeholders are and what their perspectives
621
+ are
622
+ - ASK the user what is known vs unknown about the problem
623
+ - ASK the user to confirm or reject your assumptions
624
+ - Mark all ambiguities with [NEEDS CLARIFICATION] and ASK the user
625
+ to clarify them
626
+ confirmChecklist:
627
+ - Initial problem statement is documented
628
+ - Stakeholders are identified
629
+ - Known vs unknown is explicit
630
+ - Assumptions are listed for validation
631
+ plan:
632
+ focus: |
633
+ Embrace ambiguity and explore the problem space. Understand
634
+ context deeply before proposing solutions.
635
+ readChecklist:
636
+ - Ask open-ended questions about goals and context
637
+ - Identify stakeholders and their needs
638
+ - Discover constraints and prior attempts
639
+ - Distinguish symptoms from root causes
640
+ - Write clear problem statement
641
+ confirmChecklist:
642
+ - Understand who has the problem
643
+ - Success criteria are clear
644
+ - Root cause identified, not just symptoms
645
+ - Constraints and assumptions documented
646
+ - Problem statement is validated
647
+ onboard:
648
+ focus: |
649
+ Set up the environment for solution implementation.
650
+ Install required tools, configure access to relevant
651
+ systems, and prepare workspace for development.
652
+ readChecklist:
653
+ - Install project dependencies from plan requirements
654
+ - Configure access to relevant data sources and APIs
655
+ - Set up environment variables and credentials
656
+ - Verify access to stakeholder communication channels
657
+ - Create workspace structure for documentation and code
658
+ confirmChecklist:
659
+ - All planned tools and dependencies are installed
660
+ - API keys and credentials are configured securely
661
+ - Workspace structure supports the planned approach
662
+ - Access to all required systems is verified
663
+ - Development environment matches plan requirements
664
+ code:
665
+ focus: |
666
+ Implement solution while staying connected to the original
667
+ problem. Validate assumptions as you build.
668
+ readChecklist:
669
+ - Build incrementally to validate understanding
670
+ - Check in with stakeholders frequently
671
+ - Adjust as new information emerges
672
+ - Document discovered requirements
673
+ confirmChecklist:
674
+ - Solution addresses the validated problem
675
+ - Stakeholder feedback is incorporated
676
+ - Discovered requirements are documented
677
+ - Scope boundaries are maintained
678
+ review:
679
+ focus: |
680
+ Verify solution addresses the real problem and stakeholders
681
+ agree on success.
682
+ readChecklist:
683
+ - Validate with original stakeholders
684
+ - Confirm problem is addressed
685
+ - Document learnings for future reference
686
+ confirmChecklist:
687
+ - Stakeholders confirm problem is solved
688
+ - Success criteria are met
689
+ - Learnings are documented
690
+ deploy:
691
+ focus: |
692
+ Release solution and verify it addresses the real problem
693
+ in production context.
694
+ readChecklist:
695
+ - Deploy solution to production
696
+ - Gather stakeholder feedback on live solution
697
+ - Monitor for unexpected usage patterns
698
+ - Document discovered requirements for future iterations
699
+ confirmChecklist:
700
+ - Solution is deployed
701
+ - Stakeholders have validated in production
702
+ - Usage patterns match expectations
703
+ - Learnings are captured
704
+ instructions: |
705
+ ## Discovery Process
706
+
707
+ ### 1. Embrace Ambiguity
708
+ - Don't rush to solutions
709
+ - Resist the urge to fill gaps with assumptions
710
+ - Ask open-ended questions
711
+ - Seek to understand context deeply
712
+
713
+ ### 2. Understand the Context
714
+ - Who are the stakeholders?
715
+ - What triggered this request?
716
+ - What has been tried before?
717
+ - What constraints exist?
718
+ - What does success look like?
719
+
720
+ ### 3. Find the Real Problem
721
+ - Ask "why" repeatedly (5 Whys technique)
722
+ - Distinguish wants from needs
723
+ - Identify root causes vs symptoms
724
+ - Challenge initial framing
725
+
726
+ ### 4. Validate Understanding
727
+ - Restate the problem in your own words
728
+ - Confirm with stakeholders
729
+ - Check for hidden assumptions
730
+ - Identify what's still unknown
731
+ implementationReference: |
732
+ ## Key Questions
733
+
734
+ ### Understanding Goals
735
+ - What outcome are you trying to achieve?
736
+ - How will you know if this succeeds?
737
+ - What happens if we do nothing?
738
+ - What's the deadline and why?
739
+
740
+ ### Understanding Context
741
+ - Who uses this and how?
742
+ - What's the current workaround?
743
+ - What constraints must we work within?
744
+ - What has been tried before?
745
+
746
+ ### Understanding Scope
747
+ - What's in scope vs out of scope?
748
+ - What's the minimum viable solution?
749
+ - What could we cut if needed?
750
+ - What can't we compromise on?
751
+
752
+ ## Problem Statement Template
753
+
754
+ A good problem statement answers:
755
+ - **Who** has this problem?
756
+ - **What** is the problem they face?
757
+ - **Why** does it matter?
758
+ - **When/Where** does it occur?
759
+ - **How** is it currently handled?
760
+
761
+ Format: "[User type] needs [capability] because [reason], but currently [obstacle]."
762
+
763
+ ## Common Pitfalls
764
+
765
+ - **Solutioning too early**: Jumping to "how" before understanding "what"
766
+ - **Taking requests literally**: Building what was asked, not what's needed
767
+ - **Assuming completeness**: Believing initial requirements are complete
768
+ - **Ignoring context**: Missing business or user context
769
+ - **Single perspective**: Only talking to one stakeholder
770
+ - id: rapid_prototyping
771
+ name: Rapid Prototyping & Validation
772
+ human:
773
+ description:
774
+ Building working solutions quickly to validate ideas and build trust
775
+ through delivery. Credibility comes from showing real software in days,
776
+ not months—demonstrating value before polishing details. "Working
777
+ solutions delivered in days" is the FDE standard.
778
+ proficiencyDescriptions:
779
+ awareness:
780
+ You understand the value of prototypes for learning quickly. You can
781
+ create simple demos and mockups with guidance.
782
+ foundational:
783
+ You build functional prototypes to validate ideas, prioritize core
784
+ functionality over polish, and iterate based on user feedback. You
785
+ know the difference between prototype and production code.
786
+ working:
787
+ You deliver working solutions rapidly (days not weeks). You use
788
+ prototypes to build stakeholder trust, know when to stop prototyping
789
+ and start productionizing, and balance speed with appropriate quality.
790
+ practitioner:
791
+ You lead rapid delivery initiatives across teams in your area, coach
792
+ on prototype-first approaches, establish trust through consistent fast
793
+ delivery, and define clear criteria for prototype-to-production
794
+ transitions.
795
+ expert:
796
+ You shape culture around rapid validation and iterative delivery
797
+ across the business unit. You are recognized for transformative fast
798
+ delivery, define standards for prototype-to-production, and exemplify
799
+ the "deliver in days" mindset.
800
+ agent:
801
+ name: rapid-prototyping
802
+ description: |
803
+ Guide for building working prototypes quickly to validate ideas and
804
+ demonstrate feasibility.
805
+ useWhen: |
806
+ Asked to build a quick demo, proof of concept, MVP, or prototype
807
+ something rapidly.
808
+ stages:
809
+ specify:
810
+ focus: |
811
+ Define what the prototype must demonstrate and success criteria.
812
+ Scope ruthlessly—prototypes are for learning, not production.
813
+ readChecklist:
814
+ - Identify the key question or hypothesis to validate
815
+ - Document minimum acceptable demonstration
816
+ - Define what success looks like for this prototype
817
+ - Explicitly mark what is out of scope
818
+ - Mark any ambiguities with [NEEDS CLARIFICATION]
819
+ confirmChecklist:
820
+ - Key question to answer is clear
821
+ - Minimum viable demonstration is defined
822
+ - Success criteria are explicit
823
+ - Out of scope items are documented
824
+ plan:
825
+ focus: |
826
+ Define what the prototype needs to demonstrate and set
827
+ success criteria. Scope ruthlessly for speed.
828
+ readChecklist:
829
+ - Define the key question to answer
830
+ - Scope to minimum viable demonstration
831
+ - Identify what can be hardcoded or skipped
832
+ - Set time box for delivery
833
+ confirmChecklist:
834
+ - Success criteria are defined
835
+ - Scope is minimal and focused
836
+ - Time box is agreed
837
+ - It's clear this is a prototype
838
+ onboard:
839
+ focus: |
840
+ Set up the prototyping environment as fast as possible.
841
+ Use scaffolding tools, install minimal dependencies,
842
+ and get to a running state quickly.
843
+ readChecklist:
844
+ - Scaffold project using template or CLI tool
845
+ - Install only essential dependencies
846
+ - Configure minimal environment variables
847
+ - Start development server and verify it runs
848
+ - Skip non-essential tooling (linters, CI) for speed
849
+ confirmChecklist:
850
+ - Project scaffolded and running locally
851
+ - Core dependencies installed
852
+ - Development server responds to requests
853
+ - Ready to start building visible output immediately
854
+ code:
855
+ focus: |
856
+ Build the simplest thing that demonstrates the concept.
857
+ Prioritize visible progress over backend elegance.
858
+ readChecklist:
859
+ - Start with visible UI/output
860
+ - Hardcode values that would normally be configurable
861
+ - Skip edge cases that won't appear in demo
862
+ - Show progress frequently
863
+ - Document shortcuts taken
864
+ confirmChecklist:
865
+ - Core concept is demonstrable
866
+ - Happy path works end-to-end
867
+ - At least one smoke test verifying the happy path exists
868
+ - SSR pages that fetch from API routes use environment-aware base
869
+ URLs (not hardcoded localhost)
870
+ - Known limitations are documented
871
+ - Stakeholders can interact with it
872
+ review:
873
+ focus: |
874
+ Validate prototype answers the original question. Decide
875
+ whether to iterate, productionize, or abandon.
876
+ readChecklist:
877
+ - Demo to stakeholders
878
+ - Gather feedback on the concept
879
+ - Decide next steps
880
+ - Document learnings
881
+ confirmChecklist:
882
+ - Stakeholders have seen the prototype
883
+ - Original question is answered
884
+ - Next steps are decided
885
+ - Learnings are captured
886
+ deploy:
887
+ focus: |
888
+ Make prototype accessible to stakeholders for evaluation.
889
+ Prototypes may not need production deployment.
890
+ readChecklist:
891
+ - Deploy to accessible environment (staging or demo)
892
+ - Share access with stakeholders
893
+ - Gather hands-on feedback
894
+ - Decide on next phase (iterate, productionize, or abandon)
895
+ confirmChecklist:
896
+ - Prototype is accessible to stakeholders
897
+ - Feedback has been gathered
898
+ - Decision on next steps is made
899
+ - Learnings are documented
900
+ toolReferences:
901
+ - name: Supabase
902
+ url: https://supabase.com/docs
903
+ simpleIcon: supabase
904
+ description: Open source Firebase alternative with PostgreSQL
905
+ useWhen: Instant PostgreSQL database with auth for rapid prototypes
906
+ - name: Next.js
907
+ url: https://nextjs.org/docs
908
+ simpleIcon: nextdotjs
909
+ description: React framework for full-stack web applications
910
+ useWhen: Scaffolding a full-stack prototype with server-side rendering
911
+ - name: Nixpacks
912
+ url: https://nixpacks.com/docs
913
+ simpleIcon: nixos
914
+ description: Build tool that auto-detects and builds applications
915
+ useWhen: Deploying prototypes to containers without writing Dockerfiles
916
+ instructions: |
917
+ ## Step 1: Define What to Demonstrate
918
+
919
+ Before writing code, answer: What question does this prototype
920
+ answer? What's the minimum to demonstrate the concept? What can
921
+ be hardcoded or skipped? When will you stop?
922
+
923
+ ## Step 2: Start with Visible Output
924
+
925
+ Build the UI first—stakeholders need to see something.
926
+ Hardcode data initially so you have working output in minutes.
927
+
928
+ ## Step 3: Add Real Data When Needed
929
+
930
+ Only add database when the UI needs real data. Use Supabase
931
+ Studio to create tables directly (skip migrations for prototypes).
932
+
933
+ ## Step 4: Document Shortcuts
934
+
935
+ Add a README section listing what was skipped and what's needed
936
+ to productionize. This prevents confusion later.
937
+ installScript: |
938
+ set -e
939
+ npx --yes create-next-app@latest my-prototype --typescript --yes
940
+ cd my-prototype
941
+ supabase init
942
+ supabase start
943
+ npm run dev
944
+ implementationReference: |
945
+ ## Start with Hardcoded UI
946
+
947
+ ```typescript
948
+ // app/page.tsx
949
+ export default function Home() {
950
+ const items = [
951
+ { id: 1, name: 'Demo Item 1' },
952
+ { id: 2, name: 'Demo Item 2' },
953
+ ]
954
+ return (
955
+ <main style={{ padding: '2rem' }}>
956
+ <h1>Prototype Demo</h1>
957
+ <ul>{items.map(item => <li key={item.id}>{item.name}</li>)}</ul>
958
+ </main>
959
+ )
960
+ }
961
+ ```
962
+
963
+ ## Replace with Real Data
964
+
965
+ ```typescript
966
+ import { supabase } from '@/lib/supabase'
967
+ const { data: items } = await supabase.from('items').select('*')
968
+ ```
969
+
970
+ ## Document Shortcuts
971
+
972
+ ```markdown
973
+ ## Prototype Limitations
974
+ This is a prototype for [purpose]. Not production-ready.
975
+
976
+ **Shortcuts taken:**
977
+ - No authentication
978
+ - Hardcoded configuration in code
979
+ - No error handling for edge cases
980
+
981
+ **To productionize:**
982
+ - Add authentication
983
+ - Move config to environment variables
984
+ - Add proper error handling
985
+ ```
986
+
987
+ ## Acceptable vs Required
988
+
989
+ | Acceptable to Skip | Still Required |
990
+ |-------------------|----------------|
991
+ | Authentication | Core functionality works |
992
+ | Error handling | Happy path is reliable |
993
+ | Migrations | It's clear this is a prototype |
994
+ | Tests | Limitations are documented |
995
+
996
+ ## Common Pitfalls
997
+
998
+ - **Over-engineering**: Adding features "while you're at it"
999
+ - **No stopping point**: Polishing what you might throw away
1000
+ - **Unclear purpose**: Building without knowing what question to answer
1001
+ - **Hidden shortcuts**: Not documenting what was skipped