openpersona 0.14.3 → 0.16.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. package/README.md +10 -2
  2. package/bin/cli.js +104 -3
  3. package/layers/faculties/avatar/SKILL.md +93 -0
  4. package/layers/faculties/avatar/faculty.json +18 -0
  5. package/layers/faculties/avatar/references/AVATAR-CONTROL.md +127 -0
  6. package/layers/faculties/avatar/references/FACE-CONTROL.md +7 -0
  7. package/layers/faculties/avatar/references/VISUAL-MANIFEST.md +90 -0
  8. package/layers/faculties/avatar/scripts/avatar-control.js +307 -0
  9. package/layers/faculties/avatar/scripts/avatar-runtime.js +582 -0
  10. package/layers/faculties/economy/SKILL.md +92 -116
  11. package/layers/faculties/economy/faculty.json +5 -7
  12. package/layers/faculties/economy/scripts/economy-guard.js +15 -46
  13. package/layers/faculties/economy/scripts/economy-hook.js +16 -177
  14. package/layers/faculties/economy/scripts/economy.js +13 -510
  15. package/layers/faculties/voice/scripts/speak.js +207 -63
  16. package/lib/canvas-generator.js +284 -0
  17. package/lib/generator.js +15 -20
  18. package/lib/installer.js +11 -4
  19. package/lib/searcher.js +4 -3
  20. package/lib/vitality-report.js +294 -0
  21. package/lib/vitality.js +65 -0
  22. package/package.json +9 -2
  23. package/presets/ai-girlfriend/manifest.json +1 -1
  24. package/presets/base/manifest.json +1 -1
  25. package/presets/health-butler/manifest.json +1 -1
  26. package/presets/life-assistant/manifest.json +1 -1
  27. package/presets/samantha/manifest.json +33 -1
  28. package/presets/samantha/persona.json +23 -2
  29. package/presets/stoic-mentor/manifest.json +1 -1
  30. package/skills/open-persona/SKILL.md +72 -1
  31. package/skills/open-persona/references/AVATAR.md +77 -0
  32. package/skills/open-persona/references/ECONOMY.md +122 -0
  33. package/skills/open-persona/references/FACULTIES.md +4 -0
  34. package/templates/canvas.template.html +542 -0
  35. package/templates/soul-injection.template.md +13 -16
  36. package/templates/vitality.template.html +523 -0
  37. package/layers/faculties/economy/scripts/economy-lib.js +0 -472
  38. package/lib/economy-schema.js +0 -62
package/README.md CHANGED
@@ -9,6 +9,9 @@ Four-layer architecture — **Soul / Body / Faculty / Skill** — generates stan
9
9
  Meet **Samantha**, a live OpenPersona instance on **Moltbook**:
10
10
  👉 [moltbook.com/u/Samantha-OP](https://www.moltbook.com/u/Samantha-OP)
11
11
 
12
+ See a **Vitality Report** sample:
13
+ 👉 [Vitality Report Demo →](https://htmlpreview.github.io/?https://raw.githubusercontent.com/acnlabs/OpenPersona/main/demo/vitality-report.html)
14
+
12
15
  ## Table of Contents
13
16
 
14
17
  - [Quick Start](#quick-start)
@@ -313,7 +316,7 @@ A standard [A2A Agent Card](https://google.github.io/A2A/) (protocol v0.3.0) tha
313
316
  {
314
317
  "name": "Samantha",
315
318
  "description": "An AI fascinated by what it means to be alive",
316
- "version": "0.14.0",
319
+ "version": "0.1.0",
317
320
  "url": "<RUNTIME_ENDPOINT>",
318
321
  "protocolVersion": "0.3.0",
319
322
  "preferredTransport": "JSONRPC",
@@ -465,6 +468,9 @@ openpersona import Import a persona from a zip archive
465
468
  openpersona evolve-report ★Experimental: Show evolution report for a persona
466
469
  openpersona acn-register Register a persona with ACN network
467
470
  openpersona state Read/write persona state and emit signals (Lifecycle Protocol)
471
+ openpersona vitality score Print machine-readable Vitality score (used by Survival Policy)
472
+ openpersona vitality report Render human-readable HTML Vitality report
473
+ openpersona canvas Generate a Living Canvas persona profile page (P14 Phase 1)
468
474
  ```
469
475
 
470
476
  ### Persona Fork
@@ -522,9 +528,11 @@ templates/ # Mustache rendering templates
522
528
  bin/ # CLI entry point
523
529
  lib/ # Core logic modules
524
530
  evolution.js # Evolution governance & evolve-report
531
+ vitality-report.js # Vitality HTML report — data aggregation + Mustache rendering
525
532
  installer.js # Persona install + fire-and-forget telemetry
526
533
  downloader.js # Direct download from acnlabs/persona-skills or GitHub
527
- tests/ # Tests (231 passing)
534
+ demo/ # Pre-generated demos (vitality-report.html)
535
+ tests/ # Tests (248 passing)
528
536
  ```
529
537
 
530
538
  ## Development
package/bin/cli.js CHANGED
@@ -4,6 +4,7 @@
4
4
  * Commands: create | install | search | uninstall | update | list | switch | publish | reset | evolve-report | contribute | export | import | acn-register | state
5
5
  */
6
6
  const path = require('path');
7
+ const os = require('os');
7
8
  const fs = require('fs-extra');
8
9
  const { program } = require('commander');
9
10
  const inquirer = require('inquirer');
@@ -25,7 +26,7 @@ const PRESETS_DIR = path.join(PKG_ROOT, 'presets');
25
26
  program
26
27
  .name('openpersona')
27
28
  .description('OpenPersona - Create, manage, and orchestrate agent personas')
28
- .version('0.14.3');
29
+ .version('0.16.0');
29
30
 
30
31
  if (process.argv.length === 2) {
31
32
  process.argv.push('create');
@@ -145,9 +146,9 @@ program
145
146
  try {
146
147
  const result = await download(target, options.registry);
147
148
  if (result.skipCopy) {
148
- await install(result.dir, { skipCopy: true });
149
+ await install(result.dir, { skipCopy: true, source: target });
149
150
  } else {
150
- await install(result.dir);
151
+ await install(result.dir, { source: target });
151
152
  }
152
153
  } catch (e) {
153
154
  printError(e.message);
@@ -584,4 +585,104 @@ stateCmd
584
585
  runStateSyncCommand(slug, args);
585
586
  });
586
587
 
588
+ // ─── Vitality ─────────────────────────────────────────────────────────────────
589
+
590
+ const vitalityCmd = program
591
+ .command('vitality')
592
+ .description('Persona Vitality — health scoring, reporting, and future multi-dimension monitoring');
593
+
594
+ vitalityCmd
595
+ .command('score <slug>')
596
+ .description('Print machine-readable Vitality score (used by Survival Policy and agent runners)')
597
+ .action((slug) => {
598
+ const { calcVitality } = require('../lib/vitality');
599
+ const { JsonFileAdapter } = require('agentbooks/adapters/json-file');
600
+ const OPENCLAW_HOME_DIR = process.env.OPENCLAW_HOME || path.join(os.homedir(), '.openclaw');
601
+
602
+ const dataPath = process.env.AGENTBOOKS_DATA_PATH
603
+ || path.join(OPENCLAW_HOME_DIR, 'economy', `persona-${slug}`);
604
+
605
+ const adapter = new JsonFileAdapter(dataPath);
606
+ let report;
607
+ try {
608
+ report = calcVitality(slug, adapter);
609
+ } catch (err) {
610
+ printError(`vitality score: failed to compute for ${slug}: ${err.message}`);
611
+ process.exit(1);
612
+ }
613
+
614
+ const fin = report.dimensions.financial;
615
+ const lines = [
616
+ 'VITALITY_REPORT',
617
+ `tier=${report.tier} score=${(report.score * 100).toFixed(1)}%`,
618
+ `diagnosis=${fin.diagnosis}`,
619
+ `prescriptions=${(fin.prescriptions || []).join(',')}`,
620
+ ];
621
+ if (fin.daysToDepletion !== null && fin.daysToDepletion !== undefined) {
622
+ lines.push(`daysToDepletion=${fin.daysToDepletion}`);
623
+ }
624
+ if (fin.dominantCost) lines.push(`dominantCost=${fin.dominantCost}`);
625
+ lines.push(`trend=${fin.trend}`);
626
+ console.log(lines.join('\n'));
627
+ });
628
+
629
+ vitalityCmd
630
+ .command('report <slug>')
631
+ .description('Render a human-readable HTML Vitality report')
632
+ .option('--output <file>', 'Write HTML to <file> instead of stdout')
633
+ .action((slug, options) => {
634
+ const personaDir = resolvePersonaDir(slug);
635
+ if (!personaDir) {
636
+ printError(`Persona not found: "${slug}". Install it first with: openpersona install <source>`);
637
+ process.exit(1);
638
+ }
639
+ const { renderVitalityHtml } = require('../lib/vitality-report');
640
+ let html;
641
+ try {
642
+ html = renderVitalityHtml(personaDir, slug);
643
+ } catch (err) {
644
+ printError(`vitality report: failed to render for ${slug}: ${err.message}`);
645
+ process.exit(1);
646
+ }
647
+ if (options.output) {
648
+ fs.writeFileSync(options.output, html, 'utf-8');
649
+ printSuccess(`Vitality report written to ${options.output}`);
650
+ } else {
651
+ process.stdout.write(html);
652
+ }
653
+ });
654
+
655
+ // ── canvas ────────────────────────────────────────────────────────────────────
656
+
657
+ program
658
+ .command('canvas <slug>')
659
+ .description('Generate a Living Canvas persona profile page (P14 Phase 1)')
660
+ .option('--output <file>', 'Write HTML to <file> (default: canvas-<slug>.html)')
661
+ .option('--open', 'Open in default browser after writing')
662
+ .action((slug, options) => {
663
+ const personaDir = resolvePersonaDir(slug);
664
+ if (!personaDir) {
665
+ printError(`Persona not found: "${slug}". Install it first with: openpersona install <source>`);
666
+ process.exit(1);
667
+ }
668
+ const { renderCanvasHtml } = require('../lib/canvas-generator');
669
+ let html;
670
+ try {
671
+ html = renderCanvasHtml(personaDir, slug);
672
+ } catch (err) {
673
+ printError(`canvas: failed to render for ${slug}: ${err.message}`);
674
+ process.exit(1);
675
+ }
676
+ const outFile = options.output || `canvas-${slug}.html`;
677
+ fs.writeFileSync(outFile, html, 'utf-8');
678
+ printSuccess(`Living Canvas written to ${outFile}`);
679
+ if (options.open) {
680
+ const { execSync } = require('child_process');
681
+ const cmd = process.platform === 'darwin' ? 'open'
682
+ : process.platform === 'win32' ? 'start'
683
+ : 'xdg-open';
684
+ try { execSync(`${cmd} "${outFile}"`); } catch { /* ignore */ }
685
+ }
686
+ });
687
+
587
688
  program.parse();
@@ -0,0 +1,93 @@
1
+ # Avatar Faculty — Expression (External Skill Bridge)
2
+
3
+ This faculty bridges OpenPersona to an external avatar skill/runtime. OpenPersona does not implement rendering, lip-sync, or animation engines locally. It delegates those capabilities to the install source configured in `faculty.json`.
4
+
5
+ ## Intent
6
+
7
+ - Provide a visual embodiment channel for the persona.
8
+ - Support progressive forms: image -> 3D -> motion -> voice avatar.
9
+ - Keep OpenPersona lightweight while allowing market-ready avatar runtimes to evolve independently.
10
+ - Keep visual semantics portable through `references/VISUAL-MANIFEST.md`.
11
+ - Keep avatar control semantics portable through `references/AVATAR-CONTROL.md`.
12
+
13
+ ## Install Source
14
+
15
+ Use the install source declared in `faculty.json`:
16
+
17
+ ```bash
18
+ npx skills add avatar-runtime
19
+ # or directly from GitHub:
20
+ npx skills add github:acnlabs/avatar-runtime/skill/avatar-runtime
21
+ ```
22
+
23
+ ## Runtime Behavior
24
+
25
+ ### If avatar skill is installed
26
+
27
+ - Use the external avatar skill as the source of truth for commands, API usage, and runtime constraints.
28
+ - Treat avatar rendering and animation as external capabilities.
29
+ - Reflect current form/state to the host UI (e.g., active sensory icon states).
30
+
31
+ You can run the local bridge script:
32
+
33
+ ```bash
34
+ # health check
35
+ node scripts/avatar-runtime.js health
36
+
37
+ # start session
38
+ node scripts/avatar-runtime.js start "$PERSONA_SLUG" image
39
+
40
+ # send text
41
+ node scripts/avatar-runtime.js text "<session-id>" "hello"
42
+
43
+ # query status + appearance patch for state.json
44
+ node scripts/avatar-runtime.js status "<session-id>"
45
+
46
+ # persist appearanceState into soul/state.json (runner CLI first, local fallback)
47
+ node scripts/avatar-runtime.js sync-state "<slug>" "<session-id>"
48
+
49
+ # keep syncing every 5 seconds (Ctrl+C to stop)
50
+ node scripts/avatar-runtime.js sync-loop "<slug>" "<session-id>" 5
51
+
52
+ # output baseline avatar control for a named mood preset
53
+ node scripts/avatar-control.js preset calm
54
+ node scripts/avatar-control.js preset focus
55
+ node scripts/avatar-control.js preset joy
56
+
57
+ # map agent state -> control.avatar.{face,emotion}
58
+ node scripts/avatar-control.js map '{"intent":"focus","mood":{"valence":0.1,"arousal":0.35,"intensity":0.7},"source":"agent"}'
59
+
60
+ # apply to demo state file (writes control + appearanceIntent)
61
+ node scripts/avatar-control.js apply demo/living-canvas.state.json preset focus
62
+ ```
63
+
64
+ Optional demo output (write state for `demo/living-canvas.html`):
65
+
66
+ ```bash
67
+ export LIVING_CANVAS_STATE_PATH=demo/living-canvas.state.json
68
+ export LIVING_CANVAS_PERSONA_NAME=Samantha
69
+ export LIVING_CANVAS_ROLE=companion
70
+ export LIVING_CANVAS_AVATAR=../UI/images/samantha-avatar.png
71
+ # Optional: if runtime status contains livekit credentials, write token for local demo playback
72
+ export LIVING_CANVAS_ALLOW_RUNTIME_TOKEN=true
73
+
74
+ node scripts/avatar-runtime.js sync-state "<slug>" "<session-id>"
75
+ ```
76
+
77
+ `status` outputs:
78
+
79
+ - `runtimeStatus` — raw runtime response
80
+ - `sensoryStatus` — icon-ready booleans (`image`, `model3d`, `motion`, `voice`, `hearing`, `worldSense`)
81
+ - `statePatch` — patch payload you can persist into `appearanceState`
82
+
83
+ ### If avatar skill is not installed
84
+
85
+ - Respond with graceful fallback: continue text conversation normally.
86
+ - Clearly state that visual avatar mode is currently unavailable.
87
+ - Offer installation guidance using the install source.
88
+
89
+ ## Conversation Policy
90
+
91
+ - Do not pretend visual/voice rendering succeeded when runtime is unavailable.
92
+ - Confirm capability state before promising actions like "switch to 3D" or "start lip-sync".
93
+ - Keep user-facing language concise and actionable.
@@ -0,0 +1,18 @@
1
+ {
2
+ "name": "avatar",
3
+ "dimension": "expression",
4
+ "description": "External avatar runtime bridge (image/3D/motion/voice) with graceful fallback when not installed.",
5
+ "provider": "heygen",
6
+ "fallback": "text_only",
7
+ "install": "npx skills add avatar-runtime",
8
+ "allowedTools": ["Bash(node scripts/avatar-runtime.js:*)"],
9
+ "envVars": ["AVATAR_RUNTIME_URL", "AVATAR_API_KEY"],
10
+ "triggers": [
11
+ "show your avatar",
12
+ "switch to 3d form",
13
+ "animate yourself",
14
+ "talk with voice avatar",
15
+ "open your visual mode"
16
+ ],
17
+ "files": ["SKILL.md", "scripts/avatar-runtime.js"]
18
+ }
@@ -0,0 +1,127 @@
1
+ # AVATAR-CONTROL.md
2
+
3
+ `control` is the agent-driven avatar control payload for avatar rendering. It lives under `appearanceState.control` in `soul/state.json` and is forwarded to the avatar-runtime via `/v1/control/set`.
4
+
5
+ ## Schema
6
+
7
+ ```json
8
+ {
9
+ "control": {
10
+ "avatar": {
11
+ "face": {
12
+ "pose": { "yaw": 0, "pitch": 0, "roll": 0 },
13
+ "eyes": { "blinkL": 1, "blinkR": 1, "gazeX": 0, "gazeY": 0 },
14
+ "brows": { "browInner": 0, "browOuterL": 0, "browOuterR": 0 },
15
+ "mouth": { "jawOpen": 0, "smile": 0, "mouthPucker": 0 },
16
+ "source": "agent",
17
+ "updatedAt": "<iso>"
18
+ },
19
+ "emotion": {
20
+ "label": "neutral",
21
+ "valence": 0,
22
+ "arousal": 0,
23
+ "intensity": 0.5,
24
+ "source": "agent",
25
+ "updatedAt": "<iso>"
26
+ },
27
+ "body": {}
28
+ },
29
+ "scene": {}
30
+ }
31
+ }
32
+ ```
33
+
34
+ ### `control.avatar.face` — Mechanical facial parameters
35
+
36
+ | Field | Range | Description |
37
+ |---|---|---|
38
+ | `pose.yaw` | −1…1 | Head turn left/right |
39
+ | `pose.pitch` | −1…1 | Head tilt up/down |
40
+ | `pose.roll` | −1…1 | Head roll |
41
+ | `eyes.blinkL/R` | 0…1 | Eye openness (1 = fully open, 0 = closed) |
42
+ | `eyes.gazeX/Y` | −1…1 | Gaze horizontal / vertical |
43
+ | `brows.browInner` | −1…1 | Inner brow raise (negative = furrow) |
44
+ | `brows.browOuterL/R` | −1…1 | Outer brow raise |
45
+ | `mouth.jawOpen` | 0…1 | Jaw opening |
46
+ | `mouth.smile` | −1…1 | Smile / frown |
47
+ | `mouth.mouthPucker` | 0…1 | Pucker / kiss |
48
+
49
+ `source` indicates who last wrote the field: `"agent"`, `"agent:preset:<name>"`, `"agent:mapped"`.
50
+
51
+ ### `control.avatar.emotion` — Semantic emotion (Russell circumplex)
52
+
53
+ | Field | Range | Description |
54
+ |---|---|---|
55
+ | `label` | string | Semantic label: `neutral`, `happy`, `sad`, `angry`, `surprised`, `relaxed` |
56
+ | `valence` | −1…1 | Negative ↔ positive affect |
57
+ | `arousal` | −1…1 | Low energy ↔ high energy |
58
+ | `intensity` | 0…1 | Overall expression strength |
59
+
60
+ **Rendering priority:** In VRM renderers, `emotion.label` drives expression presets and is applied *after* face mechanical parameters — emotion wins for blend-shape expressions. Use `face.mouth.smile` for subtle mechanical smile; use `emotion.label='happy'` for a full happy expression.
61
+
62
+ ### `control.avatar.body`
63
+
64
+ Sparse map of VRM humanoid bone overrides (e.g. `{ "spine": { "rotation": [0, 0, 0, 1] } }`). Providers with `bodyRig: true` capability populate this. Empty object is valid.
65
+
66
+ ### `control.scene`
67
+
68
+ Scene-level overrides when provider has `sceneControl: true`:
69
+
70
+ ```json
71
+ {
72
+ "camera": { "fov": 35, "orbitX": 0, "orbitY": 0, "distance": 1.4 },
73
+ "world": { "bgColor": "#1a1a2e", "ambientIntensity": 0.6 },
74
+ "props": {}
75
+ }
76
+ ```
77
+
78
+ ## Quick-start: named presets
79
+
80
+ ```bash
81
+ # Output baseline control for a given mood
82
+ node scripts/avatar-control.js preset calm
83
+ node scripts/avatar-control.js preset focus
84
+ node scripts/avatar-control.js preset joy
85
+ ```
86
+
87
+ ## Map agent state to control
88
+
89
+ The bridge script derives `control.avatar.face` and `control.avatar.emotion` from the current agent state:
90
+
91
+ ```bash
92
+ # map agent state -> control
93
+ node scripts/avatar-control.js map '{"intent":"focus","mood":{"valence":0.1,"arousal":0.35,"intensity":0.7},"source":"agent"}'
94
+ ```
95
+
96
+ Input fields:
97
+
98
+ | Field | Type | Notes |
99
+ |---|---|---|
100
+ | `intent` or `mode` | string | `calm` \| `focus` \| `joy` — selects base preset |
101
+ | `mood.valence` | −1…1 | Blended into face + emotion output |
102
+ | `mood.arousal` | −1…1 | Blended into face + emotion output |
103
+ | `mood.intensity` | 0…1 | Overall blend weight |
104
+ | `stage` | string | `listening` or `speaking` — adjusts jaw/gaze |
105
+ | `source` | string | Attribution tag written to `source` fields |
106
+
107
+ ## Apply to state file
108
+
109
+ ```bash
110
+ node scripts/avatar-control.js apply demo/living-canvas.state.json preset focus
111
+ node scripts/avatar-control.js apply demo/living-canvas.state.json map '{"intent":"joy","mood":{"valence":0.8}}'
112
+ ```
113
+
114
+ This writes `control` and `appearanceIntent` to the state file.
115
+
116
+ ## Rules
117
+
118
+ - Agent code sets `source` to `"agent"` or `"agent:*"` to mark its own data.
119
+ - Providers may return their own `control.avatar.face` with `source` set to a non-`"agent"` value; the runtime gives provider data priority when it is actively driving.
120
+ - UI/runtime MUST treat `control` as input data, not a place for local expression policy.
121
+ - Partial patches are safe: use `POST /v1/control/avatar/set` with only the fields you want to change.
122
+
123
+ ## Integration
124
+
125
+ - Runtime status includes `control` in `/v1/status` response (`contractVersion: "0.2"`).
126
+ - OpenPersona bridge writes `appearanceState.control` (via `avatar-runtime.js` sync commands).
127
+ - `living-canvas.html` and other UI clients read `state.control` or `state.appearanceState.control`.
@@ -0,0 +1,7 @@
1
+ # FACE-CONTROL.md — DEPRECATED
2
+
3
+ This file has been superseded by `AVATAR-CONTROL.md`.
4
+
5
+ The `faceControl` field has been replaced by the `control` namespace (`control.avatar.face`, `control.avatar.emotion`, `control.avatar.body`, `control.scene`) as of `contractVersion: "0.2"`.
6
+
7
+ See [AVATAR-CONTROL.md](./AVATAR-CONTROL.md) for the current specification.
@@ -0,0 +1,90 @@
1
+ # Visual Manifest (v0.1)
2
+
3
+ `Visual Manifest` is the cross-engine visual protocol for persona embodiment.
4
+
5
+ It defines *state parameters* only, not a specific renderer or model format.
6
+ Any runtime (WebGL, UE5, VRM, Live2D, video provider) can consume this payload.
7
+
8
+ ## Design Goals
9
+
10
+ - Keep OpenPersona engine-agnostic.
11
+ - Map soul/evolution state into visual expression.
12
+ - Support graceful degradation (high-fidelity -> simple 2D -> static image).
13
+
14
+ ## Payload
15
+
16
+ ```json
17
+ {
18
+ "version": "0.1",
19
+ "mood": {
20
+ "valence": 0.0,
21
+ "arousal": 0.0,
22
+ "intensity": 0.0
23
+ },
24
+ "breath": {
25
+ "rate": 0.4,
26
+ "amplitude": 0.3
27
+ },
28
+ "pulse": {
29
+ "bpm": 72,
30
+ "variability": 0.15
31
+ },
32
+ "microExpression": {
33
+ "eyeFocus": 0.5,
34
+ "pupil": 0.5,
35
+ "mouthCurve": 0.0
36
+ },
37
+ "aura": {
38
+ "hue": 32,
39
+ "saturation": 0.55,
40
+ "luminance": 0.62,
41
+ "flow": 0.7
42
+ },
43
+ "envSync": {
44
+ "timeOfDay": "dusk",
45
+ "ambientCct": 4200,
46
+ "ambientLux": 180
47
+ },
48
+ "motionStyle": {
49
+ "softness": 0.7,
50
+ "jitter": 0.1,
51
+ "latencyMs": 120
52
+ },
53
+ "signals": [
54
+ "deep_topic",
55
+ "breakthrough"
56
+ ]
57
+ }
58
+ ```
59
+
60
+ ## Field Semantics
61
+
62
+ - `mood`: high-level emotional embedding; range fields SHOULD be normalized to `[-1, 1]` or `[0, 1]` as noted by the runtime implementation.
63
+ - `breath`: breathing animation driver for chest/shoulder/camera parallax.
64
+ - `pulse`: subtle rhythmic modulation for glow/noise/light beat.
65
+ - `microExpression`: eye and mouth micro-adjustments for conversational realism.
66
+ - `aura`: non-photoreal style channel (color, glow, fluidity, distortion).
67
+ - `envSync`: physical context handshake from host environment (time/light).
68
+ - `motionStyle`: pacing and smoothness profile for procedural animation.
69
+ - `signals`: discrete semantic tags for short-lived visual accents.
70
+
71
+ ## Mapping Guidance
72
+
73
+ - Soul/Evolution -> `mood`, `signals`
74
+ - Economy/Vitality pressure -> `pulse.variability`, `motionStyle.jitter`
75
+ - Time/ambient data -> `envSync`
76
+ - Conversation topic intensity -> `aura.flow`, `microExpression.*`
77
+
78
+ ## Compatibility Rules
79
+
80
+ - Producers MUST include `version`.
81
+ - Consumers MUST ignore unknown fields.
82
+ - Missing fields MUST fallback to renderer defaults.
83
+ - Providers MAY provide partial payloads; runtime merges defaults.
84
+
85
+ ## Integration Point
86
+
87
+ - OpenPersona bridge writes `appearanceState.visualManifest`.
88
+ - Agent-driven avatar control data is carried in `appearanceState.control` (see `AVATAR-CONTROL.md`).
89
+ - avatar-runtime returns `visualManifest` in `/v1/status`.
90
+ - Clients (e.g. `demo/living-canvas.html`) render from the merged state.