claude-presentation-master 8.0.0 → 8.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/bin/cli.js CHANGED
@@ -20,7 +20,7 @@ const args = process.argv.slice(2);
20
20
 
21
21
  // Help text
22
22
  const helpText = `
23
- Claude Presentation Master v7.2.0
23
+ Claude Presentation Master v8.1.0
24
24
  Generate world-class presentations using expert methodologies
25
25
 
26
26
  100% FREE • Zero API Keys • Runs Locally • KB-Driven Quality
@@ -30,11 +30,16 @@ USAGE:
30
30
  cpm <command> [options]
31
31
 
32
32
  COMMANDS:
33
- generate <input> Generate presentation from input file
33
+ generate [input] Generate presentation (auto-detects file if omitted)
34
34
  validate <file> Validate an existing HTML presentation
35
35
  visual-qa <file> Run HIGH-LEVEL visual quality evaluation (Playwright)
36
36
  info Show package information
37
37
 
38
+ AUTO-DETECTION:
39
+ If no input file is specified, the CLI scans your current directory for:
40
+ - Priority files: README.md, content.md, presentation.md, slides.md, pitch.md, deck.md
41
+ - Any .md, .txt, .json, .yaml file
42
+
38
43
  OPTIONS:
39
44
  -o, --output <dir> Output directory (default: ./output)
40
45
  -m, --mode <mode> Presentation mode: keynote or business (default: keynote)
@@ -55,17 +60,20 @@ OPTIONS:
55
60
  -v, --version Show version number
56
61
 
57
62
  EXAMPLES:
58
- # Quick one-off: Generate keynote from markdown (npx - no install needed)
63
+ # Auto-detect content in current directory
64
+ npx claude-presentation-master generate
65
+
66
+ # Or specify a file
59
67
  npx claude-presentation-master generate notes.md -m keynote -f html
60
68
 
61
69
  # Generate consulting deck as PowerPoint
62
70
  cpm generate strategy.md -m business --type consulting_deck -f pptx
63
71
 
64
72
  # Investment banking pitchbook
65
- cpm generate deal-memo.md --type investment_banking -f pptx -o ./pitchbook
73
+ cpm generate --type investment_banking -f pptx -o ./pitchbook
66
74
 
67
75
  # Generate both formats
68
- cpm generate quarterly-review.md -m business -f html,pptx -o ./slides
76
+ cpm generate -f html,pptx -o ./slides
69
77
 
70
78
  # Validate an existing presentation
71
79
  cpm validate output/presentation.html -m keynote
@@ -89,7 +97,7 @@ For more information: https://github.com/Stuinfla/claude-presentation-master
89
97
  `;
90
98
 
91
99
  // Version
92
- const version = '7.2.0';
100
+ const version = '8.1.0';
93
101
 
94
102
  // Parse arguments
95
103
  function parseArgs(args) {
@@ -194,6 +202,49 @@ function parseArgs(args) {
194
202
  return options;
195
203
  }
196
204
 
205
+ /**
206
+ * Auto-detect a content file in the given directory.
207
+ * Prioritizes: README.md, content.md, presentation.md, slides.md, pitch.md, deck.md,
+ * then any .md, .markdown, .txt, .json, .yaml, .yml file
208
+ */
209
+ function autoDetectContentFile(directory) {
210
+ const contentExtensions = ['.md', '.markdown', '.txt', '.json', '.yaml', '.yml'];
211
+ const priorityFiles = [
212
+ 'README.md', 'readme.md',
213
+ 'content.md', 'Content.md',
214
+ 'presentation.md', 'Presentation.md',
215
+ 'slides.md', 'Slides.md',
216
+ 'pitch.md', 'Pitch.md',
217
+ 'deck.md', 'Deck.md'
218
+ ];
219
+
220
+ // First, check for priority files
221
+ for (const file of priorityFiles) {
222
+ const filePath = join(directory, file);
223
+ if (existsSync(filePath)) {
224
+ return file;
225
+ }
226
+ }
227
+
228
+ // Then scan for any content file
229
+ try {
230
+ const files = readdirSync(directory);
231
+ for (const file of files) {
232
+ const ext = extname(file).toLowerCase();
233
+ if (contentExtensions.includes(ext)) {
234
+ // Skip node_modules, hidden files, package files
235
+ if (file.startsWith('.') || file.startsWith('_')) continue;
236
+ if (file === 'package.json' || file === 'package-lock.json') continue;
237
+ if (file === 'tsconfig.json' || file === 'jest.config.js') continue;
238
+ return file;
239
+ }
240
+ }
241
+ } catch (e) {
242
+ // Directory not readable
243
+ }
244
+
245
+ return null;
246
+ }
247
+
197
248
  // Determine content type from file extension
198
249
  function getContentType(filename) {
199
250
  const ext = extname(filename).toLowerCase();
@@ -293,8 +344,19 @@ Usage:
293
344
  process.exit(1);
294
345
  }
295
346
 
296
- // Validate input
297
- if (!options.input) {
347
+ // Auto-detect input file if not specified
348
+ if (!options.input && options.command === 'generate') {
349
+ const detected = autoDetectContentFile(process.cwd());
350
+ if (detected) {
351
+ options.input = detected;
352
+ console.log(`📁 Auto-detected: ${detected}`);
353
+ } else {
354
+ console.error(`Error: No content file found in current directory.`);
355
+ console.error(`Looked for: *.md, *.txt, *.json, *.yaml, *.yml`);
356
+ console.error(`\nEither create a content file or specify one: cpm generate <file>`);
357
+ process.exit(1);
358
+ }
359
+ } else if (!options.input) {
298
360
  console.error(`Error: No input file specified for "${options.command}" command.`);
299
361
  process.exit(1);
300
362
  }
package/dist/index.d.mts CHANGED
@@ -1579,22 +1579,48 @@ declare class QAEngine {
1579
1579
  }
1580
1580
 
1581
1581
  /**
1582
- * VisualQualityEvaluator - Evaluates presentations like a human expert.
1582
+ * VisualQualityEvaluator - EXPERT-LEVEL Presentation Evaluation
1583
1583
  *
1584
- * THIS IS NOT A LINTER. This evaluates:
1585
- * - Is this slide visually compelling?
1586
- * - Does the presentation tell a coherent story?
1587
- * - Would an executive be impressed?
1588
- * - Does it look like McKinsey made it?
1584
+ * THIS IS NOT A LINTER. This evaluates like a world-class presentation coach:
1589
1585
  *
1590
- * Uses Playwright to render and screenshot each slide, then evaluates
1591
- * the ACTUAL visual output, not just data structures.
1586
+ * EXPERT PRINCIPLES APPLIED:
1587
+ * 1. Nancy Duarte's Glance Test - Can you understand each slide in 3 seconds?
1588
+ * 2. Chris Anderson's One Idea Rule - Does each slide communicate ONE message?
1589
+ * 3. Edward Tufte's Data-Ink Ratio - Does every element serve a purpose?
1590
+ * 4. Barbara Minto's Pyramid Principle - Is the answer first, then support?
1591
+ * 5. Garr Reynolds' Signal to Noise - Is decoration minimized?
1592
+ * 6. Carmine Gallo's Rule of Three - Are key points structured effectively?
1593
+ * 7. Cole Knaflic's Context First - Does data have proper context?
1592
1594
  *
1593
- * @version 9.0.0
1595
+ * WHAT EXPERTS WOULD CHECK:
1596
+ * - Does the deck accomplish its purpose? (Persuade, inform, inspire)
1597
+ * - Does the narrative hang together? (Story arc, tension, resolution)
1598
+ * - Is each slide communicative? (Message clear in 3 seconds)
1599
+ * - Are visuals used intelligently? (Supporting message, not decorating)
1600
+ * - Is typography/layout professional? (Hierarchy, spacing, balance)
1601
+ * - Would McKinsey show this to a Fortune 500 CEO?
1602
+ *
1603
+ * CRITICAL VISUAL FAILURES (immediate score penalty):
1604
+ * - Truncated text (content cut off)
1605
+ * - Empty slides (missing content)
1606
+ * - Broken layouts (overflow, misalignment)
1607
+ * - Low contrast (illegible text)
1608
+ *
1609
+ * @version 10.0.0 - Expert-Driven QA
1594
1610
  */
1595
1611
  interface SlideVisualScore {
1596
1612
  slideIndex: number;
1597
1613
  slideType: string;
1614
+ glanceTest: number;
1615
+ glanceTestNotes: string;
1616
+ oneIdea: number;
1617
+ oneIdeaNotes: string;
1618
+ dataInkRatio: number;
1619
+ dataInkNotes: string;
1620
+ professionalExecution: number;
1621
+ professionalExecutionNotes: string;
1622
+ hasCriticalFailure: boolean;
1623
+ criticalFailures: string[];
1598
1624
  visualImpact: number;
1599
1625
  visualImpactNotes: string;
1600
1626
  contentClarity: number;
@@ -1660,7 +1686,17 @@ declare class VisualQualityEvaluator {
1660
1686
  private closeBrowser;
1661
1687
  private getSlideCount;
1662
1688
  private evaluateSlide;
1689
+ /**
1690
+ * EXPERT-LEVEL SLIDE SCORING
1691
+ *
1692
+ * This evaluates each slide like Nancy Duarte, Carmine Gallo, or a McKinsey partner would.
1693
+ * It's not about rules - it's about whether the slide WORKS.
1694
+ */
1663
1695
  private scoreSlide;
1696
+ /**
1697
+ * Create a failed slide score (for critical failures)
1698
+ */
1699
+ private createFailedSlideScore;
1664
1700
  private inferSlideType;
1665
1701
  private evaluateNarrativeFlow;
1666
1702
  private evaluateVisualConsistency;
package/dist/index.d.ts CHANGED
@@ -1579,22 +1579,48 @@ declare class QAEngine {
1579
1579
  }
1580
1580
 
1581
1581
  /**
1582
- * VisualQualityEvaluator - Evaluates presentations like a human expert.
1582
+ * VisualQualityEvaluator - EXPERT-LEVEL Presentation Evaluation
1583
1583
  *
1584
- * THIS IS NOT A LINTER. This evaluates:
1585
- * - Is this slide visually compelling?
1586
- * - Does the presentation tell a coherent story?
1587
- * - Would an executive be impressed?
1588
- * - Does it look like McKinsey made it?
1584
+ * THIS IS NOT A LINTER. This evaluates like a world-class presentation coach:
1589
1585
  *
1590
- * Uses Playwright to render and screenshot each slide, then evaluates
1591
- * the ACTUAL visual output, not just data structures.
1586
+ * EXPERT PRINCIPLES APPLIED:
1587
+ * 1. Nancy Duarte's Glance Test - Can you understand each slide in 3 seconds?
1588
+ * 2. Chris Anderson's One Idea Rule - Does each slide communicate ONE message?
1589
+ * 3. Edward Tufte's Data-Ink Ratio - Does every element serve a purpose?
1590
+ * 4. Barbara Minto's Pyramid Principle - Is the answer first, then support?
1591
+ * 5. Garr Reynolds' Signal to Noise - Is decoration minimized?
1592
+ * 6. Carmine Gallo's Rule of Three - Are key points structured effectively?
1593
+ * 7. Cole Knaflic's Context First - Does data have proper context?
1592
1594
  *
1593
- * @version 9.0.0
1595
+ * WHAT EXPERTS WOULD CHECK:
1596
+ * - Does the deck accomplish its purpose? (Persuade, inform, inspire)
1597
+ * - Does the narrative hang together? (Story arc, tension, resolution)
1598
+ * - Is each slide communicative? (Message clear in 3 seconds)
1599
+ * - Are visuals used intelligently? (Supporting message, not decorating)
1600
+ * - Is typography/layout professional? (Hierarchy, spacing, balance)
1601
+ * - Would McKinsey show this to a Fortune 500 CEO?
1602
+ *
1603
+ * CRITICAL VISUAL FAILURES (immediate score penalty):
1604
+ * - Truncated text (content cut off)
1605
+ * - Empty slides (missing content)
1606
+ * - Broken layouts (overflow, misalignment)
1607
+ * - Low contrast (illegible text)
1608
+ *
1609
+ * @version 10.0.0 - Expert-Driven QA
1594
1610
  */
1595
1611
  interface SlideVisualScore {
1596
1612
  slideIndex: number;
1597
1613
  slideType: string;
1614
+ glanceTest: number;
1615
+ glanceTestNotes: string;
1616
+ oneIdea: number;
1617
+ oneIdeaNotes: string;
1618
+ dataInkRatio: number;
1619
+ dataInkNotes: string;
1620
+ professionalExecution: number;
1621
+ professionalExecutionNotes: string;
1622
+ hasCriticalFailure: boolean;
1623
+ criticalFailures: string[];
1598
1624
  visualImpact: number;
1599
1625
  visualImpactNotes: string;
1600
1626
  contentClarity: number;
@@ -1660,7 +1686,17 @@ declare class VisualQualityEvaluator {
1660
1686
  private closeBrowser;
1661
1687
  private getSlideCount;
1662
1688
  private evaluateSlide;
1689
+ /**
1690
+ * EXPERT-LEVEL SLIDE SCORING
1691
+ *
1692
+ * This evaluates each slide like Nancy Duarte, Carmine Gallo, or a McKinsey partner would.
1693
+ * It's not about rules - it's about whether the slide WORKS.
1694
+ */
1663
1695
  private scoreSlide;
1696
+ /**
1697
+ * Create a failed slide score (for critical failures)
1698
+ */
1699
+ private createFailedSlideScore;
1664
1700
  private inferSlideType;
1665
1701
  private evaluateNarrativeFlow;
1666
1702
  private evaluateVisualConsistency;