videonut 1.2.7 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +272 -272
- package/USER_GUIDE.md +90 -90
- package/agents/core/eic.md +771 -771
- package/agents/creative/director.md +246 -246
- package/agents/creative/scriptwriter.md +207 -207
- package/agents/research/investigator.md +394 -394
- package/agents/technical/archivist.md +288 -288
- package/agents/technical/scavenger.md +247 -247
- package/bin/videonut.js +37 -21
- package/config.yaml +61 -61
- package/docs/scriptwriter.md +42 -42
- package/file_validator.py +186 -186
- package/memory/short_term/asset_manifest.md +64 -64
- package/memory/short_term/investigation_dossier.md +31 -31
- package/memory/short_term/master_script.md +51 -51
- package/package.json +61 -64
- package/requirements.txt +8 -8
- package/setup.js +33 -15
- package/tools/check_env.py +76 -76
- package/tools/downloaders/caption_reader.py +237 -237
- package/tools/downloaders/clip_grabber.py +82 -82
- package/tools/downloaders/image_grabber.py +105 -105
- package/tools/downloaders/pdf_reader.py +163 -163
- package/tools/downloaders/screenshotter.py +58 -58
- package/tools/downloaders/web_reader.py +69 -69
- package/tools/validators/link_checker.py +45 -45
- package/workflow_orchestrator.py +336 -336
- package/.claude/commands/archivist.toml +0 -12
- package/.claude/commands/director.toml +0 -12
- package/.claude/commands/eic.toml +0 -12
- package/.claude/commands/investigator.toml +0 -12
- package/.claude/commands/prompt.toml +0 -12
- package/.claude/commands/scavenger.toml +0 -12
- package/.claude/commands/scout.toml +0 -12
- package/.claude/commands/scriptwriter.toml +0 -12
- package/.claude/commands/seo.toml +0 -12
- package/.claude/commands/thumbnail.toml +0 -12
- package/.claude/commands/topic_scout.toml +0 -12
- package/.gemini/commands/archivist.toml +0 -12
- package/.gemini/commands/director.toml +0 -12
- package/.gemini/commands/eic.toml +0 -12
- package/.gemini/commands/investigator.toml +0 -12
- package/.gemini/commands/prompt.toml +0 -12
- package/.gemini/commands/scavenger.toml +0 -12
- package/.gemini/commands/scout.toml +0 -12
- package/.gemini/commands/scriptwriter.toml +0 -12
- package/.gemini/commands/seo.toml +0 -12
- package/.gemini/commands/thumbnail.toml +0 -12
- package/.gemini/commands/topic_scout.toml +0 -12
- package/.qwen/commands/archivist.toml +0 -12
- package/.qwen/commands/director.toml +0 -12
- package/.qwen/commands/eic.toml +0 -12
- package/.qwen/commands/investigator.toml +0 -12
- package/.qwen/commands/prompt.toml +0 -12
- package/.qwen/commands/scavenger.toml +0 -12
- package/.qwen/commands/scout.toml +0 -12
- package/.qwen/commands/scriptwriter.toml +0 -12
- package/.qwen/commands/seo.toml +0 -12
- package/.qwen/commands/thumbnail.toml +0 -12
- package/.qwen/commands/topic_scout.toml +0 -12
|
@@ -1,64 +1,64 @@
|
|
|
1
|
-
# Asset Manifest: The Transformer Paradox
|
|
2
|
-
|
|
3
|
-
**Project:** 2017-TRANS-AI
|
|
4
|
-
**Status:** Pending Download
|
|
5
|
-
**Hunter:** Scavenger
|
|
6
|
-
|
|
7
|
-
---
|
|
8
|
-
|
|
9
|
-
## Visual Assets
|
|
10
|
-
|
|
11
|
-
### Scene 1: The Lab
|
|
12
|
-
* **Asset ID:** V01-PAPER-HEADER
|
|
13
|
-
* **Description:** Header of the paper "Attention Is All You Need".
|
|
14
|
-
* **Search Query:** "Attention Is All You Need paper pdf header 2017"
|
|
15
|
-
* **Type:** Image (Screenshot)
|
|
16
|
-
|
|
17
|
-
* **Asset ID:** V02-GOOGLE-OFFICE-2017
|
|
18
|
-
* **Description:** Google Brain office aesthetic, cluttered desk, code on screen.
|
|
19
|
-
* **Search Query:** "Google Brain office interior 2017 messy desk code monitor stock"
|
|
20
|
-
* **Type:** Stock Image/B-Roll
|
|
21
|
-
|
|
22
|
-
### Scene 2: The Publication
|
|
23
|
-
* **Asset ID:** V03-TWITTER-SCROLL
|
|
24
|
-
* **Description:** Fast scrolling social media feed, tech twitter reaction.
|
|
25
|
-
* **Search Query:** "fast scrolling twitter feed stock video blue light"
|
|
26
|
-
* **Type:** Stock Video
|
|
27
|
-
|
|
28
|
-
### Scene 3: The Boardroom (Google)
|
|
29
|
-
* **Asset ID:** V04-BOARDROOM-TENSION
|
|
30
|
-
* **Description:** Sterile glass meeting room, executives arguing, corporate silhouette.
|
|
31
|
-
* **Search Query:** "corporate boardroom argument glass walls cinematic stock video"
|
|
32
|
-
* **Type:** Stock Video
|
|
33
|
-
|
|
34
|
-
### Scene 4: The Garage (OpenAI)
|
|
35
|
-
* **Asset ID:** V05-OPENAI-EARLY-DAYS
|
|
36
|
-
* **Description:** OpenAI early team working, Ilya Sutskever/Greg Brockman candid, hackathon vibe.
|
|
37
|
-
* **Search Query:** "OpenAI team photo 2016 2017 Ilya Sutskever Greg Brockman coding"
|
|
38
|
-
* **Type:** Image
|
|
39
|
-
|
|
40
|
-
### Scene 5: The Scaling Laws
|
|
41
|
-
* **Asset ID:** V06-SCALING-GRAPH
|
|
42
|
-
* **Description:** Graph from "Scaling Laws for Neural Language Models" (2020).
|
|
43
|
-
* **Search Query:** "OpenAI Scaling Laws for Neural Language Models graph loss vs compute"
|
|
44
|
-
* **Type:** Image
|
|
45
|
-
|
|
46
|
-
* **Asset ID:** V07-RACE-CAR
|
|
47
|
-
* **Description:** Formula 1 car speeding past a slow bus.
|
|
48
|
-
* **Search Query:** "Formula 1 race car overtaking bus stock video"
|
|
49
|
-
* **Type:** Stock Video (Metaphor)
|
|
50
|
-
|
|
51
|
-
### Scene 6: The Empty Chairs (The Authors)
|
|
52
|
-
* **Asset ID:** V08-AUTHORS-COLLAGE
|
|
53
|
-
* **Description:** Photos of the 8 authors: Vaswani, Shazeer, Parmar, Uszkoreit, Jones, Gomez, Kaiser, Polosukhin.
|
|
54
|
-
* **Search Query:** "Attention Is All You Need authors photo collage"
|
|
55
|
-
* **Type:** Image
|
|
56
|
-
|
|
57
|
-
### Scene 7: The Conclusion
|
|
58
|
-
* **Asset ID:** V09-OPENAI-LOGO-SHIFT
|
|
59
|
-
* **Description:** OpenAI logo (Green/White) fading to Black/Closed.
|
|
60
|
-
* **Search Query:** "OpenAI logo animation green to black"
|
|
61
|
-
* **Type:** Motion Graphic/Image
|
|
62
|
-
|
|
63
|
-
---
|
|
64
|
-
*End of Manifest*
|
|
1
|
+
# Asset Manifest: The Transformer Paradox
|
|
2
|
+
|
|
3
|
+
**Project:** 2017-TRANS-AI
|
|
4
|
+
**Status:** Pending Download
|
|
5
|
+
**Hunter:** Scavenger
|
|
6
|
+
|
|
7
|
+
---
|
|
8
|
+
|
|
9
|
+
## Visual Assets
|
|
10
|
+
|
|
11
|
+
### Scene 1: The Lab
|
|
12
|
+
* **Asset ID:** V01-PAPER-HEADER
|
|
13
|
+
* **Description:** Header of the paper "Attention Is All You Need".
|
|
14
|
+
* **Search Query:** "Attention Is All You Need paper pdf header 2017"
|
|
15
|
+
* **Type:** Image (Screenshot)
|
|
16
|
+
|
|
17
|
+
* **Asset ID:** V02-GOOGLE-OFFICE-2017
|
|
18
|
+
* **Description:** Google Brain office aesthetic, cluttered desk, code on screen.
|
|
19
|
+
* **Search Query:** "Google Brain office interior 2017 messy desk code monitor stock"
|
|
20
|
+
* **Type:** Stock Image/B-Roll
|
|
21
|
+
|
|
22
|
+
### Scene 2: The Publication
|
|
23
|
+
* **Asset ID:** V03-TWITTER-SCROLL
|
|
24
|
+
* **Description:** Fast scrolling social media feed, tech twitter reaction.
|
|
25
|
+
* **Search Query:** "fast scrolling twitter feed stock video blue light"
|
|
26
|
+
* **Type:** Stock Video
|
|
27
|
+
|
|
28
|
+
### Scene 3: The Boardroom (Google)
|
|
29
|
+
* **Asset ID:** V04-BOARDROOM-TENSION
|
|
30
|
+
* **Description:** Sterile glass meeting room, executives arguing, corporate silhouette.
|
|
31
|
+
* **Search Query:** "corporate boardroom argument glass walls cinematic stock video"
|
|
32
|
+
* **Type:** Stock Video
|
|
33
|
+
|
|
34
|
+
### Scene 4: The Garage (OpenAI)
|
|
35
|
+
* **Asset ID:** V05-OPENAI-EARLY-DAYS
|
|
36
|
+
* **Description:** OpenAI early team working, Ilya Sutskever/Greg Brockman candid, hackathon vibe.
|
|
37
|
+
* **Search Query:** "OpenAI team photo 2016 2017 Ilya Sutskever Greg Brockman coding"
|
|
38
|
+
* **Type:** Image
|
|
39
|
+
|
|
40
|
+
### Scene 5: The Scaling Laws
|
|
41
|
+
* **Asset ID:** V06-SCALING-GRAPH
|
|
42
|
+
* **Description:** Graph from "Scaling Laws for Neural Language Models" (2020).
|
|
43
|
+
* **Search Query:** "OpenAI Scaling Laws for Neural Language Models graph loss vs compute"
|
|
44
|
+
* **Type:** Image
|
|
45
|
+
|
|
46
|
+
* **Asset ID:** V07-RACE-CAR
|
|
47
|
+
* **Description:** Formula 1 car speeding past a slow bus.
|
|
48
|
+
* **Search Query:** "Formula 1 race car overtaking bus stock video"
|
|
49
|
+
* **Type:** Stock Video (Metaphor)
|
|
50
|
+
|
|
51
|
+
### Scene 6: The Empty Chairs (The Authors)
|
|
52
|
+
* **Asset ID:** V08-AUTHORS-COLLAGE
|
|
53
|
+
* **Description:** Photos of the 8 authors: Vaswani, Shazeer, Parmar, Uszkoreit, Jones, Gomez, Kaiser, Polosukhin.
|
|
54
|
+
* **Search Query:** "Attention Is All You Need authors photo collage"
|
|
55
|
+
* **Type:** Image
|
|
56
|
+
|
|
57
|
+
### Scene 7: The Conclusion
|
|
58
|
+
* **Asset ID:** V09-OPENAI-LOGO-SHIFT
|
|
59
|
+
* **Description:** OpenAI logo (Green/White) fading to Black/Closed.
|
|
60
|
+
* **Search Query:** "OpenAI logo animation green to black"
|
|
61
|
+
* **Type:** Motion Graphic/Image
|
|
62
|
+
|
|
63
|
+
---
|
|
64
|
+
*End of Manifest*
|
|
@@ -1,31 +1,31 @@
|
|
|
1
|
-
# Truth Dossier: The Transformer Paradox
|
|
2
|
-
|
|
3
|
-
## Case File: 2017-TRANS-AI
|
|
4
|
-
**Subject:** How OpenAI Weaponized Google's "Attention Is All You Need"
|
|
5
|
-
**Investigator:** Sherlock
|
|
6
|
-
**Date:** 2025-12-28
|
|
7
|
-
|
|
8
|
-
## 1. The Sweep (Context)
|
|
9
|
-
In 2017, Google Brain researchers released the seminal paper "Attention Is All You Need," introducing the **Transformer** architecture. At the time, it was a breakthrough for machine translation, replacing the sequential processing of RNNs/LSTMs with a parallel "self-attention" mechanism.
|
|
10
|
-
* **The Official Narrative:** Google published this to advance the field of NLP (Natural Language Processing) and improve Google Translate.
|
|
11
|
-
* **The Reality:** They handed the blueprint for AGI (Artificial General Intelligence) to the world on a silver platter.
|
|
12
|
-
|
|
13
|
-
## 2. The Pivot (The Turning Point)
|
|
14
|
-
While Google integrated Transformers into Search (BERT) and research models (T5, Meena), they remained cautious about releasing generative agents due to "reputational risk" and "safety alignment."
|
|
15
|
-
**OpenAI saw what Google missed:** The Transformer wasn't just good for translation; it was the *only* architecture that scaled predictably with compute.
|
|
16
|
-
* **The Shift:** OpenAI abandoned their work on LSTMs (which they had heavily invested in) and went all-in on the Transformer Decoder.
|
|
17
|
-
* **The Bet:** They hypothesized that simply throwing massive data and compute at this specific architecture (GPT - Generative Pre-trained Transformer) would yield emergent reasoning capabilities.
|
|
18
|
-
|
|
19
|
-
## 3. The Deep Dive (The Hidden Agenda)
|
|
20
|
-
Why did the underdog (OpenAI) beat the giant (Google) using the giant's own sword?
|
|
21
|
-
* **The Scaling Laws:** In 2020, OpenAI published "Scaling Laws for Neural Language Models." They cracked the code: Performance depends mostly on scale (parameters, data, compute), not just architectural tweaking. Google knew this but was paralyzed by the Innovator's Dilemma—releasing a chatbot might cannibalize their Search ad revenue.
|
|
22
|
-
* **The Brain Drain:** Of the 8 authors of "Attention Is All You Need," *none* remain at Google. Many founded competitors (Cohere, Character.AI) or joined the ecosystem OpenAI cultivated. OpenAI capitalized on this fragmentation.
|
|
23
|
-
* **Closed vs. Open:** OpenAI started as "Open" but realized the Transformer's value lay in the *weights* and the *data recipe*, closing their doors (GPT-3 onwards) just as they perfected Google's invention.
|
|
24
|
-
|
|
25
|
-
## 4. Conclusion (The Verdict)
|
|
26
|
-
OpenAI didn't "steal" the technology; they **recognized its potential for domination** while Google treated it as an academic curiosity. Google built the engine; OpenAI built the race car and drove it off the lot while Google was still checking the tire pressure.
|
|
27
|
-
|
|
28
|
-
**Motive Uncovered:** OpenAI's usage wasn't just adoption; it was an aggressive strategy to exploit a scalability loophole that Google was too bureaucratic to commercialize.
|
|
29
|
-
|
|
30
|
-
---
|
|
31
|
-
*End of Dossier*
|
|
1
|
+
# Truth Dossier: The Transformer Paradox
|
|
2
|
+
|
|
3
|
+
## Case File: 2017-TRANS-AI
|
|
4
|
+
**Subject:** How OpenAI Weaponized Google's "Attention Is All You Need"
|
|
5
|
+
**Investigator:** Sherlock
|
|
6
|
+
**Date:** 2025-12-28
|
|
7
|
+
|
|
8
|
+
## 1. The Sweep (Context)
|
|
9
|
+
In 2017, Google Brain researchers released the seminal paper "Attention Is All You Need," introducing the **Transformer** architecture. At the time, it was a breakthrough for machine translation, replacing the sequential processing of RNNs/LSTMs with a parallel "self-attention" mechanism.
|
|
10
|
+
* **The Official Narrative:** Google published this to advance the field of NLP (Natural Language Processing) and improve Google Translate.
|
|
11
|
+
* **The Reality:** They handed the blueprint for AGI (Artificial General Intelligence) to the world on a silver platter.
|
|
12
|
+
|
|
13
|
+
## 2. The Pivot (The Turning Point)
|
|
14
|
+
While Google integrated Transformers into Search (BERT) and research models (T5, Meena), they remained cautious about releasing generative agents due to "reputational risk" and "safety alignment."
|
|
15
|
+
**OpenAI saw what Google missed:** The Transformer wasn't just good for translation; it was the *only* architecture that scaled predictably with compute.
|
|
16
|
+
* **The Shift:** OpenAI abandoned their work on LSTMs (which they had heavily invested in) and went all-in on the Transformer Decoder.
|
|
17
|
+
* **The Bet:** They hypothesized that simply throwing massive data and compute at this specific architecture (GPT - Generative Pre-trained Transformer) would yield emergent reasoning capabilities.
|
|
18
|
+
|
|
19
|
+
## 3. The Deep Dive (The Hidden Agenda)
|
|
20
|
+
Why did the underdog (OpenAI) beat the giant (Google) using the giant's own sword?
|
|
21
|
+
* **The Scaling Laws:** In 2020, OpenAI published "Scaling Laws for Neural Language Models." They cracked the code: Performance depends mostly on scale (parameters, data, compute), not just architectural tweaking. Google knew this but was paralyzed by the Innovator's Dilemma—releasing a chatbot might cannibalize their Search ad revenue.
|
|
22
|
+
* **The Brain Drain:** Of the 8 authors of "Attention Is All You Need," *none* remain at Google. Many founded competitors (Cohere, Character.AI) or joined the ecosystem OpenAI cultivated. OpenAI capitalized on this fragmentation.
|
|
23
|
+
* **Closed vs. Open:** OpenAI started as "Open" but realized the Transformer's value lay in the *weights* and the *data recipe*, closing their doors (GPT-3 onwards) just as they perfected Google's invention.
|
|
24
|
+
|
|
25
|
+
## 4. Conclusion (The Verdict)
|
|
26
|
+
OpenAI didn't "steal" the technology; they **recognized its potential for domination** while Google treated it as an academic curiosity. Google built the engine; OpenAI built the race car and drove it off the lot while Google was still checking the tire pressure.
|
|
27
|
+
|
|
28
|
+
**Motive Uncovered:** OpenAI's usage wasn't just adoption; it was an aggressive strategy to exploit a scalability loophole that Google was too bureaucratic to commercialize.
|
|
29
|
+
|
|
30
|
+
---
|
|
31
|
+
*End of Dossier*
|
|
@@ -1,51 +1,51 @@
|
|
|
1
|
-
# Master Script: The Transformer Paradox
|
|
2
|
-
|
|
3
|
-
**Title:** The Engine and the Race Car
|
|
4
|
-
**Logline:** Google built the engine of the future. OpenAI stole the keys and drove it like they stole it.
|
|
5
|
-
**Director:** Spielberg
|
|
6
|
-
**Based on Dossier:** 2017-TRANS-AI
|
|
7
|
-
|
|
8
|
-
---
|
|
9
|
-
|
|
10
|
-
## Act 1: The Golden Ticket
|
|
11
|
-
|
|
12
|
-
**Scene 1: The Lab**
|
|
13
|
-
* **Visual:** Low angle, slow dolly in on a messy desk at Google Brain, 2017. Stacks of papers. A monitor glowing with code. The words "Attention Is All You Need" are typed onto a title page.
|
|
14
|
-
* **Audio:** Soft, rhythmic keyboard clicking. A low, ambient hum of servers.
|
|
15
|
-
* **Narrator (V.O.):** It was the greatest gift in the history of artificial intelligence. And Google... just gave it away.
|
|
16
|
-
|
|
17
|
-
**Scene 2: The Publication**
|
|
18
|
-
* **Visual:** Montage of a PDF being uploaded. The internet exploding with activity. Twitter feeds scrolling fast.
|
|
19
|
-
* **Audio:** A "whoosh" sound of a file sending. The chatter of a thousand voices overlapping.
|
|
20
|
-
* **Narrator (V.O.):** "Attention Is All You Need." A boring title for a weapon. Google thought they were improving translation. In reality? They had just published the blueprints for the atom bomb.
|
|
21
|
-
|
|
22
|
-
## Act 2: The Hesitation vs. The Bet
|
|
23
|
-
|
|
24
|
-
**Scene 3: The Boardroom (Google)**
|
|
25
|
-
* **Visual:** A sterile, glass-walled meeting room. Executives in suits looking worriedly at a whiteboard marked "Risk" and "Ad Revenue."
|
|
26
|
-
* **Audio:** Muffled arguing. "Safety." "Cannibalization." The ticking of a clock.
|
|
27
|
-
* **Narrator (V.O.):** Google had the engine. But they were paralyzed. If they released a chatbot, it would kill their Search ads. They were the king afraid to leave his castle.
|
|
28
|
-
|
|
29
|
-
**Scene 4: The Garage (OpenAI)**
|
|
30
|
-
* **Visual:** Cut to a grittier, darker room. OpenAI researchers. Hoodies. Late nights. Red Bull cans. They are looking at the same "Attention" paper, but their eyes are wide.
|
|
31
|
-
* **Audio:** Fast-paced, electronic synth music starts building. The sound of a race car engine revving up.
|
|
32
|
-
* **Narrator (V.O.):** Across town, the underdog saw something else. They saw a race car. They realized: "If we just make this bigger... it doesn't just translate. It *thinks*."
|
|
33
|
-
|
|
34
|
-
## Act 3: The Getaway
|
|
35
|
-
|
|
36
|
-
**Scene 5: The Scaling Laws**
|
|
37
|
-
* **Visual:** A graph on a screen shooting upward exponentially. The line turns into a road. A sleek car (labeled GPT) tears down it, leaving a clunky bus (labeled Google) in the dust.
|
|
38
|
-
* **Audio:** The engine roar screams. Tires screeching.
|
|
39
|
-
* **Narrator (V.O.):** OpenAI stopped trying to be clever. They just poured gas on the fire. Data. Compute. More. They bet the farm on Google's own invention.
|
|
40
|
-
|
|
41
|
-
**Scene 6: The Empty Chairs**
|
|
42
|
-
* **Visual:** Back to the Google desk from Scene 1. It's empty. Dust motes dancing in the light. We pan across 8 nameplates—the authors of the paper. One by one, they fade away.
|
|
43
|
-
* **Audio:** A melancholic wind.
|
|
44
|
-
* **Narrator (V.O.):** Eight researchers wrote that paper. Today? Not a single one is left at Google. They're gone.
|
|
45
|
-
|
|
46
|
-
**Scene 7: The Conclusion**
|
|
47
|
-
* **Visual:** The OpenAI logo glowing neon in the dark. It shifts from "Open" to "Closed."
|
|
48
|
-
* **Audio:** A heavy metallic door slamming shut. Silence.
|
|
49
|
-
* **Narrator (V.O.):** Google built the engine. OpenAI won the race. And the irony? They did it by reading Google's manual.
|
|
50
|
-
|
|
51
|
-
**[FADE TO BLACK]**
|
|
1
|
+
# Master Script: The Transformer Paradox
|
|
2
|
+
|
|
3
|
+
**Title:** The Engine and the Race Car
|
|
4
|
+
**Logline:** Google built the engine of the future. OpenAI stole the keys and drove it like they stole it.
|
|
5
|
+
**Director:** Spielberg
|
|
6
|
+
**Based on Dossier:** 2017-TRANS-AI
|
|
7
|
+
|
|
8
|
+
---
|
|
9
|
+
|
|
10
|
+
## Act 1: The Golden Ticket
|
|
11
|
+
|
|
12
|
+
**Scene 1: The Lab**
|
|
13
|
+
* **Visual:** Low angle, slow dolly in on a messy desk at Google Brain, 2017. Stacks of papers. A monitor glowing with code. The words "Attention Is All You Need" are typed onto a title page.
|
|
14
|
+
* **Audio:** Soft, rhythmic keyboard clicking. A low, ambient hum of servers.
|
|
15
|
+
* **Narrator (V.O.):** It was the greatest gift in the history of artificial intelligence. And Google... just gave it away.
|
|
16
|
+
|
|
17
|
+
**Scene 2: The Publication**
|
|
18
|
+
* **Visual:** Montage of a PDF being uploaded. The internet exploding with activity. Twitter feeds scrolling fast.
|
|
19
|
+
* **Audio:** A "whoosh" sound of a file sending. The chatter of a thousand voices overlapping.
|
|
20
|
+
* **Narrator (V.O.):** "Attention Is All You Need." A boring title for a weapon. Google thought they were improving translation. In reality? They had just published the blueprints for the atom bomb.
|
|
21
|
+
|
|
22
|
+
## Act 2: The Hesitation vs. The Bet
|
|
23
|
+
|
|
24
|
+
**Scene 3: The Boardroom (Google)**
|
|
25
|
+
* **Visual:** A sterile, glass-walled meeting room. Executives in suits looking worriedly at a whiteboard marked "Risk" and "Ad Revenue."
|
|
26
|
+
* **Audio:** Muffled arguing. "Safety." "Cannibalization." The ticking of a clock.
|
|
27
|
+
* **Narrator (V.O.):** Google had the engine. But they were paralyzed. If they released a chatbot, it would kill their Search ads. They were the king afraid to leave his castle.
|
|
28
|
+
|
|
29
|
+
**Scene 4: The Garage (OpenAI)**
|
|
30
|
+
* **Visual:** Cut to a grittier, darker room. OpenAI researchers. Hoodies. Late nights. Red Bull cans. They are looking at the same "Attention" paper, but their eyes are wide.
|
|
31
|
+
* **Audio:** Fast-paced, electronic synth music starts building. The sound of a race car engine revving up.
|
|
32
|
+
* **Narrator (V.O.):** Across town, the underdog saw something else. They saw a race car. They realized: "If we just make this bigger... it doesn't just translate. It *thinks*."
|
|
33
|
+
|
|
34
|
+
## Act 3: The Getaway
|
|
35
|
+
|
|
36
|
+
**Scene 5: The Scaling Laws**
|
|
37
|
+
* **Visual:** A graph on a screen shooting upward exponentially. The line turns into a road. A sleek car (labeled GPT) tears down it, leaving a clunky bus (labeled Google) in the dust.
|
|
38
|
+
* **Audio:** The engine roar screams. Tires screeching.
|
|
39
|
+
* **Narrator (V.O.):** OpenAI stopped trying to be clever. They just poured gas on the fire. Data. Compute. More. They bet the farm on Google's own invention.
|
|
40
|
+
|
|
41
|
+
**Scene 6: The Empty Chairs**
|
|
42
|
+
* **Visual:** Back to the Google desk from Scene 1. It's empty. Dust motes dancing in the light. We pan across 8 nameplates—the authors of the paper. One by one, they fade away.
|
|
43
|
+
* **Audio:** A melancholic wind.
|
|
44
|
+
* **Narrator (V.O.):** Eight researchers wrote that paper. Today? Not a single one is left at Google. They're gone.
|
|
45
|
+
|
|
46
|
+
**Scene 7: The Conclusion**
|
|
47
|
+
* **Visual:** The OpenAI logo glowing neon in the dark. It shifts from "Open" to "Closed."
|
|
48
|
+
* **Audio:** A heavy metallic door slamming shut. Silence.
|
|
49
|
+
* **Narrator (V.O.):** Google built the engine. OpenAI won the race. And the irony? They did it by reading Google's manual.
|
|
50
|
+
|
|
51
|
+
**[FADE TO BLACK]**
|
package/package.json
CHANGED
|
@@ -1,65 +1,62 @@
|
|
|
1
|
-
{
|
|
2
|
-
"name": "videonut",
|
|
3
|
-
"version": "1.
|
|
4
|
-
"description": "AI-powered YouTube documentary production pipeline with 10 specialized agents for research, scripting, and asset management",
|
|
5
|
-
"keywords": [
|
|
6
|
-
"youtube",
|
|
7
|
-
"documentary",
|
|
8
|
-
"ai-agents",
|
|
9
|
-
"video-production",
|
|
10
|
-
"gemini-cli",
|
|
11
|
-
"qwen",
|
|
12
|
-
"claude",
|
|
13
|
-
"content-creation",
|
|
14
|
-
"scriptwriting",
|
|
15
|
-
"research",
|
|
16
|
-
"automation"
|
|
17
|
-
],
|
|
18
|
-
"homepage": "https://github.com/konda-vamshi-krishna/videonut",
|
|
19
|
-
"bugs": {
|
|
20
|
-
"url": "https://github.com/konda-vamshi-krishna/videonut/issues"
|
|
21
|
-
},
|
|
22
|
-
"license": "MIT",
|
|
23
|
-
"author": {
|
|
24
|
-
"name": "Vamshi Krishna",
|
|
25
|
-
"email": "vamshikrishna131437@gmail.com"
|
|
26
|
-
},
|
|
27
|
-
"repository": {
|
|
28
|
-
"type": "git",
|
|
29
|
-
"url": "git+https://github.com/konda-vamshi-krishna/videonut.git"
|
|
30
|
-
},
|
|
31
|
-
"scripts": {
|
|
32
|
-
"setup": "node setup.js"
|
|
33
|
-
},
|
|
34
|
-
"bin": {
|
|
35
|
-
"videonut": "./bin/videonut.js"
|
|
36
|
-
},
|
|
37
|
-
"files": [
|
|
38
|
-
"agents/",
|
|
39
|
-
"tools/downloaders/",
|
|
40
|
-
"tools/validators/",
|
|
41
|
-
"tools/logging/",
|
|
42
|
-
"tools/check_env.py",
|
|
43
|
-
"workflows/",
|
|
44
|
-
"docs/",
|
|
45
|
-
"memory/",
|
|
46
|
-
"scripts/",
|
|
47
|
-
".
|
|
48
|
-
"
|
|
49
|
-
".
|
|
50
|
-
".
|
|
51
|
-
"
|
|
52
|
-
"
|
|
53
|
-
"
|
|
54
|
-
"
|
|
55
|
-
"
|
|
56
|
-
"
|
|
57
|
-
"
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
"
|
|
61
|
-
|
|
62
|
-
"engines": {
|
|
63
|
-
"node": ">=20.0.0"
|
|
64
|
-
}
|
|
1
|
+
{
|
|
2
|
+
"name": "videonut",
|
|
3
|
+
"version": "1.3.0",
|
|
4
|
+
"description": "AI-powered YouTube documentary production pipeline with 10 specialized agents for research, scripting, and asset management",
|
|
5
|
+
"keywords": [
|
|
6
|
+
"youtube",
|
|
7
|
+
"documentary",
|
|
8
|
+
"ai-agents",
|
|
9
|
+
"video-production",
|
|
10
|
+
"gemini-cli",
|
|
11
|
+
"qwen",
|
|
12
|
+
"claude",
|
|
13
|
+
"content-creation",
|
|
14
|
+
"scriptwriting",
|
|
15
|
+
"research",
|
|
16
|
+
"automation"
|
|
17
|
+
],
|
|
18
|
+
"homepage": "https://github.com/konda-vamshi-krishna/videonut",
|
|
19
|
+
"bugs": {
|
|
20
|
+
"url": "https://github.com/konda-vamshi-krishna/videonut/issues"
|
|
21
|
+
},
|
|
22
|
+
"license": "MIT",
|
|
23
|
+
"author": {
|
|
24
|
+
"name": "Vamshi Krishna",
|
|
25
|
+
"email": "vamshikrishna131437@gmail.com"
|
|
26
|
+
},
|
|
27
|
+
"repository": {
|
|
28
|
+
"type": "git",
|
|
29
|
+
"url": "git+https://github.com/konda-vamshi-krishna/videonut.git"
|
|
30
|
+
},
|
|
31
|
+
"scripts": {
|
|
32
|
+
"setup": "node setup.js"
|
|
33
|
+
},
|
|
34
|
+
"bin": {
|
|
35
|
+
"videonut": "./bin/videonut.js"
|
|
36
|
+
},
|
|
37
|
+
"files": [
|
|
38
|
+
"agents/",
|
|
39
|
+
"tools/downloaders/",
|
|
40
|
+
"tools/validators/",
|
|
41
|
+
"tools/logging/",
|
|
42
|
+
"tools/check_env.py",
|
|
43
|
+
"workflows/",
|
|
44
|
+
"docs/",
|
|
45
|
+
"memory/",
|
|
46
|
+
"scripts/",
|
|
47
|
+
".antigravity/",
|
|
48
|
+
"bin/",
|
|
49
|
+
"setup.js",
|
|
50
|
+
"config.yaml",
|
|
51
|
+
"requirements.txt",
|
|
52
|
+
"workflow_orchestrator.py",
|
|
53
|
+
"file_validator.py",
|
|
54
|
+
"README.md",
|
|
55
|
+
"USER_GUIDE.md",
|
|
56
|
+
"CONTRIBUTING.md",
|
|
57
|
+
"LICENSE"
|
|
58
|
+
],
|
|
59
|
+
"engines": {
|
|
60
|
+
"node": ">=20.0.0"
|
|
61
|
+
}
|
|
65
62
|
}
|
package/requirements.txt
CHANGED
|
@@ -1,9 +1,9 @@
|
|
|
1
|
-
# VideoNut Dependencies
|
|
2
|
-
yt-dlp
|
|
3
|
-
playwright
|
|
4
|
-
requests
|
|
5
|
-
beautifulsoup4
|
|
6
|
-
pypdf
|
|
7
|
-
youtube-transcript-api
|
|
8
|
-
youtube-search-python
|
|
1
|
+
# VideoNut Dependencies
|
|
2
|
+
yt-dlp
|
|
3
|
+
playwright
|
|
4
|
+
requests
|
|
5
|
+
beautifulsoup4
|
|
6
|
+
pypdf
|
|
7
|
+
youtube-transcript-api
|
|
8
|
+
youtube-search-python
|
|
9
9
|
httpx
|
package/setup.js
CHANGED
|
@@ -297,46 +297,64 @@ async function main() {
|
|
|
297
297
|
info(' No AI CLI currently installed');
|
|
298
298
|
}
|
|
299
299
|
|
|
300
|
-
// Always offer installation choice
|
|
300
|
+
// Always offer installation choice
|
|
301
301
|
console.log('\nš¦ CLI Installation:');
|
|
302
|
-
console.log(' 1. Install Gemini
|
|
303
|
-
console.log('
|
|
304
|
-
console.log('
|
|
302
|
+
console.log(' 1. Install BOTH Gemini + Qwen (ā RECOMMENDED)');
|
|
303
|
+
console.log(' ā Gemini: Best for content writing & creativity');
|
|
304
|
+
console.log(' ā Qwen: Best for instruction following & agent tasks');
|
|
305
|
+
console.log(' 2. Install Gemini CLI only (by Google)');
|
|
306
|
+
console.log(' 3. Install Qwen CLI only (by Alibaba)');
|
|
305
307
|
if (hasGemini || hasQwen || hasClaude) {
|
|
306
308
|
console.log(' 4. Skip - Use existing CLI\n');
|
|
307
309
|
} else {
|
|
308
310
|
console.log(' 4. Skip - I will install manually\n');
|
|
309
311
|
}
|
|
310
312
|
|
|
311
|
-
const answer = await ask('Enter choice [1 for
|
|
313
|
+
const answer = await ask('Enter choice [1 for BOTH]: ');
|
|
312
314
|
|
|
313
315
|
if (answer === '2') {
|
|
316
|
+
// Install Gemini CLI only
|
|
314
317
|
try {
|
|
315
|
-
info('Installing
|
|
316
|
-
execSync('npm install -g @
|
|
317
|
-
success('
|
|
318
|
+
info('Installing Gemini CLI globally...');
|
|
319
|
+
execSync('npm install -g @google/gemini-cli', { stdio: 'inherit' });
|
|
320
|
+
success('Gemini CLI installed! (Best for content writing)');
|
|
318
321
|
} catch (e) {
|
|
319
|
-
warning('Could not install CLI.');
|
|
320
|
-
info('Install manually: npm install -g @
|
|
322
|
+
warning('Could not install Gemini CLI.');
|
|
323
|
+
info('Install manually: npm install -g @google/gemini-cli');
|
|
321
324
|
}
|
|
322
325
|
} else if (answer === '3') {
|
|
326
|
+
// Install Qwen CLI only
|
|
323
327
|
try {
|
|
324
328
|
info('Installing Qwen CLI globally...');
|
|
325
329
|
execSync('npm install -g @qwen-code/qwen-code', { stdio: 'inherit' });
|
|
326
|
-
success('Qwen CLI installed!
|
|
330
|
+
success('Qwen CLI installed! (Best for instruction following)');
|
|
327
331
|
} catch (e) {
|
|
328
|
-
warning('Could not install CLI.');
|
|
332
|
+
warning('Could not install Qwen CLI.');
|
|
329
333
|
info('Install manually: npm install -g @qwen-code/qwen-code');
|
|
330
334
|
}
|
|
331
335
|
} else if (answer !== '4') {
|
|
336
|
+
// Install BOTH Gemini + Qwen (default)
|
|
337
|
+
info('Installing BOTH Gemini CLI and Qwen CLI...\n');
|
|
338
|
+
|
|
332
339
|
try {
|
|
333
340
|
info('Installing Gemini CLI globally...');
|
|
334
341
|
execSync('npm install -g @google/gemini-cli', { stdio: 'inherit' });
|
|
335
|
-
success('Gemini CLI installed!
|
|
342
|
+
success('Gemini CLI installed! (Best for content writing)');
|
|
336
343
|
} catch (e) {
|
|
337
|
-
warning('Could not install CLI.');
|
|
338
|
-
|
|
344
|
+
warning('Could not install Gemini CLI.');
|
|
345
|
+
}
|
|
346
|
+
|
|
347
|
+
try {
|
|
348
|
+
info('Installing Qwen CLI globally...');
|
|
349
|
+
execSync('npm install -g @qwen-code/qwen-code', { stdio: 'inherit' });
|
|
350
|
+
success('Qwen CLI installed! (Best for instruction following)');
|
|
351
|
+
} catch (e) {
|
|
352
|
+
warning('Could not install Qwen CLI.');
|
|
339
353
|
}
|
|
354
|
+
|
|
355
|
+
console.log('\nā
Both CLIs installed!');
|
|
356
|
+
console.log(' Use "gemini" for creative content');
|
|
357
|
+
console.log(' Use "qwen" for agent/instruction tasks');
|
|
340
358
|
}
|
|
341
359
|
|
|
342
360
|
// āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā
|