bmad-plus 0.4.0 → 0.4.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. package/CHANGELOG.md +29 -0
  2. package/README.md +13 -56
  3. package/osint-agent-package/skills/bmad-osint-investigate/osint/SKILL.md +452 -452
  4. package/osint-agent-package/skills/bmad-osint-investigate/osint/assets/dossier-template.md +116 -116
  5. package/osint-agent-package/skills/bmad-osint-investigate/osint/references/content-extraction.md +100 -100
  6. package/osint-agent-package/skills/bmad-osint-investigate/osint/references/platforms.md +130 -130
  7. package/osint-agent-package/skills/bmad-osint-investigate/osint/references/psychoprofile.md +69 -69
  8. package/osint-agent-package/skills/bmad-osint-investigate/osint/references/tools.md +281 -281
  9. package/osint-agent-package/skills/bmad-osint-investigate/osint/scripts/mcp-client.py +136 -136
  10. package/package.json +1 -1
  11. package/readme-international/README.de.md +1 -1
  12. package/readme-international/README.es.md +1 -1
  13. package/readme-international/README.fr.md +1 -1
  14. package/tools/cli/commands/install.js +88 -59
  15. package/tools/cli/i18n.js +501 -0
  16. package/oveanet-pack/animated-website/DEPLOYMENT.md +0 -104
  17. package/oveanet-pack/animated-website/README.md +0 -63
  18. package/oveanet-pack/animated-website/agent.yaml +0 -63
  19. package/oveanet-pack/seo-audit-360/DEPLOYMENT.md +0 -115
  20. package/oveanet-pack/seo-audit-360/README.md +0 -66
  21. package/oveanet-pack/seo-audit-360/agent.yaml +0 -70
  22. package/oveanet-pack/seo-audit-360/extensions/google-analytics/EXTENSION.md +0 -79
  23. package/oveanet-pack/seo-audit-360/extensions/google-analytics/ga4_client.py +0 -200
  24. package/oveanet-pack/seo-audit-360/extensions/google-analytics/requirements.txt +0 -4
  25. package/oveanet-pack/seo-audit-360/extensions/google-search-console/EXTENSION.md +0 -109
  26. package/oveanet-pack/seo-audit-360/extensions/google-search-console/gsc_client.py +0 -186
  27. package/oveanet-pack/seo-audit-360/extensions/google-search-console/requirements.txt +0 -4
  28. package/oveanet-pack/seo-audit-360/hooks/seo-check.sh +0 -95
  29. package/oveanet-pack/seo-audit-360/requirements.txt +0 -14
  30. package/oveanet-pack/seo-audit-360/scripts/__pycache__/seo_crawl.cpython-314.pyc +0 -0
  31. package/oveanet-pack/seo-audit-360/scripts/__pycache__/seo_parse.cpython-314.pyc +0 -0
  32. package/oveanet-pack/seo-audit-360/scripts/install.ps1 +0 -53
  33. package/oveanet-pack/seo-audit-360/scripts/install.sh +0 -48
  34. package/oveanet-pack/seo-audit-360/scripts/seo_apis.py +0 -464
  35. package/oveanet-pack/seo-audit-360/scripts/seo_crawl.py +0 -282
  36. package/oveanet-pack/seo-audit-360/scripts/seo_fetch.py +0 -231
  37. package/oveanet-pack/seo-audit-360/scripts/seo_parse.py +0 -255
  38. package/oveanet-pack/seo-audit-360/scripts/seo_report.py +0 -403
  39. package/oveanet-pack/seo-audit-360/scripts/seo_screenshot.py +0 -202
  40. package/oveanet-pack/seo-audit-360/tests/__pycache__/test_crawl.cpython-314-pytest-9.0.2.pyc +0 -0
  41. package/oveanet-pack/seo-audit-360/tests/__pycache__/test_parse.cpython-314-pytest-9.0.2.pyc +0 -0
  42. package/oveanet-pack/seo-audit-360/tests/fixtures/sample_page.html +0 -62
  43. package/oveanet-pack/seo-audit-360/tests/test_apis.py +0 -75
  44. package/oveanet-pack/seo-audit-360/tests/test_crawl.py +0 -121
  45. package/oveanet-pack/seo-audit-360/tests/test_fetch.py +0 -70
  46. package/oveanet-pack/seo-audit-360/tests/test_parse.py +0 -184
  47. package/oveanet-pack/universal-backup/DEPLOYMENT.md +0 -80
  48. package/oveanet-pack/universal-backup/README.md +0 -58
  49. package/oveanet-pack/universal-backup/agent.yaml +0 -45
  50. /package/{oveanet-pack/animated-website/agent → src/bmad-plus/agents/pack-animated}/animated-website-agent.md +0 -0
  51. /package/{oveanet-pack/animated-website → src/bmad-plus/agents/pack-animated}/templates/animated-website-workflow.md +0 -0
  52. /package/{oveanet-pack/universal-backup/agent → src/bmad-plus/agents/pack-backup}/backup-agent.md +0 -0
  53. /package/{oveanet-pack/universal-backup → src/bmad-plus/agents/pack-backup}/templates/backup-workflow.md +0 -0
  54. /package/{oveanet-pack/seo-audit-360 → src/bmad-plus/agents/pack-seo}/SKILL.md +0 -0
  55. /package/{oveanet-pack/seo-audit-360 → src/bmad-plus/agents/pack-seo}/checklist.md +0 -0
  56. /package/{oveanet-pack/seo-audit-360 → src/bmad-plus/agents/pack-seo}/pagespeed-playbook.md +0 -0
  57. /package/{oveanet-pack/seo-audit-360 → src/bmad-plus/agents/pack-seo}/ref/audit-schema.json +0 -0
  58. /package/{oveanet-pack/seo-audit-360 → src/bmad-plus/agents/pack-seo}/ref/cwv-thresholds.md +0 -0
  59. /package/{oveanet-pack/seo-audit-360 → src/bmad-plus/agents/pack-seo}/ref/eeat-criteria.md +0 -0
  60. /package/{oveanet-pack/seo-audit-360 → src/bmad-plus/agents/pack-seo}/ref/geo-signals.md +0 -0
  61. /package/{oveanet-pack/seo-audit-360 → src/bmad-plus/agents/pack-seo}/ref/hreflang-rules.md +0 -0
  62. /package/{oveanet-pack/seo-audit-360 → src/bmad-plus/agents/pack-seo}/ref/quality-gates.md +0 -0
  63. /package/{oveanet-pack/seo-audit-360 → src/bmad-plus/agents/pack-seo}/ref/schema-catalog.md +0 -0
  64. /package/{oveanet-pack/seo-audit-360 → src/bmad-plus/agents/pack-seo}/ref/schema-templates.json +0 -0
  65. /package/{oveanet-pack/seo-audit-360/agent → src/bmad-plus/agents/pack-seo}/seo-chief.md +0 -0
  66. /package/{oveanet-pack/seo-audit-360/agent → src/bmad-plus/agents/pack-seo}/seo-judge.md +0 -0
  67. /package/{oveanet-pack/seo-audit-360/agent → src/bmad-plus/agents/pack-seo}/seo-scout.md +0 -0
  68. /package/{oveanet-pack/seo-audit-360 → src/bmad-plus/agents/pack-seo}/templates/seo-audit-workflow.md +0 -0
@@ -1,184 +0,0 @@
1
- """
2
- Tests for seo_parse.py — HTML parsing and SEO element extraction.
3
-
4
- Author: Laurent Rochetta
5
- """
6
-
7
- import json
8
- import os
9
- import sys
10
-
11
- sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "scripts"))
12
-
13
- from seo_parse import parse_html
14
-
15
- FIXTURES_DIR = os.path.join(os.path.dirname(__file__), "fixtures")
16
-
17
-
18
- def load_fixture(name: str) -> str:
19
- with open(os.path.join(FIXTURES_DIR, name), "r", encoding="utf-8") as f:
20
- return f.read()
21
-
22
-
23
- class TestTitleParsing:
24
- def test_extracts_title(self):
25
- result = parse_html(load_fixture("sample_page.html"))
26
- assert result["title"] == "SEO Test Page — BMAD+ Fixture"
27
-
28
- def test_title_length(self):
29
- result = parse_html(load_fixture("sample_page.html"))
30
- assert result["title_length"] == len("SEO Test Page — BMAD+ Fixture")
31
-
32
- def test_missing_title(self):
33
- result = parse_html("<html><body><p>No title</p></body></html>")
34
- assert result["title"] is None
35
- assert result["title_length"] == 0
36
-
37
-
38
- class TestMetaTags:
39
- def test_extracts_description(self):
40
- result = parse_html(load_fixture("sample_page.html"))
41
- assert "test page" in result["meta_description"].lower()
42
-
43
- def test_extracts_robots(self):
44
- result = parse_html(load_fixture("sample_page.html"))
45
- assert result["meta_robots"] == "index, follow"
46
-
47
- def test_extracts_viewport(self):
48
- result = parse_html(load_fixture("sample_page.html"))
49
- assert "width=device-width" in result["meta_viewport"]
50
-
51
- def test_missing_description(self):
52
- result = parse_html("<html><head><title>T</title></head><body></body></html>")
53
- assert result["meta_description"] is None
54
-
55
-
56
- class TestCanonical:
57
- def test_extracts_canonical(self):
58
- result = parse_html(load_fixture("sample_page.html"))
59
- assert result["canonical"] == "https://example.com/test"
60
-
61
- def test_missing_canonical(self):
62
- result = parse_html("<html><body></body></html>")
63
- assert result["canonical"] is None
64
-
65
-
66
- class TestHeadings:
67
- def test_h1_count(self):
68
- result = parse_html(load_fixture("sample_page.html"))
69
- assert len(result["headings"]["h1"]) == 1
70
-
71
- def test_h2_count(self):
72
- result = parse_html(load_fixture("sample_page.html"))
73
- assert len(result["headings"]["h2"]) == 2
74
-
75
- def test_h3_count(self):
76
- result = parse_html(load_fixture("sample_page.html"))
77
- assert len(result["headings"]["h3"]) == 1
78
-
79
- def test_multiple_h1_detection(self):
80
- html = "<html><body><h1>First</h1><h1>Second</h1></body></html>"
81
- result = parse_html(html)
82
- assert len(result["headings"]["h1"]) == 2
83
-
84
-
85
- class TestImages:
86
- def test_image_count(self):
87
- result = parse_html(load_fixture("sample_page.html"))
88
- assert len(result["images"]) == 3
89
-
90
- def test_image_with_alt(self):
91
- result = parse_html(load_fixture("sample_page.html"))
92
- hero = [i for i in result["images"] if "hero" in i["src"]]
93
- assert len(hero) == 1
94
- assert hero[0]["has_alt"] is True
95
- assert hero[0]["alt"] == "Hero image for testing"
96
-
97
- def test_image_without_alt(self):
98
- result = parse_html(load_fixture("sample_page.html"))
99
- no_alt = [i for i in result["images"] if "no-alt" in i["src"]]
100
- assert len(no_alt) == 1
101
- assert no_alt[0]["has_alt"] is False
102
-
103
- def test_image_with_empty_alt(self):
104
- result = parse_html(load_fixture("sample_page.html"))
105
- empty = [i for i in result["images"] if "empty-alt" in i["src"]]
106
- assert len(empty) == 1
107
- assert empty[0]["has_alt"] is True
108
- assert empty[0]["alt_empty"] is True
109
-
110
-
111
- class TestLinks:
112
- def test_internal_links(self):
113
- result = parse_html(load_fixture("sample_page.html"), base_url="https://example.com")
114
- assert len(result["links"]["internal"]) >= 2
115
-
116
- def test_external_links(self):
117
- result = parse_html(load_fixture("sample_page.html"), base_url="https://example.com")
118
- assert len(result["links"]["external"]) >= 1
119
-
120
- def test_nofollow_detection(self):
121
- result = parse_html(load_fixture("sample_page.html"), base_url="https://example.com")
122
- nofollow = [l for l in result["links"]["external"] if l["is_nofollow"]]
123
- assert len(nofollow) >= 1
124
-
125
-
126
- class TestSchema:
127
- def test_schema_block_count(self):
128
- result = parse_html(load_fixture("sample_page.html"))
129
- assert len(result["schema_blocks"]) == 2
130
-
131
- def test_schema_types(self):
132
- result = parse_html(load_fixture("sample_page.html"))
133
- types = [s["type"] for s in result["schema_blocks"]]
134
- assert "Organization" in types
135
- assert "BreadcrumbList" in types
136
-
137
- def test_schema_parse_error(self):
138
- html = '<html><body><script type="application/ld+json">{invalid json}</script></body></html>'
139
- result = parse_html(html)
140
- assert len(result["schema_blocks"]) == 1
141
- assert result["schema_blocks"][0]["type"] == "PARSE_ERROR"
142
-
143
-
144
- class TestOpenGraph:
145
- def test_og_title(self):
146
- result = parse_html(load_fixture("sample_page.html"))
147
- assert result["open_graph"].get("og:title") == "SEO Test Page"
148
-
149
- def test_og_type(self):
150
- result = parse_html(load_fixture("sample_page.html"))
151
- assert result["open_graph"].get("og:type") == "website"
152
-
153
-
154
- class TestHreflang:
155
- def test_hreflang_count(self):
156
- result = parse_html(load_fixture("sample_page.html"))
157
- assert len(result["hreflang"]) == 3 # en, fr, x-default
158
-
159
- def test_hreflang_languages(self):
160
- result = parse_html(load_fixture("sample_page.html"))
161
- langs = [h["lang"] for h in result["hreflang"]]
162
- assert "en" in langs
163
- assert "fr" in langs
164
- assert "x-default" in langs
165
-
166
-
167
- class TestContentMetrics:
168
- def test_word_count_positive(self):
169
- result = parse_html(load_fixture("sample_page.html"))
170
- assert result["word_count"] > 30
171
-
172
- def test_text_ratio_range(self):
173
- result = parse_html(load_fixture("sample_page.html"))
174
- assert 0 < result["text_ratio"] < 1
175
-
176
- def test_has_lang_attr(self):
177
- result = parse_html(load_fixture("sample_page.html"))
178
- assert result["has_lang_attr"] is True
179
- assert result["lang"] == "en"
180
-
181
- def test_html_size(self):
182
- html = load_fixture("sample_page.html")
183
- result = parse_html(html)
184
- assert result["html_size_bytes"] == len(html.encode("utf-8"))
@@ -1,80 +0,0 @@
1
- # 🚀 Guide de déploiement — Universal Backup Agent
2
-
3
- ## Méthode 1 : Intégration BMAD (recommandée)
4
-
5
- ### Étape 1 — Copier l'agent
6
- Copier `agent/backup-agent.md` dans le dossier agents de votre projet BMAD :
7
- ```
8
- votre-projet/
9
- ├── _bmad/
10
- │ └── agents/
11
- │ └── backup-agent.md ← copier ici
12
- ```
13
-
14
- ### Étape 2 — Déclarer dans le manifest
15
- Ajouter cette ligne dans `_bmad_config/agent-manifest.csv` :
16
- ```csv
17
- backup-agent,Universal Backup Manager,backup,Gère les sauvegardes ZIP horodatées du projet,_bmad/agents/backup-agent.md
18
- ```
19
-
20
- ### Étape 3 — Ajouter le workflow Gemini
21
- Copier `templates/backup-workflow.md` dans :
22
- ```
23
- votre-projet/.agent/workflows/backup-project.md
24
- ```
25
- **Important :** Remplacer `%PROJECT_ROOT%` par le chemin réel du projet.
26
-
27
- ---
28
-
29
- ## Méthode 2 : Standalone (sans BMAD)
30
-
31
- ### Option A — Workflow Gemini uniquement
32
- 1. Créer `.agent/workflows/` dans votre projet
33
- 2. Copier `templates/backup-workflow.md` → `.agent/workflows/backup-project.md`
34
- 3. Remplacer `%PROJECT_ROOT%` par le chemin réel
35
- 4. Utiliser avec `/backup-project`
36
-
37
- ### Option B — Commande directe
38
- Coller cette commande dans votre terminal :
39
-
40
- **Windows :**
41
- ```powershell
42
- $timestamp = Get-Date -Format "yyyyMMdd_HHmmss"
43
- $projectRoot = "C:\chemin\vers\votre\projet"
44
- $projectName = Split-Path $projectRoot -Leaf
45
- $backupDir = "$projectRoot\backups"
46
- if (!(Test-Path $backupDir)) { New-Item -ItemType Directory -Path $backupDir -Force }
47
- Get-ChildItem $projectRoot -Exclude "backups","node_modules",".git","vendor","__pycache__" |
48
- Compress-Archive -DestinationPath "$backupDir\${projectName}_backup_$timestamp.zip" -Force
49
- ```
50
-
51
- **Linux/Mac :**
52
- ```bash
53
- TIMESTAMP=$(date +%Y%m%d_%H%M%S)
54
- PROJECT_ROOT="/chemin/vers/votre/projet"
55
- PROJECT_NAME=$(basename "$PROJECT_ROOT")
56
- mkdir -p "$PROJECT_ROOT/backups"
57
- cd "$PROJECT_ROOT" && zip -r "backups/${PROJECT_NAME}_backup_$TIMESTAMP.zip" . \
58
- -x "backups/*" "node_modules/*" ".git/*" "vendor/*" "__pycache__/*"
59
- ```
60
-
61
- ---
62
-
63
- ## 📁 Exclusions par défaut
64
-
65
- | Dossier/Pattern | Raison |
66
- |----------------|--------|
67
- | `backups/` | Éviter la récursion |
68
- | `node_modules/` | Dépendances, se réinstallent avec `npm install` |
69
- | `.git/` | Historique Git, lourd et inutile dans un backup |
70
- | `vendor/` | Dépendances PHP/Composer |
71
- | `__pycache__/` | Cache Python compilé |
72
- | `*.backup_*` | Anciens fichiers de backup individuels |
73
-
74
- ## 🔧 Personnalisation
75
-
76
- Pour exclure d'autres dossiers, ajoutez-les à la liste `Exclude` :
77
- ```powershell
78
- # Exemple: exclure aussi "storage" et "tmp"
79
- Get-ChildItem $projectRoot -Exclude "backups","node_modules",".git","vendor","__pycache__","storage","tmp"
80
- ```
@@ -1,58 +0,0 @@
1
- # 🗂️ Universal Project Backup Agent
2
-
3
- Agent BMAD universel pour créer des backups ZIP horodatés de n'importe quel projet web.
4
-
5
- ## 📁 Structure
6
-
7
- ```
8
- Universal Backup Agent/
9
- ├── agent/
10
- │ └── backup-agent.md # Agent BMAD
11
- ├── templates/
12
- │ └── backup-workflow.md # Workflow Gemini prêt à copier
13
- ├── README.md # Ce fichier
14
- └── DEPLOYMENT.md # Guide de déploiement
15
- ```
16
-
17
- ## 🚀 Utilisation rapide
18
-
19
- ### Commande slash
20
- ```
21
- /backup-project
22
- ```
23
-
24
- ### Commande manuelle (PowerShell)
25
- ```powershell
26
- $timestamp = Get-Date -Format "yyyyMMdd_HHmmss"
27
- $projectRoot = "CHEMIN_DU_PROJET"
28
- $backupDir = "$projectRoot\backups"
29
- if (!(Test-Path $backupDir)) { New-Item -ItemType Directory -Path $backupDir -Force }
30
- Get-ChildItem $projectRoot -Exclude "backups","node_modules",".git","vendor","__pycache__","*.backup_*" |
31
- Compress-Archive -DestinationPath "$backupDir\backup_$timestamp.zip" -Force
32
- ```
33
-
34
- ### Commande manuelle (Bash/Linux)
35
- ```bash
36
- TIMESTAMP=$(date +%Y%m%d_%H%M%S)
37
- PROJECT_ROOT="CHEMIN_DU_PROJET"
38
- mkdir -p "$PROJECT_ROOT/backups"
39
- cd "$PROJECT_ROOT" && zip -r "backups/backup_$TIMESTAMP.zip" . \
40
- -x "backups/*" "node_modules/*" ".git/*" "vendor/*" "__pycache__/*" "*.backup_*"
41
- ```
42
-
43
- ## ⚙️ Fonctionnalités
44
-
45
- - **Horodatage automatique** : chaque backup est nommé avec date + heure
46
- - **Exclusions intelligentes** : ignore `node_modules`, `.git`, `vendor`, `backups/`, `__pycache__`
47
- - **Multi-plateforme** : PowerShell (Windows) et Bash (Linux/Mac)
48
- - **Compatible BMAD** : intégrable comme agent dans le framework BMAD
49
- - **Workflow Gemini** : fichier `.md` prêt à copier dans `.agent/workflows/`
50
-
51
- ## 📋 Projets compatibles
52
-
53
- Fonctionne avec tout type de projet :
54
- - PHP / Laravel / Symfony
55
- - Node.js / Next.js / Vite
56
- - Python / Django / Flask
57
- - HTML/CSS/JS statique
58
- - WordPress / CMS
@@ -1,45 +0,0 @@
1
- name: universal-backup
2
- version: 1.0.0
3
- title: "Universal Backup"
4
- description: "Backup ZIP horodaté multi-plateforme avec exclusions intelligentes"
5
- author: Laurent ROCHETTA AI
6
- icon: "🗂️"
7
- tags: [backup, zip, devops, utilities]
8
- triggers:
9
- - "backup"
10
- - "backup project"
11
- - "create backup"
12
- - "save project"
13
- - "zip project"
14
- requires:
15
- tools: []
16
- scripts: []
17
- commands:
18
- - id: backup
19
- name: "Create Backup"
20
- description: "Crée un backup ZIP horodaté"
21
- - id: list
22
- name: "List Backups"
23
- description: "Liste les backups existants"
24
- - id: restore
25
- name: "Restore Backup"
26
- description: "Restaure un backup"
27
- - id: clean
28
- name: "Clean Backups"
29
- description: "Supprime les vieux backups"
30
- platforms:
31
- bmad:
32
- target: "_bmad/core/agents/"
33
- file: "backup-agent.md"
34
- claude:
35
- target: ".claude/skills/"
36
- skillName: "backup"
37
- gemini:
38
- target: ".agent/workflows/"
39
- file: "backup-project.md"
40
- opencode:
41
- target: ".opencode/agents/"
42
- file: "backup-agent.md"
43
- codex:
44
- target: ".codex/agents/"
45
- file: "backup-agent.md"