opencode-skills-antigravity 1.0.31 → 1.0.32
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bundled-skills/.antigravity-install-manifest.json +1 -1
- package/bundled-skills/docs/contributors/quality-bar.md +2 -0
- package/bundled-skills/docs/maintainers/audit.md +1 -0
- package/bundled-skills/docs/maintainers/release-process.md +10 -0
- package/bundled-skills/docx/ooxml/scripts/unpack.py +28 -3
- package/bundled-skills/docx-official/ooxml/scripts/unpack.py +28 -3
- package/bundled-skills/pptx/ooxml/scripts/unpack.py +28 -3
- package/bundled-skills/pptx-official/ooxml/scripts/unpack.py +28 -3
- package/package.json +1 -1
|
@@ -47,6 +47,7 @@ A list of known edge cases or things the skill _cannot_ do.
|
|
|
47
47
|
If a skill includes command examples, remote fetch steps, secrets, or mutation guidance, the PR must document the risk and pass `npm run security:docs` in addition to normal validation.
|
|
48
48
|
|
|
49
49
|
For pull requests that add or modify `SKILL.md`, GitHub also runs the automated `skill-review` workflow. Treat that review as part of the normal PR quality gate and address any actionable findings before merge.
|
|
50
|
+
Automated checks are necessary, but they do **not** replace manual reviewer judgment on logic, safety, and likely failure modes.
|
|
50
51
|
|
|
51
52
|
`npm run security:docs` enforces a repo-wide scan for:
|
|
52
53
|
|
|
@@ -95,5 +96,6 @@ Notes:
|
|
|
95
96
|
- `npm run audit:skills` is the maintainer-facing compliance/usability report for the full library.
|
|
96
97
|
- `npm run security:docs` is required for command-heavy or risky skill content.
|
|
97
98
|
- PRs that touch `SKILL.md` also get an automated `skill-review` GitHub Actions check.
|
|
99
|
+
- Skill changes and risky guidance still require a manual logic review before merge, even when the automated gates pass.
|
|
98
100
|
- `npm run validate:strict` is a useful hardening pass, but the repository still contains legacy skills that do not yet satisfy strict validation.
|
|
99
101
|
- Examples and limitations remain part of the quality bar even when they are not fully auto-enforced by the current validator.
|
|
@@ -37,6 +37,7 @@ This document summarizes the repository coherence audit performed after the `app
|
|
|
37
37
|
- missing examples and missing limitations sections,
|
|
38
38
|
- overly long `SKILL.md` files that should probably be split into `references/`,
|
|
39
39
|
- plus the existing structural/safety checks (frontmatter, risk, `When to Use`, offensive disclaimer, dangling links).
|
|
40
|
+
- The report also includes a non-blocking `suggested_risk` for skills that are still marked `unknown` or appear to be misclassified, so maintainers can resolve risk classification during PR review without changing the contributor gate.
|
|
40
41
|
- Use `npm run audit:skills` for the maintainer view and `npm run audit:skills -- --json-out ... --markdown-out ...` when you want artifacts for triage or cleanup tracking.
|
|
41
42
|
|
|
42
43
|
### 3. Cross-references
|
|
@@ -75,6 +75,16 @@ npm publish
|
|
|
75
75
|
Normally this still happens via the existing GitHub release workflow after the GitHub release is published.
|
|
76
76
|
That workflow now reruns `sync:release-state`, refreshes tracked web assets, fails on canonical drift via `git diff --exit-code`, executes tests and docs security checks, builds the web app, and dry-runs the npm package before `npm publish`.
|
|
77
77
|
|
|
78
|
+
## Canonical Sync Bot
|
|
79
|
+
|
|
80
|
+
`main` still uses the repository's auto-sync model for canonical generated artifacts, but with a narrow contract:
|
|
81
|
+
|
|
82
|
+
- PRs stay source-only.
|
|
83
|
+
- After merge, the `main` workflow may commit generated canonical files directly to `main` with `[ci skip]`.
|
|
84
|
+
- The bot commit is only allowed to stage files resolved from `tools/scripts/generated_files.js --include-mixed`.
|
|
85
|
+
- If repo-state sync leaves any unmanaged tracked or untracked drift, the workflow fails instead of pushing a partial fix.
|
|
86
|
+
- The scheduled hygiene workflow follows the same contract and shares the same concurrency group so only one canonical sync writer runs at a time.
|
|
87
|
+
|
|
78
88
|
## Rollback Notes
|
|
79
89
|
|
|
80
90
|
- If the release tag is wrong, delete the tag locally and remotely before republishing.
|
|
@@ -2,22 +2,47 @@
|
|
|
2
2
|
"""Unpack and format XML contents of Office files (.docx, .pptx, .xlsx)"""
|
|
3
3
|
|
|
4
4
|
import random
|
|
5
|
+
import shutil
|
|
6
|
+
import stat
|
|
5
7
|
import sys
|
|
6
8
|
import zipfile
|
|
7
9
|
from pathlib import Path
|
|
8
10
|
|
|
9
11
|
|
|
12
|
+
def _is_zip_symlink(member: zipfile.ZipInfo) -> bool:
|
|
13
|
+
return stat.S_ISLNK(member.external_attr >> 16)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def _is_safe_destination(output_root: Path, member_name: str) -> bool:
|
|
17
|
+
destination = output_root / member_name
|
|
18
|
+
return destination.resolve().is_relative_to(output_root.resolve())
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def _extract_member(archive: zipfile.ZipFile, member: zipfile.ZipInfo, output_root: Path):
|
|
22
|
+
destination = output_root / member.filename
|
|
23
|
+
if member.is_dir():
|
|
24
|
+
destination.mkdir(parents=True, exist_ok=True)
|
|
25
|
+
return
|
|
26
|
+
|
|
27
|
+
destination.parent.mkdir(parents=True, exist_ok=True)
|
|
28
|
+
with archive.open(member, "r") as source, open(destination, "wb") as target:
|
|
29
|
+
shutil.copyfileobj(source, target)
|
|
30
|
+
|
|
31
|
+
|
|
10
32
|
def extract_archive_safely(input_file: str | Path, output_dir: str | Path):
|
|
11
33
|
output_path = Path(output_dir)
|
|
12
34
|
output_path.mkdir(parents=True, exist_ok=True)
|
|
35
|
+
output_root = output_path.resolve()
|
|
13
36
|
|
|
14
37
|
with zipfile.ZipFile(input_file) as archive:
|
|
15
38
|
for member in archive.infolist():
|
|
16
|
-
|
|
17
|
-
|
|
39
|
+
if _is_zip_symlink(member):
|
|
40
|
+
raise ValueError(f"Unsafe archive entry: {member.filename}")
|
|
41
|
+
if not _is_safe_destination(output_root, member.filename):
|
|
18
42
|
raise ValueError(f"Unsafe archive entry: {member.filename}")
|
|
19
43
|
|
|
20
|
-
archive.
|
|
44
|
+
for member in archive.infolist():
|
|
45
|
+
_extract_member(archive, member, output_path)
|
|
21
46
|
|
|
22
47
|
|
|
23
48
|
def pretty_print_xml(output_path: Path):
|
|
@@ -2,22 +2,47 @@
|
|
|
2
2
|
"""Unpack and format XML contents of Office files (.docx, .pptx, .xlsx)"""
|
|
3
3
|
|
|
4
4
|
import random
|
|
5
|
+
import shutil
|
|
6
|
+
import stat
|
|
5
7
|
import sys
|
|
6
8
|
import zipfile
|
|
7
9
|
from pathlib import Path
|
|
8
10
|
|
|
9
11
|
|
|
12
|
+
def _is_zip_symlink(member: zipfile.ZipInfo) -> bool:
|
|
13
|
+
return stat.S_ISLNK(member.external_attr >> 16)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def _is_safe_destination(output_root: Path, member_name: str) -> bool:
|
|
17
|
+
destination = output_root / member_name
|
|
18
|
+
return destination.resolve().is_relative_to(output_root.resolve())
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def _extract_member(archive: zipfile.ZipFile, member: zipfile.ZipInfo, output_root: Path):
|
|
22
|
+
destination = output_root / member.filename
|
|
23
|
+
if member.is_dir():
|
|
24
|
+
destination.mkdir(parents=True, exist_ok=True)
|
|
25
|
+
return
|
|
26
|
+
|
|
27
|
+
destination.parent.mkdir(parents=True, exist_ok=True)
|
|
28
|
+
with archive.open(member, "r") as source, open(destination, "wb") as target:
|
|
29
|
+
shutil.copyfileobj(source, target)
|
|
30
|
+
|
|
31
|
+
|
|
10
32
|
def extract_archive_safely(input_file: str | Path, output_dir: str | Path):
|
|
11
33
|
output_path = Path(output_dir)
|
|
12
34
|
output_path.mkdir(parents=True, exist_ok=True)
|
|
35
|
+
output_root = output_path.resolve()
|
|
13
36
|
|
|
14
37
|
with zipfile.ZipFile(input_file) as archive:
|
|
15
38
|
for member in archive.infolist():
|
|
16
|
-
|
|
17
|
-
|
|
39
|
+
if _is_zip_symlink(member):
|
|
40
|
+
raise ValueError(f"Unsafe archive entry: {member.filename}")
|
|
41
|
+
if not _is_safe_destination(output_root, member.filename):
|
|
18
42
|
raise ValueError(f"Unsafe archive entry: {member.filename}")
|
|
19
43
|
|
|
20
|
-
archive.
|
|
44
|
+
for member in archive.infolist():
|
|
45
|
+
_extract_member(archive, member, output_path)
|
|
21
46
|
|
|
22
47
|
|
|
23
48
|
def pretty_print_xml(output_path: Path):
|
|
@@ -2,22 +2,47 @@
|
|
|
2
2
|
"""Unpack and format XML contents of Office files (.docx, .pptx, .xlsx)"""
|
|
3
3
|
|
|
4
4
|
import random
|
|
5
|
+
import shutil
|
|
6
|
+
import stat
|
|
5
7
|
import sys
|
|
6
8
|
import zipfile
|
|
7
9
|
from pathlib import Path
|
|
8
10
|
|
|
9
11
|
|
|
12
|
+
def _is_zip_symlink(member: zipfile.ZipInfo) -> bool:
|
|
13
|
+
return stat.S_ISLNK(member.external_attr >> 16)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def _is_safe_destination(output_root: Path, member_name: str) -> bool:
|
|
17
|
+
destination = output_root / member_name
|
|
18
|
+
return destination.resolve().is_relative_to(output_root.resolve())
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def _extract_member(archive: zipfile.ZipFile, member: zipfile.ZipInfo, output_root: Path):
|
|
22
|
+
destination = output_root / member.filename
|
|
23
|
+
if member.is_dir():
|
|
24
|
+
destination.mkdir(parents=True, exist_ok=True)
|
|
25
|
+
return
|
|
26
|
+
|
|
27
|
+
destination.parent.mkdir(parents=True, exist_ok=True)
|
|
28
|
+
with archive.open(member, "r") as source, open(destination, "wb") as target:
|
|
29
|
+
shutil.copyfileobj(source, target)
|
|
30
|
+
|
|
31
|
+
|
|
10
32
|
def extract_archive_safely(input_file: str | Path, output_dir: str | Path):
|
|
11
33
|
output_path = Path(output_dir)
|
|
12
34
|
output_path.mkdir(parents=True, exist_ok=True)
|
|
35
|
+
output_root = output_path.resolve()
|
|
13
36
|
|
|
14
37
|
with zipfile.ZipFile(input_file) as archive:
|
|
15
38
|
for member in archive.infolist():
|
|
16
|
-
|
|
17
|
-
|
|
39
|
+
if _is_zip_symlink(member):
|
|
40
|
+
raise ValueError(f"Unsafe archive entry: {member.filename}")
|
|
41
|
+
if not _is_safe_destination(output_root, member.filename):
|
|
18
42
|
raise ValueError(f"Unsafe archive entry: {member.filename}")
|
|
19
43
|
|
|
20
|
-
archive.
|
|
44
|
+
for member in archive.infolist():
|
|
45
|
+
_extract_member(archive, member, output_path)
|
|
21
46
|
|
|
22
47
|
|
|
23
48
|
def pretty_print_xml(output_path: Path):
|
|
@@ -2,22 +2,47 @@
|
|
|
2
2
|
"""Unpack and format XML contents of Office files (.docx, .pptx, .xlsx)"""
|
|
3
3
|
|
|
4
4
|
import random
|
|
5
|
+
import shutil
|
|
6
|
+
import stat
|
|
5
7
|
import sys
|
|
6
8
|
import zipfile
|
|
7
9
|
from pathlib import Path
|
|
8
10
|
|
|
9
11
|
|
|
12
|
+
def _is_zip_symlink(member: zipfile.ZipInfo) -> bool:
|
|
13
|
+
return stat.S_ISLNK(member.external_attr >> 16)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def _is_safe_destination(output_root: Path, member_name: str) -> bool:
|
|
17
|
+
destination = output_root / member_name
|
|
18
|
+
return destination.resolve().is_relative_to(output_root.resolve())
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def _extract_member(archive: zipfile.ZipFile, member: zipfile.ZipInfo, output_root: Path):
|
|
22
|
+
destination = output_root / member.filename
|
|
23
|
+
if member.is_dir():
|
|
24
|
+
destination.mkdir(parents=True, exist_ok=True)
|
|
25
|
+
return
|
|
26
|
+
|
|
27
|
+
destination.parent.mkdir(parents=True, exist_ok=True)
|
|
28
|
+
with archive.open(member, "r") as source, open(destination, "wb") as target:
|
|
29
|
+
shutil.copyfileobj(source, target)
|
|
30
|
+
|
|
31
|
+
|
|
10
32
|
def extract_archive_safely(input_file: str | Path, output_dir: str | Path):
|
|
11
33
|
output_path = Path(output_dir)
|
|
12
34
|
output_path.mkdir(parents=True, exist_ok=True)
|
|
35
|
+
output_root = output_path.resolve()
|
|
13
36
|
|
|
14
37
|
with zipfile.ZipFile(input_file) as archive:
|
|
15
38
|
for member in archive.infolist():
|
|
16
|
-
|
|
17
|
-
|
|
39
|
+
if _is_zip_symlink(member):
|
|
40
|
+
raise ValueError(f"Unsafe archive entry: {member.filename}")
|
|
41
|
+
if not _is_safe_destination(output_root, member.filename):
|
|
18
42
|
raise ValueError(f"Unsafe archive entry: {member.filename}")
|
|
19
43
|
|
|
20
|
-
archive.
|
|
44
|
+
for member in archive.infolist():
|
|
45
|
+
_extract_member(archive, member, output_path)
|
|
21
46
|
|
|
22
47
|
|
|
23
48
|
def pretty_print_xml(output_path: Path):
|
package/package.json
CHANGED