withub-cli 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/README.md +41 -0
- package/dist/commands/account.js +109 -0
- package/dist/commands/checkout.js +213 -0
- package/dist/commands/clone.js +263 -0
- package/dist/commands/commit.js +150 -0
- package/dist/commands/diff.js +159 -0
- package/dist/commands/fetch.js +169 -0
- package/dist/commands/init.js +125 -0
- package/dist/commands/invite.js +72 -0
- package/dist/commands/list.js +214 -0
- package/dist/commands/plumbing.js +98 -0
- package/dist/commands/pull.js +160 -0
- package/dist/commands/push.js +371 -0
- package/dist/commands/registerCommands.js +183 -0
- package/dist/commands/removeUser.js +63 -0
- package/dist/commands/stub.js +11 -0
- package/dist/commands/transfer.js +46 -0
- package/dist/commands/walrusBlob.js +50 -0
- package/dist/commands/walrusQuilt.js +282 -0
- package/dist/commands/workspace.js +260 -0
- package/dist/index.js +46 -0
- package/dist/lib/config.js +49 -0
- package/dist/lib/constants.js +6 -0
- package/dist/lib/fs.js +154 -0
- package/dist/lib/keys.js +224 -0
- package/dist/lib/manifest.js +37 -0
- package/dist/lib/quilt.js +38 -0
- package/dist/lib/repo.js +70 -0
- package/dist/lib/schema.js +53 -0
- package/dist/lib/seal.js +157 -0
- package/dist/lib/serialize.js +30 -0
- package/dist/lib/state.js +57 -0
- package/dist/lib/suiRepo.js +220 -0
- package/dist/lib/ui.js +51 -0
- package/dist/lib/validate.js +13 -0
- package/dist/lib/walrus.js +237 -0
- package/package.json +57 -0

package/dist/commands/fetch.js
@@ -0,0 +1,169 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fetchAction = fetchAction;
+const client_1 = require("@mysten/sui/client");
+const ui_1 = require("../lib/ui");
+const walrus_1 = require("../lib/walrus");
+const suiRepo_1 = require("../lib/suiRepo");
+const schema_1 = require("../lib/schema");
+const manifest_1 = require("../lib/manifest");
+const serialize_1 = require("../lib/serialize");
+const state_1 = require("../lib/state");
+const repo_1 = require("../lib/repo");
+const promises_1 = __importDefault(require("fs/promises"));
+const path_1 = __importDefault(require("path"));
+async function fetchAction() {
+    const witPath = await (0, repo_1.requireWitDir)();
+    const repoCfg = await (0, repo_1.readRepoConfig)(witPath);
+    if (!repoCfg.repo_id) {
+        throw new Error('Missing repo_id in .wit/config.json. Cannot fetch.');
+    }
+    // eslint-disable-next-line no-console
+    console.log(ui_1.colors.header('Fetching remote metadata...'));
+    const resolved = await (0, walrus_1.resolveWalrusConfig)(process.cwd());
+    const suiClient = new client_1.SuiClient({ url: resolved.suiRpcUrl });
+    const walrusSvc = await walrus_1.WalrusService.fromRepo();
+    const onchain = await (0, suiRepo_1.fetchRepositoryStateWithRetry)(suiClient, repoCfg.repo_id);
+    if (!onchain.headCommit || !onchain.headManifest || !onchain.headQuilt) {
+        // eslint-disable-next-line no-console
+        console.log(ui_1.colors.yellow('Remote repository has no head; nothing to fetch.'));
+        return;
+    }
+    if (onchain.sealPolicyId && repoCfg.seal_policy_id !== onchain.sealPolicyId) {
+        repoCfg.seal_policy_id = onchain.sealPolicyId;
+        await promises_1.default.writeFile(path_1.default.join(witPath, 'config.json'), JSON.stringify(repoCfg, null, 2) + '\n', 'utf8');
+    }
+    // Download manifest and commit for validation/cache
+    const manifest = await loadManifestCached(walrusSvc, witPath, onchain.headManifest);
+    const computedRoot = (0, manifest_1.computeRootHash)(Object.fromEntries(Object.entries(manifest.files).map(([rel, meta]) => [
+        rel,
+        { hash: meta.hash, size: meta.size, mode: meta.mode, mtime: meta.mtime },
+    ])));
+    if (computedRoot !== manifest.root_hash) {
+        throw new Error('Manifest root_hash mismatch; aborting fetch.');
+    }
+    const commit = await loadCommitCached(walrusSvc, witPath, onchain.headCommit);
+    if (commit.tree.root_hash !== manifest.root_hash) {
+        throw new Error('Commit root_hash does not match manifest; aborting fetch.');
+    }
+    // Download commit chain (and manifests) for history
+    const map = await readCommitIdMapSafe(witPath);
+    await downloadCommitChain(walrusSvc, onchain.headCommit, witPath, map);
+    await (0, state_1.writeCommitIdMap)(witPath, map);
+    // Update remote refs/state
+    await (0, repo_1.writeRemoteRef)(witPath, onchain.headCommit);
+    await (0, repo_1.writeRemoteState)(witPath, {
+        repo_id: repoCfg.repo_id,
+        head_commit: onchain.headCommit,
+        head_manifest: onchain.headManifest,
+        head_quilt: onchain.headQuilt,
+        version: onchain.version,
+    });
+    // eslint-disable-next-line no-console
+    console.log(ui_1.colors.green('Fetch complete (worktree unchanged).'));
+    // eslint-disable-next-line no-console
+    console.log(`Head: ${ui_1.colors.hash(onchain.headCommit)}`);
+    // eslint-disable-next-line no-console
+    console.log(`Manifest: ${ui_1.colors.hash(onchain.headManifest)}`);
+    // eslint-disable-next-line no-console
+    console.log(`Quilt: ${ui_1.colors.hash(onchain.headQuilt)}`);
+}
+async function cacheJson(filePath, content) {
+    await promises_1.default.mkdir(path_1.default.dirname(filePath), { recursive: true });
+    await promises_1.default.writeFile(filePath, content, 'utf8');
+}
+function parseRemoteCommit(buf) {
+    const parsed = JSON.parse(buf.toString('utf8'));
+    if (!parsed?.tree?.root_hash || !parsed?.tree?.manifest_id) {
+        throw new Error('Invalid remote commit object');
+    }
+    return parsed;
+}
+async function readCommitIdMapSafe(witPath) {
+    try {
+        return await (0, state_1.readCommitIdMap)(witPath);
+    }
+    catch {
+        return {};
+    }
+}
+async function loadManifestCached(walrusSvc, witPath, manifestId) {
+    const file = path_1.default.join(witPath, 'objects', 'manifests', `${(0, state_1.idToFileName)(manifestId)}.json`);
+    try {
+        const raw = await promises_1.default.readFile(file, 'utf8');
+        return schema_1.ManifestSchema.parse(JSON.parse(raw));
+    }
+    catch (err) {
+        if (err?.code !== 'ENOENT') {
+            // fall through to re-download if parse failed
+        }
+    }
+    const buf = Buffer.from(await walrusSvc.readBlob(manifestId));
+    const manifest = schema_1.ManifestSchema.parse(JSON.parse(buf.toString('utf8')));
+    await cacheJson(file, (0, serialize_1.canonicalStringify)(manifest));
+    return manifest;
+}
+async function loadCommitCached(walrusSvc, witPath, commitId) {
+    const file = path_1.default.join(witPath, 'objects', 'commits', `${(0, state_1.idToFileName)(commitId)}.json`);
+    try {
+        const raw = await promises_1.default.readFile(file, 'utf8');
+        return parseRemoteCommit(Buffer.from(raw, 'utf8'));
+    }
+    catch (err) {
+        if (err?.code !== 'ENOENT') {
+            // fall through to re-download if parse failed
+        }
+    }
+    const buf = Buffer.from(await walrusSvc.readBlob(commitId));
+    const commit = parseRemoteCommit(buf);
+    await cacheJson(file, buf.toString('utf8'));
+    return commit;
+}
+async function downloadCommitChain(walrusSvc, startId, witPath, map) {
+    const seen = new Set();
+    let current = startId;
+    while (current && !seen.has(current)) {
+        seen.add(current);
+        const commit = await loadCommitCached(walrusSvc, witPath, current);
+        if (!map[current]) {
+            map[current] = current;
+        }
+        if (commit.tree?.manifest_id) {
+            await ensureManifestCached(walrusSvc, witPath, commit.tree.manifest_id, commit.tree.root_hash);
+        }
+        current = commit.parent;
+    }
+}
+async function ensureManifestCached(walrusSvc, witPath, manifestId, expectedRoot) {
+    const file = path_1.default.join(witPath, 'objects', 'manifests', `${(0, state_1.idToFileName)(manifestId)}.json`);
+    try {
+        await promises_1.default.access(file);
+        const raw = await promises_1.default.readFile(file, 'utf8');
+        const manifest = schema_1.ManifestSchema.parse(JSON.parse(raw));
+        const computed = (0, manifest_1.computeRootHash)(Object.fromEntries(Object.entries(manifest.files).map(([rel, meta]) => [
+            rel,
+            { hash: meta.hash, size: meta.size, mode: meta.mode, mtime: meta.mtime },
+        ])));
+        if (computed === expectedRoot)
+            return;
+    }
+    catch (err) {
+        if (err?.code !== 'ENOENT') {
+            // fall through to refetch
+        }
+    }
+    // Fetch from Walrus
+    const buf = Buffer.from(await walrusSvc.readBlob(manifestId));
+    const manifest = schema_1.ManifestSchema.parse(JSON.parse(buf.toString('utf8')));
+    const computed = (0, manifest_1.computeRootHash)(Object.fromEntries(Object.entries(manifest.files).map(([rel, meta]) => [
+        rel,
+        { hash: meta.hash, size: meta.size, mode: meta.mode, mtime: meta.mtime },
+    ])));
+    if (computed !== expectedRoot) {
+        throw new Error('Fetched manifest root_hash mismatch; aborting.');
+    }
+    await cacheJson(file, (0, serialize_1.canonicalStringify)(manifest));
+}
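
The compiled fetch command above exports `fetchAction` as plain CommonJS, so it can be driven without the CLI wrapper. A minimal usage sketch, not taken from the package itself; it assumes `withub-cli` is installed, that the `dist/` paths in the file list are requirable (the package.json exports map is not shown in this diff), and that it runs in a directory whose `.wit/config.json` already contains a `repo_id`:

```js
// Illustrative sketch only: call the compiled fetch command directly.
// Assumes an initialized .wit/ repo with repo_id, plus network access to
// the configured Sui RPC and Walrus endpoints.
const { fetchAction } = require('withub-cli/dist/commands/fetch');

async function main() {
  // Roughly what `wit fetch` does: refresh remote refs/state under .wit/,
  // validate manifest/commit hashes, and leave the worktree untouched.
  await fetchAction();
}

main().catch((err) => {
  console.error(err.message);
  process.exit(1);
});
```
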
package/dist/commands/init.js
@@ -0,0 +1,125 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.initAction = initAction;
+const promises_1 = __importDefault(require("fs/promises"));
+const path_1 = __importDefault(require("path"));
+const keys_1 = require("../lib/keys");
+const DEFAULT_RELAYS = ['https://upload-relay.testnet.walrus.space'];
+const DEFAULT_NETWORK = 'testnet';
+const IGNORE_ENTRIES = ['.wit/', '~/.wit/keys', '.env.local', '*.pem', '.wit/seal'];
+async function initAction(name, options) {
+    const cwd = process.cwd();
+    const repoName = name || path_1.default.basename(cwd);
+    const witDir = path_1.default.join(cwd, '.wit');
+    await promises_1.default.mkdir(witDir, { recursive: true });
+    await ensureLayout(witDir);
+    const globalCfg = await readGlobalConfig();
+    const activeAddress = await (0, keys_1.readActiveAddress)();
+    const repoCfg = buildRepoConfig(repoName, globalCfg, activeAddress);
+    const wantsPrivate = options?.private || Boolean(options?.sealPolicy || options?.sealSecret);
+    if (wantsPrivate) {
+        // We mark it as pending. The actual policy ID will be generated on-chain during 'wit push'.
+        repoCfg.seal_policy_id = 'pending';
+        // eslint-disable-next-line no-console
+        console.log('Initialized as PRIVATE repository. Encryption will be enabled on first push.');
+    }
+    await writeConfigIfMissing(path_1.default.join(witDir, 'config.json'), repoCfg);
+    await ensureFile(path_1.default.join(witDir, 'HEAD'), 'refs/heads/main\n');
+    await ensureFile(path_1.default.join(witDir, 'refs', 'heads', 'main'), '');
+    await ensureFile(path_1.default.join(witDir, 'index'), '{}\n');
+    await ensureIgnoreFile(path_1.default.join(cwd, '.gitignore'), IGNORE_ENTRIES);
+    await ensureIgnoreFile(path_1.default.join(cwd, '.witignore'), IGNORE_ENTRIES);
+    // eslint-disable-next-line no-console
+    console.log(`Initialized wit repo scaffold in ${witDir}`);
+}
+async function ensureLayout(witDir) {
+    const subdirs = [
+        'refs/heads',
+        'refs/remotes',
+        'objects/blobs',
+        'objects/commits',
+        'objects/manifests',
+        'objects/quilts',
+        'objects/maps',
+        'state',
+    ];
+    await Promise.all(subdirs.map((dir) => promises_1.default.mkdir(path_1.default.join(witDir, dir), { recursive: true })));
+}
+async function readGlobalConfig() {
+    const home = process.env.HOME;
+    if (!home)
+        return {};
+    const file = path_1.default.join(home, '.witconfig');
+    try {
+        const raw = await promises_1.default.readFile(file, 'utf8');
+        return JSON.parse(raw);
+    }
+    catch (err) {
+        if (err?.code === 'ENOENT')
+            return {};
+        // eslint-disable-next-line no-console
+        console.warn(`Warning: could not read ${file}: ${err.message}`);
+        return {};
+    }
+}
+function buildRepoConfig(repoName, globalCfg, activeAddress) {
+    return {
+        repo_name: repoName,
+        repo_id: null,
+        network: globalCfg.network || DEFAULT_NETWORK,
+        relays: globalCfg.relays?.length ? globalCfg.relays : DEFAULT_RELAYS,
+        author: globalCfg.author || 'unknown',
+        key_alias: globalCfg.key_alias || 'default',
+        seal_policy_id: null,
+        created_at: new Date().toISOString(),
+    };
+}
+async function writeConfigIfMissing(file, cfg) {
+    try {
+        await promises_1.default.access(file);
+    }
+    catch (err) {
+        if (err?.code === 'ENOENT') {
+            await promises_1.default.writeFile(file, JSON.stringify(cfg, null, 2) + '\n', 'utf8');
+        }
+        else {
+            throw err;
+        }
+        return;
+    }
+    // eslint-disable-next-line no-console
+    console.log(`Config already exists, left untouched: ${file}`);
+}
+async function ensureFile(file, content = '') {
+    try {
+        await promises_1.default.access(file);
+        return;
+    }
+    catch (err) {
+        if (err?.code !== 'ENOENT')
+            throw err;
+    }
+    await promises_1.default.writeFile(file, content, 'utf8');
+}
+async function ensureIgnoreFile(file, entries) {
+    let existing = [];
+    try {
+        const raw = await promises_1.default.readFile(file, 'utf8');
+        existing = raw.split(/\r?\n/);
+    }
+    catch (err) {
+        if (err?.code !== 'ENOENT')
+            throw err;
+    }
+    const lines = existing.filter((line) => line.trim() !== '');
+    for (const entry of entries) {
+        if (!lines.includes(entry)) {
+            lines.push(entry);
+        }
+    }
+    const content = lines.join('\n') + '\n';
+    await promises_1.default.writeFile(file, content, 'utf8');
+}
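
`initAction(name, options)` above scaffolds the `.wit/` layout, the default config, and the ignore files. A minimal sketch of calling it directly, not taken from the package; the module path and the `private` flag follow the code shown above:

```js
// Illustrative sketch only: scaffold a .wit repo in the current directory.
const { initAction } = require('withub-cli/dist/commands/init');

async function main() {
  // The name falls back to path.basename(process.cwd()) when omitted.
  // { private: true } sets seal_policy_id to 'pending'; the real Seal policy
  // is created on-chain during the first push.
  await initAction('my-repo', { private: true });
}

main().catch((err) => {
  console.error(err.message);
  process.exit(1);
});
```
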
package/dist/commands/invite.js
@@ -0,0 +1,72 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.inviteAction = inviteAction;
+const client_1 = require("@mysten/sui/client");
+const ui_1 = require("../lib/ui");
+const repo_1 = require("../lib/repo");
+const walrus_1 = require("../lib/walrus");
+const keys_1 = require("../lib/keys");
+const suiRepo_1 = require("../lib/suiRepo");
+async function inviteAction(address) {
+    if (!address) {
+        throw new Error('Usage: wit invite <address>');
+    }
+    // eslint-disable-next-line no-console
+    console.log(ui_1.colors.header('Adding collaborator...'));
+    let witPath;
+    try {
+        witPath = await (0, repo_1.requireWitDir)();
+    }
+    catch (err) {
+        // eslint-disable-next-line no-console
+        console.log(ui_1.colors.red(err?.message || 'Not a wit repository. Run `wit init` first.'));
+        return;
+    }
+    const repoCfg = await (0, repo_1.readRepoConfig)(witPath);
+    if (!repoCfg.repo_id) {
+        throw new Error('Missing repo_id. Run `wit push` once to create the remote repository.');
+    }
+    const signerInfo = await (0, keys_1.loadSigner)();
+    const resolved = await (0, walrus_1.resolveWalrusConfig)(process.cwd());
+    const suiClient = new client_1.SuiClient({ url: resolved.suiRpcUrl });
+    // Check if repo is private by fetching on-chain state
+    // We could rely on local config, but on-chain is truth.
+    let whitelistId;
+    try {
+        const state = await (0, suiRepo_1.fetchRepositoryState)(suiClient, repoCfg.repo_id);
+        if (state.sealPolicyId) {
+            whitelistId = state.sealPolicyId;
+            // Update local config if missing
+            if (repoCfg.seal_policy_id !== whitelistId) {
+                repoCfg.seal_policy_id = whitelistId;
+                await (0, repo_1.writeRepoConfig)(witPath, repoCfg);
+            }
+        }
+    }
+    catch (err) {
+        // ignore fetch error, assume public or will fail later
+    }
+    try {
+        await (0, suiRepo_1.addCollaborator)(suiClient, signerInfo.signer, {
+            repoId: repoCfg.repo_id,
+            collaborator: address,
+            whitelistId
+        });
+        // eslint-disable-next-line no-console
+        console.log(ui_1.colors.green(`Added ${ui_1.colors.hash(address)} as collaborator.`));
+        if (whitelistId) {
+            // eslint-disable-next-line no-console
+            console.log(ui_1.colors.cyan(`User added to Whitelist (${whitelistId}). They can now decrypt the repository.`));
+        }
+    }
+    catch (err) {
+        const msg = err?.message || String(err);
+        if (msg.includes('ENotAuthorized') || msg.includes('NotAuthorized')) {
+            // eslint-disable-next-line no-console
+            console.log(ui_1.colors.red('Add failed: current account is not authorized (owner or collaborator required).'));
+            return;
+        }
+        // eslint-disable-next-line no-console
+        console.log(ui_1.colors.red(`Add collaborator failed: ${msg}`));
+    }
+}
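
`inviteAction(address)` above adds a collaborator on-chain and, for private repositories, to the Seal whitelist. A minimal sketch, not taken from the package; the address is a placeholder:

```js
// Illustrative sketch only: add a collaborator by Sui address.
// Requires an existing remote repo (repo_id set by a prior push) and a
// signer that is the owner or an authorized collaborator.
const { inviteAction } = require('withub-cli/dist/commands/invite');

async function main() {
  await inviteAction('0x...'); // placeholder Sui address
}

main().catch((err) => {
  console.error(err.message);
  process.exit(1);
});
```
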
package/dist/commands/list.js
@@ -0,0 +1,214 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.listAction = listAction;
+const client_1 = require("@mysten/sui/client");
+const keys_1 = require("../lib/keys");
+const walrus_1 = require("../lib/walrus");
+const constants_1 = require("../lib/constants");
+const suiRepo_1 = require("../lib/suiRepo");
+async function listAction(options) {
+    const signer = await (0, keys_1.loadSigner)();
+    const address = signer.address;
+    console.log(`Listing repositories for ${address}...`);
+    const config = await (0, walrus_1.resolveWalrusConfig)();
+    const client = new client_1.SuiClient({ url: config.suiRpcUrl });
+    const repos = new Map();
+    // Helper to fetch events
+    const fetchEvents = async (query) => {
+        let cursor = null;
+        let hasNextPage = true;
+        const results = [];
+        while (hasNextPage) {
+            const resp = await client.queryEvents({
+                query,
+                cursor,
+                limit: 50,
+            });
+            results.push(...resp.data);
+            cursor = resp.nextCursor;
+            hasNextPage = resp.hasNextPage;
+        }
+        return results;
+    };
+    // 1. Fetch Owned Repos (Filter by Sender = Me)
+    if (!options.collaborated) {
+        try {
+            const createdEvents = await fetchEvents({
+                Sender: address,
+            });
+            const ownershipEvents = await fetchEvents({
+                MoveEventType: `${constants_1.WIT_PACKAGE_ID}::${constants_1.WIT_MODULE_NAME}::OwnershipTransferredEvent`,
+            });
+            for (const event of createdEvents) {
+                if (event.type === `${constants_1.WIT_PACKAGE_ID}::${constants_1.WIT_MODULE_NAME}::RepositoryCreatedEvent`) {
+                    const parsed = event.parsedJson;
+                    repos.set(parsed.repo_id, {
+                        id: parsed.repo_id,
+                        name: (0, suiRepo_1.decodeVecAsString)(parsed.name) || 'Unknown',
+                        role: 'Owner',
+                    });
+                }
+            }
+            for (const event of ownershipEvents) {
+                const parsed = event.parsedJson;
+                if (parsed.new_owner !== address)
+                    continue;
+                const repoId = parsed.repo_id;
+                const existing = repos.get(repoId);
+                if (existing) {
+                    existing.role = 'Owner';
+                }
+                else {
+                    repos.set(repoId, {
+                        id: repoId,
+                        name: 'Loading...',
+                        role: 'Owner',
+                    });
+                }
+            }
+        }
+        catch (e) {
+            console.warn('Failed to fetch owned repositories:', e);
+        }
+    }
+    // 2. Collaborators: process add/remove events to derive membership
+    if (!options.owned) {
+        try {
+            const addedEvents = await fetchEvents({
+                MoveEventType: `${constants_1.WIT_PACKAGE_ID}::${constants_1.WIT_MODULE_NAME}::CollaboratorAddedEvent`,
+            });
+            const removedEvents = await fetchEvents({
+                MoveEventType: `${constants_1.WIT_PACKAGE_ID}::${constants_1.WIT_MODULE_NAME}::CollaboratorRemovedEvent`,
+            });
+            const events = [];
+            for (const e of addedEvents) {
+                const parsed = e.parsedJson;
+                if (parsed.user_address !== address)
+                    continue;
+                events.push({
+                    kind: 'add',
+                    repoId: parsed.repo_id,
+                    ts: Number(e.timestampMs ?? 0),
+                    tx: e.id?.txDigest ?? '',
+                    seq: BigInt(e.id?.eventSeq ?? 0),
+                });
+            }
+            for (const e of removedEvents) {
+                const parsed = e.parsedJson;
+                if (parsed.user_address !== address)
+                    continue;
+                events.push({
+                    kind: 'remove',
+                    repoId: parsed.repo_id,
+                    ts: Number(e.timestampMs ?? 0),
+                    tx: e.id?.txDigest ?? '',
+                    seq: BigInt(e.id?.eventSeq ?? 0),
+                });
+            }
+            events.sort((a, b) => {
+                if (a.ts !== b.ts)
+                    return a.ts - b.ts;
+                if (a.tx !== b.tx)
+                    return a.tx.localeCompare(b.tx);
+                return a.seq < b.seq ? -1 : a.seq > b.seq ? 1 : 0;
+            });
+            const membership = new Map();
+            for (const ev of events) {
+                if (ev.kind === 'add')
+                    membership.set(ev.repoId, true);
+                else
+                    membership.set(ev.repoId, false);
+            }
+            for (const [repoId, present] of membership.entries()) {
+                if (!present)
+                    continue;
+                if (!repos.has(repoId)) {
+                    repos.set(repoId, {
+                        id: repoId,
+                        name: 'Loading...', // Placeholder, will fetch below
+                        role: 'Collaborator',
+                    });
+                }
+            }
+        }
+        catch (e) {
+            console.warn('Failed to fetch collaborated repositories:', e);
+        }
+    }
+    // 3. Fetch missing names for collaborated repos
+    const missingNames = Array.from(repos.values()).filter((r) => r.name === 'Loading...');
+    if (missingNames.length > 0) {
+        const ids = missingNames.map((r) => r.id);
+        const chunkSize = 50;
+        for (let i = 0; i < ids.length; i += chunkSize) {
+            const chunk = ids.slice(i, i + chunkSize);
+            try {
+                const objects = await client.multiGetObjects({
+                    ids: chunk,
+                    options: { showContent: true },
+                });
+                for (const obj of objects) {
+                    if (obj.data?.content?.dataType === 'moveObject') {
+                        const fields = obj.data.content.fields;
+                        const name = (0, suiRepo_1.decodeVecAsString)(fields.name) || 'Unknown';
+                        const r = repos.get(obj.data.objectId);
+                        if (r)
+                            r.name = name;
+                    }
+                }
+            }
+            catch (e) {
+                console.warn('Failed to fetch repo details:', e);
+            }
+        }
+    }
+    // 4. Re-evaluate roles against current on-chain state (handles ownership transfer/removal)
+    const allRepoIds = Array.from(repos.keys());
+    if (allRepoIds.length) {
+        const chunkSize = 50;
+        for (let i = 0; i < allRepoIds.length; i += chunkSize) {
+            const chunk = allRepoIds.slice(i, i + chunkSize);
+            try {
+                const objects = await client.multiGetObjects({
+                    ids: chunk,
+                    options: { showContent: true },
+                });
+                for (const obj of objects) {
+                    const rec = repos.get(obj.data?.objectId || '');
+                    if (!rec)
+                        continue;
+                    if (obj.data?.content?.dataType !== 'moveObject') {
+                        repos.delete(rec.id);
+                        continue;
+                    }
+                    const fields = obj.data.content.fields;
+                    const owner = fields.owner;
+                    const collaborators = fields.collaborators || [];
+                    const isOwner = owner === address;
+                    const isCollab = collaborators.includes(address);
+                    if (isOwner) {
+                        rec.role = 'Owner';
+                    }
+                    else if (isCollab) {
+                        rec.role = 'Collaborator';
+                    }
+                    else {
+                        repos.delete(rec.id);
+                    }
+                    if ((!rec.name || rec.name === 'Loading...') && fields.name) {
+                        rec.name = (0, suiRepo_1.decodeVecAsString)(fields.name) || rec.name;
+                    }
+                }
+            }
+            catch (e) {
+                console.warn('Failed to reconcile roles from objects:', e);
+            }
+        }
+    }
+    if (repos.size === 0) {
+        console.log('No repositories found.');
+        return;
+    }
+    const list = Array.from(repos.values()).sort((a, b) => a.name.localeCompare(b.name));
+    console.table(list);
+}
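
`listAction(options)` above reconstructs the repository list from on-chain events and then reconciles it against current object state. A minimal sketch, not taken from the package; the `owned`/`collaborated` flags mirror the option checks in the code:

```js
// Illustrative sketch only: list repositories visible to the active signer.
const { listAction } = require('withub-cli/dist/commands/list');

async function main() {
  // {} lists both roles; { owned: true } or { collaborated: true } narrows it.
  await listAction({});
}

main().catch((err) => {
  console.error(err.message);
  process.exit(1);
});
```
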
package/dist/commands/plumbing.js
@@ -0,0 +1,98 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.pushBlobAction = pushBlobAction;
+exports.pullBlobAction = pullBlobAction;
+const promises_1 = __importDefault(require("fs/promises"));
+const path_1 = __importDefault(require("path"));
+const crypto_1 = require("crypto");
+const ui_1 = require("../lib/ui");
+const walrus_1 = require("../lib/walrus");
+const keys_1 = require("../lib/keys");
+async function pushBlobAction(filePath, opts) {
+    const fullPath = path_1.default.resolve(process.cwd(), filePath);
+    try {
+        await promises_1.default.access(fullPath);
+    }
+    catch {
+        throw new Error(`File not found: ${filePath}`);
+    }
+    const content = await promises_1.default.readFile(fullPath);
+    const hash = (0, crypto_1.createHash)('sha256').update(content).digest('hex');
+    const size = content.length;
+    // eslint-disable-next-line no-console
+    console.log(ui_1.colors.gray(`File: ${filePath} (${size} bytes)`));
+    // eslint-disable-next-line no-console
+    console.log(ui_1.colors.gray(`SHA-256: ${hash}`));
+    const { signer, address } = await (0, keys_1.loadSigner)();
+    // eslint-disable-next-line no-console
+    console.log(ui_1.colors.gray(`Signer: ${ui_1.colors.hash(address)}`));
+    const resources = await (0, keys_1.checkResources)(address);
+    if (resources.error) {
+        console.warn(ui_1.colors.yellow(`Warning: Failed to check resources: ${resources.error}`));
+    }
+    else if (resources.hasMinSui === false) {
+        throw new Error(`Insufficient SUI balance. Need at least 1 SUI.`);
+    }
+    const epochs = opts.epochs ? parseInt(opts.epochs, 10) : 1;
+    if (isNaN(epochs) || epochs < 1) {
+        throw new Error('Epochs must be a positive integer.');
+    }
+    // eslint-disable-next-line no-console
+    console.log(ui_1.colors.blue(`Uploading to Walrus (epochs=${epochs})...`));
+    const service = await walrus_1.WalrusService.fromRepo();
+    const result = await service.writeBlob({
+        blob: content,
+        signer,
+        epochs,
+    });
+    const blobId = result.blobId;
+    // eslint-disable-next-line no-console
+    console.log(ui_1.colors.green(`Blob ID: ${blobId}`));
+    if (result.blobObject?.id?.id) {
+        // eslint-disable-next-line no-console
+        console.log(ui_1.colors.gray(`Sui Object: ${result.blobObject.id.id}`));
+    }
+}
+async function pullBlobAction(blobId, opts) {
+    if (!blobId) {
+        throw new Error('Blob ID is required.');
+    }
+    // eslint-disable-next-line no-console
+    console.log(ui_1.colors.blue(`Downloading blob ${blobId}...`));
+    const service = await walrus_1.WalrusService.fromRepo();
+    let content;
+    try {
+        content = await service.readBlob(blobId);
+    }
+    catch (err) {
+        throw new Error(`Failed to download blob: ${err.message}`);
+    }
+    const hash = (0, crypto_1.createHash)('sha256').update(content).digest('hex');
+    // eslint-disable-next-line no-console
+    console.log(ui_1.colors.gray(`Downloaded ${content.length} bytes`));
+    // eslint-disable-next-line no-console
+    console.log(ui_1.colors.gray(`SHA-256: ${hash}`));
+    if (opts.out) {
+        const outPath = path_1.default.resolve(process.cwd(), opts.out);
+        await promises_1.default.writeFile(outPath, content);
+        // eslint-disable-next-line no-console
+        console.log(ui_1.colors.green(`Saved to ${opts.out}`));
+    }
+    else {
+        // Try to detect if binary
+        const isBinary = content.some((b) => b === 0);
+        if (isBinary) {
+            // eslint-disable-next-line no-console
+            console.log(ui_1.colors.yellow('Binary content suppressed. Use --out to save to file.'));
+        }
+        else {
+            // eslint-disable-next-line no-console
+            console.log(ui_1.colors.header('Content:'));
+            // eslint-disable-next-line no-console
+            console.log(new TextDecoder().decode(content));
+        }
+    }
+}
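
The plumbing commands above push and pull raw Walrus blobs outside of any commit. A minimal sketch, not taken from the package; the file path and blob ID are placeholders, and `epochs` is passed as a string because the code runs it through `parseInt`:

```js
// Illustrative sketch only: raw Walrus blob plumbing.
const { pushBlobAction, pullBlobAction } = require('withub-cli/dist/commands/plumbing');

async function main() {
  // Upload a file for 2 storage epochs; the signer needs at least 1 SUI.
  await pushBlobAction('./notes.txt', { epochs: '2' });

  // Download a blob by ID and write it to disk instead of printing it.
  await pullBlobAction('<blob-id>', { out: './notes-copy.txt' });
}

main().catch((err) => {
  console.error(err.message);
  process.exit(1);
});
```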