@fyresmith/hive-server 1.0.1-3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +143 -0
- package/bin/hive.js +4 -0
- package/cli/checks.js +28 -0
- package/cli/config.js +33 -0
- package/cli/constants.js +45 -0
- package/cli/env-file.js +141 -0
- package/cli/errors.js +11 -0
- package/cli/exec.js +12 -0
- package/cli/main.js +730 -0
- package/cli/output.js +21 -0
- package/cli/service.js +360 -0
- package/cli/tunnel.js +238 -0
- package/index.js +129 -0
- package/lib/auth.js +50 -0
- package/lib/socketHandler.js +226 -0
- package/lib/vaultManager.js +258 -0
- package/lib/yjsServer.js +277 -0
- package/package.json +52 -0
- package/routes/auth.js +99 -0
package/README.md
ADDED
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
# Hive Server
|
|
2
|
+
|
|
3
|
+
Hive server now ships with a first-class `hive` operations CLI for install, setup, tunnel management, env management, and service lifecycle.
|
|
4
|
+
|
|
5
|
+
## Install
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
npm i -g @fyresmith/hive-server
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
The global install exposes:
|
|
12
|
+
|
|
13
|
+
```bash
|
|
14
|
+
hive --help
|
|
15
|
+
```
|
|
16
|
+
|
|
17
|
+
To build/verify the current local checkout and install it globally:
|
|
18
|
+
|
|
19
|
+
```bash
|
|
20
|
+
npm run install-hive
|
|
21
|
+
```
|
|
22
|
+
|
|
23
|
+
## Release System (GitHub Actions + npm)
|
|
24
|
+
|
|
25
|
+
This repo now includes a release pipeline for `hive-server`:
|
|
26
|
+
|
|
27
|
+
- CI: `.github/workflows/hive-server-ci.yml`
|
|
28
|
+
- Release tag workflow: `.github/workflows/hive-server-release-tag.yml`
|
|
29
|
+
- npm publish workflow: `.github/workflows/hive-server-publish.yml`
|
|
30
|
+
|
|
31
|
+
### One-time repo setup
|
|
32
|
+
|
|
33
|
+
Configure npm Trusted Publisher for this repo/workflow:
|
|
34
|
+
|
|
35
|
+
- Package: `hive-server`
|
|
36
|
+
- Provider: GitHub Actions
|
|
37
|
+
- Repository: this repository
|
|
38
|
+
- Workflow file: `.github/workflows/hive-server-publish.yml`
|
|
39
|
+
|
|
40
|
+
No `NPM_TOKEN` secret is required when Trusted Publishing is configured.
|
|
41
|
+
|
|
42
|
+
### How releases work
|
|
43
|
+
|
|
44
|
+
1. Run workflow `hive-server-release-tag` from the default branch.
|
|
45
|
+
2. Choose release type (`patch|minor|major|prerelease|custom`).
|
|
46
|
+
3. Workflow bumps `package.json`, commits, and pushes tag `hive-server-vX.Y.Z`.
|
|
47
|
+
4. Tag push triggers `hive-server-publish`, which:
|
|
48
|
+
- verifies the package
|
|
49
|
+
- checks tag version matches `package.json`
|
|
50
|
+
- publishes to npm with provenance
|
|
51
|
+
- creates a GitHub Release with generated notes
|
|
52
|
+
|
|
53
|
+
## Fast Path
|
|
54
|
+
|
|
55
|
+
Run the guided setup:
|
|
56
|
+
|
|
57
|
+
```bash
|
|
58
|
+
hive setup
|
|
59
|
+
```
|
|
60
|
+
|
|
61
|
+
Non-interactive defaults:
|
|
62
|
+
|
|
63
|
+
```bash
|
|
64
|
+
hive setup --yes
|
|
65
|
+
```
|
|
66
|
+
|
|
67
|
+
`hive setup` can:
|
|
68
|
+
|
|
69
|
+
- initialize and validate `.env`
|
|
70
|
+
- configure Cloudflare Tunnel
|
|
71
|
+
- sync `DISCORD_REDIRECT_URI`
|
|
72
|
+
- install Hive as a launchd/systemd service
|
|
73
|
+
- run post-setup checks
|
|
74
|
+
|
|
75
|
+
## Config and Env
|
|
76
|
+
|
|
77
|
+
Operator config:
|
|
78
|
+
|
|
79
|
+
- `~/.hive/config.json`
|
|
80
|
+
|
|
81
|
+
Default env location:
|
|
82
|
+
|
|
83
|
+
- `~/.hive/server/.env`
|
|
84
|
+
|
|
85
|
+
Env commands:
|
|
86
|
+
|
|
87
|
+
```bash
|
|
88
|
+
hive env init
|
|
89
|
+
hive env edit
|
|
90
|
+
hive env check
|
|
91
|
+
hive env print
|
|
92
|
+
```
|
|
93
|
+
|
|
94
|
+
## Tunnel Operations
|
|
95
|
+
|
|
96
|
+
```bash
|
|
97
|
+
hive tunnel setup
|
|
98
|
+
hive tunnel status
|
|
99
|
+
hive tunnel run
|
|
100
|
+
hive tunnel service-install
|
|
101
|
+
hive tunnel service-status
|
|
102
|
+
```
|
|
103
|
+
|
|
104
|
+
## Server Service Operations
|
|
105
|
+
|
|
106
|
+
```bash
|
|
107
|
+
hive service install
|
|
108
|
+
hive service start
|
|
109
|
+
hive service stop
|
|
110
|
+
hive service restart
|
|
111
|
+
hive service status
|
|
112
|
+
hive service logs
|
|
113
|
+
hive service uninstall
|
|
114
|
+
```
|
|
115
|
+
|
|
116
|
+
## Runtime and Diagnostics
|
|
117
|
+
|
|
118
|
+
Run directly in foreground:
|
|
119
|
+
|
|
120
|
+
```bash
|
|
121
|
+
hive run
|
|
122
|
+
```
|
|
123
|
+
|
|
124
|
+
Diagnostics:
|
|
125
|
+
|
|
126
|
+
```bash
|
|
127
|
+
hive doctor
|
|
128
|
+
hive status
|
|
129
|
+
```
|
|
130
|
+
|
|
131
|
+
## Migration Notes
|
|
132
|
+
|
|
133
|
+
On first `hive setup`, if legacy `server/.env` exists and no `~/.hive/config.json` exists, setup will offer to import legacy env values.
|
|
134
|
+
|
|
135
|
+
## Legacy Scripts (Deprecated)
|
|
136
|
+
|
|
137
|
+
Legacy operational files are still present for one release cycle:
|
|
138
|
+
|
|
139
|
+
- `/setup-tunnel.sh`
|
|
140
|
+
- `/infra/cloudflare-tunnel.yml`
|
|
141
|
+
- `/infra/collab-server.service`
|
|
142
|
+
|
|
143
|
+
Use `hive` commands instead of editing legacy templates manually.
|
package/bin/hive.js
ADDED
package/cli/checks.js
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
import { createServer } from 'net';
|
|
2
|
+
import { access } from 'fs/promises';
|
|
3
|
+
|
|
4
|
+
/**
 * Check whether a TCP port can currently be bound on the loopback interface.
 * Resolves true when a throwaway server binds successfully, false on any
 * listen error (e.g. EADDRINUSE, EACCES). Never rejects.
 *
 * @param {number} port - Port number to probe.
 * @returns {Promise<boolean>} true if the port is free on 127.0.0.1.
 */
export async function isPortAvailable(port) {
  return await new Promise((resolve) => {
    const probe = createServer();
    probe.once('error', () => {
      resolve(false);
    });
    probe.once('listening', () => {
      // Successfully bound: release the port before reporting it free.
      probe.close(() => {
        resolve(true);
      });
    });
    probe.listen(port, '127.0.0.1');
  });
}
|
|
14
|
+
|
|
15
|
+
/**
 * Report whether a filesystem path exists and is accessible.
 * Falsy input (empty string, null, undefined) is treated as "does not exist".
 *
 * @param {string} pathValue - Path to check.
 * @returns {Promise<boolean>} true when access() succeeds.
 */
export async function pathExists(pathValue) {
  if (!pathValue) return false;
  return access(pathValue).then(
    () => true,
    () => false,
  );
}
|
|
24
|
+
|
|
25
|
+
/**
 * Validate a hostname/domain string.
 *
 * The previous check (`/^[a-zA-Z0-9.-]+$/` plus "contains a dot") accepted
 * clearly invalid values such as `a..b`, `-a.com`, `.example.com` and
 * `example.com.`. This version validates per DNS label: 1-63 alphanumeric
 * or hyphen characters, no leading/trailing hyphen, and at least two labels.
 * All previously-valid real domains still pass.
 *
 * @param {string} domain - Candidate domain name.
 * @returns {boolean} true when the value looks like a valid multi-label domain.
 */
export function validateDomain(domain) {
  if (!domain) return false;
  const label = '[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?';
  const domainRe = new RegExp(`^(?:${label}\\.)+${label}$`);
  return domainRe.test(domain);
}
|
package/cli/config.js
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import { mkdir, readFile, writeFile } from 'fs/promises';
|
|
2
|
+
import { existsSync } from 'fs';
|
|
3
|
+
import { dirname } from 'path';
|
|
4
|
+
import { DEFAULT_CONFIG, HIVE_CONFIG_FILE } from './constants.js';
|
|
5
|
+
|
|
6
|
+
/**
 * Absolute path of the operator config file (~/.hive/config.json).
 *
 * @returns {string} The config file path.
 */
export function getConfigPath() {
  return HIVE_CONFIG_FILE;
}
|
|
9
|
+
|
|
10
|
+
/**
 * Create the directory that holds the Hive config file (and any missing
 * parents). No-op when it already exists.
 */
export async function ensureConfigDir() {
  const configDir = dirname(HIVE_CONFIG_FILE);
  await mkdir(configDir, { recursive: true });
}
|
|
13
|
+
|
|
14
|
+
/**
 * Load the operator config, merged over DEFAULT_CONFIG so new keys pick up
 * defaults. Returns a fresh copy of DEFAULT_CONFIG when no config file exists.
 *
 * Improvement over the original: a corrupt config file previously surfaced as
 * a bare SyntaxError with no indication of which file was broken; now it is
 * wrapped in a descriptive Error carrying the original as `cause`.
 *
 * @returns {Promise<object>} Effective configuration.
 * @throws {Error} When the config file exists but is not valid JSON.
 */
export async function loadHiveConfig() {
  if (!existsSync(HIVE_CONFIG_FILE)) {
    return { ...DEFAULT_CONFIG };
  }
  const raw = await readFile(HIVE_CONFIG_FILE, 'utf-8');
  let parsed;
  try {
    parsed = JSON.parse(raw);
  } catch (err) {
    throw new Error(`Invalid JSON in ${HIVE_CONFIG_FILE}`, { cause: err });
  }
  return { ...DEFAULT_CONFIG, ...parsed };
}
|
|
22
|
+
|
|
23
|
+
/**
 * Persist the config object as pretty-printed JSON (2-space indent,
 * trailing newline), creating the config directory first if needed.
 *
 * @param {object} config - Full config object to write.
 */
export async function saveHiveConfig(config) {
  await ensureConfigDir();
  const body = JSON.stringify(config, null, 2);
  await writeFile(HIVE_CONFIG_FILE, `${body}\n`, 'utf-8');
}
|
|
27
|
+
|
|
28
|
+
/**
 * Shallow-merge a patch into the stored config, persist the result,
 * and return the new effective config.
 *
 * @param {object} patch - Keys to overwrite in the current config.
 * @returns {Promise<object>} The merged, saved configuration.
 */
export async function updateHiveConfig(patch) {
  const next = { ...(await loadHiveConfig()), ...patch };
  await saveHiveConfig(next);
  return next;
}
|
package/cli/constants.js
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import { homedir } from 'os';
|
|
2
|
+
import { dirname, join } from 'path';
|
|
3
|
+
import { fileURLToPath } from 'url';
|
|
4
|
+
|
|
5
|
+
// ESM has no __dirname; derive it from import.meta.url so paths resolve
// relative to this module rather than the process CWD.
const __dirname = dirname(fileURLToPath(import.meta.url));

// Package root: one level above cli/.
export const SERVER_ROOT = join(__dirname, '..');
// Per-user operator state directory.
export const HIVE_HOME = join(homedir(), '.hive');
// Operator config (JSON) inside HIVE_HOME.
export const HIVE_CONFIG_FILE = join(HIVE_HOME, 'config.json');
// Default location for the server's .env file.
export const DEFAULT_ENV_FILE = join(HIVE_HOME, 'server', '.env');
// Pre-CLI env location, kept for one release cycle of migration support.
export const LEGACY_ENV_FILE = join(SERVER_ROOT, '.env');
// Placeholder domain used until the operator configures their own.
export const DEFAULT_DOMAIN = 'collab.example.com';
export const DEFAULT_TUNNEL_NAME = 'hive';
// Standard cloudflared locations under the user's home directory.
export const DEFAULT_CLOUDFLARED_CONFIG = join(homedir(), '.cloudflared', 'config.yml');
export const DEFAULT_CLOUDFLARED_CERT = join(homedir(), '.cloudflared', 'cert.pem');

// Env keys that must be present and non-empty for the server to run.
// Order here also defines their ordering in serialized .env files.
export const REQUIRED_ENV_KEYS = [
  'DISCORD_CLIENT_ID',
  'DISCORD_CLIENT_SECRET',
  'DISCORD_REDIRECT_URI',
  'DISCORD_GUILD_ID',
  'JWT_SECRET',
  'VAULT_PATH',
  'PORT',
  'YJS_PORT',
];

// Defaults applied when the operator leaves these keys unset.
export const DEFAULT_ENV_VALUES = {
  PORT: '3000',
  YJS_PORT: '3001',
};

// Baseline operator config; merged under any values read from HIVE_CONFIG_FILE.
export const DEFAULT_CONFIG = {
  version: 1,
  envFile: DEFAULT_ENV_FILE,
  domain: DEFAULT_DOMAIN,
  tunnelName: DEFAULT_TUNNEL_NAME,
  cloudflaredConfigFile: DEFAULT_CLOUDFLARED_CONFIG,
};

// Process exit codes used by the CLI.
export const EXIT = {
  OK: 0,
  FAIL: 1,
  PREREQ: 2, // missing prerequisite (e.g. cloudflared not installed)
};
|
package/cli/env-file.js
ADDED
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
import dotenv from 'dotenv';
|
|
2
|
+
import prompts from 'prompts';
|
|
3
|
+
import { access, mkdir, readFile, writeFile } from 'fs/promises';
|
|
4
|
+
import { existsSync } from 'fs';
|
|
5
|
+
import { dirname } from 'path';
|
|
6
|
+
import { DEFAULT_ENV_VALUES, REQUIRED_ENV_KEYS } from './constants.js';
|
|
7
|
+
import { CliError } from './errors.js';
|
|
8
|
+
|
|
9
|
+
// Env keys matching this pattern are treated as secrets and masked in output.
const SECRET_RE = /(SECRET|TOKEN|JWT|PASSWORD|KEY)/i;

/**
 * Comparator for env keys: required keys sort first, in REQUIRED_ENV_KEYS
 * order; all remaining keys follow alphabetically.
 *
 * @param {string} a
 * @param {string} b
 * @returns {number} Standard comparator result.
 */
function envSortKey(a, b) {
  const rankA = REQUIRED_ENV_KEYS.indexOf(a);
  const rankB = REQUIRED_ENV_KEYS.indexOf(b);
  if (rankA !== -1 && rankB !== -1) return rankA - rankB;
  if (rankA !== -1) return -1;
  if (rankB !== -1) return 1;
  return a.localeCompare(b);
}
|
|
19
|
+
|
|
20
|
+
/**
 * Read and parse an env file with dotenv.
 * Returns an empty object when the file does not exist.
 *
 * @param {string} envFile - Path to the .env file.
 * @returns {Promise<Record<string, string>>} Parsed key/value pairs.
 */
export async function loadEnvFile(envFile) {
  if (!existsSync(envFile)) {
    return {};
  }
  return dotenv.parse(await readFile(envFile, 'utf-8'));
}
|
|
25
|
+
|
|
26
|
+
/**
 * Return a copy of the env map with secret-looking values masked.
 * Masking rules: empty secrets stay empty, short secrets (<= 6 chars)
 * become '******', longer secrets keep only the first and last two chars.
 * Non-secret keys pass through unchanged.
 *
 * @param {Record<string, string>} values - Raw env key/value pairs.
 * @returns {Record<string, string>} Redacted copy safe for display.
 */
export function redactEnv(values) {
  const redacted = {};
  for (const [key, value] of Object.entries(values)) {
    if (!SECRET_RE.test(key)) {
      redacted[key] = value;
      continue;
    }
    if (!value) {
      redacted[key] = '';
    } else if (value.length <= 6) {
      redacted[key] = '******';
    } else {
      redacted[key] = `${value.slice(0, 2)}***${value.slice(-2)}`;
    }
  }
  return redacted;
}
|
|
39
|
+
|
|
40
|
+
/**
 * Merge caller values over DEFAULT_ENV_VALUES and guarantee every required
 * key exists (missing ones become ''), so downstream code never sees
 * undefined for a required key.
 *
 * @param {Record<string, string>} values - Env values to normalize.
 * @returns {Record<string, string>} Normalized copy; input is not mutated.
 */
export function normalizeEnv(values) {
  const merged = { ...DEFAULT_ENV_VALUES, ...values };
  for (const key of REQUIRED_ENV_KEYS) {
    merged[key] ??= '';
  }
  return merged;
}
|
|
49
|
+
|
|
50
|
+
/**
 * Serialize env values to dotenv format: one KEY=value line per entry,
 * required keys first (see envSortKey), trailing newline.
 *
 * @param {Record<string, string>} values - Env values to serialize.
 * @returns {string} File contents for a .env file.
 */
export function serializeEnv(values) {
  const lines = Object.keys(values)
    .sort(envSortKey)
    .map((key) => `${key}=${values[key] ?? ''}`);
  return `${lines.join('\n')}\n`;
}
|
|
54
|
+
|
|
55
|
+
/**
 * Normalize, serialize, and write env values to envFile, creating the
 * parent directory if necessary.
 *
 * @param {string} envFile - Destination .env path.
 * @param {Record<string, string>} values - Env values to persist.
 */
export async function writeEnvFile(envFile, values) {
  await mkdir(dirname(envFile), { recursive: true });
  const contents = serializeEnv(normalizeEnv(values));
  await writeFile(envFile, contents, 'utf-8');
}
|
|
59
|
+
|
|
60
|
+
/**
 * Extract the host (including port, if present) from a redirect URI.
 * Returns null for falsy input or anything that is not a parseable URL.
 *
 * @param {string} redirectUri - e.g. "https://collab.example.com/auth/callback".
 * @returns {string|null} Host portion, or null.
 */
export function inferDomainFromRedirect(redirectUri) {
  if (!redirectUri) return null;
  try {
    return new URL(redirectUri).host;
  } catch {
    return null;
  }
}
|
|
69
|
+
|
|
70
|
+
/**
 * Validate a normalized env map and return a list of human-readable issues
 * (empty array means valid).
 *
 * Checks: every REQUIRED_ENV_KEY is non-blank; PORT and YJS_PORT are valid
 * TCP port numbers; DISCORD_REDIRECT_URI is an http(s) URL.
 *
 * Improvement over the original: ports were only checked for `> 0`, so
 * values like 99999 passed; they are now bounded to the valid TCP range
 * 1-65535, and the duplicated port logic is factored into one helper.
 *
 * @param {Record<string, string>} values - Env values to validate.
 * @returns {string[]} Issue descriptions; empty when everything is valid.
 */
export function validateEnvValues(values) {
  const issues = [];
  for (const key of REQUIRED_ENV_KEYS) {
    if (!String(values[key] ?? '').trim()) {
      issues.push(`Missing ${key}`);
    }
  }

  // Both ports share the same rule: integer in the valid TCP port range.
  const checkPort = (name) => {
    const port = Number.parseInt(values[name] ?? '', 10);
    if (!Number.isInteger(port) || port <= 0 || port > 65535) {
      issues.push(`${name} must be an integer between 1 and 65535`);
    }
  };
  checkPort('PORT');
  checkPort('YJS_PORT');

  try {
    const uri = new URL(values.DISCORD_REDIRECT_URI ?? '');
    if (!/^https?:$/.test(uri.protocol)) {
      issues.push('DISCORD_REDIRECT_URI must use http or https');
    }
  } catch {
    issues.push('DISCORD_REDIRECT_URI must be a valid URL');
  }

  return issues;
}
|
|
94
|
+
|
|
95
|
+
/**
 * Report whether the vault path exists and is accessible.
 * Falsy input resolves to false; never rejects.
 *
 * NOTE(review): this duplicates pathExists() in cli/checks.js — consider
 * reusing that helper instead.
 *
 * @param {string} pathValue - Vault path from the env file.
 * @returns {Promise<boolean>} true when access() succeeds.
 */
export async function ensureVaultPathReadable(pathValue) {
  if (!pathValue) return false;
  try {
    await access(pathValue);
  } catch {
    return false;
  }
  return true;
}
|
|
104
|
+
|
|
105
|
+
/**
 * Collect env values, write them to envFile, and return the final map.
 *
 * Precedence: preset overrides existing, both over DEFAULT_ENV_VALUES.
 * With `yes: true` the merged values are written without prompting;
 * otherwise each field is prompted interactively (secrets use password
 * input), with the current value offered as the default.
 *
 * @param {object} opts
 * @param {string} opts.envFile - Destination .env path.
 * @param {Record<string, string>} opts.existing - Values already on disk.
 * @param {boolean} [opts.yes=false] - Skip prompts and accept current values.
 * @param {Record<string, string>} [opts.preset={}] - Values forced by the caller.
 * @returns {Promise<Record<string, string>>} The persisted env map.
 * @throws {CliError} When the user cancels a prompt.
 */
export async function promptForEnv({ envFile, existing, yes = false, preset = {} }) {
  const current = normalizeEnv({ ...existing, ...preset });

  // Non-interactive mode: persist the merged values as-is.
  if (yes) {
    await writeEnvFile(envFile, current);
    return current;
  }

  // [key, prompt label, is-secret]
  const fields = [
    ['DISCORD_CLIENT_ID', 'Discord Client ID', false],
    ['DISCORD_CLIENT_SECRET', 'Discord Client Secret', true],
    ['DISCORD_GUILD_ID', 'Discord Guild ID', false],
    ['JWT_SECRET', 'JWT secret', true],
    ['VAULT_PATH', 'Vault absolute path', false],
    ['PORT', 'HTTP port', false],
    ['YJS_PORT', 'Yjs WS port', false],
    ['DISCORD_REDIRECT_URI', 'Discord redirect URI', false],
  ];

  const answers = {};
  for (const [name, message, secret] of fields) {
    const response = await prompts({
      type: secret ? 'password' : 'text',
      name: 'value',
      message,
      initial: current[name] ?? '',
    });
    // prompts returns undefined for the value when the user aborts (Ctrl-C).
    if (response.value === undefined) {
      throw new CliError('Cancelled by user');
    }
    answers[name] = String(response.value ?? '').trim();
  }

  const merged = normalizeEnv({ ...current, ...answers });
  await writeEnvFile(envFile, merged);
  return merged;
}
|
package/cli/errors.js
ADDED
package/cli/exec.js
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import { execa } from 'execa';
|
|
2
|
+
|
|
3
|
+
/**
 * Execute a command via execa, defaulting stdio to 'pipe' (captured output).
 *
 * Fix: the original wrote `{ stdio: options.stdio ?? 'pipe', ...options }`,
 * so an options object with an explicitly-undefined `stdio` key re-clobbered
 * the default via the later spread. Spreading first ensures the computed
 * default always wins.
 *
 * @param {string} cmd - Executable to run.
 * @param {string[]} [args=[]] - Command arguments.
 * @param {object} [options={}] - execa options; stdio defaults to 'pipe'.
 * @returns {Promise<object>} The execa result.
 */
export async function run(cmd, args = [], options = {}) {
  return execa(cmd, args, {
    ...options,
    stdio: options.stdio ?? 'pipe',
  });
}
|
|
9
|
+
|
|
10
|
+
/**
 * Execute a command with stdio inherited from the parent process
 * (streams straight to the operator's terminal).
 *
 * @param {string} cmd - Executable to run.
 * @param {string[]} [args=[]] - Command arguments.
 * @param {object} [options={}] - Extra execa options; stdio is forced to 'inherit'.
 * @returns {Promise<object>} The execa result.
 */
export async function runInherit(cmd, args = [], options = {}) {
  const opts = { ...options, stdio: 'inherit' };
  return run(cmd, args, opts);
}
|